Dataset columns:

| Column | Dtype | Lengths / values |
|---|---|---|
| hexsha | string | length 40 |
| size | int64 | 1 to 1.03M |
| ext | string | 10 classes |
| lang | string | 1 class |
| max_stars_repo_path | string | length 3 to 239 |
| max_stars_repo_name | string | length 5 to 130 |
| max_stars_repo_head_hexsha | string | length 40 to 78 |
| max_stars_repo_licenses | sequence | length 1 to 10 |
| max_stars_count | int64 | 1 to 191k, nullable (⌀) |
| max_stars_repo_stars_event_min_datetime | string | length 24, nullable (⌀) |
| max_stars_repo_stars_event_max_datetime | string | length 24, nullable (⌀) |
| max_issues_repo_path | string | length 3 to 239 |
| max_issues_repo_name | string | length 5 to 130 |
| max_issues_repo_head_hexsha | string | length 40 to 78 |
| max_issues_repo_licenses | sequence | length 1 to 10 |
| max_issues_count | int64 | 1 to 67k, nullable (⌀) |
| max_issues_repo_issues_event_min_datetime | string | length 24, nullable (⌀) |
| max_issues_repo_issues_event_max_datetime | string | length 24, nullable (⌀) |
| max_forks_repo_path | string | length 3 to 239 |
| max_forks_repo_name | string | length 5 to 130 |
| max_forks_repo_head_hexsha | string | length 40 to 78 |
| max_forks_repo_licenses | sequence | length 1 to 10 |
| max_forks_count | int64 | 1 to 105k, nullable (⌀) |
| max_forks_repo_forks_event_min_datetime | string | length 24, nullable (⌀) |
| max_forks_repo_forks_event_max_datetime | string | length 24, nullable (⌀) |
| content | string | length 1 to 1.03M |
| avg_line_length | float64 | 1 to 958k |
| max_line_length | int64 | 1 to 1.03M |
| alphanum_fraction | float64 | 0 to 1 |
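This column listing matches the auto-generated preview that the Hugging Face datasets viewer produces for large source-code corpora. As a minimal sketch of how rows with these columns can be inspected, assuming the data is published as a Hugging Face dataset, the snippet below streams a couple of records; the dataset identifier and `data_dir` are placeholder assumptions, not taken from this page.

```python
from itertools import islice

from datasets import load_dataset

# Placeholder dataset id and directory, assumed for illustration only;
# substitute the actual dataset behind this preview.
ds = load_dataset(
    "bigcode/the-stack",
    data_dir="data/python",
    split="train",
    streaming=True,  # stream records instead of downloading the full corpus
)

for record in islice(ds, 2):
    # Field names follow the schema documented above.
    print(record["hexsha"], record["size"], record["max_stars_repo_name"])
    print(record["avg_line_length"], record["max_line_length"], record["alphanum_fraction"])
    print(record["content"][:120])  # first characters of the stored source file
```

The sample records below follow the same schema; each record's metadata is listed first, followed by the full content field.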
hexsha: 4a22bdbe4ac1fa1d87f5161538c519627fe16ec1 | size: 720 | ext: py | lang: Python
max_stars_repo_path: source/lambda/es_loader/siem/sf_networkfirewall.py | max_stars_repo_name: acsrujan/siem-on-amazon-opensearch-service | max_stars_repo_head_hexsha: 828bd90aa6720a2ce05aa75925a5ad5035d5427a | max_stars_repo_licenses: ["MIT-0"] | max_stars_count: 64 | max_stars_repo_stars_event_min_datetime: 2021-04-18T06:34:40.000Z | max_stars_repo_stars_event_max_datetime: 2021-09-13T02:02:05.000Z
max_issues_repo_path: source/lambda/es_loader/siem/sf_networkfirewall.py | max_issues_repo_name: acsrujan/siem-on-amazon-opensearch-service | max_issues_repo_head_hexsha: 828bd90aa6720a2ce05aa75925a5ad5035d5427a | max_issues_repo_licenses: ["MIT-0"] | max_issues_count: 73 | max_issues_repo_issues_event_min_datetime: 2021-04-28T02:25:09.000Z | max_issues_repo_issues_event_max_datetime: 2021-09-13T02:49:38.000Z
max_forks_repo_path: source/lambda/es_loader/siem/sf_networkfirewall.py | max_forks_repo_name: acsrujan/siem-on-amazon-opensearch-service | max_forks_repo_head_hexsha: 828bd90aa6720a2ce05aa75925a5ad5035d5427a | max_forks_repo_licenses: ["MIT-0"] | max_forks_count: 31 | max_forks_repo_forks_event_min_datetime: 2021-04-20T03:00:39.000Z | max_forks_repo_forks_event_max_datetime: 2021-09-09T04:49:58.000Z
content:
# Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved.
# SPDX-License-Identifier: MIT-0
__copyright__ = ('Copyright Amazon.com, Inc. or its affiliates. '
'All Rights Reserved.')
__version__ = '2.6.0'
__license__ = 'MIT-0'
__author__ = 'Akihiro Nakajima'
__url__ = 'https://github.com/aws-samples/siem-on-amazon-opensearch-service'
def transform(logdata):
try:
# event.proto: TCP, UDP, ICMP
logdata['network']['transport'] = logdata['event']['proto'].lower()
except KeyError:
pass
if logdata['event']['event_type'] == 'alert':
logdata['event']['kind'] = 'alert'
logdata['event']['category'] = 'intrusion_detection'
return logdata
avg_line_length: 31.304348 | max_line_length: 76 | alphanum_fraction: 0.645833

hexsha: 4a22bf5493f6c27c1cd01d9b5793072997b96a62 | size: 6,943 | ext: py | lang: Python
max_stars_repo_path: azure_monitor/src/azure_monitor/storage.py | max_stars_repo_name: yao-cqc/opentelemetry-azure-monitor-python | max_stars_repo_head_hexsha: ecd0cd1d323a510be32cf0b1213e82e6d01e74e4 | max_stars_repo_licenses: ["MIT"] | max_stars_count: 13 | max_stars_repo_stars_event_min_datetime: 2020-04-03T17:17:45.000Z | max_stars_repo_stars_event_max_datetime: 2021-06-08T15:23:03.000Z
max_issues_repo_path: azure_monitor/src/azure_monitor/storage.py | max_issues_repo_name: yao-cqc/opentelemetry-azure-monitor-python | max_issues_repo_head_hexsha: ecd0cd1d323a510be32cf0b1213e82e6d01e74e4 | max_issues_repo_licenses: ["MIT"] | max_issues_count: 72 | max_issues_repo_issues_event_min_datetime: 2020-03-24T10:42:06.000Z | max_issues_repo_issues_event_max_datetime: 2021-01-28T23:39:42.000Z
max_forks_repo_path: azure_monitor/src/azure_monitor/storage.py | max_forks_repo_name: microsoft/azure-monitor-opentelemetry-exporters-python | max_forks_repo_head_hexsha: 3bd8b514ebef803ae622e308f867799cfab9dc5c | max_forks_repo_licenses: ["MIT"] | max_forks_count: 11 | max_forks_repo_forks_event_min_datetime: 2020-04-27T20:01:31.000Z | max_forks_repo_forks_event_max_datetime: 2021-11-02T14:54:14.000Z
content:
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License.
import datetime
import json
import logging
import os
import random
from azure_monitor.utils import PeriodicTask
logger = logging.getLogger(__name__)
def _fmt(timestamp):
return timestamp.strftime("%Y-%m-%dT%H%M%S.%f")
def _now():
return datetime.datetime.utcnow()
def _seconds(seconds):
return datetime.timedelta(seconds=seconds)
# pylint: disable=broad-except
class LocalFileBlob:
def __init__(self, fullpath):
self.fullpath = fullpath
def delete(self):
try:
os.remove(self.fullpath)
except Exception:
pass # keep silent
def get(self):
try:
with open(self.fullpath, "r") as file:
return tuple(
json.loads(line.strip()) for line in file.readlines()
)
except Exception:
pass # keep silent
def put(self, data, lease_period=0):
try:
fullpath = self.fullpath + ".tmp"
with open(fullpath, "w") as file:
for item in data:
file.write(json.dumps(item))
# The official Python doc: Do not use os.linesep as a line
# terminator when writing files opened in text mode (the
# default); use a single '\n' instead, on all platforms.
file.write("\n")
if lease_period:
timestamp = _now() + _seconds(lease_period)
self.fullpath += "@{}.lock".format(_fmt(timestamp))
os.rename(fullpath, self.fullpath)
return self
except Exception:
pass # keep silent
def lease(self, period):
timestamp = _now() + _seconds(period)
fullpath = self.fullpath
if fullpath.endswith(".lock"):
fullpath = fullpath[: fullpath.rindex("@")]
fullpath += "@{}.lock".format(_fmt(timestamp))
try:
os.rename(self.fullpath, fullpath)
except Exception:
return None
self.fullpath = fullpath
return self
# pylint: disable=broad-except
class LocalFileStorage:
def __init__(
self,
path,
max_size=50 * 1024 * 1024, # 50MiB
maintenance_period=60, # 1 minute
retention_period=7 * 24 * 60 * 60, # 7 days
write_timeout=60, # 1 minute
):
self.path = os.path.abspath(path)
self.max_size = max_size
self.maintenance_period = maintenance_period
self.retention_period = retention_period
self.write_timeout = write_timeout
self._maintenance_routine()
self._maintenance_task = PeriodicTask(
interval=self.maintenance_period,
function=self._maintenance_routine,
)
self._maintenance_task.daemon = True
self._maintenance_task.start()
def close(self):
self._maintenance_task.cancel()
self._maintenance_task.join()
def __enter__(self):
return self
# pylint: disable=redefined-builtin
def __exit__(self, type, value, traceback):
self.close()
# pylint: disable=unused-variable
def _maintenance_routine(self):
try:
if not os.path.isdir(self.path):
os.makedirs(self.path, exist_ok=True)
except Exception:
pass # keep silent
try:
# pylint: disable=unused-variable
for blob in self.gets():
pass # keep silent
except Exception:
pass # keep silent
def gets(self):
now = _now()
lease_deadline = _fmt(now)
retention_deadline = _fmt(now - _seconds(self.retention_period))
timeout_deadline = _fmt(now - _seconds(self.write_timeout))
for name in sorted(os.listdir(self.path)):
path = os.path.join(self.path, name)
if not os.path.isfile(path):
continue # skip if not a file
if path.endswith(".tmp"):
if name < timeout_deadline:
try:
os.remove(path) # TODO: log data loss
except Exception:
pass # keep silent
if path.endswith(".lock"):
if path[path.rindex("@") + 1 : -5] > lease_deadline:
continue # under lease
new_path = path[: path.rindex("@")]
try:
os.rename(path, new_path)
except Exception:
continue # keep silent
path = new_path
if path.endswith(".blob"):
if name < retention_deadline:
try:
os.remove(path) # TODO: log data loss
except Exception:
pass # keep silent
else:
yield LocalFileBlob(path)
def get(self):
cursor = self.gets()
try:
return next(cursor)
except StopIteration:
pass
return None
def put(self, data, lease_period=0):
if not self._check_storage_size():
return None
blob = LocalFileBlob(
os.path.join(
self.path,
"{}-{}.blob".format(
_fmt(_now()),
"{:08x}".format(
random.getrandbits(32)
), # thread-safe random
),
)
)
return blob.put(data, lease_period=lease_period)
def _check_storage_size(self):
size = 0
# pylint: disable=unused-variable
for dirpath, dirnames, filenames in os.walk(self.path):
for filename in filenames:
path = os.path.join(dirpath, filename)
# skip if it is symbolic link
if not os.path.islink(path):
try:
size += os.path.getsize(path)
except OSError:
logger.error(
"Path %s does not exist or is " "inaccessible.",
path,
)
continue
if size >= self.max_size:
# pylint: disable=logging-format-interpolation
logger.warning(
"Persistent storage max capacity has been "
"reached. Currently at {}KB. Telemetry will be "
"lost. Please consider increasing the value of "
"'storage_max_size' in exporter config.".format(
str(size / 1024)
)
)
return False
return True
avg_line_length: 32.75 | max_line_length: 78 | alphanum_fraction: 0.512747

hexsha: 4a22bf8e2cf50698c3ba61be499286466de75e27 | size: 9,090 | ext: py | lang: Python
max_stars_repo_path: src/bin/shipyard_airflow/shipyard_airflow/policy.py | max_stars_repo_name: att-comdev/shipyard | max_stars_repo_head_hexsha: 14d66afb012025a5289818d8e8d2092ccce19ffa | max_stars_repo_licenses: ["Apache-2.0"] | max_stars_count: 14 | max_stars_repo_stars_event_min_datetime: 2017-07-05T14:12:51.000Z | max_stars_repo_stars_event_max_datetime: 2021-05-20T23:31:55.000Z
max_issues_repo_path: src/bin/shipyard_airflow/shipyard_airflow/policy.py | max_issues_repo_name: att-comdev/shipyard | max_issues_repo_head_hexsha: 14d66afb012025a5289818d8e8d2092ccce19ffa | max_issues_repo_licenses: ["Apache-2.0"] | max_issues_count: 14 | max_issues_repo_issues_event_min_datetime: 2017-06-09T04:33:44.000Z | max_issues_repo_issues_event_max_datetime: 2018-05-07T13:07:40.000Z
max_forks_repo_path: src/bin/shipyard_airflow/shipyard_airflow/policy.py | max_forks_repo_name: att-comdev/shipyard | max_forks_repo_head_hexsha: 14d66afb012025a5289818d8e8d2092ccce19ffa | max_forks_repo_licenses: ["Apache-2.0"] | max_forks_count: 16 | max_forks_repo_forks_event_min_datetime: 2017-05-18T15:22:22.000Z | max_forks_repo_forks_event_max_datetime: 2019-06-14T16:43:44.000Z
content:
# Copyright 2017 AT&T Intellectual Property. All other rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
import functools
import logging
import falcon
from oslo_config import cfg
from oslo_policy import policy
from shipyard_airflow.errors import ApiError, AppError
CONF = cfg.CONF
LOG = logging.getLogger(__name__)
policy_engine = None
class ShipyardPolicy(object):
"""
Initialize policy defaults
"""
RULE_ADMIN_REQUIRED = 'rule:admin_required'
# Base Policy
base_rules = [
policy.RuleDefault(
'admin_required',
'role:admin',
description='Actions requiring admin authority'),
]
# Orchestrator Policy
task_rules = [
policy.DocumentedRuleDefault(
'workflow_orchestrator:list_actions',
RULE_ADMIN_REQUIRED,
'List workflow actions invoked by users',
[{
'path': '/api/v1.0/actions',
'method': 'GET'
}]
),
policy.DocumentedRuleDefault(
'workflow_orchestrator:create_action',
RULE_ADMIN_REQUIRED,
'Create a workflow action',
[{
'path': '/api/v1.0/actions',
'method': 'POST'
}]
),
policy.DocumentedRuleDefault(
'workflow_orchestrator:get_action',
RULE_ADMIN_REQUIRED,
'Retrieve an action by its id',
[{
'path': '/api/v1.0/actions/{action_id}',
'method': 'GET'
}]
),
policy.DocumentedRuleDefault(
'workflow_orchestrator:get_action_step',
RULE_ADMIN_REQUIRED,
'Retrieve an action step by its id',
[{
'path': '/api/v1.0/actions/{action_id}/steps/{step_id}',
'method': 'GET'
}]
),
policy.DocumentedRuleDefault(
'workflow_orchestrator:get_action_step_logs',
RULE_ADMIN_REQUIRED,
'Retrieve logs of an action step by its id',
[{
'path': '/api/v1.0/actions/{action_id}/steps/{step_id}/logs',
'method': 'GET'
}]
),
policy.DocumentedRuleDefault(
'workflow_orchestrator:get_action_validation',
RULE_ADMIN_REQUIRED,
'Retrieve an action validation by its id',
[{
'path':
'/api/v1.0/actions/{action_id}/validations/{validation_id}',
'method': 'GET'
}]
),
policy.DocumentedRuleDefault(
'workflow_orchestrator:invoke_action_control',
RULE_ADMIN_REQUIRED,
'Send a control to an action',
[{
'path': '/api/v1.0/actions/{action_id}/control/{control_verb}',
'method': 'POST'
}]
),
policy.DocumentedRuleDefault(
'workflow_orchestrator:get_configdocs_status',
RULE_ADMIN_REQUIRED,
'Retrieve the status of the configdocs',
[{
'path': '/api/v1.0/configdocs',
'method': 'GET'
}]
),
policy.DocumentedRuleDefault(
'workflow_orchestrator:create_configdocs',
RULE_ADMIN_REQUIRED,
'Ingest configuration documents for the site design',
[{
'path': '/api/v1.0/configdocs/{collection_id}',
'method': 'POST'
}]
),
policy.DocumentedRuleDefault(
'workflow_orchestrator:get_configdocs',
RULE_ADMIN_REQUIRED,
'Retrieve a collection of configuration documents',
[{
'path': '/api/v1.0/configdocs/{collection_id}',
'method': 'GET'
}]
),
policy.DocumentedRuleDefault(
'workflow_orchestrator:commit_configdocs',
RULE_ADMIN_REQUIRED,
('Move documents from the Shipyard buffer to the committed '
'documents'),
[{
'path': '/api/v1.0/commitconfigdocs',
'method': 'POST'
}]
),
policy.DocumentedRuleDefault(
'workflow_orchestrator:get_renderedconfigdocs',
RULE_ADMIN_REQUIRED,
('Retrieve the configuration documents rendered by Deckhand into '
'a complete design'),
[{
'path': '/api/v1.0/renderedconfigdocs',
'method': 'GET'
}]
),
policy.DocumentedRuleDefault(
'workflow_orchestrator:list_workflows',
RULE_ADMIN_REQUIRED,
('Retrieve the list of workflows (DAGs) that have been invoked '
'in Airflow, whether via Shipyard or scheduled'),
[{
'path': '/api/v1.0/workflows',
'method': 'GET'
}]
),
policy.DocumentedRuleDefault(
'workflow_orchestrator:get_workflow',
RULE_ADMIN_REQUIRED,
('Retrieve the detailed information for a workflow (DAG) from '
'Airflow'),
[{
'path': '/api/v1.0/workflows/{id}',
'method': 'GET'
}]
),
]
# Regions Policy
def __init__(self):
self.enforcer = policy.Enforcer(cfg.CONF)
def register_policy(self):
self.enforcer.register_defaults(ShipyardPolicy.base_rules)
self.enforcer.register_defaults(ShipyardPolicy.task_rules)
def authorize(self, action, ctx):
target = {'project_id': ctx.project_id, 'user_id': ctx.user_id}
return self.enforcer.authorize(action, target, ctx.to_policy_view())
class ApiEnforcer(object):
"""
A decorator class for enforcing RBAC policies
"""
def __init__(self, action):
self.action = action
self.logger = LOG
def __call__(self, f):
@functools.wraps(f)
def secure_handler(slf, req, resp, *args, **kwargs):
ctx = req.context
policy_eng = ctx.policy_engine
LOG.info("Policy Engine: %s", policy_eng.__class__.__name__)
# perform auth
LOG.info("Enforcing policy %s on request %s",
self.action, ctx.request_id)
# policy engine must be configured
if policy_eng is None:
LOG.error(
"Error-Policy engine required-action: %s", self.action)
raise AppError(
title="Auth is not being handled by any policy engine",
status=falcon.HTTP_500,
retry=False
)
authorized = False
try:
if policy_eng.authorize(self.action, ctx):
# authorized
LOG.info("Request is authorized")
authorized = True
except:
# couldn't service the auth request
LOG.error(
"Error - Expectation Failed - action: %s", self.action)
raise ApiError(
title="Expectation Failed",
status=falcon.HTTP_417,
retry=False
)
if authorized:
return f(slf, req, resp, *args, **kwargs)
else:
LOG.error("Auth check failed. Authenticated:%s",
ctx.authenticated)
# raise the appropriate response exeception
if ctx.authenticated:
LOG.error("Error: Forbidden access - action: %s",
self.action)
raise ApiError(
title="Forbidden",
status=falcon.HTTP_403,
description="Credentials do not permit access",
retry=False
)
else:
LOG.error("Error - Unauthenticated access")
raise ApiError(
title="Unauthenticated",
status=falcon.HTTP_401,
description="Credentials are not established",
retry=False
)
return secure_handler
def list_policies():
default_policy = []
default_policy.extend(ShipyardPolicy.base_rules)
default_policy.extend(ShipyardPolicy.task_rules)
return default_policy
avg_line_length: 33.91791 | max_line_length: 79 | alphanum_fraction: 0.532893

hexsha: 4a22bfe4f7b0e633a9583dfa067b497243c59323 | size: 39,431 | ext: py | lang: Python
max_stars_repo_path: tests/unit/synapseclient/core/upload/unit_test_multipart_upload.py | max_stars_repo_name: talkdirty/synapsePythonClient | max_stars_repo_head_hexsha: 436504fe72ab1e85737a47a0d8d2eb3cfc563d37 | max_stars_repo_licenses: ["Apache-2.0"] | max_stars_count: 45 | max_stars_repo_stars_event_min_datetime: 2015-01-13T16:42:44.000Z | max_stars_repo_stars_event_max_datetime: 2022-03-13T08:51:47.000Z
max_issues_repo_path: tests/unit/synapseclient/core/upload/unit_test_multipart_upload.py | max_issues_repo_name: talkdirty/synapsePythonClient | max_issues_repo_head_hexsha: 436504fe72ab1e85737a47a0d8d2eb3cfc563d37 | max_issues_repo_licenses: ["Apache-2.0"] | max_issues_count: 235 | max_issues_repo_issues_event_min_datetime: 2015-01-07T23:07:04.000Z | max_issues_repo_issues_event_max_datetime: 2022-03-29T20:48:18.000Z
max_forks_repo_path: tests/unit/synapseclient/core/upload/unit_test_multipart_upload.py | max_forks_repo_name: thomasyu888/synapsePythonClient | max_forks_repo_head_hexsha: e04213d1437bdd0eda9a292e97926a2f566d2d5b | max_forks_repo_licenses: ["Apache-2.0"] | max_forks_count: 58 | max_forks_repo_forks_event_min_datetime: 2015-04-10T16:23:01.000Z | max_forks_repo_forks_event_max_datetime: 2022-03-28T20:14:11.000Z
content:
from concurrent.futures import Future
import hashlib
import json
import requests
import pytest
from unittest import mock
from synapseclient import Synapse
from synapseclient.core.exceptions import (
SynapseHTTPError,
SynapseUploadAbortedException,
SynapseUploadFailedException,
)
import synapseclient.core.upload.multipart_upload as multipart_upload
from synapseclient.core.upload.multipart_upload import (
DEFAULT_PART_SIZE,
MIN_PART_SIZE,
_multipart_upload,
multipart_copy,
multipart_upload_file,
multipart_upload_string,
pool_provider,
UploadAttempt,
)
class TestUploadAttempt:
dest_file_name = 'target.txt'
content_type = 'application/text'
part_size = 256
file_size = 1024
md5_hex = 'abc'
preview = False
storage_location_id = '1234'
def _init_upload_attempt(self, syn):
upload_request_payload = {
'concreteType': 'org.sagebionetworks.repo.model.file.MultipartUploadRequest',
'contentMD5Hex': self.md5_hex,
'contentType': self.content_type,
'fileName': self.dest_file_name,
'fileSizeBytes': self.file_size,
'generatePreview': self.preview,
'storageLocationId': self.storage_location_id,
'partSizeBytes': self.part_size,
}
def part_request_body_provider_fn(part_number):
return (f"{part_number}" * self.part_size).encode('utf-8')
def md5_fn(part, _):
md5 = hashlib.md5()
md5.update(part)
return md5.hexdigest()
max_threads = 8
force_restart = True
return UploadAttempt(
syn,
self.dest_file_name,
upload_request_payload,
part_request_body_provider_fn,
md5_fn,
max_threads,
force_restart=force_restart,
)
def test_create_synapse_upload(self, syn):
upload = self._init_upload_attempt(syn)
expected_upload_request = {
'concreteType': 'org.sagebionetworks.repo.model.file.MultipartUploadRequest',
'contentMD5Hex': self.md5_hex,
'contentType': self.content_type,
'fileName': self.dest_file_name,
'fileSizeBytes': self.file_size,
'generatePreview': self.preview,
'storageLocationId': self.storage_location_id,
'partSizeBytes': self.part_size,
}
expected_uri = "/file/multipart?forceRestart={}".format(
str(upload._force_restart).lower()
)
response = mock.Mock()
with mock.patch.object(syn, 'restPOST') as restPOST:
restPOST.return_value = response
assert upload._create_synapse_upload() == response
restPOST.assert_called_once_with(
expected_uri,
json.dumps(expected_upload_request),
endpoint=upload._syn.fileHandleEndpoint,
)
def test_fetch_presigned_urls(self, syn):
upload = self._init_upload_attempt(syn)
upload_id = '1234'
part_numbers = [1, 3]
session = mock.Mock()
response = {
'partPresignedUrls': [
{
'partNumber': 1,
'uploadPresignedUrl': 'https://foo.com/1',
'signedHeaders': {'a': 'b'},
},
{
'partNumber': 3,
'uploadPresignedUrl': 'https://foo.com/3',
'signedHeaders': {'c': 'd'},
}
]
}
with mock.patch.object(syn, 'restPOST') as restPOST:
restPOST.return_value = response
expected_return = {
i['partNumber']: (i['uploadPresignedUrl'], i['signedHeaders'])
for i in response['partPresignedUrls']
}
pre_signed_urls = upload._fetch_pre_signed_part_urls(
upload_id,
part_numbers,
requests_session=session,
)
assert pre_signed_urls == expected_return
expected_uri =\
"/file/multipart/{upload_id}/presigned/url/batch".format(
upload_id=upload_id
)
expected_body = {
'uploadId': upload_id,
'partNumbers': part_numbers,
}
restPOST.assert_called_once_with(
expected_uri,
json.dumps(expected_body),
requests_session=session,
endpoint=upload._syn.fileHandleEndpoint,
)
def test_refresh_presigned_part_url__fetch_required(self, syn):
"""Verify that when calling the refresh function that if the
url that was passed as expired is that last known available
url for the given part number then we know that no other
thread has already refreshed and this thread needs to do so."""
upload = self._init_upload_attempt(syn)
part_number = 2
current_url = "http://bar.com{}".format(part_number)
original_presigned_urls = {
2: current_url,
}
upload._pre_signed_part_urls = original_presigned_urls
pre_signed_url = "http://foo.com/{}".format(part_number)
with mock.patch.object(
upload,
'_fetch_pre_signed_part_urls',
) as fetch_urls:
fetch_urls.return_value = {
part_number: pre_signed_url,
}
# the passed url that expired is the same that as the last
# one available for this part number, so a refresh is required.
refreshed_url = upload._refresh_pre_signed_part_urls(
part_number,
current_url,
)
assert refreshed_url == pre_signed_url
fetch_urls.assert_called_once_with(
upload._upload_id,
list(original_presigned_urls.keys())
)
def test_refresh_presigned_part_url__no_fetch_required(self, syn):
"""Test that if another thread already refreshed all the
signed urls after this thread's url was detected as expired
then we don't need to fetch new urls from synapse."""
upload = self._init_upload_attempt(syn)
part_number = 2
current_url = "http://bar.com{}".format(part_number)
original_presigned_urls = {
2: current_url,
}
upload._pre_signed_part_urls = original_presigned_urls
pre_signed_url = "http://foo.com/{}".format(part_number)
with mock.patch.object(
upload,
'_fetch_pre_signed_part_urls',
) as fetch_urls:
# the passed url that expired is the same that as the last
# one available for this part number, so a refresh is required.
refreshed_url = upload._refresh_pre_signed_part_urls(
part_number,
pre_signed_url,
)
# should return the new url already on file without having
# to have made a remote call.
assert refreshed_url == current_url
fetch_urls.assert_not_called()
def test_handle_part_aborted(self, syn):
"""Verify that handle part processing short circuits when
the upload attempt has already been aborted."""
upload = self._init_upload_attempt(syn)
upload._aborted = True
with pytest.raises(SynapseUploadAbortedException):
upload._handle_part(5)
def test_handle_part__500(self, syn):
"""Test that we retry if we encounter a 500 from AWS on a PUT to the signed URL"""
upload = self._init_upload_attempt(syn)
upload._upload_id = '123'
part_number = 1
chunk = b'1' * TestUploadAttempt.part_size
pre_signed_url = 'https://foo.com/1'
signed_headers = {'a': 1}
upload._pre_signed_part_urls = {part_number: (pre_signed_url, signed_headers)}
mock_500 = mock.MagicMock(spec=requests.Response)
mock_500.status_code = 500
mock_500.headers = {}
mock_500.reason = ''
self._handle_part_success_test(
syn,
upload,
part_number,
pre_signed_url,
# initial call is expired and results in a 500
# second call is successful
[
(
mock.call(pre_signed_url, chunk, headers=signed_headers),
mock_500
),
(
mock.call(pre_signed_url, chunk, headers=signed_headers),
mock.Mock(status_code=200)
),
],
chunk,
None
)
def test_handle_part__connection_error(self, syn):
"""Test that we retry if we encounter a ConnectionError on a reqeust to PUT to an AWS presigend url"""
upload = self._init_upload_attempt(syn)
upload._upload_id = '123'
part_number = 1
chunk = b'1' * TestUploadAttempt.part_size
pre_signed_url = 'https://foo.com/1'
signed_headers = {'a': 1}
upload._pre_signed_part_urls = {part_number: (pre_signed_url, signed_headers)}
self._handle_part_success_test(
syn,
upload,
part_number,
pre_signed_url,
# initial call is expired and results in a 500
# second call is successful
[
(
mock.call(pre_signed_url, chunk, headers=signed_headers),
requests.exceptions.ConnectionError('aborted')
),
(
mock.call(pre_signed_url, chunk, headers=signed_headers),
mock.Mock(status_code=200)
),
],
chunk,
None
)
def _handle_part_success_test(
self,
syn,
upload,
part_number,
expired_url,
aws_calls,
chunk,
refresh_url_response,
):
mock_session = mock.Mock()
md5_hex = hashlib.md5(chunk).hexdigest()
with mock.patch.object(multipart_upload, '_get_file_chunk') \
as chunk_fn, \
mock.patch.object(upload, '_get_thread_session') \
as get_session, \
mock.patch.object(upload, '_refresh_pre_signed_part_urls') \
as refresh_urls, \
mock.patch.object(syn, 'restPUT'):
get_session.return_value = mock_session
chunk_fn.return_value = chunk
refresh_urls.return_value = refresh_url_response
mock_session.put.side_effect = [
aws_call[1] for aws_call in aws_calls
]
result = upload._handle_part(1)
expected_put_calls = [
aws_call[0] for aws_call in aws_calls
]
assert (
mock_session.put.call_args_list ==
expected_put_calls)
assert result == (part_number, len(chunk))
if refresh_url_response:
refresh_urls.assert_called_once_with(
part_number,
expired_url,
)
else:
assert not refresh_urls.called
syn.restPUT.assert_called_once_with(
"/file/multipart/{upload_id}/add/{part_number}?partMD5Hex={md5}"
.format(
upload_id=upload._upload_id,
part_number=part_number,
md5=md5_hex,
),
requests_session=mock_session,
endpoint=upload._syn.fileHandleEndpoint
)
assert part_number not in upload._pre_signed_part_urls
def test_handle_part_success(self, syn):
"""Verify behavior of a successful processing of a part.
Part bytes should be uploaded to aws, and """
upload = self._init_upload_attempt(syn)
upload._upload_id = '123'
part_number = 1
chunk = b'1' * TestUploadAttempt.part_size
pre_signed_url_1 = 'https://foo.com/1'
signed_headers_1 = {'a': 'b'}
upload._pre_signed_part_urls = {part_number: (pre_signed_url_1, signed_headers_1)}
self._handle_part_success_test(
syn,
upload,
part_number,
pre_signed_url_1,
[(mock.call(pre_signed_url_1, chunk, headers=signed_headers_1), mock.Mock(status_code=200))],
chunk,
None,
)
def test_handle_part_expired_url(self, syn):
"""An initial 403 when invoking a presigned url indicates its
expired, verify that we recovery by refreshing the urls and
invoking the refreshed url."""
upload = self._init_upload_attempt(syn)
upload._upload_id = '123'
part_number = 1
chunk = b'1' * TestUploadAttempt.part_size
pre_signed_url_1 = 'https://foo.com/1'
signed_headers_1 = {'a': 1}
pre_signed_url_2 = 'https://bar.com/1'
signed_headers_2 = {'a': 2}
upload._pre_signed_part_urls = {part_number: (pre_signed_url_1, signed_headers_1)}
mock_403 = mock.MagicMock(spec=requests.Response)
mock_403.status_code = 403
mock_403.headers = {}
mock_403.reason = ''
self._handle_part_success_test(
syn,
upload,
part_number,
pre_signed_url_1,
# initial call is expired and results in a 403
# second call is successful
[
(
mock.call(pre_signed_url_1, chunk, headers=signed_headers_1),
mock_403
),
(
mock.call(pre_signed_url_2, chunk, headers=signed_headers_2),
mock.Mock(status_code=200)
),
],
chunk,
(pre_signed_url_2, signed_headers_2)
)
def test_handle_part__url_expired_twice(self, syn):
"""Verify that consecutive attempts to upload a part resulting
in a 403 from AWS results in the expected error."""
upload = self._init_upload_attempt(syn)
upload._upload_id = '123'
part_number = 1
chunk = b'1' * TestUploadAttempt.part_size
pre_signed_url_1 = 'https://foo.com/1'
pre_signed_url_2 = 'https://bar.com/1'
signed_headers = {'a': 1}
upload._pre_signed_part_urls = {part_number: (pre_signed_url_1, signed_headers)}
mock_session = mock.Mock()
with mock.patch.object(multipart_upload, '_get_file_chunk')\
as chunk_fn,\
mock.patch.object(upload, '_get_thread_session')\
as get_session,\
mock.patch.object(upload, '_refresh_pre_signed_part_urls')\
as refresh_urls:
get_session.return_value = mock_session
chunk_fn.return_value = chunk
refresh_urls.side_effect = [
(url, signed_headers) for url in [
pre_signed_url_1,
pre_signed_url_2,
]
]
mock_response = mock.MagicMock(spec=requests.Response)
mock_response.status_code = 403
mock_response.headers = {}
mock_response.reason = ''
mock_session.put.return_value = mock_response
with pytest.raises(SynapseHTTPError):
upload._handle_part(1)
def test_call_upload(self, syn):
"""Verify the behavior of an upload call, it should trigger
calls to handle each of the individual outstanding parts
and then call Synapse indicating that the upload is complete"""
upload = self._init_upload_attempt(syn)
upload_id = '1234'
parts_state = '010'
upload_status = {
'uploadId': upload_id,
'partsState': parts_state,
}
pre_signed_urls = {
1: 'http://foo.com/1',
3: 'http://foo.com/3',
}
futures = []
for i in pre_signed_urls.keys():
future = Future()
future.set_result((i, upload._part_size))
futures.append(future)
with mock.patch.object(upload, '_create_synapse_upload')\
as create_synapse_upload,\
mock.patch.object(upload, '_fetch_pre_signed_part_urls')\
as fetch_pre_signed_urls,\
mock.patch.object(pool_provider, 'get_executor')\
as get_executor,\
mock.patch.object(upload, '_get_thread_session')\
as get_session,\
mock.patch.object(syn, 'restPUT')\
as restPUT:
mock_session = mock.Mock()
get_session.return_value = mock_session
create_synapse_upload.return_value = upload_status
fetch_pre_signed_urls.return_value = pre_signed_urls
get_executor.return_value.submit.side_effect = futures
upload_response = {
'state': 'COMPLETED'
}
restPUT.return_value = upload_response
result = upload()
assert result == upload_response
restPUT.assert_called_once_with(
"/file/multipart/{upload_id}/complete".format(
upload_id=upload_id
),
requests_session=mock_session,
endpoint=upload._syn.fileHandleEndpoint,
)
def _test_call_upload__part_exception(
self,
syn,
part_exception,
expected_raised_exception
):
upload = self._init_upload_attempt(syn)
upload_id = '1234'
parts_state = '0'
upload_status = {
'uploadId': upload_id,
'partsState': parts_state,
}
pre_signed_urls = {
1: 'http://foo.com/1',
}
future = Future()
future.set_exception(part_exception())
with mock.patch.object(upload, '_create_synapse_upload')\
as create_synapse_upload,\
mock.patch.object(upload, '_fetch_pre_signed_part_urls')\
as fetch_pre_signed_urls,\
mock.patch.object(pool_provider, 'get_executor')\
as get_executor:
create_synapse_upload.return_value = upload_status
fetch_pre_signed_urls.return_value = pre_signed_urls
get_executor.return_value.submit.return_value = future
with pytest.raises(expected_raised_exception):
upload()
def test_call_upload__part_failure(self, syn):
"""Verify that an error raised while processing one part
results in an error on the upload."""
self._test_call_upload__part_exception(
syn,
Exception,
SynapseUploadFailedException,
)
def test_call_upload__interrupt(self, syn):
"""Verify that a KeyboardInterrupt raises an abort exception"""
self._test_call_upload__part_exception(
syn,
KeyboardInterrupt,
SynapseUploadAbortedException,
)
def test_already_completed(self, syn):
"""Verify that uploading a file that is already complete
but that wasn't force restarted returns without attempting
to reupload the file."""
upload = self._init_upload_attempt(syn)
upload_id = '1234'
parts_state = '0'
upload_status_response = {
'uploadId': upload_id,
'partsState': parts_state,
'state': 'COMPLETED',
}
with mock.patch.object(upload, '_create_synapse_upload')\
as create_synapse_upload,\
mock.patch.object(upload, '_fetch_pre_signed_part_urls')\
as fetch_pre_signed_urls,\
mock.patch.object(pool_provider, 'get_executor')\
as get_executor:
create_synapse_upload.return_value = upload_status_response
upload_result = upload()
assert upload_status_response == upload_result
# we should have been able to short circuit any further
# upload work and have returned immediately
assert not fetch_pre_signed_urls.called
assert not get_executor.called
def test_all_parts_completed(self, syn):
"""Verify that if all the parts are already complete but
the upload itself hasn't been marked as complete then
we mark it as such without re-uploading any of the parts."""
upload = self._init_upload_attempt(syn)
upload_id = '1234'
parts_state = '11'
create_status_response = {
'uploadId': upload_id,
'partsState': parts_state,
'state': 'UPLOADING',
}
complete_status_response = {
'uploadId': upload_id,
'state': 'COMPLETED',
}
with mock.patch.object(upload, '_create_synapse_upload')\
as create_synapse_upload,\
mock.patch.object(upload, '_fetch_pre_signed_part_urls')\
as fetch_pre_signed_urls,\
mock.patch.object(pool_provider, 'get_executor')\
as get_executor,\
mock.patch.object(upload._syn, 'restPUT') as restPUT:
create_synapse_upload.return_value = create_status_response
restPUT.return_value = complete_status_response
upload_result = upload()
assert complete_status_response == upload_result
restPUT.assert_called_once()
assert f"/file/multipart/{upload_id}/complete" in restPUT.call_args_list[0][0]
# we should have been able to short circuit any further
# upload work and have returned immediately
assert not fetch_pre_signed_urls.called
assert not get_executor.called
class TestMultipartUpload:
@pytest.fixture(autouse=True, scope='function')
def init_syn(self, syn):
self.syn = syn
def test_multipart_upload_file(self):
"""Verify multipart_upload_file passes through its
args, validating and supplying defaults as expected."""
syn = mock.Mock()
file_path = '/foo/bar/baz'
file_size = 1234
md5_hex = 'abc123'
storage_location_id = 5432
with mock.patch('os.path.exists') as os_path_exists,\
mock.patch('os.path.isdir') as os_path_is_dir,\
mock.patch('os.path.getsize') as os_path_getsize,\
mock.patch.object(
multipart_upload,
'md5_for_file',
) as md5_for_file,\
mock.patch.object(
multipart_upload,
'_multipart_upload',
) as mock_multipart_upload,\
mock.patch.object(multipart_upload, 'Spinner') as mock_spinner:
os_path_getsize.return_value = file_size
md5_for_file.return_value.hexdigest.return_value = md5_hex
os_path_exists.return_value = False
# bad file
with pytest.raises(IOError):
multipart_upload_file(syn, file_path, storage_location_id=storage_location_id)
os_path_exists.return_value = True
os_path_is_dir.return_value = True
with pytest.raises(IOError):
multipart_upload_file(syn, file_path, storage_location_id=storage_location_id)
os_path_is_dir.return_value = False
expected_upload_request = {
'concreteType': 'org.sagebionetworks.repo.model.file.MultipartUploadRequest',
'contentType': 'application/octet-stream',
'contentMD5Hex': md5_hex,
'fileName': 'baz',
'fileSizeBytes': file_size,
'generatePreview': True,
'storageLocationId': storage_location_id,
'partSizeBytes': DEFAULT_PART_SIZE,
}
# call w/ defaults
multipart_upload_file(
syn,
file_path,
storage_location_id=storage_location_id,
)
mock_multipart_upload.assert_called_once_with(
syn,
'baz',
expected_upload_request,
mock.ANY, # part_fn
mock.ANY, # md5_fn,
force_restart=False,
max_threads=None,
)
# Test when call the multipart_upload_file, md5_for_file pass in the correct callback function
syn_with_silent_mode = Synapse(silent=True, skip_checks=True)
multipart_upload_file(
syn_with_silent_mode,
file_path,
storage_location_id=storage_location_id,
)
md5_for_file.assert_called_with(file_path, callback=None)
syn_with_no_silent_mode = Synapse(debug=False, skip_checks=True)
multipart_upload_file(
syn_with_no_silent_mode,
file_path,
storage_location_id=storage_location_id,
)
md5_for_file.assert_called_with(file_path, callback=mock_spinner.return_value.print_tick)
mock_multipart_upload.reset_mock()
# call specifying all optional kwargs
kwargs = {
'dest_file_name': 'blort',
'content_type': 'text/plain',
'part_size': MIN_PART_SIZE * 2,
'preview': False,
'force_restart': True,
'max_threads': 8,
}
expected_upload_request = {
'concreteType': 'org.sagebionetworks.repo.model.file.MultipartUploadRequest',
'contentType': kwargs['content_type'],
'contentMD5Hex': md5_hex,
'fileName': kwargs['dest_file_name'],
'fileSizeBytes': file_size,
'generatePreview': kwargs['preview'],
'storageLocationId': storage_location_id,
'partSizeBytes': kwargs['part_size'],
}
multipart_upload_file(
syn,
file_path,
storage_location_id=storage_location_id,
**kwargs
)
mock_multipart_upload.assert_called_once_with(
syn,
kwargs['dest_file_name'],
expected_upload_request,
mock.ANY, # part_fn
mock.ANY, # md5_fn,
force_restart=kwargs['force_restart'],
max_threads=kwargs['max_threads'],
)
def test_multipart_upload_string(self):
"""Verify multipart_upload_string passes through its
args, validating and supplying defaults as expected."""
syn = mock.Mock()
upload_text = 'foobarbaz'
storage_location_id = 5432
with mock.patch.object(
multipart_upload,
'_multipart_upload',
) as mock_multipart_upload:
encoded = upload_text.encode('utf-8')
md5_hex = hashlib.md5(encoded).hexdigest()
# call w/ default args
multipart_upload_string(syn, upload_text, storage_location_id=storage_location_id)
expected_upload_request = {
'concreteType': 'org.sagebionetworks.repo.model.file.MultipartUploadRequest',
'contentType': 'text/plain; charset=utf-8',
'contentMD5Hex': md5_hex,
'fileName': 'message.txt',
'fileSizeBytes': len(upload_text),
'generatePreview': True,
'storageLocationId': storage_location_id,
'partSizeBytes': DEFAULT_PART_SIZE,
}
mock_multipart_upload.assert_called_once_with(
syn,
'message.txt',
expected_upload_request,
mock.ANY, # part_fn
mock.ANY, # md5_fn,
force_restart=False,
max_threads=None,
)
mock_multipart_upload.reset_mock()
# call specifying all optional kwargs
kwargs = {
'dest_file_name': 'blort',
'content_type': 'text/csv',
'part_size': MIN_PART_SIZE * 2,
'storage_location_id': storage_location_id,
'preview': False,
'force_restart': True,
'max_threads': 8,
}
multipart_upload_string(syn, upload_text, **kwargs)
expected_upload_request = {
'concreteType': 'org.sagebionetworks.repo.model.file.MultipartUploadRequest',
'contentType': kwargs['content_type'],
'contentMD5Hex': md5_hex,
'fileName': kwargs['dest_file_name'],
'fileSizeBytes': len(upload_text),
'generatePreview': kwargs['preview'],
'storageLocationId': storage_location_id,
'partSizeBytes': kwargs['part_size'],
}
mock_multipart_upload.assert_called_once_with(
syn,
kwargs['dest_file_name'],
expected_upload_request,
mock.ANY, # part_request_body_provider_fn
mock.ANY, # md5_fn,
force_restart=kwargs['force_restart'],
max_threads=kwargs['max_threads'],
)
def test_multipart_copy__default_args(self):
"""Test multipart copy using only the required positional args.
Default settings should be used for unspecified params."""
syn = mock.Mock()
part_size_bytes = 9876
file_handle_id = 1234
associate_object_id = 'syn123456'
associate_object_type = 'FileEntity'
source_file_handle_association = {
'fileHandleId': file_handle_id,
'associateObjectId': associate_object_id,
'associateObjectType': associate_object_type,
}
with mock.patch.object(multipart_upload, '_multipart_upload') as mock_multipart_upload:
expected_upload_request = {
'concreteType': 'org.sagebionetworks.repo.model.file.MultipartUploadCopyRequest',
'fileName': None,
'generatePreview': True,
'partSizeBytes': part_size_bytes,
'sourceFileHandleAssociation': source_file_handle_association,
'storageLocationId': None
}
# call w/ defaults
multipart_copy(
syn,
source_file_handle_association,
part_size=part_size_bytes,
)
mock_multipart_upload.assert_called_once_with(
syn,
None,
expected_upload_request,
mock.ANY, # part_fn
mock.ANY, # md5_fn,
force_restart=False,
max_threads=None,
)
assert not syn._print_transfer_progress.called
def test_multipart_copy__explicit_args(self):
"""Test multipart copy explicitly defining all args.
The parameterization should be passed through as expected."""
syn = mock.Mock()
part_size_bytes = 9876
file_handle_id = 1234
associate_object_id = 'syn123456'
associate_object_type = 'FileEntity'
source_file_handle_association = {
'fileHandleId': file_handle_id,
'associateObjectId': associate_object_id,
'associateObjectType': associate_object_type,
}
storage_location_id = 5432
with mock.patch.object(multipart_upload, '_multipart_upload') as mock_multipart_upload:
# call specifying all optional kwargs
kwargs = {
'dest_file_name': 'blort',
'preview': False,
'storage_location_id': storage_location_id,
'force_restart': True,
'max_threads': 8,
'part_size': part_size_bytes,
}
expected_upload_request = {
'concreteType': 'org.sagebionetworks.repo.model.file.MultipartUploadCopyRequest',
'fileName': kwargs['dest_file_name'],
'generatePreview': kwargs['preview'],
'sourceFileHandleAssociation': source_file_handle_association,
'storageLocationId': kwargs['storage_location_id'],
'partSizeBytes': kwargs['part_size'],
}
multipart_copy(
syn,
source_file_handle_association,
**kwargs,
)
mock_multipart_upload.assert_called_once_with(
syn,
kwargs['dest_file_name'],
expected_upload_request,
mock.ANY, # part_fn
mock.ANY, # md5_fn,
force_restart=kwargs['force_restart'],
max_threads=kwargs['max_threads'],
)
assert not syn._print_transfer_progress.called
def _multipart_upload_test(self, upload_side_effect, syn, *args, **kwargs):
with mock.patch.object(
multipart_upload,
'UploadAttempt'
) as mock_upload_attempt:
mock_upload_attempt.side_effect = upload_side_effect
return _multipart_upload(syn, *args, **kwargs), mock_upload_attempt
def test_multipart_upload(self):
""""Verify the behavior of a successful call to multipart_upload
with various parameterizations applied. Verify that parameters
are validated/adjusted as expected."""
syn = mock.Mock()
md5_hex = 'ab123'
dest_file_name = 'foo'
content_type = 'text/plain'
storage_location_id = 3210
result_file_handle_id = 'foo'
upload_side_effect = [
mock.Mock(
return_value={'resultFileHandleId': result_file_handle_id}
)
]
# (file_size, in_part_size, in_max_threads, in_force_restart)
# (out_max_threads, out_force_restart)
tests = [
# non-positive max threads corrected
(
(1234, DEFAULT_PART_SIZE, 0, False),
(1, False)
),
# specify force_restart
(
(pow(2, 28), DEFAULT_PART_SIZE, 8, True),
(8, True),
),
# no max_threads, specified, should use default
(
(pow(2, 28), 1000, None, False),
(pool_provider.DEFAULT_NUM_THREADS, False),
),
# part size specified below min, should be raised
(
(1000, 1, 5, False),
(5, False),
),
# part size would exceed max number of parts,
# should be adjusted accordingly
(
(pow(2, 36), MIN_PART_SIZE + 1, 8, True),
(8, True),
)
]
for (file_size, in_part_size, in_max_threads, in_force_restart),\
(out_max_threads, out_force_restart)\
in tests:
upload_request = {
'concreteType': 'org.sagebionetworks.repo.model.file.MultipartUploadRequest',
'contentType': content_type,
'contentMD5Hex': md5_hex,
'fileName': dest_file_name,
'fileSizeBytes': file_size,
'generatePreview': True,
'storageLocationId': storage_location_id,
'partSizeBytes': in_part_size,
}
result, upload_mock = self._multipart_upload_test(
upload_side_effect,
syn,
dest_file_name,
upload_request,
mock.ANY,
mock.ANY,
max_threads=in_max_threads,
force_restart=in_force_restart,
)
upload_mock.assert_called_once_with(
syn,
dest_file_name,
upload_request,
mock.ANY, # part_fn
mock.ANY, # md5_fn,
out_max_threads,
out_force_restart,
)
def test_multipart_upload__retry_success(self):
"""Verify we recover on a failed upload if a subsequent
retry succeeds."""
syn = mock.Mock()
md5_hex = 'ab123'
file_size = 1234
part_size = 567
dest_file_name = 'foo'
content_type = 'text/plain'
storage_location_id = 3210
result_file_handle_id = 'foo'
max_threads = 5
upload_side_effect = [
SynapseUploadFailedException(),
SynapseUploadFailedException(),
mock.Mock(
return_value={'resultFileHandleId': result_file_handle_id}
)
]
expected_upload_request = {
'concreteType': 'org.sagebionetworks.repo.model.file.MultipartUploadRequest',
'contentType': content_type,
'contentMD5Hex': md5_hex,
'fileName': dest_file_name,
'fileSizeBytes': file_size,
'generatePreview': True,
'storageLocationId': storage_location_id,
'partSizeBytes': part_size,
}
result, upload_mock = self._multipart_upload_test(
upload_side_effect,
syn,
dest_file_name,
expected_upload_request,
mock.ANY, # part_fn
mock.ANY, # md5_fn,
max_threads,
False,
)
# should have been called multiple times but returned
# the result in the end.
assert result_file_handle_id == result
assert len(upload_side_effect) == upload_mock.call_count
def test_multipart_upload__retry_failure(self):
"""Verify if we run out of upload attempts we give up
and raise the failure."""
syn = mock.Mock()
md5_hex = 'ab123'
file_size = 1234
part_size = 567
dest_file_name = 'foo'
content_type = 'text/plain'
storage_location_id = 3210
max_threads = 5
upload_side_effect = SynapseUploadFailedException()
expected_upload_request = {
'concreteType': 'org.sagebionetworks.repo.model.file.MultipartUploadRequest',
'contentType': content_type,
'contentMD5Hex': md5_hex,
'fileName': dest_file_name,
'fileSizeBytes': file_size,
'generatePreview': True,
'storageLocationId': storage_location_id,
'partSizeBytes': part_size
}
with pytest.raises(SynapseUploadFailedException):
self._multipart_upload_test(
upload_side_effect,
syn,
dest_file_name,
expected_upload_request,
mock.ANY, # part_fn
mock.ANY, # md5_fn,
max_threads,
False,
)
avg_line_length: 33.501274 | max_line_length: 110 | alphanum_fraction: 0.569679

hexsha: 4a22c03113415ac01f0ff5a2aeea402dba08f7ac | size: 3,879 | ext: py | lang: Python
max_stars_repo_path: evennia/server/throttle.py | max_stars_repo_name: Jaykingamez/evennia | max_stars_repo_head_hexsha: cf7cab1fea99ede3efecb70a65c3eb0fba1d3745 | max_stars_repo_licenses: ["BSD-3-Clause"] | max_stars_count: 1,544 | max_stars_repo_stars_event_min_datetime: 2015-01-01T22:16:31.000Z | max_stars_repo_stars_event_max_datetime: 2022-03-31T19:17:45.000Z
max_issues_repo_path: evennia/server/throttle.py | max_issues_repo_name: Jaykingamez/evennia | max_issues_repo_head_hexsha: cf7cab1fea99ede3efecb70a65c3eb0fba1d3745 | max_issues_repo_licenses: ["BSD-3-Clause"] | max_issues_count: 1,686 | max_issues_repo_issues_event_min_datetime: 2015-01-02T18:26:31.000Z | max_issues_repo_issues_event_max_datetime: 2022-03-31T20:12:03.000Z
max_forks_repo_path: evennia/server/throttle.py | max_forks_repo_name: Jaykingamez/evennia | max_forks_repo_head_hexsha: cf7cab1fea99ede3efecb70a65c3eb0fba1d3745 | max_forks_repo_licenses: ["BSD-3-Clause"] | max_forks_count: 867 | max_forks_repo_forks_event_min_datetime: 2015-01-02T21:01:54.000Z | max_forks_repo_forks_event_max_datetime: 2022-03-29T00:28:27.000Z
content:
from collections import defaultdict, deque
from evennia.utils import logger
import time
class Throttle(object):
"""
Keeps a running count of failed actions per IP address.
Available methods indicate whether or not the number of failures exceeds a
particular threshold.
This version of the throttle is usable by both the terminal server as well
as the web server, imposes limits on memory consumption by using deques
with length limits instead of open-ended lists, and removes sparse keys when
no recent failures have been recorded.
"""
error_msg = "Too many failed attempts; you must wait a few minutes before trying again."
def __init__(self, **kwargs):
"""
Allows setting of throttle parameters.
Keyword Args:
limit (int): Max number of failures before imposing limiter
timeout (int): number of timeout seconds after
max number of tries has been reached.
cache_size (int): Max number of attempts to record per IP within a
rolling window; this is NOT the same as the limit after which
the throttle is imposed!
"""
self.storage = defaultdict(deque)
self.cache_size = self.limit = kwargs.get("limit", 5)
self.timeout = kwargs.get("timeout", 5 * 60)
def get(self, ip=None):
"""
Convenience function that returns the storage table, or part of.
Args:
ip (str, optional): IP address of requestor
Returns:
storage (dict): When no IP is provided, returns a dict of all
current IPs being tracked and the timestamps of their recent
failures.
timestamps (deque): When an IP is provided, returns a deque of
timestamps of recent failures only for that IP.
"""
if ip:
return self.storage.get(ip, deque(maxlen=self.cache_size))
else:
return self.storage
def update(self, ip, failmsg="Exceeded threshold."):
"""
Store the time of the latest failure.
Args:
ip (str): IP address of requestor
failmsg (str, optional): Message to display in logs upon activation
of throttle.
Returns:
None
"""
# Get current status
previously_throttled = self.check(ip)
# Enforce length limits
if not self.storage[ip].maxlen:
self.storage[ip] = deque(maxlen=self.cache_size)
self.storage[ip].append(time.time())
# See if this update caused a change in status
currently_throttled = self.check(ip)
# If this makes it engage, log a single activation event
if not previously_throttled and currently_throttled:
logger.log_sec(
"Throttle Activated: %s (IP: %s, %i hits in %i seconds.)"
% (failmsg, ip, self.limit, self.timeout)
)
def check(self, ip):
"""
This will check the session's address against the
storage dictionary to check they haven't spammed too many
fails recently.
Args:
ip (str): IP address of requestor
Returns:
throttled (bool): True if throttling is active,
False otherwise.
"""
now = time.time()
ip = str(ip)
# checking mode
latest_fails = self.storage[ip]
if latest_fails and len(latest_fails) >= self.limit:
# too many fails recently
if now - latest_fails[-1] < self.timeout:
# too soon - timeout in play
return True
else:
# timeout has passed. clear faillist
del self.storage[ip]
return False
else:
return False
avg_line_length: 32.596639 | max_line_length: 92 | alphanum_fraction: 0.59139

hexsha: 4a22c03c5951076360456f820f4c169dd564f084 | size: 1,336 | ext: py | lang: Python
max_stars_repo_path: var/spack/repos/builtin/packages/intel-oneapi-advisor/package.py | max_stars_repo_name: player1537-forks/spack | max_stars_repo_head_hexsha: 822b7632222ec5a91dc7b7cda5fc0e08715bd47c | max_stars_repo_licenses: ["ECL-2.0", "Apache-2.0", "MIT-0", "MIT"] | max_stars_count: 3 | max_stars_repo_stars_event_min_datetime: 2021-09-29T02:14:40.000Z | max_stars_repo_stars_event_max_datetime: 2022-01-27T20:50:36.000Z
max_issues_repo_path: var/spack/repos/builtin/packages/intel-oneapi-advisor/package.py | max_issues_repo_name: player1537-forks/spack | max_issues_repo_head_hexsha: 822b7632222ec5a91dc7b7cda5fc0e08715bd47c | max_issues_repo_licenses: ["ECL-2.0", "Apache-2.0", "MIT-0", "MIT"] | max_issues_count: 8 | max_issues_repo_issues_event_min_datetime: 2022-02-28T11:30:18.000Z | max_issues_repo_issues_event_max_datetime: 2022-03-23T19:34:56.000Z
max_forks_repo_path: var/spack/repos/builtin/packages/intel-oneapi-advisor/package.py | max_forks_repo_name: player1537-forks/spack | max_forks_repo_head_hexsha: 822b7632222ec5a91dc7b7cda5fc0e08715bd47c | max_forks_repo_licenses: ["ECL-2.0", "Apache-2.0", "MIT-0", "MIT"] | max_forks_count: null | max_forks_repo_forks_event_min_datetime: null | max_forks_repo_forks_event_max_datetime: null
content:
# Copyright 2013-2022 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
import platform
from spack import *
class IntelOneapiAdvisor(IntelOneApiPackage):
"""Intel Advisor is a design and analysis tool for achieving
high application performance. This is done through
efficient threading, vectorization, and memory use, and
GPU offload on current and future Intel hardware."""
maintainers = ['rscohn2']
homepage = 'https://software.intel.com/content/www/us/en/develop/tools/oneapi/components/advisor.html'
if platform.system() == 'Linux':
version('2022.0.0',
url='https://registrationcenter-download.intel.com/akdlm/irc_nas/18369/l_oneapi_advisor_p_2022.0.0.92_offline.sh',
sha256='f1c4317c2222c56fb2e292513f7eec7ec27eb1049d3600cb975bc08ed1477993',
expand=False)
version('2021.4.0',
url='https://registrationcenter-download.intel.com/akdlm/irc_nas/18220/l_oneapi_advisor_p_2021.4.0.389_offline.sh',
sha256='dd948f7312629d9975e12a57664f736b8e011de948771b4c05ad444438532be8',
expand=False)
@property
def component_dir(self):
return 'advisor'
avg_line_length: 39.294118 | max_line_length: 131 | alphanum_fraction: 0.707335

hexsha: 4a22c05d29a8085309b371f2c30ea033abd3c479 | size: 1,130 | ext: py | lang: Python
max_stars_repo_path: backtracking/all_subsequences.py | max_stars_repo_name: Pratiyush27/Python | max_stars_repo_head_hexsha: be48a876c7746611099974e572ea82691a7cbb20 | max_stars_repo_licenses: ["MIT"] | max_stars_count: 12 | max_stars_repo_stars_event_min_datetime: 2020-02-11T22:18:10.000Z | max_stars_repo_stars_event_max_datetime: 2021-06-23T02:56:07.000Z
max_issues_repo_path: backtracking/all_subsequences.py | max_issues_repo_name: Pratiyush27/Python | max_issues_repo_head_hexsha: be48a876c7746611099974e572ea82691a7cbb20 | max_issues_repo_licenses: ["MIT"] | max_issues_count: 11 | max_issues_repo_issues_event_min_datetime: 2019-10-04T08:54:03.000Z | max_issues_repo_issues_event_max_datetime: 2019-10-19T09:06:10.000Z
max_forks_repo_path: backtracking/all_subsequences.py | max_forks_repo_name: Pratiyush27/Python | max_forks_repo_head_hexsha: be48a876c7746611099974e572ea82691a7cbb20 | max_forks_repo_licenses: ["MIT"] | max_forks_count: 18 | max_forks_repo_forks_event_min_datetime: 2020-02-09T13:00:11.000Z | max_forks_repo_forks_event_max_datetime: 2021-03-11T08:47:36.000Z
content:
"""
In this problem, we want to determine all possible subsequences
of the given sequence. We use backtracking to solve this problem.
Time complexity: O(2^n),
where n denotes the length of the given sequence.
"""
def generate_all_subsequences(sequence):
create_state_space_tree(sequence, [], 0)
def create_state_space_tree(sequence, current_subsequence, index):
"""
Creates a state space tree to iterate through each branch using DFS.
We know that each state has exactly two children.
It terminates when it reaches the end of the given sequence.
"""
if index == len(sequence):
print(current_subsequence)
return
create_state_space_tree(sequence, current_subsequence, index + 1)
current_subsequence.append(sequence[index])
create_state_space_tree(sequence, current_subsequence, index + 1)
current_subsequence.pop()
"""
remove the comment to take an input from the user
print("Enter the elements")
sequence = list(map(int, input().split()))
"""
sequence = [3, 1, 2, 4]
generate_all_subsequences(sequence)
sequence = ["A", "B", "C"]
generate_all_subsequences(sequence)
avg_line_length: 26.27907 | max_line_length: 69 | alphanum_fraction: 0.736283

hexsha: 4a22c11d06a85cd2b1afc811781defbc0368ca79 | size: 10,884 | ext: py | lang: Python
max_stars_repo_path: aizynthfinder/aizynthfinder.py | max_stars_repo_name: MolecularAI/aizynthfinder | max_stars_repo_head_hexsha: 42f83c9eb9fed3fe80dc966bb7b25ccacf1dc022 | max_stars_repo_licenses: ["MIT"] | max_stars_count: 219 | max_stars_repo_stars_event_min_datetime: 2020-06-15T08:04:53.000Z | max_stars_repo_stars_event_max_datetime: 2022-03-31T09:02:47.000Z
max_issues_repo_path: aizynthfinder/aizynthfinder.py | max_issues_repo_name: wangxr0526/aizynthfinder | max_issues_repo_head_hexsha: 42f83c9eb9fed3fe80dc966bb7b25ccacf1dc022 | max_issues_repo_licenses: ["MIT"] | max_issues_count: 56 | max_issues_repo_issues_event_min_datetime: 2020-08-14T14:50:42.000Z | max_issues_repo_issues_event_max_datetime: 2022-03-22T12:49:06.000Z
max_forks_repo_path: aizynthfinder/aizynthfinder.py | max_forks_repo_name: wangxr0526/aizynthfinder | max_forks_repo_head_hexsha: 42f83c9eb9fed3fe80dc966bb7b25ccacf1dc022 | max_forks_repo_licenses: ["MIT"] | max_forks_count: 58 | max_forks_repo_forks_event_min_datetime: 2020-06-15T13:36:42.000Z | max_forks_repo_forks_event_max_datetime: 2022-03-21T06:18:02.000Z
content:
""" Module containing a class that is the main interface the retrosynthesis tool.
"""
from __future__ import annotations
import time
from collections import defaultdict
from typing import TYPE_CHECKING
from tqdm import tqdm
# This must be imported first to setup logging for rdkit, tensorflow etc
from aizynthfinder.utils.logging import logger
from aizynthfinder.utils.loading import load_dynamic_class
from aizynthfinder.context.config import Configuration
from aizynthfinder.search.mcts import MctsSearchTree
from aizynthfinder.reactiontree import ReactionTreeFromExpansion
from aizynthfinder.analysis import (
TreeAnalysis,
RouteCollection,
RouteSelectionArguments,
)
from aizynthfinder.chem import Molecule, TreeMolecule, FixedRetroReaction
from aizynthfinder.search.andor_trees import AndOrSearchTreeBase
if TYPE_CHECKING:
from aizynthfinder.utils.type_utils import (
StrDict,
Optional,
Union,
Callable,
List,
Tuple,
Dict,
)
from aizynthfinder.chem import RetroReaction
class AiZynthFinder:
"""
Public API to the aizynthfinder tool
If instantiated with the path to a yaml file or dictionary of settings
the stocks and policy networks are loaded directly.
Otherwise, the user is responsible for loading them prior to
executing the tree search.
:ivar config: the configuration of the search
:ivar expansion_policy: the expansion policy model
:ivar filter_policy: the filter policy model
:ivar stock: the stock
:ivar scorers: the loaded scores
:ivar tree: the search tree
:ivar analysis: the tree analysis
:ivar routes: the top-ranked routes
:ivar search_stats: statistics of the latest search
:param configfile: the path to yaml file with configuration (has priority over configdict), defaults to None
:param configdict: the config as a dictionary source, defaults to None
"""
def __init__(self, configfile: str = None, configdict: StrDict = None) -> None:
self._logger = logger()
if configfile:
self.config = Configuration.from_file(configfile)
elif configdict:
self.config = Configuration.from_dict(configdict)
else:
self.config = Configuration()
self.expansion_policy = self.config.expansion_policy
self.filter_policy = self.config.filter_policy
self.stock = self.config.stock
self.scorers = self.config.scorers
self.tree: Optional[Union[MctsSearchTree, AndOrSearchTreeBase]] = None
self._target_mol: Optional[Molecule] = None
self.search_stats: StrDict = dict()
self.routes = RouteCollection([])
self.analysis: Optional[TreeAnalysis] = None
@property
def target_smiles(self) -> str:
"""The SMILES representation of the molecule to predict routes on."""
if not self._target_mol:
return ""
return self._target_mol.smiles
@target_smiles.setter
def target_smiles(self, smiles: str) -> None:
self.target_mol = Molecule(smiles=smiles)
@property
def target_mol(self) -> Optional[Molecule]:
"""The molecule to predict routes on"""
return self._target_mol
@target_mol.setter
def target_mol(self, mol: Molecule) -> None:
self.tree = None
self._target_mol = mol
def build_routes(
self, selection: RouteSelectionArguments = None, scorer: str = "state score"
) -> None:
"""
Build reaction routes
This is necessary to call after the tree search has completed in order
to extract results from the tree search.
:param selection: the selection criteria for the routes
:param scorer: a reference to the object used to score the nodes
:raises ValueError: if the search tree not initialized
"""
if not self.tree:
raise ValueError("Search tree not initialized")
self.analysis = TreeAnalysis(self.tree, scorer=self.scorers[scorer])
self.routes = RouteCollection.from_analysis(self.analysis, selection)
def extract_statistics(self) -> StrDict:
"""Extracts tree statistics as a dictionary"""
if not self.analysis:
return {}
stats = {
"target": self.target_smiles,
"search_time": self.search_stats["time"],
"first_solution_time": self.search_stats.get("first_solution_time", 0),
"first_solution_iteration": self.search_stats.get(
"first_solution_iteration", 0
),
}
stats.update(self.analysis.tree_statistics())
return stats
def prepare_tree(self) -> None:
"""
Setup the tree for searching
:raises ValueError: if the target molecule was not set
"""
if not self.target_mol:
raise ValueError("No target molecule set")
self.stock.reset_exclusion_list()
if self.config.exclude_target_from_stock and self.target_mol in self.stock:
self.stock.exclude(self.target_mol)
self._logger.debug("Excluding the target compound from the stock")
self._setup_search_tree()
self.analysis = None
self.routes = RouteCollection([])
def tree_search(self, show_progress: bool = False) -> float:
"""
Perform the actual tree search
:param show_progress: if True, shows a progress bar
:return: the time past in seconds
"""
if not self.tree:
self.prepare_tree()
# This is for type checking, prepare_tree is creating it.
assert self.tree is not None
self.search_stats = {"returned_first": False, "iterations": 0}
time0 = time.time()
i = 1
self._logger.debug("Starting search")
time_past = time.time() - time0
if show_progress:
pbar = tqdm(total=self.config.iteration_limit, leave=False)
while time_past < self.config.time_limit and i <= self.config.iteration_limit:
if show_progress:
pbar.update(1)
self.search_stats["iterations"] += 1
try:
is_solved = self.tree.one_iteration()
except StopIteration:
break
if is_solved and "first_solution_time" not in self.search_stats:
self.search_stats["first_solution_time"] = time.time() - time0
self.search_stats["first_solution_iteration"] = i
if self.config.return_first and is_solved:
self._logger.debug("Found first solved route")
self.search_stats["returned_first"] = True
break
i = i + 1
time_past = time.time() - time0
if show_progress:
pbar.close()
time_past = time.time() - time0
self._logger.debug("Search completed")
self.search_stats["time"] = time_past
return time_past
def _setup_search_tree(self):
self._logger.debug("Defining tree root: %s" % self.target_smiles)
if self.config.search_algorithm.lower() == "mcts":
self.tree = MctsSearchTree(
root_smiles=self.target_smiles, config=self.config
)
else:
cls = load_dynamic_class(self.config.search_algorithm)
self.tree: AndOrSearchTreeBase = cls(
root_smiles=self.target_smiles, config=self.config
)
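# Illustrative usage sketch (not part of the original module), assuming the enclosing
# class above is AiZynthFinder; the config file name and SMILES string are placeholders.
# It only shows the intended call order of the methods defined above.
def _example_finder_workflow():  # pragma: no cover
    finder = AiZynthFinder(configfile="config.yml")
    finder.target_smiles = "CCO"            # sets target_mol and invalidates any old tree
    finder.tree_search(show_progress=True)
    finder.build_routes()                   # only valid after the search has completed
    return finder.extract_statistics()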
class AiZynthExpander:
"""
Public API to the AiZynthFinder expansion and filter policies
If instantiated with the path to a yaml file or dictionary of settings
the stocks and policy networks are loaded directly.
Otherwise, the user is responsible for loading them prior to
executing the tree search.
:ivar config: the configuration of the search
:ivar expansion_policy: the expansion policy model
:ivar filter_policy: the filter policy model
:param configfile: the path to yaml file with configuration (has priority over configdict), defaults to None
:param configdict: the config as a dictionary source, defaults to None
"""
def __init__(self, configfile: str = None, configdict: StrDict = None) -> None:
self._logger = logger()
if configfile:
self.config = Configuration.from_file(configfile)
elif configdict:
self.config = Configuration.from_dict(configdict)
else:
self.config = Configuration()
self.expansion_policy = self.config.expansion_policy
self.filter_policy = self.config.filter_policy
def do_expansion(
self,
smiles: str,
return_n: int = 5,
filter_func: Callable[[RetroReaction], bool] = None,
) -> List[Tuple[FixedRetroReaction, ...]]:
"""
        Do the expansion of the given molecule, returning a list of
        reaction tuples. Each tuple in the list contains reactions
        producing the same reactants. Hence, the nested structure of the
        return value is a way to group reactions.
        If a filter policy is set up, the feasibility probability of each
        reaction is added as metadata to the reaction.
        The additional filter function makes it possible to do customized
        filtering. The callable should take as its only argument a `RetroReaction`
        object and return True if the reaction should be kept or False if it should
        be removed.
:param smiles: the SMILES string of the target molecule
:param return_n: the length of the return list
:param filter_func: an additional filter function
:return: the grouped reactions
"""
mol = TreeMolecule(parent=None, smiles=smiles)
actions, _ = self.expansion_policy.get_actions([mol])
results: Dict[Tuple[str, ...], List[FixedRetroReaction]] = defaultdict(list)
for action in actions:
reactants = action.reactants
if not reactants:
continue
if filter_func and not filter_func(action):
continue
for name in self.filter_policy.selection or []:
if hasattr(self.filter_policy[name], "feasibility"):
_, feasibility_prob = self.filter_policy[name].feasibility(action)
action.metadata["feasibility"] = float(feasibility_prob)
break
action.metadata["expansion_rank"] = len(results) + 1
unique_key = tuple(sorted(mol.inchi_key for mol in reactants[0]))
if unique_key not in results and len(results) >= return_n:
continue
rxn = next(ReactionTreeFromExpansion(action).tree.reactions()) # type: ignore
results[unique_key].append(rxn)
return [tuple(reactions) for reactions in results.values()]
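# Illustrative usage sketch (not part of the original module): calling do_expansion with a
# placeholder config file and SMILES string; the optional filter keeps only disconnections
# that give at most two reactants.
def _example_expander_usage():  # pragma: no cover
    expander = AiZynthExpander(configfile="config.yml")
    groups = expander.do_expansion(
        "CCO", return_n=3, filter_func=lambda rxn: len(rxn.reactants[0]) <= 2
    )
    return groups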
| 37.273973 | 112 | 0.65215 |
4a22c2b9e76a68df864191f1fa5e82c238d9f370 | 401 | py | Python | src/app/entities/hashing_algorithms/__init__.py | dieisabel/cypherman | 06d8678b79b18aa256a79ec6967d68274f088dbc | [
"MIT"
] | null | null | null | src/app/entities/hashing_algorithms/__init__.py | dieisabel/cypherman | 06d8678b79b18aa256a79ec6967d68274f088dbc | [
"MIT"
] | 43 | 2021-12-02T21:26:01.000Z | 2022-02-21T08:51:06.000Z | src/app/entities/hashing_algorithms/__init__.py | dieisabel/cypherman | 06d8678b79b18aa256a79ec6967d68274f088dbc | [
"MIT"
] | null | null | null | __all__ = [
'IHashingAlgorithm',
'MD5HashingAlgorithm',
'SHA1HashingAlgorithm',
'SHA256HashingAlgorithm',
]
from entities.hashing_algorithms.hashing_algorithm import IHashingAlgorithm
from entities.hashing_algorithms.md5 import MD5HashingAlgorithm
from entities.hashing_algorithms.sha1 import SHA1HashingAlgorithm
from entities.hashing_algorithms.sha256 import SHA256HashingAlgorithm
| 33.416667 | 75 | 0.840399 |
4a22c3186892060185c3dba56a52d1ca385d661c | 6,124 | py | Python | projects/pareto/plot.py | energyscope/EnergyScope_multi_criteria | 438ca2d3a8502110ce45ed6a1165eb0ff7c2d57c | [
"Apache-2.0"
] | 1 | 2021-12-13T11:53:45.000Z | 2021-12-13T11:53:45.000Z | projects/pareto/plot.py | energyscope/EnergyScope_multi_criteria | 438ca2d3a8502110ce45ed6a1165eb0ff7c2d57c | [
"Apache-2.0"
] | null | null | null | projects/pareto/plot.py | energyscope/EnergyScope_multi_criteria | 438ca2d3a8502110ce45ed6a1165eb0ff7c2d57c | [
"Apache-2.0"
] | null | null | null | from typing import List
import matplotlib.pyplot as plt
import energyscope as es
def plot_pareto(case_study_path: str, test_case: str, epsilons: List[float]) -> None:
cost_opt_cost = es.get_total_cost(f"{case_study_path}/{test_case}/cost")
einv_opt_cost = es.get_total_einv(f"{case_study_path}/{test_case}/cost")
print(f"Optimal Cost {cost_opt_cost:.2f}")
print(f"Einv at optimal cost {einv_opt_cost:.2f}")
cost_opt_einv = es.get_total_cost(f"{case_study_path}/{test_case}/einv")
einv_opt_einv = es.get_total_einv(f"{case_study_path}/{test_case}/einv")
print(f"Cost at optimal einv {cost_opt_einv:.2f}")
print(f"Optimal einv {einv_opt_einv:.2f}")
print()
x = [0.]
y = [(einv_opt_cost/einv_opt_einv-1)*100]
print(y)
for epsilon in epsilons:
dir_name = f"{case_study_path}/{test_case}/cost_epsilon_{epsilon}"
print(dir_name)
cost = es.get_total_cost(dir_name)
einv = es.get_total_einv(dir_name)
print(cost, einv)
x += [(cost/cost_opt_cost-1)*100]
y += [(einv/einv_opt_einv-1)*100]
# Adding CO2 extreme point
x += [(cost_opt_einv/cost_opt_cost-1)*100]
y += [0.]
print([round(i, 2) for i in x])
print([round(j, 2) for j in y])
# plt.plot(x, y,)
fig = plt.plot(x, y, 'o', c='C1')
plt.plot([x[0], x[-1]], [y[0], y[-1]], 'o', c='r')
plt.grid()
plt.xlabel("Deviation from cost optimal (%)")
plt.ylabel("Deviation from Einv optimal (%)")
# plt.title("Pareto front (Cost vs Einv)")
# plt.savefig('pareto_cost_einv.png')
plot_suboptimal_space = False
if plot_suboptimal_space:
plt.grid(False)
x_fill = [x[0]] + x + [100, 100, x[0]]
y_fill = [100] + y + [y[-1], 100, 100]
plt.fill(x_fill, y_fill, c='grey', alpha=0.5)
plt.xlim([-1, x[-1]*1.1])
plt.ylim([-5, y[0]*1.1])
plt.savefig('pareto_cost_einv_space.png')
x_fill = x[1:5] + [x[4], x[1]]
y_fill = y[1:5] + [y[1], y[1]]
plt.fill(x_fill, y_fill, c='green', alpha=0.5)
# x_fill = x[1:5] + [x[4], 0, 0, x[1]]
# y_fill = y[1:5] + [0, 0, y[1], y[1]]
# plt.fill(x_fill, y_fill, c='red', alpha=0.5)
plt.xlim([-1, x[-1]*1.1])
plt.ylim([-5, y[0]*1.1])
plt.savefig('pareto_cost_einv_space_non_empty.png')
plt.show()
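# A quick worked example of the deviation metric used above (illustrative numbers only):
# with cost_opt_cost = 100 and a pareto point at cost = 105, the x-value is
# (105 / 100 - 1) * 100 = 5, i.e. a 5% deviation from the cost optimum; the y-axis is the
# analogous relative deviation computed against einv_opt_einv.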
def plot_necessary_condition(case_study_path: str, test_case: str):
techs = ["WIND_OFFSHORE", "WIND_ONSHORE", "PV"]
# techs = ["DEC_HP_ELEC", "DEC_THHP_GAS", "DHN_HP_ELEC"]
# techs = ['GAS']
run_name = 'loc_res'
epsilons = [0.003125, 0.00625, 0.0125, 0.025, 0.05, 0.1, 0.15]
epsilons = [1/160, 1/80, 1/40, 1/20]
epsilons_pairs = [(0.025, 0.5), (0.025, 0.4), (0.025, 0.3), (0.025, 0.2),
(0.05, 0.4), (0.05, 0.3), (0.05, 0.2), (0.05, 0.1),
(0.075, 0.3), (0.075, 0.2), (0.075, 0.1), (0.075, 0.05),
(0.1, 0.3), (0.1, 0.2), (0.1, 0.1), (0.1, 0.05),
(0.125, 0.2), (0.125, 0.1), (0.125, 0.05),
(0.15, 0.2), (0.15, 0.1), (0.15, 0.05),
(0.175, 0.1), (0.175, 0.05)]
epsilons_pairs = [(2.5/100, 40/100), (2.5/100, 30/100), (2.5/100, 20/100),
(5/100, 30/100), (5/100, 20/100), (5/100, 10/100),
(7.5/100, 20/100), (7.5/100, 10/100)]
# Optimums
cost_path = f"{case_study_path}/{test_case}/cost"
cost_opt_cost = es.get_total_cost(cost_path)
einv_opt_cost = es.get_total_einv(cost_path)
einv_path = f"{case_study_path}/{test_case}/einv"
cost_opt_einv = es.get_total_cost(einv_path)
einv_opt_einv = es.get_total_einv(einv_path)
xs = [0.0]
ys = [(einv_opt_cost/einv_opt_einv-1)*100]
values = [round(sum([es.get_asset_value(cost_path, "f", tech) for tech in techs]), 3)]
# values = [round(sum([es.get_resource_used(cost_path, tech) for tech in techs]), 3)]
# Pareto front
for epsilon in epsilons:
path = f"{case_study_path}/{test_case}/cost_epsilon_{epsilon}"
cost = es.get_total_cost(path)
einv = es.get_total_einv(path)
xs += [round((cost/cost_opt_cost-1)*100, 3)]
ys += [round((einv/einv_opt_einv-1)*100, 3)]
values += [round(sum([es.get_asset_value(path, "f", tech) for tech in techs]), 3)]
# values += [round(sum([es.get_resource_used(path, tech) for tech in techs]), 3)]
xs += [(cost_opt_einv/cost_opt_cost-1)*100]
ys += [0.0]
values += [round(sum([es.get_asset_value(einv_path, "f", tech) for tech in techs]), 3)]
# values += [round(sum([es.get_resource_used(einv_path, tech) for tech in techs]), 3)]
plt.xlabel("Deviation from cost optimal (%)")
ax1 = plt.subplot()
plt.plot(xs, ys)
plt.ylabel("Deviation from Einv optimal (%)", color='C0')
ax2 = ax1.twinx()
plt.plot(xs, values, c='C1')
plt.grid()
plt.ylabel('Installed capacity', color='C1')
# Limits of the epsilon optimal spaces
for epsilon_cost, epsilon_einv in epsilons_pairs:
path = f"{case_study_path_}/{test_case_}/{run_name}_{epsilon_cost}_{epsilon_einv}"
values += [round(sum([es.get_asset_value(path, "f", tech) for tech in techs]), 2)]
# values += [round(sum([es.get_resource_used(path, tech) for tech in techs]), 2)]
xs += [epsilon_cost*100]
ys += [epsilon_einv*100]
print(xs)
print(ys)
print(list(zip(xs, ys, values)))
plt.figure()
plt.grid(zorder=1)
plt.scatter(xs, ys, s=100, c=values, cmap='viridis', zorder=2)
plt.colorbar()
# plt.grid()
plt.xlabel("Deviation from cost optimal (%)")
plt.ylabel("Deviation from Einv optimal (%)")
plt.show()
exit()
if __name__ == '__main__':
case_study_path_ = "/home/duboisa1/Global_Grid/code/EnergyScope_multi_criteria/case_studies"
test_case_ = 'pareto/gwp_constraint_50000'
epsilons_ = [1/160, 0.0125, 0.025, 0.05] #, 0.15] # [e/100 for e in range(1, 13)]
# plot_pareto(case_study_path_, test_case_, epsilons_)
plot_necessary_condition(case_study_path_, test_case_)
| 37.115152 | 96 | 0.590137 |
4a22c39a7a1bb22c7904f59060f73f132eb86a94 | 30,040 | py | Python | ambari-common/src/main/python/resource_management/libraries/providers/hdfs_resource.py | dkhwangbo/ambari | 8fee8ce8429229b587e2227f3fd73db474432459 | [
"Apache-2.0"
] | null | null | null | ambari-common/src/main/python/resource_management/libraries/providers/hdfs_resource.py | dkhwangbo/ambari | 8fee8ce8429229b587e2227f3fd73db474432459 | [
"Apache-2.0"
] | null | null | null | ambari-common/src/main/python/resource_management/libraries/providers/hdfs_resource.py | dkhwangbo/ambari | 8fee8ce8429229b587e2227f3fd73db474432459 | [
"Apache-2.0"
] | null | null | null | # !/usr/bin/env python
"""
Licensed to the Apache Software Foundation (ASF) under one
or more contributor license agreements. See the NOTICE file
distributed with this work for additional information
regarding copyright ownership. The ASF licenses this file
to you under the Apache License, Version 2.0 (the
"License"); you may not use this file except in compliance
with the License. You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
Ambari Agent
"""
import \
ambari_simplejson as json # simplejson is much faster comparing to Python 2.6 json module and has the same functions set.
import grp
import os
import pwd
import re
import time
from urlparse import urlparse
from resource_management.core import shell
from resource_management.core import sudo
from resource_management.core.base import Fail
from resource_management.core.environment import Environment
from resource_management.core.logger import Logger
from resource_management.core.providers import Provider
from resource_management.core.resources.system import Execute
from resource_management.core.resources.system import File
from resource_management.libraries.functions import format
from resource_management.libraries.functions import namenode_ha_utils
from resource_management.libraries.functions.get_user_call_output import get_user_call_output
from resource_management.libraries.functions.hdfs_utils import is_https_enabled_in_hdfs
JSON_PATH = '/var/lib/ambari-agent/tmp/hdfs_resources_{timestamp}.json'
JAR_PATH = '/var/lib/ambari-agent/lib/fast-hdfs-resource.jar'
RESOURCE_TO_JSON_FIELDS = {
'target': 'target',
'type': 'type',
'action': 'action',
'source': 'source',
'owner': 'owner',
'group': 'group',
'mode': 'mode',
'recursive_chown': 'recursiveChown',
'recursive_chmod': 'recursiveChmod',
'change_permissions_for_parents': 'changePermissionforParents',
'manage_if_exists': 'manageIfExists',
'dfs_type': 'dfs_type'
}
EXCEPTIONS_TO_RETRY = {
# ("ExceptionName"): ("required text fragment", try_count, try_sleep_seconds)
# Happens when multiple nodes try to put same file at the same time.
# Needs a longer retry time, to wait for other nodes success.
"FileNotFoundException": (" does not have any open files", 6, 30),
"LeaseExpiredException": ("", 20, 6),
"RetriableException": ("", 20, 6),
}
class HdfsResourceJar:
"""
  This is slower than the HdfsResourceWebHDFS implementation of HdfsResource, but it works in all cases on any DFS type.
  The idea is to put all the files/directories/copyFromLocals/copyToLocals we have to create/delete into a json file.
  And then perform them with ONLY ONE expensive hadoop call to our custom jar fast-hdfs-resource.jar which grabs this json.
  'create_on_execute', 'delete_on_execute' and 'download_on_execute' do nothing except add actions to this json,
  while 'execute' does all the expensive creating/deleting work, executing the jar with the json as parameter.
"""
def action_delayed(self, action_name, main_resource):
dfs_type = main_resource.resource.dfs_type
if main_resource.resource.nameservices is None: # all nameservices
nameservices = namenode_ha_utils.get_nameservices(main_resource.resource.hdfs_site)
else:
nameservices = main_resource.resource.nameservices
# non-federated cluster
if not nameservices or len(nameservices) < 2:
self.action_delayed_for_nameservice(None, action_name, main_resource)
else:
default_fs_protocol = urlparse(main_resource.resource.default_fs).scheme
if not default_fs_protocol or default_fs_protocol == "viewfs":
protocol = dfs_type.lower()
else:
protocol = default_fs_protocol
for nameservice in nameservices:
try:
nameservice = protocol + "://" + nameservice
self.action_delayed_for_nameservice(nameservice, action_name, main_resource)
except namenode_ha_utils.NoActiveNamenodeException as ex:
          # one of the nameservices can be down (during initial start, for example); no need to worry for a federated cluster
if len(nameservices) > 1:
Logger.exception("Cannot run HdfsResource for nameservice {0}. Due to no active namenode present".format(nameservice))
else:
raise
def action_delayed_for_nameservice(self, nameservice, action_name, main_resource):
resource = {}
env = Environment.get_instance()
if not 'hdfs_files' in env.config:
env.config['hdfs_files'] = []
# Put values in dictionary-resource
for field_name, json_field_name in RESOURCE_TO_JSON_FIELDS.iteritems():
if field_name == 'action':
resource[json_field_name] = action_name
elif field_name == 'mode' and main_resource.resource.mode:
resource[json_field_name] = oct(main_resource.resource.mode)[1:]
elif field_name == 'manage_if_exists':
resource[json_field_name] = main_resource.manage_if_exists
elif getattr(main_resource.resource, field_name):
resource[json_field_name] = getattr(main_resource.resource, field_name)
resource['nameservice'] = nameservice
# Add resource to create
env.config['hdfs_files'].append(resource)
def action_execute(self, main_resource):
env = Environment.get_instance()
# Check required parameters
main_resource.assert_parameter_is_set('user')
if not 'hdfs_files' in env.config or not env.config['hdfs_files']:
Logger.info("No resources to create. 'create_on_execute' or 'delete_on_execute' or 'download_on_execute' wasn't triggered before this 'execute' action.")
return
hadoop_bin_dir = main_resource.resource.hadoop_bin_dir
hadoop_conf_dir = main_resource.resource.hadoop_conf_dir
user = main_resource.resource.user
security_enabled = main_resource.resource.security_enabled
keytab_file = main_resource.resource.keytab
kinit_path = main_resource.resource.kinit_path_local
logoutput = main_resource.resource.logoutput
principal_name = main_resource.resource.principal_name
jar_path=JAR_PATH
timestamp = time.time()
json_path=format(JSON_PATH)
if security_enabled:
main_resource.kinit()
# Write json file to disk
File(json_path,
owner = user,
content = json.dumps(env.config['hdfs_files'])
)
# Execute jar to create/delete resources in hadoop
Execute(format("hadoop --config {hadoop_conf_dir} jar {jar_path} {json_path}"),
user=user,
path=[hadoop_bin_dir],
logoutput=logoutput,
)
# Clean
env.config['hdfs_files'] = []
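# Illustrative usage sketch (not part of this module): in an Ambari service script the
# provider above is normally driven through the HdfsResource resource, accumulating
# delayed actions and flushing them with a single 'execute' call, e.g.
#
#   HdfsResource("/user/example", type="directory", action="create_on_execute",
#                owner="example", mode=0755)
#   HdfsResource(None, action="execute")  # one expensive hadoop call performs everything
#
# The path, owner and mode values are placeholders, not values taken from this module.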
class WebHDFSCallException(Fail):
def __init__(self, message, result_message):
self.result_message = result_message
super(WebHDFSCallException, self).__init__(message)
def get_exception_name(self):
if isinstance(self.result_message, dict) and "RemoteException" in self.result_message and "exception" in self.result_message["RemoteException"]:
return self.result_message["RemoteException"]["exception"]
return None
def get_exception_text(self):
if isinstance(self.result_message, dict) and "RemoteException" in self.result_message and "message" in self.result_message["RemoteException"]:
return self.result_message["RemoteException"]["message"]
return None
class WebHDFSUtil:
def __init__(self, hdfs_site, nameservice, run_user, security_enabled, logoutput=None):
self.is_https_enabled = is_https_enabled_in_hdfs(hdfs_site['dfs.http.policy'], hdfs_site['dfs.https.enable'])
address_property = 'dfs.namenode.https-address' if self.is_https_enabled else 'dfs.namenode.http-address'
address = namenode_ha_utils.get_property_for_active_namenode(hdfs_site, nameservice, address_property,
security_enabled, run_user)
protocol = "https" if self.is_https_enabled else "http"
self.address = format("{protocol}://{address}")
self.run_user = run_user
self.security_enabled = security_enabled
self.logoutput = logoutput
@staticmethod
def is_webhdfs_available(is_webhdfs_enabled, dfs_type):
# only hdfs seems to support webHDFS
return (is_webhdfs_enabled and dfs_type == 'HDFS')
def run_command(self, *args, **kwargs):
"""
    This function is a wrapper around self._run_command which implements the retry routine for it.
"""
try:
return self._run_command(*args, **kwargs)
except WebHDFSCallException as ex:
exception_name = ex.get_exception_name()
exception_text = ex.get_exception_text()
if exception_name in EXCEPTIONS_TO_RETRY:
required_text, try_count, try_sleep = EXCEPTIONS_TO_RETRY[exception_name]
if not required_text or (exception_text and required_text in exception_text):
last_exception = ex
else:
raise
else:
raise
while True:
Logger.info("Retrying after {0} seconds. Reason: {1}".format(try_sleep, str(last_exception)))
try_count -= 1
time.sleep(try_sleep)
if try_count == 0:
break
try:
self._run_command(*args, **kwargs)
break
except WebHDFSCallException as ex:
last_exception = ex
valid_status_codes = ["200", "201"]
def _run_command(self, target, operation, method='POST', assertable_result=True, file_to_put=None, ignore_status_codes=[], **kwargs):
"""
assertable_result - some POST requests return '{"boolean":false}' or '{"boolean":true}'
    depending on whether the query was successful or not, so we can assert this for them
"""
target = HdfsResourceProvider.parse_path(target)
if not target:
raise Fail("Target cannot be empty")
url = format("{address}/webhdfs/v1{target}?op={operation}", address=self.address)
request_args = kwargs
if not self.security_enabled:
request_args['user.name'] = self.run_user
for k,v in request_args.iteritems():
url = format("{url}&{k}={v}")
cmd = ["curl", "-sS","-L", "-w", "%{http_code}", "-X", method]
# When operation is "OPEN" the target is actually the DFS file to download and the file_to_put is actually the target see _download_file
if operation == "OPEN":
cmd += ["-o", file_to_put]
else:
if file_to_put and not os.path.exists(file_to_put):
raise Fail(format("File {file_to_put} is not found."))
if file_to_put:
cmd += ["--data-binary", "@"+file_to_put, "-H", "Content-Type: application/octet-stream"]
if self.security_enabled:
cmd += ["--negotiate", "-u", ":"]
if self.is_https_enabled:
cmd += ["-k"]
cmd.append(url)
_, out, err = get_user_call_output(cmd, user=self.run_user, logoutput=self.logoutput, quiet=False)
status_code = out[-3:]
out = out[:-3] # remove last line from output which is status code
try:
result_dict = json.loads(out)
except ValueError:
result_dict = out
if status_code not in WebHDFSUtil.valid_status_codes+ignore_status_codes or assertable_result and result_dict and not result_dict['boolean']:
formatted_output = json.dumps(result_dict, indent=2) if isinstance(result_dict, dict) else result_dict
formatted_output = err + "\n" + formatted_output
err_msg = "Execution of '%s' returned status_code=%s. %s" % (shell.string_cmd_from_args_list(cmd), status_code, formatted_output)
raise WebHDFSCallException(err_msg, result_dict)
return result_dict
class HdfsResourceWebHDFS:
"""
This is the fastest implementation of HdfsResource using WebHDFS.
  Since WebHDFS is not available on non-HDFS filesystems and can also be disabled for HDFS itself,
  we still keep the other implementations for such cases.
"""
"""
  If we have more than this count of files to recursively chmod/chown,
  webhdfs won't be used; 'hadoop fs -chmod (or chown) -R ..' is used instead, as webhdfs can be really slow
  (roughly 17 files can be chmoded in one second).
"""
MAX_FILES_FOR_RECURSIVE_ACTION_VIA_WEBHDFS = 1000
"""
  This is used to avoid a lot of LISTSTATUS commands, which can take some time if a directory
  contains a lot of files. LISTSTATUS of a directory with 1000 files takes ~0.5 seconds.
"""
MAX_DIRECTORIES_FOR_RECURSIVE_ACTION_VIA_WEBHDFS = 250
def action_execute(self, main_resource):
pass
def _assert_valid(self):
source = self.main_resource.resource.source
type = self.main_resource.resource.type
target = self.main_resource.resource.target
if source:
if not os.path.exists(source):
raise Fail(format("Source {source} doesn't exist"))
if type == "directory" and os.path.isfile(source):
raise Fail(format("Source {source} is file but type is {type}"))
elif type == "file" and os.path.isdir(source):
raise Fail(format("Source {source} is directory but type is {type}"))
self.target_status = self._get_file_status(target)
if self.target_status and self.target_status['type'].lower() != type:
raise Fail(format("Trying to create file/directory but directory/file exists in the DFS on {target}"))
def _assert_download_valid(self):
source = self.main_resource.resource.source
type = self.main_resource.resource.type
target = self.main_resource.resource.target
if source:
self.source_status = self._get_file_status(source)
if self.source_status == None:
raise Fail(format("Source {source} doesn't exist"))
if type == "directory" and self.source_status['type'] == "FILE":
raise Fail(format("Source {source} is file but type is {type}"))
elif type == "file" and self.source_status['type'] == "DIRECTORY":
raise Fail(format("Source {source} is directory but type is {type}"))
else:
raise Fail(format("No source provided"))
if os.path.exists(target):
if type == "directory" and os.path.isfile(target):
raise Fail(format("Trying to download directory but file exists locally {target}"))
elif type == "file" and os.path.isdir(target):
raise Fail(format("Trying to download file but directory exists locally {target}"))
def action_delayed(self, action_name, main_resource):
main_resource.assert_parameter_is_set('user')
if main_resource.resource.security_enabled:
main_resource.kinit()
if main_resource.resource.nameservices is None:
nameservices = namenode_ha_utils.get_nameservices(main_resource.resource.hdfs_site)
else:
nameservices = main_resource.resource.nameservices
if not nameservices:
self.action_delayed_for_nameservice(None, action_name, main_resource)
else:
for nameservice in nameservices:
try:
self.action_delayed_for_nameservice(nameservice, action_name, main_resource)
except namenode_ha_utils.NoActiveNamenodeException as ex:
          # one of the nameservices can be down (during initial start, for example); no need to worry for a federated cluster
if len(nameservices) > 1:
Logger.exception("Cannot run HdfsResource for nameservice {0}. Due to no active namenode present".format(nameservice))
else:
raise
def action_delayed_for_nameservice(self, nameservice, action_name, main_resource):
self.util = WebHDFSUtil(main_resource.resource.hdfs_site, nameservice, main_resource.resource.user,
main_resource.resource.security_enabled, main_resource.resource.logoutput)
self.mode = oct(main_resource.resource.mode)[1:] if main_resource.resource.mode else main_resource.resource.mode
self.mode_set = False
self.main_resource = main_resource
if action_name == "download":
self._assert_download_valid()
else:
self._assert_valid()
if self.main_resource.manage_if_exists == False and self.target_status:
Logger.info("Skipping the operation for not managed DFS directory " + str(self.main_resource.resource.target) +
" since immutable_paths contains it.")
return
if action_name == "create":
self._create_resource()
self._set_mode(self.target_status)
self._set_owner(self.target_status)
elif action_name == "download":
self._download_resource()
else:
self._delete_resource()
def _create_resource(self):
is_create = (self.main_resource.resource.source == None)
if is_create and self.main_resource.resource.type == "directory":
self._create_directory(self.main_resource.resource.target)
elif is_create and self.main_resource.resource.type == "file":
self._create_file(self.main_resource.target, mode=self.mode)
elif not is_create and self.main_resource.resource.type == "file":
self._create_file(self.main_resource.resource.target, source=self.main_resource.resource.source, mode=self.mode)
elif not is_create and self.main_resource.resource.type == "directory":
self._create_directory(self.main_resource.resource.target)
self._copy_from_local_directory(self.main_resource.resource.target, self.main_resource.resource.source)
def _copy_from_local_directory(self, target, source):
for next_path_part in sudo.listdir(source):
new_source = os.path.join(source, next_path_part)
new_target = format("{target}/{next_path_part}")
if sudo.path_isdir(new_source):
Logger.info(format("Creating DFS directory {new_target}"))
self._create_directory(new_target)
self._copy_from_local_directory(new_target, new_source)
else:
self._create_file(new_target, new_source)
def _download_resource(self):
if self.main_resource.resource.source == None:
return
if self.main_resource.resource.type == "file":
self._download_file(self.main_resource.resource.target, self.main_resource.resource.source, self.source_status)
elif self.main_resource.resource.type == "directory":
self._download_directory(self.main_resource.resource.target, self.main_resource.resource.source)
def _download_directory(self, target, source):
self._create_local_directory(target)
for file_status in self._list_directory(source):
if not file_status == None:
next_path_part = file_status['pathSuffix']
new_source = format("{source}/{next_path_part}")
new_target = os.path.join(target, next_path_part)
if file_status['type'] == "DIRECTORY":
self._download_directory(new_target, new_source)
else:
self._download_file(new_target, new_source, file_status)
def _create_local_directory(self, target):
if not os.path.exists(target):
Logger.info(format("Creating local directory {target}"))
sudo.makedir(target, "")
owner_name = "" if not self.main_resource.resource.owner else self.main_resource.resource.owner
group_name = "" if not self.main_resource.resource.group else self.main_resource.resource.group
owner = pwd.getpwnam(owner_name)
group = grp.getgrnam(group_name)
sudo.chown(target, owner, group)
def _download_file(self, target, source, file_status):
"""
    Downloading a file is slow, however _get_file_status is pretty fast,
    so we check whether the file really needs to be downloaded before doing it.
"""
if file_status and os.path.exists(target):
length = file_status['length']
local_file_size = os.stat(target).st_size # TODO: os -> sudo
# TODO: re-implement this using checksums
if local_file_size == length:
Logger.info(format("DFS file {source} is identical to {target}, skipping the download"))
return
elif not self.main_resource.resource.replace_existing_files:
Logger.info(format("Not replacing existing local file {target} which is different from DFS file {source}, due to replace_existing_files=False"))
return
kwargs = {}
self.util.run_command(source, 'OPEN', method='GET', overwrite=True, assertable_result=False, file_to_put=target, **kwargs)
def _create_directory(self, target):
if target == self.main_resource.resource.target and self.target_status:
return
self.util.run_command(target, 'MKDIRS', method='PUT')
def _get_file_status(self, target):
list_status = self.util.run_command(target, 'GETFILESTATUS', method='GET', ignore_status_codes=['404'], assertable_result=False)
return list_status['FileStatus'] if 'FileStatus' in list_status else None
def _list_directory(self, target):
results = self.util.run_command(target, 'LISTSTATUS', method='GET', ignore_status_codes=['404'], assertable_result=False)
entry = results['FileStatuses'] if 'FileStatuses' in results else None
if entry == None:
return []
return entry['FileStatus'] if 'FileStatus' in entry else []
def _create_file(self, target, source=None, mode=""):
"""
    The PUT file command is slow, however _get_file_status is pretty fast,
    so we check whether the file really needs to be put before doing it.
"""
file_status = self._get_file_status(target) if target!=self.main_resource.resource.target else self.target_status
mode = "" if not mode else mode
if file_status:
if source:
length = file_status['length']
local_file_size = os.stat(source).st_size # TODO: os -> sudo
# TODO: re-implement this using checksums
if local_file_size == length:
Logger.info(format("DFS file {target} is identical to {source}, skipping the copying"))
return
elif not self.main_resource.resource.replace_existing_files:
Logger.info(format("Not replacing existing DFS file {target} which is different from {source}, due to replace_existing_files=False"))
return
else:
Logger.info(format("File {target} already exists in DFS, skipping the creation"))
return
Logger.info(format("Creating new file {target} in DFS"))
kwargs = {'permission': mode} if mode else {}
self.util.run_command(target, 'CREATE', method='PUT', overwrite=True, assertable_result=False, file_to_put=source, **kwargs)
if mode and file_status:
file_status['permission'] = mode
def _delete_resource(self):
if not self.target_status:
return
self.util.run_command(self.main_resource.resource.target, 'DELETE', method='DELETE', recursive=True)
def _set_owner(self, file_status=None):
owner = "" if not self.main_resource.resource.owner else self.main_resource.resource.owner
group = "" if not self.main_resource.resource.group else self.main_resource.resource.group
if not self.main_resource.resource.recursive_chown and (not owner or file_status and file_status['owner'] == owner) and (not group or file_status and file_status['group'] == group):
return
self.util.run_command(self.main_resource.resource.target, 'SETOWNER', method='PUT', owner=owner, group=group, assertable_result=False)
results = []
if self.main_resource.resource.recursive_chown:
content_summary = self.util.run_command(self.main_resource.resource.target, 'GETCONTENTSUMMARY', method='GET', assertable_result=False)
if content_summary['ContentSummary']['fileCount'] <= HdfsResourceWebHDFS.MAX_FILES_FOR_RECURSIVE_ACTION_VIA_WEBHDFS and content_summary['ContentSummary']['directoryCount'] <= HdfsResourceWebHDFS.MAX_DIRECTORIES_FOR_RECURSIVE_ACTION_VIA_WEBHDFS:
self._fill_directories_list(self.main_resource.resource.target, results)
      else: # avoid chowning a lot of files and listing a lot of dirs via webhdfs which can take a lot of time.
shell.checked_call(["hadoop", "fs", "-chown", "-R", format("{owner}:{group}"), self.main_resource.resource.target], user=self.main_resource.resource.user)
if self.main_resource.resource.change_permissions_for_parents:
self._fill_in_parent_directories(self.main_resource.resource.target, results)
for path in results:
self.util.run_command(path, 'SETOWNER', method='PUT', owner=owner, group=group, assertable_result=False)
def _set_mode(self, file_status=None):
if not self.mode or file_status and file_status['permission'] == self.mode:
return
if not self.mode_set:
self.util.run_command(self.main_resource.resource.target, 'SETPERMISSION', method='PUT', permission=self.mode, assertable_result=False)
results = []
if self.main_resource.resource.recursive_chmod:
content_summary = self.util.run_command(self.main_resource.resource.target, 'GETCONTENTSUMMARY', method='GET', assertable_result=False)
if content_summary['ContentSummary']['fileCount'] <= HdfsResourceWebHDFS.MAX_FILES_FOR_RECURSIVE_ACTION_VIA_WEBHDFS and content_summary['ContentSummary']['directoryCount'] <= HdfsResourceWebHDFS.MAX_DIRECTORIES_FOR_RECURSIVE_ACTION_VIA_WEBHDFS:
self._fill_directories_list(self.main_resource.resource.target, results)
      else: # avoid chmoding a lot of files and listing a lot of dirs via webhdfs which can take a lot of time.
shell.checked_call(["hadoop", "fs", "-chmod", "-R", self.mode, self.main_resource.resource.target], user=self.main_resource.resource.user)
if self.main_resource.resource.change_permissions_for_parents:
self._fill_in_parent_directories(self.main_resource.resource.target, results)
for path in results:
self.util.run_command(path, 'SETPERMISSION', method='PUT', permission=self.mode, assertable_result=False)
def _fill_in_parent_directories(self, target, results):
path_parts = HdfsResourceProvider.parse_path(target).split("/")[1:]# [1:] remove '' from parts
path = "/"
for path_part in path_parts:
path += path_part + "/"
results.append(path)
def _fill_directories_list(self, target, results):
list_status = self.util.run_command(target, 'LISTSTATUS', method='GET', assertable_result=False)['FileStatuses']['FileStatus']
for file in list_status:
if file['pathSuffix']:
new_path = target + "/" + file['pathSuffix']
results.append(new_path)
if file['type'] == 'DIRECTORY':
self._fill_directories_list(new_path, results)
class HdfsResourceProvider(Provider):
def __init__(self, resource):
super(HdfsResourceProvider,self).__init__(resource)
self.assert_parameter_is_set('dfs_type')
self.fsType = getattr(resource, 'dfs_type')
self.ignored_resources_list = HdfsResourceProvider.get_ignored_resources_list(self.resource.hdfs_resource_ignore_file)
if self.fsType == 'HDFS':
self.assert_parameter_is_set('hdfs_site')
self.webhdfs_enabled = self.resource.hdfs_site['dfs.webhdfs.enabled']
else:
self.webhdfs_enabled = False
@staticmethod
def parse_path(path):
"""
hdfs://nn_url:1234/a/b/c -> /a/b/c
hdfs://nn_ha_name/a/b/c -> /a/b/c
hdfs:///a/b/c -> /a/b/c
/a/b/c -> /a/b/c
"""
math_with_protocol_and_nn_url = re.match("[a-zA-Z]+://[^/]+(/.+)", path)
math_with_protocol = re.match("[a-zA-Z]+://(/.+)", path)
if math_with_protocol_and_nn_url:
path = math_with_protocol_and_nn_url.group(1)
elif math_with_protocol:
path = math_with_protocol.group(1)
else:
path = path
return re.sub("[/]+", "/", path)
@staticmethod
def get_ignored_resources_list(hdfs_resource_ignore_file):
if not hdfs_resource_ignore_file or not os.path.exists(hdfs_resource_ignore_file):
return []
with open(hdfs_resource_ignore_file, "rb") as fp:
content = fp.read()
hdfs_resources_to_ignore = []
for hdfs_resource_to_ignore in content.split("\n"):
hdfs_resources_to_ignore.append(HdfsResourceProvider.parse_path(hdfs_resource_to_ignore))
return hdfs_resources_to_ignore
def action_delayed(self, action_name):
self.assert_parameter_is_set('type')
path_protocol = urlparse(self.resource.target).scheme.lower()
default_fs_protocol = urlparse(self.resource.default_fs).scheme.lower()
if path_protocol and default_fs_protocol != "viewfs" and path_protocol != default_fs_protocol:
Logger.info("Skipping creation of {0} since it is not in default filesystem.".format(self.resource.target))
return
parsed_path = HdfsResourceProvider.parse_path(self.resource.target)
parsed_not_managed_paths = [HdfsResourceProvider.parse_path(path) for path in self.resource.immutable_paths]
self.manage_if_exists = not parsed_path in parsed_not_managed_paths
if parsed_path in self.ignored_resources_list:
Logger.info("Skipping '{0}' because it is in ignore file {1}.".format(self.resource, self.resource.hdfs_resource_ignore_file))
return
self.get_hdfs_resource_executor().action_delayed(action_name, self)
def action_create_on_execute(self):
self.action_delayed("create")
def action_delete_on_execute(self):
self.action_delayed("delete")
def action_download_on_execute(self):
self.action_delayed("download")
def action_execute(self):
self.get_hdfs_resource_executor().action_execute(self)
def get_hdfs_resource_executor(self):
if WebHDFSUtil.is_webhdfs_available(self.webhdfs_enabled, self.fsType):
return HdfsResourceWebHDFS()
else:
return HdfsResourceJar()
def assert_parameter_is_set(self, parameter_name):
if not getattr(self.resource, parameter_name):
raise Fail("Resource parameter '{0}' is not set.".format(parameter_name))
return True
def kinit(self):
keytab_file = self.resource.keytab
kinit_path = self.resource.kinit_path_local
principal_name = self.resource.principal_name
user = self.resource.user
Execute(format("{kinit_path} -kt {keytab_file} {principal_name}"),
user=user
)
| 42.072829 | 250 | 0.718842 |
4a22c4937eff8c71a89f280368062755d55fdc9e | 4,500 | py | Python | RELdb/glove.py | mickvanhulst/RELdb | 3c11719ae41dcf6970f0120790572503bfd05136 | [
"MIT"
] | null | null | null | RELdb/glove.py | mickvanhulst/RELdb | 3c11719ae41dcf6970f0120790572503bfd05136 | [
"MIT"
] | null | null | null | RELdb/glove.py | mickvanhulst/RELdb | 3c11719ae41dcf6970f0120790572503bfd05136 | [
"MIT"
] | null | null | null | from collections import namedtuple
import zipfile
from numpy import zeros
from RELdb.base import DB
class GloveEmbedding(DB):
"""
Reference: http://nlp.stanford.edu/projects/glove
"""
GloveSetting = namedtuple('GloveSetting', ['url', 'd_embs', 'size', 'description'])
settings = {
'common_crawl_48': GloveSetting('http://nlp.stanford.edu/data/glove.42B.300d.zip',
[300], 1917494, '48B token common crawl'),
'common_crawl_840': GloveSetting('http://nlp.stanford.edu/data/glove.840B.300d.zip',
[300], 2195895, '840B token common crawl'),
'twitter': GloveSetting('http://nlp.stanford.edu/data/glove.twitter.27B.zip',
[25, 50, 100, 200], 1193514, '27B token twitter'),
'wikipedia_gigaword': GloveSetting('http://nlp.stanford.edu/data/glove.6B.zip',
[50, 100, 200, 300], 400000, '6B token wikipedia 2014 + gigaword 5'),
}
def __init__(self, name, save_dir, table_name, columns={"emb": "blob"},
d_emb=300):
"""
Args:
            name: name of the embedding to retrieve.
            save_dir: directory where the embedding database file is stored.
            table_name: name of the database table holding the embeddings.
            columns: mapping of column names to column types for that table.
            d_emb: embedding dimensions.
"""
assert name in self.settings, '{} is not a valid corpus. Valid options: {}'.format(name, self.settings)
self.setting = self.settings[name]
assert d_emb in self.setting.d_embs, '{} is not a valid dimension for {}. Valid options: {}'.format(d_emb, name, self.setting)
self.d_emb = d_emb
self.name = name
self.table_name = table_name
self.columns = columns
self.save_dir = save_dir
self.name = name
path_db = "{}/{}.db".format(save_dir, name)
self.avg_cnt = {"cnt": 0, "sum": zeros(d_emb)}
self.db = self.initialize_db(path_db, table_name, columns)
def emb(self, words, table_name):
print(words, table_name)
g = self.lookup(words, table_name)
return g
def load_word2emb(self, batch_size=1000):
self.clear()
fin_name = self.ensure_file('glove', url=self.setting.url)
print(fin_name)
seen = set()
with zipfile.ZipFile(fin_name) as fin:
fname_zipped = [fzipped.filename for fzipped in fin.filelist if str(self.d_emb) in fzipped.filename][0]
with fin.open(fname_zipped, 'r') as fin_zipped:
batch = []
for line in fin_zipped:
elems = line.decode().rstrip().split()
vec = [float(n) for n in elems[-self.d_emb:]]
word = ' '.join(elems[:-self.d_emb])
if word in seen:
continue
seen.add(word)
batch.append((word, vec))
self.avg_cnt["cnt"] += 1
self.avg_cnt["sum"] += vec
if len(batch) == batch_size:
self.insert_batch_emb(batch)
batch.clear()
                # Here we are also adding a token based on the average. Take note though that our reported scores
# for the REL package are based on a random vector as this was also used by Le et al.
# He reported, however, that he did not notice a difference between using either of the two.
if self.avg_cnt["cnt"] > 0:
batch.append(
(
"#SND/UNK#",
self.avg_cnt["sum"] / self.avg_cnt["cnt"],
)
)
print("Added average for category: #WORD/UNK#")
if batch:
self.insert_batch(batch)
if __name__ == '__main__':
from time import time
save_dir = "C:/Users/mickv/Desktop/testemb/"
emb = GloveEmbedding('common_crawl_48', save_dir=save_dir, table_name='embeddings',
columns={"emb": "blob"}, d_emb=300)
emb.load_word2emb(5000)
for w in ['canada', 'vancouver', 'toronto']:
start = time()
print('embedding {}'.format(w))
print(emb.emb([w], 'embeddings'))
print('took {}s'.format(time() - start))
| 41.284404 | 143 | 0.541333 |
4a22c4a9e83934d6d7f3738c43a419f333a309ff | 5,368 | py | Python | lambdad.py | icfpcontest2019/lambda-client | 662a78b4121465477b6b62e98663a3ceaf759644 | [
"MIT"
] | 1 | 2019-08-02T19:28:43.000Z | 2019-08-02T19:28:43.000Z | lambdad.py | icfpcontest2019/lambda-client | 662a78b4121465477b6b62e98663a3ceaf759644 | [
"MIT"
] | 8 | 2021-03-19T01:35:09.000Z | 2022-03-11T23:50:33.000Z | lambdad.py | icfpcontest2019/lambda-client | 662a78b4121465477b6b62e98663a3ceaf759644 | [
"MIT"
] | 1 | 2019-06-26T05:52:12.000Z | 2019-06-26T05:52:12.000Z | #!/usr/bin/env python3
from werkzeug.wrappers import Request, Response
from werkzeug.serving import run_simple
from jsonrpc import JSONRPCResponseManager, dispatcher
from cachetools import cached, TTLCache
import urllib, urllib.parse
import requests
import json
import argparse
import threading
import os
import configparser
from datetime import datetime
# https://stackoverflow.com/questions/12435211/python-threading-timer-repeat-function-every-n-seconds
def every(interval):
def decorator(function):
def wrapper(*args, **kwargs):
stopped = threading.Event()
def loop(): # executed in another thread
while not stopped.wait(interval): # until stopped
function(*args, **kwargs)
t = threading.Thread(target=loop)
t.daemon = True # stop if the program exits
t.start()
return stopped
return wrapper
return decorator
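# Illustrative usage of the decorator above (not part of the original script): the wrapped
# function is re-run in a daemon thread every `interval` seconds, and calling it returns a
# threading.Event that stops the loop once set. The function name below is a placeholder.
#
#   @every(30)
#   def heartbeat():
#       print("still alive")
#
#   stopper = heartbeat()   # starts the background loop
#   stopper.set()           # stops it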
# In case of multi-threaded accesses: keep cache coherent
lock = threading.RLock()
CACHE_TIME = 5
REFRESH_TIME = CACHE_TIME + 0 # no reason for this to be smaller than CACHE_TIME
TASK_FILE = "task.desc"
PUZZLE_FILE = "puzzle.cond"
BALANCES_FILE = "balances.json"
TS_FILE = "timestamp.txt"
DONE_FILE = ".done"
CONFIG_FILE = 'lambda.conf'
# Populated by config
DEFAULT_BIND_ADDR = '127.0.0.1'
DEFAULT_PORT = 8332
DATA_DIR = 'blocks/'
BLOCKCHAIN_ENDPOINT = 'http://localhost:5000/lambda/'
PRIVATE_ID = None
PUBLIC_ID = None
# Totally decentralised!
@cached(cache=TTLCache(maxsize=10, ttl=CACHE_TIME), lock=lock)
def pass_through(method_name, arg=None):
url = urllib.parse.urljoin(BLOCKCHAIN_ENDPOINT, method_name)
if arg is not None:
url = urllib.parse.urljoin(url + '/', str(arg))
with urllib.request.urlopen(url) as s:
return json.loads(s.read())
# JSON-RPC methods
@dispatcher.add_method
def getblockchaininfo():
return pass_through('getblockchaininfo')
@dispatcher.add_method
def getmininginfo():
return pass_through('getmininginfo')
@dispatcher.add_method
def getbalances():
return pass_through('getbalances')
@dispatcher.add_method
def getbalance(id=None):
if id is None:
id = PUBLIC_ID
return pass_through('getbalance', id)
@dispatcher.add_method
def getblockinfo(block_num=None):
return pass_through('getblockinfo', block_num)
@dispatcher.add_method
def submit(block_num, sol_path, desc_path):
url = urllib.parse.urljoin(BLOCKCHAIN_ENDPOINT, 'submit')
data = {'private_id': PRIVATE_ID, 'block_num': block_num}
files = {'solution': open(sol_path), 'puzzle': open(desc_path)}
response = requests.post(url, data=data, files=files, allow_redirects=True)
return response.json()
# Auto-update logic
def have_block(block_num):
block_num = str(block_num)
df = os.path.join(DATA_DIR, block_num, DONE_FILE)
return os.path.exists(df)
def save_block(block_info):
block_num = str(block_info['block'])
ts = block_info['block_ts']
balances = block_info['balances']
task = block_info['task']
puzzle = block_info['puzzle']
bd = os.path.join(DATA_DIR, block_num)
os.makedirs(bd, exist_ok=True)
tsf = os.path.join(bd, TS_FILE)
bf = os.path.join(bd, BALANCES_FILE)
tf = os.path.join(bd, TASK_FILE)
pf = os.path.join(bd, PUZZLE_FILE)
df = os.path.join(bd, DONE_FILE)
with open(tsf, 'w') as f:
f.write(str(ts))
with open(bf, 'w') as f:
json.dump(balances, f)
with open(tf, 'w') as f:
f.write(task)
with open(pf, 'w') as f:
f.write(puzzle)
# Create the DONE file
with open(df, 'w') as f:
f.close()
# Update every REFRESH_TIME seconds
@every(REFRESH_TIME)
def update():
try:
block_info = getblockinfo()
block_num = block_info['block']
if not have_block(block_num):
save_block(block_info)
# Fill in gaps if they exist
for b in range(1, block_num):
if not have_block(b):
save_block(getblockinfo(b))
except Exception as e:
now = datetime.now().strftime("%c")
print("[{}] Update exception: {}".format(now, e))
# Daemon
@Request.application
def application(request):
response = JSONRPCResponseManager.handle(
request.data, dispatcher)
return Response(response.json, mimetype='application/json')
if __name__ == '__main__':
config = configparser.ConfigParser()
config.read(CONFIG_FILE)
settings = config['DEFAULT']
keys = config['SECRET']
# Populate global settings
DATA_DIR = settings.get('DataDir')
BLOCKCHAIN_ENDPOINT = settings.get('DecentralisationProvider')
DEFAULT_BIND_ADDR = settings.get('DefaultBindAddress')
DEFAULT_PORT = settings.getint('DefaultPort')
PRIVATE_ID = keys.get('PrivateKey')
PUBLIC_ID = keys.get('PublicKey')
# Parse arguments
parser = argparse.ArgumentParser(description='JSON-RPC daemon for the LambdaCoin blockchain.')
parser.add_argument('-b', '--bind', default=DEFAULT_BIND_ADDR, help='bind on address')
parser.add_argument('-p', '--port', default=DEFAULT_PORT, help='listen on port')
args = parser.parse_args()
try:
args.port = int(args.port)
except ValueError:
parser.error('Port must be an integer.')
updater = update()
run_simple(args.bind, args.port, application)
| 29.988827 | 101 | 0.68424 |
4a22c6287ef5248d1c8c7918fb8df2dd92b7be8a | 1,530 | py | Python | ejercicios/repaso_creatividad_proyecto/tarjeta_credito_depredadora.py | carlosviveros/Soluciones | 115f4fa929c7854ca497e4c994352adc64565456 | [
"MIT"
] | 4 | 2021-12-14T23:51:25.000Z | 2022-03-24T11:14:00.000Z | ejercicios/repaso_creatividad_proyecto/tarjeta_credito_depredadora.py | leugimkm/Soluciones | d71601c8d9b5e86e926f48d9e49462af8a956b6d | [
"MIT"
] | null | null | null | ejercicios/repaso_creatividad_proyecto/tarjeta_credito_depredadora.py | leugimkm/Soluciones | d71601c8d9b5e86e926f48d9e49462af8a956b6d | [
"MIT"
] | 5 | 2021-11-10T06:49:50.000Z | 2022-03-24T01:42:28.000Z | """AyudaEnPython: https://www.facebook.com/groups/ayudapython
NOTE: As with TarjetaCredito, we opt to improve the class.
"""
from tarjetas import TarjetaCredito
class TarjetaCreditoDepredadora(TarjetaCredito):
"""Tarjeta de credito que cobra intereses e impouestos."""
__IMPUESTO = 5
def __init__(
self,
cliente: str,
banco: str,
cuenta: str,
limite: float,
interes: float,
balance: float = 0,
) -> None:
super().__init__(cliente, banco, cuenta, limite, balance)
self._interes = interes
self.__i = 0
self.__adicional = 0
self.__pago_minimo = 0
@property
def pago_minimo(self) -> float:
return self.__pago_minimo
def cargar(self, precio: float) -> bool:
"""Carga el precio dado en la tarjeta, asumiendo que hay
credito.
:param precio: precio a cargar
:return: True si se pudo cargar, False de lo contrario y cobra
$5 de impuesto si el cargo es denegado.
"""
self.__i += 1
if self.__i >= 10:
self.__adicional = 1
exito = super().cargar(precio)
if not exito:
self._balance += self.__class__.__IMPUESTO + self.__adicional
return exito
def proceso_mensual(self) -> None:
"""Asigna un interes mensual sobre el balance."""
if self._balance > 0:
self._balance *= (1 + self._interes)**(1/12)
self.__pago_minimo = self._balance * 0.1 | 29.423077 | 73 | 0.598693 |
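# Illustrative sketch (not part of the original exercise), assuming the TarjetaCredito base
# class rejects charges that exceed the limit; the constructor values are placeholders.
def _demo():  # pragma: no cover
    card = TarjetaCreditoDepredadora("Ana", "Banco X", "1234", limite=100, interes=0.08)
    card.cargar(80)           # accepted: the balance grows by 80
    card.cargar(50)           # denied (over the limit): a $5 tax is added to the balance
    card.proceso_mensual()    # applies one month of interest to the positive balance
    return card.pago_minimo   # 10% of the updated balance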
4a22c64bf1472fb6c5760d40f9773902290c81e4 | 8,578 | py | Python | src/encoded/ingestion/queue_utils.py | dbmi-bgm/cgap-portal | 1cc2e5574ae38bda9ef29aa4e7ab0daa1195ca05 | [
"MIT"
] | 2 | 2019-07-26T19:17:59.000Z | 2019-09-19T16:58:36.000Z | src/encoded/ingestion/queue_utils.py | dbmi-bgm/cgap-portal | 1cc2e5574ae38bda9ef29aa4e7ab0daa1195ca05 | [
"MIT"
] | 106 | 2019-09-04T14:37:08.000Z | 2022-03-31T18:41:14.000Z | src/encoded/ingestion/queue_utils.py | dbmi-bgm/cgap-portal | 1cc2e5574ae38bda9ef29aa4e7ab0daa1195ca05 | [
"MIT"
] | 2 | 2019-07-01T17:28:56.000Z | 2021-01-12T14:03:53.000Z | import time
import json
import socket
import boto3
import structlog
import datetime
log = structlog.getLogger(__name__)
class IngestionQueueManager:
"""
Similar to QueueManager in snovault in that in manages SQS queues, but that code is not generic
enough to use here, so it is "duplicated" so to speak here. At a later time the functionality of this
class and QueueManager should be refactored into a "helper" class, but for now this is sufficient
and is tested independently here.
We will use a single queue to keep track of File uuids to be indexed. This used to manage only VCFs
but now the Ingestion functionality is generic and can be extended to arbitrary processing on
any type.
"""
BUCKET_EXTENSION = '-ingestion-queue' # XXX: breaking change, matches 4dn-cloud-infra resources
def __init__(self, registry, override_name=None):
""" Does initial setup for interacting with SQS """
self.batch_size = 1 # NOTE: this value is important because we don't want to block other jobs
self.env_name = registry.settings.get('env.name', None)
if not self.env_name: # replace with something usable
backup = socket.gethostname()[:80].replace('.', '-')
self.env_name = backup if backup else 'cgap-backup'
kwargs = {
'region_name': 'us-east-1'
}
self.client = boto3.client('sqs', **kwargs)
self.queue_name = override_name or (self.env_name + self.BUCKET_EXTENSION)
self.queue_attrs = {
self.queue_name: {
'DelaySeconds': '1', # messages initially invisible for 1 sec
'VisibilityTimeout': '10800', # 3 hours
'MessageRetentionPeriod': '604800', # 7 days, in seconds
'ReceiveMessageWaitTimeSeconds': '5', # 5 seconds of long polling
}
}
self.queue_url = self._initialize()
def _initialize(self):
""" Initializes the actual queue - helper method for init """
try:
response = self.client.create_queue(
QueueName=self.queue_name,
Attributes=self.queue_attrs[self.queue_name]
)
queue_url = response['QueueUrl']
except self.client.exceptions.QueueNameExists:
queue_url = self._get_queue_url(self.queue_name)
except Exception as e:
log.error('Error while attempting to create queue: %s' % e)
queue_url = self._get_queue_url(self.queue_name)
return queue_url
def _get_queue_url(self, queue_name):
"""
Simple function that returns url of associated queue name
"""
try:
response = self.client.get_queue_url(
QueueName=queue_name
)
except Exception as e:
log.error('Cannot resolve queue_url: %s' % e)
response = {}
return response.get('QueueUrl', None)
def _chunk_messages(self, msgs):
""" Chunks messages into self.send_batch_size batches (for efficiency).
:param msgs: list of messages to be chunked
"""
for i in range(0, len(msgs), self.batch_size):
yield msgs[i:i + self.batch_size]
def _send_messages(self, msgs, retries=3):
""" Sends msgs to the ingestion queue (with retries for failed messages).
:param msgs: to be sent
:param retries: number of times to resend failed messages, decremented on recursion
:return: list of any failed messages
"""
failed = []
for msg_batch in self._chunk_messages(msgs):
log.info('Trying to chunk messages: %s' % msgs)
entries = []
for msg in msg_batch:
entries.append({
'Id': str(int(time.time() * 1000000)),
'MessageBody': json.dumps(msg)
})
response = self.client.send_message_batch(
QueueUrl=self.queue_url,
Entries=entries
)
failed_messages = response.get('Failed', [])
# attempt resend of failed messages
if failed_messages and retries > 0:
msgs_to_retry = []
for failed_message in failed_messages:
fail_id = failed_message.get('Id')
msgs_to_retry.extend([json.loads(ent['MessageBody']) for ent in entries if ent['Id'] == fail_id])
if msgs_to_retry:
failed_messages = self._send_messages(msgs_to_retry, retries=retries - 1)
failed.extend(failed_messages)
return failed
def delete_messages(self, messages):
"""
Called after a message has been successfully received and processed.
Removes message from the queue.
Input should be the messages directly from receive messages. At the
very least, needs a list of messages with 'Id' and 'ReceiptHandle' as this
metadata is necessary to identify the message in SQS internals.
NOTE: deletion does NOT have a retry mechanism
:param messages: messages to be deleted
:returns: a list with any failed messages
"""
failed = []
for batch in self._chunk_messages(messages):
# need to change message format, since deleting takes slightly
# different fields what's return from receiving
for i in range(len(batch)):
to_delete = {
'Id': batch[i]['MessageId'],
'ReceiptHandle': batch[i]['ReceiptHandle']
}
batch[i] = to_delete
response = self.client.delete_message_batch(
QueueUrl=self.queue_url,
Entries=batch
)
failed.extend(response.get('Failed', []))
return failed
def add_uuids(self, uuids, ingestion_type='vcf'):
""" Takes a list of string uuids and adds them to the ingestion queue.
If ingestion_type is not specified, it defaults to 'vcf'.
:precondition: uuids are all of type FileProcessed
:param uuids: uuids to be added to the queue.
:param ingestion_type: the ingestion type of the uuids (default 'vcf' for legacy reasons)
:returns: 2-tuple: uuids queued, failed messages (if any)
"""
curr_time = datetime.datetime.utcnow().isoformat()
msgs = []
for uuid in uuids:
current_msg = {
'ingestion_type': ingestion_type,
'uuid': uuid,
'timestamp': curr_time
}
msgs.append(current_msg)
failed = self._send_messages(msgs)
return uuids, failed
def get_counts(self):
""" Returns number counts of waiting/inflight messages
* Makes a boto3 API Call to do so *
:returns: 2 tuple of waiting, inflight messages
"""
response = self.client.get_queue_attributes(
QueueUrl=self.queue_url,
AttributeNames=[
'ApproximateNumberOfMessages',
'ApproximateNumberOfMessagesNotVisible'
]
)
formatted = {
'waiting': response.get('Attributes', {}).get('ApproximateNumberOfMessages'),
'inflight': response.get('Attributes', {}).get('ApproximateNumberOfMessagesNotVisible')
}
return formatted['waiting'], formatted['inflight']
def receive_messages(self, batch_size=None):
""" Returns an array of messages, if any that are waiting
:param batch_size: an integer number of messages
:returns: messages received or [] if no messages were ready to be received
"""
response = self.client.receive_message(
QueueUrl=self.queue_url,
MaxNumberOfMessages=self.batch_size if batch_size is None else batch_size
)
return response.get('Messages', [])
def clear_queue(self):
""" Clears the queue by receiving all messages. BE CAREFUL as this has potential to
infinite loop under certain conditions. This risk is preferred to using 'purge', which
has a long timeout. The guarantees this functions provides are minimal at best - it should
really only be used in testing.
"""
while True:
messages = self.receive_messages()
self.delete_messages(messages)
if len(messages) == 0:
break
| 41.043062 | 117 | 0.601422 |
4a22c6ec2ed73854b371f2480ef7610517c3c9d7 | 8,333 | py | Python | sdk/python/pulumi_azure_native/azuredata/v20190724preview/sql_managed_instance.py | pulumi-bot/pulumi-azure-native | f7b9490b5211544318e455e5cceafe47b628e12c | [
"Apache-2.0"
] | null | null | null | sdk/python/pulumi_azure_native/azuredata/v20190724preview/sql_managed_instance.py | pulumi-bot/pulumi-azure-native | f7b9490b5211544318e455e5cceafe47b628e12c | [
"Apache-2.0"
] | null | null | null | sdk/python/pulumi_azure_native/azuredata/v20190724preview/sql_managed_instance.py | pulumi-bot/pulumi-azure-native | f7b9490b5211544318e455e5cceafe47b628e12c | [
"Apache-2.0"
] | null | null | null | # coding=utf-8
# *** WARNING: this file was generated by the Pulumi SDK Generator. ***
# *** Do not edit by hand unless you're certain you know what you are doing! ***
import warnings
import pulumi
import pulumi.runtime
from typing import Any, Mapping, Optional, Sequence, Union
from ... import _utilities, _tables
from . import outputs
__all__ = ['SqlManagedInstance']
class SqlManagedInstance(pulumi.CustomResource):
def __init__(__self__,
resource_name: str,
opts: Optional[pulumi.ResourceOptions] = None,
admin: Optional[pulumi.Input[str]] = None,
data_controller_id: Optional[pulumi.Input[str]] = None,
end_time: Optional[pulumi.Input[str]] = None,
instance_endpoint: Optional[pulumi.Input[str]] = None,
location: Optional[pulumi.Input[str]] = None,
resource_group_name: Optional[pulumi.Input[str]] = None,
sql_managed_instance_name: Optional[pulumi.Input[str]] = None,
start_time: Optional[pulumi.Input[str]] = None,
tags: Optional[pulumi.Input[Mapping[str, pulumi.Input[str]]]] = None,
v_core: Optional[pulumi.Input[str]] = None,
__props__=None,
__name__=None,
__opts__=None):
"""
A SqlManagedInstance.
:param str resource_name: The name of the resource.
:param pulumi.ResourceOptions opts: Options for the resource.
:param pulumi.Input[str] admin: The instance admin user
:param pulumi.Input[str] data_controller_id: null
:param pulumi.Input[str] end_time: The instance end time
:param pulumi.Input[str] instance_endpoint: The on premise instance endpoint
:param pulumi.Input[str] location: The geo-location where the resource lives
:param pulumi.Input[str] resource_group_name: The name of the Azure resource group
:param pulumi.Input[str] sql_managed_instance_name: The name of SQL Managed Instances
:param pulumi.Input[str] start_time: The instance start time
:param pulumi.Input[Mapping[str, pulumi.Input[str]]] tags: Resource tags.
:param pulumi.Input[str] v_core: The instance vCore
"""
if __name__ is not None:
warnings.warn("explicit use of __name__ is deprecated", DeprecationWarning)
resource_name = __name__
if __opts__ is not None:
warnings.warn("explicit use of __opts__ is deprecated, use 'opts' instead", DeprecationWarning)
opts = __opts__
if opts is None:
opts = pulumi.ResourceOptions()
if not isinstance(opts, pulumi.ResourceOptions):
raise TypeError('Expected resource options to be a ResourceOptions instance')
if opts.version is None:
opts.version = _utilities.get_version()
if opts.id is None:
if __props__ is not None:
raise TypeError('__props__ is only valid when passed in combination with a valid opts.id to get an existing resource')
__props__ = dict()
__props__['admin'] = admin
__props__['data_controller_id'] = data_controller_id
__props__['end_time'] = end_time
__props__['instance_endpoint'] = instance_endpoint
__props__['location'] = location
if resource_group_name is None and not opts.urn:
raise TypeError("Missing required property 'resource_group_name'")
__props__['resource_group_name'] = resource_group_name
__props__['sql_managed_instance_name'] = sql_managed_instance_name
__props__['start_time'] = start_time
__props__['tags'] = tags
__props__['v_core'] = v_core
__props__['name'] = None
__props__['system_data'] = None
__props__['type'] = None
alias_opts = pulumi.ResourceOptions(aliases=[pulumi.Alias(type_="azure-nextgen:azuredata/v20190724preview:SqlManagedInstance"), pulumi.Alias(type_="azure-native:azuredata:SqlManagedInstance"), pulumi.Alias(type_="azure-nextgen:azuredata:SqlManagedInstance"), pulumi.Alias(type_="azure-native:azuredata/v20200908preview:SqlManagedInstance"), pulumi.Alias(type_="azure-nextgen:azuredata/v20200908preview:SqlManagedInstance")])
opts = pulumi.ResourceOptions.merge(opts, alias_opts)
super(SqlManagedInstance, __self__).__init__(
'azure-native:azuredata/v20190724preview:SqlManagedInstance',
resource_name,
__props__,
opts)
@staticmethod
def get(resource_name: str,
id: pulumi.Input[str],
opts: Optional[pulumi.ResourceOptions] = None) -> 'SqlManagedInstance':
"""
Get an existing SqlManagedInstance resource's state with the given name, id, and optional extra
properties used to qualify the lookup.
:param str resource_name: The unique name of the resulting resource.
:param pulumi.Input[str] id: The unique provider ID of the resource to lookup.
:param pulumi.ResourceOptions opts: Options for the resource.
"""
opts = pulumi.ResourceOptions.merge(opts, pulumi.ResourceOptions(id=id))
__props__ = dict()
__props__["admin"] = None
__props__["data_controller_id"] = None
__props__["end_time"] = None
__props__["instance_endpoint"] = None
__props__["location"] = None
__props__["name"] = None
__props__["start_time"] = None
__props__["system_data"] = None
__props__["tags"] = None
__props__["type"] = None
__props__["v_core"] = None
return SqlManagedInstance(resource_name, opts=opts, __props__=__props__)
@property
@pulumi.getter
def admin(self) -> pulumi.Output[Optional[str]]:
"""
The instance admin user
"""
return pulumi.get(self, "admin")
@property
@pulumi.getter(name="dataControllerId")
def data_controller_id(self) -> pulumi.Output[Optional[str]]:
"""
null
"""
return pulumi.get(self, "data_controller_id")
@property
@pulumi.getter(name="endTime")
def end_time(self) -> pulumi.Output[Optional[str]]:
"""
The instance end time
"""
return pulumi.get(self, "end_time")
@property
@pulumi.getter(name="instanceEndpoint")
def instance_endpoint(self) -> pulumi.Output[Optional[str]]:
"""
The on premise instance endpoint
"""
return pulumi.get(self, "instance_endpoint")
@property
@pulumi.getter
def location(self) -> pulumi.Output[str]:
"""
The geo-location where the resource lives
"""
return pulumi.get(self, "location")
@property
@pulumi.getter
def name(self) -> pulumi.Output[str]:
"""
The name of the resource
"""
return pulumi.get(self, "name")
@property
@pulumi.getter(name="startTime")
def start_time(self) -> pulumi.Output[Optional[str]]:
"""
The instance start time
"""
return pulumi.get(self, "start_time")
@property
@pulumi.getter(name="systemData")
def system_data(self) -> pulumi.Output['outputs.SystemDataResponse']:
"""
Read only system data
"""
return pulumi.get(self, "system_data")
@property
@pulumi.getter
def tags(self) -> pulumi.Output[Optional[Mapping[str, str]]]:
"""
Resource tags.
"""
return pulumi.get(self, "tags")
@property
@pulumi.getter
def type(self) -> pulumi.Output[str]:
"""
The type of the resource. Ex- Microsoft.Compute/virtualMachines or Microsoft.Storage/storageAccounts.
"""
return pulumi.get(self, "type")
@property
@pulumi.getter(name="vCore")
def v_core(self) -> pulumi.Output[Optional[str]]:
"""
The instance vCore
"""
return pulumi.get(self, "v_core")
def translate_output_property(self, prop):
return _tables.CAMEL_TO_SNAKE_CASE_TABLE.get(prop) or prop
def translate_input_property(self, prop):
return _tables.SNAKE_TO_CAMEL_CASE_TABLE.get(prop) or prop
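# Example (sketch): provisioning this resource with Pulumi looks roughly like
# the following; the resource, group and region names are placeholders.
#
#     import pulumi_azure_native as azure_native
#
#     sql_mi = azure_native.azuredata.v20190724preview.SqlManagedInstance(
#         "example-sqlmi",
#         resource_group_name="example-rg",
#         location="eastus")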
| 39.492891 | 432 | 0.633385 |
4a22c7aa3c3fbca13cfe14d2a36aa1700fe5746b | 6,055 | py | Python | examples/client_with_routing.py | Precognize/rsocket-py | 31704d53c232e0c0f53783b9a56117e5bd0645ce | [
"MIT"
] | null | null | null | examples/client_with_routing.py | Precognize/rsocket-py | 31704d53c232e0c0f53783b9a56117e5bd0645ce | [
"MIT"
] | null | null | null | examples/client_with_routing.py | Precognize/rsocket-py | 31704d53c232e0c0f53783b9a56117e5bd0645ce | [
"MIT"
] | null | null | null | import asyncio
import logging
import sys
from asyncio import Event
from typing import AsyncGenerator, Tuple
from reactivestreams.publisher import Publisher
from reactivestreams.subscriber import Subscriber
from reactivestreams.subscription import Subscription
from rsocket.extensions.helpers import route, composite, authenticate_simple
from rsocket.extensions.mimetypes import WellKnownMimeTypes
from rsocket.fragment import Fragment
from rsocket.helpers import single_transport_provider
from rsocket.payload import Payload
from rsocket.rsocket_client import RSocketClient
from rsocket.streams.stream_from_async_generator import StreamFromAsyncGenerator
from rsocket.transports.tcp import TransportTCP
def sample_publisher(wait_for_requester_complete: Event,
response_count: int = 3) -> Publisher:
async def generator() -> AsyncGenerator[Tuple[Fragment, bool], None]:
current_response = 0
for i in range(response_count):
is_complete = (current_response + 1) == response_count
message = 'Item to server from client on channel: %s' % current_response
yield Fragment(message.encode('utf-8')), is_complete
if is_complete:
wait_for_requester_complete.set()
break
current_response += 1
return StreamFromAsyncGenerator(generator)
class ChannelSubscriber(Subscriber):
def __init__(self, wait_for_responder_complete: Event) -> None:
super().__init__()
self._wait_for_responder_complete = wait_for_responder_complete
def on_subscribe(self, subscription: Subscription):
self.subscription = subscription
def on_next(self, value: Payload, is_complete=False):
logging.info('From server on channel: ' + value.data.decode('utf-8'))
if is_complete:
self._wait_for_responder_complete.set()
def on_error(self, exception: Exception):
logging.error('Error from server on channel' + str(exception))
self._wait_for_responder_complete.set()
def on_complete(self):
logging.info('Completed from server on channel')
self._wait_for_responder_complete.set()
class StreamSubscriber(Subscriber):
def __init__(self,
wait_for_complete: Event,
request_n_size=0):
self._request_n_size = request_n_size
self._wait_for_complete = wait_for_complete
def on_next(self, value, is_complete=False):
logging.info('RS: {}'.format(value))
if is_complete:
self._wait_for_complete.set()
else:
if self._request_n_size > 0:
self.subscription.request(self._request_n_size)
def on_complete(self):
logging.info('RS: Complete')
self._wait_for_complete.set()
def on_error(self, exception):
logging.info('RS: error: {}'.format(exception))
self._wait_for_complete.set()
def on_subscribe(self, subscription):
# noinspection PyAttributeOutsideInit
self.subscription = subscription
async def request_response(client: RSocketClient):
payload = Payload(b'The quick brown fox', composite(
route('single_request'),
authenticate_simple('user', '12345')
))
await client.request_response(payload)
async def request_channel(client: RSocketClient):
channel_completion_event = Event()
requester_completion_event = Event()
payload = Payload(b'The quick brown fox', composite(
route('channel'),
authenticate_simple('user', '12345')
))
publisher = sample_publisher(requester_completion_event)
requested = client.request_channel(payload, publisher)
requested.initial_request_n(5).subscribe(ChannelSubscriber(channel_completion_event))
await channel_completion_event.wait()
await requester_completion_event.wait()
async def request_stream_invalid_login(client: RSocketClient):
payload = Payload(b'The quick brown fox', composite(
route('stream'),
authenticate_simple('user', 'wrong_password')
))
completion_event = Event()
client.request_stream(payload).initial_request_n(1).subscribe(StreamSubscriber(completion_event))
await completion_event.wait()
async def request_stream(client: RSocketClient):
payload = Payload(b'The quick brown fox', composite(
route('stream'),
authenticate_simple('user', '12345')
))
completion_event = Event()
client.request_stream(payload).subscribe(StreamSubscriber(completion_event))
await completion_event.wait()
async def request_slow_stream(client: RSocketClient):
payload = Payload(b'The quick brown fox', composite(
route('slow_stream'),
authenticate_simple('user', '12345')
))
completion_event = Event()
client.request_stream(payload).subscribe(StreamSubscriber(completion_event))
await completion_event.wait()
async def request_fragmented_stream(client: RSocketClient):
payload = Payload(b'The quick brown fox', composite(
route('fragmented_stream'),
authenticate_simple('user', '12345')
))
completion_event = Event()
client.request_stream(payload).subscribe(StreamSubscriber(completion_event))
await completion_event.wait()
async def main(server_port):
logging.info('Connecting to server at localhost:%s', server_port)
connection = await asyncio.open_connection('localhost', server_port)
async with RSocketClient(single_transport_provider(TransportTCP(*connection)),
metadata_encoding=WellKnownMimeTypes.MESSAGE_RSOCKET_COMPOSITE_METADATA) as client:
await request_response(client)
await request_stream(client)
await request_slow_stream(client)
await request_channel(client)
await request_stream_invalid_login(client)
await request_fragmented_stream(client)
if __name__ == '__main__':
port = sys.argv[1] if len(sys.argv) > 1 else 6565
logging.basicConfig(level=logging.DEBUG)
asyncio.run(main(port))
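# To try this example (sketch): start a matching RSocket server that exposes the
# 'single_request', 'stream', 'slow_stream', 'channel' and 'fragmented_stream'
# routes with simple user/password authentication, then run e.g.:
#
#     python client_with_routing.py 6565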
| 34.403409 | 112 | 0.715442 |
4a22c7f05e3c467c65f5b20991dc3a898992fb02 | 4,168 | py | Python | tests/test_models_t5.py | Cli212/gluon-nlp | 151e1b049aeb0dd0d2a84d928d314fdbfcf8a7f4 | [
"Apache-2.0"
] | null | null | null | tests/test_models_t5.py | Cli212/gluon-nlp | 151e1b049aeb0dd0d2a84d928d314fdbfcf8a7f4 | [
"Apache-2.0"
] | null | null | null | tests/test_models_t5.py | Cli212/gluon-nlp | 151e1b049aeb0dd0d2a84d928d314fdbfcf8a7f4 | [
"Apache-2.0"
] | null | null | null | import pytest
import mxnet as mx
from mxnet import np, npx
from mxnet.gluon import nn, HybridBlock
from gluonnlp.models.t5 import (
T5Model, T5Inference, t5_cfg_reg, list_pretrained_t5, get_pretrained_t5
)
from gluonnlp.utils.testing import verify_nmt_model, verify_nmt_inference
npx.set_np()
def test_list_pretrained_t5():
assert len(list_pretrained_t5()) > 0
@pytest.mark.parametrize('cfg_key', t5_cfg_reg.list_keys())
@pytest.mark.parametrize('activation', ['relu', 'gated-gelu'])
def test_t5_model(cfg_key, activation, ctx):
with ctx:
cfg = T5Model.get_cfg(cfg_key)
cfg.defrost()
cfg.MODEL.vocab_size = 256
cfg.MODEL.d_model = 128
cfg.MODEL.d_ff = 512
cfg.MODEL.num_layers = 2
cfg.MODEL.num_heads = 4
cfg.MODEL.activation = activation
cfg.MODEL.layout = 'NT'
cfg.freeze()
cfg_tn = cfg.clone()
cfg_tn.defrost()
cfg_tn.MODEL.layout = 'TN'
cfg_tn.freeze()
# test TN and NT consistency
t5_model = T5Model.from_cfg(cfg)
t5_model.initialize()
t5_model.hybridize()
t5_model_tn = T5Model.from_cfg(cfg_tn)
t5_model_tn.share_parameters(t5_model.collect_params())
t5_model_tn.hybridize()
batch_size = 8
src_length = 32
tgt_length = 18
src_data = np.random.randint(0, 255, (batch_size, src_length))
src_valid_length = np.random.randint(src_length // 2, src_length, (batch_size,))
tgt_data = np.random.randint(0, 255, (batch_size, tgt_length))
tgt_valid_length = np.random.randint(tgt_length // 4, tgt_length, (batch_size,))
out = t5_model(src_data, src_valid_length, tgt_data, tgt_valid_length)
out_tn = t5_model_tn(src_data.T, src_valid_length, tgt_data.T, tgt_valid_length)
assert np.allclose(np.swapaxes(out, 0, 1), out_tn, 1E-5, 1E-5)
# test consistency with various target valid length
for shift in range(1, np.min(tgt_valid_length).item()):
for partial_out in [
t5_model(src_data, src_valid_length, tgt_data[:, :-shift], tgt_valid_length - shift),
t5_model(src_data, src_valid_length, tgt_data, tgt_valid_length - shift)
]:
for i in range(batch_size):
vl = tgt_valid_length[i].item() - shift
assert np.allclose(partial_out[i, :vl], out[i, :vl], 1E-5, 1E-5)
@pytest.mark.parametrize('layout', ['NT', 'TN'])
@pytest.mark.parametrize('activation', ['relu', 'gated-gelu'])
def test_t5_inference(layout, activation, ctx):
with ctx:
cfg = T5Model.get_cfg('google_t5_small')
cfg.defrost()
cfg.MODEL.layout = layout
cfg.MODEL.activation = activation
cfg.freeze()
model = T5Model.from_cfg(cfg)
model.initialize()
model.hybridize()
# while keeping T5Model implementation consistent with Huggingface's, this
# temporary class would help the backbone fit into the provided nmt tests.
class TempWithHead(HybridBlock):
def __init__(self, model):
super().__init__()
self.model = model
self.layout = model.layout
self.src_vocab_size = model.vocab_size
self.tgt_vocab_size = model.vocab_size
# append a final output layer
self.output_layer = nn.Dense(
units=model.vocab_size,
in_units=model._d_model,
flatten=False,
use_bias=False,
dtype=model._dtype
)
self.output_layer.weight = model.input_embedding_layer.weight
def forward(self, *args, **kwargs):
return self.output_layer(self.model(*args, **kwargs))
backbone = TempWithHead(model)
backbone.hybridize()
verify_nmt_model(backbone)
inference_model = T5Inference(model)
inference_model.hybridize()
verify_nmt_inference(train_model=backbone, inference_model=inference_model)
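# Note (sketch): these tests are meant to be collected by pytest, e.g.
# `pytest tests/test_models_t5.py`; the `ctx` argument is assumed to be a
# fixture provided by the surrounding test suite that supplies the MXNet
# device context.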
| 37.54955 | 102 | 0.620441 |
4a22c856f182ebff2120495d1383569232f230f3 | 2,267 | py | Python | MakeConfounds.py | jbdenniso/general | 14193a77c5d6b58998a91570fcee4e08c28c5753 | [
"MIT"
] | 1 | 2018-07-22T20:19:25.000Z | 2018-07-22T20:19:25.000Z | MakeConfounds.py | jbdenniso/general | 14193a77c5d6b58998a91570fcee4e08c28c5753 | [
"MIT"
] | 3 | 2020-08-17T13:16:44.000Z | 2021-06-25T18:45:28.000Z | MakeConfounds.py | jbdenniso/general | 14193a77c5d6b58998a91570fcee4e08c28c5753 | [
"MIT"
] | 5 | 2020-07-29T16:59:06.000Z | 2021-05-14T23:55:33.000Z |
# coding: utf-8
# E.G. use
#$ python MakeConfounds.py --fmriprepDir="/data/projects/Tensor_game/Data/Raw/NARPS/derivatives/fmriprep"
# TO DO:
# 1. write to subject folders, create if it doesn't exist
# 2. simplify input argument to project name. all paths should be standardized within a project folder
# 3. check for existence of output before overwiting older output. helps with version control on datalad.
# 4. give option to overwrite existing output
import numpy as np
import pandas as pd
import argparse
import os
import re
parser = argparse.ArgumentParser(description='Give me a path to your fmriprep output')
group = parser.add_mutually_exclusive_group(required=True)
group.add_argument('--fmriprepDir',default=None, type=str,help="This is the full path to your fmriprep dir")
args = parser.parse_args()
fmriprep_path = args.fmriprepDir
print("finding confound files located in %s"%(fmriprep_path))
#make list of confound tsvs
cons=[]
for root, dirs, files in os.walk(fmriprep_path):
for f in files:
if f.endswith('-confounds_regressors.tsv'):
cons.append(os.path.join(root, f))
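# 'cons' now holds the full path of every *-confounds_regressors.tsv found
# under the fmriprep directory; each file is processed independently below.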
for f in cons:
sub=re.search('/func/(.*)_task', f).group(1)
run=re.search('_run-(.*)_desc', f).group(1)
task=re.search('_task-(.*)_run',f).group(1)
    derivative_path=re.search('(.*)fmriprep/sub',f).group(1)
    outfile="%s_task-%s_run-%s_desc-fslConfounds.tsv"%(sub,task,run)
    #read in the confound regressors table produced by fmriprep for this run
    con_regs=pd.read_csv(f,sep='\t')
other=['csf','white_matter']
cosine = [col for col in con_regs if col.startswith('cosine')]
NSS = [col for col in con_regs if col.startswith('non_steady_state')]
#motion_out=[col for col in con_regs if col.startswith('motion_outlier')]
aroma_motion=[col for col in con_regs if col.startswith('aroma')]
filter_col=np.concatenate([cosine,NSS,aroma_motion,other])#here we combine all NSS AROMA motion & the rest
#This Dataframe will be the full filter matrix
df_all=con_regs[filter_col]
    outdir=derivative_path+"fsl/confounds/%s/" %(sub)
if not os.path.exists(outdir):
os.makedirs(outdir)
output=outdir+outfile
print(sub,run,task)
df_all.to_csv(output,index=False,sep='\t',header=False)
| 33.338235 | 110 | 0.720335 |
4a22c8900db8285430e22226abc82a11153006ba | 959 | py | Python | expHTS/preprocess/fastq_handling.py | B854414/expHTS | 90f5e73bda094044be9d4f43dc96c7490ea36187 | [
"Apache-2.0"
] | null | null | null | expHTS/preprocess/fastq_handling.py | B854414/expHTS | 90f5e73bda094044be9d4f43dc96c7490ea36187 | [
"Apache-2.0"
] | null | null | null | expHTS/preprocess/fastq_handling.py | B854414/expHTS | 90f5e73bda094044be9d4f43dc96c7490ea36187 | [
"Apache-2.0"
] | null | null | null | # fastq handling
class fastqIter:
" A simple file iterator that returns 4 lines for fast fastq iteration. "
def __init__(self, handle):
self.inf = handle
def __iter__(self):
return self
def next(self):
lines = {'id': self.inf.readline().strip(),
'seq': self.inf.readline().strip(),
'+': self.inf.readline().strip(),
'qual': self.inf.readline().strip()}
assert(len(lines['seq']) == len(lines['qual']))
if lines['id'] == '' or lines['seq'] == '' or lines['+'] == '' or lines['qual'] == '':
raise StopIteration
else:
            return lines
    # Python 3 compatibility: the iterator protocol calls __next__
    __next__ = next
@staticmethod
def parse(handle):
return fastqIter(handle)
def close(self):
self.inf.close()
def writeFastq(handle, fq):
handle.write(fq['id'] + '\n')
handle.write(fq['seq'] + '\n')
handle.write(fq['+'] + '\n')
handle.write(fq['qual'] + '\n')
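# Example usage (sketch): round-trip records with fastqIter/writeFastq.
# The input/output file names are placeholders taken from the command line.
if __name__ == '__main__':
    import sys
    if len(sys.argv) == 3:
        with open(sys.argv[1]) as inf, open(sys.argv[2], 'w') as outf:
            for record in fastqIter.parse(inf):
                writeFastq(outf, record)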
| 27.4 | 94 | 0.525547 |
4a22c9a8c54e87a7cf21e00cb0ab40b525ba8a82 | 973 | py | Python | api/migrations/0001_initial.py | macav/ideas-platform | bc65355b9cd5164d6b2d640bf275e55ae8dd843e | [
"MIT"
] | null | null | null | api/migrations/0001_initial.py | macav/ideas-platform | bc65355b9cd5164d6b2d640bf275e55ae8dd843e | [
"MIT"
] | null | null | null | api/migrations/0001_initial.py | macav/ideas-platform | bc65355b9cd5164d6b2d640bf275e55ae8dd843e | [
"MIT"
] | null | null | null | # -*- coding: utf-8 -*-
# Generated by Django 1.9.2 on 2016-02-23 08:41
from __future__ import unicode_literals
from django.conf import settings
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
initial = True
dependencies = [
migrations.swappable_dependency(settings.AUTH_USER_MODEL),
]
operations = [
migrations.CreateModel(
name='Idea',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('title', models.CharField(max_length=50)),
('content', models.TextField()),
('upvotes', models.IntegerField(default=0)),
('downvotes', models.IntegerField(default=0)),
('user', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to=settings.AUTH_USER_MODEL)),
],
),
]
| 31.387097 | 118 | 0.621788 |
4a22ca3932ae40af1e2925532c943470fca4ccbc | 1,352 | py | Python | weather/weather/pipelines.py | seu-tan/Spiders | 41b622f305fe44538c38676cd40fa719a4657722 | [
"Apache-2.0"
] | null | null | null | weather/weather/pipelines.py | seu-tan/Spiders | 41b622f305fe44538c38676cd40fa719a4657722 | [
"Apache-2.0"
] | null | null | null | weather/weather/pipelines.py | seu-tan/Spiders | 41b622f305fe44538c38676cd40fa719a4657722 | [
"Apache-2.0"
] | null | null | null | # -*- coding: utf-8 -*-
# Define your item pipelines here
#
# Don't forget to add your pipeline to the ITEM_PIPELINES setting
# See: http://doc.scrapy.org/en/latest/topics/item-pipeline.html
class WeatherPipeline(object):
def __init__(self):
pass
def process_item(self, item, spider):
with open('result.txt', 'w+') as file:
city = item['city'][0].encode('utf-8')
date = item['date']
desc = item['dayDesc']
dayTemp = item['dayTemp']
file.write('city:' + str(city) + '\n\n')
dayDesc = desc[1::2]
nightDesc = desc[0::2]
            wea_item = list(zip(date, dayDesc, nightDesc, dayTemp))
            for i in range(len(wea_item)):
                # use a separate name so the pipeline's `item` argument is not
                # shadowed (process_item must return the original item)
                day_item = wea_item[i]
                d = day_item[0]  # date
                dd = day_item[1]  # daytime weather
                nd = day_item[2]  # nighttime weather
                ta = day_item[3].split('/')
                dt = ta[0]  # daytime temperature
                nt = ta[1]  # nighttime temperature
txt = 'date:{0}\t\tday:{1}({2})\t\tnight:{3}({4})\n\n'.format(
d,
dd.encode('utf-8'),
dt.encode('utf-8'),
nd.encode('utf-8'),
nt.encode('utf-8')
)
file.write(txt)
return item
| 30.727273 | 78 | 0.43713 |
4a22ca692e690dde015e1976acea999e7f01c70d | 3,354 | py | Python | doc/source/conf.py | gaozhengwei/networking-ovn | 4f64542bbbb86e4277d4ad6083e01df86a8248a4 | [
"Apache-2.0"
] | null | null | null | doc/source/conf.py | gaozhengwei/networking-ovn | 4f64542bbbb86e4277d4ad6083e01df86a8248a4 | [
"Apache-2.0"
] | null | null | null | doc/source/conf.py | gaozhengwei/networking-ovn | 4f64542bbbb86e4277d4ad6083e01df86a8248a4 | [
"Apache-2.0"
] | null | null | null | # -*- coding: utf-8 -*-
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import os
import sys
sys.path.insert(0, os.path.abspath('../..'))
# -- General configuration ----------------------------------------------------
# Add any Sphinx extension module names here, as strings. They can be
# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom ones.
extensions = [
'sphinx.ext.autodoc',
'openstackdocstheme',
'oslo_config.sphinxext',
'oslo_config.sphinxconfiggen',
'sphinxcontrib.rsvgconverter',
]
# openstackdocstheme options
repository_name = 'openstack/networking-ovn'
bug_project = 'networking-ovn'
bug_tag = ''
# autodoc generation is a bit aggressive and a nuisance when doing heavy
# text edit cycles.
# execute "export SPHINX_DEBUG=1" in your terminal to disable
# The suffix of source filenames.
source_suffix = '.rst'
# The master toctree document.
master_doc = 'index'
# If true, '()' will be appended to :func: etc. cross-reference text.
add_function_parentheses = True
# If true, the current module name will be prepended to all description
# unit titles (such as .. function::).
add_module_names = True
# The name of the Pygments (syntax highlighting) style to use.
pygments_style = 'sphinx'
# -- Options for HTML output --------------------------------------------------
# The theme to use for HTML and HTML Help pages. Major themes that come with
# Sphinx are currently 'default' and 'sphinxdoc'.
# html_theme_path = ["."]
# html_theme = '_theme'
html_static_path = ['_static']
html_theme = 'openstackdocs'
# Output file base name for HTML help builder.
htmlhelp_basename = 'networking-ovndoc'
# Grouping the document tree into LaTeX files. List of tuples
# (source start file, target name, title, author, documentclass
# [howto/manual]).
latex_documents = [
('index',
'doc-networking-ovn.tex',
u'networking-ovn Documentation',
u'OpenStack Foundation', 'manual'),
]
# Example configuration for intersphinx: refer to the Python standard library.
#intersphinx_mapping = {'http://docs.python.org/': None}
# -- Options for oslo_config.sphinxconfiggen ---------------------------------
_config_generator_config_files = [
'ml2_conf.ini',
'networking_ovn_metadata_agent.ini',
]
def _get_config_generator_config_definition(config_file):
config_file_path = '../../etc/oslo-config-generator/%s' % config_file
# oslo_config.sphinxconfiggen appends '.conf.sample' to the filename,
    # strip the file extension (.conf or .ini).
output_file_path = '_static/config_samples/%s' % config_file.rsplit('.',
1)[0]
return (config_file_path, output_file_path)
config_generator_config_file = [
_get_config_generator_config_definition(conf)
for conf in _config_generator_config_files
]
| 33.207921 | 79 | 0.693798 |
4a22cac9c13f13cabb9d92c0ea7eccf84576f7de | 12,218 | py | Python | sdk/appservice/azure-mgmt-web/azure/mgmt/web/v2020_06_01/operations/_provider_operations.py | beltr0n/azure-sdk-for-python | 2f7fb8bee881b0fc0386a0ad5385755ceedd0453 | [
"MIT"
] | 2 | 2021-03-24T06:26:11.000Z | 2021-04-18T15:55:59.000Z | sdk/appservice/azure-mgmt-web/azure/mgmt/web/v2020_06_01/operations/_provider_operations.py | beltr0n/azure-sdk-for-python | 2f7fb8bee881b0fc0386a0ad5385755ceedd0453 | [
"MIT"
] | 4 | 2019-04-17T17:57:49.000Z | 2020-04-24T21:11:22.000Z | sdk/appservice/azure-mgmt-web/azure/mgmt/web/v2020_06_01/operations/_provider_operations.py | beltr0n/azure-sdk-for-python | 2f7fb8bee881b0fc0386a0ad5385755ceedd0453 | [
"MIT"
] | 2 | 2021-05-23T16:46:31.000Z | 2021-05-26T23:51:09.000Z | # coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for license information.
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is regenerated.
# --------------------------------------------------------------------------
from typing import TYPE_CHECKING
import warnings
from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error
from azure.core.paging import ItemPaged
from azure.core.pipeline import PipelineResponse
from azure.core.pipeline.transport import HttpRequest, HttpResponse
from azure.mgmt.core.exceptions import ARMErrorFormat
from .. import models as _models
if TYPE_CHECKING:
# pylint: disable=unused-import,ungrouped-imports
from typing import Any, Callable, Dict, Generic, Iterable, Optional, TypeVar, Union
T = TypeVar('T')
ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]]
class ProviderOperations(object):
"""ProviderOperations operations.
You should not instantiate this class directly. Instead, you should create a Client instance that
instantiates it for you and attaches it as an attribute.
:ivar models: Alias to model classes used in this operation group.
:type models: ~azure.mgmt.web.v2020_06_01.models
:param client: Client for service requests.
:param config: Configuration of service client.
:param serializer: An object model serializer.
:param deserializer: An object model deserializer.
"""
models = _models
def __init__(self, client, config, serializer, deserializer):
self._client = client
self._serialize = serializer
self._deserialize = deserializer
self._config = config
def get_available_stacks(
self,
os_type_selected=None, # type: Optional[Union[str, "_models.Enum4"]]
**kwargs # type: Any
):
# type: (...) -> Iterable["_models.ApplicationStackCollection"]
"""Get available application frameworks and their versions.
Description for Get available application frameworks and their versions.
:param os_type_selected:
:type os_type_selected: str or ~azure.mgmt.web.v2020_06_01.models.Enum4
:keyword callable cls: A custom type or function that will be passed the direct response
:return: An iterator like instance of either ApplicationStackCollection or the result of cls(response)
:rtype: ~azure.core.paging.ItemPaged[~azure.mgmt.web.v2020_06_01.models.ApplicationStackCollection]
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop('cls', None) # type: ClsType["_models.ApplicationStackCollection"]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
api_version = "2020-06-01"
accept = "application/json"
def prepare_request(next_link=None):
# Construct headers
header_parameters = {} # type: Dict[str, Any]
header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
if not next_link:
# Construct URL
url = self.get_available_stacks.metadata['url'] # type: ignore
# Construct parameters
query_parameters = {} # type: Dict[str, Any]
if os_type_selected is not None:
query_parameters['osTypeSelected'] = self._serialize.query("os_type_selected", os_type_selected, 'str')
query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
request = self._client.get(url, query_parameters, header_parameters)
else:
url = next_link
query_parameters = {} # type: Dict[str, Any]
request = self._client.get(url, query_parameters, header_parameters)
return request
def extract_data(pipeline_response):
deserialized = self._deserialize('ApplicationStackCollection', pipeline_response)
list_of_elem = deserialized.value
if cls:
list_of_elem = cls(list_of_elem)
return deserialized.next_link or None, iter(list_of_elem)
def get_next(next_link=None):
request = prepare_request(next_link)
pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200]:
error = self._deserialize(_models.DefaultErrorResponse, response)
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
return pipeline_response
return ItemPaged(
get_next, extract_data
)
get_available_stacks.metadata = {'url': '/providers/Microsoft.Web/availableStacks'} # type: ignore
def list_operations(
self,
**kwargs # type: Any
):
# type: (...) -> Iterable["_models.CsmOperationCollection"]
"""Gets all available operations for the Microsoft.Web resource provider. Also exposes resource metric definitions.
Description for Gets all available operations for the Microsoft.Web resource provider. Also
exposes resource metric definitions.
:keyword callable cls: A custom type or function that will be passed the direct response
:return: An iterator like instance of either CsmOperationCollection or the result of cls(response)
:rtype: ~azure.core.paging.ItemPaged[~azure.mgmt.web.v2020_06_01.models.CsmOperationCollection]
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop('cls', None) # type: ClsType["_models.CsmOperationCollection"]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
api_version = "2020-06-01"
accept = "application/json"
def prepare_request(next_link=None):
# Construct headers
header_parameters = {} # type: Dict[str, Any]
header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
if not next_link:
# Construct URL
url = self.list_operations.metadata['url'] # type: ignore
# Construct parameters
query_parameters = {} # type: Dict[str, Any]
query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
request = self._client.get(url, query_parameters, header_parameters)
else:
url = next_link
query_parameters = {} # type: Dict[str, Any]
request = self._client.get(url, query_parameters, header_parameters)
return request
def extract_data(pipeline_response):
deserialized = self._deserialize('CsmOperationCollection', pipeline_response)
list_of_elem = deserialized.value
if cls:
list_of_elem = cls(list_of_elem)
return deserialized.next_link or None, iter(list_of_elem)
def get_next(next_link=None):
request = prepare_request(next_link)
pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200]:
error = self._deserialize(_models.DefaultErrorResponse, response)
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
return pipeline_response
return ItemPaged(
get_next, extract_data
)
list_operations.metadata = {'url': '/providers/Microsoft.Web/operations'} # type: ignore
def get_available_stacks_on_prem(
self,
os_type_selected=None, # type: Optional[Union[str, "_models.Enum5"]]
**kwargs # type: Any
):
# type: (...) -> Iterable["_models.ApplicationStackCollection"]
"""Get available application frameworks and their versions.
Description for Get available application frameworks and their versions.
:param os_type_selected:
:type os_type_selected: str or ~azure.mgmt.web.v2020_06_01.models.Enum5
:keyword callable cls: A custom type or function that will be passed the direct response
:return: An iterator like instance of either ApplicationStackCollection or the result of cls(response)
:rtype: ~azure.core.paging.ItemPaged[~azure.mgmt.web.v2020_06_01.models.ApplicationStackCollection]
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop('cls', None) # type: ClsType["_models.ApplicationStackCollection"]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
api_version = "2020-06-01"
accept = "application/json"
def prepare_request(next_link=None):
# Construct headers
header_parameters = {} # type: Dict[str, Any]
header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
if not next_link:
# Construct URL
url = self.get_available_stacks_on_prem.metadata['url'] # type: ignore
path_format_arguments = {
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {} # type: Dict[str, Any]
if os_type_selected is not None:
query_parameters['osTypeSelected'] = self._serialize.query("os_type_selected", os_type_selected, 'str')
query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
request = self._client.get(url, query_parameters, header_parameters)
else:
url = next_link
query_parameters = {} # type: Dict[str, Any]
request = self._client.get(url, query_parameters, header_parameters)
return request
def extract_data(pipeline_response):
deserialized = self._deserialize('ApplicationStackCollection', pipeline_response)
list_of_elem = deserialized.value
if cls:
list_of_elem = cls(list_of_elem)
return deserialized.next_link or None, iter(list_of_elem)
def get_next(next_link=None):
request = prepare_request(next_link)
pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200]:
error = self._deserialize(_models.DefaultErrorResponse, response)
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
return pipeline_response
return ItemPaged(
get_next, extract_data
)
get_available_stacks_on_prem.metadata = {'url': '/subscriptions/{subscriptionId}/providers/Microsoft.Web/availableStacks'} # type: ignore
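    # Example (sketch): these operations are normally reached through the
    # service client rather than instantiated directly, e.g.
    # `web_client.provider.get_available_stacks()` returns a pageable iterator
    # of ApplicationStackCollection pages (client construction and credentials
    # are omitted here).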
| 46.992308 | 142 | 0.651907 |
4a22cce6f8553655a99cab0f83e6515793f254f4 | 21,729 | py | Python | pyxrf/gui_module/wnd_load_quant_calibration.py | andrewmkiss/PyXRF | 61de2029c255f77279ba5bc3896107c1a2e4212f | [
"BSD-3-Clause"
] | 19 | 2016-05-25T21:40:41.000Z | 2022-01-19T01:58:15.000Z | pyxrf/gui_module/wnd_load_quant_calibration.py | andrewmkiss/PyXRF | 61de2029c255f77279ba5bc3896107c1a2e4212f | [
"BSD-3-Clause"
] | 90 | 2016-01-11T17:22:05.000Z | 2021-12-02T15:59:58.000Z | pyxrf/gui_module/wnd_load_quant_calibration.py | andrewmkiss/PyXRF | 61de2029c255f77279ba5bc3896107c1a2e4212f | [
"BSD-3-Clause"
] | 22 | 2016-10-16T17:19:19.000Z | 2022-02-18T21:45:08.000Z | import textwrap
from qtpy.QtWidgets import (
QPushButton,
QHBoxLayout,
QVBoxLayout,
QGroupBox,
QCheckBox,
QLabel,
QComboBox,
QFileDialog,
QRadioButton,
QButtonGroup,
QTableWidget,
QTableWidgetItem,
QHeaderView,
QWidget,
QScrollArea,
QTabWidget,
QFrame,
QMessageBox,
)
from qtpy.QtGui import QBrush, QColor, QDoubleValidator
from qtpy.QtCore import Qt, Slot, Signal
from .useful_widgets import get_background_css, SecondaryWindow, set_tooltip, LineEditExtended
from .dlg_view_calib_standard import DialogViewCalibStandard
import logging
logger = logging.getLogger(__name__)
class WndLoadQuantitativeCalibration(SecondaryWindow):
signal_quantitative_calibration_changed = Signal()
def __init__(self, *, gpc, gui_vars):
super().__init__()
# Global processing classes
self.gpc = gpc
# Global GUI variables (used for control of GUI state)
self.gui_vars = gui_vars
self.initialize()
def initialize(self):
self.table_header_display_names = False
self.setWindowTitle("PyXRF: Load Quantitative Calibration")
self.setMinimumWidth(750)
self.setMinimumHeight(400)
self.resize(750, 600)
self.pb_load_calib = QPushButton("Load Calibration ...")
self.pb_load_calib.clicked.connect(self.pb_load_calib_clicked)
self._changes_exist = False
self._auto_update = True
self.cb_auto_update = QCheckBox("Auto")
self.cb_auto_update.setCheckState(Qt.Checked if self._auto_update else Qt.Unchecked)
self.cb_auto_update.stateChanged.connect(self.cb_auto_update_state_changed)
self.pb_update_plots = QPushButton("Update Plots")
self.pb_update_plots.clicked.connect(self.pb_update_plots_clicked)
self.grp_current_scan = QGroupBox("Parameters of Currently Processed Scan")
self._distance_to_sample = 0.0
self.le_distance_to_sample = LineEditExtended()
le_dist_validator = QDoubleValidator()
le_dist_validator.setBottom(0)
self.le_distance_to_sample.setValidator(le_dist_validator)
self._set_distance_to_sample()
self.le_distance_to_sample.editingFinished.connect(self.le_distance_to_sample_editing_finished)
self.le_distance_to_sample.focusOut.connect(self.le_distance_to_sample_focus_out)
hbox = QHBoxLayout()
hbox.addWidget(QLabel("Distance-to-sample:"))
hbox.addWidget(self.le_distance_to_sample)
hbox.addStretch(1)
self.grp_current_scan.setLayout(hbox)
self.eline_rb_exclusive = [] # Holds the list of groups of exclusive radio buttons
self._setup_tab_widget()
vbox = QVBoxLayout()
hbox = QHBoxLayout()
hbox.addWidget(self.pb_load_calib)
hbox.addStretch(1)
hbox.addWidget(self.cb_auto_update)
hbox.addWidget(self.pb_update_plots)
vbox.addLayout(hbox)
vbox.addWidget(self.tab_widget)
vbox.addWidget(self.grp_current_scan)
self.setLayout(vbox)
# Display data
self.update_all_data()
self._set_tooltips()
def _setup_tab_widget(self):
self.tab_widget = QTabWidget()
self.loaded_standards = QWidget()
# self.display_loaded_standards()
self.scroll = QScrollArea()
self.scroll.setHorizontalScrollBarPolicy(Qt.ScrollBarAlwaysOff)
self.scroll.setWidget(self.loaded_standards)
self.tab_widget.addTab(self.scroll, "Loaded Standards")
self.combo_set_table_header = QComboBox()
self.combo_set_table_header.addItems(["Standard Serial #", "Standard Name"])
self.combo_set_table_header.currentIndexChanged.connect(self.combo_set_table_header_index_changed)
vbox = QVBoxLayout()
vbox.addSpacing(5)
hbox = QHBoxLayout()
hbox.addWidget(QLabel("Display in table header:"))
hbox.addWidget(self.combo_set_table_header)
hbox.addStretch(1)
vbox.addLayout(hbox)
self.table = QTableWidget()
self.table.verticalHeader().hide()
self.table.setSelectionMode(QTableWidget.NoSelection)
self.table.horizontalHeader().setSectionResizeMode(QHeaderView.ResizeToContents)
self.table.horizontalHeader().setMinimumSectionSize(150)
vbox.addWidget(self.table)
self.table.setStyleSheet("QTableWidget::item{color: black;}")
frame = QFrame()
vbox.setContentsMargins(0, 0, 0, 0)
frame.setLayout(vbox)
self.tab_widget.addTab(frame, "Selected Emission Lines")
def display_loaded_standards(self):
calib_data = self.gpc.get_quant_calibration_data()
calib_settings = self.gpc.get_quant_calibration_settings()
# Create the new widget (this deletes the old widget)
self.loaded_standards = QWidget()
self.loaded_standards.setMinimumWidth(700)
# Also delete references to all components
self.frames_calib_data = []
self.pbs_view = []
self.pbs_remove = []
# All 'View' buttons are added to the group in order to be connected to the same slot
self.group_view = QButtonGroup()
self.group_view.setExclusive(False)
self.group_view.buttonClicked.connect(self.pb_view_clicked)
# The same for the 'Remove' buttons
self.group_remove = QButtonGroup()
self.group_remove.setExclusive(False)
self.group_remove.buttonClicked.connect(self.pb_remove_clicked)
vbox = QVBoxLayout()
class _LabelBlack(QLabel):
def __init__(self, *args, **kwargs):
super().__init__(*args, **kwargs)
self.setStyleSheet("color: black")
for cdata, csettings in zip(calib_data, calib_settings):
frame = QFrame()
frame.setFrameStyle(QFrame.StyledPanel)
frame.setStyleSheet(get_background_css((200, 255, 200), widget="QFrame"))
_vbox = QVBoxLayout()
name = cdata["name"] # Standard name (can be arbitrary string
# If name is long, then print it in a separate line
_name_is_long = len(name) > 30
pb_view = QPushButton("View ...")
self.group_view.addButton(pb_view)
pb_remove = QPushButton("Remove")
self.group_remove.addButton(pb_remove)
# Row 1: serial, name
serial = cdata["serial"]
_hbox = QHBoxLayout()
_hbox.addWidget(_LabelBlack(f"<b>Standard</b> #{serial}"))
if not _name_is_long:
_hbox.addWidget(_LabelBlack(f"'{name}'"))
_hbox.addStretch(1)
_hbox.addWidget(pb_view)
_hbox.addWidget(pb_remove)
_vbox.addLayout(_hbox)
# Optional row
if _name_is_long:
                # Wrap name if it is extremely long
name = textwrap.fill(name, width=80)
_hbox = QHBoxLayout()
_hbox.addWidget(_LabelBlack("<b>Name:</b> "), 0, Qt.AlignTop)
_hbox.addWidget(_LabelBlack(name), 0, Qt.AlignTop)
_hbox.addStretch(1)
_vbox.addLayout(_hbox)
# Row 2: description
description = textwrap.fill(cdata["description"], width=80)
_hbox = QHBoxLayout()
_hbox.addWidget(_LabelBlack("<b>Description:</b>"), 0, Qt.AlignTop)
_hbox.addWidget(_LabelBlack(f"{description}"), 0, Qt.AlignTop)
_hbox.addStretch(1)
_vbox.addLayout(_hbox)
# Row 3:
incident_energy = cdata["incident_energy"]
scaler = cdata["scaler_name"]
detector_channel = cdata["detector_channel"]
distance_to_sample = cdata["distance_to_sample"]
_hbox = QHBoxLayout()
_hbox.addWidget(_LabelBlack(f"<b>Incident energy, keV:</b> {incident_energy}"))
_hbox.addWidget(_LabelBlack(f" <b>Scaler:</b> {scaler}"))
_hbox.addWidget(_LabelBlack(f" <b>Detector channel:</b> {detector_channel}"))
_hbox.addWidget(_LabelBlack(f" <b>Distance-to-sample:</b> {distance_to_sample}"))
_hbox.addStretch(1)
_vbox.addLayout(_hbox)
# Row 4: file name
fln = textwrap.fill(csettings["file_path"], width=80)
_hbox = QHBoxLayout()
_hbox.addWidget(_LabelBlack("<b>Source file:</b>"), 0, Qt.AlignTop)
_hbox.addWidget(_LabelBlack(fln), 0, Qt.AlignTop)
_hbox.addStretch(1)
_vbox.addLayout(_hbox)
frame.setLayout(_vbox)
# Now the group box is added to the upper level layout
vbox.addWidget(frame)
vbox.addSpacing(5)
self.frames_calib_data.append(frame)
self.pbs_view.append(pb_view)
self.pbs_remove.append(pb_remove)
# Add the layout to the widget
self.loaded_standards.setLayout(vbox)
# ... and put the widget inside the scroll area. This will update the
# contents of the scroll area.
self.scroll.setWidget(self.loaded_standards)
def display_table_header(self):
calib_data = self.gpc.get_quant_calibration_data()
header_by_name = self.table_header_display_names
tbl_labels = ["Lines"]
for n, cdata in enumerate(calib_data):
if header_by_name:
txt = cdata["name"]
else:
txt = cdata["serial"]
txt = textwrap.fill(txt, width=20)
tbl_labels.append(txt)
self.table.setHorizontalHeaderLabels(tbl_labels)
def display_standard_selection_table(self):
calib_data = self.gpc.get_quant_calibration_data()
self._quant_file_paths = self.gpc.get_quant_calibration_file_path_list()
brightness = 220
table_colors = [(255, brightness, brightness), (brightness, 255, brightness)]
# Disconnect all radio button signals before clearing the table
for bgroup in self.eline_rb_exclusive:
bgroup.buttonToggled.disconnect(self.rb_selection_toggled)
# This list will hold radio button groups for horizontal rows
# Those are exclusive groups. They are not going to be
# used directly, but they must be kept alive in order
# for the radiobuttons to work properly. Most of the groups
# will contain only 1 radiobutton, which will always remain checked.
self.eline_rb_exclusive = []
# The following list will contain the list of radio buttons for each
# row. If there is no radiobutton in a position, then the element is
# set to None.
# N rows: the number of emission lines, N cols: the number of standards
self.eline_rb_lists = []
self.table.clear()
if not calib_data:
self.table.setRowCount(0)
self.table.setColumnCount(0)
else:
# Create the sorted list of available element lines
line_set = set()
for cdata in calib_data:
ks = list(cdata["element_lines"].keys())
line_set.update(list(ks))
self.eline_list = list(line_set)
self.eline_list.sort()
for n in range(len(self.eline_list)):
self.eline_rb_exclusive.append(QButtonGroup())
self.eline_rb_lists.append([None] * len(calib_data))
self.table.setColumnCount(len(calib_data) + 1)
self.table.setRowCount(len(self.eline_list))
self.display_table_header()
for n, eline in enumerate(self.eline_list):
rgb = table_colors[n % 2]
item = QTableWidgetItem(eline)
item.setTextAlignment(Qt.AlignCenter)
item.setFlags(item.flags() & ~Qt.ItemIsEditable)
item.setBackground(QBrush(QColor(*rgb)))
self.table.setItem(n, 0, item)
for ns, cdata in enumerate(calib_data):
q_file_path = self._quant_file_paths[ns] # Used to identify standard
if eline in cdata["element_lines"]:
rb = QRadioButton()
if self.gpc.get_quant_calibration_is_eline_selected(eline, q_file_path):
rb.setChecked(True)
rb.setStyleSheet("color: black")
self.eline_rb_lists[n][ns] = rb
# self.eline_rb_by_standard[ns].addButton(rb)
self.eline_rb_exclusive[n].addButton(rb)
item = QWidget()
item_hbox = QHBoxLayout(item)
item_hbox.addWidget(rb)
item_hbox.setAlignment(Qt.AlignCenter)
item_hbox.setContentsMargins(0, 0, 0, 0)
item.setStyleSheet(get_background_css(rgb))
# Generate tooltip
density = cdata["element_lines"][eline]["density"]
fluorescence = cdata["element_lines"][eline]["fluorescence"]
ttip = f"Fluorescence (F): {fluorescence:12g}\nDensity (D): {density:12g}\n"
# Avoid very small values of density (probably zero)
if abs(density) > 1e-30:
ttip += f"F/D: {fluorescence/density:12g}"
item.setToolTip(ttip)
self.table.setCellWidget(n, ns + 1, item)
else:
# There is no radio button, but we still need to fill the cell
item = QTableWidgetItem("")
item.setFlags(item.flags() & ~Qt.ItemIsEditable)
item.setBackground(QBrush(QColor(*rgb)))
self.table.setItem(n, ns + 1, item)
# Now the table is set (specifically radio buttons).
# So we can connect the button groups with the event processing function
for bgroup in self.eline_rb_exclusive:
bgroup.buttonToggled.connect(self.rb_selection_toggled)
@Slot()
def update_all_data(self):
self.display_loaded_standards()
self.display_standard_selection_table()
self._set_distance_to_sample()
def _set_distance_to_sample(self):
"""Set 'le_distance_to_sample` without updating maps"""
distance_to_sample = self.gpc.get_quant_calibration_distance_to_sample()
if distance_to_sample is None:
distance_to_sample = 0.0
self._distance_to_sample = distance_to_sample
self._set_le_distance_to_sample(distance_to_sample)
def _set_tooltips(self):
set_tooltip(self.pb_load_calib, "Load <b>calibration data</b> from JSON file.")
set_tooltip(
self.cb_auto_update,
"Automatically <b>update the plots</b> when changes are made. "
"If unchecked, then button <b>Update Plots</b> must be pressed "
"to update the plots. Automatic update is often undesirable "
"when large maps are displayed and multiple changes to parameters "
"are made.",
)
set_tooltip(self.pb_update_plots, "<b>Update plots</b> based on currently selected parameters.")
set_tooltip(
self.le_distance_to_sample,
"Distance between <b>the sample and the detector</b>. The ratio between of the distances "
"during calibration and measurement is used to scale computed concentrations. "
"If distance-to-sample is 0 for calibration or measurement, then no scaling is performed.",
)
set_tooltip(
self.combo_set_table_header,
"Use <b>Serial Number</b> or <b>Name</b> of the calibration standard in the header of the table",
)
set_tooltip(
self.table,
"Use Radio Buttons to select the <b>source of calibration data</b> for each emission line. "
"This feature is needed if multiple loaded calibration files have data on the same "
"emission line.",
)
def update_widget_state(self, condition=None):
# Update the state of the menu bar
state = not self.gui_vars["gui_state"]["running_computations"]
self.setEnabled(state)
# Hide the window if required by the program state
state_xrf_map_exists = self.gui_vars["gui_state"]["state_xrf_map_exists"]
if not state_xrf_map_exists:
self.hide()
if condition == "tooltips":
self._set_tooltips()
def cb_auto_update_state_changed(self, state):
self._auto_update = state
self.pb_update_plots.setEnabled(not state)
# If changes were made, apply the changes while switching to 'auto' mode
if state and self._changes_exist:
self._update_maps_auto()
def pb_update_plots_clicked(self):
self._update_maps()
def pb_load_calib_clicked(self):
current_dir = self.gpc.get_current_working_directory()
file_name = QFileDialog.getOpenFileName(
self, "Select File with Quantitative Calibration Data", current_dir, "JSON (*.json);; All (*)"
)
file_name = file_name[0]
if file_name:
try:
logger.debug(f"Loading quantitative calibration from file: '{file_name}'")
self.gpc.load_quantitative_calibration_data(file_name)
self.update_all_data()
self._update_maps_auto()
except Exception:
msg = "The selected JSON file has incorrect format. Select a different file."
msgbox = QMessageBox(QMessageBox.Critical, "Data Loading Error", msg, QMessageBox.Ok, parent=self)
msgbox.exec()
def pb_view_clicked(self, button):
try:
n_standard = self.pbs_view.index(button)
calib_settings = self.gpc.get_quant_calibration_settings()
file_path = calib_settings[n_standard]["file_path"]
calib_preview = self.gpc.get_quant_calibration_text_preview(file_path)
dlg = DialogViewCalibStandard(None, file_path=file_path, calib_preview=calib_preview)
dlg.exec()
except ValueError:
logger.error("'View' button was pressed, but not found in the list of buttons")
def pb_remove_clicked(self, button):
try:
n_standard = self.pbs_remove.index(button)
calib_settings = self.gpc.get_quant_calibration_settings()
file_path = calib_settings[n_standard]["file_path"]
self.gpc.quant_calibration_remove_entry(file_path)
self.update_all_data()
self._update_maps_auto()
except ValueError:
logger.error("'Remove' button was pressed, but not found in the list of buttons")
def rb_selection_toggled(self, button, checked):
if checked:
# Find the button in 2D list 'self.eline_rb_lists'
button_found = False
for nr, rb_list in enumerate(self.eline_rb_lists):
try:
nc = rb_list.index(button)
button_found = True
break
except ValueError:
pass
if button_found:
eline = self.eline_list[nr]
n_standard = nc
file_path = self._quant_file_paths[n_standard]
self.gpc.set_quant_calibration_select_eline(eline, file_path)
self._update_maps_auto()
else:
# This should never happen
logger.error("Selection radio button was pressed, but not found in the list")
def combo_set_table_header_index_changed(self, index):
self.table_header_display_names = bool(index)
self.display_table_header()
def le_distance_to_sample_editing_finished(self):
distance_to_sample = float(self.le_distance_to_sample.text())
if distance_to_sample != self._distance_to_sample:
self._distance_to_sample = distance_to_sample
self.gpc.set_quant_calibration_distance_to_sample(distance_to_sample)
self._update_maps_auto()
def le_distance_to_sample_focus_out(self):
try:
float(self.le_distance_to_sample.text())
except ValueError:
# If the text can not be interpreted to float, then replace the text with the old value
self._set_le_distance_to_sample(self._distance_to_sample)
def _set_le_distance_to_sample(self, distance_to_sample):
self.le_distance_to_sample.setText(f"{distance_to_sample:.12g}")
def _update_maps_auto(self):
"""Update maps only if 'auto' update is ON. Used as a 'filter'
to prevent extra plot updates."""
self._changes_exist = True
if self._auto_update:
self._update_maps()
def _update_maps(self):
"""Upload the selections (limit table) and update plot"""
self._changes_exist = False
self._redraw_maps()
# Emit signal only after the maps are redrawn. This should change
# ranges in the respective controls for the plots
self.signal_quantitative_calibration_changed.emit()
def _redraw_maps(self):
# We don't emit any signals here, but we don't really need to.
logger.debug("Redrawing RGB XRF Maps")
self.gpc.compute_map_ranges()
self.gpc.redraw_maps()
self.gpc.compute_rgb_map_ranges()
self.gpc.redraw_rgb_maps()
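    # Note: this window follows a deferred-update pattern -- parameter edits set
    # `_changes_exist`, and the XRF/RGB maps are redrawn either immediately
    # (when 'Auto' is checked) or only when 'Update Plots' is pressed.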
| 40.463687 | 114 | 0.62672 |
4a22cdae3c1e762c5a8ddfea36d3d0c741814c62 | 2,012 | py | Python | tst/trial_backend/main_checkpoint.py | hfurkanbozkurt/syne-tune | 05ee2668f0155b40c3ee3b61e4b3d58f3f9f3c4f | [
"ECL-2.0",
"Apache-2.0"
] | null | null | null | tst/trial_backend/main_checkpoint.py | hfurkanbozkurt/syne-tune | 05ee2668f0155b40c3ee3b61e4b3d58f3f9f3c4f | [
"ECL-2.0",
"Apache-2.0"
] | 1 | 2022-02-25T15:56:36.000Z | 2022-02-25T17:53:10.000Z | tst/trial_backend/main_checkpoint.py | hfurkanbozkurt/syne-tune | 05ee2668f0155b40c3ee3b61e4b3d58f3f9f3c4f | [
"ECL-2.0",
"Apache-2.0"
] | null | null | null | # Copyright 2021 Amazon.com, Inc. or its affiliates. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License").
# You may not use this file except in compliance with the License.
# A copy of the License is located at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# or in the "license" file accompanying this file. This file is distributed
# on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
# express or implied. See the License for the specific language governing
# permissions and limitations under the License.
"""
Script used for testing checkpointing.
The script reports "nothing" if the checkpoint folder is empty, then writes the name passed as an argument to the checkpoint.
If a checkpoint is already present, it reports the checkpoint's content instead.
"""
import argparse
import logging
import os
from pathlib import Path
from syne_tune.constants import ST_CHECKPOINT_DIR
from syne_tune import Reporter
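# A minimal usage sketch (hypothetical trial name and directory, and assuming
# ST_CHECKPOINT_DIR resolves to "st_checkpoint_dir"):
#   python main_checkpoint.py --name trial-0 --st_checkpoint_dir /tmp/ckpt
# The first run reports checkpoint_content="nothing"; later runs report the
# name saved by the previous run.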
def load_checkpoint(checkpoint_path: Path):
with open(checkpoint_path, "r") as f:
return f.readline()
def save_checkpoint(checkpoint_path: Path, content: str):
with open(checkpoint_path, "w") as f:
f.write(content)
if __name__ == '__main__':
root = logging.getLogger()
root.setLevel(logging.INFO)
parser = argparse.ArgumentParser()
parser.add_argument('--name', type=str, required=True)
    # By convention, the path used to serialize and deserialize checkpoints is passed as the checkpoint dir argument
parser.add_argument(f'--{ST_CHECKPOINT_DIR}', type=str, default="./")
args, _ = parser.parse_known_args()
checkpoint_path = Path(getattr(args, ST_CHECKPOINT_DIR)) / "checkpoint.txt"
os.makedirs(checkpoint_path.parent, exist_ok=True)
if checkpoint_path.exists():
checkpoint_content = load_checkpoint(checkpoint_path)
else:
checkpoint_content = "nothing"
report = Reporter()
report(checkpoint_content=checkpoint_content)
save_checkpoint(checkpoint_path, args.name)
| 32.451613 | 117 | 0.742545 |
4a22cdb2399285fb677b2b60d900d4d73da9c3f1 | 6,236 | py | Python | my_tt/SocialApp/logics.py | tanproject/tantan | 9d6653f07be0f6d1d8726cce15789e4fae729725 | [
"Apache-2.0"
] | null | null | null | my_tt/SocialApp/logics.py | tanproject/tantan | 9d6653f07be0f6d1d8726cce15789e4fae729725 | [
"Apache-2.0"
] | 1 | 2020-10-05T07:58:24.000Z | 2020-10-05T07:58:24.000Z | my_tt/SocialApp/logics.py | tanproject/tantan | 9d6653f07be0f6d1d8726cce15789e4fae729725 | [
"Apache-2.0"
] | null | null | null | import datetime
from my_tt import config
from UserApp.models import User, Profile
from SocialApp.models import Swiped, Friend
from libs.cache import rds
from common import keys, errors
from django.db.transaction import atomic
from django.db.models import Q
def rcmd_from_list(user_id):
    '''Fetch candidate users to swipe from the priority recommendation list.'''
uid_list = rds.lrange(keys.PRIOR_RCMD_LIST % user_id, 0, 19)
    uid_list = [int(uid) for uid in uid_list]  # cast the bytes values returned by Redis to int
users = User.objects.filter(id__in=uid_list)
return users
def rcmd_from_db(user_id, num=20):
    '''Fetch candidate users to swipe from the database.'''
user_profile = Profile.objects.get(id=user_id)
today = datetime.date.today()
    '''Compute the birthdays corresponding to the maximum and minimum dating ages.'''
earliest_birthday = today - \
datetime.timedelta(user_profile.max_dating_age * 365)
latest_birthday = today - \
datetime.timedelta(user_profile.min_dating_age * 365)
    '''Find all users this user has already swiped on.'''
slide_list = Swiped.objects.filter(user_id=user_id).values_list('sid', flat=True)
    # Exclude users who have already been swiped on
users = User.objects.filter(
gender=user_profile.dating_gender,
location=user_profile.dating_location,
birthday__range=[earliest_birthday, latest_birthday]
).exclude(id__in=slide_list)[:num]
    # (lazy loading) only take the first 20; even after slicing, users is still a queryset because it contains multiple objects.
return users
def rcmd(user_id):
    '''Recommend users to swipe on.'''
first_users = rcmd_from_list(user_id)
    lack = 20 - len(first_users)  # how many more users are needed to reach 20
if lack == 0:
return first_users
else:
second_users = rcmd_from_db(user_id, lack)
return set(first_users) | set(second_users)
@atomic
def like_someone(user_id, sid):
    '''Record the swipe.'''
Swiped.swpied(user_id, sid, 'like')
    '''Remove the other user's id from our own priority recommendation list.'''
rds.lrem(keys.PRIOR_RCMD_LIST % user_id, value=sid)
    '''Liking someone adds to their hot-rank score.'''
rds.zincrby(keys.HOT_RANK, sid, config.RANK_SCORE['like'])
    '''Check whether the other user has already superliked (swiped up) or liked (swiped right) you.'''
liked = Swiped.is_liked(user_id, sid)
if liked is True:
        '''If they liked you too, match the two of you as friends.'''
Friend.make_friends(user_id, sid)
return True
else:
return False
@atomic
def superlike_someone(user_id, sid):
    '''Superlike someone.'''
    '''Record the swipe.'''
Swiped.swpied(user_id, sid, 'superlike')
    '''Remove the other user's id from our own priority recommendation list.'''
rds.lrem(keys.PRIOR_RCMD_LIST % user_id, value=sid)
    '''Superliking someone adds to their hot-rank score.'''
rds.zincrby(keys.HOT_RANK, sid, config.RANK_SCORE['superlike'])
    '''Check whether the other user has already superliked (swiped up) or liked (swiped right) you.'''
liked = Swiped.is_liked(user_id, sid)
    if liked is True:  # they like you
        '''If they liked you too, match the two of you as friends.'''
Friend.make_friends(user_id, sid)
return True
    elif liked is False:  # they do not like you
return False
    else:  # they have not swiped you yet; push your id onto their priority recommendation list
rds.rpush(keys.PRIOR_RCMD_LIST % sid, user_id)
return False
@atomic
def dislike_someone(user_id, sid):
    '''Record the swipe.'''
Swiped.objects.create(user_id=user_id, sid=sid, stype='dislike')
    '''Disliking someone updates their hot-rank score.'''
rds.zincrby(keys.HOT_RANK, sid, config.RANK_SCORE['dislike'])
    '''Remove the other user's id from our priority recommendation list cached in Redis.'''
rds.lrem(keys.PRIOR_RCMD_LIST % user_id, value=sid)
def rewind_last_swiped(user_id):
    '''Undo the last swipe. Up to three undos are allowed per day, and only within 5 minutes of the last swipe.'''
now = datetime.datetime.now()
rewind_key = keys.REWIND_TIMES_K % (now.date(), user_id)
    # 1. Check whether today's undo count has already exceeded the limit of three
rewind_times = rds.get(rewind_key, 0)
if rewind_times > config.REWIND_TIMES:
raise errors.RewindLimit()
    # 2. Find the last swipe
last_swpied = Swiped.objects.filter(user_id=user_id).latest('stime')
    # 3. Check that the last swipe happened within the past 5 minutes
past_time = (now - last_swpied.stime).total_seconds()
if past_time > config.REWIND_TIMEOUT:
raise errors.RewindTimeout()
with atomic():
        # 4. Remove the friend relation (a previous like or superlike may have produced a match)
if last_swpied.stype in ['like', 'superlike']:
Friend.remove_relation(user_id, last_swpied.sid)
        # 5. If the last swipe was a superlike, remove our id from the other user's priority recommendation list
if last_swpied.stype == 'superlike':
rds.lrem(keys.PRIOR_RCMD_LIST % last_swpied.sid, value=user_id)
        # 6. Delete the swipe record
last_swpied.delete()
    # Increment today's undo count by one
    rds.set(rewind_key, rewind_times + 1, 86500)  # cache expires after one day plus 100 seconds
    '''Roll back the score change that the last swipe applied to the other user.'''
rds.zincrby(keys.HOT_RANK, last_swpied.sid, -config.RANK_SCORE[last_swpied.stype])
def show_like_me(user_id):
    '''Find the users this user has already swiped on.'''
sid_list = Swiped.objects.filter(user_id=user_id).values_list('sid', flat=True)
    '''Find users who liked or superliked me, excluding those I have already swiped on.'''
fans_id_list = Swiped.objects.filter(sid=user_id, stype__in=['like', 'superlike']) \
.exclude(user_id__in=sid_list) \
.values_list('user_id', flat=True)
fans = User.objects.filter(id__in=fans_id_list)
return fans
def show_my_friend(uid):
    '''Find the ids of all of this user's friends.'''
condition = Q(user_id1=uid) | Q(user_id2=uid)
friends_id_list = []
for friend in Friend.objects.filter(condition):
if friend.user_id1 == uid:
friends_id_list.append(friend.user_id2)
else:
friends_id_list.append(friend.user_id1)
all_friends = User.objects.filter(id__in=friends_id_list)
return all_friends
def get_rank_list(RANK_NUM):
    '''Take the top RANK_NUM entries from the sorted set as a list, e.g. [(b'678', 103.0), (b'43', 100.0), ...]'''
data_list = rds.zrevrange(keys.HOT_RANK, 0, RANK_NUM - 1, withscores=True)
    cleaned_list = [(int(uid), int(score)) for uid, score in data_list]  # cast every uid and score in the list to int
    uid_list = [uid[0] for uid in cleaned_list]  # list of just the uids
    rank_users = User.objects.filter(id__in=uid_list)  # this queryset comes back ordered by id; the zset order must take precedence
sorted_users = sorted(rank_users, key=lambda user: uid_list.index(user.id))
rank_data = []
for index, (_, score) in enumerate(cleaned_list):
rank = index + 1
user = sorted_users[index]
user_data = user.to_dict(exclude=['phonenum', 'birthday',
'location', 'vip_id', 'vip_end'])
user_data['rank'] = rank
user_data['score'] = score
rank_data.append(user_data)
return rank_data
| 32.14433 | 102 | 0.643682 |
4a22d020e29ccce2f3f43a8b4e9e7485a913e76f | 385 | py | Python | TrackingTools/GsfTracking/python/CkfElectronTrajectoryBuilder_cfi.py | SWuchterl/cmssw | 769b4a7ef81796579af7d626da6039dfa0347b8e | [
"Apache-2.0"
] | 6 | 2017-09-08T14:12:56.000Z | 2022-03-09T23:57:01.000Z | TrackingTools/GsfTracking/python/CkfElectronTrajectoryBuilder_cfi.py | SWuchterl/cmssw | 769b4a7ef81796579af7d626da6039dfa0347b8e | [
"Apache-2.0"
] | 545 | 2017-09-19T17:10:19.000Z | 2022-03-07T16:55:27.000Z | TrackingTools/GsfTracking/python/CkfElectronTrajectoryBuilder_cfi.py | SWuchterl/cmssw | 769b4a7ef81796579af7d626da6039dfa0347b8e | [
"Apache-2.0"
] | 14 | 2017-10-04T09:47:21.000Z | 2019-10-23T18:04:45.000Z | import FWCore.ParameterSet.Config as cms
import copy
from RecoTracker.CkfPattern.CkfTrajectoryBuilder_cfi import *
CkfElectronTrajectoryBuilder = copy.deepcopy(CkfTrajectoryBuilder)
CkfElectronTrajectoryBuilder.propagatorAlong = 'fwdElectronPropagator'
CkfElectronTrajectoryBuilder.propagatorOpposite = 'bwdElectronPropagator'
CkfElectronTrajectoryBuilder.estimator = 'electronChi2'
| 38.5 | 73 | 0.883117 |
4a22d0330d7b3d91f5be859f4639fe68a50cba86 | 338 | py | Python | comments/models.py | prettyirrelevant/instagram-api-clone | 1af6066694a7bed60ab1ddd1141c0832efaccaf2 | [
"MIT"
] | null | null | null | comments/models.py | prettyirrelevant/instagram-api-clone | 1af6066694a7bed60ab1ddd1141c0832efaccaf2 | [
"MIT"
] | null | null | null | comments/models.py | prettyirrelevant/instagram-api-clone | 1af6066694a7bed60ab1ddd1141c0832efaccaf2 | [
"MIT"
] | null | null | null | from django.db import models
from posts.models import Posts
# Create your models here.
class Comments(models.Model):
post = models.ForeignKey(Posts, on_delete=models.CASCADE)
comment = models.CharField(max_length=200)
created_at = models.DateTimeField(auto_now_add=True)
updated_at = models.DateTimeField(auto_now=True)
| 30.727273 | 61 | 0.775148 |
4a22d07bf027675c17bc826bbf22b39c28c62260 | 2,420 | py | Python | scripts/Msm_Design_Runner.py | trenth12/MSM-Design | b78b0c69f612b8451b0f9d15b99a3b68f817ec4e | [
"MIT"
] | 1 | 2017-10-05T06:50:48.000Z | 2017-10-05T06:50:48.000Z | scripts/Msm_Design_Runner.py | trenth12/MSM-Design | b78b0c69f612b8451b0f9d15b99a3b68f817ec4e | [
"MIT"
] | null | null | null | scripts/Msm_Design_Runner.py | trenth12/MSM-Design | b78b0c69f612b8451b0f9d15b99a3b68f817ec4e | [
"MIT"
] | null | null | null | # -*- coding: utf-8 -*-
"""
Created on Thu Oct 05 22:09:36 2017
@author: Trent
"""
#import matplotlib
#matplotlib.use('Agg')
from msm_design import msm_model
from msm_design import msm_optimization
#trial = msm_model.SGD_Static(pdbfile='C:/Users/Trent/Desktop/PythonFiles/Project/Package/State0.pdb',
# sequence_energy_file='C:/Users/Trent/Desktop/PythonFiles/Project/ubiquitin_State0_energies_amber_test.txt',
# sequence_alignment_file='C:/Users/Trent/Desktop/PythonFiles/Project/smallalignment.txt',
# wt_seq='MQIFVKTLTGKTITLEVEPSDTIENVKAKIQDKEGIPPDQQRLIFAGKQLEDGRTLSDYNIQKESTLHLVLRLRGG',
# gamma_multiplier=1,regular_linear_regression=True,lasso=False,
# ridge=False,ridge_coef=.0001,lambda_lasso_coef=.0001,pair_select_list=False,pair_dist=3)
##
#trial.static_model(model_output_text_file=False)
#trial = SGD_Online(pdbfile='/home/[email protected]/ubiquitin_structures/State76.pdb',sequence_alignment_file='smallalignment.txt',
# wt_seq='MQIFVKTLTGKTITLEVEPSDTIENVKAKIQDKEGIPPDQQRLIFAGKQLEDGRTLSDYNIQKESTLHLVLRLRGG',
# pair_select_list=True,pair_dist=2,output_energy_file=True)
#print(score_sequence_rosetta(pdbfile='/home/[email protected]/ubiquitin_structures/State6.pdb',newseq='MQIFVKTLTGKTITLEVEPSDTIENVKAKIQDKEGIPPDQQRLIFAGKQLEDGRTLSDYNIQKESTLHLVLRLRGG'))
#print(score_sequence_rosetta(pdbfile=trial.pdbfile,newseq=trial.wt_seq))
#print(score_sequence_amber(pdbfile='/home/[email protected]/ubiquitin_structures/State6.pdb',newseq='MQIFVKTLTGKTITLEVEPSDTIENVKAKIQDKEGIPPDQQRLIFAGKQLEDGRTLSDYNIQKESTLHLVLRLRGG'))
#print(score_sequence_amber(pdbfile=trial.pdbfile,newseq=trial.wt_seq))
#trial.online_model(num_mutations=20,energy_scoring_function='rosetta',max_computation_time=252000,max_training_sets=200,mad_cutoff=2)
#
opt=msm_optimization.Sequence_Optimization(model_file_dir='C:/Users/Trent/Desktop/PythonFiles/Project/w_amber_nopair//',
wt_seq='MQIFVKTLTGKTITLEVEPSDTIENVKAKIQDKEGIPPDQQRLIFAGKQLEDGRTLSDYNIQKESTLHLVLRLRGG',
sequence_alignment_file='C:/Users/Trent/Desktop/PythonFiles/Project/smallalignment.txt',reduce_alignment=1)
So = opt.two_state_design([[7,34],[5,19]],plot_separate=True,num_mutation_pathway_passes=100,
write_opt_file_name='two_state_output.txt',
pdf_output_plot='two_state_plot_output.pdf') | 57.619048 | 187 | 0.788017 |
4a22d20eecb6e710bfcd5e9ce74343876b11714b | 915 | py | Python | conftest.py | pavlovprojects/python_qa_remotes | 7a4401f7e962597c242084467c0cb77e319debf4 | [
"MIT"
] | null | null | null | conftest.py | pavlovprojects/python_qa_remotes | 7a4401f7e962597c242084467c0cb77e319debf4 | [
"MIT"
] | null | null | null | conftest.py | pavlovprojects/python_qa_remotes | 7a4401f7e962597c242084467c0cb77e319debf4 | [
"MIT"
] | null | null | null | import pytest
from selenium import webdriver
def pytest_addoption(parser):
parser.addoption("--browser", action="store", default="opera",
choices=["chrome", "firefox", "opera", "yandex"])
parser.addoption("--executor", action="store", default="192.168.8.112")
@pytest.fixture
def firefox(request):
wd = webdriver.Firefox()
request.addfinalizer(wd.quit)
return wd
@pytest.fixture
def chrome(request):
wd = webdriver.Chrome()
request.addfinalizer(wd.quit)
return wd
@pytest.fixture
def remote(request):
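    # Connects to a Selenium Grid hub on the host given by --executor
    # (4444 is the default Grid port) and requests the browser named by
    # --browser; the window is maximized and the driver is quit at teardown.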
browser = request.config.getoption("--browser")
executor = request.config.getoption("--executor")
wd = webdriver.Remote(command_executor=f"http://{executor}:4444/wd/hub",
desired_capabilities={"browserName": browser}) # "platform": "linux"
wd.maximize_window()
request.addfinalizer(wd.quit)
return wd
| 26.142857 | 95 | 0.666667 |
4a22d41c05558f350913d6a3e57ad5148422e5e4 | 6,072 | py | Python | tests/sparse_product/test_clustered_sparse_product_cpu_v2.py | SamuelCahyawijaya/fast-transformers | 6ae8ed4cc50bd037968db4f5062e4d328aae73fe | [
"MIT"
] | 1,171 | 2020-06-30T01:57:19.000Z | 2022-03-31T15:11:25.000Z | tests/sparse_product/test_clustered_sparse_product_cpu_v2.py | SamuelCahyawijaya/fast-transformers | 6ae8ed4cc50bd037968db4f5062e4d328aae73fe | [
"MIT"
] | 105 | 2020-06-30T14:40:56.000Z | 2022-02-08T16:31:45.000Z | tests/sparse_product/test_clustered_sparse_product_cpu_v2.py | SamuelCahyawijaya/fast-transformers | 6ae8ed4cc50bd037968db4f5062e4d328aae73fe | [
"MIT"
] | 127 | 2020-06-26T09:07:48.000Z | 2022-03-25T06:46:37.000Z | #
# Copyright (c) 2020 Idiap Research Institute, http://www.idiap.ch/
# Written by Angelos Katharopoulos <[email protected]>,
# Apoorv Vyas <[email protected]>
#
import os
import time
import unittest
import numpy as np
import torch
from torch.nn.init import normal_
from fast_transformers.aggregate import aggregate, broadcast, \
clustered_aggregate
from fast_transformers.hashing import compute_hashes
from fast_transformers.clustering.hamming import cluster
from fast_transformers.sparse_product import sparse_dot_product
from fast_transformers.sparse_product import clustered_sparse_dot_product
from fast_transformers.masking import LengthMask
def cluster_queries(Q, query_lengths, C, I, B):
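    # Hash each query vector with B random hyperplanes, then run the Hamming
    # clustering routine so every query is assigned to one of C clusters
    # (I iterations); returns the per-query cluster ids and per-cluster counts.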
N, H, L, E = Q.shape
planes = Q.new_empty((B, E+1))
normal_(planes)
planes[:, -1] = 0
hashes = compute_hashes(Q.view(N*H*L, E), planes).view(N, H, L)
# Cluster the hashes and return the cluster index per query
groups, counts = cluster(
hashes,
query_lengths,
clusters=C,
iterations=I,
bits=B
)
return groups, counts
def sparse_product(Q, K, groups, topk, counts, lengths, k, Q_grouped_orig):
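    # Sort queries by cluster id so each cluster forms a contiguous block, run
    # the clustered aggregation and sparse dot product on the sorted layout,
    # then scatter the products back to the original query order using the
    # inverse permutation (sorted_rev_gi).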
N, H, L, E = Q.shape
sorted_g, sorted_gi = torch.sort(groups.view(N*H, -1), dim=-1)
sorted_rev_gi = torch.argsort(sorted_gi, dim=-1)
q_offset = torch.arange(N*H, device=Q.device).unsqueeze(-1) * L
q_flat = (sorted_gi + q_offset).reshape(-1)
    # rearrange queries
s_queries = Q.reshape(-1, E).index_select(0, q_flat).view(N, H, L, E)
Q_grouped = clustered_aggregate(
s_queries, sorted_g.view(N, H, L), 1/counts.float(), lengths
)
topk = topk.contiguous()
products_sorted = clustered_sparse_dot_product(
s_queries, K, topk, sorted_g.view(N, H, L), counts, lengths
)
q_rev_flat = (sorted_rev_gi + q_offset).reshape(-1)
products = products_sorted.reshape(-1, k).index_select(0, q_rev_flat)
products = products.view(N, H, L, k)
return products, Q_grouped
class TestSparseProductCUDA(unittest.TestCase):
@property
def device(self):
return "cpu"
def test_simple_product(self):
N = 2
H = 2
L = 100
E = 32
S = 50
k = 32
C = 5
I = 5
B = 16
for i in range(20):
k = np.random.randint(10, S)
E = np.random.randint(10, 129)
k = 32
E = 32
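            # note: the randomly drawn k and E above are immediately overridden
            # with fixed values, so this CPU test always runs with k = E = 32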
if os.getenv("VERBOSE_TESTS", ""):
print(("Testing: N H L S E C k: "
"{} {} {} {} {} {} {}").format(N, H, L, S, E, C, k))
Q = torch.randn(N, H, L, E).to(self.device)
K = torch.randn(N, H, S, E).to(self.device)
lengths = torch.full((N,), L, dtype=torch.int32).to(self.device)
lengths[1] = 50
lengths[1] = 45
lengths[1] = 10
groups, counts = cluster_queries(Q, lengths, C, I, B)
Q_grouped = aggregate(Q, groups, 1/counts.float())
QK = torch.einsum("nhle,nhse->nhls", Q_grouped, K)
_, topk = torch.topk(QK, k, sorted=False, dim=-1)
topk = topk.contiguous()
products, Q_grouped_alt = sparse_product(
Q, K, groups, topk, counts, lengths, k, Q_grouped
)
topk_broadcast = broadcast(
topk.float(),
groups,
torch.ones_like(counts, dtype=torch.float32),
torch.zeros((N, H, L, k), device=Q.device)
)
all_products = torch.einsum("nhle,nhse->nhls", Q, K)
products_2 = all_products[
torch.arange(N).view(N, 1, 1, 1),
torch.arange(H).view(1, H, 1, 1),
torch.arange(L).view(1, 1, L, 1),
topk_broadcast.long()
]
for i in range(N):
p_1 = products[i, :, :lengths[i], :]
p_2 = products_2[i, :, :lengths[i], :]
self.assertLess(
torch.max(torch.abs(
p_2 - p_1
)
),
1e-4
)
def test_difficult_product(self):
N = 12
H = 5
I = 5
B = 16
for exp in range(30):
C = np.random.randint(10, 500)
L = np.random.randint(C, 2000)
E = np.random.randint(10, 128)
S = np.random.randint(100, 1000)
k = np.random.randint(10, 64)
if os.getenv("VERBOSE_TESTS", ""):
print(("Testing: N H L S E C k: "
"{} {} {} {} {} {} {}").format(N, H, L, S, E, C, k))
Q = torch.randn(N, H, L, E).to(self.device)
K = torch.randn(N, H, S, E).to(self.device)
lengths = torch.full((N,), L, dtype=torch.int32).to(self.device)
groups, counts = cluster_queries(Q, lengths, C, I, B)
Q_grouped = aggregate(Q, groups, 1/counts.float())
QK = torch.einsum("nhle,nhse->nhls", Q_grouped, K)
_, topk = torch.topk(QK, k, dim=-1)
topk = topk.contiguous()
products, _ = sparse_product(
Q, K, groups, topk, counts, lengths, k, Q_grouped
)
topk_broadcast = broadcast(
topk.float(),
groups,
torch.ones_like(counts, dtype=torch.float32),
torch.zeros((N, H, L, k), device=Q.device)
)
all_products = torch.einsum("nhle,nhse->nhls", Q, K)
products_2 = all_products[
torch.arange(N).view(N, 1, 1, 1),
torch.arange(H).view(1, H, 1, 1),
torch.arange(L).view(1, 1, L, 1),
topk_broadcast.long()
]
self.assertLess(
torch.max(torch.abs(
products_2 - products
)
),
1e-4
)
if __name__ == "__main__":
unittest.main()
| 32.470588 | 76 | 0.524045 |
4a22d5d341309b7eb5ca8fd1df31c2d08e7d7adf | 47,334 | py | Python | registration/tests/models.py | vtemian/django-registration | 1fc1413cce2ef65b77daa5096ea5ca546786586b | [
"BSD-3-Clause"
] | null | null | null | registration/tests/models.py | vtemian/django-registration | 1fc1413cce2ef65b77daa5096ea5ca546786586b | [
"BSD-3-Clause"
] | null | null | null | registration/tests/models.py | vtemian/django-registration | 1fc1413cce2ef65b77daa5096ea5ca546786586b | [
"BSD-3-Clause"
] | null | null | null | import datetime
import hashlib
import random
import re
import string
import warnings
from copy import copy
from datetime import timedelta
import django
from django.apps import apps
from django.conf import settings
from django.core import mail
from django.core import management
from django.core.exceptions import ImproperlyConfigured
from django.test import TransactionTestCase
from django.test import override_settings
from django.utils.crypto import get_random_string
from django.utils.timezone import now as datetime_now
from registration.models import RegistrationProfile
from registration.models import SupervisedRegistrationProfile
from registration.users import UserModel
if django.VERSION[0] < 3:
from django.utils.six import text_type, b as six_b
else:
text_type = str
def six_b(s):
return s.encode("latin-1")
Site = apps.get_model('sites', 'Site')
@override_settings(ACCOUNT_ACTIVATION_DAYS=7,
REGISTRATION_DEFAULT_FROM_EMAIL='[email protected]',
REGISTRATION_EMAIL_HTML=True,
DEFAULT_FROM_EMAIL='[email protected]')
class RegistrationModelTests(TransactionTestCase):
"""
Test the model and manager used in the default backend.
"""
user_info = {'username': 'alice',
'password': 'swordfish',
'email': '[email protected]'}
registration_profile = RegistrationProfile
def setUp(self):
warnings.simplefilter('always', UserWarning)
def test_profile_creation(self):
"""
Creating a registration profile for a user populates the
profile with the correct user and a SHA256 hash to use as
activation key.
"""
new_user = UserModel().objects.create_user(**self.user_info)
profile = self.registration_profile.objects.create_profile(new_user)
self.assertEqual(self.registration_profile.objects.count(), 1)
self.assertEqual(profile.user.id, new_user.id)
self.assertTrue(re.match('^[a-f0-9]{40,64}$', profile.activation_key))
self.assertEqual(text_type(profile),
"Registration information for alice")
def test_activation_email(self):
"""
``RegistrationProfile.send_activation_email`` sends an
email.
"""
new_user = UserModel().objects.create_user(**self.user_info)
profile = self.registration_profile.objects.create_profile(new_user)
profile.send_activation_email(Site.objects.get_current())
self.assertEqual(len(mail.outbox), 1)
self.assertEqual(mail.outbox[0].to, [self.user_info['email']])
@override_settings(ACTIVATION_EMAIL_HTML='does-not-exist')
def test_activation_email_missing_template(self):
"""
``RegistrationProfile.send_activation_email`` sends an
email.
"""
new_user = UserModel().objects.create_user(**self.user_info)
profile = self.registration_profile.objects.create_profile(new_user)
profile.send_activation_email(Site.objects.get_current())
self.assertEqual(len(mail.outbox), 1)
self.assertEqual(mail.outbox[0].to, [self.user_info['email']])
def test_activation_email_uses_registration_default_from_email(self):
"""
``RegistrationProfile.send_activation_email`` sends an
email.
"""
new_user = UserModel().objects.create_user(**self.user_info)
profile = self.registration_profile.objects.create_profile(new_user)
profile.send_activation_email(Site.objects.get_current())
self.assertEqual(mail.outbox[0].from_email, '[email protected]')
@override_settings(REGISTRATION_DEFAULT_FROM_EMAIL=None)
def test_activation_email_falls_back_to_django_default_from_email(self):
"""
``RegistrationProfile.send_activation_email`` sends an
email.
"""
new_user = UserModel().objects.create_user(**self.user_info)
profile = self.registration_profile.objects.create_profile(new_user)
profile.send_activation_email(Site.objects.get_current())
self.assertEqual(mail.outbox[0].from_email, '[email protected]')
@override_settings(REGISTRATION_USE_SITE_EMAIL=True,
REGISTRATION_SITE_USER_EMAIL='admin')
def test_activation_email_uses_site_address(self):
"""
``RegistrationProfile.send_activation_email`` sends an
email with the ``from`` address configured by the site.
"""
new_user = UserModel().objects.create_user(**self.user_info)
profile = self.registration_profile.objects.create_profile(new_user)
site = Site.objects.get_current()
profile.send_activation_email(site)
from_email = 'admin@{}'.format(site.domain)
self.assertEqual(mail.outbox[0].from_email, from_email)
@override_settings(REGISTRATION_USE_SITE_EMAIL=True)
def test_activation_email_uses_site_address_improperly_configured(self):
"""
``RegistrationProfile.send_activation_email`` won't send an email if
improperly configured.
"""
new_user = UserModel().objects.create_user(**self.user_info)
profile = self.registration_profile.objects.create_profile(new_user)
with self.assertRaises(ImproperlyConfigured):
profile.send_activation_email(Site.objects.get_current())
def test_activation_email_is_html_by_default(self):
"""
``RegistrationProfile.send_activation_email`` sends an html
email by default.
"""
new_user = UserModel().objects.create_user(**self.user_info)
profile = self.registration_profile.objects.create_profile(new_user)
profile.send_activation_email(Site.objects.get_current())
self.assertEqual(len(mail.outbox[0].alternatives), 1)
@override_settings(REGISTRATION_EMAIL_HTML=False)
def test_activation_email_is_plain_text_if_html_disabled(self):
"""
``RegistrationProfile.send_activation_email`` sends a plain
text email if settings.REGISTRATION_EMAIL_HTML is False.
"""
new_user = UserModel().objects.create_user(**self.user_info)
profile = self.registration_profile.objects.create_profile(new_user)
profile.send_activation_email(Site.objects.get_current())
self.assertEqual(len(mail.outbox[0].alternatives), 0)
def test_user_creation(self):
"""
Creating a new user populates the correct data, and sets the
user's account inactive.
"""
new_user = self.registration_profile.objects.create_inactive_user(
site=Site.objects.get_current(), **self.user_info)
self.assertEqual(new_user.get_username(), 'alice')
self.assertEqual(new_user.email, '[email protected]')
self.assertTrue(new_user.check_password('swordfish'))
self.assertFalse(new_user.is_active)
expiration_date = datetime_now() - timedelta(
settings.ACCOUNT_ACTIVATION_DAYS
)
self.assertGreater(new_user.date_joined, expiration_date)
def test_user_creation_email(self):
"""
By default, creating a new user sends an activation email.
"""
self.registration_profile.objects.create_inactive_user(
site=Site.objects.get_current(), **self.user_info)
self.assertEqual(len(mail.outbox), 1)
def test_user_creation_no_email(self):
"""
Passing ``send_email=False`` when creating a new user will not
send an activation email.
"""
self.registration_profile.objects.create_inactive_user(
site=Site.objects.get_current(),
send_email=False, **self.user_info)
self.assertEqual(len(mail.outbox), 0)
def test_user_creation_old_date_joined(self):
"""
If ``user.date_joined`` is well in the past, ensure that we reset it.
"""
new_user = self.registration_profile.objects.create_inactive_user(
site=Site.objects.get_current(), **self.user_info)
self.assertEqual(new_user.get_username(), 'alice')
self.assertEqual(new_user.email, '[email protected]')
self.assertTrue(new_user.check_password('swordfish'))
self.assertFalse(new_user.is_active)
expiry_date = datetime_now() - timedelta(settings.ACCOUNT_ACTIVATION_DAYS)
self.assertGreater(new_user.date_joined, expiry_date)
def test_unexpired_account_old_date_joined(self):
"""
``RegistrationProfile.activation_key_expired()`` is ``False`` within
the activation window. Even if the user was created in the past.
"""
self.user_info['date_joined'] = datetime_now(
) - timedelta(settings.ACCOUNT_ACTIVATION_DAYS + 1)
new_user = self.registration_profile.objects.create_inactive_user(
site=Site.objects.get_current(), **self.user_info)
profile = self.registration_profile.objects.get(user=new_user)
self.assertFalse(profile.activation_key_expired())
def test_unexpired_account(self):
"""
``RegistrationProfile.activation_key_expired()`` is ``False``
within the activation window.
"""
new_user = self.registration_profile.objects.create_inactive_user(
site=Site.objects.get_current(), **self.user_info)
profile = self.registration_profile.objects.get(user=new_user)
self.assertFalse(profile.activation_key_expired())
def test_active_account_activation_key_expired(self):
"""
``RegistrationProfile.activation_key_expired()`` is ``True``
when the account is already active.
"""
new_user = self.registration_profile.objects.create_inactive_user(
site=Site.objects.get_current(), **self.user_info)
profile = self.registration_profile.objects.get(user=new_user)
self.registration_profile.objects.activate_user(
profile.activation_key, Site.objects.get_current())
profile.refresh_from_db()
self.assertTrue(profile.activation_key_expired())
def test_active_account_and_expired_accountactivation_key_expired(self):
"""
``RegistrationProfile.activation_key_expired()`` is ``True``
when the account is already active and the activation window has passed.
"""
new_user = self.registration_profile.objects.create_inactive_user(
site=Site.objects.get_current(), **self.user_info)
new_user.date_joined -= datetime.timedelta(
days=settings.ACCOUNT_ACTIVATION_DAYS + 1)
new_user.save()
profile = self.registration_profile.objects.get(user=new_user)
self.registration_profile.objects.activate_user(
profile.activation_key, Site.objects.get_current())
profile.refresh_from_db()
self.assertTrue(profile.activation_key_expired())
def test_expired_account(self):
"""
``RegistrationProfile.activation_key_expired()`` is ``True``
outside the activation window.
"""
new_user = self.registration_profile.objects.create_inactive_user(
site=Site.objects.get_current(), **self.user_info)
new_user.date_joined -= datetime.timedelta(
days=settings.ACCOUNT_ACTIVATION_DAYS + 1)
new_user.save()
profile = self.registration_profile.objects.get(user=new_user)
self.assertTrue(profile.activation_key_expired())
def test_valid_activation(self):
"""
Activating a user within the permitted window makes the
account active, and resets the activation key.
"""
new_user = self.registration_profile.objects.create_inactive_user(
site=Site.objects.get_current(), **self.user_info)
profile = self.registration_profile.objects.get(user=new_user)
user, activated = self.registration_profile.objects.activate_user(
profile.activation_key, Site.objects.get_current())
self.assertIsInstance(user, UserModel())
self.assertEqual(user.id, new_user.id)
self.assertTrue(user.is_active)
self.assertTrue(activated)
profile = self.registration_profile.objects.get(user=new_user)
self.assertTrue(profile.activated)
def test_valid_activation_with_profile(self):
"""
Activating a user within the permitted window makes the
account active, and resets the activation key.
"""
new_user = self.registration_profile.objects.create_inactive_user(
site=Site.objects.get_current(), **self.user_info)
profile = self.registration_profile.objects.get(user=new_user)
profile, activated = self.registration_profile.objects.activate_user(
profile.activation_key, Site.objects.get_current(), get_profile=True)
self.assertIsInstance(profile, self.registration_profile)
self.assertEqual(profile.id, profile.id)
self.assertTrue(profile.activated)
self.assertTrue(activated)
new_user.refresh_from_db()
self.assertTrue(profile.user.id, new_user.id)
self.assertTrue(new_user.is_active)
def test_expired_activation(self):
"""
Attempting to activate outside the permitted window does not
activate the account.
"""
new_user = self.registration_profile.objects.create_inactive_user(
site=Site.objects.get_current(), **self.user_info)
new_user.date_joined -= datetime.timedelta(
days=settings.ACCOUNT_ACTIVATION_DAYS + 1)
new_user.save()
profile = self.registration_profile.objects.get(user=new_user)
user, activated = self.registration_profile.objects.activate_user(
profile.activation_key, Site.objects.get_current())
self.assertIs(user, False)
self.assertFalse(activated)
new_user = UserModel().objects.get(username='alice')
self.assertFalse(new_user.is_active)
profile = self.registration_profile.objects.get(user=new_user)
self.assertFalse(profile.activated)
def test_activation_invalid_key(self):
"""
Attempting to activate with a key which is not a SHA256 hash
fails.
"""
user, activated = self.registration_profile.objects.activate_user(
'foo', Site.objects.get_current())
self.assertIs(user, False)
self.assertFalse(activated)
def test_activation_already_activated(self):
"""
Attempting to re-activate an already-activated account fails.
"""
new_user = self.registration_profile.objects.create_inactive_user(
site=Site.objects.get_current(), **self.user_info)
profile = self.registration_profile.objects.get(user=new_user)
self.registration_profile.objects.activate_user(
profile.activation_key, Site.objects.get_current())
profile = self.registration_profile.objects.get(user=new_user)
user, activated = self.registration_profile.objects.activate_user(
profile.activation_key, Site.objects.get_current())
self.assertEqual(user, new_user)
self.assertFalse(activated)
def test_activation_deactivated(self):
"""
Attempting to re-activate a deactivated account fails.
"""
new_user = self.registration_profile.objects.create_inactive_user(
site=Site.objects.get_current(), **self.user_info)
profile = self.registration_profile.objects.get(user=new_user)
self.registration_profile.objects.activate_user(
profile.activation_key, Site.objects.get_current())
# Deactivate the new user.
new_user.is_active = False
new_user.save()
# Try to activate again and ensure False is returned.
user, activated = self.registration_profile.objects.activate_user(
profile.activation_key, Site.objects.get_current())
self.assertFalse(activated)
def test_activation_nonexistent_key(self):
"""
Attempting to activate with a non-existent key (i.e., one not
associated with any account) fails.
"""
# Due to the way activation keys are constructed during
# registration, this will never be a valid key.
invalid_key = hashlib.sha256(six_b('foo')).hexdigest()
_, activated = self.registration_profile.objects.activate_user(
invalid_key, Site.objects.get_current())
self.assertFalse(activated)
def test_expired_user_deletion_activation_window(self):
"""
``RegistrationProfile.objects.delete_expired_users()`` only
deletes inactive users whose activation window has expired.
"""
self.registration_profile.objects.create_inactive_user(
site=Site.objects.get_current(), **self.user_info)
expired_user = (self.registration_profile.objects
.create_inactive_user(
site=Site.objects.get_current(),
username='bob',
password='secret',
email='[email protected]'))
expired_user.date_joined -= datetime.timedelta(
days=settings.ACCOUNT_ACTIVATION_DAYS + 1)
expired_user.save()
self.registration_profile.objects.delete_expired_users()
self.assertEqual(self.registration_profile.objects.count(), 1)
self.assertRaises(UserModel().DoesNotExist,
UserModel().objects.get, username='bob')
def test_expired_user_deletion_ignore_activated(self):
"""
``RegistrationProfile.objects.delete_expired_users()`` only
deletes inactive users whose activation window has expired and if
their profile is not activated.
"""
user = (self.registration_profile.objects
.create_inactive_user(
site=Site.objects.get_current(),
username='bob',
password='secret',
email='[email protected]'))
profile = self.registration_profile.objects.get(user=user)
_, activated = self.registration_profile.objects.activate_user(
profile.activation_key, Site.objects.get_current())
self.assertTrue(activated)
# Expire the activation window.
user.date_joined -= datetime.timedelta(
days=settings.ACCOUNT_ACTIVATION_DAYS + 1)
user.save()
self.registration_profile.objects.delete_expired_users()
self.assertEqual(self.registration_profile.objects.count(), 1)
self.assertEqual(UserModel().objects.get(username='bob'), user)
def test_expired_user_deletion_missing_user(self):
"""
``RegistrationProfile.objects.delete_expired_users()`` only deletes
inactive users whose activation window has expired. If a ``UserModel``
is not present, the delete continues gracefully.
"""
self.registration_profile.objects.create_inactive_user(
site=Site.objects.get_current(), **self.user_info)
expired_user = (self.registration_profile.objects
.create_inactive_user(
site=Site.objects.get_current(),
username='bob',
password='secret',
email='[email protected]'))
expired_user.date_joined -= datetime.timedelta(
days=settings.ACCOUNT_ACTIVATION_DAYS + 1)
expired_user.save()
# Ensure that we cleanup the expired profile even if the user does not
# exist
expired_user.delete()
self.registration_profile.objects.delete_expired_users()
self.assertEqual(self.registration_profile.objects.count(), 1)
self.assertRaises(UserModel().DoesNotExist,
UserModel().objects.get, username='bob')
def test_manually_registered_account(self):
"""
Test if a user failed to go through the registration flow but was
manually marked ``is_active`` in the DB. Although the profile is
expired and not active, we should never delete active users.
"""
active_user = (self.registration_profile.objects
.create_inactive_user(
site=Site.objects.get_current(),
username='bob',
password='secret',
email='[email protected]'))
active_user.date_joined -= datetime.timedelta(
days=settings.ACCOUNT_ACTIVATION_DAYS + 1)
active_user.is_active = True
active_user.save()
self.registration_profile.objects.delete_expired_users()
self.assertEqual(self.registration_profile.objects.count(), 1)
self.assertEqual(UserModel().objects.get(username='bob'), active_user)
def test_management_command(self):
"""
The ``cleanupregistration`` management command properly
deletes expired accounts.
"""
self.registration_profile.objects.create_inactive_user(
site=Site.objects.get_current(), **self.user_info)
expired_user = (self.registration_profile.objects
.create_inactive_user(site=Site.objects.get_current(),
username='bob',
password='secret',
email='[email protected]'))
expired_user.date_joined -= datetime.timedelta(
days=settings.ACCOUNT_ACTIVATION_DAYS + 1)
expired_user.save()
management.call_command('cleanupregistration')
self.assertEqual(self.registration_profile.objects.count(), 1)
self.assertRaises(UserModel().DoesNotExist,
UserModel().objects.get, username='bob')
def test_resend_activation_email(self):
"""
Test resending activation email to an existing user
"""
user = self.registration_profile.objects.create_inactive_user(
site=Site.objects.get_current(), send_email=False, **self.user_info)
self.assertEqual(len(mail.outbox), 0)
profile = self.registration_profile.objects.get(user=user)
orig_activation_key = profile.activation_key
self.assertTrue(self.registration_profile.objects.resend_activation_mail(
email=self.user_info['email'],
site=Site.objects.get_current(),
))
profile = self.registration_profile.objects.get(pk=profile.pk)
new_activation_key = profile.activation_key
self.assertNotEqual(orig_activation_key, new_activation_key)
self.assertEqual(len(mail.outbox), 1)
def test_resend_activation_email_nonexistent_user(self):
"""
        Test resending the activation email to a nonexistent user
"""
self.assertFalse(self.registration_profile.objects.resend_activation_mail(
email=self.user_info['email'],
site=Site.objects.get_current(),
))
self.assertEqual(len(mail.outbox), 0)
def test_resend_activation_email_activated_user(self):
"""
Test the scenario where user tries to resend activation code
to the already activated user's email
"""
user = self.registration_profile.objects.create_inactive_user(
site=Site.objects.get_current(), send_email=False, **self.user_info)
profile = self.registration_profile.objects.get(user=user)
user, activated = self.registration_profile.objects.activate_user(
profile.activation_key, Site.objects.get_current())
self.assertTrue(user.is_active)
self.assertTrue(activated)
self.assertFalse(self.registration_profile.objects.resend_activation_mail(
email=self.user_info['email'],
site=Site.objects.get_current(),
))
self.assertEqual(len(mail.outbox), 0)
def test_resend_activation_email_expired_user(self):
"""
Test the scenario where user tries to resend activation code
to the expired user's email
"""
new_user = self.registration_profile.objects.create_inactive_user(
site=Site.objects.get_current(), send_email=False, **self.user_info)
new_user.date_joined -= datetime.timedelta(
days=settings.ACCOUNT_ACTIVATION_DAYS + 1)
new_user.save()
profile = self.registration_profile.objects.get(user=new_user)
self.assertTrue(profile.activation_key_expired())
self.assertFalse(self.registration_profile.objects.resend_activation_mail(
email=self.user_info['email'],
site=Site.objects.get_current(),
))
self.assertEqual(len(mail.outbox), 0)
def test_resend_activation_email_nonunique_email(self):
"""
        Test the scenario where a user tries to resend the activation code
        when another user shares the same email address
"""
user1 = self.registration_profile.objects.create_inactive_user(
site=Site.objects.get_current(), send_email=False, **self.user_info)
user2_info = copy(self.user_info)
user2_info['username'] = 'bob'
user2 = self.registration_profile.objects.create_inactive_user(
site=Site.objects.get_current(), send_email=False, **user2_info)
self.assertEqual(user1.email, user2.email)
self.assertFalse(self.registration_profile.objects.resend_activation_mail(
email=self.user_info['email'],
site=Site.objects.get_current(),
))
self.assertEqual(len(mail.outbox), 0)
def test_activation_key_backwards_compatibility(self):
"""
Make sure that users created with the old create_new_activation_key
method can still be activated.
"""
current_method = self.registration_profile.create_new_activation_key
def old_method(self, save=True):
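            # Reimplementation of the legacy SHA-1 (salt + user pk) key
            # generation, monkeypatched in so we can verify that profiles
            # created by older versions can still be activated.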
salt = hashlib.sha1(
text_type(random.random()).encode('ascii')
).hexdigest()[:5]
salt = salt.encode('ascii')
user_pk = str(self.user.pk)
if isinstance(user_pk, text_type):
user_pk = user_pk.encode('utf-8')
self.activation_key = hashlib.sha1(salt + user_pk).hexdigest()
if save:
self.save()
return self.activation_key
self.registration_profile.create_new_activation_key = old_method
new_user = self.registration_profile.objects.create_inactive_user(
site=Site.objects.get_current(), **self.user_info)
profile = self.registration_profile.objects.get(user=new_user)
self.registration_profile.create_new_activation_key = current_method
user, activated = self.registration_profile.objects.activate_user(
profile.activation_key, Site.objects.get_current())
self.assertIsInstance(user, UserModel())
self.assertEqual(user.id, new_user.id)
self.assertTrue(user.is_active)
self.assertTrue(activated)
profile = self.registration_profile.objects.get(user=new_user)
self.assertTrue(profile.activated)
def test_activation_key_backwards_compatibility_sha1(self):
"""
Make sure that users created with the old create_new_activation_key
method can still be activated.
"""
current_method = self.registration_profile.create_new_activation_key
def old_method(self, save=True):
random_string = get_random_string(length=32, allowed_chars=string.printable)
self.activation_key = hashlib.sha1(random_string.encode('utf-8')).hexdigest()
if save:
self.save()
return self.activation_key
self.registration_profile.create_new_activation_key = old_method
new_user = self.registration_profile.objects.create_inactive_user(
site=Site.objects.get_current(), **self.user_info)
profile = self.registration_profile.objects.get(user=new_user)
self.registration_profile.create_new_activation_key = current_method
user, activated = self.registration_profile.objects.activate_user(
profile.activation_key, Site.objects.get_current())
self.assertIsInstance(user, UserModel())
self.assertEqual(user.id, new_user.id)
self.assertTrue(user.is_active)
self.assertTrue(activated)
profile = self.registration_profile.objects.get(user=new_user)
self.assertTrue(profile.activated)
@override_settings(
ADMINS=(
('T-Rex', '[email protected]'),
('Flea', '[email protected]')
)
)
class SupervisedRegistrationModelTests(RegistrationModelTests):
"""
Test the model and manager used in the admin_approval backend.
"""
user_info = {'username': 'alice',
'password': 'swordfish',
'email': '[email protected]'}
registration_profile = SupervisedRegistrationProfile
def test_valid_activation(self):
"""
Activating a user within the permitted window makes the
account active, and resets the activation key.
"""
new_user = self.registration_profile.objects.create_inactive_user(
site=Site.objects.get_current(), **self.user_info)
profile = self.registration_profile.objects.get(user=new_user)
user, activated = self.registration_profile.objects.activate_user(
profile.activation_key, Site.objects.get_current())
self.assertIsInstance(user, UserModel())
self.assertEqual(user.id, new_user.id)
self.assertFalse(user.is_active)
self.assertTrue(activated)
profile = self.registration_profile.objects.get(user=new_user)
self.assertTrue(profile.activated)
def test_valid_activation_with_profile(self):
"""
Activating a user within the permitted window makes the
account active, and resets the activation key.
"""
new_user = self.registration_profile.objects.create_inactive_user(
site=Site.objects.get_current(), **self.user_info)
profile = self.registration_profile.objects.get(user=new_user)
profile, activated = self.registration_profile.objects.activate_user(
profile.activation_key, Site.objects.get_current(), get_profile=True)
self.assertIsInstance(profile, self.registration_profile)
self.assertEqual(profile.id, profile.id)
self.assertTrue(profile.activated)
self.assertTrue(activated)
new_user.refresh_from_db()
self.assertTrue(profile.user.id, new_user.id)
self.assertFalse(new_user.is_active)
def test_resend_activation_email_activated_user(self):
"""
Test the scenario where user tries to resend activation code
to the already activated user's email
"""
user = self.registration_profile.objects.create_inactive_user(
site=Site.objects.get_current(), send_email=False, **self.user_info)
profile = self.registration_profile.objects.get(user=user)
user, activated = self.registration_profile.objects.activate_user(
profile.activation_key, Site.objects.get_current())
self.assertFalse(user.is_active)
self.assertTrue(activated)
self.assertFalse(self.registration_profile.objects.resend_activation_mail(
email=self.user_info['email'],
site=Site.objects.get_current(),
))
# Outbox has one mail, admin approve mail
self.assertEqual(len(mail.outbox), 1)
admins_emails = [value[1] for value in settings.REGISTRATION_ADMINS]
for email in mail.outbox[0].to:
self.assertIn(email, admins_emails)
def test_admin_approval_email(self):
"""
``SupervisedRegistrationManager.send_admin_approve_email`` sends an
email to the site administrators
"""
new_user = UserModel().objects.create_user(**self.user_info)
profile = self.registration_profile.objects.create_profile(new_user)
profile.activated = True
self.registration_profile.objects.send_admin_approve_email(
new_user, Site.objects.get_current())
self.assertEqual(len(mail.outbox), 1)
admins_emails = [value[1] for value in settings.REGISTRATION_ADMINS]
for email in mail.outbox[0].to:
self.assertIn(email, admins_emails)
def test_admin_approval_email_uses_registration_default_from_email(self):
"""
``SupervisedRegistrationManager.send_admin_approve_email``` sends an
email.
"""
new_user = UserModel().objects.create_user(**self.user_info)
profile = self.registration_profile.objects.create_profile(new_user)
profile.activated = True
self.registration_profile.objects.send_admin_approve_email(
new_user, Site.objects.get_current())
self.assertEqual(mail.outbox[0].from_email, '[email protected]')
@override_settings(REGISTRATION_DEFAULT_FROM_EMAIL=None)
def test_admin_approval_email_falls_back_to_django_default_from_email(self):
"""
``SupervisedRegistrationManager.send_admin_approve_email`` sends an
email.
"""
new_user = UserModel().objects.create_user(**self.user_info)
profile = self.registration_profile.objects.create_profile(new_user)
profile.activated = True
self.registration_profile.objects.send_admin_approve_email(
new_user, Site.objects.get_current())
self.assertEqual(mail.outbox[0].from_email, '[email protected]')
def test_admin_approval_email_is_html_by_default(self):
"""
``SupervisedRegistrationProfile.send_activation_email`` sends an html
email by default.
"""
new_user = UserModel().objects.create_user(**self.user_info)
profile = self.registration_profile.objects.create_profile(new_user)
profile.activated = True
self.registration_profile.objects.send_admin_approve_email(
new_user, Site.objects.get_current())
self.assertEqual(len(mail.outbox[0].alternatives), 1)
@override_settings(REGISTRATION_EMAIL_HTML=False)
def test_admin_approval_email_is_plain_text_if_html_disabled(self):
"""
``SupervisedRegistrationProfile.send_activation_email`` sends a plain
text email if settings.REGISTRATION_EMAIL_HTML is False.
"""
new_user = UserModel().objects.create_user(**self.user_info)
profile = self.registration_profile.objects.create_profile(new_user)
profile.activated = True
self.registration_profile.objects.send_admin_approve_email(
new_user, Site.objects.get_current())
self.assertEqual(len(mail.outbox[0].alternatives), 0)
def test_active_account_activation_key_expired(self):
"""
``SupervisedRegistrationProfile.activation_key_expired()`` is ``True``
when the account is already active.
"""
new_user = self.registration_profile.objects.create_inactive_user(
site=Site.objects.get_current(), **self.user_info)
profile = self.registration_profile.objects.get(user=new_user)
self.registration_profile.objects.activate_user(
profile.activation_key, Site.objects.get_current())
self.registration_profile.objects.admin_approve_user(
profile.id, Site.objects.get_current())
profile.refresh_from_db()
self.assertTrue(profile.activation_key_expired())
def test_active_account_and_expired_accountactivation_key_expired(self):
"""
``SupervisedRegistrationProfile.activation_key_expired()`` is ``True``
when the account is already active and the activation window has passed.
"""
new_user = self.registration_profile.objects.create_inactive_user(
site=Site.objects.get_current(), **self.user_info)
new_user.date_joined -= datetime.timedelta(
days=settings.ACCOUNT_ACTIVATION_DAYS + 1)
new_user.save()
profile = self.registration_profile.objects.get(user=new_user)
self.registration_profile.objects.activate_user(
profile.activation_key, Site.objects.get_current())
self.registration_profile.objects.admin_approve_user(
profile.id, Site.objects.get_current())
profile.refresh_from_db()
self.assertTrue(profile.activation_key_expired())
def test_admin_approval_complete_email(self):
"""
``SupervisedRegistrationManager.send_admin_approve_complete_email``
sends an email to the approved user
"""
new_user = UserModel().objects.create_user(**self.user_info)
profile = self.registration_profile.objects.create_profile(new_user)
profile.send_admin_approve_complete_email(Site.objects.get_current())
self.assertEqual(len(mail.outbox), 1)
self.assertEqual(mail.outbox[0].to, [self.user_info['email']])
def test_admin_approval_complete_email_uses_registration_default_from_email(self):
"""
``SupervisedRegistrationManager.send_admin_approve_complete_email``
sends an email
"""
new_user = UserModel().objects.create_user(**self.user_info)
profile = self.registration_profile.objects.create_profile(new_user)
profile.send_admin_approve_complete_email(Site.objects.get_current())
self.assertEqual(len(mail.outbox), 1)
self.assertEqual(mail.outbox[0].from_email, '[email protected]')
@override_settings(REGISTRATION_DEFAULT_FROM_EMAIL=None)
def test_admin_approval_complete_email_falls_back_to_django_default_from_email(self):
"""
``SupervisedRegistrationManager.send_admin_approve_complete_email``
sends an email
"""
new_user = UserModel().objects.create_user(**self.user_info)
profile = self.registration_profile.objects.create_profile(new_user)
profile.send_admin_approve_complete_email(Site.objects.get_current())
self.assertEqual(len(mail.outbox), 1)
self.assertEqual(mail.outbox[0].from_email, '[email protected]')
def test_admin_approval_complete_email_is_html_by_default(self):
"""
``SupervisedRegistrationProfile.send_admin_approve_complete_email``
sends an html email by default.
"""
new_user = UserModel().objects.create_user(**self.user_info)
profile = self.registration_profile.objects.create_profile(new_user)
profile.send_admin_approve_complete_email(Site.objects.get_current())
self.assertEqual(len(mail.outbox), 1)
self.assertEqual(len(mail.outbox[0].alternatives), 1)
@override_settings(REGISTRATION_EMAIL_HTML=False)
def test_admin_approval_complete_email_is_plain_text_if_html_disabled(self):
"""
``SupervisedRegistrationProfile.send_admin_approve_complete_email``
sends a plain text email if settings.REGISTRATION_EMAIL_HTML is False.
"""
new_user = UserModel().objects.create_user(**self.user_info)
profile = self.registration_profile.objects.create_profile(new_user)
profile.send_admin_approve_complete_email(Site.objects.get_current())
self.assertEqual(len(mail.outbox), 1)
self.assertEqual(len(mail.outbox[0].alternatives), 0)
def test_valid_admin_approval(self):
"""
Approving an already activated user's account makes the user
active
"""
new_user = self.registration_profile.objects.create_inactive_user(
site=Site.objects.get_current(), **self.user_info)
profile = self.registration_profile.objects.get(user=new_user)
user, activated = self.registration_profile.objects.activate_user(
profile.activation_key, Site.objects.get_current())
self.assertIsInstance(user, UserModel())
user = self.registration_profile.objects.admin_approve_user(
profile.id, Site.objects.get_current())
self.assertIsInstance(user, UserModel())
self.assertIs(user.is_active, True)
def test_admin_approval_not_activated(self):
"""
Approving a non activated user's account fails
"""
new_user = self.registration_profile.objects.create_inactive_user(
site=Site.objects.get_current(), **self.user_info)
profile = self.registration_profile.objects.get(user=new_user)
user = self.registration_profile.objects.admin_approve_user(
profile.id, Site.objects.get_current())
self.assertIs(user, False)
self.assertIs(profile.user.is_active, False)
def test_admin_approval_already_approved(self):
"""
Approving an already approved user's account returns the User model
"""
new_user = self.registration_profile.objects.create_inactive_user(
site=Site.objects.get_current(), **self.user_info)
profile = self.registration_profile.objects.get(user=new_user)
user, activated = self.registration_profile.objects.activate_user(
profile.activation_key, Site.objects.get_current())
self.assertIsInstance(user, UserModel())
self.assertTrue(activated)
user = self.registration_profile.objects.admin_approve_user(
profile.id, Site.objects.get_current())
self.assertIsInstance(user, UserModel())
self.assertIs(user.is_active, True)
def test_admin_approval_nonexistent_id(self):
"""
        Approving a non-existent user profile does nothing and returns False
"""
new_user = self.registration_profile.objects.create_inactive_user(
site=Site.objects.get_current(), **self.user_info)
profile = self.registration_profile.objects.get(user=new_user)
user = self.registration_profile.objects.admin_approve_user(
profile.id, Site.objects.get_current())
self.assertIs(user, False)
def test_activation_already_activated(self):
"""
Attempting to re-activate an already-activated account fails.
"""
new_user = self.registration_profile.objects.create_inactive_user(
site=Site.objects.get_current(), **self.user_info)
profile = self.registration_profile.objects.get(user=new_user)
self.registration_profile.objects.activate_user(
profile.activation_key, Site.objects.get_current())
profile = self.registration_profile.objects.get(user=new_user)
_, activated = self.registration_profile.objects.activate_user(
profile.activation_key, Site.objects.get_current())
self.assertFalse(activated)
def test_activation_key_backwards_compatibility(self):
"""
Make sure that users created with the old create_new_activation_key
method can still be activated.
"""
current_method = self.registration_profile.create_new_activation_key
def old_method(self, save=True):
salt = hashlib.sha1(
text_type(random.random()).encode('ascii')
).hexdigest()[:5]
salt = salt.encode('ascii')
user_pk = str(self.user.pk)
if isinstance(user_pk, text_type):
user_pk = user_pk.encode('utf-8')
self.activation_key = hashlib.sha1(salt + user_pk).hexdigest()
if save:
self.save()
return self.activation_key
self.registration_profile.create_new_activation_key = old_method
new_user = self.registration_profile.objects.create_inactive_user(
site=Site.objects.get_current(), **self.user_info)
profile = self.registration_profile.objects.get(user=new_user)
self.registration_profile.create_new_activation_key = current_method
user, activated = self.registration_profile.objects.activate_user(
profile.activation_key, Site.objects.get_current())
self.assertIsInstance(user, UserModel())
self.assertEqual(user.id, new_user.id)
self.assertFalse(user.is_active)
self.assertTrue(activated)
profile = self.registration_profile.objects.get(user=new_user)
self.assertTrue(profile.activated)
def test_activation_key_backwards_compatibility_sha1(self):
"""
Make sure that users created with the old create_new_activation_key
method can still be activated.
"""
current_method = self.registration_profile.create_new_activation_key
def old_method(self, save=True):
random_string = get_random_string(length=32, allowed_chars=string.printable)
self.activation_key = hashlib.sha1(random_string.encode('utf-8')).hexdigest()
if save:
self.save()
return self.activation_key
self.registration_profile.create_new_activation_key = old_method
new_user = self.registration_profile.objects.create_inactive_user(
site=Site.objects.get_current(), **self.user_info)
profile = self.registration_profile.objects.get(user=new_user)
self.registration_profile.create_new_activation_key = current_method
user, activated = self.registration_profile.objects.activate_user(
profile.activation_key, Site.objects.get_current())
self.assertIsInstance(user, UserModel())
self.assertEqual(user.id, new_user.id)
self.assertFalse(user.is_active)
self.assertTrue(activated)
profile = self.registration_profile.objects.get(user=new_user)
self.assertTrue(profile.activated)
@override_settings(ADMINS=(), REGISTRATION_ADMINS=())
def test_no_admins_registered(self):
"""
        Sending the admin approval email raises ImproperlyConfigured when
        neither ADMINS nor REGISTRATION_ADMINS is configured
"""
new_user = self.registration_profile.objects.create_inactive_user(
site=Site.objects.get_current(), **self.user_info)
with self.assertRaises(ImproperlyConfigured):
self.registration_profile.objects.send_admin_approve_email(
new_user, Site.objects.get_current())
@override_settings(REGISTRATION_ADMINS=())
def test_no_registration_admins_registered(self):
"""
        Sending the admin-approval email with an empty REGISTRATION_ADMINS
        setting triggers a warning.
"""
new_user = self.registration_profile.objects.create_inactive_user(
site=Site.objects.get_current(), **self.user_info)
with warnings.catch_warnings(record=True) as _warning:
self.registration_profile.objects.send_admin_approve_email(
new_user, Site.objects.get_current())
assertion_error = '''No warning triggered for unregistered
REGISTRATION_ADMINS'''
self.assertTrue(len(_warning) > 0, assertion_error)
self.assertTrue('REGISTRATION_ADMINS' in str(_warning[-1].message),
assertion_error)
4a22d6a4cc5d149aa0284ffa1730cac226230e96 | 6,975 | py | Python | cordova/cordova-lite-android-tests/res/util.py | zhuyongyong/crosswalk-test-suite | 24f3f8cfa663a365b0a22685d5bd096a637f72db | ["BSD-3-Clause"]
#!/usr/bin/env python
import os
import commands
import sys
import json
import logging
import urllib2
import stat
import shutil
import fnmatch
import subprocess
import time
import re
from bs4 import BeautifulSoup
from optparse import OptionParser
script_path = os.path.realpath(__file__)
const_path = os.path.dirname(script_path)
tool_path = const_path + "/../tools/"
plugin_tool = const_path + "/../tools/cordova-plugin-crosswalk-webview/"
def setUp():
global CROSSWALK_VERSION
global CROSSWALK_BRANCH
global LOG
LOG = logging.getLogger("pack-tool")
with open("../VERSION", "rt") as pkg_version_file:
pkg_version_raw = pkg_version_file.read()
pkg_version_file.close()
pkg_version_json = json.loads(pkg_version_raw)
CROSSWALK_VERSION = pkg_version_json["main-version"]
CROSSWALK_BRANCH = pkg_version_json["crosswalk-branch"]
def getLatestCrosswalkVersion(channel=None, main_version=None):
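    """Scrape the Crosswalk-lite Maven directory listing on download.01.org
    and return the newest version string for the given channel that matches
    the requested main_version."""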
version = ""
crosswalk_url_tmp = "https://download.01.org/crosswalk/releases/crosswalk-lite/android/maven2/org/xwalk/" \
"xwalk_core_library"
if channel == "beta":
crosswalk_url = "%s_beta/" % crosswalk_url_tmp
elif channel == "stable":
crosswalk_url = "%s/" % crosswalk_url_tmp
elif channel == "canary":
crosswalk_url = "%s_canary/" % crosswalk_url_tmp
else:
        LOG.error("getLatestCrosswalkVersion only supports the stable, beta or canary channel")
sys.exit(1)
print crosswalk_url
htmlDoc = urllib2.urlopen(crosswalk_url).read()
soup = BeautifulSoup(htmlDoc)
alist = soup.find_all('a')
for index in range(-1, -len(alist)-1, -1):
aEle = alist[index]
version = aEle['href'].strip('/')
if re.search('%s\.[0-9]*\.[0-9]*\.[0-9]*' % main_version, version):
break
print "version----------------------------------------------------------:" + version
return version
def create(app_name, pkg_name, tmp_path):
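    """Create a fresh Cordova project under tmp_path, set the Android
    activity name, add the allow-navigation workaround to config.xml, and
    add the Android platform."""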
print "begin to create project:"
project_path = os.path.join(tmp_path, app_name)
os.chdir(tmp_path)
if os.path.exists(project_path):
doRemove([project_path])
os.system("cordova create %s %s %s" % (app_name, pkg_name, app_name))
os.chdir(project_path)
# Set activity name as app_name
replaceUserString(
project_path,
'config.xml',
'<widget',
'<widget android-activityName="%s"' %
app_name)
# Workaround for XWALK-3679
replaceUserString(
project_path,
'config.xml',
'</widget>',
' <allow-navigation href="*" />\n</widget>')
os.system("cordova platform add android")
def installWebviewPlugin(xwalk_mode=None, xwalk_version=None):
print "Install webview plugin----------------> Start"
xwalk_mode_cmd = "--variable XWALK_MODE=\"%s\"" % xwalk_mode
xwalk_version_cmd = ""
if xwalk_version:
xwalk_version_cmd = "--variable XWALK_VERSION=\"%s\"" % xwalk_version
crosswalk_plugin_source = plugin_tool
install_crosswalk_cmd = "cordova plugin add %s %s %s" % (crosswalk_plugin_source, xwalk_version_cmd, xwalk_mode_cmd)
os.system(install_crosswalk_cmd)
print install_crosswalk_cmd
print "Install webview plugin----------------> OK"
def removeWebviewPlugin():
print "Remove webview plugin----------------> Start"
cmd = "cordova plugin remove cordova-plugin-crosswalk-webview"
print cmd
buildstatus = commands.getstatusoutput(cmd)
print "\nRemove webview plugin----------------> OK"
def build(appname, pkgarch="arm"):
print "Build project %s ----------------> START" % appname
cmd = "cordova build android -- --gradleArg=-PcdvBuildArch=%s" % pkgarch
print cmd
buildstatus = os.system(cmd)
print "\nBuild project %s ----------------> OK\n" % appname
def checkApkExist(apk_path):
lsstatus = commands.getstatusoutput("ls %s" % apk_path)
if lsstatus[0] == 0:
print "Build Package Successfully"
else:
print "Build Package Error"
def doCopy(src_item=None, dest_item=None):
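    """Copy a single file or a whole directory tree, creating any missing
    destination directories; return True on success and False on failure."""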
LOG.info("Copying %s to %s" % (src_item, dest_item))
try:
if os.path.isdir(src_item):
overwriteCopy(src_item, dest_item, symlinks=True)
else:
if not os.path.exists(os.path.dirname(dest_item)):
LOG.info("Create non-existent dir: %s" %
os.path.dirname(dest_item))
os.makedirs(os.path.dirname(dest_item))
shutil.copy2(src_item, dest_item)
except Exception as e:
LOG.error("Fail to copy file %s: %s" % (src_item, e))
return False
return True
def overwriteCopy(src, dest, symlinks=False, ignore=None):
if not os.path.exists(dest):
os.makedirs(dest)
shutil.copystat(src, dest)
sub_list = os.listdir(src)
if ignore:
excl = ignore(src, sub_list)
sub_list = [x for x in sub_list if x not in excl]
for i_sub in sub_list:
s_path = os.path.join(src, i_sub)
d_path = os.path.join(dest, i_sub)
if symlinks and os.path.islink(s_path):
if os.path.lexists(d_path):
os.remove(d_path)
os.symlink(os.readlink(s_path), d_path)
try:
s_path_s = os.lstat(s_path)
s_path_mode = stat.S_IMODE(s_path_s.st_mode)
os.lchmod(d_path, s_path_mode)
except Exception:
pass
elif os.path.isdir(s_path):
overwriteCopy(s_path, d_path, symlinks, ignore)
else:
shutil.copy2(s_path, d_path)
def replaceUserString(path, fnexp, old_s, new_s):
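    """Replace old_s with new_s in every file under path whose name matches
    the fnmatch pattern fnexp; return False if a file cannot be read or
    written, True otherwise."""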
print "Replace value ----------------> START"
for sub_file in iterfindfiles(path, fnexp):
try:
with open(sub_file, 'r') as sub_read_obj:
read_string = sub_read_obj.read()
except IOError as err:
#LOG.error("Read %s Error : " % sub_file + str(err))
return False
if read_string.find(old_s) >= 0:
try:
with open(sub_file, 'w') as sub_write_obj:
sub_write_obj.write(read_string.replace(old_s, new_s))
except IOError as err:
#LOG.error("Modify %s Error : " % sub_file + str(err))
return False
print "Replace value ----------------> OK"
return True
def iterfindfiles(path, fnexp):
for root, dirs, files in os.walk(path):
for filename in fnmatch.filter(files, fnexp):
yield os.path.join(root, filename)
def doRemove(target_file_list=None):
print target_file_list
for i_file in target_file_list:
print "Removing %s" % i_file
try:
if os.path.isdir(i_file):
shutil.rmtree(i_file)
else:
os.remove(i_file)
except Exception as e:
print "Fail to remove file %s: %s" % (i_file, e)
return False
return True
4a22d6f5b79f5e21b9664858cbd0cdcf54697576 | 1,177 | py | Python | Python/SearchStat/search_new.py | GeorgiyDemo/scrapheap | 1d298eb63e0e6a8fc92ab84a05372d2ed33beba1 | ["MIT"]
# -*- coding: utf-8 -*-
from pytils import numeral
import os, xlrd, time
from keras import backend as K
from keras.models import model_from_json
from utils import *
# number of cells (rows) in the table
count_range = 153
kun = 0
tyan = 0
i = 1
book = xlrd.open_workbook("CONF.xlsx")
first_sheet = book.sheet_by_index(0)
model_dir = "./output"
K.set_learning_phase(0)
with open(os.path.join(model_dir, "model.json"), "r") as fp:
model = model_from_json(fp.read())
model.compile(loss="categorical_crossentropy", optimizer="adam", metrics=["accuracy"])
model.load_weights(os.path.join(model_dir, "model.h5"))
def predict(m, word):
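    """Run the trained Keras model on a single name (encoded with
    word2input(word, 50)) and return the index of the most probable class."""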
out = m.predict(np.array([word2input(word, 50)]))
return np.argmax(out[0])
def OutWork(result, files):
f = open(files, "a")
f.write(result + "\n")
f.close()
while i < count_range:
this_name = first_sheet.cell(i, 0).value
print(this_name)
    gender_class = predict(model, this_name)
    if gender_class == 2:
        OutWork(this_name, "TYAN")
        tyan = tyan + 1
    if gender_class == 1:
        OutWork(this_name, "KUN")
        kun = kun + 1
i = i + 1
print("Кол-во кунов: " + str(kun))
print("Кол-во тянок: " + str(tyan))
| 22.207547 | 86 | 0.653356 |
4a22d7c8ba849f66ef0d9a237cba4453bbc0e4f8 | 3,342 | py | Python | python/aoc-2020/aoc24_part2.py | brianjgreen/AOC2020 | c5fd4908934371d9f70328cb39c14194baffb126 | [
"MIT"
] | 2 | 2020-12-02T05:31:14.000Z | 2020-12-18T21:00:40.000Z | python/aoc-2020/aoc24_part2.py | brianjgreen/AOC2020 | c5fd4908934371d9f70328cb39c14194baffb126 | [
"MIT"
] | null | null | null | python/aoc-2020/aoc24_part2.py | brianjgreen/AOC2020 | c5fd4908934371d9f70328cb39c14194baffb126 | [
"MIT"
] | null | null | null | #
# Advent of Code 2020 - Day 24 Part 2
# 24 Dec 2020 Brian Green
#
# Problem:
# How many tiles will be black after 100 days?
#
import os
class Aoc24:
def __init__(self):
self.check_white_tiles = []
self.art = {}
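        # Hexagonal neighbours in "double-width" coordinates: moving east/west
        # changes x by 2, the four diagonal moves change both x and y by 1.
        # Order matches the input directions e, se, sw, w, nw, ne.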
self.adj = ((2, 0), (1, -1), (-1, -1), (-2, 0), (-1, 1), (1, 1))
file_name = "data" + os.sep + "brian_aoc24_test.dat"
with open(file_name) as data_file:
test_data = [x.strip() for x in data_file.readlines()]
file_name = "data" + os.sep + "brian_aoc24.dat"
with open(file_name) as data_file:
data_set = [x.strip() for x in data_file.readlines()]
# self.data = test_data
self.data = data_set
def count_black_tiles(self, tiles, t, tab_white=False):
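        # Count the black neighbours of tile t; when tab_white is set, also
        # remember any white neighbours so they can be checked for flipping later.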
black = 0
x, y = t
for off_x, off_y in self.adj:
            neighbor = (x + off_x, y + off_y)
            if neighbor in tiles and tiles[neighbor] == 1:
                black += 1
            elif tab_white and neighbor not in self.check_white_tiles:
                self.check_white_tiles.append(neighbor)
return black
# Any black tile with zero or more than 2 black tiles immediately adjacent to it is flipped to white.
def flip_to_white(self, tiles):
self.check_white_tiles = []
self.art = tiles.copy()
for t in tiles:
if tiles[t] == 1:
black = self.count_black_tiles(tiles, t, tab_white=True)
if black == 0 or black > 2:
self.art[t] = 0
elif t not in self.check_white_tiles:
self.check_white_tiles.append(t)
# Any white tile with exactly 2 black tiles immediately adjacent to it is flipped to black.
def flip_to_black(self, tiles):
for t in self.check_white_tiles:
if t not in tiles or tiles[t] == 0:
black = self.count_black_tiles(tiles, t)
if black == 2:
self.art[t] = 1
def run_it(self):
tiles = {}
# e, se, sw, w, nw, and ne
print(self.data)
for t in self.data:
print(t)
            length = len(t)  # avoid shadowing the built-in max()
            pos = 0
            x = y = 0
            print(t[pos:pos+2])
            while pos < length:
if t[pos] == 'e':
x += 2
pos -= 1
elif t[pos:pos+2] == 'se':
x += 1
y -= 1
elif t[pos:pos+2] == 'sw':
x -= 1
y -= 1
elif t[pos] == 'w':
x -= 2
pos -= 1
elif t[pos:pos+2] == 'nw':
x -= 1
y += 1
elif t[pos:pos+2] == 'ne':
x += 1
y += 1
pos += 2
if (x, y) in tiles and tiles[(x, y)] == 1:
tiles[(x, y)] = 0
else:
tiles[(x, y)] = 1
print(tiles)
print(sum(tiles.values()))
for i in range(100):
self.flip_to_white(tiles)
print(len(self.check_white_tiles))
self.flip_to_black(tiles)
print(f"Day {i+1}: {sum(self.art.values())}")
tiles = self.art
if __name__ == "__main__":
solve_it = Aoc24()
solve_it.run_it()
4a22d811d15b3fa01e6e71400388f9ad9923cac0 | 158 | py | Python | Python_Advanced_Softuni/Comprehensions_Lab/venv/no_vowels.py | borisboychev/SoftUni | 22062312f08e29a1d85377a6d41ef74966d37e99 | [
"MIT"
] | 1 | 2020-12-14T23:25:19.000Z | 2020-12-14T23:25:19.000Z | Python_Advanced_Softuni/Comprehensions_Lab/venv/no_vowels.py | borisboychev/SoftUni | 22062312f08e29a1d85377a6d41ef74966d37e99 | [
"MIT"
] | null | null | null | Python_Advanced_Softuni/Comprehensions_Lab/venv/no_vowels.py | borisboychev/SoftUni | 22062312f08e29a1d85377a6d41ef74966d37e99 | [
"MIT"
] | null | null | null | vowels = ['a' , 'o' , 'u' , 'e' , 'i' , 'A' , 'O' , 'U' , 'E' , 'I']
string = input()
result = [s for s in string if s not in vowels]
print(''.join(result)) | 26.333333 | 68 | 0.474684 |
4a22d830f18c2bbdf935d35db8cbafa40577803c | 60,863 | py | Python | src/pyGLMHMM/GLMHMM.py | aslansd/pyGLMHMM | 5930e1322435431c5835b2b3f241b2ca0d2fb887 | [
"MIT"
] | 3 | 2020-08-20T21:35:34.000Z | 2021-04-23T08:02:24.000Z | src/pyGLMHMM/GLMHMM.py | aslansd/pyGLMHMM | 5930e1322435431c5835b2b3f241b2ca0d2fb887 | [
"MIT"
] | null | null | null | src/pyGLMHMM/GLMHMM.py | aslansd/pyGLMHMM | 5930e1322435431c5835b2b3f241b2ca0d2fb887 | [
"MIT"
] | null | null | null | import copy
import warnings
import numpy as np
from sklearn.base import BaseEstimator
from sklearn.exceptions import ConvergenceWarning
from sklearn.utils.validation import check_random_state
from .GLMHMMSymbLik import _GLMHMM_symb_lik
from .GLMHMMAnalogLik import _GLMHMM_analog_lik
from .GLMHMMTransLik import _GLMHMM_trans_lik
from .computeTrialExpectation import _compute_trial_expectation
from .emitLearningFun import _emit_learning_fun
from .emitMultistepLearningFun import _emit_multistep_learning_fun
from .transLearningFun import _trans_learning_fun
from .emitLikelihood import _emit_likelihood
from .emitLearningStats import _emit_learning_stats
from .transLearningStats import _trans_learning_stats
from .weightedLSByState import _weighted_LS_by_state
from .collectWLSInfo import _collect_WLS_info
from .regularizationSchedule import _regularization_schedule
from .minimizeLBFGS import _minimize_LBFGS
from .fitEmissionFilters import _fit_emission_filters
from .fitTransitionFilters import _fit_transition_filters
from .fitAnalogFilters import _fit_analog_filters
from .HMMGLMLikelihoods import _HMMGLM_likelihoods
from .waveletTransform import _w_corr
from .fastASD import _fast_ASD_weighted_group
class GLMHMMEstimator(BaseEstimator):
"""
A pure Python implementation of the GLM-HMM model of "https://github.com/murthylab/GLMHMM" implemented in MATLAB.
It follows the general framework of a scikit-learn estimator while being faithful to the original implementation.
This GLM-HMM model has been developed in (Calhoun et al., 2019) as a method to infer internal states of an animal based on sensory
    environment and produced behavior. This technique makes use of a regression method, the Generalized Linear Model (GLM), which identifies
    a 'filter' describing how a given sensory cue is integrated over time. It then combines these filters with a hidden state model, the Hidden
    Markov Model (HMM), to identify whether the behavior of an animal can be explained by some underlying state. The end goal of this
GLM-HMM model is to best predict the acoustic behaviors of the vinegar fly D. melanogaster. The GLM–HMM model allows each state to
have an associated multinomial GLM to describe the mapping from feedback cues to the probability of emitting a particular type of song.
Each state also has a multinomial GLM that produces a mapping from feedback cues to the transition probabilities from the current state
to the next state. This allows the probabilities to change from moment to moment in a manner that depends on the sensory feedback that
the fly receives and to determine which feedback cues affect the probabilities at each moment. This model was inspired by a previous
work that modeled neural activity (Escola et al., 2011), but instead uses multinomial categorical outputs to account for the discrete
nature of singing behavior.
Inputs
----------
stim (X) : The stimulus to be used for fitting. These should be in the form of a numpy array with size (regressors, time) per sample in a list.
symb (y) : The emitted discrete symbols to be fitted. These should be in the form of a numpy array with size (time) containing integer numbers from 0...N-1
(N: the number of possible outputs, i.e. song types) per sample in a list.
analog_symb (y_analog) : The emitted continuous symbols to be fitted.
Parameters
----------
random_state : int, RandomState instance or None, optional (default = None)
If int, random_state is the seed used by the random number generator;
If RandomState instance, random_state is the random number generator;
If None, the random number generator is the RandomState instance used
by `np.random`.
tol : float, defaults to 1e-4.
The convergence threshold. EM iterations will stop when the lower bound average gain on the likelihood
(of the training data with respect to the model) is below this threshold.
max_iter : int, defaults to 1000.
The number of EM iterations to perform.
num_samples : int, defaults to 1.
The number of distinct samples in the input data
num_states : int, defaults to 2.
The number of hidden internal states
num_emissions : int, defaults to 2.
The number of emitted behaviors or actions (like song types)
num_feedbacks : int, defaults to 3.
The number of sensory feedback cues.
num_filter_bins : int, defaults to 30.
The number of bins to discretize the filters of sensory feedback cues.
num_steps : int, defaults to 1.
The number of steps taken in the maximization step of the EM algorithm for calculating the emission matrix
filter_offset : int, defaults to 1.
The number of bias terms added to the sensory feedback cues.
init_loglik : float, defaults to -1e7.
The initial log likelihood.
    smooth_lambda : float, defaults to 0.
        The regularization strength of the filter-smoothness penalty.
    emit_lambda : float, defaults to 0.
        The regularization strength applied to the emission filters.
    trans_lambda : float, defaults to 0.
        The regularization strength applied to the transition filters.
AR_lambda : float, defaults to -1.
...
AR_vec : array-like, defaults to np.arange(510, 630).
...
stim_vec : array-like, defaults to np.setdiff1d(np.arange(0, 631), np.arange(510, 630)).
...
auto_anneal_vec : array-like, defaults to np.array([0.01, 0.025, 0.05, 0.075, 0.1, 0.25, 0.5, 0.75, 1]).
...
auto_anneal_schedule : array-like, defaults to np.array([1, 5, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16]).
...
train_bins : array-like, defaults to np.array([]).
...
symb_exists : bool, defaults to True.
True if symb exists, False otherwise.
use_ASD : bool, defaults to True.
...
add_filters : bool, defaults to False.
True if filters must be added, False otherwise.
fit_emissions : bool, defaults to True.
True if emissions must be fitted, False otherwise.
GLM_emissions : bool, defaults to True.
True if GLM must be performed on emission symbols, False otherwise.
GLM_transitions : bool, defaults to True.
True if GLM must be performed on state transitions, False otherwise.
evaluate : bool, defaults to False.
True if the model must be evaluated, False otherwise.
generate : bool, defaults to False.
True if the model must be generated, False otherwise.
    L2_smooth : bool, defaults to False.
        True if L2 smoothness regularization must be performed, False otherwise.
analog_flag : bool, defaults to False.
True if the analog version of the model must be run, False otherwise.
auto_anneal : bool, defaults to False.
...
anneal_lambda : bool, defaults to False.
...
get_error_bars : bool, defaults to False.
True if error-bars must be calculated, False otherwise.
CV_regularize : bool, defaults to False.
True if cross-validation for regularization must be performed, False otherwise.
cross_validate : bool, defaults to False.
True if cross-validation must be performed, False otherwise.
Attributes
----------
emit_w_ : array-like, shape (states, N - 1, regressors)
The emission filter matrix.
analog_emit_w_ : array-like, ...
The continuous emission filter.
analog_emit_std_ : array-like, ...
The continuous emission filter standard deviation.
trans_w_ : array-like, shape (states, states, regressors)
The transition filter matrix.
emit_w_init_ : array-like, shape (states, N - 1, regressors)
The initial emission filter matrix.
analog_emit_w_init_ : array-like, ...
The initial continuous emission filter.
    analog_emit_std_init_ : array-like, ...
The initial continuous emission filter standard deviation.
trans_w_init_ : array-like, shape (states, states, regressors)
The initial transition filter matrix.
symb_lik_ : array-like (list)
The likelihood of emitted symbols.
analog_lik_ : array-like (list)
The likelihood of continuous emitted symbols.
trans_lik_ : array-like (list)
The likelihood of hidden states.
regular_schedule_ : array-like
The regularization schedule.
regular_schedule_ind_ : int
The regularization index.
train_data_ : array-like
The subset of stim (X) used for training.
test_data_ : array-like
The subset of stim (X) used for validation.
converged_ : bool
True when convergence was reached in fit(), False otherwise.
is_fitted_ : bool
True if the fitting has been already performed, False otherwise.
n_iter_ : int
Number of step used by the best fit of inference to reach the convergence.
References
----------
.. [1] Calhoun, A.J., Pillow, J.W. & Murthy, M. Unsupervised identification of the internal states that shape natural behavior. Nat Neurosci 22, 2040–2049 (2019).
.. [2] Escola, S., Fontanini, A., Katz, D. & Paninski, L. Hidden Markov models for the stimulus-response relationships of multistate neural systems. Neural Comput 23, 1071–1132 (2011).
"""
def __init__(self,
random_state = None,
tol = 1e-4,
max_iter = 1000,
num_samples = 1,
num_states = 2,
num_emissions = 2,
num_feedbacks = 3,
num_filter_bins = 30,
num_steps = 1,
filter_offset = 1,
init_loglik = -1e7,
smooth_lambda = 0,
emit_lambda = 0,
trans_lambda = 0,
AR_lambda = -1,
AR_vec = np.arange(510, 630),
stim_vec = np.setdiff1d(np.arange(0, 631), np.arange(510, 630)),
auto_anneal_vec = np.array([0.01, 0.025, 0.05, 0.075, 0.1, 0.25, 0.5, 0.75, 1]),
auto_anneal_schedule = np.array([1, 5, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16]),
train_bins = np.array([]),
symb_exists = True,
use_ASD = True,
add_filters = False,
fit_emissions = True,
GLM_emissions = True,
GLM_transitions = True,
evaluate = False,
generate = False,
L2_smooth = False,
analog_flag = False,
auto_anneal = False,
anneal_lambda = False,
get_error_bars = False,
CV_regularize = False,
cross_validate = False):
self.random_state = random_state
self.tol = tol
self.max_iter = max_iter
self.num_samples = num_samples
self.num_states = num_states
self.num_emissions = num_emissions
self.num_feedbacks = num_feedbacks
self.num_filter_bins = num_filter_bins
self.num_steps = num_steps
self.filter_offset = filter_offset
self.init_loglik = init_loglik
self.smooth_lambda = smooth_lambda
self.emit_lambda = emit_lambda
self.trans_lambda = trans_lambda
self.AR_lambda = AR_lambda
self.AR_vec = AR_vec
self.stim_vec = stim_vec
self.auto_anneal_vec = auto_anneal_vec
self.auto_anneal_schedule = auto_anneal_schedule
self.train_bins = train_bins
self.symb_exists = symb_exists
self.use_ASD = use_ASD
self.add_filters = add_filters
self.fit_emissions = fit_emissions
self.GLM_emissions = GLM_emissions
self.GLM_transitions = GLM_transitions
self.evaluate = evaluate
self.generate = generate
self.L2_smooth = L2_smooth
self.analog_flag = analog_flag
self.auto_anneal = auto_anneal
self.anneal_lambda = anneal_lambda
self.get_error_bars = get_error_bars
self.CV_regularize = CV_regularize
self.cross_validate = cross_validate
def fit(self, X, y, y_analog):
"""Estimate model parameters with the EM algorithm.
The method fits the model and sets the parameters with which the model
has the largest likelihood. Within each trial, the method iterates
between E-step and M-step for `max_iter` times until the change of
likelihood is less than `tol`, otherwise, a `ConvergenceWarning` is
raised.
Parameters
----------
X : array-like, shape (These should be in the form of a numpy array with size (regressors, time) per sample in a list).
The training input samples.
y : array-like, shape (These should be in the form of a numpy array with size (time) containing integer numbers from 0...N-1 (N: the number of possible outputs, i.e. song types) per sample in a list).
The target values (Class labels in classification).
y_analog : array-like, ...
...
Returns
-------
output : an output dictionary which has the emission and transition matrices of all EM iterations of the fit method and also some other attributes of GLMHMMEstimator class.
"""
self.random_state = check_random_state(self.random_state)
self._check_initial_parameters(X)
do_init = not(hasattr(self, 'converged_'))
self.converged_ = False
if do_init:
self._initialize_parameters(X, self.random_state)
###################################################
# First set everything up
###################################################
if len(y) > 0:
self.symb_exists = True
else:
self.symb_exists = False
total_trials = max(len(y), len(y_analog)) # How many different trials are we fitting?
loglik = np.zeros(self.max_iter + 1)
loglik[0] = self.init_loglik
self.num_states = max(self.emit_w_.shape[0], self.analog_emit_w_.shape[0], self.trans_w_.shape[0])
self.num_emissions = self.emit_w_.shape[1]
if self.analog_flag == True:
num_total_bins = max(self.emit_w_.shape[2], self.analog_emit_w_.shape[2], self.trans_w_.shape[2])
num_analog_params = y_analog[0].shape[0]
num_analog_emit = np.zeros(num_analog_params)
else:
num_total_bins = max(self.emit_w_.shape[2], self.trans_w_.shape[2])
num_analog_params = 0
prior = []
gamma = []
xi = []
for trial in range(0, total_trials):
prior.append(np.ones(self.num_states) / self.num_states) # Is this good?!?!
if self.analog_flag == True:
for analog_num in range(0, num_analog_params):
num_analog_emit[analog_num] = num_analog_emit[analog_num] + np.nansum(y_analog[trial][analog_num, :], axis = 0)
gamma.append(np.ones((self.num_states, y_analog[trial].shape[1])))
else:
if len(y[trial].shape) > 1:
gamma.append(np.ones((self.num_states, y[trial].shape[1])))
else:
gamma.append(np.ones((self.num_states, 1)))
gamma[trial] = gamma[trial] / np.tile(np.sum(gamma[trial], axis = 0), (self.num_states, 1))
xi.append([])
###################################################
# Then the E-step
###################################################
# First we need to know the likelihood of seeing each symbol given the filters as well as the likelihood of seeing a transition from state to state
effective_ind = 0
last_try = False
for trial in range(0, total_trials):
if self.symb_exists == True:
self.symb_lik_.append(_GLMHMM_symb_lik(self.emit_w_, X[trial], y[trial]))
self.trans_lik_.append(_GLMHMM_trans_lik(self.trans_w_, X[trial]))
if self.analog_flag == True:
self.analog_lik_ = _GLMHMM_analog_lik(self.analog_emit_w_, X, y_analog, num_analog_emit)
output = []
for ind in range(0, self.max_iter):
print('Fitting iteration: ' + str(ind + 1))
prior, gamma, xi = self._e_step(X, prior, gamma, xi, total_trials)
###################################################
# Now the M-step
###################################################
# Gradient descent for the emission filter
print('Fitting categorical emission filters')
if self.symb_exists == True and self.fit_emissions == True:
new_stim = []
if self.cross_validate == True or self.CV_regularize == True:
if self.CV_regularize == True:
CV_schedule = self.regular_schedule_[max(self.regular_schedule_ind_ - 1, 1):min(self.regular_schedule_ind_ + 1, len(self.regular_schedule_))]
else:
CV_schedule = self.smooth_lambda
CV_ind = -1
new_emit_w = []
new_trans_w = []
new_analog_emit_w = []
new_analog_emit_std = []
test_emit_lik = []
test_trans_lik = []
test_analog_lik = []
test_gamma = []
test_xi = []
test_log_lik = []
for this_lambda in CV_schedule:
CV_ind = CV_ind + 1
# Check if we are doing smoothing regularization or Tikhonov
# Segment data into random subsets for cross validation
if self.smooth_lambda == -1:
self.trans_lambda = this_lambda
self.emit_lambda = this_lambda
else:
self.smooth_lambda = this_lambda
if self.symb_exists == True:
# _fit_emission_filters should take into account the test_data_
# field...
[new_emit_w_temp, pgd_lik, pgd_prob, pgd_prob2, pgd_prob3, pgd_prob4] = _fit_emission_filters(X, y, gamma, xi, self.emit_w_, self.get_params(), self.train_data_)
new_emit_w.append(new_emit_w_temp)
else:
pgd_lik = 0
[new_trans_w_temp, tgd_lik] = _fit_transition_filters(X, y, gamma, xi, self.trans_w_, self.get_params(), self.train_data_)
new_trans_w.append(new_trans_w_temp)
if self.analog_flag == True:
[new_analog_emit_w_temp, new_analog_emit_std_temp, arcorr, arcorr2] = _fit_analog_filters(X, y_analog, gamma, xi, self.analog_emit_w_, self.get_params(), self.train_data_)
new_analog_emit_w.append(new_analog_emit_w_temp)
new_analog_emit_std.append(new_analog_emit_std_temp)
test_symb = []
test_analog_symb = []
test_stim = []
if self.analog_flag == True:
for i in range(0, len(self.test_data_)):
test_symb.append(y[self.test_data_[i]])
test_analog_symb.append(y_analog[self.test_data_[i]])
test_stim.append(X[self.test_data_[i]])
[test_emit_lik_temp, test_trans_lik_temp, test_analog_lik_temp, test_gamma_temp, test_xi_temp] = _HMMGLM_likelihoods(test_symb, new_emit_w[CV_ind], new_trans_w[CV_ind], test_stim, new_analog_emit_w[CV_ind], test_analog_symb, self.get_params())
else:
for i in range(0, len(self.test_data_)):
test_symb.append(y[self.test_data_[i]])
test_stim.append(X[self.test_data_[i]])
[test_emit_lik_temp, test_trans_lik_temp, test_analog_lik_temp, test_gamma_temp, test_xi_temp] = _HMMGLM_likelihoods(test_symb, new_emit_w[CV_ind], new_trans_w[CV_ind], test_stim, [], [], self.get_params())
test_emit_lik.append(test_emit_lik_temp)
test_trans_lik.append(test_trans_lik_temp)
test_analog_lik.append(test_analog_lik_temp)
test_gamma.append(test_gamma_temp)
test_xi.append(test_xi_temp)
test_full_emit_lik = 0
test_full_trans_lik = 0
test_full_analog_emit_lik = 0
test_full_basic_lik = np.zeros((len(self.test_data_), 1))
for i in range(0, len(self.test_data_)):
gamma[i][gamma[i][:, 0] == 0, 0] = np.finfo(float).eps
if self.symb_exists == True:
test_full_emit_lik = test_full_emit_lik - np.mean(np.sum(test_gamma[CV_ind][i] * np.log(test_emit_lik[CV_ind][i]), axis = 0), axis = 0)
test_full_trans_lik = test_full_trans_lik - np.mean(np.sum(np.sum(test_xi[CV_ind][i] * np.log(test_trans_lik[CV_ind][i][:, :, 1:]), axis = 0), axis = 0), axis = 0)
if self.analog_flag == True:
analog_prod = np.prod(test_analog_lik[CV_ind][i], axis = 0)
analog_prod[analog_prod == 0] = np.finfo(float).eps
if self.num_states == 1:
test_full_analog_emit_lik = test_full_analog_emit_lik - np.mean(np.sum(test_gamma[CV_ind][i] * np.log(analog_prod).T, axis = 0), axis = 0)
else:
test_full_analog_emit_lik = test_full_analog_emit_lik - np.mean(np.sum(test_gamma[CV_ind][i] * np.log(analog_prod), axis = 0), axis = 0)
test_full_basic_lik[i] = -np.sum(test_gamma[CV_ind][i][:, 0] * np.log(test_gamma[CV_ind][i][:, 0]), axis = 0)
test_log_lik.append(np.sum(test_full_basic_lik, axis = 0) + test_full_emit_lik + test_full_analog_emit_lik + test_full_trans_lik)
if self.CV_regularize == True:
CV_inds = np.arange(max(self.regular_schedule_ind_ - 1, 1), min(self.regular_schedule_ind_ + 1, len(self.regular_schedule_)))
good_ind = np.argwhere(test_log_lik == min(test_log_lik))
if self.symb_exists == True:
self.emit_w_ = new_emit_w[good_ind]
self.trans_w_ = new_trans_w[good_ind]
if self.analog_flag == True:
self.analog_emit_w_ = new_analog_emit_w[good_ind]
self.analog_emit_std_ = new_analog_emit_std[good_ind]
this_lambda = self.regular_schedule_[CV_inds[good_ind]]
self.regular_schedule_ind_ = CV_inds[good_ind]
if self.smooth_lambda == -1:
self.trans_lambda = this_lambda
self.emit_lambda = this_lambda
else:
self.smooth_lambda = this_lambda
else:
good_ind = 1
if self.cross_validate == True:
output[ind]['lambda'] = this_lambda
output[ind]['loglik_CV'] = test_log_lik[good_ind]
output[ind]['loglik_CV_lambda'] = CV_schedule
output[ind]['loglik_CV_all'] = test_log_lik
else:
for trial in range(0, len(X)):
# Please don't ask me why I decided it was a good idea to call the number of emissions 'num_states' here. Just roll with it!
new_stim.append({'emit' : y[trial], 'gamma' : gamma[trial], 'xi' : xi[trial], 'num_states' : self.num_emissions})
if self.GLM_emissions == True:
new_stim[trial]['data'] = X[trial]
new_stim[trial]['num_total_bins'] = num_total_bins
else:
new_stim[trial]['data'] = X[trial][-1, :]
new_stim[trial]['num_total_bins'] = 1
tmp_pgd1 = np.zeros((self.num_states, 1))
tmp_pgd2 = np.zeros((self.num_states, 1))
tmp_pgd3 = np.zeros((self.num_states, self.num_emissions + 1, self.num_emissions + 1))
tmp_pgd4 = np.zeros((self.num_states, self.num_emissions + 1, self.num_emissions + 1))
tmp_pgd_lik = np.zeros((self.num_states, 1))
hess_diag_emit = np.zeros((self.num_states, self.num_emissions, num_total_bins))
self._m_step_emission(new_stim)
pgd_lik = np.sum(tmp_pgd1, axis = 0)
if self.train_bins.size != 0:
for trial in range(0, len(X)):
new_stim[trial]['data'] = X[trial]
for i in range(0, self.num_states):
[tmp_pgd1[i], hess_d] = _emit_learning_stats(np.reshape(self.emit_w_[i, :, :].T, (self.emit_w_.shape[1] * self.emit_w_.shape[2], 1), order = 'F'), new_stim, i, self.get_params())
hess_diag_emit[i, :, :] = np.reshape(hess_d, (hess_diag_emit.shape[2], hess_diag_emit.shape[1]), order = 'F').T
[tmp_pgd_lik[i], tmp_pgd1[i], tmp_pgd2[i], tmp_pgd3_temp, tmp_pgd4_temp] = _emit_likelihood(np.reshape(self.emit_w_[i, :, :].T, (self.emit_w_.shape[1] * self.emit_w_.shape[2], 1), order = 'F'), new_stim, i)
tmp_pgd3[i, :, :] = tmp_pgd3_temp
tmp_pgd4[i, :, :] = tmp_pgd4_temp
pgd_prob1 = np.sum(tmp_pgd1, axis = 0)
pgd_prob2 = np.sum(tmp_pgd2, axis = 0)
pgd_prob3 = np.sum(tmp_pgd3, axis = 0)
pgd_prob4 = np.sum(tmp_pgd4, axis = 0)
else:
pgd_lik = 0
pgd_prob1 = 0
# Gradient descent for the transition filter
print('Fitting state transition filters')
new_stim = []
for trial in range(0, len(X)):
new_stim.append({'gamma' : gamma[trial], 'xi' : xi[trial], 'num_states' : self.num_states})
if self.GLM_transitions == True:
if self.train_bins.size == 0:
new_stim[trial]['data'] = X[trial]
new_stim[trial]['num_total_bins'] = num_total_bins
else:
new_stim[trial]['data'] = X[trial][self.train_bins, :]
new_stim[trial]['num_total_bins'] = len(self.train_bins)
else:
new_stim[trial]['data'] = X[trial][-1, :]
new_stim[trial]['num_total_bins'] = 1
tmp_tgd = np.zeros(self.num_states)
if self.evaluate == True:
for i in range(0, self.num_states):
tmp_tgd[i] = _trans_learning_fun(np.reshape(self.trans_w_[i, :, :].T, (self.trans_w_.shape[1] * self.trans_w_.shape[2]), order = 'F'), new_stim, i, self.get_params())[0]
tgd_lik = np.sum(tmp_tgd, axis = 0)
else:
self._m_step_transition(new_stim)
if self.train_bins.size != 0:
for trial in range(0, len(X)):
new_stim[trial]['data'] = X[trial]
hess_diag_trans = np.zeros((self.num_states, self.num_states, num_total_bins))
for i in range(0, self.num_states):
tmp_tgd[i] = _trans_learning_fun(np.reshape(self.trans_w_[i, :, :].T, (self.trans_w_.shape[1] * self.trans_w_.shape[2]), order = 'F'), new_stim, i, self.get_params())[0]
if self.num_states > 1:
[tmp_tgd[i], hess_d] = _trans_learning_stats(np.reshape(self.trans_w_[i, :, :].T, (self.trans_w_.shape[1] * self.trans_w_.shape[2], 1), order = 'F'), new_stim, i, self.get_params())
hess_diag_trans[i, :, :] = np.reshape(hess_d, (hess_diag_trans.shape[2], hess_diag_trans.shape[1])).T
else:
hess_diag_trans = 0
tgd_lik = np.sum(tmp_tgd, axis = 0)
# We don't need to gradient descent for straight-up regressions, just need to regress!
# we're just doing weighted least squares here: https://en.wikipedia.org/wiki/Least_squares#Weighted_least_squares
# We need to see how much data we need to accurately reconstruct these filters and why the smooth asd is failing so badly so often
if self.analog_flag == True:
print('Fitting analog emission filters')
new_stim = []
ar_corr1 = np.zeros((self.num_states, num_analog_params))
ar_corr2 = np.zeros(num_analog_params)
if self.evaluate == True:
for analog_num in range(0, num_analog_params):
for trial in range(0, len(X)):
new_stim.append({'data' : X[trial], 'num_total_bins' : num_total_bins})
new_stim[trial]['symb'] = y_analog[trial][analog_num, :]
new_stim[trial]['good_emit'] = ~np.isnan(y_analog[trial][analog_num, :])
[these_stim, these_symb, these_gamma] = _collect_WLS_info(new_stim)
for states in range(0, self.num_states):
ar_corr1[states, analog_num] = _w_corr(these_stim * self.analog_emit_w_[states, analog_num, :], these_symb, these_gamma[states, :].T)
ar_corr2[analog_num] = np.sum(np.mean(these_gamma, axis = 1) * ar_corr1[:, analog_num], axis = 0)
else:
for analog_num in range(0, num_analog_params):
print('Fitting filter ' + str(analog_num) + '/' + str(num_analog_params))
for trial in range(0, len(X)):
new_stim.append({'num_total_bins' : num_total_bins, 'data' : X[trial]})
new_stim[trial]['symb'] = y_analog[trial][analog_num, :]
new_stim[trial]['good_emit'] = ~np.isnan(y_analog[trial][analog_num, :])
[these_stim, these_symb, these_gamma] = _collect_WLS_info(new_stim)
# If more than this, loop until we have gone through all of them. How to deal with e.g. ~1k over this max? Overlapping subsets? Could just do e.g. 4 sub-samples, or however many depending on amount >15k
max_good_pts = 15000
num_analog_iter = np.ceil(these_stim.shape[0] / max_good_pts)
if num_analog_iter > 1:
analog_offset = (these_stim.shape[0] - max_good_pts) / (num_analog_iter - 1)
iter_stim = np.zeros((num_analog_iter, 2))
for nai in range(0, num_analog_iter):
iter_stim[nai, :] = np.floor(analog_offset * (nai - 1)) + [1, max_good_pts]
else:
iter_stim = [1, these_stim.shape[0]]
randomized_stim = np.random.permutation(these_stim.shape[0])
ae_w = np.zeros((num_analog_iter, self.num_states, self.analog_emit_w_.shape[2]))
ae_std = np.zeros((num_analog_iter, self.num_states, self.analog_emit_w_.shape[2]))
iter_weight = np.zeros((num_analog_iter, self.num_states))
for nai in range(0, num_analog_iter):
use_stim = randomized_stim[iter_stim[nai, 0]:iter_stim[nai, 1]]
for states in range(0, self.num_states):
if self.use_ASD == True:
if (these_stim.shape[1] % self.num_filter_bins) == 1:
[out_weights, ASD_stats] = _fast_ASD_weighted_group(these_stim[use_stim, :], these_symb[use_stim], these_gamma[states, use_stim], [np.ones((np.round(these_stim.shape[1] / self.num_filter_bins), 1)) * self.num_filter_bins, [1]], 2)
else:
[out_weights, ASD_stats] = _fast_ASD_weighted_group(these_stim[use_stim, :], these_symb[use_stim], these_gamma[states, use_stim], [np.ones((np.round(these_stim.shape[1] / self.num_filter_bins), 1)) * self.num_filter_bins], 2)
ae_w[nai, states, :] = out_weights
ae_std[nai, states, :] = ASD_stats['L_post_diag']
else:
[out_weights, out_std] = _weighted_LS_by_state(these_stim[use_stim, :], these_symb[use_stim], these_gamma[states, use_stim], 10)
ae_w[nai, states, :] = out_weights
ae_std[nai, states, :] = out_std
iter_weight[nai, states] = np.sum(these_gamma[states, use_stim], axis = 0)
ar_corr1[states, analog_num] = 0
for states in range(0, self.num_states):
self.analog_emit_w_[states, analog_num, :] = np.sum(ae_w[:, states, :] * np.tile(np.expand_dims(np.expand_dims(iter_weight[:, states] / np.sum(iter_weight[:, states], axis = 0), axis = 1), axis = 2), (1, 1, self.analog_emit_w_.shape[2])), axis = 0)
self.analog_emit_std_[states, analog_num, :] = np.sum(ae_std[:, states, :] * np.tile(np.expand_dims(np.expand_dims(iter_weight[:, states] / np.sum(iter_weight[:, states], axis = 0), axis = 1), axis = 2), (1, 1, self.analog_emit_std_.shape[2])), axis = 0)
ar_corr1[states, analog_num] = 0
ar_corr2[analog_num] = 0
# Now we have done the E and the M steps! Just save the likelihoods...
symb_likelihood = 0
analog_likelihood = 0
trans_likelihood = 0
self.symb_lik_ = []
self.trans_lik_ = []
if self.analog_flag == True:
self.analog_lik_ = _GLMHMM_analog_lik(self.analog_emit_w_, X, y_analog, num_analog_emit)
for trial in range(0, total_trials):
if self.symb_exists == True:
self.symb_lik_.append(_GLMHMM_symb_lik(self.emit_w_, X[trial], y[trial]))
self.trans_lik_.append(_GLMHMM_trans_lik(self.trans_w_, X[trial]))
symb_likelihood = symb_likelihood + -np.sum(np.sum(gamma[trial] * np.log(self.symb_lik_[trial]), axis = 0), axis = 0)
trans_likelihood = trans_likelihood + -np.sum(np.sum(np.sum(xi[trial] * np.log(self.trans_lik_[trial][:, :, 1:]), axis = 0), axis = 0), axis = 0)
if self.analog_flag == True:
analog_prod = np.prod(np.array(self.analog_lik_[trial]), axis = 0)
analog_prod[analog_prod < np.finfo(float).eps] = np.finfo(float).eps
analog_likelihood = analog_likelihood + -np.sum(np.sum(gamma[trial] * np.log(analog_prod), axis = 0), axis = 0)
# Basic log likelihood: sum(gamma(n) * log(gamma(n))) + tgd_lik + pgd_lik
basic_likelihood = np.zeros(total_trials)
for i in range(0, total_trials):
gamma[i][gamma[i][:, 0] == 0, 0] = np.finfo(float).eps
basic_likelihood[i] = -np.sum(gamma[i][:, 0] * np.log(gamma[i][:, 0]), axis = 0)
loglik[ind + 1] = np.sum(basic_likelihood, axis = 0) + symb_likelihood + trans_likelihood + analog_likelihood
# Saving variables
output.append({'emit_w' : self.emit_w_, 'trans_w': self.trans_w_})
if self.symb_exists == True and self.fit_emissions == True:
output[ind]['symb_likelihood'] = symb_likelihood
output[ind]['trans_likelihood'] = trans_likelihood
output[ind]['pgd_prob1'] = pgd_prob1
output[ind]['pgd_prob2'] = pgd_prob2
output[ind]['pgd_prob3'] = pgd_prob3
output[ind]['pgd_prob4'] = pgd_prob4
if 'hess_diag_emit' in locals():
output[ind]['hess_diag_emit'] = np.lib.scimath.sqrt(hess_diag_emit)
if 'hess_diag_trans' in locals():
output[ind]['hess_diag_trans'] = np.lib.scimath.sqrt(hess_diag_trans)
if self.analog_flag == True:
output[ind]['analog_emit_w'] = self.analog_emit_w_
output[ind]['analog_emit_std'] = np.sqrt(self.analog_emit_std_)
output[ind]['analog_likelihood'] = analog_likelihood
output[ind]['ar_corr1'] = ar_corr1
output[ind]['ar_corr2'] = ar_corr2
output[ind]['tgd_lik'] = tgd_lik
output[ind]['pgd_lik'] = pgd_lik
output[ind]['loglik'] = loglik[1:ind + 1]
print('Log likelihood: ' + str(loglik[ind + 1]))
# Now do this for not just the loglik but *each* of the likelihoods individually
# I have been stopping if the % change in log likelihood is below some threshold
if (abs(loglik[ind + 1] - loglik[ind]) / abs(loglik[ind]) < self.tol):
if last_try == True:
loglik = loglik[1: ind + 1]
analog_emit_w_ASD = np.zeros((self.num_states, num_analog_params, 1))
analog_emit_std_ASD = np.zeros((self.num_states, num_analog_params, 1))
for analog_num in range(0, num_analog_params):
print('Fitting filter ' + str(analog_num) + '/' + str(num_analog_params))
new_stim = []
for trial in range(0, len(X)):
new_stim.append({'symb' : y_analog[trial][analog_num, :]})
new_stim[trial]['good_emit'] = ~np.isnan(y_analog[trial][analog_num, :])
[these_stim, these_symb, these_gamma] = _collect_WLS_info(new_stim)
# If more than this, loop until we have gone through all of them. How to deal with e.g. ~1k over this max? Overlapping subsets? Could just do e.g. 4 sub-samples, or however many depending on amount >15k
max_good_pts = 15000
num_analog_iter = np.ceil(these_stim.shape[0] / max_good_pts)
if num_analog_iter > 1:
analog_offset = (these_stim.shape[0] - max_good_pts) / (num_analog_iter - 1)
iter_stim = np.zeros((num_analog_iter, 2))
for nai in range(0, num_analog_iter):
iter_stim[nai, :] = np.floor(analog_offset * (nai - 1)) + [1, max_good_pts]
else:
iter_stim = [1, these_stim.shape[0]]
randomized_stim = np.random.permutation(these_stim.shape[0])
ae_w = np.zeros((num_analog_iter, self.num_states, self.analog_emit_w_.shape[2]))
ae_std = np.zeros((num_analog_iter, self.num_states, self.analog_emit_w_.shape[2]))
iter_weight = np.zeros((num_analog_iter, self.num_states))
ar_corr1 = np.zeros((self.num_states, num_analog_params))
for nai in range(0, num_analog_iter):
use_stim = randomized_stim[iter_stim[nai, 0]:iter_stim[nai, 1]]
for states in range(0, self.num_states):
[out_weights, ASD_stats] = _fast_ASD_weighted_group(these_stim[use_stim, :], these_symb[use_stim], these_gamma[states, use_stim], [np.ones((np.round(these_stim.shape[1] / self.num_filter_bins), 1)) * self.num_filter_bins, [1]], 2)
ae_w[nai, states, :] = out_weights
ae_std[nai, states, :] = ASD_stats['L_post_diag']
iter_weight[nai, states] = np.sum(these_gamma[states, use_stim], axis = 0)
ar_corr1[states, analog_num] = 0
for states in range(0, self.num_states):
analog_emit_w_ASD[states, analog_num, :] = np.sum(ae_w[:, states, :] * np.tile(np.expand_dims(np.expand_dims(iter_weight[:, states] / np.sum(iter_weight[:, states], axis = 0), axis = 1), axis = 2), (1, 1, self.analog_emit_w_.shape[2])), axis = 0)
analog_emit_std_ASD[states, analog_num, :] = np.sum(ae_std[:, states, :] * np.tile(np.expand_dims(np.expand_dims(iter_weight[:, states] / np.sum(iter_weight[:, states], axis = 0), axis = 1), axis = 2), (1, 1, self.analog_emit_std_.shape[2])), axis = 0)
output[ind]['analog_emit_w_ASD'] = analog_emit_w_ASD
output[ind]['analog_emit_std_ASD'] = analog_emit_std_ASD
print('Change in log likelihood is below threshold!')
self.converged_ = True
self.n_iter_ = ind + 1
break
else:
last_try = True
if effective_ind < 4:
# Since the regularization schedule starts here...
effective_ind = 5
else:
effective_ind = effective_ind + 1
else:
effective_ind = effective_ind + 1
last_try = False
if self.auto_anneal == True:
this_lambda = _regularization_schedule(effective_ind)
self.trans_lambda = this_lambda
self.emit_lambda = this_lambda
if self.evaluate == True or self.get_error_bars == True:
break
print('FINISHED!')
if not self.converged_:
warnings.warn('Initialization did not converge. '
'Try different init parameters, '
'or increase max_iter, tol '
'or check for degenerate data.'
, ConvergenceWarning)
self.is_fitted_ = True
return output
def predict(self, X):
"""Estimate model parameters using X and predict the labels for X.
The method fits the model and sets the parameters with which the model
has the largest likelihood. Within each trial, the method iterates
between E-step and M-step for `max_iter` times until the change of
likelihood is less than `tol`, otherwise, a `ConvergenceWarning` is
raised. After fitting, it predicts the most probable label for the
input data points.
Parameters
----------
X : array-like, shape (These should be in the form of a numpy array with size (regressors, time) per sample in a list).
The training input samples.
Returns
-------
y : array-like, shape (These should be in the form of a numpy array with size (time) containing integer numbers from 0...N-1 (N: the number of possible outputs, i.e. song types) per sample in a list).
The target values (Class labels in classification).
"""
pass
def _check_initial_parameters(self, X):
"""Check values of the basic parameters.
Parameters
----------
X : array-like, shape (These should be in the form of a numpy array with size (regressors, time) per sample in a list).
The training input samples.
"""
if self.tol < 0.:
raise ValueError("Invalid value for 'tol': %.5f "
"Tolerance used by the EM must be non-negative"
% self.tol)
        if type(self.max_iter) != int or self.max_iter < 1:
            raise ValueError("Invalid value for 'max_iter': %d "
                             "It must be an integer value greater than or equal to one"
                             % self.max_iter)
        if type(self.num_samples) != int or self.num_samples < 1:
            raise ValueError("Invalid value for 'num_samples': %d "
                             "It must be an integer value greater than or equal to one"
                             % self.num_samples)
        if type(self.num_states) != int or self.num_states < 1:
            raise ValueError("Invalid value for 'num_states': %d "
                             "It must be an integer value greater than or equal to one"
                             % self.num_states)
        if type(self.num_emissions) != int or self.num_emissions < 1:
            raise ValueError("Invalid value for 'num_emissions': %d "
                             "It must be an integer value greater than or equal to one"
                             % self.num_emissions)
        if type(self.num_feedbacks) != int or self.num_feedbacks < 1:
            raise ValueError("Invalid value for 'num_feedbacks': %d "
                             "It must be an integer value greater than or equal to one"
                             % self.num_feedbacks)
        if type(self.num_filter_bins) != int or self.num_filter_bins < 1:
            raise ValueError("Invalid value for 'num_filter_bins': %d "
                             "It must be an integer value greater than or equal to one"
                             % self.num_filter_bins)
        if type(self.num_steps) != int or self.num_steps < 1:
            raise ValueError("Invalid value for 'num_steps': %d "
                             "It must be an integer value greater than or equal to one"
                             % self.num_steps)
        if type(self.filter_offset) != int or self.filter_offset < 1:
            raise ValueError("Invalid value for 'filter_offset': %d "
                             "It must be an integer value greater than or equal to one"
                             % self.filter_offset)
def _initialize_parameters(self, X, random_state):
"""Initialize the model parameters.
Parameters
----------
X : array-like, shape (These should be in the form of a numpy array with size (regressors, time) per sample in a list).
The training input samples.
random_state : RandomState
A random number generator instance.
"""
self.emit_w_ = np.zeros((self.num_states, self.num_emissions - 1, self.num_filter_bins * self.num_feedbacks + self.filter_offset)) # states x emissions-1 x filter bins
self.analog_emit_w_ = np.array([])
self.analog_emit_std_ = np.array([])
self.trans_w_ = np.zeros((self.num_states, self.num_states, self.num_filter_bins * self.num_feedbacks + self.filter_offset)) # states x states x filter bins (diagonals are ignored!)
for ss1 in range(0, self.num_states):
for ff in range(0, self.num_feedbacks):
for ee in range(0, self.num_emissions - 1):
                    self.emit_w_[ss1, ee, ff * self.num_filter_bins + np.arange(self.num_filter_bins)] = np.exp(-np.arange(self.num_filter_bins) / self.num_filter_bins) * np.round(self.random_state.rand(1) * 2 - 1)
                for ss2 in range(0, self.num_states):
                    self.trans_w_[ss1, ss2, ff * self.num_filter_bins + np.arange(self.num_filter_bins)] = np.exp(-np.arange(self.num_filter_bins) / self.num_filter_bins) * np.round(self.random_state.rand(1) * 2 - 1)
self.symb_lik_ = []
self.analog_lik_ = []
self.trans_lik_ = []
if self.CV_regularize == False:
self.regular_schedule_ = 1
self.regular_schedule_ind_ = 0
self.train_data_ = np.arange(0, len(X))
self.test_data_ = np.array([])
else:
self.regular_schedule_ = np.logspace(-4, 1, num = 10)
self.regular_schedule_ind_ = 7
rp_data = np.random.permutation(len(X))
self.train_data_ = rp_data[np.ceil(0.25 * len(rp_data)):]
self.test_data_ = rp_data[0:np.ceil(0.25 * len(rp_data))]
if self.cross_validate == False:
if self.CV_regularize == False:
self.train_data_ = np.arange(0, len(X))
self.test_data_ = np.array([])
else:
rp_data = np.random.permutation(len(X))
self.train_data_ = rp_data[np.ceil(0.25 * len(rp_data)):]
self.test_data_ = rp_data[0:np.ceil(0.25 * len(rp_data))]
if self.add_filters == False:
self.emit_w_init_ = copy.copy(self.emit_w_)
self.analog_emit_w_init_ = copy.copy(self.analog_emit_w_)
self.analog_emit_std_init_ = copy.copy(self.analog_emit_std_)
self.trans_w_init_ = copy.copy(self.trans_w_)
else:
self.emit_w_init_ = np.array([])
self.analog_emit_w_init_ = np.array([])
self.analog_emit_std_init_ = np.array([])
self.trans_w_init_ = np.array([])
def _e_step(self, X, prior, gamma, xi, total_trials):
"""E step.
"""
for trial in range(0, total_trials):
# Maybe first compute likelihoods for the symbols?
if self.analog_flag == True and self.symb_exists == True:
emit_likelihood = self.symb_lik_[trial] * np.prod(self.analog_lik_[trial], axis = 0)
elif self.symb_exists == True:
emit_likelihood = self.symb_lik_[trial]
elif self.analog_flag == True:
emit_likelihood = np.prod(self.analog_lik_[trial], axis = 0)
# Things get funky if the likelihood is exactly 0
emit_likelihood[emit_likelihood < np.finfo(float).eps * 1e3] = np.finfo(float).eps * 1e3 # Do we need this?
# Use the forward-backward algorithm to estimate the probability of being in a given state (gamma),
# probability of transitioning between states (xi), and hold the prior for initialization of next round of fitting
prior[trial], gamma[trial], xi[trial], alpha1, alpha2, scale, scale_a, score = _compute_trial_expectation(prior[trial], emit_likelihood, self.trans_lik_[trial])
return prior, gamma, xi
def _m_step_emission(self, new_stim):
"""M step for emitted symbols.
"""
# https://docs.scipy.org/doc/scipy/reference/generated/scipy.optimize.minimize.html
# I am using the scipy minimization function and passing the analytic value and the gradient to it
# NOTE: I also could compute the Hessian but in my experience, it ends up not speeding the fitting up much because it is very slow and memory intensive to compute on each iteration
# NOTE: This also returns the inverse Hessian so we can get the error bars from that if we want to
for i in range(0, self.num_states):
if self.num_steps == 1:
outweights = _minimize_LBFGS(lambda x: _emit_learning_fun(x, new_stim, i, self.get_params()), np.reshape(self.emit_w_[i, :, :].T, (self.emit_w_.shape[1] * self.emit_w_.shape[2]), order = 'F'), lr = 1, max_iter = 500, tol = 1e-5, line_search = 'Wolfe', interpolate = True, max_ls = 25, history_size = 100, out = True)
else:
outweights = _minimize_LBFGS(lambda x: _emit_multistep_learning_fun(x, new_stim, i, self.get_params()), np.reshape(self.emit_w_[i, :, :].T, (self.emit_w_.shape[1] * self.emit_w_.shape[2]), order = 'F'), lr = 1, max_iter = 500, tol = 1e-5, line_search = 'Wolfe', interpolate = True, max_ls = 25, history_size = 100, out = True)
self.emit_w_[i, :, :] = np.reshape(outweights, (self.emit_w_.shape[2], self.emit_w_.shape[1]), order = 'F').T # Make sure this is reformatted properly!!!
def _m_step_transition(self, new_stim):
"""M step for state transition.
"""
for i in range(0, self.num_states):
if self.train_bins.size == 0:
outweights = _minimize_LBFGS(lambda x: _trans_learning_fun(x, new_stim, i, self.get_params()), np.reshape(self.trans_w_[i, :, :].T, (self.trans_w_.shape[1] * self.trans_w_.shape[2]), order = 'F'), lr = 1, max_iter = 500, tol = 1e-5, line_search = 'Wolfe', interpolate = True, max_ls = 25, history_size = 100, out = True)
self.trans_w_[i, :, :] = np.reshape(outweights, (self.trans_w_.shape[2], self.trans_w_.shape[1]), order = 'F').T
else:
outweights = _minimize_LBFGS(lambda x: _trans_learning_fun(x, new_stim, i, self.get_params()), np.reshape(self.trans_w_[i, :, self.train_bins].T, (self.trans_w_.shape[1] * len(self.train_bins)), order = 'F'), lr = 1, max_iter = 500, tol = 1e-5, line_search = 'Wolfe', interpolate = True, max_ls = 25, history_size = 100, out = True)
self.trans_w_[i, :, self.train_bins] = np.reshape(outweights['x'], (len(self.train_bins), self.trans_w_.shape[1]), order = 'F').T
def get_params(self, deep = True):
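        """Return all model hyperparameters as a dictionary (scikit-learn estimator API)."""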
return {"random_state" : self.random_state,
"tol" : self.tol,
"max_iter" : self.max_iter,
"num_samples" : self.num_samples,
"num_states" : self.num_states,
"num_emissions" : self.num_emissions,
"num_feedbacks" : self.num_feedbacks,
"num_filter_bins" : self.num_filter_bins,
"num_steps" : self.num_steps,
"filter_offset" : self.filter_offset,
"init_loglik" : self.init_loglik,
"smooth_lambda" : self.smooth_lambda,
"emit_lambda" : self.emit_lambda,
"trans_lambda" : self.trans_lambda,
"AR_lambda" : self.AR_lambda,
"AR_vec" : self.AR_vec,
"stim_vec" : self.stim_vec,
"auto_anneal_vec" : self.auto_anneal_vec,
"auto_anneal_schedule" : self.auto_anneal_schedule,
"train_bins" : self.train_bins,
"symb_exists" : self.symb_exists,
"use_ASD" : self.use_ASD,
"add_filters" : self.add_filters,
"fit_emissions" : self.fit_emissions,
"GLM_emissions" : self.GLM_emissions,
"GLM_transitions" : self.GLM_transitions,
"evaluate" : self.evaluate,
"generate" : self.generate,
"L2_smooth" : self.L2_smooth,
"analog_flag" : self.analog_flag,
"auto_anneal" : self.auto_anneal,
"anneal_lambda" : self.anneal_lambda,
"get_error_bars" : self.get_error_bars,
"CV_regularize" : self.CV_regularize,
"cross_validate" : self.cross_validate}
def set_params(self, **parameters):
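        """Set model hyperparameters from keyword arguments and return self (scikit-learn estimator API)."""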
for parameter, value in parameters.items():
setattr(self, parameter, value)
return self
if __name__ == "__main__":
import scipy.stats
import scipy.ndimage.filters
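    # Self-contained demo: build synthetic data with two ground-truth states.
    # Emissions in each state are driven by gamma-shaped filters of opposite
    # sign on the first two feedback cues, while the third cue drives the
    # state switching; the estimator is then fit to recover these filters.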
num_samples = 5
num_states = 2
num_emissions = 2
num_feedbacks = 3
num_filter_bins = 30
num_steps = 1
filter_offset = 1
tau = 4
total_time = 10000
noiseSD = 0.1
stim_scale = 1
num_real_states = 2
stim = []
states = []
output_stim = []
output_symb = []
for ns in range(0, num_samples):
output = np.zeros((num_real_states, total_time))
stim_temp = np.zeros((num_filter_bins, total_time + num_filter_bins - 1, num_feedbacks))
stim_temp[0, :, :] = scipy.ndimage.filters.gaussian_filter(np.random.randn(total_time + num_filter_bins - 1, num_feedbacks), stim_scale)
for i in range(1, num_filter_bins):
stim_temp[i, 0:total_time, :] = stim_temp[0, i:(total_time + i), :]
stim.append(stim_temp[:, 0:total_time, :] + np.random.randn(num_filter_bins, total_time, num_feedbacks) * noiseSD)
final_stim = np.append(stim[ns][:, :, 0], stim[ns][:, :, 1], axis = 0)
final_stim = np.append(final_stim, stim[ns][:, :, 2], axis = 0)
final_stim = np.append(final_stim, np.ones((filter_offset, total_time)), axis = 0)
output_stim.append(final_stim)
filt = scipy.stats.gamma.pdf(np.linspace(0, num_filter_bins), a = tau)[0:num_filter_bins]
p1 = np.exp(np.matmul(stim[ns][:, :, 0].T, filt.T) + np.matmul(stim[ns][:, :, 1].T, -filt.T))
output[0, :] = p1 / (1 + p1) > 0.5
p2 = np.exp(np.matmul(stim[ns][:, :, 0].T, -filt.T) + np.matmul(stim[ns][:, :, 1].T, filt.T))
output[1, :] = p2 / (1 + p2) > 0.5
p3 = np.exp(np.matmul(stim[ns][:, :, 2].T, filt.T))
states.append(p3 / (1 + p3) > 0.5)
output_symb.append(np.zeros(total_time))
for ss in range(0, num_real_states):
output_symb[ns][states[ns] == ss] = output[ss][states[ns] == ss]
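    # The loop above builds a purely synthetic benchmark: smoothed Gaussian stimuli,
    # a latent state sequence driven by the third feedback channel, and emissions
    # produced by one of two state-specific GLM filters. The estimator below is then
    # fit to recover those states and filters.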
estimator = GLMHMMEstimator(num_samples = num_samples, num_states = num_states, num_emissions = num_emissions, num_feedbacks = num_feedbacks, num_filter_bins = num_filter_bins, num_steps = num_steps, filter_offset = filter_offset)
output = estimator.fit(output_stim, output_symb, [])
    estimator.predict(output_stim)
 | 54.439177 | 348 | 0.550088 |
4a22bdbe4ac1fa1d87f5161538c519627fe16ec1 | 6,997 | py | Python | acme/environment_loop.py | ricklentz/acme | 441c44008ba94c9459befa2b2f3a7e806eeb64bf | ["Apache-2.0"] | 1 | 2022-03-31T17:24:10.000Z | 2022-03-31T17:24:10.000Z | acme/environment_loop.py | GACWR/acme | 764a92c09673cb826cdaf7ad157c1aab451507df | ["Apache-2.0"] | null | null | null | acme/environment_loop.py | GACWR/acme | 764a92c09673cb826cdaf7ad157c1aab451507df | ["Apache-2.0"] | null | null | null |
# python3
# Copyright 2018 DeepMind Technologies Limited. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""A simple agent-environment training loop."""
import operator
import time
from typing import Optional, Sequence
from acme import core
from acme.utils import counting
from acme.utils import loggers
from acme.utils import observers as observers_lib
from acme.utils import signals
import dm_env
from dm_env import specs
import numpy as np
import tree
class EnvironmentLoop(core.Worker):
"""A simple RL environment loop.
This takes `Environment` and `Actor` instances and coordinates their
interaction. Agent is updated if `should_update=True`. This can be used as:
loop = EnvironmentLoop(environment, actor)
loop.run(num_episodes)
A `Counter` instance can optionally be given in order to maintain counts
between different Acme components. If not given a local Counter will be
created to maintain counts between calls to the `run` method.
A `Logger` instance can also be passed in order to control the output of the
loop. If not given a platform-specific default logger will be used as defined
by utils.loggers.make_default_logger. A string `label` can be passed to easily
change the label associated with the default logger; this is ignored if a
`Logger` instance is given.
A list of 'Observer' instances can be specified to generate additional metrics
to be logged by the logger. They have access to the 'Environment' instance,
the current timestep datastruct and the current action.
"""
def __init__(
self,
environment: dm_env.Environment,
actor: core.Actor,
counter: Optional[counting.Counter] = None,
logger: Optional[loggers.Logger] = None,
should_update: bool = True,
label: str = 'environment_loop',
observers: Sequence[observers_lib.EnvLoopObserver] = (),
):
# Internalize agent and environment.
self._environment = environment
self._actor = actor
self._counter = counter or counting.Counter()
self._logger = logger or loggers.make_default_logger(label)
self._should_update = should_update
self._observers = observers
def run_episode(self) -> loggers.LoggingData:
"""Run one episode.
Each episode is a loop which interacts first with the environment to get an
observation and then give that observation to the agent in order to retrieve
an action.
Returns:
An instance of `loggers.LoggingData`.
"""
# Reset any counts and start the environment.
start_time = time.time()
episode_steps = 0
# For evaluation, this keeps track of the total undiscounted reward
# accumulated during the episode.
episode_return = tree.map_structure(_generate_zeros_from_spec,
self._environment.reward_spec())
timestep = self._environment.reset()
# Make the first observation.
self._actor.observe_first(timestep)
for observer in self._observers:
# Initialize the observer with the current state of the env after reset
# and the initial timestep.
observer.observe_first(self._environment, timestep)
# Run an episode.
while not timestep.last():
# Generate an action from the agent's policy and step the environment.
action = self._actor.select_action(timestep.observation)
timestep = self._environment.step(action)
# Have the agent observe the timestep and let the actor update itself.
self._actor.observe(action, next_timestep=timestep)
for observer in self._observers:
# One environment step was completed. Observe the current state of the
# environment, the current timestep and the action.
observer.observe(self._environment, timestep, action)
if self._should_update:
self._actor.update()
# Book-keeping.
episode_steps += 1
# Equivalent to: episode_return += timestep.reward
# We capture the return value because if timestep.reward is a JAX
# DeviceArray, episode_return will not be mutated in-place. (In all other
# cases, the returned episode_return will be the same object as the
# argument episode_return.)
episode_return = tree.map_structure(operator.iadd,
episode_return,
timestep.reward)
# Record counts.
counts = self._counter.increment(episodes=1, steps=episode_steps)
# Collect the results and combine with counts.
steps_per_second = episode_steps / (time.time() - start_time)
result = {
'episode_length': episode_steps,
'episode_return': episode_return,
'steps_per_second': steps_per_second,
}
result.update(counts)
for observer in self._observers:
result.update(observer.get_metrics())
return result
def run(self,
num_episodes: Optional[int] = None,
num_steps: Optional[int] = None):
"""Perform the run loop.
Run the environment loop either for `num_episodes` episodes or for at
least `num_steps` steps (the last episode is always run until completion,
so the total number of steps may be slightly more than `num_steps`).
At least one of these two arguments has to be None.
Upon termination of an episode a new episode will be started. If the number
of episodes and the number of steps are not given then this will interact
with the environment infinitely.
Args:
num_episodes: number of episodes to run the loop for.
num_steps: minimal number of steps to run the loop for.
Raises:
ValueError: If both 'num_episodes' and 'num_steps' are not None.
"""
if not (num_episodes is None or num_steps is None):
raise ValueError('Either "num_episodes" or "num_steps" should be None.')
def should_terminate(episode_count: int, step_count: int) -> bool:
return ((num_episodes is not None and episode_count >= num_episodes) or
(num_steps is not None and step_count >= num_steps))
episode_count, step_count = 0, 0
with signals.runtime_terminator():
while not should_terminate(episode_count, step_count):
result = self.run_episode()
episode_count += 1
step_count += result['episode_length']
# Log the given episode results.
self._logger.write(result)
def _generate_zeros_from_spec(spec: specs.Array) -> np.ndarray:
return np.zeros(spec.shape, spec.dtype)
| 37.61828 | 80 | 0.709018 |
4a22da8817cbe098f6b8efd15ae1fed2b04f89c1 | 338 | py | Python | spec/python/test_zlib_surrounded.py | DarkShadow44/kaitai_struct_tests | 4bb13cef82965cca66dda2eb2b77cd64e9f70a12 | ["MIT"] | 11 | 2018-04-01T03:58:15.000Z | 2021-08-14T09:04:55.000Z | spec/python/test_zlib_surrounded.py | DarkShadow44/kaitai_struct_tests | 4bb13cef82965cca66dda2eb2b77cd64e9f70a12 | ["MIT"] | 73 | 2016-07-20T10:27:15.000Z | 2020-12-17T18:56:46.000Z | spec/python/test_zlib_surrounded.py | DarkShadow44/kaitai_struct_tests | 4bb13cef82965cca66dda2eb2b77cd64e9f70a12 | ["MIT"] | 37 | 2016-08-15T08:25:56.000Z | 2021-08-28T14:48:46.000Z |
# Autogenerated from KST: please remove this line if doing any edits by hand!
import unittest
from zlib_surrounded import ZlibSurrounded
class TestZlibSurrounded(unittest.TestCase):
def test_zlib_surrounded(self):
with ZlibSurrounded.from_file('src/zlib_surrounded.bin') as r:
self.assertEqual(r.zlib.num, -1)
| 28.166667 | 77 | 0.754438 |
4a22db061c70db549eeaa691be8e0abe7a348fd2 | 5,365 | py | Python | sdk/python/pulumi_azure_native/network/v20190601/get_route.py | sebtelko/pulumi-azure-native | 711ec021b5c73da05611c56c8a35adb0ce3244e4 | ["Apache-2.0"] | null | null | null | sdk/python/pulumi_azure_native/network/v20190601/get_route.py | sebtelko/pulumi-azure-native | 711ec021b5c73da05611c56c8a35adb0ce3244e4 | ["Apache-2.0"] | null | null | null | sdk/python/pulumi_azure_native/network/v20190601/get_route.py | sebtelko/pulumi-azure-native | 711ec021b5c73da05611c56c8a35adb0ce3244e4 | ["Apache-2.0"] | null | null | null |
# coding=utf-8
# *** WARNING: this file was generated by the Pulumi SDK Generator. ***
# *** Do not edit by hand unless you're certain you know what you are doing! ***
import warnings
import pulumi
import pulumi.runtime
from typing import Any, Mapping, Optional, Sequence, Union, overload
from ... import _utilities
__all__ = [
'GetRouteResult',
'AwaitableGetRouteResult',
'get_route',
]
@pulumi.output_type
class GetRouteResult:
"""
Route resource.
"""
def __init__(__self__, address_prefix=None, etag=None, id=None, name=None, next_hop_ip_address=None, next_hop_type=None, provisioning_state=None):
if address_prefix and not isinstance(address_prefix, str):
raise TypeError("Expected argument 'address_prefix' to be a str")
pulumi.set(__self__, "address_prefix", address_prefix)
if etag and not isinstance(etag, str):
raise TypeError("Expected argument 'etag' to be a str")
pulumi.set(__self__, "etag", etag)
if id and not isinstance(id, str):
raise TypeError("Expected argument 'id' to be a str")
pulumi.set(__self__, "id", id)
if name and not isinstance(name, str):
raise TypeError("Expected argument 'name' to be a str")
pulumi.set(__self__, "name", name)
if next_hop_ip_address and not isinstance(next_hop_ip_address, str):
raise TypeError("Expected argument 'next_hop_ip_address' to be a str")
pulumi.set(__self__, "next_hop_ip_address", next_hop_ip_address)
if next_hop_type and not isinstance(next_hop_type, str):
raise TypeError("Expected argument 'next_hop_type' to be a str")
pulumi.set(__self__, "next_hop_type", next_hop_type)
if provisioning_state and not isinstance(provisioning_state, str):
raise TypeError("Expected argument 'provisioning_state' to be a str")
pulumi.set(__self__, "provisioning_state", provisioning_state)
@property
@pulumi.getter(name="addressPrefix")
def address_prefix(self) -> Optional[str]:
"""
The destination CIDR to which the route applies.
"""
return pulumi.get(self, "address_prefix")
@property
@pulumi.getter
def etag(self) -> Optional[str]:
"""
A unique read-only string that changes whenever the resource is updated.
"""
return pulumi.get(self, "etag")
@property
@pulumi.getter
def id(self) -> Optional[str]:
"""
Resource ID.
"""
return pulumi.get(self, "id")
@property
@pulumi.getter
def name(self) -> Optional[str]:
"""
The name of the resource that is unique within a resource group. This name can be used to access the resource.
"""
return pulumi.get(self, "name")
@property
@pulumi.getter(name="nextHopIpAddress")
def next_hop_ip_address(self) -> Optional[str]:
"""
The IP address packets should be forwarded to. Next hop values are only allowed in routes where the next hop type is VirtualAppliance.
"""
return pulumi.get(self, "next_hop_ip_address")
@property
@pulumi.getter(name="nextHopType")
def next_hop_type(self) -> str:
"""
The type of Azure hop the packet should be sent to.
"""
return pulumi.get(self, "next_hop_type")
@property
@pulumi.getter(name="provisioningState")
def provisioning_state(self) -> Optional[str]:
"""
The provisioning state of the resource. Possible values are: 'Updating', 'Deleting', and 'Failed'.
"""
return pulumi.get(self, "provisioning_state")
class AwaitableGetRouteResult(GetRouteResult):
# pylint: disable=using-constant-test
def __await__(self):
if False:
yield self
return GetRouteResult(
address_prefix=self.address_prefix,
etag=self.etag,
id=self.id,
name=self.name,
next_hop_ip_address=self.next_hop_ip_address,
next_hop_type=self.next_hop_type,
provisioning_state=self.provisioning_state)
def get_route(resource_group_name: Optional[str] = None,
route_name: Optional[str] = None,
route_table_name: Optional[str] = None,
opts: Optional[pulumi.InvokeOptions] = None) -> AwaitableGetRouteResult:
"""
Route resource.
:param str resource_group_name: The name of the resource group.
:param str route_name: The name of the route.
:param str route_table_name: The name of the route table.
"""
__args__ = dict()
__args__['resourceGroupName'] = resource_group_name
__args__['routeName'] = route_name
__args__['routeTableName'] = route_table_name
if opts is None:
opts = pulumi.InvokeOptions()
if opts.version is None:
opts.version = _utilities.get_version()
__ret__ = pulumi.runtime.invoke('azure-native:network/v20190601:getRoute', __args__, opts=opts, typ=GetRouteResult).value
return AwaitableGetRouteResult(
address_prefix=__ret__.address_prefix,
etag=__ret__.etag,
id=__ret__.id,
name=__ret__.name,
next_hop_ip_address=__ret__.next_hop_ip_address,
next_hop_type=__ret__.next_hop_type,
provisioning_state=__ret__.provisioning_state)
| 36.496599 | 150 | 0.658527 |
4a22dc0246908590dbc19d81fcd7497228fb9cb1 | 31,291 | py | Python | tests/fixtures_regress/tests.py | ni-ning/django | 2e7ba6057cfc82a15a22b6021cd60cf307152e2d | ["CNRI-Python-GPL-Compatible", "BSD-3-Clause"] | 61,676 | 2015-01-01T00:05:13.000Z | 2022-03-31T20:37:54.000Z | tests/fixtures_regress/tests.py | ni-ning/django | 2e7ba6057cfc82a15a22b6021cd60cf307152e2d | ["CNRI-Python-GPL-Compatible", "BSD-3-Clause"] | 8,884 | 2015-01-01T00:12:05.000Z | 2022-03-31T19:53:11.000Z | tests/fixtures_regress/tests.py | mustafa0x/django | d7394cfa13a4d1a02356e3a83e10ec100fbb9948 | ["BSD-3-Clause", "0BSD"] | 33,143 | 2015-01-01T02:04:52.000Z | 2022-03-31T19:42:46.000Z |
# Unittests for fixtures.
import json
import os
import re
from io import StringIO
from pathlib import Path
from django.core import management, serializers
from django.core.exceptions import ImproperlyConfigured
from django.core.serializers.base import DeserializationError
from django.db import IntegrityError, transaction
from django.db.models import signals
from django.test import (
TestCase, TransactionTestCase, override_settings, skipIfDBFeature,
skipUnlessDBFeature,
)
from .models import (
Absolute, Animal, Article, Book, Child, Circle1, Circle2, Circle3,
ExternalDependency, M2MCircular1ThroughAB, M2MCircular1ThroughBC,
M2MCircular1ThroughCA, M2MCircular2ThroughAB, M2MComplexA, M2MComplexB,
M2MComplexCircular1A, M2MComplexCircular1B, M2MComplexCircular1C,
M2MComplexCircular2A, M2MComplexCircular2B, M2MSimpleA, M2MSimpleB,
M2MSimpleCircularA, M2MSimpleCircularB, M2MThroughAB, NKChild, Parent,
Person, RefToNKChild, Store, Stuff, Thingy, Widget,
)
_cur_dir = os.path.dirname(os.path.abspath(__file__))
class TestFixtures(TestCase):
def animal_pre_save_check(self, signal, sender, instance, **kwargs):
self.pre_save_checks.append(
(
'Count = %s (%s)' % (instance.count, type(instance.count)),
'Weight = %s (%s)' % (instance.weight, type(instance.weight)),
)
)
def test_duplicate_pk(self):
"""
This is a regression test for ticket #3790.
"""
# Load a fixture that uses PK=1
management.call_command(
'loaddata',
'sequence',
verbosity=0,
)
# Create a new animal. Without a sequence reset, this new object
# will take a PK of 1 (on Postgres), and the save will fail.
animal = Animal(
name='Platypus',
latin_name='Ornithorhynchus anatinus',
count=2,
weight=2.2,
)
animal.save()
self.assertGreater(animal.id, 1)
def test_loaddata_not_found_fields_not_ignore(self):
"""
Test for ticket #9279 -- Error is raised for entries in
the serialized data for fields that have been removed
from the database when not ignored.
"""
with self.assertRaises(DeserializationError):
management.call_command(
'loaddata',
'sequence_extra',
verbosity=0,
)
def test_loaddata_not_found_fields_ignore(self):
"""
Test for ticket #9279 -- Ignores entries in
the serialized data for fields that have been removed
from the database.
"""
management.call_command(
'loaddata',
'sequence_extra',
ignore=True,
verbosity=0,
)
self.assertEqual(Animal.specimens.all()[0].name, 'Lion')
def test_loaddata_not_found_fields_ignore_xml(self):
"""
Test for ticket #19998 -- Ignore entries in the XML serialized data
for fields that have been removed from the model definition.
"""
management.call_command(
'loaddata',
'sequence_extra_xml',
ignore=True,
verbosity=0,
)
self.assertEqual(Animal.specimens.all()[0].name, 'Wolf')
@skipIfDBFeature('interprets_empty_strings_as_nulls')
def test_pretty_print_xml(self):
"""
Regression test for ticket #4558 -- pretty printing of XML fixtures
doesn't affect parsing of None values.
"""
# Load a pretty-printed XML fixture with Nulls.
management.call_command(
'loaddata',
'pretty.xml',
verbosity=0,
)
self.assertIsNone(Stuff.objects.all()[0].name)
self.assertIsNone(Stuff.objects.all()[0].owner)
@skipUnlessDBFeature('interprets_empty_strings_as_nulls')
def test_pretty_print_xml_empty_strings(self):
"""
Regression test for ticket #4558 -- pretty printing of XML fixtures
doesn't affect parsing of None values.
"""
# Load a pretty-printed XML fixture with Nulls.
management.call_command(
'loaddata',
'pretty.xml',
verbosity=0,
)
self.assertEqual(Stuff.objects.all()[0].name, '')
self.assertIsNone(Stuff.objects.all()[0].owner)
def test_absolute_path(self):
"""
Regression test for ticket #6436 --
os.path.join will throw away the initial parts of a path if it
encounters an absolute path.
This means that if a fixture is specified as an absolute path,
we need to make sure we don't discover the absolute path in every
fixture directory.
"""
load_absolute_path = os.path.join(
os.path.dirname(__file__),
'fixtures',
'absolute.json'
)
management.call_command(
'loaddata',
load_absolute_path,
verbosity=0,
)
self.assertEqual(Absolute.objects.count(), 1)
def test_relative_path(self, path=['fixtures', 'absolute.json']):
relative_path = os.path.join(*path)
cwd = os.getcwd()
try:
os.chdir(_cur_dir)
management.call_command(
'loaddata',
relative_path,
verbosity=0,
)
finally:
os.chdir(cwd)
self.assertEqual(Absolute.objects.count(), 1)
@override_settings(FIXTURE_DIRS=[os.path.join(_cur_dir, 'fixtures_1')])
def test_relative_path_in_fixture_dirs(self):
self.test_relative_path(path=['inner', 'absolute.json'])
def test_path_containing_dots(self):
management.call_command(
'loaddata',
'path.containing.dots.json',
verbosity=0,
)
self.assertEqual(Absolute.objects.count(), 1)
def test_unknown_format(self):
"""
Test for ticket #4371 -- Loading data of an unknown format should fail
Validate that error conditions are caught correctly
"""
msg = "Problem installing fixture 'bad_fix.ture1': unkn is not a known serialization format."
with self.assertRaisesMessage(management.CommandError, msg):
management.call_command(
'loaddata',
'bad_fix.ture1.unkn',
verbosity=0,
)
@override_settings(SERIALIZATION_MODULES={'unkn': 'unexistent.path'})
def test_unimportable_serializer(self):
"""
Failing serializer import raises the proper error
"""
with self.assertRaisesMessage(ImportError, "No module named 'unexistent'"):
management.call_command(
'loaddata',
'bad_fix.ture1.unkn',
verbosity=0,
)
def test_invalid_data(self):
"""
Test for ticket #4371 -- Loading a fixture file with invalid data
using explicit filename.
Test for ticket #18213 -- warning conditions are caught correctly
"""
msg = "No fixture data found for 'bad_fixture2'. (File format may be invalid.)"
with self.assertWarnsMessage(RuntimeWarning, msg):
management.call_command(
'loaddata',
'bad_fixture2.xml',
verbosity=0,
)
def test_invalid_data_no_ext(self):
"""
Test for ticket #4371 -- Loading a fixture file with invalid data
without file extension.
Test for ticket #18213 -- warning conditions are caught correctly
"""
msg = "No fixture data found for 'bad_fixture2'. (File format may be invalid.)"
with self.assertWarnsMessage(RuntimeWarning, msg):
management.call_command(
'loaddata',
'bad_fixture2',
verbosity=0,
)
def test_empty(self):
"""
Test for ticket #18213 -- Loading a fixture file with no data output a warning.
Previously empty fixture raises an error exception, see ticket #4371.
"""
msg = "No fixture data found for 'empty'. (File format may be invalid.)"
with self.assertWarnsMessage(RuntimeWarning, msg):
management.call_command(
'loaddata',
'empty',
verbosity=0,
)
def test_error_message(self):
"""
Regression for #9011 - error message is correct.
Change from error to warning for ticket #18213.
"""
msg = "No fixture data found for 'bad_fixture2'. (File format may be invalid.)"
with self.assertWarnsMessage(RuntimeWarning, msg):
management.call_command(
'loaddata',
'bad_fixture2',
'animal',
verbosity=0,
)
def test_pg_sequence_resetting_checks(self):
"""
Test for ticket #7565 -- PostgreSQL sequence resetting checks shouldn't
ascend to parent models when inheritance is used
(since they are treated individually).
"""
management.call_command(
'loaddata',
'model-inheritance.json',
verbosity=0,
)
self.assertEqual(Parent.objects.all()[0].id, 1)
self.assertEqual(Child.objects.all()[0].id, 1)
def test_close_connection_after_loaddata(self):
"""
Test for ticket #7572 -- MySQL has a problem if the same connection is
used to create tables, load data, and then query over that data.
To compensate, we close the connection after running loaddata.
This ensures that a new connection is opened when test queries are
issued.
"""
management.call_command(
'loaddata',
'big-fixture.json',
verbosity=0,
)
articles = Article.objects.exclude(id=9)
self.assertEqual(
list(articles.values_list('id', flat=True)),
[1, 2, 3, 4, 5, 6, 7, 8]
)
# Just for good measure, run the same query again.
# Under the influence of ticket #7572, this will
# give a different result to the previous call.
self.assertEqual(
list(articles.values_list('id', flat=True)),
[1, 2, 3, 4, 5, 6, 7, 8]
)
def test_field_value_coerce(self):
"""
Test for tickets #8298, #9942 - Field values should be coerced into the
correct type by the deserializer, not as part of the database write.
"""
self.pre_save_checks = []
signals.pre_save.connect(self.animal_pre_save_check)
try:
management.call_command(
'loaddata',
'animal.xml',
verbosity=0,
)
self.assertEqual(
self.pre_save_checks,
[
("Count = 42 (<class 'int'>)",
"Weight = 1.2 (<class 'float'>)")
]
)
finally:
signals.pre_save.disconnect(self.animal_pre_save_check)
def test_dumpdata_uses_default_manager(self):
"""
Regression for #11286
Dumpdata honors the default manager. Dump the current contents of
the database as a JSON fixture
"""
management.call_command(
'loaddata',
'animal.xml',
verbosity=0,
)
management.call_command(
'loaddata',
'sequence.json',
verbosity=0,
)
animal = Animal(
name='Platypus',
latin_name='Ornithorhynchus anatinus',
count=2,
weight=2.2,
)
animal.save()
out = StringIO()
management.call_command(
'dumpdata',
'fixtures_regress.animal',
format='json',
stdout=out,
)
# Output order isn't guaranteed, so check for parts
data = out.getvalue()
# Get rid of artifacts like '000000002' to eliminate the differences
# between different Python versions.
data = re.sub('0{6,}[0-9]', '', data)
animals_data = sorted([
{
"pk": 1, "model": "fixtures_regress.animal",
"fields": {"count": 3, "weight": 1.2, "name": "Lion", "latin_name": "Panthera leo"}
},
{
"pk": 10, "model": "fixtures_regress.animal",
"fields": {"count": 42, "weight": 1.2, "name": "Emu", "latin_name": "Dromaius novaehollandiae"}
},
{
"pk": animal.pk, "model": "fixtures_regress.animal",
"fields": {"count": 2, "weight": 2.2, "name": "Platypus", "latin_name": "Ornithorhynchus anatinus"}
},
], key=lambda x: x["pk"])
data = sorted(json.loads(data), key=lambda x: x["pk"])
self.maxDiff = 1024
self.assertEqual(data, animals_data)
def test_proxy_model_included(self):
"""
Regression for #11428 - Proxy models aren't included when you dumpdata
"""
out = StringIO()
# Create an instance of the concrete class
widget = Widget.objects.create(name='grommet')
management.call_command(
'dumpdata',
'fixtures_regress.widget',
'fixtures_regress.widgetproxy',
format='json',
stdout=out,
)
self.assertJSONEqual(
out.getvalue(),
"""[{"pk": %d, "model": "fixtures_regress.widget", "fields": {"name": "grommet"}}]"""
% widget.pk
)
@skipUnlessDBFeature('supports_forward_references')
def test_loaddata_works_when_fixture_has_forward_refs(self):
"""
Regression for #3615 - Forward references cause fixtures not to load in MySQL (InnoDB)
"""
management.call_command(
'loaddata',
'forward_ref.json',
verbosity=0,
)
self.assertEqual(Book.objects.all()[0].id, 1)
self.assertEqual(Person.objects.all()[0].id, 4)
def test_loaddata_raises_error_when_fixture_has_invalid_foreign_key(self):
"""
Regression for #3615 - Ensure data with nonexistent child key references raises error
"""
with self.assertRaisesMessage(IntegrityError, "Problem installing fixture"):
management.call_command(
'loaddata',
'forward_ref_bad_data.json',
verbosity=0,
)
@skipUnlessDBFeature('supports_forward_references')
@override_settings(FIXTURE_DIRS=[os.path.join(_cur_dir, 'fixtures_1'),
os.path.join(_cur_dir, 'fixtures_2')])
def test_loaddata_forward_refs_split_fixtures(self):
"""
Regression for #17530 - should be able to cope with forward references
when the fixtures are not in the same files or directories.
"""
management.call_command(
'loaddata',
'forward_ref_1.json',
'forward_ref_2.json',
verbosity=0,
)
self.assertEqual(Book.objects.all()[0].id, 1)
self.assertEqual(Person.objects.all()[0].id, 4)
def test_loaddata_no_fixture_specified(self):
"""
Regression for #7043 - Error is quickly reported when no fixtures is provided in the command line.
"""
msg = "No database fixture specified. Please provide the path of at least one fixture in the command line."
with self.assertRaisesMessage(management.CommandError, msg):
management.call_command(
'loaddata',
verbosity=0,
)
def test_ticket_20820(self):
"""
Regression for ticket #20820 -- loaddata on a model that inherits
from a model with a M2M shouldn't blow up.
"""
management.call_command(
'loaddata',
'special-article.json',
verbosity=0,
)
def test_ticket_22421(self):
"""
Regression for ticket #22421 -- loaddata on a model that inherits from
a grand-parent model with a M2M but via an abstract parent shouldn't
blow up.
"""
management.call_command(
'loaddata',
'feature.json',
verbosity=0,
)
def test_loaddata_with_m2m_to_self(self):
"""
Regression test for ticket #17946.
"""
management.call_command(
'loaddata',
'm2mtoself.json',
verbosity=0,
)
@override_settings(FIXTURE_DIRS=[os.path.join(_cur_dir, 'fixtures_1'),
os.path.join(_cur_dir, 'fixtures_1')])
def test_fixture_dirs_with_duplicates(self):
"""
settings.FIXTURE_DIRS cannot contain duplicates in order to avoid
repeated fixture loading.
"""
with self.assertRaisesMessage(ImproperlyConfigured, "settings.FIXTURE_DIRS contains duplicates."):
management.call_command('loaddata', 'absolute.json', verbosity=0)
@override_settings(FIXTURE_DIRS=[os.path.join(_cur_dir, 'fixtures')])
def test_fixture_dirs_with_default_fixture_path(self):
"""
settings.FIXTURE_DIRS cannot contain a default fixtures directory
for application (app/fixtures) in order to avoid repeated fixture loading.
"""
msg = (
"'%s' is a default fixture directory for the '%s' app "
"and cannot be listed in settings.FIXTURE_DIRS."
% (os.path.join(_cur_dir, 'fixtures'), 'fixtures_regress')
)
with self.assertRaisesMessage(ImproperlyConfigured, msg):
management.call_command('loaddata', 'absolute.json', verbosity=0)
@override_settings(FIXTURE_DIRS=[os.path.join(_cur_dir, 'fixtures_1'),
os.path.join(_cur_dir, 'fixtures_2')])
def test_loaddata_with_valid_fixture_dirs(self):
management.call_command(
'loaddata',
'absolute.json',
verbosity=0,
)
@override_settings(FIXTURE_DIRS=[Path(_cur_dir) / 'fixtures_1'])
def test_fixtures_dir_pathlib(self):
management.call_command('loaddata', 'inner/absolute.json', verbosity=0)
self.assertQuerysetEqual(Absolute.objects.all(), [1], transform=lambda o: o.pk)
class NaturalKeyFixtureTests(TestCase):
def test_nk_deserialize(self):
"""
Test for ticket #13030 - Python based parser version
natural keys deserialize with fk to inheriting model
"""
management.call_command(
'loaddata',
'model-inheritance.json',
verbosity=0,
)
management.call_command(
'loaddata',
'nk-inheritance.json',
verbosity=0,
)
self.assertEqual(
NKChild.objects.get(pk=1).data,
'apple'
)
self.assertEqual(
RefToNKChild.objects.get(pk=1).nk_fk.data,
'apple'
)
def test_nk_deserialize_xml(self):
"""
Test for ticket #13030 - XML version
natural keys deserialize with fk to inheriting model
"""
management.call_command(
'loaddata',
'model-inheritance.json',
verbosity=0,
)
management.call_command(
'loaddata',
'nk-inheritance.json',
verbosity=0,
)
management.call_command(
'loaddata',
'nk-inheritance2.xml',
verbosity=0,
)
self.assertEqual(
NKChild.objects.get(pk=2).data,
'banana'
)
self.assertEqual(
RefToNKChild.objects.get(pk=2).nk_fk.data,
'apple'
)
def test_nk_on_serialize(self):
"""
Natural key requirements are taken into account when serializing models.
"""
management.call_command(
'loaddata',
'forward_ref_lookup.json',
verbosity=0,
)
out = StringIO()
management.call_command(
'dumpdata',
'fixtures_regress.book',
'fixtures_regress.person',
'fixtures_regress.store',
verbosity=0,
format='json',
use_natural_foreign_keys=True,
use_natural_primary_keys=True,
stdout=out,
)
self.assertJSONEqual(
out.getvalue(),
"""
[{"fields": {"main": null, "name": "Amazon"}, "model": "fixtures_regress.store"},
{"fields": {"main": null, "name": "Borders"}, "model": "fixtures_regress.store"},
{"fields": {"name": "Neal Stephenson"}, "model": "fixtures_regress.person"},
{"pk": 1, "model": "fixtures_regress.book", "fields": {"stores": [["Amazon"], ["Borders"]],
"name": "Cryptonomicon", "author": ["Neal Stephenson"]}}]
"""
)
def test_dependency_sorting(self):
"""
It doesn't matter what order you mention the models, Store *must* be
serialized before then Person, and both must be serialized before Book.
"""
sorted_deps = serializers.sort_dependencies(
[('fixtures_regress', [Book, Person, Store])]
)
self.assertEqual(
sorted_deps,
[Store, Person, Book]
)
def test_dependency_sorting_2(self):
sorted_deps = serializers.sort_dependencies(
[('fixtures_regress', [Book, Store, Person])]
)
self.assertEqual(
sorted_deps,
[Store, Person, Book]
)
def test_dependency_sorting_3(self):
sorted_deps = serializers.sort_dependencies(
[('fixtures_regress', [Store, Book, Person])]
)
self.assertEqual(
sorted_deps,
[Store, Person, Book]
)
def test_dependency_sorting_4(self):
sorted_deps = serializers.sort_dependencies(
[('fixtures_regress', [Store, Person, Book])]
)
self.assertEqual(
sorted_deps,
[Store, Person, Book]
)
def test_dependency_sorting_5(self):
sorted_deps = serializers.sort_dependencies(
[('fixtures_regress', [Person, Book, Store])]
)
self.assertEqual(
sorted_deps,
[Store, Person, Book]
)
def test_dependency_sorting_6(self):
sorted_deps = serializers.sort_dependencies(
[('fixtures_regress', [Person, Store, Book])]
)
self.assertEqual(
sorted_deps,
[Store, Person, Book]
)
def test_dependency_sorting_dangling(self):
sorted_deps = serializers.sort_dependencies(
[('fixtures_regress', [Person, Circle1, Store, Book])]
)
self.assertEqual(
sorted_deps,
[Circle1, Store, Person, Book]
)
def test_dependency_sorting_tight_circular(self):
with self.assertRaisesMessage(
RuntimeError,
"Can't resolve dependencies for fixtures_regress.Circle1, "
"fixtures_regress.Circle2 in serialized app list."
):
serializers.sort_dependencies([('fixtures_regress', [Person, Circle2, Circle1, Store, Book])])
def test_dependency_sorting_tight_circular_2(self):
with self.assertRaisesMessage(
RuntimeError,
"Can't resolve dependencies for fixtures_regress.Circle1, "
"fixtures_regress.Circle2 in serialized app list."
):
serializers.sort_dependencies([('fixtures_regress', [Circle1, Book, Circle2])])
def test_dependency_self_referential(self):
with self.assertRaisesMessage(
RuntimeError,
"Can't resolve dependencies for fixtures_regress.Circle3 in "
"serialized app list."
):
serializers.sort_dependencies([('fixtures_regress', [Book, Circle3])])
def test_dependency_sorting_long(self):
with self.assertRaisesMessage(
RuntimeError,
"Can't resolve dependencies for fixtures_regress.Circle1, "
"fixtures_regress.Circle2, fixtures_regress.Circle3 in serialized "
"app list."
):
serializers.sort_dependencies([('fixtures_regress', [Person, Circle2, Circle1, Circle3, Store, Book])])
def test_dependency_sorting_normal(self):
sorted_deps = serializers.sort_dependencies(
[('fixtures_regress', [Person, ExternalDependency, Book])]
)
self.assertEqual(
sorted_deps,
[Person, Book, ExternalDependency]
)
def test_normal_pk(self):
"""
Normal primary keys work on a model with natural key capabilities.
"""
management.call_command(
'loaddata',
'non_natural_1.json',
verbosity=0,
)
management.call_command(
'loaddata',
'forward_ref_lookup.json',
verbosity=0,
)
management.call_command(
'loaddata',
'non_natural_2.xml',
verbosity=0,
)
books = Book.objects.all()
self.assertQuerysetEqual(
books, [
"<Book: Cryptonomicon by Neal Stephenson (available at Amazon, Borders)>",
"<Book: Ender's Game by Orson Scott Card (available at Collins Bookstore)>",
"<Book: Permutation City by Greg Egan (available at Angus and Robertson)>",
],
transform=repr,
)
class M2MNaturalKeyFixtureTests(TestCase):
"""Tests for ticket #14426."""
def test_dependency_sorting_m2m_simple(self):
"""
M2M relations without explicit through models SHOULD count as dependencies
Regression test for bugs that could be caused by flawed fixes to
#14226, namely if M2M checks are removed from sort_dependencies
altogether.
"""
sorted_deps = serializers.sort_dependencies(
[('fixtures_regress', [M2MSimpleA, M2MSimpleB])]
)
self.assertEqual(sorted_deps, [M2MSimpleB, M2MSimpleA])
def test_dependency_sorting_m2m_simple_circular(self):
"""
Resolving circular M2M relations without explicit through models should
fail loudly
"""
with self.assertRaisesMessage(
RuntimeError,
"Can't resolve dependencies for fixtures_regress.M2MSimpleCircularA, "
"fixtures_regress.M2MSimpleCircularB in serialized app list."
):
serializers.sort_dependencies([('fixtures_regress', [M2MSimpleCircularA, M2MSimpleCircularB])])
def test_dependency_sorting_m2m_complex(self):
"""
M2M relations with explicit through models should NOT count as
dependencies. The through model itself will have dependencies, though.
"""
sorted_deps = serializers.sort_dependencies(
[('fixtures_regress', [M2MComplexA, M2MComplexB, M2MThroughAB])]
)
# Order between M2MComplexA and M2MComplexB doesn't matter. The through
# model has dependencies to them though, so it should come last.
self.assertEqual(sorted_deps[-1], M2MThroughAB)
def test_dependency_sorting_m2m_complex_circular_1(self):
"""
Circular M2M relations with explicit through models should be serializable
"""
A, B, C, AtoB, BtoC, CtoA = (M2MComplexCircular1A, M2MComplexCircular1B,
M2MComplexCircular1C, M2MCircular1ThroughAB,
M2MCircular1ThroughBC, M2MCircular1ThroughCA)
sorted_deps = serializers.sort_dependencies(
[('fixtures_regress', [A, B, C, AtoB, BtoC, CtoA])]
)
# The dependency sorting should not result in an error, and the
# through model should have dependencies to the other models and as
# such come last in the list.
self.assertEqual(sorted_deps[:3], [A, B, C])
self.assertEqual(sorted_deps[3:], [AtoB, BtoC, CtoA])
def test_dependency_sorting_m2m_complex_circular_2(self):
"""
Circular M2M relations with explicit through models should be serializable
This test tests the circularity with explicit natural_key.dependencies
"""
sorted_deps = serializers.sort_dependencies([
('fixtures_regress', [M2MComplexCircular2A, M2MComplexCircular2B, M2MCircular2ThroughAB])
])
self.assertEqual(sorted_deps[:2], [M2MComplexCircular2A, M2MComplexCircular2B])
self.assertEqual(sorted_deps[2:], [M2MCircular2ThroughAB])
def test_dump_and_load_m2m_simple(self):
"""
Test serializing and deserializing back models with simple M2M relations
"""
a = M2MSimpleA.objects.create(data="a")
b1 = M2MSimpleB.objects.create(data="b1")
b2 = M2MSimpleB.objects.create(data="b2")
a.b_set.add(b1)
a.b_set.add(b2)
out = StringIO()
management.call_command(
'dumpdata',
'fixtures_regress.M2MSimpleA',
'fixtures_regress.M2MSimpleB',
use_natural_foreign_keys=True,
stdout=out,
)
for model in [M2MSimpleA, M2MSimpleB]:
model.objects.all().delete()
objects = serializers.deserialize("json", out.getvalue())
for obj in objects:
obj.save()
new_a = M2MSimpleA.objects.get_by_natural_key("a")
self.assertCountEqual(new_a.b_set.all(), [b1, b2])
class TestTicket11101(TransactionTestCase):
available_apps = ['fixtures_regress']
@skipUnlessDBFeature('supports_transactions')
def test_ticket_11101(self):
"""Fixtures can be rolled back (ticket #11101)."""
with transaction.atomic():
management.call_command(
'loaddata',
'thingy.json',
verbosity=0,
)
self.assertEqual(Thingy.objects.count(), 1)
transaction.set_rollback(True)
self.assertEqual(Thingy.objects.count(), 0)
class TestLoadFixtureFromOtherAppDirectory(TestCase):
"""
#23612 -- fixtures path should be normalized to allow referencing relative
paths on Windows.
"""
current_dir = os.path.abspath(os.path.dirname(__file__))
# relative_prefix is something like tests/fixtures_regress or
# fixtures_regress depending on how runtests.py is invoked.
# All path separators must be / in order to be a proper regression test on
# Windows, so replace as appropriate.
relative_prefix = os.path.relpath(current_dir, os.getcwd()).replace('\\', '/')
fixtures = [relative_prefix + '/fixtures/absolute.json']
def test_fixtures_loaded(self):
count = Absolute.objects.count()
self.assertGreater(count, 0, "Fixtures not loaded properly.")
| 35.317156 | 115 | 0.589914 |
4a22de3ae47e5480aa1e68bbb0abb8639c9fbfce | 296 | py | Python | src/modules/position_appender.py | gmum/LocoGAN | 0200c80cff614ede39cdba6114ab17ef0acc2744 | ["MIT"] | 12 | 2020-02-21T08:34:53.000Z | 2021-06-10T13:55:05.000Z | src/modules/position_appender.py | gmum/LocoGAN | 0200c80cff614ede39cdba6114ab17ef0acc2744 | ["MIT"] | null | null | null | src/modules/position_appender.py | gmum/LocoGAN | 0200c80cff614ede39cdba6114ab17ef0acc2744 | ["MIT"] | 3 | 2021-02-05T07:18:20.000Z | 2021-08-17T03:06:31.000Z |
import torch
import torch.nn as nn
class PositionAppender(nn.Module):
    def forward(self, tensor_tuple: tuple) -> torch.Tensor:
        # Unpack the (features, positions) pair and append the position maps as
        # extra channels: concatenation along dim 1, the channel axis.
        input_data, positions = tensor_tuple
        input_with_added_position = torch.cat((input_data, positions), 1)
        return input_with_added_position
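# A minimal usage sketch (hypothetical shapes, assuming NCHW tensors):
#   appender = PositionAppender()
#   out = appender((torch.randn(1, 3, 64, 64), torch.randn(1, 2, 64, 64)))
#   out.shape  # torch.Size([1, 5, 64, 64])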
| 26.909091 | 74 | 0.702703 |
4a22df4694e003d2e92159e315f7563b86875fd4 | 2,252 | py | Python | Logs_Generation/generate_log.py | ericsun95/Real_Time_Logs_Analysis_System | 463ec7a68a669d45dcf07764df0b284232853cd5 | ["Apache-2.0"] | null | null | null | Logs_Generation/generate_log.py | ericsun95/Real_Time_Logs_Analysis_System | 463ec7a68a669d45dcf07764df0b284232853cd5 | ["Apache-2.0"] | null | null | null | Logs_Generation/generate_log.py | ericsun95/Real_Time_Logs_Analysis_System | 463ec7a68a669d45dcf07764df0b284232853cd5 | ["Apache-2.0"] | 1 | 2021-02-17T02:30:46.000Z | 2021-02-17T02:30:46.000Z |
# coding=UTF-8
# coding to generate logs
# crontab -e
# */1 * * * * sh /Users/eric_sun/IdeaProjects/SparkProject/log_generator.sh
import random
import time
url_paths = [
"class/112.html",
"class/130.html",
"class/131.html",
"class/145.html",
"class/128.html",
"class/146.html",
"learn/821",
"course/list"
]
ip_slices = [132, 156, 124, 10, 29, 167, 143, 187, 30, 46, 55, 63, 72, 87, 98]
http_referrers = [
"http://www.baidu.com/s?wd={query}",
"http://www.sogou.com/we?query=={query}",
"http://cn.bing.com/search?q={query}",
"http://search.yahoo.com/search?p={query}"
]
search_keywords = [
"Spark SQL",
"Hadoop",
"Storm",
"Spark Streaming",
"Kafka",
"Python"
]
match_dict = {
"class/112.html" : "Spark SQL",
"class/128.html" : "Kafka",
"class/130.html" : "Hadoop",
"class/131.html" : "Storm",
"class/145.html" : "Spark Streaming",
"class/146.html" : "Python",
"learn/821" : "learnMaterials",
"course/list" : "courseList"
}
status_codes = ["200", "404", "500"]
def sample_url():
return random.sample(url_paths, 1)[0]
def sample_ip():
    ip_parts = random.sample(ip_slices, 4)
    return ".".join([str(item) for item in ip_parts])
def sample_referer(query_str):
if random.uniform(0, 1) > 0.2:
return "-"
refer_str = random.sample(http_referrers, 1)
# query_str = random.sample(search_keywords, 1)
return refer_str[0].format(query=query_str)
def sample_status_code():
return random.sample(status_codes, 1)[0]
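# Each generated record is a tab-separated access-log line of the form
#   <ip>\t<timestamp>\t"GET /<url> HTTP/1.1"\t<status_code>\t<referer>
# e.g. 132.156.124.10  2021-02-17 10:00:00  "GET /class/112.html HTTP/1.1"  200  http://www.baidu.com/s?wd=Spark SQL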
def generate_log(count = 10):
time_str = time.strftime("%Y-%m-%d %H:%M:%S", time.localtime())
    with open("/Users/eric_sun/Documents/GitHub/Real_Time_Logs_Analysis_System/Data/access.log", "w+") as f:
        while count >= 1:
            url_path = sample_url()
            query_str = match_dict[url_path]
            query_log = "{ip}\t{local_time}\t\"GET /{url} HTTP/1.1\"\t{status_code}\t{refer}" \
                .format(ip=sample_ip(),
                        local_time=time_str,
                        url=url_path,
                        status_code=sample_status_code(),
                        refer=sample_referer(query_str))
            print(query_log)
            f.write(query_log + "\n")
            count -= 1
if __name__ == "__main__":
generate_log(20)
| 26.186047 | 101 | 0.607904 |
4a22df4be7ea2aa5d47270ce9c3cf858a95fcab4 | 10,248 | py | Python | few_shots_clf/triplet_classifier/triplet_classifier.py | delmalih/few-shots-classification | 8b06ff673882fd0d8b99cd705e5e5fab0ec93fb3 | ["MIT"] | null | null | null | few_shots_clf/triplet_classifier/triplet_classifier.py | delmalih/few-shots-classification | 8b06ff673882fd0d8b99cd705e5e5fab0ec93fb3 | ["MIT"] | null | null | null | few_shots_clf/triplet_classifier/triplet_classifier.py | delmalih/few-shots-classification | 8b06ff673882fd0d8b99cd705e5e5fab0ec93fb3 | ["MIT"] | null | null | null |
# pylint: disable=attribute-defined-outside-init, no-member, line-too-long, too-many-instance-attributes
##########################
# Imports
##########################
import os
from typing import Dict, List
import pickle
import numpy as np
from tensorflow import keras
from easydict import EasyDict as edict
from few_shots_clf import utils
from few_shots_clf.triplet_classifier import constants
from few_shots_clf.triplet_classifier import utils as triplet_utils
##########################
# TripletClassifier
##########################
class TripletClassifier:
"""Class implementing the Classifier trained on triplet loss (TripletClassifier)
Args:
catalog_path (string): [description]
params (dict): [description]
"""
##########################
# Init
##########################
def __init__(self, catalog_path: str, params: Dict = {}):
self.catalog_path = catalog_path
self._config_classifier(catalog_path, params)
##########################
# Config
##########################
def _config_classifier(self, catalog_path, params):
self._get_classifier_config(params)
self._get_catalog_images(catalog_path)
self._get_catalog_labels(catalog_path)
self._get_catalog_images2labels()
self._get_triplet_model()
self._compile_triplet_model()
self._load_fingerprints()
def _get_classifier_config(self, params):
self.config = edict({
"verbose": params.get("verbose", constants.VERBOSE),
"image_size": params.get("image_size", constants.IMAGE_SIZE),
"triplet_margin": params.get("triplet_margin", constants.TRIPLET_MARGIN),
"mining_strategy": params.get("mining_strategy", constants.MINING_STRATEGY),
"embedding_size": params.get("embedding_size", constants.EMBEDDING_SIZE),
"basic_batch_size": params.get("basic_batch_size", constants.BASIC_BATCH_SIZE),
"augment_factor": params.get("augment_factor", constants.AUGMENT_FACTOR),
"n_epochs": params.get("n_epochs", constants.N_EPOCHS),
"model_backbone": params.get("model_backbone", constants.MODEL_BACKBONE),
"learning_rate": params.get("learning_rate", constants.LEARNING_RATE),
"model_path": params.get("model_path", constants.MODEL_PATH),
"fingerprint_path": params.get("fingerprint_path", constants.FINGERPRINT_PATH),
})
self.config.batch_size = self.config.basic_batch_size * self.config.augment_factor
def _get_catalog_images(self, catalog_path):
self.catalog_images = utils.get_all_images_from_folder(catalog_path)
if self.config.verbose:
print(f"Found {len(self.catalog_images)} images!")
def _get_catalog_labels(self, catalog_path):
self.catalog_labels = utils.get_labels_from_catalog(catalog_path)
if self.config.verbose:
print(f"Found {len(self.catalog_labels)} labels!")
def _get_catalog_images2labels(self):
self.catalog_images2labels = utils.compute_images2labels(self.catalog_images,
self.catalog_labels)
def _get_triplet_model(self):
self.triplet_model = triplet_utils.TripletModel(self.config.embedding_size,
self.config.model_backbone)
self.triplet_model.build(input_shape=(self.config.batch_size,
self.config.image_size,
self.config.image_size,
3))
if self.config.verbose:
self.triplet_model.summary()
def _compile_triplet_model(self):
triplet_loss = triplet_utils.triplet_loss_function(self.config.triplet_margin,
self.config.mining_strategy)
triplet_metric = triplet_utils.triplet_loss_metric(
self.config.triplet_margin)
self.triplet_model.compile(optimizer=keras.optimizers.Adam(lr=self.config.learning_rate),
loss=triplet_loss,
metrics=[triplet_metric])
def _load_fingerprints(self):
# Previous fingerprint
if os.path.exists(self.config.fingerprint_path):
with open(self.config.fingerprint_path, "rb") as pickle_file:
self.config.fingerprint = pickle.load(pickle_file)
else:
self.config.fingerprint = ""
# Current fingerprint
self.fingerprint = triplet_utils.compute_fingerprint(self.catalog_path,
self.config)
##########################
# Train
##########################
def train(self):
"""Method used to train the classifier.
"""
train_generator = self._get_data_generator()
self.triplet_model.fit_generator(generator=train_generator,
epochs=self.config.n_epochs,
verbose=self.config.verbose,
use_multiprocessing=False,
callbacks=self._get_model_callbacks())
def _get_data_generator(self) -> triplet_utils.DataGenerator:
catalog_labels = list(
map(lambda img: self.catalog_images2labels[img], self.catalog_images))
catalog_label_ids = np.float32(
list(map(self.label_str2id, catalog_labels)))
return triplet_utils.DataGenerator(self.catalog_images,
catalog_label_ids,
self.config.image_size,
self.config.basic_batch_size,
self.config.augment_factor)
def _get_model_callbacks(self) -> List:
reduce_lr_on_plateau_callback = keras.callbacks.ReduceLROnPlateau(monitor='loss',
verbose=self.config.verbose)
checkpointer_callback = keras.callbacks.ModelCheckpoint(self.config.model_path,
save_best_only=True,
monitor='loss',
verbose=self.config.verbose)
early_stopping_callback = keras.callbacks.EarlyStopping(monitor='loss',
patience=10,
verbose=self.config.verbose)
return [reduce_lr_on_plateau_callback,
checkpointer_callback,
early_stopping_callback]
    def compute_catalog_embeddings(self) -> None:
        """Computes and stores the embedding of every catalog image in `self.catalog_embeddings`
        (shape: (n_catalog_images, embedding_size)) using the trained triplet model.
        """
# Init. catalog embeddings
self.catalog_embeddings = []
# Loop over catalog images
for catalog_img_path in utils.get_iterator(self.catalog_images,
verbose=self.config.verbose):
# Read catalog image
catalog_image = utils.read_image(catalog_img_path,
size=self.config.image_size)
catalog_image = np.expand_dims(catalog_image, axis=0)
            # Compute embedding
            catalog_embedding = self.triplet_model.predict(catalog_image)[0]
            # Update catalog_embeddings
            self.catalog_embeddings.append(catalog_embedding)
self.catalog_embeddings = np.array(self.catalog_embeddings)
##########################
# Predict
##########################
def load_best_model(self):
"""Loads the best weights from previous training
"""
self.triplet_model.load_weights(self.config.model_path)
def predict(self, query_path: str) -> np.array:
"""Method used to predict a score per class for a given query.
Args:
query_path (str): The local path of the query.
Returns:
np.array: The list of scores per class.
"""
# Read img
query_img = utils.read_image(query_path, size=self.config.image_size)
query_img = np.expand_dims(query_img, axis=0)
# Get query embedding
query_embedding = self.triplet_model.predict(query_img)
# Get scores
scores = self._get_query_scores(query_embedding)
scores = np.array(scores)
return scores
def _get_query_scores(self, query_embedding: np.array):
# Compute pairwise distances
pairwise_distances = np.linalg.norm(query_embedding[:, None, :] -
self.catalog_embeddings[None, :, :],
axis=-1)
# Compute scores
scores = np.exp(-pairwise_distances ** 2)
# Compute predicted label and score
predicted_catalog_image_id = np.argmax(scores, axis=-1)[0]
predicted_catalog_image = self.catalog_images[predicted_catalog_image_id]
predicted_label = self.catalog_images2labels[predicted_catalog_image]
predicted_score = np.max(scores, axis=-1)[0]
return predicted_label, predicted_score
##########################
# Utils
##########################
def label_id2str(self, label_id: int) -> str:
"""Gets the label_str given the label_id.
Args:
label_id (int): The given label_id.
Returns:
str: The label_str of the given label_id.
"""
return self.catalog_labels[label_id]
def label_str2id(self, label_str: str) -> int:
"""Gets the label_id given the label_str.
Args:
label_str (str): The given label_str.
Returns:
            int: The label_id of the given label_str.
"""
if label_str in self.catalog_labels:
return self.catalog_labels.index(label_str)
return -1
| 40.03125 | 104 | 0.569672 |
4a22df5817c8df5a2c4e436c1c75cadd0bf76db7 | 743 | py | Python | xdl/xdl/python/training/filter.py | Ru-Xiang/x-deeplearning | 04cc0497150920c64b06bb8c314ef89977a3427a | ["Apache-2.0"] | 4,071 | 2018-12-13T04:17:38.000Z | 2022-03-30T03:29:35.000Z | xdl/xdl/python/training/filter.py | laozhuang727/x-deeplearning | 781545783a4e2bbbda48fc64318fb2c6d8bbb3cc | ["Apache-2.0"] | 359 | 2018-12-21T01:14:57.000Z | 2022-02-15T07:18:02.000Z | xdl/xdl/python/training/filter.py | laozhuang727/x-deeplearning | 781545783a4e2bbbda48fc64318fb2c6d8bbb3cc | ["Apache-2.0"] | 1,054 | 2018-12-20T09:57:42.000Z | 2022-03-29T07:16:53.000Z |
# Copyright 2018 Alibaba Group. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
def filter(global_step, x, y):
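    """Return True when y is more than x steps ahead of global_step (i.e. y - global_step > x)."""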
return y - global_step > x
| 41.277778 | 80 | 0.671602 |
4a22e03b5638c66a2097c5d987063481f2bbc164 | 4,983 | py | Python | python/ql/test/library-tests/frameworks/rest_framework/taint_test.py | angie1148/codeql | ab3cad749cabe762437d2eb0aab554acea6ce84d | ["MIT"] | 643 | 2018-08-03T11:16:54.000Z | 2020-04-27T23:10:55.000Z | python/ql/test/library-tests/frameworks/rest_framework/taint_test.py | angie1148/codeql | ab3cad749cabe762437d2eb0aab554acea6ce84d | ["MIT"] | 1,880 | 2018-08-03T11:28:32.000Z | 2020-04-28T13:18:51.000Z | python/ql/test/library-tests/frameworks/rest_framework/taint_test.py | ScriptBox99/github-codeql | 2ecf0d3264db8fb4904b2056964da469372a235c | ["MIT"] | 218 | 2018-08-03T11:16:58.000Z | 2020-04-24T02:24:00.000Z |
from rest_framework.decorators import api_view, parser_classes
from rest_framework.views import APIView
from rest_framework.request import Request
from rest_framework.response import Response
from rest_framework.parsers import JSONParser
from django.urls import path
ensure_tainted = ensure_not_tainted = print
# function based view
# see https://www.django-rest-framework.org/api-guide/views/#function-based-views
@api_view(["POST"])
@parser_classes([JSONParser])
def test_taint(request: Request, routed_param): # $ requestHandler routedParameter=routed_param
ensure_tainted(routed_param) # $ tainted
ensure_tainted(request) # $ tainted
# Has all the standard attributes of a django HttpRequest
# see https://github.com/encode/django-rest-framework/blob/00cd4ef864a8bf6d6c90819a983017070f9f08a5/rest_framework/request.py#L410-L418
ensure_tainted(request.resolver_match.args) # $ tainted
# special new attributes added, see https://www.django-rest-framework.org/api-guide/requests/
ensure_tainted(
request.data, # $ tainted
request.data["key"], # $ tainted
# alias for .GET
request.query_params, # $ tainted
request.query_params["key"], # $ tainted
request.query_params.get("key"), # $ tainted
request.query_params.getlist("key"), # $ tainted
request.query_params.getlist("key")[0], # $ tainted
request.query_params.pop("key"), # $ tainted
request.query_params.pop("key")[0], # $ tainted
# see more detailed tests of `request.user` below
request.user, # $ tainted
request.auth, # $ tainted
# seems much more likely attack vector than .method, so included
request.content_type, # $ tainted
# file-like
request.stream, # $ tainted
request.stream.read(), # $ tainted
)
ensure_not_tainted(
# although these could technically be user-controlled, it seems more likely to lead to FPs than interesting results.
request.accepted_media_type,
# In normal Django, if you disable CSRF middleware, you're allowed to use custom
# HTTP methods, like `curl -X FOO <url>`.
# However, with Django REST framework, doing that will yield:
# `{"detail":"Method \"FOO\" not allowed."}`
#
# In the end, since we model a Django REST framework request entirely as a
# extension of a Django request, we're not easily able to remove the taint from
# `.method`.
request.method, # $ SPURIOUS: tainted
)
# --------------------------------------------------------------------------
# request.user
# --------------------------------------------------------------------------
#
# This will normally be an instance of django.contrib.auth.models.User
# (authenticated) so we assume that normally user-controlled fields such as
# username/email is user-controlled, but that password isn't (since it's a hash).
# see https://docs.djangoproject.com/en/3.2/ref/contrib/auth/#fields
ensure_tainted(
request.user.username, # $ tainted
request.user.first_name, # $ tainted
request.user.last_name, # $ tainted
request.user.email, # $ tainted
)
ensure_not_tainted(request.user.password)
return Response("ok") # $ HttpResponse responseBody="ok"
# class based view
# see https://www.django-rest-framework.org/api-guide/views/#class-based-views
class MyClass(APIView):
def initial(self, request, *args, **kwargs): # $ requestHandler
# this method will be called before processing any request
ensure_tainted(request) # $ tainted
def get(self, request: Request, routed_param): # $ requestHandler routedParameter=routed_param
ensure_tainted(routed_param) # $ tainted
# request taint is the same as in function_based_view above
ensure_tainted(
request, # $ tainted
request.data # $ tainted
)
# same as for standard Django view
ensure_tainted(self.args, self.kwargs) # $ tainted
return Response("ok") # $ HttpResponse responseBody="ok"
# fake setup, you can't actually run this
urlpatterns = [
path("test-taint/<routed_param>", test_taint), # $ routeSetup="test-taint/<routed_param>"
path("ClassView/<routed_param>", MyClass.as_view()), # $ routeSetup="ClassView/<routed_param>"
]
# tests with no route-setup, but we can still tell that these are using Django REST
# framework
@api_view(["POST"])
def function_based_no_route(request: Request, possible_routed_param): # $ requestHandler routedParameter=possible_routed_param
ensure_tainted(
request, # $ tainted
possible_routed_param, # $ tainted
)
class ClassBasedNoRoute(APIView):
def get(self, request: Request, possible_routed_param): # $ requestHandler routedParameter=possible_routed_param
ensure_tainted(request, possible_routed_param) # $ tainted
| 37.75 | 139 | 0.668673 |
4a22e098428fe5d3e75ffa6853f0e1cf6f8e3b3b | 8,518 | py | Python | dev/Gems/CloudGemFramework/v1/ResourceManager/resource_manager/test/test_integration_shared_resource.py | BadDevCode/lumberyard | 3d688932f919dbf5821f0cb8a210ce24abe39e9e | ["AML"] | 1,738 | 2017-09-21T10:59:12.000Z | 2022-03-31T21:05:46.000Z | dev/Gems/CloudGemFramework/v1/ResourceManager/resource_manager/test/test_integration_shared_resource.py | olivier-be/lumberyard | 3d688932f919dbf5821f0cb8a210ce24abe39e9e | ["AML"] | 427 | 2017-09-29T22:54:36.000Z | 2022-02-15T19:26:50.000Z | dev/Gems/CloudGemFramework/v1/ResourceManager/resource_manager/test/test_integration_shared_resource.py | olivier-be/lumberyard | 3d688932f919dbf5821f0cb8a210ce24abe39e9e | ["AML"] | 671 | 2017-09-21T08:04:01.000Z | 2022-03-29T14:30:07.000Z |
#
# All or portions of this file Copyright (c) Amazon.com, Inc. or its affiliates or
# its licensors.
#
# For complete copyright and license terms please see the LICENSE at the root of this
# distribution (the "License"). All use of this software is governed by the License,
# or, if provided, by the license below or the license accompanying this file. Do not
# remove or modify any license notices. This file is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
#
import os
import resource_manager.util
import datetime
import time
from . import test_constant
from .lib.filelock import FileLock
class SharedResourceManager:
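    # Coordinates shared project/deployment resources between concurrently
    # running integration tests: access to small JSON "context" files is
    # serialised through file locks (see lib.filelock.FileLock).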
FILE_SHARED_RESOURCES_CONTEXT = 'tmp_last_running_cgf_test_shared_context'
CONTEXT_REGISTERED_ATTR = 'ListOfRegistered'
CONTEXT_REGISTERED_GEM = 'EnabledGems'
PROJECT_LOCK_FILE_NAME = 'tmp_last_running_cgf_test_project_resource'
PROJECT_CONTEXT_STATE_NAME = 'ProjectStackState'
PROJECT_CREATOR = 'OriginatingCloudGem'
PROJECT_CREATOR_PATH = 'OriginatingCloudGemPath'
DEPLOYMENT_LOCK_FILE_NAME = 'tmp_last_running_cgf_test_deployment_resource'
DEPLOYMENT_CONTEXT_STATE_NAME = 'DeploymentStackState'
LAST_MODIFIED_BY = 'LastModifiedBy'
LOCAL_PROJECT_SETTING_ATTRIBUTE = 'ProjectStackId'
@property
def is_registered_for_shared(self):
return self._is_registered_for_shared
def __init__(self):
self.root_dir = None
self._is_registered_for_shared = False
def register_for_shared_resource(self, path):
self.root_dir = path
self.update_shared_resource_context_attr(self.CONTEXT_REGISTERED_ATTR,
lambda items: self.__append_registered_to_list(items, path))
self._is_registered_for_shared = True
def unregister_for_shared_resource(self, path):
self.update_shared_resource_context_attr(self.CONTEXT_REGISTERED_ATTR,
lambda items: self.__remove_registered_to_list(items, path))
self._is_registered_for_shared = False
def append_shared_gem(self, gem_name, version, value, path=None):
item = {"Name": gem_name}
if version:
item["Version"] = version
if path:
item["Path"] = path
if value is None or len(value) == 0:
value = [item]
return value
is_found = False
for entry in value:
entry_name = entry.get("Name")
entry_version = entry.get("Version", None)
entry_path = entry.get("Path", None)
if entry_name == gem_name and entry_version == version and path == entry_path:
is_found = True
break
if not is_found:
value.append(item)
return value
def lock_path(self, path, append_relative=True):
if append_relative:
file_path = os.path.join(os.path.join(self.root_dir, ".."), "{}.txt".format(path))
else:
file_path = path
lock_path = "{}.lock".format(file_path)
return file_path, lock_path
def lock_file_and_execute(self, file_name, func):
file_path, lock_path = self.lock_path(file_name)
lock_file = FileLock("{}".format(lock_path))
print("Acquiring lock file for file {}".format(file_path))
with lock_file:
print("File {} locked... {}".format(file_path, datetime.datetime.utcnow()))
context = resource_manager.util.load_json(file_path, {})
func(context)
resource_manager.util.save_json(file_path, context)
print("File {} unlocked... {}".format(file_path, datetime.datetime.utcnow()))
def update_context_attribute(self, context, name, value):
context[name] = value
def remove_json_file_attribute(self, context, file_name, attr_name):
file_path, lock_path = self.lock_path(file_name)
print("REMOVING lock {} {} {}".format(file_path, attr_name, context))
if attr_name in context:
del context[attr_name]
return context
def remove_json_file_attribute_with_lock(self, file_name, attr_name, append_relative=True):
file_path, lock_path = self.lock_path(file_name, append_relative)
        lock_file = FileLock("{}".format(lock_path))
        print("Acquiring lock file for file {}".format(file_path))
        with lock_file:
            # Load inside the lock so the read/modify/write cycle is atomic
            context = resource_manager.util.load_json(file_path, {})
            print("REMOVING {} from {}".format(attr_name, file_path))
if attr_name in context:
del context[attr_name]
print(context)
resource_manager.util.save_json(file_path, context)
def sync_registered_gems(self, game_dir, enable_gem_func):
self.update_shared_resource_context_attr(
SharedResourceManager.CONTEXT_REGISTERED_GEM,
lambda registered_gems: (
self.__sync_registered_gems(game_dir, enable_gem_func, registered_gems)
))
def sync_project_settings_file(self, context, file_path_src, file_path_target):
# sync the local_project_settings with the process that created the stack
shared_project_settings = resource_manager.util.load_json(file_path_src, {})
resource_manager.util.save_json(file_path_target, shared_project_settings)
def remove_project_settings_file(self, path):
project_settings = resource_manager.util.load_json(path, {})
        # Name of the active settings set (avoid shadowing the builtin 'set')
        settings_set = project_settings[test_constant.DEFAULT][test_constant.SET]
        if self.LOCAL_PROJECT_SETTING_ATTRIBUTE in project_settings[settings_set]:
            del project_settings[settings_set][self.LOCAL_PROJECT_SETTING_ATTRIBUTE]
resource_manager.util.save_json(path, project_settings)
def update_shared_resource_context_attr(self, attr_name, func):
file_name = self.FILE_SHARED_RESOURCES_CONTEXT
file_path, lock_path = self.lock_path(file_name)
lock_file = FileLock("{}".format(lock_path))
with lock_file:
context = resource_manager.util.load_json(file_path, {})
items = context.get(attr_name, [])
context[attr_name] = func(items)
resource_manager.util.save_json(file_path, context)
# provide a small buffer between IO read/writes to help stability
time.sleep(1)
def __sync_registered_gems(self, game_dir, enable_gem_func, shared_deployment_gems):
context_gems = 'Gems'
path = os.path.join(game_dir, "gems.json")
gem_settings = resource_manager.util.load_json(path, {})
for gemA in shared_deployment_gems:
shared_gem_name = gemA.get('Name', None)
shared_gem_version = gemA.get('Version', None)
shared_gem_path = gemA.get('Path', None)
found = False
for gemB in gem_settings[context_gems]:
name = gemB.get('_comment', None)
gem_path = gemB.get('Path', None)
version = None
if gem_path:
parts = gem_path.split("/")
if len(parts) == 3:
version = parts[2]
if name == shared_gem_name and version == shared_gem_version:
found = True
if not found:
if shared_gem_path:
print("MISSING the gem named '{}' at path '{}'. Adding version '{}'".format(shared_gem_name, shared_gem_path, shared_gem_version))
else:
print("MISSING the gem named '{}'. Adding version '{}'".format(shared_gem_name, shared_gem_version))
enable_gem_func(shared_gem_name, shared_gem_version, True, shared_gem_path)
gem_settings[context_gems] = shared_deployment_gems
return shared_deployment_gems
def get_attribute(self, file_name, name, default):
file_path, lock_path = self.lock_path(file_name)
lock_file = FileLock("{}".format(lock_path))
with lock_file:
context = resource_manager.util.load_json(file_path, {})
return context.get(name, default)
def __append_registered_to_list(self, items, item):
if items is None:
items = []
items.append(item)
return list(set(items))
def __remove_registered_to_list(self, items, item):
if item in items:
items.remove(item)
return items
| 42.59 | 152 | 0.656257 |
4a22e30eff72aa735dba72efdbcfde2525fe2fef | 3,079 | py | Python | connectomics/data/augmentation/cutnoise.py | Shray64/pytorch_connectomics | d6c814f11ac2f8418ede5ae220a93016f50214fc | ["MIT"] | null | null | null | connectomics/data/augmentation/cutnoise.py | Shray64/pytorch_connectomics | d6c814f11ac2f8418ede5ae220a93016f50214fc | ["MIT"] | null | null | null | connectomics/data/augmentation/cutnoise.py | Shray64/pytorch_connectomics | d6c814f11ac2f8418ede5ae220a93016f50214fc | ["MIT"] | null | null | null |
from __future__ import print_function, division
from typing import Optional
import numpy as np
from .augmentor import DataAugment
class CutNoise(DataAugment):
r"""3D CutNoise data augmentation.
Randomly add noise to a cuboid region in the volume to force the model
to learn denoising when making predictions. This augmentation is only
applied to images.
Args:
length_ratio (float): the ratio of the cuboid length compared with volume length.
mode (string): the distribution of the noise pattern. Default: ``'uniform'``.
scale (float): scale of the random noise. Default: 0.2.
p (float): probability of applying the augmentation. Default: 0.5
additional_targets(dict, optional): additional targets to augment. Default: None
"""
def __init__(self,
length_ratio: float = 0.25,
mode: str = 'uniform',
scale: float = 0.2,
p: float = 0.5,
additional_targets: Optional[dict] = None):
super(CutNoise, self).__init__(p, additional_targets)
self.length_ratio = length_ratio
self.mode = mode
self.scale = scale
def set_params(self):
r"""There is no change in sample size.
"""
pass
def cut_noise(self, images, zl, zh, yl, yh, xl, xh, noise):
zdim = images.shape[0]
if zdim == 1:
temp = images[:, yl:yh, xl:xh].copy()
else:
temp = images[zl:zh, yl:yh, xl:xh].copy()
temp = temp + noise
temp = np.clip(temp, 0, 1)
if zdim == 1:
images[:, yl:yh, xl:xh] = temp
else:
images[zl:zh, yl:yh, xl:xh] = temp
return images
def random_region(self, vol_len, random_state):
cuboid_len = int(self.length_ratio * vol_len)
low = random_state.randint(0, vol_len-cuboid_len)
high = low + cuboid_len
return low, high
def get_random_params(self, images, random_state):
zdim = images.shape[0]
if zdim > 1:
zl, zh = self.random_region(images.shape[0], random_state)
else:
zl, zh = None, None
yl, yh = self.random_region(images.shape[1], random_state)
xl, xh = self.random_region(images.shape[2], random_state)
z_len = zh - zl if zdim > 1 else 1
noise_shape = (z_len, yh-yl, xh-xl)
noise = random_state.uniform(-self.scale, self.scale, noise_shape)
return zl, zh, yl, yh, xl, xh, noise
def __call__(self, sample, random_state=np.random.RandomState()):
images = sample['image'].copy()
random_params = self.get_random_params(images, random_state)
sample['image'] = self.cut_noise(images, *random_params)
for key in self.additional_targets.keys():
if self.additional_targets[key] == 'img':
sample[key] = self.cut_noise(sample[key].copy(), *random_params)
return sample
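# Illustrative usage sketch (assumptions: a 3D volume normalized to [0, 1];
# shape and parameter values are arbitrary examples, not part of the module):
#
#     aug = CutNoise(length_ratio=0.25, scale=0.2, p=0.5)
#     sample = {'image': np.random.rand(8, 64, 64).astype(np.float32)}
#     out = aug(sample)   # uniform noise added to one random cuboid of out['image']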
| 37.096386 | 90 | 0.586879 |
4a22e415e248d1db13a91d20dd14b3d3b203dbe6 | 168,966 | py | Python | modules/s3/s3forms.py | JamesGlub/eden-1 | 179073cb011a837494db9f040b23105b34d4c82c | ["MIT"] | 1 | 2021-11-08T16:38:22.000Z | 2021-11-08T16:38:22.000Z | modules/s3/s3forms.py | JamesGlub/eden-1 | 179073cb011a837494db9f040b23105b34d4c82c | ["MIT"] | null | null | null | modules/s3/s3forms.py | JamesGlub/eden-1 | 179073cb011a837494db9f040b23105b34d4c82c | ["MIT"] | null | null | null |
# -*- coding: utf-8 -*-
""" S3 SQL Forms
@copyright: 2012-2021 (c) Sahana Software Foundation
@license: MIT
Permission is hereby granted, free of charge, to any person
obtaining a copy of this software and associated documentation
files (the "Software"), to deal in the Software without
restriction, including without limitation the rights to use,
copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the
Software is furnished to do so, subject to the following
conditions:
The above copyright notice and this permission notice shall be
included in all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
OTHER DEALINGS IN THE SOFTWARE.
"""
__all__ = ("S3SQLCustomForm",
"S3SQLDefaultForm",
"S3SQLDummyField",
"S3SQLInlineInstruction",
"S3SQLSectionBreak",
"S3SQLVirtualField",
"S3SQLSubFormLayout",
"S3SQLVerticalSubFormLayout",
"S3SQLInlineComponent",
"S3SQLInlineLink",
"S3WithIntro",
)
import json
from itertools import chain
from gluon import *
from gluon.storage import Storage
from gluon.sqlhtml import StringWidget
from gluon.tools import callback
from gluon.validators import Validator
from s3dal import Field, original_tablename
from .s3query import FS
from .s3utils import s3_mark_required, s3_store_last_record_id, s3_str, s3_validate
from .s3widgets import S3Selector, S3UploadWidget
from .s3validators import JSONERRORS
# Compact JSON encoding
SEPARATORS = (",", ":")
DEFAULT = lambda: None
# =============================================================================
class S3SQLForm(object):
""" SQL Form Base Class"""
# -------------------------------------------------------------------------
def __init__(self, *elements, **attributes):
"""
Constructor to define the form and its elements.
@param elements: the form elements
@param attributes: form attributes
"""
self.elements = []
append = self.elements.append
debug = current.deployment_settings.get_base_debug()
for element in elements:
if not element:
continue
if isinstance(element, S3SQLFormElement):
append(element)
elif isinstance(element, str):
append(S3SQLField(element))
elif isinstance(element, tuple):
l = len(element)
if l > 1:
label, selector = element[:2]
widget = element[2] if l > 2 else DEFAULT
else:
selector = element[0]
label = widget = DEFAULT
append(S3SQLField(selector,
label = label,
widget = widget,
))
else:
msg = "Invalid form element: %s" % str(element)
if debug:
raise SyntaxError(msg)
else:
current.log.error(msg)
opts = {}
attr = {}
for k in attributes:
value = attributes[k]
if k[:1] == "_":
attr[k] = value
else:
opts[k] = value
self.attr = attr
self.opts = opts
self.prefix = None
self.name = None
self.resource = None
self.tablename = None
self.table = None
self.record_id = None
self.subtables = None
self.subrows = None
self.components = None
# -------------------------------------------------------------------------
# Rendering/Processing
# -------------------------------------------------------------------------
def __call__(self,
request = None,
resource = None,
record_id = None,
readonly = False,
message = "Record created/updated",
format = None,
**options):
"""
Render/process the form. To be implemented in subclass.
@param request: the S3Request
@param resource: the target S3Resource
@param record_id: the record ID
@param readonly: render the form read-only
@param message: message upon successful form submission
@param format: data format extension (for audit)
@param options: keyword options for the form
@return: a FORM instance
"""
return None
# -------------------------------------------------------------------------
# Utility functions
# -------------------------------------------------------------------------
def __len__(self):
"""
Support len(crud_form)
"""
return len(self.elements)
# -------------------------------------------------------------------------
def _config(self, key, default=None):
"""
Get a configuration setting for the current table
@param key: the setting key
@param default: fallback value if the setting is not available
"""
tablename = self.tablename
if tablename:
return current.s3db.get_config(tablename, key, default)
else:
return default
# -------------------------------------------------------------------------
@staticmethod
def _submit_buttons(readonly=False):
"""
Render submit buttons
@param readonly: render the form read-only
@return: list of submit buttons
"""
T = current.T
s3 = current.response.s3
settings = s3.crud
if settings.custom_submit:
submit = [(None,
settings.submit_button,
settings.submit_style,
),
]
submit.extend(settings.custom_submit)
buttons = []
for name, label, _class in submit:
if isinstance(label, str):
label = T(label)
button = INPUT(_type = "submit",
_class = "btn crud-submit-button",
_name = name,
_value = label,
)
if _class:
button.add_class(_class)
buttons.append(button)
else:
buttons = ["submit"]
# Cancel button
if not readonly and s3.cancel:
if not settings.custom_submit:
if settings.submit_button:
submit_label = T(settings.submit_button)
else:
submit_label = T("Save")
submit_button = INPUT(_type = "submit",
_value = submit_label,
)
if settings.submit_style:
submit_button.add_class(settings.submit_style)
buttons = [submit_button]
cancel = s3.cancel
if isinstance(cancel, DIV):
cancel_button = cancel
else:
cancel_button = A(T("Cancel"),
_class = "cancel-form-btn action-lnk",
)
if isinstance(cancel, dict):
# Script-controlled cancel button (embedded form)
if "script" in cancel:
# Custom script
script = cancel["script"]
else:
# Default script: hide form, show add-button
script = \
'''$('.cancel-form-btn').click(function(){$('#%(hide)s').slideUp('medium',function(){$('#%(show)s').show()})})'''
s3.jquery_ready.append(script % cancel)
elif s3.cancel is True:
cancel_button.add_class("s3-cancel")
else:
cancel_button.update(_href = s3.cancel)
buttons.append(cancel_button)
return buttons
# -------------------------------------------------------------------------
@staticmethod
def _insert_subheadings(form, tablename, formstyle, subheadings):
"""
Insert subheadings into forms
@param form: the form
@param tablename: the tablename
@param formstyle: the formstyle
@param subheadings:
{"fieldname": "Heading"} or {"fieldname": ["Heading1", "Heading2"]}
"""
if not subheadings:
return
if tablename in subheadings:
subheadings = subheadings.get(tablename)
if formstyle.__name__ in ("formstyle_table",
"formstyle_table_inline",
):
def create_subheading(represent, tablename, f, level=""):
return TR(TD(represent,
_colspan = 3,
_class="subheading",
),
_class = "subheading",
_id = "%s_%s__subheading%s" % (tablename, f, level),
)
else:
def create_subheading(represent, tablename, f, level=""):
return DIV(represent,
_class = "subheading",
_id = "%s_%s__subheading%s" % (tablename, f, level),
)
form_rows = iter(form[0])
tr = next(form_rows)
i = 0
while tr:
# @ToDo: We need a better way of working than this!
f = tr.attributes.get("_id", None)
if not f:
try:
# DIV-based form-style
f = tr[0][0].attributes.get("_id", None)
if not f:
# DRRPP formstyle
f = tr[0][0][1][0].attributes.get("_id", None)
if not f:
# Date fields are inside an extra TAG()
f = tr[0][0][1][0][0].attributes.get("_id", None)
except:
# Something else
f = None
if f:
if f.endswith("__row"):
f = f[:-5]
if f.startswith(tablename):
f = f[len(tablename) + 1:] # : -6
if f.startswith("sub_"):
# Component
f = f[4:]
elif f.startswith("sub-default"):
# S3SQLInlineComponent[CheckBox]
f = f[11:]
elif f.startswith("sub_"):
# S3GroupedOptionsWidget
f = f[4:]
headings = subheadings.get(f)
if not headings:
try:
tr = next(form_rows)
except StopIteration:
break
else:
i += 1
continue
if not isinstance(headings, list):
headings = [headings]
inserted = 0
for heading in headings:
subheading = create_subheading(heading, tablename, f, inserted if inserted else "")
form[0].insert(i, subheading)
i += 1
inserted += 1
if inserted:
tr.attributes.update(_class="%s after_subheading" % tr.attributes["_class"])
for _i in range(0, inserted):
# Iterate over the rows we just created
tr = next(form_rows)
try:
tr = next(form_rows)
except StopIteration:
break
else:
i += 1
# -------------------------------------------------------------------------
def _populate(self,
from_table = None,
from_record = None,
map_fields = None,
data = None,
formfields = None,
format = None,
):
"""
Pre-populate the form with values from a previous record or
controller-submitted data
@param from_table: the table to copy the data from
@param from_record: the record to copy the data from
@param map_fields: field selection/mapping
@param data: the data to prepopulate the form with
@param format: the request format extension
"""
table = self.table
record = None
# Pre-populate from a previous record?
if from_table is not None:
# Field mapping
if map_fields:
if isinstance(map_fields, dict):
# Map fields with other names
fields = [from_table[map_fields[f]]
for f in map_fields
if f in table.fields and
map_fields[f] in from_table.fields and
table[f].writable]
elif isinstance(map_fields, (list, tuple)):
# Only use a subset of the fields
fields = [from_table[f]
for f in map_fields
if f in table.fields and
f in from_table.fields and
table[f].writable]
else:
raise TypeError
else:
# Use all writable fields
fields = [from_table[f]
for f in table.fields
if f in from_table.fields and
table[f].writable]
# Audit read => this is a read method, after all
prefix, name = from_table._tablename.split("_", 1)
current.audit("read", prefix, name,
record = from_record,
representation = format,
)
# Get original record
query = (from_table.id == from_record)
row = current.db(query).select(limitby = (0, 1),
*fields).first()
if row:
if isinstance(map_fields, dict):
record = {f: row[map_fields[f]] for f in map_fields}
else:
record = row.as_dict()
# Pre-populate from call?
elif isinstance(data, dict):
record = {f: data[f] for f in data
if f in table.fields and table[f].writable}
# Add missing fields to pre-populated record
if record:
missing_fields = {}
if formfields:
for f in formfields:
fname = f.name
if fname not in record and f.writable:
missing_fields[fname] = f.default
else:
for f in table.fields:
if f not in record and table[f].writable:
missing_fields[f] = table[f].default
record.update(missing_fields)
record[table._id.name] = None
return record
# =============================================================================
class S3SQLDefaultForm(S3SQLForm):
""" Standard SQL form """
# -------------------------------------------------------------------------
# Rendering/Processing
# -------------------------------------------------------------------------
def __call__(self,
request = None,
resource = None,
record_id = None,
readonly = False,
message = "Record created/updated",
format = None,
**options):
"""
Render/process the form.
@param request: the S3Request
@param resource: the target S3Resource
@param record_id: the record ID
@param readonly: render the form read-only
@param message: message upon successful form submission
@param format: data format extension (for audit)
@param options: keyword options for the form
@todo: describe keyword arguments
@return: a FORM instance
"""
if resource is None:
self.resource = request.resource
self.prefix, self.name, self.table, self.tablename = \
request.target()
else:
self.resource = resource
self.prefix = resource.prefix
self.name = resource.name
self.tablename = resource.tablename
self.table = resource.table
response = current.response
s3 = response.s3
settings = s3.crud
prefix = self.prefix
name = self.name
tablename = self.tablename
table = self.table
record = None
labels = None
self.record_id = record_id
if not readonly:
get_option = options.get
# Populate create-form from another record?
if record_id is None:
data = get_option("data")
from_table = get_option("from_table")
from_record = get_option("from_record")
map_fields = get_option("map_fields")
record = self._populate(from_table = from_table,
from_record = from_record,
map_fields = map_fields,
data = data,
format = format,
)
# De-duplicate link table entries
self.record_id = record_id = self.deduplicate_link(request, record_id)
# Add asterisk to labels of required fields
mark_required = self._config("mark_required", default=[])
labels, required = s3_mark_required(table, mark_required)
# Show required-hint if there are any required fields.
s3.has_required = required
# Determine form style
if format == "plain":
# Default formstyle works best when we have no formatting
formstyle = "table3cols"
elif readonly:
formstyle = settings.formstyle_read
else:
formstyle = settings.formstyle
# Submit buttons
buttons = self._submit_buttons(readonly)
# Generate the form
if record is None:
record = record_id
response.form_label_separator = ""
form = SQLFORM(table,
record = record,
record_id = record_id,
readonly = readonly,
comments = not readonly,
deletable = False,
showid = False,
upload = s3.download_url,
labels = labels,
formstyle = formstyle,
separator = "",
submit_button = settings.submit_button,
buttons = buttons,
)
        # Style the Submit button, if requested
if settings.submit_style and not settings.custom_submit:
try:
form[0][-1][0][0]["_class"] = settings.submit_style
except:
# Submit button has been removed or a different formstyle,
# such as Bootstrap (which is already styled anyway)
pass
# Subheadings
subheadings = options.get("subheadings", None)
if subheadings:
self._insert_subheadings(form, tablename, formstyle, subheadings)
# Process the form
logged = False
if not readonly:
success, error = self.process(form,
request.post_vars,
onvalidation = get_option("onvalidation"),
onaccept = get_option("onaccept"),
hierarchy = get_option("hierarchy"),
link = get_option("link"),
http = request.http,
format = format,
)
if success:
response.confirmation = message
logged = True
elif error:
response.error = error
# Audit read
if not logged and not form.errors:
current.audit("read", prefix, name,
record = record_id,
representation = format,
)
return form
# -------------------------------------------------------------------------
def deduplicate_link(self, request, record_id):
"""
Change to update if this request attempts to create a
duplicate entry in a link table
@param request: the request
@param record_id: the record ID
"""
linked = self.resource.linked
table = self.table
session = current.session
if request.env.request_method == "POST" and linked is not None:
pkey = table._id.name
post_vars = request.post_vars
if not post_vars[pkey]:
lkey = linked.lkey
rkey = linked.rkey
def parse_key(value):
key = s3_str(value)
if key.startswith("{"):
# JSON-based selector (e.g. S3LocationSelector)
return json.loads(key).get("id")
else:
# Normal selector (e.g. OptionsWidget)
return value
try:
lkey_ = parse_key(post_vars[lkey])
rkey_ = parse_key(post_vars[rkey])
except Exception:
return record_id
query = (table[lkey] == lkey_) & \
(table[rkey] == rkey_)
row = current.db(query).select(table._id,
limitby = (0, 1),
).first()
if row is not None:
tablename = self.tablename
record_id = row[pkey]
formkey = session.get("_formkey[%s/None]" % tablename)
formname = "%s/%s" % (tablename, record_id)
session["_formkey[%s]" % formname] = formkey
post_vars["_formname"] = formname
post_vars[pkey] = record_id
return record_id
# -------------------------------------------------------------------------
def process(self, form, vars,
onvalidation = None,
onaccept = None,
hierarchy = None,
link = None,
http = "POST",
format = None,
):
"""
Process the form
@param form: FORM instance
@param vars: request POST variables
@param onvalidation: callback(function) upon successful form validation
@param onaccept: callback(function) upon successful form acceptance
@param hierarchy: the data for the hierarchy link to create
@param link: component link
@param http: HTTP method
@param format: request extension
"""
table = self.table
tablename = self.tablename
# Get the proper onvalidation routine
if isinstance(onvalidation, dict):
onvalidation = onvalidation.get(tablename, [])
# Append link.postprocess to onvalidation
if link and link.postprocess:
postprocess = link.postprocess
if isinstance(onvalidation, list):
onvalidation.insert(0, postprocess)
elif onvalidation is not None:
onvalidation = [postprocess, onvalidation]
else:
onvalidation = [postprocess]
success = True
error = None
record_id = self.record_id
formname = "%s/%s" % (tablename, record_id)
if form.accepts(vars,
current.session,
formname = formname,
onvalidation = onvalidation,
keepvalues = False,
hideerror = False,
):
# Undelete?
if vars.get("_undelete"):
undelete = form.vars.get("deleted") is False
else:
undelete = False
# Audit
prefix = self.prefix
name = self.name
if record_id is None or undelete:
current.audit("create", prefix, name, form=form,
representation = format,
)
else:
current.audit("update", prefix, name, form=form,
record = record_id,
representation = format,
)
form_vars = form.vars
# Update super entity links
s3db = current.s3db
s3db.update_super(table, form_vars)
# Update component link
if link and link.postprocess is None:
resource = link.resource
master = link.master
resource.update_link(master, form_vars)
if form_vars.id:
if record_id is None or undelete:
# Create hierarchy link
if hierarchy:
from .s3hierarchy import S3Hierarchy
h = S3Hierarchy(tablename)
if h.config:
h.postprocess_create_node(hierarchy, form_vars)
# Set record owner
auth = current.auth
auth.s3_set_record_owner(table, form_vars.id)
auth.s3_make_session_owner(table, form_vars.id)
else:
# Update realm
update_realm = s3db.get_config(table, "update_realm")
if update_realm:
current.auth.set_realm_entity(table, form_vars,
force_update = True,
)
# Store session vars
self.resource.lastid = str(form_vars.id)
s3_store_last_record_id(tablename, form_vars.id)
# Execute onaccept
try:
callback(onaccept, form) # , tablename=tablename (if we ever define callbacks as a dict with tablename)
except:
error = "onaccept failed: %s" % str(onaccept)
current.log.error(error)
# This is getting swallowed
raise
else:
success = False
if form.errors:
# Revert any records created within widgets/validators
current.db.rollback()
# IS_LIST_OF validation errors need special handling
errors = []
for fieldname in form.errors:
if fieldname in table:
if isinstance(table[fieldname].requires, IS_LIST_OF):
errors.append("%s: %s" % (fieldname,
form.errors[fieldname],
))
else:
errors.append(str(form.errors[fieldname]))
if errors:
error = "\n".join(errors)
elif http == "POST":
# Invalid form
error = current.T("Invalid form (re-opened in another window?)")
return success, error
# =============================================================================
class S3SQLCustomForm(S3SQLForm):
""" Custom SQL Form """
# -------------------------------------------------------------------------
def insert(self, index, element):
"""
S.insert(index, object) -- insert object before index
"""
if not element:
return
if isinstance(element, S3SQLFormElement):
self.elements.insert(index, element)
elif isinstance(element, str):
self.elements.insert(index, S3SQLField(element))
elif isinstance(element, tuple):
l = len(element)
if l > 1:
label, selector = element[:2]
widget = element[2] if l > 2 else DEFAULT
else:
selector = element[0]
label = widget = DEFAULT
self.elements.insert(index, S3SQLField(selector,
label = label,
widget = widget,
))
else:
msg = "Invalid form element: %s" % str(element)
if current.deployment_settings.get_base_debug():
raise SyntaxError(msg)
else:
current.log.error(msg)
# -------------------------------------------------------------------------
def append(self, element):
"""
S.append(object) -- append object to the end of the sequence
"""
self.insert(len(self), element)
# -------------------------------------------------------------------------
# Rendering/Processing
# -------------------------------------------------------------------------
def __call__(self,
request = None,
resource = None,
record_id = None,
readonly = False,
message = "Record created/updated",
format = None,
**options):
"""
Render/process the form.
@param request: the S3Request
@param resource: the target S3Resource
@param record_id: the record ID
@param readonly: render the form read-only
@param message: message upon successful form submission
@param format: data format extension (for audit)
@param options: keyword options for the form
@return: a FORM instance
"""
db = current.db
response = current.response
s3 = response.s3
# Determine the target resource
if resource is None:
resource = request.resource
self.prefix, self.name, self.table, self.tablename = \
request.target()
else:
self.prefix = resource.prefix
self.name = resource.name
self.tablename = resource.tablename
self.table = resource.table
self.resource = resource
# Resolve all form elements against the resource
subtables = set()
subtable_fields = {}
fields = []
components = []
for element in self.elements:
alias, name, field = element.resolve(resource)
if isinstance(alias, str):
subtables.add(alias)
if field is not None:
fields_ = subtable_fields.get(alias)
if fields_ is None:
fields_ = []
fields_.append((name, field))
subtable_fields[alias] = fields_
elif isinstance(alias, S3SQLFormElement):
components.append(alias)
if field is not None:
fields.append((alias, name, field))
self.subtables = subtables
self.components = components
rcomponents = resource.components
# Customise subtables
if subtables:
if not request:
# Create dummy S3Request
from .s3rest import S3Request
r = S3Request(resource.prefix,
resource.name,
# Current request args/vars could be in a different
# resource context, so must override them here:
args = [],
get_vars = {},
)
else:
r = request
customise_resource = current.deployment_settings.customise_resource
for alias in subtables:
# Get tablename
component = rcomponents.get(alias)
if not component:
continue
tablename = component.tablename
# Run customise_resource
customise = customise_resource(tablename)
if customise:
customise(r, tablename)
# Apply customised attributes to renamed fields
# => except default, label, requires and widget, which can be overridden
# in S3SQLField.resolve instead
renamed_fields = subtable_fields.get(alias)
if renamed_fields:
table = component.table
for name, renamed_field in renamed_fields:
original_field = table[name]
for attr in ("comment",
"default",
"readable",
"represent",
"requires",
"update",
"writable",
):
setattr(renamed_field,
attr,
getattr(original_field, attr),
)
# Mark required fields with asterisk
if not readonly:
mark_required = self._config("mark_required", default=[])
labels, required = s3_mark_required(self.table, mark_required)
# Show the required-hint if there are any required fields.
s3.has_required = required
else:
labels = None
# Choose formstyle
crud_settings = s3.crud
if format == "plain":
# Simple formstyle works best when we have no formatting
formstyle = "table3cols"
elif readonly:
formstyle = crud_settings.formstyle_read
else:
formstyle = crud_settings.formstyle
# Retrieve the record
record = None
if record_id is not None:
query = (self.table._id == record_id)
# @ToDo: limit fields (at least not meta)
record = db(query).select(limitby = (0, 1),
).first()
self.record_id = record_id
self.subrows = Storage()
# Populate the form
data = None
noupdate = []
forbidden = []
has_permission = current.auth.s3_has_permission
if record is not None:
# Retrieve the subrows
subrows = self.subrows
for alias in subtables:
# Get the component
component = rcomponents.get(alias)
if not component or component.multiple:
continue
# Get the subtable row from the DB
subfields = subtable_fields.get(alias)
if subfields:
subfields = [f[0] for f in subfields]
row = self._subrow(query, component, fields=subfields)
# Check permission for this subtable row
ctname = component.tablename
if not row:
permitted = has_permission("create", ctname)
if not permitted:
forbidden.append(alias)
continue
else:
cid = row[component.table._id]
permitted = has_permission("read", ctname, cid)
if not permitted:
forbidden.append(alias)
continue
permitted = has_permission("update", ctname, cid)
if not permitted:
noupdate.append(alias)
# Add the row to the subrows
subrows[alias] = row
# Build the data Storage for the form
pkey = self.table._id
data = Storage({pkey.name:record[pkey]})
for alias, name, field in fields:
if alias is None:
# Field in the master table
if name in record:
value = record[name]
# Field Method?
if callable(value):
value = value()
data[field.name] = value
elif alias in subtables:
# Field in a subtable
if alias in subrows and \
subrows[alias] is not None and \
name in subrows[alias]:
data[field.name] = subrows[alias][name]
elif hasattr(alias, "extract"):
# Form element with custom extraction method
data[field.name] = alias.extract(resource, record_id)
else:
# Record does not exist
self.record_id = record_id = None
# Check create-permission for subtables
for alias in subtables:
component = rcomponents.get(alias)
if not component:
continue
permitted = has_permission("create", component.tablename)
if not permitted:
forbidden.append(alias)
# Apply permissions for subtables
fields = [f for f in fields if f[0] not in forbidden]
for a, n, f in fields:
if a:
if a in noupdate:
f.writable = False
if labels is not None and f.name not in labels:
if f.required:
flabels = s3_mark_required([f], mark_required=[f])[0]
labels[f.name] = flabels[f.name]
elif f.label:
labels[f.name] = "%s:" % f.label
else:
labels[f.name] = ""
if readonly:
# Strip all comments
for a, n, f in fields:
f.comment = None
else:
# Mark required subtable-fields (retaining override-labels)
for alias in subtables:
component = rcomponents.get(alias)
if not component:
continue
mark_required = component.get_config("mark_required", [])
ctable = component.table
sfields = dict((n, (f.name, f.label))
for a, n, f in fields
if a == alias and n in ctable)
slabels = s3_mark_required([ctable[n] for n in sfields],
mark_required = mark_required,
map_names = sfields)[0]
if labels:
labels.update(slabels)
else:
labels = slabels
self.subtables = [s for s in self.subtables if s not in forbidden]
# Aggregate the form fields
formfields = [f[-1] for f in fields]
# Prepopulate from another record?
get_option = options.get
if not record_id and request.http == "GET":
data = self._populate(from_table = get_option("from_table"),
from_record = get_option("from_record"),
map_fields = get_option("map_fields"),
data = get_option("data"),
format = format,
formfields = formfields,
)
# Submit buttons
buttons = self._submit_buttons(readonly)
# Render the form
tablename = self.tablename
response.form_label_separator = ""
form = SQLFORM.factory(record = data,
showid = False,
labels = labels,
formstyle = formstyle,
table_name = tablename,
upload = s3.download_url,
readonly = readonly,
separator = "",
submit_button = crud_settings.submit_button,
buttons = buttons,
*formfields)
        # Style the Submit button, if requested
if crud_settings.submit_style and not crud_settings.custom_submit:
try:
form[0][-1][0][0]["_class"] = crud_settings.submit_style
except (KeyError, IndexError, TypeError):
# Submit button has been removed or a different formstyle,
# such as Bootstrap (which is already styled anyway)
pass
# Subheadings
subheadings = get_option("subheadings", None)
if subheadings:
self._insert_subheadings(form, tablename, formstyle, subheadings)
# Process the form
formname = "%s/%s" % (tablename, record_id)
post_vars = request.post_vars
if form.accepts(post_vars,
current.session,
onvalidation = self.validate,
formname = formname,
keepvalues = False,
hideerror = False,
):
# Undelete?
if post_vars.get("_undelete"):
undelete = post_vars.get("deleted") is False
else:
undelete = False
self.accept(form,
format = format,
link = get_option("link"),
hierarchy = get_option("hierarchy"),
undelete = undelete,
)
# Post-process the form submission after all records have
# been accepted and linked together (self.accept() has
# already updated the form data with any new keys here):
postprocess = self.opts.get("postprocess", None)
if postprocess:
try:
callback(postprocess, form) # , tablename=tablename (if we ever define callbacks as a dict with tablename)
except:
error = "postprocess failed: %s" % postprocess
current.log.error(error)
raise
response.confirmation = message
if form.errors:
# Revert any records created within widgets/validators
db.rollback()
response.error = current.T("There are errors in the form, please check your input")
return form
# -------------------------------------------------------------------------
def validate(self, form):
"""
Run the onvalidation callbacks for the master table
and all subtables in the form, and store any errors
in the form.
@param form: the form
"""
s3db = current.s3db
config = self._config
# Validate against the main table
if self.record_id:
onvalidation = config("update_onvalidation",
config("onvalidation", None))
else:
onvalidation = config("create_onvalidation",
config("onvalidation", None))
if onvalidation is not None:
try:
callback(onvalidation, form) # , tablename=self.tablename (if we ever define callbacks as a dict with tablename)
except:
error = "onvalidation failed: %s" % str(onvalidation)
current.log.error(error)
raise
# Validate against all subtables
get_config = s3db.get_config
for alias in self.subtables:
# Extract the subtable data
subdata = self._extract(form, alias)
if not subdata:
continue
# Get the onvalidation callback for this subtable
subtable = self.resource.components[alias].table
subform = Storage(vars = subdata,
errors = Storage(),
)
rows = self.subrows
if alias in rows and rows[alias] is not None:
# Add the record ID for update-onvalidation
pkey = subtable._id
subform.vars[pkey.name] = rows[alias][pkey]
subonvalidation = get_config(subtable._tablename,
"update_onvalidation",
get_config(subtable._tablename,
"onvalidation", None))
else:
subonvalidation = get_config(subtable._tablename,
"create_onvalidation",
get_config(subtable._tablename,
"onvalidation", None))
# Validate against the subtable, store errors in form
if subonvalidation is not None:
try:
callback(subonvalidation, subform) # , tablename=subtable._tablename (if we ever define callbacks as a dict with tablename)
except:
error = "onvalidation failed: %s" % str(subonvalidation)
current.log.error(error)
raise
for fn in subform.errors:
dummy = "sub_%s_%s" % (alias, fn)
form.errors[dummy] = subform.errors[fn]
# Validate components (e.g. Inline-Forms)
for component in self.components:
if hasattr(component, "validate"):
# Currently just S3SQLInlineLink
component.validate(form)
# -------------------------------------------------------------------------
def accept(self,
form,
format = None,
link = None,
hierarchy = None,
undelete = False,
):
"""
Create/update all records from the form.
@param form: the form
@param format: data format extension (for audit)
@param link: resource.link for linktable components
@param hierarchy: the data for the hierarchy link to create
@param undelete: reinstate a previously deleted record
"""
db = current.db
resource = self.resource
table = self.table
accept_row = self._accept
input_data = self._extract
# Create/update the main record
main_data = input_data(form)
master_id, master_form_vars = accept_row(self.record_id,
main_data,
format = format,
link = link,
hierarchy = hierarchy,
undelete = undelete,
)
if not master_id:
return
else:
master_query = (table._id == master_id)
main_data[table._id.name] = master_id
# Make sure lastid is set even if master has no data
# (otherwise *_next redirection will fail)
resource.lastid = str(master_id)
# Create or update the subtables
get_subrow = self._subrow
for alias in self.subtables:
# Get the data for this subtable from the form
subdata = input_data(form, alias=alias)
if not subdata:
continue
component = resource.components[alias]
if not component or component.multiple:
return
subtable = component.table
# Get the key (pkey) of the master record to link the
# subtable record to, and update the subdata with it
pkey = component.pkey
if pkey != table._id.name and pkey not in main_data:
row = db(table._id == master_id).select(table[pkey],
limitby = (0, 1),
).first()
if not row:
return
main_data[pkey] = row[table[pkey]]
if component.link:
link = Storage(resource = component.link,
master = main_data,
)
else:
link = None
subdata[component.fkey] = main_data[pkey]
# Do we already have a record for this component?
subrow = get_subrow(master_query, component, fields=[subtable._id.name])
if subrow:
# Yes => get the subrecord ID
subid = subrow[subtable._id]
else:
# No => apply component defaults
subid = None
subdata = component.get_defaults(main_data,
data = subdata,
)
# Accept the subrecord
accept_row(subid,
subdata,
alias = alias,
link = link,
format = format,
)
# Accept components (e.g. Inline-Forms)
for item in self.components:
if hasattr(item, "accept"):
item.accept(form,
master_id = master_id,
format = format,
)
# Update form with master form_vars
form_vars = form.vars
# ID
form_vars[table._id.name] = master_id
# Super entities (& anything added manually in table's onaccept)
for var in master_form_vars:
if var not in form_vars:
form_vars[var] = master_form_vars[var]
# -------------------------------------------------------------------------
@staticmethod
def _subrow(master_query, component, fields=None):
"""
Extract the current row from a single-component
@param master_query: query for the master record
@param component: the single-component (S3Resource)
@param fields: list of field names to extract
"""
# Get the join for this subtable
if not component or component.multiple:
return None
query = master_query & component.get_join()
table = component.table
if fields:
# Map field names to component table
try:
fields = [table[f] for f in fields]
except (KeyError, AttributeError):
fields = None
else:
fields.insert(0, table._id)
if not fields:
fields = [table.ALL]
# Retrieve the row
return current.db(query).select(*fields,
limitby = (0, 1)
).first()
# -------------------------------------------------------------------------
# Utility functions
# -------------------------------------------------------------------------
def _extract(self, form, alias=None):
"""
Extract data for a subtable from the form
@param form: the form
@param alias: the component alias of the subtable
"""
if alias is None:
# Main Table
return self.table._filter_fields(form.vars)
# Sub Table
subform = Storage()
alias_length = len(alias)
form_vars = form.vars
for k in form_vars:
if k[:4] == "sub_" and \
k[4:4 + alias_length + 1] == "%s_" % alias:
fn = k[4 + alias_length + 1:]
subform[fn] = form_vars[k]
return subform
# -------------------------------------------------------------------------
def _accept(self,
record_id,
data,
alias = None,
format = None,
hierarchy = None,
link = None,
undelete = False,
):
"""
Create or update a record
@param record_id: the record ID
@param data: the data
@param alias: the component alias
@param format: the request format (for audit)
@param hierarchy: the data for the hierarchy link to create
@param link: resource.link for linktable components
@param undelete: reinstate a previously deleted record
"""
if alias is not None:
# Subtable
if not data or \
not record_id and all(value is None for value in data.values()):
# No data => skip
return None, Storage()
elif record_id and not data:
# Existing master record, no data => skip, but return
# record_id to allow update of inline-components:
return record_id, Storage()
s3db = current.s3db
if alias is None:
component = self.resource
else:
component = self.resource.components[alias]
# Get the DB table (without alias)
table = component.table
tablename = component.tablename
if component._alias != tablename:
unaliased = s3db.table(component.tablename)
# Must retain custom defaults of the aliased component:
for field in table:
field_ = unaliased[field.name]
field_.default = field.default
field_.update = field.update
table = unaliased
get_config = s3db.get_config
oldrecord = None
if record_id:
# Update existing record
accept_id = record_id
db = current.db
onaccept = get_config(tablename, "update_onaccept",
get_config(tablename, "onaccept", None))
table_fields = table.fields
query = (table._id == record_id)
if onaccept:
# Get oldrecord in full to save in form
oldrecord = db(query).select(limitby = (0, 1),
).first()
elif "deleted" in table_fields:
oldrecord = db(query).select(table.deleted,
limitby = (0, 1),
).first()
else:
oldrecord = None
if undelete:
# Restoring a previously deleted record
if "deleted" in table_fields:
data["deleted"] = False
if "created_by" in table_fields and current.auth.user:
data["created_by"] = current.auth.user.id
if "created_on" in table_fields:
data["created_on"] = current.request.utcnow
elif oldrecord and "deleted" in oldrecord and oldrecord.deleted:
# Do not (ever) update a deleted record that we don't
# want to restore, otherwise this may set foreign keys
# in a deleted record!
return accept_id
db(table._id == record_id).update(**data)
else:
# Insert new record
accept_id = table.insert(**data)
if not accept_id:
raise RuntimeError("Could not create record")
onaccept = get_config(tablename, "create_onaccept",
get_config(tablename, "onaccept", None))
data[table._id.name] = accept_id
prefix, name = tablename.split("_", 1)
form_vars = Storage(data)
form = Storage(vars = form_vars,
record = oldrecord,
)
# Audit
if record_id is None or undelete:
current.audit("create", prefix, name,
form = form,
representation = format,
)
else:
current.audit("update", prefix, name,
form = form,
record = accept_id,
representation = format,
)
# Update super entity links
s3db.update_super(table, form_vars)
# Update component link
if link and link.postprocess is None:
resource = link.resource
master = link.master
resource.update_link(master, form_vars)
if accept_id:
if record_id is None or undelete:
# Create hierarchy link
if hierarchy:
from .s3hierarchy import S3Hierarchy
h = S3Hierarchy(tablename)
if h.config:
h.postprocess_create_node(hierarchy, form_vars)
# Set record owner
auth = current.auth
auth.s3_set_record_owner(table, accept_id)
auth.s3_make_session_owner(table, accept_id)
else:
# Update realm
update_realm = get_config(table, "update_realm")
if update_realm:
current.auth.set_realm_entity(table, form_vars,
force_update = True,
)
# Store session vars
component.lastid = str(accept_id)
s3_store_last_record_id(tablename, accept_id)
# Execute onaccept
try:
callback(onaccept, form) # , tablename=tablename (if we ever define callbacks as a dict with tablename)
except:
error = "onaccept failed: %s" % str(onaccept)
current.log.error(error)
# This is getting swallowed
raise
if alias is None:
# Return master_form_vars
return accept_id, form.vars
else:
return accept_id
# =============================================================================
class S3SQLFormElement(object):
""" SQL Form Element Base Class """
# -------------------------------------------------------------------------
def __init__(self, selector, **options):
"""
Constructor to define the form element, to be extended
in subclass.
@param selector: the data object selector
@param options: options for the form element
"""
self.selector = selector
self.options = Storage(options)
# -------------------------------------------------------------------------
def resolve(self, resource):
"""
Method to resolve this form element against the calling resource.
To be implemented in subclass.
@param resource: the resource
@return: a tuple
(
form element,
original field name,
Field instance for the form renderer
)
The form element can be None for the main table, the component
alias for a subtable, or this form element instance for a
subform.
If None is returned as Field instance, this form element will
not be rendered at all. Besides setting readable/writable
in the Field instance, this can be another mechanism to
control access to form elements.
"""
return None, None, None
# -------------------------------------------------------------------------
# Utility methods
# -------------------------------------------------------------------------
@staticmethod
def _rename_field(field, name,
comments = True,
label = DEFAULT,
popup = None,
skip_post_validation = False,
widget = DEFAULT,
):
"""
Rename a field (actually: create a new Field instance with the
same attributes as the given Field, but a different field name).
@param field: the original Field instance
@param name: the new name
@param comments: render comments - if set to False, only
navigation items with an inline() renderer
method will be rendered (unless popup is None)
@param label: override option for the original field label
@param popup: only if comments=False, additional vars for comment
navigation items (e.g. S3PopupLink), None prevents
rendering of navigation items
@param skip_post_validation: skip field validation during POST,
useful for client-side processed
dummy fields.
@param widget: override option for the original field widget
"""
if label is DEFAULT:
label = field.label
if widget is DEFAULT:
# Some widgets may need disabling during POST
widget = field.widget
if not hasattr(field, "type"):
# Virtual Field
field = Storage(comment = None,
type = "string",
length = 255,
unique = False,
uploadfolder = None,
autodelete = False,
label = "",
writable = False,
readable = True,
default = None,
update = None,
compute = None,
represent = lambda v: v or "",
)
requires = None
required = False
notnull = False
elif skip_post_validation and \
current.request.env.request_method == "POST":
requires = SKIP_POST_VALIDATION(field.requires)
required = False
notnull = False
else:
requires = field.requires
required = field.required
notnull = field.notnull
if not comments:
if popup:
comment = field.comment
if hasattr(comment, "renderer") and \
hasattr(comment, "inline"):
if hasattr(comment, "clone"):
comment = comment.clone()
comment_vars = comment.vars
comment_vars["caller"] = popup["caller"]
if "parent" not in comment_vars:
comment_vars["parent"] = popup["parent"]
comment.renderer = comment.inline
else:
comment = None
else:
comment = None
else:
comment = field.comment
f = Field(str(name),
type = field.type,
length = field.length,
required = required,
notnull = notnull,
unique = field.unique,
uploadfolder = field.uploadfolder,
autodelete = field.autodelete,
comment = comment,
label = label,
widget = widget,
default = field.default,
writable = field.writable,
readable = field.readable,
update = field.update,
compute = field.compute,
represent = field.represent,
requires = requires,
)
return f
# =============================================================================
class S3SQLField(S3SQLFormElement):
"""
Base class for regular form fields
A regular form field is a field in the main form, which can be
fields in the main record or in a subtable (single-record-component).
"""
# -------------------------------------------------------------------------
def resolve(self, resource):
"""
Method to resolve this form element against the calling resource.
@param resource: the resource
@return: a tuple
(
subtable alias (or None for main table),
original field name,
Field instance for the form renderer
)
"""
# Import S3ResourceField only here, to avoid circular dependency
from .s3query import S3ResourceField
rfield = S3ResourceField(resource, self.selector)
field = rfield.field
if field is None:
raise SyntaxError("Invalid selector: %s" % self.selector)
tname = rfield.tname
options_get = self.options.get
label = options_get("label", DEFAULT)
widget = options_get("widget", DEFAULT)
if resource._alias:
tablename = resource._alias
else:
tablename = resource.tablename
if tname == tablename:
# Field in the main table
if label is not DEFAULT:
field.label = label
if widget is not DEFAULT:
field.widget = widget
return None, field.name, field
else:
for alias, component in resource.components.loaded.items():
if component.multiple:
continue
if component._alias:
tablename = component._alias
else:
tablename = component.tablename
if tablename == tname:
name = "sub_%s_%s" % (alias, rfield.fname)
renamed_field = self._rename_field(field,
name,
label = label,
widget = widget,
)
return alias, field.name, renamed_field
raise SyntaxError("Invalid subtable: %s" % tname)
# =============================================================================
class S3SQLVirtualField(S3SQLFormElement):
"""
A form element to embed values of field methods (virtual fields),
always read-only
"""
# -------------------------------------------------------------------------
def resolve(self, resource):
"""
Method to resolve this form element against the calling resource.
@param resource: the resource
@return: a tuple
(
subtable alias (or None for main table),
original field name,
Field instance for the form renderer
)
"""
table = resource.table
selector = self.selector
if not hasattr(table, selector):
raise SyntaxError("Undefined virtual field: %s" % selector)
label = self.options.label
if not label:
label = " ".join(s.capitalize() for s in selector.split("_"))
field = Field(selector,
label = label,
widget = self,
)
return None, selector, field
# -------------------------------------------------------------------------
def __call__(self, field, value, **attributes):
"""
Widget renderer for field method values, renders a simple
read-only DIV with the value
"""
widget = DIV(value, **attributes)
widget.add_class("s3-virtual-field")
return widget
# =============================================================================
class S3SQLDummyField(S3SQLFormElement):
"""
A Dummy Field
A simple DIV which can then be acted upon with JavaScript
- used by dc_question Grids
"""
# -------------------------------------------------------------------------
def resolve(self, resource):
"""
Method to resolve this form element against the calling resource.
@param resource: the resource
@return: a tuple
(
subtable alias (or None for main table),
original field name,
Field instance for the form renderer
)
"""
selector = self.selector
field = Field(selector,
default = "",
label = "",
widget = self,
)
return None, selector, field
# -------------------------------------------------------------------------
def __call__(self, field, value, **attributes):
"""
Widget renderer for the input field. To be implemented in
subclass (if required) and to be set as widget=self for the
field returned by the resolve()-method of this form element.
@param field: the input field
@param value: the value to populate the widget
@param attributes: attributes for the widget
@return: the widget for this form element as HTML helper
"""
return DIV(_class = "s3-dummy-field",
)
# =============================================================================
class S3SQLSectionBreak(S3SQLFormElement):
"""
A Section Break
A simple DIV which can then be acted upon with JavaScript &/or Styled
- used by dc_template.layout
"""
# -------------------------------------------------------------------------
def __init__(self):
"""
Constructor to define the form element, to be extended
in subclass.
"""
super(S3SQLSectionBreak, self).__init__(None)
# -------------------------------------------------------------------------
def resolve(self, resource):
"""
Method to resolve this form element against the calling resource.
@param resource: the resource
@return: a tuple
(
subtable alias (or None for main table),
original field name,
Field instance for the form renderer
)
"""
selector = ""
field = Field(selector,
default = "",
label = "",
widget = self,
)
return None, selector, field
# -------------------------------------------------------------------------
def __call__(self, field, value, **attributes):
"""
Widget renderer for the input field. To be implemented in
subclass (if required) and to be set as widget=self for the
field returned by the resolve()-method of this form element.
@param field: the input field
@param value: the value to populate the widget
@param attributes: attributes for the widget
@return: the widget for this form element as HTML helper
"""
return DIV(_class = "s3-section-break",
)
# =============================================================================
class S3SQLInlineInstruction(S3SQLFormElement):
"""
Inline Instructions
A simple DIV which can then be acted upon with JavaScript &/or Styled
- used by dc_template.layout
"""
# -------------------------------------------------------------------------
def __init__(self, do, say, **options):
"""
Constructor to define the form element, to be extended
in subclass.
@param do: What to Do
@param say: What to Say
"""
super(S3SQLInlineInstruction, self).__init__(None)
self.do = do
self.say = say
# -------------------------------------------------------------------------
def resolve(self, resource):
"""
Method to resolve this form element against the calling resource.
@param resource: the resource
@return: a tuple
(
subtable alias (or None for main table),
original field name,
Field instance for the form renderer
)
"""
selector = ""
field = Field(selector,
default = "",
label = "",
widget = self,
)
return None, selector, field
# -------------------------------------------------------------------------
def __call__(self, field, value, **attributes):
"""
Widget renderer for the input field. To be implemented in
subclass (if required) and to be set as widget=self for the
field returned by the resolve()-method of this form element.
@param field: the input field
@param value: the value to populate the widget
@param attributes: attributes for the widget
@return: the widget for this form element as HTML helper
"""
element = DIV(_class = "s3-inline-instructions",
)
element["data-do"] = self.do
element["data-say"] = self.say
return element
# =============================================================================
class S3SQLSubForm(S3SQLFormElement):
"""
Base class for subforms
A subform is a form element to be processed after the main
form. Subforms render a single (usually hidden) input field
and a client-side controlled widget to manipulate its contents.
"""
# -------------------------------------------------------------------------
def __init__(self, selector, **options):
"""
Constructor to define the form element, to be extended
in subclass.
@param selector: the data object selector
@param options: options for the form element
"""
super(S3SQLSubForm, self).__init__(selector, **options)
self.alias = None
# -------------------------------------------------------------------------
def extract(self, resource, record_id):
"""
Initialize this form element for a particular record. This
method will be called by the form renderer to populate the
form for an existing record. To be implemented in subclass.
@param resource: the resource the record belongs to
@param record_id: the record ID
@return: the value for the input field that corresponds
to the specified record.
"""
return None
# -------------------------------------------------------------------------
def parse(self, value, record_id=None):
"""
Validator method for the input field, used to extract the
data from the input field and prepare them for further
processing by the accept()-method. To be implemented in
subclass and set as requires=self.parse for the input field
in the resolve()-method of this form element.
@param value: the value returned from the input field
            @param record_id: unused (for API compatibility with validators)
@return: tuple of (value, error) where value is the
pre-processed field value and error an error
message in case of invalid data, or None.
"""
return (value, None)
# -------------------------------------------------------------------------
def __call__(self, field, value, **attributes):
"""
Widget renderer for the input field. To be implemented in
subclass (if required) and to be set as widget=self for the
field returned by the resolve()-method of this form element.
@param field: the input field
@param value: the value to populate the widget
@param attributes: attributes for the widget
@return: the widget for this form element as HTML helper
"""
raise NotImplementedError
# -------------------------------------------------------------------------
def represent(self, value):
"""
Read-only representation of this form element. This will be
used instead of the __call__() method when the form element
is to be rendered read-only.
@param value: the value as returned from extract()
@return: the read-only representation of this element as
string or HTML helper
"""
return ""
# -------------------------------------------------------------------------
def accept(self, form, master_id=None, format=None):
"""
Post-process this form element and perform the related
transactions. This method will be called after the main
form has been accepted, where the master record ID will
be provided.
@param form: the form
@param master_id: the master record ID
@param format: the data format extension
@return: True on success, False on error
"""
return True
# =============================================================================
class SKIP_POST_VALIDATION(Validator):
"""
Pseudo-validator that allows introspection of field options
during GET, but does nothing during POST. Used for Ajax-validated
inline-components to prevent them from throwing validation errors
when the outer form gets submitted.
"""
def __init__(self, other=None):
"""
Constructor, used like:
field.requires = SKIP_POST_VALIDATION(field.requires)
@param other: the actual field validator
"""
if other and isinstance(other, (list, tuple)):
other = other[0]
self.other = other
if other:
if hasattr(other, "multiple"):
self.multiple = other.multiple
if hasattr(other, "options"):
self.options = other.options
if hasattr(other, "formatter"):
self.formatter = other.formatter
def __call__(self, value, record_id=None):
"""
Validation
@param value: the value
@param record_id: the record ID (unused, for API compatibility)
"""
other = self.other
if current.request.env.request_method == "POST" or not other:
return value, None
if not isinstance(other, (list, tuple)):
other = [other]
for r in other:
value, error = r(value)
if error:
return value, error
return value, None
# =============================================================================
class S3SQLSubFormLayout(object):
""" Layout for S3SQLInlineComponent (Base Class) """
# Layout-specific CSS class for the inline component
layout_class = "subform-default"
def __init__(self):
""" Constructor """
self.inject_script()
self.columns = None
self.row_actions = True
# -------------------------------------------------------------------------
def set_columns(self, columns, row_actions=True):
"""
Set column widths for inline-widgets, can be used by subclasses
to render CSS classes for grid-width
@param columns: iterable of column widths
            @param row_actions: whether the subform contains an action column
"""
self.columns = columns
self.row_actions = row_actions
# -------------------------------------------------------------------------
def subform(self,
data,
item_rows,
action_rows,
empty = False,
readonly = False,
):
"""
Outer container for the subform
@param data: the data dict (as returned from extract())
@param item_rows: the item rows
@param action_rows: the (hidden) action rows
@param empty: no data in this component
@param readonly: render read-only
"""
if empty:
subform = current.T("No entries currently available")
else:
headers = self.headers(data, readonly=readonly)
subform = TABLE(headers,
TBODY(item_rows),
TFOOT(action_rows),
_class = " ".join(("embeddedComponent", self.layout_class)),
)
return subform
# -------------------------------------------------------------------------
def readonly(self, resource, data):
"""
Render this component read-only (table-style)
@param resource: the S3Resource
@param data: the data dict (as returned from extract())
"""
audit = current.audit
prefix, name = resource.prefix, resource.name
xml_decode = current.xml.xml_decode
items = data["data"]
fields = data["fields"]
trs = []
for item in items:
if "_id" in item:
record_id = item["_id"]
else:
continue
audit("read", prefix, name,
record=record_id, representation="html")
trow = TR(_class="read-row")
for f in fields:
text = xml_decode(item[f["name"]]["text"])
trow.append(XML(xml_decode(text)))
trs.append(trow)
return self.subform(data, trs, [], empty=False, readonly=True)
# -------------------------------------------------------------------------
@staticmethod
def render_list(resource, data):
"""
Render this component read-only (list-style)
@param resource: the S3Resource
@param data: the data dict (as returned from extract())
"""
audit = current.audit
prefix, name = resource.prefix, resource.name
xml_decode = current.xml.xml_decode
items = data["data"]
fields = data["fields"]
# Render as comma-separated list of values (no header)
elements = []
for item in items:
if "_id" in item:
record_id = item["_id"]
else:
continue
audit("read", prefix, name,
record=record_id, representation="html")
t = []
for f in fields:
t.append([XML(xml_decode(item[f["name"]]["text"])), " "])
elements.append([TAG[""](list(chain.from_iterable(t))[:-1]), ", "])
return DIV(list(chain.from_iterable(elements))[:-1],
_class = "embeddedComponent",
)
# -------------------------------------------------------------------------
def headers(self, data, readonly=False):
"""
Render the header row with field labels
@param data: the input field data as Python object
@param readonly: whether the form is read-only
"""
fields = data["fields"]
# Don't render a header row if there are no labels
render_header = False
header_row = TR(_class = "label-row static")
happend = header_row.append
for f in fields:
label = f["label"]
if label:
render_header = True
label = TD(LABEL(label))
happend(label)
if render_header:
if not readonly:
# Add columns for the Controls
happend(TD())
happend(TD())
return THEAD(header_row)
else:
return THEAD(_class = "hide")
# -------------------------------------------------------------------------
@staticmethod
def actions(subform,
formname,
index,
item = None,
readonly = True,
editable = True,
deletable = True,
):
"""
Render subform row actions into the row
@param subform: the subform row
@param formname: the form name
@param index: the row index
@param item: the row data
@param readonly: this is a read-row
@param editable: this row is editable
@param deletable: this row is deletable
"""
T = current.T
action_id = "%s-%s" % (formname, index)
# Action button helper
def action(title, name, throbber=False):
btn = DIV(_id = "%s-%s" % (name, action_id),
_class = "inline-%s" % name,
)
if throbber:
return DIV(btn,
DIV(_class = "inline-throbber hide",
_id = "throbber-%s" % action_id,
),
)
else:
return DIV(btn)
# CSS class for action-columns
_class = "subform-action"
# Render the action icons for this row
append = subform.append
if readonly:
if editable:
append(TD(action(T("Edit this entry"), "edt"),
_class = _class,
))
else:
append(TD(_class = _class))
if deletable:
append(TD(action(T("Remove this entry"), "rmv"),
_class = _class,
))
else:
append(TD(_class = _class))
else:
if index != "none" or item:
append(TD(action(T("Update this entry"), "rdy", throbber=True),
_class = _class,
))
append(TD(action(T("Cancel editing"), "cnc"),
_class = _class,
))
else:
append(TD(action(T("Discard this entry"), "dsc"),
_class = _class,
))
append(TD(action(T("Add this entry"), "add", throbber=True),
_class = _class,
))
# -------------------------------------------------------------------------
def rowstyle_read(self, form, fields, *args, **kwargs):
"""
Formstyle for subform read-rows, normally identical
to rowstyle, but can be different in certain layouts
"""
return self.rowstyle(form, fields, *args, **kwargs)
# -------------------------------------------------------------------------
def rowstyle(self, form, fields, *args, **kwargs):
"""
Formstyle for subform action-rows
"""
def render_col(col_id, label, widget, comment, hidden=False):
if col_id == "submit_record__row":
if hasattr(widget, "add_class"):
widget.add_class("inline-row-actions")
col = TD(widget)
elif comment:
col = TD(DIV(widget,
comment,
),
_id = col_id,
)
else:
col = TD(widget,
_id = col_id,
)
return col
if args:
col_id = form
label = fields
widget, comment = args
hidden = kwargs.get("hidden", False)
return render_col(col_id, label, widget, comment, hidden)
else:
parent = TR()
for col_id, label, widget, comment in fields:
parent.append(render_col(col_id, label, widget, comment))
return parent
# -------------------------------------------------------------------------
@staticmethod
def inject_script():
""" Inject custom JS to render new read-rows """
# Example:
#appname = current.request.application
#scripts = current.response.s3.scripts
#script = "/%s/static/themes/CRMT/js/inlinecomponent.layout.js" % appname
#if script not in scripts:
#scripts.append(script)
# No custom JS in the default layout
return
# =============================================================================
class S3SQLVerticalSubFormLayout(S3SQLSubFormLayout):
"""
Vertical layout for inline-components
        - renders a vertical layout for edit-rows
- standard horizontal layout for read-rows
- hiding header row if there are no visible read-rows
"""
# Layout-specific CSS class for the inline component
layout_class = "subform-vertical"
# -------------------------------------------------------------------------
def headers(self, data, readonly=False):
"""
Header-row layout: same as default, but non-static (i.e. hiding
if there are no visible read-rows, because edit-rows have their
own labels)
"""
headers = super(S3SQLVerticalSubFormLayout, self).headers
header_row = headers(data, readonly = readonly)
element = header_row.element("tr")
if hasattr(element, "remove_class"):
element.remove_class("static")
return header_row
# -------------------------------------------------------------------------
def rowstyle_read(self, form, fields, *args, **kwargs):
"""
Formstyle for subform read-rows, same as standard
horizontal layout.
"""
rowstyle = super(S3SQLVerticalSubFormLayout, self).rowstyle
return rowstyle(form, fields, *args, **kwargs)
# -------------------------------------------------------------------------
def rowstyle(self, form, fields, *args, **kwargs):
"""
Formstyle for subform edit-rows, using a vertical
formstyle because multiple fields combined with
location-selector are too complex for horizontal
layout.
"""
# Use standard foundation formstyle
from s3theme import formstyle_foundation as formstyle
if args:
col_id = form
label = fields
widget, comment = args
hidden = kwargs.get("hidden", False)
return formstyle(col_id, label, widget, comment, hidden)
else:
parent = TD(_colspan = len(fields))
for col_id, label, widget, comment in fields:
parent.append(formstyle(col_id, label, widget, comment))
return TR(parent)
# =============================================================================
class S3SQLInlineComponent(S3SQLSubForm):
"""
Form element for an inline-component-form
This form element allows CRUD of multi-record-components within
the main record form. It renders a single hidden text field with a
JSON representation of the component records, and a widget which
facilitates client-side manipulation of this JSON.
This widget is a row of fields per component record.
The widget uses the s3.ui.inline_component.js script for client-side
manipulation of the JSON data. Changes made by the script will be
validated through Ajax-calls to the CRUD.validate() method.
During accept(), the component gets updated according to the JSON
returned.
@ToDo: Support filtering of field options
               Use case is inline project_organisation for IFRC
PartnerNS needs to be filtered differently from Partners/Donors,
so can't just set a global requires for the field in the controller
- needs to be inside the widget.
See private/templates/IFRC/config.py
"""
prefix = "sub"
def __init__(self, selector, **options):
super(S3SQLInlineComponent, self).__init__(selector, **options)
self.resource = None
self.upload = {}
# -------------------------------------------------------------------------
def resolve(self, resource):
"""
Method to resolve this form element against the calling resource.
@param resource: the resource
@return: a tuple (self, None, Field instance)
"""
selector = self.selector
# Check selector
try:
component = resource.components[selector]
except KeyError:
raise SyntaxError("Undefined component: %s" % selector)
# Check permission
permitted = current.auth.s3_has_permission("read",
component.tablename,
)
if not permitted:
return (None, None, None)
options = self.options
if "name" in options:
self.alias = options["name"]
label = self.alias
else:
self.alias = "default"
label = self.selector
if "label" in options:
label = options["label"]
else:
label = " ".join([s.capitalize() for s in label.split("_")])
fname = self._formname(separator = "_")
field = Field(fname, "text",
comment = options.get("comment", None),
default = self.extract(resource, None),
label = label,
represent = self.represent,
required = options.get("required", False),
requires = self.parse,
widget = self,
)
return (self, None, field)
# -------------------------------------------------------------------------
def extract(self, resource, record_id):
"""
Initialize this form element for a particular record. Retrieves
the component data for this record from the database and
converts them into a JSON string to populate the input field with.
@param resource: the resource the record belongs to
@param record_id: the record ID
@return: the JSON for the input field.
"""
self.resource = resource
component_name = self.selector
try:
component = resource.components[component_name]
except KeyError:
raise AttributeError("Undefined component")
options = self.options
if component.link:
link = options.get("link", True)
if link:
# For link-table components, embed the link
# table rather than the component
component = component.link
table = component.table
tablename = component.tablename
pkey = table._id.name
fields_opt = options.get("fields", None)
labels = {}
if fields_opt:
fields = []
for f in fields_opt:
if isinstance(f, tuple):
label, f = f
labels[f] = label
if f in table.fields:
fields.append(f)
else:
# Really?
fields = [f.name for f in table if f.readable or f.writable]
if pkey not in fields:
fields.insert(0, pkey)
# Support read-only Virtual Fields
if "virtual_fields" in options:
virtual_fields = options["virtual_fields"]
else:
virtual_fields = []
if "orderby" in options:
orderby = options["orderby"]
else:
orderby = component.get_config("orderby")
if record_id:
if "filterby" in options:
# Filter
f = self._filterby_query()
if f is not None:
component.build_query(filter=f)
if "extra_fields" in options:
extra_fields = options["extra_fields"]
else:
extra_fields = []
all_fields = fields + virtual_fields + extra_fields
start = 0
limit = 1 if options.multiple is False else None
data = component.select(all_fields,
start = start,
limit = limit,
represent = True,
raw_data = True,
show_links = False,
orderby = orderby,
)
records = data["rows"]
rfields = data["rfields"]
for f in rfields:
if f.fname in extra_fields:
rfields.remove(f)
else:
s = f.selector
if s.startswith("~."):
s = s[2:]
label = labels.get(s, None)
if label is not None:
f.label = label
else:
records = []
rfields = []
for s in fields:
rfield = component.resolve_selector(s)
label = labels.get(s, None)
if label is not None:
rfield.label = label
rfields.append(rfield)
for f in virtual_fields:
rfield = component.resolve_selector(f[1])
rfield.label = f[0]
rfields.append(rfield)
headers = [{"name": rfield.fname,
"label": s3_str(rfield.label),
}
for rfield in rfields if rfield.fname != pkey]
items = []
has_permission = current.auth.s3_has_permission
for record in records:
row = record["_row"]
row_id = row[str(table._id)]
item = {"_id": row_id}
permitted = has_permission("update", tablename, row_id)
if not permitted:
item["_readonly"] = True
for rfield in rfields:
fname = rfield.fname
if fname == pkey:
continue
colname = rfield.colname
field = rfield.field
widget = field.widget
if isinstance(widget, S3Selector):
# Use the widget extraction/serialization method
value = widget.serialize(widget.extract(row[colname]))
elif hasattr(field, "formatter"):
value = field.formatter(row[colname])
else:
# Virtual Field
value = row[colname]
text = s3_str(record[colname])
# Text representation is only used in read-forms where
# representation markup cannot interfere with the inline
# form logic - so stripping the markup should not be
# necessary here:
#if "<" in text:
# text = s3_strip_markup(text)
item[fname] = {"value": value, "text": text}
items.append(item)
validate = options.get("validate", None)
if not validate or \
not isinstance(validate, tuple) or \
not len(validate) == 2:
request = current.request
validate = (request.controller, request.function)
c, f = validate
data = {"controller": c,
"function": f,
"resource": resource.tablename,
"component": component_name,
"fields": headers,
"defaults": self._filterby_defaults(),
"data": items,
}
return json.dumps(data, separators=SEPARATORS)
# -------------------------------------------------------------------------
def parse(self, value, record_id=None):
"""
Validator method, converts the JSON returned from the input
field into a Python object.
@param value: the JSON from the input field.
            @param record_id: unused (for API compatibility with validators)
@return: tuple of (value, error), where value is the converted
JSON, and error the error message if the decoding
fails, otherwise None
"""
# @todo: catch uploads during validation errors
if isinstance(value, str):
try:
value = json.loads(value)
except JSONERRORS:
import sys
error = sys.exc_info()[1]
if hasattr(error, "message"):
error = error.message
else:
error = None
else:
value = None
error = None
return (value, error)
# -------------------------------------------------------------------------
def __call__(self, field, value, **attributes):
"""
Widget method for this form element. Renders a table with
read-rows for existing entries, a variable edit-row to update
existing entries, and an add-row to add new entries. This widget
uses s3.inline_component.js to facilitate manipulation of the
entries.
@param field: the Field for this form element
@param value: the current value for this field
@param attributes: keyword attributes for this widget
"""
T = current.T
settings = current.deployment_settings
options = self.options
if options.readonly is True:
# Render read-only
return self.represent(value)
if value is None:
value = field.default
if isinstance(value, str):
data = json.loads(value)
else:
data = value
value = json.dumps(value, separators=SEPARATORS)
if data is None:
raise SyntaxError("No resource structure information")
if options.multiple is False:
multiple = False
else:
multiple = True
required = options.get("required", False)
# Get the table
resource = self.resource
component_name = data["component"]
component = resource.components[component_name]
table = component.table
# @ToDo: Hide completely if the user is not permitted to read this
# component
formname = self._formname()
fields = data["fields"]
items = data["data"]
# Flag whether there are any rows (at least an add-row) in the widget
has_rows = False
# Add the item rows
item_rows = []
prefix = component.prefix
name = component.name
audit = current.audit
has_permission = current.auth.s3_has_permission
tablename = component.tablename
# Configure the layout
layout = self._layout()
columns = options.get("columns")
if columns:
layout.set_columns(columns, row_actions = multiple)
get_config = current.s3db.get_config
_editable = get_config(tablename, "editable")
if _editable is None:
_editable = True
_deletable = get_config(tablename, "deletable")
if _deletable is None:
_deletable = True
_class = "read-row inline-form"
if not multiple:
# Mark to client-side JS that we should open Edit Row
_class = "%s single" % _class
item = None
for i in range(len(items)):
has_rows = True
item = items[i]
# Get the item record ID
if "_delete" in item and item["_delete"]:
continue
elif "_id" in item:
record_id = item["_id"]
# Check permissions to edit this item
if _editable:
editable = has_permission("update", tablename, record_id)
else:
editable = False
if _deletable:
deletable = has_permission("delete", tablename, record_id)
else:
deletable = False
else:
record_id = None
editable = bool(_editable)
deletable = bool(_deletable)
# Render read-row accordingly
rowname = "%s-%s" % (formname, i)
read_row = self._render_item(table, item, fields,
editable = editable,
deletable = deletable,
readonly = True,
multiple = multiple,
index = i,
layout = layout,
_id = "read-row-%s" % rowname,
_class = _class,
)
if record_id:
audit("read", prefix, name,
record = record_id,
representation = "html",
)
item_rows.append(read_row)
# Add the action rows
action_rows = []
# Edit-row
_class = "edit-row inline-form hide"
if required and has_rows:
_class = "%s required" % _class
if not multiple:
_class = "%s single" % _class
edit_row = self._render_item(table, item, fields,
editable = _editable,
deletable = _deletable,
readonly = False,
multiple = multiple,
index = 0,
layout = layout,
_id = "edit-row-%s" % formname,
_class = _class,
)
action_rows.append(edit_row)
# Add-row
inline_open_add = ""
insertable = get_config(tablename, "insertable")
if insertable is None:
insertable = True
if insertable:
insertable = has_permission("create", tablename)
if insertable:
_class = "add-row inline-form"
explicit_add = options.explicit_add
if not multiple:
explicit_add = False
if has_rows:
# Add Rows not relevant
_class = "%s hide" % _class
else:
# Mark to client-side JS that we should always validate
_class = "%s single" % _class
if required and not has_rows:
explicit_add = False
_class = "%s required" % _class
# Explicit open-action for add-row (optional)
if explicit_add:
# Hide add-row for explicit open-action
_class = "%s hide" % _class
if explicit_add is True:
label = T("Add another")
else:
label = explicit_add
inline_open_add = A(label,
_class = "inline-open-add action-lnk",
)
has_rows = True
add_row = self._render_item(table, None, fields,
editable = True,
deletable = True,
readonly = False,
multiple = multiple,
layout = layout,
_id = "add-row-%s" % formname,
_class = _class,
)
action_rows.append(add_row)
# Empty edit row
empty_row = self._render_item(table, None, fields,
editable = _editable,
deletable = _deletable,
readonly = False,
multiple = multiple,
index = "default",
layout = layout,
_id = "empty-edit-row-%s" % formname,
_class = "empty-row inline-form hide",
)
action_rows.append(empty_row)
# Empty read row
empty_row = self._render_item(table, None, fields,
editable = _editable,
deletable = _deletable,
readonly = True,
multiple = multiple,
index = "none",
layout = layout,
_id = "empty-read-row-%s" % formname,
_class = "empty-row inline-form hide",
)
action_rows.append(empty_row)
# Real input: a hidden text field to store the JSON data
real_input = "%s_%s" % (resource.tablename, field.name)
default = {"_type": "hidden",
"_value": value,
"requires": lambda v: (v, None),
}
attr = StringWidget._attributes(field, default, **attributes)
attr["_class"] = "%s hide" % attr["_class"]
attr["_id"] = real_input
widget = layout.subform(data,
item_rows,
action_rows,
empty = not has_rows,
)
if self.upload:
hidden = DIV(_class="hidden", _style="display:none")
for k, v in self.upload.items():
hidden.append(INPUT(_type = "text",
_id = k,
_name = k,
_value = v,
_style = "display:none",
))
else:
hidden = ""
# Render output HTML
output = DIV(INPUT(**attr),
hidden,
widget,
inline_open_add,
_id = self._formname(separator="-"),
_field = real_input,
_class = "inline-component",
)
# Reset the layout
layout.set_columns(None)
# Script options
js_opts = {"implicitCancelEdit": settings.get_ui_inline_cancel_edit(),
"confirmCancelEdit": s3_str(T("Discard changes?")),
}
script = '''S3.inlineComponentsOpts=%s''' % json.dumps(js_opts)
js_global = current.response.s3.js_global
if script not in js_global:
js_global.append(script)
return output
# -------------------------------------------------------------------------
def represent(self, value):
"""
Read-only representation of this sub-form
@param value: the value returned from extract()
"""
if isinstance(value, str):
data = json.loads(value)
else:
data = value
if data["data"] == []:
# Don't render a subform for NONE
return current.messages["NONE"]
resource = self.resource
component = resource.components[data["component"]]
layout = self._layout()
columns = self.options.get("columns")
if columns:
layout.set_columns(columns, row_actions=False)
fields = data["fields"]
if len(fields) == 1 and self.options.get("render_list", False):
output = layout.render_list(component, data)
else:
output = layout.readonly(component, data)
# Reset the layout
layout.set_columns(None)
return DIV(output,
_id = self._formname(separator = "-"),
_class = "inline-component readonly",
)
# -------------------------------------------------------------------------
def accept(self, form, master_id=None, format=None):
"""
            Post-processes this form element against the POST data of the
            request, and creates/updates/deletes any related records.
@param form: the form
@param master_id: the ID of the master record in the form
@param format: the data format extension (for audit)
"""
# Name of the real input field
fname = self._formname(separator = "_")
options_get = self.options.get
multiple = options_get("multiple", True)
defaults = options_get("default", {})
if fname in form.vars:
# Retrieve the data
try:
data = json.loads(form.vars[fname])
except ValueError:
return False
component_name = data.get("component", None)
if not component_name:
return False
data = data.get("data", None)
if not data:
return False
# Get the component
resource = self.resource
component = resource.components.get(component_name)
if not component:
return False
# Link table handling
link = component.link
if link and options_get("link", True):
# Data are for the link table
actuate_link = False
component = link
else:
# Data are for the component
actuate_link = True
# Table, tablename, prefix and name of the component
prefix = component.prefix
name = component.name
tablename = component.tablename
db = current.db
table = db[tablename]
s3db = current.s3db
auth = current.auth
# Process each item
has_permission = auth.s3_has_permission
audit = current.audit
onaccept = s3db.onaccept
for item in data:
if not "_changed" in item and not "_delete" in item:
# No changes made to this item - skip
continue
delete = item.get("_delete")
values = Storage()
valid = True
if not delete:
# Get the values
for f, d in item.items():
if f[0] != "_" and d and isinstance(d, dict):
field = table[f]
widget = field.widget
if not hasattr(field, "type"):
# Virtual Field
continue
elif field.type == "upload":
# Find, rename and store the uploaded file
rowindex = item.get("_index", None)
if rowindex is not None:
filename = self._store_file(table, f, rowindex)
if filename:
values[f] = filename
elif isinstance(widget, S3Selector):
# Value must be processed by widget post-process
value, error = widget.postprocess(d["value"])
if not error:
values[f] = value
else:
valid = False
break
else:
# Must run through validator again (despite pre-validation)
# in order to post-process widget output properly (e.g. UTC
# offset subtraction)
try:
value, error = s3_validate(table, f, d["value"])
except AttributeError:
continue
if not error:
values[f] = value
else:
valid = False
break
if not valid:
# Skip invalid items
continue
record_id = item.get("_id")
if not record_id:
if delete:
# Item has been added and then removed again,
# so just ignore it
continue
elif not component.multiple or not multiple:
# Do not create a second record in this component
query = (resource._id == master_id) & \
component.get_join()
f = self._filterby_query()
if f is not None:
query &= f
DELETED = current.xml.DELETED
if DELETED in table.fields:
query &= table[DELETED] != True
row = db(query).select(table._id,
limitby = (0, 1),
).first()
if row:
record_id = row[table._id]
if record_id:
# Delete..?
if delete:
authorized = has_permission("delete", tablename, record_id)
if not authorized:
continue
c = s3db.resource(tablename, id=record_id)
# Audit happens inside .delete()
# Use cascade=True so that the deletion gets
# rolled back in case subsequent items fail:
success = c.delete(cascade=True, format="html")
# ...or update?
else:
authorized = has_permission("update", tablename, record_id)
if not authorized:
continue
success = db(table._id == record_id).update(**values)
values[table._id.name] = record_id
# Post-process update
if success:
audit("update", prefix, name,
record=record_id, representation=format)
# Update super entity links
s3db.update_super(table, values)
# Update realm
update_realm = s3db.get_config(table, "update_realm")
if update_realm:
auth.set_realm_entity(table, values,
force_update = True)
# Onaccept
onaccept(table, Storage(vars = values), method = "update")
else:
# Create a new record
authorized = has_permission("create", tablename)
if not authorized:
continue
# Get master record ID
pkey = component.pkey
mastertable = resource.table
if pkey != mastertable._id.name:
master = db(mastertable._id == master_id).select(mastertable._id,
mastertable[pkey],
limitby = (0, 1),
).first()
if not master:
return False
else:
master = Storage({pkey: master_id})
if actuate_link:
# Data are for component => apply component defaults
values = component.get_defaults(master,
defaults = defaults,
data = values,
)
if not actuate_link or not link:
# Add master record ID as linked directly
values[component.fkey] = master[pkey]
else:
# Check whether the component is a link table and
# we're linking to that via something like pr_person
# from hrm_human_resource
fkey = component.fkey
if fkey != "id" and fkey in component.fields and fkey not in values:
if fkey == "pe_id" and pkey == "person_id":
# Need to lookup the pe_id manually (bad that we need this
# special case, must be a better way but this works for now)
ptable = s3db.pr_person
person = db(ptable.id == master[pkey]).select(ptable.pe_id,
limitby = (0, 1),
).first()
try:
values["pe_id"] = person.pe_id
except TypeError:
current.log.debug("S3Forms: Cannot find person with ID: %s" % master[pkey])
elif resource.tablename == "pr_person" and \
fkey == "case_id" and pkey == "id":
# Using dvr_case as a link between pr_person & e.g. project_activity
# @ToDo: Work out generalisation & move to option if-possible
ltable = component.link.table
link_record = db(ltable.person_id == master[pkey]).select(ltable.id,
limitby = (0, 1),
).first()
try:
values[fkey] = link_record[pkey]
except TypeError:
current.log.debug("S3Forms: Cannot find case for person ID: %s" % master[pkey])
else:
values[fkey] = master[pkey]
# Create the new record
# use _table in case we are using an alias
try:
record_id = component._table.insert(**values)
except:
current.log.debug("S3Forms: Cannot insert values %s into table: %s" % (values, component._table))
raise
# Post-process create
if record_id:
# Ensure we're using the real table, not an alias
table = db[tablename]
# Audit
audit("create", prefix, name,
record = record_id,
representation = format,
)
# Add record_id
values[table._id.name] = record_id
# Update super entity link
s3db.update_super(table, values)
# Update link table
if link and actuate_link and \
options_get("update_link", True):
link.update_link(master, values)
# Set record owner
auth.s3_set_record_owner(table, record_id)
# onaccept
subform = Storage(vars = Storage(values))
onaccept(table, subform, method="create")
# Success
return True
else:
return False
# -------------------------------------------------------------------------
# Utility methods
# -------------------------------------------------------------------------
def _formname(self, separator=None):
"""
Generate a string representing the formname
            @param separator: separator to insert between prefix and name
"""
if separator:
return "%s%s%s%s" % (self.prefix,
separator,
self.alias,
self.selector,
)
else:
return "%s%s" % (self.alias, self.selector)
# -------------------------------------------------------------------------
def _layout(self):
""" Get the current layout """
layout = self.options.layout
if not layout:
layout = current.deployment_settings.get_ui_inline_component_layout()
elif isinstance(layout, type):
layout = layout()
return layout
# -------------------------------------------------------------------------
def _render_item(self,
table,
item,
fields,
readonly = True,
editable = False,
deletable = False,
multiple = True,
index = "none",
layout = None,
**attributes):
"""
Render a read- or edit-row.
@param table: the database table
@param item: the data
@param fields: the fields to render (list of strings)
@param readonly: render a read-row (otherwise edit-row)
@param editable: whether the record can be edited
@param deletable: whether the record can be deleted
@param multiple: whether multiple records can be added
@param index: the row index
@param layout: the subform layout (S3SQLSubFormLayout)
@param attributes: HTML attributes for the row
"""
s3 = current.response.s3
rowtype = "read" if readonly else "edit"
pkey = table._id.name
data = {}
formfields = []
formname = self._formname()
for f in fields:
# Construct a row-specific field name
fname = f["name"]
idxname = "%s_i_%s_%s_%s" % (formname, fname, rowtype, index)
# Parent and caller for add-popup
if not readonly:
# Use unaliased name to avoid need to create additional controllers
parent = original_tablename(table).split("_", 1)[1]
caller = "sub_%s_%s" % (formname, idxname)
popup = {"parent": parent,
"caller": caller,
}
else:
popup = None
# Custom label
label = f.get("label", DEFAULT)
# Use S3UploadWidget for upload fields
if str(table[fname].type) == "upload":
widget = S3UploadWidget.widget
else:
widget = DEFAULT
# Get a Field instance for SQLFORM.factory
formfield = self._rename_field(table[fname],
idxname,
comments = False,
label = label,
popup = popup,
skip_post_validation = True,
widget = widget,
)
# Reduced options set?
if "filterby" in self.options:
options = self._filterby_options(fname)
if options:
if len(options) < 2:
requires = IS_IN_SET(options, zero=None)
else:
requires = IS_IN_SET(options)
formfield.requires = SKIP_POST_VALIDATION(requires)
# Get filterby-default
filterby_defaults = self._filterby_defaults()
if filterby_defaults and fname in filterby_defaults:
default = filterby_defaults[fname]["value"]
formfield.default = default
# Add the data for this field (for existing rows)
if index is not None and item and fname in item:
if formfield.type == "upload":
filename = item[fname]["value"]
if current.request.env.request_method == "POST":
if "_index" in item and item.get("_changed", False):
rowindex = item["_index"]
filename = self._store_file(table, fname, rowindex)
data[idxname] = filename
else:
value = item[fname]["value"]
if type(value) is str:
value = s3_str(value)
widget = formfield.widget
if isinstance(widget, S3Selector):
# Use the widget parser to get at the selected ID
value, error = widget.parse(value).get("id"), None
else:
# Use the validator to get at the original value
value, error = s3_validate(table, fname, value)
if error:
value = None
data[idxname] = value
formfields.append(formfield)
if not data:
data = None
elif pkey not in data:
data[pkey] = None
# Render the subform
subform_name = "sub_%s" % formname
rowstyle = layout.rowstyle_read if readonly else layout.rowstyle
subform = SQLFORM.factory(*formfields,
record = data,
showid = False,
formstyle = rowstyle,
upload = s3.download_url,
readonly = readonly,
table_name = subform_name,
separator = ":",
submit = False,
buttons = [],
)
subform = subform[0]
# Retain any CSS classes added by the layout
subform_class = subform["_class"]
subform.update(**attributes)
if subform_class:
subform.add_class(subform_class)
if multiple:
# Render row actions
layout.actions(subform,
formname,
index,
item = item,
readonly = readonly,
editable = editable,
deletable = deletable,
)
return subform
# -------------------------------------------------------------------------
def _filterby_query(self):
"""
            Render the filterby-options as a query to apply when retrieving
            the existing rows in this inline-component
"""
filterby = self.options["filterby"]
if not filterby:
return None
if not isinstance(filterby, (list, tuple)):
filterby = [filterby]
component = self.resource.components[self.selector]
table = component.table
query = None
for f in filterby:
fieldname = f["field"]
if fieldname not in table.fields:
continue
field = table[fieldname]
if "options" in f:
options = f["options"]
else:
continue
if "invert" in f:
invert = f["invert"]
else:
invert = False
if not isinstance(options, (list, tuple)):
if invert:
q = (field != options)
else:
q = (field == options)
else:
if invert:
q = (~(field.belongs(options)))
else:
q = (field.belongs(options))
if query is None:
query = q
else:
query &= q
return query
# -------------------------------------------------------------------------
def _filterby_defaults(self):
"""
Render the defaults for this inline-component as a dict
for the real-input JSON
"""
filterby = self.options.get("filterby")
if filterby is None:
return None
if not isinstance(filterby, (list, tuple)):
filterby = [filterby]
component = self.resource.components[self.selector]
table = component.table
defaults = dict()
for f in filterby:
fieldname = f["field"]
if fieldname not in table.fields:
continue
if "default" in f:
default = f["default"]
elif "options" in f:
options = f["options"]
if "invert" in f and f["invert"]:
continue
if isinstance(options, (list, tuple)):
if len(options) != 1:
continue
else:
default = options[0]
else:
default = options
else:
continue
if default is not None:
defaults[fieldname] = {"value": default}
return defaults
# -------------------------------------------------------------------------
def _filterby_options(self, fieldname):
"""
Re-render the options list for a field if there is a
filterby-restriction.
@param fieldname: the name of the field
"""
component = self.resource.components[self.selector]
table = component.table
if fieldname not in table.fields:
return None
field = table[fieldname]
filterby = self.options["filterby"]
if filterby is None:
return None
if not isinstance(filterby, (list, tuple)):
filterby = [filterby]
filter_fields = dict((f["field"], f) for f in filterby)
if fieldname not in filter_fields:
return None
filterby = filter_fields[fieldname]
if "options" not in filterby:
return None
# Get the options list for the original validator
requires = field.requires
if not isinstance(requires, (list, tuple)):
requires = [requires]
if requires:
r = requires[0]
if isinstance(r, IS_EMPTY_OR):
#empty = True
r = r.other
# Currently only supporting IS_IN_SET
if not isinstance(r, IS_IN_SET):
return None
else:
return None
r_opts = r.options()
# Get the filter options
options = filterby["options"]
if not isinstance(options, (list, tuple)):
options = [options]
subset = []
if "invert" in filterby:
invert = filterby["invert"]
else:
invert = False
# Compute reduced options list
for o in r_opts:
if invert:
if isinstance(o, (list, tuple)):
if o[0] not in options:
subset.append(o)
elif isinstance(r_opts, dict):
if o not in options:
subset.append((o, r_opts[o]))
elif o not in options:
subset.append(o)
else:
if isinstance(o, (list, tuple)):
if o[0] in options:
subset.append(o)
elif isinstance(r_opts, dict):
if o in options:
subset.append((o, r_opts[o]))
elif o in options:
subset.append(o)
return subset
# -------------------------------------------------------------------------
def _store_file(self, table, fieldname, rowindex):
"""
            Find, rename and store an uploaded file, and return its
            new pathname
"""
field = table[fieldname]
formname = self._formname()
upload = "upload_%s_%s_%s" % (formname, fieldname, rowindex)
post_vars = current.request.post_vars
if upload in post_vars:
f = post_vars[upload]
if hasattr(f, "file"):
# Newly uploaded file (FieldStorage)
(sfile, ofilename) = (f.file, f.filename)
nfilename = field.store(sfile,
ofilename,
field.uploadfolder)
self.upload[upload] = nfilename
return nfilename
elif isinstance(f, str):
# Previously uploaded file
return f
return None
# =============================================================================
class S3SQLInlineLink(S3SQLInlineComponent):
"""
Subform to edit link table entries for the master record
Constructor options:
** Common options:
readonly..........True|False......render read-only always
multiple..........True|False......allow selection of multiple
options (default True)
widget............string..........which widget to use, one of:
- multiselect (default)
- groupedopts (default when cols is specified)
- hierarchy (requires hierarchical lookup-table)
- cascade (requires hierarchical lookup-table)
render_list.......True|False......in read-only mode, render HTML
list rather than comma-separated
strings (default False)
** Options for groupedopts widget:
cols..............integer.........number of columns for grouped
options (default: None)
orientation.......string..........orientation for grouped options
order, one of:
- cols
- rows
size..............integer.........maximum number of items per group
in grouped options, None to disable
grouping
sort..............True|False......sort grouped options (always True
when grouping, i.e. size!=None)
help_field........string..........additional field in the look-up
table to render as tooltip for
grouped options
table.............True|False......render grouped options as HTML
TABLE rather than nested DIVs
(default True)
** Options for multi-select widget:
header............True|False......multi-select to show a header with
bulk-select options and optional
search-field
search............True|False......show the search-field in the header
selectedList......integer.........how many items to show on multi-select
button before collapsing into number
noneSelectedText..string..........placeholder text on multi-select button
columns...........integer.........Foundation column-width for the
widget (for custom forms)
create............dict............Options to create a new record {"c": "controller",
"f": "function",
"label": "label",
"parent": "parent", (optional: which function to lookup options from)
"child": "child", (optional: which field to lookup options for)
}
** Options-filtering:
- multiselect and groupedopts only
- for hierarchy and cascade widgets, use the "filter" option
requires..........Validator.......validator to determine the
selectable options (defaults to
field validator)
filterby..........field selector..filter look-up options by this field
(can be a field in the look-up table
itself or in another table linked to it)
options...........value|list......filter for these values, or:
match.............field selector..lookup the filter value from this
field (can be a field in the master
table, or in linked table)
** Options for hierarchy and cascade widgets:
levels............list............ordered list of labels for hierarchy
levels (top-down order), to override
the lookup-table's "hierarchy_levels"
setting, cascade-widget only
represent.........callback........representation method for hierarchy
nodes (defaults to field represent)
leafonly..........True|False......only leaf nodes can be selected
cascade...........True|False......automatically select the entire branch
when a parent node is newly selected;
with multiple=False, this will
auto-select single child options
(default True when leafonly=True)
filter............resource query..filter expression to filter the
selectable options
"""
prefix = "link"
# -------------------------------------------------------------------------
def extract(self, resource, record_id):
"""
Get all existing links for record_id.
@param resource: the resource the record belongs to
@param record_id: the record ID
@return: list of component record IDs this record is
linked to via the link table
"""
self.resource = resource
component, link = self.get_link()
# Customise resources
from .s3rest import S3Request
r = S3Request(resource.prefix,
resource.name,
# Current request args/vars could be in a different
# resource context, so must override them here:
args = [],
get_vars = {},
)
customise_resource = current.deployment_settings.customise_resource
for tablename in (component.tablename, link.tablename):
customise = customise_resource(tablename)
if customise:
customise(r, tablename)
self.initialized = True
if record_id:
rkey = component.rkey
rows = link.select([rkey], as_rows=True)
if rows:
rkey = str(link.table[rkey])
values = [row[rkey] for row in rows]
else:
values = []
else:
# Use default
values = [link.table[self.options.field].default]
return values
# -------------------------------------------------------------------------
def __call__(self, field, value, **attributes):
"""
Widget renderer, currently supports multiselect (default),
hierarchy and groupedopts widgets.
@param field: the input field
@param value: the value to populate the widget
@param attributes: attributes for the widget
@return: the widget
"""
options = self.options
options_get = options.get
component, link = self.get_link()
has_permission = current.auth.s3_has_permission
ltablename = link.tablename
# User must have permission to create and delete
# link table entries (which is what this widget is about):
if options.readonly is True or \
not has_permission("create", ltablename) or \
not has_permission("delete", ltablename):
# Render read-only
return self.represent(value)
multiple = options_get("multiple", True)
options["multiple"] = multiple
# Field dummy
kfield = link.table[component.rkey]
dummy_field = Storage(name = field.name,
type = kfield.type,
label = options.label or kfield.label,
represent = kfield.represent,
)
# Widget type
widget = options_get("widget")
if widget not in ("hierarchy", "cascade"):
requires = options_get("requires")
if requires is None:
# Get the selectable entries for the widget and construct
# a validator from it
opts = self.get_options()
zero = options_get("zero", XML(" "))
if multiple or zero is not None:
# Drop the empty option
# - multiple does not need one (must de-select all instead)
# - otherwise, it shall be replaced by the zero-option
opts = {k: v for k, v in opts.items() if k != ""}
requires = IS_IN_SET(opts,
multiple = multiple,
zero = None if multiple else zero,
sort = options.get("sort", True),
)
if zero is not None:
# Allow deselecting all (or single: selection of explicit none)
# NB this is the default, unless zero is explicitly set to None
requires = IS_EMPTY_OR(requires)
dummy_field.requires = requires
# Helper to extract widget options
widget_opts = lambda keys: {k: v for k, v in options.items() if k in keys}
# Instantiate the widget
if widget == "groupedopts" or not widget and "cols" in options:
from .s3widgets import S3GroupedOptionsWidget
w_opts = widget_opts(("cols",
"help_field",
"multiple",
"orientation",
"size",
"sort",
"table",
))
w = S3GroupedOptionsWidget(**w_opts)
elif widget == "hierarchy":
from .s3widgets import S3HierarchyWidget
w_opts = widget_opts(("multiple",
"filter",
"leafonly",
"cascade",
"represent",
))
w_opts["lookup"] = component.tablename
w = S3HierarchyWidget(**w_opts)
elif widget == "cascade":
from .s3widgets import S3CascadeSelectWidget
w_opts = widget_opts(("levels",
"multiple",
"filter",
"leafonly",
"cascade",
"represent",
))
w_opts["lookup"] = component.tablename
w = S3CascadeSelectWidget(**w_opts)
else:
# Default to multiselect
from .s3widgets import S3MultiSelectWidget
w_opts = widget_opts(("multiple",
"search",
"header",
"selectedList",
"noneSelectedText",
"columns",
"create",
))
w = S3MultiSelectWidget(**w_opts)
# Render the widget
attr = dict(attributes)
attr["_id"] = field.name
if not link.table[options.field].writable:
_class = attr.get("_class", None)
if _class:
attr["_class"] = "%s hide" % _class
else:
attr["_class"] = "hide"
widget = w(dummy_field, value, **attr)
if hasattr(widget, "add_class"):
widget.add_class("inline-link")
# Append the attached script to jquery_ready
script = options_get("script")
if script:
current.response.s3.jquery_ready.append(script)
return widget
# -------------------------------------------------------------------------
def validate(self, form):
"""
Validate this link, currently only checking whether it has
a value when required=True
@param form: the form
"""
required = self.options.required
if not required:
return
fname = self._formname(separator = "_")
values = form.vars.get(fname)
if not values:
error = current.T("Value Required") \
if required is True else required
form.errors[fname] = error
# -------------------------------------------------------------------------
def accept(self, form, master_id=None, format=None):
"""
            Post-processes this subform element against the POST data,
            and creates/updates/deletes any related records.
@param form: the master form
@param master_id: the ID of the master record in the form
@param format: the data format extension (for audit)
@todo: implement audit
"""
s3db = current.s3db
# Name of the real input field
fname = self._formname(separator = "_")
resource = self.resource
success = False
if fname in form.vars:
# Extract the new values from the form
values = form.vars[fname]
if values is None:
values = []
elif not isinstance(values, (list, tuple, set)):
values = [values]
values = set(str(v) for v in values)
# Get the link table
component, link = self.get_link()
# Get the master identity (pkey)
pkey = component.pkey
if pkey == resource._id.name:
master = {pkey: master_id}
else:
# Different pkey (e.g. super-key) => reload the master
master = current.db(resource._id == master_id).select(resource.table[pkey],
limitby = (0, 1),
).first()
if master:
# Find existing links
query = FS(component.lkey) == master[pkey]
lresource = s3db.resource(link.tablename,
filter = query,
)
rows = lresource.select([component.rkey], as_rows=True)
# Determine which to delete and which to add
if rows:
rkey = link.table[component.rkey]
current_ids = set(str(row[rkey]) for row in rows)
delete = current_ids - values
insert = values - current_ids
else:
delete = None
insert = values
# Delete links which are no longer used
# @todo: apply filterby to only delete within the subset?
if delete:
query &= FS(component.rkey).belongs(delete)
lresource = s3db.resource(link.tablename,
filter = query,
)
lresource.delete()
# Insert new links
insert.discard("")
if insert:
# Insert new links
for record_id in insert:
record = {component.fkey: record_id}
link.update_link(master, record)
success = True
return success
# -------------------------------------------------------------------------
def represent(self, value):
"""
Read-only representation of this subform.
@param value: the value as returned from extract()
@return: the read-only representation
"""
component, link = self.get_link()
# Use the represent of rkey if it supports bulk, otherwise
# instantiate an S3Represent from scratch:
rkey = link.table[component.rkey]
represent = rkey.represent
if not hasattr(represent, "bulk"):
# Pick the first field from the list that is available:
lookup_field = None
for fname in ("name", "tag"):
if fname in component.fields:
lookup_field = fname
break
from .s3fields import S3Represent
represent = S3Represent(lookup = component.tablename,
fields = [lookup_field],
)
# Represent all values
if isinstance(value, (list, tuple, set)):
result = represent.bulk(list(value))
if None not in value:
result.pop(None, None)
else:
result = represent.bulk([value])
# Sort them
def labels_sorted(labels):
try:
s = sorted(labels)
except TypeError:
if any(isinstance(l, DIV) for l in labels):
# Don't sort labels if they contain markup
s = labels
else:
s = sorted(s3_str(l) if l is not None else "-" for l in labels)
return s
labels = labels_sorted(result.values())
if self.options.get("render_list"):
if value is None or value == [None]:
# Don't render as list if empty
return current.messages.NONE
else:
# Render as HTML list
return UL([LI(l) for l in labels],
_class = "s3-inline-link",
)
else:
# Render as comma-separated list of strings
# (using TAG rather than join() to support HTML labels)
return TAG[""](list(chain.from_iterable([[l, ", "]
for l in labels]))[:-1])
# -------------------------------------------------------------------------
def get_options(self):
"""
Get the options for the widget
@return: dict {value: representation} of options
"""
resource = self.resource
component, link = self.get_link()
rkey = link.table[component.rkey]
# Lookup rkey options from rkey validator
opts = []
requires = rkey.requires
if not isinstance(requires, (list, tuple)):
requires = [requires]
if requires:
validator = requires[0]
if isinstance(validator, IS_EMPTY_OR):
validator = validator.other
try:
opts = validator.options()
except:
pass
# Filter these options?
widget_opts_get = self.options.get
filterby = widget_opts_get("filterby")
filteropts = widget_opts_get("options")
filterexpr = widget_opts_get("match")
if filterby and \
(filteropts is not None or filterexpr and resource._rows):
# filterby is a field selector for the component
# that shall match certain conditions
filter_selector = FS(filterby)
filter_query = None
if filteropts is not None:
# filterby-field shall match one of the given filteropts
if isinstance(filteropts, (list, tuple, set)):
filter_query = (filter_selector.belongs(list(filteropts)))
else:
filter_query = (filter_selector == filteropts)
elif filterexpr:
# filterby-field shall match one of the values for the
# filterexpr-field of the master record
rfield = resource.resolve_selector(filterexpr)
colname = rfield.colname
rows = resource.select([filterexpr], as_rows=True)
values = set(row[colname] for row in rows)
values.discard(None)
if values:
filter_query = (filter_selector.belongs(values)) | \
(filter_selector == None)
# Select the filtered component rows
filter_resource = current.s3db.resource(component.tablename,
filter = filter_query,
)
rows = filter_resource.select(["id"], as_rows=True)
filtered_opts = []
values = set(str(row[component.table._id]) for row in rows)
for opt in opts:
if str(opt[0]) in values:
filtered_opts.append(opt)
opts = filtered_opts
return dict(opts)
# -------------------------------------------------------------------------
def get_link(self):
"""
Find the target component and its linktable
@return: tuple of S3Resource instances (component, link)
"""
selector = self.selector
try:
component = self.resource.components[selector]
except KeyError:
raise SyntaxError("Undefined component: %s" % selector)
link = component.link
if not link:
# @todo: better error message
raise SyntaxError("No linktable for %s" % selector)
return (component, link)
# =============================================================================
class S3WithIntro(S3SQLFormElement):
"""
Wrapper for widgets to add an introductory text above them
"""
def __init__(self, widget, intro=None, cmsxml=False):
"""
Constructor
@param widget: the widget
@param intro: the intro, string|DIV|tuple,
if specified as tuple (module, resource, name),
the intro text will be looked up from CMS
@param cmsxml: do not XML-escape CMS contents, should only
be used with safe origin content (=normally never)
"""
self.widget = widget
self.intro = intro
self.cmsxml = cmsxml
# -------------------------------------------------------------------------
def resolve(self, resource):
"""
Override S3SQLFormElement.resolve() to map to widget
@param resource: the S3Resource to resolve this form element
against
"""
resolved = self.widget.resolve(resource)
field = resolved[2]
if field:
field.widget = self
return resolved
# -------------------------------------------------------------------------
def __getattr__(self, key):
"""
Attribute access => map to widget
@param key: the attribute key
"""
if key in self.__dict__:
return self.__dict__[key]
sentinel = object()
value = getattr(self.widget, key, sentinel)
if value is sentinel:
raise AttributeError
return value
# -------------------------------------------------------------------------
def __call__(self, *args, **kwargs):
"""
Widget renderer => map to widget, then add intro
"""
w = self.widget(*args, **kwargs)
intro = self.intro
if isinstance(intro, tuple):
if len(intro) == 3 and current.deployment_settings.has_module("cms"):
intro = self.get_cms_intro(intro)
else:
intro = None
if intro:
return TAG[""](DIV(intro,
_class = "s3-widget-intro",
), w)
else:
return w
# -------------------------------------------------------------------------
def get_cms_intro(self, intro):
"""
Get intro from CMS
@param intro: the intro spec as tuple (module, resource, postname)
"""
# Get intro text from CMS
db = current.db
s3db = current.s3db
ctable = s3db.cms_post
ltable = s3db.cms_post_module
join = ltable.on((ltable.post_id == ctable.id) & \
(ltable.module == intro[0]) & \
(ltable.resource == intro[1]) & \
(ltable.deleted == False))
query = (ctable.name == intro[2]) & \
(ctable.deleted == False)
row = db(query).select(ctable.body,
join = join,
cache = s3db.cache,
limitby = (0, 1),
).first()
if not row:
return None
return XML(row.body) if self.cmsxml else row.body
# END =========================================================================
| 37.73247 | 147 | 0.453192 |
4a22e430dee519a00d251ae75e9a919bc93891e3 | 430 | py | Python | src/foxdot/sandbox/180619_1908_compo_014.py | Neko250/aisthesis | 1d4a2c3070d10596c28b25ea2170523583e7eff0 | ["Apache-2.0"] | 4 | 2018-06-29T18:39:34.000Z | 2021-06-20T16:44:29.000Z | src/foxdot/sandbox/180619_1908_compo_014.py | Neko250/aisthesis | 1d4a2c3070d10596c28b25ea2170523583e7eff0 | ["Apache-2.0"] | null | null | null | src/foxdot/sandbox/180619_1908_compo_014.py | Neko250/aisthesis | 1d4a2c3070d10596c28b25ea2170523583e7eff0 | ["Apache-2.0"] | null | null | null | Scale.default = "minor"
p1.reset() >> play('X(---[--]---[---])', amp=.5).stop()
p2.reset() >> play('V O ', lpf=800, amp=.25).stop()
p3.reset() >> play('{ ppP[pP][Pp]}', amp=.5, sample=PRand(7), rate=PRand([.5,1,2]), shape=PWhite(0,.5))
s1.reset() >> space(P[0,2,[3,5],4]+(0,var([7,12],8),const(5)), dur=4, oct=4, delay=(0,.5,.25), vib=.2, amp=.5)
a1.reset() >> ambi(P[0,2]+(0,7), dur=8, oct=3, chop=24)
Group(p1,p3,a1).stop()
| 39.090909 | 110 | 0.525581 |
4a22e4d73b67256db494f25462a715479973b5bf | 4,244 | py | Python | allauth/socialaccount/providers/battlenet/views.py | s-tatus/django-allauth | 25fe632acf12571ae2ac9e692e8890019d5a6e7b | ["MIT"] | 2 | 2018-02-12T09:54:33.000Z | 2019-03-31T14:32:32.000Z | allauth/socialaccount/providers/battlenet/views.py | s-tatus/django-allauth | 25fe632acf12571ae2ac9e692e8890019d5a6e7b | ["MIT"] | 15 | 2020-06-05T19:26:26.000Z | 2022-03-11T23:33:53.000Z | allauth/socialaccount/providers/battlenet/views.py | s-tatus/django-allauth | 25fe632acf12571ae2ac9e692e8890019d5a6e7b | ["MIT"] | 1 | 2021-07-24T12:47:00.000Z | 2021-07-24T12:47:00.000Z | """
OAuth2 Adapter for Battle.net
Resources:
* Battle.net OAuth2 documentation:
https://dev.battle.net/docs/read/oauth
* Battle.net API documentation:
https://dev.battle.net/io-docs
* Original announcement:
https://us.battle.net/en/forum/topic/13979297799
* The Battle.net API forum:
https://us.battle.net/en/forum/15051532/
"""
import requests
from allauth.socialaccount.providers.oauth2.client import OAuth2Error
from allauth.socialaccount.providers.oauth2.views import (
OAuth2Adapter,
OAuth2CallbackView,
OAuth2LoginView,
)
from .provider import BattleNetProvider
def _check_errors(response):
try:
data = response.json()
except ValueError: # JSONDecodeError on py3
raise OAuth2Error(
"Invalid JSON from Battle.net API: %r" % (response.text)
)
if response.status_code >= 400 or "error" in data:
# For errors, we expect the following format:
# {"error": "error_name", "error_description": "Oops!"}
# For example, if the token is not valid, we will get:
# {
# "error": "invalid_token",
# "error_description": "Invalid access token: abcdef123456"
# }
# For the profile API, this may also look like the following:
# {"code": 403, "type": "Forbidden", "detail": "Account Inactive"}
error = data.get("error", "") or data.get("type", "")
desc = data.get("error_description", "") or data.get("detail", "")
raise OAuth2Error("Battle.net error: %s (%s)" % (error, desc))
# The expected output from the API follows this format:
# {"id": 12345, "battletag": "Example#12345"}
# The battletag is optional.
if "id" not in data:
# If the id is not present, the output is not usable (no UID)
raise OAuth2Error("Invalid data from Battle.net API: %r" % (data))
return data
class BattleNetOAuth2Adapter(OAuth2Adapter):
"""
OAuth2 adapter for Battle.net
https://dev.battle.net/docs/read/oauth
Region is set to us by default, but can be overridden with the
`region` GET parameter when performing a login.
Can be any of eu, us, kr, sea, tw or cn
"""
provider_id = BattleNetProvider.id
valid_regions = ("us", "eu", "kr", "sea", "tw", "cn")
@property
def battlenet_region(self):
region = self.request.GET.get("region", "").lower()
if region == "sea":
# South-East Asia uses the same region as US everywhere
return "us"
if region in self.valid_regions:
return region
return "us"
@property
def battlenet_base_url(self):
region = self.battlenet_region
if region == "cn":
return "https://www.battlenet.com.cn"
return "https://%s.battle.net" % (region)
@property
def battlenet_api_url(self):
if self.battlenet_region == "cn":
return "https://api.battlenet.com.cn"
return "https://%s.api.battle.net" % (self.battlenet_region)
@property
def access_token_url(self):
return self.battlenet_base_url + "/oauth/token"
@property
def authorize_url(self):
return self.battlenet_base_url + "/oauth/authorize"
@property
def profile_url(self):
return self.battlenet_api_url + "/account/user"
def complete_login(self, request, app, token, **kwargs):
params = {"access_token": token.token}
response = requests.get(self.profile_url, params=params)
data = _check_errors(response)
# Add the region to the data so that we can have it in `extra_data`.
data["region"] = self.battlenet_region
return self.get_provider().sociallogin_from_response(request, data)
def get_callback_url(self, request, app):
r = super(BattleNetOAuth2Adapter, self).get_callback_url(request, app)
region = request.GET.get("region", "").lower()
# Pass the region down to the callback URL if we specified it
if region and region in self.valid_regions:
r += "?region=%s" % (region)
return r
oauth2_login = OAuth2LoginView.adapter_view(BattleNetOAuth2Adapter)
oauth2_callback = OAuth2CallbackView.adapter_view(BattleNetOAuth2Adapter)
| 33.15625 | 78 | 0.645853 |
4a22e5233f054615082e480f1b9008287eea36a9 | 3,904 | py | Python | demo.py | wtyhub/University1652-Baseline | 26becc2b73b74bd9e7c7de31f9d7f6baebbd64c8 | ["MIT"] | 1 | 2021-06-03T03:06:52.000Z | 2021-06-03T03:06:52.000Z | demo.py | gold-pipe/University1652-Baseline | bad3c7555decf8e5213bfdda85dc317057ff3cc2 | ["MIT"] | null | null | null | demo.py | gold-pipe/University1652-Baseline | bad3c7555decf8e5213bfdda85dc317057ff3cc2 | ["MIT"] | null | null | null | import argparse
import scipy.io
import torch
import numpy as np
import os
from torchvision import datasets
import matplotlib
#matplotlib.use('agg')
import matplotlib.pyplot as plt
#######################################################################
# Evaluate
parser = argparse.ArgumentParser(description='Demo')
parser.add_argument('--query_index', default=0, type=int, help='test_image_index')
parser.add_argument('--test_dir',default='./data/test',type=str, help='./test_data')
opts = parser.parse_args()
#gallery_name = 'gallery_satellite'
#query_name = 'query_drone'
gallery_name = 'gallery_drone'
query_name = 'query_satellite'
data_dir = opts.test_dir
image_datasets = {x: datasets.ImageFolder( os.path.join(data_dir,x) ) for x in [gallery_name, query_name]}
#####################################################################
#Show result
def imshow(path, title=None):
"""Imshow for Tensor."""
im = plt.imread(path)
plt.imshow(im)
if title is not None:
plt.title(title)
plt.pause(0.1) # pause a bit so that plots are updated
######################################################################
result = scipy.io.loadmat('pytorch_result.mat')
query_feature = torch.FloatTensor(result['query_f'])
query_label = result['query_label'][0]
gallery_feature = torch.FloatTensor(result['gallery_f'])
gallery_label = result['gallery_label'][0]
multi = os.path.isfile('multi_query.mat')
if multi:
m_result = scipy.io.loadmat('multi_query.mat')
mquery_feature = torch.FloatTensor(m_result['mquery_f'])
mquery_cam = m_result['mquery_cam'][0]
mquery_label = m_result['mquery_label'][0]
mquery_feature = mquery_feature.cuda()
query_feature = query_feature.cuda()
gallery_feature = gallery_feature.cuda()
#######################################################################
# sort the images
def sort_img(qf, ql, gf, gl):
query = qf.view(-1,1)
# print(query.shape)
score = torch.mm(gf,query)
score = score.squeeze(1).cpu()
score = score.numpy()
# predict index
index = np.argsort(score) #from small to large
index = index[::-1]
# index = index[0:2000]
# good index
query_index = np.argwhere(gl==ql)
#good_index = np.setdiff1d(query_index, camera_index, assume_unique=True)
junk_index = np.argwhere(gl==-1)
mask = np.in1d(index, junk_index, invert=True)
index = index[mask]
return index
i = opts.query_index
index = sort_img(query_feature[i],query_label[i],gallery_feature,gallery_label)
########################################################################
# Visualize the rank result
query_path, _ = image_datasets[query_name].imgs[i]
query_label = query_label[i]
print(query_path)
print('Top 10 images are as follow:')
save_folder = 'image_show/%02d'%opts.query_index
if not os.path.isdir(save_folder):
os.mkdir(save_folder)
os.system('cp %s %s/query.jpg'%(query_path, save_folder))
try: # Visualize Ranking Result
# Graphical User Interface is needed
fig = plt.figure(figsize=(16,4))
ax = plt.subplot(1,11,1)
ax.axis('off')
imshow(query_path,'query')
for i in range(10):
ax = plt.subplot(1,11,i+2)
ax.axis('off')
img_path, _ = image_datasets[gallery_name].imgs[index[i]]
label = gallery_label[index[i]]
print(label)
imshow(img_path)
os.system('cp %s %s/s%02d.jpg'%(img_path, save_folder, i))
if label == query_label:
ax.set_title('%d'%(i+1), color='green')
else:
ax.set_title('%d'%(i+1), color='red')
print(img_path)
#plt.pause(100) # pause a bit so that plots are updated
except RuntimeError:
for i in range(10):
img_path = image_datasets.imgs[index[i]]
print(img_path[0])
print('If you want to see the visualization of the ranking result, graphical user interface is needed.')
fig.savefig("show.png")
| 32.533333 | 108 | 0.626793 |
4a22e53d19b53b7e5ab4f9092eaba4e78873017d | 19,952 | py | Python | synapse/tests/test_lib_stormhttp.py | vishalbelsare/synapse | c0f0d318cc5d3098b3a8d80222e2b0b1d19c5740 | ["Apache-2.0"] | null | null | null | synapse/tests/test_lib_stormhttp.py | vishalbelsare/synapse | c0f0d318cc5d3098b3a8d80222e2b0b1d19c5740 | ["Apache-2.0"] | null | null | null | synapse/tests/test_lib_stormhttp.py | vishalbelsare/synapse | c0f0d318cc5d3098b3a8d80222e2b0b1d19c5740 | ["Apache-2.0"] | null | null | null | import os
import json
import shutil
import synapse.exc as s_exc
import synapse.common as s_common
import synapse.lib.certdir as s_certdir
import synapse.lib.httpapi as s_httpapi
import synapse.tests.utils as s_test
class TstWebSock(s_httpapi.WebSocket):
def initialize(self):
pass
async def open(self):
await self.sendJsonMesg({'hi': 'woot', 'headers': dict(self.request.headers)})
async def on_message(self, byts):
mesg = json.loads(byts)
await self.sendJsonMesg(('echo', mesg), binary=True)
async def sendJsonMesg(self, item, binary=False):
byts = json.dumps(item)
await self.write_message(byts, binary=binary)
class HttpNotJson(s_httpapi.Handler):
async def get(self):
self.write(b"{'not':'json!'}")
class StormHttpTest(s_test.SynTest):
async def test_storm_http_get(self):
async with self.getTestCore() as core:
addr, port = await core.addHttpsPort(0)
root = await core.auth.getUserByName('root')
await root.setPasswd('root')
core.addHttpApi('/api/v0/test', s_test.HttpReflector, {'cell': core})
core.addHttpApi('/api/v0/notjson', HttpNotJson, {'cell': core})
url = f'https://root:[email protected]:{port}/api/v0/test'
opts = {'vars': {'url': url}}
# Header and params as dict
q = '''
$params=$lib.dict(key=valu, foo=bar, baz=$lib.false)
$hdr = $lib.dict(true=$lib.true)
$hdr."User-Agent"="Storm HTTP Stuff"
$k = (0)
$hdr.$k="Why"
$resp = $lib.inet.http.get($url, headers=$hdr, params=$params, ssl_verify=$lib.false)
return ( $resp.json() )
'''
resp = await core.callStorm(q, opts=opts)
data = resp.get('result')
self.eq(data.get('params'), {'key': ('valu',), 'foo': ('bar',), 'baz': ('False',)})
self.eq(data.get('headers').get('User-Agent'), 'Storm HTTP Stuff')
self.eq(data.get('headers').get('0'), 'Why')
self.eq(data.get('headers').get('True'), 'True')
# headers / params as list of key/value pairs
q = '''
$params=((foo, bar), (key, valu), (baz, $lib.false))
$hdr = (
("User-Agent", "Storm HTTP Stuff"),
((0), "Why"),
("true", $lib.true),
)
$resp = $lib.inet.http.get($url, headers=$hdr, params=$params, ssl_verify=$lib.false)
return ( $resp.json() )
'''
resp = await core.callStorm(q, opts=opts)
data = resp.get('result')
self.eq(data.get('params'), {'key': ('valu',), 'foo': ('bar',), 'baz': ('False',)})
self.eq(data.get('headers').get('User-Agent'), 'Storm HTTP Stuff')
self.eq(data.get('headers').get('0'), 'Why')
self.eq(data.get('headers').get('True'), 'True')
# headers
q = '''
$resp = $lib.inet.http.get($url, ssl_verify=$lib.false)
return ( $resp.headers."Content-Type" )
'''
resp = await core.callStorm(q, opts=opts)
self.eq(resp, 'application/json; charset=UTF-8')
badurl = f'https://root:[email protected]:{port}/api/v0/notjson'
badopts = {'vars': {'url': badurl}}
q = '''
$resp = $lib.inet.http.get($url, ssl_verify=$lib.false)
return ( $resp.json() )
'''
with self.raises(s_exc.BadJsonText) as cm:
resp = await core.callStorm(q, opts=badopts)
# params as a urlencoded string
q = '''
$params="foo=bar&key=valu&foo=baz"
$resp = $lib.inet.http.get($url, params=$params, ssl_verify=$lib.false)
return ( $resp.json() )
'''
resp = await core.callStorm(q, opts=opts)
data = resp.get('result')
self.eq(data.get('params'), {'key': ('valu',), 'foo': ('bar', 'baz')})
# Bad param
q = '''
$params=(1138)
$resp = $lib.inet.http.get($url, params=$params, ssl_verify=$lib.false)
return ( ($resp.code, $resp.err) )
'''
code, (errname, _) = await core.callStorm(q, opts=opts)
self.eq(code, -1)
self.eq('TypeError', errname)
# SSL Verify enabled results in a aiohttp.ClientConnectorCertificateError
q = '''
$params=((foo, bar), (key, valu))
$resp = $lib.inet.http.get($url, params=$params)
return ( ($resp.code, $resp.err) )
'''
code, (errname, _) = await core.callStorm(q, opts=opts)
self.eq(code, -1)
self.eq('ClientConnectorCertificateError', errname)
async def test_storm_http_inject_ca(self):
with self.getTestDir() as dirn:
cdir = s_common.gendir(dirn, 'certs')
cadir = s_common.gendir(cdir, 'cas')
tdir = s_certdir.CertDir(cdir)
tdir.genCaCert('somelocalca')
tdir.genHostCert('localhost', signas='somelocalca')
localkeyfp = tdir.getHostKeyPath('localhost')
localcertfp = tdir.getHostCertPath('localhost')
shutil.copyfile(localkeyfp, s_common.genpath(dirn, 'sslkey.pem'))
shutil.copyfile(localcertfp, s_common.genpath(dirn, 'sslcert.pem'))
tlscadir = s_common.gendir(dirn, 'cadir')
for fn in os.listdir(cadir):
if fn.endswith('.crt'):
shutil.copyfile(os.path.join(cadir, fn), os.path.join(tlscadir, fn))
async with self.getTestCore(dirn=dirn) as core:
root = await core.auth.getUserByName('root')
await root.setPasswd('root')
addr, port = await core.addHttpsPort(0)
core.addHttpApi('/api/v0/test', s_test.HttpReflector, {'cell': core})
url = f'https://root:root@localhost:{port}/api/v0/test'
opts = {'vars': {'url': url}}
q = '''
$params=((foo, bar), (key, valu))
$resp = $lib.inet.http.get($url, params=$params)
return ( ($resp.code, $resp.err) )
'''
code, (errname, _) = await core.callStorm(q, opts=opts)
self.eq(code, -1)
self.eq('ClientConnectorCertificateError', errname)
conf = {'tls:ca:dir': tlscadir}
async with self.getTestCore(dirn=dirn, conf=conf) as core:
addr, port = await core.addHttpsPort(0)
core.addHttpApi('/api/v0/test', s_test.HttpReflector, {'cell': core})
url = f'https://root:root@localhost:{port}/api/v0/test'
opts = {'vars': {'url': url}}
q = '''
$params=((foo, bar), (key, valu))
$resp = $lib.inet.http.get($url, params=$params)
return ( $resp.json() )
'''
resp = await core.callStorm(q, opts=opts)
data = resp.get('result')
self.eq(data.get('params'), {'key': ('valu',), 'foo': ('bar',)})
async def test_storm_http_head(self):
async with self.getTestCore() as core:
addr, port = await core.addHttpsPort(0)
root = await core.auth.getUserByName('root')
await root.setPasswd('root')
core.addHttpApi('/api/v0/test', s_test.HttpReflector, {'cell': core})
url = f'https://root:[email protected]:{port}/api/v0/test'
noauth_url = f'https://127.0.0.1:{port}/api/v0/test'
newp_url = noauth_url + 'newpnewp'
opts = {'vars': {'url': url, 'noauth_url': noauth_url, 'newp_url': newp_url}}
q = '''
$params=$lib.dict(key=valu, foo=bar)
$hdr = (
("User-Agent", "Storm HTTP Stuff"),
)
$resp = $lib.inet.http.head($url, headers=$hdr, params=$params, ssl_verify=$lib.false)
return ( ($resp.code, $resp.headers, $resp.body) )
'''
resp = await core.callStorm(q, opts=opts)
code, headers, body = resp
self.eq(code, 200)
self.eq(b'', body)
self.eq('0', headers.get('Content-Length'))
self.eq('1', headers.get('Head'))
q = '''
$params=$lib.dict(key=valu, redirect='http://test.newp/')
$hdr = (
("User-Agent", "Storm HTTP Stuff"),
)
$resp = $lib.inet.http.head($url, headers=$hdr, params=$params, ssl_verify=$lib.false)
return ( ($resp.code, $resp.headers, $resp.body) )
'''
resp = await core.callStorm(q, opts=opts)
code, headers, body = resp
self.eq(code, 302)
self.eq(b'', body)
self.eq('0', headers.get('Content-Length'))
self.eq('1', headers.get('Head'))
self.eq('1', headers.get('Redirected'))
self.eq('http://test.newp/', headers.get('Location'))
q = '''
$params=$lib.dict(key=valu, redirect=$noauth_url)
$hdr = (
("User-Agent", "Storm HTTP Stuff"),
)
$resp = $lib.inet.http.head($url, headers=$hdr, params=$params, ssl_verify=$lib.false, allow_redirects=$lib.true)
return ( ($resp.code, $resp.headers, $resp.body) )
'''
resp = await core.callStorm(q, opts=opts)
code, headers, body = resp
self.eq(code, 200)
self.eq(b'', body)
q = '''
$params=$lib.dict(key=valu, redirect=$newp_url)
$hdr = (
("User-Agent", "Storm HTTP Stuff"),
)
$resp = $lib.inet.http.head($url, headers=$hdr, params=$params, ssl_verify=$lib.false, allow_redirects=$lib.true)
return ( ($resp.code, $resp.headers, $resp.body) )
'''
resp = await core.callStorm(q, opts=opts)
code, headers, body = resp
self.eq(code, 404)
self.eq(b'', body)
q = '''
$params=$lib.dict(key=valu, redirect="http://127.0.0.1/newp")
$hdr = (
("User-Agent", "Storm HTTP Stuff"),
)
$resp = $lib.inet.http.head($url, headers=$hdr, params=$params, ssl_verify=$lib.false, allow_redirects=$lib.true)
return ( ($resp.code, $resp.headers, $resp.body) )
'''
resp = await core.callStorm(q, opts=opts)
code, headers, body = resp
self.eq(code, -1)
self.eq(b'', body)
async def test_storm_http_request(self):
async with self.getTestCore() as core:
addr, port = await core.addHttpsPort(0)
root = await core.auth.getUserByName('root')
await root.setPasswd('root')
core.addHttpApi('/api/v0/test', s_test.HttpReflector, {'cell': core})
url = f'https://root:[email protected]:{port}/api/v0/test'
opts = {'vars': {'url': url}}
q = '''
$params=$lib.dict(key=valu, foo=bar)
$hdr = (
("User-Agent", "Storm HTTP Stuff"),
)
$resp = $lib.inet.http.request(GET, $url, headers=$hdr, params=$params, ssl_verify=$lib.false)
return ( $resp.json() )
'''
resp = await core.callStorm(q, opts=opts)
data = resp.get('result')
self.eq(data.get('params'), {'key': ('valu',), 'foo': ('bar',)})
self.eq(data.get('headers').get('User-Agent'), 'Storm HTTP Stuff')
# Timeout
url = f'https://root:[email protected]:{port}/api/v0/test'
opts = {'vars': {'url': url, 'sleep': 1, 'timeout': 2}}
q = '''
$params=$lib.dict(key=valu, foo=bar, sleep=$sleep)
$hdr = (
("User-Agent", "Storm HTTP Stuff"),
)
$resp = $lib.inet.http.request(GET, $url, headers=$hdr, params=$params, ssl_verify=$lib.false, timeout=$timeout)
$code = $resp.code
return ($code)
'''
code = await core.callStorm(q, opts=opts)
self.eq(200, code)
url = f'https://root:[email protected]:{port}/api/v0/test'
opts = {'vars': {'url': url, 'sleep': 10, 'timeout': 1}}
q = '''
$params=$lib.dict(key=valu, foo=bar, sleep=$sleep)
$hdr = (
("User-Agent", "Storm HTTP Stuff"),
)
$resp = $lib.inet.http.request(GET, $url, headers=$hdr, params=$params, ssl_verify=$lib.false, timeout=$timeout)
$code = $resp.code
return (($code, $resp.err))
'''
code, (errname, errinfo) = await core.callStorm(q, opts=opts)
self.eq(code, -1)
self.eq('TimeoutError', errname)
self.isin('mesg', errinfo)
self.eq('', errinfo.get('mesg')) # timeouterror has no mesg
async def test_storm_http_post(self):
async with self.getTestCore() as core:
addr, port = await core.addHttpsPort(0)
root = await core.auth.getUserByName('root')
await root.setPasswd('root')
adduser = '''
$url = $lib.str.format("https://root:[email protected]:{port}/api/v1/auth/adduser", port=$port)
$user = $lib.dict(name=$name, passwd=$passwd)
$post = $lib.inet.http.post($url, json=$user, ssl_verify=$(0)).json().result.name
$lib.print($post)
[ test:str=$post ]
'''
opts = {'vars': {'port': port, 'name': 'foo', 'passwd': 'bar'}}
nodes = await core.nodes(adduser, opts=opts)
self.len(1, nodes)
self.assertIn('foo', [u.name for u in core.auth.users()])
adduser = '''
$url = $lib.str.format("https://root:[email protected]:{port}/api/v1/auth/adduser", port=$port)
$user = $lib.str.format('{"name": "{name}", "passwd": "{passwd}"}', name=$name, passwd=$passwd)
$header = $lib.dict("Content-Type"="application/json")
$post = $lib.inet.http.post($url, headers=$header, body=$user, ssl_verify=$(0)).json().result.name
[ test:str=$post ]
'''
opts = {'vars': {'port': port, 'name': 'vertex', 'passwd': 'project'}}
nodes = await core.nodes(adduser, opts=opts)
self.len(1, nodes)
self.assertIn('vertex', [u.name for u in core.auth.users()])
core.addHttpApi('/api/v0/test', s_test.HttpReflector, {'cell': core})
url = f'https://root:[email protected]:{port}/api/v0/test'
opts = {'vars': {'url': url, 'buf': b'1234'}}
q = '''
$params=$lib.dict(key=valu, foo=bar)
$resp = $lib.inet.http.post($url, params=$params, body=$buf, ssl_verify=$lib.false)
return ( $resp.json() )
'''
resp = await core.callStorm(q, opts=opts)
data = resp.get('result')
self.eq(data.get('params'), {'key': ('valu',), 'foo': ('bar',)})
self.eq(data.get('body'), 'MTIzNA==')
q = '''
$fields=$lib.list(
$lib.dict(name=foo, value=bar),
$lib.dict(name=foo, value=bar2),
$lib.dict(name=baz, value=cool)
)
$resp = $lib.inet.http.post($url, fields=$fields, ssl_verify=$lib.false)
return ( $resp.json() )
'''
resp = await core.callStorm(q, opts=opts)
data = resp.get('result')
self.eq(data.get('params'), {'foo': ('bar', 'bar2'), 'baz': ('cool',)})
async def test_storm_http_post_file(self):
async with self.getTestCore() as core:
addr, port = await core.addHttpsPort(0)
root = await core.auth.getUserByName('root')
await root.setPasswd('root')
text = '''
$url = $lib.str.format("https://root:[email protected]:{port}/api/v1/storm", port=$port)
$stormq = "($size, $sha2) = $lib.bytes.put($lib.base64.decode('dmVydGV4')) [ test:str = $sha2 ] [ test:int = $size ]"
$json = $lib.dict(query=$stormq)
$bytez = $lib.inet.http.post($url, json=$json, ssl_verify=$(0))
'''
opts = {'vars': {'port': port}}
nodes = await core.nodes(text, opts=opts)
nodes = await core.nodes('test:str')
self.len(1, nodes)
self.eq(nodes[0].ndef, ('test:str', 'e1b683e26a3aad218df6aa63afe9cf57fdb5dfaf5eb20cddac14305d67f48a02'))
nodes = await core.nodes('test:int')
self.len(1, nodes)
self.eq(nodes[0].ndef, ('test:int', 6))
text = '''
$url = $lib.str.format("https://root:[email protected]:{port}/api/v1/storm", port=$port)
$json = $lib.dict(query="test:str")
$body = $json
$resp=$lib.inet.http.post($url, json=$json, body=$body, ssl_verify=$(0))
return ( ($resp.code, $resp.err) )
'''
code, (errname, _) = await core.callStorm(text, opts=opts)
self.eq(code, -1)
self.eq('ValueError', errname)
async def test_storm_http_proxy(self):
conf = {'http:proxy': 'socks5://user:[email protected]:1'}
async with self.getTestCore(conf=conf) as core:
resp = await core.callStorm('return($lib.axon.wget("http://vertex.link"))')
self.ne(-1, resp['mesg'].find('Can not connect to proxy 127.0.0.1:1'))
q = '$resp=$lib.inet.http.get("http://vertex.link") return(($resp.code, $resp.err))'
code, (errname, _) = await core.callStorm(q)
self.eq(code, -1)
self.eq('ProxyConnectionError', errname)
async def test_storm_http_connect(self):
async with self.getTestCore() as core:
core.addHttpApi('/test/ws', TstWebSock, {})
addr, port = await core.addHttpsPort(0)
mesg = await core.callStorm('''
$hdr=$lib.dict(key=$lib.false)
$url = $lib.str.format('https://127.0.0.1:{port}/test/ws', port=$port)
($ok, $sock) = $lib.inet.http.connect($url, headers=$hdr)
if (not $ok) { $lib.exit($sock) }
($ok, $mesg) = $sock.rx()
if (not $ok) { $lib.exit($mesg) }
return($mesg)
''', opts={'vars': {'port': port}})
self.eq(mesg.get('hi'), 'woot')
self.eq(mesg.get('headers').get('Key'), 'False')
mesg = await core.callStorm('''
$hdr=( (key, $lib.false), )
$url = $lib.str.format('https://127.0.0.1:{port}/test/ws', port=$port)
($ok, $sock) = $lib.inet.http.connect($url, headers=$hdr)
if (not $ok) { $lib.exit($sock) }
($ok, $mesg) = $sock.rx()
if (not $ok) { $lib.exit($mesg) }
return($mesg)
''', opts={'vars': {'port': port}})
self.eq(mesg.get('hi'), 'woot')
self.eq(mesg.get('headers').get('Key'), 'False')
self.eq((True, ('echo', 'lololol')), await core.callStorm('''
$url = $lib.str.format('https://127.0.0.1:{port}/test/ws', port=$port)
($ok, $sock) = $lib.inet.http.connect($url)
if (not $ok) { $lib.exit($sock) }
($ok, $mesg) = $sock.rx()
if (not $ok) { $lib.exit($mesg) }
($ok, $valu) = $sock.tx(lololol)
return($sock.rx())
''', opts={'vars': {'port': port}}))
| 42.632479 | 129 | 0.502656 |
4a22e64eeff285aa265f4098989a131c2234022d | 17 | py | Python | src/__init__.py | algerbrex/Schemey | 4a499054758d33d2f170b0b536f1c1a7372385b7 | ["Unlicense"] | 6 | 2017-02-07T19:03:38.000Z | 2021-12-12T01:42:42.000Z | src/__init__.py | algerbrex/Schemey | 4a499054758d33d2f170b0b536f1c1a7372385b7 | ["Unlicense"] | 5 | 2017-03-13T23:59:49.000Z | 2017-06-02T03:45:21.000Z | src/__init__.py | algerbrex/Schemey | 4a499054758d33d2f170b0b536f1c1a7372385b7 | ["Unlicense"] | null | null | null | """
package.
"""
| 4.25 | 8 | 0.411765 |
4a22e69838edf902fca03d413c168fc1cd7b7d86 | 801 | py | Python | tests/providers/test_memset.py | chibiegg/lexicon | 6230ea1e567a730243dc77c08ff6c4c16f136157 | ["MIT"] | null | null | null | tests/providers/test_memset.py | chibiegg/lexicon | 6230ea1e567a730243dc77c08ff6c4c16f136157 | ["MIT"] | null | null | null | tests/providers/test_memset.py | chibiegg/lexicon | 6230ea1e567a730243dc77c08ff6c4c16f136157 | ["MIT"] | null | null | null | # Test for one implementation of the interface
from lexicon.providers.memset import Provider
from integration_tests import IntegrationTests
from unittest import TestCase
import pytest
# Hook into testing framework by inheriting unittest.TestCase and reuse
# the tests which *each and every* implementation of the interface must
# pass, by inheritance from integration_tests.IntegrationTests
class MemsetProviderTests(TestCase, IntegrationTests):
Provider = Provider
provider_name = 'memset'
domain = 'testzone.com'
def _filter_headers(self):
return ['Authorization']
# TODO: this should be enabled
@pytest.mark.skip(reason="regenerating auth keys required")
def test_Provider_when_calling_update_record_should_modify_record_name_specified(self):
        return
| 34.826087 | 91 | 0.787765 |
4a22e69a40fa7ddd01e5347834880dc483439f09 | 10,621 | py | Python | sdk/python/pulumi_azure_native/apimanagement/v20191201preview/backend.py | pulumi-bot/pulumi-azure-native | f7b9490b5211544318e455e5cceafe47b628e12c | ["Apache-2.0"] | null | null | null | sdk/python/pulumi_azure_native/apimanagement/v20191201preview/backend.py | pulumi-bot/pulumi-azure-native | f7b9490b5211544318e455e5cceafe47b628e12c | ["Apache-2.0"] | null | null | null | sdk/python/pulumi_azure_native/apimanagement/v20191201preview/backend.py | pulumi-bot/pulumi-azure-native | f7b9490b5211544318e455e5cceafe47b628e12c | ["Apache-2.0"] | null | null | null | # coding=utf-8
# *** WARNING: this file was generated by the Pulumi SDK Generator. ***
# *** Do not edit by hand unless you're certain you know what you are doing! ***
import warnings
import pulumi
import pulumi.runtime
from typing import Any, Mapping, Optional, Sequence, Union
from ... import _utilities, _tables
from . import outputs
from ._enums import *
from ._inputs import *
__all__ = ['Backend']
class Backend(pulumi.CustomResource):
def __init__(__self__,
resource_name: str,
opts: Optional[pulumi.ResourceOptions] = None,
backend_id: Optional[pulumi.Input[str]] = None,
credentials: Optional[pulumi.Input[pulumi.InputType['BackendCredentialsContractArgs']]] = None,
description: Optional[pulumi.Input[str]] = None,
properties: Optional[pulumi.Input[pulumi.InputType['BackendPropertiesArgs']]] = None,
protocol: Optional[pulumi.Input[Union[str, 'BackendProtocol']]] = None,
proxy: Optional[pulumi.Input[pulumi.InputType['BackendProxyContractArgs']]] = None,
resource_group_name: Optional[pulumi.Input[str]] = None,
resource_id: Optional[pulumi.Input[str]] = None,
service_name: Optional[pulumi.Input[str]] = None,
title: Optional[pulumi.Input[str]] = None,
tls: Optional[pulumi.Input[pulumi.InputType['BackendTlsPropertiesArgs']]] = None,
url: Optional[pulumi.Input[str]] = None,
__props__=None,
__name__=None,
__opts__=None):
"""
Backend details.
:param str resource_name: The name of the resource.
:param pulumi.ResourceOptions opts: Options for the resource.
:param pulumi.Input[str] backend_id: Identifier of the Backend entity. Must be unique in the current API Management service instance.
:param pulumi.Input[pulumi.InputType['BackendCredentialsContractArgs']] credentials: Backend Credentials Contract Properties
:param pulumi.Input[str] description: Backend Description.
:param pulumi.Input[pulumi.InputType['BackendPropertiesArgs']] properties: Backend Properties contract
:param pulumi.Input[Union[str, 'BackendProtocol']] protocol: Backend communication protocol.
:param pulumi.Input[pulumi.InputType['BackendProxyContractArgs']] proxy: Backend Proxy Contract Properties
:param pulumi.Input[str] resource_group_name: The name of the resource group.
:param pulumi.Input[str] resource_id: Management Uri of the Resource in External System. This url can be the Arm Resource Id of Logic Apps, Function Apps or Api Apps.
:param pulumi.Input[str] service_name: The name of the API Management service.
:param pulumi.Input[str] title: Backend Title.
:param pulumi.Input[pulumi.InputType['BackendTlsPropertiesArgs']] tls: Backend TLS Properties
:param pulumi.Input[str] url: Runtime Url of the Backend.
"""
if __name__ is not None:
warnings.warn("explicit use of __name__ is deprecated", DeprecationWarning)
resource_name = __name__
if __opts__ is not None:
warnings.warn("explicit use of __opts__ is deprecated, use 'opts' instead", DeprecationWarning)
opts = __opts__
if opts is None:
opts = pulumi.ResourceOptions()
if not isinstance(opts, pulumi.ResourceOptions):
raise TypeError('Expected resource options to be a ResourceOptions instance')
if opts.version is None:
opts.version = _utilities.get_version()
if opts.id is None:
if __props__ is not None:
raise TypeError('__props__ is only valid when passed in combination with a valid opts.id to get an existing resource')
__props__ = dict()
__props__['backend_id'] = backend_id
__props__['credentials'] = credentials
__props__['description'] = description
__props__['properties'] = properties
if protocol is None and not opts.urn:
raise TypeError("Missing required property 'protocol'")
__props__['protocol'] = protocol
__props__['proxy'] = proxy
if resource_group_name is None and not opts.urn:
raise TypeError("Missing required property 'resource_group_name'")
__props__['resource_group_name'] = resource_group_name
__props__['resource_id'] = resource_id
if service_name is None and not opts.urn:
raise TypeError("Missing required property 'service_name'")
__props__['service_name'] = service_name
__props__['title'] = title
__props__['tls'] = tls
if url is None and not opts.urn:
raise TypeError("Missing required property 'url'")
__props__['url'] = url
__props__['name'] = None
__props__['type'] = None
alias_opts = pulumi.ResourceOptions(aliases=[pulumi.Alias(type_="azure-nextgen:apimanagement/v20191201preview:Backend"), pulumi.Alias(type_="azure-native:apimanagement:Backend"), pulumi.Alias(type_="azure-nextgen:apimanagement:Backend"), pulumi.Alias(type_="azure-native:apimanagement/latest:Backend"), pulumi.Alias(type_="azure-nextgen:apimanagement/latest:Backend"), pulumi.Alias(type_="azure-native:apimanagement/v20160707:Backend"), pulumi.Alias(type_="azure-nextgen:apimanagement/v20160707:Backend"), pulumi.Alias(type_="azure-native:apimanagement/v20161010:Backend"), pulumi.Alias(type_="azure-nextgen:apimanagement/v20161010:Backend"), pulumi.Alias(type_="azure-native:apimanagement/v20170301:Backend"), pulumi.Alias(type_="azure-nextgen:apimanagement/v20170301:Backend"), pulumi.Alias(type_="azure-native:apimanagement/v20180101:Backend"), pulumi.Alias(type_="azure-nextgen:apimanagement/v20180101:Backend"), pulumi.Alias(type_="azure-native:apimanagement/v20180601preview:Backend"), pulumi.Alias(type_="azure-nextgen:apimanagement/v20180601preview:Backend"), pulumi.Alias(type_="azure-native:apimanagement/v20190101:Backend"), pulumi.Alias(type_="azure-nextgen:apimanagement/v20190101:Backend"), pulumi.Alias(type_="azure-native:apimanagement/v20191201:Backend"), pulumi.Alias(type_="azure-nextgen:apimanagement/v20191201:Backend"), pulumi.Alias(type_="azure-native:apimanagement/v20200601preview:Backend"), pulumi.Alias(type_="azure-nextgen:apimanagement/v20200601preview:Backend"), pulumi.Alias(type_="azure-native:apimanagement/v20201201:Backend"), pulumi.Alias(type_="azure-nextgen:apimanagement/v20201201:Backend")])
opts = pulumi.ResourceOptions.merge(opts, alias_opts)
super(Backend, __self__).__init__(
'azure-native:apimanagement/v20191201preview:Backend',
resource_name,
__props__,
opts)
@staticmethod
def get(resource_name: str,
id: pulumi.Input[str],
opts: Optional[pulumi.ResourceOptions] = None) -> 'Backend':
"""
Get an existing Backend resource's state with the given name, id, and optional extra
properties used to qualify the lookup.
:param str resource_name: The unique name of the resulting resource.
:param pulumi.Input[str] id: The unique provider ID of the resource to lookup.
:param pulumi.ResourceOptions opts: Options for the resource.
"""
opts = pulumi.ResourceOptions.merge(opts, pulumi.ResourceOptions(id=id))
__props__ = dict()
__props__["credentials"] = None
__props__["description"] = None
__props__["name"] = None
__props__["properties"] = None
__props__["protocol"] = None
__props__["proxy"] = None
__props__["resource_id"] = None
__props__["title"] = None
__props__["tls"] = None
__props__["type"] = None
__props__["url"] = None
return Backend(resource_name, opts=opts, __props__=__props__)
@property
@pulumi.getter
def credentials(self) -> pulumi.Output[Optional['outputs.BackendCredentialsContractResponse']]:
"""
Backend Credentials Contract Properties
"""
return pulumi.get(self, "credentials")
@property
@pulumi.getter
def description(self) -> pulumi.Output[Optional[str]]:
"""
Backend Description.
"""
return pulumi.get(self, "description")
@property
@pulumi.getter
def name(self) -> pulumi.Output[str]:
"""
Resource name.
"""
return pulumi.get(self, "name")
@property
@pulumi.getter
def properties(self) -> pulumi.Output['outputs.BackendPropertiesResponse']:
"""
Backend Properties contract
"""
return pulumi.get(self, "properties")
@property
@pulumi.getter
def protocol(self) -> pulumi.Output[str]:
"""
Backend communication protocol.
"""
return pulumi.get(self, "protocol")
@property
@pulumi.getter
def proxy(self) -> pulumi.Output[Optional['outputs.BackendProxyContractResponse']]:
"""
Backend Proxy Contract Properties
"""
return pulumi.get(self, "proxy")
@property
@pulumi.getter(name="resourceId")
def resource_id(self) -> pulumi.Output[Optional[str]]:
"""
Management Uri of the Resource in External System. This url can be the Arm Resource Id of Logic Apps, Function Apps or Api Apps.
"""
return pulumi.get(self, "resource_id")
@property
@pulumi.getter
def title(self) -> pulumi.Output[Optional[str]]:
"""
Backend Title.
"""
return pulumi.get(self, "title")
@property
@pulumi.getter
def tls(self) -> pulumi.Output[Optional['outputs.BackendTlsPropertiesResponse']]:
"""
Backend TLS Properties
"""
return pulumi.get(self, "tls")
@property
@pulumi.getter
def type(self) -> pulumi.Output[str]:
"""
Resource type for API Management resource.
"""
return pulumi.get(self, "type")
@property
@pulumi.getter
def url(self) -> pulumi.Output[str]:
"""
Runtime Url of the Backend.
"""
return pulumi.get(self, "url")
def translate_output_property(self, prop):
return _tables.CAMEL_TO_SNAKE_CASE_TABLE.get(prop) or prop
def translate_input_property(self, prop):
return _tables.SNAKE_TO_CAMEL_CASE_TABLE.get(prop) or prop
| 47.415179 | 1,638 | 0.660201 |
4a22e6ec890e9c5fd6c4d9b15123685994622609 | 161 | py | Python | read_xnb.py | fesh0r/xnb_parse | cc3aef8fff4be1320068a78001795e7e1c8bd548 | [
"MIT"
] | 31 | 2015-01-15T01:57:48.000Z | 2021-06-26T17:08:56.000Z | read_xnb.py | fesh0r/xnb_parse | cc3aef8fff4be1320068a78001795e7e1c8bd548 | [
"MIT"
] | 1 | 2016-05-11T13:48:30.000Z | 2016-05-11T13:48:30.000Z | read_xnb.py | fesh0r/xnb_parse | cc3aef8fff4be1320068a78001795e7e1c8bd548 | [
"MIT"
] | 5 | 2015-01-26T08:58:20.000Z | 2020-02-25T11:03:33.000Z | #!/usr/bin/python
"""
Dump info from XNB
"""
from __future__ import print_function
from xnb_parse.read_xnb import main
if __name__ == '__main__':
main()
| 12.384615 | 37 | 0.708075 |
4a22e7bf41db1d4cc91f827d69654801815525ff | 633 | py | Python | manage.py | vamshisowdaboina/pythonrestapi | d69352e58bc55ae36182909a26dd49bd45fc0792 | ["MIT"] | null | null | null | manage.py | vamshisowdaboina/pythonrestapi | d69352e58bc55ae36182909a26dd49bd45fc0792 | ["MIT"] | null | null | null | manage.py | vamshisowdaboina/pythonrestapi | d69352e58bc55ae36182909a26dd49bd45fc0792 | ["MIT"] | null | null | null | #!/usr/bin/env python
"""Django's command-line utility for administrative tasks."""
import os
import sys
def main():
os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'pythonrestapi.settings')
try:
from django.core.management import execute_from_command_line
except ImportError as exc:
raise ImportError(
"Couldn't import Django. Are you sure it's installed and "
"available on your PYTHONPATH environment variable? Did you "
"forget to activate a virtual environment?"
) from exc
execute_from_command_line(sys.argv)
if __name__ == '__main__':
main()
| 28.772727 | 77 | 0.685624 |
4a22e7cd45875e852447e55d7e10a2f8ae710358 | 382 | py | Python | backend/jmeter/studentquestion/_genfolders.py | gh-jsoares/ES20-AL14-Project | 2bc8c212064c13071015699636c2e83edbc61b88 | ["MIT"] | null | null | null | backend/jmeter/studentquestion/_genfolders.py | gh-jsoares/ES20-AL14-Project | 2bc8c212064c13071015699636c2e83edbc61b88 | ["MIT"] | null | null | null | backend/jmeter/studentquestion/_genfolders.py | gh-jsoares/ES20-AL14-Project | 2bc8c212064c13071015699636c2e83edbc61b88 | ["MIT"] | null | null | null | from os import walk, makedirs, path
for r, d, fs in walk('.'):
if r != '.':
break
for f in fs:
s = f.split('.')
name = s[0]
ext = s[1]
if ext == 'jmx':
dirpath = "pictures/{}".format(name)
if not path.exists(dirpath):
makedirs(dirpath)
print("R:{}, D:{}, F:{}".format(r, d, name))
| 25.466667 | 56 | 0.431937 |
4a22e819336a61d03e001a1c1b1cd4b657f18408 | 10,498 | py | Python | tests/test_memoryfile.py | mraspaud/rasterio | 474d614e3839bee6a9fa4d9adc7e85936d41f7f1 | ["BSD-3-Clause"] | null | null | null | tests/test_memoryfile.py | mraspaud/rasterio | 474d614e3839bee6a9fa4d9adc7e85936d41f7f1 | ["BSD-3-Clause"] | null | null | null | tests/test_memoryfile.py | mraspaud/rasterio | 474d614e3839bee6a9fa4d9adc7e85936d41f7f1 | ["BSD-3-Clause"] | null | null | null | """MemoryFile tests. MemoryFile requires GDAL 2.0+.
Tests in this file will ONLY run for GDAL >= 2.x"""
from io import BytesIO
import logging
import os.path
from affine import Affine
import numpy
import pytest
import rasterio
from rasterio.io import MemoryFile, ZipMemoryFile
from rasterio.enums import MaskFlags
from rasterio.env import GDALVersion
from rasterio.shutil import copyfiles
# Skip ENTIRE module if not GDAL >= 2.x.
# pytestmark is a keyword that instructs pytest to skip this module.
pytestmark = pytest.mark.skipif(
not GDALVersion.runtime().major >= 2,
reason="MemoryFile requires GDAL 2.x")
@pytest.fixture(scope='session')
def rgb_file_bytes(path_rgb_byte_tif):
"""Get the bytes of our RGB.bytes.tif file"""
return open(path_rgb_byte_tif, 'rb').read()
@pytest.fixture(scope='session')
def rgb_lzw_file_bytes():
"""Get the bytes of our RGB.bytes.tif file"""
return open('tests/data/rgb_lzw.tif', 'rb').read()
@pytest.fixture(scope='function')
def rgb_file_object(path_rgb_byte_tif):
"""Get RGB.bytes.tif file opened in 'rb' mode"""
return open(path_rgb_byte_tif, 'rb')
@pytest.fixture(scope='session')
def rgb_data_and_profile(path_rgb_byte_tif):
with rasterio.open(path_rgb_byte_tif) as src:
data = src.read()
profile = src.profile
return data, profile
def test_initial_not_bytes():
"""Creating a MemoryFile from not bytes fails."""
with pytest.raises(TypeError):
MemoryFile(u'lolwut')
def test_initial_bytes(rgb_file_bytes):
"""MemoryFile contents can initialized from bytes and opened."""
with MemoryFile(rgb_file_bytes) as memfile:
with memfile.open() as src:
assert src.driver == 'GTiff'
assert src.count == 3
assert src.dtypes == ('uint8', 'uint8', 'uint8')
assert src.read().shape == (3, 718, 791)
def test_initial_lzw_bytes(rgb_lzw_file_bytes):
"""MemoryFile contents can initialized from bytes and opened."""
with MemoryFile(rgb_lzw_file_bytes) as memfile:
with memfile.open() as src:
assert src.driver == 'GTiff'
assert src.count == 3
assert src.dtypes == ('uint8', 'uint8', 'uint8')
assert src.read().shape == (3, 718, 791)
def test_initial_file_object(rgb_file_object):
"""MemoryFile contents can initialized from bytes and opened."""
with MemoryFile(rgb_file_object) as memfile:
with memfile.open() as src:
assert src.driver == 'GTiff'
assert src.count == 3
assert src.dtypes == ('uint8', 'uint8', 'uint8')
assert src.read().shape == (3, 718, 791)
def test_closed():
"""A closed MemoryFile can not be opened"""
with MemoryFile() as memfile:
pass
with pytest.raises(IOError):
memfile.open()
def test_non_initial_bytes(rgb_file_bytes):
"""MemoryFile contents can be read from bytes and opened."""
with MemoryFile() as memfile:
assert memfile.write(rgb_file_bytes) == len(rgb_file_bytes)
with memfile.open() as src:
assert src.driver == 'GTiff'
assert src.count == 3
assert src.dtypes == ('uint8', 'uint8', 'uint8')
assert src.read().shape == (3, 718, 791)
def test_non_initial_bytes_in_two(rgb_file_bytes):
"""MemoryFile contents can be read from bytes in two steps and opened."""
with MemoryFile() as memfile:
assert memfile.write(rgb_file_bytes[:10]) == 10
assert memfile.write(rgb_file_bytes[10:]) == len(rgb_file_bytes) - 10
with memfile.open() as src:
assert src.driver == 'GTiff'
assert src.count == 3
assert src.dtypes == ('uint8', 'uint8', 'uint8')
assert src.read().shape == (3, 718, 791)
def test_no_initial_bytes(rgb_data_and_profile):
"""An empty MemoryFile can be opened and written into."""
data, profile = rgb_data_and_profile
with MemoryFile() as memfile:
with memfile.open(**profile) as dst:
dst.write(data)
view = memfile.getbuffer()
# Exact size of the in-memory GeoTIFF varies with GDAL
# version and configuration.
assert view.size > 1000000
# NB: bytes(view) doesn't return what you'd expect with python 2.7.
data = bytes(bytearray(view))
with MemoryFile(data) as memfile:
with memfile.open() as src:
assert sorted(src.profile.items()) == sorted(profile.items())
def test_read(tmpdir, rgb_file_bytes):
"""Reading from a MemoryFile works"""
with MemoryFile(rgb_file_bytes) as memfile:
tmptiff = tmpdir.join('test.tif')
while 1:
chunk = memfile.read(8192)
if not chunk:
break
tmptiff.write(chunk, 'ab')
with rasterio.open(str(tmptiff)) as src:
assert src.count == 3
def test_file_object_read(rgb_file_object):
"""An example of reading from a file object"""
with rasterio.open(rgb_file_object) as src:
assert src.driver == 'GTiff'
assert src.count == 3
assert src.dtypes == ('uint8', 'uint8', 'uint8')
assert src.read().shape == (3, 718, 791)
def test_file_object_read_variant(rgb_file_bytes):
"""An example of reading from a MemoryFile object"""
with rasterio.open(MemoryFile(rgb_file_bytes)) as src:
assert src.driver == 'GTiff'
assert src.count == 3
assert src.dtypes == ('uint8', 'uint8', 'uint8')
assert src.read().shape == (3, 718, 791)
def test_file_object_read_variant2(rgb_file_bytes):
"""An example of reading from a BytesIO object"""
with rasterio.open(BytesIO(rgb_file_bytes)) as src:
assert src.driver == 'GTiff'
assert src.count == 3
assert src.dtypes == ('uint8', 'uint8', 'uint8')
assert src.read().shape == (3, 718, 791)
def test_test_file_object_write(tmpdir, rgb_data_and_profile):
"""An example of writing to a file object"""
data, profile = rgb_data_and_profile
with tmpdir.join('test.tif').open('wb') as fout:
with rasterio.open(fout, 'w', **profile) as dst:
dst.write(data)
with rasterio.open(str(tmpdir.join('test.tif'))) as src:
assert src.driver == 'GTiff'
assert src.count == 3
assert src.dtypes == ('uint8', 'uint8', 'uint8')
assert src.read().shape == (3, 718, 791)
def test_nonpersistemt_memfile_fail_example(rgb_data_and_profile):
"""An example of writing to a file object"""
data, profile = rgb_data_and_profile
with BytesIO() as fout:
with rasterio.open(fout, 'w', **profile) as dst:
dst.write(data)
# This fails because the MemoryFile created in open() is
# gone.
rasterio.open(fout)
def test_zip_closed():
"""A closed ZipMemoryFile can not be opened"""
with ZipMemoryFile() as zipmemfile:
pass
with pytest.raises(IOError):
zipmemfile.open('foo')
def test_zip_file_object_read(path_zip_file):
"""An example of reading from a zip file object"""
with open(path_zip_file, 'rb') as zip_file_object:
with ZipMemoryFile(zip_file_object) as zipmemfile:
with zipmemfile.open('white-gemini-iv.vrt') as src:
assert src.driver == 'VRT'
assert src.count == 3
assert src.dtypes == ('uint8', 'uint8', 'uint8')
assert src.read().shape == (3, 768, 1024)
def test_vrt_memfile():
"""Successfully read an in-memory VRT"""
with open('tests/data/white-gemini-iv.vrt') as vrtfile:
source = vrtfile.read()
source = source.replace('<SourceFilename relativeToVRT="1">389225main_sw_1965_1024.jpg</SourceFilename>', '<SourceFilename relativeToVRT="0">{}/389225main_sw_1965_1024.jpg</SourceFilename>'.format(os.path.abspath("tests/data")))
with MemoryFile(source.encode('utf-8'), ext='vrt') as memfile:
with memfile.open() as src:
assert src.driver == 'VRT'
assert src.count == 3
assert src.dtypes == ('uint8', 'uint8', 'uint8')
assert src.read().shape == (3, 768, 1024)
def test_write_plus_mode():
with MemoryFile() as memfile:
with memfile.open(driver='GTiff', dtype='uint8', count=3, height=32, width=32, crs='epsg:3226', transform=Affine.identity() * Affine.scale(0.5, -0.5)) as dst:
dst.write(numpy.full((32, 32), 255, dtype='uint8'), 1)
dst.write(numpy.full((32, 32), 204, dtype='uint8'), 2)
dst.write(numpy.full((32, 32), 153, dtype='uint8'), 3)
data = dst.read()
assert (data[0] == 255).all()
assert (data[1] == 204).all()
assert (data[2] == 153).all()
def test_write_plus_model_jpeg():
with rasterio.Env(), MemoryFile() as memfile:
with memfile.open(driver='JPEG', dtype='uint8', count=3, height=32, width=32, crs='epsg:3226', transform=Affine.identity() * Affine.scale(0.5, -0.5)) as dst:
dst.write(numpy.full((32, 32), 255, dtype='uint8'), 1)
dst.write(numpy.full((32, 32), 204, dtype='uint8'), 2)
dst.write(numpy.full((32, 32), 153, dtype='uint8'), 3)
data = dst.read()
assert (data[0] == 255).all()
assert (data[1] == 204).all()
assert (data[2] == 153).all()
def test_memfile_copyfiles(path_rgb_msk_byte_tif):
"""Multiple files can be copied to a MemoryFile using copyfiles"""
with rasterio.open(path_rgb_msk_byte_tif) as src:
src_basename = os.path.basename(src.name)
with MemoryFile(filename=src_basename) as memfile:
copyfiles(src.name, memfile.name)
with memfile.open() as rgb2:
assert sorted(rgb2.files) == sorted(['/vsimem/{}'.format(src_basename), '/vsimem/{}.msk'.format(src_basename)])
def test_multi_memfile(path_rgb_msk_byte_tif):
"""Multiple files can be copied to a MemoryFile using copyfiles"""
with open(path_rgb_msk_byte_tif, 'rb') as tif_fp:
tif_bytes = tif_fp.read()
with open(path_rgb_msk_byte_tif + '.msk', 'rb') as msk_fp:
msk_bytes = msk_fp.read()
with MemoryFile(tif_bytes, filename='foo.tif') as tifmemfile, MemoryFile(msk_bytes, filename='foo.tif.msk') as mskmemfile:
with tifmemfile.open() as src:
assert sorted(src.files) == sorted(['/vsimem/foo.tif', '/vsimem/foo.tif.msk'])
assert src.mask_flag_enums == ([MaskFlags.per_dataset],) * 3
| 36.964789 | 236 | 0.636216 |
4a22e9efe0e177af5a16467e8e24da5520b89845 | 21,414 | py | Python | gps_pb2.py | djhedges/exit_speed | 86c8a36dd7c53d9f67157c359625b8d33715f917 | ["Apache-2.0"] | 10 | 2020-09-25T19:48:50.000Z | 2021-10-13T13:42:56.000Z | gps_pb2.py | djhedges/exit_speed | 86c8a36dd7c53d9f67157c359625b8d33715f917 | ["Apache-2.0"] | null | null | null | gps_pb2.py | djhedges/exit_speed | 86c8a36dd7c53d9f67157c359625b8d33715f917 | ["Apache-2.0"] | 1 | 2021-02-08T14:50:48.000Z | 2021-02-08T14:50:48.000Z | # pytype: skip-file
# -*- coding: utf-8 -*-
# Generated by the protocol buffer compiler. DO NOT EDIT!
# source: gps.proto
from google.protobuf import descriptor as _descriptor
from google.protobuf import message as _message
from google.protobuf import reflection as _reflection
from google.protobuf import symbol_database as _symbol_database
# @@protoc_insertion_point(imports)
_sym_db = _symbol_database.Default()
from google.protobuf import duration_pb2 as google_dot_protobuf_dot_duration__pb2
from google.protobuf import timestamp_pb2 as google_dot_protobuf_dot_timestamp__pb2
DESCRIPTOR = _descriptor.FileDescriptor(
name='gps.proto',
package='gps',
syntax='proto3',
serialized_options=b'Z8github.com/djhedges/exit_speed/gps_go_proto;gps_go_proto',
serialized_pb=b'\n\tgps.proto\x12\x03gps\x1a\x1egoogle/protobuf/duration.proto\x1a\x1fgoogle/protobuf/timestamp.proto\"\xa7\x06\n\x05Point\x12\x0b\n\x03lat\x18\x01 \x01(\x01\x12\x0b\n\x03lon\x18\x02 \x01(\x01\x12\x0b\n\x03\x61lt\x18\x03 \x01(\x01\x12\r\n\x05speed\x18\x04 \x01(\x01\x12(\n\x04time\x18\x05 \x01(\x0b\x32\x1a.google.protobuf.Timestamp\x12\x1d\n\x15start_finish_distance\x18\x06 \x01(\x01\x12\x13\n\x0btps_voltage\x18\x07 \x01(\x01\x12\x1a\n\x12water_temp_voltage\x18\x08 \x01(\x01\x12\x1c\n\x14oil_pressure_voltage\x18\t \x01(\x01\x12\x0b\n\x03rpm\x18\n \x01(\x01\x12\x0b\n\x03\x61\x66r\x18\x0b \x01(\x01\x12\x1a\n\x12\x66uel_level_voltage\x18\x0c \x01(\x01\x12\x17\n\x0f\x61\x63\x63\x65lerometer_x\x18\r \x01(\x01\x12\x17\n\x0f\x61\x63\x63\x65lerometer_y\x18\x0e \x01(\x01\x12\x17\n\x0f\x61\x63\x63\x65lerometer_z\x18\x0f \x01(\x01\x12\r\n\x05pitch\x18\x10 \x01(\x01\x12\x0c\n\x04roll\x18\x11 \x01(\x01\x12\x0e\n\x06gyro_x\x18\x12 \x01(\x01\x12\x0e\n\x06gyro_y\x18\x13 \x01(\x01\x12\x0e\n\x06gyro_z\x18\x14 \x01(\x01\x12\x0f\n\x07geohash\x18\x15 \x01(\t\x12$\n\x1c\x66ront_brake_pressure_voltage\x18\x16 \x01(\x01\x12#\n\x1brear_brake_pressure_voltage\x18\x17 \x01(\x01\x12\x17\n\x0f\x62\x61ttery_voltage\x18\x18 \x01(\x01\x12\x18\n\x10oil_temp_voltage\x18\x19 \x01(\x01\x12\x1b\n\x13\x65lapsed_duration_ms\x18\x1a \x01(\x01\x12\x1a\n\x12\x65lapsed_distance_m\x18\x1b \x01(\x01\x12\x16\n\x0elabjack_temp_f\x18\x1c \x01(\x01\x12\'\n\x0clf_tire_temp\x18\x1d \x01(\x0b\x32\x11.gps.TireIrSensor\x12\'\n\x0crf_tire_temp\x18\x1e \x01(\x0b\x32\x11.gps.TireIrSensor\x12\'\n\x0clr_tire_temp\x18\x1f \x01(\x0b\x32\x11.gps.TireIrSensor\x12\'\n\x0crr_tire_temp\x18 \x01(\x0b\x32\x11.gps.TireIrSensor\"^\n\x03Lap\x12\x1a\n\x06points\x18\x01 \x03(\x0b\x32\n.gps.Point\x12+\n\x08\x64uration\x18\x02 \x01(\x0b\x32\x19.google.protobuf.Duration\x12\x0e\n\x06number\x18\x03 \x01(\x05\"R\n\x07Session\x12\x16\n\x04laps\x18\x01 \x03(\x0b\x32\x08.gps.Lap\x12\r\n\x05track\x18\x02 \x01(\t\x12 \n\x0cstart_finish\x18\x03 \x01(\x0b\x32\n.gps.Point\"<\n\x0cTireIrSensor\x12\r\n\x05inner\x18\x01 \x01(\x01\x12\x0e\n\x06middle\x18\x02 \x01(\x01\x12\r\n\x05outer\x18\x03 \x01(\x01\x42:Z8github.com/djhedges/exit_speed/gps_go_proto;gps_go_protob\x06proto3'
,
dependencies=[google_dot_protobuf_dot_duration__pb2.DESCRIPTOR,google_dot_protobuf_dot_timestamp__pb2.DESCRIPTOR,])
_POINT = _descriptor.Descriptor(
name='Point',
full_name='gps.Point',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='lat', full_name='gps.Point.lat', index=0,
number=1, type=1, cpp_type=5, label=1,
has_default_value=False, default_value=float(0),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='lon', full_name='gps.Point.lon', index=1,
number=2, type=1, cpp_type=5, label=1,
has_default_value=False, default_value=float(0),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='alt', full_name='gps.Point.alt', index=2,
number=3, type=1, cpp_type=5, label=1,
has_default_value=False, default_value=float(0),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='speed', full_name='gps.Point.speed', index=3,
number=4, type=1, cpp_type=5, label=1,
has_default_value=False, default_value=float(0),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='time', full_name='gps.Point.time', index=4,
number=5, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='start_finish_distance', full_name='gps.Point.start_finish_distance', index=5,
number=6, type=1, cpp_type=5, label=1,
has_default_value=False, default_value=float(0),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='tps_voltage', full_name='gps.Point.tps_voltage', index=6,
number=7, type=1, cpp_type=5, label=1,
has_default_value=False, default_value=float(0),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='water_temp_voltage', full_name='gps.Point.water_temp_voltage', index=7,
number=8, type=1, cpp_type=5, label=1,
has_default_value=False, default_value=float(0),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='oil_pressure_voltage', full_name='gps.Point.oil_pressure_voltage', index=8,
number=9, type=1, cpp_type=5, label=1,
has_default_value=False, default_value=float(0),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='rpm', full_name='gps.Point.rpm', index=9,
number=10, type=1, cpp_type=5, label=1,
has_default_value=False, default_value=float(0),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='afr', full_name='gps.Point.afr', index=10,
number=11, type=1, cpp_type=5, label=1,
has_default_value=False, default_value=float(0),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='fuel_level_voltage', full_name='gps.Point.fuel_level_voltage', index=11,
number=12, type=1, cpp_type=5, label=1,
has_default_value=False, default_value=float(0),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='accelerometer_x', full_name='gps.Point.accelerometer_x', index=12,
number=13, type=1, cpp_type=5, label=1,
has_default_value=False, default_value=float(0),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='accelerometer_y', full_name='gps.Point.accelerometer_y', index=13,
number=14, type=1, cpp_type=5, label=1,
has_default_value=False, default_value=float(0),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='accelerometer_z', full_name='gps.Point.accelerometer_z', index=14,
number=15, type=1, cpp_type=5, label=1,
has_default_value=False, default_value=float(0),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='pitch', full_name='gps.Point.pitch', index=15,
number=16, type=1, cpp_type=5, label=1,
has_default_value=False, default_value=float(0),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='roll', full_name='gps.Point.roll', index=16,
number=17, type=1, cpp_type=5, label=1,
has_default_value=False, default_value=float(0),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='gyro_x', full_name='gps.Point.gyro_x', index=17,
number=18, type=1, cpp_type=5, label=1,
has_default_value=False, default_value=float(0),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='gyro_y', full_name='gps.Point.gyro_y', index=18,
number=19, type=1, cpp_type=5, label=1,
has_default_value=False, default_value=float(0),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='gyro_z', full_name='gps.Point.gyro_z', index=19,
number=20, type=1, cpp_type=5, label=1,
has_default_value=False, default_value=float(0),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='geohash', full_name='gps.Point.geohash', index=20,
number=21, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='front_brake_pressure_voltage', full_name='gps.Point.front_brake_pressure_voltage', index=21,
number=22, type=1, cpp_type=5, label=1,
has_default_value=False, default_value=float(0),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='rear_brake_pressure_voltage', full_name='gps.Point.rear_brake_pressure_voltage', index=22,
number=23, type=1, cpp_type=5, label=1,
has_default_value=False, default_value=float(0),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='battery_voltage', full_name='gps.Point.battery_voltage', index=23,
number=24, type=1, cpp_type=5, label=1,
has_default_value=False, default_value=float(0),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='oil_temp_voltage', full_name='gps.Point.oil_temp_voltage', index=24,
number=25, type=1, cpp_type=5, label=1,
has_default_value=False, default_value=float(0),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='elapsed_duration_ms', full_name='gps.Point.elapsed_duration_ms', index=25,
number=26, type=1, cpp_type=5, label=1,
has_default_value=False, default_value=float(0),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='elapsed_distance_m', full_name='gps.Point.elapsed_distance_m', index=26,
number=27, type=1, cpp_type=5, label=1,
has_default_value=False, default_value=float(0),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='labjack_temp_f', full_name='gps.Point.labjack_temp_f', index=27,
number=28, type=1, cpp_type=5, label=1,
has_default_value=False, default_value=float(0),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='lf_tire_temp', full_name='gps.Point.lf_tire_temp', index=28,
number=29, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='rf_tire_temp', full_name='gps.Point.rf_tire_temp', index=29,
number=30, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='lr_tire_temp', full_name='gps.Point.lr_tire_temp', index=30,
number=31, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='rr_tire_temp', full_name='gps.Point.rr_tire_temp', index=31,
number=32, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=84,
serialized_end=891,
)
_LAP = _descriptor.Descriptor(
name='Lap',
full_name='gps.Lap',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='points', full_name='gps.Lap.points', index=0,
number=1, type=11, cpp_type=10, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='duration', full_name='gps.Lap.duration', index=1,
number=2, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='number', full_name='gps.Lap.number', index=2,
number=3, type=5, cpp_type=1, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=893,
serialized_end=987,
)
_SESSION = _descriptor.Descriptor(
name='Session',
full_name='gps.Session',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='laps', full_name='gps.Session.laps', index=0,
number=1, type=11, cpp_type=10, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='track', full_name='gps.Session.track', index=1,
number=2, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='start_finish', full_name='gps.Session.start_finish', index=2,
number=3, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=989,
serialized_end=1071,
)
_TIREIRSENSOR = _descriptor.Descriptor(
name='TireIrSensor',
full_name='gps.TireIrSensor',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='inner', full_name='gps.TireIrSensor.inner', index=0,
number=1, type=1, cpp_type=5, label=1,
has_default_value=False, default_value=float(0),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='middle', full_name='gps.TireIrSensor.middle', index=1,
number=2, type=1, cpp_type=5, label=1,
has_default_value=False, default_value=float(0),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='outer', full_name='gps.TireIrSensor.outer', index=2,
number=3, type=1, cpp_type=5, label=1,
has_default_value=False, default_value=float(0),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=1073,
serialized_end=1133,
)
_POINT.fields_by_name['time'].message_type = google_dot_protobuf_dot_timestamp__pb2._TIMESTAMP
_POINT.fields_by_name['lf_tire_temp'].message_type = _TIREIRSENSOR
_POINT.fields_by_name['rf_tire_temp'].message_type = _TIREIRSENSOR
_POINT.fields_by_name['lr_tire_temp'].message_type = _TIREIRSENSOR
_POINT.fields_by_name['rr_tire_temp'].message_type = _TIREIRSENSOR
_LAP.fields_by_name['points'].message_type = _POINT
_LAP.fields_by_name['duration'].message_type = google_dot_protobuf_dot_duration__pb2._DURATION
_SESSION.fields_by_name['laps'].message_type = _LAP
_SESSION.fields_by_name['start_finish'].message_type = _POINT
DESCRIPTOR.message_types_by_name['Point'] = _POINT
DESCRIPTOR.message_types_by_name['Lap'] = _LAP
DESCRIPTOR.message_types_by_name['Session'] = _SESSION
DESCRIPTOR.message_types_by_name['TireIrSensor'] = _TIREIRSENSOR
_sym_db.RegisterFileDescriptor(DESCRIPTOR)
Point = _reflection.GeneratedProtocolMessageType('Point', (_message.Message,), {
'DESCRIPTOR' : _POINT,
'__module__' : 'gps_pb2'
# @@protoc_insertion_point(class_scope:gps.Point)
})
_sym_db.RegisterMessage(Point)
Lap = _reflection.GeneratedProtocolMessageType('Lap', (_message.Message,), {
'DESCRIPTOR' : _LAP,
'__module__' : 'gps_pb2'
# @@protoc_insertion_point(class_scope:gps.Lap)
})
_sym_db.RegisterMessage(Lap)
Session = _reflection.GeneratedProtocolMessageType('Session', (_message.Message,), {
'DESCRIPTOR' : _SESSION,
'__module__' : 'gps_pb2'
# @@protoc_insertion_point(class_scope:gps.Session)
})
_sym_db.RegisterMessage(Session)
TireIrSensor = _reflection.GeneratedProtocolMessageType('TireIrSensor', (_message.Message,), {
'DESCRIPTOR' : _TIREIRSENSOR,
'__module__' : 'gps_pb2'
# @@protoc_insertion_point(class_scope:gps.TireIrSensor)
})
_sym_db.RegisterMessage(TireIrSensor)
DESCRIPTOR._options = None
# @@protoc_insertion_point(module_scope)
| 46.653595 | 2,244 | 0.736761 |
4a22ea2ade7c81075c3531ffda1732ae18c96732 | 39 | py | Python | src/tuplenet/version.py | darcy-xiao/TupleNet | c44275501b43fbf02da824a21ba60ef1e29a0fbe | [
"Apache-2.0"
] | 1 | 2018-12-07T09:12:30.000Z | 2018-12-07T09:12:30.000Z | src/tuplenet/version.py | darcy-xiao/TupleNet | c44275501b43fbf02da824a21ba60ef1e29a0fbe | [
"Apache-2.0"
] | null | null | null | src/tuplenet/version.py | darcy-xiao/TupleNet | c44275501b43fbf02da824a21ba60ef1e29a0fbe | [
"Apache-2.0"
] | null | null | null | __version__ = '0.1.8'
git_version = ''
| 13 | 21 | 0.641026 |
4a22ea5329c709a72c79a0390da302b927221e6c | 138 | py | Python | src/pyinterp/interpolator/__init__.py | readthedocs-assistant/pangeo-pyinterp | e9dc18445dce36638d5a90f64c8e2f1b53164f90 | [
"BSD-3-Clause"
] | 67 | 2019-07-09T09:10:22.000Z | 2022-03-01T09:46:35.000Z | src/pyinterp/interpolator/__init__.py | readthedocs-assistant/pangeo-pyinterp | e9dc18445dce36638d5a90f64c8e2f1b53164f90 | [
"BSD-3-Clause"
] | 8 | 2019-07-15T13:54:31.000Z | 2021-06-28T05:06:34.000Z | src/pyinterp/interpolator/__init__.py | readthedocs-assistant/pangeo-pyinterp | e9dc18445dce36638d5a90f64c8e2f1b53164f90 | [
"BSD-3-Clause"
] | 7 | 2019-07-15T17:28:16.000Z | 2022-01-19T19:43:47.000Z | from .bicubic import bicubic
from .bivariate import bivariate
from .trivariate import trivariate
from .quadrivariate import quadrivariate
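# The four imports above are convenience re-exports that form the public surface
# of pyinterp.interpolator (an explicit __all__ list would make this intent clearer).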
| 27.6 | 40 | 0.855072 |
4a22ea718a336a8f88c2a73ce5801b06be99c203 | 2,069 | py | Python | tests/event_store/test_reading_all_streams.py | mpsiva89/protean | 315fa56da3f64178bbbf0edf1995af46d5eb3da7 | [
"BSD-3-Clause"
] | null | null | null | tests/event_store/test_reading_all_streams.py | mpsiva89/protean | 315fa56da3f64178bbbf0edf1995af46d5eb3da7 | [
"BSD-3-Clause"
] | null | null | null | tests/event_store/test_reading_all_streams.py | mpsiva89/protean | 315fa56da3f64178bbbf0edf1995af46d5eb3da7 | [
"BSD-3-Clause"
] | null | null | null | from __future__ import annotations
from datetime import datetime
from uuid import uuid4
import pytest
from protean import BaseEvent, BaseEventSourcedAggregate
from protean.fields import DateTime, Identifier, String, Text
class User(BaseEventSourcedAggregate):
email = String()
name = String(max_length=50)
class Registered(BaseEvent):
id = Identifier()
email = String()
class Meta:
aggregate_cls = User
class Activated(BaseEvent):
id = Identifier(required=True)
class Meta:
aggregate_cls = User
class Renamed(BaseEvent):
id = Identifier(required=True)
name = String(required=True, max_length=50)
class Meta:
aggregate_cls = User
class Post(BaseEventSourcedAggregate):
topic = String()
content = Text()
class Created(BaseEvent):
id = Identifier(identifier=True)
topic = String()
content = Text()
class Meta:
aggregate_cls = Post
class Published(BaseEvent):
id = Identifier(required=True)
published_time = DateTime(default=datetime.utcnow)
class Meta:
aggregate_cls = Post
@pytest.mark.eventstore
def test_reading_messages_from_all_streams(test_domain):
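    # Append three events to a User stream and two to a Post stream, then verify
    # that reading the "$all" stream returns all five messages.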
user_identifier = str(uuid4())
event1 = Registered(id=user_identifier, email="[email protected]")
user = User(**event1.to_dict())
test_domain.event_store.store.append_aggregate_event(user, event1)
event2 = Activated(id=user_identifier)
test_domain.event_store.store.append_aggregate_event(user, event2)
event3 = Renamed(id=user_identifier, name="Jane Doe")
test_domain.event_store.store.append_aggregate_event(user, event3)
post_identifier = str(uuid4())
event4 = Created(id=post_identifier, topic="Foo", content="Bar")
post = Post(**event4.to_dict())
test_domain.event_store.store.append_aggregate_event(post, event4)
event5 = Published(id=post_identifier)
test_domain.event_store.store.append_aggregate_event(post, event5)
messages = test_domain.event_store.store.read("$all")
assert len(messages) == 5
| 24.341176 | 73 | 0.724021 |
4a22ebf74d58332a12f012badacfc6df046513fe | 2,412 | py | Python | plot/figure7.py | leelew/AttConvLSTM | 62f614efa8654125014e3e31efd309a946f23b6c | [
"MIT"
] | 4 | 2021-09-26T10:01:43.000Z | 2022-02-08T00:01:45.000Z | plot/figure7.py | leelew/AttConvLSTM | 62f614efa8654125014e3e31efd309a946f23b6c | [
"MIT"
] | null | null | null | plot/figure7.py | leelew/AttConvLSTM | 62f614efa8654125014e3e31efd309a946f23b6c | [
"MIT"
] | null | null | null | import sys
sys.path.append('../')
import matplotlib.pyplot as plt
import numpy as np
from mpl_toolkits.basemap import Basemap
import matplotlib.colors as mcolors
plt.rc('font', family='Times New Roman')
from utils import fillna, gen_meshgrid, gen_metric, gen_tac_sac
def figure7():
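    # tac/sac are assumed to be gridded temporal- and spatial-autocorrelation
    # fields returned by the project-local utils.gen_tac_sac helper.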
tac, sac = gen_tac_sac()
# fig
plt.figure(figsize=(10, 7))
# generate meshgrid
lon, lat = gen_meshgrid()
colors = ('white', 'lightcyan', 'cyan', 'darkturquoise',
'deepskyblue', 'dodgerblue', 'lightgreen','gold','yellow')
clrmap = mcolors.LinearSegmentedColormap.from_list("mycmap", colors)
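    # custom white-to-yellow colormap used for the SAC (Local Moran Index) panel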
# ----------------------------- Figure 7 (a) -------------------------------
ax1 = plt.subplot2grid((1, 2), (0, 0))
# projection
m = Basemap(projection='mill',
llcrnrlat=27, urcrnrlat=50,
llcrnrlon=-122.9, urcrnrlon=-70.5,)
# lines
m.drawcoastlines()
# generate meshgrid
x, y = m(lon, lat)
    # contourf
sc = m.contourf(x, y,
sac,
cmap=clrmap,
vmin=0, vmax=0.2)
m.colorbar(sc, location='bottom', extend='both', fraction=0.7,
shrink=0.8, pad=0.1, label='Local Moran Index')
# text
x, y = m(-123, 50.5)
plt.text(x, y, "(a) spatial autocorrelation (SAC)", fontweight='bold', fontsize=14)
# inset colorbar
#axin1 = ax1.inset_axes([0.899, 0.024, 0.02, 0.35])
#plt.colorbar(sc, cax=axin1,)
# ----------------------------- Figure 7 (b) -------------------------------
plt.subplot2grid((1, 2), (0, 1))
# projection
m = Basemap(projection='mill',
llcrnrlat=27, urcrnrlat=50,
llcrnrlon=-122.9, urcrnrlon=-70.5,)
# lines
m.drawcoastlines()
# meshgrid
x, y = m(lon, lat)
    # contourf
sc = m.contourf(x, y,
tac,
cmap='jet',
vmin=0.8, vmax=1)
m.colorbar(sc, location='bottom',pad=0.1, label='Time Autocorrelation Index')
# text
x, y = m(-123, 50.5)
plt.text(x, y, '(b) temporal autocorrelation (TAC)', fontweight='bold', fontsize=14)
plt.subplots_adjust(wspace=0.1)
# --------------------------------------------------------------------------
# save
plt.savefig('/Users/lewlee/Desktop/figure7.pdf', dpi=600)
if __name__ == "__main__":
figure7()
| 26.217391 | 88 | 0.524046 |
4a22ec26a60275e04101d2a4b69ad0941be966f5 | 317 | py | Python | pdfreader/models.py | vishalagrawal22/JEEMainsMarksCalculator | 4de5c1c510abc19b78fd02532298957e9d243e02 | [
"MIT"
] | 2 | 2020-12-04T18:36:29.000Z | 2021-03-13T15:21:31.000Z | pdfreader/models.py | vishalagrawal22/JEEMainsMarksCalculator | 4de5c1c510abc19b78fd02532298957e9d243e02 | [
"MIT"
] | null | null | null | pdfreader/models.py | vishalagrawal22/JEEMainsMarksCalculator | 4de5c1c510abc19b78fd02532298957e9d243e02 | [
"MIT"
] | null | null | null | from django.db import models
# Create your models here.
class MCQQuestion(models.Model):
QuestionId = models.CharField(max_length = 30)
AnswerId = models.CharField(max_length = 30)
class SAQuestion(models.Model):
QuestionId = models.CharField(max_length = 30)
Answer = models.CharField(max_length = 30) | 31.7 | 48 | 0.753943 |
4a22ed029e84b52733035aa3bf29af6dc0e0547f | 21,891 | py | Python | ExtentionPackages/cryptography/x509/base.py | hongsofwing/PyQYT-master | 9a112d9adbf9885a8b7535b7ef7759b60a0f9a29 | [
"CNRI-Python"
] | 2 | 2017-04-24T03:04:45.000Z | 2017-09-19T03:38:37.000Z | ExtentionPackages/cryptography/x509/base.py | hongsofwing/PyQYT-master | 9a112d9adbf9885a8b7535b7ef7759b60a0f9a29 | [
"CNRI-Python"
] | 4 | 2017-10-24T21:47:53.000Z | 2019-09-22T13:12:57.000Z | ExtentionPackages/cryptography/x509/base.py | hongsofwing/PyQYT-master | 9a112d9adbf9885a8b7535b7ef7759b60a0f9a29 | [
"CNRI-Python"
] | 2 | 2019-06-17T11:51:56.000Z | 2020-07-25T08:29:56.000Z | # This file is dual licensed under the terms of the Apache License, Version
# 2.0, and the BSD License. See the LICENSE file in the root of this repository
# for complete details.
from __future__ import absolute_import, division, print_function
import abc
import datetime
from enum import Enum
import six
from cryptography import utils
from cryptography.hazmat.primitives.asymmetric import dsa, ec, rsa
from cryptography.x509.extensions import Extension, ExtensionType
from cryptography.x509.name import Name
_UNIX_EPOCH = datetime.datetime(1970, 1, 1)
class Version(Enum):
v1 = 0
v3 = 2
def load_pem_x509_certificate(data, backend):
return backend.load_pem_x509_certificate(data)
def load_der_x509_certificate(data, backend):
return backend.load_der_x509_certificate(data)
def load_pem_x509_csr(data, backend):
return backend.load_pem_x509_csr(data)
def load_der_x509_csr(data, backend):
return backend.load_der_x509_csr(data)
def load_pem_x509_crl(data, backend):
return backend.load_pem_x509_crl(data)
def load_der_x509_crl(data, backend):
return backend.load_der_x509_crl(data)
class InvalidVersion(Exception):
def __init__(self, msg, parsed_version):
super(InvalidVersion, self).__init__(msg)
self.parsed_version = parsed_version
@six.add_metaclass(abc.ABCMeta)
class Certificate(object):
@abc.abstractmethod
def fingerprint(self, algorithm):
"""
Returns bytes using digest passed.
"""
@abc.abstractproperty
def serial(self):
"""
Returns certificate serial number
"""
@abc.abstractproperty
def version(self):
"""
Returns the certificate version
"""
@abc.abstractmethod
def public_key(self):
"""
Returns the public key
"""
@abc.abstractproperty
def not_valid_before(self):
"""
Not before time (represented as UTC datetime)
"""
@abc.abstractproperty
def not_valid_after(self):
"""
Not after time (represented as UTC datetime)
"""
@abc.abstractproperty
def issuer(self):
"""
Returns the issuer name object.
"""
@abc.abstractproperty
def subject(self):
"""
Returns the subject name object.
"""
@abc.abstractproperty
def signature_hash_algorithm(self):
"""
Returns a HashAlgorithm corresponding to the type of the digest signed
in the certificate.
"""
@abc.abstractproperty
def extensions(self):
"""
Returns an Extensions object.
"""
@abc.abstractproperty
def signature(self):
"""
Returns the signature bytes.
"""
@abc.abstractproperty
def tbs_certificate_bytes(self):
"""
Returns the tbsCertificate payload bytes as defined in RFC 5280.
"""
@abc.abstractmethod
def __eq__(self, other):
"""
Checks equality.
"""
@abc.abstractmethod
def __ne__(self, other):
"""
Checks not equal.
"""
@abc.abstractmethod
def __hash__(self):
"""
Computes a hash.
"""
@abc.abstractmethod
def public_bytes(self, encoding):
"""
Serializes the certificate to PEM or DER format.
"""
@six.add_metaclass(abc.ABCMeta)
class CertificateRevocationList(object):
@abc.abstractmethod
def public_bytes(self, encoding):
"""
Serializes the CRL to PEM or DER format.
"""
@abc.abstractmethod
def fingerprint(self, algorithm):
"""
Returns bytes using digest passed.
"""
@abc.abstractproperty
def signature_hash_algorithm(self):
"""
Returns a HashAlgorithm corresponding to the type of the digest signed
in the certificate.
"""
@abc.abstractproperty
def issuer(self):
"""
Returns the X509Name with the issuer of this CRL.
"""
@abc.abstractproperty
def next_update(self):
"""
Returns the date of next update for this CRL.
"""
@abc.abstractproperty
def last_update(self):
"""
Returns the date of last update for this CRL.
"""
@abc.abstractproperty
def extensions(self):
"""
Returns an Extensions object containing a list of CRL extensions.
"""
@abc.abstractproperty
def signature(self):
"""
Returns the signature bytes.
"""
@abc.abstractproperty
def tbs_certlist_bytes(self):
"""
Returns the tbsCertList payload bytes as defined in RFC 5280.
"""
@abc.abstractmethod
def __eq__(self, other):
"""
Checks equality.
"""
@abc.abstractmethod
def __ne__(self, other):
"""
Checks not equal.
"""
@six.add_metaclass(abc.ABCMeta)
class CertificateSigningRequest(object):
@abc.abstractmethod
def __eq__(self, other):
"""
Checks equality.
"""
@abc.abstractmethod
def __ne__(self, other):
"""
Checks not equal.
"""
@abc.abstractmethod
def __hash__(self):
"""
Computes a hash.
"""
@abc.abstractmethod
def public_key(self):
"""
Returns the public key
"""
@abc.abstractproperty
def subject(self):
"""
Returns the subject name object.
"""
@abc.abstractproperty
def signature_hash_algorithm(self):
"""
Returns a HashAlgorithm corresponding to the type of the digest signed
in the certificate.
"""
@abc.abstractproperty
def extensions(self):
"""
Returns the extensions in the signing request.
"""
@abc.abstractmethod
def public_bytes(self, encoding):
"""
Encodes the request to PEM or DER format.
"""
@abc.abstractproperty
def signature(self):
"""
Returns the signature bytes.
"""
@abc.abstractproperty
def tbs_certrequest_bytes(self):
"""
Returns the PKCS#10 CertificationRequestInfo bytes as defined in RFC
2986.
"""
@abc.abstractproperty
def is_signature_valid(self):
"""
Verifies signature of signing request.
"""
@six.add_metaclass(abc.ABCMeta)
class RevokedCertificate(object):
@abc.abstractproperty
def serial_number(self):
"""
Returns the serial number of the revoked certificate.
"""
@abc.abstractproperty
def revocation_date(self):
"""
Returns the date of when this certificate was revoked.
"""
@abc.abstractproperty
def extensions(self):
"""
Returns an Extensions object containing a list of Revoked extensions.
"""
class CertificateSigningRequestBuilder(object):
def __init__(self, subject_name=None, extensions=[]):
"""
Creates an empty X.509 certificate request (v1).
"""
self._subject_name = subject_name
self._extensions = extensions
def subject_name(self, name):
"""
Sets the certificate requestor's distinguished name.
"""
if not isinstance(name, Name):
raise TypeError('Expecting x509.Name object.')
if self._subject_name is not None:
raise ValueError('The subject name may only be set once.')
return CertificateSigningRequestBuilder(name, self._extensions)
def add_extension(self, extension, critical):
"""
Adds an X.509 extension to the certificate request.
"""
if not isinstance(extension, ExtensionType):
raise TypeError("extension must be an ExtensionType")
extension = Extension(extension.oid, critical, extension)
# TODO: This is quadratic in the number of extensions
for e in self._extensions:
if e.oid == extension.oid:
raise ValueError('This extension has already been set.')
return CertificateSigningRequestBuilder(
self._subject_name, self._extensions + [extension]
)
def sign(self, private_key, algorithm, backend):
"""
Signs the request using the requestor's private key.
"""
if self._subject_name is None:
raise ValueError("A CertificateSigningRequest must have a subject")
return backend.create_x509_csr(self, private_key, algorithm)
class CertificateBuilder(object):
def __init__(self, issuer_name=None, subject_name=None,
public_key=None, serial_number=None, not_valid_before=None,
not_valid_after=None, extensions=[]):
self._version = Version.v3
self._issuer_name = issuer_name
self._subject_name = subject_name
self._public_key = public_key
self._serial_number = serial_number
self._not_valid_before = not_valid_before
self._not_valid_after = not_valid_after
self._extensions = extensions
def issuer_name(self, name):
"""
Sets the CA's distinguished name.
"""
if not isinstance(name, Name):
raise TypeError('Expecting x509.Name object.')
if self._issuer_name is not None:
raise ValueError('The issuer name may only be set once.')
return CertificateBuilder(
name, self._subject_name, self._public_key,
self._serial_number, self._not_valid_before,
self._not_valid_after, self._extensions
)
def subject_name(self, name):
"""
Sets the requestor's distinguished name.
"""
if not isinstance(name, Name):
raise TypeError('Expecting x509.Name object.')
if self._subject_name is not None:
raise ValueError('The subject name may only be set once.')
return CertificateBuilder(
self._issuer_name, name, self._public_key,
self._serial_number, self._not_valid_before,
self._not_valid_after, self._extensions
)
def public_key(self, key):
"""
Sets the requestor's public key (as found in the signing request).
"""
if not isinstance(key, (dsa.DSAPublicKey, rsa.RSAPublicKey,
ec.EllipticCurvePublicKey)):
raise TypeError('Expecting one of DSAPublicKey, RSAPublicKey,'
' or EllipticCurvePublicKey.')
if self._public_key is not None:
raise ValueError('The public key may only be set once.')
return CertificateBuilder(
self._issuer_name, self._subject_name, key,
self._serial_number, self._not_valid_before,
self._not_valid_after, self._extensions
)
def serial_number(self, number):
"""
Sets the certificate serial number.
"""
if not isinstance(number, six.integer_types):
raise TypeError('Serial number must be of integral type.')
if self._serial_number is not None:
raise ValueError('The serial number may only be set once.')
if number < 0:
raise ValueError('The serial number should be non-negative.')
if utils.bit_length(number) > 160: # As defined in RFC 5280
raise ValueError('The serial number should not be more than 160 '
'bits.')
return CertificateBuilder(
self._issuer_name, self._subject_name,
self._public_key, number, self._not_valid_before,
self._not_valid_after, self._extensions
)
def not_valid_before(self, time):
"""
Sets the certificate activation time.
"""
if not isinstance(time, datetime.datetime):
raise TypeError('Expecting datetime object.')
if self._not_valid_before is not None:
raise ValueError('The not valid before may only be set once.')
if time <= _UNIX_EPOCH:
raise ValueError('The not valid before date must be after the unix'
' epoch (1970 January 1).')
if self._not_valid_after is not None and time > self._not_valid_after:
raise ValueError(
'The not valid before date must be before the not valid after '
'date.'
)
return CertificateBuilder(
self._issuer_name, self._subject_name,
self._public_key, self._serial_number, time,
self._not_valid_after, self._extensions
)
def not_valid_after(self, time):
"""
Sets the certificate expiration time.
"""
if not isinstance(time, datetime.datetime):
raise TypeError('Expecting datetime object.')
if self._not_valid_after is not None:
raise ValueError('The not valid after may only be set once.')
if time <= _UNIX_EPOCH:
raise ValueError('The not valid after date must be after the unix'
' epoch (1970 January 1).')
if (self._not_valid_before is not None and
time < self._not_valid_before):
raise ValueError(
'The not valid after date must be after the not valid before '
'date.'
)
return CertificateBuilder(
self._issuer_name, self._subject_name,
self._public_key, self._serial_number, self._not_valid_before,
time, self._extensions
)
def add_extension(self, extension, critical):
"""
Adds an X.509 extension to the certificate.
"""
if not isinstance(extension, ExtensionType):
raise TypeError("extension must be an ExtensionType")
extension = Extension(extension.oid, critical, extension)
# TODO: This is quadratic in the number of extensions
for e in self._extensions:
if e.oid == extension.oid:
raise ValueError('This extension has already been set.')
return CertificateBuilder(
self._issuer_name, self._subject_name,
self._public_key, self._serial_number, self._not_valid_before,
self._not_valid_after, self._extensions + [extension]
)
def sign(self, private_key, algorithm, backend):
"""
Signs the certificate using the CA's private key.
"""
if self._subject_name is None:
raise ValueError("A certificate must have a subject name")
if self._issuer_name is None:
raise ValueError("A certificate must have an issuer name")
if self._serial_number is None:
raise ValueError("A certificate must have a serial number")
if self._not_valid_before is None:
raise ValueError("A certificate must have a not valid before time")
if self._not_valid_after is None:
raise ValueError("A certificate must have a not valid after time")
if self._public_key is None:
raise ValueError("A certificate must have a public key")
return backend.create_x509_certificate(self, private_key, algorithm)
class CertificateRevocationListBuilder(object):
def __init__(self, issuer_name=None, last_update=None, next_update=None,
extensions=[], revoked_certificates=[]):
self._issuer_name = issuer_name
self._last_update = last_update
self._next_update = next_update
self._extensions = extensions
self._revoked_certificates = revoked_certificates
def issuer_name(self, issuer_name):
if not isinstance(issuer_name, Name):
raise TypeError('Expecting x509.Name object.')
if self._issuer_name is not None:
raise ValueError('The issuer name may only be set once.')
return CertificateRevocationListBuilder(
issuer_name, self._last_update, self._next_update,
self._extensions, self._revoked_certificates
)
def last_update(self, last_update):
if not isinstance(last_update, datetime.datetime):
raise TypeError('Expecting datetime object.')
if self._last_update is not None:
raise ValueError('Last update may only be set once.')
if last_update <= _UNIX_EPOCH:
raise ValueError('The last update date must be after the unix'
' epoch (1970 January 1).')
if self._next_update is not None and last_update > self._next_update:
raise ValueError(
'The last update date must be before the next update date.'
)
return CertificateRevocationListBuilder(
self._issuer_name, last_update, self._next_update,
self._extensions, self._revoked_certificates
)
def next_update(self, next_update):
if not isinstance(next_update, datetime.datetime):
raise TypeError('Expecting datetime object.')
if self._next_update is not None:
raise ValueError('Last update may only be set once.')
if next_update <= _UNIX_EPOCH:
raise ValueError('The last update date must be after the unix'
' epoch (1970 January 1).')
if self._last_update is not None and next_update < self._last_update:
raise ValueError(
'The next update date must be after the last update date.'
)
return CertificateRevocationListBuilder(
self._issuer_name, self._last_update, next_update,
self._extensions, self._revoked_certificates
)
def add_extension(self, extension, critical):
"""
Adds an X.509 extension to the certificate revocation list.
"""
if not isinstance(extension, ExtensionType):
raise TypeError("extension must be an ExtensionType")
extension = Extension(extension.oid, critical, extension)
# TODO: This is quadratic in the number of extensions
for e in self._extensions:
if e.oid == extension.oid:
raise ValueError('This extension has already been set.')
return CertificateRevocationListBuilder(
self._issuer_name, self._last_update, self._next_update,
self._extensions + [extension], self._revoked_certificates
)
def add_revoked_certificate(self, revoked_certificate):
"""
Adds a revoked certificate to the CRL.
"""
if not isinstance(revoked_certificate, RevokedCertificate):
raise TypeError("Must be an instance of RevokedCertificate")
return CertificateRevocationListBuilder(
self._issuer_name, self._last_update,
self._next_update, self._extensions,
self._revoked_certificates + [revoked_certificate]
)
def sign(self, private_key, algorithm, backend):
if self._issuer_name is None:
raise ValueError("A CRL must have an issuer name")
if self._last_update is None:
raise ValueError("A CRL must have a last update time")
if self._next_update is None:
raise ValueError("A CRL must have a next update time")
return backend.create_x509_crl(self, private_key, algorithm)
class RevokedCertificateBuilder(object):
def __init__(self, serial_number=None, revocation_date=None,
extensions=[]):
self._serial_number = serial_number
self._revocation_date = revocation_date
self._extensions = extensions
def serial_number(self, number):
if not isinstance(number, six.integer_types):
raise TypeError('Serial number must be of integral type.')
if self._serial_number is not None:
raise ValueError('The serial number may only be set once.')
if number < 0:
raise ValueError('The serial number should be non-negative.')
if utils.bit_length(number) > 160: # As defined in RFC 5280
raise ValueError('The serial number should not be more than 160 '
'bits.')
return RevokedCertificateBuilder(
number, self._revocation_date, self._extensions
)
def revocation_date(self, time):
if not isinstance(time, datetime.datetime):
raise TypeError('Expecting datetime object.')
if self._revocation_date is not None:
raise ValueError('The revocation date may only be set once.')
if time <= _UNIX_EPOCH:
raise ValueError('The revocation date must be after the unix'
' epoch (1970 January 1).')
return RevokedCertificateBuilder(
self._serial_number, time, self._extensions
)
def add_extension(self, extension, critical):
if not isinstance(extension, ExtensionType):
raise TypeError("extension must be an ExtensionType")
extension = Extension(extension.oid, critical, extension)
# TODO: This is quadratic in the number of extensions
for e in self._extensions:
if e.oid == extension.oid:
raise ValueError('This extension has already been set.')
return RevokedCertificateBuilder(
self._serial_number, self._revocation_date,
self._extensions + [extension]
)
def build(self, backend):
if self._serial_number is None:
raise ValueError("A revoked certificate must have a serial number")
if self._revocation_date is None:
raise ValueError(
"A revoked certificate must have a revocation date"
)
return backend.create_x509_revoked_certificate(self)
| 32.051245 | 79 | 0.622722 |
4a22ed4fbc6fcd209e30c27da28e19c136b7180e | 2,830 | py | Python | app/auth/views.py | Leina33/Blog | 743e4d2a8036a74b1406f6f857b4703f014b458d | [
"MIT"
] | null | null | null | app/auth/views.py | Leina33/Blog | 743e4d2a8036a74b1406f6f857b4703f014b458d | [
"MIT"
] | null | null | null | app/auth/views.py | Leina33/Blog | 743e4d2a8036a74b1406f6f857b4703f014b458d | [
"MIT"
] | null | null | null | from flask import render_template,redirect,url_for,flash,request
from . import auth
from flask_login import login_required,current_user, login_user,logout_user
import re
from app.models import User
@auth.route('/login',methods=['GET','POST'])
def login():
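    # Authenticate the submitted username/password against the User model and
    # re-render the login form with an error message on failure.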
if request.method == 'POST':
form = request.form
username = form.get("username")
password = form.get("password")
        user = User.query.filter_by(username=username).first()
        if user is None:
            error = "No user with that username exists"
            return render_template('login.html', error=error)
        is_correct_password = user.check_password(password)
        if not is_correct_password:
            error = "Incorrect password"
            return render_template('login.html', error=error)
login_user(user)
return redirect(url_for('main.home'))
return render_template('login.html', title='Login')
@auth.route('/registration', methods=['POST','GET'])
def register():
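    # Validate the registration form (required fields, email format, matching
    # passwords, unique username/email) before creating and saving the user.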
if request.method == 'POST':
form = request.form
firstname = form.get("firstname")
secondname = form.get("secondname")
username = form.get("username")
email = form.get("email")
password = form.get("password")
confirm_password = form.get("confirm_password")
        if username is None or password is None or confirm_password is None or email is None:
            error = "Username, password and email are required"
            return render_template('register.html', error=error)
        # Validate the email format (raw string avoids invalid escape-sequence warnings)
        regex = r'^\w+([\.-]?\w+)*@\w+([\.-]?\w+)*(\.\w{2,3})+$'
        if re.match(regex, email) is None:
            error = "Invalid email. Please use a correct email format"
            return render_template('register.html', error=error)
        if password != confirm_password:
            error = "Passwords do not match"
            return render_template('register.html', error=error)
else:
            user = User.query.filter_by(username=username).first()
            if user is not None:
                error = "Username already exists"
                return render_template('register.html', error=error)
            user = User.query.filter_by(email=email).first()
            if user is not None:
                error = "Email already exists"
                return render_template('register.html', error=error)
            user = User(firstname=firstname, secondname=secondname, username=username, email=email)
user.set_password(password)
user.save()
return redirect(url_for("auth.login"))
return render_template('register.html',title='Register')
@auth.route('/logout')
@login_required
def logout():
logout_user()
flash('You have been successfully logged out')
return redirect(url_for("main.home")) | 44.920635 | 96 | 0.625795 |
4a22eddcf632e74b4ad3b31e327a0a7cbb86fc84 | 13,671 | py | Python | tracker/south_migrations/0002_add_upper_name_index_on_alias.py | AB-informatica-service/swat4stats.com | c3a7e83953373b8d876a4ca7055b59168f345442 | [
"MIT"
] | 14 | 2015-04-02T06:50:10.000Z | 2021-02-18T13:26:46.000Z | tracker/south_migrations/0002_add_upper_name_index_on_alias.py | AB-informatica-service/swat4stats.com | c3a7e83953373b8d876a4ca7055b59168f345442 | [
"MIT"
] | 4 | 2016-09-09T19:27:44.000Z | 2020-05-08T17:59:31.000Z | tracker/south_migrations/0002_add_upper_name_index_on_alias.py | AB-informatica-service/swat4stats.com | c3a7e83953373b8d876a4ca7055b59168f345442 | [
"MIT"
] | 8 | 2015-01-03T02:44:04.000Z | 2022-03-23T19:52:00.000Z | # -*- coding: utf-8 -*-
from south.utils import datetime_utils as datetime
from south.db import db
from south.v2 import SchemaMigration
from django.db import models
class Migration(SchemaMigration):
def forwards(self, orm):
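        # functional index so case-insensitive alias lookups on upper(name) plus isp_id can use an index scan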
db.execute('CREATE INDEX tracker_alias_upper_name_isp_id ON tracker_alias (upper(name), isp_id)')
def backwards(self, orm):
db.execute('DROP INDEX tracker_alias_upper_name_isp_id')
models = {
'tracker.alias': {
'Meta': {'index_together': "(('name', 'isp'),)", 'object_name': 'Alias'},
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'isp': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'+'", 'on_delete': 'models.SET_NULL', 'null': 'True', 'to': "orm['tracker.ISP']"}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '64'}),
'profile': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['tracker.Profile']"})
},
'tracker.game': {
'Meta': {'object_name': 'Game'},
'coop_score': ('django.db.models.fields.SmallIntegerField', [], {'default': '0'}),
'date_finished': ('django.db.models.fields.DateTimeField', [], {'blank': 'True', 'auto_now_add': 'True'}),
'gametype': ('django.db.models.fields.SmallIntegerField', [], {'null': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'mapname': ('django.db.models.fields.SmallIntegerField', [], {'null': 'True'}),
'outcome': ('django.db.models.fields.SmallIntegerField', [], {'default': '0'}),
'player_num': ('django.db.models.fields.SmallIntegerField', [], {'default': '0'}),
'rd_bombs_defused': ('django.db.models.fields.SmallIntegerField', [], {'default': '0'}),
'rd_bombs_total': ('django.db.models.fields.SmallIntegerField', [], {'default': '0'}),
'score_sus': ('django.db.models.fields.SmallIntegerField', [], {'default': '0'}),
'score_swat': ('django.db.models.fields.SmallIntegerField', [], {'default': '0'}),
'server': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['tracker.Server']", 'on_delete': 'models.SET_NULL', 'null': 'True'}),
'tag': ('django.db.models.fields.CharField', [], {'max_length': '8', 'null': 'True', 'unique': 'True'}),
'time': ('django.db.models.fields.SmallIntegerField', [], {'default': '0'}),
'vict_sus': ('django.db.models.fields.SmallIntegerField', [], {'default': '0'}),
'vict_swat': ('django.db.models.fields.SmallIntegerField', [], {'default': '0'})
},
'tracker.ip': {
'Meta': {'object_name': 'IP', 'unique_together': "(('range_from', 'range_to'),)"},
'date_created': ('django.db.models.fields.DateTimeField', [], {'blank': 'True', 'auto_now_add': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'isp': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['tracker.ISP']", 'null': 'True'}),
'range_from': ('django.db.models.fields.BigIntegerField', [], {}),
'range_to': ('django.db.models.fields.BigIntegerField', [], {})
},
'tracker.isp': {
'Meta': {'object_name': 'ISP'},
'country': ('django.db.models.fields.CharField', [], {'max_length': '2', 'null': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True'})
},
'tracker.loadout': {
'Meta': {'object_name': 'Loadout'},
'body': ('django.db.models.fields.SmallIntegerField', [], {'default': '0'}),
'breacher': ('django.db.models.fields.SmallIntegerField', [], {'default': '0'}),
'equip_five': ('django.db.models.fields.SmallIntegerField', [], {'default': '0'}),
'equip_four': ('django.db.models.fields.SmallIntegerField', [], {'default': '0'}),
'equip_one': ('django.db.models.fields.SmallIntegerField', [], {'default': '0'}),
'equip_three': ('django.db.models.fields.SmallIntegerField', [], {'default': '0'}),
'equip_two': ('django.db.models.fields.SmallIntegerField', [], {'default': '0'}),
'head': ('django.db.models.fields.SmallIntegerField', [], {'default': '0'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'primary': ('django.db.models.fields.SmallIntegerField', [], {'default': '0'}),
'primary_ammo': ('django.db.models.fields.SmallIntegerField', [], {'default': '0'}),
'secondary': ('django.db.models.fields.SmallIntegerField', [], {'default': '0'}),
'secondary_ammo': ('django.db.models.fields.SmallIntegerField', [], {'default': '0'})
},
'tracker.objective': {
'Meta': {'object_name': 'Objective'},
'game': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['tracker.Game']"}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.SmallIntegerField', [], {}),
'status': ('django.db.models.fields.SmallIntegerField', [], {'default': '0'})
},
'tracker.player': {
'Meta': {'object_name': 'Player'},
'admin': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'alias': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['tracker.Alias']"}),
'arrest_streak': ('django.db.models.fields.SmallIntegerField', [], {'default': '0'}),
'arrested': ('django.db.models.fields.SmallIntegerField', [], {'default': '0'}),
'arrests': ('django.db.models.fields.SmallIntegerField', [], {'default': '0'}),
'coop_enemy_arrests': ('django.db.models.fields.SmallIntegerField', [], {'default': '0'}),
'coop_enemy_incaps': ('django.db.models.fields.SmallIntegerField', [], {'default': '0'}),
'coop_enemy_incaps_invalid': ('django.db.models.fields.SmallIntegerField', [], {'default': '0'}),
'coop_enemy_kills': ('django.db.models.fields.SmallIntegerField', [], {'default': '0'}),
'coop_enemy_kills_invalid': ('django.db.models.fields.SmallIntegerField', [], {'default': '0'}),
'coop_hostage_arrests': ('django.db.models.fields.SmallIntegerField', [], {'default': '0'}),
'coop_hostage_hits': ('django.db.models.fields.SmallIntegerField', [], {'default': '0'}),
'coop_hostage_incaps': ('django.db.models.fields.SmallIntegerField', [], {'default': '0'}),
'coop_hostage_kills': ('django.db.models.fields.SmallIntegerField', [], {'default': '0'}),
'coop_status': ('django.db.models.fields.SmallIntegerField', [], {'default': '0'}),
'coop_toc_reports': ('django.db.models.fields.SmallIntegerField', [], {'default': '0'}),
'death_streak': ('django.db.models.fields.SmallIntegerField', [], {'default': '0'}),
'deaths': ('django.db.models.fields.SmallIntegerField', [], {'default': '0'}),
'dropped': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'game': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['tracker.Game']"}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'ip': ('django.db.models.fields.GenericIPAddressField', [], {'max_length': '39'}),
'kill_streak': ('django.db.models.fields.SmallIntegerField', [], {'default': '0'}),
'kills': ('django.db.models.fields.SmallIntegerField', [], {'default': '0'}),
'loadout': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['tracker.Loadout']", 'on_delete': 'models.SET_NULL', 'null': 'True'}),
'rd_bombs_defused': ('django.db.models.fields.SmallIntegerField', [], {'default': '0'}),
'score': ('django.db.models.fields.SmallIntegerField', [], {'default': '0'}),
'sg_escapes': ('django.db.models.fields.SmallIntegerField', [], {'default': '0'}),
'sg_kills': ('django.db.models.fields.SmallIntegerField', [], {'default': '0'}),
'suicides': ('django.db.models.fields.SmallIntegerField', [], {'default': '0'}),
'team': ('django.db.models.fields.SmallIntegerField', [], {'null': 'True'}),
'teamkills': ('django.db.models.fields.SmallIntegerField', [], {'default': '0'}),
'time': ('django.db.models.fields.SmallIntegerField', [], {'default': '0'}),
'vip': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'vip_captures': ('django.db.models.fields.SmallIntegerField', [], {'default': '0'}),
'vip_escapes': ('django.db.models.fields.SmallIntegerField', [], {'default': '0'}),
'vip_kills_invalid': ('django.db.models.fields.SmallIntegerField', [], {'default': '0'}),
'vip_kills_valid': ('django.db.models.fields.SmallIntegerField', [], {'default': '0'}),
'vip_rescues': ('django.db.models.fields.SmallIntegerField', [], {'default': '0'})
},
'tracker.procedure': {
'Meta': {'object_name': 'Procedure'},
'game': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['tracker.Game']"}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.SmallIntegerField', [], {}),
'score': ('django.db.models.fields.SmallIntegerField', [], {'default': '0'}),
'status': ('django.db.models.fields.CharField', [], {'max_length': '7'})
},
'tracker.profile': {
'Meta': {'object_name': 'Profile'},
'country': ('django.db.models.fields.CharField', [], {'max_length': '2', 'null': 'True'}),
'game_first': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'+'", 'on_delete': 'models.SET_NULL', 'null': 'True', 'to': "orm['tracker.Game']"}),
'game_last': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'+'", 'on_delete': 'models.SET_NULL', 'null': 'True', 'to': "orm['tracker.Game']"}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'loadout': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['tracker.Loadout']", 'on_delete': 'models.SET_NULL', 'null': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '64', 'null': 'True'}),
'team': ('django.db.models.fields.SmallIntegerField', [], {'null': 'True'})
},
'tracker.rank': {
'Meta': {'object_name': 'Rank', 'unique_together': "(('year', 'category', 'profile'),)"},
'category': ('django.db.models.fields.SmallIntegerField', [], {}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'points': ('django.db.models.fields.FloatField', [], {'default': '0'}),
'position': ('django.db.models.fields.PositiveIntegerField', [], {'db_index': 'True', 'null': 'True'}),
'profile': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['tracker.Profile']"}),
'year': ('django.db.models.fields.SmallIntegerField', [], {})
},
'tracker.server': {
'Meta': {'object_name': 'Server', 'unique_together': "(('ip', 'port'),)"},
'country': ('django.db.models.fields.CharField', [], {'blank': 'True', 'max_length': '2', 'null': 'True'}),
'enabled': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'ip': ('django.db.models.fields.GenericIPAddressField', [], {'max_length': '39'}),
'key': ('django.db.models.fields.CharField', [], {'max_length': '32'}),
'listed': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'port': ('django.db.models.fields.PositiveIntegerField', [], {}),
'port_gs1': ('django.db.models.fields.PositiveIntegerField', [], {'blank': 'True', 'null': 'True'}),
'port_gs2': ('django.db.models.fields.PositiveIntegerField', [], {'blank': 'True', 'null': 'True'}),
'streamed': ('django.db.models.fields.BooleanField', [], {'default': 'False'})
},
'tracker.weapon': {
'Meta': {'object_name': 'Weapon'},
'distance': ('django.db.models.fields.FloatField', [], {'default': '0'}),
'hits': ('django.db.models.fields.SmallIntegerField', [], {'default': '0'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'kills': ('django.db.models.fields.SmallIntegerField', [], {'default': '0'}),
'name': ('django.db.models.fields.SmallIntegerField', [], {}),
'player': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['tracker.Player']"}),
'shots': ('django.db.models.fields.SmallIntegerField', [], {'default': '0'}),
'teamhits': ('django.db.models.fields.SmallIntegerField', [], {'default': '0'}),
'teamkills': ('django.db.models.fields.SmallIntegerField', [], {'default': '0'}),
'time': ('django.db.models.fields.SmallIntegerField', [], {'default': '0'})
}
}
    complete_apps = ['tracker']
| 76.803371 | 179 | 0.561773 |
4a22edf0037413fe22b202774ec280dfd0dfbd82 | 6,837 | py | Python | SARS CoV-2/Station C/Old/Thermo Taqpath P20 Multi - 1 input plate/v1_station_c_S14_taqpath_p20multi.py | biogerm-pt/OT-2 | fed24f06db0ee19216a1f793482f588f07e3762a | [
"Apache-2.0"
] | null | null | null | SARS CoV-2/Station C/Old/Thermo Taqpath P20 Multi - 1 input plate/v1_station_c_S14_taqpath_p20multi.py | biogerm-pt/OT-2 | fed24f06db0ee19216a1f793482f588f07e3762a | [
"Apache-2.0"
] | null | null | null | SARS CoV-2/Station C/Old/Thermo Taqpath P20 Multi - 1 input plate/v1_station_c_S14_taqpath_p20multi.py | biogerm-pt/OT-2 | fed24f06db0ee19216a1f793482f588f07e3762a | [
"Apache-2.0"
] | null | null | null |
from opentrons import protocol_api
import json
import os
import math
# metadata
metadata = {
'protocolName': 'Version 1 S14 Station C Thermo Taqpath P20 Multi',
'author': 'Nick <[email protected]>',
'source': 'Custom Protocol Request',
'apiLevel': '2.3'
}
NUM_SAMPLES = 96 # start with 8 samples, slowly increase to 48, then 94 (max is 94)
SAMPLE_VOL = 10
PREPARE_MASTERMIX = True
TIP_TRACK = False
def run(ctx: protocol_api.ProtocolContext):
global MM_TYPE
# check source (elution) labware type
source_plate = ctx.load_labware(
'opentrons_96_aluminumblock_nest_wellplate_100ul', '1',
'chilled elution plate on block from Station B')
tips20 = [
ctx.load_labware('opentrons_96_filtertiprack_20ul', slot)
for slot in ['3', '6', '8', '9', '10', '11']
]
tips300 = [ctx.load_labware('opentrons_96_filtertiprack_200ul', '2')]
tempdeck = ctx.load_module('Temperature Module Gen2', '4')
pcr_plate = tempdeck.load_labware(
'opentrons_96_aluminumblock_nest_wellplate_100ul', 'PCR plate')
mm_strips = ctx.load_labware(
'opentrons_96_aluminumblock_nest_wellplate_100ul', '7',
'mastermix strips')
tempdeck.set_temperature(4)
tube_block = ctx.load_labware(
'opentrons_24_aluminumblock_nest_2ml_screwcap', '5',
'2ml screw tube aluminum block for mastermix + controls')
# pipette
m20 = ctx.load_instrument('p20_multi_gen2', 'right', tip_racks=tips20)
p300 = ctx.load_instrument('p300_single_gen2', 'left', tip_racks=tips300)
# setup up sample sources and destinations
num_cols = math.ceil(NUM_SAMPLES/8)
sources = source_plate.rows()[0][:num_cols]
sample_dests = pcr_plate.rows()[0][:num_cols]
tip_log = {'count': {}}
folder_path = '/data/C'
tip_file_path = folder_path + '/tip_log.json'
if TIP_TRACK and not ctx.is_simulating():
if os.path.isfile(tip_file_path):
with open(tip_file_path) as json_file:
data = json.load(json_file)
if 'tips20' in data:
tip_log['count'][m20] = data['tips20']
else:
tip_log['count'][m20] = 0
if 'tips300' in data:
tip_log['count'][p300] = data['tips300']
else:
tip_log['count'][p300] = 0
else:
tip_log['count'] = {m20: 0, p300: 0}
else:
tip_log['count'] = {m20: 0, p300: 0}
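    # Note (comment added for clarity): the tip log persisted at /data/C/tip_log.json is a
    # small JSON object of the form {"tips20": <int>, "tips300": <int>}; it is written out
    # again at the end of run() so tip usage can carry over between protocol runs.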
tip_log['tips'] = {
m20: [tip for rack in tips20 for tip in rack.rows()[0]],
p300: [tip for rack in tips300 for tip in rack.wells()]
}
tip_log['max'] = {
pip: len(tip_log['tips'][pip])
for pip in [m20, p300]
}
def pick_up(pip):
nonlocal tip_log
if tip_log['count'][pip] == tip_log['max'][pip]:
ctx.pause('Replace ' + str(pip.max_volume) + 'µl tipracks before \
resuming.')
pip.reset_tipracks()
tip_log['count'][pip] = 0
pip.pick_up_tip(tip_log['tips'][pip][tip_log['count'][pip]])
tip_log['count'][pip] += 1
""" mastermix component maps """
mm_tube = tube_block.wells()[0]
mm_dict = {
'volume': 15,
'components': {
tube: vol for tube, vol in zip(tube_block.columns()[1][:3],
[6.25, 1.25, 7.5])
}
}
vol_overage = 1.2 if NUM_SAMPLES > 48 else 1.1 # decrease overage for small sample number
total_mm_vol = mm_dict['volume']*(NUM_SAMPLES+2)*vol_overage
# translate total mastermix volume to starting height
r = mm_tube.diameter/2
mm_height = total_mm_vol/(math.pi*(r**2)) - 5
def h_track(vol):
nonlocal mm_height
dh = 1.1*vol/(math.pi*(r**2)) # compensate for 10% theoretical volume loss
mm_height = mm_height - dh if mm_height - dh > 2 else 2 # stop at 2mm above mm tube bottom
return mm_tube.bottom(mm_height)
if PREPARE_MASTERMIX:
vol_overage = 1.2 if NUM_SAMPLES > 48 else 1.1
for i, (tube, vol) in enumerate(mm_dict['components'].items()):
comp_vol = vol*(NUM_SAMPLES)*vol_overage
pick_up(p300)
num_trans = math.ceil(comp_vol/160)
vol_per_trans = comp_vol/num_trans
for _ in range(num_trans):
p300.air_gap(20)
p300.aspirate(vol_per_trans, tube)
ctx.delay(seconds=3)
p300.touch_tip(tube)
p300.air_gap(20)
p300.dispense(20, mm_tube.top()) # void air gap
p300.dispense(vol_per_trans, mm_tube.bottom(2))
p300.dispense(20, mm_tube.top()) # void pre-loaded air gap
p300.blow_out(mm_tube.top())
p300.touch_tip(mm_tube)
if i < len(mm_dict['components'].items()) - 1: # only keep tip if last component and p300 in use
p300.drop_tip()
mm_total_vol = mm_dict['volume']*(NUM_SAMPLES)*vol_overage
if not p300.hw_pipette['has_tip']: # pickup tip with P300 if necessary for mixing
pick_up(p300)
mix_vol = mm_total_vol / 2 if mm_total_vol / 2 <= 200 else 200 # mix volume is 1/2 MM total, maxing at 200µl
mix_loc = mm_tube.bottom(20) if NUM_SAMPLES > 48 else mm_tube.bottom(5)
p300.mix(7, mix_vol, mix_loc)
p300.blow_out(mm_tube.top())
p300.touch_tip()
# transfer mastermix to strips
mm_strip = mm_strips.columns()[0]
if not p300.hw_pipette['has_tip']:
pick_up(p300)
for i, well in enumerate(mm_strip):
if NUM_SAMPLES % 8 == 0 or i < NUM_SAMPLES % 8:
vol = num_cols*mm_dict['volume']*((vol_overage-1)/2+1)
else:
vol = (num_cols-1)*mm_dict['volume']*((vol_overage-1)/2+1)
p300.transfer(vol, mm_tube, well, new_tip='never')
p300.drop_tip()
# transfer mastermix to plate
mm_vol = mm_dict['volume']
pick_up(m20)
m20.transfer(mm_vol, mm_strip[0].bottom(0.5), sample_dests,
new_tip='never')
m20.drop_tip()
# transfer samples to corresponding locations
for s, d in zip(sources, sample_dests):
pick_up(m20)
m20.transfer(SAMPLE_VOL, s.bottom(2), d.bottom(2), new_tip='never')
m20.mix(1, 10, d.bottom(2))
m20.blow_out(d.top(-2))
m20.aspirate(5, d.top(2)) # suck in any remaining droplets on way to trash
m20.drop_tip()
# track final used tip
if TIP_TRACK and not ctx.is_simulating():
if not os.path.isdir(folder_path):
os.mkdir(folder_path)
data = {
'tips20': tip_log['count'][m20],
'tips300': tip_log['count'][p300]
}
with open(tip_file_path, 'w') as outfile:
json.dump(data, outfile)
| 37.773481 | 117 | 0.597484 |
4a22ee46cce5939943ba1af4e95a483f16a01b9d | 14,376 | py | Python | applications/ContactStructuralMechanicsApplication/python_scripts/contact_convergence_criteria_factory.py | lcirrott/Kratos | 8406e73e0ad214c4f89df4e75e9b29d0eb4a47ea | [
"BSD-4-Clause"
] | null | null | null | applications/ContactStructuralMechanicsApplication/python_scripts/contact_convergence_criteria_factory.py | lcirrott/Kratos | 8406e73e0ad214c4f89df4e75e9b29d0eb4a47ea | [
"BSD-4-Clause"
] | null | null | null | applications/ContactStructuralMechanicsApplication/python_scripts/contact_convergence_criteria_factory.py | lcirrott/Kratos | 8406e73e0ad214c4f89df4e75e9b29d0eb4a47ea | [
"BSD-4-Clause"
] | null | null | null |
from __future__ import print_function, absolute_import, division  # makes KM backward compatible with python 2.6 and 2.7
#import kratos core and applications
import KratosMultiphysics as KM
# Import applications
import KratosMultiphysics.ContactStructuralMechanicsApplication as CSMA
# Construction of the class convergence_criterion
from KratosMultiphysics.StructuralMechanicsApplication import convergence_criteria_factory
# Import auxiliar methods
from KratosMultiphysics.ContactStructuralMechanicsApplication import auxiliar_methods_solvers
# Import eigen solver factory
from KratosMultiphysics import eigen_solver_factory
# Convergence criteria class
class ContactConvergenceCriteriaFactory:
def __init__(self, model_part, convergence_criterion_parameters):
# The main model part
self.model_part = model_part
# Note that all the convergence settings are introduced via a Kratos parameters object.
self.echo_level = convergence_criterion_parameters["echo_level"].GetInt()
self.convergence_criterion_name = convergence_criterion_parameters["convergence_criterion"].GetString()
self.mortar_type = convergence_criterion_parameters["mortar_type"].GetString()
self.frictional_decomposed = convergence_criterion_parameters["frictional_decomposed"].GetBool()
self.print_convergence_criterion = convergence_criterion_parameters["print_convergence_criterion"].GetBool()
self.compute_dynamic_factor = convergence_criterion_parameters["compute_dynamic_factor"].GetBool()
self.gidio_debug = convergence_criterion_parameters["gidio_debug"].GetBool()
if "contact" in self.convergence_criterion_name:
D_RT = convergence_criterion_parameters["displacement_relative_tolerance"].GetDouble()
D_AT = convergence_criterion_parameters["displacement_absolute_tolerance"].GetDouble()
R_RT = convergence_criterion_parameters["residual_relative_tolerance"].GetDouble()
R_AT = convergence_criterion_parameters["residual_absolute_tolerance"].GetDouble()
CD_RT = convergence_criterion_parameters["contact_displacement_relative_tolerance"].GetDouble()
CD_AT = convergence_criterion_parameters["contact_displacement_absolute_tolerance"].GetDouble()
CR_RT = convergence_criterion_parameters["contact_residual_relative_tolerance"].GetDouble()
CR_AT = convergence_criterion_parameters["contact_residual_absolute_tolerance"].GetDouble()
FCD_RT = convergence_criterion_parameters["frictional_contact_displacement_relative_tolerance"].GetDouble()
FCD_AT = convergence_criterion_parameters["frictional_contact_displacement_absolute_tolerance"].GetDouble()
FCR_RT = convergence_criterion_parameters["frictional_contact_residual_relative_tolerance"].GetDouble()
FCR_AT = convergence_criterion_parameters["frictional_contact_residual_absolute_tolerance"].GetDouble()
RNTT = convergence_criterion_parameters["ratio_normal_tangent_threshold"].GetDouble()
condn_convergence_criterion = convergence_criterion_parameters["condn_convergence_criterion"].GetBool()
ensure_contact = convergence_criterion_parameters["ensure_contact"].GetBool()
if self.echo_level >= 1:
KM.Logger.PrintInfo("::[Mechanical Solver]:: ", "CONVERGENCE CRITERION : " + self.convergence_criterion_name)
if self.convergence_criterion_name == "contact_displacement_criterion":
if "ALMContactFrictional" in self.mortar_type and self.frictional_decomposed:
if "PureSlip" in self.mortar_type:
pure_slip = True
else:
pure_slip = auxiliar_methods_solvers.AuxiliarPureSlipCheck(self.model_part)
self.mechanical_convergence_criterion = CSMA.DisplacementLagrangeMultiplierFrictionalContactCriteria(D_RT, D_AT, CD_RT, CD_AT, FCD_RT, FCD_AT, RNTT, ensure_contact, pure_slip, self.print_convergence_criterion)
elif "Penalty" in self.mortar_type:
self.mechanical_convergence_criterion = CSMA.DisplacementContactCriteria(D_RT, D_AT, self.print_convergence_criterion)
else:
self.mechanical_convergence_criterion = CSMA.DisplacementLagrangeMultiplierContactCriteria(D_RT, D_AT, CD_RT, CD_AT, ensure_contact, self.print_convergence_criterion)
self.mechanical_convergence_criterion.SetEchoLevel(self.echo_level)
elif self.convergence_criterion_name == "contact_residual_criterion":
if "ALMContactFrictional" in self.mortar_type and self.frictional_decomposed:
if "PureSlip" in self.mortar_type:
pure_slip = True
else:
pure_slip = auxiliar_methods_solvers.AuxiliarPureSlipCheck(self.model_part)
self.mechanical_convergence_criterion = CSMA.DisplacementLagrangeMultiplierResidualFrictionalContactCriteria(R_RT, R_AT, CR_RT, CR_AT, FCR_RT, FCR_AT, RNTT, ensure_contact, pure_slip, self.print_convergence_criterion)
elif "Penalty" in self.mortar_type:
self.mechanical_convergence_criterion = CSMA.DisplacementResidualContactCriteria(R_RT, R_AT, self.print_convergence_criterion)
else:
self.mechanical_convergence_criterion = CSMA.DisplacementLagrangeMultiplierResidualContactCriteria(R_RT, R_AT, CR_RT, CR_AT, ensure_contact, self.print_convergence_criterion)
self.mechanical_convergence_criterion.SetEchoLevel(self.echo_level)
elif self.convergence_criterion_name == "contact_mixed_criterion":
if "ALMContactFrictional" in self.mortar_type and self.frictional_decomposed:
if "PureSlip" in self.mortar_type:
pure_slip = True
else:
pure_slip = auxiliar_methods_solvers.AuxiliarPureSlipCheck(self.model_part)
self.mechanical_convergence_criterion = CSMA.DisplacementLagrangeMultiplierMixedFrictionalContactCriteria(R_RT, R_AT, CR_RT, CR_AT, FCR_RT, FCR_AT, RNTT, ensure_contact, pure_slip, self.print_convergence_criterion)
elif "Penalty" in self.mortar_type:
self.mechanical_convergence_criterion = CSMA.DisplacementResidualContactCriteria(R_RT, R_AT, self.print_convergence_criterion)
else:
self.mechanical_convergence_criterion = CSMA.DisplacementLagrangeMultiplierMixedContactCriteria(R_RT, R_AT, CR_RT, CR_AT, ensure_contact, self.print_convergence_criterion)
self.mechanical_convergence_criterion.SetEchoLevel(self.echo_level)
elif self.convergence_criterion_name == "contact_and_criterion":
if "Penalty" in self.mortar_type:
Displacement = CSMA.DisplacementContactCriteria(D_RT, D_AT, self.print_convergence_criterion)
Residual = CSMA.DisplacementResidualContactCriteria(R_RT, R_AT, self.print_convergence_criterion)
else:
Displacement = CSMA.DisplacementLagrangeMultiplierContactCriteria(D_RT, D_AT, CD_RT, CD_AT, ensure_contact, self.print_convergence_criterion)
Residual = CSMA.DisplacementLagrangeMultiplierResidualContactCriteria(R_RT, R_AT, CR_RT, CR_AT, ensure_contact, self.print_convergence_criterion)
Displacement.SetEchoLevel(self.echo_level)
Residual.SetEchoLevel(self.echo_level)
self.mechanical_convergence_criterion = KM.AndCriteria(Residual, Displacement)
elif self.convergence_criterion_name == "contact_or_criterion":
if "Penalty" in self.mortar_type:
Displacement = CSMA.DisplacementContactCriteria(D_RT, D_AT, self.print_convergence_criterion)
Residual = CSMA.DisplacementResidualContactCriteria(R_RT, R_AT, self.print_convergence_criterion)
else:
Displacement = CSMA.DisplacementLagrangeMultiplierContactCriteria(D_RT, D_AT, CD_RT, CD_AT, ensure_contact, self.print_convergence_criterion)
Residual = CSMA.DisplacementLagrangeMultiplierResidualContactCriteria(R_RT, R_AT, CR_RT, CR_AT, ensure_contact, self.print_convergence_criterion)
Displacement.SetEchoLevel(self.echo_level)
Residual.SetEchoLevel(self.echo_level)
self.mechanical_convergence_criterion = KM.OrCriteria(Residual, Displacement)
# Adding the mortar criteria
Mortar = self.GetMortarCriteria()
if condn_convergence_criterion:
# Construct the solver
settings_max = KM.Parameters("""
{
"solver_type" : "power_iteration_highest_eigenvalue_solver",
"max_iteration" : 10000,
"tolerance" : 1e-9,
"required_eigen_number" : 1,
"verbosity" : 0,
"linear_solver_settings" : {
"solver_type" : "SuperLUSolver",
"max_iteration" : 500,
"tolerance" : 1e-9,
"scaling" : false,
"verbosity" : 0
}
}
""")
eigen_solver_max = eigen_solver_factory.ConstructSolver(settings_max)
settings_min = KM.Parameters("""
{
"solver_type" : "power_iteration_eigenvalue_solver",
"max_iteration" : 10000,
"tolerance" : 1e-9,
"required_eigen_number" : 1,
"verbosity" : 0,
"linear_solver_settings" : {
"solver_type" : "SuperLUSolver",
"max_iteration" : 500,
"tolerance" : 1e-9,
"scaling" : false,
"verbosity" : 0
}
}
""")
eigen_solver_min = eigen_solver_factory.ConstructSolver(settings_min)
condition_number_utility = KM.ConditionNumberUtility(eigen_solver_max, eigen_solver_min)
else:
condition_number_utility = None
self.mechanical_convergence_criterion = CSMA.MortarAndConvergenceCriteria(self.mechanical_convergence_criterion, Mortar, self.print_convergence_criterion, condition_number_utility)
self.mechanical_convergence_criterion.SetEchoLevel(self.echo_level)
self.mechanical_convergence_criterion.SetActualizeRHSFlag(True)
elif self.convergence_criterion_name == "adaptative_remesh_criteria":
self.mechanical_convergence_criterion = None
else: # Standard criteria (same as structural mechanics application)
base_mechanical_convergence_criterion = convergence_criteria_factory.convergence_criterion(convergence_criterion_parameters)
# Adding the mortar criteria
if "ALMContact" in self.mortar_type or "MeshTying" in self.mortar_type:
Mortar = self.GetMortarCriteria(False)
self.mechanical_convergence_criterion = KM.AndCriteria( base_mechanical_convergence_criterion.mechanical_convergence_criterion, Mortar)
self.mechanical_convergence_criterion.SetActualizeRHSFlag(True)
else:
self.mechanical_convergence_criterion = base_mechanical_convergence_criterion.mechanical_convergence_criterion
def GetMortarCriteria(self, include_table = True):
# Adding the mortar criteria
if self.mortar_type == "ALMContactFrictionless":
if include_table:
Mortar = CSMA.ALMFrictionlessMortarConvergenceCriteria(self.print_convergence_criterion, self.compute_dynamic_factor, self.gidio_debug)
else:
Mortar = CSMA.ALMFrictionlessMortarConvergenceCriteria()
elif self.mortar_type == "ALMContactFrictionlessComponents":
if include_table:
Mortar = CSMA.ALMFrictionlessComponentsMortarConvergenceCriteria(self.print_convergence_criterion, self.compute_dynamic_factor, self.gidio_debug)
else:
Mortar = CSMA.ALMFrictionlessComponentsMortarConvergenceCriteria()
elif "ALMContactFrictional" in self.mortar_type:
if "PureSlip" in self.mortar_type:
pure_slip = True
else:
pure_slip = auxiliar_methods_solvers.AuxiliarPureSlipCheck(self.model_part)
if include_table:
Mortar = CSMA.ALMFrictionalMortarConvergenceCriteria(pure_slip, self.print_convergence_criterion, self.compute_dynamic_factor, self.gidio_debug)
else:
Mortar = CSMA.ALMFrictionalMortarConvergenceCriteria(pure_slip)
elif self.mortar_type == "PenaltyContactFrictionless":
if include_table:
Mortar = CSMA.PenaltyFrictionlessMortarConvergenceCriteria(self.print_convergence_criterion, self.compute_dynamic_factor, self.gidio_debug)
else:
Mortar = CSMA.PenaltyFrictionlessMortarConvergenceCriteria()
elif "PenaltyContactFrictional" in self.mortar_type:
if "PureSlip" in self.mortar_type:
pure_slip = True
else:
pure_slip = auxiliar_methods_solvers.AuxiliarPureSlipCheck(self.model_part)
if include_table:
Mortar = CSMA.PenaltyFrictionalMortarConvergenceCriteria(pure_slip, self.print_convergence_criterion, self.compute_dynamic_factor, self.gidio_debug)
else:
Mortar = CSMA.PenaltyFrictionalMortarConvergenceCriteria(pure_slip)
elif "MeshTying" in self.mortar_type:
Mortar = CSMA.MeshTyingMortarConvergenceCriteria()
Mortar.SetEchoLevel(self.echo_level)
return Mortar
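# Illustrative parameter sketch (assumption, added for clarity -- not part of the original
# file): the factory above reads keys of roughly this shape from
# ``convergence_criterion_parameters``; the values shown are placeholders only.
#
#     {
#         "convergence_criterion"       : "contact_residual_criterion",
#         "mortar_type"                 : "ALMContactFrictionless",
#         "frictional_decomposed"       : false,
#         "print_convergence_criterion" : false,
#         "compute_dynamic_factor"      : false,
#         "gidio_debug"                 : false,
#         "echo_level"                  : 1,
#         "residual_relative_tolerance" : 1e-4,
#         "residual_absolute_tolerance" : 1e-9,
#         "contact_residual_relative_tolerance" : 1e-4,
#         "contact_residual_absolute_tolerance" : 1e-9,
#         "ensure_contact"              : false,
#         "condn_convergence_criterion" : false
#     }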
| 65.944954 | 237 | 0.679605 |
4a22ef67d9a8b2f4ebf241c5e29b5eb17f238b2f | 1,694 | py | Python | train.py | bigbang87/NNPathfinder | 89e4283c604224d2b2d2552a8e90c97176f436c9 | [
"Apache-2.0"
] | null | null | null | train.py | bigbang87/NNPathfinder | 89e4283c604224d2b2d2552a8e90c97176f436c9 | [
"Apache-2.0"
] | null | null | null | train.py | bigbang87/NNPathfinder | 89e4283c604224d2b2d2552a8e90c97176f436c9 | [
"Apache-2.0"
] | null | null | null |
import tensorflow as tf
import numpy as np
import os
import matplotlib.pyplot as plt
from data_loader import load_3D, load_2D
def create_model():
model = tf.keras.Sequential([
tf.keras.layers.Dense(units=1200, activation=tf.keras.activations.relu, input_shape=[441]),
tf.keras.layers.Dense(units=1200, activation=tf.keras.activations.relu),
tf.keras.layers.Dense(units=441)
])
model.compile(loss='mean_squared_error', optimizer=tf.keras.optimizers.Adam(0.0001), metrics=['accuracy'])
return model
def main():
map_size = 21
features_train, labels_train = load_2D(100000, map_size, map_size)
features_test, labels_test = load_2D(1000, map_size, map_size, "test_created_data_")
checkpoint_path = "output_dense"
checkpoint_dir = os.path.dirname(checkpoint_path)
cp_callback = tf.keras.callbacks.ModelCheckpoint(checkpoint_path, verbose = 1, period = 5)
model = create_model()
model.load_weights(checkpoint_path)
#model.fit(features_train, labels_train, epochs = 10, callbacks = [cp_callback], validation_split=0.1)
size = 69
test = np.array(features_test[size]).reshape(map_size, map_size)
prediction = model.predict(features_test)
    ax, img = plt.subplot(1, 3, 1), plt.imshow(test)
    ax.axis('off')
    ax.set_title('Map')
    pred = np.array(prediction[size]).reshape(map_size, map_size) * features_test[size].reshape(map_size, map_size)
    array = np.clip(pred, -0.25, 0.25)
    ax, img = plt.subplot(1, 3, 2), plt.imshow(array)
    ax.axis('off')
    ax.set_title('Predicted path')
    ax, img = plt.subplot(1, 3, 3), plt.imshow(np.array(labels_test[size]).reshape(map_size, map_size))
    ax.axis('off')
    ax.set_title('Desired path')
plt.show()
if __name__ == '__main__':
    main()
| 34.571429 | 112 | 0.747934 |
4a22ef87d0efbf8651f1b55b2b6ff8a552bfe0ef | 892 | py | Python | tests/terraform/runner/extra_checks/S3EnvironmentCheck.py | kylelaker/checkov | 6eada26030a87f397a6bf1831827b3dc6c5dad2d | [
"Apache-2.0"
] | 1 | 2021-04-26T10:09:52.000Z | 2021-04-26T10:09:52.000Z | tests/terraform/runner/extra_checks/S3EnvironmentCheck.py | kylelaker/checkov | 6eada26030a87f397a6bf1831827b3dc6c5dad2d | [
"Apache-2.0"
] | null | null | null | tests/terraform/runner/extra_checks/S3EnvironmentCheck.py | kylelaker/checkov | 6eada26030a87f397a6bf1831827b3dc6c5dad2d | [
"Apache-2.0"
] | null | null | null |
from checkov.common.models.enums import CheckResult, CheckCategories
from checkov.terraform.checks.resource.base_resource_check import BaseResourceCheck
class S3EnvironmentCheck(BaseResourceCheck):
def __init__(self):
name = "Ensure s3 has environment tag of developemnt/staging/production"
id = "CUSTOM_AWS_1"
supported_resources = ['aws_s3_bucket']
categories = [CheckCategories.GENERAL_SECURITY]
super().__init__(name=name, id=id, categories=categories, supported_resources=supported_resources)
def scan_resource_conf(self, conf, entity_type):
if conf.get("tags") and isinstance(conf['tags'][0], dict):
env = conf["tags"][0].get("Environment",{})
if env in ["Developemnt","Staging","Production"]:
return CheckResult.PASSED
return CheckResult.FAILED
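# Illustrative inputs for scan_resource_conf (assumed shapes, comment added for clarity):
#   conf = {"tags": [{"Environment": "Production"}]}  -> CheckResult.PASSED
#   conf = {"tags": [{"Environment": "dev"}]}         -> CheckResult.FAILED
#   conf = {}                                         -> CheckResult.FAILED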
scanner = S3EnvironmentCheck()
| 40.545455 | 106 | 0.707399 |
4a22f1018071a54612c0565d76fc85ed2052e978 | 547 | py | Python | src/solution/374_guess_number_higher_or_lo.py | rsj217/leetcode-in-python3 | f5d9fa50e55ce60a159f9a8ccf6080dc86f56852 | [
"MIT"
] | 1 | 2021-03-01T07:33:45.000Z | 2021-03-01T07:33:45.000Z | src/solution/374_guess_number_higher_or_lo.py | rsj217/leetcode-in-python3 | f5d9fa50e55ce60a159f9a8ccf6080dc86f56852 | [
"MIT"
] | null | null | null | src/solution/374_guess_number_higher_or_lo.py | rsj217/leetcode-in-python3 | f5d9fa50e55ce60a159f9a8ccf6080dc86f56852 | [
"MIT"
] | null | null | null |
from typing import List
class Solution:
def guessNumber(self, n: int) -> int:
        # Binary search sketch. It assumes the LeetCode judge's ``guess(num)`` API is
        # available at runtime (returns 0 if num is the picked number, -1 if the pick
        # is lower than num, and 1 if the pick is higher than num).
        low, high = 1, n
        while low < high:
            mid = (low + high) // 2
            if guess(mid) <= 0:  # pick is mid or below
                high = mid
            else:                # pick is above mid
                low = mid + 1
        return low
import unittest
class TestSolution(unittest.TestCase):
def setUp(self):
self.test_case = [
(10, 6),
(1, 1),
(2, 1),
(2, 2),
]
self.s = Solution()
def test_solution(self):
for nums, answer in self.test_case:
ans = self.s.guessNumber(nums)
self.assertEqual(answer,ans , (nums, answer))
if __name__ == '__main__':
unittest.main()
| 18.233333 | 57 | 0.531993 |
4a22f1833fb5ca549ed3af7d841fbb5da1ea509d | 4,096 | py | Python | reachy_sdk/trajectory/__init__.py | Pandinosaurus/reachy-sdk | c155bc8f56488de305e6fe5bacdb77aad0383295 | [
"Apache-2.0"
] | null | null | null | reachy_sdk/trajectory/__init__.py | Pandinosaurus/reachy-sdk | c155bc8f56488de305e6fe5bacdb77aad0383295 | [
"Apache-2.0"
] | null | null | null | reachy_sdk/trajectory/__init__.py | Pandinosaurus/reachy-sdk | c155bc8f56488de305e6fe5bacdb77aad0383295 | [
"Apache-2.0"
] | null | null | null | """Trajectory utility package.
Provides goto and goto_async functions. They let you easily create and compose movements on joint coordinates.
"""
import asyncio
import numpy as np
import time
from concurrent.futures import ThreadPoolExecutor
from queue import Queue
from typing import Dict, Optional
from .interpolation import InterpolationMode
from ..joint import Joint
def goto(
goal_positions: Dict[Joint, float],
duration: float,
starting_positions: Optional[Dict[Joint, float]] = None,
sampling_freq: float = 100,
interpolation_mode: InterpolationMode = InterpolationMode.LINEAR,
):
"""Send joints command to move the robot to a goal_positions within the specified duration.
This function will block until the movement is over. See goto_async for an asynchronous version.
The goal positions is expressed in joints coordinates. You can use as many joints target as you want.
The duration is expressed in seconds.
You can specify the starting_position, otherwise its current position is used,
for instance to start from its goal position and avoid bumpy start of move.
The sampling freq sets the frequency of intermediate goal positions commands.
You can also select an interpolation method use (linear or minimum jerk) which will influence directly the trajectory.
"""
exc_queue: Queue[Exception] = Queue()
def _wrapped_goto():
try:
asyncio.run(
goto_async(
goal_positions=goal_positions,
duration=duration,
starting_positions=starting_positions,
sampling_freq=sampling_freq,
interpolation_mode=interpolation_mode,
),
)
except Exception as e:
exc_queue.put(e)
with ThreadPoolExecutor() as exec:
exec.submit(_wrapped_goto)
if not exc_queue.empty():
raise exc_queue.get()
async def goto_async(
goal_positions: Dict[Joint, float],
duration: float,
starting_positions: Optional[Dict[Joint, float]] = None,
sampling_freq: float = 100,
interpolation_mode: InterpolationMode = InterpolationMode.LINEAR,
):
"""Send joints command to move the robot to a goal_positions within the specified duration.
This function is asynchronous and will return a Coroutine. This can be used to easily combined multiple gotos.
See goto for an blocking version.
The goal positions is expressed in joints coordinates. You can use as many joints target as you want.
The duration is expressed in seconds.
You can specify the starting_position, otherwise its current position is used,
for instance to start from its goal position and avoid bumpy start of move.
The sampling freq sets the frequency of intermediate goal positions commands.
You can also select an interpolation method use (linear or minimum jerk) which will influence directly the trajectory.
"""
for key in goal_positions.keys():
if not isinstance(key, Joint):
raise ValueError('goal_positions keys should be Joint!')
if starting_positions is None:
starting_positions = {j: j.goal_position for j in goal_positions.keys()}
# Make sure both starting and goal positions are in the same order
starting_positions = {j: starting_positions[j] for j in goal_positions.keys()}
length = round(duration * sampling_freq)
if length < 1:
        raise ValueError(f'Goto length too short! (incoherent duration {duration} or sampling_freq {sampling_freq})!')
joints = starting_positions.keys()
dt = 1 / sampling_freq
traj_func = interpolation_mode(
np.array(list(starting_positions.values())),
np.array(list(goal_positions.values())),
duration,
)
t0 = time.time()
while True:
elapsed_time = time.time() - t0
if elapsed_time > duration:
break
point = traj_func(elapsed_time)
for j, pos in zip(joints, point):
j.goal_position = pos
await asyncio.sleep(dt)
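# --- Usage sketch (illustrative, commented out; not part of the original module) ---
# The joint name below is an assumption about the SDK's naming and may differ on your
# robot; goal positions are given in the joints' own units (degrees on Reachy).
#
#     from reachy_sdk import ReachySDK
#     from reachy_sdk.trajectory import goto
#     from reachy_sdk.trajectory.interpolation import InterpolationMode
#
#     reachy = ReachySDK(host='localhost')
#     # Blocking move of a single joint over 2 seconds with a smooth profile.
#     goto({reachy.r_arm.r_elbow_pitch: -90}, duration=2.0,
#          interpolation_mode=InterpolationMode.MINIMUM_JERK)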
| 36.571429 | 122 | 0.698486 |
4a22f1aa5837583d0fe9ffc5d53c5d24ce6efe3a | 410 | py | Python | holmes/migrations/versions/17346cf564bc_renaming_delimiters_table_to_limiters.py | scorphus/holmes-api | 6b3c76d4299fecf2d8799d7b5c3c6a6442cacd59 | [
"MIT"
] | null | null | null | holmes/migrations/versions/17346cf564bc_renaming_delimiters_table_to_limiters.py | scorphus/holmes-api | 6b3c76d4299fecf2d8799d7b5c3c6a6442cacd59 | [
"MIT"
] | null | null | null | holmes/migrations/versions/17346cf564bc_renaming_delimiters_table_to_limiters.py | scorphus/holmes-api | 6b3c76d4299fecf2d8799d7b5c3c6a6442cacd59 | [
"MIT"
] | null | null | null | """Renaming delimiters table to limiters
Revision ID: 17346cf564bc
Revises: 4d45dd3d8ce5
Create Date: 2014-03-07 14:45:27.909631
"""
# revision identifiers, used by Alembic.
revision = '17346cf564bc'
down_revision = '4d45dd3d8ce5'
from alembic import op
import sqlalchemy as sa
def upgrade():
op.rename_table('delimiters', 'limiters')
def downgrade():
op.rename_table('limiters', 'delimiters')
| 17.826087 | 45 | 0.746341 |
4a22f1f9f2ecd5152b3c191259d4e3c2df16b533 | 4,010 | py | Python | src/syft/ast/static_attr.py | dnabanita7/PySyft | ce2510e65f5bad382e88806bcde30fa38c3c76c4 | [
"Apache-2.0"
] | 1 | 2021-06-06T06:57:36.000Z | 2021-06-06T06:57:36.000Z | src/syft/ast/static_attr.py | dnabanita7/PySyft | ce2510e65f5bad382e88806bcde30fa38c3c76c4 | [
"Apache-2.0"
] | null | null | null | src/syft/ast/static_attr.py | dnabanita7/PySyft | ce2510e65f5bad382e88806bcde30fa38c3c76c4 | [
"Apache-2.0"
] | null | null | null | # stdlib
from typing import Any
from typing import Callable as CallableT
from typing import Dict
from typing import List
from typing import Optional
from typing import Union
# syft relative
from .. import ast
from .. import lib
from ..core.common.pointer import AbstractPointer
from ..core.node.common.action.get_or_set_static_attribute_action import (
GetSetStaticAttributeAction,
)
from ..core.node.common.action.get_or_set_static_attribute_action import (
StaticAttributeAction,
)
from ..logger import traceback_and_raise
class StaticAttribute(ast.attribute.Attribute):
"""A method, function, or constructor which can be directly executed"""
def __init__(
self,
parent: ast.attribute.Attribute,
path_and_name: str,
return_type_name: Optional[str] = None,
client: Optional[Any] = None,
):
super().__init__(
path_and_name=path_and_name,
return_type_name=return_type_name,
client=client,
parent=parent,
)
def get_remote_value(self) -> AbstractPointer:
if self.path_and_name is None:
traceback_and_raise(
ValueError("Can't execute remote get if path is not specified.")
)
if self.client is None:
if self.client is None:
traceback_and_raise(
ValueError(
"Can't get remote enum attribute if there is no client"
"set to get it from"
)
)
return_tensor_type_pointer_type = self.client.lib_ast.query(
path=self.return_type_name
).pointer_type
ptr = return_tensor_type_pointer_type(client=self.client)
msg = GetSetStaticAttributeAction(
path=self.path_and_name,
id_at_location=ptr.id_at_location,
address=self.client.address,
action=StaticAttributeAction.GET,
)
self.client.send_immediate_msg_without_reply(msg=msg)
return ptr
def solve_get_value(self) -> Any:
self.apply_node_changes()
if self.path_and_name is None:
raise ValueError("path_and_none should not be None")
return getattr(self.parent.object_ref, self.path_and_name.rsplit(".")[-1])
def solve_set_value(self, set_value: Any) -> None:
self.apply_node_changes()
if self.path_and_name is None:
raise ValueError("path_and_none should not be None")
setattr(self.parent.object_ref, self.path_and_name.rsplit(".")[-1], set_value)
def set_remote_value(self, set_arg: Any) -> None:
if self.client is None:
raise ValueError(
"MAKE PROPER SCHEMA - Can't get remote value if there is no remote "
"client"
)
if self.path_and_name is None:
raise ValueError("MAKE PROPER SCHEMA")
resolved_pointer_type = self.client.lib_ast.query(self.return_type_name)
result = resolved_pointer_type.pointer_type(client=self.client)
result_id_at_location = getattr(result, "id_at_location", None)
downcasted_set_arg = lib.python.util.downcast(set_arg)
downcasted_set_arg_ptr = downcasted_set_arg.send(self.client)
cmd = GetSetStaticAttributeAction(
path=self.path_and_name,
id_at_location=result_id_at_location,
address=self.client.address,
action=StaticAttributeAction.SET,
set_arg=downcasted_set_arg_ptr,
)
self.client.send_immediate_msg_without_reply(msg=cmd)
return result
def __call__( # type: ignore
self, action: StaticAttributeAction
) -> Optional[Union["ast.callable.Callable", CallableT]]:
raise ValueError("MAKE PROPER SCHEMA, THIS SHOULD NEVER BE CALLED")
def add_path(self, *args: List[Any], **kwargs: Dict[Any, Any]) -> None: # type: ignore
raise ValueError("MAKE PROPER SCHEMA")
| 33.697479 | 91 | 0.64788 |
4a22f1fba7edb1b7dc99b2d780c024a5e7442570 | 16,354 | py | Python | lib/JumpScale/data/models/Models.py | Jumpscale/jumpscale_core8 | f80ac9b1ab99b833ee7adb17700dcf4ef35f3734 | [
"Apache-2.0"
] | 8 | 2016-04-14T14:04:57.000Z | 2020-06-09T00:24:34.000Z | lib/JumpScale/data/models/Models.py | Jumpscale/jumpscale_core8 | f80ac9b1ab99b833ee7adb17700dcf4ef35f3734 | [
"Apache-2.0"
] | 418 | 2016-01-25T10:30:00.000Z | 2021-09-08T12:29:13.000Z | lib/JumpScale/data/models/Models.py | Jumpscale/jumpscale_core8 | f80ac9b1ab99b833ee7adb17700dcf4ef35f3734 | [
"Apache-2.0"
] | 9 | 2016-04-21T07:21:17.000Z | 2022-01-24T10:35:54.000Z |
from mongoengine.fields import IntField, StringField, ListField, BooleanField, DictField, EmbeddedDocumentField, FloatField
from mongoengine import DoesNotExist, EmbeddedDocument, Document
import hmac
from JumpScale import j
try:
import fcrypt as crypt
except ImportError:
import crypt
DB = 'jumpscale_system'
default_meta = {'allow_inheritance': True, "db_alias": DB}
def extend(a, b):
if isinstance(a, list):
return a + b
elif isinstance(a, dict):
tmp = a.copy()
for i in b:
if not i in tmp:
tmp[i] = b[i]
else:
tmp[i] = extend(tmp[i], b[i])
return tmp
else:
return b
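# Behaviour of ``extend`` for reference (comment added for clarity):
#   extend({'a': 1, 'b': {'x': 1}}, {'a': 2, 'b': {'y': 3}, 'c': 4})
#   -> {'a': 2, 'b': {'x': 1, 'y': 3}, 'c': 4}
# Dicts are merged recursively, lists are concatenated, and for any other type the
# value from ``b`` wins.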
class ModelBase:
DoesNotExist = DoesNotExist
gid = IntField(
default=lambda: j.application.whoAmI.gid if j.application.whoAmI else 0)
nid = IntField(
default=lambda: j.application.whoAmI.nid if j.application.whoAmI else 0)
epoch = IntField(default=j.data.time.getTimeEpoch)
meta = default_meta
@property
def guid(self):
return self.pk
@guid.setter
def guid(self, value):
self.pk = value
def to_dict(self):
d = j.data.serializer.json.loads(Document.to_json(self))
d.pop("_cls")
if "_id" in d:
d.pop("_id")
return d
@classmethod
def find(cls, query):
redis = getattr(cls, '__redis__', False)
if redis:
raise j.exceptions.RuntimeError("not implemented")
else:
return cls.objects(__raw__=query)
@classmethod
def _getKey(cls, guid):
"""
@return hsetkey,key
"""
ttype = cls._class_name.split(".")[-1]
key = "models.%s" % ttype
key = '%s_%s' % (key, guid)
key = key.encode('utf-8')
return key
@classmethod
def get(cls, guid, returnObjWhenNonExist=False):
"""
default needs to be in redis, need to mention if not
"""
redis = getattr(cls, '__redis__', False)
if redis:
modelraw = j.core.db.get(cls._getKey(guid))
if modelraw:
modelraw = modelraw.decode()
obj = cls.from_json(modelraw)
return obj
else:
res = None
else:
try:
res = cls.objects.get(id=guid)
except DoesNotExist:
res = None
return res
@classmethod
def _save_redis(cls, obj):
key = cls._getKey(obj.guid)
meta = cls._meta['indexes']
expire = meta[0].get('expireAfterSeconds', None) if meta else None
raw = j.data.serializer.json.dumps(obj.to_dict())
j.core.db.set(key, raw)
if expire:
j.core.db.expire(key, expire)
return obj
def validate(self, clean):
return Document.validate(self, clean)
def _datatomodel(self, data):
for key, value in data.items():
setattr(self, key, value)
def save(self, data=None):
redis = getattr(self, '__redis__', False)
if data:
self._datatomodel(data)
if redis:
return self._save_redis(self)
else:
return Document.save(self)
def delete(self):
redis = getattr(self, '__redis__', False)
if redis:
key = self._getKey(self.guid)
j.core.db.delete(key)
else:
return Document.delete(self)
@classmethod
def exists(cls, guid):
return bool(cls.get(guid=guid))
def getset(cls):
redis = getattr(cls, '__redis__', False)
key = cls._getKey(cls.guid)
if redis:
model = cls.get(key)
if model is None:
model = cls.save()
return model
else:
if not cls.get(cls.guid):
cls.save()
return cls.get(cls.guid)
def __str__(self):
return j.data.serializer.json.dumps(self.to_dict(), indent=2)
__repr__ = __str__
class Errorcondition(ModelBase, Document):
nid = IntField(required=True)
gid = IntField(required=True)
aid = IntField(default=0)
pid = IntField(default=0)
jid = StringField(default='') # TODO: *2 is this right, string???
masterjid = IntField(default=0)
appname = StringField(default="")
level = IntField(default=1, required=True)
type = StringField(choices=("BUG", "PERF", "OPS", "UNKNOWN"),
default="UNKNOWN", required=True)
state = StringField(choices=("NEW", "ALERT", "CLOSED"),
default="NEW", required=True)
# StringField() <--- available starting version 0.9
errormessage = StringField(default="")
errormessagePub = StringField(default="") # StringField()
category = StringField(default="")
tags = StringField(default="")
code = StringField()
funcname = StringField(default="")
funcfilename = StringField(default="")
funclinenr = IntField(default=0)
backtrace = StringField()
backtraceDetailed = StringField()
extra = StringField()
lasttime = IntField(default=j.data.time.getTimeEpoch())
closetime = IntField(default=j.data.time.getTimeEpoch())
occurrences = IntField(default=0)
class Log(ModelBase, Document):
aid = IntField(default=0)
pid = IntField(default=0)
jid = StringField(default='')
masterjid = IntField(default=0)
appname = StringField(default="")
level = IntField(default=1, required=True)
message = StringField(default='')
type = StringField(choices=("BUG", "PERF", "OPS", "UNKNOWN"),
default="UNKNOWN", required=True)
state = StringField(choices=("NEW", "ALERT", "CLOSED"),
default="NEW", required=True)
# StringField() <--- available starting version 0.9
category = StringField(default="")
tags = StringField(default="")
epoch = IntField(default=j.data.time.getTimeEpoch())
class Grid(ModelBase, Document):
name = StringField(default='master')
# id = IntField(default=1)
class Group(ModelBase, Document):
name = StringField(default='')
domain = StringField(default='')
gid = IntField(default=1)
roles = ListField(StringField())
active = BooleanField(default=True)
description = StringField(default='master')
lastcheck = IntField(default=j.data.time.getTimeEpoch())
class Job(EmbeddedDocument):
nid = IntField(required=True)
gid = IntField(required=True)
data = StringField(default='')
streams = ListField(StringField())
level = IntField()
state = StringField(required=True, choices=(
'SUCCESS', 'ERROR', 'TIMEOUT', 'KILLED', 'QUEUED', 'RUNNING'))
starttime = IntField()
time = IntField()
tags = StringField()
critical = StringField()
meta = extend(default_meta, {
'indexes': [{'fields': ['epoch'], 'expireAfterSeconds': 3600 * 24 * 5}],
'allow_inheritance': True,
"db_alias": DB
})
class Command(ModelBase, Document):
guid = StringField(unique=True, required=True)
gid = IntField(default=0)
nid = IntField(default=0)
cmd = StringField()
roles = ListField(StringField())
fanout = BooleanField(default=False)
args = DictField()
data = StringField()
tags = StringField()
starttime = IntField()
jobs = ListField(EmbeddedDocumentField(Job))
meta = extend(default_meta, {
'indexes': [{'fields': ['guid']}]
})
class Audit(ModelBase, Document):
user = StringField(default='')
result = StringField(default='')
call = StringField(default='')
status_code = IntField(default=0)
args = StringField(default='')
kwargs = StringField(default='')
timestamp = IntField(default=j.data.time.getTimeEpoch())
meta = extend(default_meta, {'indexes': [
{'fields': ['epoch'], 'expireAfterSeconds': 3600 * 24 * 5}
], 'allow_inheritance': True, "db_alias": DB})
class Disk(ModelBase, Document):
partnr = IntField()
path = StringField(default='')
size = IntField(default=0)
free = IntField()
ssd = IntField()
fs = StringField(default='')
mounted = BooleanField()
mountpoint = StringField(default='')
active = BooleanField()
model = StringField(default='')
description = StringField(default='')
type = ListField(StringField()) # BOOT, DATA, ...
# epoch of last time the info was checked from reality
lastcheck = IntField(default=j.data.time.getTimeEpoch())
class VDisk(ModelBase, Document):
machineguid = StringField(required=True)
diskid = IntField()
fs = StringField(default='')
size = IntField(default=0)
free = IntField()
sizeondisk = IntField()
mounted = BooleanField()
path = StringField(default='')
description = StringField(default='')
mountpoint = StringField(default='')
role = ListField(StringField())
type = ListField(StringField())
order = IntField()
devicename = StringField(default='') # if known device name in vmachine
lastcheck = IntField(default=j.data.time.getTimeEpoch())
backup = BooleanField()
backuplocation = StringField()
backuptime = IntField(default=j.data.time.getTimeEpoch())
backupexpiration = IntField()
class Alert(ModelBase, Document):
username = StringField(default='')
description = StringField(default='')
descriptionpub = StringField(default='')
level = IntField(min_value=1, max_value=3, default=1)
# dot notation e.g. machine.start.failed
category = StringField(default='')
tags = StringField(default='') # e.g. machine:2323
state = StringField(choices=("NEW", "ALERT", "CLOSED"),
default='NEW', required=True)
history = ListField(DictField())
# first time there was an error condition linked to this alert
inittime = IntField(default=j.data.time.getTimeEpoch())
# last time there was an error condition linked to this alert
lasttime = IntField()
closetime = IntField() # alert is closed, no longer active
# $nr of times this error condition happened
nrerrorconditions = IntField()
errorconditions = ListField(IntField()) # ids of errorconditions
class Heartbeat(ModelBase, Document):
"""
"""
lastcheck = IntField(default=j.data.time.getTimeEpoch())
class Machine(ModelBase, Document):
name = StringField(default='')
roles = ListField(StringField())
netaddr = StringField(default='')
ipaddr = ListField(StringField())
active = BooleanField()
# STARTED,STOPPED,RUNNING,FROZEN,CONFIGURED,DELETED
state = StringField(choices=("STARTED", "STOPPED", "RUNNING", "FROZEN",
"CONFIGURED", "DELETED"), default='CONFIGURED', required=True)
mem = IntField() # $in MB
cpucore = IntField()
description = StringField(default='')
otherid = StringField(default='')
type = StringField(default='') # VM,LXC
# epoch of last time the info was checked from reality
lastcheck = IntField(default=j.data.time.getTimeEpoch())
class Nic(ModelBase, Document):
name = StringField(default='')
mac = StringField(default='')
ipaddr = ListField(StringField())
active = BooleanField(default=True)
    # epoch of last time the info was checked from reality
lastcheck = IntField(default=j.data.time.getTimeEpoch())
class Node(ModelBase, Document):
name = StringField(default='')
roles = ListField(StringField())
netaddr = DictField(default={})
machineguid = StringField(default='')
ipaddr = ListField(StringField())
active = BooleanField()
peer_stats = IntField() # node which has stats for this node
# node which has transactionlog or other logs for this node
peer_log = IntField()
peer_backup = IntField() # node which has backups for this node
description = StringField(default='')
lastcheck = IntField(default=j.data.time.getTimeEpoch())
# osisrootobj,$namespace,$category,$version
_meta = ListField(StringField())
class Process(ModelBase, Document):
aysdomain = StringField(default='')
aysname = StringField(default='')
pname = StringField(default='') # process name
sname = StringField(default='') # name as specified in startup manager
ports = ListField(IntField())
instance = StringField(default='')
systempid = ListField(IntField()) # system process id (PID) at this point
epochstart = IntField()
epochstop = IntField()
active = BooleanField()
lastcheck = IntField(default=j.data.time.getTimeEpoch())
cmd = StringField(default='')
workingdir = StringField(default='')
parent = StringField(default='')
type = StringField(default='')
statkey = StringField(default='')
nr_file_descriptors = FloatField()
nr_ctx_switches_voluntary = FloatField()
nr_ctx_switches_involuntary = FloatField()
nr_threads = FloatField()
cpu_time_user = FloatField()
cpu_time_system = FloatField()
cpu_percent = FloatField()
mem_vms = FloatField()
mem_rss = FloatField()
io_read_count = FloatField()
io_write_count = FloatField()
io_read_bytes = FloatField()
io_write_bytes = FloatField()
nr_connections_in = FloatField()
nr_connections_out = FloatField()
class Test(ModelBase, Document):
name = StringField(default='')
testrun = StringField(default='')
path = StringField(default='')
state = StringField(choices=("OK", "ERROR", "DISABLED"),
default='OK', required=True)
priority = IntField() # lower is highest priority
organization = StringField(default='')
author = StringField(default='')
version = IntField()
categories = ListField(StringField())
starttime = IntField(default=j.data.time.getTimeEpoch())
endtime = IntField()
enable = BooleanField()
result = DictField()
output = DictField(default={})
eco = DictField(default={})
license = StringField(default='')
source = DictField(default={})
class User(ModelBase, Document):
name = StringField(default='')
domain = StringField(default='')
passwd = StringField(default='') # stored hashed
roles = ListField(StringField())
active = BooleanField()
description = StringField(default='')
emails = ListField(StringField())
xmpp = ListField(StringField())
mobile = ListField(StringField())
# epoch of last time the info updated
lastcheck = IntField(default=j.data.time.getTimeEpoch())
groups = ListField(StringField())
authkey = StringField(default='')
data = StringField(default='')
authkeys = ListField(StringField())
def authenticate(username, passwd):
for user in User.find({'name': username}):
if hmac.compare_digest(user.passwd, j.sal.unix.crypt(passwd, user.passwd)):
return True
return False
def save(user):
if not user.id:
user.passwd = j.sal.unix.crypt(user.passwd)
else:
olduser = User.get(user.id)
if olduser.passwd != user.passwd: # change passwd
user.passwd = j.sal.unix.crypt(user.passwd)
super(ModelBase, user).save()
class SessionCache(ModelBase, Document):
__redis__ = True
user = StringField()
kwargs = DictField()
_creation_time = IntField(default=j.data.time.getTimeEpoch())
_accessed_time = IntField(default=j.data.time.getTimeEpoch())
_expire_at = IntField(default=None)
guid = StringField()
meta = extend(default_meta, {'indexes':
[{'fields': ['epoch'], 'expireAfterSeconds': 432000}],
'allow_inheritance': True,
'db_alias': DB})
def __init__(self, *args, **kwargs):
super().__init__(*args, **kwargs)
def _save_redis(self, obj):
key = self._getKey(obj.guid)
indexes = self._meta['indexes']
expire = next(iter(indexes), {}).get('expireAfterSeconds', None)
raw = j.data.serializer.json.dumps(obj.to_dict())
j.core.db.set(key, raw)
if self._expire_at:
j.core.db.expireat(self._getKey(self.guid), self._expire_at)
elif expire:
j.core.db.expire(key, expire)
return obj
del EmbeddedDocument
| 32.708 | 123 | 0.627369 |
4a22f2e6391b07b8691a118b076966ca9efef7b9 | 7,268 | py | Python | benchmarks/benchmarks/stats_sampling.py | drammock/scipy | f1c6b54f51dfc8c3e83d826867a68026f20b243b | [
"BSD-3-Clause"
] | 1 | 2021-05-03T06:55:01.000Z | 2021-05-03T06:55:01.000Z | benchmarks/benchmarks/stats_sampling.py | grlee77/scipy | 7c904a77ad3d239bc35f18222810c9454e5441c3 | [
"BSD-3-Clause"
] | null | null | null | benchmarks/benchmarks/stats_sampling.py | grlee77/scipy | 7c904a77ad3d239bc35f18222810c9454e5441c3 | [
"BSD-3-Clause"
] | null | null | null |
import numpy as np
from .common import Benchmark, safe_import
with safe_import():
from scipy import stats
with safe_import():
from scipy import special
# Beta distribution with a = 2, b = 3
class contdist1:
def pdf(self, x):
return 12 * x * (1-x)**2
def dpdf(self, x):
return 12 * ((1-x)**2 - 2*x*(1-x))
def cdf(self, x):
return 12 * (x**2/2 - x**3/3 + x**4/4)
def support(self):
return 0, 1
def __repr__(self):
# asv prints this.
return 'beta(2, 3)'
# Standard Normal Distribution
class contdist2:
def pdf(self, x):
return 1./np.sqrt(2*np.pi) * np.exp(-0.5 * x*x)
def dpdf(self, x):
return 1./np.sqrt(2*np.pi) * -x * np.exp(-0.5 * x*x)
def cdf(self, x):
return special.ndtr(x)
def __repr__(self):
return 'norm(0, 1)'
# pdf with piecewise linear function as transformed density with T = -1/sqrt
# Taken from UNU.RAN test suite (from file t_tdr_ps.c)
class contdist3:
def __init__(self, shift=0.):
self.shift = shift
def pdf(self, x):
x -= self.shift
y = 1. / (abs(x) + 1.)
return y * y
def dpdf(self, x):
x -= self.shift
y = 1. / (abs(x) + 1.)
y = 2. * y * y * y
return y if (x < 0.) else -y
def cdf(self, x):
x -= self.shift
if x <= 0.:
return 0.5 / (1. - x)
return 1. - 0.5 / (1. + x)
def __repr__(self):
return f'sqrtlinshft({self.shift})'
# Sin 2 distribution
# / 0.05 + 0.45*(1 +sin(2 Pi x)) if |x| <= 1
# f(x) = <
# \ 0 otherwise
# Taken from UNU.RAN test suite (from file t_pinv.c)
class contdist4:
def pdf(self, x):
return 0.05 + 0.45 * (1 + np.sin(2*np.pi*x))
def dpdf(self, x):
return 0.2 * 0.45 * (2*np.pi) * np.cos(2*np.pi*x)
def cdf(self, x):
return (0.05*(x + 1) +
0.9*(1. + 2.*np.pi*(1 + x) - np.cos(2.*np.pi*x)) /
(4.*np.pi))
def support(self):
return -1, 1
def __repr__(self):
return 'sin2'
# Sin 10 distribution
# / 0.05 + 0.45*(1 +sin(2 Pi x)) if |x| <= 5
# f(x) = <
# \ 0 otherwise
# Taken from UNU.RAN test suite (from file t_pinv.c)
class contdist5:
def pdf(self, x):
return 0.2 * (0.05 + 0.45 * (1 + np.sin(2*np.pi*x)))
def dpdf(self, x):
return 0.2 * 0.45 * (2*np.pi) * np.cos(2*np.pi*x)
def cdf(self, x):
return x/10. + 0.5 + 0.09/(2*np.pi) * (np.cos(10*np.pi) -
np.cos(2*np.pi*x))
def support(self):
return -5, 5
def __repr__(self):
return 'sin10'
allcontdists = [contdist1(), contdist2(), contdist3(), contdist3(10000.),
contdist4(), contdist5()]
class TransformedDensityRejection(Benchmark):
param_names = ['dist', 'c']
params = [allcontdists, [0., -0.5]]
def setup(self, dist, c):
self.urng = np.random.default_rng(0xfaad7df1c89e050200dbe258636b3265)
with np.testing.suppress_warnings() as sup:
sup.filter(RuntimeWarning)
try:
self.rng = stats.TransformedDensityRejection(
dist, c=c, random_state=self.urng
)
except stats.UNURANError:
# contdist3 is not T-concave for c=0. So, skip such test-cases
raise NotImplementedError(f"{dist} not T-concave for c={c}")
def time_tdr_setup(self, dist, c):
with np.testing.suppress_warnings() as sup:
sup.filter(RuntimeWarning)
stats.TransformedDensityRejection(
dist, c=c, random_state=self.urng
)
def time_tdr_rvs(self, dist, c):
self.rng.rvs(100000)
class NumericalInversePolynomial(Benchmark):
param_names = ['dist']
params = [allcontdists]
def setup(self, dist):
self.urng = np.random.default_rng(0xb235b58c1f616c59c18d8568f77d44d1)
with np.testing.suppress_warnings() as sup:
sup.filter(RuntimeWarning)
try:
self.rng = stats.NumericalInversePolynomial(
dist, random_state=self.urng
)
except stats.UNURANError:
raise NotImplementedError(f"setup failed for {dist}")
def time_pinv_setup(self, dist):
with np.testing.suppress_warnings() as sup:
sup.filter(RuntimeWarning)
stats.NumericalInversePolynomial(
dist, random_state=self.urng
)
def time_pinv_rvs(self, dist):
self.rng.rvs(100000)
class NumericalInverseHermite(Benchmark):
param_names = ['dist', 'order']
params = [allcontdists, [3, 5]]
def setup(self, dist, order):
self.urng = np.random.default_rng(0xb235b58c1f616c59c18d8568f77d44d1)
with np.testing.suppress_warnings() as sup:
sup.filter(RuntimeWarning)
try:
self.rng = stats.NumericalInverseHermite(
dist, order=order, random_state=self.urng
)
except stats.UNURANError:
raise NotImplementedError(f"setup failed for {dist}")
def time_hinv_setup(self, dist, order):
with np.testing.suppress_warnings() as sup:
sup.filter(RuntimeWarning)
stats.NumericalInverseHermite(
dist, order=order, random_state=self.urng
)
def time_hinv_rvs(self, dist, order):
self.rng.rvs(100000)
class NaiveRatioUniforms(Benchmark):
param_names = ['dist']
# only benchmark a few distributions since NROU is quite slow
params = [contdist2(), contdist3(), contdist5()]
def setup(self, dist):
self.urng = np.random.default_rng(0xb235b58c1f616c59c18d8568f77d44d1)
with np.testing.suppress_warnings() as sup:
sup.filter(RuntimeWarning)
try:
self.rng = stats.NaiveRatioUniforms(
dist, random_state=self.urng
)
except stats.UNURANError:
raise NotImplementedError(f"setup failed for {dist}")
def time_nrou_setup(self, dist):
self.rng = stats.NaiveRatioUniforms(dist, random_state=self.urng)
def time_nrou_rvs(self, dist):
self.rng.rvs(100000)
class DiscreteAliasUrn(Benchmark):
param_names = ['distribution']
params = [
# a subset of discrete distributions with finite domain.
[['nhypergeom', (20, 7, 1)],
['hypergeom', (30, 12, 6)],
['nchypergeom_wallenius', (140, 80, 60, 0.5)],
['binom', (5, 0.4)]]
]
def setup(self, distribution):
distname, params = distribution
dist = getattr(stats, distname)
domain = dist.support(*params)
self.urng = np.random.default_rng(0x2fc9eb71cd5120352fa31b7a048aa867)
x = np.arange(domain[0], domain[1] + 1)
self.pv = dist.pmf(x, *params)
self.rng = stats.DiscreteAliasUrn(self.pv, random_state=self.urng)
def time_dau_setup(self, distribution):
stats.DiscreteAliasUrn(self.pv, random_state=self.urng)
def time_dau_rvs(self, distribution):
self.rng.rvs(100000)
| 28.390625 | 78 | 0.566731 |
4a22f2f2a082e8cf8acb1e8bb3230fdc1a8e63ab | 654 | py | Python | contacts/app/migrations/0014_auto_20200221_1937.py | Jeevan-Kiran-Lenka/IPARK | 2d1c64672708a21e7b3c4608cbcd63b06996504f | [
"MIT"
] | 5 | 2020-03-04T05:12:35.000Z | 2021-11-17T11:31:40.000Z | contacts/app/migrations/0014_auto_20200221_1937.py | Jeevan-Kiran-Lenka/IPARK | 2d1c64672708a21e7b3c4608cbcd63b06996504f | [
"MIT"
] | null | null | null | contacts/app/migrations/0014_auto_20200221_1937.py | Jeevan-Kiran-Lenka/IPARK | 2d1c64672708a21e7b3c4608cbcd63b06996504f | [
"MIT"
] | 2 | 2020-03-21T17:35:26.000Z | 2021-03-27T16:25:51.000Z |
# Generated by Django 2.2.1 on 2020-02-21 14:07
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
dependencies = [
('app', '0013_contact_sl_no'),
]
operations = [
migrations.AlterField(
model_name='contact',
name='number_plate',
field=models.ForeignKey(blank=True, default=None, on_delete=django.db.models.deletion.CASCADE, to='app.Car'),
),
migrations.AlterField(
model_name='contact',
name='sl_no',
field=models.CharField(max_length=20, unique=True),
),
]
| 26.16 | 121 | 0.611621 |
4a22f2f39af394a375ee94a1fc3be99e444437e1 | 752 | py | Python | BabaBarghi/BabaBarghi/urls.py | alirahmani93/store | e7218fc4aa9307421af39433c3f113283404132b | [
"MIT"
] | null | null | null | BabaBarghi/BabaBarghi/urls.py | alirahmani93/store | e7218fc4aa9307421af39433c3f113283404132b | [
"MIT"
] | 1 | 2021-08-17T03:13:18.000Z | 2021-08-17T03:24:35.000Z | BabaBarghi/BabaBarghi/urls.py | alirahmani93/store | e7218fc4aa9307421af39433c3f113283404132b | [
"MIT"
] | null | null | null | """BabaBarghi URL Configuration
The `urlpatterns` list routes URLs to views. For more information please see:
https://docs.djangoproject.com/en/3.2/topics/http/urls/
Examples:
Function views
1. Add an import: from my_app import views
2. Add a URL to urlpatterns: path('', views.home, name='home')
Class-based views
1. Add an import: from other_app.views import Home
2. Add a URL to urlpatterns: path('', Home.as_view(), name='home')
Including another URLconf
1. Import the include() function: from django.urls import include, path
2. Add a URL to urlpatterns: path('blog/', include('blog.urls'))
"""
from django.contrib import admin
from django.urls import path
urlpatterns = [
path('admin/', admin.site.urls),
]
| 34.181818 | 77 | 0.710106 |
4a22f307c0b4f352a9ccd638716b0031136c49e4 | 1,349 | py | Python | ingestion/src/metadata/ingestion/source/oracle.py | chaitrarao4/OpenMetadata | c28f7ac22263fd325a1124e9758f97cc9ac9d5d3 | [
"Apache-2.0"
] | null | null | null | ingestion/src/metadata/ingestion/source/oracle.py | chaitrarao4/OpenMetadata | c28f7ac22263fd325a1124e9758f97cc9ac9d5d3 | [
"Apache-2.0"
] | null | null | null | ingestion/src/metadata/ingestion/source/oracle.py | chaitrarao4/OpenMetadata | c28f7ac22263fd325a1124e9758f97cc9ac9d5d3 | [
"Apache-2.0"
] | null | null | null |
# Copyright 2021 Collate
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
# http://www.apache.org/licenses/LICENSE-2.0
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# This import verifies that the dependencies are available.
import cx_Oracle # noqa: F401
from ..ometa.openmetadata_rest import MetadataServerConfig
from .sql_source import SQLConnectionConfig, SQLSource
class OracleConfig(SQLConnectionConfig):
# defaults
scheme = "oracle+cx_oracle"
def get_connection_url(self):
return super().get_connection_url()
class OracleSource(SQLSource):
def __init__(self, config, metadata_config, ctx):
super().__init__(config, metadata_config, ctx)
@classmethod
def create(cls, config_dict, metadata_config_dict, ctx):
config = OracleConfig.parse_obj(config_dict)
metadata_config = MetadataServerConfig.parse_obj(metadata_config_dict)
return cls(config, metadata_config, ctx)
| 37.472222 | 78 | 0.756857 |
4a22f36fe2a0ce639266c92e3bce4c57ec31190b | 196 | py | Python | collect_file_names.py | probably-not-porter/random-item-generator | 74d3d57c8e07e48e6a41e121574fc318df2c8406 | [
"0BSD"
] | null | null | null | collect_file_names.py | probably-not-porter/random-item-generator | 74d3d57c8e07e48e6a41e121574fc318df2c8406 | [
"0BSD"
] | null | null | null | collect_file_names.py | probably-not-porter/random-item-generator | 74d3d57c8e07e48e6a41e121574fc318df2c8406 | [
"0BSD"
] | null | null | null |
#literally make my life so much easier with 4 lines of python
import os
list = os.listdir("images")
for i in range(len(list)):
print("theImages[" + str(i) + "] = 'images/" + list[i] + "';") | 28 | 66 | 0.627551 |
4a22f3a335036de5a6c90960f338bb7f12f05100 | 2,080 | py | Python | app/app/urls.py | maenibreigheth/social_media_app | 242aa570ca41203d4e840a245b556c16158212fe | [
"MIT"
] | null | null | null | app/app/urls.py | maenibreigheth/social_media_app | 242aa570ca41203d4e840a245b556c16158212fe | [
"MIT"
] | null | null | null | app/app/urls.py | maenibreigheth/social_media_app | 242aa570ca41203d4e840a245b556c16158212fe | [
"MIT"
] | null | null | null | """app URL Configuration
The `urlpatterns` list routes URLs to views. For more information please see:
https://docs.djangoproject.com/en/3.0/topics/http/urls/
Examples:
Function views
1. Add an import: from my_app import views
2. Add a URL to urlpatterns: path('', views.home, name='home')
Class-based views
1. Add an import: from other_app.views import Home
2. Add a URL to urlpatterns: path('', Home.as_view(), name='home')
Including another URLconf
1. Import the include() function: from django.urls import include, path
2. Add a URL to urlpatterns: path('blog/', include('blog.urls'))
"""
from django.conf.urls import url
from django.contrib import admin
from django.urls import path, include
from drf_yasg import openapi
from drf_yasg.views import get_schema_view
from rest_framework import permissions
from rest_framework_simplejwt.views import (
TokenRefreshView,
)
from accounts.views import ActivateUserView
schema_view = get_schema_view(
openapi.Info(
title="Snippets API",
default_version='v1',
description="Test description",
terms_of_service="https://www.google.com/policies/terms/",
contact=openapi.Contact(email="[email protected]"),
license=openapi.License(name="BSD License"),
),
public=True,
permission_classes=(permissions.AllowAny,),
)
urlpatterns = [
path('admin/', admin.site.urls),
path('activate/<int:pk>/<str:token>/', ActivateUserView.as_view()),
path('', include('accounts.urls')),
path('', include('profiles.urls')),
path('', include('friendship.urls')),
path('', include('posts.urls')),
path('stories/', include('stories.urls')),
path('api/token/refresh/', TokenRefreshView.as_view(), name='token_refresh'),
url(r'^swagger(?P<format>\.json|\.yaml)$', schema_view.without_ui(cache_timeout=0), name='schema-json'),
url(r'^swagger/$', schema_view.with_ui('swagger', cache_timeout=0), name='schema-swagger-ui'),
url(r'^redoc/$', schema_view.with_ui('redoc', cache_timeout=0), name='schema-redoc'),
]
| 37.818182 | 108 | 0.699519 |
4a22f3cb82777be318a5c80d67a11d12035d37cc | 8,374 | py | Python | notifications.py | labscript-suite-temp-archive/blacs-fork--rbchip-blacs--forked-from--labscript_suite-blacs | 2f023c6564dee46b555922e949d4bee81de8ca58 | [
"BSD-2-Clause",
"BSD-3-Clause"
] | null | null | null | notifications.py | labscript-suite-temp-archive/blacs-fork--rbchip-blacs--forked-from--labscript_suite-blacs | 2f023c6564dee46b555922e949d4bee81de8ca58 | [
"BSD-2-Clause",
"BSD-3-Clause"
] | null | null | null | notifications.py | labscript-suite-temp-archive/blacs-fork--rbchip-blacs--forked-from--labscript_suite-blacs | 2f023c6564dee46b555922e949d4bee81de8ca58 | [
"BSD-2-Clause",
"BSD-3-Clause"
] | null | null | null | #####################################################################
# #
# /notifications.py #
# #
# Copyright 2013, Monash University #
# #
# This file is part of the program BLACS, in the labscript suite #
# (see http://labscriptsuite.org), and is licensed under the #
# Simplified BSD License. See the license.txt file in the root of #
# the project for the full license. #
# #
#####################################################################
from __future__ import division, unicode_literals, print_function, absolute_import
import logging
import os
from qtutils import UiLoader
from blacs import BLACS_DIR
logger = logging.getLogger('BLACS.NotificationManager')
class Notifications(object):
def __init__(self, BLACS):
self._BLACS = BLACS
self._notifications = {}
self._widgets = {}
self._minimized_widgets = {}
self._closed_callbacks = {}
self._hidden_callbacks = {}
self._shown_callbacks = {}
def add_notification(self, notification_class):
if notification_class in self._notifications:
return False
try:
# instantiate the notification class
# TODO: Do we need to pass anything in here?
self._notifications[notification_class] = notification_class(self._BLACS)
# get the widget
widget = self._notifications[notification_class].get_widget()
# get details on whether the widget can be closed or hidden
properties = self._notifications[notification_class].get_properties()
# Function shortcuts
show_func = lambda callback=False: self.show_notification(notification_class, callback)
hide_func = lambda callback=False: self.minimize_notification(notification_class, callback)
close_func = lambda callback=False: self.close_notification(notification_class, callback)
get_state = lambda: self.get_state(notification_class)
# create layout/widget with appropriate buttons and the widget from the notification class
ui = UiLoader().load(os.path.join(BLACS_DIR, 'notification_widget.ui'))
ui.hide_button.setVisible(bool(properties['can_hide']))
ui.hide_button.clicked.connect(lambda: hide_func(True))
ui.close_button.setVisible(bool(properties['can_close']))
ui.close_button.clicked.connect(lambda: close_func(True))
ui.widget_layout.addWidget(widget)
#ui.hide()
#save callbacks
if 'closed_callback' in properties and callable(properties['closed_callback']):
self._closed_callbacks[notification_class] = properties['closed_callback']
elif 'closed_callback' in properties:
logger.warning('"Closed" callback for notification class %s is not callable (and will not be called when the notification is closed. The callback specified was %s.'%(notification_class,properties['closed_callback']))
if 'hidden_callback' in properties and callable(properties['hidden_callback']):
self._hidden_callbacks[notification_class] = properties['hidden_callback']
elif 'hidden_callback' in properties:
logger.warning('"Hidden" callback for notification class %s is not callable (and will not be called when the notification is closed. The callback specified was %s.'%(notification_class,properties['hidden_callback']))
if 'shown_callback' in properties and callable(properties['shown_callback']):
self._shown_callbacks[notification_class] = properties['shown_callback']
elif 'shown_callback' in properties:
logger.warning('"Shown" callback for notification class %s is not callable (and will not be called when the notification is closed. The callback specified was %s.'%(notification_class,properties['shown_callback']))
#TODO: Make the minimized widget
ui2 = UiLoader().load(os.path.join(BLACS_DIR, 'notification_minimized_widget.ui'))
#ui2.hide()
if not hasattr(self._notifications[notification_class], 'name'):
self._notifications[notification_class].name = notification_class.__name__
ui2.name.setText(self._notifications[notification_class].name)
ui2.show_button.setVisible(bool(properties['can_hide'])) #If you can hide, you can also show
ui2.show_button.clicked.connect(lambda: show_func(True))
ui2.close_button.setVisible(bool(properties['can_close']))
ui2.close_button.clicked.connect(lambda: close_func(True))
            # pass the show/hide/close functions to the notification class
self._widgets[notification_class] = ui
self._minimized_widgets[notification_class] = ui2
self._notifications[notification_class].set_functions(show_func,hide_func,close_func,get_state)
except:
logger.exception('Failed to instantiate Notification class %s.'%notification_class)
# Cleanup
# TODO: cleanup a little more
if notification_class in self._notifications:
del self._notifications[notification_class]
return False
# add the widgets, initially hidden
ui.setVisible(False)
ui2.setVisible(False)
self._BLACS['ui'].notifications.insertWidget(1,ui)
self._BLACS['ui'].notifications_minimized.insertWidget(0,ui2)
return True
def get_instance(self, notification_class):
if notification_class in self._notifications:
return self._notifications[notification_class]
return None
def show_notification(self, notification_class, callback):
self._widgets[notification_class].setVisible(True)
self._minimized_widgets[notification_class].setVisible(False)
if callback and notification_class in self._shown_callbacks:
try:
self._shown_callbacks[notification_class]()
except:
logger.exception('Failed to run "shown" callback for notification class %s'%notification_class)
def close_notification(self, notification_class, callback):
self._widgets[notification_class].setVisible(False)
self._minimized_widgets[notification_class].setVisible(False)
if callback and notification_class in self._closed_callbacks:
try:
self._closed_callbacks[notification_class]()
except:
logger.exception('Failed to run "closed" callback for notification class %s'%notification_class)
def minimize_notification(self,notification_class, callback):
self._widgets[notification_class].setVisible(False)
self._minimized_widgets[notification_class].setVisible(True)
if callback and notification_class in self._hidden_callbacks:
try:
self._hidden_callbacks[notification_class]()
except:
logger.exception('Failed to run "hidden" callback for notification class %s'%notification_class)
def get_state(self,notification_class):
if self._widgets[notification_class].isVisible():
return 'shown'
elif self._minimized_widgets[notification_class].isVisible():
return 'hidden'
else:
return 'closed'
def close_all(self):
for notification in self._notifications:
try:
notification.close()
except:
pass
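# ---------------------------------------------------------------------------
# Illustrative sketch only (not part of BLACS): the minimal interface that
# add_notification() above assumes a notification plugin class provides.
# The widget construction is left abstract because a real plugin returns a
# Qt widget of its own.
# ---------------------------------------------------------------------------
class ExampleNotification(object):
    name = 'Example notification'
    def __init__(self, BLACS):
        self._BLACS = BLACS
    def get_widget(self):
        # A real plugin returns the QWidget to embed in the notification area.
        raise NotImplementedError
    def get_properties(self):
        # can_hide/can_close control which buttons are shown; optional
        # closed/hidden/shown callbacks are invoked by the Notifications manager.
        return {'can_hide': True, 'can_close': True}
    def set_functions(self, show_func, hide_func, close_func, get_state):
        # Store the functions so the plugin can show/hide/close itself later.
        self._show = show_func
        self._hide = hide_func
        self._close = close_func
        self._get_state = get_state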
| 52.666667 | 233 | 0.602938 |
4a22f46de4268f659661cfe1031cdad6caf3ee63 | 604 | py | Python | tests/data/python2.py | StarryInternet/black | f90f50a7436ca13517933c290ef007e7cb2e7258 | [
"MIT"
] | 16,110 | 2019-07-22T21:54:54.000Z | 2022-03-31T22:52:39.000Z | tests/data/python2.py | StarryInternet/black | f90f50a7436ca13517933c290ef007e7cb2e7258 | [
"MIT"
] | 1,981 | 2019-07-22T21:26:16.000Z | 2022-03-31T23:14:35.000Z | tests/data/python2.py | StarryInternet/black | f90f50a7436ca13517933c290ef007e7cb2e7258 | [
"MIT"
] | 1,762 | 2019-07-22T21:23:00.000Z | 2022-03-31T06:10:22.000Z | #!/usr/bin/env python2
import sys
print >> sys.stderr , "Warning:" ,
print >> sys.stderr , "this is a blast from the past."
print >> sys.stderr , "Look, a repr:", `sys`
def function((_globals, _locals)):
exec ur"print 'hi from exec!'" in _globals, _locals
function((globals(), locals()))
# output
#!/usr/bin/env python2
import sys
print >>sys.stderr, "Warning:",
print >>sys.stderr, "this is a blast from the past."
print >>sys.stderr, "Look, a repr:", ` sys `
def function((_globals, _locals)):
exec ur"print 'hi from exec!'" in _globals, _locals
function((globals(), locals()))
| 17.764706 | 55 | 0.652318 |
4a22f5b92059905aa858d550edd90b2ce87f2fa5 | 3,921 | py | Python | dbd-course-recommender/course_recommender/course/models.py | singh-priyank/DBMS_Course | 6538cd7bc2172b8a54c6c71776a2f5ad4daeeb32 | [
"MIT"
] | 1 | 2020-11-13T12:37:28.000Z | 2020-11-13T12:37:28.000Z | dbd-course-recommender/course_recommender/course/models.py | singh-priyank/DBMS_Course | 6538cd7bc2172b8a54c6c71776a2f5ad4daeeb32 | [
"MIT"
] | 1 | 2020-11-17T07:17:29.000Z | 2021-04-23T20:39:59.000Z | dbd-course-recommender/course_recommender/course/models.py | singh-priyank/DBMS_Course | 6538cd7bc2172b8a54c6c71776a2f5ad4daeeb32 | [
"MIT"
] | null | null | null | from django.core.validators import MaxValueValidator
from django.db import models
from django.utils import timezone
from django.db.models import Avg
import math
class Platform(models.Model):
    name = models.CharField(max_length=80, blank=True, null=True)
image = models.ImageField(upload_to='platform/%Y/%m/%d', blank=True)
def __str__(self):
return f'{self.name}'
class Domain(models.Model):
name = models.CharField(max_length=80, blank=True, null=True)
image = models.ImageField(upload_to='domain/%Y/%m/%d', blank=True)
description = models.TextField(blank=True, null=True)
def __str__(self):
return f'{self.name}'
class Category(models.Model):
name = models.CharField(max_length=80, blank=True, null=True)
domain = models.ForeignKey(Domain, on_delete=models.CASCADE, null = True)
image = models.ImageField(upload_to='category/%Y/%m/%d', blank=True)
def __str__(self):
return f'{self.name} | {self.domain.name}'
class Course(models.Model):
LEVEL_CHOICES = (
        ('A', 'Advanced'),
('B', 'Beginner'),
('I', 'Intermediate')
)
# id = models.IntegerField(primary_key=True)
name = models.CharField(max_length=80, blank=True, null=True)
course_describtion = models.TextField(blank=True, null=True)
instructor = models.CharField(max_length=80, blank=True, null=True)
category = models.ForeignKey(Category, on_delete=models.CASCADE, null =True)
image = models.ImageField(upload_to='course/%Y/%m/%d', blank=True, null =True)
cost = models.BooleanField(default= False, null =True)
link = models.URLField(null= True)
platform = models.ForeignKey(Platform, on_delete=models.CASCADE, null = True)
language = models.CharField(max_length=80, blank=True, null=True)
duration = models.IntegerField(blank=True, null=True)
level = models.CharField(max_length=1, choices=LEVEL_CHOICES, blank= True, null=True)
certificate = models.BooleanField(default= True, null =True)
@property
def ratings(self):
rating_list = SubjectRating.objects.filter(subject = self)
count = len(rating_list)
if count == 0:
return [True,False,False,False,False]
s=0
for i in range(count):
s+=rating_list[i].rating
arr=[False,False,False,False,False,]
ceil = math.ceil(s/count)
for i in range(ceil):
arr[i]=True
return arr
@property
def people(self):
rating_list = SubjectRating.objects.filter(subject = self)
count = len(rating_list)
return count
def __str__(self):
return f'{self.name}'
class SubjectRating(models.Model):
subject = models.ForeignKey(Course,on_delete=models.CASCADE, null = True)
student = models.ForeignKey('users.Student', on_delete=models.CASCADE, null = True)
rating = models.IntegerField(default = 1)
comment = models.TextField(blank=True, null=True)
timestamp = models.DateTimeField(auto_now_add=True, blank=True, null=True)
@property
def ratings(self):
arr=[False,False,False,False,False,]
for i in range(self.rating):
arr[i]=True
return arr
def __str__(self):
return f'Course: {self.subject.name} | Student: {self.student.account.username} | Rating: {self.rating}'
class Enrollment(models.Model):
course = models.ForeignKey('Course', on_delete=models.CASCADE, null =True)
student = models.ForeignKey('users.Student', on_delete=models.CASCADE, null = True)
status = models.IntegerField(blank=True, null=True)
#completed = models.BooleanField(default= False, blank=True, null=True)
# lesson = models.ForeignKey('Lesson', models.DO_NOTHING, db_column='lesson', blank=True, null=True)
def __str__(self):
        return f'Student {self.student.account.username} | Course: {self.course.name}'
 | 38.821782 | 112 | 0.674828 |
4a22f5b94471d87380421376ce23fa98b85c7e47 | 57,565 | py | Python | source/NemohImproved/NemohPython/nemoh/preprocessor.py | NREL/OpenWARP | ca49c4cbde17e0cead69bd9e55a81d5c0fafe4df | [
"Apache-2.0"
] | 22 | 2015-06-22T07:35:04.000Z | 2021-07-23T05:10:09.000Z | source/NemohImproved/NemohPython/nemoh/preprocessor.py | NREL/OpenWARP | ca49c4cbde17e0cead69bd9e55a81d5c0fafe4df | [
"Apache-2.0"
] | 9 | 2015-07-30T20:01:35.000Z | 2020-08-28T17:29:18.000Z | source/NemohImproved/NemohPython/nemoh/preprocessor.py | NREL/OpenWARP | ca49c4cbde17e0cead69bd9e55a81d5c0fafe4df | [
"Apache-2.0"
] | 13 | 2016-04-01T07:45:27.000Z | 2021-04-06T08:33:33.000Z | #!/usr/bin/env python
"""
This is the main program for the pre-processor. It reads and prepares the mesh and
the calculation cases (radiation and diffraction; set of body conditions).
Changes in version 1.1:
Added possibility to run the code with custom settings
Changes in version 1.2 (Code Acceleration of the Calculation of Influence Coefficients of Nemoh):
Added switch influence to ode hdf5 settings
Changes in version 1.3 (Implementation of Higher Order Panel Methods):
Added logic to store USE_HIGHER_ORDER, NUM_PANEL_HIGHER_ORDER, B_SPLINE_ORDER settings
in hdf5 file.
Changes in version 1.4 (Dipoles Implementation in NEMOH):
Added logic to store USE_DIPOLES_IMPLEMENTATION and THIN_PANELS settings
in hdf5 file.
Changes in version 1.5 (Hydrodynamic Data Exporter Assembly v1.0)
    Added parameters controlling whether or not to compute the drift forces or yaw moment
Changes in version 1.6 (Irregular Frequencies Assembly)
Added logic to discretize the interior of the free surface when the newly added settings
to remove irregular frequencies is on.
Applied some bug fixes to allow the shape of hdf5 file dataset
to be automatically resized.
Changes in version 1.7 (OpenWarp - Add Logging Functionality)
Added support for logging.
Changes in version 1.8 (OPENWARP - FIX WAVE FREQUENCY AND DIRECTION CRASH BUG):
1. Corrected the fact that we were computing the normal velocities beta
using the wrong shape.
2. Changed the way we do logging from this module when it is run
as a child process.
"""
import utility
import numpy as np
import math
import sys
import h5py
import structure
from models import TMesh
from models import TCase
from utility import cih
from utility import sih
import settings
import os
from scipy.spatial import Delaunay
import logging
__author__ = "yedtoss"
__copyright__ = "Copyright (C) 2014-2016 TopCoder Inc. All rights reserved."
__version__ = "1.8"
def read_mesh(hdf5_data, custom_config):
"""
Read the mesh data from the hdf5 file
Args:
hdf5_data: object, the hdf5 opened file
Return:
the mesh data
"""
# Getting the logger here and not globally following recommendation from http://victorlin.me/posts/2012/08/26/good-logging-practice-in-python
logger = logging.getLogger(__name__)
signature = __name__ + '.read_mesh(hdf5_data, custom_config)'
# No need to log the parameter of the method here as it will only be duplicate.
# This function is never called directly by the user and always call from the preprocess function
# which already logs the configuration.
utility.log_entrance(logger, signature,
{})
n_points=0
n_panels=0
bodies = hdf5_data.get(structure.H5_BODIES).values()
n_bodies = len(bodies)
interior_mesh_points = np.empty((3, 0))
interior_mesh_panels = np.empty((4, 0))
interior_c_panels = np.empty((0))
interior_n_points = 0
interior_n_panels = 0
remove_irregular_frequencies = utility.get_setting(settings.REMOVE_IRREGULAR_FREQUENCIES, custom_config,
'REMOVE_IRREGULAR_FREQUENCIES')
for c in range(n_bodies):
body = bodies[c]
dset = body.get(structure.H5_BODY_NUM_POINTS)
utility.check_dataset_type(dset, name='The number of points for body ' + str(c), location=structure.H5_BODY_NUM_POINTS)
n_points += dset[0]
dset = body.get(structure.H5_BODY_NUM_PANELS)
utility.check_dataset_type(dset, name='The number of panels for body ' + str(c), location=structure.H5_BODY_NUM_PANELS)
n_panels += dset[0]
mesh = TMesh(n_points=n_points, n_panels=n_panels, n_bodies=n_bodies)
logger.info('Found ' + str(n_points) + ' points and '
+ str(n_panels) + ' panels with ' + str(n_bodies) + ' bodies for the mesh')
n_points = 0
n_panels = 0
logger.info('Retrieving the panels coordinates as well as the indices between the body and the problems')
for c in range(n_bodies):
body = bodies[c]
# Both are already checked and logged
m = body.get(structure.H5_BODY_NUM_POINTS)[0]
n = body.get(structure.H5_BODY_NUM_PANELS)[0]
mesh_arr = body.get(structure.H5_BODY_MESH)
utility.check_dataset_type(mesh_arr, name='The mesh body ' + str(c), location=structure.H5_BODY_MESH)
utility.check_array_shape(logger, mesh_arr, name='the mesh array for body number ' + str(c), expected_shape = (m+n+1, 4))
ns = mesh_arr[0, 1]
if c > 0 and (ns != mesh.i_sym):
raise ValueError('There is an inconsistency in the mesh files regarding the xOz symmetries:'
'The symmetry detected in the body of the first mesh is different from the one on body ' + str(c))
else:
mesh.i_sym = int(ns)
for i in range(m):
mesh.x[:, n_points + i] = np.array(mesh_arr[i + 1, 1:4])
if remove_irregular_frequencies:
# If we have to remove frequencies, then we need to discretize the free surface
            # Rows 1..m of the mesh array hold the body points
            int_mesh = generate_mesh(np.asarray(mesh_arr[1:m+1, 1:4]))
interior_mesh_points = np.concatenate((interior_mesh_points, int_mesh["x"]), axis=1)
interior_mesh_panels = np.concatenate((interior_mesh_panels, int_mesh["p"]+mesh.n_points+interior_n_points), axis=1)
interior_c_panels = np.concatenate((interior_c_panels, c*np.ones(int_mesh["n_panels"])), axis=0)
interior_n_points += int_mesh["n_points"]
interior_n_panels += int_mesh["n_panels"]
for i in range(m, m+n):
mesh.p[:, n_panels+i-m] = np.array(mesh_arr[i + 1, 0:4]) - 1
for j in range(4):
mesh.p[j, n_panels + i-m] += n_points
mesh.c_panel[n_panels+i-m] = c
n_points += m
n_panels += n
mesh.last_panel[c] = n_panels
if remove_irregular_frequencies:
# If we have to remove frequencies, then we need to extend the mesh so
# that it contains the panels of the free surface too
mesh_interior = TMesh(n_points=n_points +interior_n_points , n_panels=n_panels + interior_n_panels, n_bodies=n_bodies)
mesh_interior.x[:, 0:n_points] = mesh.x
mesh_interior.x[:, n_points:] = interior_mesh_points
mesh_interior.p[:, 0:n_panels] = mesh.p
mesh_interior.p[:, n_panels:] = interior_mesh_panels
mesh_interior.last_panel = mesh.last_panel
mesh_interior.c_panel[0:n_panels] = mesh.c_panel
mesh_interior.c_panel[n_panels: ] = interior_c_panels
mesh_interior.i_sym = mesh.i_sym
mesh = mesh_interior
is_interior_domain = np.zeros((n_panels + interior_n_panels))
is_interior_domain[n_panels:] = 1
dset = utility.require_dataset(hdf5_data, structure.H5_SOLVER_IS_INTERIOR_DOMAIN, is_interior_domain.shape, dtype='i')
utility.set_hdf5_attributes(dset, structure.H5_SOLVER_IS_INTERIOR_DOMAIN_ATTR)
dset[:] = is_interior_domain
n_panels += interior_n_panels
n_points += interior_n_points
logger.info('Computing surface, center of gravity and normal vector of the mesh panels')
for i in range(mesh.n_panels):
u = mesh.x[:, mesh.p[1, i]] - mesh.x[:, mesh.p[0, i]]
v = mesh.x[:, mesh.p[3, i]] - mesh.x[:, mesh.p[1, i]]
w1 = np.cross(u, v)
a1 = 0.5*np.linalg.norm(w1)
u = mesh.x[:, mesh.p[3, i]] - mesh.x[:, mesh.p[2, i]]
v = mesh.x[:, mesh.p[1, i]] - mesh.x[:, mesh.p[2, i]]
w2 = np.cross(u, v)
a2 = 0.5*np.linalg.norm(w2)
mesh.a[i]= a1+a2
if mesh.a[i] < utility.EPS:
raise ValueError('Error: surface of panel ' + str(i) + ' is too small (' + str(mesh.a[i]) + ')')
mesh.xm[:, i] = (1./3)*(mesh.x[:, mesh.p[0, i]] + mesh.x[:, mesh.p[1, i]] + mesh.x[:, mesh.p[3, i]])*a1/mesh.a[i]
mesh.xm[:, i] += (1./3)*(mesh.x[:, mesh.p[1, i]] + mesh.x[:, mesh.p[2, i]] + mesh.x[:, mesh.p[3, i]])*a2/mesh.a[i]
u = w1 + w2
mesh.n[:, i] = u/np.linalg.norm(u)
utility.log_exit(logger, signature, [str(mesh)])
return mesh
def generate_mesh(raw_points):
"""
Given a list of points corresponding to the discretization of the body domain,
determine the points belonging to the plan where the body touches the water (or
free surface); then use the free surface to generate triangle meshing.
No debug/info logging here to avoid log pollution as the function is expected to be called many times
internally.
Args:
raw_points: The 2D array containing the list of points of shape
(n_points, 3)
Return:
A dictionary containing the points and triangles panel.
"""
# Get points in the waterline plane. The water plane is z=0 and we allow a tolerance of 1e-3
points = raw_points[np.abs(raw_points[:, 2]) < 1e-3]
# Generate a triangle mesh from the waterline segments such that each triangle angle is not
# too small
tri_mesh = Delaunay(points[:, 0:2])
n_panels = tri_mesh.simplices.shape[0]
# Get the points of the interior of the free surface
x = points[:, :]
x[:, 2] = 0
# Get the meshing connectivity
p = np.zeros((n_panels, 4))
p[:, 0:3] = tri_mesh.simplices
p[:, 3] = tri_mesh.simplices[:, 0]
return {"n_points" : x.shape[0],
"n_panels": n_panels,
"x": x.transpose(),
"p": p.transpose()
}
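# Hypothetical usage sketch for generate_mesh (not called by the real code):
# four waterline nodes of a unit square plus one submerged node that the
# |z| < 1e-3 filter above discards.
def _example_generate_mesh():
    raw_points = np.array([[0.0, 0.0, 0.0],
                           [1.0, 0.0, 0.0],
                           [1.0, 1.0, 0.0],
                           [0.0, 1.0, 0.0],
                           [0.5, 0.5, -1.0]])
    int_mesh = generate_mesh(raw_points)
    # int_mesh["x"] has shape (3, n_points) and int_mesh["p"] has shape
    # (4, n_panels), i.e. the transposed layout used by TMesh.
    return int_mesh["n_points"], int_mesh["n_panels"]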
def write_mesh_l12(mesh, hdf5_data):
"""
Write the l12 data to hdf5 from the mesh
Args:
mesh: object, the mesh
hdf5_data: object, the hdf5 opened file
"""
# Getting the logger here and not globally following recommendation from http://victorlin.me/posts/2012/08/26/good-logging-practice-in-python
logger = logging.getLogger(__name__)
signature = __name__ + '.write_mesh_l12(mesh, hdf5_data)'
# No need to log the hdf5_data parameter of the method here as it will only be duplicate.
# This function is never called directly by the user and always call from the preprocess function
# which already logs the configuration.
utility.log_entrance(logger, signature,
{"mesh" : str(mesh)})
dset = utility.require_dataset(hdf5_data, structure.H5_L12_COUNT, (2, ), dtype='i')
utility.set_hdf5_attributes(dset, structure.H5_L12_COUNT_ATTR)
dset[0] = 2
dset[1] = int(mesh.i_sym)
dset = utility.require_dataset(hdf5_data, structure.H5_L12_X, mesh.x.shape, dtype='f')
utility.set_hdf5_attributes(dset, structure.H5_L12_X_ATTR)
dset[:, :] = mesh.x
dset = utility.require_dataset(hdf5_data, structure.H5_L12_P, mesh.p.shape, dtype='i')
utility.set_hdf5_attributes(dset, structure.H5_L12_P_ATTR)
dset[:, :] = mesh.p + 1
utility.log_exit(logger, signature, [None])
def write_mesh_l10(mesh, hdf5_data):
"""
Write the l10 data to hdf5 from the mesh
Args:
mesh: object, the mesh
hdf5_data: object, the hdf5 opened file
"""
# Getting the logger here and not globally following recommendation from http://victorlin.me/posts/2012/08/26/good-logging-practice-in-python
logger = logging.getLogger(__name__)
signature = __name__ + '.write_mesh_l10(mesh, hdf5_data)'
# No need to log the hdf5_data parameter of the method here as it will only be duplicate.
# This function is never called directly by the user and always call from the preprocess function
# which already logs the configuration.
utility.log_entrance(logger, signature,
{"mesh" : str(mesh)})
dset = utility.require_dataset(hdf5_data, structure.H5_L10_COUNT, (4, ), dtype='i')
utility.set_hdf5_attributes(dset, structure.H5_L10_COUNT_ATTR)
dset[0] = mesh.i_sym
dset[1] = mesh.n_points
dset[2] = mesh.n_panels
dset[3] = mesh.n_bodies
dset = utility.require_dataset(hdf5_data, structure.H5_L10_CPANEL, mesh.c_panel.shape, dtype='i')
utility.set_hdf5_attributes(dset, structure.H5_L10_CPANEL_ATTR)
dset[:] = mesh.c_panel + 1
dset = utility.require_dataset(hdf5_data, structure.H5_L10_XM, mesh.xm.shape, dtype='f')
utility.set_hdf5_attributes(dset, structure.H5_L10_XM_ATTR)
dset[:, :] = mesh.xm
dset = utility.require_dataset(hdf5_data, structure.H5_L10_N, mesh.n.shape, dtype='f')
utility.set_hdf5_attributes(dset, structure.H5_L10_N_ATTR)
dset[:, :] = mesh.n
dset = utility.require_dataset(hdf5_data, structure.H5_L10_A, mesh.a.shape, dtype='f')
utility.set_hdf5_attributes(dset, structure.H5_L10_A_ATTR)
dset[:] = mesh.a
utility.log_exit(logger, signature, [None])
def write_mesh_tec(mesh, mesh_tec_file):
"""
Export the mesh to tec file
Args:
mesh: object, the mesh
mesh_tec_file: string, the path to the mesh tec file to save
"""
# Getting the logger here and not globally following recommendation from http://victorlin.me/posts/2012/08/26/good-logging-practice-in-python
logger = logging.getLogger(__name__)
signature = __name__ + '.write_mesh_tec(mesh, mesh_tec_file)'
utility.log_entrance(logger, signature,
{"mesh" : str(mesh),
"mesh_tec_file": mesh_tec_file})
utility.mkdir_p(os.path.abspath(os.path.dirname(mesh_tec_file)))
logger.info('Converting the mesh file in a tecplot format at ' + str(mesh_tec_file))
with open(mesh_tec_file, 'w') as inp:
inp.write('VARIABLES="X" "Y" "Z" "NX" "NY" "NZ" "A"\n')
inp.write('ZONE N=\t' + str(mesh.n_points) + '\t, E=\t' + str(mesh.n_panels) + '\t, F=FEPOINT,ET=QUADRILATERAL\n')
for i in range(mesh.n_points):
s = str(mesh.x[0, i]) + '\t' + str(mesh.x[1, i]) + '\t' + str(mesh.x[2, i]) + '\t0.\t0.\t0.\t0.\n'
inp.write(s)
for i in range(mesh.n_panels):
s = str(mesh.p[0, i] + 1) + '\t' + str(mesh.p[1, i] + 1) + '\t' + str(mesh.p[2, i] + 1) + '\t' + str(mesh.p[3, i] + 1) + '\n'
inp.write(s)
inp.write('ZONE t="normales", F=POINT, I=\t' + str(mesh.n_panels) + '\n')
for i in range(mesh.n_panels):
s = str(mesh.xm[0, i]) + '\t' + str(mesh.xm[1, i]) + '\t' + str(mesh.xm[2, i]) + '\t'
s += str(mesh.n[0, i]) + '\t' + str(mesh.n[1, i]) + '\t' + str(mesh.n[2, i]) + '\t'
s += str(mesh.a[i]) + '\n'
inp.write(s)
utility.log_and_print(logger, 'The mesh is converted to tec format in '
+ utility.get_abs(mesh_tec_file))
utility.log_exit(logger, signature, [None])
def write_fk_force_tec(int_case, fk_force, w, beta, filename):
"""
    Writes the Froude Krylov forces to .tec format
    Args:
        int_case: 1D array, the integration cases
        fk_force: 3D array, the Froude Krylov forces
w: 1D array, represents the wave frequencies omega
beta: 1D array, represents the wave directions beta
filename: string, the path to the file where to save the forces
"""
# Getting the logger here and not globally following recommendation from http://victorlin.me/posts/2012/08/26/good-logging-practice-in-python
logger = logging.getLogger(__name__)
signature = __name__ + '.write_fk_force_tec(int_case, fk_force, w, beta, filename)'
utility.log_entrance(logger, signature,
{"int_case" : str(int_case),
"filename": filename})
utility.mkdir_p(os.path.abspath(os.path.dirname(filename)))
n_integration = len(int_case)
n_beta = len(beta)
n_w = len(w)
logger.info('Converting the froude krylov forces in a tecplot format at ' + str(filename))
with open(filename, 'w') as inp:
inp.write('VARIABLES="w (rad/s)"\n')
for k in range(n_integration):
s = '"abs(F\t' + str(int_case[k].body + 1) + '\t' + str(k+1) + ')" "angle(F\t'
s += str(int_case[k].body + 1) + '\t' + str(k+1) + ')"\n'
inp.write(s)
for c in range(n_beta):
inp.write('Zone t="FKforce - beta =\t' + str(beta[c]*180./np.pi) + '",I=\t' + str(n_w) + ',F=POINT\n')
for i in range(n_w):
s = str(w[i]) + '\t'
for k in range(n_integration):
val = str(np.arctan2(np.imag(fk_force[i, c, k]), np.real(fk_force[i, c, k])))
s += str(np.abs(fk_force[i, c, k])) + '\t' + val + '\t'
inp.write(s)
inp.write('\n')
utility.log_and_print(logger, 'The fk forces were converted to tec format in '
+ utility.get_abs(filename))
utility.log_exit(logger, signature, [None])
def compute_nds(mesh, c, i_case, direction, axis):
"""
Compute the integration nds
Args:
mesh: object The mesh
        c: int, the body index
i_case: int, the integration case
direction 1D array of length 3: The direction (x, y or z)
axis 1D array of length 3: The axis coordinate
No debug/info logging here to avoid log pollution as the function is expected to be called many times
internally.
Returns:
the integration array nds
"""
nds = np.zeros(mesh.n_panels*2**mesh.i_sym, settings.NEMOH_FLOAT)
vel = np.copy(direction[0:3])
if i_case == 1:
for i in range(mesh.n_panels):
if mesh.c_panel[i] == c:
#vel = np.copy(direction[0:3])
nds[i] = - mesh.a[i] * (mesh.n[0, i] *vel[0] + mesh.n[1, i] *vel[1] + mesh.n[2, i] *vel[2])
else:
nds[i]=0.
if mesh.i_sym == 1:
if mesh.c_panel[i] == c:
#vel = np.copy(direction[0:3])
nds[i+ mesh.n_panels] = -mesh.a[i]*(mesh.n[0, i]*vel[0]-mesh.n[1, i]*vel[1] + mesh.n[2, i]*vel[2])
else:
nds[i+ mesh.n_panels] = 0.
elif i_case == 2:
for i in range(mesh.n_panels):
if mesh.c_panel[i] == c:
vel[0] = direction[1]*(mesh.xm[2, i] - axis[2]) - direction[2]*(mesh.xm[1, i] - axis[1])
vel[1] = direction[2]*(mesh.xm[0, i] - axis[0]) - direction[0]*(mesh.xm[2, i] - axis[2])
vel[2] = direction[0]*(mesh.xm[1, i] - axis[1]) - direction[1]*(mesh.xm[0, i] - axis[0])
nds[i] = - mesh.a[i] * (mesh.n[0, i] *vel[0] + mesh.n[1, i] *vel[1] + mesh.n[2, i] *vel[2])
else:
nds[i]=0.
if mesh.i_sym == 1:
if mesh.c_panel[i] == c:
vel[0] = direction[1]*(mesh.xm[2, i] - axis[2]) - direction[2]*(-mesh.xm[1, i] - axis[1])
vel[1] = direction[2]*(mesh.xm[0, i] - axis[0]) - direction[0]*(mesh.xm[2, i] - axis[2])
vel[2] = direction[0]*(-mesh.xm[1, i] - axis[1]) - direction[1]*(mesh.xm[0, i] - axis[0])
nds[i+ mesh.n_panels] = -mesh.a[i]*(mesh.n[0, i]*vel[0] - mesh.n[1, i]*vel[1] + mesh.n[2, i]*vel[2])
else:
nds[i+ mesh.n_panels] = 0.
elif i_case == 3:
raise NotImplementedError('Force case 3 is not implemented yet')
else:
raise RuntimeError('The radiation case of index ' + str(i_case) + ' is unknown')
return nds
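# Minimal sketch of a compute_nds call (assumes, as read_mesh does, that the
# TMesh constructor pre-allocates the x, p, c_panel, a, n and xm arrays): a
# single horizontal panel of unit area belonging to body 0, heave direction,
# no xOz symmetry.
def _example_compute_nds():
    mesh = TMesh(n_points=4, n_panels=1, n_bodies=1)
    mesh.i_sym = 0
    mesh.c_panel[0] = 0
    mesh.a[0] = 1.0
    mesh.n[:, 0] = np.array([0.0, 0.0, 1.0])
    mesh.xm[:, 0] = np.array([0.5, 0.5, -1.0])
    direction = np.array([0.0, 0.0, 1.0])
    axis = np.zeros(3)
    # i_case=1 is a translation mode, so nds[0] = -a * (n . direction) = -1.0
    return compute_nds(mesh, 0, 1, direction, axis)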
def compute_radiation_condition(mesh, c, i_case, direction, axis):
"""
Compute the radiation condition
Args:
mesh: object The mesh
        c: int, the body index
i_case: int, the integration case
direction 1D array of length 3: The direction (x, y or z)
axis 1D array of length 3: The axis coordinate
No debug/info logging here to avoid log pollution as the function is expected to be called many times
internally.
Returns:
the radiation condition array n_vel
"""
n_vel = np.zeros(mesh.n_panels*2**mesh.i_sym, settings.NEMOH_COMPLEX)
vel = np.copy(direction[0:3])
if i_case == 1:
for i in range(mesh.n_panels):
if mesh.c_panel[i] == c:
vel = np.copy(direction[0:3])
n_vel[i] = complex(np.sum(mesh.n[:, i].flatten()*vel.flatten()), 0)
else:
n_vel[i] = complex(0, 0)
if mesh.i_sym == 1:
if mesh.c_panel[i] == c:
vel = np.copy(direction[0:3])
#nn = mesh.n[:, i]
#nn[1] *= -1
#n_vel[i + mesh.n_panels] = complex(np.sum(nn.flatten()*vel.flatten()), 0)
n_vel[i + mesh.n_panels] = complex(mesh.n[0,i]*vel[0]-mesh.n[1,i]*vel[1]+ mesh.n[2,i]*vel[2], 0)
else:
n_vel[i+ mesh.n_panels] = complex(0, 0)
elif i_case == 2:
for i in range(mesh.n_panels):
if mesh.c_panel[i] == c:
vel[0] = direction[1]*(mesh.xm[2, i] - axis[2]) - direction[2]*(mesh.xm[1, i] - axis[1])
vel[1] = direction[2]*(mesh.xm[0, i] - axis[0]) - direction[0]*(mesh.xm[2, i] - axis[2])
vel[2] = direction[0]*(mesh.xm[1, i] - axis[1]) - direction[1]*(mesh.xm[0, i] - axis[0])
n_vel[i] = complex(np.sum(mesh.n[:, i].flatten()*vel.flatten()), 0)
else:
n_vel[i] = complex(0, 0)
if mesh.i_sym == 1:
if mesh.c_panel[i] == c:
vel[0] = direction[1]*(mesh.xm[2, i] - axis[2]) - direction[2]*(-mesh.xm[1, i] - axis[1])
vel[1] = direction[2]*(mesh.xm[0, i] - axis[0]) - direction[0]*(mesh.xm[2, i] - axis[2])
vel[2] = direction[0]*(-mesh.xm[1, i] - axis[1]) - direction[1]*(mesh.xm[0, i] - axis[0])
#nn = mesh.n[:, i]
#nn[1] *= -1
#n_vel[i+ mesh.n_panels] = complex(np.sum(nn.flatten()*vel.flatten()), 0)
n_vel[i + mesh.n_panels] = complex(mesh.n[0,i]*vel[0]-mesh.n[1,i]*vel[1]+ mesh.n[2,i]*vel[2], 0)
else:
n_vel[i + mesh.n_panels] = complex(0, 0)
elif i_case == 3:
raise NotImplementedError('Force case 3 is not implemented yet')
else:
raise RuntimeError('The radiation case of index ' + str(i_case) + ' is unknown')
return n_vel
def compute_one_wave(k, w, beta, wt, environment):
"""
Calculate the complex potential, pressure and fluid velocities for a regular wave eta=sin(k*wbar-wt)
Args:
k: float, the wave number
w: float, the wave frequency
beta: float, the wave direction
wt: 1D array of length 3, the wave position
environment: object, the environment
No debug/info logging here to avoid log pollution as the function is expected to be called many times
internally.
Returns
A dictionary containing the potential, pressure and fluid velocities
"""
x = wt[0]
y = wt[1]
z = wt[2]
w_bar = (x-environment.x_eff)*np.cos(beta)+(y-environment.y_eff)*np.sin(beta)
phi = -environment.g/w*cih(k, z, environment.depth)*np.exp(utility.II*k*w_bar)
p = -environment.rho*environment.g*utility.II*cih(k, z, environment.depth)*np.exp(utility.II*k*w_bar)
vx = -environment.g/w*utility.II*k*np.cos(beta)*cih(k, z, environment.depth)*np.exp(utility.II*k*w_bar)
vy = -environment.g/w*utility.II*k*np.sin(beta)*cih(k, z, environment.depth)*np.exp(utility.II*k*w_bar)
vz = -environment.g/w*k*sih(k, z, environment.depth)*np.exp(utility.II*k*w_bar)
return {"phi": phi, "p": p, "vx": vx, "vy": vy, "vz": vz, "v": np.array([vx, vy, vz])}
def compute_wave(mesh, w, beta, environment):
"""
Calculate the array of complex potential, pressure and fluid velocities for a wave
Args:
mesh: object, the mesh
w: float, the wave frequency
beta: float, the wave direction
environment: object, the environment
No debug/info logging here to avoid log pollution as the function is expected to be called many times
internally.
Returns
A dictionary containing the pressure and fluid velocities
"""
n_vel = np.zeros(mesh.n_panels*2**mesh.i_sym, settings.NEMOH_COMPLEX)
pressure = np.zeros(mesh.n_panels*2**mesh.i_sym, settings.NEMOH_COMPLEX)
k_wave = utility.compute_wave_number(w, environment)
for i in range(2**mesh.i_sym*mesh.n_panels):
if i < mesh.n_panels:
wbar = (mesh.xm[0, i] - environment.x_eff)*np.cos(beta) + (mesh.xm[1, i] - environment.y_eff)*np.sin(beta)
pressure[i] = -environment.g/w*np.exp(utility.II*k_wave*wbar)
n_vel[i] = pressure[i]*(utility.II*k_wave*(np.cos(beta)*mesh.n[0,i]+ \
np.sin(beta)*mesh.n[1,i])*cih(k_wave,mesh.xm[2, i], environment.depth)+ \
k_wave*mesh.n[2,i]*sih(k_wave,mesh.xm[2,i],environment.depth))
pressure[i] *= cih(k_wave,mesh.xm[2,i], environment.depth)
one_wave = compute_one_wave(k_wave, w, beta, mesh.xm[0:3, i], environment)
# This makes previous pressure[i] statement useless
pressure[i] = one_wave["p"]
n_vel[i] = np.sum(one_wave["v"].flatten()*mesh.n[:, i].flatten())
else:
wbar=(mesh.xm[0, i-mesh.n_panels]-environment.x_eff)*np.cos(beta)+ \
(-mesh.xm[1, i-mesh.n_panels]-environment.y_eff)*np.sin(beta)
pressure[i] = -environment.g/w*np.exp(utility.II*k_wave*wbar)
n_vel[i] = pressure[i]*(utility.II*k_wave*(np.cos(beta)*mesh.n[0,i-mesh.n_panels]+\
np.sin(beta)*(-1.*mesh.n[1,i-mesh.n_panels]))*cih(k_wave, mesh.xm[2, i-mesh.n_panels],\
environment.depth)+k_wave*mesh.n[2,i-mesh.n_panels]*sih(k_wave,mesh.xm[2,i-mesh.n_panels],\
environment.depth))
pressure[i] *= cih(k_wave, mesh.xm[2, i-mesh.n_panels], environment.depth)
#one_wave = compute_one_wave(k_wave, w, beta, mesh.xm[0:3, i-mesh.n_panels], environment)
#xm = mesh.xm[0:3, i-mesh.n_panels]
#xm[1] = - xm[1]
xm = np.array([mesh.xm[0, i-mesh.n_panels], -mesh.xm[1, i-mesh.n_panels], mesh.xm[2, i-mesh.n_panels]])
one_wave = compute_one_wave(k_wave, w, beta, xm, environment)
pressure[i] = one_wave["p"]
#one_wave["v"][1] *= -1
#n_vel[i] = np.sum(one_wave["v"]*mesh.n[:, i-mesh.n_panels])
vx = one_wave["v"][0]
vy = one_wave["v"][1]
vz = one_wave["v"][2]
n_vel[i] = vx*mesh.n[0, i-mesh.n_panels] - vy*mesh.n[1, i-mesh.n_panels] + vz*mesh.n[2, i-mesh.n_panels]
return {"n_vel": n_vel, "pressure": pressure}
def run(hdf5_data, custom_config):
"""
This function run the preprocessor
Args:
hdf5_data: object, the hdf5 opened file
custom_config, dict The custom configuration dictionary
"""
# Getting the logger here and not globally following recommendation from http://victorlin.me/posts/2012/08/26/good-logging-practice-in-python
logger = logging.getLogger(__name__)
signature = __name__ + '.run(hdf5_data, custom_config)'
# No need to log the parameter of the method here as it will only be duplicate.
# This function is never called directly by the user and always call from the preprocess function
# which already logs the configuration.
utility.log_entrance(logger, signature,
{})
n_radiation = 0
n_integration = 0
bodies = hdf5_data.get(structure.H5_BODIES)
utility.check_group_type(bodies, name='The bodies group', location=structure.H5_BODIES)
logger.info("Processing the bodies group: " + str(bodies))
bodies = bodies.values()
for body in bodies:
utility.check_group_type(body, name='The sub-body group', location=structure.H5_BODIES)
dset = body.get(structure.H5_FREEDOM_DEGREE)
utility.check_dataset_type(dset, name='The freedom degree', location=structure.H5_FREEDOM_DEGREE)
n_radiation += dset.shape[0]
dset = body.get(structure.H5_GENERALISED_FORCES)
utility.check_dataset_type(dset, name='The generalised forces', location=structure.H5_GENERALISED_FORCES)
n_integration += dset.shape[0]
logger.info("Solving " + str(n_radiation) + " problems with " + str(n_integration) + " forces")
logger.info("Processing wave frequencies information")
dset = hdf5_data.get(structure.H5_NUM_WAVE_FREQUENCIES)
utility.check_dataset_type(dset, name='The number of wave frequencies', location=structure.H5_NUM_WAVE_FREQUENCIES)
n_w = dset[0]
dset = hdf5_data.get(structure.H5_MIN_WAVE_FREQUENCIES)
utility.check_dataset_type(dset, name='The minimum wave frequency', location=structure.H5_MIN_WAVE_FREQUENCIES)
w_min = dset[0]
dset = hdf5_data.get(structure.H5_MAX_WAVE_FREQUENCIES)
utility.check_dataset_type(dset, name='The maximum wave frequency', location=structure.H5_MAX_WAVE_FREQUENCIES)
w_max = dset[0]
w = np.zeros(n_w, settings.NEMOH_FLOAT)
if n_w > 1:
for j in range(n_w):
w[j] = w_min+(w_max-w_min)*j/(n_w-1)
else:
w[0] = w_min
logger.info(' Using ' + str(n_w) + ' equally spaced wave frequencies from ' + str(w[0]) + ' to ' + str(w[n_w-1]))
logger.info("Processing wave directions information")
dset = hdf5_data.get(structure.H5_NUM_WAVE_DIRECTIONS)
utility.check_dataset_type(dset, name='The number of wave directions', location=structure.H5_NUM_WAVE_DIRECTIONS)
n_beta = dset[0]
dset = hdf5_data.get(structure.H5_MIN_WAVE_DIRECTIONS)
utility.check_dataset_type(dset, name='The minimum wave direction', location=structure.H5_MIN_WAVE_DIRECTIONS)
beta_min = dset[0]
dset = hdf5_data.get(structure.H5_MAX_WAVE_DIRECTIONS)
utility.check_dataset_type(dset, name='The maximum wave direction', location=structure.H5_MAX_WAVE_DIRECTIONS)
beta_max = dset[0]
beta = np.zeros(n_beta, settings.NEMOH_FLOAT)
if n_beta > 1:
for j in range(n_beta):
beta[j] = (beta_min+(beta_max-beta_min)*j/(n_beta-1))*math.pi/180.
else:
beta[0] = beta_min * math.pi/180.
logger.info(' Using ' + str(n_beta) + str(' equally spaced wave directions from ') + str(beta[0]) + ' to ' + str(beta[n_beta-1]))
dset = hdf5_data.get(structure.H5_SHOW_PRESSURE)
utility.check_dataset_type(dset, name='The switch for showing pressure', location=structure.H5_SHOW_PRESSURE)
switch_potential = dset[0] >= 1
dset = hdf5_data.get(structure.H5_KOCHIN_NUMBER)
utility.check_dataset_type(dset, name='The number of direction for the computation of far field coefficients (Kochin function)',
location=structure.H5_KOCHIN_NUMBER)
n_theta = dset[0]
dset = hdf5_data.get(structure.H5_KOCHIN_MIN)
utility.check_dataset_type(dset, name='The minimum number of direction for the computation of far field coefficients (Kochin function)',
location=structure.H5_KOCHIN_MIN)
theta_min = dset[0]
dset = hdf5_data.get(structure.H5_KOCHIN_MAX)
utility.check_dataset_type(dset, name='The maximum number of direction for the computation of far field coefficients (Kochin function)',
location=structure.H5_KOCHIN_MAX)
theta_max = dset[0]
switch_kochin = n_theta > 0
if switch_kochin:
logger.info('The computation of far field coefficients (Kochin function) is enabled with '
+ str(n_theta) + ' directions from ' + str(theta_min) + ' to ' + str(theta_max))
else:
logger.info('The computation of far field coefficients (Kochin function) is disabled')
dset = hdf5_data.get(structure.H5_FREE_SURFACE_POINTS_X)
utility.check_dataset_type(dset, name=str(structure.H5_FREE_SURFACE_POINTS_X_ATTR['description']),
location=structure.H5_FREE_SURFACE_POINTS_X)
n_x = dset[0]
dset = hdf5_data.get(structure.H5_FREE_SURFACE_POINTS_Y)
utility.check_dataset_type(dset, name=str(structure.H5_FREE_SURFACE_POINTS_Y_ATTR['description']),
location=structure.H5_FREE_SURFACE_POINTS_Y)
n_y = dset[0]
dset = hdf5_data.get(structure.H5_FREE_SURFACE_DIMENSION_X)
utility.check_dataset_type(dset, name=str(structure.H5_FREE_SURFACE_DIMENSION_X_ATTR['description']),
location=structure.H5_FREE_SURFACE_DIMENSION_X)
l_x = dset[0]
dset = hdf5_data.get(structure.H5_FREE_SURFACE_DIMENSION_Y)
utility.check_dataset_type(dset, name=str(structure.H5_FREE_SURFACE_DIMENSION_Y_ATTR['description']),
location=structure.H5_FREE_SURFACE_DIMENSION_Y)
l_y = dset[0]
switch_free_surface = n_x > 0
if switch_free_surface:
logger.info('The computation of the wave elevation is enabled with '
+ str(n_x) + ' points in x direction of dimension' + str(l_x)
+ ' and ' + str(n_y) + ' points in y direction of dimension ' + str(l_y))
else:
logger.info('The computation of the wave elevation is disabled')
rad_case = [TCase() for x in range(n_radiation)]
int_case = [TCase() for x in range(n_integration)]
j_rad = 0
j_int = 0
for c in range(len(bodies)):
body = bodies[c]
# This has already been checked
freedom_degree = body.get(structure.H5_FREEDOM_DEGREE)
utility.check_array_ndim(freedom_degree, name='the freedom degree for body number ' + str(c),
expected_ndim=2)
utility.check_array_dim(logger, freedom_degree, name='the freedom degree for body number ' + str(c), expected_dim=7, dim_idx=1)
m = freedom_degree.len()
for i in range(m):
case = TCase()
case.i_case = freedom_degree[i, 0]
case.direction = np.array(freedom_degree[i, 1:4])
case.axis = np.array(freedom_degree[i, 4:7])
case.i_body = c
case.mode = i
rad_case[j_rad + i] = case
j_rad += m
# This has already been checked
generalised_forces = body.get(structure.H5_GENERALISED_FORCES)
utility.check_array_ndim(generalised_forces, name='the generalised forces for body number ' + str(c),
expected_ndim=2)
m = generalised_forces.len()
utility.check_array_dim(logger, generalised_forces, name='generalised forces for body number ' + str(c), expected_dim=7, dim_idx=1)
for i in range(m):
case = TCase()
case.i_case = generalised_forces[i, 0]
case.direction = np.array(generalised_forces[i, 1:4])
case.axis = np.array(generalised_forces[i, 4:7])
case.i_body = c
case.mode = i
int_case[j_int + i] = case
j_int += m
utility.log_and_print(logger, 'Summary of calculation')
dset = hdf5_data.get(structure.H5_ENV_DEPTH)
utility.check_dataset_type(dset, name='The depth of fluid (water)',
location=structure.H5_ENV_DEPTH)
depth = dset[0]
if depth > 0:
utility.log_and_print(logger, ' The water depth is ' + str(depth) + ' meter')
else:
utility.log_and_print(logger, ' The water depth is infinite ')
utility.log_and_print(logger, ' -> ' + str(n_w) + ' wave frequencies from ' + str(w[0]) + ' to ' + str(w[n_w-1]))
utility.log_and_print(logger, ' -> ' + str(n_beta) + str(' wave directions from ')
+ str(beta[0]) + ' to ' + str(beta[n_beta-1]))
utility.log_and_print(logger, ' -> ' + str(n_radiation) + ' radiation problems')
utility.log_and_print(logger, ' -> ' + str(n_integration) + ' forces')
logger.info('Reading the mesh from the hdf5 file')
mesh = read_mesh(hdf5_data, custom_config)
write_mesh_l12(mesh, hdf5_data)
write_mesh_l10(mesh, hdf5_data)
mesh_tec_file = utility.get_setting(settings.MESH_TEC_FILE, custom_config, 'MESH_TEC_FILE')
if mesh_tec_file:
write_mesh_tec(mesh, mesh_tec_file)
fnds = np.zeros((n_integration, mesh.n_panels*2**mesh.i_sym), settings.NEMOH_FLOAT)
logger.info('Computing the integration nds')
for j in range(n_integration):
fnds[j, :] = compute_nds(mesh, int_case[j].body, int_case[j].i_case, int_case[j].direction, int_case[j].axis)
dset = utility.require_dataset(hdf5_data, structure.H5_MESH_INTEGRATION, fnds.shape, dtype='f')
utility.set_hdf5_attributes(dset, structure.H5_MESH_INTEGRATION_ATTR)
dset[:, :] = fnds
logger.info('The information about how the pressure has to be integrated '
'over the body surfaces to obtain the requested forces has been saved in '
+ str(structure.H5_MESH_INTEGRATION) + '. Its characteristic is: '
+ str(dset))
environment = utility.read_environment(hdf5_data)
normal_velocity = np.zeros((mesh.n_panels*2**mesh.i_sym, (n_beta+n_radiation)*n_w), settings.NEMOH_COMPLEX)
fk_force = np.zeros((n_w, n_beta, n_integration), settings.NEMOH_COMPLEX)
logger.info('Computing the body conditions for each radiation and diffraction problem (normal velocities)'
' and the Froude Krylov forces for each of the diffraction problem (FK forces)')
for i in range(n_w):
for j in range(n_beta):
result = compute_wave(mesh, w[i], beta[j], environment)
pressure = result["pressure"]
n_vel = result["n_vel"]
normal_velocity[:, j+ i*(n_beta+n_radiation)] = n_vel
# Calculate the corresponding FK forces
for k in range(n_integration):
#for c in range(mesh.n_panels*2**mesh.i_sym):
#fk_force[i, j, k] += pressure[c]*fnds[k, c]
fk_force[i, j, k] = np.sum(pressure.flatten()*fnds[k, :].flatten())
for j in range(n_radiation):
n_vel = compute_radiation_condition(mesh, rad_case[j].body, rad_case[j].i_case, rad_case[j].direction,
rad_case[j].axis)
normal_velocity[:, j + n_beta + i*(n_beta+n_radiation)] = n_vel
# Save body conditions
n_problems = n_w*(n_radiation+n_beta)
bc_omega = w.repeat(n_beta + n_radiation)
dset = utility.require_dataset(hdf5_data, structure.H5_NORMAL_VELOCITY_W, bc_omega.shape, dtype='f', maxshape=(None))
utility.set_hdf5_attributes(dset, structure.H5_NORMAL_VELOCITY_W_ATTR)
dset[:] = bc_omega
bc_switch_type = -np.ones(n_problems, dtype='f') # Set the whole array to -1
# Set the first n_beta values to beta, skip the next n_radiation and so on until the end of the array
for i in xrange(0, n_problems, n_beta + n_radiation):
bc_switch_type[i:i + n_beta] = beta
dset = utility.require_dataset(hdf5_data, structure.H5_NORMAL_VELOCITY_BETA, bc_switch_type.shape, dtype='f')
utility.set_hdf5_attributes(dset, structure.H5_NORMAL_VELOCITY_BETA_ATTR)
dset[:] = bc_switch_type
logger.info('Saved the wave frequencies and directions for each of the ' + str(n_beta + n_radiation)
+ ' radiation or integration cases leading to a total of ' + str(n_problems) + ' problems to be solved')
temp = int(switch_potential)*np.ones(n_problems, dtype='i')
dset = utility.require_dataset(hdf5_data, structure.H5_NORMAL_VELOCITY_SWITCH_POTENTIAL, temp.shape, dtype='i')
utility.set_hdf5_attributes(dset, structure.H5_NORMAL_VELOCITY_SWITCH_POTENTIAL_ATTR)
dset[:] = temp
temp = int(switch_free_surface)*np.ones(n_problems, dtype='i')
dset = utility.require_dataset(hdf5_data, structure.H5_NORMAL_VELOCITY_SWITCH_FREE_SURFACE, temp.shape, dtype='i')
utility.set_hdf5_attributes(dset, structure.H5_NORMAL_VELOCITY_SWITCH_FREE_SURFACE_ATTR)
dset[:] = temp
temp = int(switch_kochin)*np.ones(n_problems, dtype='i')
dset = utility.require_dataset(hdf5_data, structure.H5_NORMAL_VELOCITY_SWITCH_KOCHIN, temp.shape, dtype='i')
utility.set_hdf5_attributes(dset, structure.H5_NORMAL_VELOCITY_SWITCH_KOCHIN_ATTR)
dset[:] = temp
    logger.info('Saved whether or not to compute the potential, the free surface visualization coefficients '
'and the Kochin function for each of the ' + str(n_problems) + ' problems to be solved; '
'respectively at ' + structure.H5_NORMAL_VELOCITY_SWITCH_POTENTIAL + ', '
+ structure.H5_NORMAL_VELOCITY_SWITCH_FREE_SURFACE + ', '
+ structure.H5_NORMAL_VELOCITY_SWITCH_KOCHIN)
dset = utility.require_dataset(hdf5_data, structure.H5_NORMAL_VELOCITY_VELOCITIES, normal_velocity.shape, dtype='F')
utility.set_hdf5_attributes(dset, structure.H5_NORMAL_VELOCITY_VELOCITIES_ATTR)
dset[:, :] = normal_velocity
logger.info('Saved the normal velocities at ' + structure.H5_NORMAL_VELOCITY_VELOCITIES
+ ' with characteristics ' + str(dset))
#fk_force_f = fk_force.flatten()
#fk_force_o = np.vstack((np.abs(fk_force_f), np.arctan2(np.imag(fk_force_f), np.real(fk_force_f)))).transpose()
logger.info('Computing the magnitude and angle of the FK forces')
fk_force_o = np.zeros((n_integration*n_w, 2*n_beta+2*n_radiation), dtype='f')
idx = 0
for k in range(n_integration):
for i in range(n_w):
for c in range(n_beta):
fk_force_o[idx, 2*c] = np.abs(fk_force[i, c, k])
fk_force_o[idx, 2*c+1] = np.arctan2(np.imag(fk_force[i, c, k]), np.real(fk_force[i, c, k]))
for c in range(2*n_radiation):
fk_force_o[idx, 2*n_beta + c] = 0
idx += 1
dset = utility.require_dataset(hdf5_data, structure.H5_RESULTS_FK_FORCES, fk_force_o.shape, dtype='f')
utility.set_hdf5_attributes(dset, structure.H5_RESULTS_FK_FORCES_ATTR)
dset[:, :] = fk_force_o
logger.info('Saved the magnitude and angle of the fk forces at'
+ str(structure.H5_RESULTS_FK_FORCES) + ' with characteristics: ' + str(dset))
dset = utility.require_dataset(hdf5_data, structure.H5_RESULTS_FK_FORCES_RAW, fk_force.shape, dtype='F')
utility.set_hdf5_attributes(dset, structure.H5_RESULTS_FK_FORCES_RAW_ATTR)
dset[:, :, :] = fk_force
logger.info('Saved the raw imaginary fk forces numbers at'
+ str(structure.H5_RESULTS_FK_FORCES_RAW) + ' with characteristics: ' + str(dset))
fk_force_tec_file = utility.get_setting(settings.FK_FORCE_TEC_FILE, custom_config, 'FK_FORCE_TEC_FILE')
if fk_force_tec_file:
logger.info('Converting the FK forces to the Tecplot format at the file ' + str(fk_force_tec_file))
write_fk_force_tec(int_case, fk_force, w, beta, fk_force_tec_file)
#free_surface_v = [[-0.5*l_x+l_x*i/(n_x-1), -0.5*l_y+l_y*j/(n_y-1), 0.] for i in range(n_x) for j in range(
# n_y)]
free_surface_v = np.zeros((3, n_x*n_y))
logger.info('Computing the free surface coefficients matrix of shape ' + str(free_surface_v.shape))
k = 0
for i in range(n_x):
for j in range(n_y):
free_surface_v[0, k] = -0.5*l_x+l_x*i/(n_x-1)
free_surface_v[1, k] = -0.5*l_y+l_y*j/(n_y-1)
free_surface_v[2, k] = 0.
k += 1
#free_surface_v = np.array(free_surface_v)
dset = utility.require_dataset(hdf5_data, structure.H5_MESH_FREE_SURFACE_VECTORS, free_surface_v.shape, dtype='f')
utility.set_hdf5_attributes(dset, structure.H5_MESH_FREE_SURFACE_VECTORS_ATTR)
dset[:, :] = free_surface_v
logger.info('Saved the free surface coefficients at ' + str( structure.H5_MESH_FREE_SURFACE_VECTORS)
+ ' with characteristics ' + str(dset))
free_surface_v = np.zeros((0, 0))
logger.info('Computing the free surface index matrix (identifying the corresponding problem) of shape ' + str(free_surface_v.shape))
if (n_x-1) > 0 and (n_y-1) >0:
#free_surface_v = [[j+i*n_y, j+1+i*n_y, j+1+(i+1)*n_y, j+(i+1)*n_y] for i in range(n_x-1) for j in
#range(n_y-1)]
free_surface_v = np.zeros((4, (n_x-1)*(n_y-1)))
k = 0
for i in range(n_x-1):
for j in range(n_y-1):
free_surface_v[0, k] = j+i*n_y
free_surface_v[1, k] = j+1+i*n_y
free_surface_v[2, k] = j+1+(i+1)*n_y
free_surface_v[3, k] = j+(i+1)*n_y
k += 1
#free_surface_v = np.array(free_surface_v)
dset = utility.require_dataset(hdf5_data, structure.H5_MESH_FREE_SURFACE_INDEX, free_surface_v.shape, dtype='f')
utility.set_hdf5_attributes(dset, structure.H5_MESH_FREE_SURFACE_INDEX_ATTR)
dset[:, :] = free_surface_v
logger.info('Saved the free surface index at ' + str(structure.H5_MESH_FREE_SURFACE_INDEX)
+ ' with characteristics ' + str(dset))
# Generate Kochin
kochin = np.array([])
if n_theta > 0:
logger.info('Computing the ' + str(n_theta) + ' angles for which the Kochin function will be calculated equi-distantly '
' from ' + str(theta_min) + ' to ' + str(theta_max))
if n_theta > 1:
kochin = [(theta_min+(theta_max-theta_min)*j/(n_theta-1))*np.pi/180. for j in range(n_theta)]
else:
kochin = [theta_min*np.pi/180.]
kochin = np.array(kochin)
dset = utility.require_dataset(hdf5_data, structure.H5_MESH_KOCHIN, kochin.shape, dtype='f', maxshape=(None, ))
utility.set_hdf5_attributes(dset, structure.H5_MESH_KOCHIN_ATTR)
dset[:] = kochin
logger.info('Saved the angle for which the Kochin function will be calculated at '
+ str(structure.H5_MESH_KOCHIN) + ' with characteristics ' + str(dset))
# Save index of cases
out = np.array([[k+1, int_case[k].body+1, int_case[k].mode+1] for k in range(n_integration)])
dset = utility.require_dataset(hdf5_data, structure.H5_RESULTS_CASE_FORCE, out.shape, dtype='i')
utility.set_hdf5_attributes(dset, structure.H5_RESULTS_CASE_FORCE_ATTR)
dset[:, :] = out
logger.info('Saved correlation between force ID number and body ID number at '
+ structure.H5_RESULTS_CASE_FORCE)
out = np.array([[k+1, rad_case[k].body+1, rad_case[k].mode+1] for k in range(n_radiation)])
dset = utility.require_dataset(hdf5_data, structure.H5_RESULTS_CASE_MOTION, out.shape, dtype='i')
utility.set_hdf5_attributes(dset, structure.H5_RESULTS_CASE_MOTION_ATTR)
dset[:, :] = out
logger.info('Saved correlation between ID number of radiation problem,'
' ID of body which is actually moving and the number of degree of freedom '
+ structure.H5_RESULTS_CASE_MOTION)
dset = utility.require_dataset(hdf5_data, structure.H5_RESULTS_CASE_BETA, beta.shape, dtype='f', maxshape=(None))
utility.set_hdf5_attributes(dset, structure.H5_RESULTS_CASE_BETA_ATTR)
dset[:] = beta
logger.info('Saved the wave directions at ' + structure.H5_RESULTS_CASE_BETA)
dset = utility.require_dataset(hdf5_data, structure.H5_RESULTS_CASE_W, w.shape, dtype='f', maxshape=(None))
utility.set_hdf5_attributes(dset, structure.H5_RESULTS_CASE_W_ATTR)
dset[:] = w
logger.info('Saved the wave frequencies at ' + structure.H5_RESULTS_CASE_W)
out = np.array([(theta_min+(theta_max-theta_min)*k/(n_theta-1))*np.pi/180. for k in range(n_theta)])
dset = utility.require_dataset(hdf5_data, structure.H5_RESULTS_CASE_THETA, out.shape, dtype='f', maxshape=(None))
utility.set_hdf5_attributes(dset, structure.H5_RESULTS_CASE_THETA_ATTR)
dset[:] = out
logger.info('Saved the kochin angles at ' + structure.H5_RESULTS_CASE_THETA)
# Save radiation cases
out = np.array([[rad_case[k].body+1, rad_case[k].i_case+1, rad_case[k].direction[0], rad_case[k].direction[1], rad_case[k].direction[2], rad_case[k].axis[0], rad_case[k].axis[1] , rad_case[k].axis[2]] for k in range(n_radiation)])
dset = utility.require_dataset(hdf5_data, structure.H5_RESULTS_CASE_RADIATION, out.shape, dtype='f')
utility.set_hdf5_attributes(dset, structure.H5_RESULTS_CASE_RADIATION_ATTR)
dset[:, :] = out
logger.info('Saved the radiation cases (directions and axis) at ' + structure.H5_RESULTS_CASE_RADIATION)
dset = utility.require_dataset(hdf5_data, structure.H5_RESULTS_CASE_BETA, beta.shape, dtype='f')
utility.set_hdf5_attributes(dset, structure.H5_RESULTS_CASE_BETA_ATTR)
dset[:] = beta
logger.info('Saved the radiation wave directions at ' + structure.H5_RESULTS_CASE_BETA)
# All the following are switches already logged
switch_ode_influence = utility.get_setting(settings.USE_ODE_INFLUENCE_COEFFICIENTS, custom_config,
'USE_ODE_INFLUENCE_COEFFICIENTS')
use_higher_order = utility.get_setting(settings.USE_HIGHER_ORDER, custom_config,
'USE_HIGHER_ORDER')
num_panel_higher_order = utility.get_setting(settings.NUM_PANEL_HIGHER_ORDER, custom_config,
'NUM_PANEL_HIGHER_ORDER')
b_spline_order = utility.get_setting(settings.B_SPLINE_ORDER, custom_config,
'B_SPLINE_ORDER')
use_dipoles_implementation = utility.get_setting(settings.USE_DIPOLES_IMPLEMENTATION, custom_config,
'USE_DIPOLES_IMPLEMENTATION')
compute_yaw_moment = utility.get_setting(settings.COMPUTE_YAW_MOMENT, custom_config,
'COMPUTE_YAW_MOMENT')
compute_drift_forces = utility.get_setting(settings.COMPUTE_DRIFT_FORCES, custom_config,
'COMPUTE_DRIFT_FORCES')
thin_panels = utility.get_setting(settings.THIN_PANELS, custom_config,
'THIN_PANELS')
if num_panel_higher_order is not None and num_panel_higher_order > 0:
dset = utility.require_dataset(hdf5_data, structure.H5_SOLVER_NUM_PANEL_HIGHER_ORDER, (1, ), dtype='i')
utility.set_hdf5_attributes(dset, structure.H5_SOLVER_NUM_PANEL_HIGHER_ORDER_ATTR)
dset[:] = int(num_panel_higher_order)
logger.info('Saved the number of panel per patch in the higher order method at ' + structure.H5_SOLVER_NUM_PANEL_HIGHER_ORDER)
if b_spline_order is not None and b_spline_order > 0:
dset = utility.require_dataset(hdf5_data, structure.H5_SOLVER_B_SPLINE_ORDER, (1, ), dtype='i')
utility.set_hdf5_attributes(dset, structure.H5_SOLVER_B_SPLINE_ORDER_ATTR)
dset[:] = int(b_spline_order)
        logger.info('Saved the order of the B-Spline for the potential in the higher order panel method at '
+ structure.H5_SOLVER_B_SPLINE_ORDER)
if use_higher_order is not None:
dset = utility.require_dataset(hdf5_data, structure.H5_SOLVER_USE_HIGHER_ORDER, (1, ), dtype='i')
utility.set_hdf5_attributes(dset, structure.H5_SOLVER_USE_HIGHER_ORDER_ATTR)
dset[:] = int(use_higher_order)
logger.info('Saved the switch for using of the higher order panel method at ' + structure.H5_SOLVER_USE_HIGHER_ORDER)
if switch_ode_influence is not None:
temp = int(switch_ode_influence)*np.ones(n_problems, dtype='i')
dset = utility.require_dataset(hdf5_data, structure.H5_SOLVER_SWITCH_ODE_INFLUENCE, temp.shape, dtype='i')
utility.set_hdf5_attributes(dset, structure.H5_SOLVER_SWITCH_ODE_INFLUENCE_ATTR)
dset[:] = temp
logger.info('Saved the switch for computing the influence coefficients using an Ordinary Differential equation at '
+ structure.H5_SOLVER_SWITCH_ODE_INFLUENCE)
if use_dipoles_implementation is not None:
dset = utility.require_dataset(hdf5_data, structure.H5_SOLVER_USE_DIPOLES_IMPLEMENTATION, (1, ), dtype='i')
utility.set_hdf5_attributes(dset, structure.H5_SOLVER_USE_DIPOLES_IMPLEMENTATION_ATTR)
dset[:] = int(use_dipoles_implementation)
logger.info('Saved the switch for using the dipoles Implementation for thin bodies at '
+ structure.H5_SOLVER_USE_DIPOLES_IMPLEMENTATION)
if compute_yaw_moment is not None:
dset = utility.require_dataset(hdf5_data, structure.H5_SOLVER_COMPUTE_YAW_MOMENT, (1, ), dtype='i')
utility.set_hdf5_attributes(dset, structure.H5_SOLVER_COMPUTE_YAW_MOMENT_ATTR)
dset[:] = int(compute_yaw_moment)
logger.info('Saved the switch for computing the yaw moment at ' + structure.H5_SOLVER_COMPUTE_YAW_MOMENT)
if compute_drift_forces is not None:
dset = utility.require_dataset(hdf5_data, structure.H5_SOLVER_COMPUTE_DRIFT_FORCES, (1, ), dtype='i')
utility.set_hdf5_attributes(dset, structure.H5_SOLVER_COMPUTE_DRIFT_FORCES_ATTR)
dset[:] = int(compute_drift_forces)
logger.info('Saved the switch for computing the drift forces at ' + structure.H5_SOLVER_COMPUTE_DRIFT_FORCES)
if thin_panels is not None:
temp = np.zeros(mesh.n_panels, dtype='i')
for idx in thin_panels:
if idx == -1:
temp = np.ones(mesh.n_panels, dtype='i')
break
elif idx >= 0:
temp[idx] = 1
dset = utility.require_dataset(hdf5_data, structure.H5_SOLVER_THIN_PANELS, temp.shape, dtype='i')
utility.set_hdf5_attributes(dset, structure.H5_SOLVER_THIN_PANELS_ATTR)
dset[:] = temp
logger.info('Saved the thin or not panels index at ' + structure.H5_SOLVER_THIN_PANELS)
utility.log_exit(logger, signature, [None])
def preprocess(custom_config):
"""
Configure and then run the preprocessor
Args:
custom_config, dict The custom configuration dictionary
"""
signature = __name__ + '.preprocess(custom_config)'
logger = logging.getLogger(__name__)
utility.log_entrance(logging.getLogger(__name__), signature,
{'custom_config': custom_config})
if not custom_config:
custom_config = {}
# Check if custom_config is a valid dict
utility.check_type_value(custom_config, 'The input custom_config', dict, True)
hdf5_file = utility.get_setting(settings.HDF5_FILE, custom_config, 'HDF5_FILE')
nemoh_cal = utility.get_setting(settings.NEMOH_CALCULATIONS_FILE, custom_config, 'NEMOH_CALCULATIONS_FILE')
input_file = utility.get_setting(settings.NEMOH_INPUT_FILE, custom_config, 'NEMOH_INPUT_FILE')
# Check if hdf5_file is a string
utility.check_str(hdf5_file, 'The path to the hdf5 file configured by HDF5_FILE')
    # Check whether or not nemoh_cal and input_file are valid strings
nemoh_cal_validation = utility.validate_str(nemoh_cal)
input_file_validation = utility.validate_str(input_file)
# If both input_file and nemoh_cal are not valid string then hdf5_file must be a valid file
if not nemoh_cal_validation and not input_file_validation:
utility.check_is_file(hdf5_file, 'The path to the hdf5 file configured by HDF5_FILE')
else:
# Otherwise we make sure that the directory to the hdf5_file exists and we will create the hdf5_file as part of the program
utility.mkdir_p(os.path.abspath(os.path.dirname(hdf5_file)))
        utility.touch(hdf5_file)  # This works around a bug in old versions of hdf5: if the file does not exist
        # when trying to append, it will raise an error
with h5py.File(hdf5_file, "a") as hdf5_db:
if nemoh_cal_validation:
# If nemoh_cal is a valid string then it must be a valid file and we will convert its entries into the hdf5_file
utility.check_is_file(nemoh_cal, 'The path to the nemoh calculations configured by NEMOH_CALCULATIONS_FILE')
utility.convert_calculations(nemoh_cal, hdf5_db)
if input_file_validation:
# If input_file is a valid string then it must be a valid file and we will convert its entries into the hdf5_file
utility.check_is_file(input_file, 'The path to the nemoh input configured by NEMOH_INPUT_FILE')
utility.convert_input(input_file, hdf5_db)
remove_irregular_frequencies = utility.get_setting(settings.REMOVE_IRREGULAR_FREQUENCIES, custom_config,
'REMOVE_IRREGULAR_FREQUENCIES')
if remove_irregular_frequencies is not None:
dset = utility.require_dataset(hdf5_db, structure.H5_SOLVER_REMOVE_IRREGULAR_FREQUENCIES, (1, ), dtype='i')
utility.set_hdf5_attributes(dset, structure.H5_SOLVER_REMOVE_IRREGULAR_FREQUENCIES_ATTR)
dset[:] = int(remove_irregular_frequencies)
else:
settings.REMOVE_IRREGULAR_FREQUENCIES = hdf5_db.get(structure.H5_SOLVER_REMOVE_IRREGULAR_FREQUENCIES)[0]
run(hdf5_db, custom_config)
utility.log_and_print(logger, 'The preprocessing results are saved in the hdf5 file '
+ utility.get_abs(hdf5_file))
utility.log_exit(logging.getLogger(__name__), signature, [None])
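# Illustrative call (paths and values below are hypothetical; the keys mirror the settings
# read above - HDF5_FILE, NEMOH_CALCULATIONS_FILE/NEMOH_INPUT_FILE, REMOVE_IRREGULAR_FREQUENCIES):
#
#     preprocess({
#         'HDF5_FILE': 'db.hdf5',
#         'NEMOH_CALCULATIONS_FILE': 'Nemoh.cal',
#         'REMOVE_IRREGULAR_FREQUENCIES': True,
#     })
#
# Passing an empty dict falls back entirely to the values configured in `settings`.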
def run_as_process(custom_config, queue):
utility.setup_subprocess_logging(queue, logging.getLogger())
return preprocess(custom_config)
if __name__ == '__main__':
utility.setup_logging(default_conf_path=settings.LOGGING_CONFIGURATION_FILE, logging_path=settings.LOG_FILE)
try:
preprocess({})
        print('Preprocessing successfully completed\n')
except Exception as e:
# exc_info=True means the stack trace will be printed automatically
print('There was an error when running the application. Check the log file for more details \n')
logging.getLogger(__name__).error('Program halted due to a fatal error whose detail is as follow: ',
exc_info=True) | 45.722796 | 238 | 0.654026 |
4a22f634d818f8c55b89b8fdec9e092569f20b9c | 3,636 | py | Python | build_analyzer.py | jefflongo/sdmmc-analyzer | bea83fec1de0a86a5bab2cfd9f9e62805e8329d5 | [
"MIT"
] | null | null | null | build_analyzer.py | jefflongo/sdmmc-analyzer | bea83fec1de0a86a5bab2cfd9f9e62805e8329d5 | [
"MIT"
] | null | null | null | build_analyzer.py | jefflongo/sdmmc-analyzer | bea83fec1de0a86a5bab2cfd9f9e62805e8329d5 | [
"MIT"
] | null | null | null | #!/usr/bin/env python
import os, glob, platform
#find out if we're running on mac or linux and set the dynamic library extension
dylib_ext = ""
if platform.system().lower() == "darwin":
dylib_ext = ".dylib"
else:
dylib_ext = ".so"
print("Running on " + platform.system())
#make sure the release folder exists, and clean out any .o/.so files if there are any
if not os.path.exists( "release" ):
os.makedirs( "release" )
os.chdir( "release" )
o_files = glob.glob( "*.o" )
o_files.extend( glob.glob( "*" + dylib_ext ) )
for o_file in o_files:
os.remove( o_file )
os.chdir( ".." )
#make sure the debug folder exists, and clean out any .o/.so files if there are any
if not os.path.exists( "debug" ):
os.makedirs( "debug" )
os.chdir( "debug" )
o_files = glob.glob( "*.o" )
o_files.extend( glob.glob( "*" + dylib_ext ) )
for o_file in o_files:
os.remove( o_file )
os.chdir( ".." )
#find all the cpp files in /src. We'll compile all of them
os.chdir( "src" )
cpp_files = glob.glob( "*.cpp" )
os.chdir( ".." )
#specify the search paths/dependencies/options for gcc
include_paths = [ "../AnalyzerSDK/include" ]
link_paths = [ "../AnalyzerSDK/lib" ]
link_dependencies = [ "-lAnalyzer" ] #refers to libAnalyzer.dylib or libAnalyzer.so
debug_compile_flags = "-O0 -w -c -fpic -g3"
release_compile_flags = "-O3 -w -c -fpic"
#loop through all the cpp files, build up the gcc command line, and attempt to compile each cpp file
for cpp_file in cpp_files:
#g++
command = "g++ "
#include paths
for path in include_paths:
command += "-I\"" + path + "\" "
release_command = command
release_command += release_compile_flags
release_command += " -o\"release/" + cpp_file.replace( ".cpp", ".o" ) + "\" " #the output file
release_command += "\"" + "src/" + cpp_file + "\"" #the cpp file to compile
debug_command = command
debug_command += debug_compile_flags
debug_command += " -o\"debug/" + cpp_file.replace( ".cpp", ".o" ) + "\" " #the output file
debug_command += "\"" + "src/" + cpp_file + "\"" #the cpp file to compile
#run the commands from the command line
print(release_command)
os.system( release_command )
print(debug_command)
os.system( debug_command )
#lastly, link
#g++
command = "g++ "
#add the library search paths
for link_path in link_paths:
command += "-L\"" + link_path + "\" "
#add libraries to link against
for link_dependency in link_dependencies:
command += link_dependency + " "
#make a dynamic (shared) library (.so/.dylib)
if dylib_ext == ".dylib":
command += "-dynamiclib "
else:
command += "-shared "
#figure out what the name of this analyzer is
analyzer_name = ""
for cpp_file in cpp_files:
if cpp_file.endswith( "Analyzer.cpp" ):
analyzer_name = cpp_file.replace( "Analyzer.cpp", "" )
break
#the files to create (.so/.dylib files)
if dylib_ext == ".dylib":
release_command = command + "-o release/lib" + analyzer_name + "Analyzer.dylib "
debug_command = command + "-o debug/lib" + analyzer_name + "Analyzer.dylib "
else:
release_command = command + "-o\"release/lib" + analyzer_name + "Analyzer.so\" "
debug_command = command + "-o\"debug/lib" + analyzer_name + "Analyzer.so\" "
#add all the object files to link
for cpp_file in cpp_files:
release_command += "release/" + cpp_file.replace( ".cpp", ".o" ) + " "
debug_command += "debug/" + cpp_file.replace( ".cpp", ".o" ) + " "
#run the commands from the command line
print(release_command)
os.system( release_command )
print(debug_command)
os.system( debug_command )
| 30.049587 | 100 | 0.653465 |
4a22f6a2953dc8e36b3bf944f8894db98c6e7d56 | 4,880 | py | Python | cogs/public/User.py | nathanielfernandes/HamoodBot | e22b321e0594360fb427080322ed8731b48cec16 | [
"MIT"
] | 17 | 2020-07-28T19:40:22.000Z | 2022-03-29T16:42:36.000Z | cogs/public/User.py | nathanielfernandes/HamoodBot | e22b321e0594360fb427080322ed8731b48cec16 | [
"MIT"
] | 4 | 2021-05-25T21:46:50.000Z | 2022-03-12T00:39:07.000Z | cogs/public/User.py | nathanielfernandes/HamoodBot | e22b321e0594360fb427080322ed8731b48cec16 | [
"MIT"
] | 6 | 2021-01-14T17:17:02.000Z | 2021-09-14T23:12:13.000Z | import random, json, datetime
import discord
from discord.ext import commands
class User(commands.Cog):
"""Get Users Information"""
def __init__(self, bot):
self.bot = bot
self.Hamood = bot.Hamood
@commands.command()
@commands.bot_has_permissions(embed_links=True)
async def joined(self, ctx, member: discord.Member = None):
"""[@mention]|||Findout when a member joined the server."""
member = ctx.author if not member else member
await self.Hamood.quick_embed(
ctx,
author={"name": str(member), "icon_url": member.avatar.url},
description=f'Joined **{ctx.guild.name}** on:```yaml\n{member.joined_at.strftime("%a, %d %B %Y, %I:%M %p UTC")}```\n**Total time here**:\n{self.Hamood.pretty_dt((datetime.datetime.now() - member.joined_at).total_seconds())}',
)
@commands.command(aliases=["avatar"])
@commands.bot_has_permissions(embed_links=True)
async def pfp(self, ctx, member: discord.Member = None):
"""[@mention]|||Get the profile picture of user."""
member = ctx.author if not member else member
await self.Hamood.quick_embed(
ctx,
author={"name": f"{member}'s avatar", "url": member.avatar.url},
image_url=member.avatar.url,
)
@commands.command()
@commands.bot_has_permissions(embed_links=True)
async def roles(self, ctx, member: discord.Member = None):
"""[@mention]|||Lists the roles of a user."""
member = ctx.author if not member else member
roles = [role.mention for role in member.roles]
await self.Hamood.quick_embed(
ctx,
author={"name": f"{member}'s roles"},
thumbnail=member.avatar.url,
fields=[
{
"name": "Top Role",
"value": member.top_role.mention,
"inline": "False",
},
{"name": f"All Roles ({len(roles)})", "value": " ".join(roles)},
],
)
@commands.command(aliases=["perms"])
@commands.bot_has_permissions(embed_links=True)
async def permissions(self, ctx, member: discord.Member = None):
"""[@mention]|||Get a list of a users permissions in the server."""
member = ctx.author if not member else member
perms = "\n".join(
f"• {str(perm[0]).replace('_', ' ').capitalize()}"
for perm in member.guild_permissions
if perm[1]
)
await self.Hamood.quick_embed(
ctx,
author={"name": f"{member}'s permissions", "icon_url": member.avatar.url},
description=perms,
)
@commands.command(aliases=["ui"])
@commands.bot_has_permissions(embed_links=True)
async def userinfo(self, ctx, member: discord.Member = None):
"""[@mention]|||Findout allot of information on a user."""
member = ctx.author if not member else member
roles = [role.mention for role in member.roles]
perms = [
f"`{str(perm[0]).replace('_', ' ').capitalize()}`"
for perm in member.guild_permissions
if perm[1]
]
if "`Administrator`" in perms:
perms = ["`Adminstrator`"]
await self.Hamood.quick_embed(
ctx,
author={"name": f"User Info - {member}"},
description=f"**Nick Name:** {member.display_name}\n**ID:** {member.id}\n**Is Bot:** {member.bot}\n**Vibe:** {random.choice(self.Hamood.RANDOMWORDS)} {random.choice(self.Hamood.RANDOMEMOJIS)}\u200b",
thumbnail=member.avatar.url,
fields=[
{
"name": "Top Role",
"value": member.top_role.mention,
"inline": False,
},
{
"name": f"All Roles ({len(roles)})",
"value": " ".join(roles),
"inline": False,
},
{
"name": "Joined Discord",
"value": f'{member.created_at.strftime("%a, %d %B %Y, %I:%M %p UTC")}\n*{self.Hamood.pretty_dt((datetime.datetime.now() - member.created_at).total_seconds())} ago*',
"inline": False,
},
{
"name": "Joined Server",
"value": f'{member.joined_at.strftime("%a, %d %B %Y, %I:%M %p UTC")}\n*{self.Hamood.pretty_dt((datetime.datetime.now() - member.joined_at).total_seconds())} ago*',
"inline": False,
},
{
"name": f"Permissions ({len(perms)})",
"value": ", ".join(perms),
"inline": False,
},
],
)
def setup(bot):
bot.add_cog(User(bot))
| 39.674797 | 237 | 0.525615 |
4a22f8428d25d16d40c202c7addc556556f73233 | 9,777 | bzl | Python | rust/private/rust_analyzer.bzl | scentini/rules_rust | 0e3593fc5d839e4a74523f07e885b761ee19e662 | [
"Apache-2.0"
] | 349 | 2016-03-15T20:38:00.000Z | 2022-03-28T07:03:02.000Z | rust/private/rust_analyzer.bzl | aproxs/rules_rust | 77285c1aaebc1c55e6e80acc27be4c54aa30076d | [
"Apache-2.0"
] | 872 | 2016-03-18T06:40:26.000Z | 2022-03-31T16:04:04.000Z | rust/private/rust_analyzer.bzl | aproxs/rules_rust | 77285c1aaebc1c55e6e80acc27be4c54aa30076d | [
"Apache-2.0"
] | 231 | 2016-03-16T11:34:47.000Z | 2022-03-25T23:01:35.000Z | # Copyright 2020 Google
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""
Rust Analyzer Bazel rules.
rust_analyzer will generate a rust-project.json file for the
given targets. This file can be consumed by rust-analyzer as an alternative
to Cargo.toml files.
"""
load("//rust/platform:triple_mappings.bzl", "system_to_dylib_ext", "triple_to_system")
load("//rust/private:common.bzl", "rust_common")
load("//rust/private:rustc.bzl", "BuildInfo")
load("//rust/private:utils.bzl", "dedent", "find_toolchain")
RustAnalyzerInfo = provider(
doc = "RustAnalyzerInfo holds rust crate metadata for targets",
fields = {
"build_info": "BuildInfo: build info for this crate if present",
"cfgs": "List[String]: features or other compilation --cfg settings",
"crate": "rust_common.crate_info",
"crate_specs": "Depset[File]: transitive closure of OutputGroupInfo files",
"deps": "List[RustAnalyzerInfo]: direct dependencies",
"env": "Dict{String: String}: Environment variables, used for the `env!` macro",
"proc_macro_dylib_path": "File: compiled shared library output of proc-macro rule",
},
)
def _rust_analyzer_aspect_impl(target, ctx):
if rust_common.crate_info not in target:
return []
toolchain = find_toolchain(ctx)
# Always add `test` & `debug_assertions`. See rust-analyzer source code:
# https://github.com/rust-analyzer/rust-analyzer/blob/2021-11-15/crates/project_model/src/workspace.rs#L529-L531
cfgs = ["test", "debug_assertions"]
if hasattr(ctx.rule.attr, "crate_features"):
cfgs += ['feature="{}"'.format(f) for f in ctx.rule.attr.crate_features]
if hasattr(ctx.rule.attr, "rustc_flags"):
cfgs += [f[6:] for f in ctx.rule.attr.rustc_flags if f.startswith("--cfg ") or f.startswith("--cfg=")]
# Save BuildInfo if we find any (for build script output)
build_info = None
for dep in ctx.rule.attr.deps:
if BuildInfo in dep:
build_info = dep[BuildInfo]
dep_infos = [dep[RustAnalyzerInfo] for dep in ctx.rule.attr.deps if RustAnalyzerInfo in dep]
if hasattr(ctx.rule.attr, "proc_macro_deps"):
dep_infos += [dep[RustAnalyzerInfo] for dep in ctx.rule.attr.proc_macro_deps if RustAnalyzerInfo in dep]
if hasattr(ctx.rule.attr, "crate") and ctx.rule.attr.crate != None:
dep_infos.append(ctx.rule.attr.crate[RustAnalyzerInfo])
crate_spec = ctx.actions.declare_file(ctx.label.name + ".rust_analyzer_crate_spec")
crate_info = target[rust_common.crate_info]
rust_analyzer_info = RustAnalyzerInfo(
crate = crate_info,
cfgs = cfgs,
env = getattr(ctx.rule.attr, "rustc_env", {}),
deps = dep_infos,
crate_specs = depset(direct = [crate_spec], transitive = [dep.crate_specs for dep in dep_infos]),
proc_macro_dylib_path = find_proc_macro_dylib_path(toolchain, target),
build_info = build_info,
)
ctx.actions.write(
output = crate_spec,
content = json.encode(_create_single_crate(ctx, rust_analyzer_info)),
)
return [
rust_analyzer_info,
OutputGroupInfo(rust_analyzer_crate_spec = rust_analyzer_info.crate_specs),
]
def find_proc_macro_dylib_path(toolchain, target):
"""Find the proc_macro_dylib_path of target. Returns None if target crate is not type proc-macro.
Args:
toolchain: The current rust toolchain.
target: The current target.
Returns:
(path): The path to the proc macro dylib, or None if this crate is not a proc-macro.
"""
if target[rust_common.crate_info].type != "proc-macro":
return None
dylib_ext = system_to_dylib_ext(triple_to_system(toolchain.target_triple))
for action in target.actions:
for output in action.outputs.to_list():
if output.extension == dylib_ext[1:]:
return output.path
# Failed to find the dylib path inside a proc-macro crate.
# TODO: Should this be an error?
return None
rust_analyzer_aspect = aspect(
attr_aspects = ["deps", "proc_macro_deps", "crate"],
implementation = _rust_analyzer_aspect_impl,
toolchains = [str(Label("//rust:toolchain"))],
incompatible_use_toolchain_transition = True,
doc = "Annotates rust rules with RustAnalyzerInfo later used to build a rust-project.json",
)
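# Illustrative invocation (the command shape is an assumption): the crate specs produced by
# this aspect can be requested directly from the command line, e.g.
#
#     bazel build //my:crate \
#         --aspects=@rules_rust//rust/private:rust_analyzer.bzl%rust_analyzer_aspect \
#         --output_groups=rust_analyzer_crate_spec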
_exec_root_tmpl = "__EXEC_ROOT__/"
def _crate_id(crate_info):
"""Returns a unique stable identifier for a crate
Returns:
(string): This crate's unique stable id.
"""
return "ID-" + crate_info.root.path
def _create_single_crate(ctx, info):
"""Creates a crate in the rust-project.json format.
Args:
ctx (ctx): The rule context
info (RustAnalyzerInfo): RustAnalyzerInfo for the current crate
Returns:
(dict) The crate rust-project.json representation
"""
crate_name = info.crate.name
crate = dict()
crate_id = _crate_id(info.crate)
crate["crate_id"] = crate_id
crate["display_name"] = crate_name
crate["edition"] = info.crate.edition
crate["env"] = {}
crate["crate_type"] = info.crate.type
# Switch on external/ to determine if crates are in the workspace or remote.
# TODO: Some folks may want to override this for vendored dependencies.
root_path = info.crate.root.path
root_dirname = info.crate.root.dirname
if root_path.startswith("external/"):
crate["is_workspace_member"] = False
crate["root_module"] = _exec_root_tmpl + root_path
crate_root = _exec_root_tmpl + root_dirname
else:
crate["is_workspace_member"] = True
crate["root_module"] = root_path
crate_root = root_dirname
if info.build_info != None:
out_dir_path = info.build_info.out_dir.path
crate["env"].update({"OUT_DIR": _exec_root_tmpl + out_dir_path})
crate["source"] = {
# We have to tell rust-analyzer about our out_dir since it's not under the crate root.
"exclude_dirs": [],
"include_dirs": [crate_root, _exec_root_tmpl + out_dir_path],
}
# TODO: The only imagined use case is an env var holding a filename in the workspace passed to a
# macro like include_bytes!. Other use cases might exist that require more complex logic.
expand_targets = getattr(ctx.rule.attr, "data", []) + getattr(ctx.rule.attr, "compile_data", [])
crate["env"].update({k: ctx.expand_location(v, expand_targets) for k, v in info.env.items()})
# Omit when a crate appears to depend on itself (e.g. foo_test crates).
    # It can happen that a single source file is present in multiple crates - there can
    # be a `rust_library` with a `lib.rs` file, and a `rust_test` for the `test`
    # module in that file. Tests can declare more dependencies than what the library
    # had. Therefore we had to collect all RustAnalyzerInfos for a given crate
    # and take deps from all of them.
    # There's one exception - if the dependency has the same crate name as
    # the crate being processed, we don't add it as a dependency to itself. This is
# common and expected - `rust_test.crate` pointing to the `rust_library`.
crate["deps"] = [_crate_id(dep.crate) for dep in info.deps if _crate_id(dep.crate) != crate_id]
crate["cfg"] = info.cfgs
crate["target"] = find_toolchain(ctx).target_triple
if info.proc_macro_dylib_path != None:
crate["proc_macro_dylib_path"] = _exec_root_tmpl + info.proc_macro_dylib_path
return crate
def _rust_analyzer_detect_sysroot_impl(ctx):
rust_toolchain = find_toolchain(ctx)
if not rust_toolchain.rustc_srcs:
fail(
"Current Rust toolchain doesn't contain rustc sources in `rustc_srcs` attribute.",
"These are needed by rust analyzer.",
"If you are using the default Rust toolchain, add `rust_repositories(include_rustc_srcs = True, ...).` to your WORKSPACE file.",
)
sysroot_src = rust_toolchain.rustc_srcs.label.package + "/library"
if rust_toolchain.rustc_srcs.label.workspace_root:
sysroot_src = _exec_root_tmpl + rust_toolchain.rustc_srcs.label.workspace_root + "/" + sysroot_src
sysroot_src_file = ctx.actions.declare_file(ctx.label.name + ".rust_analyzer_sysroot_src")
ctx.actions.write(
output = sysroot_src_file,
content = sysroot_src,
)
return [DefaultInfo(files = depset([sysroot_src_file]))]
rust_analyzer_detect_sysroot = rule(
implementation = _rust_analyzer_detect_sysroot_impl,
toolchains = ["@rules_rust//rust:toolchain"],
incompatible_use_toolchain_transition = True,
doc = dedent("""\
Detect the sysroot and store in a file for use by the gen_rust_project tool.
"""),
)
def _rust_analyzer_impl(ctx):
pass
rust_analyzer = rule(
attrs = {
"targets": attr.label_list(
aspects = [rust_analyzer_aspect],
doc = "List of all targets to be included in the index",
),
},
implementation = _rust_analyzer_impl,
toolchains = [str(Label("//rust:toolchain"))],
incompatible_use_toolchain_transition = True,
doc = dedent("""\
Deprecated: gen_rust_project can now create a rust-project.json without a rust_analyzer rule.
"""),
)
| 40.568465 | 140 | 0.689884 |
4a22f8608996ddaca87baae3571633cb1694cc87 | 573 | py | Python | fdk_client/platform/models/SlideshowMedia.py | kavish-d/fdk-client-python | a1023eb530473322cb52e095fc4ceb226c1e6037 | [
"MIT"
] | null | null | null | fdk_client/platform/models/SlideshowMedia.py | kavish-d/fdk-client-python | a1023eb530473322cb52e095fc4ceb226c1e6037 | [
"MIT"
] | null | null | null | fdk_client/platform/models/SlideshowMedia.py | kavish-d/fdk-client-python | a1023eb530473322cb52e095fc4ceb226c1e6037 | [
"MIT"
] | null | null | null | """Platform Models."""
from marshmallow import fields, Schema
from marshmallow.validate import OneOf
from ..enums import *
from ..models.BaseSchema import BaseSchema
from .Action import Action
class SlideshowMedia(BaseSchema):
# Content swagger.json
type = fields.Str(required=False)
url = fields.Str(required=False)
bg_color = fields.Str(required=False)
duration = fields.Int(required=False)
auto_decide_duration = fields.Boolean(required=False)
action = fields.Nested(Action, required=False)
| 15.078947 | 57 | 0.689354 |
4a22fa29be5ec5f15d08795d60a7d1809dc162fc | 6,124 | py | Python | Products/CMFTopic/DateCriteria.py | zopefoundation/Products.CMFTopic | 634077f393ce95939d4d349e90db10be90754660 | [
"ZPL-2.1"
] | null | null | null | Products/CMFTopic/DateCriteria.py | zopefoundation/Products.CMFTopic | 634077f393ce95939d4d349e90db10be90754660 | [
"ZPL-2.1"
] | null | null | null | Products/CMFTopic/DateCriteria.py | zopefoundation/Products.CMFTopic | 634077f393ce95939d4d349e90db10be90754660 | [
"ZPL-2.1"
] | null | null | null | ##############################################################################
#
# Copyright (c) 2001 Zope Foundation and Contributors.
#
# This software is subject to the provisions of the Zope Public License,
# Version 2.1 (ZPL). A copy of the ZPL should accompany this distribution.
# THIS SOFTWARE IS PROVIDED "AS IS" AND ANY AND ALL EXPRESS OR IMPLIED
# WARRANTIES ARE DISCLAIMED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
# WARRANTIES OF TITLE, MERCHANTABILITY, AGAINST INFRINGEMENT, AND FITNESS
# FOR A PARTICULAR PURPOSE.
#
##############################################################################
""" Various date criteria
"""
from AccessControl.SecurityInfo import ClassSecurityInfo
from App.class_init import InitializeClass
from DateTime.DateTime import DateTime
from zope.interface import implements
from Products.CMFTopic.AbstractCriterion import AbstractCriterion
from Products.CMFTopic.interfaces import ICriterion
from Products.CMFTopic.permissions import ChangeTopics
from Products.CMFTopic.permissions import View
from Products.CMFTopic.Topic import Topic
_as_of = DateTime # Allow for controlled value when testing
class FriendlyDateCriterion(AbstractCriterion):
"""
Put a friendly interface on date range searches, like
'where effective date is less than 5 days old'.
"""
implements(ICriterion)
meta_type = 'Friendly Date Criterion'
security = ClassSecurityInfo()
_editableAttributes = ('value', 'operation', 'daterange')
_defaultDateOptions = ((0, 'Now'),
(1, '1 Day'),
(2, '2 Days'),
(5, '5 Days'),
(7, '1 Week'),
(14, '2 Weeks'),
(31, '1 Month'),
(31 * 3, '3 Months'),
(31 * 6, '6 Months'),
(365, '1 Year'),
(365 * 2, '2 years'))
def __init__(self, id, field):
self.id = id
self.field = field
self.value = None
self.operation = 'min'
self.daterange = 'old'
security.declarePublic('defaultDateOptions')
def defaultDateOptions(self):
"""
Return a list of default values and labels for date options.
"""
return self._defaultDateOptions
security.declareProtected(ChangeTopics, 'getEditForm')
def getEditForm(self):
"""
Return the name of the skin method used by Topic to edit
criteria of this type.
"""
return 'friendlydatec_editform'
security.declareProtected(ChangeTopics, 'edit')
def edit(self, value=None, operation='min', daterange='old'):
"""
Update the values to match against.
"""
if value in (None, ''):
self.value = None
else:
try:
self.value = int(value)
except:
raise ValueError('Supplied value should be an int')
if operation in ('min', 'max', 'within_day'):
self.operation = operation
else:
raise ValueError('Operation type not in set {min,max,within_day}')
if daterange in ('old', 'ahead'):
self.daterange = daterange
else:
raise ValueError('Date range not in set {old,ahead}')
security.declareProtected(View, 'getCriteriaItems')
def getCriteriaItems(self):
"""
Return a sequence of items to be used to build the catalog query.
"""
if self.value is not None:
field = self.Field()
value = self.value
operation = self.operation
# Negate the value for 'old' days
if self.daterange == 'old' and value != 0:
value = -value
# Also reverse the operator to match what a user would expect.
# Queries such as "More than 2 days ago" should match dates
# *earlier* than "today minus 2", and "Less than 2 days ago"
# would be expected to return dates *later* then "today minus
# two".
if operation == 'max':
operation = 'min'
elif operation == 'min':
operation = 'max'
now = _as_of()
date = now + value
if operation == 'within_day':
# When items within a day are requested, the range is between
# the earliest and latest time of that particular day
range = (date.earliestTime(), date.latestTime())
return ((field, {'query': range, 'range': 'min:max'}),)
elif operation == 'min':
if value != 0:
if self.daterange == 'old':
date_range = (date, now)
return ((field, {'query': date_range,
'range': 'min:max'}),)
else:
return ((field, {'query': date.earliestTime(),
'range': operation}),)
else:
# Value 0 means "Now", so get everything from now on
return ((field, {'query': date, 'range': operation}),)
elif operation == 'max':
if value != 0:
if self.daterange == 'old':
return ((field, {'query': date, 'range': operation}),)
else:
date_range = (now, date.latestTime())
return ((field, {'query': date_range,
'range': 'min:max'}),)
else:
# Value is 0, meaning "Now", get everything before "Now"
return ((field, {'query': date, 'range': operation}),)
else:
return ()
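# Worked example (illustrative, derived from the logic above): with value=5, operation='min'
# and daterange='old', getCriteriaItems() returns
#     ((<field>, {'query': <now minus 5 days>, 'range': 'max'}),)
# i.e. "more than 5 days ago" matches content whose date index is earlier than five days
# before now.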
InitializeClass(FriendlyDateCriterion)
# Register as a criteria type with the Topic class
Topic._criteriaTypes.append(FriendlyDateCriterion)
| 36.452381 | 78 | 0.522861 |
4a22fab9ea1dec1e1ce62fcb4c34b964fa009b8c | 166,330 | py | Python | airflow/www/views.py | imcecil/incubator-airflow | 461ec4c7e6efb2f6fc0c342afdc44a350469ff1d | [
"Apache-2.0",
"BSD-2-Clause",
"MIT",
"ECL-2.0",
"BSD-3-Clause"
] | null | null | null | airflow/www/views.py | imcecil/incubator-airflow | 461ec4c7e6efb2f6fc0c342afdc44a350469ff1d | [
"Apache-2.0",
"BSD-2-Clause",
"MIT",
"ECL-2.0",
"BSD-3-Clause"
] | null | null | null | airflow/www/views.py | imcecil/incubator-airflow | 461ec4c7e6efb2f6fc0c342afdc44a350469ff1d | [
"Apache-2.0",
"BSD-2-Clause",
"MIT",
"ECL-2.0",
"BSD-3-Clause"
] | null | null | null | #
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
#
import collections
import copy
import json
import logging
import math
import re
import socket
import sys
import traceback
from collections import defaultdict
from datetime import timedelta
from functools import wraps
from json import JSONDecodeError
from operator import itemgetter
from typing import Any, Callable, Iterable, List, Optional, Set, Tuple, Union
from urllib.parse import parse_qsl, unquote, urlencode, urlparse
import lazy_object_proxy
import markupsafe
import nvd3
import sqlalchemy as sqla
from flask import (
Markup,
Response,
abort,
before_render_template,
current_app,
escape,
flash,
g,
jsonify,
make_response,
redirect,
render_template,
request,
send_from_directory,
session as flask_session,
url_for,
)
from flask_appbuilder import BaseView, ModelView, expose
from flask_appbuilder.actions import action
from flask_appbuilder.fieldwidgets import Select2Widget
from flask_appbuilder.models.sqla.filters import BaseFilter
from flask_appbuilder.security.decorators import has_access
from flask_appbuilder.security.views import (
PermissionModelView,
PermissionViewModelView,
ResetMyPasswordView,
ResetPasswordView,
RoleModelView,
UserDBModelView,
UserInfoEditView,
UserLDAPModelView,
UserOAuthModelView,
UserOIDModelView,
UserRemoteUserModelView,
UserStatsChartView,
ViewMenuModelView,
)
from flask_appbuilder.widgets import FormWidget
from flask_babel import lazy_gettext
from jinja2.utils import htmlsafe_json_dumps, pformat # type: ignore
from pendulum.datetime import DateTime
from pendulum.parsing.exceptions import ParserError
from pygments import highlight, lexers
from pygments.formatters import HtmlFormatter
from sqlalchemy import Date, and_, desc, func, union_all
from sqlalchemy.exc import IntegrityError
from sqlalchemy.orm import joinedload
from wtforms import SelectField, validators
from wtforms.validators import InputRequired
import airflow
from airflow import models, plugins_manager, settings
from airflow.api.common.experimental.mark_tasks import (
set_dag_run_state_to_failed,
set_dag_run_state_to_success,
)
from airflow.configuration import AIRFLOW_CONFIG, conf
from airflow.exceptions import AirflowException
from airflow.executors.executor_loader import ExecutorLoader
from airflow.jobs.base_job import BaseJob
from airflow.jobs.scheduler_job import SchedulerJob
from airflow.jobs.triggerer_job import TriggererJob
from airflow.models import DAG, Connection, DagModel, DagTag, Log, SlaMiss, TaskFail, XCom, errors
from airflow.models.baseoperator import BaseOperator
from airflow.models.dagcode import DagCode
from airflow.models.dagrun import DagRun, DagRunType
from airflow.models.serialized_dag import SerializedDagModel
from airflow.models.taskinstance import TaskInstance
from airflow.providers_manager import ProvidersManager
from airflow.security import permissions
from airflow.ti_deps.dep_context import DepContext
from airflow.ti_deps.dependencies_deps import RUNNING_DEPS, SCHEDULER_QUEUED_DEPS
from airflow.utils import json as utils_json, timezone, yaml
from airflow.utils.dates import infer_time_unit, scale_time_units
from airflow.utils.docs import get_doc_url_for_provider, get_docs_url
from airflow.utils.helpers import alchemy_to_dict
from airflow.utils.log import secrets_masker
from airflow.utils.log.log_reader import TaskLogReader
from airflow.utils.session import create_session, provide_session
from airflow.utils.state import State
from airflow.utils.strings import to_boolean
from airflow.version import version
from airflow.www import auth, utils as wwwutils
from airflow.www.decorators import action_logging, gzipped
from airflow.www.forms import (
ConnectionForm,
DagRunEditForm,
DateTimeForm,
DateTimeWithNumRunsForm,
DateTimeWithNumRunsWithDagRunsForm,
TaskInstanceEditForm,
)
from airflow.www.widgets import AirflowModelListWidget
PAGE_SIZE = conf.getint('webserver', 'page_size')
FILTER_TAGS_COOKIE = 'tags_filter'
FILTER_STATUS_COOKIE = 'dag_status_filter'
def truncate_task_duration(task_duration):
"""
    Cast the task_duration to an int for optimization on large/huge DAGs if task_duration > 10s,
    otherwise keep it as a float with 3 decimal places.
"""
return int(task_duration) if task_duration > 10.0 else round(task_duration, 3)
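# Illustrative values (not from the original source):
#     truncate_task_duration(12.3456) -> 12, truncate_task_duration(3.14159) -> 3.142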
def get_safe_url(url):
"""Given a user-supplied URL, ensure it points to our web server"""
valid_schemes = ['http', 'https', '']
valid_netlocs = [request.host, '']
if not url:
return url_for('Airflow.index')
parsed = urlparse(url)
# If the url contains semicolon, redirect it to homepage to avoid
# potential XSS. (Similar to https://github.com/python/cpython/pull/24297/files (bpo-42967))
if ';' in unquote(url):
return url_for('Airflow.index')
query = parse_qsl(parsed.query, keep_blank_values=True)
url = parsed._replace(query=urlencode(query)).geturl()
if parsed.scheme in valid_schemes and parsed.netloc in valid_netlocs:
return url
return url_for('Airflow.index')
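# Illustrative behaviour (host names are hypothetical): a relative link such as
# "/home?status=active" is returned unchanged (after query re-encoding), while
# "https://attacker.example/x" or any URL containing ';' falls back to the Airflow index page.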
def get_date_time_num_runs_dag_runs_form_data(www_request, session, dag):
"""Get Execution Data, Base Date & Number of runs from a Request"""
date_time = www_request.args.get('execution_date')
if date_time:
date_time = timezone.parse(date_time)
else:
date_time = dag.get_latest_execution_date(session=session) or timezone.utcnow()
base_date = www_request.args.get('base_date')
if base_date:
base_date = timezone.parse(base_date)
else:
        # The DateTimeField widget truncates milliseconds and would lose
        # the first dag run. Round to the next second.
base_date = (date_time + timedelta(seconds=1)).replace(microsecond=0)
default_dag_run = conf.getint('webserver', 'default_dag_run_display_number')
num_runs = www_request.args.get('num_runs', default=default_dag_run, type=int)
drs = (
session.query(DagRun)
.filter(DagRun.dag_id == dag.dag_id, DagRun.execution_date <= base_date)
.order_by(desc(DagRun.execution_date))
.limit(num_runs)
.all()
)
dr_choices = []
dr_state = None
for dr in drs:
dr_choices.append((dr.execution_date.isoformat(), dr.run_id))
if date_time == dr.execution_date:
dr_state = dr.state
# Happens if base_date was changed and the selected dag run is not in result
if not dr_state and drs:
dr = drs[0]
date_time = dr.execution_date
dr_state = dr.state
return {
'dttm': date_time,
'base_date': base_date,
'num_runs': num_runs,
'execution_date': date_time.isoformat(),
'dr_choices': dr_choices,
'dr_state': dr_state,
}
def task_group_to_dict(task_group):
"""
Create a nested dict representation of this TaskGroup and its children used to construct
the Graph.
"""
if isinstance(task_group, BaseOperator):
return {
'id': task_group.task_id,
'value': {
'label': task_group.label,
'labelStyle': f"fill:{task_group.ui_fgcolor};",
'style': f"fill:{task_group.ui_color};",
'rx': 5,
'ry': 5,
},
}
children = [
task_group_to_dict(child) for child in sorted(task_group.children.values(), key=lambda t: t.label)
]
if task_group.upstream_group_ids or task_group.upstream_task_ids:
children.append(
{
'id': task_group.upstream_join_id,
'value': {
'label': '',
'labelStyle': f"fill:{task_group.ui_fgcolor};",
'style': f"fill:{task_group.ui_color};",
'shape': 'circle',
},
}
)
if task_group.downstream_group_ids or task_group.downstream_task_ids:
# This is the join node used to reduce the number of edges between two TaskGroup.
children.append(
{
'id': task_group.downstream_join_id,
'value': {
'label': '',
'labelStyle': f"fill:{task_group.ui_fgcolor};",
'style': f"fill:{task_group.ui_color};",
'shape': 'circle',
},
}
)
return {
"id": task_group.group_id,
'value': {
'label': task_group.label,
'labelStyle': f"fill:{task_group.ui_fgcolor};",
'style': f"fill:{task_group.ui_color}",
'rx': 5,
'ry': 5,
'clusterLabelPos': 'top',
},
'tooltip': task_group.tooltip,
'children': children,
}
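# Illustrative return shapes (task/group ids are hypothetical): a plain operator maps to
#     {'id': 'my_task', 'value': {'label': 'my_task', 'labelStyle': ..., 'style': ..., 'rx': 5, 'ry': 5}}
# while a TaskGroup additionally carries 'tooltip' and a sorted 'children' list, plus the
# synthetic upstream/downstream join nodes when the group has outside dependencies.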
def get_key_paths(input_dict):
"""Return a list of dot-separated dictionary paths"""
for key, value in input_dict.items():
if isinstance(value, dict):
for sub_key in get_key_paths(value):
yield '.'.join((key, sub_key))
else:
yield key
def get_value_from_path(key_path, content):
"""Return the value from a dictionary based on dot-separated path of keys"""
elem = content
for x in key_path.strip(".").split("."):
try:
x = int(x)
elem = elem[x]
except ValueError:
elem = elem.get(x)
return elem
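# Illustrative round trip: for content = {"a": {"b": 1}}, get_key_paths(content) yields "a.b"
# and get_value_from_path("a.b", content) returns 1.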
def dag_edges(dag):
"""
Create the list of edges needed to construct the Graph view.
A special case is made if a TaskGroup is immediately upstream/downstream of another
TaskGroup or task. Two dummy nodes named upstream_join_id and downstream_join_id are
created for the TaskGroup. Instead of drawing an edge onto every task in the TaskGroup,
all edges are directed onto the dummy nodes. This is to cut down the number of edges on
the graph.
For example: A DAG with TaskGroups group1 and group2:
group1: task1, task2, task3
group2: task4, task5, task6
group2 is downstream of group1:
group1 >> group2
Edges to add (This avoids having to create edges between every task in group1 and group2):
task1 >> downstream_join_id
task2 >> downstream_join_id
task3 >> downstream_join_id
downstream_join_id >> upstream_join_id
upstream_join_id >> task4
upstream_join_id >> task5
upstream_join_id >> task6
"""
# Edges to add between TaskGroup
edges_to_add = set()
# Edges to remove between individual tasks that are replaced by edges_to_add.
edges_to_skip = set()
task_group_map = dag.task_group.get_task_group_dict()
def collect_edges(task_group):
"""Update edges_to_add and edges_to_skip according to TaskGroups."""
if isinstance(task_group, BaseOperator):
return
for target_id in task_group.downstream_group_ids:
# For every TaskGroup immediately downstream, add edges between downstream_join_id
# and upstream_join_id. Skip edges between individual tasks of the TaskGroups.
target_group = task_group_map[target_id]
edges_to_add.add((task_group.downstream_join_id, target_group.upstream_join_id))
for child in task_group.get_leaves():
edges_to_add.add((child.task_id, task_group.downstream_join_id))
for target in target_group.get_roots():
edges_to_skip.add((child.task_id, target.task_id))
edges_to_skip.add((child.task_id, target_group.upstream_join_id))
for child in target_group.get_roots():
edges_to_add.add((target_group.upstream_join_id, child.task_id))
edges_to_skip.add((task_group.downstream_join_id, child.task_id))
# For every individual task immediately downstream, add edges between downstream_join_id and
# the downstream task. Skip edges between individual tasks of the TaskGroup and the
# downstream task.
for target_id in task_group.downstream_task_ids:
edges_to_add.add((task_group.downstream_join_id, target_id))
for child in task_group.get_leaves():
edges_to_add.add((child.task_id, task_group.downstream_join_id))
edges_to_skip.add((child.task_id, target_id))
# For every individual task immediately upstream, add edges between the upstream task
# and upstream_join_id. Skip edges between the upstream task and individual tasks
# of the TaskGroup.
for source_id in task_group.upstream_task_ids:
edges_to_add.add((source_id, task_group.upstream_join_id))
for child in task_group.get_roots():
edges_to_add.add((task_group.upstream_join_id, child.task_id))
edges_to_skip.add((source_id, child.task_id))
for child in task_group.children.values():
collect_edges(child)
collect_edges(dag.task_group)
# Collect all the edges between individual tasks
edges = set()
def get_downstream(task):
for child in task.downstream_list:
edge = (task.task_id, child.task_id)
if edge not in edges:
edges.add(edge)
get_downstream(child)
for root in dag.roots:
get_downstream(root)
result = []
# Build result dicts with the two ends of the edge, plus any extra metadata
# if we have it.
for source_id, target_id in sorted(edges.union(edges_to_add) - edges_to_skip):
record = {"source_id": source_id, "target_id": target_id}
label = dag.get_edge_info(source_id, target_id).get("label")
if label:
record["label"] = label
result.append(record)
return result
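# Illustrative output for the group1 >> group2 example in the docstring above (the exact
# join-node ids are an assumption about how TaskGroup names them):
#     [{'source_id': 'group1.task1', 'target_id': 'group1.downstream_join_id'},
#      ...,
#      {'source_id': 'group1.downstream_join_id', 'target_id': 'group2.upstream_join_id'},
#      {'source_id': 'group2.upstream_join_id', 'target_id': 'group2.task4'},
#      ...]
# An optional 'label' key is added when dag.get_edge_info() provides one.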
######################################################################################
# Error handlers
######################################################################################
def not_found(error):
"""Show Not Found on screen for any error in the Webserver"""
return (
render_template(
'airflow/not_found.html',
hostname=socket.getfqdn()
if conf.getboolean('webserver', 'EXPOSE_HOSTNAME', fallback=True)
else 'redact',
),
404,
)
def show_traceback(error):
"""Show Traceback for a given error"""
return (
render_template(
'airflow/traceback.html',
python_version=sys.version.split(" ")[0],
airflow_version=version,
hostname=socket.getfqdn()
if conf.getboolean('webserver', 'EXPOSE_HOSTNAME', fallback=True)
else 'redact',
info=traceback.format_exc()
if conf.getboolean('webserver', 'EXPOSE_STACKTRACE', fallback=True)
else 'Error! Please contact server admin.',
),
500,
)
######################################################################################
# BaseViews
######################################################################################
class AirflowBaseView(BaseView):
"""Base View to set Airflow related properties"""
from airflow import macros
route_base = ''
extra_args = {
# Make our macros available to our UI templates too.
'macros': macros,
'get_docs_url': get_docs_url,
}
if not conf.getboolean('core', 'unit_test_mode'):
extra_args['sqlite_warning'] = settings.Session.bind.dialect.name == 'sqlite'
        extra_args['sequential_executor_warning'] = conf.get('core', 'executor') == 'SequentialExecutor'
line_chart_attr = {
'legend.maxKeyLength': 200,
}
def render_template(self, *args, **kwargs):
# Add triggerer_job only if we need it
if TriggererJob.is_needed():
kwargs["triggerer_job"] = lazy_object_proxy.Proxy(TriggererJob.most_recent_job)
return super().render_template(
*args,
# Cache this at most once per request, not for the lifetime of the view instance
scheduler_job=lazy_object_proxy.Proxy(SchedulerJob.most_recent_job),
**kwargs,
)
def add_user_permissions_to_dag(sender, template, context, **extra):
"""
Adds `.can_edit`, `.can_trigger`, and `.can_delete` properties
to DAG based on current user's permissions.
Located in `views.py` rather than the DAG model to keep
permissions logic out of the Airflow core.
"""
if 'dag' in context:
dag = context['dag']
can_create_dag_run = current_app.appbuilder.sm.has_access(
permissions.ACTION_CAN_CREATE, permissions.RESOURCE_DAG_RUN
)
dag.can_edit = current_app.appbuilder.sm.can_edit_dag(dag.dag_id)
dag.can_trigger = dag.can_edit and can_create_dag_run
dag.can_delete = current_app.appbuilder.sm.has_access(
permissions.ACTION_CAN_DELETE,
permissions.RESOURCE_DAG,
)
context['dag'] = dag
before_render_template.connect(add_user_permissions_to_dag)
class Airflow(AirflowBaseView):
"""Main Airflow application."""
@expose('/health')
def health(self):
"""
An endpoint helping check the health status of the Airflow instance,
including metadatabase and scheduler.
"""
payload = {'metadatabase': {'status': 'unhealthy'}}
latest_scheduler_heartbeat = None
scheduler_status = 'unhealthy'
payload['metadatabase'] = {'status': 'healthy'}
try:
scheduler_job = SchedulerJob.most_recent_job()
if scheduler_job:
latest_scheduler_heartbeat = scheduler_job.latest_heartbeat.isoformat()
if scheduler_job.is_alive():
scheduler_status = 'healthy'
except Exception:
payload['metadatabase']['status'] = 'unhealthy'
payload['scheduler'] = {
'status': scheduler_status,
'latest_scheduler_heartbeat': latest_scheduler_heartbeat,
}
return wwwutils.json_response(payload)
@expose('/home')
@auth.has_access(
[
(permissions.ACTION_CAN_READ, permissions.RESOURCE_WEBSITE),
]
)
def index(self):
"""Home view."""
hide_paused_dags_by_default = conf.getboolean('webserver', 'hide_paused_dags_by_default')
default_dag_run = conf.getint('webserver', 'default_dag_run_display_number')
num_runs = request.args.get('num_runs', default=default_dag_run, type=int)
current_page = request.args.get('page', default=0, type=int)
arg_search_query = request.args.get('search')
arg_tags_filter = request.args.getlist('tags')
arg_status_filter = request.args.get('status')
if request.args.get('reset_tags') is not None:
flask_session[FILTER_TAGS_COOKIE] = None
# Remove the reset_tags=reset from the URL
return redirect(url_for('Airflow.index'))
cookie_val = flask_session.get(FILTER_TAGS_COOKIE)
if arg_tags_filter:
flask_session[FILTER_TAGS_COOKIE] = ','.join(arg_tags_filter)
elif cookie_val:
# If tags exist in cookie, but not URL, add them to the URL
return redirect(url_for('Airflow.index', tags=cookie_val.split(',')))
if arg_status_filter is None:
cookie_val = flask_session.get(FILTER_STATUS_COOKIE)
if cookie_val:
arg_status_filter = cookie_val
else:
arg_status_filter = 'active' if hide_paused_dags_by_default else 'all'
flask_session[FILTER_STATUS_COOKIE] = arg_status_filter
else:
status = arg_status_filter.strip().lower()
flask_session[FILTER_STATUS_COOKIE] = status
arg_status_filter = status
dags_per_page = PAGE_SIZE
start = current_page * dags_per_page
end = start + dags_per_page
# Get all the dag id the user could access
filter_dag_ids = current_app.appbuilder.sm.get_accessible_dag_ids(g.user)
with create_session() as session:
# read orm_dags from the db
dags_query = session.query(DagModel).filter(~DagModel.is_subdag, DagModel.is_active)
if arg_search_query:
dags_query = dags_query.filter(
DagModel.dag_id.ilike('%' + arg_search_query + '%')
| DagModel.owners.ilike('%' + arg_search_query + '%')
)
if arg_tags_filter:
dags_query = dags_query.filter(DagModel.tags.any(DagTag.name.in_(arg_tags_filter)))
dags_query = dags_query.filter(DagModel.dag_id.in_(filter_dag_ids))
all_dags = dags_query
active_dags = dags_query.filter(~DagModel.is_paused)
paused_dags = dags_query.filter(DagModel.is_paused)
is_paused_count = dict(
all_dags.with_entities(DagModel.is_paused, func.count(DagModel.dag_id))
.group_by(DagModel.is_paused)
.all()
)
status_count_active = is_paused_count.get(False, 0)
status_count_paused = is_paused_count.get(True, 0)
all_dags_count = status_count_active + status_count_paused
if arg_status_filter == 'active':
current_dags = active_dags
num_of_all_dags = status_count_active
elif arg_status_filter == 'paused':
current_dags = paused_dags
num_of_all_dags = status_count_paused
else:
current_dags = all_dags
num_of_all_dags = all_dags_count
dags = (
current_dags.order_by(DagModel.dag_id)
.options(joinedload(DagModel.tags))
.offset(start)
.limit(dags_per_page)
.all()
)
user_permissions = current_app.appbuilder.sm.get_current_user_permissions()
all_dags_editable = (permissions.ACTION_CAN_EDIT, permissions.RESOURCE_DAG) in user_permissions
can_create_dag_run = (
permissions.ACTION_CAN_CREATE,
permissions.RESOURCE_DAG_RUN,
) in user_permissions
can_delete_dag = (
permissions.ACTION_CAN_DELETE,
permissions.RESOURCE_DAG,
) in user_permissions
for dag in dags:
if all_dags_editable:
dag.can_edit = True
else:
dag_resource_name = permissions.RESOURCE_DAG_PREFIX + dag.dag_id
dag.can_edit = (permissions.ACTION_CAN_EDIT, dag_resource_name) in user_permissions
dag.can_trigger = dag.can_edit and can_create_dag_run
dag.can_delete = can_delete_dag
dagtags = session.query(DagTag.name).distinct(DagTag.name).all()
tags = [
{"name": name, "selected": bool(arg_tags_filter and name in arg_tags_filter)}
for name, in dagtags
]
import_errors = session.query(errors.ImportError).order_by(errors.ImportError.id)
if (permissions.ACTION_CAN_READ, permissions.RESOURCE_DAG) not in user_permissions:
# if the user doesn't have access to all DAGs, only display errors from visible DAGs
import_errors = import_errors.join(
DagModel, DagModel.fileloc == errors.ImportError.filename
).filter(DagModel.dag_id.in_(filter_dag_ids))
for import_error in import_errors:
flash(
"Broken DAG: [{ie.filename}] {ie.stacktrace}".format(ie=import_error),
"dag_import_error",
)
from airflow.plugins_manager import import_errors as plugin_import_errors
for filename, stacktrace in plugin_import_errors.items():
flash(
f"Broken plugin: [{filename}] {stacktrace}",
"error",
)
num_of_pages = int(math.ceil(num_of_all_dags / float(dags_per_page)))
state_color_mapping = State.state_color.copy()
state_color_mapping["null"] = state_color_mapping.pop(None)
page_title = conf.get(section="webserver", key="instance_name", fallback="DAGs")
dashboard_alerts = [
fm for fm in settings.DASHBOARD_UIALERTS if fm.should_show(current_app.appbuilder.sm)
]
return self.render_template(
'airflow/dags.html',
dags=dags,
dashboard_alerts=dashboard_alerts,
current_page=current_page,
search_query=arg_search_query if arg_search_query else '',
page_title=page_title,
page_size=dags_per_page,
num_of_pages=num_of_pages,
num_dag_from=min(start + 1, num_of_all_dags),
num_dag_to=min(end, num_of_all_dags),
num_of_all_dags=num_of_all_dags,
paging=wwwutils.generate_pages(
current_page,
num_of_pages,
search=escape(arg_search_query) if arg_search_query else None,
status=arg_status_filter if arg_status_filter else None,
tags=arg_tags_filter if arg_tags_filter else None,
),
num_runs=num_runs,
tags=tags,
state_color=state_color_mapping,
status_filter=arg_status_filter,
status_count_all=all_dags_count,
status_count_active=status_count_active,
status_count_paused=status_count_paused,
tags_filter=arg_tags_filter,
)
@expose('/dag_stats', methods=['POST'])
@auth.has_access(
[
(permissions.ACTION_CAN_READ, permissions.RESOURCE_DAG),
(permissions.ACTION_CAN_READ, permissions.RESOURCE_DAG_RUN),
]
)
@provide_session
def dag_stats(self, session=None):
"""Dag statistics."""
dr = models.DagRun
allowed_dag_ids = current_app.appbuilder.sm.get_accessible_dag_ids(g.user)
dag_state_stats = session.query(dr.dag_id, dr.state, sqla.func.count(dr.state)).group_by(
dr.dag_id, dr.state
)
# Filter by post parameters
selected_dag_ids = {unquote(dag_id) for dag_id in request.form.getlist('dag_ids') if dag_id}
if selected_dag_ids:
filter_dag_ids = selected_dag_ids.intersection(allowed_dag_ids)
else:
filter_dag_ids = allowed_dag_ids
if not filter_dag_ids:
return wwwutils.json_response({})
payload = {}
dag_state_stats = dag_state_stats.filter(dr.dag_id.in_(filter_dag_ids))
data = {}
for dag_id, state, count in dag_state_stats:
if dag_id not in data:
data[dag_id] = {}
data[dag_id][state] = count
for dag_id in filter_dag_ids:
payload[dag_id] = []
for state in State.dag_states:
count = data.get(dag_id, {}).get(state, 0)
payload[dag_id].append({'state': state, 'count': count})
return wwwutils.json_response(payload)
@expose('/task_stats', methods=['POST'])
@auth.has_access(
[
(permissions.ACTION_CAN_READ, permissions.RESOURCE_DAG),
(permissions.ACTION_CAN_READ, permissions.RESOURCE_DAG_RUN),
(permissions.ACTION_CAN_READ, permissions.RESOURCE_TASK_INSTANCE),
]
)
@provide_session
def task_stats(self, session=None):
"""Task Statistics"""
allowed_dag_ids = current_app.appbuilder.sm.get_accessible_dag_ids(g.user)
if not allowed_dag_ids:
return wwwutils.json_response({})
# Filter by post parameters
selected_dag_ids = {unquote(dag_id) for dag_id in request.form.getlist('dag_ids') if dag_id}
if selected_dag_ids:
filter_dag_ids = selected_dag_ids.intersection(allowed_dag_ids)
else:
filter_dag_ids = allowed_dag_ids
running_dag_run_query_result = (
session.query(DagRun.dag_id, DagRun.run_id)
.join(DagModel, DagModel.dag_id == DagRun.dag_id)
.filter(DagRun.state == State.RUNNING, DagModel.is_active)
)
running_dag_run_query_result = running_dag_run_query_result.filter(DagRun.dag_id.in_(filter_dag_ids))
running_dag_run_query_result = running_dag_run_query_result.subquery('running_dag_run')
# Select all task_instances from active dag_runs.
running_task_instance_query_result = session.query(
TaskInstance.dag_id.label('dag_id'), TaskInstance.state.label('state')
).join(
running_dag_run_query_result,
and_(
running_dag_run_query_result.c.dag_id == TaskInstance.dag_id,
running_dag_run_query_result.c.run_id == TaskInstance.run_id,
),
)
if conf.getboolean('webserver', 'SHOW_RECENT_STATS_FOR_COMPLETED_RUNS', fallback=True):
last_dag_run = (
session.query(DagRun.dag_id, sqla.func.max(DagRun.execution_date).label('execution_date'))
.join(DagModel, DagModel.dag_id == DagRun.dag_id)
.filter(DagRun.state != State.RUNNING, DagModel.is_active)
.group_by(DagRun.dag_id)
)
last_dag_run = last_dag_run.filter(DagRun.dag_id.in_(filter_dag_ids))
last_dag_run = last_dag_run.subquery('last_dag_run')
# Select all task_instances from active dag_runs.
# If no dag_run is active, return task instances from most recent dag_run.
last_task_instance_query_result = (
session.query(TaskInstance.dag_id.label('dag_id'), TaskInstance.state.label('state'))
.join(TaskInstance.dag_run)
.join(
last_dag_run,
and_(
last_dag_run.c.dag_id == TaskInstance.dag_id,
last_dag_run.c.execution_date == DagRun.execution_date,
),
)
)
final_task_instance_query_result = union_all(
last_task_instance_query_result, running_task_instance_query_result
).alias('final_ti')
else:
final_task_instance_query_result = running_task_instance_query_result.subquery('final_ti')
qry = session.query(
final_task_instance_query_result.c.dag_id,
final_task_instance_query_result.c.state,
sqla.func.count(),
).group_by(final_task_instance_query_result.c.dag_id, final_task_instance_query_result.c.state)
data = {}
for dag_id, state, count in qry:
if dag_id not in data:
data[dag_id] = {}
data[dag_id][state] = count
payload = {}
for dag_id in filter_dag_ids:
payload[dag_id] = []
for state in State.task_states:
count = data.get(dag_id, {}).get(state, 0)
payload[dag_id].append({'state': state, 'count': count})
return wwwutils.json_response(payload)
@expose('/last_dagruns', methods=['POST'])
@auth.has_access(
[
(permissions.ACTION_CAN_READ, permissions.RESOURCE_DAG),
(permissions.ACTION_CAN_READ, permissions.RESOURCE_DAG_RUN),
]
)
@provide_session
def last_dagruns(self, session=None):
"""Last DAG runs"""
allowed_dag_ids = current_app.appbuilder.sm.get_accessible_dag_ids(g.user)
# Filter by post parameters
selected_dag_ids = {unquote(dag_id) for dag_id in request.form.getlist('dag_ids') if dag_id}
if selected_dag_ids:
filter_dag_ids = selected_dag_ids.intersection(allowed_dag_ids)
else:
filter_dag_ids = allowed_dag_ids
if not filter_dag_ids:
return wwwutils.json_response({})
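        # Sub-select each DAG's latest execution_date, then join back to DagRun below
        # to pull the full details of that most recent run.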
last_runs_subquery = (
session.query(
DagRun.dag_id,
sqla.func.max(DagRun.execution_date).label("max_execution_date"),
)
.group_by(DagRun.dag_id)
.filter(DagRun.dag_id.in_(filter_dag_ids)) # Only include accessible/selected DAGs.
.subquery("last_runs")
)
query = session.query(
DagRun.dag_id,
DagRun.start_date,
DagRun.end_date,
DagRun.state,
DagRun.execution_date,
DagRun.data_interval_start,
DagRun.data_interval_end,
).join(
last_runs_subquery,
and_(
last_runs_subquery.c.dag_id == DagRun.dag_id,
last_runs_subquery.c.max_execution_date == DagRun.execution_date,
),
)
def _datetime_to_string(value: Optional[DateTime]) -> Optional[str]:
if value is None:
return None
return value.isoformat()
resp = {
r.dag_id.replace('.', '__dot__'): {
"dag_id": r.dag_id,
"state": r.state,
"execution_date": _datetime_to_string(r.execution_date),
"start_date": _datetime_to_string(r.start_date),
"end_date": _datetime_to_string(r.end_date),
"data_interval_start": _datetime_to_string(r.data_interval_start),
"data_interval_end": _datetime_to_string(r.data_interval_end),
}
for r in query
}
return wwwutils.json_response(resp)
@expose('/code')
@auth.has_access(
[
(permissions.ACTION_CAN_READ, permissions.RESOURCE_DAG),
(permissions.ACTION_CAN_READ, permissions.RESOURCE_DAG_CODE),
]
)
@provide_session
def code(self, session=None):
"""Dag Code."""
all_errors = ""
dag_orm = None
dag_id = None
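        # Load the DAG source from the DagCode table and syntax-highlight it; any
        # failure along the way falls through to the error markup below.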
try:
dag_id = request.args.get('dag_id')
dag_orm = DagModel.get_dagmodel(dag_id, session=session)
code = DagCode.get_code_by_fileloc(dag_orm.fileloc)
html_code = Markup(highlight(code, lexers.PythonLexer(), HtmlFormatter(linenos=True)))
except Exception as e:
all_errors += (
"Exception encountered during "
+ f"dag_id retrieval/dag retrieval fallback/code highlighting:\n\n{e}\n"
)
html_code = Markup('<p>Failed to load DAG file Code.</p><p>Details: {}</p>').format(
escape(all_errors)
)
wwwutils.check_import_errors(dag_orm.fileloc, session)
return self.render_template(
'airflow/dag_code.html',
html_code=html_code,
dag=dag_orm,
dag_model=dag_orm,
title=dag_id,
root=request.args.get('root'),
wrapped=conf.getboolean('webserver', 'default_wrap'),
)
@expose('/dag_details')
@auth.has_access(
[
(permissions.ACTION_CAN_READ, permissions.RESOURCE_DAG),
(permissions.ACTION_CAN_READ, permissions.RESOURCE_DAG_RUN),
]
)
@provide_session
def dag_details(self, session=None):
"""Get Dag details."""
dag_id = request.args.get('dag_id')
dag = current_app.dag_bag.get_dag(dag_id)
dag_model = DagModel.get_dagmodel(dag_id)
title = "DAG Details"
root = request.args.get('root', '')
wwwutils.check_import_errors(dag.fileloc, session)
states = (
session.query(TaskInstance.state, sqla.func.count(TaskInstance.dag_id))
.filter(TaskInstance.dag_id == dag_id)
.group_by(TaskInstance.state)
.all()
)
active_runs = models.DagRun.find(dag_id=dag_id, state=State.RUNNING, external_trigger=False)
tags = session.query(models.DagTag).filter(models.DagTag.dag_id == dag_id).all()
return self.render_template(
'airflow/dag_details.html',
dag=dag,
title=title,
root=root,
states=states,
State=State,
active_runs=active_runs,
tags=tags,
dag_model=dag_model,
)
@expose('/rendered-templates')
@auth.has_access(
[
(permissions.ACTION_CAN_READ, permissions.RESOURCE_DAG),
(permissions.ACTION_CAN_READ, permissions.RESOURCE_TASK_INSTANCE),
]
)
@action_logging
@provide_session
def rendered_templates(self, session):
"""Get rendered Dag."""
dag_id = request.args.get('dag_id')
task_id = request.args.get('task_id')
execution_date = request.args.get('execution_date')
dttm = timezone.parse(execution_date)
form = DateTimeForm(data={'execution_date': dttm})
root = request.args.get('root', '')
logging.info("Retrieving rendered templates.")
dag = current_app.dag_bag.get_dag(dag_id)
dag_run = dag.get_dagrun(execution_date=dttm, session=session)
task = copy.copy(dag.get_task(task_id))
if dag_run is None:
# No DAG run matching given logical date. This usually means this
# DAG has never been run. Task instance rendering does not really
# make sense in this situation, but "works" prior to AIP-39. This
# "fakes" a temporary DagRun-TaskInstance association (not saved to
# database) for presentation only.
ti = TaskInstance(task)
ti.dag_run = DagRun(dag_id=dag_id, execution_date=dttm)
else:
ti = dag_run.get_task_instance(task_id=task.task_id, session=session)
ti.refresh_from_task(task)
try:
ti.get_rendered_template_fields(session=session)
except AirflowException as e:
msg = "Error rendering template: " + escape(e)
if e.__cause__:
msg += Markup("<br><br>OriginalError: ") + escape(e.__cause__)
flash(msg, "error")
except Exception as e:
flash("Error rendering template: " + str(e), "error")
title = "Rendered Template"
html_dict = {}
renderers = wwwutils.get_attr_renderer()
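        # Render every templated field with the renderer the operator declares in
        # template_fields_renderers; fields without one are pretty-printed as-is.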
for template_field in task.template_fields:
content = getattr(task, template_field)
renderer = task.template_fields_renderers.get(template_field, template_field)
if renderer in renderers:
if isinstance(content, (dict, list)):
json_content = json.dumps(content, sort_keys=True, indent=4)
html_dict[template_field] = renderers[renderer](json_content)
else:
html_dict[template_field] = renderers[renderer](content)
else:
                html_dict[template_field] = Markup("<pre><code>{}</code></pre>").format(pformat(content))
if isinstance(content, dict):
if template_field == 'op_kwargs':
for key, value in content.items():
renderer = task.template_fields_renderers.get(key, key)
if renderer in renderers:
html_dict['.'.join([template_field, key])] = renderers[renderer](value)
else:
html_dict['.'.join([template_field, key])] = Markup(
"<pre><code>{}</pre></code>"
).format(pformat(value))
else:
for dict_keys in get_key_paths(content):
template_path = '.'.join((template_field, dict_keys))
renderer = task.template_fields_renderers.get(template_path, template_path)
if renderer in renderers:
content_value = get_value_from_path(dict_keys, content)
html_dict[template_path] = renderers[renderer](content_value)
return self.render_template(
'airflow/ti_code.html',
html_dict=html_dict,
dag=dag,
task_id=task_id,
execution_date=execution_date,
form=form,
root=root,
title=title,
)
@expose('/rendered-k8s')
@auth.has_access(
[
(permissions.ACTION_CAN_READ, permissions.RESOURCE_DAG),
(permissions.ACTION_CAN_READ, permissions.RESOURCE_TASK_INSTANCE),
]
)
@action_logging
def rendered_k8s(self):
"""Get rendered k8s yaml."""
if not settings.IS_K8S_OR_K8SCELERY_EXECUTOR:
abort(404)
dag_id = request.args.get('dag_id')
task_id = request.args.get('task_id')
execution_date = request.args.get('execution_date')
dttm = timezone.parse(execution_date)
form = DateTimeForm(data={'execution_date': dttm})
root = request.args.get('root', '')
logging.info("Retrieving rendered templates.")
dag = current_app.dag_bag.get_dag(dag_id)
task = dag.get_task(task_id)
dag_run = dag.get_dagrun(execution_date=dttm)
ti = dag_run.get_task_instance(task_id=task.task_id)
pod_spec = None
try:
pod_spec = ti.get_rendered_k8s_spec()
except AirflowException as e:
msg = "Error rendering Kubernetes POD Spec: " + escape(e)
if e.__cause__:
msg += Markup("<br><br>OriginalError: ") + escape(e.__cause__)
flash(msg, "error")
except Exception as e:
flash("Error rendering Kubernetes Pod Spec: " + str(e), "error")
title = "Rendered K8s Pod Spec"
html_dict = {}
renderers = wwwutils.get_attr_renderer()
if pod_spec:
content = yaml.dump(pod_spec)
content = renderers["yaml"](content)
else:
            content = Markup("<pre><code>Error rendering Kubernetes POD Spec</code></pre>")
html_dict['k8s'] = content
return self.render_template(
'airflow/ti_code.html',
html_dict=html_dict,
dag=dag,
task_id=task_id,
execution_date=execution_date,
form=form,
root=root,
title=title,
)
@expose('/get_logs_with_metadata')
@auth.has_access(
[
(permissions.ACTION_CAN_READ, permissions.RESOURCE_DAG),
(permissions.ACTION_CAN_READ, permissions.RESOURCE_TASK_INSTANCE),
(permissions.ACTION_CAN_READ, permissions.RESOURCE_TASK_LOG),
]
)
@action_logging
@provide_session
def get_logs_with_metadata(self, session=None):
"""Retrieve logs including metadata."""
dag_id = request.args.get('dag_id')
task_id = request.args.get('task_id')
execution_date = request.args.get('execution_date')
try_number = request.args.get('try_number', type=int)
metadata = request.args.get('metadata')
metadata = json.loads(metadata)
response_format = request.args.get('format', 'json')
# metadata may be null
if not metadata:
metadata = {}
# Convert string datetime into actual datetime
try:
execution_date = timezone.parse(execution_date)
except ValueError:
error_message = (
'Given execution date, {}, could not be identified '
'as a date. Example date format: 2015-11-16T14:34:15+00:00'.format(execution_date)
)
response = jsonify({'error': error_message})
response.status_code = 400
return response
task_log_reader = TaskLogReader()
if not task_log_reader.supports_read:
return jsonify(
message="Task log handler does not support read logs.",
error=True,
metadata={"end_of_log": True},
)
ti = (
session.query(models.TaskInstance)
.filter(
models.TaskInstance.dag_id == dag_id,
models.TaskInstance.task_id == task_id,
models.TaskInstance.execution_date == execution_date,
)
.first()
)
if ti is None:
return jsonify(
message="*** Task instance did not exist in the DB\n",
error=True,
metadata={"end_of_log": True},
)
try:
dag = current_app.dag_bag.get_dag(dag_id)
if dag:
ti.task = dag.get_task(ti.task_id)
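            # JSON responses return a single chunk plus metadata for incremental
            # polling; any other format streams the whole log as a file download.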
if response_format == 'json':
logs, metadata = task_log_reader.read_log_chunks(ti, try_number, metadata)
message = logs[0] if try_number is not None else logs
return jsonify(message=message, metadata=metadata)
metadata['download_logs'] = True
attachment_filename = task_log_reader.render_log_filename(ti, try_number)
log_stream = task_log_reader.read_log_stream(ti, try_number, metadata)
return Response(
response=log_stream,
mimetype="text/plain",
headers={"Content-Disposition": f"attachment; filename={attachment_filename}"},
)
except AttributeError as e:
error_message = [f"Task log handler does not support read logs.\n{str(e)}\n"]
metadata['end_of_log'] = True
return jsonify(message=error_message, error=True, metadata=metadata)
@expose('/log')
@auth.has_access(
[
(permissions.ACTION_CAN_READ, permissions.RESOURCE_DAG),
(permissions.ACTION_CAN_READ, permissions.RESOURCE_TASK_INSTANCE),
(permissions.ACTION_CAN_READ, permissions.RESOURCE_TASK_LOG),
]
)
@action_logging
@provide_session
def log(self, session=None):
"""Retrieve log."""
dag_id = request.args.get('dag_id')
task_id = request.args.get('task_id')
execution_date = request.args.get('execution_date')
dttm = timezone.parse(execution_date)
form = DateTimeForm(data={'execution_date': dttm})
dag_model = DagModel.get_dagmodel(dag_id)
ti = (
session.query(models.TaskInstance)
.filter(
models.TaskInstance.dag_id == dag_id,
models.TaskInstance.task_id == task_id,
models.TaskInstance.execution_date == dttm,
)
.first()
)
num_logs = 0
if ti is not None:
num_logs = ti.next_try_number - 1
if ti.state in (State.UP_FOR_RESCHEDULE, State.DEFERRED):
                # Tasks in a reschedule or deferred state have decremented their try
                # number; add one back so the current attempt's log is listed.
num_logs += 1
logs = [''] * num_logs
root = request.args.get('root', '')
return self.render_template(
'airflow/ti_log.html',
logs=logs,
dag=dag_model,
title="Log by attempts",
dag_id=dag_id,
task_id=task_id,
execution_date=execution_date,
form=form,
root=root,
wrapped=conf.getboolean('webserver', 'default_wrap'),
)
@expose('/redirect_to_external_log')
@auth.has_access(
[
(permissions.ACTION_CAN_READ, permissions.RESOURCE_DAG),
(permissions.ACTION_CAN_READ, permissions.RESOURCE_TASK_INSTANCE),
(permissions.ACTION_CAN_READ, permissions.RESOURCE_TASK_LOG),
]
)
@action_logging
@provide_session
def redirect_to_external_log(self, session=None):
"""Redirects to external log."""
dag_id = request.args.get('dag_id')
task_id = request.args.get('task_id')
execution_date = request.args.get('execution_date')
dttm = timezone.parse(execution_date)
try_number = request.args.get('try_number', 1)
ti = (
session.query(models.TaskInstance)
.filter(
models.TaskInstance.dag_id == dag_id,
models.TaskInstance.task_id == task_id,
models.TaskInstance.execution_date == dttm,
)
.first()
)
if not ti:
flash(f"Task [{dag_id}.{task_id}] does not exist", "error")
return redirect(url_for('Airflow.index'))
task_log_reader = TaskLogReader()
if not task_log_reader.supports_external_link:
flash("Task log handler does not support external links", "error")
return redirect(url_for('Airflow.index'))
handler = task_log_reader.log_handler
url = handler.get_external_log_url(ti, try_number)
return redirect(url)
@expose('/task')
@auth.has_access(
[
(permissions.ACTION_CAN_READ, permissions.RESOURCE_DAG),
(permissions.ACTION_CAN_READ, permissions.RESOURCE_TASK_INSTANCE),
]
)
@action_logging
@provide_session
def task(self, session):
"""Retrieve task."""
dag_id = request.args.get('dag_id')
task_id = request.args.get('task_id')
execution_date = request.args.get('execution_date')
dttm = timezone.parse(execution_date)
form = DateTimeForm(data={'execution_date': dttm})
root = request.args.get('root', '')
dag = current_app.dag_bag.get_dag(dag_id)
if not dag or task_id not in dag.task_ids:
flash(f"Task [{dag_id}.{task_id}] doesn't seem to exist at the moment", "error")
return redirect(url_for('Airflow.index'))
task = copy.copy(dag.get_task(task_id))
task.resolve_template_files()
ti: Optional[TaskInstance] = (
session.query(TaskInstance)
.options(
                # HACK: Eager-load relationships. This is needed because multiple
                # properties misuse provide_session(), which destroys the session
                # object that ti is bound to.
joinedload(TaskInstance.queued_by_job, innerjoin=False),
joinedload(TaskInstance.trigger, innerjoin=False),
)
.join(TaskInstance.dag_run)
.filter(
DagRun.execution_date == dttm,
TaskInstance.dag_id == dag_id,
TaskInstance.task_id == task_id,
)
.one_or_none()
)
if ti is None:
ti_attrs: Optional[List[Tuple[str, Any]]] = None
else:
ti.refresh_from_task(task)
all_ti_attrs = ((name, getattr(ti, name)) for name in dir(ti) if not name.startswith("_"))
ti_attrs = sorted((name, attr) for name, attr in all_ti_attrs if not callable(attr))
attr_renderers = wwwutils.get_attr_renderer()
task_attrs = [
(attr_name, attr)
for attr_name, attr in (
(attr_name, getattr(task, attr_name))
for attr_name in dir(task)
if not attr_name.startswith("_") and attr_name not in attr_renderers
)
if not callable(attr)
]
# Color coding the special attributes that are code
special_attrs_rendered = {
attr_name: renderer(getattr(task, attr_name))
for attr_name, renderer in attr_renderers.items()
if hasattr(task, attr_name)
}
no_failed_deps_result = [
(
"Unknown",
"All dependencies are met but the task instance is not running. In most "
"cases this just means that the task will probably be scheduled soon "
"unless:<br>\n- The scheduler is down or under heavy load<br>\n{}\n"
"<br>\nIf this task instance does not start soon please contact your "
"Airflow administrator for assistance.".format(
"- This task instance already ran and had it's state changed manually "
"(e.g. cleared in the UI)<br>"
if ti and ti.state == State.NONE
else ""
),
)
]
# Use the scheduler's context to figure out which dependencies are not met
if ti is None:
failed_dep_reasons: List[Tuple[str, str]] = []
else:
dep_context = DepContext(SCHEDULER_QUEUED_DEPS)
failed_dep_reasons = [
(dep.dep_name, dep.reason) for dep in ti.get_failed_dep_statuses(dep_context=dep_context)
]
title = "Task Instance Details"
return self.render_template(
'airflow/task.html',
task_attrs=task_attrs,
ti_attrs=ti_attrs,
failed_dep_reasons=failed_dep_reasons or no_failed_deps_result,
task_id=task_id,
execution_date=execution_date,
special_attrs_rendered=special_attrs_rendered,
form=form,
root=root,
dag=dag,
title=title,
)
@expose('/xcom')
@auth.has_access(
[
(permissions.ACTION_CAN_READ, permissions.RESOURCE_DAG),
(permissions.ACTION_CAN_READ, permissions.RESOURCE_TASK_INSTANCE),
(permissions.ACTION_CAN_READ, permissions.RESOURCE_XCOM),
]
)
@action_logging
@provide_session
def xcom(self, session=None):
"""Retrieve XCOM."""
dag_id = request.args.get('dag_id')
task_id = request.args.get('task_id')
# Carrying execution_date through, even though it's irrelevant for
# this context
execution_date = request.args.get('execution_date')
dttm = timezone.parse(execution_date)
form = DateTimeForm(data={'execution_date': dttm})
root = request.args.get('root', '')
ti_db = models.TaskInstance
dag = DagModel.get_dagmodel(dag_id)
ti = session.query(ti_db).filter(and_(ti_db.dag_id == dag_id, ti_db.task_id == task_id)).first()
if not ti:
flash(f"Task [{dag_id}.{task_id}] doesn't seem to exist at the moment", "error")
return redirect(url_for('Airflow.index'))
xcomlist = (
session.query(XCom)
.filter(XCom.dag_id == dag_id, XCom.task_id == task_id, XCom.execution_date == dttm)
.all()
)
attributes = []
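        # Hide internal XCom entries (keys prefixed with '_') from the listing.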
for xcom in xcomlist:
if not xcom.key.startswith('_'):
attributes.append((xcom.key, xcom.value))
title = "XCom"
return self.render_template(
'airflow/xcom.html',
attributes=attributes,
task_id=task_id,
execution_date=execution_date,
form=form,
root=root,
dag=dag,
title=title,
)
@expose('/run', methods=['POST'])
@auth.has_access(
[
(permissions.ACTION_CAN_EDIT, permissions.RESOURCE_DAG),
(permissions.ACTION_CAN_CREATE, permissions.RESOURCE_TASK_INSTANCE),
]
)
@action_logging
def run(self):
"""Runs Task Instance."""
dag_id = request.form.get('dag_id')
task_id = request.form.get('task_id')
origin = get_safe_url(request.form.get('origin'))
dag = current_app.dag_bag.get_dag(dag_id)
task = dag.get_task(task_id)
execution_date = request.form.get('execution_date')
execution_date = timezone.parse(execution_date)
ignore_all_deps = request.form.get('ignore_all_deps') == "true"
ignore_task_deps = request.form.get('ignore_task_deps') == "true"
ignore_ti_state = request.form.get('ignore_ti_state') == "true"
executor = ExecutorLoader.get_default_executor()
valid_celery_config = False
valid_kubernetes_config = False
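        # Queueing a task directly from the webserver is only supported when the
        # configured executor is CeleryExecutor or KubernetesExecutor.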
try:
from airflow.executors.celery_executor import CeleryExecutor
valid_celery_config = isinstance(executor, CeleryExecutor)
except ImportError:
pass
try:
from airflow.executors.kubernetes_executor import KubernetesExecutor
valid_kubernetes_config = isinstance(executor, KubernetesExecutor)
except ImportError:
pass
if not valid_celery_config and not valid_kubernetes_config:
flash("Only works with the Celery or Kubernetes executors, sorry", "error")
return redirect(origin)
dag_run = dag.get_dagrun(execution_date=execution_date)
ti = dag_run.get_task_instance(task_id=task.task_id)
ti.refresh_from_task(task)
# Make sure the task instance can be run
dep_context = DepContext(
deps=RUNNING_DEPS,
ignore_all_deps=ignore_all_deps,
ignore_task_deps=ignore_task_deps,
ignore_ti_state=ignore_ti_state,
)
failed_deps = list(ti.get_failed_dep_statuses(dep_context=dep_context))
if failed_deps:
failed_deps_str = ", ".join(f"{dep.dep_name}: {dep.reason}" for dep in failed_deps)
flash(
"Could not queue task instance for execution, dependencies not met: "
"{}".format(failed_deps_str),
"error",
)
return redirect(origin)
executor.job_id = "manual"
executor.start()
executor.queue_task_instance(
ti,
ignore_all_deps=ignore_all_deps,
ignore_task_deps=ignore_task_deps,
ignore_ti_state=ignore_ti_state,
)
executor.heartbeat()
flash(f"Sent {ti} to the message queue, it should start any moment now.")
return redirect(origin)
@expose('/delete', methods=['POST'])
@auth.has_access(
[
(permissions.ACTION_CAN_DELETE, permissions.RESOURCE_DAG),
]
)
@action_logging
def delete(self):
"""Deletes DAG."""
from airflow.api.common.experimental import delete_dag
from airflow.exceptions import DagNotFound
dag_id = request.values.get('dag_id')
origin = get_safe_url(request.values.get('origin'))
try:
delete_dag.delete_dag(dag_id)
except DagNotFound:
flash(f"DAG with id {dag_id} not found. Cannot delete", 'error')
return redirect(request.referrer)
except AirflowException:
flash(
f"Cannot delete DAG with id {dag_id} because some task instances of the DAG "
"are still running. Please mark the task instances as "
"failed/succeeded before deleting the DAG",
"error",
)
return redirect(request.referrer)
flash(f"Deleting DAG with id {dag_id}. May take a couple minutes to fully disappear.")
# Upon success return to origin.
return redirect(origin)
@expose('/trigger', methods=['POST', 'GET'])
@auth.has_access(
[
(permissions.ACTION_CAN_EDIT, permissions.RESOURCE_DAG),
(permissions.ACTION_CAN_CREATE, permissions.RESOURCE_DAG_RUN),
]
)
@action_logging
@provide_session
def trigger(self, session=None):
"""Triggers DAG Run."""
dag_id = request.values.get('dag_id')
origin = get_safe_url(request.values.get('origin'))
unpause = request.values.get('unpause')
request_conf = request.values.get('conf')
request_execution_date = request.values.get('execution_date', default=timezone.utcnow().isoformat())
is_dag_run_conf_overrides_params = conf.getboolean('core', 'dag_run_conf_overrides_params')
dag = current_app.dag_bag.get_dag(dag_id)
if request.method == 'GET':
# Populate conf textarea with conf requests parameter, or dag.params
default_conf = ''
doc_md = wwwutils.wrapped_markdown(getattr(dag, 'doc_md', None))
form = DateTimeForm(data={'execution_date': request_execution_date})
if request_conf:
default_conf = request_conf
else:
try:
default_conf = json.dumps(
{str(k): v.resolve(suppress_exception=True) for k, v in dag.params.items()}, indent=4
)
except TypeError:
flash("Could not pre-populate conf field due to non-JSON-serializable data-types")
return self.render_template(
'airflow/trigger.html',
dag_id=dag_id,
origin=origin,
conf=default_conf,
doc_md=doc_md,
form=form,
is_dag_run_conf_overrides_params=is_dag_run_conf_overrides_params,
)
dag_orm = session.query(models.DagModel).filter(models.DagModel.dag_id == dag_id).first()
if not dag_orm:
flash(f"Cannot find dag {dag_id}")
return redirect(origin)
try:
execution_date = timezone.parse(request_execution_date)
except ParserError:
flash("Invalid execution date", "error")
form = DateTimeForm(data={'execution_date': timezone.utcnow().isoformat()})
return self.render_template(
'airflow/trigger.html',
dag_id=dag_id,
origin=origin,
conf=request_conf,
form=form,
is_dag_run_conf_overrides_params=is_dag_run_conf_overrides_params,
)
dr = DagRun.find(dag_id=dag_id, execution_date=execution_date, run_type=DagRunType.MANUAL)
if dr:
flash(f"This run_id {dr.run_id} already exists")
return redirect(origin)
run_conf = {}
if request_conf:
try:
run_conf = json.loads(request_conf)
if not isinstance(run_conf, dict):
flash("Invalid JSON configuration, must be a dict", "error")
form = DateTimeForm(data={'execution_date': execution_date})
return self.render_template(
'airflow/trigger.html',
dag_id=dag_id,
origin=origin,
conf=request_conf,
form=form,
is_dag_run_conf_overrides_params=is_dag_run_conf_overrides_params,
)
except json.decoder.JSONDecodeError:
flash("Invalid JSON configuration, not parseable", "error")
form = DateTimeForm(data={'execution_date': execution_date})
return self.render_template(
'airflow/trigger.html',
dag_id=dag_id,
origin=origin,
conf=request_conf,
form=form,
is_dag_run_conf_overrides_params=is_dag_run_conf_overrides_params,
)
if unpause and dag.is_paused:
models.DagModel.get_dagmodel(dag_id).set_is_paused(is_paused=False)
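        # Create the manual run in the QUEUED state with a data interval inferred
        # from the requested execution date; the scheduler takes it from there.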
try:
dag.create_dagrun(
run_type=DagRunType.MANUAL,
execution_date=execution_date,
data_interval=dag.timetable.infer_manual_data_interval(run_after=execution_date),
state=State.QUEUED,
conf=run_conf,
external_trigger=True,
dag_hash=current_app.dag_bag.dags_hash.get(dag_id),
)
except ValueError as ve:
flash(f"{ve}", "error")
form = DateTimeForm(data={'execution_date': execution_date})
return self.render_template(
'airflow/trigger.html',
dag_id=dag_id,
origin=origin,
conf=request_conf,
form=form,
is_dag_run_conf_overrides_params=is_dag_run_conf_overrides_params,
)
flash(f"Triggered {dag_id}, it should start any moment now.")
return redirect(origin)
def _clear_dag_tis(
self, dag, start_date, end_date, origin, recursive=False, confirmed=False, only_failed=False
):
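        # A confirmed request clears immediately; otherwise do a dry run and show
        # the affected task instances on a confirmation page first.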
if confirmed:
count = dag.clear(
start_date=start_date,
end_date=end_date,
include_subdags=recursive,
include_parentdag=recursive,
only_failed=only_failed,
)
flash(f"{count} task instances have been cleared")
return redirect(origin)
try:
tis = dag.clear(
start_date=start_date,
end_date=end_date,
include_subdags=recursive,
include_parentdag=recursive,
only_failed=only_failed,
dry_run=True,
)
except AirflowException as ex:
flash(str(ex), 'error')
return redirect(origin)
if not tis:
flash("No task instances to clear", 'error')
response = redirect(origin)
else:
details = "\n".join(str(t) for t in tis)
response = self.render_template(
'airflow/confirm.html',
endpoint=None,
message="Here's the list of task instances you are about to clear:",
details=details,
)
return response
@expose('/clear', methods=['POST'])
@auth.has_access(
[
(permissions.ACTION_CAN_EDIT, permissions.RESOURCE_DAG),
(permissions.ACTION_CAN_DELETE, permissions.RESOURCE_TASK_INSTANCE),
]
)
@action_logging
def clear(self):
"""Clears the Dag."""
dag_id = request.form.get('dag_id')
task_id = request.form.get('task_id')
origin = get_safe_url(request.form.get('origin'))
dag = current_app.dag_bag.get_dag(dag_id)
execution_date = request.form.get('execution_date')
execution_date = timezone.parse(execution_date)
confirmed = request.form.get('confirmed') == "true"
upstream = request.form.get('upstream') == "true"
downstream = request.form.get('downstream') == "true"
future = request.form.get('future') == "true"
past = request.form.get('past') == "true"
recursive = request.form.get('recursive') == "true"
only_failed = request.form.get('only_failed') == "true"
dag = dag.partial_subset(
task_ids_or_regex=fr"^{task_id}$",
include_downstream=downstream,
include_upstream=upstream,
)
end_date = execution_date if not future else None
start_date = execution_date if not past else None
return self._clear_dag_tis(
dag,
start_date,
end_date,
origin,
recursive=recursive,
confirmed=confirmed,
only_failed=only_failed,
)
@expose('/dagrun_clear', methods=['POST'])
@auth.has_access(
[
(permissions.ACTION_CAN_EDIT, permissions.RESOURCE_DAG),
(permissions.ACTION_CAN_DELETE, permissions.RESOURCE_TASK_INSTANCE),
]
)
@action_logging
def dagrun_clear(self):
"""Clears the DagRun"""
dag_id = request.form.get('dag_id')
origin = get_safe_url(request.form.get('origin'))
execution_date = request.form.get('execution_date')
confirmed = request.form.get('confirmed') == "true"
dag = current_app.dag_bag.get_dag(dag_id)
execution_date = timezone.parse(execution_date)
start_date = execution_date
end_date = execution_date
return self._clear_dag_tis(dag, start_date, end_date, origin, recursive=True, confirmed=confirmed)
@expose('/blocked', methods=['POST'])
@auth.has_access(
[
(permissions.ACTION_CAN_EDIT, permissions.RESOURCE_DAG),
(permissions.ACTION_CAN_READ, permissions.RESOURCE_DAG_RUN),
]
)
@provide_session
def blocked(self, session=None):
"""Mark Dag Blocked."""
allowed_dag_ids = current_app.appbuilder.sm.get_accessible_dag_ids(g.user)
# Filter by post parameters
selected_dag_ids = {unquote(dag_id) for dag_id in request.form.getlist('dag_ids') if dag_id}
if selected_dag_ids:
filter_dag_ids = selected_dag_ids.intersection(allowed_dag_ids)
else:
filter_dag_ids = allowed_dag_ids
if not filter_dag_ids:
return wwwutils.json_response([])
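        # Count RUNNING dag runs per DAG; the UI compares this count against
        # max_active_runs to decide whether to show the "blocked" indicator.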
dags = (
session.query(DagRun.dag_id, sqla.func.count(DagRun.id))
.filter(DagRun.state == State.RUNNING)
.filter(DagRun.dag_id.in_(filter_dag_ids))
.group_by(DagRun.dag_id)
)
payload = []
for dag_id, active_dag_runs in dags:
max_active_runs = 0
dag = current_app.dag_bag.get_dag(dag_id)
if dag:
# TODO: Make max_active_runs a column so we can query for it directly
max_active_runs = dag.max_active_runs
payload.append(
{
'dag_id': dag_id,
'active_dag_run': active_dag_runs,
'max_active_runs': max_active_runs,
}
)
return wwwutils.json_response(payload)
def _mark_dagrun_state_as_failed(self, dag_id, execution_date, confirmed, origin):
if not execution_date:
flash('Invalid execution date', 'error')
return redirect(origin)
execution_date = timezone.parse(execution_date)
dag = current_app.dag_bag.get_dag(dag_id)
if not dag:
flash(f'Cannot find DAG: {dag_id}', 'error')
return redirect(origin)
new_dag_state = set_dag_run_state_to_failed(dag, execution_date, commit=confirmed)
if confirmed:
flash(f'Marked failed on {len(new_dag_state)} task instances')
return redirect(origin)
else:
details = '\n'.join(str(t) for t in new_dag_state)
response = self.render_template(
'airflow/confirm.html',
message="Here's the list of task instances you are about to mark as failed",
details=details,
)
return response
def _mark_dagrun_state_as_success(self, dag_id, execution_date, confirmed, origin):
if not execution_date:
flash('Invalid execution date', 'error')
return redirect(origin)
execution_date = timezone.parse(execution_date)
dag = current_app.dag_bag.get_dag(dag_id)
if not dag:
flash(f'Cannot find DAG: {dag_id}', 'error')
return redirect(origin)
new_dag_state = set_dag_run_state_to_success(dag, execution_date, commit=confirmed)
if confirmed:
flash(f'Marked success on {len(new_dag_state)} task instances')
return redirect(origin)
else:
details = '\n'.join(str(t) for t in new_dag_state)
response = self.render_template(
'airflow/confirm.html',
message="Here's the list of task instances you are about to mark as success",
details=details,
)
return response
@expose('/dagrun_failed', methods=['POST'])
@auth.has_access(
[
(permissions.ACTION_CAN_EDIT, permissions.RESOURCE_DAG),
(permissions.ACTION_CAN_EDIT, permissions.RESOURCE_DAG_RUN),
]
)
@action_logging
def dagrun_failed(self):
"""Mark DagRun failed."""
dag_id = request.form.get('dag_id')
execution_date = request.form.get('execution_date')
confirmed = request.form.get('confirmed') == 'true'
origin = get_safe_url(request.form.get('origin'))
return self._mark_dagrun_state_as_failed(dag_id, execution_date, confirmed, origin)
@expose('/dagrun_success', methods=['POST'])
@auth.has_access(
[
(permissions.ACTION_CAN_EDIT, permissions.RESOURCE_DAG),
(permissions.ACTION_CAN_EDIT, permissions.RESOURCE_DAG_RUN),
]
)
@action_logging
def dagrun_success(self):
"""Mark DagRun success"""
dag_id = request.form.get('dag_id')
execution_date = request.form.get('execution_date')
confirmed = request.form.get('confirmed') == 'true'
origin = get_safe_url(request.form.get('origin'))
return self._mark_dagrun_state_as_success(dag_id, execution_date, confirmed, origin)
def _mark_task_instance_state(
self,
dag_id,
task_id,
origin,
execution_date,
upstream,
downstream,
future,
past,
state,
):
dag = current_app.dag_bag.get_dag(dag_id)
latest_execution_date = dag.get_latest_execution_date()
if not latest_execution_date:
flash(f"Cannot mark tasks as {state}, seem that dag {dag_id} has never run", "error")
return redirect(origin)
execution_date = timezone.parse(execution_date)
altered = dag.set_task_instance_state(
task_id, execution_date, state, upstream=upstream, downstream=downstream, future=future, past=past
)
flash(f"Marked {state} on {len(altered)} task instances")
return redirect(origin)
@expose('/confirm', methods=['GET'])
@auth.has_access(
[
(permissions.ACTION_CAN_EDIT, permissions.RESOURCE_DAG),
(permissions.ACTION_CAN_EDIT, permissions.RESOURCE_TASK_INSTANCE),
]
)
@action_logging
def confirm(self):
"""Show confirmation page for marking tasks as success or failed."""
args = request.args
dag_id = args.get('dag_id')
task_id = args.get('task_id')
execution_date = args.get('execution_date')
state = args.get('state')
upstream = to_boolean(args.get('upstream'))
downstream = to_boolean(args.get('downstream'))
future = to_boolean(args.get('future'))
past = to_boolean(args.get('past'))
dag = current_app.dag_bag.get_dag(dag_id)
if not dag:
flash(f'DAG {dag_id} not found', "error")
return redirect(request.referrer or url_for('Airflow.index'))
try:
task = dag.get_task(task_id)
except airflow.exceptions.TaskNotFound:
flash(f"Task {task_id} not found", "error")
return redirect(request.referrer or url_for('Airflow.index'))
task.dag = dag
if state not in (
'success',
'failed',
):
flash(f"Invalid state {state}, must be either 'success' or 'failed'", "error")
return redirect(request.referrer or url_for('Airflow.index'))
latest_execution_date = dag.get_latest_execution_date()
if not latest_execution_date:
flash(f"Cannot mark tasks as {state}, seem that dag {dag_id} has never run", "error")
return redirect(request.referrer or url_for('Airflow.index'))
execution_date = timezone.parse(execution_date)
from airflow.api.common.experimental.mark_tasks import set_state
to_be_altered = set_state(
tasks=[task],
execution_date=execution_date,
upstream=upstream,
downstream=downstream,
future=future,
past=past,
state=state,
commit=False,
)
details = "\n".join(str(t) for t in to_be_altered)
response = self.render_template(
"airflow/confirm.html",
endpoint=url_for(f'Airflow.{state}'),
message=f"Here's the list of task instances you are about to mark as {state}:",
details=details,
)
return response
@expose('/failed', methods=['POST'])
@auth.has_access(
[
(permissions.ACTION_CAN_EDIT, permissions.RESOURCE_DAG),
(permissions.ACTION_CAN_EDIT, permissions.RESOURCE_TASK_INSTANCE),
]
)
@action_logging
def failed(self):
"""Mark task as failed."""
args = request.form
dag_id = args.get('dag_id')
task_id = args.get('task_id')
origin = get_safe_url(args.get('origin'))
execution_date = args.get('execution_date')
upstream = to_boolean(args.get('upstream'))
downstream = to_boolean(args.get('downstream'))
future = to_boolean(args.get('future'))
past = to_boolean(args.get('past'))
return self._mark_task_instance_state(
dag_id,
task_id,
origin,
execution_date,
upstream,
downstream,
future,
past,
State.FAILED,
)
@expose('/success', methods=['POST'])
@auth.has_access(
[
(permissions.ACTION_CAN_EDIT, permissions.RESOURCE_DAG),
(permissions.ACTION_CAN_EDIT, permissions.RESOURCE_TASK_INSTANCE),
]
)
@action_logging
def success(self):
"""Mark task as success."""
args = request.form
dag_id = args.get('dag_id')
task_id = args.get('task_id')
origin = get_safe_url(args.get('origin'))
execution_date = args.get('execution_date')
upstream = to_boolean(args.get('upstream'))
downstream = to_boolean(args.get('downstream'))
future = to_boolean(args.get('future'))
past = to_boolean(args.get('past'))
return self._mark_task_instance_state(
dag_id,
task_id,
origin,
execution_date,
upstream,
downstream,
future,
past,
State.SUCCESS,
)
def _get_tree_data(
self,
dag_runs: Iterable[DagRun],
dag: DAG,
base_date: DateTime,
session: settings.Session,
):
"""Returns formatted dag_runs for Tree view"""
dates = sorted(dag_runs.keys())
min_date = min(dag_runs, default=None)
task_instances = {
(ti.task_id, ti.execution_date): ti
for ti in dag.get_task_instances(start_date=min_date, end_date=base_date, session=session)
}
expanded = set()
# The default recursion traces every path so that tree view has full
# expand/collapse functionality. After 5,000 nodes we stop and fall
# back on a quick DFS search for performance. See PR #320.
node_count = 0
node_limit = 5000 / max(1, len(dag.leaves))
def encode_ti(task_instance: Optional[models.TaskInstance]) -> Optional[List]:
if not task_instance:
return None
# NOTE: order of entry is important here because client JS relies on it for
# tree node reconstruction. Remember to change JS code in tree.html
# whenever order is altered.
task_instance_data = [
task_instance.state,
task_instance.try_number,
None, # start_ts
None, # duration
]
if task_instance.start_date:
# round to seconds to reduce payload size
task_instance_data[2] = int(task_instance.start_date.timestamp())
if task_instance.duration is not None:
task_instance_data[3] = truncate_task_duration(task_instance.duration)
return task_instance_data
def recurse_nodes(task, visited):
nonlocal node_count
node_count += 1
visited.add(task)
task_id = task.task_id
node = {
'name': task.task_id,
'instances': [encode_ti(task_instances.get((task_id, d))) for d in dates],
'num_dep': len(task.downstream_list),
'operator': task.task_type,
'retries': task.retries,
'owner': task.owner,
'ui_color': task.ui_color,
}
if task.downstream_list:
children = [
recurse_nodes(t, visited)
for t in task.downstream_list
if node_count < node_limit or t not in visited
]
# D3 tree uses children vs _children to define what is
# expanded or not. The following block makes it such that
# repeated nodes are collapsed by default.
if task.task_id not in expanded:
children_key = 'children'
expanded.add(task.task_id)
else:
children_key = "_children"
node[children_key] = children
if task.depends_on_past:
node['depends_on_past'] = task.depends_on_past
if task.start_date:
# round to seconds to reduce payload size
node['start_ts'] = int(task.start_date.timestamp())
if task.end_date:
# round to seconds to reduce payload size
node['end_ts'] = int(task.end_date.timestamp())
if task.extra_links:
node['extra_links'] = task.extra_links
return node
return {
'name': '[DAG]',
'children': [recurse_nodes(t, set()) for t in dag.roots],
'instances': [dag_runs.get(d) or {'execution_date': d.isoformat()} for d in dates],
}
@expose('/tree')
@auth.has_access(
[
(permissions.ACTION_CAN_READ, permissions.RESOURCE_DAG),
(permissions.ACTION_CAN_READ, permissions.RESOURCE_TASK_INSTANCE),
(permissions.ACTION_CAN_READ, permissions.RESOURCE_TASK_LOG),
]
)
@gzipped
@action_logging
@provide_session
def tree(self, session=None):
"""Get Dag as tree."""
dag_id = request.args.get('dag_id')
dag = current_app.dag_bag.get_dag(dag_id)
dag_model = DagModel.get_dagmodel(dag_id)
if not dag:
flash(f'DAG "{dag_id}" seems to be missing from DagBag.', "error")
return redirect(url_for('Airflow.index'))
wwwutils.check_import_errors(dag.fileloc, session)
root = request.args.get('root')
if root:
dag = dag.partial_subset(task_ids_or_regex=root, include_downstream=False, include_upstream=True)
num_runs = request.args.get('num_runs', type=int)
if num_runs is None:
num_runs = conf.getint('webserver', 'default_dag_run_display_number')
try:
base_date = timezone.parse(request.args["base_date"])
except (KeyError, ValueError):
base_date = dag.get_latest_execution_date() or timezone.utcnow()
dag_runs = (
session.query(DagRun)
.filter(DagRun.dag_id == dag.dag_id, DagRun.execution_date <= base_date)
.order_by(DagRun.execution_date.desc())
.limit(num_runs)
.all()
)
dag_runs = {dr.execution_date: alchemy_to_dict(dr) for dr in dag_runs}
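        # Key the runs by execution_date so _get_tree_data can align task instances
        # with each run.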
max_date = max(dag_runs.keys(), default=None)
form = DateTimeWithNumRunsForm(
data={
'base_date': max_date or timezone.utcnow(),
'num_runs': num_runs,
}
)
doc_md = wwwutils.wrapped_markdown(getattr(dag, 'doc_md', None))
task_log_reader = TaskLogReader()
if task_log_reader.supports_external_link:
external_log_name = task_log_reader.log_handler.log_name
else:
external_log_name = None
data = self._get_tree_data(dag_runs, dag, base_date, session=session)
# avoid spaces to reduce payload size
data = htmlsafe_json_dumps(data, separators=(',', ':'))
return self.render_template(
'airflow/tree.html',
operators=sorted({op.task_type: op for op in dag.tasks}.values(), key=lambda x: x.task_type),
root=root,
form=form,
dag=dag,
doc_md=doc_md,
data=data,
num_runs=num_runs,
show_external_log_redirect=task_log_reader.supports_external_link,
external_log_name=external_log_name,
dag_model=dag_model,
auto_refresh_interval=conf.getint('webserver', 'auto_refresh_interval'),
)
@expose('/calendar')
@auth.has_access(
[
(permissions.ACTION_CAN_READ, permissions.RESOURCE_DAG),
(permissions.ACTION_CAN_READ, permissions.RESOURCE_TASK_INSTANCE),
]
)
@gzipped
@action_logging
@provide_session
def calendar(self, session=None):
"""Get DAG runs as calendar"""
def _convert_to_date(session, column):
"""Convert column to date."""
if session.bind.dialect.name == 'mssql':
return column.cast(Date)
else:
return func.date(column)
dag_id = request.args.get('dag_id')
dag = current_app.dag_bag.get_dag(dag_id)
dag_model = DagModel.get_dagmodel(dag_id)
if not dag:
flash(f'DAG "{dag_id}" seems to be missing from DagBag.', "error")
return redirect(url_for('Airflow.index'))
wwwutils.check_import_errors(dag.fileloc, session)
root = request.args.get('root')
if root:
dag = dag.partial_subset(task_ids_or_regex=root, include_downstream=False, include_upstream=True)
dag_states = (
session.query(
(_convert_to_date(session, DagRun.execution_date)).label('date'),
DagRun.state,
func.count('*').label('count'),
)
.filter(DagRun.dag_id == dag.dag_id)
.group_by(_convert_to_date(session, DagRun.execution_date), DagRun.state)
.order_by(_convert_to_date(session, DagRun.execution_date).asc())
.all()
)
dag_states = [
{
# DATE() in SQLite and MySQL behave differently:
# SQLite returns a string, MySQL returns a date.
'date': dr.date if isinstance(dr.date, str) else dr.date.isoformat(),
'state': dr.state,
'count': dr.count,
}
for dr in dag_states
]
data = {
'dag_states': dag_states,
'start_date': (dag.start_date or DateTime.utcnow()).date().isoformat(),
'end_date': (dag.end_date or DateTime.utcnow()).date().isoformat(),
}
doc_md = wwwutils.wrapped_markdown(getattr(dag, 'doc_md', None))
# avoid spaces to reduce payload size
data = htmlsafe_json_dumps(data, separators=(',', ':'))
return self.render_template(
'airflow/calendar.html',
dag=dag,
doc_md=doc_md,
data=data,
root=root,
dag_model=dag_model,
)
@expose('/graph')
@auth.has_access(
[
(permissions.ACTION_CAN_READ, permissions.RESOURCE_DAG),
(permissions.ACTION_CAN_READ, permissions.RESOURCE_TASK_INSTANCE),
(permissions.ACTION_CAN_READ, permissions.RESOURCE_TASK_LOG),
]
)
@gzipped
@action_logging
@provide_session
def graph(self, session=None):
"""Get DAG as Graph."""
dag_id = request.args.get('dag_id')
dag = current_app.dag_bag.get_dag(dag_id)
dag_model = DagModel.get_dagmodel(dag_id)
if not dag:
flash(f'DAG "{dag_id}" seems to be missing.', "error")
return redirect(url_for('Airflow.index'))
wwwutils.check_import_errors(dag.fileloc, session)
root = request.args.get('root')
if root:
dag = dag.partial_subset(task_ids_or_regex=root, include_upstream=True, include_downstream=False)
arrange = request.args.get('arrange', dag.orientation)
nodes = task_group_to_dict(dag.task_group)
edges = dag_edges(dag)
dt_nr_dr_data = get_date_time_num_runs_dag_runs_form_data(request, session, dag)
dt_nr_dr_data['arrange'] = arrange
dttm = dt_nr_dr_data['dttm']
class GraphForm(DateTimeWithNumRunsWithDagRunsForm):
"""Graph Form class."""
arrange = SelectField(
"Layout",
choices=(
('LR', "Left > Right"),
('RL', "Right > Left"),
('TB', "Top > Bottom"),
('BT', "Bottom > Top"),
),
)
form = GraphForm(data=dt_nr_dr_data)
form.execution_date.choices = dt_nr_dr_data['dr_choices']
task_instances = {ti.task_id: alchemy_to_dict(ti) for ti in dag.get_task_instances(dttm, dttm)}
tasks = {
t.task_id: {
'dag_id': t.dag_id,
'task_type': t.task_type,
'extra_links': t.extra_links,
}
for t in dag.tasks
}
if not tasks:
flash("No tasks found", "error")
session.commit()
doc_md = wwwutils.wrapped_markdown(getattr(dag, 'doc_md', None))
task_log_reader = TaskLogReader()
if task_log_reader.supports_external_link:
external_log_name = task_log_reader.log_handler.log_name
else:
external_log_name = None
return self.render_template(
'airflow/graph.html',
dag=dag,
form=form,
width=request.args.get('width', "100%"),
height=request.args.get('height', "800"),
execution_date=dttm.isoformat(),
state_token=wwwutils.state_token(dt_nr_dr_data['dr_state']),
doc_md=doc_md,
arrange=arrange,
operators=sorted({op.task_type: op for op in dag.tasks}.values(), key=lambda x: x.task_type),
root=root or '',
task_instances=task_instances,
tasks=tasks,
nodes=nodes,
edges=edges,
show_external_log_redirect=task_log_reader.supports_external_link,
external_log_name=external_log_name,
dag_run_state=dt_nr_dr_data['dr_state'],
dag_model=dag_model,
auto_refresh_interval=conf.getint('webserver', 'auto_refresh_interval'),
)
@expose('/duration')
@auth.has_access(
[
(permissions.ACTION_CAN_READ, permissions.RESOURCE_DAG),
(permissions.ACTION_CAN_READ, permissions.RESOURCE_TASK_INSTANCE),
]
)
@action_logging
@provide_session
def duration(self, session=None):
"""Get Dag as duration graph."""
default_dag_run = conf.getint('webserver', 'default_dag_run_display_number')
dag_id = request.args.get('dag_id')
dag_model = DagModel.get_dagmodel(dag_id)
dag: Optional[DAG] = current_app.dag_bag.get_dag(dag_id)
if dag is None:
flash(f'DAG "{dag_id}" seems to be missing.', "error")
return redirect(url_for('Airflow.index'))
wwwutils.check_import_errors(dag.fileloc, session)
base_date = request.args.get('base_date')
num_runs = request.args.get('num_runs', default=default_dag_run, type=int)
if base_date:
base_date = timezone.parse(base_date)
else:
base_date = dag.get_latest_execution_date() or timezone.utcnow()
root = request.args.get('root')
if root:
dag = dag.partial_subset(task_ids_or_regex=root, include_upstream=True, include_downstream=False)
chart_height = wwwutils.get_chart_height(dag)
chart = nvd3.lineChart(
name="lineChart", x_is_date=True, height=chart_height, chart_attr=self.line_chart_attr
)
cum_chart = nvd3.lineChart(
name="cumLineChart", x_is_date=True, height=chart_height, chart_attr=self.line_chart_attr
)
y_points = defaultdict(list)
x_points = defaultdict(list)
cumulative_y = defaultdict(list)
task_instances = dag.get_task_instances_before(base_date, num_runs, session=session)
if task_instances:
min_date = task_instances[0].execution_date
else:
min_date = timezone.utc_epoch()
ti_fails = (
session.query(TaskFail)
.filter(
TaskFail.dag_id == dag.dag_id,
TaskFail.execution_date >= min_date,
TaskFail.execution_date <= base_date,
TaskFail.task_id.in_([t.task_id for t in dag.tasks]),
)
.all()
)
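        # Sum recorded failure durations per (dag_id, task_id, execution_date) so the
        # cumulative chart also accounts for time spent in failed attempts.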
fails_totals = defaultdict(int)
for failed_task_instance in ti_fails:
dict_key = (
failed_task_instance.dag_id,
failed_task_instance.task_id,
failed_task_instance.execution_date,
)
if failed_task_instance.duration:
fails_totals[dict_key] += failed_task_instance.duration
for task_instance in task_instances:
if task_instance.duration:
date_time = wwwutils.epoch(task_instance.execution_date)
x_points[task_instance.task_id].append(date_time)
y_points[task_instance.task_id].append(float(task_instance.duration))
fails_dict_key = (task_instance.dag_id, task_instance.task_id, task_instance.execution_date)
fails_total = fails_totals[fails_dict_key]
cumulative_y[task_instance.task_id].append(float(task_instance.duration + fails_total))
# determine the most relevant time unit for the set of task instance
# durations for the DAG
y_unit = infer_time_unit([d for t in y_points.values() for d in t])
cum_y_unit = infer_time_unit([d for t in cumulative_y.values() for d in t])
# update the y Axis on both charts to have the correct time units
chart.create_y_axis('yAxis', format='.02f', custom_format=False, label=f'Duration ({y_unit})')
chart.axislist['yAxis']['axisLabelDistance'] = '-15'
cum_chart.create_y_axis('yAxis', format='.02f', custom_format=False, label=f'Duration ({cum_y_unit})')
cum_chart.axislist['yAxis']['axisLabelDistance'] = '-15'
for task_id in x_points:
chart.add_serie(
name=task_id,
x=x_points[task_id],
y=scale_time_units(y_points[task_id], y_unit),
)
cum_chart.add_serie(
name=task_id,
x=x_points[task_id],
y=scale_time_units(cumulative_y[task_id], cum_y_unit),
)
dates = sorted({ti.execution_date for ti in task_instances})
max_date = max(ti.execution_date for ti in task_instances) if dates else None
session.commit()
form = DateTimeWithNumRunsForm(
data={
'base_date': max_date or timezone.utcnow(),
'num_runs': num_runs,
}
)
chart.buildcontent()
cum_chart.buildcontent()
s_index = cum_chart.htmlcontent.rfind('});')
cum_chart.htmlcontent = (
cum_chart.htmlcontent[:s_index]
+ "$( document ).trigger('chartload')"
+ cum_chart.htmlcontent[s_index:]
)
return self.render_template(
'airflow/duration_chart.html',
dag=dag,
root=root,
form=form,
chart=Markup(chart.htmlcontent),
cum_chart=Markup(cum_chart.htmlcontent),
dag_model=dag_model,
)
@expose('/tries')
@auth.has_access(
[
(permissions.ACTION_CAN_READ, permissions.RESOURCE_DAG),
(permissions.ACTION_CAN_READ, permissions.RESOURCE_TASK_INSTANCE),
]
)
@action_logging
@provide_session
def tries(self, session=None):
"""Shows all tries."""
default_dag_run = conf.getint('webserver', 'default_dag_run_display_number')
dag_id = request.args.get('dag_id')
dag = current_app.dag_bag.get_dag(dag_id)
dag_model = DagModel.get_dagmodel(dag_id)
base_date = request.args.get('base_date')
num_runs = request.args.get('num_runs', default=default_dag_run, type=int)
if base_date:
base_date = timezone.parse(base_date)
else:
base_date = dag.get_latest_execution_date() or timezone.utcnow()
wwwutils.check_import_errors(dag.fileloc, session)
root = request.args.get('root')
if root:
dag = dag.partial_subset(task_ids_or_regex=root, include_upstream=True, include_downstream=False)
chart_height = wwwutils.get_chart_height(dag)
chart = nvd3.lineChart(
name="lineChart",
x_is_date=True,
y_axis_format='d',
height=chart_height,
chart_attr=self.line_chart_attr,
)
tis = dag.get_task_instances_before(base_date, num_runs, session=session)
for task in dag.tasks:
y_points = []
x_points = []
for ti in tis:
dttm = wwwutils.epoch(ti.execution_date)
x_points.append(dttm)
# y value should reflect completed tries to have a 0 baseline.
y_points.append(ti.prev_attempted_tries)
if x_points:
chart.add_serie(name=task.task_id, x=x_points, y=y_points)
tries = sorted({ti.try_number for ti in tis})
max_date = max(ti.execution_date for ti in tis) if tries else None
chart.create_y_axis('yAxis', format='.02f', custom_format=False, label='Tries')
chart.axislist['yAxis']['axisLabelDistance'] = '-15'
session.commit()
form = DateTimeWithNumRunsForm(
data={
'base_date': max_date or timezone.utcnow(),
'num_runs': num_runs,
}
)
chart.buildcontent()
return self.render_template(
'airflow/chart.html',
dag=dag,
root=root,
form=form,
chart=Markup(chart.htmlcontent),
tab_title='Tries',
dag_model=dag_model,
)
@expose('/landing_times')
@auth.has_access(
[
(permissions.ACTION_CAN_READ, permissions.RESOURCE_DAG),
(permissions.ACTION_CAN_READ, permissions.RESOURCE_TASK_INSTANCE),
]
)
@action_logging
@provide_session
def landing_times(self, session=None):
"""Shows landing times."""
default_dag_run = conf.getint('webserver', 'default_dag_run_display_number')
dag_id = request.args.get('dag_id')
dag: DAG = current_app.dag_bag.get_dag(dag_id)
dag_model = DagModel.get_dagmodel(dag_id)
base_date = request.args.get('base_date')
num_runs = request.args.get('num_runs', default=default_dag_run, type=int)
if base_date:
base_date = timezone.parse(base_date)
else:
base_date = dag.get_latest_execution_date() or timezone.utcnow()
wwwutils.check_import_errors(dag.fileloc, session)
root = request.args.get('root')
if root:
dag = dag.partial_subset(task_ids_or_regex=root, include_upstream=True, include_downstream=False)
tis = dag.get_task_instances_before(base_date, num_runs, session=session)
chart_height = wwwutils.get_chart_height(dag)
chart = nvd3.lineChart(
name="lineChart", x_is_date=True, height=chart_height, chart_attr=self.line_chart_attr
)
y_points = {}
x_points = {}
for task in dag.tasks:
task_id = task.task_id
y_points[task_id] = []
x_points[task_id] = []
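            # Landing time = task end_date minus the end of the run's data interval,
            # i.e. how long after the covered period the task actually finished.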
for ti in tis:
ts = dag.get_run_data_interval(ti.dag_run).end
if ti.end_date:
dttm = wwwutils.epoch(ti.execution_date)
secs = (ti.end_date - ts).total_seconds()
x_points[task_id].append(dttm)
y_points[task_id].append(secs)
# determine the most relevant time unit for the set of landing times
# for the DAG
y_unit = infer_time_unit([d for t in y_points.values() for d in t])
# update the y Axis to have the correct time units
chart.create_y_axis('yAxis', format='.02f', custom_format=False, label=f'Landing Time ({y_unit})')
chart.axislist['yAxis']['axisLabelDistance'] = '-15'
for task_id in x_points:
chart.add_serie(
name=task_id,
x=x_points[task_id],
y=scale_time_units(y_points[task_id], y_unit),
)
dates = sorted({ti.execution_date for ti in tis})
max_date = max(ti.execution_date for ti in tis) if dates else None
session.commit()
form = DateTimeWithNumRunsForm(
data={
'base_date': max_date or timezone.utcnow(),
'num_runs': num_runs,
}
)
chart.buildcontent()
return self.render_template(
'airflow/chart.html',
dag=dag,
chart=Markup(chart.htmlcontent),
height=str(chart_height + 100) + "px",
root=root,
form=form,
tab_title='Landing times',
dag_model=dag_model,
)
@expose('/paused', methods=['POST'])
@auth.has_access(
[
(permissions.ACTION_CAN_EDIT, permissions.RESOURCE_DAG),
]
)
@action_logging
def paused(self):
"""Toggle paused."""
dag_id = request.args.get('dag_id')
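        # 'is_paused=false' in the query string pauses the DAG; any other value
        # unpauses it.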
is_paused = request.args.get('is_paused') == 'false'
models.DagModel.get_dagmodel(dag_id).set_is_paused(is_paused=is_paused)
return "OK"
@expose('/gantt')
@auth.has_access(
[
(permissions.ACTION_CAN_READ, permissions.RESOURCE_DAG),
(permissions.ACTION_CAN_READ, permissions.RESOURCE_TASK_INSTANCE),
]
)
@action_logging
@provide_session
def gantt(self, session=None):
"""Show GANTT chart."""
dag_id = request.args.get('dag_id')
dag = current_app.dag_bag.get_dag(dag_id)
dag_model = DagModel.get_dagmodel(dag_id)
root = request.args.get('root')
if root:
dag = dag.partial_subset(task_ids_or_regex=root, include_upstream=True, include_downstream=False)
wwwutils.check_import_errors(dag.fileloc, session)
dt_nr_dr_data = get_date_time_num_runs_dag_runs_form_data(request, session, dag)
dttm = dt_nr_dr_data['dttm']
form = DateTimeWithNumRunsWithDagRunsForm(data=dt_nr_dr_data)
form.execution_date.choices = dt_nr_dr_data['dr_choices']
tis = (
session.query(TaskInstance)
.join(TaskInstance.dag_run)
.filter(
DagRun.execution_date == dttm,
TaskInstance.dag_id == dag_id,
TaskInstance.start_date.isnot(None),
TaskInstance.state.isnot(None),
)
.order_by(TaskInstance.start_date)
)
ti_fails = (
session.query(TaskFail)
.join(DagRun, DagRun.execution_date == TaskFail.execution_date)
.filter(DagRun.execution_date == dttm, TaskFail.dag_id == dag_id)
)
tasks = []
for ti in tis:
# prev_attempted_tries will reflect the currently running try_number
# or the try_number of the last complete run
# https://issues.apache.org/jira/browse/AIRFLOW-2143
try_count = ti.prev_attempted_tries if ti.prev_attempted_tries != 0 else ti.try_number
task_dict = alchemy_to_dict(ti)
task_dict['end_date'] = task_dict['end_date'] or timezone.utcnow()
task_dict['extraLinks'] = dag.get_task(ti.task_id).extra_links
task_dict['try_number'] = try_count
tasks.append(task_dict)
tf_count = 0
try_count = 1
prev_task_id = ""
for failed_task_instance in ti_fails:
if tf_count != 0 and failed_task_instance.task_id == prev_task_id:
try_count += 1
else:
try_count = 1
prev_task_id = failed_task_instance.task_id
tf_count += 1
task = dag.get_task(failed_task_instance.task_id)
task_dict = alchemy_to_dict(failed_task_instance)
end_date = task_dict['end_date'] or timezone.utcnow()
task_dict['end_date'] = end_date
task_dict['start_date'] = task_dict['start_date'] or end_date
task_dict['state'] = State.FAILED
task_dict['operator'] = task.task_type
task_dict['try_number'] = try_count
task_dict['extraLinks'] = task.extra_links
tasks.append(task_dict)
task_names = [ti.task_id for ti in tis]
data = {
'taskNames': task_names,
'tasks': tasks,
'height': len(task_names) * 25 + 25,
}
session.commit()
return self.render_template(
'airflow/gantt.html',
dag=dag,
execution_date=dttm.isoformat(),
form=form,
data=data,
base_date='',
root=root,
dag_model=dag_model,
)
@expose('/extra_links')
@auth.has_access(
[
(permissions.ACTION_CAN_READ, permissions.RESOURCE_DAG),
(permissions.ACTION_CAN_READ, permissions.RESOURCE_TASK_INSTANCE),
]
)
@action_logging
def extra_links(self):
"""
A restful endpoint that returns external links for a given Operator
It queries the operator that sent the request for the links it wishes
to provide for a given external link name.
API: GET
Args: dag_id: The id of the dag containing the task in question
task_id: The id of the task in question
execution_date: The date of execution of the task
link_name: The name of the link reference to find the actual URL for
Returns:
200: {url: <url of link>, error: None} - returned when there was no problem
finding the URL
404: {url: None, error: <error message>} - returned when the operator does
not return a URL
"""
dag_id = request.args.get('dag_id')
task_id = request.args.get('task_id')
execution_date = request.args.get('execution_date')
link_name = request.args.get('link_name')
dttm = timezone.parse(execution_date)
dag = current_app.dag_bag.get_dag(dag_id)
if not dag or task_id not in dag.task_ids:
response = jsonify(
{
'url': None,
'error': f"can't find dag {dag} or task_id {task_id}",
}
)
response.status_code = 404
return response
task = dag.get_task(task_id)
try:
url = task.get_extra_links(dttm, link_name)
except ValueError as err:
response = jsonify({'url': None, 'error': str(err)})
response.status_code = 404
return response
if url:
response = jsonify({'error': None, 'url': url})
response.status_code = 200
return response
else:
response = jsonify({'url': None, 'error': f'No URL found for {link_name}'})
response.status_code = 404
return response
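# Illustrative client-side sketch (not part of the original module): how the endpoint
# above might be called. The host, dag_id, task_id and link_name are made-up placeholders.
#
#   import requests
#   resp = requests.get(
#       "http://localhost:8080/extra_links",
#       params={
#           "dag_id": "example_dag",
#           "task_id": "example_task",
#           "execution_date": "2021-01-01T00:00:00+00:00",
#           "link_name": "some_link",
#       },
#   )
#   # 200 -> {"url": "<resolved link>", "error": None}
#   # 404 -> {"url": None, "error": "<reason>"}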
@expose('/object/task_instances')
@auth.has_access(
[
(permissions.ACTION_CAN_READ, permissions.RESOURCE_DAG),
(permissions.ACTION_CAN_READ, permissions.RESOURCE_TASK_INSTANCE),
]
)
@action_logging
def task_instances(self):
"""Shows task instances."""
dag_id = request.args.get('dag_id')
dag = current_app.dag_bag.get_dag(dag_id)
dttm = request.args.get('execution_date')
if dttm:
dttm = timezone.parse(dttm)
else:
return "Error: Invalid execution_date"
task_instances = {ti.task_id: alchemy_to_dict(ti) for ti in dag.get_task_instances(dttm, dttm)}
return json.dumps(task_instances, cls=utils_json.AirflowJsonEncoder)
@expose('/object/tree_data')
@auth.has_access(
[
(permissions.ACTION_CAN_READ, permissions.RESOURCE_DAG),
(permissions.ACTION_CAN_READ, permissions.RESOURCE_TASK_INSTANCE),
]
)
@action_logging
def tree_data(self):
"""Returns tree data"""
dag_id = request.args.get('dag_id')
dag = current_app.dag_bag.get_dag(dag_id)
if not dag:
response = jsonify({'error': f"can't find dag {dag_id}"})
response.status_code = 404
return response
root = request.args.get('root')
if root:
dag = dag.partial_subset(task_ids_or_regex=root, include_downstream=False, include_upstream=True)
num_runs = request.args.get('num_runs', type=int)
if num_runs is None:
num_runs = conf.getint('webserver', 'default_dag_run_display_number')
try:
base_date = timezone.parse(request.args["base_date"])
except (KeyError, ValueError):
base_date = dag.get_latest_execution_date() or timezone.utcnow()
with create_session() as session:
dag_runs = (
session.query(DagRun)
.filter(DagRun.dag_id == dag.dag_id, DagRun.execution_date <= base_date)
.order_by(DagRun.execution_date.desc())
.limit(num_runs)
.all()
)
dag_runs = {dr.execution_date: alchemy_to_dict(dr) for dr in dag_runs}
tree_data = self._get_tree_data(dag_runs, dag, base_date, session=session)
# avoid spaces to reduce payload size
return htmlsafe_json_dumps(tree_data, separators=(',', ':'))
@expose('/robots.txt')
@action_logging
def robots(self):
"""
Returns a robots.txt file for blocking certain search engine crawlers. This mitigates some
of the risk associated with exposing Airflow to the public internet, however it does not
address the real security risks associated with such a deployment.
"""
return send_from_directory(current_app.static_folder, 'robots.txt')
class ConfigurationView(AirflowBaseView):
"""View to show Airflow Configurations"""
default_view = 'conf'
class_permission_name = permissions.RESOURCE_CONFIG
base_permissions = [
permissions.ACTION_CAN_READ,
permissions.ACTION_CAN_ACCESS_MENU,
]
@expose('/configuration')
@auth.has_access(
[
(permissions.ACTION_CAN_READ, permissions.RESOURCE_CONFIG),
]
)
def conf(self):
"""Shows configuration."""
raw = request.args.get('raw') == "true"
title = "Airflow Configuration"
subtitle = AIRFLOW_CONFIG
# Don't show config when expose_config variable is False in airflow config
if conf.getboolean("webserver", "expose_config"):
with open(AIRFLOW_CONFIG) as file:
config = file.read()
table = [
(section, key, value, source)
for section, parameters in conf.as_dict(True, True).items()
for key, (value, source) in parameters.items()
]
else:
config = (
"# Your Airflow administrator chose not to expose the "
"configuration, most likely for security reasons."
)
table = None
if raw:
return Response(response=config, status=200, mimetype="application/text")
else:
code_html = Markup(
highlight(
config,
lexers.IniLexer(), # Lexer call
HtmlFormatter(noclasses=True),
)
)
return self.render_template(
'airflow/config.html',
pre_subtitle=settings.HEADER + " v" + airflow.__version__,
code_html=code_html,
title=title,
subtitle=subtitle,
table=table,
)
class RedocView(AirflowBaseView):
"""Redoc Open API documentation"""
default_view = 'redoc'
@expose('/redoc')
def redoc(self):
"""Redoc API documentation."""
openapi_spec_url = url_for("/api/v1./api/v1_openapi_yaml")
return self.render_template('airflow/redoc.html', openapi_spec_url=openapi_spec_url)
######################################################################################
# ModelViews
######################################################################################
class DagFilter(BaseFilter):
"""Filter using DagIDs"""
def apply(self, query, func):
if current_app.appbuilder.sm.has_all_dags_access():
return query
filter_dag_ids = current_app.appbuilder.sm.get_accessible_dag_ids(g.user)
return query.filter(self.model.dag_id.in_(filter_dag_ids))
class DagEditFilter(BaseFilter):
"""Filter using DagIDs"""
def apply(self, query, func): # pylint: disable=redefined-outer-name,unused-argument
filter_dag_ids = current_app.appbuilder.sm.get_editable_dag_ids(g.user)
return query.filter(self.model.dag_id.in_(filter_dag_ids))
class AirflowModelView(ModelView):
"""Airflow Mode View."""
list_widget = AirflowModelListWidget
page_size = PAGE_SIZE
CustomSQLAInterface = wwwutils.CustomSQLAInterface
class AirflowPrivilegeVerifierModelView(AirflowModelView):
"""
This ModelView prevents ability to pass primary keys of objects relating to DAGs you shouldn't be able to
edit. This only holds for the add, update and delete operations.
You will still need to use the `action_has_dag_edit_access()` for actions.
"""
@staticmethod
def validate_dag_edit_access(item: Union[DagRun, TaskInstance]):
"""Validates whether the user has 'can_edit' access for this specific DAG."""
if not current_app.appbuilder.sm.can_edit_dag(item.dag_id):
raise AirflowException(f"Access denied for dag_id {item.dag_id}")
def pre_add(self, item: Union[DagRun, TaskInstance]):
self.validate_dag_edit_access(item)
def pre_update(self, item: Union[DagRun, TaskInstance]):
self.validate_dag_edit_access(item)
def pre_delete(self, item: Union[DagRun, TaskInstance]):
self.validate_dag_edit_access(item)
def post_add_redirect(self): # Required to prevent redirect loop
return redirect(self.get_default_url())
def post_edit_redirect(self): # Required to prevent redirect loop
return redirect(self.get_default_url())
def post_delete_redirect(self): # Required to prevent redirect loop
return redirect(self.get_default_url())
def action_has_dag_edit_access(action_func: Callable) -> Callable:
"""Decorator for actions which verifies you have DAG edit access on the given tis/drs."""
@wraps(action_func)
def check_dag_edit_acl_for_actions(
self,
items: Optional[Union[List[TaskInstance], List[DagRun], TaskInstance, DagRun]],
*args,
**kwargs,
) -> None:
if items is None:
dag_ids: Set[str] = set()
elif isinstance(items, list):
dag_ids = {item.dag_id for item in items if item is not None}
elif isinstance(items, TaskInstance) or isinstance(items, DagRun):
dag_ids = {items.dag_id}
else:
raise ValueError(
"Was expecting the first argument of the action to be of type "
"Optional[Union[List[TaskInstance], List[DagRun], TaskInstance, DagRun]]."
f"Was of type: {type(items)}"
)
for dag_id in dag_ids:
if not current_app.appbuilder.sm.can_edit_dag(dag_id):
flash(f"Access denied for dag_id {dag_id}", "danger")
logging.warning("User %s tried to modify %s without having access.", g.user.username, dag_id)
return redirect(self.get_default_url())
return action_func(self, items, *args, **kwargs)
return check_dag_edit_acl_for_actions
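# Illustrative usage sketch (assumed, mirroring the model views defined below): the
# decorator wraps a Flask-AppBuilder @action so the bulk operation is refused when the
# user lacks edit access on any DAG referenced by the selected rows.
#
#   @action('muldelete', 'Delete', 'Are you sure?', single=False)
#   @action_has_dag_edit_access
#   def action_muldelete(self, items):
#       self.datamodel.delete_all(items)
#       return redirect(self.get_redirect())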
class SlaMissModelView(AirflowModelView):
"""View to show SlaMiss table"""
route_base = '/slamiss'
datamodel = AirflowModelView.CustomSQLAInterface(SlaMiss) # type: ignore
class_permission_name = permissions.RESOURCE_SLA_MISS
method_permission_name = {
'list': 'read',
}
base_permissions = [
permissions.ACTION_CAN_READ,
permissions.ACTION_CAN_ACCESS_MENU,
]
list_columns = ['dag_id', 'task_id', 'execution_date', 'email_sent', 'timestamp']
add_columns = ['dag_id', 'task_id', 'execution_date', 'email_sent', 'timestamp']
edit_columns = ['dag_id', 'task_id', 'execution_date', 'email_sent', 'timestamp']
search_columns = ['dag_id', 'task_id', 'email_sent', 'timestamp', 'execution_date']
base_order = ('execution_date', 'desc')
base_filters = [['dag_id', DagFilter, lambda: []]]
formatters_columns = {
'task_id': wwwutils.task_instance_link,
'execution_date': wwwutils.datetime_f('execution_date'),
'timestamp': wwwutils.datetime_f('timestamp'),
'dag_id': wwwutils.dag_link,
}
class XComModelView(AirflowModelView):
"""View to show records from XCom table"""
route_base = '/xcom'
list_title = 'List XComs'
datamodel = AirflowModelView.CustomSQLAInterface(XCom)
class_permission_name = permissions.RESOURCE_XCOM
method_permission_name = {
'list': 'read',
'delete': 'delete',
'action_muldelete': 'delete',
}
base_permissions = [
permissions.ACTION_CAN_CREATE,
permissions.ACTION_CAN_READ,
permissions.ACTION_CAN_DELETE,
permissions.ACTION_CAN_ACCESS_MENU,
]
search_columns = ['key', 'value', 'timestamp', 'execution_date', 'task_id', 'dag_id']
list_columns = ['key', 'value', 'timestamp', 'execution_date', 'task_id', 'dag_id']
base_order = ('execution_date', 'desc')
base_filters = [['dag_id', DagFilter, lambda: []]]
formatters_columns = {
'task_id': wwwutils.task_instance_link,
'execution_date': wwwutils.datetime_f('execution_date'),
'timestamp': wwwutils.datetime_f('timestamp'),
'dag_id': wwwutils.dag_link,
}
@action('muldelete', 'Delete', "Are you sure you want to delete selected records?", single=False)
def action_muldelete(self, items):
"""Multiple delete action."""
self.datamodel.delete_all(items)
self.update_redirect()
return redirect(self.get_redirect())
def pre_add(self, item):
"""Pre add hook."""
item.execution_date = timezone.make_aware(item.execution_date)
item.value = XCom.serialize_value(item.value)
def pre_update(self, item):
"""Pre update hook."""
item.execution_date = timezone.make_aware(item.execution_date)
item.value = XCom.serialize_value(item.value)
def lazy_add_provider_discovered_options_to_connection_form():
"""Adds provider-discovered connection parameters as late as possible"""
def _get_connection_types() -> List[Tuple[str, str]]:
"""Returns connection types available."""
_connection_types = [
('fs', 'File (path)'),
('mesos_framework-id', 'Mesos Framework ID'),
('email', 'Email'),
]
providers_manager = ProvidersManager()
for connection_type, provider_info in providers_manager.hooks.items():
if provider_info:
_connection_types.append((connection_type, provider_info.hook_name))
return _connection_types
ConnectionForm.conn_type = SelectField(
lazy_gettext('Conn Type'),
choices=sorted(_get_connection_types(), key=itemgetter(1)),
widget=Select2Widget(),
validators=[InputRequired()],
description="""
Conn Type missing?
Make sure you've installed the corresponding Airflow Provider Package.
""",
)
for key, value in ProvidersManager().connection_form_widgets.items():
setattr(ConnectionForm, key, value.field)
# Used to store a dictionary of field behaviours used to dynamically change available
# fields in ConnectionForm based on type of connection chosen
# See airflow.hooks.base_hook.DiscoverableHook for details on how to customize your Hooks.
# those field behaviours are rendered as scripts in the conn_create.html and conn_edit.html templates
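# Illustrative sketch of the field_behaviours payload shape (assumed from typical
# provider get_ui_field_behaviour() return values, not taken from this file):
#
#   {
#       "postgres": {
#           "hidden_fields": ["extra"],
#           "relabeling": {"login": "Username"},
#       }
#   }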
class ConnectionFormWidget(FormWidget):
"""Form widget used to display connection"""
field_behaviours = json.dumps(ProvidersManager().field_behaviours)
class ConnectionModelView(AirflowModelView):
"""View to show records from Connections table"""
route_base = '/connection'
datamodel = AirflowModelView.CustomSQLAInterface(Connection) # type: ignore
class_permission_name = permissions.RESOURCE_CONNECTION
method_permission_name = {
'add': 'create',
'list': 'read',
'edit': 'edit',
'delete': 'delete',
'action_muldelete': 'delete',
'action_mulduplicate': 'create',
}
base_permissions = [
permissions.ACTION_CAN_CREATE,
permissions.ACTION_CAN_READ,
permissions.ACTION_CAN_EDIT,
permissions.ACTION_CAN_DELETE,
permissions.ACTION_CAN_ACCESS_MENU,
]
extra_fields = list(ProvidersManager().connection_form_widgets.keys())
list_columns = [
'conn_id',
'conn_type',
'description',
'host',
'port',
'is_encrypted',
'is_extra_encrypted',
]
add_columns = edit_columns = [
'conn_id',
'conn_type',
'description',
'host',
'schema',
'login',
'password',
'port',
'extra',
] + extra_fields
add_form = edit_form = ConnectionForm
add_template = 'airflow/conn_create.html'
edit_template = 'airflow/conn_edit.html'
add_widget = ConnectionFormWidget
edit_widget = ConnectionFormWidget
base_order = ('conn_id', 'asc')
@action('muldelete', 'Delete', 'Are you sure you want to delete selected records?', single=False)
@auth.has_access(
[
(permissions.ACTION_CAN_EDIT, permissions.RESOURCE_DAG),
]
)
def action_muldelete(self, items):
"""Multiple delete."""
self.datamodel.delete_all(items)
self.update_redirect()
return redirect(self.get_redirect())
@action(
'mulduplicate',
'Duplicate',
'Are you sure you want to duplicate the selected connections?',
single=False,
)
@provide_session
@auth.has_access(
[
(permissions.ACTION_CAN_CREATE, permissions.RESOURCE_CONNECTION),
(permissions.ACTION_CAN_READ, permissions.RESOURCE_CONNECTION),
]
)
def action_mulduplicate(self, connections, session=None):
"""Duplicate Multiple connections"""
for selected_conn in connections:
new_conn_id = selected_conn.conn_id
match = re.search(r"_copy(\d+)$", selected_conn.conn_id)
if match:
conn_id_prefix = selected_conn.conn_id[: match.start()]
new_conn_id = f"{conn_id_prefix}_copy{int(match.group(1)) + 1}"
else:
new_conn_id += '_copy1'
dup_conn = Connection(
new_conn_id,
selected_conn.conn_type,
selected_conn.description,
selected_conn.host,
selected_conn.login,
selected_conn.password,
selected_conn.schema,
selected_conn.port,
selected_conn.extra,
)
try:
session.add(dup_conn)
session.commit()
flash(f"Connection {new_conn_id} added successfully.", "success")
except IntegrityError:
flash(
f"Connection {new_conn_id} can't be added. Integrity error, probably unique constraint.",
"warning",
)
session.rollback()
self.update_redirect()
return redirect(self.get_redirect())
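# Worked example of the copy-naming rule implemented above (connection ids are made up):
#   "my_conn"       -> "my_conn_copy1"
#   "my_conn_copy1" -> "my_conn_copy2"
#   "my_conn_copy9" -> "my_conn_copy10"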
def process_form(self, form, is_created):
"""Process form data."""
conn_type = form.data['conn_type']
conn_id = form.data["conn_id"]
extra = {
key: form.data[key]
for key in self.extra_fields
if key in form.data and key.startswith(f"extra__{conn_type}__")
}
# If parameters are added to the classic `Extra` field, include these values along with
# custom-field extras.
extra_conn_params = form.data.get("extra")
if extra_conn_params:
try:
extra.update(json.loads(extra_conn_params))
except (JSONDecodeError, TypeError):
flash(
Markup(
"<p>The <em>Extra</em> connection field contained an invalid value for Conn ID: "
f"<q>{conn_id}</q>.</p>"
"<p>If connection parameters need to be added to <em>Extra</em>, "
"please make sure they are in the form of a single, valid JSON object.</p><br>"
"The following <em>Extra</em> parameters were <b>not</b> added to the connection:<br>"
f"{extra_conn_params}",
),
category="error",
)
if extra.keys():
form.extra.data = json.dumps(extra)
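# Illustrative sketch of the merge performed above (connection type and field names are
# hypothetical):
#   form.data = {
#       "conn_type": "myconn",
#       "extra__myconn__token": "abc",   # provider-defined custom field
#       "extra": '{"timeout": 30}',      # classic Extra JSON
#   }
#   # resulting form.extra.data:
#   #   '{"extra__myconn__token": "abc", "timeout": 30}'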
def prefill_form(self, form, pk):
"""Prefill the form."""
try:
extra = form.data.get('extra')
if extra is None:
extra_dictionary = {}
else:
extra_dictionary = json.loads(extra)
except JSONDecodeError:
extra_dictionary = {}
if not isinstance(extra_dictionary, dict):
logging.warning('extra field for %s is not a dictionary', form.data.get('conn_id', '<unknown>'))
return
for field in self.extra_fields:
value = extra_dictionary.get(field, '')
if value:
field = getattr(form, field)
field.data = value
class PluginView(AirflowBaseView):
"""View to show Airflow Plugins"""
default_view = 'list'
class_permission_name = permissions.RESOURCE_PLUGIN
method_permission_name = {
'list': 'read',
}
base_permissions = [
permissions.ACTION_CAN_READ,
permissions.ACTION_CAN_ACCESS_MENU,
]
plugins_attributes_to_dump = [
"hooks",
"executors",
"macros",
"admin_views",
"flask_blueprints",
"menu_links",
"appbuilder_views",
"appbuilder_menu_items",
"global_operator_extra_links",
"operator_extra_links",
"source",
]
@expose('/plugin')
@auth.has_access(
[
(permissions.ACTION_CAN_READ, permissions.RESOURCE_PLUGIN),
]
)
def list(self):
"""List loaded plugins."""
plugins_manager.ensure_plugins_loaded()
plugins_manager.integrate_executor_plugins()
plugins_manager.initialize_extra_operators_links_plugins()
plugins_manager.initialize_web_ui_plugins()
plugins = []
for plugin_no, plugin in enumerate(plugins_manager.plugins, 1):
plugin_data = {
'plugin_no': plugin_no,
'plugin_name': plugin.name,
'attrs': {},
}
for attr_name in self.plugins_attributes_to_dump:
attr_value = getattr(plugin, attr_name)
plugin_data['attrs'][attr_name] = attr_value
plugins.append(plugin_data)
title = "Airflow Plugins"
doc_url = get_docs_url("plugins.html")
return self.render_template(
'airflow/plugin.html',
plugins=plugins,
title=title,
doc_url=doc_url,
)
class ProviderView(AirflowBaseView):
"""View to show Airflow Providers"""
default_view = 'list'
class_permission_name = permissions.RESOURCE_PROVIDER
method_permission_name = {
'list': 'read',
}
base_permissions = [
permissions.ACTION_CAN_READ,
permissions.ACTION_CAN_ACCESS_MENU,
]
@expose('/provider')
@auth.has_access(
[
(permissions.ACTION_CAN_READ, permissions.RESOURCE_PROVIDER),
]
)
def list(self):
"""List providers."""
providers_manager = ProvidersManager()
providers = []
for pi in providers_manager.providers.values():
provider_info = pi[1]
provider_data = {
"package_name": provider_info["package-name"],
"description": self._clean_description(provider_info["description"]),
"version": pi[0],
"documentation_url": get_doc_url_for_provider(provider_info["package-name"], pi[0]),
}
providers.append(provider_data)
title = "Providers"
doc_url = get_docs_url("apache-airflow-providers/index.html")
return self.render_template(
'airflow/providers.html',
providers=providers,
title=title,
doc_url=doc_url,
)
def _clean_description(self, description):
def _build_link(match_obj):
text = match_obj.group(1)
url = match_obj.group(2)
return markupsafe.Markup(f'<a href="{url}">{text}</a>')
cd = markupsafe.escape(description)
cd = re.sub(r"`(.*)[\s+]+<(.*)>`__", _build_link, cd)
cd = re.sub(r"\n", r"<br>", cd)
return markupsafe.Markup(cd)
class PoolModelView(AirflowModelView):
"""View to show records from Pool table"""
route_base = '/pool'
datamodel = AirflowModelView.CustomSQLAInterface(models.Pool) # type: ignore
class_permission_name = permissions.RESOURCE_POOL
method_permission_name = {
'add': 'create',
'list': 'read',
'edit': 'edit',
'delete': 'delete',
'action_muldelete': 'delete',
}
base_permissions = [
permissions.ACTION_CAN_CREATE,
permissions.ACTION_CAN_READ,
permissions.ACTION_CAN_EDIT,
permissions.ACTION_CAN_DELETE,
permissions.ACTION_CAN_ACCESS_MENU,
]
list_columns = ['pool', 'slots', 'running_slots', 'queued_slots']
add_columns = ['pool', 'slots', 'description']
edit_columns = ['pool', 'slots', 'description']
base_order = ('pool', 'asc')
@action('muldelete', 'Delete', 'Are you sure you want to delete selected records?', single=False)
def action_muldelete(self, items):
"""Multiple delete."""
if any(item.pool == models.Pool.DEFAULT_POOL_NAME for item in items):
flash("default_pool cannot be deleted", 'error')
self.update_redirect()
return redirect(self.get_redirect())
self.datamodel.delete_all(items)
self.update_redirect()
return redirect(self.get_redirect())
def pool_link(self):
"""Pool link rendering."""
pool_id = self.get('pool')
if pool_id is not None:
url = url_for('TaskInstanceModelView.list', _flt_3_pool=pool_id)
return Markup("<a href='{url}'>{pool_id}</a>").format(url=url, pool_id=pool_id)
else:
return Markup('<span class="label label-danger">Invalid</span>')
def frunning_slots(self):
"""Running slots rendering."""
pool_id = self.get('pool')
running_slots = self.get('running_slots')
if pool_id is not None and running_slots is not None:
url = url_for('TaskInstanceModelView.list', _flt_3_pool=pool_id, _flt_3_state='running')
return Markup("<a href='{url}'>{running_slots}</a>").format(url=url, running_slots=running_slots)
else:
return Markup('<span class="label label-danger">Invalid</span>')
def fqueued_slots(self):
"""Queued slots rendering."""
pool_id = self.get('pool')
queued_slots = self.get('queued_slots')
if pool_id is not None and queued_slots is not None:
url = url_for('TaskInstanceModelView.list', _flt_3_pool=pool_id, _flt_3_state='queued')
return Markup("<a href='{url}'>{queued_slots}</a>").format(url=url, queued_slots=queued_slots)
else:
return Markup('<span class="label label-danger">Invalid</span>')
formatters_columns = {'pool': pool_link, 'running_slots': frunning_slots, 'queued_slots': fqueued_slots}
validators_columns = {'pool': [validators.DataRequired()], 'slots': [validators.NumberRange(min=-1)]}
def _can_create_variable() -> bool:
return current_app.appbuilder.sm.has_access(permissions.ACTION_CAN_CREATE, permissions.RESOURCE_VARIABLE)
class VariableModelView(AirflowModelView):
"""View to show records from Variable table"""
route_base = '/variable'
list_template = 'airflow/variable_list.html'
edit_template = 'airflow/variable_edit.html'
datamodel = AirflowModelView.CustomSQLAInterface(models.Variable) # type: ignore
class_permission_name = permissions.RESOURCE_VARIABLE
method_permission_name = {
'add': 'create',
'list': 'read',
'edit': 'edit',
'delete': 'delete',
'action_muldelete': 'delete',
'action_varexport': 'read',
}
base_permissions = [
permissions.ACTION_CAN_CREATE,
permissions.ACTION_CAN_READ,
permissions.ACTION_CAN_EDIT,
permissions.ACTION_CAN_DELETE,
permissions.ACTION_CAN_ACCESS_MENU,
]
list_columns = ['key', 'val', 'description', 'is_encrypted']
add_columns = ['key', 'val', 'description']
edit_columns = ['key', 'val', 'description']
search_columns = ['key', 'val']
base_order = ('key', 'asc')
def hidden_field_formatter(self):
"""Formats hidden fields"""
key = self.get('key')
val = self.get('val')
if secrets_masker.should_hide_value_for_key(key):
return Markup('*' * 8)
if val:
return val
else:
return Markup('<span class="label label-danger">Invalid</span>')
formatters_columns = {
'val': hidden_field_formatter,
}
validators_columns = {'key': [validators.DataRequired()]}
def prefill_form(self, form, request_id):
if secrets_masker.should_hide_value_for_key(form.key.data):
form.val.data = '*' * 8
extra_args = {"can_create_variable": _can_create_variable}
@action('muldelete', 'Delete', 'Are you sure you want to delete selected records?', single=False)
def action_muldelete(self, items):
"""Multiple delete."""
self.datamodel.delete_all(items)
self.update_redirect()
return redirect(self.get_redirect())
@action('varexport', 'Export', '', single=False)
def action_varexport(self, items):
"""Export variables."""
var_dict = {}
decoder = json.JSONDecoder()
for var in items:
try:
val = decoder.decode(var.val)
except Exception:
val = var.val
var_dict[var.key] = val
response = make_response(json.dumps(var_dict, sort_keys=True, indent=4))
response.headers["Content-Disposition"] = "attachment; filename=variables.json"
response.headers["Content-Type"] = "application/json; charset=utf-8"
return response
@expose('/varimport', methods=["POST"])
@auth.has_access([(permissions.ACTION_CAN_CREATE, permissions.RESOURCE_VARIABLE)])
@action_logging
def varimport(self):
"""Import variables"""
try:
variable_dict = json.loads(request.files['file'].read())
except Exception:
self.update_redirect()
flash("Missing file or syntax error.", 'error')
return redirect(self.get_redirect())
else:
suc_count = fail_count = 0
for k, v in variable_dict.items():
try:
models.Variable.set(k, v, serialize_json=not isinstance(v, str))
except Exception as e:
logging.info('Variable import failed: %s', repr(e))
fail_count += 1
else:
suc_count += 1
flash(f"{suc_count} variable(s) successfully updated.")
if fail_count:
flash(f"{fail_count} variable(s) failed to be updated.", 'error')
self.update_redirect()
return redirect(self.get_redirect())
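# Illustrative sketch of the JSON document produced by action_varexport and accepted by
# varimport above (keys and values are made up):
#   {
#       "my_string_var": "hello",
#       "my_json_var": {"retries": 3, "pools": ["default_pool"]}
#   }
# Non-string values are stored with serialize_json=True, so they round-trip as JSON.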
class JobModelView(AirflowModelView):
"""View to show records from Job table"""
route_base = '/job'
datamodel = AirflowModelView.CustomSQLAInterface(BaseJob) # type: ignore
class_permission_name = permissions.RESOURCE_JOB
method_permission_name = {
'list': 'read',
}
base_permissions = [
permissions.ACTION_CAN_READ,
permissions.ACTION_CAN_ACCESS_MENU,
]
list_columns = [
'id',
'dag_id',
'state',
'job_type',
'start_date',
'end_date',
'latest_heartbeat',
'executor_class',
'hostname',
'unixname',
]
search_columns = [
'id',
'dag_id',
'state',
'job_type',
'start_date',
'end_date',
'latest_heartbeat',
'executor_class',
'hostname',
'unixname',
]
base_order = ('start_date', 'desc')
base_filters = [['dag_id', DagFilter, lambda: []]]
formatters_columns = {
'start_date': wwwutils.datetime_f('start_date'),
'end_date': wwwutils.datetime_f('end_date'),
'hostname': wwwutils.nobr_f('hostname'),
'state': wwwutils.state_f,
'latest_heartbeat': wwwutils.datetime_f('latest_heartbeat'),
}
class DagRunModelView(AirflowPrivilegeVerifierModelView):
"""View to show records from DagRun table"""
route_base = '/dagrun'
datamodel = AirflowModelView.CustomSQLAInterface(models.DagRun) # type: ignore
class_permission_name = permissions.RESOURCE_DAG_RUN
method_permission_name = {
'list': 'read',
'action_clear': 'delete',
'action_muldelete': 'delete',
'action_set_running': 'edit',
'action_set_failed': 'edit',
'action_set_success': 'edit',
}
base_permissions = [
permissions.ACTION_CAN_READ,
permissions.ACTION_CAN_EDIT,
permissions.ACTION_CAN_DELETE,
permissions.ACTION_CAN_ACCESS_MENU,
]
list_columns = [
'state',
'dag_id',
'execution_date',
'run_id',
'run_type',
'queued_at',
'start_date',
'end_date',
'external_trigger',
'conf',
]
search_columns = [
'state',
'dag_id',
'execution_date',
'run_id',
'run_type',
'start_date',
'end_date',
'external_trigger',
]
edit_columns = ['state', 'dag_id', 'execution_date', 'start_date', 'end_date', 'run_id', 'conf']
base_order = ('execution_date', 'desc')
base_filters = [['dag_id', DagEditFilter, lambda: []]]
edit_form = DagRunEditForm
formatters_columns = {
'execution_date': wwwutils.datetime_f('execution_date'),
'state': wwwutils.state_f,
'start_date': wwwutils.datetime_f('start_date'),
'end_date': wwwutils.datetime_f('end_date'),
'dag_id': wwwutils.dag_link,
'run_id': wwwutils.dag_run_link,
'conf': wwwutils.json_f('conf'),
}
@action('muldelete', "Delete", "Are you sure you want to delete selected records?", single=False)
@action_has_dag_edit_access
@provide_session
def action_muldelete(self, items, session=None):
"""Multiple delete."""
self.datamodel.delete_all(items)
self.update_redirect()
return redirect(self.get_redirect())
@action('set_running', "Set state to 'running'", '', single=False)
@action_has_dag_edit_access
@provide_session
def action_set_running(self, drs, session=None):
"""Set state to running."""
try:
count = 0
for dr in session.query(DagRun).filter(DagRun.id.in_([dagrun.id for dagrun in drs])).all():
count += 1
dr.start_date = timezone.utcnow()
dr.state = State.RUNNING
session.commit()
flash(f"{count} dag runs were set to running")
except Exception as ex:
flash(str(ex), 'error')
flash('Failed to set state', 'error')
return redirect(self.get_default_url())
@action(
'set_failed',
"Set state to 'failed'",
"All running task instances would also be marked as failed, are you sure?",
single=False,
)
@action_has_dag_edit_access
@provide_session
def action_set_failed(self, drs, session=None):
"""Set state to failed."""
try:
count = 0
altered_tis = []
for dr in session.query(DagRun).filter(DagRun.id.in_([dagrun.id for dagrun in drs])).all():
count += 1
altered_tis += set_dag_run_state_to_failed(
current_app.dag_bag.get_dag(dr.dag_id), dr.execution_date, commit=True, session=session
)
altered_ti_count = len(altered_tis)
flash(
"{count} dag runs and {altered_ti_count} task instances "
"were set to failed".format(count=count, altered_ti_count=altered_ti_count)
)
except Exception:
flash('Failed to set state', 'error')
return redirect(self.get_default_url())
@action(
'set_success',
"Set state to 'success'",
"All task instances would also be marked as success, are you sure?",
single=False,
)
@action_has_dag_edit_access
@provide_session
def action_set_success(self, drs, session=None):
"""Set state to success."""
try:
count = 0
altered_tis = []
for dr in session.query(DagRun).filter(DagRun.id.in_([dagrun.id for dagrun in drs])).all():
count += 1
altered_tis += set_dag_run_state_to_success(
current_app.dag_bag.get_dag(dr.dag_id), dr.execution_date, commit=True, session=session
)
altered_ti_count = len(altered_tis)
flash(
"{count} dag runs and {altered_ti_count} task instances "
"were set to success".format(count=count, altered_ti_count=altered_ti_count)
)
except Exception:
flash('Failed to set state', 'error')
return redirect(self.get_default_url())
@action('clear', "Clear the state", "All task instances would be cleared, are you sure?", single=False)
@action_has_dag_edit_access
@provide_session
def action_clear(self, drs, session=None):
"""Clears the state."""
try:
count = 0
cleared_ti_count = 0
dag_to_tis = {}
for dr in session.query(DagRun).filter(DagRun.id.in_([dagrun.id for dagrun in drs])).all():
count += 1
dag = current_app.dag_bag.get_dag(dr.dag_id)
tis_to_clear = dag_to_tis.setdefault(dag, [])
tis_to_clear += dr.get_task_instances()
for dag, tis in dag_to_tis.items():
cleared_ti_count += len(tis)
models.clear_task_instances(tis, session, dag=dag)
flash(f"{count} dag runs and {cleared_ti_count} task instances were cleared")
except Exception:
flash('Failed to clear state', 'error')
return redirect(self.get_default_url())
class LogModelView(AirflowModelView):
"""View to show records from Log table"""
route_base = '/log'
datamodel = AirflowModelView.CustomSQLAInterface(Log) # type:ignore
class_permission_name = permissions.RESOURCE_AUDIT_LOG
method_permission_name = {
'list': 'read',
}
base_permissions = [
permissions.ACTION_CAN_READ,
permissions.ACTION_CAN_ACCESS_MENU,
]
list_columns = ['id', 'dttm', 'dag_id', 'task_id', 'event', 'execution_date', 'owner', 'extra']
search_columns = ['dag_id', 'task_id', 'event', 'execution_date', 'owner', 'extra']
base_order = ('dttm', 'desc')
base_filters = [['dag_id', DagFilter, lambda: []]]
formatters_columns = {
'dttm': wwwutils.datetime_f('dttm'),
'execution_date': wwwutils.datetime_f('execution_date'),
'dag_id': wwwutils.dag_link,
}
class TaskRescheduleModelView(AirflowModelView):
"""View to show records from Task Reschedule table"""
route_base = '/taskreschedule'
datamodel = AirflowModelView.CustomSQLAInterface(models.TaskReschedule) # type: ignore
related_views = [DagRunModelView]
class_permission_name = permissions.RESOURCE_TASK_RESCHEDULE
method_permission_name = {
'list': 'read',
}
base_permissions = [
permissions.ACTION_CAN_READ,
permissions.ACTION_CAN_ACCESS_MENU,
]
list_columns = [
'id',
'dag_id',
'run_id',
'dag_run.execution_date',
'task_id',
'try_number',
'start_date',
'end_date',
'duration',
'reschedule_date',
]
label_columns = {
'dag_run.execution_date': 'Execution Date',
}
search_columns = [
'dag_id',
'task_id',
'run_id',
'execution_date',
'start_date',
'end_date',
'reschedule_date',
]
base_order = ('id', 'desc')
base_filters = [['dag_id', DagFilter, lambda: []]]
def duration_f(self):
"""Duration calculation."""
end_date = self.get('end_date')
duration = self.get('duration')
if end_date and duration:
return timedelta(seconds=duration)
return None
formatters_columns = {
'dag_id': wwwutils.dag_link,
'task_id': wwwutils.task_instance_link,
'start_date': wwwutils.datetime_f('start_date'),
'end_date': wwwutils.datetime_f('end_date'),
'dag_run.execution_date': wwwutils.datetime_f('dag_run.execution_date'),
'reschedule_date': wwwutils.datetime_f('reschedule_date'),
'duration': duration_f,
}
class TriggerModelView(AirflowModelView):
"""View to show records from Task Reschedule table"""
route_base = '/triggerview'
datamodel = AirflowModelView.CustomSQLAInterface(models.Trigger) # type: ignore
class_permission_name = permissions.RESOURCE_TRIGGER
method_permission_name = {
'list': 'read',
}
base_permissions = [
permissions.ACTION_CAN_READ,
permissions.ACTION_CAN_ACCESS_MENU,
]
list_columns = [
'id',
'classpath',
'created_date',
'triggerer_id',
]
search_columns = [
'id',
'classpath',
'created_date',
'triggerer_id',
]
# add_exclude_columns = ["kwargs"]
base_order = ('id', 'created_date')
formatters_columns = {
'created_date': wwwutils.datetime_f('created_date'),
}
class TaskInstanceModelView(AirflowPrivilegeVerifierModelView):
"""View to show records from TaskInstance table"""
route_base = '/taskinstance'
datamodel = AirflowModelView.CustomSQLAInterface(models.TaskInstance) # type: ignore
class_permission_name = permissions.RESOURCE_TASK_INSTANCE
method_permission_name = {
'list': 'read',
'action_clear': 'edit',
'action_set_running': 'edit',
'action_set_failed': 'edit',
'action_set_success': 'edit',
'action_set_retry': 'edit',
}
base_permissions = [
permissions.ACTION_CAN_CREATE,
permissions.ACTION_CAN_READ,
permissions.ACTION_CAN_EDIT,
permissions.ACTION_CAN_DELETE,
permissions.ACTION_CAN_ACCESS_MENU,
]
page_size = PAGE_SIZE
list_columns = [
'state',
'dag_id',
'task_id',
'run_id',
'dag_run.execution_date',
'operator',
'start_date',
'end_date',
'duration',
'job_id',
'hostname',
'unixname',
'priority_weight',
'queue',
'queued_dttm',
'try_number',
'pool',
'queued_by_job_id',
'external_executor_id',
'log_url',
]
order_columns = [
item for item in list_columns if item not in ['try_number', 'log_url', 'external_executor_id']
]
label_columns = {
'dag_run.execution_date': 'Execution Date',
}
search_columns = [
'state',
'dag_id',
'task_id',
'run_id',
'execution_date',
'hostname',
'queue',
'pool',
'operator',
'start_date',
'end_date',
'queued_dttm',
]
edit_columns = [
'state',
'start_date',
'end_date',
]
add_exclude_columns = ["next_method", "next_kwargs", "trigger_id"]
edit_form = TaskInstanceEditForm
base_order = ('job_id', 'asc')
base_filters = [['dag_id', DagEditFilter, lambda: []]]
def log_url_formatter(self):
"""Formats log URL."""
log_url = self.get('log_url')
return Markup(
'<a href="{log_url}"><span class="material-icons" aria-hidden="true">reorder</span></a>'
).format(log_url=log_url)
def duration_f(self):
"""Formats duration."""
end_date = self.get('end_date')
duration = self.get('duration')
if end_date and duration:
return timedelta(seconds=duration)
return None
formatters_columns = {
'log_url': log_url_formatter,
'task_id': wwwutils.task_instance_link,
'run_id': wwwutils.dag_run_link,
'hostname': wwwutils.nobr_f('hostname'),
'state': wwwutils.state_f,
'dag_run.execution_date': wwwutils.datetime_f('dag_run.execution_date'),
'start_date': wwwutils.datetime_f('start_date'),
'end_date': wwwutils.datetime_f('end_date'),
'queued_dttm': wwwutils.datetime_f('queued_dttm'),
'dag_id': wwwutils.dag_link,
'duration': duration_f,
}
@action(
'clear',
lazy_gettext('Clear'),
lazy_gettext(
'Are you sure you want to clear the state of the selected task'
' instance(s) and set their dagruns to the QUEUED state?'
),
single=False,
)
@action_has_dag_edit_access
@provide_session
def action_clear(self, task_instances, session=None):
"""Clears the action."""
try:
dag_to_tis = collections.defaultdict(list)
for ti in task_instances:
dag = current_app.dag_bag.get_dag(ti.dag_id)
dag_to_tis[dag].append(ti)
for dag, task_instances_list in dag_to_tis.items():
models.clear_task_instances(task_instances_list, session, dag=dag)
session.commit()
flash(f"{len(task_instances)} task instances have been cleared")
except Exception as e:
flash(f'Failed to clear task instances: "{e}"', 'error')
self.update_redirect()
return redirect(self.get_redirect())
@provide_session
def set_task_instance_state(self, tis, target_state, session=None):
"""Set task instance state."""
try:
count = len(tis)
for ti in tis:
ti.set_state(target_state, session)
session.commit()
flash(f"{count} task instances were set to '{target_state}'")
except Exception:
flash('Failed to set state', 'error')
@action('set_running', "Set state to 'running'", '', single=False)
@action_has_dag_edit_access
def action_set_running(self, tis):
"""Set state to 'running'"""
self.set_task_instance_state(tis, State.RUNNING)
self.update_redirect()
return redirect(self.get_redirect())
@action('set_failed', "Set state to 'failed'", '', single=False)
@action_has_dag_edit_access
def action_set_failed(self, tis):
"""Set state to 'failed'"""
self.set_task_instance_state(tis, State.FAILED)
self.update_redirect()
return redirect(self.get_redirect())
@action('set_success', "Set state to 'success'", '', single=False)
@action_has_dag_edit_access
def action_set_success(self, tis):
"""Set state to 'success'"""
self.set_task_instance_state(tis, State.SUCCESS)
self.update_redirect()
return redirect(self.get_redirect())
@action('set_retry', "Set state to 'up_for_retry'", '', single=False)
@action_has_dag_edit_access
def action_set_retry(self, tis):
"""Set state to 'up_for_retry'"""
self.set_task_instance_state(tis, State.UP_FOR_RETRY)
self.update_redirect()
return redirect(self.get_redirect())
class AutocompleteView(AirflowBaseView):
"""View to provide autocomplete results"""
@auth.has_access([(permissions.ACTION_CAN_READ, permissions.RESOURCE_DAG)])
@provide_session
@expose('/dagmodel/autocomplete')
def autocomplete(self, session=None):
"""Autocomplete."""
query = unquote(request.args.get('query', ''))
if not query:
return wwwutils.json_response([])
# Provide suggestions of dag_ids and owners
dag_ids_query = session.query(DagModel.dag_id.label('item')).filter(
~DagModel.is_subdag, DagModel.is_active, DagModel.dag_id.ilike('%' + query + '%')
)
owners_query = session.query(func.distinct(DagModel.owners).label('item')).filter(
~DagModel.is_subdag, DagModel.is_active, DagModel.owners.ilike('%' + query + '%')
)
# Hide DAGs if not showing status: "all"
status = flask_session.get(FILTER_STATUS_COOKIE)
if status == 'active':
dag_ids_query = dag_ids_query.filter(~DagModel.is_paused)
owners_query = owners_query.filter(~DagModel.is_paused)
elif status == 'paused':
dag_ids_query = dag_ids_query.filter(DagModel.is_paused)
owners_query = owners_query.filter(DagModel.is_paused)
filter_dag_ids = current_app.appbuilder.sm.get_accessible_dag_ids(g.user)
dag_ids_query = dag_ids_query.filter(DagModel.dag_id.in_(filter_dag_ids))
owners_query = owners_query.filter(DagModel.dag_id.in_(filter_dag_ids))
payload = [row[0] for row in dag_ids_query.union(owners_query).limit(10).all()]
return wwwutils.json_response(payload)
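# Illustrative request/response for the endpoint above (values are hypothetical):
#   GET /dagmodel/autocomplete?query=exam
#   -> ["example_bash_operator", "example_subdag", "example_owner"]
# i.e. a flat JSON list mixing matching dag_ids and owners, capped at 10 suggestions.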
class DagDependenciesView(AirflowBaseView):
"""View to show dependencies between DAGs"""
refresh_interval = timedelta(
seconds=conf.getint(
"webserver",
"dag_dependencies_refresh_interval",
fallback=conf.getint("scheduler", "dag_dir_list_interval"),
)
)
last_refresh = timezone.utcnow() - refresh_interval
nodes = []
edges = []
@expose('/dag-dependencies')
@auth.has_access(
[
(permissions.ACTION_CAN_READ, permissions.RESOURCE_DAG_DEPENDENCIES),
]
)
@gzipped
@action_logging
def list(self):
"""Display DAG dependencies"""
title = "DAG Dependencies"
if not self.nodes or not self.edges:
self._calculate_graph()
self.last_refresh = timezone.utcnow()
elif timezone.utcnow() > self.last_refresh + self.refresh_interval:
max_last_updated = SerializedDagModel.get_max_last_updated_datetime()
if max_last_updated is None or max_last_updated > self.last_refresh:
self._calculate_graph()
self.last_refresh = timezone.utcnow()
return self.render_template(
"airflow/dag_dependencies.html",
title=title,
nodes=self.nodes,
edges=self.edges,
last_refresh=self.last_refresh,
arrange=conf.get("webserver", "dag_orientation"),
width=request.args.get("width", "100%"),
height=request.args.get("height", "800"),
)
def _calculate_graph(self):
nodes = []
edges = []
for dag, dependencies in SerializedDagModel.get_dag_dependencies().items():
dag_node_id = f"dag:{dag}"
nodes.append(self._node_dict(dag_node_id, dag, "dag"))
for dep in dependencies:
nodes.append(self._node_dict(dep.node_id, dep.dependency_id, dep.dependency_type))
edges.extend(
[
{"u": f"dag:{dep.source}", "v": dep.node_id},
{"u": dep.node_id, "v": f"dag:{dep.target}"},
]
)
self.nodes = nodes
self.edges = edges
@staticmethod
def _node_dict(node_id, label, node_class):
return {
"id": node_id,
"value": {"label": label, "rx": 5, "ry": 5, "class": node_class},
}
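# Illustrative sketch of the graph payload built above for one cross-DAG dependency
# (dag names and the dependency node id are hypothetical):
#   nodes = [{"id": "dag:upstream", "value": {"label": "upstream", "rx": 5, "ry": 5, "class": "dag"}},
#            {"id": "<dep node_id>", "value": {...}},
#            {"id": "dag:downstream", "value": {...}}]
#   edges = [{"u": "dag:upstream", "v": "<dep node_id>"},
#            {"u": "<dep node_id>", "v": "dag:downstream"}]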
class CustomPermissionModelView(PermissionModelView):
"""Customize permission names for FAB's builtin PermissionModelView."""
class_permission_name = permissions.RESOURCE_PERMISSION
method_permission_name = {
'list': 'read',
}
base_permissions = [
permissions.ACTION_CAN_READ,
]
class CustomPermissionViewModelView(PermissionViewModelView):
"""Customize permission names for FAB's builtin PermissionViewModelView."""
class_permission_name = permissions.RESOURCE_PERMISSION_VIEW
method_permission_name = {
'list': 'read',
}
base_permissions = [
permissions.ACTION_CAN_READ,
]
class CustomResetMyPasswordView(ResetMyPasswordView):
"""Customize permission names for FAB's builtin ResetMyPasswordView."""
class_permission_name = permissions.RESOURCE_MY_PASSWORD
method_permission_name = {
'this_form_get': 'read',
'this_form_post': 'edit',
}
base_permissions = [permissions.ACTION_CAN_EDIT, permissions.ACTION_CAN_READ]
class CustomResetPasswordView(ResetPasswordView):
"""Customize permission names for FAB's builtin ResetPasswordView."""
class_permission_name = permissions.RESOURCE_PASSWORD
method_permission_name = {
'this_form_get': 'read',
'this_form_post': 'edit',
}
base_permissions = [permissions.ACTION_CAN_EDIT, permissions.ACTION_CAN_READ]
class CustomRoleModelView(RoleModelView):
"""Customize permission names for FAB's builtin RoleModelView."""
class_permission_name = permissions.RESOURCE_ROLE
method_permission_name = {
'delete': 'delete',
'download': 'read',
'show': 'read',
'list': 'read',
'edit': 'edit',
'add': 'create',
'copy_role': 'create',
}
base_permissions = [
permissions.ACTION_CAN_CREATE,
permissions.ACTION_CAN_READ,
permissions.ACTION_CAN_EDIT,
permissions.ACTION_CAN_DELETE,
]
class CustomViewMenuModelView(ViewMenuModelView):
"""Customize permission names for FAB's builtin ViewMenuModelView."""
class_permission_name = permissions.RESOURCE_VIEW_MENU
method_permission_name = {
'list': 'read',
}
base_permissions = [
permissions.ACTION_CAN_READ,
]
class CustomUserInfoEditView(UserInfoEditView):
"""Customize permission names for FAB's builtin UserInfoEditView."""
class_permission_name = permissions.RESOURCE_MY_PROFILE
route_base = "/userinfoeditview"
method_permission_name = {
'this_form_get': 'edit',
'this_form_post': 'edit',
}
base_permissions = [permissions.ACTION_CAN_EDIT, permissions.ACTION_CAN_READ]
class CustomUserStatsChartView(UserStatsChartView):
"""Customize permission names for FAB's builtin UserStatsChartView."""
class_permission_name = permissions.RESOURCE_USER_STATS_CHART
route_base = "/userstatschartview"
method_permission_name = {
'chart': 'read',
'list': 'read',
}
base_permissions = [permissions.ACTION_CAN_READ]
class MultiResourceUserMixin:
"""Remaps UserModelView permissions to new resources and actions."""
_class_permission_name = permissions.RESOURCE_USER
class_permission_name_mapping = {
'userinfoedit': permissions.RESOURCE_MY_PROFILE,
'userinfo': permissions.RESOURCE_MY_PROFILE,
}
method_permission_name = {
'userinfo': 'read',
'download': 'read',
'show': 'read',
'list': 'read',
'edit': 'edit',
'userinfoedit': 'edit',
'delete': 'delete',
}
base_permissions = [
permissions.ACTION_CAN_READ,
permissions.ACTION_CAN_EDIT,
permissions.ACTION_CAN_DELETE,
]
@expose("/show/<pk>", methods=["GET"])
@has_access
def show(self, pk):
pk = self._deserialize_pk_if_composite(pk)
widgets = self._show(pk)
widgets['show'].template_args['actions'].pop('userinfoedit')
return self.render_template(
self.show_template,
pk=pk,
title=self.show_title,
widgets=widgets,
related_views=self._related_views,
)
class CustomUserDBModelView(MultiResourceUserMixin, UserDBModelView):
"""Customize permission names for FAB's builtin UserDBModelView."""
_class_permission_name = permissions.RESOURCE_USER
class_permission_name_mapping = {
'resetmypassword': permissions.RESOURCE_MY_PASSWORD,
'resetpasswords': permissions.RESOURCE_PASSWORD,
'userinfoedit': permissions.RESOURCE_MY_PROFILE,
'userinfo': permissions.RESOURCE_MY_PROFILE,
}
method_permission_name = {
'add': 'create',
'download': 'read',
'show': 'read',
'list': 'read',
'edit': 'edit',
'delete': 'delete',
'resetmypassword': 'read',
'resetpasswords': 'read',
'userinfo': 'read',
'userinfoedit': 'read',
}
base_permissions = [
permissions.ACTION_CAN_CREATE,
permissions.ACTION_CAN_READ,
permissions.ACTION_CAN_EDIT,
permissions.ACTION_CAN_DELETE,
]
@property
def class_permission_name(self):
"""Returns appropriate permission name depending on request method name."""
if request:
action_name = request.view_args.get("name")
_, method_name = request.url_rule.endpoint.rsplit(".", 1)
if method_name == 'action' and action_name:
return self.class_permission_name_mapping.get(action_name, self._class_permission_name)
if method_name:
return self.class_permission_name_mapping.get(method_name, self._class_permission_name)
return self._class_permission_name
@class_permission_name.setter
def class_permission_name(self, name):
self._class_permission_name = name
class CustomUserLDAPModelView(MultiResourceUserMixin, UserLDAPModelView):
"""Customize permission names for FAB's builtin UserLDAPModelView."""
pass
class CustomUserOAuthModelView(MultiResourceUserMixin, UserOAuthModelView):
"""Customize permission names for FAB's builtin UserOAuthModelView."""
pass
class CustomUserOIDModelView(MultiResourceUserMixin, UserOIDModelView):
"""Customize permission names for FAB's builtin UserOIDModelView."""
pass
class CustomUserRemoteUserModelView(MultiResourceUserMixin, UserRemoteUserModelView):
"""Customize permission names for FAB's builtin UserRemoteUserModelView."""
pass
| 35.442148 | 110 | 0.605489 |
4a22fc9b435c9b7ec4c402e09432ead4a4cb60af | 3,353 | py | Python | tests/unit/utils/runtime_whitespace_regex_test.py | d--j/salt | 579f900be67a80e1a77674bc6aa21fec836c1c4c | ["Apache-2.0"] | 1 | 2015-06-05T13:47:02.000Z | 2015-06-05T13:47:02.000Z | tests/unit/utils/runtime_whitespace_regex_test.py | epoelke/salt | 80ae64e54f9f336d3cdb6e03e42f2a50469ec8f2 | ["Apache-2.0"] | null | null | null | tests/unit/utils/runtime_whitespace_regex_test.py | epoelke/salt | 80ae64e54f9f336d3cdb6e03e42f2a50469ec8f2 | ["Apache-2.0"] | null | null | null |
# -*- coding: utf-8 -*-
'''
tests.unit.utils.runtime_whitespace_regex_test
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
:codeauthor: :email:`Pedro Algarvio ([email protected])`
:copyright: © 2012-2013 by the SaltStack Team, see AUTHORS for more details
:license: Apache 2.0, see LICENSE for more details.
'''
# Import python libs
import re
# Import Salt Testing libs
from salttesting import TestCase
from salttesting.helpers import ensure_in_syspath
ensure_in_syspath('../../')
# Import salt libs
from salt.utils import build_whitespace_split_regex
DOUBLE_TXT = '''\
# set variable identifying the chroot you work in (used in the prompt below)
if [ -z "$debian_chroot" ] && [ -r /etc/debian_chroot ]; then
debian_chroot=$(cat /etc/debian_chroot)
fi
'''
SINGLE_TXT = '''\
# set variable identifying the chroot you work in (used in the prompt below)
if [ -z '$debian_chroot' ] && [ -r /etc/debian_chroot ]; then
debian_chroot=$(cat /etc/debian_chroot)
fi
'''
SINGLE_DOUBLE_TXT = '''\
# set variable identifying the chroot you work in (used in the prompt below)
if [ -z '$debian_chroot' ] && [ -r /etc/debian_chroot ]; then
debian_chroot=$(cat /etc/debian_chroot)
fi
# set variable identifying the chroot you work in (used in the prompt below)
if [ -z "$debian_chroot" ] && [ -r /etc/debian_chroot ]; then
debian_chroot=$(cat /etc/debian_chroot)
fi
'''
SINGLE_DOUBLE_SAME_LINE_TXT = '''\
# set variable identifying the chroot you work in (used in the prompt below)
if [ -z '$debian_chroot' ] && [ -r "/etc/debian_chroot" ]; then
debian_chroot=$(cat /etc/debian_chroot)
fi
'''
MATCH = '''\
# set variable identifying the chroot you work in (used in the prompt below)
if [ -z '$debian_chroot' ] && [ -r /etc/debian_chroot ]; then
debian_chroot=$(cat /etc/debian_chroot)
fi
# set variable identifying the chroot you work in (used in the prompt below)
if [ -z "$debian_chroot" ] && [ -r /etc/debian_chroot ]; then
debian_chroot=$(cat /etc/debian_chroot)
fi
# set variable identifying the chroot you work in (used in the prompt below)
if [ -z "$debian_chroot" ] && [ -r /etc/debian_chroot ]; then
debian_chroot=$(cat /etc/debian_chroot)
fi
# set variable identifying the chroot you work in (used in the prompt below)
if [ -z '$debian_chroot' ] && [ -r /etc/debian_chroot ]; then
debian_chroot=$(cat /etc/debian_chroot)
fi
# set variable identifying the chroot you work in (used in the prompt below)
if [ -z '$debian_chroot' ] && [ -r "/etc/debian_chroot" ]; then
debian_chroot=$(cat /etc/debian_chroot)
fi
'''
class TestRuntimeWhitespaceRegex(TestCase):
def test_single_quotes(self):
regex = build_whitespace_split_regex(SINGLE_TXT)
self.assertTrue(re.search(regex, MATCH))
def test_double_quotes(self):
regex = build_whitespace_split_regex(DOUBLE_TXT)
self.assertTrue(re.search(regex, MATCH))
def test_single_and_double_quotes(self):
regex = build_whitespace_split_regex(SINGLE_DOUBLE_TXT)
self.assertTrue(re.search(regex, MATCH))
def test_issue_2227(self):
regex = build_whitespace_split_regex(SINGLE_DOUBLE_SAME_LINE_TXT)
self.assertTrue(re.search(regex, MATCH))
if __name__ == '__main__':
from integration import run_tests
run_tests(TestRuntimeWhitespaceRegex, needs_daemon=False)
| 30.761468 | 79 | 0.698181 |
4a22fcc3c8ad1a3e760e61dd35132bec7ac943e6 | 6,735 | py | Python | src/pymap3d/ned.py | ryanpavlick/pymap3d | 968f6837b1550503461f884d8ce2e1b10c0db1f4 | [
"BSD-2-Clause"
] | 116 | 2020-02-23T02:04:18.000Z | 2022-03-29T00:19:37.000Z | src/pymap3d/ned.py | ryanpavlick/pymap3d | 968f6837b1550503461f884d8ce2e1b10c0db1f4 | [
"BSD-2-Clause"
] | 19 | 2020-03-02T08:13:46.000Z | 2022-03-30T17:50:00.000Z | src/pymap3d/ned.py | ryanpavlick/pymap3d | 968f6837b1550503461f884d8ce2e1b10c0db1f4 | [
"BSD-2-Clause"
] | 28 | 2020-02-24T11:56:03.000Z | 2022-03-29T02:29:37.000Z | """ Transforms involving NED North East Down """
from __future__ import annotations
import typing
from .enu import geodetic2enu, aer2enu, enu2aer
from .ecef import ecef2geodetic, ecef2enuv, ecef2enu, enu2ecef
from .ellipsoid import Ellipsoid
if typing.TYPE_CHECKING:
from numpy import ndarray
def aer2ned(
az: ndarray, elev: ndarray, slantRange: ndarray, deg: bool = True
) -> tuple[ndarray, ndarray, ndarray]:
"""
converts azimuth, elevation, range to target from observer to North, East, Down
Parameters
-----------
az : float
azimuth
elev : float
elevation
slantRange : float
slant range [meters]
deg : bool, optional
degrees input/output (False: radians in/out)
Results
-------
n : float
North NED coordinate (meters)
e : float
East NED coordinate (meters)
d : float
Down NED coordinate (meters)
"""
e, n, u = aer2enu(az, elev, slantRange, deg=deg)
return n, e, -u
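# Worked example for aer2ned (illustrative, results rounded):
#   az=30 deg, elev=10 deg, slantRange=1000 m
#   e = cos(el)*sin(az)*r ~ 492.4 m, n = cos(el)*cos(az)*r ~ 852.9 m, u = sin(el)*r ~ 173.6 m
#   -> (n, e, d) ~ (852.9, 492.4, -173.6)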
def ned2aer(
n: ndarray, e: ndarray, d: ndarray, deg: bool = True
) -> tuple[ndarray, ndarray, ndarray]:
"""
converts North, East, Down to azimuth, elevation, range
Parameters
----------
n : float
North NED coordinate (meters)
e : float
East NED coordinate (meters)
d : float
Down NED coordinate (meters)
deg : bool, optional
degrees input/output (False: radians in/out)
Results
-------
az : float
azimuth
elev : float
elevation
slantRange : float
slant range [meters]
"""
return enu2aer(e, n, -d, deg=deg)
def ned2geodetic(
n: ndarray,
e: ndarray,
d: ndarray,
lat0: ndarray,
lon0: ndarray,
h0: ndarray,
ell: Ellipsoid = None,
deg: bool = True,
) -> tuple[ndarray, ndarray, ndarray]:
"""
Converts North, East, Down to target latitude, longitude, altitude
Parameters
----------
n : float
North NED coordinate (meters)
e : float
East NED coordinate (meters)
d : float
Down NED coordinate (meters)
lat0 : float
Observer geodetic latitude
lon0 : float
Observer geodetic longitude
h0 : float
observer altitude above geodetic ellipsoid (meters)
ell : Ellipsoid, optional
reference ellipsoid
deg : bool, optional
degrees input/output (False: radians in/out)
Results
-------
lat : float
target geodetic latitude
lon : float
target geodetic longitude
h : float
target altitude above geodetic ellipsoid (meters)
"""
x, y, z = enu2ecef(e, n, -d, lat0, lon0, h0, ell, deg=deg)
return ecef2geodetic(x, y, z, ell, deg=deg)
def ned2ecef(
n: ndarray,
e: ndarray,
d: ndarray,
lat0: ndarray,
lon0: ndarray,
h0: ndarray,
ell: Ellipsoid = None,
deg: bool = True,
) -> tuple[ndarray, ndarray, ndarray]:
"""
North, East, Down to target ECEF coordinates
Parameters
----------
n : float
North NED coordinate (meters)
e : float
East NED coordinate (meters)
d : float
Down NED coordinate (meters)
lat0 : float
Observer geodetic latitude
lon0 : float
Observer geodetic longitude
h0 : float
observer altitude above geodetic ellipsoid (meters)
ell : Ellipsoid, optional
reference ellipsoid
deg : bool, optional
degrees input/output (False: radians in/out)
Results
-------
x : float
ECEF x coordinate (meters)
y : float
ECEF y coordinate (meters)
z : float
ECEF z coordinate (meters)
"""
return enu2ecef(e, n, -d, lat0, lon0, h0, ell, deg=deg)
def ecef2ned(
x: ndarray,
y: ndarray,
z: ndarray,
lat0: ndarray,
lon0: ndarray,
h0: ndarray,
ell: Ellipsoid = None,
deg: bool = True,
) -> tuple[ndarray, ndarray, ndarray]:
"""
Convert ECEF x,y,z to North, East, Down
Parameters
----------
x : float
ECEF x coordinate (meters)
y : float
ECEF y coordinate (meters)
z : float
ECEF z coordinate (meters)
lat0 : float
Observer geodetic latitude
lon0 : float
Observer geodetic longitude
h0 : float
observer altitude above geodetic ellipsoid (meters)
ell : Ellipsoid, optional
reference ellipsoid
deg : bool, optional
degrees input/output (False: radians in/out)
Results
-------
n : float
North NED coordinate (meters)
e : float
East NED coordinate (meters)
d : float
Down NED coordinate (meters)
"""
e, n, u = ecef2enu(x, y, z, lat0, lon0, h0, ell, deg=deg)
return n, e, -u
def geodetic2ned(
lat: ndarray,
lon: ndarray,
h: ndarray,
lat0: ndarray,
lon0: ndarray,
h0: ndarray,
ell: Ellipsoid = None,
deg: bool = True,
) -> tuple[ndarray, ndarray, ndarray]:
"""
convert latitude, longitude, altitude of target to North, East, Down from observer
Parameters
----------
lat : float
target geodetic latitude
lon : float
target geodetic longitude
h : float
target altitude above geodetic ellipsoid (meters)
lat0 : float
Observer geodetic latitude
lon0 : float
Observer geodetic longitude
h0 : float
observer altitude above geodetic ellipsoid (meters)
ell : Ellipsoid, optional
reference ellipsoid
deg : bool, optional
degrees input/output (False: radians in/out)
Results
-------
n : float
North NED coordinate (meters)
e : float
East NED coordinate (meters)
d : float
Down NED coordinate (meters)
"""
e, n, u = geodetic2enu(lat, lon, h, lat0, lon0, h0, ell, deg=deg)
return n, e, -u
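# Illustrative example (not part of the original module): geodetic2ned reverses
# ned2geodetic, so a target 100 m directly above the observer has n and e near 0
# and d near -100 (down is positive toward the ellipsoid):
#
#     geodetic2ned(42.0, -82.0, 300.0, 42.0, -82.0, 200.0)
#     # -> approximately (0.0, 0.0, -100.0)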
def ecef2nedv(
x: float, y: float, z: float, lat0: float, lon0: float, deg: bool = True
) -> tuple[float, float, float]:
"""
    converts an ECEF VECTOR (difference between two points) to North, East, Down components
Parameters
----------
x : float
ECEF x coordinate (meters)
y : float
ECEF y coordinate (meters)
z : float
ECEF z coordinate (meters)
lat0 : float
Observer geodetic latitude
lon0 : float
Observer geodetic longitude
deg : bool, optional
degrees input/output (False: radians in/out)
Results
-------
(Vector)
n : float
North NED coordinate (meters)
e : float
East NED coordinate (meters)
d : float
Down NED coordinate (meters)
"""
e, n, u = ecef2enuv(x, y, z, lat0, lon0, deg=deg)
return n, e, -u
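# Illustrative example (not part of the original module): ecef2nedv rotates a free
# vector, so no observer altitude is involved. At lat0 = 0, lon0 = 0 the ECEF +x
# axis points along local "up", i.e. along -d:
#
#     ecef2nedv(1.0, 0.0, 0.0, 0.0, 0.0)
#     # -> approximately (0.0, 0.0, -1.0)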
| 22.009804 | 86 | 0.585449 |
4a22fd0683c2fef1952b7edad4c2554469ef0a1f | 6,258 | py | Python | soaplib/core/test/wsdl/test_wsdl_ports_services.py | divaliu1408/overfit | 083dcfaa758391092933e19544462cd831e73ef0 | [
"Apache-2.0"
] | null | null | null | soaplib/core/test/wsdl/test_wsdl_ports_services.py | divaliu1408/overfit | 083dcfaa758391092933e19544462cd831e73ef0 | [
"Apache-2.0"
] | null | null | null | soaplib/core/test/wsdl/test_wsdl_ports_services.py | divaliu1408/overfit | 083dcfaa758391092933e19544462cd831e73ef0 | [
"Apache-2.0"
] | null | null | null | #
# soaplib - Copyright (C) Soaplib contributors.
#
# This library is free software; you can redistribute it and/or
# modify it under the terms of the GNU Lesser General Public
# License as published by the Free Software Foundation; either
# version 2.1 of the License, or (at your option) any later version.
#
# This library is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this library; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301
#
import unittest
from soaplib.core import namespaces
from soaplib.core.test.wsdl.wrappers import build_app, AppTestWrapper
from soaplib.core.test.wsdl.port_service_services import (S1, S2, S3, \
SinglePortService, MissingRPCPortService, DoublePortService, \
BadRPCPortService, MissingServicePortService)
class TestWSDLPortServiceBehavior(unittest.TestCase):
def setUp(self):
self.transport = 'http://schemas.xmlsoap.org/soap/http'
self.url = 'http:/localhost:7789/wsdl'
self.port_type_string = '{%s}portType' % namespaces.ns_wsdl
self.service_string = '{%s}service' % namespaces.ns_wsdl
self.binding_string = '{%s}binding' % namespaces.ns_wsdl
self.operation_string = '{%s}operation' % namespaces.ns_wsdl
self.port_string = '{%s}port' % namespaces.ns_wsdl
def tearDown(self):
pass
def test_tns(self):
sa = build_app([SinglePortService], 'SinglePort', 'TestServiceName')
sa.get_wsdl(self.url)
sa_el = sa.wsdl.elements
tns = sa_el.get('targetNamespace')
self.assertEqual('SinglePort', tns)
sa = build_app(
[SinglePortService, DoublePortService],
'MultiServiceTns',
'AppName'
)
sa.get_wsdl(self.url)
tns = sa.wsdl.elements.get('targetNamespace')
self.assertEqual(tns, 'MultiServiceTns')
def test_raise_missing_port(self):
# Test that an exception is raised when a port is declared in the service class
# but the rpc method does not declare a port.
app = build_app(
[MissingRPCPortService],
'MisingPortTns',
'MissingPortApp'
)
self.assertRaises(ValueError, app.get_wsdl, self.url)
app = build_app(
[SinglePortService, MissingRPCPortService],
'MissingPort2Tns',
'MissingPort2App'
)
self.assertRaises(ValueError, app.get_wsdl, self.url)
def test_raise_invalid_port(self):
app = build_app(
[BadRPCPortService],
'MisingPortTns',
'MissingPortApp'
)
self.assertRaises(ValueError,app.get_wsdl, self.url)
app = build_app(
[BadRPCPortService, SinglePortService],
'MissingPort2Tns',
'MissingPortApp'
)
self.assertRaises(ValueError, app.get_wsdl, self.url)
def test_raise_no_service_port(self):
app = build_app(
[MissingServicePortService],
'MisingPortTns',
'MissingPortApp'
)
self.assertRaises(ValueError,app.get_wsdl, self.url)
app = build_app(
[SinglePortService, MissingServicePortService],
'MissingServicePort2Tns',
'MissingServicePort2App'
)
self.assertRaises(ValueError, app.get_wsdl, self.url)
def test_service_name(self):
sa = build_app([SinglePortService], 'SinglePort', 'TestServiceName')
sa_wsdl = sa.get_wsdl(self.url)
sa_el = sa.wsdl.elements
sl = [s for s in sa_el.iterfind(self.service_string)]
name = sl[0].get('name')
self.assertEqual('SinglePortService_ServiceInterface', name)
def test_service_contains_ports(self):
# Check that the element for the service has the correct number of ports
# Check that the element for the service has the correct port names
app = build_app(
[SinglePortService],
'SinglePortTns',
'SinglePortApp'
)
wrapper = AppTestWrapper(app)
service = wrapper.get_service_list()[0]
# verify that there is only one port
ports = wrapper.get_port_list(service)
self.assertEquals(1, len(ports))
# verify that the ports name matched the port specified in
# the service class
port = ports[0]
self.assertEquals('FirstPortType', port.get('name'))
def test_port_name(self):
sa = build_app([SinglePortService], 'tns', name='SinglePortApp')
sa_wsdl_string = sa.get_wsdl(self.url)
sa_wsdl_el = sa.wsdl.elements
pl = [el for el in sa_wsdl_el.iterfind(self.port_type_string)]
self.assertEqual('FirstPortType', pl[0].get('name'))
da = build_app([DoublePortService], 'tns', name='DoublePortApp')
da_wsdl_string = da.get_wsdl(self.url)
da_wsdl_el = da.wsdl.elements
pl2 = [el for el in da_wsdl_el.iterfind(self.port_type_string)]
self.assertEqual('FirstPort', pl2[0].get('name'))
self.assertEqual('SecondPort', pl2[1].get('name'))
def test_port_count(self):
sa = build_app([SinglePortService], 'tns', name='SinglePortApp')
sa_wsdl_string = sa.get_wsdl(self.url)
sa_wsdl_el = sa.wsdl.elements
sa_wsdl_type = sa.wsdl
self.assertEquals(1, len(sa_wsdl_type.port_type_dict.keys()))
pl = [el for el in sa_wsdl_el.iterfind(self.port_type_string)]
self.assertEqual(1, len(pl))
da = build_app([DoublePortService], 'tns', name='DoublePortApp')
da_wsdl_string = da.get_wsdl(self.url)
da_wsdl_el = da.wsdl.elements
da_wsdl_type = da.wsdl
self.assertEquals(2, len(da_wsdl_type.port_type_dict.keys()))
pl2 = [el for el in da_wsdl_el.iterfind(self.port_type_string)]
self.assertEqual(2, len(pl2))
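# Not part of the original test module: the suite can also be run directly with the
# standard unittest runner, assuming soaplib.core and its test helpers are importable.
if __name__ == '__main__':
    unittest.main()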
| 31.447236 | 87 | 0.644934 |
4a22fd4c6a34f3f9a486be08d8d38cb615d46752 | 14,005 | py | Python | backend-service/bot_server/api/request_handler.py | sravankumarmatta/Classroom-Bot | 2081600c553c003ada4c0dadd2d4715e11c073d2 | [
"MIT"
] | 1 | 2020-10-06T06:23:04.000Z | 2020-10-06T06:23:04.000Z | backend-service/bot_server/api/request_handler.py | sravankumarmatta/Classroom-Bot | 2081600c553c003ada4c0dadd2d4715e11c073d2 | [
"MIT"
] | 38 | 2020-10-01T00:13:35.000Z | 2020-10-27T03:33:09.000Z | backend-service/bot_server/api/request_handler.py | sravankumarmatta/Classroom-Bot | 2081600c553c003ada4c0dadd2d4715e11c073d2 | [
"MIT"
] | 8 | 2020-10-20T07:00:11.000Z | 2021-09-28T00:23:45.000Z | # TODO: Add Grade table and requests for it using patch
"""
This modules has functions to handle all the supported commands for the
classroom api's.
Author: Ayushi Rajendra Kumar
Date: 2020-09-02
"""
from .models import Course, Group, Student, Assignment, Schedule
import traceback
from rest_framework import exceptions
def missing_field_error(field):
"""error function
:param field:
:return:
"""
error_response = {
"status": 400,
"message": f"Missing field {field}",
}
raise exceptions.ValidationError(detail=error_response)
def create_new_course(data):
"""REST Request handler- create course
:param data:
:return:
"""
try:
response = Course.objects.create_course(workspace_id=data["workspace_id"],
course_name=data["course_name"],
department=data["department"],
semester=data["semester"],
bot_token=data["bot_token"],
admin_user_id=data["admin_user_id"])
return {'data': response}
except Exception as e:
traceback.print_exc()
return {'data': f'Could not create the course/workspace: {e}', 'status_code': 400}
def get_course_details(workspace_id, data):
"""REST Request handler- Get Course details
:param workspace_id:
:param data:
:return:
"""
data = Course.objects.get_course_details(workspace_id=workspace_id, course_name=data["course_name"],
department=data["department"],
semester=data["semester"])
return {
"status": 0,
"message": "success",
"data": data
}
def get_all_courses(workspace_id):
"""REST Request handler- get all courses
:param workspace_id:
:return:
"""
data = Course.objects.get_all_courses(workspace_id=workspace_id)
return {
"status": 0,
"message": "success",
"data": data
}
def delete_course(data):
"""REST Request handler- Delete courses
:param data:
:return:
"""
return Course.objects.del_course(workspace_id=data["workspace_id"],
course_name=data["course_name"], department=data["department"])
def create_student(data):
"""REST Request handler- Create student
:param data:
:return:
"""
try:
if 'workspace_id' in data:
course = Course.objects.get(workspace_id=data['workspace_id'])
elif 'course_id' in data:
course = Course.objects.get(log_course_id=data['course_id'])
else:
raise Exception
response = Student.objects.create_student(student_unity_id=data['student_unity_id'],
course=course,
name=data['name'],
email_id=data['email_id'])
return {'data': response}
except Exception as e:
traceback.print_exc()
return {'data': f'Could not create the student: {e}', 'status_code': 400}
def update_student_details(data):
"""REST Request handler- update student details
:param data:
:return:
"""
if 'email_id' not in data:
return missing_field_error('email_id')
if 'workspace_id' in data:
course = Course.objects.get(workspace_id=data['workspace_id'])
elif 'course_id' in data:
course = Course.objects.get(log_course_id=data['course_id'])
else:
return missing_field_error("Course Identifier")
# TODO: Add bot token to response whenever 'workspace_id' in data else remove it
response = None
if 'group_num' in data:
response = Student.objects.assign_group(email_id=data['participant'], course=data['course_id'],
group_number=data['group_num'])
elif 'slack_user_id' in data:
response = Student.objects.update_slack_user_id(data['email_id'], course, data['slack_user_id'])
else:
response = missing_field_error('No field to update')
return response
def get_student_details(email_id, workspace_id=None, course_id=None):
"""REST Request habdler- get student details
:param email_id:
:param workspace_id:
:param course_id:
:return:
"""
if workspace_id is not None:
course = Course.objects.get(workspace_id=workspace_id)
elif course_id is not None:
course = Course.objects.get(log_course_id=course_id)
else:
return missing_field_error("Course Identifier")
response = Student.objects.get_student_details(email_id=email_id, course=course)
# TODO: Add bot token whenever 'workspace_id' in data else remove it
return {
"status": 0,
"message": "success",
"data": response
}
def get_all_students():
"""REST Request habdler- get all student details
:return:
"""
response = Student.objects.get_all_students()
return {
"status": 0,
"message": "success",
"data": response
}
def delete_student(data):
"""REST Request habdler- Delete student
:param data:
:return:
"""
if 'email_id' not in data:
return missing_field_error('email_id')
if 'workspace_id' in data:
course = Course.objects.get(workspace_id=data['workspace_id'])
elif 'course_id' in data:
course = Course.objects.get(log_course_id=data['course_id'])
else:
return missing_field_error("Course Identifier")
return Student.objects.delete_student(email_id=data['email_id'], course=course)
# Schedule APIs
def create_schedule(data):
"""REST Request handler- Create schedule
:param data:
:return:
"""
try:
data_keys = data.keys()
if "lecture_link" in data_keys:
response = Schedule.objects.create_schedule(slack_user_id=data['slack_user_id'],
lecture_link=data['lecture_link'])
elif "tutor_link" in data_keys:
response = Schedule.objects.create_schedule(slack_user_id=data['slack_user_id'],
tutor_link=data['tutor_link'])
return {'data': response}
except Exception as e:
traceback.print_exc()
return {'data': f'Could not create the student: {e}', 'status_code': 400}
def update_schedule_details(data):
"""REST Request handler- update schedule details
:param data:
:return:
"""
# TODO: Add functionality if needed later
response = None
# if 'email_id' not in data:
# return missing_field_error('email_id')
# if 'workspace_id' in data:
# course = Course.objects.get(workspace_id=data['workspace_id'])
# elif 'course_id' in data:
# course = Course.objects.get(log_course_id=data['course_id'])
# else:
# return missing_field_error("Course Identifier")
# TODO: Add bot token to response whenever 'workspace_id' in data else remove it
# if 'group_num' in data:
# response = Student.objects.assign_group(email_id=data['participant'], course=data['course_id'],
# group_number=data['group_num'])
# elif 'slack_user_id' in data:
# response = Student.objects.update_slack_user_id(data['email_id'], course, data['slack_user_id'])
# else:
# response = missing_field_error('No field to update')
return response
def get_schedule_lecture_details(lecture_link, workspace_id=None, course_id=None):
"""REST Request habdler- get schedule lecture details
:param lecture_link:
:param workspace_id:
:param course_id:
:return:
"""
if workspace_id is not None:
course = Course.objects.get(workspace_id=workspace_id)
elif course_id is not None:
course = Course.objects.get(log_course_id=course_id)
else:
return missing_field_error("Course Identifier")
response = Schedule.objects.get_schedule_lecture_details(lecture_link=lecture_link, course=course)
# TODO: Add bot token whenever 'workspace_id' in data else remove it
return {
"status": 0,
"message": "success",
"data": response
}
def get_schedule_tutor_details(tutor_link, workspace_id=None, course_id=None):
"""REST Request habdler- get schedule tutor details
:param tutor_link:
:param workspace_id:
:param course_id:
:return:
"""
if workspace_id is not None:
course = Course.objects.get(workspace_id=workspace_id)
elif course_id is not None:
course = Course.objects.get(log_course_id=course_id)
else:
return missing_field_error("Course Identifier")
response = Schedule.objects.get_tutor_lecture_details(tutor_link=tutor_link, course=course)
# TODO: Add bot token whenever 'workspace_id' in data else remove it
return {
"status": 0,
"message": "success",
"data": response
}
def get_all_schedules():
"""REST Request handler- get all schedule details
:return:
"""
response = Schedule.objects.get_all_schedule()
return {
"status": 0,
"message": "success",
"data": response
}
def delete_schedule(data):
"""REST Request handler- Delete schedule
:param data:
:return:
"""
response = None
# if 'email_id' not in data:
# return missing_field_error('email_id')
# if 'workspace_id' in data:
# course = Course.objects.get(workspace_id=data['workspace_id'])
# elif 'course_id' in data:
# course = Course.objects.get(log_course_id=data['course_id'])
# else:
# return missing_field_error("Course Identifier")
# response = Student.objects.delete_student(email_id=data['email_id'], course=course)
return response
def get_links_for_a_slack_user(student_id, schedule_type):
response = Schedule.objects.get_links_for_a_slack_user(student_id=student_id,
schedule_type=schedule_type)
return {
"status": 0,
"message": "success",
"data": response
}
# Group APIs
def create_group(group_info: dict):
"""REST Request habdler- create group
:param group_info:
:return:
"""
try:
response = Group.objects.create_group(group_info=group_info)
return {'data': response}
except Exception as e:
traceback.print_exc()
print(group_info)
return {'data': f'Could not create a group: {e}', 'status_code': 400}
def get_students_of_group(workspace_id, course_id, group_number):
"""REST Request handler- get students of groups
:param workspace_id:
:param course_id:
:param group_number:
:return:
"""
if workspace_id is not None:
course = Course.objects.get(workspace_id=workspace_id)
elif course_id is not None:
course = Course.objects.get(log_course_id=course_id)
else:
return missing_field_error("Course Identifier")
response = Group.objects.get_group_details(group_number, course)
return {
"status": 0,
"message": "success",
"data": response
}
def get_all_groups(workspace_id, course_id):
"""REST Request handler- get groups
:param workspace_id:
:param course_id:
:return:
"""
if workspace_id is not None:
course = Course.objects.get(workspace_id=workspace_id)
elif course_id is not None:
course = Course.objects.get(log_course_id=course_id)
else:
course = None
response = Group.objects.get_all_groups(course)
return {
"status": 0,
"message": "success",
"data": response
}
def delete_group(data):
"""REST Request handler- Delete group
:param data:
:return:
"""
if 'group_number' not in data:
return missing_field_error('group_number')
if 'workspace_id' in data:
course = Course.objects.get(workspace_id=data['workspace_id'])
elif 'course_id' in data:
course = Course.objects.get(log_course_id=data['course_id'])
else:
return missing_field_error("Course Identifier")
return Group.objects.del_group(group_number=data['group_number'], registered_course_id=course)
def get_groups_for_a_slack_user(slack_id):
response = Student.objects.get_groups_for_a_slack_user(user_id=slack_id)
return {
"status": 0,
"message": "success",
"data": response
}
def get_homeworks_for_team_id(workspace_id):
"""REST Request handler- get Assignment
:param workspace_id:
:return:
"""
response = Assignment.objects.get_assignment_for_team(workspace_id=workspace_id)
return {
"status": 0,
"message": "success",
"data": response
}
def create_new_homework(homework: dict):
"""REST Request handler- create Assignment
:param homework:
:return:
"""
try:
response = Assignment.objects.create_new_assignment(assignment=homework)
return {'data': response}
except Exception as e:
traceback.print_exc()
print(homework)
return {'data': f'Could not create a homework: {e}', 'status_code': 400}
def delete_homework(data):
"""REST Request handler- Delete Assignment
:param data:
:return:
"""
try:
response = Assignment.objects.delete_assignment(workspace_id=data['workspace_id'],
assignment_name=data['assignment_name'])
return {'data': response}
except Exception as e:
traceback.print_exc()
print(data)
return {'data': f'Could not delete the homework: {e}', 'status_code': 400}
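# Illustrative usage sketch (not part of the original module; the payload below is
# hypothetical): a REST view typically unpacks the request body and delegates to one
# of these handlers, e.g.
#
#     payload = {"workspace_id": "T123", "course_name": "SE", "department": "CSC",
#                "semester": "Fall", "bot_token": "xoxb-...", "admin_user_id": "U1"}
#     result = create_new_course(payload)
#
# Failures surface either as a rest_framework ValidationError (raised by
# missing_field_error) or as a dict carrying 'status_code': 400.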
| 28.93595 | 106 | 0.622278 |
4a22fd6b0fa6efb8d1284cf733622fb9c925fc66 | 292 | py | Python | przyklady/przyklady/0/listing_8-2.py | bitounu/Nauka-Pythona | e02dd239ba2c294d6fdd98958301b3ece44f44d1 | [
"Unlicense"
] | null | null | null | przyklady/przyklady/0/listing_8-2.py | bitounu/Nauka-Pythona | e02dd239ba2c294d6fdd98958301b3ece44f44d1 | [
"Unlicense"
] | null | null | null | przyklady/przyklady/0/listing_8-2.py | bitounu/Nauka-Pythona | e02dd239ba2c294d6fdd98958301b3ece44f44d1 | [
"Unlicense"
] | null | null | null | # Listing_8-2.py
# Copyright Warren & Carter Sande, 2013
# Released under MIT license http://www.opensource.org/licenses/mit-license.php
# Version $version ----------------------------
# Przy każdym przejściu pętli for wykonujemy coś innego
for petla in [1, 2, 3, 4, 5]:
print petla
| 29.2 | 81 | 0.65411 |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.