repo_name | repo_path | repo_head_hexsha | content | apis |
---|---|---|---|---|
dylanlee101/leetcode | code_week12_713_719/is_graph_bipartite_hard.py | b059afdadb83d504e62afd1227107de0b59557af | '''
Given an undirected graph `graph`, return true if and only if it is bipartite.
A graph is bipartite if we can split its set of nodes into two independent subsets A and B such that every edge in the graph has one node in subset A and the other node in subset B.
The graph is given as an adjacency list: graph[i] is a list of all the nodes that node i is connected to. Each node is an integer between 0 and graph.length - 1. There are no self-loops or parallel edges: graph[i] does not contain i, and graph[i] contains no duplicate values.
Example 1:
Input: [[1,3], [0,2], [1,3], [0,2]]
Output: true
Explanation:
The undirected graph looks like this:
0----1
|    |
|    |
3----2
We can split the nodes into two groups: {0, 2} and {1, 3}.
Example 2:
Input: [[1,2,3], [0,2], [0,1,3], [0,2]]
Output: false
Explanation:
The undirected graph looks like this:
0----1
| \  |
|  \ |
3----2
We cannot split the nodes into two independent subsets.
Source: LeetCode (力扣)
Link: https://leetcode-cn.com/problems/is-graph-bipartite
'''
from typing import List


class Solution:
    def isBipartite(self, graph: List[List[int]]) -> bool:
        # Try to 2-color the graph with DFS: the graph is bipartite
        # iff no edge ever connects two nodes of the same color.
        n = len(graph)
        uncolored, red, green = 0, 1, 2
        color = [uncolored] * n
        valid = True

        def dfs(node, c):
            nonlocal valid
            color[node] = c
            cNei = (green if c == red else red)
            for neighbor in graph[node]:
                if color[neighbor] == uncolored:
                    dfs(neighbor, cNei)
                    if not valid:
                        return
                elif color[neighbor] != cNei:
                    valid = False
                    return

        for i in range(n):
            if color[i] == uncolored:
                dfs(i, red)
                if not valid:
                    break
        return valid
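
# A minimal usage sketch (not part of the original submission): running the two
# examples from the problem statement through the DFS two-coloring above.
if __name__ == "__main__":
    solver = Solution()
    print(solver.isBipartite([[1, 3], [0, 2], [1, 3], [0, 2]]))        # expected: True
    print(solver.isBipartite([[1, 2, 3], [0, 2], [0, 1, 3], [0, 2]]))  # expected: False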
| [] |
Frost199/Machine_Learning | data_preprocessing/decision_tree_regression.py | 8cf77c6cbbae7781ac6f2ffcc9218ad79472d287 | # -*- coding: utf-8 -*-
"""
Created on Tue Apr 17 06:44:47 2018
@author: Eleam Emmanuel
"""
import pandas as pd
import numpy as np
import matplotlib.pyplot as plt
from sklearn.tree import DecisionTreeRegressor
# importing the dataset
dataset = pd.read_csv('Position_Salaries.csv')
# take all the columns but leave the last one(-1)
# always make sure our independent variable is a matrix not a vector and
# dependent variable can be a vector
X = dataset.iloc[:, 1:-1].values
Y = dataset.iloc[:, 2].values
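# Shape note (an illustrative sketch, assuming the usual 10-row Position_Salaries.csv
# used with this exercise): iloc[:, 1:-1] keeps X a 2-D matrix while iloc[:, 2]
# leaves Y a 1-D vector, i.e. X.shape == (10, 1) and Y.shape == (10,).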
# splitting the dataset into a training set and a test set
# x_train, x_test, y_train, y_test = train_test_split(X, Y, test_size=0.2, random_state=0)
# feature scaling
"""sc_X = StandardScaler()
x_train = sc_X.fit_transform(x_train)
x_test = sc_X.transform(x_test)
sc_Y = StandardScaler()
x_train = sc_X.fit_transform(x_train)"""
# fitting the Decision Tree regression Model to the dataset
regressor = DecisionTreeRegressor(random_state=0)
regressor.fit(X, Y)
# predicting a new result
y_pred = regressor.predict([[6.5]])  # predict expects a 2-D array of shape (n_samples, n_features)
# Visualizing the Decision tree regression result (for higher resolution and smoother curve)
X_grid = np.arange(min(X), max(X), 0.01)
X_grid = X_grid.reshape(len(X_grid), 1)
plt.scatter(X, Y, color='red')
plt.plot(X_grid, regressor.predict(X_grid), color='blue')
plt.title("Truth or Bluff (Regression Model)")
plt.xlabel("Position Level")
plt.ylabel("Salary")
plt.show() | [((14, 10, 14, 46), 'pandas.read_csv', 'pd.read_csv', ({(14, 22, 14, 45): '"""Position_Salaries.csv"""'}, {}), "('Position_Salaries.csv')", True, 'import pandas as pd\n'), ((32, 12, 32, 49), 'sklearn.tree.DecisionTreeRegressor', 'DecisionTreeRegressor', (), '', False, 'from sklearn.tree import DecisionTreeRegressor\n'), ((41, 0, 41, 30), 'matplotlib.pyplot.scatter', 'plt.scatter', (), '', True, 'import matplotlib.pyplot as plt\n'), ((43, 0, 43, 46), 'matplotlib.pyplot.title', 'plt.title', ({(43, 10, 43, 45): '"""Truth or Bluff (Regression Model)"""'}, {}), "('Truth or Bluff (Regression Model)')", True, 'import matplotlib.pyplot as plt\n'), ((44, 0, 44, 28), 'matplotlib.pyplot.xlabel', 'plt.xlabel', ({(44, 11, 44, 27): '"""Position Level"""'}, {}), "('Position Level')", True, 'import matplotlib.pyplot as plt\n'), ((45, 0, 45, 20), 'matplotlib.pyplot.ylabel', 'plt.ylabel', ({(45, 11, 45, 19): '"""Salary"""'}, {}), "('Salary')", True, 'import matplotlib.pyplot as plt\n'), ((46, 0, 46, 10), 'matplotlib.pyplot.show', 'plt.show', ({}, {}), '()', True, 'import matplotlib.pyplot as plt\n')] |
everaccountable/django-user-messages | user_messages/apps.py | 101d539b785bdb440bf166fb16ad25eb66e4174a | from django.apps import AppConfig
from django.conf import settings
from django.core import checks
from django.template import engines
from django.template.backends.django import DjangoTemplates
from django.utils.text import capfirst
from django.utils.translation import gettext_lazy as _

@checks.register()
def check_context_processors(app_configs, **kwargs):
    errors = []

    for engine in engines.all():
        if isinstance(engine, DjangoTemplates):
            django_templates_instance = engine.engine
            break
    else:
        django_templates_instance = None

    if django_templates_instance:
        if (
            "django.contrib.messages.context_processors.messages"
            not in django_templates_instance.context_processors
            and "admin.E404" not in settings.SILENCED_SYSTEM_CHECKS
        ):
            errors.append(
                checks.Error(
                    "If using 'user_messages.context_processors.messages'"
                    " instead of the official messages context processor"
                    " you have to add 'admin.E404' to SILENCED_SYSTEM_CHECKS.",
                    id="user_messages.E001",
                )
            )

    if ("admin.E406" not in settings.SILENCED_SYSTEM_CHECKS and
            "django.contrib.messages" not in settings.INSTALLED_APPS):
        errors.append(
            checks.Error(
                "If using 'user_messages' instead of django.contrib.messages"
                " you have to add 'admin.E406' to SILENCED_SYSTEM_CHECKS.",
                id="user_messages.E002",
            )
        )

    return errors


class UserMessagesConfig(AppConfig):
    default_auto_field = "django.db.models.AutoField"
    name = "user_messages"
    verbose_name = capfirst(_("user messages"))
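
# Illustrative settings sketch (not part of this module): what a host project would
# typically configure when replacing django.contrib.messages with user_messages,
# per the two system-check errors registered above. The exact settings layout is an
# assumption about the host project, not code from this repository.
#
# INSTALLED_APPS = [..., "user_messages"]  # with "django.contrib.messages" removed
# SILENCED_SYSTEM_CHECKS = ["admin.E404", "admin.E406"]
# TEMPLATES[0]["OPTIONS"]["context_processors"] += [
#     "user_messages.context_processors.messages",
# ]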
| [((10, 1, 10, 18), 'django.core.checks.register', 'checks.register', ({}, {}), '()', False, 'from django.core import checks\n'), ((14, 18, 14, 31), 'django.template.engines.all', 'engines.all', ({}, {}), '()', False, 'from django.template import engines\n'), ((52, 28, 52, 46), 'django.utils.translation.gettext_lazy', '_', ({(52, 30, 52, 45): '"""user messages"""'}, {}), "('user messages')", True, 'from django.utils.translation import gettext_lazy as _\n'), ((39, 12, 43, 13), 'django.core.checks.Error', 'checks.Error', (), '', False, 'from django.core import checks\n'), ((28, 16, 33, 17), 'django.core.checks.Error', 'checks.Error', (), '', False, 'from django.core import checks\n')] |
sharshofski/evalml | evalml/tests/objective_tests/test_standard_metrics.py | f13dcd969e86b72ba01ca520247a16850030dcb0 | from itertools import product
import numpy as np
import pandas as pd
import pytest
from sklearn.metrics import matthews_corrcoef as sk_matthews_corrcoef
from evalml.objectives import (
    F1,
    MAPE,
    MSE,
    AccuracyBinary,
    AccuracyMulticlass,
    BalancedAccuracyBinary,
    BalancedAccuracyMulticlass,
    BinaryClassificationObjective,
    CostBenefitMatrix,
    ExpVariance,
    F1Macro,
    F1Micro,
    F1Weighted,
    LogLossBinary,
    MCCBinary,
    MCCMulticlass,
    MeanSquaredLogError,
    Precision,
    PrecisionMacro,
    PrecisionMicro,
    PrecisionWeighted,
    Recall,
    RecallMacro,
    RecallMicro,
    RecallWeighted,
    RootMeanSquaredError,
    RootMeanSquaredLogError
)
from evalml.objectives.utils import (
    _all_objectives_dict,
    get_non_core_objectives
)
EPS = 1e-5
all_automl_objectives = _all_objectives_dict()
all_automl_objectives = {name: class_() for name, class_ in all_automl_objectives.items() if class_ not in get_non_core_objectives()}

def test_input_contains_nan():
    y_predicted = np.array([np.nan, 0, 0])
    y_true = np.array([1, 2, 1])
    for objective in all_automl_objectives.values():
        with pytest.raises(ValueError, match="y_predicted contains NaN or infinity"):
            objective.score(y_true, y_predicted)

    y_true = np.array([np.nan, 0, 0])
    y_predicted = np.array([1, 2, 0])
    for objective in all_automl_objectives.values():
        with pytest.raises(ValueError, match="y_true contains NaN or infinity"):
            objective.score(y_true, y_predicted)

    y_true = np.array([1, 0])
    y_predicted_proba = np.array([[1, np.nan], [0.1, 0]])
    for objective in all_automl_objectives.values():
        if objective.score_needs_proba:
            with pytest.raises(ValueError, match="y_predicted contains NaN or infinity"):
                objective.score(y_true, y_predicted_proba)


def test_input_contains_inf():
    y_predicted = np.array([np.inf, 0, 0])
    y_true = np.array([1, 0, 0])
    for objective in all_automl_objectives.values():
        with pytest.raises(ValueError, match="y_predicted contains NaN or infinity"):
            objective.score(y_true, y_predicted)

    y_true = np.array([np.inf, 0, 0])
    y_predicted = np.array([1, 0, 0])
    for objective in all_automl_objectives.values():
        with pytest.raises(ValueError, match="y_true contains NaN or infinity"):
            objective.score(y_true, y_predicted)

    y_true = np.array([1, 0])
    y_predicted_proba = np.array([[1, np.inf], [0.1, 0]])
    for objective in all_automl_objectives.values():
        if objective.score_needs_proba:
            with pytest.raises(ValueError, match="y_predicted contains NaN or infinity"):
                objective.score(y_true, y_predicted_proba)


def test_different_input_lengths():
    y_predicted = np.array([0, 0])
    y_true = np.array([1])
    for objective in all_automl_objectives.values():
        with pytest.raises(ValueError, match="Inputs have mismatched dimensions"):
            objective.score(y_true, y_predicted)

    y_true = np.array([0, 0])
    y_predicted = np.array([1, 2, 0])
    for objective in all_automl_objectives.values():
        with pytest.raises(ValueError, match="Inputs have mismatched dimensions"):
            objective.score(y_true, y_predicted)


def test_zero_input_lengths():
    y_predicted = np.array([])
    y_true = np.array([])
    for objective in all_automl_objectives.values():
        with pytest.raises(ValueError, match="Length of inputs is 0"):
            objective.score(y_true, y_predicted)


def test_probabilities_not_in_0_1_range():
    y_predicted = np.array([0.3, 1.001, 0.3])
    y_true = np.array([1, 0, 1])
    for objective in all_automl_objectives.values():
        if objective.score_needs_proba:
            with pytest.raises(ValueError, match="y_predicted contains probability estimates"):
                objective.score(y_true, y_predicted)

    y_predicted = np.array([0.3, -0.001, 0.3])
    y_true = np.array([1, 0, 1])
    for objective in all_automl_objectives.values():
        if objective.score_needs_proba:
            with pytest.raises(ValueError, match="y_predicted contains probability estimates"):
                objective.score(y_true, y_predicted)

    y_true = np.array([1, 0])
    y_predicted_proba = np.array([[1, 3], [0.1, 0]])
    for objective in all_automl_objectives.values():
        if objective.score_needs_proba:
            with pytest.raises(ValueError, match="y_predicted contains probability estimates"):
                objective.score(y_true, y_predicted_proba)

def test_negative_with_log():
    y_predicted = np.array([-1, 10, 30])
    y_true = np.array([-1, 0, 1])
    for objective in [MeanSquaredLogError(), RootMeanSquaredLogError()]:
        with pytest.raises(ValueError, match="Mean Squared Logarithmic Error cannot be used when targets contain negative values."):
            objective.score(y_true, y_predicted)


def test_binary_more_than_two_unique_values():
    y_predicted = np.array([0, 1, 2])
    y_true = np.array([1, 0, 1])
    for objective in all_automl_objectives.values():
        if isinstance(objective, BinaryClassificationObjective) and not objective.score_needs_proba:
            with pytest.raises(ValueError, match="y_predicted contains more than two unique values"):
                objective.score(y_true, y_predicted)

    y_true = np.array([0, 1, 2])
    y_predicted = np.array([1, 0, 1])
    for objective in all_automl_objectives.values():
        if isinstance(objective, BinaryClassificationObjective) and not objective.score_needs_proba:
            with pytest.raises(ValueError, match="y_true contains more than two unique values"):
                objective.score(y_true, y_predicted)


def test_accuracy_binary():
    obj = AccuracyBinary()
    assert obj.score(np.array([0, 0, 1, 1]),
                     np.array([1, 1, 0, 0])) == pytest.approx(0.0, EPS)
    assert obj.score(np.array([0, 0, 1, 1]),
                     np.array([0, 1, 0, 1])) == pytest.approx(0.5, EPS)
    assert obj.score(np.array([0, 0, 1, 1]),
                     np.array([0, 0, 1, 1])) == pytest.approx(1.0, EPS)


def test_accuracy_multi():
    obj = AccuracyMulticlass()
    assert obj.score(np.array([0, 0, 1, 1]),
                     np.array([1, 1, 0, 0])) == pytest.approx(0.0, EPS)
    assert obj.score(np.array([0, 0, 1, 1]),
                     np.array([0, 1, 0, 1])) == pytest.approx(0.5, EPS)
    assert obj.score(np.array([0, 0, 1, 1]),
                     np.array([0, 0, 1, 1])) == pytest.approx(1.0, EPS)
    assert obj.score(np.array([0, 0, 1, 1, 2, 2]),
                     np.array([0, 0, 0, 0, 0, 0])) == pytest.approx(1 / 3.0, EPS)
    assert obj.score(np.array([0, 0, 0, 0, 0, 0]),
                     np.array([0, 0, 1, 1, 2, 2])) == pytest.approx(1 / 3.0, EPS)


def test_balanced_accuracy_binary():
    obj = BalancedAccuracyBinary()
    assert obj.score(np.array([0, 1, 0, 0, 1, 0]),
                     np.array([0, 1, 0, 0, 0, 1])) == pytest.approx(0.625, EPS)
    assert obj.score(np.array([0, 1, 0, 0, 1, 0]),
                     np.array([0, 1, 0, 0, 1, 0])) == pytest.approx(1.0, EPS)
    assert obj.score(np.array([0, 1, 0, 0, 1, 0]),
                     np.array([1, 0, 1, 1, 0, 1])) == pytest.approx(0.0, EPS)


def test_balanced_accuracy_multi():
    obj = BalancedAccuracyMulticlass()
    assert obj.score(np.array([0, 1, 2, 0, 1, 2, 3]),
                     np.array([0, 0, 2, 0, 0, 2, 3])) == pytest.approx(0.75, EPS)
    assert obj.score(np.array([0, 1, 2, 0, 1, 2, 3]),
                     np.array([0, 1, 2, 0, 1, 2, 3])) == pytest.approx(1.0, EPS)
    assert obj.score(np.array([0, 1, 2, 0, 1, 2, 3]),
                     np.array([1, 0, 3, 1, 2, 1, 0])) == pytest.approx(0.0, EPS)


def test_f1_binary():
    obj = F1()
    assert obj.score(np.array([0, 1, 0, 0, 1, 0]),
                     np.array([0, 1, 0, 0, 0, 1])) == pytest.approx(0.5, EPS)
    assert obj.score(np.array([0, 1, 0, 0, 1, 1]),
                     np.array([0, 1, 0, 0, 1, 1])) == pytest.approx(1.0, EPS)
    assert obj.score(np.array([0, 0, 0, 0, 1, 0]),
                     np.array([0, 1, 0, 0, 0, 1])) == pytest.approx(0.0, EPS)
    assert obj.score(np.array([0, 0]),
                     np.array([0, 0])) == pytest.approx(0.0, EPS)

def test_f1_micro_multi():
    obj = F1Micro()
    assert obj.score(np.array([0, 0, 0, 0, 0, 0, 0, 0, 0]),
                     np.array([0, 0, 0, 1, 1, 1, 2, 2, 2])) == pytest.approx(1 / 3.0, EPS)
    assert obj.score(np.array([0, 0, 0, 1, 1, 1, 2, 2, 2]),
                     np.array([0, 0, 0, 1, 1, 1, 2, 2, 2])) == pytest.approx(1.0, EPS)
    assert obj.score(np.array([2, 2, 2, 0, 0, 0, 1, 1, 1]),
                     np.array([0, 0, 0, 1, 1, 1, 2, 2, 2])) == pytest.approx(0.0, EPS)
    assert obj.score(np.array([1, 2]),
                     np.array([0, 0])) == pytest.approx(0.0, EPS)


def test_f1_macro_multi():
    obj = F1Macro()
    assert obj.score(np.array([0, 0, 0, 0, 0, 0, 0, 0, 0]),
                     np.array([0, 0, 0, 1, 1, 1, 2, 2, 2])) \
        == pytest.approx(2 * (1 / 3.0) * (1 / 9.0) / (1 / 3.0 + 1 / 9.0), EPS)
    assert obj.score(np.array([0, 0, 0, 1, 1, 1, 2, 2, 2]),
                     np.array([0, 0, 0, 1, 1, 1, 2, 2, 2])) == pytest.approx(1.0, EPS)
    assert obj.score(np.array([2, 2, 2, 0, 0, 0, 1, 1, 1]),
                     np.array([0, 0, 0, 1, 1, 1, 2, 2, 2])) == pytest.approx(0.0, EPS)
    assert obj.score(np.array([1, 2]),
                     np.array([0, 0])) == pytest.approx(0.0, EPS)


def test_f1_weighted_multi():
    obj = F1Weighted()
    assert obj.score(np.array([0, 0, 0, 1, 1, 1, 2, 2, 2]),
                     np.array([0, 0, 0, 0, 0, 0, 0, 0, 0])) \
        == pytest.approx(2 * (1 / 3.0) * (1 / 9.0) / (1 / 3.0 + 1 / 9.0), EPS)
    assert obj.score(np.array([0, 0, 0, 1, 1, 1, 2, 2, 2]),
                     np.array([0, 0, 0, 1, 1, 1, 2, 2, 2])) == pytest.approx(1.0, EPS)
    assert obj.score(np.array([0, 0, 0, 1, 1, 1, 2, 2, 2]),
                     np.array([2, 2, 2, 0, 0, 0, 1, 1, 1])) == pytest.approx(0.0, EPS)
    assert obj.score(np.array([0, 0]),
                     np.array([1, 2])) == pytest.approx(0.0, EPS)


def test_precision_binary():
    obj = Precision()
    assert obj.score(np.array([1, 1, 1, 1, 1, 1]),
                     np.array([0, 0, 0, 1, 1, 1])) == pytest.approx(1.0, EPS)
    assert obj.score(np.array([0, 0, 0, 1, 1, 1]),
                     np.array([1, 1, 1, 1, 1, 1])) == pytest.approx(0.5, EPS)
    assert obj.score(np.array([0, 0, 0, 0, 0, 0]),
                     np.array([1, 1, 1, 1, 1, 1])) == pytest.approx(0.0, EPS)
    assert obj.score(np.array([0, 0, 0, 0, 0, 0]),
                     np.array([0, 0, 0, 0, 0, 0])) == pytest.approx(0.0, EPS)


def test_precision_micro_multi():
    obj = PrecisionMicro()
    assert obj.score(np.array([0, 0, 0, 1, 1, 1, 2, 2, 2]),
                     np.array([0, 0, 0, 0, 0, 0, 0, 0, 0])) == pytest.approx(1 / 3.0, EPS)
    assert obj.score(np.array([0, 0, 0, 1, 1, 1, 2, 2, 2]),
                     np.array([0, 0, 0, 1, 1, 1, 2, 2, 2])) == pytest.approx(1.0, EPS)
    assert obj.score(np.array([0, 0, 0, 1, 1, 1, 2, 2, 2]),
                     np.array([2, 2, 2, 0, 0, 0, 1, 1, 1])) == pytest.approx(0.0, EPS)
    assert obj.score(np.array([0, 0]),
                     np.array([1, 2])) == pytest.approx(0.0, EPS)


def test_precision_macro_multi():
    obj = PrecisionMacro()
    assert obj.score(np.array([0, 0, 0, 1, 1, 1, 2, 2, 2]),
                     np.array([0, 0, 0, 0, 0, 0, 0, 0, 0])) == pytest.approx(1 / 9.0, EPS)
    assert obj.score(np.array([0, 0, 0, 1, 1, 1, 2, 2, 2]),
                     np.array([0, 0, 0, 1, 1, 1, 2, 2, 2])) == pytest.approx(1.0, EPS)
    assert obj.score(np.array([0, 0, 0, 1, 1, 1, 2, 2, 2]),
                     np.array([2, 2, 2, 0, 0, 0, 1, 1, 1])) == pytest.approx(0.0, EPS)
    assert obj.score(np.array([0, 0]),
                     np.array([1, 2])) == pytest.approx(0.0, EPS)


def test_precision_weighted_multi():
    obj = PrecisionWeighted()
    assert obj.score(np.array([0, 0, 0, 1, 1, 1, 2, 2, 2]),
                     np.array([0, 0, 0, 0, 0, 0, 0, 0, 0])) == pytest.approx(1 / 9.0, EPS)
    assert obj.score(np.array([0, 0, 0, 1, 1, 1, 2, 2, 2]),
                     np.array([0, 0, 0, 1, 1, 1, 2, 2, 2])) == pytest.approx(1.0, EPS)
    assert obj.score(np.array([0, 0, 0, 1, 1, 1, 2, 2, 2]),
                     np.array([2, 2, 2, 0, 0, 0, 1, 1, 1])) == pytest.approx(0.0, EPS)
    assert obj.score(np.array([0, 0]),
                     np.array([1, 2])) == pytest.approx(0.0, EPS)

def test_recall_binary():
    obj = Recall()
    assert obj.score(np.array([0, 0, 0, 1, 1, 1]),
                     np.array([1, 1, 1, 1, 1, 1])) == pytest.approx(1.0, EPS)
    assert obj.score(np.array([0, 0, 0, 1, 1, 1]),
                     np.array([0, 0, 0, 0, 0, 0])) == pytest.approx(0.0, EPS)
    assert obj.score(np.array([1, 1, 1, 1, 1, 1]),
                     np.array([0, 0, 0, 1, 1, 1])) == pytest.approx(0.5, EPS)


def test_recall_micro_multi():
    obj = RecallMicro()
    assert obj.score(np.array([0, 0, 0, 1, 1, 1, 2, 2, 2]),
                     np.array([0, 0, 0, 0, 0, 0, 0, 0, 0])) == pytest.approx(1 / 3.0, EPS)
    assert obj.score(np.array([0, 0, 0, 1, 1, 1, 2, 2, 2]),
                     np.array([0, 0, 0, 1, 1, 1, 2, 2, 2])) == pytest.approx(1.0, EPS)
    assert obj.score(np.array([0, 0, 0, 1, 1, 1, 2, 2, 2]),
                     np.array([2, 2, 2, 0, 0, 0, 1, 1, 1])) == pytest.approx(0.0, EPS)
    assert obj.score(np.array([0, 0]),
                     np.array([1, 2])) == pytest.approx(0.0, EPS)


def test_recall_macro_multi():
    obj = RecallMacro()
    assert obj.score(np.array([0, 0, 0, 1, 1, 1, 2, 2, 2]),
                     np.array([0, 0, 0, 0, 0, 0, 0, 0, 0])) == pytest.approx(1 / 3.0, EPS)
    assert obj.score(np.array([0, 0, 0, 1, 1, 1, 2, 2, 2]),
                     np.array([0, 0, 0, 1, 1, 1, 2, 2, 2])) == pytest.approx(1.0, EPS)
    assert obj.score(np.array([0, 0, 0, 1, 1, 1, 2, 2, 2]),
                     np.array([2, 2, 2, 0, 0, 0, 1, 1, 1])) == pytest.approx(0.0, EPS)
    assert obj.score(np.array([0, 0]),
                     np.array([1, 2])) == pytest.approx(0.0, EPS)


def test_recall_weighted_multi():
    obj = RecallWeighted()
    assert obj.score(np.array([0, 0, 0, 1, 1, 1, 2, 2, 2]),
                     np.array([0, 0, 0, 0, 0, 0, 0, 0, 0])) == pytest.approx(1 / 3.0, EPS)
    assert obj.score(np.array([0, 0, 0, 1, 1, 1, 2, 2, 2]),
                     np.array([0, 0, 0, 1, 1, 1, 2, 2, 2])) == pytest.approx(1.0, EPS)
    assert obj.score(np.array([0, 0, 0, 1, 1, 1, 2, 2, 2]),
                     np.array([2, 2, 2, 0, 0, 0, 1, 1, 1])) == pytest.approx(0.0, EPS)
    assert obj.score(np.array([0, 0]),
                     np.array([1, 2])) == pytest.approx(0.0, EPS)


def test_log_linear_model():
    obj = MeanSquaredLogError()
    root_obj = RootMeanSquaredLogError()

    s1_predicted = np.array([0, 0, 0, 1, 1, 1, 2, 2, 2])
    s1_actual = np.array([0, 0, 0, 0, 0, 0, 0, 0, 0])

    s2_predicted = np.array([0, 0, 0, 1, 1, 1, 2, 2, 2])
    s2_actual = np.array([0, 0, 0, 1, 1, 1, 2, 2, 2])

    s3_predicted = np.array([0, 0, 0, 1, 1, 1, 2, 2, 2])
    s3_actual = np.array([2, 2, 2, 0, 0, 0, 1, 1, 1])

    assert obj.score(s1_predicted, s1_actual) == pytest.approx(0.562467324910)
    assert obj.score(s2_predicted, s2_actual) == pytest.approx(0)
    assert obj.score(s3_predicted, s3_actual) == pytest.approx(0.617267976207983)

    assert root_obj.score(s1_predicted, s1_actual) == pytest.approx(np.sqrt(0.562467324910))
    assert root_obj.score(s2_predicted, s2_actual) == pytest.approx(0)
    assert root_obj.score(s3_predicted, s3_actual) == pytest.approx(np.sqrt(0.617267976207983))


def test_mse_linear_model():
    obj = MSE()
    root_obj = RootMeanSquaredError()

    s1_predicted = np.array([0, 0, 0, 1, 1, 1, 2, 2, 2])
    s1_actual = np.array([0, 0, 0, 0, 0, 0, 0, 0, 0])

    s2_predicted = np.array([0, 0, 0, 1, 1, 1, 2, 2, 2])
    s2_actual = np.array([0, 0, 0, 1, 1, 1, 2, 2, 2])

    s3_predicted = np.array([0, 0, 0, 1, 1, 1, 2, 2, 2])
    s3_actual = np.array([2, 2, 2, 0, 0, 0, 1, 1, 1])

    assert obj.score(s1_predicted, s1_actual) == pytest.approx(5. / 3.)
    assert obj.score(s2_predicted, s2_actual) == pytest.approx(0)
    assert obj.score(s3_predicted, s3_actual) == pytest.approx(2.)

    assert root_obj.score(s1_predicted, s1_actual) == pytest.approx(np.sqrt(5. / 3.))
    assert root_obj.score(s2_predicted, s2_actual) == pytest.approx(0)
    assert root_obj.score(s3_predicted, s3_actual) == pytest.approx(np.sqrt(2.))

def test_mcc_catches_warnings():
    y_true = [1, 0, 1, 1]
    y_predicted = [0, 0, 0, 0]
    with pytest.warns(RuntimeWarning) as record:
        sk_matthews_corrcoef(y_true, y_predicted)
        assert "invalid value" in str(record[-1].message)
    with pytest.warns(None) as record:
        MCCBinary().objective_function(y_true, y_predicted)
        MCCMulticlass().objective_function(y_true, y_predicted)
        assert len(record) == 0


def test_mape_time_series_model():
    obj = MAPE()

    s1_actual = np.array([0, 0, 1, 1, 1, 1, 2, 0, 2])
    s1_predicted = np.array([0, 1, 0, 1, 1, 2, 1, 2, 0])

    s2_actual = np.array([-1, -2, 1, 3])
    s2_predicted = np.array([1, 2, -1, -3])

    s3_actual = np.array([1, 2, 4, 2, 1, 2])
    s3_predicted = np.array([0, 2, 2, 1, 3, 2])

    with pytest.raises(ValueError, match="Mean Absolute Percentage Error cannot be used when targets contain the value 0."):
        obj.score(s1_actual, s1_predicted)

    assert obj.score(s2_actual, s2_predicted) == pytest.approx(8 / 4 * 100)
    assert obj.score(s3_actual, s3_predicted) == pytest.approx(4 / 6 * 100)
    assert obj.score(pd.Series(s3_actual, index=range(-12, -6)), s3_predicted) == pytest.approx(4 / 6 * 100)
    assert obj.score(pd.Series(s2_actual, index=range(10, 14)),
                     pd.Series(s2_predicted, index=range(20, 24))) == pytest.approx(8 / 4 * 100)
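
# Worked example (a sketch, not part of the original suite) of the expected value
# asserted above for s2: MAPE here is the mean of |actual - predicted| / |actual|,
# times 100. With s2_actual = [-1, -2, 1, 3] and s2_predicted = [1, 2, -1, -3] every
# per-element ratio is 2, so the score is (8 / 4) * 100 == 200.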

@pytest.mark.parametrize("objective_class", _all_objectives_dict().values())
def test_calculate_percent_difference(objective_class):
    score = 5
    reference_score = 10

    change = ((-1) ** (not objective_class.greater_is_better) * (score - reference_score)) / reference_score
    answer = 100 * change

    assert objective_class.calculate_percent_difference(score, reference_score) == answer
    assert objective_class.perfect_score is not None
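
# Worked example (a sketch, not part of the original suite) of the formula above:
# for an objective where lower is better, e.g. LogLossBinary, greater_is_better is
# False, so the sign flips and improving from a baseline score of 10 to a score of 5
# gives ((-1) ** 1) * (5 - 10) / 10 * 100 == 50, i.e. a 50% improvement.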

@pytest.mark.parametrize("objective_class,nan_value", product(_all_objectives_dict().values(), [None, np.nan]))
def test_calculate_percent_difference_with_nan(objective_class, nan_value):
    assert pd.isna(objective_class.calculate_percent_difference(nan_value, 2))
    assert pd.isna(objective_class.calculate_percent_difference(-1, nan_value))
    assert pd.isna(objective_class.calculate_percent_difference(nan_value, nan_value))
    assert pd.isna(objective_class.calculate_percent_difference(2, 0))


def test_calculate_percent_difference_negative_and_equal_numbers():
    assert CostBenefitMatrix.calculate_percent_difference(score=5, baseline_score=5) == 0

    assert CostBenefitMatrix.calculate_percent_difference(score=-5, baseline_score=-10) == 50
    assert CostBenefitMatrix.calculate_percent_difference(score=-10, baseline_score=-5) == -100
    assert CostBenefitMatrix.calculate_percent_difference(score=-5, baseline_score=10) == -150
    assert CostBenefitMatrix.calculate_percent_difference(score=10, baseline_score=-5) == 300

    # These values are not possible for LogLossBinary but we need them for 100% coverage
    # We might add an objective where lower is better that can take negative values in the future
    assert LogLossBinary.calculate_percent_difference(score=-5, baseline_score=-10) == -50
    assert LogLossBinary.calculate_percent_difference(score=-10, baseline_score=-5) == 100
    assert LogLossBinary.calculate_percent_difference(score=-5, baseline_score=10) == 150
    assert LogLossBinary.calculate_percent_difference(score=10, baseline_score=-5) == -300


def test_calculate_percent_difference_small():
    expected_value = 100 * -1 * np.abs(1e-9 / (1e-9))
    assert np.isclose(ExpVariance.calculate_percent_difference(score=0, baseline_score=1e-9), expected_value, atol=1e-8)
    assert pd.isna(ExpVariance.calculate_percent_difference(score=0, baseline_score=1e-10))
    assert pd.isna(ExpVariance.calculate_percent_difference(score=1e-9, baseline_score=0))
    assert pd.isna(ExpVariance.calculate_percent_difference(score=0, baseline_score=0))
| [((43, 24, 43, 46), 'evalml.objectives.utils._all_objectives_dict', '_all_objectives_dict', ({}, {}), '()', False, 'from evalml.objectives.utils import _all_objectives_dict, get_non_core_objectives\n'), ((48, 18, 48, 42), 'numpy.array', 'np.array', ({(48, 27, 48, 41): '[np.nan, 0, 0]'}, {}), '([np.nan, 0, 0])', True, 'import numpy as np\n'), ((49, 13, 49, 32), 'numpy.array', 'np.array', ({(49, 22, 49, 31): '[1, 2, 1]'}, {}), '([1, 2, 1])', True, 'import numpy as np\n'), ((54, 13, 54, 37), 'numpy.array', 'np.array', ({(54, 22, 54, 36): '[np.nan, 0, 0]'}, {}), '([np.nan, 0, 0])', True, 'import numpy as np\n'), ((55, 18, 55, 37), 'numpy.array', 'np.array', ({(55, 27, 55, 36): '[1, 2, 0]'}, {}), '([1, 2, 0])', True, 'import numpy as np\n'), ((60, 13, 60, 29), 'numpy.array', 'np.array', ({(60, 22, 60, 28): '[1, 0]'}, {}), '([1, 0])', True, 'import numpy as np\n'), ((61, 24, 61, 57), 'numpy.array', 'np.array', ({(61, 33, 61, 56): '[[1, np.nan], [0.1, 0]]'}, {}), '([[1, np.nan], [0.1, 0]])', True, 'import numpy as np\n'), ((69, 18, 69, 42), 'numpy.array', 'np.array', ({(69, 27, 69, 41): '[np.inf, 0, 0]'}, {}), '([np.inf, 0, 0])', True, 'import numpy as np\n'), ((70, 13, 70, 32), 'numpy.array', 'np.array', ({(70, 22, 70, 31): '[1, 0, 0]'}, {}), '([1, 0, 0])', True, 'import numpy as np\n'), ((75, 13, 75, 37), 'numpy.array', 'np.array', ({(75, 22, 75, 36): '[np.inf, 0, 0]'}, {}), '([np.inf, 0, 0])', True, 'import numpy as np\n'), ((76, 18, 76, 37), 'numpy.array', 'np.array', ({(76, 27, 76, 36): '[1, 0, 0]'}, {}), '([1, 0, 0])', True, 'import numpy as np\n'), ((81, 13, 81, 29), 'numpy.array', 'np.array', ({(81, 22, 81, 28): '[1, 0]'}, {}), '([1, 0])', True, 'import numpy as np\n'), ((82, 24, 82, 57), 'numpy.array', 'np.array', ({(82, 33, 82, 56): '[[1, np.inf], [0.1, 0]]'}, {}), '([[1, np.inf], [0.1, 0]])', True, 'import numpy as np\n'), ((90, 18, 90, 34), 'numpy.array', 'np.array', ({(90, 27, 90, 33): '[0, 0]'}, {}), '([0, 0])', True, 'import numpy as np\n'), ((91, 13, 91, 26), 'numpy.array', 'np.array', ({(91, 22, 91, 25): '[1]'}, {}), '([1])', True, 'import numpy as np\n'), ((96, 13, 96, 29), 'numpy.array', 'np.array', ({(96, 22, 96, 28): '[0, 0]'}, {}), '([0, 0])', True, 'import numpy as np\n'), ((97, 18, 97, 37), 'numpy.array', 'np.array', ({(97, 27, 97, 36): '[1, 2, 0]'}, {}), '([1, 2, 0])', True, 'import numpy as np\n'), ((104, 18, 104, 30), 'numpy.array', 'np.array', ({(104, 27, 104, 29): '[]'}, {}), '([])', True, 'import numpy as np\n'), ((105, 13, 105, 25), 'numpy.array', 'np.array', ({(105, 22, 105, 24): '[]'}, {}), '([])', True, 'import numpy as np\n'), ((112, 18, 112, 45), 'numpy.array', 'np.array', ({(112, 27, 112, 44): '[0.3, 1.001, 0.3]'}, {}), '([0.3, 1.001, 0.3])', True, 'import numpy as np\n'), ((113, 13, 113, 32), 'numpy.array', 'np.array', ({(113, 22, 113, 31): '[1, 0, 1]'}, {}), '([1, 0, 1])', True, 'import numpy as np\n'), ((119, 18, 119, 46), 'numpy.array', 'np.array', ({(119, 27, 119, 45): '[0.3, -0.001, 0.3]'}, {}), '([0.3, -0.001, 0.3])', True, 'import numpy as np\n'), ((120, 13, 120, 32), 'numpy.array', 'np.array', ({(120, 22, 120, 31): '[1, 0, 1]'}, {}), '([1, 0, 1])', True, 'import numpy as np\n'), ((126, 13, 126, 29), 'numpy.array', 'np.array', ({(126, 22, 126, 28): '[1, 0]'}, {}), '([1, 0])', True, 'import numpy as np\n'), ((127, 24, 127, 52), 'numpy.array', 'np.array', ({(127, 33, 127, 51): '[[1, 3], [0.1, 0]]'}, {}), '([[1, 3], [0.1, 0]])', True, 'import numpy as np\n'), ((135, 18, 135, 40), 'numpy.array', 'np.array', ({(135, 27, 135, 39): '[-1, 10, 30]'}, {}), 
'([-1, 10, 30])', True, 'import numpy as np\n'), ((136, 13, 136, 33), 'numpy.array', 'np.array', ({(136, 22, 136, 32): '[-1, 0, 1]'}, {}), '([-1, 0, 1])', True, 'import numpy as np\n'), ((143, 18, 143, 37), 'numpy.array', 'np.array', ({(143, 27, 143, 36): '[0, 1, 2]'}, {}), '([0, 1, 2])', True, 'import numpy as np\n'), ((144, 13, 144, 32), 'numpy.array', 'np.array', ({(144, 22, 144, 31): '[1, 0, 1]'}, {}), '([1, 0, 1])', True, 'import numpy as np\n'), ((150, 13, 150, 32), 'numpy.array', 'np.array', ({(150, 22, 150, 31): '[0, 1, 2]'}, {}), '([0, 1, 2])', True, 'import numpy as np\n'), ((151, 18, 151, 37), 'numpy.array', 'np.array', ({(151, 27, 151, 36): '[1, 0, 1]'}, {}), '([1, 0, 1])', True, 'import numpy as np\n'), ((159, 10, 159, 26), 'evalml.objectives.AccuracyBinary', 'AccuracyBinary', ({}, {}), '()', False, 'from evalml.objectives import F1, MAPE, MSE, AccuracyBinary, AccuracyMulticlass, BalancedAccuracyBinary, BalancedAccuracyMulticlass, BinaryClassificationObjective, CostBenefitMatrix, ExpVariance, F1Macro, F1Micro, F1Weighted, LogLossBinary, MCCBinary, MCCMulticlass, MeanSquaredLogError, Precision, PrecisionMacro, PrecisionMicro, PrecisionWeighted, Recall, RecallMacro, RecallMicro, RecallWeighted, RootMeanSquaredError, RootMeanSquaredLogError\n'), ((169, 10, 169, 30), 'evalml.objectives.AccuracyMulticlass', 'AccuracyMulticlass', ({}, {}), '()', False, 'from evalml.objectives import F1, MAPE, MSE, AccuracyBinary, AccuracyMulticlass, BalancedAccuracyBinary, BalancedAccuracyMulticlass, BinaryClassificationObjective, CostBenefitMatrix, ExpVariance, F1Macro, F1Micro, F1Weighted, LogLossBinary, MCCBinary, MCCMulticlass, MeanSquaredLogError, Precision, PrecisionMacro, PrecisionMicro, PrecisionWeighted, Recall, RecallMacro, RecallMicro, RecallWeighted, RootMeanSquaredError, RootMeanSquaredLogError\n'), ((183, 10, 183, 34), 'evalml.objectives.BalancedAccuracyBinary', 'BalancedAccuracyBinary', ({}, {}), '()', False, 'from evalml.objectives import F1, MAPE, MSE, AccuracyBinary, AccuracyMulticlass, BalancedAccuracyBinary, BalancedAccuracyMulticlass, BinaryClassificationObjective, CostBenefitMatrix, ExpVariance, F1Macro, F1Micro, F1Weighted, LogLossBinary, MCCBinary, MCCMulticlass, MeanSquaredLogError, Precision, PrecisionMacro, PrecisionMicro, PrecisionWeighted, Recall, RecallMacro, RecallMicro, RecallWeighted, RootMeanSquaredError, RootMeanSquaredLogError\n'), ((195, 10, 195, 38), 'evalml.objectives.BalancedAccuracyMulticlass', 'BalancedAccuracyMulticlass', ({}, {}), '()', False, 'from evalml.objectives import F1, MAPE, MSE, AccuracyBinary, AccuracyMulticlass, BalancedAccuracyBinary, BalancedAccuracyMulticlass, BinaryClassificationObjective, CostBenefitMatrix, ExpVariance, F1Macro, F1Micro, F1Weighted, LogLossBinary, MCCBinary, MCCMulticlass, MeanSquaredLogError, Precision, PrecisionMacro, PrecisionMicro, PrecisionWeighted, Recall, RecallMacro, RecallMicro, RecallWeighted, RootMeanSquaredError, RootMeanSquaredLogError\n'), ((207, 10, 207, 14), 'evalml.objectives.F1', 'F1', ({}, {}), '()', False, 'from evalml.objectives import F1, MAPE, MSE, AccuracyBinary, AccuracyMulticlass, BalancedAccuracyBinary, BalancedAccuracyMulticlass, BinaryClassificationObjective, CostBenefitMatrix, ExpVariance, F1Macro, F1Micro, F1Weighted, LogLossBinary, MCCBinary, MCCMulticlass, MeanSquaredLogError, Precision, PrecisionMacro, PrecisionMicro, PrecisionWeighted, Recall, RecallMacro, RecallMicro, RecallWeighted, RootMeanSquaredError, RootMeanSquaredLogError\n'), ((222, 10, 222, 19), 'evalml.objectives.F1Micro', 
'F1Micro', ({}, {}), '()', False, 'from evalml.objectives import F1, MAPE, MSE, AccuracyBinary, AccuracyMulticlass, BalancedAccuracyBinary, BalancedAccuracyMulticlass, BinaryClassificationObjective, CostBenefitMatrix, ExpVariance, F1Macro, F1Micro, F1Weighted, LogLossBinary, MCCBinary, MCCMulticlass, MeanSquaredLogError, Precision, PrecisionMacro, PrecisionMicro, PrecisionWeighted, Recall, RecallMacro, RecallMicro, RecallWeighted, RootMeanSquaredError, RootMeanSquaredLogError\n'), ((237, 10, 237, 19), 'evalml.objectives.F1Macro', 'F1Macro', ({}, {}), '()', False, 'from evalml.objectives import F1, MAPE, MSE, AccuracyBinary, AccuracyMulticlass, BalancedAccuracyBinary, BalancedAccuracyMulticlass, BinaryClassificationObjective, CostBenefitMatrix, ExpVariance, F1Macro, F1Micro, F1Weighted, LogLossBinary, MCCBinary, MCCMulticlass, MeanSquaredLogError, Precision, PrecisionMacro, PrecisionMicro, PrecisionWeighted, Recall, RecallMacro, RecallMicro, RecallWeighted, RootMeanSquaredError, RootMeanSquaredLogError\n'), ((253, 10, 253, 22), 'evalml.objectives.F1Weighted', 'F1Weighted', ({}, {}), '()', False, 'from evalml.objectives import F1, MAPE, MSE, AccuracyBinary, AccuracyMulticlass, BalancedAccuracyBinary, BalancedAccuracyMulticlass, BinaryClassificationObjective, CostBenefitMatrix, ExpVariance, F1Macro, F1Micro, F1Weighted, LogLossBinary, MCCBinary, MCCMulticlass, MeanSquaredLogError, Precision, PrecisionMacro, PrecisionMicro, PrecisionWeighted, Recall, RecallMacro, RecallMicro, RecallWeighted, RootMeanSquaredError, RootMeanSquaredLogError\n'), ((269, 10, 269, 21), 'evalml.objectives.Precision', 'Precision', ({}, {}), '()', False, 'from evalml.objectives import F1, MAPE, MSE, AccuracyBinary, AccuracyMulticlass, BalancedAccuracyBinary, BalancedAccuracyMulticlass, BinaryClassificationObjective, CostBenefitMatrix, ExpVariance, F1Macro, F1Micro, F1Weighted, LogLossBinary, MCCBinary, MCCMulticlass, MeanSquaredLogError, Precision, PrecisionMacro, PrecisionMicro, PrecisionWeighted, Recall, RecallMacro, RecallMicro, RecallWeighted, RootMeanSquaredError, RootMeanSquaredLogError\n'), ((284, 10, 284, 26), 'evalml.objectives.PrecisionMicro', 'PrecisionMicro', ({}, {}), '()', False, 'from evalml.objectives import F1, MAPE, MSE, AccuracyBinary, AccuracyMulticlass, BalancedAccuracyBinary, BalancedAccuracyMulticlass, BinaryClassificationObjective, CostBenefitMatrix, ExpVariance, F1Macro, F1Micro, F1Weighted, LogLossBinary, MCCBinary, MCCMulticlass, MeanSquaredLogError, Precision, PrecisionMacro, PrecisionMicro, PrecisionWeighted, Recall, RecallMacro, RecallMicro, RecallWeighted, RootMeanSquaredError, RootMeanSquaredLogError\n'), ((299, 10, 299, 26), 'evalml.objectives.PrecisionMacro', 'PrecisionMacro', ({}, {}), '()', False, 'from evalml.objectives import F1, MAPE, MSE, AccuracyBinary, AccuracyMulticlass, BalancedAccuracyBinary, BalancedAccuracyMulticlass, BinaryClassificationObjective, CostBenefitMatrix, ExpVariance, F1Macro, F1Micro, F1Weighted, LogLossBinary, MCCBinary, MCCMulticlass, MeanSquaredLogError, Precision, PrecisionMacro, PrecisionMicro, PrecisionWeighted, Recall, RecallMacro, RecallMicro, RecallWeighted, RootMeanSquaredError, RootMeanSquaredLogError\n'), ((314, 10, 314, 29), 'evalml.objectives.PrecisionWeighted', 'PrecisionWeighted', ({}, {}), '()', False, 'from evalml.objectives import F1, MAPE, MSE, AccuracyBinary, AccuracyMulticlass, BalancedAccuracyBinary, BalancedAccuracyMulticlass, BinaryClassificationObjective, CostBenefitMatrix, ExpVariance, F1Macro, F1Micro, F1Weighted, LogLossBinary, 
MCCBinary, MCCMulticlass, MeanSquaredLogError, Precision, PrecisionMacro, PrecisionMicro, PrecisionWeighted, Recall, RecallMacro, RecallMicro, RecallWeighted, RootMeanSquaredError, RootMeanSquaredLogError\n'), ((329, 10, 329, 18), 'evalml.objectives.Recall', 'Recall', ({}, {}), '()', False, 'from evalml.objectives import F1, MAPE, MSE, AccuracyBinary, AccuracyMulticlass, BalancedAccuracyBinary, BalancedAccuracyMulticlass, BinaryClassificationObjective, CostBenefitMatrix, ExpVariance, F1Macro, F1Micro, F1Weighted, LogLossBinary, MCCBinary, MCCMulticlass, MeanSquaredLogError, Precision, PrecisionMacro, PrecisionMicro, PrecisionWeighted, Recall, RecallMacro, RecallMicro, RecallWeighted, RootMeanSquaredError, RootMeanSquaredLogError\n'), ((341, 10, 341, 23), 'evalml.objectives.RecallMicro', 'RecallMicro', ({}, {}), '()', False, 'from evalml.objectives import F1, MAPE, MSE, AccuracyBinary, AccuracyMulticlass, BalancedAccuracyBinary, BalancedAccuracyMulticlass, BinaryClassificationObjective, CostBenefitMatrix, ExpVariance, F1Macro, F1Micro, F1Weighted, LogLossBinary, MCCBinary, MCCMulticlass, MeanSquaredLogError, Precision, PrecisionMacro, PrecisionMicro, PrecisionWeighted, Recall, RecallMacro, RecallMicro, RecallWeighted, RootMeanSquaredError, RootMeanSquaredLogError\n'), ((356, 10, 356, 23), 'evalml.objectives.RecallMacro', 'RecallMacro', ({}, {}), '()', False, 'from evalml.objectives import F1, MAPE, MSE, AccuracyBinary, AccuracyMulticlass, BalancedAccuracyBinary, BalancedAccuracyMulticlass, BinaryClassificationObjective, CostBenefitMatrix, ExpVariance, F1Macro, F1Micro, F1Weighted, LogLossBinary, MCCBinary, MCCMulticlass, MeanSquaredLogError, Precision, PrecisionMacro, PrecisionMicro, PrecisionWeighted, Recall, RecallMacro, RecallMicro, RecallWeighted, RootMeanSquaredError, RootMeanSquaredLogError\n'), ((371, 10, 371, 26), 'evalml.objectives.RecallWeighted', 'RecallWeighted', ({}, {}), '()', False, 'from evalml.objectives import F1, MAPE, MSE, AccuracyBinary, AccuracyMulticlass, BalancedAccuracyBinary, BalancedAccuracyMulticlass, BinaryClassificationObjective, CostBenefitMatrix, ExpVariance, F1Macro, F1Micro, F1Weighted, LogLossBinary, MCCBinary, MCCMulticlass, MeanSquaredLogError, Precision, PrecisionMacro, PrecisionMicro, PrecisionWeighted, Recall, RecallMacro, RecallMicro, RecallWeighted, RootMeanSquaredError, RootMeanSquaredLogError\n'), ((386, 10, 386, 31), 'evalml.objectives.MeanSquaredLogError', 'MeanSquaredLogError', ({}, {}), '()', False, 'from evalml.objectives import F1, MAPE, MSE, AccuracyBinary, AccuracyMulticlass, BalancedAccuracyBinary, BalancedAccuracyMulticlass, BinaryClassificationObjective, CostBenefitMatrix, ExpVariance, F1Macro, F1Micro, F1Weighted, LogLossBinary, MCCBinary, MCCMulticlass, MeanSquaredLogError, Precision, PrecisionMacro, PrecisionMicro, PrecisionWeighted, Recall, RecallMacro, RecallMicro, RecallWeighted, RootMeanSquaredError, RootMeanSquaredLogError\n'), ((387, 15, 387, 40), 'evalml.objectives.RootMeanSquaredLogError', 'RootMeanSquaredLogError', ({}, {}), '()', False, 'from evalml.objectives import F1, MAPE, MSE, AccuracyBinary, AccuracyMulticlass, BalancedAccuracyBinary, BalancedAccuracyMulticlass, BinaryClassificationObjective, CostBenefitMatrix, ExpVariance, F1Macro, F1Micro, F1Weighted, LogLossBinary, MCCBinary, MCCMulticlass, MeanSquaredLogError, Precision, PrecisionMacro, PrecisionMicro, PrecisionWeighted, Recall, RecallMacro, RecallMicro, RecallWeighted, RootMeanSquaredError, RootMeanSquaredLogError\n'), ((389, 19, 389, 56), 'numpy.array', 
'np.array', ({(389, 28, 389, 55): '[0, 0, 0, 1, 1, 1, 2, 2, 2]'}, {}), '([0, 0, 0, 1, 1, 1, 2, 2, 2])', True, 'import numpy as np\n'), ((390, 16, 390, 53), 'numpy.array', 'np.array', ({(390, 25, 390, 52): '[0, 0, 0, 0, 0, 0, 0, 0, 0]'}, {}), '([0, 0, 0, 0, 0, 0, 0, 0, 0])', True, 'import numpy as np\n'), ((392, 19, 392, 56), 'numpy.array', 'np.array', ({(392, 28, 392, 55): '[0, 0, 0, 1, 1, 1, 2, 2, 2]'}, {}), '([0, 0, 0, 1, 1, 1, 2, 2, 2])', True, 'import numpy as np\n'), ((393, 16, 393, 53), 'numpy.array', 'np.array', ({(393, 25, 393, 52): '[0, 0, 0, 1, 1, 1, 2, 2, 2]'}, {}), '([0, 0, 0, 1, 1, 1, 2, 2, 2])', True, 'import numpy as np\n'), ((395, 19, 395, 56), 'numpy.array', 'np.array', ({(395, 28, 395, 55): '[0, 0, 0, 1, 1, 1, 2, 2, 2]'}, {}), '([0, 0, 0, 1, 1, 1, 2, 2, 2])', True, 'import numpy as np\n'), ((396, 16, 396, 53), 'numpy.array', 'np.array', ({(396, 25, 396, 52): '[2, 2, 2, 0, 0, 0, 1, 1, 1]'}, {}), '([2, 2, 2, 0, 0, 0, 1, 1, 1])', True, 'import numpy as np\n'), ((408, 10, 408, 15), 'evalml.objectives.MSE', 'MSE', ({}, {}), '()', False, 'from evalml.objectives import F1, MAPE, MSE, AccuracyBinary, AccuracyMulticlass, BalancedAccuracyBinary, BalancedAccuracyMulticlass, BinaryClassificationObjective, CostBenefitMatrix, ExpVariance, F1Macro, F1Micro, F1Weighted, LogLossBinary, MCCBinary, MCCMulticlass, MeanSquaredLogError, Precision, PrecisionMacro, PrecisionMicro, PrecisionWeighted, Recall, RecallMacro, RecallMicro, RecallWeighted, RootMeanSquaredError, RootMeanSquaredLogError\n'), ((409, 15, 409, 37), 'evalml.objectives.RootMeanSquaredError', 'RootMeanSquaredError', ({}, {}), '()', False, 'from evalml.objectives import F1, MAPE, MSE, AccuracyBinary, AccuracyMulticlass, BalancedAccuracyBinary, BalancedAccuracyMulticlass, BinaryClassificationObjective, CostBenefitMatrix, ExpVariance, F1Macro, F1Micro, F1Weighted, LogLossBinary, MCCBinary, MCCMulticlass, MeanSquaredLogError, Precision, PrecisionMacro, PrecisionMicro, PrecisionWeighted, Recall, RecallMacro, RecallMicro, RecallWeighted, RootMeanSquaredError, RootMeanSquaredLogError\n'), ((411, 19, 411, 56), 'numpy.array', 'np.array', ({(411, 28, 411, 55): '[0, 0, 0, 1, 1, 1, 2, 2, 2]'}, {}), '([0, 0, 0, 1, 1, 1, 2, 2, 2])', True, 'import numpy as np\n'), ((412, 16, 412, 53), 'numpy.array', 'np.array', ({(412, 25, 412, 52): '[0, 0, 0, 0, 0, 0, 0, 0, 0]'}, {}), '([0, 0, 0, 0, 0, 0, 0, 0, 0])', True, 'import numpy as np\n'), ((414, 19, 414, 56), 'numpy.array', 'np.array', ({(414, 28, 414, 55): '[0, 0, 0, 1, 1, 1, 2, 2, 2]'}, {}), '([0, 0, 0, 1, 1, 1, 2, 2, 2])', True, 'import numpy as np\n'), ((415, 16, 415, 53), 'numpy.array', 'np.array', ({(415, 25, 415, 52): '[0, 0, 0, 1, 1, 1, 2, 2, 2]'}, {}), '([0, 0, 0, 1, 1, 1, 2, 2, 2])', True, 'import numpy as np\n'), ((417, 19, 417, 56), 'numpy.array', 'np.array', ({(417, 28, 417, 55): '[0, 0, 0, 1, 1, 1, 2, 2, 2]'}, {}), '([0, 0, 0, 1, 1, 1, 2, 2, 2])', True, 'import numpy as np\n'), ((418, 16, 418, 53), 'numpy.array', 'np.array', ({(418, 25, 418, 52): '[2, 2, 2, 0, 0, 0, 1, 1, 1]'}, {}), '([2, 2, 2, 0, 0, 0, 1, 1, 1])', True, 'import numpy as np\n'), ((442, 10, 442, 16), 'evalml.objectives.MAPE', 'MAPE', ({}, {}), '()', False, 'from evalml.objectives import F1, MAPE, MSE, AccuracyBinary, AccuracyMulticlass, BalancedAccuracyBinary, BalancedAccuracyMulticlass, BinaryClassificationObjective, CostBenefitMatrix, ExpVariance, F1Macro, F1Micro, F1Weighted, LogLossBinary, MCCBinary, MCCMulticlass, MeanSquaredLogError, Precision, PrecisionMacro, PrecisionMicro, PrecisionWeighted, Recall, 
RecallMacro, RecallMicro, RecallWeighted, RootMeanSquaredError, RootMeanSquaredLogError\n'), ((444, 16, 444, 53), 'numpy.array', 'np.array', ({(444, 25, 444, 52): '[0, 0, 1, 1, 1, 1, 2, 0, 2]'}, {}), '([0, 0, 1, 1, 1, 1, 2, 0, 2])', True, 'import numpy as np\n'), ((445, 19, 445, 56), 'numpy.array', 'np.array', ({(445, 28, 445, 55): '[0, 1, 0, 1, 1, 2, 1, 2, 0]'}, {}), '([0, 1, 0, 1, 1, 2, 1, 2, 0])', True, 'import numpy as np\n'), ((447, 16, 447, 40), 'numpy.array', 'np.array', ({(447, 25, 447, 39): '[-1, -2, 1, 3]'}, {}), '([-1, -2, 1, 3])', True, 'import numpy as np\n'), ((448, 19, 448, 43), 'numpy.array', 'np.array', ({(448, 28, 448, 42): '[1, 2, -1, -3]'}, {}), '([1, 2, -1, -3])', True, 'import numpy as np\n'), ((450, 16, 450, 44), 'numpy.array', 'np.array', ({(450, 25, 450, 43): '[1, 2, 4, 2, 1, 2]'}, {}), '([1, 2, 4, 2, 1, 2])', True, 'import numpy as np\n'), ((451, 19, 451, 47), 'numpy.array', 'np.array', ({(451, 28, 451, 46): '[0, 2, 2, 1, 3, 2]'}, {}), '([0, 2, 2, 1, 3, 2])', True, 'import numpy as np\n'), ((137, 22, 137, 43), 'evalml.objectives.MeanSquaredLogError', 'MeanSquaredLogError', ({}, {}), '()', False, 'from evalml.objectives import F1, MAPE, MSE, AccuracyBinary, AccuracyMulticlass, BalancedAccuracyBinary, BalancedAccuracyMulticlass, BinaryClassificationObjective, CostBenefitMatrix, ExpVariance, F1Macro, F1Micro, F1Weighted, LogLossBinary, MCCBinary, MCCMulticlass, MeanSquaredLogError, Precision, PrecisionMacro, PrecisionMicro, PrecisionWeighted, Recall, RecallMacro, RecallMicro, RecallWeighted, RootMeanSquaredError, RootMeanSquaredLogError\n'), ((137, 45, 137, 70), 'evalml.objectives.RootMeanSquaredLogError', 'RootMeanSquaredLogError', ({}, {}), '()', False, 'from evalml.objectives import F1, MAPE, MSE, AccuracyBinary, AccuracyMulticlass, BalancedAccuracyBinary, BalancedAccuracyMulticlass, BinaryClassificationObjective, CostBenefitMatrix, ExpVariance, F1Macro, F1Micro, F1Weighted, LogLossBinary, MCCBinary, MCCMulticlass, MeanSquaredLogError, Precision, PrecisionMacro, PrecisionMicro, PrecisionWeighted, Recall, RecallMacro, RecallMicro, RecallWeighted, RootMeanSquaredError, RootMeanSquaredLogError\n'), ((161, 48, 161, 71), 'pytest.approx', 'pytest.approx', ({(161, 62, 161, 65): '(0.0)', (161, 67, 161, 70): 'EPS'}, {}), '(0.0, EPS)', False, 'import pytest\n'), ((163, 48, 163, 71), 'pytest.approx', 'pytest.approx', ({(163, 62, 163, 65): '(0.5)', (163, 67, 163, 70): 'EPS'}, {}), '(0.5, EPS)', False, 'import pytest\n'), ((165, 48, 165, 71), 'pytest.approx', 'pytest.approx', ({(165, 62, 165, 65): '(1.0)', (165, 67, 165, 70): 'EPS'}, {}), '(1.0, EPS)', False, 'import pytest\n'), ((171, 48, 171, 71), 'pytest.approx', 'pytest.approx', ({(171, 62, 171, 65): '(0.0)', (171, 67, 171, 70): 'EPS'}, {}), '(0.0, EPS)', False, 'import pytest\n'), ((173, 48, 173, 71), 'pytest.approx', 'pytest.approx', ({(173, 62, 173, 65): '(0.5)', (173, 67, 173, 70): 'EPS'}, {}), '(0.5, EPS)', False, 'import pytest\n'), ((175, 48, 175, 71), 'pytest.approx', 'pytest.approx', ({(175, 62, 175, 65): '(1.0)', (175, 67, 175, 70): 'EPS'}, {}), '(1.0, EPS)', False, 'import pytest\n'), ((177, 54, 177, 81), 'pytest.approx', 'pytest.approx', ({(177, 68, 177, 75): '(1 / 3.0)', (177, 77, 177, 80): 'EPS'}, {}), '(1 / 3.0, EPS)', False, 'import pytest\n'), ((179, 54, 179, 81), 'pytest.approx', 'pytest.approx', ({(179, 68, 179, 75): '(1 / 3.0)', (179, 77, 179, 80): 'EPS'}, {}), '(1 / 3.0, EPS)', False, 'import pytest\n'), ((185, 54, 185, 79), 'pytest.approx', 'pytest.approx', ({(185, 68, 185, 73): '(0.625)', (185, 75, 
185, 78): 'EPS'}, {}), '(0.625, EPS)', False, 'import pytest\n'), ((188, 54, 188, 77), 'pytest.approx', 'pytest.approx', ({(188, 68, 188, 71): '(1.0)', (188, 73, 188, 76): 'EPS'}, {}), '(1.0, EPS)', False, 'import pytest\n'), ((191, 54, 191, 77), 'pytest.approx', 'pytest.approx', ({(191, 68, 191, 71): '(0.0)', (191, 73, 191, 76): 'EPS'}, {}), '(0.0, EPS)', False, 'import pytest\n'), ((197, 57, 197, 81), 'pytest.approx', 'pytest.approx', ({(197, 71, 197, 75): '(0.75)', (197, 77, 197, 80): 'EPS'}, {}), '(0.75, EPS)', False, 'import pytest\n'), ((200, 57, 200, 80), 'pytest.approx', 'pytest.approx', ({(200, 71, 200, 74): '(1.0)', (200, 76, 200, 79): 'EPS'}, {}), '(1.0, EPS)', False, 'import pytest\n'), ((203, 57, 203, 80), 'pytest.approx', 'pytest.approx', ({(203, 71, 203, 74): '(0.0)', (203, 76, 203, 79): 'EPS'}, {}), '(0.0, EPS)', False, 'import pytest\n'), ((209, 54, 209, 77), 'pytest.approx', 'pytest.approx', ({(209, 68, 209, 71): '(0.5)', (209, 73, 209, 76): 'EPS'}, {}), '(0.5, EPS)', False, 'import pytest\n'), ((212, 54, 212, 77), 'pytest.approx', 'pytest.approx', ({(212, 68, 212, 71): '(1.0)', (212, 73, 212, 76): 'EPS'}, {}), '(1.0, EPS)', False, 'import pytest\n'), ((215, 54, 215, 77), 'pytest.approx', 'pytest.approx', ({(215, 68, 215, 71): '(0.0)', (215, 73, 215, 76): 'EPS'}, {}), '(0.0, EPS)', False, 'import pytest\n'), ((218, 42, 218, 65), 'pytest.approx', 'pytest.approx', ({(218, 56, 218, 59): '(0.0)', (218, 61, 218, 64): 'EPS'}, {}), '(0.0, EPS)', False, 'import pytest\n'), ((224, 63, 224, 90), 'pytest.approx', 'pytest.approx', ({(224, 77, 224, 84): '(1 / 3.0)', (224, 86, 224, 89): 'EPS'}, {}), '(1 / 3.0, EPS)', False, 'import pytest\n'), ((227, 63, 227, 86), 'pytest.approx', 'pytest.approx', ({(227, 77, 227, 80): '(1.0)', (227, 82, 227, 85): 'EPS'}, {}), '(1.0, EPS)', False, 'import pytest\n'), ((230, 63, 230, 86), 'pytest.approx', 'pytest.approx', ({(230, 77, 230, 80): '(0.0)', (230, 82, 230, 85): 'EPS'}, {}), '(0.0, EPS)', False, 'import pytest\n'), ((233, 42, 233, 65), 'pytest.approx', 'pytest.approx', ({(233, 56, 233, 59): '(0.0)', (233, 61, 233, 64): 'EPS'}, {}), '(0.0, EPS)', False, 'import pytest\n'), ((240, 11, 240, 78), 'pytest.approx', 'pytest.approx', ({(240, 25, 240, 72): '(2 * (1 / 3.0) * (1 / 9.0) / (1 / 3.0 + 1 / 9.0))', (240, 74, 240, 77): 'EPS'}, {}), '(2 * (1 / 3.0) * (1 / 9.0) / (1 / 3.0 + 1 / 9.0), EPS)', False, 'import pytest\n'), ((243, 63, 243, 86), 'pytest.approx', 'pytest.approx', ({(243, 77, 243, 80): '(1.0)', (243, 82, 243, 85): 'EPS'}, {}), '(1.0, EPS)', False, 'import pytest\n'), ((246, 63, 246, 86), 'pytest.approx', 'pytest.approx', ({(246, 77, 246, 80): '(0.0)', (246, 82, 246, 85): 'EPS'}, {}), '(0.0, EPS)', False, 'import pytest\n'), ((249, 42, 249, 65), 'pytest.approx', 'pytest.approx', ({(249, 56, 249, 59): '(0.0)', (249, 61, 249, 64): 'EPS'}, {}), '(0.0, EPS)', False, 'import pytest\n'), ((256, 11, 256, 78), 'pytest.approx', 'pytest.approx', ({(256, 25, 256, 72): '(2 * (1 / 3.0) * (1 / 9.0) / (1 / 3.0 + 1 / 9.0))', (256, 74, 256, 77): 'EPS'}, {}), '(2 * (1 / 3.0) * (1 / 9.0) / (1 / 3.0 + 1 / 9.0), EPS)', False, 'import pytest\n'), ((259, 63, 259, 86), 'pytest.approx', 'pytest.approx', ({(259, 77, 259, 80): '(1.0)', (259, 82, 259, 85): 'EPS'}, {}), '(1.0, EPS)', False, 'import pytest\n'), ((262, 63, 262, 86), 'pytest.approx', 'pytest.approx', ({(262, 77, 262, 80): '(0.0)', (262, 82, 262, 85): 'EPS'}, {}), '(0.0, EPS)', False, 'import pytest\n'), ((265, 42, 265, 65), 'pytest.approx', 'pytest.approx', ({(265, 56, 265, 59): '(0.0)', (265, 61, 265, 
64): 'EPS'}, {}), '(0.0, EPS)', False, 'import pytest\n'), ((271, 54, 271, 77), 'pytest.approx', 'pytest.approx', ({(271, 68, 271, 71): '(1.0)', (271, 73, 271, 76): 'EPS'}, {}), '(1.0, EPS)', False, 'import pytest\n'), ((274, 54, 274, 77), 'pytest.approx', 'pytest.approx', ({(274, 68, 274, 71): '(0.5)', (274, 73, 274, 76): 'EPS'}, {}), '(0.5, EPS)', False, 'import pytest\n'), ((277, 54, 277, 77), 'pytest.approx', 'pytest.approx', ({(277, 68, 277, 71): '(0.0)', (277, 73, 277, 76): 'EPS'}, {}), '(0.0, EPS)', False, 'import pytest\n'), ((280, 54, 280, 77), 'pytest.approx', 'pytest.approx', ({(280, 68, 280, 71): '(0.0)', (280, 73, 280, 76): 'EPS'}, {}), '(0.0, EPS)', False, 'import pytest\n'), ((286, 63, 286, 90), 'pytest.approx', 'pytest.approx', ({(286, 77, 286, 84): '(1 / 3.0)', (286, 86, 286, 89): 'EPS'}, {}), '(1 / 3.0, EPS)', False, 'import pytest\n'), ((289, 63, 289, 86), 'pytest.approx', 'pytest.approx', ({(289, 77, 289, 80): '(1.0)', (289, 82, 289, 85): 'EPS'}, {}), '(1.0, EPS)', False, 'import pytest\n'), ((292, 63, 292, 86), 'pytest.approx', 'pytest.approx', ({(292, 77, 292, 80): '(0.0)', (292, 82, 292, 85): 'EPS'}, {}), '(0.0, EPS)', False, 'import pytest\n'), ((295, 42, 295, 65), 'pytest.approx', 'pytest.approx', ({(295, 56, 295, 59): '(0.0)', (295, 61, 295, 64): 'EPS'}, {}), '(0.0, EPS)', False, 'import pytest\n'), ((301, 63, 301, 90), 'pytest.approx', 'pytest.approx', ({(301, 77, 301, 84): '(1 / 9.0)', (301, 86, 301, 89): 'EPS'}, {}), '(1 / 9.0, EPS)', False, 'import pytest\n'), ((304, 63, 304, 86), 'pytest.approx', 'pytest.approx', ({(304, 77, 304, 80): '(1.0)', (304, 82, 304, 85): 'EPS'}, {}), '(1.0, EPS)', False, 'import pytest\n'), ((307, 63, 307, 86), 'pytest.approx', 'pytest.approx', ({(307, 77, 307, 80): '(0.0)', (307, 82, 307, 85): 'EPS'}, {}), '(0.0, EPS)', False, 'import pytest\n'), ((310, 42, 310, 65), 'pytest.approx', 'pytest.approx', ({(310, 56, 310, 59): '(0.0)', (310, 61, 310, 64): 'EPS'}, {}), '(0.0, EPS)', False, 'import pytest\n'), ((316, 63, 316, 90), 'pytest.approx', 'pytest.approx', ({(316, 77, 316, 84): '(1 / 9.0)', (316, 86, 316, 89): 'EPS'}, {}), '(1 / 9.0, EPS)', False, 'import pytest\n'), ((319, 63, 319, 86), 'pytest.approx', 'pytest.approx', ({(319, 77, 319, 80): '(1.0)', (319, 82, 319, 85): 'EPS'}, {}), '(1.0, EPS)', False, 'import pytest\n'), ((322, 63, 322, 86), 'pytest.approx', 'pytest.approx', ({(322, 77, 322, 80): '(0.0)', (322, 82, 322, 85): 'EPS'}, {}), '(0.0, EPS)', False, 'import pytest\n'), ((325, 42, 325, 65), 'pytest.approx', 'pytest.approx', ({(325, 56, 325, 59): '(0.0)', (325, 61, 325, 64): 'EPS'}, {}), '(0.0, EPS)', False, 'import pytest\n'), ((331, 54, 331, 77), 'pytest.approx', 'pytest.approx', ({(331, 68, 331, 71): '(1.0)', (331, 73, 331, 76): 'EPS'}, {}), '(1.0, EPS)', False, 'import pytest\n'), ((334, 54, 334, 77), 'pytest.approx', 'pytest.approx', ({(334, 68, 334, 71): '(0.0)', (334, 73, 334, 76): 'EPS'}, {}), '(0.0, EPS)', False, 'import pytest\n'), ((337, 54, 337, 77), 'pytest.approx', 'pytest.approx', ({(337, 68, 337, 71): '(0.5)', (337, 73, 337, 76): 'EPS'}, {}), '(0.5, EPS)', False, 'import pytest\n'), ((343, 63, 343, 90), 'pytest.approx', 'pytest.approx', ({(343, 77, 343, 84): '(1 / 3.0)', (343, 86, 343, 89): 'EPS'}, {}), '(1 / 3.0, EPS)', False, 'import pytest\n'), ((346, 63, 346, 86), 'pytest.approx', 'pytest.approx', ({(346, 77, 346, 80): '(1.0)', (346, 82, 346, 85): 'EPS'}, {}), '(1.0, EPS)', False, 'import pytest\n'), ((349, 63, 349, 86), 'pytest.approx', 'pytest.approx', ({(349, 77, 349, 80): '(0.0)', (349, 82, 349, 
85): 'EPS'}, {}), '(0.0, EPS)', False, 'import pytest\n'), ((352, 42, 352, 65), 'pytest.approx', 'pytest.approx', ({(352, 56, 352, 59): '(0.0)', (352, 61, 352, 64): 'EPS'}, {}), '(0.0, EPS)', False, 'import pytest\n'), ((358, 63, 358, 90), 'pytest.approx', 'pytest.approx', ({(358, 77, 358, 84): '(1 / 3.0)', (358, 86, 358, 89): 'EPS'}, {}), '(1 / 3.0, EPS)', False, 'import pytest\n'), ((361, 63, 361, 86), 'pytest.approx', 'pytest.approx', ({(361, 77, 361, 80): '(1.0)', (361, 82, 361, 85): 'EPS'}, {}), '(1.0, EPS)', False, 'import pytest\n'), ((364, 63, 364, 86), 'pytest.approx', 'pytest.approx', ({(364, 77, 364, 80): '(0.0)', (364, 82, 364, 85): 'EPS'}, {}), '(0.0, EPS)', False, 'import pytest\n'), ((367, 42, 367, 65), 'pytest.approx', 'pytest.approx', ({(367, 56, 367, 59): '(0.0)', (367, 61, 367, 64): 'EPS'}, {}), '(0.0, EPS)', False, 'import pytest\n'), ((373, 63, 373, 90), 'pytest.approx', 'pytest.approx', ({(373, 77, 373, 84): '(1 / 3.0)', (373, 86, 373, 89): 'EPS'}, {}), '(1 / 3.0, EPS)', False, 'import pytest\n'), ((376, 63, 376, 86), 'pytest.approx', 'pytest.approx', ({(376, 77, 376, 80): '(1.0)', (376, 82, 376, 85): 'EPS'}, {}), '(1.0, EPS)', False, 'import pytest\n'), ((379, 63, 379, 86), 'pytest.approx', 'pytest.approx', ({(379, 77, 379, 80): '(0.0)', (379, 82, 379, 85): 'EPS'}, {}), '(0.0, EPS)', False, 'import pytest\n'), ((382, 42, 382, 65), 'pytest.approx', 'pytest.approx', ({(382, 56, 382, 59): '(0.0)', (382, 61, 382, 64): 'EPS'}, {}), '(0.0, EPS)', False, 'import pytest\n'), ((398, 49, 398, 78), 'pytest.approx', 'pytest.approx', ({(398, 63, 398, 77): '(0.56246732491)'}, {}), '(0.56246732491)', False, 'import pytest\n'), ((399, 49, 399, 65), 'pytest.approx', 'pytest.approx', ({(399, 63, 399, 64): '(0)'}, {}), '(0)', False, 'import pytest\n'), ((400, 49, 400, 81), 'pytest.approx', 'pytest.approx', ({(400, 63, 400, 80): '(0.617267976207983)'}, {}), '(0.617267976207983)', False, 'import pytest\n'), ((403, 54, 403, 70), 'pytest.approx', 'pytest.approx', ({(403, 68, 403, 69): '(0)'}, {}), '(0)', False, 'import pytest\n'), ((420, 49, 420, 71), 'pytest.approx', 'pytest.approx', ({(420, 63, 420, 70): '(5.0 / 3.0)'}, {}), '(5.0 / 3.0)', False, 'import pytest\n'), ((421, 49, 421, 65), 'pytest.approx', 'pytest.approx', ({(421, 63, 421, 64): '(0)'}, {}), '(0)', False, 'import pytest\n'), ((422, 49, 422, 66), 'pytest.approx', 'pytest.approx', ({(422, 63, 422, 65): '(2.0)'}, {}), '(2.0)', False, 'import pytest\n'), ((425, 54, 425, 70), 'pytest.approx', 'pytest.approx', ({(425, 68, 425, 69): '(0)'}, {}), '(0)', False, 'import pytest\n'), ((432, 9, 432, 37), 'pytest.warns', 'pytest.warns', ({(432, 22, 432, 36): 'RuntimeWarning'}, {}), '(RuntimeWarning)', False, 'import pytest\n'), ((433, 8, 433, 49), 'sklearn.metrics.matthews_corrcoef', 'sk_matthews_corrcoef', ({(433, 29, 433, 35): 'y_true', (433, 37, 433, 48): 'y_predicted'}, {}), '(y_true, y_predicted)', True, 'from sklearn.metrics import matthews_corrcoef as sk_matthews_corrcoef\n'), ((435, 9, 435, 27), 'pytest.warns', 'pytest.warns', ({(435, 22, 435, 26): 'None'}, {}), '(None)', False, 'import pytest\n'), ((453, 9, 453, 123), 'pytest.raises', 'pytest.raises', (), '', False, 'import pytest\n'), ((455, 49, 455, 75), 'pytest.approx', 'pytest.approx', ({(455, 63, 455, 74): '(8 / 4 * 100)'}, {}), '(8 / 4 * 100)', False, 'import pytest\n'), ((456, 49, 456, 75), 'pytest.approx', 'pytest.approx', ({(456, 63, 456, 74): '(4 / 6 * 100)'}, {}), '(4 / 6 * 100)', False, 'import pytest\n'), ((457, 82, 457, 108), 'pytest.approx', 'pytest.approx', 
({(457, 96, 457, 107): '(4 / 6 * 100)'}, {}), '(4 / 6 * 100)', False, 'import pytest\n'), ((459, 70, 459, 96), 'pytest.approx', 'pytest.approx', ({(459, 84, 459, 95): '(8 / 4 * 100)'}, {}), '(8 / 4 * 100)', False, 'import pytest\n'), ((486, 11, 486, 84), 'evalml.objectives.CostBenefitMatrix.calculate_percent_difference', 'CostBenefitMatrix.calculate_percent_difference', (), '', False, 'from evalml.objectives import F1, MAPE, MSE, AccuracyBinary, AccuracyMulticlass, BalancedAccuracyBinary, BalancedAccuracyMulticlass, BinaryClassificationObjective, CostBenefitMatrix, ExpVariance, F1Macro, F1Micro, F1Weighted, LogLossBinary, MCCBinary, MCCMulticlass, MeanSquaredLogError, Precision, PrecisionMacro, PrecisionMicro, PrecisionWeighted, Recall, RecallMacro, RecallMicro, RecallWeighted, RootMeanSquaredError, RootMeanSquaredLogError\n'), ((488, 11, 488, 87), 'evalml.objectives.CostBenefitMatrix.calculate_percent_difference', 'CostBenefitMatrix.calculate_percent_difference', (), '', False, 'from evalml.objectives import F1, MAPE, MSE, AccuracyBinary, AccuracyMulticlass, BalancedAccuracyBinary, BalancedAccuracyMulticlass, BinaryClassificationObjective, CostBenefitMatrix, ExpVariance, F1Macro, F1Micro, F1Weighted, LogLossBinary, MCCBinary, MCCMulticlass, MeanSquaredLogError, Precision, PrecisionMacro, PrecisionMicro, PrecisionWeighted, Recall, RecallMacro, RecallMicro, RecallWeighted, RootMeanSquaredError, RootMeanSquaredLogError\n'), ((489, 11, 489, 87), 'evalml.objectives.CostBenefitMatrix.calculate_percent_difference', 'CostBenefitMatrix.calculate_percent_difference', (), '', False, 'from evalml.objectives import F1, MAPE, MSE, AccuracyBinary, AccuracyMulticlass, BalancedAccuracyBinary, BalancedAccuracyMulticlass, BinaryClassificationObjective, CostBenefitMatrix, ExpVariance, F1Macro, F1Micro, F1Weighted, LogLossBinary, MCCBinary, MCCMulticlass, MeanSquaredLogError, Precision, PrecisionMacro, PrecisionMicro, PrecisionWeighted, Recall, RecallMacro, RecallMicro, RecallWeighted, RootMeanSquaredError, RootMeanSquaredLogError\n'), ((490, 11, 490, 86), 'evalml.objectives.CostBenefitMatrix.calculate_percent_difference', 'CostBenefitMatrix.calculate_percent_difference', (), '', False, 'from evalml.objectives import F1, MAPE, MSE, AccuracyBinary, AccuracyMulticlass, BalancedAccuracyBinary, BalancedAccuracyMulticlass, BinaryClassificationObjective, CostBenefitMatrix, ExpVariance, F1Macro, F1Micro, F1Weighted, LogLossBinary, MCCBinary, MCCMulticlass, MeanSquaredLogError, Precision, PrecisionMacro, PrecisionMicro, PrecisionWeighted, Recall, RecallMacro, RecallMicro, RecallWeighted, RootMeanSquaredError, RootMeanSquaredLogError\n'), ((491, 11, 491, 86), 'evalml.objectives.CostBenefitMatrix.calculate_percent_difference', 'CostBenefitMatrix.calculate_percent_difference', (), '', False, 'from evalml.objectives import F1, MAPE, MSE, AccuracyBinary, AccuracyMulticlass, BalancedAccuracyBinary, BalancedAccuracyMulticlass, BinaryClassificationObjective, CostBenefitMatrix, ExpVariance, F1Macro, F1Micro, F1Weighted, LogLossBinary, MCCBinary, MCCMulticlass, MeanSquaredLogError, Precision, PrecisionMacro, PrecisionMicro, PrecisionWeighted, Recall, RecallMacro, RecallMicro, RecallWeighted, RootMeanSquaredError, RootMeanSquaredLogError\n'), ((495, 11, 495, 83), 'evalml.objectives.LogLossBinary.calculate_percent_difference', 'LogLossBinary.calculate_percent_difference', (), '', False, 'from evalml.objectives import F1, MAPE, MSE, AccuracyBinary, AccuracyMulticlass, BalancedAccuracyBinary, BalancedAccuracyMulticlass, 
BinaryClassificationObjective, CostBenefitMatrix, ExpVariance, F1Macro, F1Micro, F1Weighted, LogLossBinary, MCCBinary, MCCMulticlass, MeanSquaredLogError, Precision, PrecisionMacro, PrecisionMicro, PrecisionWeighted, Recall, RecallMacro, RecallMicro, RecallWeighted, RootMeanSquaredError, RootMeanSquaredLogError\n'), ((496, 11, 496, 83), 'evalml.objectives.LogLossBinary.calculate_percent_difference', 'LogLossBinary.calculate_percent_difference', (), '', False, 'from evalml.objectives import F1, MAPE, MSE, AccuracyBinary, AccuracyMulticlass, BalancedAccuracyBinary, BalancedAccuracyMulticlass, BinaryClassificationObjective, CostBenefitMatrix, ExpVariance, F1Macro, F1Micro, F1Weighted, LogLossBinary, MCCBinary, MCCMulticlass, MeanSquaredLogError, Precision, PrecisionMacro, PrecisionMicro, PrecisionWeighted, Recall, RecallMacro, RecallMicro, RecallWeighted, RootMeanSquaredError, RootMeanSquaredLogError\n'), ((497, 11, 497, 82), 'evalml.objectives.LogLossBinary.calculate_percent_difference', 'LogLossBinary.calculate_percent_difference', (), '', False, 'from evalml.objectives import F1, MAPE, MSE, AccuracyBinary, AccuracyMulticlass, BalancedAccuracyBinary, BalancedAccuracyMulticlass, BinaryClassificationObjective, CostBenefitMatrix, ExpVariance, F1Macro, F1Micro, F1Weighted, LogLossBinary, MCCBinary, MCCMulticlass, MeanSquaredLogError, Precision, PrecisionMacro, PrecisionMicro, PrecisionWeighted, Recall, RecallMacro, RecallMicro, RecallWeighted, RootMeanSquaredError, RootMeanSquaredLogError\n'), ((498, 11, 498, 82), 'evalml.objectives.LogLossBinary.calculate_percent_difference', 'LogLossBinary.calculate_percent_difference', (), '', False, 'from evalml.objectives import F1, MAPE, MSE, AccuracyBinary, AccuracyMulticlass, BalancedAccuracyBinary, BalancedAccuracyMulticlass, BinaryClassificationObjective, CostBenefitMatrix, ExpVariance, F1Macro, F1Micro, F1Weighted, LogLossBinary, MCCBinary, MCCMulticlass, MeanSquaredLogError, Precision, PrecisionMacro, PrecisionMicro, PrecisionWeighted, Recall, RecallMacro, RecallMicro, RecallWeighted, RootMeanSquaredError, RootMeanSquaredLogError\n'), ((502, 32, 502, 53), 'numpy.abs', 'np.abs', ({(502, 39, 502, 52): '(1e-09 / 1e-09)'}, {}), '(1e-09 / 1e-09)', True, 'import numpy as np\n'), ((503, 22, 503, 92), 'evalml.objectives.ExpVariance.calculate_percent_difference', 'ExpVariance.calculate_percent_difference', (), '', False, 'from evalml.objectives import F1, MAPE, MSE, AccuracyBinary, AccuracyMulticlass, BalancedAccuracyBinary, BalancedAccuracyMulticlass, BinaryClassificationObjective, CostBenefitMatrix, ExpVariance, F1Macro, F1Micro, F1Weighted, LogLossBinary, MCCBinary, MCCMulticlass, MeanSquaredLogError, Precision, PrecisionMacro, PrecisionMicro, PrecisionWeighted, Recall, RecallMacro, RecallMicro, RecallWeighted, RootMeanSquaredError, RootMeanSquaredLogError\n'), ((504, 19, 504, 90), 'evalml.objectives.ExpVariance.calculate_percent_difference', 'ExpVariance.calculate_percent_difference', (), '', False, 'from evalml.objectives import F1, MAPE, MSE, AccuracyBinary, AccuracyMulticlass, BalancedAccuracyBinary, BalancedAccuracyMulticlass, BinaryClassificationObjective, CostBenefitMatrix, ExpVariance, F1Macro, F1Micro, F1Weighted, LogLossBinary, MCCBinary, MCCMulticlass, MeanSquaredLogError, Precision, PrecisionMacro, PrecisionMicro, PrecisionWeighted, Recall, RecallMacro, RecallMicro, RecallWeighted, RootMeanSquaredError, RootMeanSquaredLogError\n'), ((505, 19, 505, 89), 'evalml.objectives.ExpVariance.calculate_percent_difference', 
'ExpVariance.calculate_percent_difference', (), '', False, 'from evalml.objectives import F1, MAPE, MSE, AccuracyBinary, AccuracyMulticlass, BalancedAccuracyBinary, BalancedAccuracyMulticlass, BinaryClassificationObjective, CostBenefitMatrix, ExpVariance, F1Macro, F1Micro, F1Weighted, LogLossBinary, MCCBinary, MCCMulticlass, MeanSquaredLogError, Precision, PrecisionMacro, PrecisionMicro, PrecisionWeighted, Recall, RecallMacro, RecallMicro, RecallWeighted, RootMeanSquaredError, RootMeanSquaredLogError\n'), ((506, 19, 506, 86), 'evalml.objectives.ExpVariance.calculate_percent_difference', 'ExpVariance.calculate_percent_difference', (), '', False, 'from evalml.objectives import F1, MAPE, MSE, AccuracyBinary, AccuracyMulticlass, BalancedAccuracyBinary, BalancedAccuracyMulticlass, BinaryClassificationObjective, CostBenefitMatrix, ExpVariance, F1Macro, F1Micro, F1Weighted, LogLossBinary, MCCBinary, MCCMulticlass, MeanSquaredLogError, Precision, PrecisionMacro, PrecisionMicro, PrecisionWeighted, Recall, RecallMacro, RecallMicro, RecallWeighted, RootMeanSquaredError, RootMeanSquaredLogError\n'), ((44, 107, 44, 132), 'evalml.objectives.utils.get_non_core_objectives', 'get_non_core_objectives', ({}, {}), '()', False, 'from evalml.objectives.utils import _all_objectives_dict, get_non_core_objectives\n'), ((51, 13, 51, 84), 'pytest.raises', 'pytest.raises', (), '', False, 'import pytest\n'), ((57, 13, 57, 79), 'pytest.raises', 'pytest.raises', (), '', False, 'import pytest\n'), ((72, 13, 72, 84), 'pytest.raises', 'pytest.raises', (), '', False, 'import pytest\n'), ((78, 13, 78, 79), 'pytest.raises', 'pytest.raises', (), '', False, 'import pytest\n'), ((93, 13, 93, 81), 'pytest.raises', 'pytest.raises', (), '', False, 'import pytest\n'), ((99, 13, 99, 81), 'pytest.raises', 'pytest.raises', (), '', False, 'import pytest\n'), ((107, 13, 107, 69), 'pytest.raises', 'pytest.raises', (), '', False, 'import pytest\n'), ((138, 13, 138, 131), 'pytest.raises', 'pytest.raises', (), '', False, 'import pytest\n'), ((160, 21, 160, 43), 'numpy.array', 'np.array', ({(160, 30, 160, 42): '[0, 0, 1, 1]'}, {}), '([0, 0, 1, 1])', True, 'import numpy as np\n'), ((161, 21, 161, 43), 'numpy.array', 'np.array', ({(161, 30, 161, 42): '[1, 1, 0, 0]'}, {}), '([1, 1, 0, 0])', True, 'import numpy as np\n'), ((162, 21, 162, 43), 'numpy.array', 'np.array', ({(162, 30, 162, 42): '[0, 0, 1, 1]'}, {}), '([0, 0, 1, 1])', True, 'import numpy as np\n'), ((163, 21, 163, 43), 'numpy.array', 'np.array', ({(163, 30, 163, 42): '[0, 1, 0, 1]'}, {}), '([0, 1, 0, 1])', True, 'import numpy as np\n'), ((164, 21, 164, 43), 'numpy.array', 'np.array', ({(164, 30, 164, 42): '[0, 0, 1, 1]'}, {}), '([0, 0, 1, 1])', True, 'import numpy as np\n'), ((165, 21, 165, 43), 'numpy.array', 'np.array', ({(165, 30, 165, 42): '[0, 0, 1, 1]'}, {}), '([0, 0, 1, 1])', True, 'import numpy as np\n'), ((170, 21, 170, 43), 'numpy.array', 'np.array', ({(170, 30, 170, 42): '[0, 0, 1, 1]'}, {}), '([0, 0, 1, 1])', True, 'import numpy as np\n'), ((171, 21, 171, 43), 'numpy.array', 'np.array', ({(171, 30, 171, 42): '[1, 1, 0, 0]'}, {}), '([1, 1, 0, 0])', True, 'import numpy as np\n'), ((172, 21, 172, 43), 'numpy.array', 'np.array', ({(172, 30, 172, 42): '[0, 0, 1, 1]'}, {}), '([0, 0, 1, 1])', True, 'import numpy as np\n'), ((173, 21, 173, 43), 'numpy.array', 'np.array', ({(173, 30, 173, 42): '[0, 1, 0, 1]'}, {}), '([0, 1, 0, 1])', True, 'import numpy as np\n'), ((174, 21, 174, 43), 'numpy.array', 'np.array', ({(174, 30, 174, 42): '[0, 0, 1, 1]'}, {}), '([0, 0, 1, 1])', True, 
'import numpy as np\n'), ((175, 21, 175, 43), 'numpy.array', 'np.array', ({(175, 30, 175, 42): '[0, 0, 1, 1]'}, {}), '([0, 0, 1, 1])', True, 'import numpy as np\n'), ((176, 21, 176, 49), 'numpy.array', 'np.array', ({(176, 30, 176, 48): '[0, 0, 1, 1, 2, 2]'}, {}), '([0, 0, 1, 1, 2, 2])', True, 'import numpy as np\n'), ((177, 21, 177, 49), 'numpy.array', 'np.array', ({(177, 30, 177, 48): '[0, 0, 0, 0, 0, 0]'}, {}), '([0, 0, 0, 0, 0, 0])', True, 'import numpy as np\n'), ((178, 21, 178, 49), 'numpy.array', 'np.array', ({(178, 30, 178, 48): '[0, 0, 0, 0, 0, 0]'}, {}), '([0, 0, 0, 0, 0, 0])', True, 'import numpy as np\n'), ((179, 21, 179, 49), 'numpy.array', 'np.array', ({(179, 30, 179, 48): '[0, 0, 1, 1, 2, 2]'}, {}), '([0, 0, 1, 1, 2, 2])', True, 'import numpy as np\n'), ((184, 21, 184, 49), 'numpy.array', 'np.array', ({(184, 30, 184, 48): '[0, 1, 0, 0, 1, 0]'}, {}), '([0, 1, 0, 0, 1, 0])', True, 'import numpy as np\n'), ((185, 21, 185, 49), 'numpy.array', 'np.array', ({(185, 30, 185, 48): '[0, 1, 0, 0, 0, 1]'}, {}), '([0, 1, 0, 0, 0, 1])', True, 'import numpy as np\n'), ((187, 21, 187, 49), 'numpy.array', 'np.array', ({(187, 30, 187, 48): '[0, 1, 0, 0, 1, 0]'}, {}), '([0, 1, 0, 0, 1, 0])', True, 'import numpy as np\n'), ((188, 21, 188, 49), 'numpy.array', 'np.array', ({(188, 30, 188, 48): '[0, 1, 0, 0, 1, 0]'}, {}), '([0, 1, 0, 0, 1, 0])', True, 'import numpy as np\n'), ((190, 21, 190, 49), 'numpy.array', 'np.array', ({(190, 30, 190, 48): '[0, 1, 0, 0, 1, 0]'}, {}), '([0, 1, 0, 0, 1, 0])', True, 'import numpy as np\n'), ((191, 21, 191, 49), 'numpy.array', 'np.array', ({(191, 30, 191, 48): '[1, 0, 1, 1, 0, 1]'}, {}), '([1, 0, 1, 1, 0, 1])', True, 'import numpy as np\n'), ((196, 21, 196, 52), 'numpy.array', 'np.array', ({(196, 30, 196, 51): '[0, 1, 2, 0, 1, 2, 3]'}, {}), '([0, 1, 2, 0, 1, 2, 3])', True, 'import numpy as np\n'), ((197, 21, 197, 52), 'numpy.array', 'np.array', ({(197, 30, 197, 51): '[0, 0, 2, 0, 0, 2, 3]'}, {}), '([0, 0, 2, 0, 0, 2, 3])', True, 'import numpy as np\n'), ((199, 21, 199, 52), 'numpy.array', 'np.array', ({(199, 30, 199, 51): '[0, 1, 2, 0, 1, 2, 3]'}, {}), '([0, 1, 2, 0, 1, 2, 3])', True, 'import numpy as np\n'), ((200, 21, 200, 52), 'numpy.array', 'np.array', ({(200, 30, 200, 51): '[0, 1, 2, 0, 1, 2, 3]'}, {}), '([0, 1, 2, 0, 1, 2, 3])', True, 'import numpy as np\n'), ((202, 21, 202, 52), 'numpy.array', 'np.array', ({(202, 30, 202, 51): '[0, 1, 2, 0, 1, 2, 3]'}, {}), '([0, 1, 2, 0, 1, 2, 3])', True, 'import numpy as np\n'), ((203, 21, 203, 52), 'numpy.array', 'np.array', ({(203, 30, 203, 51): '[1, 0, 3, 1, 2, 1, 0]'}, {}), '([1, 0, 3, 1, 2, 1, 0])', True, 'import numpy as np\n'), ((208, 21, 208, 49), 'numpy.array', 'np.array', ({(208, 30, 208, 48): '[0, 1, 0, 0, 1, 0]'}, {}), '([0, 1, 0, 0, 1, 0])', True, 'import numpy as np\n'), ((209, 21, 209, 49), 'numpy.array', 'np.array', ({(209, 30, 209, 48): '[0, 1, 0, 0, 0, 1]'}, {}), '([0, 1, 0, 0, 0, 1])', True, 'import numpy as np\n'), ((211, 21, 211, 49), 'numpy.array', 'np.array', ({(211, 30, 211, 48): '[0, 1, 0, 0, 1, 1]'}, {}), '([0, 1, 0, 0, 1, 1])', True, 'import numpy as np\n'), ((212, 21, 212, 49), 'numpy.array', 'np.array', ({(212, 30, 212, 48): '[0, 1, 0, 0, 1, 1]'}, {}), '([0, 1, 0, 0, 1, 1])', True, 'import numpy as np\n'), ((214, 21, 214, 49), 'numpy.array', 'np.array', ({(214, 30, 214, 48): '[0, 0, 0, 0, 1, 0]'}, {}), '([0, 0, 0, 0, 1, 0])', True, 'import numpy as np\n'), ((215, 21, 215, 49), 'numpy.array', 'np.array', ({(215, 30, 215, 48): '[0, 1, 0, 0, 0, 1]'}, {}), '([0, 1, 0, 0, 0, 1])', True, 'import 
numpy as np\n'), ((217, 21, 217, 37), 'numpy.array', 'np.array', ({(217, 30, 217, 36): '[0, 0]'}, {}), '([0, 0])', True, 'import numpy as np\n'), ((218, 21, 218, 37), 'numpy.array', 'np.array', ({(218, 30, 218, 36): '[0, 0]'}, {}), '([0, 0])', True, 'import numpy as np\n'), ((223, 21, 223, 58), 'numpy.array', 'np.array', ({(223, 30, 223, 57): '[0, 0, 0, 0, 0, 0, 0, 0, 0]'}, {}), '([0, 0, 0, 0, 0, 0, 0, 0, 0])', True, 'import numpy as np\n'), ((224, 21, 224, 58), 'numpy.array', 'np.array', ({(224, 30, 224, 57): '[0, 0, 0, 1, 1, 1, 2, 2, 2]'}, {}), '([0, 0, 0, 1, 1, 1, 2, 2, 2])', True, 'import numpy as np\n'), ((226, 21, 226, 58), 'numpy.array', 'np.array', ({(226, 30, 226, 57): '[0, 0, 0, 1, 1, 1, 2, 2, 2]'}, {}), '([0, 0, 0, 1, 1, 1, 2, 2, 2])', True, 'import numpy as np\n'), ((227, 21, 227, 58), 'numpy.array', 'np.array', ({(227, 30, 227, 57): '[0, 0, 0, 1, 1, 1, 2, 2, 2]'}, {}), '([0, 0, 0, 1, 1, 1, 2, 2, 2])', True, 'import numpy as np\n'), ((229, 21, 229, 58), 'numpy.array', 'np.array', ({(229, 30, 229, 57): '[2, 2, 2, 0, 0, 0, 1, 1, 1]'}, {}), '([2, 2, 2, 0, 0, 0, 1, 1, 1])', True, 'import numpy as np\n'), ((230, 21, 230, 58), 'numpy.array', 'np.array', ({(230, 30, 230, 57): '[0, 0, 0, 1, 1, 1, 2, 2, 2]'}, {}), '([0, 0, 0, 1, 1, 1, 2, 2, 2])', True, 'import numpy as np\n'), ((232, 21, 232, 37), 'numpy.array', 'np.array', ({(232, 30, 232, 36): '[1, 2]'}, {}), '([1, 2])', True, 'import numpy as np\n'), ((233, 21, 233, 37), 'numpy.array', 'np.array', ({(233, 30, 233, 36): '[0, 0]'}, {}), '([0, 0])', True, 'import numpy as np\n'), ((238, 21, 238, 58), 'numpy.array', 'np.array', ({(238, 30, 238, 57): '[0, 0, 0, 0, 0, 0, 0, 0, 0]'}, {}), '([0, 0, 0, 0, 0, 0, 0, 0, 0])', True, 'import numpy as np\n'), ((239, 21, 239, 58), 'numpy.array', 'np.array', ({(239, 30, 239, 57): '[0, 0, 0, 1, 1, 1, 2, 2, 2]'}, {}), '([0, 0, 0, 1, 1, 1, 2, 2, 2])', True, 'import numpy as np\n'), ((242, 21, 242, 58), 'numpy.array', 'np.array', ({(242, 30, 242, 57): '[0, 0, 0, 1, 1, 1, 2, 2, 2]'}, {}), '([0, 0, 0, 1, 1, 1, 2, 2, 2])', True, 'import numpy as np\n'), ((243, 21, 243, 58), 'numpy.array', 'np.array', ({(243, 30, 243, 57): '[0, 0, 0, 1, 1, 1, 2, 2, 2]'}, {}), '([0, 0, 0, 1, 1, 1, 2, 2, 2])', True, 'import numpy as np\n'), ((245, 21, 245, 58), 'numpy.array', 'np.array', ({(245, 30, 245, 57): '[2, 2, 2, 0, 0, 0, 1, 1, 1]'}, {}), '([2, 2, 2, 0, 0, 0, 1, 1, 1])', True, 'import numpy as np\n'), ((246, 21, 246, 58), 'numpy.array', 'np.array', ({(246, 30, 246, 57): '[0, 0, 0, 1, 1, 1, 2, 2, 2]'}, {}), '([0, 0, 0, 1, 1, 1, 2, 2, 2])', True, 'import numpy as np\n'), ((248, 21, 248, 37), 'numpy.array', 'np.array', ({(248, 30, 248, 36): '[1, 2]'}, {}), '([1, 2])', True, 'import numpy as np\n'), ((249, 21, 249, 37), 'numpy.array', 'np.array', ({(249, 30, 249, 36): '[0, 0]'}, {}), '([0, 0])', True, 'import numpy as np\n'), ((254, 21, 254, 58), 'numpy.array', 'np.array', ({(254, 30, 254, 57): '[0, 0, 0, 1, 1, 1, 2, 2, 2]'}, {}), '([0, 0, 0, 1, 1, 1, 2, 2, 2])', True, 'import numpy as np\n'), ((255, 21, 255, 58), 'numpy.array', 'np.array', ({(255, 30, 255, 57): '[0, 0, 0, 0, 0, 0, 0, 0, 0]'}, {}), '([0, 0, 0, 0, 0, 0, 0, 0, 0])', True, 'import numpy as np\n'), ((258, 21, 258, 58), 'numpy.array', 'np.array', ({(258, 30, 258, 57): '[0, 0, 0, 1, 1, 1, 2, 2, 2]'}, {}), '([0, 0, 0, 1, 1, 1, 2, 2, 2])', True, 'import numpy as np\n'), ((259, 21, 259, 58), 'numpy.array', 'np.array', ({(259, 30, 259, 57): '[0, 0, 0, 1, 1, 1, 2, 2, 2]'}, {}), '([0, 0, 0, 1, 1, 1, 2, 2, 2])', True, 'import numpy as np\n'), ((261, 21, 261, 58), 
'numpy.array', 'np.array', ({(261, 30, 261, 57): '[0, 0, 0, 1, 1, 1, 2, 2, 2]'}, {}), '([0, 0, 0, 1, 1, 1, 2, 2, 2])', True, 'import numpy as np\n'), ((262, 21, 262, 58), 'numpy.array', 'np.array', ({(262, 30, 262, 57): '[2, 2, 2, 0, 0, 0, 1, 1, 1]'}, {}), '([2, 2, 2, 0, 0, 0, 1, 1, 1])', True, 'import numpy as np\n'), ((264, 21, 264, 37), 'numpy.array', 'np.array', ({(264, 30, 264, 36): '[0, 0]'}, {}), '([0, 0])', True, 'import numpy as np\n'), ((265, 21, 265, 37), 'numpy.array', 'np.array', ({(265, 30, 265, 36): '[1, 2]'}, {}), '([1, 2])', True, 'import numpy as np\n'), ((270, 21, 270, 49), 'numpy.array', 'np.array', ({(270, 30, 270, 48): '[1, 1, 1, 1, 1, 1]'}, {}), '([1, 1, 1, 1, 1, 1])', True, 'import numpy as np\n'), ((271, 21, 271, 49), 'numpy.array', 'np.array', ({(271, 30, 271, 48): '[0, 0, 0, 1, 1, 1]'}, {}), '([0, 0, 0, 1, 1, 1])', True, 'import numpy as np\n'), ((273, 21, 273, 49), 'numpy.array', 'np.array', ({(273, 30, 273, 48): '[0, 0, 0, 1, 1, 1]'}, {}), '([0, 0, 0, 1, 1, 1])', True, 'import numpy as np\n'), ((274, 21, 274, 49), 'numpy.array', 'np.array', ({(274, 30, 274, 48): '[1, 1, 1, 1, 1, 1]'}, {}), '([1, 1, 1, 1, 1, 1])', True, 'import numpy as np\n'), ((276, 21, 276, 49), 'numpy.array', 'np.array', ({(276, 30, 276, 48): '[0, 0, 0, 0, 0, 0]'}, {}), '([0, 0, 0, 0, 0, 0])', True, 'import numpy as np\n'), ((277, 21, 277, 49), 'numpy.array', 'np.array', ({(277, 30, 277, 48): '[1, 1, 1, 1, 1, 1]'}, {}), '([1, 1, 1, 1, 1, 1])', True, 'import numpy as np\n'), ((279, 21, 279, 49), 'numpy.array', 'np.array', ({(279, 30, 279, 48): '[0, 0, 0, 0, 0, 0]'}, {}), '([0, 0, 0, 0, 0, 0])', True, 'import numpy as np\n'), ((280, 21, 280, 49), 'numpy.array', 'np.array', ({(280, 30, 280, 48): '[0, 0, 0, 0, 0, 0]'}, {}), '([0, 0, 0, 0, 0, 0])', True, 'import numpy as np\n'), ((285, 21, 285, 58), 'numpy.array', 'np.array', ({(285, 30, 285, 57): '[0, 0, 0, 1, 1, 1, 2, 2, 2]'}, {}), '([0, 0, 0, 1, 1, 1, 2, 2, 2])', True, 'import numpy as np\n'), ((286, 21, 286, 58), 'numpy.array', 'np.array', ({(286, 30, 286, 57): '[0, 0, 0, 0, 0, 0, 0, 0, 0]'}, {}), '([0, 0, 0, 0, 0, 0, 0, 0, 0])', True, 'import numpy as np\n'), ((288, 21, 288, 58), 'numpy.array', 'np.array', ({(288, 30, 288, 57): '[0, 0, 0, 1, 1, 1, 2, 2, 2]'}, {}), '([0, 0, 0, 1, 1, 1, 2, 2, 2])', True, 'import numpy as np\n'), ((289, 21, 289, 58), 'numpy.array', 'np.array', ({(289, 30, 289, 57): '[0, 0, 0, 1, 1, 1, 2, 2, 2]'}, {}), '([0, 0, 0, 1, 1, 1, 2, 2, 2])', True, 'import numpy as np\n'), ((291, 21, 291, 58), 'numpy.array', 'np.array', ({(291, 30, 291, 57): '[0, 0, 0, 1, 1, 1, 2, 2, 2]'}, {}), '([0, 0, 0, 1, 1, 1, 2, 2, 2])', True, 'import numpy as np\n'), ((292, 21, 292, 58), 'numpy.array', 'np.array', ({(292, 30, 292, 57): '[2, 2, 2, 0, 0, 0, 1, 1, 1]'}, {}), '([2, 2, 2, 0, 0, 0, 1, 1, 1])', True, 'import numpy as np\n'), ((294, 21, 294, 37), 'numpy.array', 'np.array', ({(294, 30, 294, 36): '[0, 0]'}, {}), '([0, 0])', True, 'import numpy as np\n'), ((295, 21, 295, 37), 'numpy.array', 'np.array', ({(295, 30, 295, 36): '[1, 2]'}, {}), '([1, 2])', True, 'import numpy as np\n'), ((300, 21, 300, 58), 'numpy.array', 'np.array', ({(300, 30, 300, 57): '[0, 0, 0, 1, 1, 1, 2, 2, 2]'}, {}), '([0, 0, 0, 1, 1, 1, 2, 2, 2])', True, 'import numpy as np\n'), ((301, 21, 301, 58), 'numpy.array', 'np.array', ({(301, 30, 301, 57): '[0, 0, 0, 0, 0, 0, 0, 0, 0]'}, {}), '([0, 0, 0, 0, 0, 0, 0, 0, 0])', True, 'import numpy as np\n'), ((303, 21, 303, 58), 'numpy.array', 'np.array', ({(303, 30, 303, 57): '[0, 0, 0, 1, 1, 1, 2, 2, 2]'}, {}), '([0, 0, 0, 1, 1, 
1, 2, 2, 2])', True, 'import numpy as np\n'), ((304, 21, 304, 58), 'numpy.array', 'np.array', ({(304, 30, 304, 57): '[0, 0, 0, 1, 1, 1, 2, 2, 2]'}, {}), '([0, 0, 0, 1, 1, 1, 2, 2, 2])', True, 'import numpy as np\n'), ((306, 21, 306, 58), 'numpy.array', 'np.array', ({(306, 30, 306, 57): '[0, 0, 0, 1, 1, 1, 2, 2, 2]'}, {}), '([0, 0, 0, 1, 1, 1, 2, 2, 2])', True, 'import numpy as np\n'), ((307, 21, 307, 58), 'numpy.array', 'np.array', ({(307, 30, 307, 57): '[2, 2, 2, 0, 0, 0, 1, 1, 1]'}, {}), '([2, 2, 2, 0, 0, 0, 1, 1, 1])', True, 'import numpy as np\n'), ((309, 21, 309, 37), 'numpy.array', 'np.array', ({(309, 30, 309, 36): '[0, 0]'}, {}), '([0, 0])', True, 'import numpy as np\n'), ((310, 21, 310, 37), 'numpy.array', 'np.array', ({(310, 30, 310, 36): '[1, 2]'}, {}), '([1, 2])', True, 'import numpy as np\n'), ((315, 21, 315, 58), 'numpy.array', 'np.array', ({(315, 30, 315, 57): '[0, 0, 0, 1, 1, 1, 2, 2, 2]'}, {}), '([0, 0, 0, 1, 1, 1, 2, 2, 2])', True, 'import numpy as np\n'), ((316, 21, 316, 58), 'numpy.array', 'np.array', ({(316, 30, 316, 57): '[0, 0, 0, 0, 0, 0, 0, 0, 0]'}, {}), '([0, 0, 0, 0, 0, 0, 0, 0, 0])', True, 'import numpy as np\n'), ((318, 21, 318, 58), 'numpy.array', 'np.array', ({(318, 30, 318, 57): '[0, 0, 0, 1, 1, 1, 2, 2, 2]'}, {}), '([0, 0, 0, 1, 1, 1, 2, 2, 2])', True, 'import numpy as np\n'), ((319, 21, 319, 58), 'numpy.array', 'np.array', ({(319, 30, 319, 57): '[0, 0, 0, 1, 1, 1, 2, 2, 2]'}, {}), '([0, 0, 0, 1, 1, 1, 2, 2, 2])', True, 'import numpy as np\n'), ((321, 21, 321, 58), 'numpy.array', 'np.array', ({(321, 30, 321, 57): '[0, 0, 0, 1, 1, 1, 2, 2, 2]'}, {}), '([0, 0, 0, 1, 1, 1, 2, 2, 2])', True, 'import numpy as np\n'), ((322, 21, 322, 58), 'numpy.array', 'np.array', ({(322, 30, 322, 57): '[2, 2, 2, 0, 0, 0, 1, 1, 1]'}, {}), '([2, 2, 2, 0, 0, 0, 1, 1, 1])', True, 'import numpy as np\n'), ((324, 21, 324, 37), 'numpy.array', 'np.array', ({(324, 30, 324, 36): '[0, 0]'}, {}), '([0, 0])', True, 'import numpy as np\n'), ((325, 21, 325, 37), 'numpy.array', 'np.array', ({(325, 30, 325, 36): '[1, 2]'}, {}), '([1, 2])', True, 'import numpy as np\n'), ((330, 21, 330, 49), 'numpy.array', 'np.array', ({(330, 30, 330, 48): '[0, 0, 0, 1, 1, 1]'}, {}), '([0, 0, 0, 1, 1, 1])', True, 'import numpy as np\n'), ((331, 21, 331, 49), 'numpy.array', 'np.array', ({(331, 30, 331, 48): '[1, 1, 1, 1, 1, 1]'}, {}), '([1, 1, 1, 1, 1, 1])', True, 'import numpy as np\n'), ((333, 21, 333, 49), 'numpy.array', 'np.array', ({(333, 30, 333, 48): '[0, 0, 0, 1, 1, 1]'}, {}), '([0, 0, 0, 1, 1, 1])', True, 'import numpy as np\n'), ((334, 21, 334, 49), 'numpy.array', 'np.array', ({(334, 30, 334, 48): '[0, 0, 0, 0, 0, 0]'}, {}), '([0, 0, 0, 0, 0, 0])', True, 'import numpy as np\n'), ((336, 21, 336, 49), 'numpy.array', 'np.array', ({(336, 30, 336, 48): '[1, 1, 1, 1, 1, 1]'}, {}), '([1, 1, 1, 1, 1, 1])', True, 'import numpy as np\n'), ((337, 21, 337, 49), 'numpy.array', 'np.array', ({(337, 30, 337, 48): '[0, 0, 0, 1, 1, 1]'}, {}), '([0, 0, 0, 1, 1, 1])', True, 'import numpy as np\n'), ((342, 21, 342, 58), 'numpy.array', 'np.array', ({(342, 30, 342, 57): '[0, 0, 0, 1, 1, 1, 2, 2, 2]'}, {}), '([0, 0, 0, 1, 1, 1, 2, 2, 2])', True, 'import numpy as np\n'), ((343, 21, 343, 58), 'numpy.array', 'np.array', ({(343, 30, 343, 57): '[0, 0, 0, 0, 0, 0, 0, 0, 0]'}, {}), '([0, 0, 0, 0, 0, 0, 0, 0, 0])', True, 'import numpy as np\n'), ((345, 21, 345, 58), 'numpy.array', 'np.array', ({(345, 30, 345, 57): '[0, 0, 0, 1, 1, 1, 2, 2, 2]'}, {}), '([0, 0, 0, 1, 1, 1, 2, 2, 2])', True, 'import numpy as np\n'), ((346, 21, 346, 58), 
'numpy.array', 'np.array', ({(346, 30, 346, 57): '[0, 0, 0, 1, 1, 1, 2, 2, 2]'}, {}), '([0, 0, 0, 1, 1, 1, 2, 2, 2])', True, 'import numpy as np\n'), ((348, 21, 348, 58), 'numpy.array', 'np.array', ({(348, 30, 348, 57): '[0, 0, 0, 1, 1, 1, 2, 2, 2]'}, {}), '([0, 0, 0, 1, 1, 1, 2, 2, 2])', True, 'import numpy as np\n'), ((349, 21, 349, 58), 'numpy.array', 'np.array', ({(349, 30, 349, 57): '[2, 2, 2, 0, 0, 0, 1, 1, 1]'}, {}), '([2, 2, 2, 0, 0, 0, 1, 1, 1])', True, 'import numpy as np\n'), ((351, 21, 351, 37), 'numpy.array', 'np.array', ({(351, 30, 351, 36): '[0, 0]'}, {}), '([0, 0])', True, 'import numpy as np\n'), ((352, 21, 352, 37), 'numpy.array', 'np.array', ({(352, 30, 352, 36): '[1, 2]'}, {}), '([1, 2])', True, 'import numpy as np\n'), ((357, 21, 357, 58), 'numpy.array', 'np.array', ({(357, 30, 357, 57): '[0, 0, 0, 1, 1, 1, 2, 2, 2]'}, {}), '([0, 0, 0, 1, 1, 1, 2, 2, 2])', True, 'import numpy as np\n'), ((358, 21, 358, 58), 'numpy.array', 'np.array', ({(358, 30, 358, 57): '[0, 0, 0, 0, 0, 0, 0, 0, 0]'}, {}), '([0, 0, 0, 0, 0, 0, 0, 0, 0])', True, 'import numpy as np\n'), ((360, 21, 360, 58), 'numpy.array', 'np.array', ({(360, 30, 360, 57): '[0, 0, 0, 1, 1, 1, 2, 2, 2]'}, {}), '([0, 0, 0, 1, 1, 1, 2, 2, 2])', True, 'import numpy as np\n'), ((361, 21, 361, 58), 'numpy.array', 'np.array', ({(361, 30, 361, 57): '[0, 0, 0, 1, 1, 1, 2, 2, 2]'}, {}), '([0, 0, 0, 1, 1, 1, 2, 2, 2])', True, 'import numpy as np\n'), ((363, 21, 363, 58), 'numpy.array', 'np.array', ({(363, 30, 363, 57): '[0, 0, 0, 1, 1, 1, 2, 2, 2]'}, {}), '([0, 0, 0, 1, 1, 1, 2, 2, 2])', True, 'import numpy as np\n'), ((364, 21, 364, 58), 'numpy.array', 'np.array', ({(364, 30, 364, 57): '[2, 2, 2, 0, 0, 0, 1, 1, 1]'}, {}), '([2, 2, 2, 0, 0, 0, 1, 1, 1])', True, 'import numpy as np\n'), ((366, 21, 366, 37), 'numpy.array', 'np.array', ({(366, 30, 366, 36): '[0, 0]'}, {}), '([0, 0])', True, 'import numpy as np\n'), ((367, 21, 367, 37), 'numpy.array', 'np.array', ({(367, 30, 367, 36): '[1, 2]'}, {}), '([1, 2])', True, 'import numpy as np\n'), ((372, 21, 372, 58), 'numpy.array', 'np.array', ({(372, 30, 372, 57): '[0, 0, 0, 1, 1, 1, 2, 2, 2]'}, {}), '([0, 0, 0, 1, 1, 1, 2, 2, 2])', True, 'import numpy as np\n'), ((373, 21, 373, 58), 'numpy.array', 'np.array', ({(373, 30, 373, 57): '[0, 0, 0, 0, 0, 0, 0, 0, 0]'}, {}), '([0, 0, 0, 0, 0, 0, 0, 0, 0])', True, 'import numpy as np\n'), ((375, 21, 375, 58), 'numpy.array', 'np.array', ({(375, 30, 375, 57): '[0, 0, 0, 1, 1, 1, 2, 2, 2]'}, {}), '([0, 0, 0, 1, 1, 1, 2, 2, 2])', True, 'import numpy as np\n'), ((376, 21, 376, 58), 'numpy.array', 'np.array', ({(376, 30, 376, 57): '[0, 0, 0, 1, 1, 1, 2, 2, 2]'}, {}), '([0, 0, 0, 1, 1, 1, 2, 2, 2])', True, 'import numpy as np\n'), ((378, 21, 378, 58), 'numpy.array', 'np.array', ({(378, 30, 378, 57): '[0, 0, 0, 1, 1, 1, 2, 2, 2]'}, {}), '([0, 0, 0, 1, 1, 1, 2, 2, 2])', True, 'import numpy as np\n'), ((379, 21, 379, 58), 'numpy.array', 'np.array', ({(379, 30, 379, 57): '[2, 2, 2, 0, 0, 0, 1, 1, 1]'}, {}), '([2, 2, 2, 0, 0, 0, 1, 1, 1])', True, 'import numpy as np\n'), ((381, 21, 381, 37), 'numpy.array', 'np.array', ({(381, 30, 381, 36): '[0, 0]'}, {}), '([0, 0])', True, 'import numpy as np\n'), ((382, 21, 382, 37), 'numpy.array', 'np.array', ({(382, 30, 382, 36): '[1, 2]'}, {}), '([1, 2])', True, 'import numpy as np\n'), ((402, 68, 402, 91), 'numpy.sqrt', 'np.sqrt', ({(402, 76, 402, 90): '(0.56246732491)'}, {}), '(0.56246732491)', True, 'import numpy as np\n'), ((404, 68, 404, 94), 'numpy.sqrt', 'np.sqrt', ({(404, 76, 404, 93): '(0.617267976207983)'}, 
{}), '(0.617267976207983)', True, 'import numpy as np\n'), ((424, 68, 424, 84), 'numpy.sqrt', 'np.sqrt', ({(424, 76, 424, 83): '(5.0 / 3.0)'}, {}), '(5.0 / 3.0)', True, 'import numpy as np\n'), ((426, 68, 426, 79), 'numpy.sqrt', 'np.sqrt', ({(426, 76, 426, 78): '(2.0)'}, {}), '(2.0)', True, 'import numpy as np\n'), ((462, 44, 462, 66), 'evalml.objectives.utils._all_objectives_dict', '_all_objectives_dict', ({}, {}), '()', False, 'from evalml.objectives.utils import _all_objectives_dict, get_non_core_objectives\n'), ((64, 17, 64, 88), 'pytest.raises', 'pytest.raises', (), '', False, 'import pytest\n'), ((85, 17, 85, 88), 'pytest.raises', 'pytest.raises', (), '', False, 'import pytest\n'), ((116, 17, 116, 94), 'pytest.raises', 'pytest.raises', (), '', False, 'import pytest\n'), ((123, 17, 123, 94), 'pytest.raises', 'pytest.raises', (), '', False, 'import pytest\n'), ((130, 17, 130, 94), 'pytest.raises', 'pytest.raises', (), '', False, 'import pytest\n'), ((147, 17, 147, 100), 'pytest.raises', 'pytest.raises', (), '', False, 'import pytest\n'), ((154, 17, 154, 95), 'pytest.raises', 'pytest.raises', (), '', False, 'import pytest\n'), ((436, 8, 436, 19), 'evalml.objectives.MCCBinary', 'MCCBinary', ({}, {}), '()', False, 'from evalml.objectives import F1, MAPE, MSE, AccuracyBinary, AccuracyMulticlass, BalancedAccuracyBinary, BalancedAccuracyMulticlass, BinaryClassificationObjective, CostBenefitMatrix, ExpVariance, F1Macro, F1Micro, F1Weighted, LogLossBinary, MCCBinary, MCCMulticlass, MeanSquaredLogError, Precision, PrecisionMacro, PrecisionMicro, PrecisionWeighted, Recall, RecallMacro, RecallMicro, RecallWeighted, RootMeanSquaredError, RootMeanSquaredLogError\n'), ((437, 8, 437, 23), 'evalml.objectives.MCCMulticlass', 'MCCMulticlass', ({}, {}), '()', False, 'from evalml.objectives import F1, MAPE, MSE, AccuracyBinary, AccuracyMulticlass, BalancedAccuracyBinary, BalancedAccuracyMulticlass, BinaryClassificationObjective, CostBenefitMatrix, ExpVariance, F1Macro, F1Micro, F1Weighted, LogLossBinary, MCCBinary, MCCMulticlass, MeanSquaredLogError, Precision, PrecisionMacro, PrecisionMicro, PrecisionWeighted, Recall, RecallMacro, RecallMicro, RecallWeighted, RootMeanSquaredError, RootMeanSquaredLogError\n'), ((474, 62, 474, 84), 'evalml.objectives.utils._all_objectives_dict', '_all_objectives_dict', ({}, {}), '()', False, 'from evalml.objectives.utils import _all_objectives_dict, get_non_core_objectives\n')] |
Aaron-Ming/websocket_terminal | server-python3/server.py | 42c24391d51c275eabf1f879fb312b9a3614f51e | import os
import urllib.parse
import eventlet
import eventlet.green.socket
# eventlet.monkey_patch()
import eventlet.websocket
import eventlet.wsgi
import wspty.pipe
from flask import Flask, request, redirect
from wspty.EchoTerminal import EchoTerminal
from wspty.EncodedTerminal import EncodedTerminal
from wspty.WebsocketBinding import WebsocketBinding
import config
def make_app():
app = Flask(__name__)
app.static_folder = get_static_folder()
print("Serving static files from: " + app.static_folder)
@app.route('/')
def index():
newurl = b'/static/index.html'
if request.query_string:
newurl = newurl + b'?' + request.query_string
return redirect(newurl)
return app
def parse_query(qstr):
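    """Turn a query string into a dict, keeping only the first value of each parameter."""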
return {k: v[0] for k, v in urllib.parse.parse_qs(qstr).items()}
def debug(s):
app.logger.debug(s)
class TerminalFactory:
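    """Builds a terminal backend ('ssh', 'raw', 'echo' or 'prompt') from the parsed query arguments."""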
def __init__(self, args_dict, allow_unsafe=False):
self.kind = args_dict['kind']
self.hostname = args_dict.get('hostname', 'localhost')
self.port = int(args_dict.get('port', '22'))
self.username = args_dict.get('username')
self.password = args_dict.get('password')
self.term = args_dict.get('term')
self.encoding = args_dict.get('encoding', 'utf8')
self.allow_unsafe = allow_unsafe
def create_binary(self):
if self.kind == 'ssh':
from wspty.SshTerminal import SshTerminal
return SshTerminal(
self.hostname, self.port, self.username, self.password, self.term
)
if self.kind == 'raw':
from wspty.SocketTerminal import SocketTerminal
sock = eventlet.green.socket.socket()
ip = eventlet.green.socket.gethostbyname(self.hostname)
sock.connect((ip, self.port))
return SocketTerminal(sock)
if self.kind == 'echo':
return EchoTerminal()
if self.kind == 'prompt':
if not self.allow_unsafe:
raise Exception("kind {} is disabled".format(self.kind))
from wspty import PromptTerminal
return PromptTerminal.os_terminal()
        raise NotImplementedError('kind: {}'.format(self.kind))
def create(self):
return EncodedTerminal(self.create_binary(), self.encoding)
class DefaultRootApp:
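    """Root WSGI app: dispatches /wssh to the websocket terminal handler and everything else to the Flask app."""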
def __init__(self):
self._app_handle_wssh = eventlet.websocket.WebSocketWSGI(self.handle_wssh)
self.allow_unsafe = False
def handle_wssh(self, ws):
debug('Creating terminal with remote {remote}'.format(
remote=ws.environ.get('REMOTE_ADDR'),
))
ws_binding = WebsocketBinding(ws)
query = parse_query(ws.environ.get('QUERY_STRING', ''))
terminal = None
try:
kind, terminal = self.create_terminal(query)
ws_binding.send('Connected to %s\r\n' % (kind,))
wspty.pipe.pipe(ws_binding, terminal)
except BaseException as e:
ws_binding.send_error(e)
raise
finally:
if terminal:
terminal.close()
debug('Closing terminal normally with remote {remote}'.format(
remote=ws.environ.get('REMOTE_ADDR'),
))
return ''
def create_terminal(self, obj):
factory = TerminalFactory(obj, self.allow_unsafe)
return factory.kind, factory.create()
def handler(self, env, *args):
route = env["PATH_INFO"]
if route == '/wssh':
return self._app_handle_wssh(env, *args)
else:
return app(env, *args)
def make_parser():
import argparse
parser = argparse.ArgumentParser(description='Websocket Terminal server')
parser.add_argument('-l', '--listen', default='', help='Listen on interface (default all)')
parser.add_argument('-p', '--port', default=5002, type=int, help='Listen on port')
parser.add_argument('--unsafe', action='store_true', help='Allow unauthenticated connections to local machine')
return parser
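# Example invocation (for illustration): python server.py -l 127.0.0.1 -p 5002 --unsafe
# --unsafe additionally enables the local 'prompt' terminal kind.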
def start(interface, port, root_app_handler):
conn = (interface, port)
listener = eventlet.listen(conn)
print('listening on {0}:{1}'.format(*conn))
try:
eventlet.wsgi.server(listener, root_app_handler)
except KeyboardInterrupt:
pass
def start_default(interface, port, allow_unsafe=False, root_app_cls=DefaultRootApp):
root_app = root_app_cls()
root_app.allow_unsafe = allow_unsafe
start(interface, port, root_app.handler)
def main():
args = make_parser().parse_args()
start_default(args.listen, args.port, args.unsafe)
def get_static_folder():
path_root = os.path.join(os.path.dirname(os.path.realpath(__file__)), '../client')
path_root = os.path.join(path_root, config.CLIENT_DIR)
return os.path.abspath(path_root)
app = make_app()
if __name__ == '__main__':
main()
| [((19, 10, 19, 25), 'flask.Flask', 'Flask', ({(19, 16, 19, 24): '__name__'}, {}), '(__name__)', False, 'from flask import Flask, request, redirect\n'), ((119, 13, 119, 77), 'argparse.ArgumentParser', 'argparse.ArgumentParser', (), '', False, 'import argparse\n'), ((128, 15, 128, 36), 'eventlet.listen', 'eventlet.listen', ({(128, 31, 128, 35): 'conn'}, {}), '(conn)', False, 'import eventlet\n'), ((149, 16, 149, 58), 'os.path.join', 'os.path.join', ({(149, 29, 149, 38): 'path_root', (149, 40, 149, 57): 'config.CLIENT_DIR'}, {}), '(path_root, config.CLIENT_DIR)', False, 'import os\n'), ((150, 11, 150, 37), 'os.path.abspath', 'os.path.abspath', ({(150, 27, 150, 36): 'path_root'}, {}), '(path_root)', False, 'import os\n'), ((28, 15, 28, 31), 'flask.redirect', 'redirect', ({(28, 24, 28, 30): 'newurl'}, {}), '(newurl)', False, 'from flask import Flask, request, redirect\n'), ((78, 32, 78, 82), 'eventlet.websocket.WebSocketWSGI', 'eventlet.websocket.WebSocketWSGI', ({(78, 65, 78, 81): 'self.handle_wssh'}, {}), '(self.handle_wssh)', False, 'import eventlet\n'), ((86, 21, 86, 41), 'wspty.WebsocketBinding.WebsocketBinding', 'WebsocketBinding', ({(86, 38, 86, 40): 'ws'}, {}), '(ws)', False, 'from wspty.WebsocketBinding import WebsocketBinding\n'), ((131, 8, 131, 56), 'eventlet.wsgi.server', 'eventlet.wsgi.server', ({(131, 29, 131, 37): 'listener', (131, 39, 131, 55): 'root_app_handler'}, {}), '(listener, root_app_handler)', False, 'import eventlet\n'), ((54, 19, 56, 13), 'wspty.SshTerminal.SshTerminal', 'SshTerminal', ({(55, 16, 55, 29): 'self.hostname', (55, 31, 55, 40): 'self.port', (55, 42, 55, 55): 'self.username', (55, 57, 55, 70): 'self.password', (55, 72, 55, 81): 'self.term'}, {}), '(self.hostname, self.port, self.username, self.password, self.term)', False, 'from wspty.SshTerminal import SshTerminal\n'), ((59, 19, 59, 49), 'eventlet.green.socket.socket', 'eventlet.green.socket.socket', ({}, {}), '()', False, 'import eventlet\n'), ((60, 17, 60, 67), 'eventlet.green.socket.gethostbyname', 'eventlet.green.socket.gethostbyname', ({(60, 53, 60, 66): 'self.hostname'}, {}), '(self.hostname)', False, 'import eventlet\n'), ((62, 19, 62, 39), 'wspty.SocketTerminal.SocketTerminal', 'SocketTerminal', ({(62, 34, 62, 38): 'sock'}, {}), '(sock)', False, 'from wspty.SocketTerminal import SocketTerminal\n'), ((64, 19, 64, 33), 'wspty.EchoTerminal.EchoTerminal', 'EchoTerminal', ({}, {}), '()', False, 'from wspty.EchoTerminal import EchoTerminal\n'), ((69, 19, 69, 47), 'wspty.PromptTerminal.os_terminal', 'PromptTerminal.os_terminal', ({}, {}), '()', False, 'from wspty import PromptTerminal\n'), ((148, 45, 148, 71), 'os.path.realpath', 'os.path.realpath', ({(148, 62, 148, 70): '__file__'}, {}), '(__file__)', False, 'import os\n')] |
seomoz/roger-mesos-tools | tests/unit/test_roger_promote.py | 88b4cb3550a4b49d0187cfb5e6a22246ff6b9765 | # -*- encoding: utf-8 -*-
"""
Unit test for roger_promote.py
"""
import tests.helper
import unittest
import os
import os.path
import pytest
import requests
from mockito import mock, Mock, when
from cli.roger_promote import RogerPromote
from cli.appconfig import AppConfig
from cli.settings import Settings
from cli.framework import Framework
from cli.frameworkUtils import FrameworkUtils
from cli.marathon import Marathon
from cli.chronos import Chronos
class TestRogerPromote(unittest.TestCase):
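    """Unit tests for RogerPromote; external collaborators are replaced with mockito mocks."""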
def setUp(self):
self.marathon = mock(Marathon)
self.settings = mock(Settings)
self.app_config = mock(AppConfig)
self.framework = self.marathon
self.framework_utils = mock(FrameworkUtils)
self.config_file = "test.yml"
self.roger_env = {}
os.environ['ROGER_CONFIG_DIR'] = '/vagrant/config'
@property
def config_dir(self):
return os.environ['ROGER_CONFIG_DIR']
def test_config_dir(self):
rp = RogerPromote()
assert rp.config_dir == '/vagrant/config'
def test_roger_env(self):
fake_config = tests.helper.fake_config()
settings = mock(Settings)
when(self.app_config).getRogerEnv(
self.config_dir
).thenReturn(fake_config)
rp = RogerPromote(app_config=self.app_config)
assert rp.roger_env == fake_config
def test_set_framework(self):
app_data = {'test_app': {'name': 'test_app'}}
when(self.app_config).getAppData(
self.config_dir, self.config_file, 'test_app'
).thenReturn(app_data)
rp = RogerPromote(app_config=self.app_config)
rp._set_framework(self.config_file, 'test_app')
assert rp._framework.getName() == 'Marathon'
def test_image_name(self):
os.environ['ROGER_USER'] = "first.last"
os.environ['ROGER_USER_PASS_DEV'] = "password"
os.environ['ROGER_USER_PASS_STAGE'] = "password"
os.environ['ROGER_USER_PASS_PROD'] = "password"
framework = mock(Marathon)
when(framework).getName().thenReturn("Marathon")
when(framework).get_app_id(
"test_path/test_app.json",
"Marathon"
).thenReturn("app_id")
when(framework).get_image_name(
'first.last',
"password",
"dev",
"app_id",
self.config_dir,
self.config_file
).thenReturn("test_image")
rp = RogerPromote(framework=framework)
assert rp._image_name(
'dev',
self.config_file,
"test_path/test_app.json") == 'test_image'
def test_config_resolver(self):
framework = mock(Framework)
settings = mock(Settings)
app_config = mock(AppConfig)
config_dir = '/vagrant/config'
fake_team_config = tests.helper.fake_team_config()
when(settings).getConfigDir().thenReturn(config_dir)
when(app_config).getConfig(
config_dir, 'roger.json'
).thenReturn(fake_team_config)
rp = RogerPromote(settings=settings, app_config=app_config)
val = rp._config_resolver('template_path', 'test_app', 'roger.json')
assert val == 'framework_template_path'
def test_roger_push_script(self):
path = RogerPromote()._roger_push_script()
assert 'roger-mesos-tools/cli/roger_push.py' in path
| [((30, 24, 30, 38), 'mockito.mock', 'mock', ({(30, 29, 30, 37): 'Marathon'}, {}), '(Marathon)', False, 'from mockito import mock, Mock, when\n'), ((31, 24, 31, 38), 'mockito.mock', 'mock', ({(31, 29, 31, 37): 'Settings'}, {}), '(Settings)', False, 'from mockito import mock, Mock, when\n'), ((32, 26, 32, 41), 'mockito.mock', 'mock', ({(32, 31, 32, 40): 'AppConfig'}, {}), '(AppConfig)', False, 'from mockito import mock, Mock, when\n'), ((34, 31, 34, 51), 'mockito.mock', 'mock', ({(34, 36, 34, 50): 'FrameworkUtils'}, {}), '(FrameworkUtils)', False, 'from mockito import mock, Mock, when\n'), ((45, 13, 45, 27), 'cli.roger_promote.RogerPromote', 'RogerPromote', ({}, {}), '()', False, 'from cli.roger_promote import RogerPromote\n'), ((51, 19, 51, 33), 'mockito.mock', 'mock', ({(51, 24, 51, 32): 'Settings'}, {}), '(Settings)', False, 'from mockito import mock, Mock, when\n'), ((57, 13, 57, 53), 'cli.roger_promote.RogerPromote', 'RogerPromote', (), '', False, 'from cli.roger_promote import RogerPromote\n'), ((67, 13, 67, 53), 'cli.roger_promote.RogerPromote', 'RogerPromote', (), '', False, 'from cli.roger_promote import RogerPromote\n'), ((79, 20, 79, 34), 'mockito.mock', 'mock', ({(79, 25, 79, 33): 'Marathon'}, {}), '(Marathon)', False, 'from mockito import mock, Mock, when\n'), ((95, 13, 95, 46), 'cli.roger_promote.RogerPromote', 'RogerPromote', (), '', False, 'from cli.roger_promote import RogerPromote\n'), ((102, 20, 102, 35), 'mockito.mock', 'mock', ({(102, 25, 102, 34): 'Framework'}, {}), '(Framework)', False, 'from mockito import mock, Mock, when\n'), ((103, 19, 103, 33), 'mockito.mock', 'mock', ({(103, 24, 103, 32): 'Settings'}, {}), '(Settings)', False, 'from mockito import mock, Mock, when\n'), ((104, 21, 104, 36), 'mockito.mock', 'mock', ({(104, 26, 104, 35): 'AppConfig'}, {}), '(AppConfig)', False, 'from mockito import mock, Mock, when\n'), ((113, 13, 113, 67), 'cli.roger_promote.RogerPromote', 'RogerPromote', (), '', False, 'from cli.roger_promote import RogerPromote\n'), ((118, 15, 118, 29), 'cli.roger_promote.RogerPromote', 'RogerPromote', ({}, {}), '()', False, 'from cli.roger_promote import RogerPromote\n'), ((53, 8, 53, 29), 'mockito.when', 'when', ({(53, 13, 53, 28): 'self.app_config'}, {}), '(self.app_config)', False, 'from mockito import mock, Mock, when\n'), ((63, 8, 63, 29), 'mockito.when', 'when', ({(63, 13, 63, 28): 'self.app_config'}, {}), '(self.app_config)', False, 'from mockito import mock, Mock, when\n'), ((81, 8, 81, 23), 'mockito.when', 'when', ({(81, 13, 81, 22): 'framework'}, {}), '(framework)', False, 'from mockito import mock, Mock, when\n'), ((82, 8, 82, 23), 'mockito.when', 'when', ({(82, 13, 82, 22): 'framework'}, {}), '(framework)', False, 'from mockito import mock, Mock, when\n'), ((86, 8, 86, 23), 'mockito.when', 'when', ({(86, 13, 86, 22): 'framework'}, {}), '(framework)', False, 'from mockito import mock, Mock, when\n'), ((108, 8, 108, 22), 'mockito.when', 'when', ({(108, 13, 108, 21): 'settings'}, {}), '(settings)', False, 'from mockito import mock, Mock, when\n'), ((109, 8, 109, 24), 'mockito.when', 'when', ({(109, 13, 109, 23): 'app_config'}, {}), '(app_config)', False, 'from mockito import mock, Mock, when\n')] |
papb/COVID-19 | data/collectors.py | 2dc8e683f55c494ca894727aca56f90e53b161f3 | import json
import pandas as pd
import requests
def load_dump_covid_19_data():
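    """Download the Brazilian COVID-19 case time series and dump per-UF and per-city CSV files."""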
    COVID_19_BY_CITY_URL = 'https://raw.githubusercontent.com/wcota/covid19br/master/cases-brazil-cities-time.csv'
by_city=(pd.read_csv(COVID_19_BY_CITY_URL)
.query('country == "Brazil"')
.drop(columns=['country'])
.pipe(lambda df: df[df.state!='TOTAL'])
.assign(city=lambda df: df.city.apply(lambda x: x.split('/')[0]))
.rename(columns={'totalCases': 'cases',
'newCases': 'new_cases',
'state': 'uf'})
.sort_values(by=['city', 'date'])
)
by_uf = (by_city
.groupby(['date', 'uf'])
             [['new_cases', 'cases']]
.sum()
.reset_index())
dfs = [by_uf, by_city]
filenames = ['by_uf', 'by_city']
for df, filename in zip(dfs, filenames):
output_path = f'data/csv/covid_19/{filename}/{filename}.csv'
df.to_csv(output_path, index=False)
print(f'{filename} data exported to {output_path}')
def load_dump_uf_pop():
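    """Scrape IBGE population estimates and dump per-UF and per-city CSV files."""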
IBGE_POPULATION_EXCEL_URL = 'ftp://ftp.ibge.gov.br/Estimativas_de_Populacao/Estimativas_2019/estimativa_dou_2019.xls'
def _load_uf_codes():
print('Scraping UF codes')
return (
pd.read_html(
'https://www.oobj.com.br/bc/article/'
'quais-os-c%C3%B3digos-de-cada-uf-no-brasil-465.html'
)
[0]
            .replace(r'\s\(\*\)', '', regex=True)
.rename(columns={'UF': 'uf'})
[['Unidade da Federação', 'uf']]
)
def _load_uf_capitals():
print('Scraping UF capital names')
return (
pd.read_html(
'https://www.estadosecapitaisdobrasil.com/'
)
[0]
.rename(columns={'Sigla': 'uf', 'Capital': 'city'})
[['uf', 'city']]
)
# TODO: download excel file only once
def _download_ibge_excel_file(url):
pass
def _load_city_pop():
print('Scraping city population')
return (
pd.read_excel(IBGE_POPULATION_EXCEL_URL, sheet_name='Municípios', header=1)
.rename(columns={
'COD. UF': 'UF_code',
'COD. MUNIC': 'city_code',
'NOME DO MUNICÍPIO': 'city',
'POPULAÇÃO ESTIMADA': 'estimated_population'
})
.dropna(how='any')
.assign(estimated_population=lambda df: df.estimated_population
                                             .replace(r'\.', '', regex=True)
                                             .replace(r'\-', ' ', regex=True)
                                             .replace(r'\(\d+\)', '', regex=True)
                                             .astype('int')
)
.assign( UF_code=lambda df: df.UF_code.astype(int))
.assign(city_code=lambda df: df.city_code.astype(int))
.rename(columns={'UF': 'uf'})
[['uf', 'city', 'estimated_population']]
)
def _load_uf_pop():
print('Scraping UF population')
uf_codes = _load_uf_codes()
return (
pd.read_excel(IBGE_POPULATION_EXCEL_URL, header=1)
.drop(columns=['Unnamed: 1'])
.rename(columns={'POPULAÇÃO ESTIMADA': 'estimated_population'})
.dropna(how='any')
.assign(estimated_population=lambda df: df.estimated_population
                                                .replace(r'\.', '', regex=True)
                                                .replace(r'\-', ' ', regex=True)
                                                .replace(r'\(\d\)', '', regex=True)
                                                .astype('int')
)
.pipe(lambda df: pd.merge(df,
uf_codes,
left_on='BRASIL E UNIDADES DA FEDERAÇÃO',
right_on='Unidade da Federação',
how='inner'))
[['uf', 'estimated_population']]
)
uf_pop, city_pop, uf_capitals = (_load_uf_pop(),
_load_city_pop(),
_load_uf_capitals())
print('Combining uf and city data')
uf_pop = (
uf_pop
# Add capital city name
.merge(
uf_capitals,
how='left',
on='uf'
)
# Add capital population
.merge(
city_pop,
how='left',
on=['uf', 'city']
)
.rename(
columns={
'estimated_population_x': 'estimated_population',
'estimated_population_y': 'capital_estimated_population'
}
)
)
dfs = [uf_pop, city_pop]
filenames = ['by_uf', 'by_city']
for df, filename in zip(dfs, filenames):
output_path = f'data/csv/population/{filename}/{filename}.csv'
df.to_csv(output_path, index=False)
print(f'{filename} data exported to {output_path}')
def load_jh_df(csv):
'''
    Loads a CSV file from the JH repository and applies some transforms
'''
jh_data_path = (
'https://raw.githubusercontent.com/'
'CSSEGISandData/COVID-19/master/'
'csse_covid_19_data/csse_covid_19_time_series/'
)
return (
pd.read_csv(
jh_data_path
+ csv[1]
)
.drop(['Lat', 'Long'], axis=1)
.groupby('Country/Region')
.sum()
.reset_index()
.rename(
columns={'Country/Region':'country'}
)
.melt(
id_vars=['country'],
var_name='date',
value_name=csv[0]
)
.assign(
date=lambda x: pd.to_datetime(
x['date'],
format='%m/%d/%y'
)
)
)
def load_jh_data():
'''
    Loads the latest global COVID-19 data from the
    Johns Hopkins University repository
'''
cases_csv = ('cases', 'time_series_19-covid-Confirmed.csv')
deaths_csv = ('deaths', 'time_series_19-covid-Deaths.csv')
recovered_csv = ('recoveries', 'time_series_19-covid-Recovered.csv')
return (
pd.merge(
pd.merge(
load_jh_df(cases_csv),
load_jh_df(deaths_csv)
),
load_jh_df(recovered_csv)
)
.reindex(
columns = ['date',
'cases',
'deaths',
'recoveries',
'country']
)
)
if __name__ == '__main__':
try:
load_dump_covid_19_data()
except Exception as e:
print(f'Error when collecting COVID-19 cases data: {repr(e)}')
try:
load_dump_uf_pop()
except Exception as e:
print(f'Error when collecting population data: {repr(e)}')
| [((173, 27, 176, 13), 'pandas.to_datetime', 'pd.to_datetime', (), '', True, 'import pandas as pd\n'), ((103, 29, 107, 48), 'pandas.merge', 'pd.merge', (), '', True, 'import pandas as pd\n'), ((54, 12, 56, 13), 'pandas.read_html', 'pd.read_html', ({(55, 16, 55, 59): '"""https://www.estadosecapitaisdobrasil.com/"""'}, {}), "('https://www.estadosecapitaisdobrasil.com/')", True, 'import pandas as pd\n'), ((41, 12, 44, 13), 'pandas.read_html', 'pd.read_html', ({(42, 16, 43, 69): '"""https://www.oobj.com.br/bc/article/quais-os-c%C3%B3digos-de-cada-uf-no-brasil-465.html"""'}, {}), "(\n 'https://www.oobj.com.br/bc/article/quais-os-c%C3%B3digos-de-cada-uf-no-brasil-465.html'\n )", True, 'import pandas as pd\n'), ((10, 13, 10, 46), 'pandas.read_csv', 'pd.read_csv', ({(10, 25, 10, 45): 'COVID_19_BY_CITY_URL'}, {}), '(COVID_19_BY_CITY_URL)', True, 'import pandas as pd\n'), ((93, 12, 93, 62), 'pandas.read_excel', 'pd.read_excel', (), '', True, 'import pandas as pd\n'), ((69, 12, 69, 88), 'pandas.read_excel', 'pd.read_excel', (), '', True, 'import pandas as pd\n'), ((156, 8, 159, 9), 'pandas.read_csv', 'pd.read_csv', ({(157, 12, 158, 20): '(jh_data_path + csv[1])'}, {}), '(jh_data_path + csv[1])', True, 'import pandas as pd\n')] |
freingruber/JavaScript-Raider | testsuite/testsuite_helpers.py | d1c1fff2fcfc60f210b93dbe063216fa1a83c1d0 | import config as cfg
import utils
import native_code.executor as executor
number_performed_tests = 0
expectations_correct = 0
expectations_wrong = 0
def reset_stats():
global number_performed_tests, expectations_correct, expectations_wrong
number_performed_tests = 0
expectations_correct = 0
expectations_wrong = 0
def get_number_performed_tests():
global number_performed_tests
return number_performed_tests
def get_expectations_correct():
global expectations_correct
return expectations_correct
def get_expectations_wrong():
global expectations_wrong
return expectations_wrong
def assert_success(result):
global number_performed_tests
number_performed_tests += 1
if result.status != executor.Execution_Status.SUCCESS:
utils.msg("[-] ERROR: Returned status was not SUCCESS")
raise Exception()
def assert_crash(result):
global number_performed_tests
number_performed_tests += 1
if result.status != executor.Execution_Status.CRASH:
utils.msg("[-] ERROR: Returned status was not CRASH")
raise Exception()
def assert_exception(result):
global number_performed_tests
number_performed_tests += 1
if result.status != executor.Execution_Status.EXCEPTION_THROWN and result.status != executor.Execution_Status.EXCEPTION_CRASH:
utils.msg("[-] ERROR: Returned status was not EXCEPTION")
raise Exception()
def assert_timeout(result):
global number_performed_tests
number_performed_tests += 1
if result.status != executor.Execution_Status.TIMEOUT:
utils.msg("[-] ERROR: Returned status was not TIMEOUT")
raise Exception()
def assert_output_equals(result, expected_output):
global number_performed_tests
number_performed_tests += 1
if result.output.strip() != expected_output.strip():
utils.msg("[-] ERROR: Returned output (%s) was not correct (%s)" % (result.output.strip(), expected_output))
raise Exception()
def execute_program(code_to_execute):
cfg.exec_engine.restart_engine()
result = cfg.exec_engine.execute_safe(code_to_execute)
return result
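# For example, execute_program("var x = 1 + 1;") (any JavaScript snippet) returns a result whose
# .status, .output and .num_new_edges fields are inspected by the assert_*/expect_* helpers in this module.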
def restart_exec_engine():
cfg.exec_engine.restart_engine()
def execute_program_from_restarted_engine(code_to_execute):
restart_exec_engine()
return execute_program(code_to_execute)
def assert_int_value_equals(value_real, value_expected, error_msg):
global number_performed_tests
number_performed_tests += 1
if value_real == value_expected:
return # Test PASSED
utils.msg("[-] ERROR: %s (expected: %d ,real: %d)" % (error_msg, value_expected, value_real))
# In this case I throw an exception to stop execution because speed optimized functions must always be correct
raise Exception() # Raising an exception shows the stacktrace which contains the line number where a check failed
def assert_string_value_equals(string_real, string_expected, error_msg):
global number_performed_tests
number_performed_tests += 1
if string_real == string_expected:
return # Test PASSED
print("[-] ERROR: %s (expected: %s ,real: %s)" % (error_msg, string_expected, string_real))
# In this case I throw an exception to stop execution because speed optimized functions must always be correct
raise Exception() # Raising an exception shows the stacktrace which contains the line number where a check failed
def assert_no_new_coverage(result):
global number_performed_tests
number_performed_tests += 1
if result.status != executor.Execution_Status.SUCCESS:
utils.msg("[-] ERROR: Returned status was not SUCCESS") # but the result must always be SUCCESS
raise Exception()
if result.num_new_edges == 0:
return # test PASSED
print("[-] ERROR: Found new coverage (%d) but expected that there is no new coverage!" % result.num_new_edges)
# In this case I throw an exception to stop execution because speed optimized functions must always be correct
raise Exception() # Raising an exception shows the stacktrace which contains the line number where a check failed
def assert_new_coverage(result):
global number_performed_tests
number_performed_tests += 1
if result.status != executor.Execution_Status.SUCCESS:
utils.msg("[-] ERROR: Returned status was not SUCCESS") # but the result must always be SUCCESS
raise Exception()
if result.num_new_edges != 0:
return # test PASSED
print("[-] ERROR: Found no new coverage but there should be one!")
# In this case I throw an exception to stop execution because speed optimized functions must always be correct
raise Exception() # Raising an exception shows the stacktrace which contains the line number where a check failed
# The expect functions don't throw an exception like the assert_* functions
# Instead, they just count how often the expected result was true
def expect_no_new_coverage(result):
global expectations_correct, expectations_wrong, number_performed_tests
number_performed_tests += 1
if result.status != executor.Execution_Status.SUCCESS:
utils.msg("[-] ERROR: Returned status was not SUCCESS") # but the result must always be SUCCESS
raise Exception()
if result.num_new_edges == 0:
expectations_correct += 1
else:
expectations_wrong += 1
# The expect functions don't throw an exception like the assert_* functions
# Instead, they just count how often the expected result was true
def expect_new_coverage(result):
global expectations_correct, expectations_wrong, number_performed_tests
number_performed_tests += 1
if result.status != executor.Execution_Status.SUCCESS:
utils.msg("[-] ERROR: Returned status was not SUCCESS") # but the result must always be SUCCESS
raise Exception()
if result.num_new_edges != 0:
expectations_correct += 1
else:
expectations_wrong += 1
| [((75, 4, 75, 36), 'config.exec_engine.restart_engine', 'cfg.exec_engine.restart_engine', ({}, {}), '()', True, 'import config as cfg\n'), ((76, 13, 76, 58), 'config.exec_engine.execute_safe', 'cfg.exec_engine.execute_safe', ({(76, 42, 76, 57): 'code_to_execute'}, {}), '(code_to_execute)', True, 'import config as cfg\n'), ((81, 4, 81, 36), 'config.exec_engine.restart_engine', 'cfg.exec_engine.restart_engine', ({}, {}), '()', True, 'import config as cfg\n'), ((95, 4, 95, 97), 'utils.msg', 'utils.msg', ({(95, 14, 95, 96): "('[-] ERROR: %s (expected: %d ,real: %d)' % (error_msg, value_expected,\n value_real))"}, {}), "('[-] ERROR: %s (expected: %d ,real: %d)' % (error_msg,\n value_expected, value_real))", False, 'import utils\n'), ((38, 8, 38, 63), 'utils.msg', 'utils.msg', ({(38, 18, 38, 62): '"""[-] ERROR: Returned status was not SUCCESS"""'}, {}), "('[-] ERROR: Returned status was not SUCCESS')", False, 'import utils\n'), ((46, 8, 46, 61), 'utils.msg', 'utils.msg', ({(46, 18, 46, 60): '"""[-] ERROR: Returned status was not CRASH"""'}, {}), "('[-] ERROR: Returned status was not CRASH')", False, 'import utils\n'), ((54, 8, 54, 65), 'utils.msg', 'utils.msg', ({(54, 18, 54, 64): '"""[-] ERROR: Returned status was not EXCEPTION"""'}, {}), "('[-] ERROR: Returned status was not EXCEPTION')", False, 'import utils\n'), ((62, 8, 62, 63), 'utils.msg', 'utils.msg', ({(62, 18, 62, 62): '"""[-] ERROR: Returned status was not TIMEOUT"""'}, {}), "('[-] ERROR: Returned status was not TIMEOUT')", False, 'import utils\n'), ((116, 8, 116, 63), 'utils.msg', 'utils.msg', ({(116, 18, 116, 62): '"""[-] ERROR: Returned status was not SUCCESS"""'}, {}), "('[-] ERROR: Returned status was not SUCCESS')", False, 'import utils\n'), ((131, 8, 131, 63), 'utils.msg', 'utils.msg', ({(131, 18, 131, 62): '"""[-] ERROR: Returned status was not SUCCESS"""'}, {}), "('[-] ERROR: Returned status was not SUCCESS')", False, 'import utils\n'), ((148, 8, 148, 63), 'utils.msg', 'utils.msg', ({(148, 18, 148, 62): '"""[-] ERROR: Returned status was not SUCCESS"""'}, {}), "('[-] ERROR: Returned status was not SUCCESS')", False, 'import utils\n'), ((164, 8, 164, 63), 'utils.msg', 'utils.msg', ({(164, 18, 164, 62): '"""[-] ERROR: Returned status was not SUCCESS"""'}, {}), "('[-] ERROR: Returned status was not SUCCESS')", False, 'import utils\n')] |
davidhyman/override | examples/my_configs/two.py | e34bd3c8676233439de5c002367b3bff5c1b88d6 | from .one import *
fruit = 'banana'
colour = 'orange'
sam['eggs'] = 'plenty'
sam.pop('ham')
| [] |
aglaya-pill/ITMO_ICT_WebDevelopment_2021-2022 | students/K33402/Komarov_Georgy/LAB2/elevennote/src/api/urls.py | a63691317a72fb9b29ae537bc3d7766661458c22 | from django.urls import path, include
from rest_framework_jwt.views import obtain_jwt_token
from rest_framework.routers import DefaultRouter
from .views import NoteViewSet
app_name = 'api'
router = DefaultRouter(trailing_slash=False)
router.register('notes', NoteViewSet)
urlpatterns = [
path('jwt-auth/', obtain_jwt_token),
path('', include(router.urls)),
]
| [((9, 9, 9, 44), 'rest_framework.routers.DefaultRouter', 'DefaultRouter', (), '', False, 'from rest_framework.routers import DefaultRouter\n'), ((13, 4, 13, 39), 'django.urls.path', 'path', ({(13, 9, 13, 20): '"""jwt-auth/"""', (13, 22, 13, 38): 'obtain_jwt_token'}, {}), "('jwt-auth/', obtain_jwt_token)", False, 'from django.urls import path, include\n'), ((14, 13, 14, 33), 'django.urls.include', 'include', ({(14, 21, 14, 32): 'router.urls'}, {}), '(router.urls)', False, 'from django.urls import path, include\n')] |
CandleStein/VAlg | PathPlanning/run.py | 43aecdd351954d316f132793cf069b70bf2e5cc2 | from planning_framework import path
import cv2 as cv
import numpy as np
import argparse
import matplotlib.pyplot as plt
parser = argparse.ArgumentParser(description="Path Planning Visualisation")
parser.add_argument(
"-n",
"--n_heuristic",
default=2,
help="Heuristic for A* Algorithm (default = 2). 0 for Dijkstra's Algorithm",
)
args = parser.parse_args()
N_H = int(args.n_heuristic)
drawing = False # true if mouse is pressed
mode = "obs"  # what the mouse draws: "obs" = obstacles, "src" = start point, "dst" = destination
ix, iy = -1, -1
sx, sy = 0, 0
dx, dy = 50, 50
# mouse callback function
def draw(event, x, y, flags, param):
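    """Mouse callback: paints obstacles, or places the start/end marker, depending on the global `mode`."""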
global mode, sx, sy, dx, dy, drawing
if event == cv.EVENT_LBUTTONDOWN:
drawing = True
elif event == cv.EVENT_MOUSEMOVE:
if drawing == True:
if mode == "obs":
cv.rectangle(img, (x - 5, y - 5), (x + 5, y + 5), (255, 255, 255), -1)
elif event == cv.EVENT_LBUTTONUP:
drawing = False
if mode == "obs":
cv.rectangle(img, (x - 5, y - 5), (x + 5, y + 5), (255, 255, 255), -1)
elif mode == "src":
cv.circle(img, (x, y), 5, (255, 0, 0), -1)
sx, sy = x, y
elif mode == "dst":
cv.circle(img, (x, y), 5, (0, 255, 0), -1)
dx, dy = x, y
img = np.zeros((512, 512, 3), np.uint8)
inv_im = np.ones(img.shape) * 255
cv.namedWindow("Draw the Occupancy Map")
cv.setMouseCallback("Draw the Occupancy Map", draw)
while 1:
cv.imshow("Draw the Occupancy Map", inv_im - img)
if cv.waitKey(20) & 0xFF == 27:
break
cv.destroyAllWindows()
mode = "src"
img_ = img
cv.namedWindow("Set the Starting Point")
cv.setMouseCallback("Set the Starting Point", draw)
while 1:
cv.imshow("Set the Starting Point", inv_im - img)
if cv.waitKey(20) & 0xFF == 27:
break
# cv.waitKey(20)
cv.destroyAllWindows()
mode = "dst"
end = "Set the End Point"
cv.namedWindow(end)
cv.setMouseCallback(end, draw)
while cv.getWindowProperty(end, 0) >= 0:
cv.imshow(end, inv_im - img)
if cv.waitKey(20) & 0xFF == 27:
break
cv.destroyAllWindows()
img = cv.resize(img_, (50, 50), interpolation=cv.INTER_AREA)
inv_img = np.ones(img.shape)
np.savetxt("map.txt", np.array(img[:, :, 0]))
plt.imshow(inv_img - img)
start = np.array([sx, sy]) * 50 // 512
end = np.array([dx, dy]) * 50 // 512
path(start, end, N_H)
| [((7, 9, 7, 75), 'argparse.ArgumentParser', 'argparse.ArgumentParser', (), '', False, 'import argparse\n'), ((48, 6, 48, 39), 'numpy.zeros', 'np.zeros', ({(48, 15, 48, 28): '(512, 512, 3)', (48, 30, 48, 38): 'np.uint8'}, {}), '((512, 512, 3), np.uint8)', True, 'import numpy as np\n'), ((51, 0, 51, 40), 'cv2.namedWindow', 'cv.namedWindow', ({(51, 15, 51, 39): '"""Draw the Occupancy Map"""'}, {}), "('Draw the Occupancy Map')", True, 'import cv2 as cv\n'), ((52, 0, 52, 51), 'cv2.setMouseCallback', 'cv.setMouseCallback', ({(52, 20, 52, 44): '"""Draw the Occupancy Map"""', (52, 46, 52, 50): 'draw'}, {}), "('Draw the Occupancy Map', draw)", True, 'import cv2 as cv\n'), ((57, 0, 57, 22), 'cv2.destroyAllWindows', 'cv.destroyAllWindows', ({}, {}), '()', True, 'import cv2 as cv\n'), ((61, 0, 61, 40), 'cv2.namedWindow', 'cv.namedWindow', ({(61, 15, 61, 39): '"""Set the Starting Point"""'}, {}), "('Set the Starting Point')", True, 'import cv2 as cv\n'), ((62, 0, 62, 51), 'cv2.setMouseCallback', 'cv.setMouseCallback', ({(62, 20, 62, 44): '"""Set the Starting Point"""', (62, 46, 62, 50): 'draw'}, {}), "('Set the Starting Point', draw)", True, 'import cv2 as cv\n'), ((68, 0, 68, 22), 'cv2.destroyAllWindows', 'cv.destroyAllWindows', ({}, {}), '()', True, 'import cv2 as cv\n'), ((72, 0, 72, 19), 'cv2.namedWindow', 'cv.namedWindow', ({(72, 15, 72, 18): 'end'}, {}), '(end)', True, 'import cv2 as cv\n'), ((73, 0, 73, 30), 'cv2.setMouseCallback', 'cv.setMouseCallback', ({(73, 20, 73, 23): 'end', (73, 25, 73, 29): 'draw'}, {}), '(end, draw)', True, 'import cv2 as cv\n'), ((78, 0, 78, 22), 'cv2.destroyAllWindows', 'cv.destroyAllWindows', ({}, {}), '()', True, 'import cv2 as cv\n'), ((80, 6, 80, 60), 'cv2.resize', 'cv.resize', (), '', True, 'import cv2 as cv\n'), ((81, 10, 81, 28), 'numpy.ones', 'np.ones', ({(81, 18, 81, 27): 'img.shape'}, {}), '(img.shape)', True, 'import numpy as np\n'), ((83, 0, 83, 25), 'matplotlib.pyplot.imshow', 'plt.imshow', ({(83, 11, 83, 24): '(inv_img - img)'}, {}), '(inv_img - img)', True, 'import matplotlib.pyplot as plt\n'), ((88, 0, 88, 21), 'planning_framework.path', 'path', ({(88, 5, 88, 10): 'start', (88, 12, 88, 15): 'end', (88, 17, 88, 20): 'N_H'}, {}), '(start, end, N_H)', False, 'from planning_framework import path\n'), ((49, 9, 49, 27), 'numpy.ones', 'np.ones', ({(49, 17, 49, 26): 'img.shape'}, {}), '(img.shape)', True, 'import numpy as np\n'), ((54, 4, 54, 53), 'cv2.imshow', 'cv.imshow', ({(54, 14, 54, 38): '"""Draw the Occupancy Map"""', (54, 40, 54, 52): '(inv_im - img)'}, {}), "('Draw the Occupancy Map', inv_im - img)", True, 'import cv2 as cv\n'), ((64, 4, 64, 53), 'cv2.imshow', 'cv.imshow', ({(64, 14, 64, 38): '"""Set the Starting Point"""', (64, 40, 64, 52): '(inv_im - img)'}, {}), "('Set the Starting Point', inv_im - img)", True, 'import cv2 as cv\n'), ((74, 6, 74, 34), 'cv2.getWindowProperty', 'cv.getWindowProperty', ({(74, 27, 74, 30): 'end', (74, 32, 74, 33): '(0)'}, {}), '(end, 0)', True, 'import cv2 as cv\n'), ((75, 4, 75, 32), 'cv2.imshow', 'cv.imshow', ({(75, 14, 75, 17): 'end', (75, 19, 75, 31): '(inv_im - img)'}, {}), '(end, inv_im - img)', True, 'import cv2 as cv\n'), ((82, 22, 82, 44), 'numpy.array', 'np.array', ({(82, 31, 82, 43): 'img[:, :, (0)]'}, {}), '(img[:, :, (0)])', True, 'import numpy as np\n'), ((85, 8, 85, 26), 'numpy.array', 'np.array', ({(85, 17, 85, 25): '[sx, sy]'}, {}), '([sx, sy])', True, 'import numpy as np\n'), ((86, 6, 86, 24), 'numpy.array', 'np.array', ({(86, 15, 86, 23): '[dx, dy]'}, {}), '([dx, dy])', True, 'import numpy as 
np\n'), ((55, 7, 55, 21), 'cv2.waitKey', 'cv.waitKey', ({(55, 18, 55, 20): '(20)'}, {}), '(20)', True, 'import cv2 as cv\n'), ((65, 7, 65, 21), 'cv2.waitKey', 'cv.waitKey', ({(65, 18, 65, 20): '(20)'}, {}), '(20)', True, 'import cv2 as cv\n'), ((76, 7, 76, 21), 'cv2.waitKey', 'cv.waitKey', ({(76, 18, 76, 20): '(20)'}, {}), '(20)', True, 'import cv2 as cv\n'), ((34, 16, 34, 86), 'cv2.rectangle', 'cv.rectangle', ({(34, 29, 34, 32): 'img', (34, 34, 34, 48): '(x - 5, y - 5)', (34, 50, 34, 64): '(x + 5, y + 5)', (34, 66, 34, 81): '(255, 255, 255)', (34, 83, 34, 85): '(-1)'}, {}), '(img, (x - 5, y - 5), (x + 5, y + 5), (255, 255, 255), -1)', True, 'import cv2 as cv\n'), ((39, 12, 39, 82), 'cv2.rectangle', 'cv.rectangle', ({(39, 25, 39, 28): 'img', (39, 30, 39, 44): '(x - 5, y - 5)', (39, 46, 39, 60): '(x + 5, y + 5)', (39, 62, 39, 77): '(255, 255, 255)', (39, 79, 39, 81): '(-1)'}, {}), '(img, (x - 5, y - 5), (x + 5, y + 5), (255, 255, 255), -1)', True, 'import cv2 as cv\n'), ((41, 12, 41, 54), 'cv2.circle', 'cv.circle', ({(41, 22, 41, 25): 'img', (41, 27, 41, 33): '(x, y)', (41, 35, 41, 36): '(5)', (41, 38, 41, 49): '(255, 0, 0)', (41, 51, 41, 53): '(-1)'}, {}), '(img, (x, y), 5, (255, 0, 0), -1)', True, 'import cv2 as cv\n'), ((44, 12, 44, 54), 'cv2.circle', 'cv.circle', ({(44, 22, 44, 25): 'img', (44, 27, 44, 33): '(x, y)', (44, 35, 44, 36): '(5)', (44, 38, 44, 49): '(0, 255, 0)', (44, 51, 44, 53): '(-1)'}, {}), '(img, (x, y), 5, (0, 255, 0), -1)', True, 'import cv2 as cv\n')] |
object-oriented-human/competitive | Codeforces/problems/0136/A/136A.py | 9e761020e887d8980a39a64eeaeaa39af0ecd777 | n = int(input())
line = list(map(int, input().split()))
l = {}
res = ""
for i, j in enumerate(line):
l[j] = i+1
for k in range(n):
res += str(l[k+1]) + " "
print(res.rstrip()) | [] |
YiLisa/DSCI560-hw2 | generatey.py | 9cf4a40a6e4755ea1b0b68248e553fb4b6b7fdf4 | import pandas as pd
def main():
input = pd.read_csv('random_x.csv', header=None)
x=input[0].tolist()
y = []
for n in x:
y.append(3*int(n)+6)
df = pd.DataFrame(y)
df.to_csv('output_y.csv', index=False, header=False)
if __name__ == '__main__':
main()
print('generating y = 3x+6...') | [((5, 12, 5, 52), 'pandas.read_csv', 'pd.read_csv', (), '', True, 'import pandas as pd\n'), ((10, 9, 10, 24), 'pandas.DataFrame', 'pd.DataFrame', ({(10, 22, 10, 23): 'y'}, {}), '(y)', True, 'import pandas as pd\n')] |
burn874/mtg | setup.py | cef47f6ec0ca110bdcb885ec09d6f5aca517c3b2 | import re
from pkg_resources import parse_requirements
import pathlib
from setuptools import find_packages, setup
README_FILE = 'README.md'
REQUIREMENTS_FILE = 'requirements.txt'
VERSION_FILE = 'mtg/_version.py'
VERSION_REGEXP = r'^__version__ = \'(\d+\.\d+\.\d+)\''
r = re.search(VERSION_REGEXP, open(VERSION_FILE).read(), re.M)
if r is None:
raise RuntimeError(f'Unable to find version string in {VERSION_FILE}.')
version = r.group(1)
long_description = open(README_FILE, encoding='utf-8').read()
install_requires = [str(r) for r in parse_requirements(open(REQUIREMENTS_FILE, 'rt'))]
setup(
name='mtg',
version=version,
description='mtg is a collection of data science and ml projects for Magic:the Gathering',
long_description=long_description,
long_description_content_type='text/markdown',
author='Ryan Saxe',
author_email='[email protected]',
url='https://github.com/RyanSaxe/mtg',
packages=find_packages(),
install_requires=install_requires,
)
| [((29, 13, 29, 28), 'setuptools.find_packages', 'find_packages', ({}, {}), '()', False, 'from setuptools import find_packages, setup\n')] |
RF-Tar-Railt/Avilla | avilla/core/resource/interface.py | 0b6eff0e253d4c04a5c82f4f252b6a11b7d81e04 | from __future__ import annotations
from dataclasses import dataclass
from avilla.core.platform import Base
from avilla.core.resource import Resource, ResourceProvider
@dataclass
class ResourceMatchPrefix:
resource_type: type[Resource]
keypath: str | None = None
platform: Base | None = None
class ResourceInterface:
providers: dict[ResourceMatchPrefix, ResourceProvider]
def __init__(self):
self.providers = {}
def register(
self,
resource_type: type[Resource],
provider: ResourceProvider,
*,
mainline_keypath: str | None = None,
platform: Base | None = None,
):
self.providers[ResourceMatchPrefix(resource_type, mainline_keypath, platform)] = provider
def get_provider(
self,
resource: Resource | type[Resource],
*,
mainline_keypath: str | None = None,
platform: Base | None = None,
) -> ResourceProvider | None:
resource_type = resource if isinstance(resource, type) else type(resource)
for prefix in self.providers:
if all((
prefix.resource_type is resource_type,
prefix.keypath == mainline_keypath if prefix.keypath is not None else True,
prefix.platform == platform if prefix.platform is not None else True
)):
return self.providers[prefix]
| [] |
atemysemicolon/scikit-image | viewer_examples/plugins/median_filter.py | a48cf5822f9539c6602b9327c18253aed14fa692 | from skimage import data
from skimage.filter.rank import median
from skimage.morphology import disk
from skimage.viewer import ImageViewer
from skimage.viewer.widgets import Slider, OKCancelButtons, SaveButtons
from skimage.viewer.plugins.base import Plugin
def median_filter(image, radius):
return median(image, selem=disk(radius))
image = data.coins()
viewer = ImageViewer(image)
plugin = Plugin(image_filter=median_filter)
plugin += Slider('radius', 2, 10, value_type='int')
plugin += SaveButtons()
plugin += OKCancelButtons()
viewer += plugin
viewer.show()
| [((12, 8, 12, 20), 'skimage.data.coins', 'data.coins', ({}, {}), '()', False, 'from skimage import data\n'), ((13, 9, 13, 27), 'skimage.viewer.ImageViewer', 'ImageViewer', ({(13, 21, 13, 26): 'image'}, {}), '(image)', False, 'from skimage.viewer import ImageViewer\n'), ((15, 9, 15, 43), 'skimage.viewer.plugins.base.Plugin', 'Plugin', (), '', False, 'from skimage.viewer.plugins.base import Plugin\n'), ((16, 10, 16, 51), 'skimage.viewer.widgets.Slider', 'Slider', (), '', False, 'from skimage.viewer.widgets import Slider, OKCancelButtons, SaveButtons\n'), ((17, 10, 17, 23), 'skimage.viewer.widgets.SaveButtons', 'SaveButtons', ({}, {}), '()', False, 'from skimage.viewer.widgets import Slider, OKCancelButtons, SaveButtons\n'), ((18, 10, 18, 27), 'skimage.viewer.widgets.OKCancelButtons', 'OKCancelButtons', ({}, {}), '()', False, 'from skimage.viewer.widgets import Slider, OKCancelButtons, SaveButtons\n'), ((10, 31, 10, 43), 'skimage.morphology.disk', 'disk', ({(10, 36, 10, 42): 'radius'}, {}), '(radius)', False, 'from skimage.morphology import disk\n')] |
scharlton2/modflow6 | autotest/test_gwf_buy_lak01.py | 83ac72ee3b6f580aaffef6352cf15c1697d3ce66 | # Test the buoyancy package and the variable density flows between the lake
# and the gwf model. This model has 4 layers and a lake incised within it.
# The model is transient and has heads in the aquifer higher than the initial
# stage in the lake. As the model runs, the lake and aquifer equalize and
# should end up at the same level. The test ensures that the initial and
# final water volumes in the entire system are the same. There are three
# different cases:
# 1. No buoyancy package
# 2. Buoyancy package with lake and aquifer density = 1000.
# 3. Buoyancy package with lake and aquifer density = 1024.5
import os
import pytest
import sys
import numpy as np
try:
import flopy
except:
msg = "Error. FloPy package is not available.\n"
msg += "Try installing using the following command:\n"
msg += " pip install flopy"
raise Exception(msg)
from framework import testing_framework
from simulation import Simulation
ex = ["buy_lak_01a"] # , 'buy_lak_01b', 'buy_lak_01c']
buy_on_list = [False] # , True, True]
concbuylist = [0.0] # , 0., 35.]
exdirs = []
for s in ex:
exdirs.append(os.path.join("temp", s))
def build_model(idx, dir):
lx = 7.0
lz = 4.0
nlay = 4
nrow = 1
ncol = 7
nper = 1
delc = 1.0
delr = lx / ncol
delz = lz / nlay
top = 4.0
botm = [3.0, 2.0, 1.0, 0.0]
perlen = [10.0]
nstp = [50]
tsmult = [1.0]
Kh = 1.0
Kv = 1.0
tdis_rc = []
for i in range(nper):
tdis_rc.append((perlen[i], nstp[i], tsmult[i]))
nouter, ninner = 700, 300
hclose, rclose, relax = 1e-8, 1e-6, 0.97
name = ex[idx]
# build MODFLOW 6 files
ws = dir
sim = flopy.mf6.MFSimulation(
sim_name=name, version="mf6", exe_name="mf6", sim_ws=ws
)
# create tdis package
tdis = flopy.mf6.ModflowTdis(
sim, time_units="DAYS", nper=nper, perioddata=tdis_rc
)
# create gwf model
gwfname = "gwf_" + name
gwf = flopy.mf6.ModflowGwf(sim, modelname=gwfname, newtonoptions="NEWTON")
imsgwf = flopy.mf6.ModflowIms(
sim,
print_option="ALL",
outer_dvclose=hclose,
outer_maximum=nouter,
under_relaxation="NONE",
inner_maximum=ninner,
inner_dvclose=hclose,
rcloserecord=rclose,
linear_acceleration="BICGSTAB",
scaling_method="NONE",
reordering_method="NONE",
relaxation_factor=relax,
filename="{}.ims".format(gwfname),
)
idomain = np.full((nlay, nrow, ncol), 1)
idomain[0, 0, 1:6] = 0
idomain[1, 0, 2:5] = 0
idomain[2, 0, 3:4] = 0
dis = flopy.mf6.ModflowGwfdis(
gwf,
nlay=nlay,
nrow=nrow,
ncol=ncol,
delr=delr,
delc=delc,
top=top,
botm=botm,
idomain=idomain,
)
# initial conditions
strt = np.zeros((nlay, nrow, ncol), dtype=float)
strt[0, 0, :] = 3.5
strt[1, 0, :] = 3.0
strt[1, 0, 1:6] = 2.5
strt[2, 0, :] = 2.0
strt[3, 0, :] = 1.0
ic = flopy.mf6.ModflowGwfic(gwf, strt=strt)
# node property flow
npf = flopy.mf6.ModflowGwfnpf(
gwf,
xt3doptions=False,
save_flows=True,
save_specific_discharge=True,
icelltype=1,
k=Kh,
k33=Kv,
)
sto = flopy.mf6.ModflowGwfsto(gwf, sy=0.3, ss=0.0, iconvert=1)
c = concbuylist[idx]
lake_dense = 1000.0 + 0.7 * c
buy_on = buy_on_list[idx]
if buy_on:
pd = [(0, 0.7, 0.0, "none", "none")]
buy = flopy.mf6.ModflowGwfbuy(
gwf, packagedata=pd, denseref=1000.0, concentration=c
)
nlakeconn = 11 # note: number of connections for this lake
# pak_data = [lakeno, strt, nlakeconn, dense, boundname]
pak_data = [(0, 2.25, nlakeconn, lake_dense)]
connlen = delr / 2.0
connwidth = delc
bedleak = "None"
con_data = [
# con_data=(lakeno,iconn,(cellid),claktype,bedleak,belev,telev,connlen,connwidth )
(0, 0, (0, 0, 0), "HORIZONTAL", bedleak, 10, 10, connlen, connwidth),
(0, 1, (1, 0, 1), "VERTICAL", bedleak, 10, 10, connlen, connwidth),
(0, 2, (1, 0, 1), "HORIZONTAL", bedleak, 10, 10, connlen, connwidth),
(0, 3, (2, 0, 2), "VERTICAL", bedleak, 10, 10, connlen, connwidth),
(0, 4, (2, 0, 2), "HORIZONTAL", bedleak, 10, 10, connlen, connwidth),
(0, 5, (3, 0, 3), "VERTICAL", bedleak, 10, 10, connlen, connwidth),
(0, 6, (2, 0, 4), "HORIZONTAL", bedleak, 10, 10, connlen, connwidth),
(0, 7, (2, 0, 4), "VERTICAL", bedleak, 10, 10, connlen, connwidth),
(0, 8, (1, 0, 5), "HORIZONTAL", bedleak, 10, 10, connlen, connwidth),
(0, 9, (1, 0, 5), "VERTICAL", bedleak, 10, 10, connlen, connwidth),
(0, 10, (0, 0, 6), "HORIZONTAL", bedleak, 10, 10, connlen, connwidth),
]
# period data
p_data = [
(0, "STATUS", "ACTIVE"),
]
# note: for specifying lake number, use fortran indexing!
fname = "{}.lak.obs.csv".format(gwfname)
lak_obs = {
fname: [
("lakestage", "stage", 1),
("lakevolume", "volume", 1),
("lak1", "lak", 1, 1),
("lak2", "lak", 1, 2),
("lak3", "lak", 1, 3),
("lak4", "lak", 1, 4),
("lak5", "lak", 1, 5),
("lak6", "lak", 1, 6),
("lak7", "lak", 1, 7),
("lak8", "lak", 1, 8),
("lak9", "lak", 1, 9),
("lak10", "lak", 1, 10),
("lak11", "lak", 1, 11),
],
# "digits": 10,
}
lak = flopy.mf6.modflow.ModflowGwflak(
gwf,
save_flows=True,
print_input=True,
print_flows=True,
print_stage=True,
stage_filerecord="{}.lak.bin".format(gwfname),
budget_filerecord="{}.lak.bud".format(gwfname),
nlakes=len(pak_data),
ntables=0,
packagedata=pak_data,
pname="LAK-1",
connectiondata=con_data,
perioddata=p_data,
observations=lak_obs,
auxiliary=["DENSITY"],
)
# output control
oc = flopy.mf6.ModflowGwfoc(
gwf,
budget_filerecord="{}.cbc".format(gwfname),
head_filerecord="{}.hds".format(gwfname),
headprintrecord=[("COLUMNS", 10, "WIDTH", 15, "DIGITS", 6, "GENERAL")],
saverecord=[("HEAD", "ALL"), ("BUDGET", "ALL")],
printrecord=[("HEAD", "LAST"), ("BUDGET", "LAST")],
)
return sim, None
def eval_results(sim):
print("evaluating results...")
# calculate volume of water and make sure it is conserved
name = ex[sim.idxsim]
gwfname = "gwf_" + name
fname = gwfname + ".lak.bin"
fname = os.path.join(sim.simpath, fname)
assert os.path.isfile(fname)
bobj = flopy.utils.HeadFile(fname, text="STAGE")
stage = bobj.get_alldata().flatten()
# print(stage)
fname = gwfname + ".hds"
fname = os.path.join(sim.simpath, fname)
assert os.path.isfile(fname)
hobj = flopy.utils.HeadFile(fname)
head = hobj.get_data()
# print(head)
# calculate initial water volume
v0 = 3.5 * 2 # outermost columns
v0 += 2.5 * 2 # next innermost columns
v0 += 2.0 * 2 # next innermost columns
v0 += 1.0 * 1 # middle column
v0 = v0 * 0.3 # specific yield
v0 = v0 + (2.25 - 2.0) * 2 + (2.25 - 1.0)
print("initial volume of water in model = {}".format(v0))
# calculate ending water volume in model
h = head[0, 0, 0]
s = stage[-1]
v = h * 4 + 2.0 * 2 + 1.0 * 1
v = v * 0.3 # specific yield
v = v + (s - 2.0) * 2 + (s - 1.0)
print("final volume of water in model = {}".format(v))
# check to make sure starting water volume same as equalized final volume
errmsg = "initial and final water volume not equal: {} {}".format(v0, v)
assert np.allclose(v0, v)
# todo: add a better check of the lake concentrations
# assert False
# - No need to change any code below
@pytest.mark.parametrize(
"idx, dir",
list(enumerate(exdirs)),
)
def test_mf6model(idx, dir):
# initialize testing framework
test = testing_framework()
# build the model
test.build_mf6_models(build_model, idx, dir)
# run the test model
test.run_mf6(Simulation(dir, exfunc=eval_results, idxsim=idx))
def main():
# initialize testing framework
test = testing_framework()
# run the test model
for idx, dir in enumerate(exdirs):
test.build_mf6_models(build_model, idx, dir)
sim = Simulation(dir, exfunc=eval_results, idxsim=idx)
test.run_mf6(sim)
if __name__ == "__main__":
# print message
print("standalone run of {}".format(os.path.basename(__file__)))
# run main routine
main()
| [((67, 10, 69, 5), 'flopy.mf6.MFSimulation', 'flopy.mf6.MFSimulation', (), '', False, 'import flopy\n'), ((71, 11, 73, 5), 'flopy.mf6.ModflowTdis', 'flopy.mf6.ModflowTdis', (), '', False, 'import flopy\n'), ((78, 10, 78, 78), 'flopy.mf6.ModflowGwf', 'flopy.mf6.ModflowGwf', (), '', False, 'import flopy\n'), ((96, 14, 96, 44), 'numpy.full', 'np.full', ({(96, 22, 96, 40): '(nlay, nrow, ncol)', (96, 42, 96, 43): '1'}, {}), '((nlay, nrow, ncol), 1)', True, 'import numpy as np\n'), ((100, 10, 110, 5), 'flopy.mf6.ModflowGwfdis', 'flopy.mf6.ModflowGwfdis', (), '', False, 'import flopy\n'), ((113, 11, 113, 52), 'numpy.zeros', 'np.zeros', (), '', True, 'import numpy as np\n'), ((119, 9, 119, 47), 'flopy.mf6.ModflowGwfic', 'flopy.mf6.ModflowGwfic', (), '', False, 'import flopy\n'), ((122, 10, 130, 5), 'flopy.mf6.ModflowGwfnpf', 'flopy.mf6.ModflowGwfnpf', (), '', False, 'import flopy\n'), ((132, 10, 132, 66), 'flopy.mf6.ModflowGwfsto', 'flopy.mf6.ModflowGwfsto', (), '', False, 'import flopy\n'), ((229, 12, 229, 44), 'os.path.join', 'os.path.join', ({(229, 25, 229, 36): 'sim.simpath', (229, 38, 229, 43): 'fname'}, {}), '(sim.simpath, fname)', False, 'import os\n'), ((230, 11, 230, 32), 'os.path.isfile', 'os.path.isfile', ({(230, 26, 230, 31): 'fname'}, {}), '(fname)', False, 'import os\n'), ((231, 11, 231, 52), 'flopy.utils.HeadFile', 'flopy.utils.HeadFile', (), '', False, 'import flopy\n'), ((236, 12, 236, 44), 'os.path.join', 'os.path.join', ({(236, 25, 236, 36): 'sim.simpath', (236, 38, 236, 43): 'fname'}, {}), '(sim.simpath, fname)', False, 'import os\n'), ((237, 11, 237, 32), 'os.path.isfile', 'os.path.isfile', ({(237, 26, 237, 31): 'fname'}, {}), '(fname)', False, 'import os\n'), ((238, 11, 238, 38), 'flopy.utils.HeadFile', 'flopy.utils.HeadFile', ({(238, 32, 238, 37): 'fname'}, {}), '(fname)', False, 'import flopy\n'), ((261, 11, 261, 29), 'numpy.allclose', 'np.allclose', ({(261, 23, 261, 25): 'v0', (261, 27, 261, 28): 'v'}, {}), '(v0, v)', True, 'import numpy as np\n'), ((274, 11, 274, 30), 'framework.testing_framework', 'testing_framework', ({}, {}), '()', False, 'from framework import testing_framework\n'), ((285, 11, 285, 30), 'framework.testing_framework', 'testing_framework', ({}, {}), '()', False, 'from framework import testing_framework\n'), ((33, 18, 33, 41), 'os.path.join', 'os.path.join', ({(33, 31, 33, 37): '"""temp"""', (33, 39, 33, 40): 's'}, {}), "('temp', s)", False, 'import os\n'), ((139, 14, 141, 9), 'flopy.mf6.ModflowGwfbuy', 'flopy.mf6.ModflowGwfbuy', (), '', False, 'import flopy\n'), ((280, 17, 280, 65), 'simulation.Simulation', 'Simulation', (), '', False, 'from simulation import Simulation\n'), ((290, 14, 290, 62), 'simulation.Simulation', 'Simulation', (), '', False, 'from simulation import Simulation\n'), ((296, 40, 296, 66), 'os.path.basename', 'os.path.basename', ({(296, 57, 296, 65): '__file__'}, {}), '(__file__)', False, 'import os\n')] |
hemiaoio/pylearning | lesson-08/roll_dice_v1.0.py | 4b3885ed7177db4e6e03da80dd9ed69719c8d866 | """
Purpose: simulate rolling a die
Version: 1.0
"""
import random
def roll_dice():
roll = random.randint(1, 6)
return roll
def main():
total_times = 100000
result_list = [0] * 6
for i in range(total_times):
roll = roll_dice()
result_list[roll-1] += 1
for i, x in enumerate(result_list):
        print('Face {}: count {}, frequency {}'.format(i+1, x, x/total_times))
print(result_list)
if __name__ == '__main__':
main()
| [((10, 11, 10, 31), 'random.randint', 'random.randint', ({(10, 26, 10, 27): '1', (10, 29, 10, 30): '6'}, {}), '(1, 6)', False, 'import random\n')] |
gxercavins/gcp-snippets | composer/dataflow-python3/main.py | a90e4e9c922370face876aa7c56db610896e1a6f | import argparse
import logging
import apache_beam as beam
from apache_beam.options.pipeline_options import PipelineOptions
from apache_beam.options.pipeline_options import SetupOptions
def run(argv=None, save_main_session=True):
"""Dummy pipeline to test Python3 operator."""
parser = argparse.ArgumentParser()
known_args, pipeline_args = parser.parse_known_args(argv)
pipeline_options = PipelineOptions(pipeline_args)
pipeline_options.view_as(SetupOptions).save_main_session = save_main_session
p = beam.Pipeline(options=pipeline_options)
# Just a simple test
p | 'Create Events' >> beam.Create([1, 2, 3])
result = p.run()
result.wait_until_finish()
if __name__ == '__main__':
logging.getLogger().setLevel(logging.INFO)
run()
| [((11, 11, 11, 36), 'argparse.ArgumentParser', 'argparse.ArgumentParser', ({}, {}), '()', False, 'import argparse\n'), ((14, 21, 14, 51), 'apache_beam.options.pipeline_options.PipelineOptions', 'PipelineOptions', ({(14, 37, 14, 50): 'pipeline_args'}, {}), '(pipeline_args)', False, 'from apache_beam.options.pipeline_options import PipelineOptions\n'), ((16, 6, 16, 45), 'apache_beam.Pipeline', 'beam.Pipeline', (), '', True, 'import apache_beam as beam\n'), ((19, 25, 19, 47), 'apache_beam.Create', 'beam.Create', ({(19, 37, 19, 46): '[1, 2, 3]'}, {}), '([1, 2, 3])', True, 'import apache_beam as beam\n'), ((26, 2, 26, 21), 'logging.getLogger', 'logging.getLogger', ({}, {}), '()', False, 'import logging\n')] |
kangour/dingtalk-python | dingtalk/message/conversation.py | b37b9dac3ca3ff9d727308fb120a8fd05e11eaa5 | #!/usr/bin/env python
# -*- coding: utf-8 -*-
# @Time : 2017/11/30 3:02 PM
# @Author : Matrix
# @Github : https://github.com/blackmatrix7/
# @Blog : http://www.cnblogs.com/blackmatrix/
# @File : messages.py
# @Software: PyCharm
import json
from ..foundation import *
from json import JSONDecodeError
__author__ = 'blackmatrix'
__all__ = ['async_send_msg', 'get_msg_send_result', 'get_msg_send_progress']
@dingtalk_resp
def async_send_msg(access_token, msgtype, agent_id, msgcontent, userid_list=None, dept_id_list=None, to_all_user=False):
try:
msgcontent = json.dumps(msgcontent)
    except (TypeError, ValueError):
        # If msgcontent cannot be serialized to JSON, pass it through to DingTalk
        # as-is and let DingTalk handle it.
        pass
if not isinstance(userid_list, str):
userid_list = ','.join(userid_list)
args = locals().copy()
payload = {}
    # Assemble the request parameters
for k, v in args.items():
if k in ('msgtype', 'agent_id', 'msgcontent', 'userid_list', 'dept_id_list'):
if v is not None:
payload.update({k: v})
resp = call_dingtalk_webapi(access_token, 'dingtalk.corp.message.corpconversation.asyncsend', **payload)
return resp
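# Illustrative call (hypothetical token, IDs and content, not from the original module):
#   async_send_msg(token, 'text', '12345', {'content': 'hello'}, userid_list=['u1', 'u2'])
# would JSON-encode msgcontent and post it to the
# dingtalk.corp.message.corpconversation.asyncsend endpoint used above.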
@dingtalk_resp
def get_msg_send_result(access_token, agent_id, task_id):
url = get_request_url(access_token, 'dingtalk.corp.message.corpconversation.getsendresult')
payload = {'task_id': task_id, 'agent_id': agent_id}
return requests.get(url, params=payload)
@dingtalk_resp
def get_msg_send_progress(access_token, agent_id, task_id):
url = get_request_url(access_token, 'dingtalk.corp.message.corpconversation.getsendprogress')
payload = {'task_id': task_id, 'agent_id': agent_id}
return requests.get(url, params=payload)
if __name__ == '__main__':
pass
| [((21, 21, 21, 43), 'json.dumps', 'json.dumps', ({(21, 32, 21, 42): 'msgcontent'}, {}), '(msgcontent)', False, 'import json\n')] |
griviala/garpix_page | backend/garpix_page/setup.py | 55f1d9bc6d1de29d18e15369bebcbef18811b5a4 | from setuptools import setup, find_packages
from os import path
here = path.join(path.abspath(path.dirname(__file__)), 'garpix_page')
with open(path.join(here, 'README.rst'), encoding='utf-8') as f:
long_description = f.read()
setup(
name='garpix_page',
version='2.23.0',
description='',
long_description=long_description,
url='https://github.com/garpixcms/garpix_page',
author='Garpix LTD',
author_email='[email protected]',
license='MIT',
packages=find_packages(exclude=['testproject', 'testproject.*']),
classifiers=[
'Development Status :: 4 - Beta',
'Environment :: Web Environment',
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Framework :: Django',
'Programming Language :: Python :: 3.6',
'Programming Language :: Python :: 3.7',
'Programming Language :: Python :: 3.8',
],
include_package_data=True,
zip_safe=False,
install_requires=[
'Django >= 1.11',
'django-polymorphic-tree-for-garpix-page >= 2.1.1',
'django-modeltranslation >= 0.16.2',
'django-multiurl >= 1.4.0',
'djangorestframework >= 3.12.4',
'garpix_utils >= 1.4.0',
'django-tabbed-admin >= 1.0.4',
'model-bakery >= 1.4.0'
],
)
| [((4, 30, 4, 52), 'os.path.dirname', 'path.dirname', ({(4, 43, 4, 51): '__file__'}, {}), '(__file__)', False, 'from os import path\n'), ((6, 10, 6, 39), 'os.path.join', 'path.join', ({(6, 20, 6, 24): 'here', (6, 26, 6, 38): '"""README.rst"""'}, {}), "(here, 'README.rst')", False, 'from os import path\n'), ((18, 13, 18, 68), 'setuptools.find_packages', 'find_packages', (), '', False, 'from setuptools import setup, find_packages\n')] |
C6SUMMER/allinclusive-kodi-pi | .kodi/addons/plugin.video.p2p-streams/resources/core/livestreams.py | 8baf247c79526849c640c6e56ca57a708a65bd11 | # -*- coding: utf-8 -*-
""" p2p-streams (c) 2014 enen92 fightnight
This file contains the livestream addon engine. It is mostly based on divingmule's work on the livestreams addon!
Functions:
xml_lists_menu() -> main menu for the xml list category
addlista() -> add a new list. It'll ask for a local or remote list and process the given input
remove_list(name) -> Remove a list
get_groups(url) -> First regex function to parse a given list. Sopcast type list
get_channels(name,url) -> Second regex function to parse a given list. Used to general livestreams xml type lists
getData(url,fanart) -> Get the item data such as iconimage, fanart, etc
getChannelItems(name,url,fanart) -> Function to grab the channel items
getItems(items,fanart) -> Function to grab the items from the xml
removeNonAscii(s) -> Function to remove non-ascii characters from the list
getSoup(url) -> uses beautifulsoup to parse a remote xml
addon_log(string) -> Simple log/print function
getRegexParsed(regexs, url) -> parse the regex expression
list_type(url) -> Checks if the list is xml or m3u
parse_m3u(url) -> Parses a m3u type list
"""
import urllib,urllib2,re,xbmcplugin,xbmcgui,xbmc,xbmcaddon,HTMLParser,time,datetime,os,xbmcvfs,sys
from BeautifulSoup import BeautifulStoneSoup, BeautifulSoup, BeautifulSOAP
from peertopeerutils.pluginxbmc import *
from peertopeerutils.webutils import *
from peertopeerutils.directoryhandle import *
from peertopeerutils.iofile import *
"""
Main Menu
"""
def xml_lists_menu():
if settings.getSetting('sopcast-oficial') == "true":
addDir(translate(40116),"http://sopcast.org/chlist.xml",101,addonpath + art + 'xml_list_sopcast.png',2,True)
try:
if os.path.exists(os.path.join(pastaperfil,"Lists")):
dirs, files = xbmcvfs.listdir(os.path.join(pastaperfil,"Lists"))
for file in files:
f = open(os.path.join(pastaperfil,"Lists",file), "r")
string = f.read()
if xbmcvfs.exists(os.path.join(pastaperfil,"Lists-fanart",file.replace('.txt','.jpg'))):addDir("[B][COLOR orange]" + file.replace(".txt","") + "[/B][/COLOR]",string,101,addonpath + art + 'xml_lists.png',2,True,fan_art=os.path.join(pastaperfil,"Lists-fanart",file.replace('.txt','.jpg')))
else: addDir("[B][COLOR orange]" + file.replace(".txt","") + "[/B][/COLOR]",string,101,addonpath + art + 'xml_lists.png',2,True)
except: pass
addDir(translate(40121),MainURL,107,addonpath + art + 'plus-menu.png',2,False)
#xbmc.executebuiltin("Container.SetViewMode(51)")
"""
Add a new list function
"""
def addlista():
opcao= xbmcgui.Dialog().yesno(translate(40000), translate(40123),"","",translate(40124),translate(40125))
if opcao:
dialog = xbmcgui.Dialog()
lista_xml = dialog.browse(int(1), translate(40186), 'myprograms','.xml|.m3u')
keybdois = xbmc.Keyboard("", translate(40130))
keybdois.doModal()
if (keybdois.isConfirmed()):
searchname = keybdois.getText()
if searchname=='': sys.exit(0)
encode=urllib.quote(searchname)
if xbmcvfs.exists(os.path.join(pastaperfil,"Lists")): pass
else: xbmcvfs.mkdir(os.path.join(pastaperfil,"Lists"))
txt_name = searchname + ".txt"
save(os.path.join(pastaperfil,"Lists",txt_name),lista_xml)
mensagemok(translate(40000),translate(40129))
xbmc.executebuiltin("XBMC.Container.Refresh")
else:
keyb = xbmc.Keyboard("", translate(40127))
keyb.doModal()
if (keyb.isConfirmed()):
search = keyb.getText()
if search=='': sys.exit(0)
if "dropbox" in search and not "?dl=1" in search: search = search + '?dl=1'
if "xml" not in search.split(".")[-1] and "m3u" not in search.split(".")[-1]: mensagemok(translate(40000),translate(40128)); sys.exit(0)
else:
try:
code = get_page_source(search)
except:
mensagemok(translate(40000),translate(40128))
sys.exit(0)
keybdois = xbmc.Keyboard("", translate(40130))
keybdois.doModal()
if (keybdois.isConfirmed()):
searchname = keybdois.getText()
if searchname=='': sys.exit(0)
encode=urllib.quote(searchname)
if os.path.exists(os.path.join(pastaperfil,"Lists")): pass
else: xbmcvfs.mkdir(os.path.join(pastaperfil,"Lists"))
txt_name = searchname + ".txt"
save(os.path.join(pastaperfil,"Lists",txt_name),search)
mensagemok(translate(40000),translate(40129))
xbmc.executebuiltin("XBMC.Container.Refresh")
"""
Remove a List
"""
def remove_list(name):
xbmcvfs.delete(name)
xbmc.executebuiltin("Notification(%s,%s,%i,%s)" % (translate(40000), translate(40150), 1,addonpath+"/icon.png"))
xbmc.executebuiltin("Container.Refresh")
"""
Parsing functions
"""
def list_type(url):
ltype = url.split('.')[-1]
if 'xml' in ltype: get_groups(url)
elif 'm3u' in ltype: parse_m3u(url)
else: pass
def parse_m3u(url):
if "http" in url: content = get_page_source(url)
else: content = readfile(url)
match = re.compile('#EXTINF:.+?,(.*?)\n(.*?)(?:\r|\n)').findall(content)
for channel_name,stream_url in match:
if 'plugin://' in stream_url:
stream_url = 'XBMC.RunPlugin('+stream_url+')'
addDir(channel_name,stream_url,106,'',1,False)
elif 'sop://' in stream_url:
addDir(channel_name,stream_url,2,'',1,False)
elif ('acestream://' in stream_url) or ('.acelive' in stream_url) or ('.torrent' in stream_url):
addDir(channel_name,stream_url,1,'',1,False)
else: addLink(channel_name,stream_url,'')
def get_groups(url):
from xml.etree import ElementTree
try:
print("Sopcast xml-type list detected")
if "http" in url:
source = get_page_source(url)
save(os.path.join(pastaperfil,"working.xml"),source)
workingxml = os.path.join(pastaperfil,"working.xml")
else:
workingxml = url
groups = ElementTree.parse(workingxml).findall('.//group')
unname_group_index = 1
LANGUAGE = "en"
for group in groups:
if group.attrib[LANGUAGE] == "":
group.attrib[LANGUAGE] = str(unname_group_index)
unname_group_index = unname_group_index + 1
if re.sub('c','e',LANGUAGE) == LANGUAGE:
OTHER_LANG = re.sub('e','c',LANGUAGE)
else:
OTHER_LANG = re.sub('c','e',LANGUAGE)
if LANGUAGE == "cn":
try:
if len(group.attrib[OTHER_LANG]) > 0:
group.attrib[LANGUAGE] = group.attrib[OTHER_LANG]
unname_group_index = unname_group_index - 1
except:
pass
if (group.find('.//channel')==None): continue
group_name=group.attrib[LANGUAGE]
try:
addDir_livestreams_common(group_name,url,102,addonpath + art + 'xml_list_sopcast.png',True)
except: pass
#xbmc.executebuiltin("Container.SetViewMode(51)")
except:
print("Other type of xml list")
getData(url,"")
def get_channels(name,url):
from xml.etree import ElementTree
if url.startswith('http://'):
source = get_page_source(url)
else:
source = readfile(url)
save(os.path.join(pastaperfil,"working.xml"),source)
chlist_tree = ElementTree.parse(os.path.join(pastaperfil,"working.xml"))
LANGUAGE = "en"
groups = ElementTree.parse(os.path.join(pastaperfil,"working.xml")).findall('.//group')
for group in groups:
if group.attrib[LANGUAGE].encode('utf-8') == name:
channels = group.findall('.//channel')
for channel in channels:
try:
try:
title = channel.find('.//name').attrib['en'].encode('utf-8')
except: title = ''
if not title:
try: title = channel.find('.//name').attrib['cn'].encode('utf-8')
except: title = ''
if not title:
try: title = channel.find('.//name').text
except: title = ''
tipo = channel.find('.//stream_type').text
sop_address = channel.find('.//item').text
if not tipo: tipo = "N/A"
if not title: title = "N/A"
thumbnail = ""
try:
thumbnail = channel.find('.//thumbnail').text
except: pass
if sop_address:
if thumbnail == "": thumbnail = addonpath + art + 'sopcast_link.png'
try: addDir_livestreams_common('[B][COLOR orange]' + title + ' [/B][/COLOR](' + tipo +')',sop_address,2,thumbnail,False)
except:pass
else: pass
except: pass
else: pass
def getData(url,fanart):
soup = getSoup(url)
if len(soup('channels')) > 0:
channels = soup('channel')
for channel in channels:
name = channel('name')[0].string
thumbnail = channel('thumbnail')[0].string
if thumbnail == None:
thumbnail = ''
try:
if not channel('fanart'):
if addon.getSetting('use_thumb') == "true":
fanArt = thumbnail
else:
fanArt = fanart
else:
fanArt = channel('fanart')[0].string
if fanArt == None:
raise
except:
fanArt = fanart
try:
desc = channel('info')[0].string
if desc == None:
raise
except:
desc = ''
try:
genre = channel('genre')[0].string
if genre == None:
raise
except:
genre = ''
try:
date = channel('date')[0].string
if date == None:
raise
except:
date = ''
try:
credits = channel('credits')[0].string
if credits == None:
raise
except:
credits = ''
try:
addDir_livestreams(name.encode('utf-8', 'ignore'),url.encode('utf-8'),103,thumbnail,fanArt,desc,genre,date,credits,True)
except:
addon_log('There was a problem adding directory from getData(): '+name.encode('utf-8', 'ignore'))
else:
addon_log('No Channels: getItems')
getItems(soup('item'),fanart)
def getChannelItems(name,url,fanart):
soup = getSoup(url)
channel_list = soup.find('channel', attrs={'name' : name.decode('utf-8')})
items = channel_list('item')
try:
fanArt = channel_list('fanart')[0].string
if fanArt == None:
raise
except:
fanArt = fanart
for channel in channel_list('subchannel'):
name = channel('name')[0].string
try:
thumbnail = channel('thumbnail')[0].string
if thumbnail == None:
raise
except:
thumbnail = ''
try:
if not channel('fanart'):
if addon.getSetting('use_thumb') == "true":
fanArt = thumbnail
else:
fanArt = channel('fanart')[0].string
if fanArt == None:
raise
except:
pass
try:
desc = channel('info')[0].string
if desc == None:
raise
except:
desc = ''
try:
genre = channel('genre')[0].string
if genre == None:
raise
except:
genre = ''
try:
date = channel('date')[0].string
if date == None:
raise
except:
date = ''
try:
credits = channel('credits')[0].string
if credits == None:
raise
except:
credits = ''
try:
addDir_livestreams(name.encode('utf-8', 'ignore'),url.encode('utf-8'),3,thumbnail,fanArt,desc,genre,credits,date)
except:
addon_log('There was a problem adding directory - '+name.encode('utf-8', 'ignore'))
getItems(items,fanArt)
def getItems(items,fanart):
total = len(items)
addon_log('Total Items: %s' %total)
for item in items:
try:
name = item('title')[0].string
if name is None:
name = 'unknown?'
except:
addon_log('Name Error')
name = ''
try:
if item('epg'):
if item.epg_url:
addon_log('Get EPG Regex')
epg_url = item.epg_url.string
epg_regex = item.epg_regex.string
epg_name = get_epg(epg_url, epg_regex)
if epg_name:
name += ' - ' + epg_name
elif item('epg')[0].string > 1:
name += getepg(item('epg')[0].string)
else:
pass
except:
addon_log('EPG Error')
try:
url = []
for i in item('link'):
if not i.string == None:
url.append(i.string)
if len(url) < 1:
raise
except:
addon_log('Error <link> element, Passing:'+name.encode('utf-8', 'ignore'))
continue
try:
thumbnail = item('thumbnail')[0].string
if thumbnail == None:
raise
except:
thumbnail = ''
try:
if not item('fanart'):
if addon.getSetting('use_thumb') == "true":
fanArt = thumbnail
else:
fanArt = fanart
else:
fanArt = item('fanart')[0].string
if fanArt == None:
raise
except:
fanArt = fanart
try:
desc = item('info')[0].string
if desc == None:
raise
except:
desc = ''
try:
genre = item('genre')[0].string
if genre == None:
raise
except:
genre = ''
try:
date = item('date')[0].string
if date == None:
raise
except:
date = ''
regexs = None
if item('regex'):
try:
regexs = {}
for i in item('regex'):
regexs[i('name')[0].string] = {}
regexs[i('name')[0].string]['expre'] = i('expres')[0].string
regexs[i('name')[0].string]['page'] = i('page')[0].string
try:
regexs[i('name')[0].string]['refer'] = i('referer')[0].string
except:
addon_log("Regex: -- No Referer --")
try:
regexs[i('name')[0].string]['agent'] = i('agent')[0].string
except:
addon_log("Regex: -- No User Agent --")
regexs = urllib.quote(repr(regexs))
except:
regexs = None
addon_log('regex Error: '+name.encode('utf-8', 'ignore'))
try:
if "RunPlugin" in url[0]:
try:
addDir_livestreams(name.encode('utf-8', 'ignore'),url[0],106,thumbnail,fanArt,desc,genre,"credits",date)
except:
match = re.compile("&name=(.+?)\)").findall(url[0].replace(";",""))
if match:
try:
addDir_livestreams(name.encode('utf-8', 'ignore'),removeNonAscii(url[0]),106,thumbnail,fanArt,desc,genre,credits,date)
except:
try:
addDir_livestreams(removeNonAscii(name.encode('utf-8', 'ignore')),removeNonAscii(url[0].replace(";","")),106,thumbnail,fanArt,desc,genre,credits,date)
except:
addon_log('There was a problem adding item - '+name.encode('utf-8', 'ignore'))
else:
addon_log('There was a problem adding item - '+name.encode('utf-8', 'ignore'))
else:
if ('acestream://' in url[0]) or ('.acelive' in url[0]) or ('.torrent' in url[0]):
if 'plugin://' not in url[0]:
addDir_livestreams(name.encode('utf-8', 'ignore'),url[0],1,thumbnail,fanArt,desc,genre,"credits",date)
else:
addLink_livestreams(url[0].replace(';',''),name.encode('utf-8', 'ignore'),thumbnail,fanArt,desc,genre,date,True,None,regexs,total)
elif 'sop://' in url[0]:
if 'plugin://' not in url[0]:
addDir_livestreams(name.encode('utf-8', 'ignore'),url[0],2,thumbnail,fanArt,desc,genre,"credits",date)
else:
addLink_livestreams(url[0].replace(';',''),name.encode('utf-8', 'ignore'),thumbnail,fanArt,desc,genre,date,True,None,regexs,total)
else: addLink_livestreams(url[0].replace(';',''),name.encode('utf-8', 'ignore'),thumbnail,fanArt,desc,genre,date,True,None,regexs,total)
except:
addon_log('There was a problem adding item - '+name.encode('utf-8', 'ignore'))
def removeNonAscii(s): return "".join(filter(lambda x: ord(x)<128, s))
def getSoup(url):
if url.startswith('http://'):
data = makeRequest(url)
else:
if xbmcvfs.exists(url):
if url.startswith("smb://") or url.startswith("nfs://"):
copy = xbmcvfs.copy(url, os.path.join(profile, 'temp', 'sorce_temp.txt'))
if copy:
data = open(os.path.join(profile, 'temp', 'sorce_temp.txt'), "r").read()
xbmcvfs.delete(os.path.join(profile, 'temp', 'sorce_temp.txt'))
else:
addon_log("failed to copy from smb:")
else:
data = open(url, 'r').read()
else:
addon_log("Soup Data not found!")
return
return BeautifulSOAP(data, convertEntities=BeautifulStoneSoup.XML_ENTITIES)
def addon_log(string):
print(string)
def getRegexParsed(regexs, url):
regexs = eval(urllib.unquote(regexs))
cachedPages = {}
doRegexs = re.compile('\$doregex\[([^\]]*)\]').findall(url)
for k in doRegexs:
if k in regexs:
m = regexs[k]
if m['page'] in cachedPages:
link = cachedPages[m['page']]
else:
req = urllib2.Request(m['page'])
req.add_header('User-Agent', 'Mozilla/5.0 (Windows NT 6.1; rv:14.0) Gecko/20100101 Firefox/14.0.1')
if 'refer' in m:
req.add_header('Referer', m['refer'])
if 'agent' in m:
req.add_header('User-agent', m['agent'])
response = urllib2.urlopen(req)
link = response.read()
response.close()
cachedPages[m['page']] = link
reg = re.compile(m['expre']).search(link)
url = url.replace("$doregex[" + k + "]", reg.group(1).strip())
item = xbmcgui.ListItem(path=url)
xbmcplugin.setResolvedUrl(int(sys.argv[1]), True, item)
| [((114, 4, 114, 24), 'xbmcvfs.delete', 'xbmcvfs.delete', ({(114, 19, 114, 23): 'name'}, {}), '(name)', False, 'import urllib, urllib2, re, xbmcplugin, xbmcgui, xbmc, xbmcaddon, HTMLParser, time, datetime, os, xbmcvfs, sys\n'), ((116, 4, 116, 44), 'xbmc.executebuiltin', 'xbmc.executebuiltin', ({(116, 24, 116, 43): '"""Container.Refresh"""'}, {}), "('Container.Refresh')", False, 'import urllib, urllib2, re, xbmcplugin, xbmcgui, xbmc, xbmcaddon, HTMLParser, time, datetime, os, xbmcvfs, sys\n'), ((493, 11, 493, 79), 'BeautifulSoup.BeautifulSOAP', 'BeautifulSOAP', (), '', False, 'from BeautifulSoup import BeautifulStoneSoup, BeautifulSoup, BeautifulSOAP\n'), ((520, 11, 520, 37), 'xbmcgui.ListItem', 'xbmcgui.ListItem', (), '', False, 'import urllib, urllib2, re, xbmcplugin, xbmcgui, xbmc, xbmcaddon, HTMLParser, time, datetime, os, xbmcvfs, sys\n'), ((66, 17, 66, 33), 'xbmcgui.Dialog', 'xbmcgui.Dialog', ({}, {}), '()', False, 'import urllib, urllib2, re, xbmcplugin, xbmcgui, xbmc, xbmcaddon, HTMLParser, time, datetime, os, xbmcvfs, sys\n'), ((188, 9, 188, 48), 'os.path.join', 'os.path.join', ({(188, 22, 188, 33): 'pastaperfil', (188, 34, 188, 47): '"""working.xml"""'}, {}), "(pastaperfil, 'working.xml')", False, 'import urllib, urllib2, re, xbmcplugin, xbmcgui, xbmc, xbmcaddon, HTMLParser, time, datetime, os, xbmcvfs, sys\n'), ((189, 36, 189, 75), 'os.path.join', 'os.path.join', ({(189, 49, 189, 60): 'pastaperfil', (189, 61, 189, 74): '"""working.xml"""'}, {}), "(pastaperfil, 'working.xml')", False, 'import urllib, urllib2, re, xbmcplugin, xbmcgui, xbmc, xbmcaddon, HTMLParser, time, datetime, os, xbmcvfs, sys\n'), ((480, 11, 480, 30), 'xbmcvfs.exists', 'xbmcvfs.exists', ({(480, 26, 480, 29): 'url'}, {}), '(url)', False, 'import urllib, urllib2, re, xbmcplugin, xbmcgui, xbmc, xbmcaddon, HTMLParser, time, datetime, os, xbmcvfs, sys\n'), ((499, 18, 499, 40), 'urllib.unquote', 'urllib.unquote', ({(499, 33, 499, 39): 'regexs'}, {}), '(regexs)', False, 'import urllib, urllib2, re, xbmcplugin, xbmcgui, xbmc, xbmcaddon, HTMLParser, time, datetime, os, xbmcvfs, sys\n'), ((46, 26, 46, 59), 'os.path.join', 'os.path.join', ({(46, 39, 46, 50): 'pastaperfil', (46, 51, 46, 58): '"""Lists"""'}, {}), "(pastaperfil, 'Lists')", False, 'import urllib, urllib2, re, xbmcplugin, xbmcgui, xbmc, xbmcaddon, HTMLParser, time, datetime, os, xbmcvfs, sys\n'), ((64, 11, 64, 27), 'xbmcgui.Dialog', 'xbmcgui.Dialog', ({}, {}), '()', False, 'import urllib, urllib2, re, xbmcplugin, xbmcgui, xbmc, xbmcaddon, HTMLParser, time, datetime, os, xbmcvfs, sys\n'), ((73, 19, 73, 43), 'urllib.quote', 'urllib.quote', ({(73, 32, 73, 42): 'searchname'}, {}), '(searchname)', False, 'import urllib, urllib2, re, xbmcplugin, xbmcgui, xbmc, xbmcaddon, HTMLParser, time, datetime, os, xbmcvfs, sys\n'), ((79, 12, 79, 57), 'xbmc.executebuiltin', 'xbmc.executebuiltin', ({(79, 32, 79, 56): '"""XBMC.Container.Refresh"""'}, {}), "('XBMC.Container.Refresh')", False, 'import urllib, urllib2, re, xbmcplugin, xbmcgui, xbmc, xbmcaddon, HTMLParser, time, datetime, os, xbmcvfs, sys\n'), ((132, 12, 132, 59), 're.compile', 're.compile', ({(132, 23, 132, 58): "'#EXTINF:.+?,(.*?)\\n(.*?)(?:\\r|\\n)'"}, {}), "('#EXTINF:.+?,(.*?)\\n(.*?)(?:\\r|\\n)')", False, 'import urllib, urllib2, re, xbmcplugin, xbmcgui, xbmc, xbmcaddon, HTMLParser, time, datetime, os, xbmcvfs, sys\n'), ((151, 25, 151, 64), 'os.path.join', 'os.path.join', ({(151, 38, 151, 49): 'pastaperfil', (151, 50, 151, 63): '"""working.xml"""'}, {}), "(pastaperfil, 'working.xml')", False, 'import urllib, 
urllib2, re, xbmcplugin, xbmcgui, xbmc, xbmcaddon, HTMLParser, time, datetime, os, xbmcvfs, sys\n'), ((501, 15, 501, 50), 're.compile', 're.compile', ({(501, 26, 501, 49): '"""\\\\$doregex\\\\[([^\\\\]]*)\\\\]"""'}, {}), "('\\\\$doregex\\\\[([^\\\\]]*)\\\\]')", False, 'import urllib, urllib2, re, xbmcplugin, xbmcgui, xbmc, xbmcaddon, HTMLParser, time, datetime, os, xbmcvfs, sys\n'), ((47, 42, 47, 75), 'os.path.join', 'os.path.join', ({(47, 55, 47, 66): 'pastaperfil', (47, 67, 47, 74): '"""Lists"""'}, {}), "(pastaperfil, 'Lists')", False, 'import urllib, urllib2, re, xbmcplugin, xbmcgui, xbmc, xbmcaddon, HTMLParser, time, datetime, os, xbmcvfs, sys\n'), ((72, 31, 72, 42), 'sys.exit', 'sys.exit', ({(72, 40, 72, 41): '(0)'}, {}), '(0)', False, 'import urllib, urllib2, re, xbmcplugin, xbmcgui, xbmc, xbmcaddon, HTMLParser, time, datetime, os, xbmcvfs, sys\n'), ((74, 30, 74, 63), 'os.path.join', 'os.path.join', ({(74, 43, 74, 54): 'pastaperfil', (74, 55, 74, 62): '"""Lists"""'}, {}), "(pastaperfil, 'Lists')", False, 'import urllib, urllib2, re, xbmcplugin, xbmcgui, xbmc, xbmcaddon, HTMLParser, time, datetime, os, xbmcvfs, sys\n'), ((77, 17, 77, 59), 'os.path.join', 'os.path.join', ({(77, 30, 77, 41): 'pastaperfil', (77, 42, 77, 49): '"""Lists"""', (77, 50, 77, 58): 'txt_name'}, {}), "(pastaperfil, 'Lists', txt_name)", False, 'import urllib, urllib2, re, xbmcplugin, xbmcgui, xbmc, xbmcaddon, HTMLParser, time, datetime, os, xbmcvfs, sys\n'), ((85, 27, 85, 38), 'sys.exit', 'sys.exit', ({(85, 36, 85, 37): '(0)'}, {}), '(0)', False, 'import urllib, urllib2, re, xbmcplugin, xbmcgui, xbmc, xbmcaddon, HTMLParser, time, datetime, os, xbmcvfs, sys\n'), ((87, 137, 87, 148), 'sys.exit', 'sys.exit', ({(87, 146, 87, 147): '(0)'}, {}), '(0)', False, 'import urllib, urllib2, re, xbmcplugin, xbmcgui, xbmc, xbmcaddon, HTMLParser, time, datetime, os, xbmcvfs, sys\n'), ((99, 23, 99, 47), 'urllib.quote', 'urllib.quote', ({(99, 36, 99, 46): 'searchname'}, {}), '(searchname)', False, 'import urllib, urllib2, re, xbmcplugin, xbmcgui, xbmc, xbmcaddon, HTMLParser, time, datetime, os, xbmcvfs, sys\n'), ((105, 16, 105, 61), 'xbmc.executebuiltin', 'xbmc.executebuiltin', ({(105, 36, 105, 60): '"""XBMC.Container.Refresh"""'}, {}), "('XBMC.Container.Refresh')", False, 'import urllib, urllib2, re, xbmcplugin, xbmcgui, xbmc, xbmcaddon, HTMLParser, time, datetime, os, xbmcvfs, sys\n'), ((150, 17, 150, 56), 'os.path.join', 'os.path.join', ({(150, 30, 150, 41): 'pastaperfil', (150, 42, 150, 55): '"""working.xml"""'}, {}), "(pastaperfil, 'working.xml')", False, 'import urllib, urllib2, re, xbmcplugin, xbmcgui, xbmc, xbmcaddon, HTMLParser, time, datetime, os, xbmcvfs, sys\n'), ((154, 17, 154, 46), 'xml.etree.ElementTree.parse', 'ElementTree.parse', ({(154, 35, 154, 45): 'workingxml'}, {}), '(workingxml)', False, 'from xml.etree import ElementTree\n'), ((191, 31, 191, 70), 'os.path.join', 'os.path.join', ({(191, 44, 191, 55): 'pastaperfil', (191, 56, 191, 69): '"""working.xml"""'}, {}), "(pastaperfil, 'working.xml')", False, 'import urllib, urllib2, re, xbmcplugin, xbmcgui, xbmc, xbmcaddon, HTMLParser, time, datetime, os, xbmcvfs, sys\n'), ((508, 22, 508, 48), 'urllib2.Request', 'urllib2.Request', ({(508, 38, 508, 47): "m['page']"}, {}), "(m['page'])", False, 'import urllib, urllib2, re, xbmcplugin, xbmcgui, xbmc, xbmcaddon, HTMLParser, time, datetime, os, xbmcvfs, sys\n'), ((514, 27, 514, 47), 'urllib2.urlopen', 'urllib2.urlopen', ({(514, 43, 514, 46): 'req'}, {}), '(req)', False, 'import urllib, urllib2, re, xbmcplugin, xbmcgui, 
xbmc, xbmcaddon, HTMLParser, time, datetime, os, xbmcvfs, sys\n'), ((49, 25, 49, 63), 'os.path.join', 'os.path.join', ({(49, 38, 49, 49): 'pastaperfil', (49, 50, 49, 57): '"""Lists"""', (49, 58, 49, 62): 'file'}, {}), "(pastaperfil, 'Lists', file)", False, 'import urllib, urllib2, re, xbmcplugin, xbmcgui, xbmc, xbmcaddon, HTMLParser, time, datetime, os, xbmcvfs, sys\n'), ((75, 32, 75, 65), 'os.path.join', 'os.path.join', ({(75, 45, 75, 56): 'pastaperfil', (75, 57, 75, 64): '"""Lists"""'}, {}), "(pastaperfil, 'Lists')", False, 'import urllib, urllib2, re, xbmcplugin, xbmcgui, xbmc, xbmcaddon, HTMLParser, time, datetime, os, xbmcvfs, sys\n'), ((98, 35, 98, 46), 'sys.exit', 'sys.exit', ({(98, 44, 98, 45): '(0)'}, {}), '(0)', False, 'import urllib, urllib2, re, xbmcplugin, xbmcgui, xbmc, xbmcaddon, HTMLParser, time, datetime, os, xbmcvfs, sys\n'), ((100, 34, 100, 67), 'os.path.join', 'os.path.join', ({(100, 47, 100, 58): 'pastaperfil', (100, 59, 100, 66): '"""Lists"""'}, {}), "(pastaperfil, 'Lists')", False, 'import urllib, urllib2, re, xbmcplugin, xbmcgui, xbmc, xbmcaddon, HTMLParser, time, datetime, os, xbmcvfs, sys\n'), ((103, 21, 103, 63), 'os.path.join', 'os.path.join', ({(103, 34, 103, 45): 'pastaperfil', (103, 46, 103, 53): '"""Lists"""', (103, 54, 103, 62): 'txt_name'}, {}), "(pastaperfil, 'Lists', txt_name)", False, 'import urllib, urllib2, re, xbmcplugin, xbmcgui, xbmc, xbmcaddon, HTMLParser, time, datetime, os, xbmcvfs, sys\n'), ((161, 19, 161, 43), 're.sub', 're.sub', ({(161, 26, 161, 29): '"""c"""', (161, 30, 161, 33): '"""e"""', (161, 34, 161, 42): 'LANGUAGE'}, {}), "('c', 'e', LANGUAGE)", False, 'import urllib, urllib2, re, xbmcplugin, xbmcgui, xbmc, xbmcaddon, HTMLParser, time, datetime, os, xbmcvfs, sys\n'), ((162, 33, 162, 57), 're.sub', 're.sub', ({(162, 40, 162, 43): '"""e"""', (162, 44, 162, 47): '"""c"""', (162, 48, 162, 56): 'LANGUAGE'}, {}), "('e', 'c', LANGUAGE)", False, 'import urllib, urllib2, re, xbmcplugin, xbmcgui, xbmc, xbmcaddon, HTMLParser, time, datetime, os, xbmcvfs, sys\n'), ((164, 33, 164, 57), 're.sub', 're.sub', ({(164, 40, 164, 43): '"""c"""', (164, 44, 164, 47): '"""e"""', (164, 48, 164, 56): 'LANGUAGE'}, {}), "('c', 'e', LANGUAGE)", False, 'import urllib, urllib2, re, xbmcplugin, xbmcgui, xbmc, xbmcaddon, HTMLParser, time, datetime, os, xbmcvfs, sys\n'), ((482, 41, 482, 88), 'os.path.join', 'os.path.join', ({(482, 54, 482, 61): 'profile', (482, 63, 482, 69): '"""temp"""', (482, 71, 482, 87): '"""sorce_temp.txt"""'}, {}), "(profile, 'temp', 'sorce_temp.txt')", False, 'import urllib, urllib2, re, xbmcplugin, xbmcgui, xbmc, xbmcaddon, HTMLParser, time, datetime, os, xbmcvfs, sys\n'), ((518, 18, 518, 40), 're.compile', 're.compile', ({(518, 29, 518, 39): "m['expre']"}, {}), "(m['expre'])", False, 'import urllib, urllib2, re, xbmcplugin, xbmcgui, xbmc, xbmcaddon, HTMLParser, time, datetime, os, xbmcvfs, sys\n'), ((93, 20, 93, 31), 'sys.exit', 'sys.exit', ({(93, 29, 93, 30): '(0)'}, {}), '(0)', False, 'import urllib, urllib2, re, xbmcplugin, xbmcgui, xbmc, xbmcaddon, HTMLParser, time, datetime, os, xbmcvfs, sys\n'), ((101, 36, 101, 69), 'os.path.join', 'os.path.join', ({(101, 49, 101, 60): 'pastaperfil', (101, 61, 101, 68): '"""Lists"""'}, {}), "(pastaperfil, 'Lists')", False, 'import urllib, urllib2, re, xbmcplugin, xbmcgui, xbmc, xbmcaddon, HTMLParser, time, datetime, os, xbmcvfs, sys\n'), ((485, 35, 485, 82), 'os.path.join', 'os.path.join', ({(485, 48, 485, 55): 'profile', (485, 57, 485, 63): '"""temp"""', (485, 65, 485, 81): '"""sorce_temp.txt"""'}, {}), 
"(profile, 'temp', 'sorce_temp.txt')", False, 'import urllib, urllib2, re, xbmcplugin, xbmcgui, xbmc, xbmcaddon, HTMLParser, time, datetime, os, xbmcvfs, sys\n'), ((447, 28, 447, 55), 're.compile', 're.compile', ({(447, 39, 447, 54): '"""&name=(.+?)\\\\)"""'}, {}), "('&name=(.+?)\\\\)')", False, 'import urllib, urllib2, re, xbmcplugin, xbmcgui, xbmc, xbmcaddon, HTMLParser, time, datetime, os, xbmcvfs, sys\n'), ((484, 32, 484, 79), 'os.path.join', 'os.path.join', ({(484, 45, 484, 52): 'profile', (484, 54, 484, 60): '"""temp"""', (484, 62, 484, 78): '"""sorce_temp.txt"""'}, {}), "(profile, 'temp', 'sorce_temp.txt')", False, 'import urllib, urllib2, re, xbmcplugin, xbmcgui, xbmc, xbmcaddon, HTMLParser, time, datetime, os, xbmcvfs, sys\n')] |
luisgepeto/RainItPi | RainIt/rain_it/ric/Procedure.py | 47cb7228e9c584c3c4489ebc78abf6de2096b770 | from ric.RainItComposite import RainItComposite
class Procedure(RainItComposite):
def __init__(self):
super().__init__()
def get_pickle_form(self):
return self
| [] |
FahimFBA/URI-Problem-Solve | 1067.py | d718a95e5a873dffbce19d850998e8917ec87ebb | valor = int(input())
for i in range(valor+1):
if(i%2 != 0):
print(i) | [] |
b-bold/ThreatExchange | api-reference-examples/python/te-tag-query/api-example-update.py | 6f8d0dc803faccf576c9398569bb52d54a4f9a87 | #!/usr/bin/env python
# ================================================================
# Copyright (c) Facebook, Inc. and its affiliates. All Rights Reserved
# ================================================================
import sys
import json
import TE
TE.Net.setAppTokenFromEnvName("TX_ACCESS_TOKEN")
postParams = {
"descriptor_id": "4036655176350945", # ID of the descriptor to be updated
"reactions": "INGESTED,IN_REVIEW",
}
showURLs = False
dryRun = False
validationErrorMessage, serverSideError, responseBody = TE.Net.updateThreatDescriptor(
postParams, showURLs, dryRun
)
if validationErrorMessage != None:
sys.stderr.write(validationErrorMessage + "\n")
sys.exit(1)
if serverSideError != None:
sys.stderr.write(str(serverSideError) + "\n")
sys.stderr.write(json.dumps(responseBody) + "\n")
sys.exit(1)
print(json.dumps(responseBody))
| [((11, 0, 11, 48), 'TE.Net.setAppTokenFromEnvName', 'TE.Net.setAppTokenFromEnvName', ({(11, 30, 11, 47): '"""TX_ACCESS_TOKEN"""'}, {}), "('TX_ACCESS_TOKEN')", False, 'import TE\n'), ((20, 56, 22, 1), 'TE.Net.updateThreatDescriptor', 'TE.Net.updateThreatDescriptor', ({(21, 4, 21, 14): 'postParams', (21, 16, 21, 24): 'showURLs', (21, 26, 21, 32): 'dryRun'}, {}), '(postParams, showURLs, dryRun)', False, 'import TE\n'), ((25, 4, 25, 51), 'sys.stderr.write', 'sys.stderr.write', ({(25, 21, 25, 50): "(validationErrorMessage + '\\n')"}, {}), "(validationErrorMessage + '\\n')", False, 'import sys\n'), ((26, 4, 26, 15), 'sys.exit', 'sys.exit', ({(26, 13, 26, 14): '(1)'}, {}), '(1)', False, 'import sys\n'), ((31, 4, 31, 15), 'sys.exit', 'sys.exit', ({(31, 13, 31, 14): '(1)'}, {}), '(1)', False, 'import sys\n'), ((33, 6, 33, 30), 'json.dumps', 'json.dumps', ({(33, 17, 33, 29): 'responseBody'}, {}), '(responseBody)', False, 'import json\n'), ((30, 21, 30, 45), 'json.dumps', 'json.dumps', ({(30, 32, 30, 44): 'responseBody'}, {}), '(responseBody)', False, 'import json\n')] |
Bottom-Feeders/GrabNGO | loaner/web_app/backend/api/shelf_api_test.py | 5a467362e423700a5a7276a7fa9a47040033cfcf | # Copyright 2018 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS-IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Tests for backend.api.shelf_api."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from absl.testing import parameterized
import mock
from protorpc import message_types
from google.appengine.api import search
import endpoints
from loaner.web_app.backend.api import root_api # pylint: disable=unused-import
from loaner.web_app.backend.api import shelf_api
from loaner.web_app.backend.api.messages import shared_messages
from loaner.web_app.backend.api.messages import shelf_messages
from loaner.web_app.backend.models import device_model
from loaner.web_app.backend.models import shelf_model # pylint: disable=unused-import
from loaner.web_app.backend.testing import loanertest
class ShelfApiTest(parameterized.TestCase, loanertest.EndpointsTestCase):
"""Test for the Shelf API."""
def setUp(self):
super(ShelfApiTest, self).setUp()
self.patcher_directory = mock.patch(
'__main__.device_model.directory.DirectoryApiClient')
self.mock_directoryclass = self.patcher_directory.start()
self.addCleanup(self.patcher_directory.stop)
self.service = shelf_api.ShelfApi()
self.login_admin_endpoints_user()
self.patcher_xsrf = mock.patch(
'__main__.shelf_api.root_api.Service.check_xsrf_token')
self.shelf = shelf_model.Shelf.enroll(
user_email=loanertest.USER_EMAIL, location='NYC', capacity=10,
friendly_name='GnG', latitude=40.6892534, longitude=-74.0466891,
altitude=1.0)
shelf1 = shelf_model.Shelf.enroll(
user_email=loanertest.USER_EMAIL, location='MTV', capacity=20)
shelf2 = shelf_model.Shelf.enroll(
user_email=loanertest.USER_EMAIL, location='SAO', capacity=10)
self.disabled_shelf = shelf_model.Shelf.enroll(
user_email=loanertest.USER_EMAIL, location='SVL', capacity=10,
friendly_name='Bay')
self.disabled_shelf.disable(loanertest.USER_EMAIL)
self.shelf_locations = [
self.shelf.location, shelf1.location, shelf2.location,
self.disabled_shelf.location]
self.device1_key = device_model.Device(
serial_number='12345',
enrolled=True,
device_model='HP Chromebook 13 G1',
current_ou='/',
chrome_device_id='unique_id_1',
damaged=False,
).put()
self.device2_key = device_model.Device(
serial_number='54321',
enrolled=True,
device_model='HP Chromebook 13 G1',
current_ou='/',
chrome_device_id='unique_id_2',
damaged=False,
).put()
self.device3_key = device_model.Device(
serial_number='67890',
enrolled=True,
shelf=self.shelf.key,
device_model='HP Chromebook 13 G1',
current_ou='/',
chrome_device_id='unique_id_3',
damaged=False,
).put()
self.device4_key = device_model.Device(
serial_number='ABC123',
enrolled=True,
shelf=self.shelf.key,
device_model='HP Chromebook 13 G1',
current_ou='/',
chrome_device_id='unique_id_4',
damaged=False,
).put()
self.device_identifiers = [
self.device1_key.get().serial_number,
self.device2_key.get().serial_number,
self.device3_key.get().serial_number]
def tearDown(self):
super(ShelfApiTest, self).tearDown()
self.service = None
@mock.patch('__main__.root_api.Service.check_xsrf_token')
@mock.patch('__main__.shelf_model.Shelf.enroll')
def test_enroll(self, mock_enroll, mock_xsrf_token):
"""Test Enroll with mock methods."""
request = shelf_messages.EnrollShelfRequest(
location='nyc', capacity=100, friendly_name='test', latitude=12.5,
longitude=12.5, altitude=2.0, responsible_for_audit='precise',
audit_interval_override=33, audit_notification_enabled=True)
response = self.service.enroll(request)
self.assertEqual(mock_xsrf_token.call_count, 1)
self.assertIsInstance(response, message_types.VoidMessage)
def test_enroll_bad_request(self):
request = shelf_messages.EnrollShelfRequest(capacity=10)
with self.assertRaisesRegexp(
shelf_api.endpoints.BadRequestException,
'Entity has uninitialized properties'):
self.service.enroll(request)
request = shelf_messages.EnrollShelfRequest(
location='nyc', capacity=10, latitude=12.5)
with self.assertRaisesRegexp(
shelf_api.endpoints.BadRequestException,
shelf_model._LAT_LONG_MSG):
self.service.enroll(request)
@mock.patch('__main__.root_api.Service.check_xsrf_token')
def test_get_by_location(self, mock_xsrf_token):
request = shelf_messages.ShelfRequest(location='NYC')
response = self.service.get(request)
self.assertEqual(mock_xsrf_token.call_count, 1)
self.assertEqual(self.shelf.location, response.location)
self.assertEqual(self.shelf.friendly_name, response.friendly_name)
def test_disable_by_location(self):
request = shelf_messages.ShelfRequest(location='NYC')
self.assertTrue(self.shelf.enabled)
response = self.service.disable(request)
self.assertFalse(self.shelf.enabled)
self.assertIsInstance(response, message_types.VoidMessage)
@mock.patch('__main__.root_api.Service.check_xsrf_token')
def test_update_using_location(self, mock_xsrf_token):
request = shelf_messages.UpdateShelfRequest(
shelf_request=shelf_messages.ShelfRequest(location='NYC'),
location='NYC-9th')
response = self.service.update(request)
self.assertEqual(mock_xsrf_token.call_count, 1)
self.assertEqual(self.shelf.location, 'NYC-9th')
shelf = shelf_model.Shelf.get(friendly_name='GnG')
self.assertEqual(shelf.location, 'NYC-9th')
self.assertIsInstance(response, message_types.VoidMessage)
@parameterized.parameters(
(shelf_messages.Shelf(capacity=10), 2,),
(shelf_messages.Shelf(enabled=False), 1,),
(shelf_messages.Shelf(
query=shared_messages.SearchRequest(
query_string='enabled:True capacity:10')), 2,),
(shelf_messages.Shelf(
query=shared_messages.SearchRequest(
query_string='enabled:False')), 1,))
@mock.patch('__main__.root_api.Service.check_xsrf_token')
def test_list_shelves(self, request, response_length, mock_xsrf_token):
response = self.service.list_shelves(request)
self.assertEqual(mock_xsrf_token.call_count, 1)
self.assertEqual(response_length, len(response.shelves))
def test_list_shelves_invalid_page_size(self):
with self.assertRaises(endpoints.BadRequestException):
request = shelf_messages.Shelf(page_size=0)
self.service.list_shelves(request)
def test_list_shelves_with_search_constraints(self):
expressions = shared_messages.SearchExpression(expression='location')
expected_response = shelf_messages.ListShelfResponse(
shelves=[shelf_messages.Shelf(
location=self.shelf.location,
shelf_request=shelf_messages.ShelfRequest(
location=self.shelf.location,
urlsafe_key=self.shelf.key.urlsafe()))],
total_results=1, total_pages=1)
request = shelf_messages.Shelf(
query=shared_messages.SearchRequest(
query_string='location:NYC',
expressions=[expressions],
returned_fields=['location']))
response = self.service.list_shelves(request)
self.assertEqual(response, expected_response)
def test_list_shelves_with_offset(self):
    previous_shelf_locations = []
    request = shelf_messages.Shelf(enabled=True, page_size=1, page_number=1)
    response = self.service.list_shelves(request)
    self.assertEqual(len(response.shelves), 1)
    previous_shelf_locations.append(response.shelves[0].location)
    # Get next page results and make sure it's not the same as the last one.
    request = shelf_messages.Shelf(enabled=True, page_size=1, page_number=2)
    response = self.service.list_shelves(request)
    self.assertEqual(len(response.shelves), 1)
    self.assertNotIn(response.shelves[0].location, previous_shelf_locations)
    previous_shelf_locations.append(response.shelves[0].location)
    # Get next page results and make sure it's not the same as the last two.
    request = shelf_messages.Shelf(enabled=True, page_size=1, page_number=3)
    response = self.service.list_shelves(request)
    self.assertEqual(len(response.shelves), 1)
    self.assertNotIn(response.shelves[0].location, previous_shelf_locations)
    previous_shelf_locations.append(response.shelves[0].location)
@mock.patch('__main__.root_api.Service.check_xsrf_token')
@mock.patch('__main__.shelf_api.logging.info')
def test_audit_using_shelf_location(self, mock_logging, mock_xsrf_token):
request = shelf_messages.ShelfAuditRequest(
shelf_request=shelf_messages.ShelfRequest(location='NYC'),
device_identifiers=self.device_identifiers)
response = self.service.audit(request)
self.assertEqual(mock_xsrf_token.call_count, 1)
mock_logging.assert_called()
for identifier in self.device_identifiers:
datastore_device = device_model.Device.get(serial_number=identifier)
self.assertEqual(datastore_device.shelf.get().location, 'NYC')
self.assertFalse(self.shelf.audit_requested)
self.assertEqual(self.shelf.last_audit_by, loanertest.SUPER_ADMIN_EMAIL)
self.assertIsInstance(response, message_types.VoidMessage)
def test_audit_invalid_device(self):
request = shelf_messages.ShelfAuditRequest(
shelf_request=shelf_messages.ShelfRequest(location='NYC'),
device_identifiers=['Invalid'])
with self.assertRaisesRegexp(
endpoints.NotFoundException,
shelf_api._DEVICE_DOES_NOT_EXIST_MSG % 'Invalid'):
self.service.audit(request)
@mock.patch.object(device_model.Device, 'search')
@mock.patch.object(shelf_api, 'get_shelf', autospec=True)
def test_audit_remove_devices(
self, mock_get_shelf, mock_model_device_search):
shelf = self.device2_key.get()
shelf.shelf = self.shelf.key
shelf.put()
mock_model_device_search.return_value = (
search.SearchResults(
results=[
search.ScoredDocument(
doc_id=self.device2_key.urlsafe()),
search.ScoredDocument(
doc_id=self.device3_key.urlsafe()),
search.ScoredDocument(
doc_id=self.device4_key.urlsafe())],
number_found=3))
mock_get_shelf.return_value = self.shelf
request = shelf_messages.ShelfAuditRequest(
shelf_request=shelf_messages.ShelfRequest(location=self.shelf.location),
device_identifiers=[self.device3_key.get().serial_number])
self.service.audit(request)
self.assertEqual(self.device3_key.get().shelf, self.shelf.key)
self.assertIsNone(self.device2_key.get().shelf)
self.assertIsNone(self.device4_key.get().shelf)
def test_get_shelf_urlsafe_key(self):
"""Test getting a shelf using the urlsafe key."""
request = shelf_messages.ShelfRequest(urlsafe_key=self.shelf.key.urlsafe())
shelf = shelf_api.get_shelf(request)
self.assertEqual(shelf, self.shelf)
def test_get_shelf_using_location(self):
"""Test getting a shelf using the location."""
request = shelf_messages.ShelfRequest(location=self.shelf.location)
shelf = shelf_api.get_shelf(request)
self.assertEqual(shelf, self.shelf)
def test_get_shelf_using_location_error(self):
"""Test getting a shelf with an invalid location."""
request = shelf_messages.ShelfRequest(location='Not_Valid')
with self.assertRaisesRegexp(
endpoints.NotFoundException,
shelf_api._SHELF_DOES_NOT_EXIST_MSG % request.location):
shelf_api.get_shelf(request)
if __name__ == '__main__':
loanertest.main()
| [((111, 3, 111, 59), 'mock.patch', 'mock.patch', ({(111, 14, 111, 58): '"""__main__.root_api.Service.check_xsrf_token"""'}, {}), "('__main__.root_api.Service.check_xsrf_token')", False, 'import mock\n'), ((112, 3, 112, 50), 'mock.patch', 'mock.patch', ({(112, 14, 112, 49): '"""__main__.shelf_model.Shelf.enroll"""'}, {}), "('__main__.shelf_model.Shelf.enroll')", False, 'import mock\n'), ((136, 3, 136, 59), 'mock.patch', 'mock.patch', ({(136, 14, 136, 58): '"""__main__.root_api.Service.check_xsrf_token"""'}, {}), "('__main__.root_api.Service.check_xsrf_token')", False, 'import mock\n'), ((151, 3, 151, 59), 'mock.patch', 'mock.patch', ({(151, 14, 151, 58): '"""__main__.root_api.Service.check_xsrf_token"""'}, {}), "('__main__.root_api.Service.check_xsrf_token')", False, 'import mock\n'), ((172, 3, 172, 59), 'mock.patch', 'mock.patch', ({(172, 14, 172, 58): '"""__main__.root_api.Service.check_xsrf_token"""'}, {}), "('__main__.root_api.Service.check_xsrf_token')", False, 'import mock\n'), ((221, 3, 221, 59), 'mock.patch', 'mock.patch', ({(221, 14, 221, 58): '"""__main__.root_api.Service.check_xsrf_token"""'}, {}), "('__main__.root_api.Service.check_xsrf_token')", False, 'import mock\n'), ((222, 3, 222, 48), 'mock.patch', 'mock.patch', ({(222, 14, 222, 47): '"""__main__.shelf_api.logging.info"""'}, {}), "('__main__.shelf_api.logging.info')", False, 'import mock\n'), ((246, 3, 246, 51), 'mock.patch.object', 'mock.patch.object', ({(246, 21, 246, 40): 'device_model.Device', (246, 42, 246, 50): '"""search"""'}, {}), "(device_model.Device, 'search')", False, 'import mock\n'), ((247, 3, 247, 59), 'mock.patch.object', 'mock.patch.object', (), '', False, 'import mock\n'), ((294, 2, 294, 19), 'loaner.web_app.backend.testing.loanertest.main', 'loanertest.main', ({}, {}), '()', False, 'from loaner.web_app.backend.testing import loanertest\n'), ((44, 29, 45, 61), 'mock.patch', 'mock.patch', ({(45, 8, 45, 60): '"""__main__.device_model.directory.DirectoryApiClient"""'}, {}), "('__main__.device_model.directory.DirectoryApiClient')", False, 'import mock\n'), ((48, 19, 48, 39), 'loaner.web_app.backend.api.shelf_api.ShelfApi', 'shelf_api.ShelfApi', ({}, {}), '()', False, 'from loaner.web_app.backend.api import shelf_api\n'), ((50, 24, 51, 63), 'mock.patch', 'mock.patch', ({(51, 8, 51, 62): '"""__main__.shelf_api.root_api.Service.check_xsrf_token"""'}, {}), "('__main__.shelf_api.root_api.Service.check_xsrf_token')", False, 'import mock\n'), ((52, 17, 55, 21), 'loaner.web_app.backend.models.shelf_model.Shelf.enroll', 'shelf_model.Shelf.enroll', (), '', False, 'from loaner.web_app.backend.models import shelf_model\n'), ((56, 13, 57, 70), 'loaner.web_app.backend.models.shelf_model.Shelf.enroll', 'shelf_model.Shelf.enroll', (), '', False, 'from loaner.web_app.backend.models import shelf_model\n'), ((58, 13, 59, 70), 'loaner.web_app.backend.models.shelf_model.Shelf.enroll', 'shelf_model.Shelf.enroll', (), '', False, 'from loaner.web_app.backend.models import shelf_model\n'), ((60, 26, 62, 28), 'loaner.web_app.backend.models.shelf_model.Shelf.enroll', 'shelf_model.Shelf.enroll', (), '', False, 'from loaner.web_app.backend.models import shelf_model\n'), ((115, 14, 118, 68), 'loaner.web_app.backend.api.messages.shelf_messages.EnrollShelfRequest', 'shelf_messages.EnrollShelfRequest', (), '', False, 'from loaner.web_app.backend.api.messages import shelf_messages\n'), ((124, 14, 124, 60), 'loaner.web_app.backend.api.messages.shelf_messages.EnrollShelfRequest', 'shelf_messages.EnrollShelfRequest', (), '', False, 'from 
loaner.web_app.backend.api.messages import shelf_messages\n'), ((129, 14, 130, 51), 'loaner.web_app.backend.api.messages.shelf_messages.EnrollShelfRequest', 'shelf_messages.EnrollShelfRequest', (), '', False, 'from loaner.web_app.backend.api.messages import shelf_messages\n'), ((138, 14, 138, 57), 'loaner.web_app.backend.api.messages.shelf_messages.ShelfRequest', 'shelf_messages.ShelfRequest', (), '', False, 'from loaner.web_app.backend.api.messages import shelf_messages\n'), ((145, 14, 145, 57), 'loaner.web_app.backend.api.messages.shelf_messages.ShelfRequest', 'shelf_messages.ShelfRequest', (), '', False, 'from loaner.web_app.backend.api.messages import shelf_messages\n'), ((159, 12, 159, 54), 'loaner.web_app.backend.models.shelf_model.Shelf.get', 'shelf_model.Shelf.get', (), '', False, 'from loaner.web_app.backend.models import shelf_model\n'), ((184, 18, 184, 73), 'loaner.web_app.backend.api.messages.shared_messages.SearchExpression', 'shared_messages.SearchExpression', (), '', False, 'from loaner.web_app.backend.api.messages import shared_messages\n'), ((202, 14, 202, 76), 'loaner.web_app.backend.api.messages.shelf_messages.Shelf', 'shelf_messages.Shelf', (), '', False, 'from loaner.web_app.backend.api.messages import shelf_messages\n'), ((208, 14, 208, 76), 'loaner.web_app.backend.api.messages.shelf_messages.Shelf', 'shelf_messages.Shelf', (), '', False, 'from loaner.web_app.backend.api.messages import shelf_messages\n'), ((215, 14, 215, 76), 'loaner.web_app.backend.api.messages.shelf_messages.Shelf', 'shelf_messages.Shelf', (), '', False, 'from loaner.web_app.backend.api.messages import shelf_messages\n'), ((275, 12, 275, 40), 'loaner.web_app.backend.api.shelf_api.get_shelf', 'shelf_api.get_shelf', ({(275, 32, 275, 39): 'request'}, {}), '(request)', False, 'from loaner.web_app.backend.api import shelf_api\n'), ((280, 14, 280, 71), 'loaner.web_app.backend.api.messages.shelf_messages.ShelfRequest', 'shelf_messages.ShelfRequest', (), '', False, 'from loaner.web_app.backend.api.messages import shelf_messages\n'), ((281, 12, 281, 40), 'loaner.web_app.backend.api.shelf_api.get_shelf', 'shelf_api.get_shelf', ({(281, 32, 281, 39): 'request'}, {}), '(request)', False, 'from loaner.web_app.backend.api import shelf_api\n'), ((286, 14, 286, 63), 'loaner.web_app.backend.api.messages.shelf_messages.ShelfRequest', 'shelf_messages.ShelfRequest', (), '', False, 'from loaner.web_app.backend.api.messages import shelf_messages\n'), ((164, 7, 164, 40), 'loaner.web_app.backend.api.messages.shelf_messages.Shelf', 'shelf_messages.Shelf', (), '', False, 'from loaner.web_app.backend.api.messages import shelf_messages\n'), ((165, 7, 165, 42), 'loaner.web_app.backend.api.messages.shelf_messages.Shelf', 'shelf_messages.Shelf', (), '', False, 'from loaner.web_app.backend.api.messages import shelf_messages\n'), ((180, 16, 180, 49), 'loaner.web_app.backend.api.messages.shelf_messages.Shelf', 'shelf_messages.Shelf', (), '', False, 'from loaner.web_app.backend.api.messages import shelf_messages\n'), ((231, 25, 231, 74), 'loaner.web_app.backend.models.device_model.Device.get', 'device_model.Device.get', (), '', False, 'from loaner.web_app.backend.models import device_model\n'), ((290, 6, 290, 34), 'loaner.web_app.backend.api.shelf_api.get_shelf', 'shelf_api.get_shelf', ({(290, 26, 290, 33): 'request'}, {}), '(request)', False, 'from loaner.web_app.backend.api import shelf_api\n'), ((68, 23, 75, 5), 'loaner.web_app.backend.models.device_model.Device', 'device_model.Device', (), '', False, 'from 
loaner.web_app.backend.models import device_model\n'), ((76, 23, 83, 5), 'loaner.web_app.backend.models.device_model.Device', 'device_model.Device', (), '', False, 'from loaner.web_app.backend.models import device_model\n'), ((84, 23, 92, 5), 'loaner.web_app.backend.models.device_model.Device', 'device_model.Device', (), '', False, 'from loaner.web_app.backend.models import device_model\n'), ((93, 23, 101, 5), 'loaner.web_app.backend.models.device_model.Device', 'device_model.Device', (), '', False, 'from loaner.web_app.backend.models import device_model\n'), ((154, 22, 154, 65), 'loaner.web_app.backend.api.messages.shelf_messages.ShelfRequest', 'shelf_messages.ShelfRequest', (), '', False, 'from loaner.web_app.backend.api.messages import shelf_messages\n'), ((193, 14, 196, 41), 'loaner.web_app.backend.api.messages.shared_messages.SearchRequest', 'shared_messages.SearchRequest', (), '', False, 'from loaner.web_app.backend.api.messages import shared_messages\n'), ((225, 22, 225, 65), 'loaner.web_app.backend.api.messages.shelf_messages.ShelfRequest', 'shelf_messages.ShelfRequest', (), '', False, 'from loaner.web_app.backend.api.messages import shelf_messages\n'), ((239, 22, 239, 65), 'loaner.web_app.backend.api.messages.shelf_messages.ShelfRequest', 'shelf_messages.ShelfRequest', (), '', False, 'from loaner.web_app.backend.api.messages import shelf_messages\n'), ((265, 22, 265, 79), 'loaner.web_app.backend.api.messages.shelf_messages.ShelfRequest', 'shelf_messages.ShelfRequest', (), '', False, 'from loaner.web_app.backend.api.messages import shelf_messages\n'), ((167, 16, 168, 54), 'loaner.web_app.backend.api.messages.shared_messages.SearchRequest', 'shared_messages.SearchRequest', (), '', False, 'from loaner.web_app.backend.api.messages import shared_messages\n'), ((170, 16, 171, 43), 'loaner.web_app.backend.api.messages.shared_messages.SearchRequest', 'shared_messages.SearchRequest', (), '', False, 'from loaner.web_app.backend.api.messages import shared_messages\n')] |
ArmandDS/ai_bert_resumes | app/views/main.py | 743f37049bbca67bcbbaf21a2ffecf0d093351df | from flask import render_template, jsonify, Flask, redirect, url_for, request
from app import app
import random
import os
# import tensorflow as tf
# import numpy as np
# import sys
# import spacy
# nlp = spacy.load('en')
# sys.path.insert(0, "/content/bert_experimental")
# from bert_experimental.finetuning.text_preprocessing import build_preprocessor
# from bert_experimental.finetuning.graph_ops import load_graph
# restored_graph = load_graph("models/frozen_graph.pb")
# graph_ops = restored_graph.get_operations()
# input_op, output_op = graph_ops[0].name, graph_ops[-1].name
# x = restored_graph.get_tensor_by_name(input_op + ':0')
# y = restored_graph.get_tensor_by_name(output_op + ':0')
# preprocessor = build_preprocessor("./uncased_L-12_H-768_A-12/vocab.txt", 256)
# py_func = tf.numpy_function(preprocessor, [x], [tf.int32, tf.int32, tf.int32], name='preprocessor')
# py_func = tf.numpy_function(preprocessor, [x], [tf.int32, tf.int32, tf.int32])
# sess = tf.Session(graph=restored_graph)
# delimiter = " ||| "
@app.route('/')
def index1():
return render_template('index.html', title='Home')
@app.route('/predict', methods = ['GET', 'POST'])
def upload_file():
if request.method == 'POST':
exp_st = request.form.get('exp')
job_st = request.form.get('job')
# y_out = sess.run(y, feed_dict={
# x: pd.DataFrame([delimiter.join((exp_st, job_st ))], columns=['name'])
# })
# doc1 = nlp(exp_st)
# doc2 = nlp(job_st )
# y_out2 = doc1.similarity(doc2)
        return render_template('index.html', title='Success', predictions=80, predictions_sp=75, exp=exp_st, job=job_st)
    # On a plain GET, fall back to the form page so the view never returns None.
    return render_template('index.html', title='Home')
@app.route('/index')
def index():
return render_template('index.html', title='Home')
@app.route('/map')
def map():
return render_template('map.html', title='Map')
@app.route('/map/refresh', methods=['POST'])
def map_refresh():
points = [(random.uniform(48.8434100, 48.8634100),
random.uniform(2.3388000, 2.3588000))
for _ in range(random.randint(2, 9))]
return jsonify({'points': points})
@app.route('/contact')
def contact():
return render_template('contact.html', title='Contact') | [((27, 1, 27, 15), 'app.app.route', 'app.route', ({(27, 11, 27, 14): '"""/"""'}, {}), "('/')", False, 'from app import app\n'), ((32, 1, 32, 49), 'app.app.route', 'app.route', (), '', False, 'from app import app\n'), ((46, 1, 46, 20), 'app.app.route', 'app.route', ({(46, 11, 46, 19): '"""/index"""'}, {}), "('/index')", False, 'from app import app\n'), ((50, 1, 50, 18), 'app.app.route', 'app.route', ({(50, 11, 50, 17): '"""/map"""'}, {}), "('/map')", False, 'from app import app\n'), ((55, 1, 55, 44), 'app.app.route', 'app.route', (), '', False, 'from app import app\n'), ((63, 1, 63, 22), 'app.app.route', 'app.route', ({(63, 11, 63, 21): '"""/contact"""'}, {}), "('/contact')", False, 'from app import app\n'), ((29, 11, 29, 54), 'flask.render_template', 'render_template', (), '', False, 'from flask import render_template, jsonify, Flask, redirect, url_for, request\n'), ((43, 10, 43, 117), 'flask.render_template', 'render_template', (), '', False, 'from flask import render_template, jsonify, Flask, redirect, url_for, request\n'), ((48, 11, 48, 54), 'flask.render_template', 'render_template', (), '', False, 'from flask import render_template, jsonify, Flask, redirect, url_for, request\n'), ((52, 11, 52, 51), 'flask.render_template', 'render_template', (), '', False, 'from flask import render_template, jsonify, Flask, redirect, url_for, request\n'), ((60, 11, 60, 38), 'flask.jsonify', 'jsonify', ({(60, 19, 60, 37): "{'points': points}"}, {}), "({'points': points})", False, 'from flask import render_template, jsonify, Flask, redirect, url_for, request\n'), ((65, 11, 65, 59), 'flask.render_template', 'render_template', (), '', False, 'from flask import render_template, jsonify, Flask, redirect, url_for, request\n'), ((35, 15, 35, 38), 'flask.request.form.get', 'request.form.get', ({(35, 32, 35, 37): '"""exp"""'}, {}), "('exp')", False, 'from flask import render_template, jsonify, Flask, redirect, url_for, request\n'), ((36, 15, 36, 38), 'flask.request.form.get', 'request.form.get', ({(36, 32, 36, 37): '"""job"""'}, {}), "('job')", False, 'from flask import render_template, jsonify, Flask, redirect, url_for, request\n'), ((57, 15, 57, 53), 'random.uniform', 'random.uniform', ({(57, 30, 57, 40): '(48.84341)', (57, 42, 57, 52): '(48.86341)'}, {}), '(48.84341, 48.86341)', False, 'import random\n'), ((58, 15, 58, 51), 'random.uniform', 'random.uniform', ({(58, 30, 58, 39): '(2.3388)', (58, 41, 58, 50): '(2.3588)'}, {}), '(2.3388, 2.3588)', False, 'import random\n'), ((59, 29, 59, 49), 'random.randint', 'random.randint', ({(59, 44, 59, 45): '(2)', (59, 47, 59, 48): '(9)'}, {}), '(2, 9)', False, 'import random\n')] |
jaluebbe/ahrs | ahrs/filters/complementary.py | 4b4a33b1006e0d455a71ac8379a2697202361758 | # -*- coding: utf-8 -*-
"""
Complementary Filter
====================
Attitude quaternion obtained with gyroscope and accelerometer-magnetometer
measurements, via complementary filter.
First, the current orientation is estimated at time :math:`t`, from a previous
orientation at time :math:`t-1`, and a given angular velocity,
:math:`\\omega`, in rad/s.
This orientation is computed by numerically integrating the angular velocity
and adding it to the previous orientation, which is known as an **attitude
propagation**.
.. math::
\\begin{array}{rcl}
\\mathbf{q}_\\omega &=& \\Big(\\mathbf{I}_4 + \\frac{\\Delta t}{2}\\boldsymbol\\Omega_t\\Big)\\mathbf{q}_{t-1} \\\\
&=&
\\begin{bmatrix}
1 & -\\frac{\\Delta t}{2}\\omega_x & -\\frac{\\Delta t}{2}\\omega_y & -\\frac{\\Delta t}{2}\\omega_z \\\\
\\frac{\\Delta t}{2}\\omega_x & 1 & \\frac{\\Delta t}{2}\\omega_z & -\\frac{\\Delta t}{2}\\omega_y \\\\
\\frac{\\Delta t}{2}\\omega_y & -\\frac{\\Delta t}{2}\\omega_z & 1 & \\frac{\\Delta t}{2}\\omega_x \\\\
\\frac{\\Delta t}{2}\\omega_z & \\frac{\\Delta t}{2}\\omega_y & -\\frac{\\Delta t}{2}\\omega_x & 1
\\end{bmatrix}
\\begin{bmatrix}q_w \\\\ q_x \\\\ q_y \\\\ q_z \\end{bmatrix} \\\\
&=&
\\begin{bmatrix}
q_w - \\frac{\\Delta t}{2} \\omega_x q_x - \\frac{\\Delta t}{2} \\omega_y q_y - \\frac{\\Delta t}{2} \\omega_z q_z\\\\
q_x + \\frac{\\Delta t}{2} \\omega_x q_w - \\frac{\\Delta t}{2} \\omega_y q_z + \\frac{\\Delta t}{2} \\omega_z q_y\\\\
q_y + \\frac{\\Delta t}{2} \\omega_x q_z + \\frac{\\Delta t}{2} \\omega_y q_w - \\frac{\\Delta t}{2} \\omega_z q_x\\\\
q_z - \\frac{\\Delta t}{2} \\omega_x q_y + \\frac{\\Delta t}{2} \\omega_y q_x + \\frac{\\Delta t}{2} \\omega_z q_w
\\end{bmatrix}
\\end{array}
Secondly, the *tilt* is computed from the accelerometer measurements as:
.. math::
\\begin{array}{rcl}
\\theta &=& \\mathrm{arctan2}(a_y, a_z) \\\\
\\phi &=& \\mathrm{arctan2}\\big(-a_x, \\sqrt{a_y^2+a_z^2}\\big)
\\end{array}
Only the pitch, :math:`\\phi`, and roll, :math:`\\theta`, angles are computed,
leaving the yaw angle, :math:`\\psi` equal to zero.
If a magnetometer sample is available, the yaw angle can be computed. First
compensate the measurement using the *tilt*:
.. math::
\\begin{array}{rcl}
\\mathbf{b} &=&
\\begin{bmatrix}
\\cos\\theta & \\sin\\theta\\sin\\phi & \\sin\\theta\\cos\\phi \\\\
0 & \\cos\\phi & -\\sin\\phi \\\\
-\\sin\\theta & \\cos\\theta\\sin\\phi & \\cos\\theta\\cos\\phi
\\end{bmatrix}
\\begin{bmatrix}m_x \\\\ m_y \\\\ m_z\\end{bmatrix} \\\\
\\begin{bmatrix}b_x \\\\ b_y \\\\ b_z\\end{bmatrix} &=&
\\begin{bmatrix}
m_x\\cos\\theta + m_y\\sin\\theta\\sin\\phi + m_z\\sin\\theta\\cos\\phi \\\\
m_y\\cos\\phi - m_z\\sin\\phi \\\\
-m_x\\sin\\theta + m_y\\cos\\theta\\sin\\phi + m_z\\cos\\theta\\cos\\phi
\\end{bmatrix}
\\end{array}
Then, the yaw angle, :math:`\\psi`, is obtained as:
.. math::
\\begin{array}{rcl}
\\psi &=& \\mathrm{arctan2}(-b_y, b_x) \\\\
&=& \\mathrm{arctan2}\\big(m_z\\sin\\phi - m_y\\cos\\phi, \\; m_x\\cos\\theta + \\sin\\theta(m_y\\sin\\phi + m_z\\cos\\phi)\\big)
\\end{array}
We transform the roll-pitch-yaw angles to a quaternion representation:
.. math::
\\mathbf{q}_{am} =
\\begin{pmatrix}q_w\\\\q_x\\\\q_y\\\\q_z\\end{pmatrix} =
\\begin{pmatrix}
\\cos\\Big(\\frac{\\phi}{2}\\Big)\\cos\\Big(\\frac{\\theta}{2}\\Big)\\cos\\Big(\\frac{\\psi}{2}\\Big) + \\sin\\Big(\\frac{\\phi}{2}\\Big)\\sin\\Big(\\frac{\\theta}{2}\\Big)\\sin\\Big(\\frac{\\psi}{2}\\Big) \\\\
\\sin\\Big(\\frac{\\phi}{2}\\Big)\\cos\\Big(\\frac{\\theta}{2}\\Big)\\cos\\Big(\\frac{\\psi}{2}\\Big) - \\cos\\Big(\\frac{\\phi}{2}\\Big)\\sin\\Big(\\frac{\\theta}{2}\\Big)\\sin\\Big(\\frac{\\psi}{2}\\Big) \\\\
\\cos\\Big(\\frac{\\phi}{2}\\Big)\\sin\\Big(\\frac{\\theta}{2}\\Big)\\cos\\Big(\\frac{\\psi}{2}\\Big) + \\sin\\Big(\\frac{\\phi}{2}\\Big)\\cos\\Big(\\frac{\\theta}{2}\\Big)\\sin\\Big(\\frac{\\psi}{2}\\Big) \\\\
\\cos\\Big(\\frac{\\phi}{2}\\Big)\\cos\\Big(\\frac{\\theta}{2}\\Big)\\sin\\Big(\\frac{\\psi}{2}\\Big) - \\sin\\Big(\\frac{\\phi}{2}\\Big)\\sin\\Big(\\frac{\\theta}{2}\\Big)\\cos\\Big(\\frac{\\psi}{2}\\Big)
\\end{pmatrix}
Finally, after each orientation is estimated independently, they are fused with
the complementary filter.
.. math::
\\mathbf{q} = (1 - \\alpha) \\mathbf{q}_\\omega + \\alpha\\mathbf{q}_{am}
where :math:`\\mathbf{q}_\\omega` is the attitude estimated from the gyroscope,
:math:`\\mathbf{q}_{am}` is the attitude estimated from the accelerometer and
the magnetometer, and :math:`\\alpha` is the gain of the filter.
The filter gain must be a floating value within the range :math:`[0.0, 1.0]`.
It can be seen that when :math:`\\alpha=1`, the attitude is estimated entirely
with the accelerometer and the magnetometer. When :math:`\\alpha=0`, it is
estimated solely with the gyroscope. The values within the range decide how
much of each estimation is "blended" into the quaternion.
This is actually a simple implementation of `LERP
<https://en.wikipedia.org/wiki/Linear_interpolation>`_ commonly used to
linearly interpolate quaternions with small differences between them.
"""
import numpy as np
from ..common.orientation import ecompass
class Complementary:
"""
Complementary filter for attitude estimation as quaternion.
Parameters
----------
gyr : numpy.ndarray, default: None
N-by-3 array with measurements of angular velocity, in rad/s.
acc : numpy.ndarray, default: None
N-by-3 array with measurements of acceleration, in m/s^2.
mag : numpy.ndarray, default: None
N-by-3 array with measurements of magnetic field, in mT.
frequency : float, default: 100.0
        Sampling frequency in Hertz.
Dt : float, default: 0.01
Sampling step in seconds. Inverse of sampling frequency. Not required
if ``frequency`` value is given.
    gain : float, default: 0.9
Filter gain.
q0 : numpy.ndarray, default: None
Initial orientation, as a versor (normalized quaternion).
Raises
------
ValueError
When dimension of input arrays ``acc``, ``gyr``, or ``mag`` are not equal.
"""
def __init__(self,
gyr: np.ndarray = None,
acc: np.ndarray = None,
mag: np.ndarray = None,
frequency: float = 100.0,
                 gain: float = 0.9,
**kwargs):
self.gyr: np.ndarray = gyr
self.acc: np.ndarray = acc
self.mag: np.ndarray = mag
self.frequency: float = frequency
self.gain: float = gain
if not(0.0 <= self.gain <= 1.0):
raise ValueError(f"Filter gain must be in the range [0, 1]. Got {self.gain}")
self.Dt: float = kwargs.get('Dt', 1.0/self.frequency)
self.q0: np.ndarray = kwargs.get('q0')
# Process of given data
if self.gyr is not None and self.acc is not None:
self.Q = self._compute_all()
def _compute_all(self) -> np.ndarray:
"""
Estimate the quaternions given all data
Attributes ``gyr``, ``acc`` and, optionally, ``mag`` must contain data.
Returns
-------
Q : numpy.ndarray
M-by-4 Array with all estimated quaternions, where M is the number
of samples.
"""
if self.acc.shape != self.gyr.shape:
raise ValueError("acc and gyr are not the same size")
num_samples = len(self.acc)
Q = np.zeros((num_samples, 4))
if self.mag is None:
self.mag = [None]*num_samples
else:
if self.mag.shape != self.gyr.shape:
raise ValueError("mag and gyr are not the same size")
Q[0] = self.am_estimation(self.acc[0], self.mag[0]) if self.q0 is None else self.q0.copy()
for t in range(1, num_samples):
Q[t] = self.update(Q[t-1], self.gyr[t], self.acc[t], self.mag[t])
return Q
def attitude_propagation(self, q: np.ndarray, omega: np.ndarray, dt: float) -> np.ndarray:
"""
Attitude propagation of the orientation.
Estimate the current orientation at time :math:`t`, from a given
orientation at time :math:`t-1` and a given angular velocity,
:math:`\\omega`, in rad/s.
It is computed by numerically integrating the angular velocity and
adding it to the previous orientation.
Parameters
----------
q : numpy.ndarray
A-priori quaternion.
omega : numpy.ndarray
Tri-axial angular velocity, in rad/s.
dt : float
Time step, in seconds, between consecutive Quaternions.
Returns
-------
q_omega : numpy.ndarray
Estimated orientation, as quaternion.
"""
w = -0.5*dt*omega
A = np.array([
[1.0, -w[0], -w[1], -w[2]],
[w[0], 1.0, w[2], -w[1]],
[w[1], -w[2], 1.0, w[0]],
[w[2], w[1], -w[0], 1.0]])
q_omega = A @ q
return q_omega / np.linalg.norm(q_omega)
def am_estimation(self, acc: np.ndarray, mag: np.ndarray = None) -> np.ndarray:
"""
Attitude estimation from an Accelerometer-Magnetometer architecture.
Parameters
----------
acc : numpy.ndarray
Tri-axial sample of the accelerometer.
mag : numpy.ndarray, default: None
Tri-axial sample of the magnetometer.
Returns
-------
q_am : numpy.ndarray
Estimated attitude.
"""
return ecompass(acc, mag, frame='NED', representation='quaternion')
def update(self, q: np.ndarray, gyr: np.ndarray, acc: np.ndarray, mag: np.ndarray = None, dt: float = None) -> np.ndarray:
"""
Attitude Estimation from given measurements and previous orientation.
The new orientation is first estimated with the angular velocity, then
another orientation is computed using the accelerometers and
magnetometers. The magnetometer is optional.
Each orientation is estimated independently and fused with a
complementary filter.
.. math::
\\mathbf{q} = (1 - \\alpha) \\mathbf{q}_\\omega + \\alpha\\mathbf{q}_{am}
Parameters
----------
q : numpy.ndarray
A-priori quaternion.
gyr : numpy.ndarray
Sample of tri-axial Gyroscope in rad/s.
acc : numpy.ndarray
Sample of tri-axial Accelerometer in m/s^2.
mag : numpy.ndarray, default: None
Sample of tri-axial Magnetometer in uT.
dt : float, default: None
Time step, in seconds, between consecutive Quaternions.
Returns
-------
q : numpy.ndarray
Estimated quaternion.
"""
dt = self.Dt if dt is None else dt
if gyr is None or not np.linalg.norm(gyr) > 0:
return q
q_omega = self.attitude_propagation(q, gyr, dt)
q_am = self.am_estimation(acc, mag)
# Complementary Estimation
if np.linalg.norm(q_omega + q_am) < np.sqrt(2):
q = (1.0 - self.gain)*q_omega - self.gain*q_am
else:
q = (1.0 - self.gain)*q_omega + self.gain*q_am
return q/np.linalg.norm(q)
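
# ---------------------------------------------------------------------------
# Editorial usage sketch (not part of the original module). Batch estimation:
# passing full sensor arrays to the constructor fills ``Q`` with one quaternion
# per sample. The arrays below are placeholders; any N-by-3 gyroscope (rad/s),
# accelerometer (m/s^2) and magnetometer arrays would do.
#
#     gyr_data = np.zeros((100, 3))
#     acc_data = np.tile([0.0, 0.0, 9.81], (100, 1))
#     mag_data = np.tile([21.0, 1.5, 43.0], (100, 1))
#     estimator = Complementary(gyr=gyr_data, acc=acc_data, mag=mag_data,
#                               frequency=100.0, gain=0.9)
#     quaternions = estimator.Q        # shape (100, 4)
# ---------------------------------------------------------------------------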
| [((177, 12, 177, 38), 'numpy.zeros', 'np.zeros', ({(177, 21, 177, 37): '(num_samples, 4)'}, {}), '((num_samples, 4))', True, 'import numpy as np\n'), ((214, 12, 218, 41), 'numpy.array', 'np.array', ({(214, 21, 218, 40): '[[1.0, -w[0], -w[1], -w[2]], [w[0], 1.0, w[2], -w[1]], [w[1], -w[2], 1.0, w\n [0]], [w[2], w[1], -w[0], 1.0]]'}, {}), '([[1.0, -w[0], -w[1], -w[2]], [w[0], 1.0, w[2], -w[1]], [w[1], -w[2\n ], 1.0, w[0]], [w[2], w[1], -w[0], 1.0]])', True, 'import numpy as np\n'), ((220, 25, 220, 48), 'numpy.linalg.norm', 'np.linalg.norm', ({(220, 40, 220, 47): 'q_omega'}, {}), '(q_omega)', True, 'import numpy as np\n'), ((279, 11, 279, 41), 'numpy.linalg.norm', 'np.linalg.norm', ({(279, 26, 279, 40): '(q_omega + q_am)'}, {}), '(q_omega + q_am)', True, 'import numpy as np\n'), ((279, 44, 279, 54), 'numpy.sqrt', 'np.sqrt', ({(279, 52, 279, 53): '(2)'}, {}), '(2)', True, 'import numpy as np\n'), ((283, 17, 283, 34), 'numpy.linalg.norm', 'np.linalg.norm', ({(283, 32, 283, 33): 'q'}, {}), '(q)', True, 'import numpy as np\n'), ((274, 30, 274, 49), 'numpy.linalg.norm', 'np.linalg.norm', ({(274, 45, 274, 48): 'gyr'}, {}), '(gyr)', True, 'import numpy as np\n')] |
yndu13/aliyun-openapi-python-sdk | aliyun-python-sdk-ehpc/aliyunsdkehpc/request/v20180412/EditJobTemplateRequest.py | 12ace4fb39fe2fb0e3927a4b1b43ee4872da43f5 | # Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
#
# http://www.apache.org/licenses/LICENSE-2.0
#
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from aliyunsdkcore.request import RpcRequest
from aliyunsdkehpc.endpoint import endpoint_data
class EditJobTemplateRequest(RpcRequest):
def __init__(self):
RpcRequest.__init__(self, 'EHPC', '2018-04-12', 'EditJobTemplate')
self.set_method('GET')
if hasattr(self, "endpoint_map"):
setattr(self, "endpoint_map", endpoint_data.getEndpointMap())
if hasattr(self, "endpoint_regional"):
setattr(self, "endpoint_regional", endpoint_data.getEndpointRegional())
def get_StderrRedirectPath(self):
return self.get_query_params().get('StderrRedirectPath')
def set_StderrRedirectPath(self,StderrRedirectPath):
self.add_query_param('StderrRedirectPath',StderrRedirectPath)
def get_ClockTime(self):
return self.get_query_params().get('ClockTime')
def set_ClockTime(self,ClockTime):
self.add_query_param('ClockTime',ClockTime)
def get_CommandLine(self):
return self.get_query_params().get('CommandLine')
def set_CommandLine(self,CommandLine):
self.add_query_param('CommandLine',CommandLine)
def get_ArrayRequest(self):
return self.get_query_params().get('ArrayRequest')
def set_ArrayRequest(self,ArrayRequest):
self.add_query_param('ArrayRequest',ArrayRequest)
def get_PackagePath(self):
return self.get_query_params().get('PackagePath')
def set_PackagePath(self,PackagePath):
self.add_query_param('PackagePath',PackagePath)
def get_Mem(self):
return self.get_query_params().get('Mem')
def set_Mem(self,Mem):
self.add_query_param('Mem',Mem)
def get_StdoutRedirectPath(self):
return self.get_query_params().get('StdoutRedirectPath')
def set_StdoutRedirectPath(self,StdoutRedirectPath):
self.add_query_param('StdoutRedirectPath',StdoutRedirectPath)
def get_Variables(self):
return self.get_query_params().get('Variables')
def set_Variables(self,Variables):
self.add_query_param('Variables',Variables)
def get_RunasUser(self):
return self.get_query_params().get('RunasUser')
def set_RunasUser(self,RunasUser):
self.add_query_param('RunasUser',RunasUser)
def get_ReRunable(self):
return self.get_query_params().get('ReRunable')
def set_ReRunable(self,ReRunable):
self.add_query_param('ReRunable',ReRunable)
def get_Thread(self):
return self.get_query_params().get('Thread')
def set_Thread(self,Thread):
self.add_query_param('Thread',Thread)
def get_TemplateId(self):
return self.get_query_params().get('TemplateId')
def set_TemplateId(self,TemplateId):
self.add_query_param('TemplateId',TemplateId)
def get_Priority(self):
return self.get_query_params().get('Priority')
def set_Priority(self,Priority):
self.add_query_param('Priority',Priority)
def get_Gpu(self):
return self.get_query_params().get('Gpu')
def set_Gpu(self,Gpu):
self.add_query_param('Gpu',Gpu)
def get_Node(self):
return self.get_query_params().get('Node')
def set_Node(self,Node):
self.add_query_param('Node',Node)
def get_Task(self):
return self.get_query_params().get('Task')
def set_Task(self,Task):
self.add_query_param('Task',Task)
def get_Name(self):
return self.get_query_params().get('Name')
def set_Name(self,Name):
self.add_query_param('Name',Name)
def get_Queue(self):
return self.get_query_params().get('Queue')
def set_Queue(self,Queue):
self.add_query_param('Queue',Queue) | [((26, 2, 26, 68), 'aliyunsdkcore.request.RpcRequest.__init__', 'RpcRequest.__init__', ({(26, 22, 26, 26): 'self', (26, 28, 26, 34): '"""EHPC"""', (26, 36, 26, 48): '"""2018-04-12"""', (26, 50, 26, 67): '"""EditJobTemplate"""'}, {}), "(self, 'EHPC', '2018-04-12', 'EditJobTemplate')", False, 'from aliyunsdkcore.request import RpcRequest\n'), ((29, 33, 29, 63), 'aliyunsdkehpc.endpoint.endpoint_data.getEndpointMap', 'endpoint_data.getEndpointMap', ({}, {}), '()', False, 'from aliyunsdkehpc.endpoint import endpoint_data\n'), ((31, 38, 31, 73), 'aliyunsdkehpc.endpoint.endpoint_data.getEndpointRegional', 'endpoint_data.getEndpointRegional', ({}, {}), '()', False, 'from aliyunsdkehpc.endpoint import endpoint_data\n')] |
angry-tony/ceph-lcm-decapod | tests/common/models/test_execution.py | 535944d3ee384c3a7c4af82f74041b0a7792433f | # -*- coding: utf-8 -*-
# Copyright (c) 2016 Mirantis Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Tests for decapod_common.models.execution."""
import pytest
from decapod_common.models import execution
def test_create(new_execution, new_pcmodel, pymongo_connection):
db_model = pymongo_connection.db.execution.find_one(
{"_id": new_execution._id}
)
assert db_model
assert new_execution.model_id == db_model["model_id"]
assert new_execution.version == db_model["version"]
assert new_execution.time_created == db_model["time_created"]
assert new_execution.time_deleted == db_model["time_deleted"]
assert new_execution.initiator_id == db_model["initiator_id"]
assert new_execution.playbook_configuration_model_id == \
db_model["pc_model_id"]
assert new_execution.playbook_configuration_version == \
db_model["pc_version"]
assert new_execution.state.name == db_model["state"]
assert new_execution.state == execution.ExecutionState.created
assert new_execution.playbook_configuration_model_id == \
new_pcmodel.model_id
assert new_execution.playbook_configuration_version == \
new_pcmodel.version
@pytest.mark.parametrize("state", execution.ExecutionState)
def test_change_state_ok(state, new_execution):
new_execution.state = state
new_execution.save()
assert new_execution.state == state
@pytest.mark.parametrize("state", (
"", "changed", "started", 0, None, -1.0, [], {}, object(), set()
))
def test_change_state_fail(state, new_execution):
with pytest.raises(ValueError):
new_execution.state = state
@pytest.mark.parametrize("state", execution.ExecutionState)
def test_api_response(state, new_pcmodel, new_execution):
new_execution.state = state
new_execution.save()
assert new_execution.make_api_structure() == {
"id": new_execution.model_id,
"initiator_id": new_execution.initiator_id,
"time_deleted": new_execution.time_deleted,
"time_updated": new_execution.time_created,
"model": execution.ExecutionModel.MODEL_NAME,
"version": 2,
"data": {
"playbook_configuration": {
"id": new_pcmodel.model_id,
"version": new_pcmodel.version,
"playbook_name": new_pcmodel.playbook_id
},
"state": state.name
}
}
def test_getting_logfile(new_execution, execution_log_storage):
new_execution.logfile
execution_log_storage.get.assert_called_once_with(new_execution.model_id)
def test_create_logfile(new_execution, execution_log_storage):
new_execution.new_logfile.write("1")
execution_log_storage.delete.assert_called_once_with(
new_execution.model_id
)
execution_log_storage.new_file.assert_called_once_with(
new_execution.model_id,
filename="{0}.log".format(new_execution.model_id),
content_type="text/plain"
)
execution_log_storage.new_file().write.assert_called_once_with("1")
| [((48, 1, 48, 59), 'pytest.mark.parametrize', 'pytest.mark.parametrize', ({(48, 25, 48, 32): '"""state"""', (48, 34, 48, 58): 'execution.ExecutionState'}, {}), "('state', execution.ExecutionState)", False, 'import pytest\n'), ((64, 1, 64, 59), 'pytest.mark.parametrize', 'pytest.mark.parametrize', ({(64, 25, 64, 32): '"""state"""', (64, 34, 64, 58): 'execution.ExecutionState'}, {}), "('state', execution.ExecutionState)", False, 'import pytest\n'), ((60, 9, 60, 34), 'pytest.raises', 'pytest.raises', ({(60, 23, 60, 33): 'ValueError'}, {}), '(ValueError)', False, 'import pytest\n')] |
Fahreeve/TaskManager | board/models.py | 7f0a16312b43867270eaade1fe153c07abc2c10e | from django.contrib.auth.models import User
from django.core.validators import MaxValueValidator, MinValueValidator
from django.db import models
from django.utils.translation import ugettext_lazy as _
class Task(models.Model):
CLOSE = 'cl'
CANCEL = 'ca'
LATER = 'la'
UNDEFINED = 'un'
CHOICES = (
(UNDEFINED, _("Неизвестно")),
(CLOSE, _("Завершить")),
(CANCEL, _("Отменить")),
(LATER, _("Отложить")),
)
title = models.CharField(_("Заголовок"), max_length=50)
description = models.TextField(_("Описание"))
executor = models.ForeignKey(User, verbose_name=_("Исполнитель"), on_delete=models.CASCADE)
status = models.CharField(_("Статус"), choices=CHOICES, default=UNDEFINED, max_length=2)
deadline = models.DateTimeField(_("Дедлайн"))
priority = models.IntegerField(_("Приоритет"), default=1, validators=[MinValueValidator(1), MaxValueValidator(3)])
changed = models.DateTimeField(_("Дата последнего изменения"), auto_now=True)
created = models.DateTimeField(_("Дата создания"), auto_now_add=True)
@property
def text_status(self):
choices = dict(self.CHOICES)
return choices[self.status]
@property
def text_deadline(self):
return self.deadline.strftime("%d.%m.%Y %H:%M")
class Comment(models.Model):
task = models.ForeignKey(Task, related_name="comments", on_delete=models.CASCADE)
creator = models.ForeignKey(User, on_delete=models.SET_NULL, null=True)
text = models.TextField(_('Комментарий'))
created = models.DateTimeField(_("Дата создания"), auto_now_add=True)
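
# Editorial usage sketch (not part of the original module). Illustrative only:
# the field values and the ``some_user`` variable are assumptions, not project
# code.
#
#     from django.utils import timezone
#     task = Task.objects.create(
#         title="Fix login form", description="Steps to reproduce...",
#         executor=some_user, deadline=timezone.now(), priority=2)
#     Comment.objects.create(task=task, creator=some_user, text="Taking this")
#     print(task.text_status, task.text_deadline)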
| [((39, 11, 39, 85), 'django.db.models.ForeignKey', 'models.ForeignKey', (), '', False, 'from django.db import models\n'), ((40, 14, 40, 75), 'django.db.models.ForeignKey', 'models.ForeignKey', (), '', False, 'from django.db import models\n'), ((19, 29, 19, 52), 'django.utils.translation.ugettext_lazy', '_', ({(19, 31, 19, 51): '"""Заголовок"""'}, {}), "('Заголовок')", True, 'from django.utils.translation import ugettext_lazy as _\n'), ((20, 35, 20, 56), 'django.utils.translation.ugettext_lazy', '_', ({(20, 37, 20, 55): '"""Описание"""'}, {}), "('Описание')", True, 'from django.utils.translation import ugettext_lazy as _\n'), ((22, 30, 22, 47), 'django.utils.translation.ugettext_lazy', '_', ({(22, 32, 22, 46): '"""Статус"""'}, {}), "('Статус')", True, 'from django.utils.translation import ugettext_lazy as _\n'), ((23, 36, 23, 55), 'django.utils.translation.ugettext_lazy', '_', ({(23, 38, 23, 54): '"""Дедлайн"""'}, {}), "('Дедлайн')", True, 'from django.utils.translation import ugettext_lazy as _\n'), ((24, 35, 24, 58), 'django.utils.translation.ugettext_lazy', '_', ({(24, 37, 24, 57): '"""Приоритет"""'}, {}), "('Приоритет')", True, 'from django.utils.translation import ugettext_lazy as _\n'), ((25, 35, 25, 88), 'django.utils.translation.ugettext_lazy', '_', ({(25, 37, 25, 87): '"""Дата последнего изменения"""'}, {}), "('Дата последнего изменения')", True, 'from django.utils.translation import ugettext_lazy as _\n'), ((26, 35, 26, 65), 'django.utils.translation.ugettext_lazy', '_', ({(26, 37, 26, 64): '"""Дата создания"""'}, {}), "('Дата создания')", True, 'from django.utils.translation import ugettext_lazy as _\n'), ((41, 28, 41, 55), 'django.utils.translation.ugettext_lazy', '_', ({(41, 30, 41, 54): '"""Комментарий"""'}, {}), "('Комментарий')", True, 'from django.utils.translation import ugettext_lazy as _\n'), ((42, 35, 42, 65), 'django.utils.translation.ugettext_lazy', '_', ({(42, 37, 42, 64): '"""Дата создания"""'}, {}), "('Дата создания')", True, 'from django.utils.translation import ugettext_lazy as _\n'), ((13, 20, 13, 45), 'django.utils.translation.ugettext_lazy', '_', ({(13, 22, 13, 44): '"""Неизвестно"""'}, {}), "('Неизвестно')", True, 'from django.utils.translation import ugettext_lazy as _\n'), ((14, 16, 14, 39), 'django.utils.translation.ugettext_lazy', '_', ({(14, 18, 14, 38): '"""Завершить"""'}, {}), "('Завершить')", True, 'from django.utils.translation import ugettext_lazy as _\n'), ((15, 17, 15, 38), 'django.utils.translation.ugettext_lazy', '_', ({(15, 19, 15, 37): '"""Отменить"""'}, {}), "('Отменить')", True, 'from django.utils.translation import ugettext_lazy as _\n'), ((16, 16, 16, 37), 'django.utils.translation.ugettext_lazy', '_', ({(16, 18, 16, 36): '"""Отложить"""'}, {}), "('Отложить')", True, 'from django.utils.translation import ugettext_lazy as _\n'), ((21, 52, 21, 79), 'django.utils.translation.ugettext_lazy', '_', ({(21, 54, 21, 78): '"""Исполнитель"""'}, {}), "('Исполнитель')", True, 'from django.utils.translation import ugettext_lazy as _\n'), ((24, 83, 24, 103), 'django.core.validators.MinValueValidator', 'MinValueValidator', ({(24, 101, 24, 102): '1'}, {}), '(1)', False, 'from django.core.validators import MaxValueValidator, MinValueValidator\n'), ((24, 105, 24, 125), 'django.core.validators.MaxValueValidator', 'MaxValueValidator', ({(24, 123, 24, 124): '3'}, {}), '(3)', False, 'from django.core.validators import MaxValueValidator, MinValueValidator\n')] |
bicobus/Hexy | test/test_hex_line.py | e75d58e66546c278fb648af85e3f9dae53127826 | import numpy as np
import hexy as hx
def test_get_hex_line():
expected = [
[-3, 3, 0],
[-2, 2, 0],
[-1, 2, -1],
[0, 2, -2],
[1, 1, -2],
]
start = np.array([-3, 3, 0])
end = np.array([1, 1, -2])
print(hx.get_hex_line(start, end))
    print(expected)
    assert np.array_equal(
        hx.get_hex_line(start, end),
        expected)
if __name__ == "__main__":
test_get_hex_line()
| [((12, 12, 12, 32), 'numpy.array', 'np.array', ({(12, 21, 12, 31): '[-3, 3, 0]'}, {}), '([-3, 3, 0])', True, 'import numpy as np\n'), ((13, 10, 13, 30), 'numpy.array', 'np.array', ({(13, 19, 13, 29): '[1, 1, -2]'}, {}), '([1, 1, -2])', True, 'import numpy as np\n'), ((14, 10, 14, 37), 'hexy.get_hex_line', 'hx.get_hex_line', ({(14, 26, 14, 31): 'start', (14, 33, 14, 36): 'end'}, {}), '(start, end)', True, 'import hexy as hx\n'), ((17, 8, 17, 35), 'hexy.get_hex_line', 'hx.get_hex_line', ({(17, 24, 17, 29): 'start', (17, 31, 17, 34): 'end'}, {}), '(start, end)', True, 'import hexy as hx\n')] |
PaNOSC-ViNYL/wofry | wofry/propagator/propagators2D/integral.py | 779b5a738ee7738e959a58aafe01e7e49b03894a | # propagate_2D_integral: Simplification of the Kirchhoff-Fresnel integral. TODO: Very slow and give some problems
import numpy
from wofry.propagator.wavefront2D.generic_wavefront import GenericWavefront2D
from wofry.propagator.propagator import Propagator2D
# TODO: check resulting amplitude normalization (fft and srw likely agree, convolution gives too high amplitudes, so needs normalization)
class Integral2D(Propagator2D):
HANDLER_NAME = "INTEGRAL_2D"
def get_handler_name(self):
return self.HANDLER_NAME
def do_specific_progation_after(self, wavefront, propagation_distance, parameters, element_index=None):
return self.do_specific_progation(wavefront, propagation_distance, parameters, element_index=element_index)
def do_specific_progation_before(self, wavefront, propagation_distance, parameters, element_index=None):
return self.do_specific_progation( wavefront, propagation_distance, parameters, element_index=element_index)
"""
2D Fresnel-Kirchhoff propagator via simplified integral
NOTE: this propagator is experimental and much less performant than the ones using Fourier Optics
Therefore, it is not recommended to use.
:param wavefront:
:param propagation_distance: propagation distance
:param shuffle_interval: it is known that this method replicates the central diffraction spot
        The distance of the replica is proportional to 1/pixelsize.
        To avoid that, it is possible to change a bit (randomly) the coordinates
        of the wavefront. shuffle_interval controls this shift: 0=No shift. A typical
        value can be 1e5.
        The result shows a diffraction pattern without replica but with much noise.
:param calculate_grid_only: if set, it calculates only the horizontal and vertical profiles, but returns the
full image with the other pixels to zero. This is useful when calculating large arrays,
so it is set as the default.
:return: a new 2D wavefront object with propagated wavefront
"""
def do_specific_progation(self, wavefront, propagation_distance, parameters, element_index=None):
shuffle_interval = self.get_additional_parameter("shuffle_interval",False,parameters,element_index=element_index)
calculate_grid_only = self.get_additional_parameter("calculate_grid_only",True,parameters,element_index=element_index)
return self.propagate_wavefront(wavefront,propagation_distance,shuffle_interval=shuffle_interval,
calculate_grid_only=calculate_grid_only)
@classmethod
def propagate_wavefront(cls,wavefront,propagation_distance,shuffle_interval=False,calculate_grid_only=True):
#
# Fresnel-Kirchhoff integral (neglecting inclination factor)
#
if not calculate_grid_only:
#
# calculation over the whole detector area
#
p_x = wavefront.get_coordinate_x()
p_y = wavefront.get_coordinate_y()
wavelength = wavefront.get_wavelength()
amplitude = wavefront.get_complex_amplitude()
det_x = p_x.copy()
det_y = p_y.copy()
p_X = wavefront.get_mesh_x()
p_Y = wavefront.get_mesh_y()
det_X = p_X
det_Y = p_Y
amplitude_propagated = numpy.zeros_like(amplitude,dtype='complex')
wavenumber = 2 * numpy.pi / wavelength
for i in range(det_x.size):
for j in range(det_y.size):
if not shuffle_interval:
rd_x = 0.0
rd_y = 0.0
else:
rd_x = (numpy.random.rand(p_x.size,p_y.size)-0.5)*shuffle_interval
rd_y = (numpy.random.rand(p_x.size,p_y.size)-0.5)*shuffle_interval
r = numpy.sqrt( numpy.power(p_X + rd_x - det_X[i,j],2) +
numpy.power(p_Y + rd_y - det_Y[i,j],2) +
numpy.power(propagation_distance,2) )
amplitude_propagated[i,j] = (amplitude / r * numpy.exp(1.j * wavenumber * r)).sum()
output_wavefront = GenericWavefront2D.initialize_wavefront_from_arrays(det_x,det_y,amplitude_propagated)
else:
x = wavefront.get_coordinate_x()
y = wavefront.get_coordinate_y()
X = wavefront.get_mesh_x()
Y = wavefront.get_mesh_y()
wavenumber = 2 * numpy.pi / wavefront.get_wavelength()
amplitude = wavefront.get_complex_amplitude()
used_indices = wavefront.get_mask_grid(width_in_pixels=(1,1),number_of_lines=(1,1))
indices_x = wavefront.get_mesh_indices_x()
indices_y = wavefront.get_mesh_indices_y()
indices_x_flatten = indices_x[numpy.where(used_indices == 1)].flatten()
indices_y_flatten = indices_y[numpy.where(used_indices == 1)].flatten()
X_flatten = X[numpy.where(used_indices == 1)].flatten()
Y_flatten = Y[numpy.where(used_indices == 1)].flatten()
complex_amplitude_propagated = amplitude*0
print("propagate_2D_integral: Calculating %d points from a total of %d x %d = %d"%(
X_flatten.size,amplitude.shape[0],amplitude.shape[1],amplitude.shape[0]*amplitude.shape[1]))
for i in range(X_flatten.size):
r = numpy.sqrt( numpy.power(wavefront.get_mesh_x() - X_flatten[i],2) +
numpy.power(wavefront.get_mesh_y() - Y_flatten[i],2) +
numpy.power(propagation_distance,2) )
complex_amplitude_propagated[int(indices_x_flatten[i]),int(indices_y_flatten[i])] = (amplitude / r * numpy.exp(1.j * wavenumber * r)).sum()
output_wavefront = GenericWavefront2D.initialize_wavefront_from_arrays(x_array=x,
y_array=y,
z_array=complex_amplitude_propagated,
wavelength=wavefront.get_wavelength())
# added [email protected] 2018-03-23 to conserve energy - TODO: review method!
output_wavefront.rescale_amplitude( numpy.sqrt(wavefront.get_intensity().sum() /
output_wavefront.get_intensity().sum()))
return output_wavefront
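
# ---------------------------------------------------------------------------
# Editorial usage sketch (not part of the original module). The wavefront
# construction below only reuses the GenericWavefront2D call already used in
# this file; the grid size, Gaussian amplitude and 1 m propagation distance
# are illustrative assumptions.
#
#     import numpy
#     x = numpy.linspace(-5e-4, 5e-4, 200)
#     y = numpy.linspace(-5e-4, 5e-4, 200)
#     X, Y = numpy.meshgrid(x, y, indexing='ij')
#     amplitude = numpy.exp(-(X**2 + Y**2) / (2 * (1e-4)**2)).astype(complex)
#     wf = GenericWavefront2D.initialize_wavefront_from_arrays(x, y, amplitude)
#     propagated = Integral2D.propagate_wavefront(
#         wf, propagation_distance=1.0, calculate_grid_only=True)
#     intensity = propagated.get_intensity()
# ---------------------------------------------------------------------------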
| [((76, 35, 76, 78), 'numpy.zeros_like', 'numpy.zeros_like', (), '', False, 'import numpy\n'), ((95, 31, 95, 116), 'wofry.propagator.wavefront2D.generic_wavefront.GenericWavefront2D.initialize_wavefront_from_arrays', 'GenericWavefront2D.initialize_wavefront_from_arrays', ({(95, 83, 95, 88): 'det_x', (95, 89, 95, 94): 'det_y', (95, 95, 95, 115): 'amplitude_propagated'}, {}), '(det_x, det_y,\n amplitude_propagated)', False, 'from wofry.propagator.wavefront2D.generic_wavefront import GenericWavefront2D\n'), ((121, 32, 121, 67), 'numpy.power', 'numpy.power', ({(121, 44, 121, 64): 'propagation_distance', (121, 65, 121, 66): '2'}, {}), '(propagation_distance, 2)', False, 'import numpy\n'), ((91, 36, 91, 71), 'numpy.power', 'numpy.power', ({(91, 48, 91, 68): 'propagation_distance', (91, 69, 91, 70): '2'}, {}), '(propagation_distance, 2)', False, 'import numpy\n'), ((109, 42, 109, 72), 'numpy.where', 'numpy.where', ({(109, 54, 109, 71): 'used_indices == 1'}, {}), '(used_indices == 1)', False, 'import numpy\n'), ((110, 42, 110, 72), 'numpy.where', 'numpy.where', ({(110, 54, 110, 71): 'used_indices == 1'}, {}), '(used_indices == 1)', False, 'import numpy\n'), ((111, 42, 111, 72), 'numpy.where', 'numpy.where', ({(111, 54, 111, 71): 'used_indices == 1'}, {}), '(used_indices == 1)', False, 'import numpy\n'), ((112, 42, 112, 72), 'numpy.where', 'numpy.where', ({(112, 54, 112, 71): 'used_indices == 1'}, {}), '(used_indices == 1)', False, 'import numpy\n'), ((123, 117, 123, 149), 'numpy.exp', 'numpy.exp', ({(123, 127, 123, 148): '1.0j * wavenumber * r'}, {}), '(1.0j * wavenumber * r)', False, 'import numpy\n'), ((86, 32, 86, 68), 'numpy.random.rand', 'numpy.random.rand', ({(86, 50, 86, 58): 'p_x.size', (86, 59, 86, 67): 'p_y.size'}, {}), '(p_x.size, p_y.size)', False, 'import numpy\n'), ((87, 32, 87, 68), 'numpy.random.rand', 'numpy.random.rand', ({(87, 50, 87, 58): 'p_x.size', (87, 59, 87, 67): 'p_y.size'}, {}), '(p_x.size, p_y.size)', False, 'import numpy\n'), ((89, 36, 89, 74), 'numpy.power', 'numpy.power', ({(89, 48, 89, 71): 'p_X + rd_x - det_X[i, j]', (89, 72, 89, 73): '2'}, {}), '(p_X + rd_x - det_X[i, j], 2)', False, 'import numpy\n'), ((90, 36, 90, 74), 'numpy.power', 'numpy.power', ({(90, 48, 90, 71): 'p_Y + rd_y - det_Y[i, j]', (90, 72, 90, 73): '2'}, {}), '(p_Y + rd_y - det_Y[i, j], 2)', False, 'import numpy\n'), ((93, 65, 93, 97), 'numpy.exp', 'numpy.exp', ({(93, 75, 93, 96): '1.0j * wavenumber * r'}, {}), '(1.0j * wavenumber * r)', False, 'import numpy\n')] |
andor2718/LeetCode | Problems/Study Plans/Dynamic Programming/Dynamic Programming I/07_delete_and_earn.py | 59874f49085818e6da751f1cc26867b31079d35d | # https://leetcode.com/problems/delete-and-earn/
class Solution:
def deleteAndEarn(self, nums: list[int]) -> int:
num_profits = dict()
for num in nums:
num_profits[num] = num_profits.get(num, 0) + num
sorted_nums = sorted(num_profits.keys())
second_last_profit = 0
last_profit = num_profits[sorted_nums[0]]
for idx in range(1, len(sorted_nums)):
profit_with_curr_num = num_profits[sorted_nums[idx]]
if sorted_nums[idx - 1] == sorted_nums[idx] - 1:
curr_profit = max(last_profit,
second_last_profit + profit_with_curr_num)
else:
curr_profit = last_profit + profit_with_curr_num
second_last_profit, last_profit = last_profit, curr_profit
return last_profit
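# A quick sanity check (not part of the original solution), using the two examples
# from the problem statement: [3,4,2] -> 6 and [2,2,3,3,3,4] -> 9.
if __name__ == '__main__':
    solver = Solution()
    assert solver.deleteAndEarn([3, 4, 2]) == 6
    assert solver.deleteAndEarn([2, 2, 3, 3, 3, 4]) == 9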
| [] |
GabrielSanchesRosa/Python | Desafio051.py | 3a129e27e076b2a91af03d68ede50b9c45c50217 | # Desenvolva um programa que leia o primeiro termo e a razão de uma PA. No final mostre, os 10 primeiros termos dessa prograssão.
primeiro = int(input("Primeiro Termo: "))
razao = int(input("Razão: "))
decimo = primeiro + (10 - 1) * razao
for c in range(primeiro, decimo + razao, razao):
print(f"{c}", end=" -> ")
print("Acabou")
| [] |
tiddlyweb/tiddlyweb | tiddlyweb/filters/limit.py | 376bcad280e24d2de4d74883dc4d8369abcb2c28 | """
A :py:mod:`filter <tiddlyweb.filters>` type to limit a group of entities
using a syntax similar to SQL Limit::
limit=<index>,<count>
limit=<count>
"""
import itertools
def limit_parse(count='0'):
"""
Parse the argument of a ``limit`` :py:mod:`filter <tiddlyweb.filters>`
for a count and index argument, return a function which does the limiting.
Exceptions while parsing are passed up the stack.
"""
index = '0'
if ',' in count:
index, count = count.split(',', 1)
index = int(index)
count = int(count)
def limiter(entities, indexable=False, environ=None):
return limit(entities, index=index, count=count)
return limiter
def limit(entities, count=0, index=0):
"""
Make a slice of a list of entities based on a count and index.
"""
return itertools.islice(entities, index, index + count)
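# A minimal usage sketch (not part of the original module), assuming entities can
# be any iterable: "limit=2,3" keeps three items starting at index 2.
if __name__ == '__main__':
    limiter = limit_parse('2,3')
    print(list(limiter(range(10))))  # -> [2, 3, 4]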
| [((36, 11, 36, 59), 'itertools.islice', 'itertools.islice', ({(36, 28, 36, 36): 'entities', (36, 38, 36, 43): 'index', (36, 45, 36, 58): '(index + count)'}, {}), '(entities, index, index + count)', False, 'import itertools\n')] |
sonibla/pytorch_keras_converter | pytorch_keras_converter/API.py | 21925b67b6eb3cbbfa8eb6d33f682d57dafd357d | """
Simple API to convert models between PyTorch and Keras
(Conversions from Keras to PyTorch aren't implemented)
"""
from . import utility
from . import tests
from . import io_utils as utils
import tensorflow
def convert(model,
input_shape,
weights=True,
quiet=True,
ignore_tests=False,
input_range=None,
save=None,
filename=None,
directory=None):
"""
Conversion between PyTorch and Keras
(Conversions from Keras to PyTorch aren't implemented)
Arguments:
-model:
A Keras or PyTorch model or layer to convert
-input_shape:
Input shape (list, tuple or int), without batchsize.
-weights (bool):
Also convert weights. If set to false, only convert model
architecture
-quiet (bool):
            If True (default), suppress the progress bar and progress messages
-ignore_tests (bool):
If tests should be ignored.
            If set to True, the converted model will
            still be tested as a precaution; if the models are not identical it
            will only print a warning.
If set to False, and models are not identical, RuntimeWarning will
be raised
If weights is False, tests are automatically ignored
-input_range:
            Optional.
A list of 2 elements containing max and min values to give as
input to the model when performing the tests. If None, models will
be tested on samples from the "standard normal" distribution.
-save:
If model should be exported to a hdf5 file.
-filename:
Filename to give to model's hdf5 file. If filename is not None and
save is not False, then save will automatically be set to True
-directory:
Where to save model's hdf5 file. If directory is not None and
save is not False, then save will automatically be set to True
Raises:
-RuntimeWarning:
If converted and original model aren't identical, and ignore_tests
is False
Returns:
If model has been exported to a file, it will return the name of the
file
Else, it returns the converted model
"""
if (filename is not None or directory is not None) and save is None:
save = True
if save is None:
save = False
if weights == False:
ignore_tests = True
if not quiet:
print('\nConversion...')
# Converting:
newModel = utility.convert(model=utility.LayerRepresentation(model),
input_size=input_shape,
weights=weights,
quiet=quiet)
# Actually, newModel is a LayerRepresentation object
# Equivalents:
torchModel = newModel.equivalent['torch']
kerasModel = newModel.equivalent['keras']
if not quiet:
print('Automatically testing converted model reliability...\n')
# Checking converted model reliability
tested = False
try:
meanSquaredError = tests.comparison(model1=torchModel,
model2=kerasModel,
input_shape=input_shape,
input_range=input_range,
quiet=quiet)
tested = True
except tensorflow.errors.InvalidArgumentError:
print("Warning: tests unavailable!")
if tested and meanSquaredError > 0.0001:
if ignore_tests:
print("Warning: converted and original models aren't identical !\
(mean squared error: {})".format(meanSquaredError))
else:
raise RuntimeWarning("Original and converted model do not match !\
\nOn random input data, outputs showed a mean squared error of {} (if should \
be below 1e-10)".format(meanSquaredError))
elif not quiet and tested:
print('\n Original and converted models match !\nMean squared err\
or : {}'.format(meanSquaredError))
if save:
if not quiet:
print('Saving model...')
defaultName = 'conversion_{}'.format(newModel.name)
if filename is None:
filename = defaultName
# Formatting filename so that we don't overwrite any existing file
file = utils.formatFilename(filename,
directory)
# Freezing Keras model (trainable = False everywhere)
utils.freeze(kerasModel)
# Save the entire model
kerasModel.save(file + '.h5')
if not quiet:
print('Done !')
return file + '.h5'
if not quiet:
print('Done !')
return kerasModel
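# A rough usage sketch (not part of the original module). The torch model below is
# purely hypothetical, and whether a given layer converts cleanly depends on the
# converter's supported layers:
#
#     import torch.nn as nn
#     from pytorch_keras_converter import API
#     torch_model = nn.Sequential(nn.Conv2d(3, 8, 3), nn.ReLU())
#     keras_model = API.convert(torch_model, input_shape=(3, 32, 32), quiet=False)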
def convert_and_save(model,
input_shape,
weights=True,
quiet=True,
ignore_tests=False,
input_range=None,
filename=None,
directory=None):
"""
Conversion between PyTorch and Keras, and automatic save
(Conversions from Keras to PyTorch aren't implemented)
Arguments:
-model:
A Keras or PyTorch model or layer to convert
-input_shape:
Input shape (list, tuple or int), without batchsize.
-weights (bool):
Also convert weights. If set to false, only convert model
architecture
-quiet (bool):
            If True (default), suppress the progress bar and progress messages
-ignore_tests (bool):
If tests should be ignored.
            If set to True, the converted model will
            still be tested as a precaution; if the models are not identical it
            will only print a warning.
If set to False, and models are not identical, RuntimeWarning will
be raised
If weights is False, tests are automatically ignored
-input_range:
            Optional.
A list of 2 elements containing max and min values to give as
input to the model when performing the tests. If None, models will
be tested on samples from the "standard normal" distribution.
-filename:
            Filename to give to the model's hdf5 file (a default name is
            generated if None)
-directory:
            Directory in which to save the model's hdf5 file
Returns:
Name of created hdf5 file
"""
return convert(model=model,
input_shape=input_shape,
weights=weights,
quiet=quiet,
ignore_tests=ignore_tests,
input_range=input_range,
save=True,
filename=filename,
directory=directory)
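# Hypothetical follow-up to the sketch above (not in the original module):
# convert_and_save() takes the same arguments but always writes an .h5 file and
# returns its path, e.g.
#
#     path = API.convert_and_save(torch_model, input_shape=(3, 32, 32),
#                                 filename='my_model', directory='exports')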
| [] |
kjwill/bleak | examples/enable_notifications.py | 7e0fdae6c0f6a78713e5984c2840666e0c38c3f3 | # -*- coding: utf-8 -*-
"""
Notifications
-------------
Example showing how to add notifications to a characteristic and handle the responses.
Updated on 2019-07-03 by hbldh <[email protected]>
"""
import sys
import logging
import asyncio
import platform
from bleak import BleakClient
from bleak import _logger as logger
CHARACTERISTIC_UUID = "f000aa65-0451-4000-b000-000000000000" # <--- Change to the characteristic you want to enable notifications from.
ADDRESS = (
"24:71:89:cc:09:05" # <--- Change to your device's address here if you are using Windows or Linux
if platform.system() != "Darwin"
else "B9EA5233-37EF-4DD6-87A8-2A875E821C46" # <--- Change to your device's address here if you are using macOS
)
if len(sys.argv) == 3:
ADDRESS = sys.argv[1]
CHARACTERISTIC_UUID = sys.argv[2]
def notification_handler(sender, data):
"""Simple notification handler which prints the data received."""
print("{0}: {1}".format(sender, data))
async def run(address, debug=False):
if debug:
import sys
l = logging.getLogger("asyncio")
l.setLevel(logging.DEBUG)
h = logging.StreamHandler(sys.stdout)
h.setLevel(logging.DEBUG)
l.addHandler(h)
logger.addHandler(h)
async with BleakClient(address) as client:
logger.info(f"Connected: {client.is_connected}")
await client.start_notify(CHARACTERISTIC_UUID, notification_handler)
await asyncio.sleep(5.0)
await client.stop_notify(CHARACTERISTIC_UUID)
if __name__ == "__main__":
import os
os.environ["PYTHONASYNCIODEBUG"] = str(1)
loop = asyncio.get_event_loop()
# loop.set_debug(True)
loop.run_until_complete(run(ADDRESS, True))
| [((60, 11, 60, 35), 'asyncio.get_event_loop', 'asyncio.get_event_loop', ({}, {}), '()', False, 'import asyncio\n'), ((24, 7, 24, 24), 'platform.system', 'platform.system', ({}, {}), '()', False, 'import platform\n'), ((41, 12, 41, 40), 'logging.getLogger', 'logging.getLogger', ({(41, 30, 41, 39): '"""asyncio"""'}, {}), "('asyncio')", False, 'import logging\n'), ((43, 12, 43, 45), 'logging.StreamHandler', 'logging.StreamHandler', ({(43, 34, 43, 44): 'sys.stdout'}, {}), '(sys.stdout)', False, 'import logging\n'), ((46, 8, 46, 28), 'bleak._logger.addHandler', 'logger.addHandler', ({(46, 26, 46, 27): 'h'}, {}), '(h)', True, 'from bleak import _logger as logger\n'), ((48, 15, 48, 35), 'bleak.BleakClient', 'BleakClient', ({(48, 27, 48, 34): 'address'}, {}), '(address)', False, 'from bleak import BleakClient\n'), ((49, 8, 49, 56), 'bleak._logger.info', 'logger.info', ({(49, 20, 49, 55): 'f"""Connected: {client.is_connected}"""'}, {}), "(f'Connected: {client.is_connected}')", True, 'from bleak import _logger as logger\n'), ((52, 14, 52, 32), 'asyncio.sleep', 'asyncio.sleep', ({(52, 28, 52, 31): '(5.0)'}, {}), '(5.0)', False, 'import asyncio\n')] |
miraculixx/pyrules | pyrules/storages/base.py | b10d1d5e74052fa1db93cc9b459ac9057a9eb502 | class BaseStorage(object):
def get_rule(self, name):
raise NotImplementedError()
def get_ruleset(self, name):
raise NotImplementedError()
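# A minimal illustrative subclass (not part of pyrules itself), assuming rules and
# rulesets can simply be held in dictionaries keyed by name.
class DictStorage(BaseStorage):
    def __init__(self, rules=None, rulesets=None):
        self.rules = rules or {}
        self.rulesets = rulesets or {}
    def get_rule(self, name):
        return self.rules[name]
    def get_ruleset(self, name):
        return self.rulesets[name]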
| [] |
raminjafary/ethical-hacking | src/15 listener_and_backdoor/listener_2.py | e76f74f4f23e1d8cb7f433d19871dcf966507dfc | #!/usr/bin/python
import socket
class Listener:
def __init__(self,ip,port):
listener = socket.socket(socket.AF_INET,socket.SOCK_STREAM)
listener.setsockopt(socket.SOL_SOCKET,socket.SO_REUSEADDR,1)
#options to reuse sockets
#listener.bind(("localhost",1234))
listener.bind((ip,port))
listener.listen(0)
print "[+] Waiting for Incoming Connection"
        #listen for connections; backlog is set to 0, so we don't need to worry about it
self.connection,address = listener.accept()
print "[+] Got a Connection from " + str(address)
def execute_remotely(self,command):
self.connection.send(command)
return self.connection.recv(1024)
def run(self):
while True:
command = raw_input(">> ")
result = self.execute_remotely(command)
print result
my_listener = Listener("localhost",1234)
my_listener.run() | [] |
ray-hrst/temi-tools | dialogflow/history2xls.py | 8efb1e1af93a41bd98fe0ee8c1fd6fb44e788341 | #!/usr/bin/env python3
# -*- coding: utf-8 -*-
""" Convert Dialogflow history to spreadsheet
User must manually copy the history from the browser and save this in a text file.
This reads the textfile, parses the data, and saves it to a spreadsheet.
Example training sample:
USER
サワディカ
Nov 4, 11:19 PM
AGENT
No matched intent
Nov 4, 11:19 PM
more_vert
"""
import argparse
import os
from simple_report import SimpleReport
# constants
FIELDS = ["Date", "User", "Agent"]
if __name__ == "__main__":
# collect arguments
PARSER = argparse.ArgumentParser(
description=__doc__, formatter_class=argparse.RawDescriptionHelpFormatter
)
PARSER.add_argument("filename", help="History text file")
ARGS = PARSER.parse_args()
# generate report
filename, file_extension = os.path.splitext(ARGS.filename)
REPORT = SimpleReport(filename, FIELDS)
# step each line of history text file
with open(ARGS.filename, 'r') as fp:
num_lines = sum(1 for line in open(ARGS.filename))
rows = int(num_lines / 7)
print("Reading {} lines of text.".format(num_lines))
print("Writing {} rows.".format(rows))
for row in range(1, rows):
user_utterance = fp.readline().strip() # USER UTTERANCE
date = fp.readline().strip() # DATE
agent_intent = fp.readline().strip() # AGENT INTENT
date = fp.readline().strip() # DATE
_ = fp.readline().strip() # 'more_vert'
utterance = user_utterance.split("USER", 1)[1]
intent = agent_intent.split("AGENT", 1)[1]
if not intent:
intent = "Intent found"
print("[{}] {} {} {}".format(row, date, utterance, intent))
# add row to report
REPORT.add("Date", row, date, date)
REPORT.add("User", row, utterance)
REPORT.add("Agent", row, intent)
REPORT.close()
| [((31, 13, 33, 5), 'argparse.ArgumentParser', 'argparse.ArgumentParser', (), '', False, 'import argparse\n'), ((38, 31, 38, 62), 'os.path.splitext', 'os.path.splitext', ({(38, 48, 38, 61): 'ARGS.filename'}, {}), '(ARGS.filename)', False, 'import os\n'), ((39, 13, 39, 43), 'simple_report.SimpleReport', 'SimpleReport', ({(39, 26, 39, 34): 'filename', (39, 36, 39, 42): 'FIELDS'}, {}), '(filename, FIELDS)', False, 'from simple_report import SimpleReport\n')] |
usathe71-u/Attendance-System-Face-Recognition | recognition/views.py | c73f660a6089e8ca9dd5c473efcf2bc78f13a207 | from django.shortcuts import render,redirect
from .forms import usernameForm,DateForm,UsernameAndDateForm, DateForm_2
from django.contrib import messages
from django.contrib.auth.models import User
import cv2
import dlib
import imutils
from imutils import face_utils
from imutils.video import VideoStream
from imutils.face_utils import rect_to_bb
from imutils.face_utils import FaceAligner
import time
from attendance_system_facial_recognition.settings import BASE_DIR
import os
import face_recognition
from face_recognition.face_recognition_cli import image_files_in_folder
import pickle
from sklearn.preprocessing import LabelEncoder
from sklearn.neighbors import KNeighborsClassifier
from sklearn.svm import SVC
import numpy as np
from django.contrib.auth.decorators import login_required
import matplotlib as mpl
import matplotlib.pyplot as plt
from sklearn.manifold import TSNE
import datetime
from django_pandas.io import read_frame
from users.models import Present, Time
import seaborn as sns
import pandas as pd
from django.db.models import Count
#import mpld3
import matplotlib.pyplot as plt
from pandas.plotting import register_matplotlib_converters
from matplotlib import rcParams
import math
mpl.use('Agg')
#utility functions:
def username_present(username):
if User.objects.filter(username=username).exists():
return True
return False
def create_dataset(username):
id = username
if(os.path.exists('face_recognition_data/training_dataset/{}/'.format(id))==False):
os.makedirs('face_recognition_data/training_dataset/{}/'.format(id))
directory='face_recognition_data/training_dataset/{}/'.format(id)
# Detect face
#Loading the HOG face detector and the shape predictpr for allignment
print("[INFO] Loading the facial detector")
detector = dlib.get_frontal_face_detector()
predictor = dlib.shape_predictor('face_recognition_data/shape_predictor_68_face_landmarks.dat') #Add path to the shape predictor ######CHANGE TO RELATIVE PATH LATER
fa = FaceAligner(predictor , desiredFaceWidth = 96)
#capture images from the webcam and process and detect the face
# Initialize the video stream
print("[INFO] Initializing Video stream")
vs = VideoStream(src=0).start()
#time.sleep(2.0) ####CHECK######
# Our identifier
# We will put the id here and we will store the id with a face, so that later we can identify whose face it is
# Our dataset naming counter
sampleNum = 0
# Capturing the faces one by one and detect the faces and showing it on the window
while(True):
# Capturing the image
#vs.read each frame
frame = vs.read()
#Resize each image
frame = imutils.resize(frame ,width = 800)
#the returned img is a colored image but for the classifier to work we need a greyscale image
#to convert
gray_frame = cv2.cvtColor(frame, cv2.COLOR_BGR2GRAY)
#To store the faces
#This will detect all the images in the current frame, and it will return the coordinates of the faces
#Takes in image and some other parameter for accurate result
faces = detector(gray_frame,0)
#In above 'faces' variable there can be multiple faces so we have to get each and every face and draw a rectangle around it.
for face in faces:
print("inside for loop")
(x,y,w,h) = face_utils.rect_to_bb(face)
face_aligned = fa.align(frame,gray_frame,face)
# Whenever the program captures the face, we will write that is a folder
# Before capturing the face, we need to tell the script whose face it is
# For that we will need an identifier, here we call it id
# So now we captured a face, we need to write it in a file
sampleNum = sampleNum+1
# Saving the image dataset, but only the face part, cropping the rest
if face is None:
print("face is none")
continue
cv2.imwrite(directory+'/'+str(sampleNum)+'.jpg' , face_aligned)
face_aligned = imutils.resize(face_aligned ,width = 400)
#cv2.imshow("Image Captured",face_aligned)
# @params the initial point of the rectangle will be x,y and
# @params end point will be x+width and y+height
# @params along with color of the rectangle
# @params thickness of the rectangle
cv2.rectangle(frame,(x,y),(x+w,y+h),(0,255,0),1)
# Before continuing to the next loop, I want to give it a little pause
# waitKey of 100 millisecond
cv2.waitKey(50)
#Showing the image in another window
#Creates a window with window name "Face" and with the image img
cv2.imshow("Add Images",frame)
#Before closing it we need to give a wait command, otherwise the open cv wont work
# @params with the millisecond of delay 1
cv2.waitKey(1)
#To get out of the loop
if(sampleNum>300):
break
#Stoping the videostream
vs.stop()
# destroying all the windows
cv2.destroyAllWindows()
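# predict() returns (class_index_array, probability): ([-1], [0]) when no face
# encoding can be computed, and ([-1], prob) when the best SVC probability is at
# or below `threshold`; otherwise the most probable class index and its probability.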
def predict(face_aligned,svc,threshold=0.7):
face_encodings=np.zeros((1,128))
try:
x_face_locations=face_recognition.face_locations(face_aligned)
faces_encodings=face_recognition.face_encodings(face_aligned,known_face_locations=x_face_locations)
if(len(faces_encodings)==0):
return ([-1],[0])
except:
return ([-1],[0])
prob=svc.predict_proba(faces_encodings)
result=np.where(prob[0]==np.amax(prob[0]))
if(prob[0][result[0]]<=threshold):
return ([-1],prob[0][result[0]])
return (result[0],prob[0][result[0]])
def vizualize_Data(embedded, targets,):
X_embedded = TSNE(n_components=2).fit_transform(embedded)
for i, t in enumerate(set(targets)):
idx = targets == t
plt.scatter(X_embedded[idx, 0], X_embedded[idx, 1], label=t)
plt.legend(bbox_to_anchor=(1, 1));
rcParams.update({'figure.autolayout': True})
plt.tight_layout()
plt.savefig('./recognition/static/recognition/img/training_visualisation.png')
plt.close()
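# For every recognised username: make sure a Present row exists for today
# (marking present=True where the person was detected) and, if detected, also
# log a Time entry with out=False (an "in" punch).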
def update_attendance_in_db_in(present):
today=datetime.date.today()
time=datetime.datetime.now()
for person in present:
user=User.objects.get(username=person)
try:
qs=Present.objects.get(user=user,date=today)
except :
qs= None
if qs is None:
if present[person]==True:
a=Present(user=user,date=today,present=True)
a.save()
else:
a=Present(user=user,date=today,present=False)
a.save()
else:
if present[person]==True:
qs.present=True
qs.save(update_fields=['present'])
if present[person]==True:
a=Time(user=user,date=today,time=time, out=False)
a.save()
def update_attendance_in_db_out(present):
today=datetime.date.today()
time=datetime.datetime.now()
for person in present:
user=User.objects.get(username=person)
if present[person]==True:
a=Time(user=user,date=today,time=time, out=True)
a.save()
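# Validates a sequence of Time records: it should start with an "in" (out=False)
# and strictly alternate in/out. Returns (is_valid, total_break_hours), where
# break time is summed between each "out" and the following "in"; any
# inconsistency returns (False, 0).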
def check_validity_times(times_all):
if(len(times_all)>0):
sign=times_all.first().out
else:
sign=True
times_in=times_all.filter(out=False)
times_out=times_all.filter(out=True)
if(len(times_in)!=len(times_out)):
sign=True
break_hourss=0
if(sign==True):
check=False
break_hourss=0
return (check,break_hourss)
prev=True
prev_time=times_all.first().time
for obj in times_all:
curr=obj.out
if(curr==prev):
check=False
break_hourss=0
return (check,break_hourss)
if(curr==False):
curr_time=obj.time
to=curr_time
ti=prev_time
break_time=((to-ti).total_seconds())/3600
break_hourss+=break_time
else:
prev_time=obj.time
prev=curr
return (True,break_hourss)
def convert_hours_to_hours_mins(hours):
h=int(hours)
hours-=h
m=hours*60
m=math.ceil(m)
return str(str(h)+ " hrs " + str(m) + " mins")
#used
def hours_vs_date_given_employee(present_qs,time_qs,admin=True):
register_matplotlib_converters()
df_hours=[]
df_break_hours=[]
qs=present_qs
for obj in qs:
date=obj.date
times_in=time_qs.filter(date=date).filter(out=False).order_by('time')
times_out=time_qs.filter(date=date).filter(out=True).order_by('time')
times_all=time_qs.filter(date=date).order_by('time')
obj.time_in=None
obj.time_out=None
obj.hours=0
obj.break_hours=0
if (len(times_in)>0):
obj.time_in=times_in.first().time
if (len(times_out)>0):
obj.time_out=times_out.last().time
if(obj.time_in is not None and obj.time_out is not None):
ti=obj.time_in
to=obj.time_out
hours=((to-ti).total_seconds())/3600
obj.hours=hours
else:
obj.hours=0
(check,break_hourss)= check_validity_times(times_all)
if check:
obj.break_hours=break_hourss
else:
obj.break_hours=0
df_hours.append(obj.hours)
df_break_hours.append(obj.break_hours)
obj.hours=convert_hours_to_hours_mins(obj.hours)
obj.break_hours=convert_hours_to_hours_mins(obj.break_hours)
df = read_frame(qs)
df["hours"]=df_hours
df["break_hours"]=df_break_hours
print(df)
sns.barplot(data=df,x='date',y='hours')
plt.xticks(rotation='vertical')
rcParams.update({'figure.autolayout': True})
plt.tight_layout()
if(admin):
plt.savefig('./recognition/static/recognition/img/attendance_graphs/hours_vs_date/1.png')
plt.close()
else:
plt.savefig('./recognition/static/recognition/img/attendance_graphs/employee_login/1.png')
plt.close()
return qs
#used
def hours_vs_employee_given_date(present_qs,time_qs):
register_matplotlib_converters()
df_hours=[]
df_break_hours=[]
df_username=[]
qs=present_qs
for obj in qs:
user=obj.user
times_in=time_qs.filter(user=user).filter(out=False)
times_out=time_qs.filter(user=user).filter(out=True)
times_all=time_qs.filter(user=user)
obj.time_in=None
obj.time_out=None
obj.hours=0
		obj.break_hours=0
if (len(times_in)>0):
obj.time_in=times_in.first().time
if (len(times_out)>0):
obj.time_out=times_out.last().time
if(obj.time_in is not None and obj.time_out is not None):
ti=obj.time_in
to=obj.time_out
hours=((to-ti).total_seconds())/3600
obj.hours=hours
else:
obj.hours=0
(check,break_hourss)= check_validity_times(times_all)
if check:
obj.break_hours=break_hourss
else:
obj.break_hours=0
df_hours.append(obj.hours)
df_username.append(user.username)
df_break_hours.append(obj.break_hours)
obj.hours=convert_hours_to_hours_mins(obj.hours)
obj.break_hours=convert_hours_to_hours_mins(obj.break_hours)
df = read_frame(qs)
df['hours']=df_hours
df['username']=df_username
df["break_hours"]=df_break_hours
sns.barplot(data=df,x='username',y='hours')
plt.xticks(rotation='vertical')
rcParams.update({'figure.autolayout': True})
plt.tight_layout()
plt.savefig('./recognition/static/recognition/img/attendance_graphs/hours_vs_employee/1.png')
plt.close()
return qs
def total_number_employees():
qs=User.objects.all()
return (len(qs) -1)
# -1 to account for admin
def employees_present_today():
today=datetime.date.today()
qs=Present.objects.filter(date=today).filter(present=True)
return len(qs)
#used
def this_week_emp_count_vs_date():
today=datetime.date.today()
some_day_last_week=today-datetime.timedelta(days=7)
monday_of_last_week=some_day_last_week- datetime.timedelta(days=(some_day_last_week.isocalendar()[2] - 1))
monday_of_this_week = monday_of_last_week + datetime.timedelta(days=7)
qs=Present.objects.filter(date__gte=monday_of_this_week).filter(date__lte=today)
str_dates=[]
emp_count=[]
str_dates_all=[]
emp_cnt_all=[]
cnt=0
for obj in qs:
date=obj.date
str_dates.append(str(date))
qs=Present.objects.filter(date=date).filter(present=True)
emp_count.append(len(qs))
while(cnt<5):
date=str(monday_of_this_week+datetime.timedelta(days=cnt))
cnt+=1
str_dates_all.append(date)
if(str_dates.count(date))>0:
idx=str_dates.index(date)
emp_cnt_all.append(emp_count[idx])
else:
emp_cnt_all.append(0)
df=pd.DataFrame()
df["date"]=str_dates_all
df["Number of employees"]=emp_cnt_all
sns.lineplot(data=df,x='date',y='Number of employees')
plt.savefig('./recognition/static/recognition/img/attendance_graphs/this_week/1.png')
plt.close()
#used
def last_week_emp_count_vs_date():
today=datetime.date.today()
some_day_last_week=today-datetime.timedelta(days=7)
monday_of_last_week=some_day_last_week- datetime.timedelta(days=(some_day_last_week.isocalendar()[2] - 1))
monday_of_this_week = monday_of_last_week + datetime.timedelta(days=7)
qs=Present.objects.filter(date__gte=monday_of_last_week).filter(date__lt=monday_of_this_week)
str_dates=[]
emp_count=[]
str_dates_all=[]
emp_cnt_all=[]
cnt=0
for obj in qs:
date=obj.date
str_dates.append(str(date))
qs=Present.objects.filter(date=date).filter(present=True)
emp_count.append(len(qs))
while(cnt<5):
date=str(monday_of_last_week+datetime.timedelta(days=cnt))
cnt+=1
str_dates_all.append(date)
if(str_dates.count(date))>0:
idx=str_dates.index(date)
emp_cnt_all.append(emp_count[idx])
else:
emp_cnt_all.append(0)
df=pd.DataFrame()
df["date"]=str_dates_all
df["emp_count"]=emp_cnt_all
sns.lineplot(data=df,x='date',y='emp_count')
plt.savefig('./recognition/static/recognition/img/attendance_graphs/last_week/1.png')
plt.close()
# Create your views here.
def home(request):
return render(request, 'recognition/home.html')
@login_required
def dashboard(request):
if(request.user.username=='admin'):
print("admin")
return render(request, 'recognition/admin_dashboard.html')
else:
print("not admin")
return render(request,'recognition/employee_dashboard.html')
@login_required
def add_photos(request):
if request.user.username!='admin':
return redirect('not-authorised')
if request.method=='POST':
form=usernameForm(request.POST)
data = request.POST.copy()
username=data.get('username')
if username_present(username):
create_dataset(username)
messages.success(request, f'Dataset Created')
return redirect('add-photos')
else:
messages.warning(request, f'No such username found. Please register employee first.')
return redirect('dashboard')
else:
form=usernameForm()
return render(request,'recognition/add_photos.html', {'form' : form})
def mark_your_attendance(request):
detector = dlib.get_frontal_face_detector()
predictor = dlib.shape_predictor('face_recognition_data/shape_predictor_68_face_landmarks.dat') #Add path to the shape predictor ######CHANGE TO RELATIVE PATH LATER
svc_save_path="face_recognition_data/svc.sav"
with open(svc_save_path, 'rb') as f:
svc = pickle.load(f)
fa = FaceAligner(predictor , desiredFaceWidth = 96)
encoder=LabelEncoder()
encoder.classes_ = np.load('face_recognition_data/classes.npy')
faces_encodings = np.zeros((1,128))
no_of_faces = len(svc.predict_proba(faces_encodings)[0])
count = dict()
present = dict()
log_time = dict()
start = dict()
for i in range(no_of_faces):
count[encoder.inverse_transform([i])[0]] = 0
present[encoder.inverse_transform([i])[0]] = False
vs = VideoStream(src=0).start()
sampleNum = 0
while(True):
frame = vs.read()
frame = imutils.resize(frame ,width = 800)
gray_frame = cv2.cvtColor(frame, cv2.COLOR_BGR2GRAY)
faces = detector(gray_frame,0)
for face in faces:
print("INFO : inside for loop")
(x,y,w,h) = face_utils.rect_to_bb(face)
face_aligned = fa.align(frame,gray_frame,face)
cv2.rectangle(frame,(x,y),(x+w,y+h),(0,255,0),1)
(pred,prob)=predict(face_aligned,svc)
if(pred!=[-1]):
person_name=encoder.inverse_transform(np.ravel([pred]))[0]
pred=person_name
if count[pred] == 0:
start[pred] = time.time()
count[pred] = count.get(pred,0) + 1
if count[pred] == 4 and (time.time()-start[pred]) > 1.2:
count[pred] = 0
else:
#if count[pred] == 4 and (time.time()-start) <= 1.5:
present[pred] = True
log_time[pred] = datetime.datetime.now()
count[pred] = count.get(pred,0) + 1
print(pred, present[pred], count[pred])
cv2.putText(frame, str(person_name)+ str(prob), (x+6,y+h-6), cv2.FONT_HERSHEY_SIMPLEX,0.5,(0,255,0),1)
else:
person_name="unknown"
cv2.putText(frame, str(person_name), (x+6,y+h-6), cv2.FONT_HERSHEY_SIMPLEX,0.5,(0,255,0),1)
#cv2.putText()
# Before continuing to the next loop, I want to give it a little pause
# waitKey of 100 millisecond
#cv2.waitKey(50)
#Showing the image in another window
#Creates a window with window name "Face" and with the image img
cv2.imshow("Mark Attendance - In - Press q to exit",frame)
#Before closing it we need to give a wait command, otherwise the open cv wont work
# @params with the millisecond of delay 1
#cv2.waitKey(1)
#To get out of the loop
key=cv2.waitKey(50) & 0xFF
if(key==ord("q")):
break
#Stoping the videostream
vs.stop()
# destroying all the windows
cv2.destroyAllWindows()
update_attendance_in_db_in(present)
return redirect('home')
def mark_your_attendance_out(request):
detector = dlib.get_frontal_face_detector()
predictor = dlib.shape_predictor('face_recognition_data/shape_predictor_68_face_landmarks.dat') #Add path to the shape predictor ######CHANGE TO RELATIVE PATH LATER
svc_save_path="face_recognition_data/svc.sav"
with open(svc_save_path, 'rb') as f:
svc = pickle.load(f)
fa = FaceAligner(predictor , desiredFaceWidth = 96)
encoder=LabelEncoder()
encoder.classes_ = np.load('face_recognition_data/classes.npy')
faces_encodings = np.zeros((1,128))
no_of_faces = len(svc.predict_proba(faces_encodings)[0])
count = dict()
present = dict()
log_time = dict()
start = dict()
for i in range(no_of_faces):
count[encoder.inverse_transform([i])[0]] = 0
present[encoder.inverse_transform([i])[0]] = False
vs = VideoStream(src=0).start()
sampleNum = 0
while(True):
frame = vs.read()
frame = imutils.resize(frame ,width = 800)
gray_frame = cv2.cvtColor(frame, cv2.COLOR_BGR2GRAY)
faces = detector(gray_frame,0)
for face in faces:
print("INFO : inside for loop")
(x,y,w,h) = face_utils.rect_to_bb(face)
face_aligned = fa.align(frame,gray_frame,face)
cv2.rectangle(frame,(x,y),(x+w,y+h),(0,255,0),1)
(pred,prob)=predict(face_aligned,svc)
if(pred!=[-1]):
person_name=encoder.inverse_transform(np.ravel([pred]))[0]
pred=person_name
if count[pred] == 0:
start[pred] = time.time()
count[pred] = count.get(pred,0) + 1
if count[pred] == 4 and (time.time()-start[pred]) > 1.5:
count[pred] = 0
else:
#if count[pred] == 4 and (time.time()-start) <= 1.5:
present[pred] = True
log_time[pred] = datetime.datetime.now()
count[pred] = count.get(pred,0) + 1
print(pred, present[pred], count[pred])
cv2.putText(frame, str(person_name)+ str(prob), (x+6,y+h-6), cv2.FONT_HERSHEY_SIMPLEX,0.5,(0,255,0),1)
else:
person_name="unknown"
cv2.putText(frame, str(person_name), (x+6,y+h-6), cv2.FONT_HERSHEY_SIMPLEX,0.5,(0,255,0),1)
#cv2.putText()
# Before continuing to the next loop, I want to give it a little pause
# waitKey of 100 millisecond
#cv2.waitKey(50)
#Showing the image in another window
#Creates a window with window name "Face" and with the image img
cv2.imshow("Mark Attendance- Out - Press q to exit",frame)
#Before closing it we need to give a wait command, otherwise the open cv wont work
# @params with the millisecond of delay 1
#cv2.waitKey(1)
#To get out of the loop
key=cv2.waitKey(50) & 0xFF
if(key==ord("q")):
break
#Stoping the videostream
vs.stop()
# destroying all the windows
cv2.destroyAllWindows()
update_attendance_in_db_out(present)
return redirect('home')
@login_required
def train(request):
if request.user.username!='admin':
return redirect('not-authorised')
training_dir='face_recognition_data/training_dataset'
count=0
for person_name in os.listdir(training_dir):
curr_directory=os.path.join(training_dir,person_name)
if not os.path.isdir(curr_directory):
continue
for imagefile in image_files_in_folder(curr_directory):
count+=1
X=[]
y=[]
i=0
for person_name in os.listdir(training_dir):
print(str(person_name))
curr_directory=os.path.join(training_dir,person_name)
if not os.path.isdir(curr_directory):
continue
for imagefile in image_files_in_folder(curr_directory):
print(str(imagefile))
image=cv2.imread(imagefile)
try:
X.append((face_recognition.face_encodings(image)[0]).tolist())
y.append(person_name)
i+=1
except:
print("removed")
os.remove(imagefile)
targets=np.array(y)
encoder = LabelEncoder()
encoder.fit(y)
y=encoder.transform(y)
X1=np.array(X)
print("shape: "+ str(X1.shape))
np.save('face_recognition_data/classes.npy', encoder.classes_)
svc = SVC(kernel='linear',probability=True)
svc.fit(X1,y)
svc_save_path="face_recognition_data/svc.sav"
with open(svc_save_path, 'wb') as f:
pickle.dump(svc,f)
vizualize_Data(X1,targets)
messages.success(request, f'Training Complete.')
return render(request,"recognition/train.html")
@login_required
def not_authorised(request):
return render(request,'recognition/not_authorised.html')
@login_required
def view_attendance_home(request):
total_num_of_emp=total_number_employees()
emp_present_today=employees_present_today()
this_week_emp_count_vs_date()
last_week_emp_count_vs_date()
return render(request,"recognition/view_attendance_home.html", {'total_num_of_emp' : total_num_of_emp, 'emp_present_today': emp_present_today})
@login_required
def view_attendance_date(request):
if request.user.username!='admin':
return redirect('not-authorised')
qs=None
time_qs=None
present_qs=None
if request.method=='POST':
form=DateForm(request.POST)
if form.is_valid():
date=form.cleaned_data.get('date')
print("date:"+ str(date))
time_qs=Time.objects.filter(date=date)
present_qs=Present.objects.filter(date=date)
if(len(time_qs)>0 or len(present_qs)>0):
qs=hours_vs_employee_given_date(present_qs,time_qs)
return render(request,'recognition/view_attendance_date.html', {'form' : form,'qs' : qs })
else:
messages.warning(request, f'No records for selected date.')
return redirect('view-attendance-date')
else:
form=DateForm()
return render(request,'recognition/view_attendance_date.html', {'form' : form, 'qs' : qs})
@login_required
def view_attendance_employee(request):
if request.user.username!='admin':
return redirect('not-authorised')
time_qs=None
present_qs=None
qs=None
if request.method=='POST':
form=UsernameAndDateForm(request.POST)
if form.is_valid():
username=form.cleaned_data.get('username')
if username_present(username):
u=User.objects.get(username=username)
time_qs=Time.objects.filter(user=u)
present_qs=Present.objects.filter(user=u)
date_from=form.cleaned_data.get('date_from')
date_to=form.cleaned_data.get('date_to')
if date_to < date_from:
messages.warning(request, f'Invalid date selection.')
return redirect('view-attendance-employee')
else:
time_qs=time_qs.filter(date__gte=date_from).filter(date__lte=date_to).order_by('-date')
present_qs=present_qs.filter(date__gte=date_from).filter(date__lte=date_to).order_by('-date')
if (len(time_qs)>0 or len(present_qs)>0):
qs=hours_vs_date_given_employee(present_qs,time_qs,admin=True)
return render(request,'recognition/view_attendance_employee.html', {'form' : form, 'qs' :qs})
else:
#print("inside qs is None")
messages.warning(request, f'No records for selected duration.')
return redirect('view-attendance-employee')
else:
print("invalid username")
messages.warning(request, f'No such username found.')
return redirect('view-attendance-employee')
else:
form=UsernameAndDateForm()
return render(request,'recognition/view_attendance_employee.html', {'form' : form, 'qs' :qs})
@login_required
def view_my_attendance_employee_login(request):
if request.user.username=='admin':
return redirect('not-authorised')
qs=None
time_qs=None
present_qs=None
if request.method=='POST':
form=DateForm_2(request.POST)
if form.is_valid():
u=request.user
time_qs=Time.objects.filter(user=u)
present_qs=Present.objects.filter(user=u)
date_from=form.cleaned_data.get('date_from')
date_to=form.cleaned_data.get('date_to')
if date_to < date_from:
messages.warning(request, f'Invalid date selection.')
return redirect('view-my-attendance-employee-login')
else:
time_qs=time_qs.filter(date__gte=date_from).filter(date__lte=date_to).order_by('-date')
present_qs=present_qs.filter(date__gte=date_from).filter(date__lte=date_to).order_by('-date')
if (len(time_qs)>0 or len(present_qs)>0):
qs=hours_vs_date_given_employee(present_qs,time_qs,admin=False)
return render(request,'recognition/view_my_attendance_employee_login.html', {'form' : form, 'qs' :qs})
else:
messages.warning(request, f'No records for selected duration.')
return redirect('view-my-attendance-employee-login')
else:
form=DateForm_2()
return render(request,'recognition/view_my_attendance_employee_login.html', {'form' : form, 'qs' :qs}) | [((38, 0, 38, 14), 'matplotlib.use', 'mpl.use', ({(38, 8, 38, 13): '"""Agg"""'}, {}), "('Agg')", True, 'import matplotlib as mpl\n'), ((58, 12, 58, 44), 'dlib.get_frontal_face_detector', 'dlib.get_frontal_face_detector', ({}, {}), '()', False, 'import dlib\n'), ((59, 13, 59, 96), 'dlib.shape_predictor', 'dlib.shape_predictor', ({(59, 34, 59, 95): '"""face_recognition_data/shape_predictor_68_face_landmarks.dat"""'}, {}), "(\n 'face_recognition_data/shape_predictor_68_face_landmarks.dat')", False, 'import dlib\n'), ((60, 6, 60, 52), 'imutils.face_utils.FaceAligner', 'FaceAligner', (), '', False, 'from imutils.face_utils import FaceAligner\n'), ((136, 1, 136, 24), 'cv2.destroyAllWindows', 'cv2.destroyAllWindows', ({}, {}), '()', False, 'import cv2\n'), ((140, 16, 140, 33), 'numpy.zeros', 'np.zeros', ({(140, 25, 140, 32): '(1, 128)'}, {}), '((1, 128))', True, 'import numpy as np\n'), ((167, 1, 167, 34), 'matplotlib.pyplot.legend', 'plt.legend', (), '', True, 'import matplotlib.pyplot as plt\n'), ((168, 1, 168, 45), 'matplotlib.rcParams.update', 'rcParams.update', ({(168, 17, 168, 44): "{'figure.autolayout': True}"}, {}), "({'figure.autolayout': True})", False, 'from matplotlib import rcParams\n'), ((169, 1, 169, 19), 'matplotlib.pyplot.tight_layout', 'plt.tight_layout', ({}, {}), '()', True, 'import matplotlib.pyplot as plt\n'), ((170, 1, 170, 79), 'matplotlib.pyplot.savefig', 'plt.savefig', ({(170, 13, 170, 78): '"""./recognition/static/recognition/img/training_visualisation.png"""'}, {}), "('./recognition/static/recognition/img/training_visualisation.png')", True, 'import matplotlib.pyplot as plt\n'), ((171, 1, 171, 12), 'matplotlib.pyplot.close', 'plt.close', ({}, {}), '()', True, 'import matplotlib.pyplot as plt\n'), ((176, 7, 176, 28), 'datetime.date.today', 'datetime.date.today', ({}, {}), '()', False, 'import datetime\n'), ((177, 6, 177, 29), 'datetime.datetime.now', 'datetime.datetime.now', ({}, {}), '()', False, 'import datetime\n'), ((208, 7, 208, 28), 'datetime.date.today', 'datetime.date.today', ({}, {}), '()', False, 'import datetime\n'), ((209, 6, 209, 29), 'datetime.datetime.now', 'datetime.datetime.now', ({}, {}), '()', False, 'import datetime\n'), ((265, 3, 265, 15), 'math.ceil', 'math.ceil', ({(265, 13, 265, 14): 'm'}, {}), '(m)', False, 'import math\n'), ((272, 1, 272, 33), 'pandas.plotting.register_matplotlib_converters', 'register_matplotlib_converters', ({}, {}), '()', False, 'from pandas.plotting import register_matplotlib_converters\n'), ((320, 6, 320, 20), 'django_pandas.io.read_frame', 'read_frame', ({(320, 17, 320, 19): 'qs'}, {}), '(qs)', False, 'from django_pandas.io import read_frame\n'), ((328, 1, 328, 40), 'seaborn.barplot', 'sns.barplot', (), '', True, 'import seaborn as sns\n'), ((329, 1, 329, 32), 'matplotlib.pyplot.xticks', 'plt.xticks', (), '', True, 'import matplotlib.pyplot as plt\n'), ((330, 1, 330, 45), 'matplotlib.rcParams.update', 'rcParams.update', ({(330, 17, 330, 44): "{'figure.autolayout': True}"}, {}), "({'figure.autolayout': True})", False, 'from matplotlib import rcParams\n'), ((331, 1, 331, 19), 'matplotlib.pyplot.tight_layout', 'plt.tight_layout', ({}, {}), '()', True, 'import matplotlib.pyplot as plt\n'), ((343, 1, 343, 33), 'pandas.plotting.register_matplotlib_converters', 'register_matplotlib_converters', ({}, {}), '()', False, 'from pandas.plotting import register_matplotlib_converters\n'), ((388, 6, 388, 20), 
'django_pandas.io.read_frame', 'read_frame', ({(388, 17, 388, 19): 'qs'}, {}), '(qs)', False, 'from django_pandas.io import read_frame\n'), ((394, 1, 394, 44), 'seaborn.barplot', 'sns.barplot', (), '', True, 'import seaborn as sns\n'), ((395, 1, 395, 32), 'matplotlib.pyplot.xticks', 'plt.xticks', (), '', True, 'import matplotlib.pyplot as plt\n'), ((396, 1, 396, 45), 'matplotlib.rcParams.update', 'rcParams.update', ({(396, 17, 396, 44): "{'figure.autolayout': True}"}, {}), "({'figure.autolayout': True})", False, 'from matplotlib import rcParams\n'), ((397, 1, 397, 19), 'matplotlib.pyplot.tight_layout', 'plt.tight_layout', ({}, {}), '()', True, 'import matplotlib.pyplot as plt\n'), ((398, 1, 398, 94), 'matplotlib.pyplot.savefig', 'plt.savefig', ({(398, 13, 398, 93): '"""./recognition/static/recognition/img/attendance_graphs/hours_vs_employee/1.png"""'}, {}), "(\n './recognition/static/recognition/img/attendance_graphs/hours_vs_employee/1.png'\n )", True, 'import matplotlib.pyplot as plt\n'), ((399, 1, 399, 12), 'matplotlib.pyplot.close', 'plt.close', ({}, {}), '()', True, 'import matplotlib.pyplot as plt\n'), ((404, 4, 404, 22), 'django.contrib.auth.models.User.objects.all', 'User.objects.all', ({}, {}), '()', False, 'from django.contrib.auth.models import User\n'), ((411, 7, 411, 28), 'datetime.date.today', 'datetime.date.today', ({}, {}), '()', False, 'import datetime\n'), ((420, 7, 420, 28), 'datetime.date.today', 'datetime.date.today', ({}, {}), '()', False, 'import datetime\n'), ((460, 4, 460, 18), 'pandas.DataFrame', 'pd.DataFrame', ({}, {}), '()', True, 'import pandas as pd\n'), ((465, 1, 465, 55), 'seaborn.lineplot', 'sns.lineplot', (), '', True, 'import seaborn as sns\n'), ((466, 1, 466, 86), 'matplotlib.pyplot.savefig', 'plt.savefig', ({(466, 13, 466, 85): '"""./recognition/static/recognition/img/attendance_graphs/this_week/1.png"""'}, {}), "(\n './recognition/static/recognition/img/attendance_graphs/this_week/1.png')", True, 'import matplotlib.pyplot as plt\n'), ((467, 1, 467, 12), 'matplotlib.pyplot.close', 'plt.close', ({}, {}), '()', True, 'import matplotlib.pyplot as plt\n'), ((476, 7, 476, 28), 'datetime.date.today', 'datetime.date.today', ({}, {}), '()', False, 'import datetime\n'), ((516, 4, 516, 18), 'pandas.DataFrame', 'pd.DataFrame', ({}, {}), '()', True, 'import pandas as pd\n'), ((523, 1, 523, 45), 'seaborn.lineplot', 'sns.lineplot', (), '', True, 'import seaborn as sns\n'), ((524, 1, 524, 86), 'matplotlib.pyplot.savefig', 'plt.savefig', ({(524, 13, 524, 85): '"""./recognition/static/recognition/img/attendance_graphs/last_week/1.png"""'}, {}), "(\n './recognition/static/recognition/img/attendance_graphs/last_week/1.png')", True, 'import matplotlib.pyplot as plt\n'), ((525, 1, 525, 12), 'matplotlib.pyplot.close', 'plt.close', ({}, {}), '()', True, 'import matplotlib.pyplot as plt\n'), ((537, 8, 537, 48), 'django.shortcuts.render', 'render', ({(537, 15, 537, 22): 'request', (537, 24, 537, 47): '"""recognition/home.html"""'}, {}), "(request, 'recognition/home.html')", False, 'from django.shortcuts import render, redirect\n'), ((579, 12, 579, 44), 'dlib.get_frontal_face_detector', 'dlib.get_frontal_face_detector', ({}, {}), '()', False, 'import dlib\n'), ((581, 13, 581, 96), 'dlib.shape_predictor', 'dlib.shape_predictor', ({(581, 34, 581, 95): '"""face_recognition_data/shape_predictor_68_face_landmarks.dat"""'}, {}), "(\n 'face_recognition_data/shape_predictor_68_face_landmarks.dat')", False, 'import dlib\n'), ((589, 6, 589, 52), 'imutils.face_utils.FaceAligner', 
'FaceAligner', (), '', False, 'from imutils.face_utils import FaceAligner\n'), ((590, 9, 590, 23), 'sklearn.preprocessing.LabelEncoder', 'LabelEncoder', ({}, {}), '()', False, 'from sklearn.preprocessing import LabelEncoder\n'), ((591, 20, 591, 64), 'numpy.load', 'np.load', ({(591, 28, 591, 63): '"""face_recognition_data/classes.npy"""'}, {}), "('face_recognition_data/classes.npy')", True, 'import numpy as np\n'), ((594, 19, 594, 36), 'numpy.zeros', 'np.zeros', ({(594, 28, 594, 35): '(1, 128)'}, {}), '((1, 128))', True, 'import numpy as np\n'), ((679, 1, 679, 24), 'cv2.destroyAllWindows', 'cv2.destroyAllWindows', ({}, {}), '()', False, 'import cv2\n'), ((681, 8, 681, 24), 'django.shortcuts.redirect', 'redirect', ({(681, 17, 681, 23): '"""home"""'}, {}), "('home')", False, 'from django.shortcuts import render, redirect\n'), ((689, 12, 689, 44), 'dlib.get_frontal_face_detector', 'dlib.get_frontal_face_detector', ({}, {}), '()', False, 'import dlib\n'), ((691, 13, 691, 96), 'dlib.shape_predictor', 'dlib.shape_predictor', ({(691, 34, 691, 95): '"""face_recognition_data/shape_predictor_68_face_landmarks.dat"""'}, {}), "(\n 'face_recognition_data/shape_predictor_68_face_landmarks.dat')", False, 'import dlib\n'), ((699, 6, 699, 52), 'imutils.face_utils.FaceAligner', 'FaceAligner', (), '', False, 'from imutils.face_utils import FaceAligner\n'), ((700, 9, 700, 23), 'sklearn.preprocessing.LabelEncoder', 'LabelEncoder', ({}, {}), '()', False, 'from sklearn.preprocessing import LabelEncoder\n'), ((701, 20, 701, 64), 'numpy.load', 'np.load', ({(701, 28, 701, 63): '"""face_recognition_data/classes.npy"""'}, {}), "('face_recognition_data/classes.npy')", True, 'import numpy as np\n'), ((704, 19, 704, 36), 'numpy.zeros', 'np.zeros', ({(704, 28, 704, 35): '(1, 128)'}, {}), '((1, 128))', True, 'import numpy as np\n'), ((789, 1, 789, 24), 'cv2.destroyAllWindows', 'cv2.destroyAllWindows', ({}, {}), '()', False, 'import cv2\n'), ((791, 8, 791, 24), 'django.shortcuts.redirect', 'redirect', ({(791, 17, 791, 23): '"""home"""'}, {}), "('home')", False, 'from django.shortcuts import render, redirect\n'), ((806, 20, 806, 44), 'os.listdir', 'os.listdir', ({(806, 31, 806, 43): 'training_dir'}, {}), '(training_dir)', False, 'import os\n'), ((818, 20, 818, 44), 'os.listdir', 'os.listdir', ({(818, 31, 818, 43): 'training_dir'}, {}), '(training_dir)', False, 'import os\n'), ((840, 9, 840, 20), 'numpy.array', 'np.array', ({(840, 18, 840, 19): 'y'}, {}), '(y)', True, 'import numpy as np\n'), ((841, 11, 841, 25), 'sklearn.preprocessing.LabelEncoder', 'LabelEncoder', ({}, {}), '()', False, 'from sklearn.preprocessing import LabelEncoder\n'), ((844, 4, 844, 15), 'numpy.array', 'np.array', ({(844, 13, 844, 14): 'X'}, {}), '(X)', True, 'import numpy as np\n'), ((846, 1, 846, 63), 'numpy.save', 'np.save', ({(846, 9, 846, 44): '"""face_recognition_data/classes.npy"""', (846, 46, 846, 62): 'encoder.classes_'}, {}), "('face_recognition_data/classes.npy', encoder.classes_)", True, 'import numpy as np\n'), ((847, 7, 847, 44), 'sklearn.svm.SVC', 'SVC', (), '', False, 'from sklearn.svm import SVC\n'), ((856, 1, 856, 49), 'django.contrib.messages.success', 'messages.success', ({(856, 18, 856, 25): 'request', (856, 27, 856, 48): 'f"""Training Complete."""'}, {}), "(request, f'Training Complete.')", False, 'from django.contrib import messages\n'), ((858, 8, 858, 48), 'django.shortcuts.render', 'render', ({(858, 15, 858, 22): 'request', (858, 23, 858, 47): '"""recognition/train.html"""'}, {}), "(request, 'recognition/train.html')", False, 
'from django.shortcuts import render, redirect\n'), ((863, 8, 863, 57), 'django.shortcuts.render', 'render', ({(863, 15, 863, 22): 'request', (863, 23, 863, 56): '"""recognition/not_authorised.html"""'}, {}), "(request, 'recognition/not_authorised.html')", False, 'from django.shortcuts import render, redirect\n'), ((873, 8, 873, 144), 'django.shortcuts.render', 'render', ({(873, 15, 873, 22): 'request', (873, 23, 873, 62): '"""recognition/view_attendance_home.html"""', (873, 64, 873, 143): "{'total_num_of_emp': total_num_of_emp, 'emp_present_today': emp_present_today}"}, {}), "(request, 'recognition/view_attendance_home.html', {\n 'total_num_of_emp': total_num_of_emp, 'emp_present_today':\n emp_present_today})", False, 'from django.shortcuts import render, redirect\n'), ((78, 10, 78, 44), 'imutils.resize', 'imutils.resize', (), '', False, 'import imutils\n'), ((81, 15, 81, 54), 'cv2.cvtColor', 'cv2.cvtColor', ({(81, 28, 81, 33): 'frame', (81, 35, 81, 53): 'cv2.COLOR_BGR2GRAY'}, {}), '(frame, cv2.COLOR_BGR2GRAY)', False, 'import cv2\n'), ((125, 2, 125, 32), 'cv2.imshow', 'cv2.imshow', ({(125, 13, 125, 25): '"""Add Images"""', (125, 26, 125, 31): 'frame'}, {}), "('Add Images', frame)", False, 'import cv2\n'), ((128, 2, 128, 16), 'cv2.waitKey', 'cv2.waitKey', ({(128, 14, 128, 15): '(1)'}, {}), '(1)', False, 'import cv2\n'), ((142, 19, 142, 64), 'face_recognition.face_locations', 'face_recognition.face_locations', ({(142, 51, 142, 63): 'face_aligned'}, {}), '(face_aligned)', False, 'import face_recognition\n'), ((143, 18, 143, 101), 'face_recognition.face_encodings', 'face_recognition.face_encodings', (), '', False, 'import face_recognition\n'), ((165, 2, 165, 62), 'matplotlib.pyplot.scatter', 'plt.scatter', (), '', True, 'import matplotlib.pyplot as plt\n'), ((179, 7, 179, 40), 'django.contrib.auth.models.User.objects.get', 'User.objects.get', (), '', False, 'from django.contrib.auth.models import User\n'), ((211, 7, 211, 40), 'django.contrib.auth.models.User.objects.get', 'User.objects.get', (), '', False, 'from django.contrib.auth.models import User\n'), ((333, 2, 333, 91), 'matplotlib.pyplot.savefig', 'plt.savefig', ({(333, 14, 333, 90): '"""./recognition/static/recognition/img/attendance_graphs/hours_vs_date/1.png"""'}, {}), "(\n './recognition/static/recognition/img/attendance_graphs/hours_vs_date/1.png'\n )", True, 'import matplotlib.pyplot as plt\n'), ((334, 2, 334, 13), 'matplotlib.pyplot.close', 'plt.close', ({}, {}), '()', True, 'import matplotlib.pyplot as plt\n'), ((336, 2, 336, 92), 'matplotlib.pyplot.savefig', 'plt.savefig', ({(336, 14, 336, 91): '"""./recognition/static/recognition/img/attendance_graphs/employee_login/1.png"""'}, {}), "(\n './recognition/static/recognition/img/attendance_graphs/employee_login/1.png'\n )", True, 'import matplotlib.pyplot as plt\n'), ((337, 2, 337, 13), 'matplotlib.pyplot.close', 'plt.close', ({}, {}), '()', True, 'import matplotlib.pyplot as plt\n'), ((421, 26, 421, 52), 'datetime.timedelta', 'datetime.timedelta', (), '', False, 'import datetime\n'), ((423, 45, 423, 71), 'datetime.timedelta', 'datetime.timedelta', (), '', False, 'import datetime\n'), ((477, 26, 477, 52), 'datetime.timedelta', 'datetime.timedelta', (), '', False, 'import datetime\n'), ((479, 45, 479, 71), 'datetime.timedelta', 'datetime.timedelta', (), '', False, 'import datetime\n'), ((543, 9, 543, 60), 'django.shortcuts.render', 'render', ({(543, 16, 543, 23): 'request', (543, 25, 543, 59): '"""recognition/admin_dashboard.html"""'}, {}), "(request, 
'recognition/admin_dashboard.html')", False, 'from django.shortcuts import render, redirect\n'), ((547, 9, 547, 62), 'django.shortcuts.render', 'render', ({(547, 16, 547, 23): 'request', (547, 24, 547, 61): '"""recognition/employee_dashboard.html"""'}, {}), "(request, 'recognition/employee_dashboard.html')", False, 'from django.shortcuts import render, redirect\n'), ((552, 9, 552, 35), 'django.shortcuts.redirect', 'redirect', ({(552, 18, 552, 34): '"""not-authorised"""'}, {}), "('not-authorised')", False, 'from django.shortcuts import render, redirect\n'), ((570, 10, 570, 72), 'django.shortcuts.render', 'render', ({(570, 17, 570, 24): 'request', (570, 25, 570, 54): '"""recognition/add_photos.html"""', (570, 56, 570, 71): "{'form': form}"}, {}), "(request, 'recognition/add_photos.html', {'form': form})", False, 'from django.shortcuts import render, redirect\n'), ((588, 9, 588, 23), 'pickle.load', 'pickle.load', ({(588, 21, 588, 22): 'f'}, {}), '(f)', False, 'import pickle\n'), ((614, 10, 614, 44), 'imutils.resize', 'imutils.resize', (), '', False, 'import imutils\n'), ((616, 15, 616, 54), 'cv2.cvtColor', 'cv2.cvtColor', ({(616, 28, 616, 33): 'frame', (616, 35, 616, 53): 'cv2.COLOR_BGR2GRAY'}, {}), '(frame, cv2.COLOR_BGR2GRAY)', False, 'import cv2\n'), ((666, 2, 666, 60), 'cv2.imshow', 'cv2.imshow', ({(666, 13, 666, 53): '"""Mark Attendance - In - Press q to exit"""', (666, 54, 666, 59): 'frame'}, {}), "('Mark Attendance - In - Press q to exit', frame)", False, 'import cv2\n'), ((698, 9, 698, 23), 'pickle.load', 'pickle.load', ({(698, 21, 698, 22): 'f'}, {}), '(f)', False, 'import pickle\n'), ((724, 10, 724, 44), 'imutils.resize', 'imutils.resize', (), '', False, 'import imutils\n'), ((726, 15, 726, 54), 'cv2.cvtColor', 'cv2.cvtColor', ({(726, 28, 726, 33): 'frame', (726, 35, 726, 53): 'cv2.COLOR_BGR2GRAY'}, {}), '(frame, cv2.COLOR_BGR2GRAY)', False, 'import cv2\n'), ((776, 2, 776, 60), 'cv2.imshow', 'cv2.imshow', ({(776, 13, 776, 53): '"""Mark Attendance- Out - Press q to exit"""', (776, 54, 776, 59): 'frame'}, {}), "('Mark Attendance- Out - Press q to exit', frame)", False, 'import cv2\n'), ((799, 9, 799, 35), 'django.shortcuts.redirect', 'redirect', ({(799, 18, 799, 34): '"""not-authorised"""'}, {}), "('not-authorised')", False, 'from django.shortcuts import render, redirect\n'), ((807, 17, 807, 55), 'os.path.join', 'os.path.join', ({(807, 30, 807, 42): 'training_dir', (807, 43, 807, 54): 'person_name'}, {}), '(training_dir, person_name)', False, 'import os\n'), ((810, 19, 810, 56), 'face_recognition.face_recognition_cli.image_files_in_folder', 'image_files_in_folder', ({(810, 41, 810, 55): 'curr_directory'}, {}), '(curr_directory)', False, 'from face_recognition.face_recognition_cli import image_files_in_folder\n'), ((820, 17, 820, 55), 'os.path.join', 'os.path.join', ({(820, 30, 820, 42): 'training_dir', (820, 43, 820, 54): 'person_name'}, {}), '(training_dir, person_name)', False, 'import os\n'), ((823, 19, 823, 56), 'face_recognition.face_recognition_cli.image_files_in_folder', 'image_files_in_folder', ({(823, 41, 823, 55): 'curr_directory'}, {}), '(curr_directory)', False, 'from face_recognition.face_recognition_cli import image_files_in_folder\n'), ((851, 2, 851, 20), 'pickle.dump', 'pickle.dump', ({(851, 14, 851, 17): 'svc', (851, 18, 851, 19): 'f'}, {}), '(svc, f)', False, 'import pickle\n'), ((879, 9, 879, 35), 'django.shortcuts.redirect', 'redirect', ({(879, 18, 879, 34): '"""not-authorised"""'}, {}), "('not-authorised')", False, 'from django.shortcuts import render, 
redirect\n'), ((912, 10, 912, 93), 'django.shortcuts.render', 'render', ({(912, 17, 912, 24): 'request', (912, 25, 912, 64): '"""recognition/view_attendance_date.html"""', (912, 66, 912, 92): "{'form': form, 'qs': qs}"}, {}), "(request, 'recognition/view_attendance_date.html', {'form': form,\n 'qs': qs})", False, 'from django.shortcuts import render, redirect\n'), ((918, 9, 918, 35), 'django.shortcuts.redirect', 'redirect', ({(918, 18, 918, 34): '"""not-authorised"""'}, {}), "('not-authorised')", False, 'from django.shortcuts import render, redirect\n'), ((968, 10, 968, 96), 'django.shortcuts.render', 'render', ({(968, 17, 968, 24): 'request', (968, 25, 968, 68): '"""recognition/view_attendance_employee.html"""', (968, 70, 968, 95): "{'form': form, 'qs': qs}"}, {}), "(request, 'recognition/view_attendance_employee.html', {'form': form,\n 'qs': qs})", False, 'from django.shortcuts import render, redirect\n'), ((976, 9, 976, 35), 'django.shortcuts.redirect', 'redirect', ({(976, 18, 976, 34): '"""not-authorised"""'}, {}), "('not-authorised')", False, 'from django.shortcuts import render, redirect\n'), ((1008, 10, 1008, 105), 'django.shortcuts.render', 'render', ({(1008, 17, 1008, 24): 'request', (1008, 25, 1008, 77): '"""recognition/view_my_attendance_employee_login.html"""', (1008, 79, 1008, 104): "{'form': form, 'qs': qs}"}, {}), "(request, 'recognition/view_my_attendance_employee_login.html', {\n 'form': form, 'qs': qs})", False, 'from django.shortcuts import render, redirect\n'), ((43, 4, 43, 42), 'django.contrib.auth.models.User.objects.filter', 'User.objects.filter', (), '', False, 'from django.contrib.auth.models import User\n'), ((64, 6, 64, 24), 'imutils.video.VideoStream', 'VideoStream', (), '', False, 'from imutils.video import VideoStream\n'), ((94, 15, 94, 42), 'imutils.face_utils.rect_to_bb', 'face_utils.rect_to_bb', ({(94, 37, 94, 41): 'face'}, {}), '(face)', False, 'from imutils import face_utils\n'), ((112, 18, 112, 59), 'imutils.resize', 'imutils.resize', (), '', False, 'import imutils\n'), ((118, 3, 118, 51), 'cv2.rectangle', 'cv2.rectangle', ({(118, 17, 118, 22): 'frame', (118, 23, 118, 28): '(x, y)', (118, 29, 118, 38): '(x + w, y + h)', (118, 39, 118, 48): '(0, 255, 0)', (118, 49, 118, 50): '(1)'}, {}), '(frame, (x, y), (x + w, y + h), (0, 255, 0), 1)', False, 'import cv2\n'), ((121, 3, 121, 18), 'cv2.waitKey', 'cv2.waitKey', ({(121, 15, 121, 17): '(50)'}, {}), '(50)', False, 'import cv2\n'), ((152, 26, 152, 42), 'numpy.amax', 'np.amax', ({(152, 34, 152, 41): 'prob[0]'}, {}), '(prob[0])', True, 'import numpy as np\n'), ((161, 14, 161, 34), 'sklearn.manifold.TSNE', 'TSNE', (), '', False, 'from sklearn.manifold import TSNE\n'), ((181, 8, 181, 49), 'users.models.Present.objects.get', 'Present.objects.get', (), '', False, 'from users.models import Present, Time\n'), ((197, 5, 197, 52), 'users.models.Time', 'Time', (), '', False, 'from users.models import Present, Time\n'), ((213, 5, 213, 51), 'users.models.Time', 'Time', (), '', False, 'from users.models import Present, Time\n'), ((412, 4, 412, 38), 'users.models.Present.objects.filter', 'Present.objects.filter', (), '', False, 'from users.models import Present, Time\n'), ((424, 4, 424, 57), 'users.models.Present.objects.filter', 'Present.objects.filter', (), '', False, 'from users.models import Present, Time\n'), ((480, 4, 480, 57), 'users.models.Present.objects.filter', 'Present.objects.filter', (), '', False, 'from users.models import Present, Time\n'), ((559, 3, 559, 48), 'django.contrib.messages.success', 
'messages.success', ({(559, 20, 559, 27): 'request', (559, 29, 559, 47): 'f"""Dataset Created"""'}, {}), "(request, f'Dataset Created')", False, 'from django.contrib import messages\n'), ((560, 10, 560, 32), 'django.shortcuts.redirect', 'redirect', ({(560, 19, 560, 31): '"""add-photos"""'}, {}), "('add-photos')", False, 'from django.shortcuts import render, redirect\n'), ((562, 3, 562, 88), 'django.contrib.messages.warning', 'messages.warning', ({(562, 20, 562, 27): 'request', (562, 29, 562, 87): 'f"""No such username found. Please register employee first."""'}, {}), "(request,\n f'No such username found. Please register employee first.')", False, 'from django.contrib import messages\n'), ((563, 10, 563, 31), 'django.shortcuts.redirect', 'redirect', ({(563, 19, 563, 30): '"""dashboard"""'}, {}), "('dashboard')", False, 'from django.shortcuts import render, redirect\n'), ((606, 6, 606, 24), 'imutils.video.VideoStream', 'VideoStream', (), '', False, 'from imutils.video import VideoStream\n'), ((625, 15, 625, 42), 'imutils.face_utils.rect_to_bb', 'face_utils.rect_to_bb', ({(625, 37, 625, 41): 'face'}, {}), '(face)', False, 'from imutils import face_utils\n'), ((628, 3, 628, 51), 'cv2.rectangle', 'cv2.rectangle', ({(628, 17, 628, 22): 'frame', (628, 23, 628, 28): '(x, y)', (628, 29, 628, 38): '(x + w, y + h)', (628, 39, 628, 48): '(0, 255, 0)', (628, 49, 628, 50): '(1)'}, {}), '(frame, (x, y), (x + w, y + h), (0, 255, 0), 1)', False, 'import cv2\n'), ((671, 6, 671, 21), 'cv2.waitKey', 'cv2.waitKey', ({(671, 18, 671, 20): '(50)'}, {}), '(50)', False, 'import cv2\n'), ((716, 6, 716, 24), 'imutils.video.VideoStream', 'VideoStream', (), '', False, 'from imutils.video import VideoStream\n'), ((735, 15, 735, 42), 'imutils.face_utils.rect_to_bb', 'face_utils.rect_to_bb', ({(735, 37, 735, 41): 'face'}, {}), '(face)', False, 'from imutils import face_utils\n'), ((738, 3, 738, 51), 'cv2.rectangle', 'cv2.rectangle', ({(738, 17, 738, 22): 'frame', (738, 23, 738, 28): '(x, y)', (738, 29, 738, 38): '(x + w, y + h)', (738, 39, 738, 48): '(0, 255, 0)', (738, 49, 738, 50): '(1)'}, {}), '(frame, (x, y), (x + w, y + h), (0, 255, 0), 1)', False, 'import cv2\n'), ((781, 6, 781, 21), 'cv2.waitKey', 'cv2.waitKey', ({(781, 18, 781, 20): '(50)'}, {}), '(50)', False, 'import cv2\n'), ((808, 9, 808, 38), 'os.path.isdir', 'os.path.isdir', ({(808, 23, 808, 37): 'curr_directory'}, {}), '(curr_directory)', False, 'import os\n'), ((821, 9, 821, 38), 'os.path.isdir', 'os.path.isdir', ({(821, 23, 821, 37): 'curr_directory'}, {}), '(curr_directory)', False, 'import os\n'), ((825, 9, 825, 30), 'cv2.imread', 'cv2.imread', ({(825, 20, 825, 29): 'imagefile'}, {}), '(imagefile)', False, 'import cv2\n'), ((890, 11, 890, 41), 'users.models.Time.objects.filter', 'Time.objects.filter', (), '', False, 'from users.models import Present, Time\n'), ((891, 14, 891, 47), 'users.models.Present.objects.filter', 'Present.objects.filter', (), '', False, 'from users.models import Present, Time\n'), ((984, 11, 984, 38), 'users.models.Time.objects.filter', 'Time.objects.filter', (), '', False, 'from users.models import Present, Time\n'), ((985, 14, 985, 44), 'users.models.Present.objects.filter', 'Present.objects.filter', (), '', False, 'from users.models import Present, Time\n'), ((187, 8, 187, 50), 'users.models.Present', 'Present', (), '', False, 'from users.models import Present, Time\n'), ((190, 6, 190, 49), 'users.models.Present', 'Present', (), '', False, 'from users.models import Present, Time\n'), ((438, 5, 438, 38), 
'users.models.Present.objects.filter', 'Present.objects.filter', (), '', False, 'from users.models import Present, Time\n'), ((444, 31, 444, 59), 'datetime.timedelta', 'datetime.timedelta', (), '', False, 'import datetime\n'), ((493, 5, 493, 38), 'users.models.Present.objects.filter', 'Present.objects.filter', (), '', False, 'from users.models import Present, Time\n'), ((499, 31, 499, 59), 'datetime.timedelta', 'datetime.timedelta', (), '', False, 'import datetime\n'), ((896, 11, 896, 94), 'django.shortcuts.render', 'render', ({(896, 18, 896, 25): 'request', (896, 26, 896, 65): '"""recognition/view_attendance_date.html"""', (896, 67, 896, 93): "{'form': form, 'qs': qs}"}, {}), "(request, 'recognition/view_attendance_date.html', {'form': form,\n 'qs': qs})", False, 'from django.shortcuts import render, redirect\n'), ((898, 4, 898, 63), 'django.contrib.messages.warning', 'messages.warning', ({(898, 21, 898, 28): 'request', (898, 30, 898, 62): 'f"""No records for selected date."""'}, {}), "(request, f'No records for selected date.')", False, 'from django.contrib import messages\n'), ((899, 11, 899, 43), 'django.shortcuts.redirect', 'redirect', ({(899, 20, 899, 42): '"""view-attendance-date"""'}, {}), "('view-attendance-date')", False, 'from django.shortcuts import render, redirect\n'), ((929, 6, 929, 41), 'django.contrib.auth.models.User.objects.get', 'User.objects.get', (), '', False, 'from django.contrib.auth.models import User\n'), ((931, 12, 931, 39), 'users.models.Time.objects.filter', 'Time.objects.filter', (), '', False, 'from users.models import Present, Time\n'), ((932, 15, 932, 45), 'users.models.Present.objects.filter', 'Present.objects.filter', (), '', False, 'from users.models import Present, Time\n'), ((960, 4, 960, 57), 'django.contrib.messages.warning', 'messages.warning', ({(960, 21, 960, 28): 'request', (960, 30, 960, 56): 'f"""No such username found."""'}, {}), "(request, f'No such username found.')", False, 'from django.contrib import messages\n'), ((961, 11, 961, 47), 'django.shortcuts.redirect', 'redirect', ({(961, 20, 961, 46): '"""view-attendance-employee"""'}, {}), "('view-attendance-employee')", False, 'from django.shortcuts import render, redirect\n'), ((989, 5, 989, 58), 'django.contrib.messages.warning', 'messages.warning', ({(989, 22, 989, 29): 'request', (989, 31, 989, 57): 'f"""Invalid date selection."""'}, {}), "(request, f'Invalid date selection.')", False, 'from django.contrib import messages\n'), ((990, 12, 990, 57), 'django.shortcuts.redirect', 'redirect', ({(990, 21, 990, 56): '"""view-my-attendance-employee-login"""'}, {}), "('view-my-attendance-employee-login')", False, 'from django.shortcuts import render, redirect\n'), ((648, 22, 648, 45), 'datetime.datetime.now', 'datetime.datetime.now', ({}, {}), '()', False, 'import datetime\n'), ((758, 22, 758, 45), 'datetime.datetime.now', 'datetime.datetime.now', ({}, {}), '()', False, 'import datetime\n'), ((835, 4, 835, 24), 'os.remove', 'os.remove', ({(835, 14, 835, 23): 'imagefile'}, {}), '(imagefile)', False, 'import os\n'), ((937, 5, 937, 58), 'django.contrib.messages.warning', 'messages.warning', ({(937, 22, 937, 29): 'request', (937, 31, 937, 57): 'f"""Invalid date selection."""'}, {}), "(request, f'Invalid date selection.')", False, 'from django.contrib import messages\n'), ((938, 12, 938, 48), 'django.shortcuts.redirect', 'redirect', ({(938, 21, 938, 47): '"""view-attendance-employee"""'}, {}), "('view-attendance-employee')", False, 'from django.shortcuts import render, redirect\n'), ((999, 13, 999, 
108), 'django.shortcuts.render', 'render', ({(999, 20, 999, 27): 'request', (999, 28, 999, 80): '"""recognition/view_my_attendance_employee_login.html"""', (999, 82, 999, 107): "{'form': form, 'qs': qs}"}, {}), "(request, 'recognition/view_my_attendance_employee_login.html', {\n 'form': form, 'qs': qs})", False, 'from django.shortcuts import render, redirect\n'), ((1002, 6, 1002, 69), 'django.contrib.messages.warning', 'messages.warning', ({(1002, 23, 1002, 30): 'request', (1002, 32, 1002, 68): 'f"""No records for selected duration."""'}, {}), "(request, f'No records for selected duration.')", False, 'from django.contrib import messages\n'), ((1003, 13, 1003, 58), 'django.shortcuts.redirect', 'redirect', ({(1003, 22, 1003, 57): '"""view-my-attendance-employee-login"""'}, {}), "('view-my-attendance-employee-login')", False, 'from django.shortcuts import render, redirect\n'), ((637, 42, 637, 58), 'numpy.ravel', 'np.ravel', ({(637, 51, 637, 57): '[pred]'}, {}), '([pred])', True, 'import numpy as np\n'), ((747, 42, 747, 58), 'numpy.ravel', 'np.ravel', ({(747, 51, 747, 57): '[pred]'}, {}), '([pred])', True, 'import numpy as np\n'), ((947, 13, 947, 99), 'django.shortcuts.render', 'render', ({(947, 20, 947, 27): 'request', (947, 28, 947, 71): '"""recognition/view_attendance_employee.html"""', (947, 73, 947, 98): "{'form': form, 'qs': qs}"}, {}), "(request, 'recognition/view_attendance_employee.html', {'form': form,\n 'qs': qs})", False, 'from django.shortcuts import render, redirect\n'), ((950, 6, 950, 69), 'django.contrib.messages.warning', 'messages.warning', ({(950, 23, 950, 30): 'request', (950, 32, 950, 68): 'f"""No records for selected duration."""'}, {}), "(request, f'No records for selected duration.')", False, 'from django.contrib import messages\n'), ((951, 13, 951, 49), 'django.shortcuts.redirect', 'redirect', ({(951, 22, 951, 48): '"""view-attendance-employee"""'}, {}), "('view-attendance-employee')", False, 'from django.shortcuts import render, redirect\n'), ((827, 14, 827, 52), 'face_recognition.face_encodings', 'face_recognition.face_encodings', ({(827, 46, 827, 51): 'image'}, {}), '(image)', False, 'import face_recognition\n')] |
GillesArcas/Advent_of_Code | 2018/05.py | 1f57eb1686875df2684b0d56916b1d20724e9fb9 | import re
import string
DATA = '05.txt'
def react(polymer):
pairs = '|'.join([a + b + '|' + b + a for a, b in zip(string.ascii_lowercase, string.ascii_uppercase)])
length = len(polymer)
while 1:
polymer = re.sub(pairs, '', polymer)
if len(polymer) == length:
            return length
else:
length = len(polymer)
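# Example: the puzzle's sample polymer "dabAcCaCBAcCcaDA" fully reacts down to
# "dabCBAcaDA", so react("dabAcCaCBAcCcaDA") returns 10.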
def code1():
with open(DATA) as f:
polymer = f.readline().strip()
print('1>', react(polymer))
def code2():
with open(DATA) as f:
polymer = f.readline().strip()
minlength = len(polymer)
for c in string.ascii_lowercase:
polymer2 = re.sub(c, '', polymer, flags=re.I)
length = react(polymer2)
if length < minlength:
minlength = length
print('2>', minlength)
code1()
code2()
| [((12, 18, 12, 44), 're.sub', 're.sub', ({(12, 25, 12, 30): 'pairs', (12, 32, 12, 34): '""""""', (12, 36, 12, 43): 'polymer'}, {}), "(pairs, '', polymer)", False, 'import re\n'), ((30, 19, 30, 53), 're.sub', 're.sub', (), '', False, 'import re\n')] |
felix-lang/fbuild | lib/fbuild/builders/__init__.py | 9595fbfd6d3ceece31fda2f96c35d4a241f0129b | import abc
import contextlib
import os
import sys
from functools import partial
from itertools import chain
import fbuild
import fbuild.db
import fbuild.path
import fbuild.temp
from . import platform
# ------------------------------------------------------------------------------
class MissingProgram(fbuild.ConfigFailed):
def __init__(self, programs=None):
self.programs = programs
def __str__(self):
if self.programs is None:
return 'cannot find program'
else:
return 'cannot find any of the programs %s' % \
' '.join(repr(str(p)) for p in self.programs)
# ------------------------------------------------------------------------------
@fbuild.db.caches
def find_program(ctx, names, paths=None, *, quieter=0):
"""L{find_program} is a test that searches the paths for one of the
    programs in I{names}. If one is found, it is returned. If not, the next
name in the list is searched for."""
if paths is None:
paths = os.environ['PATH'].split(os.pathsep)
# If we're running on windows, we need to append '.exe' to the filenames
# that we're searching for.
if sys.platform == 'win32':
new_names = []
for name in names:
            if not (name.endswith('.exe') or
                    name.endswith('.cmd') or
                    name.endswith('.bat')):
                new_names.append(name + '.exe')
                new_names.append(name + '.cmd')
                new_names.append(name + '.bat')
            new_names.append(name)
names = new_names
for name in names:
filename = fbuild.path.Path(name)
ctx.logger.check('looking for ' + filename.name, verbose=quieter)
if filename.exists() and filename.isfile():
ctx.logger.passed('ok %s' % filename, verbose=quieter)
return fbuild.path.Path(name)
else:
for path in paths:
filename = fbuild.path.Path(path, name)
if filename.exists() and filename.isfile():
ctx.logger.passed('ok %s' % filename, verbose=quieter)
return fbuild.path.Path(filename)
ctx.logger.failed(verbose=quieter)
raise MissingProgram(names)
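# Hypothetical usage sketch (the program names below are illustrative, not part
# of fbuild itself):
#
#     gcc = find_program(ctx, ['gcc-12', 'gcc', 'cc'])
#
# Each name is checked as-is and then against every directory in `paths`
# (defaulting to $PATH); the first existing file is returned as an
# fbuild.path.Path, otherwise MissingProgram is raised.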
# ------------------------------------------------------------------------------
def check_version(ctx, builder, version_function, *,
requires_version=None,
requires_at_least_version=None,
requires_at_most_version=None):
"""Helper function to simplify checking the version of a builder."""
if any(v is not None for v in (
requires_version,
requires_at_least_version,
requires_at_most_version)):
ctx.logger.check('checking %s version' % builder)
version_str = version_function()
# Convert the version into a tuple
version = []
for i in version_str.split('.'):
try:
version.append(int(i))
except ValueError:
# The subversion isn't a number, so just convert it to a
# string.
version.append(i)
version = tuple(version)
if requires_version is not None and requires_version != version:
msg = 'version %s required; found %s' % (
'.'.join(str(i) for i in requires_version), version_str)
ctx.logger.failed(msg)
raise fbuild.ConfigFailed(msg)
if requires_at_least_version is not None and \
requires_at_least_version > version:
msg = 'at least version %s required; found %s' % (
'.'.join(str(i) for i in requires_at_least_version),
version_str)
ctx.logger.failed(msg)
raise fbuild.ConfigFailed(msg)
if requires_at_most_version is not None and \
requires_at_most_version < version:
msg = 'at most version %s required; found %s' % (
'.'.join(str(i) for i in requires_at_most_version),
version_str)
ctx.logger.failed(msg)
raise fbuild.ConfigFailed(msg)
ctx.logger.passed(version_str)
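# Hypothetical call sketch (the builder name and version values are illustrative):
#
#     check_version(ctx, 'gcc', lambda: '9.4.0',
#                   requires_at_least_version=(4, 9))
#
# The reported version string is split on '.' into a tuple such as (9, 4, 0)
# before being compared against the requested bounds.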
# ------------------------------------------------------------------------------
class AbstractCompiler(fbuild.db.PersistentObject):
def __init__(self, *args, src_suffix, **kwargs):
super().__init__(*args, **kwargs)
self.src_suffix = src_suffix
@fbuild.db.cachemethod
def compile(self, src:fbuild.db.SRC, *args, **kwargs) -> fbuild.db.DST:
return self.uncached_compile(src, *args, **kwargs)
@abc.abstractmethod
def uncached_compile(self, src, *args, **kwargs):
pass
@fbuild.db.cachemethod
@platform.auto_platform_options()
def build_objects(self, srcs:fbuild.db.SRCS, *args, **kwargs) -> \
fbuild.db.DSTS:
"""Compile all of the passed in L{srcs} in parallel."""
        # When an object has extra external dependencies, such as .c files
# depending on .h changes, depending on library changes, we need to add
# the dependencies in build_objects. Unfortunately, the db doesn't
# know about these new files and so it can't tell when a function
# really needs to be rerun. So, we'll just not cache this function.
# We need to add extra dependencies to our call.
objs = []
src_deps = []
dst_deps = []
for o, s, d in self.ctx.scheduler.map(
partial(self.compile.call, *args, **kwargs),
srcs):
objs.append(o)
src_deps.extend(s)
dst_deps.extend(d)
self.ctx.db.add_external_dependencies_to_call(
srcs=src_deps,
dsts=dst_deps)
return objs
# --------------------------------------------------------------------------
def tempfile(self, code):
return fbuild.temp.tempfile(code, self.src_suffix)
@contextlib.contextmanager
def tempfile_compile(self, code='', *, quieter=1, **kwargs):
with self.tempfile(code) as src:
yield self.uncached_compile(src, quieter=quieter, **kwargs)
@platform.auto_platform_options()
def try_compile(self, *args, **kwargs):
try:
with self.tempfile_compile(*args, **kwargs):
return True
except fbuild.ExecutionError:
return False
@platform.auto_platform_options()
def check_compile(self, code, msg, *args, **kwargs):
self.ctx.logger.check(msg)
if self.try_compile(code, *args, **kwargs):
self.ctx.logger.passed()
return True
else:
self.ctx.logger.failed()
return False
# ------------------------------------------------------------------------------
class AbstractLibLinker(AbstractCompiler):
@fbuild.db.cachemethod
@platform.auto_platform_options()
def link_lib(self, dst, srcs:fbuild.db.SRCS, *args,
libs:fbuild.db.SRCS=(),
**kwargs) -> fbuild.db.DST:
"""Link compiled files into a library and caches the results."""
return self.uncached_link_lib(dst, srcs, *args, libs=libs, **kwargs)
@abc.abstractmethod
def uncached_link_lib(self, *args, **kwargs):
pass
@platform.auto_platform_options()
def build_lib(self, dst, srcs, *, objs=(), libs=(), ckwargs={}, lkwargs={}):
"""Compile all of the passed in L{srcs} in parallel, then link them
into a library."""
objs = tuple(chain(objs, self.build_objects(srcs, **ckwargs)))
return self.link_lib(dst, objs, libs=libs, **lkwargs)
# --------------------------------------------------------------------------
@contextlib.contextmanager
@platform.auto_platform_options()
def tempfile_link_lib(self, code='', *, quieter=1, ckwargs={}, **kwargs):
with self.tempfile(code) as src:
dst = src.parent / 'temp'
obj = self.uncached_compile(src, quieter=quieter, **ckwargs)
yield self.uncached_link_lib(dst, [obj], quieter=quieter, **kwargs)
def try_link_lib(self, *args, **kwargs):
try:
with self.tempfile_link_lib(*args, **kwargs):
return True
except fbuild.ExecutionError:
return False
def check_link_lib(self, code, msg, *args, **kwargs):
self.ctx.logger.check(msg)
if self.try_link_lib(code, *args, **kwargs):
self.ctx.logger.passed()
return True
else:
self.ctx.logger.failed()
return False
# ------------------------------------------------------------------------------
class AbstractRunner(fbuild.db.PersistentObject):
@abc.abstractmethod
def tempfile_run(self, *args, **kwargs):
pass
def try_run(self, code='', quieter=1, **kwargs):
try:
self.tempfile_run(code, quieter=quieter, **kwargs)
except fbuild.ExecutionError:
return False
else:
return True
def check_run(self, code, msg, *args, **kwargs):
self.ctx.logger.check(msg)
if self.try_run(code, *args, **kwargs):
self.ctx.logger.passed()
return True
else:
self.ctx.logger.failed()
return False
# ------------------------------------------------------------------------------
class AbstractExeLinker(AbstractCompiler, AbstractRunner):
@fbuild.db.cachemethod
@platform.auto_platform_options()
def link_exe(self, dst, srcs:fbuild.db.SRCS, *args,
libs:fbuild.db.SRCS=(),
**kwargs) -> fbuild.db.DST:
"""Link compiled files into an executable."""
return self.uncached_link_exe(dst, srcs, *args, libs=libs, **kwargs)
@abc.abstractmethod
def uncached_link_exe(self, *args, **kwargs):
pass
@platform.auto_platform_options()
def build_exe(self, dst, srcs, *, objs=(), libs=(), ckwargs={}, lkwargs={}):
"""Compile all of the passed in L{srcs} in parallel, then link them
into an executable."""
objs = tuple(chain(objs, self.build_objects(srcs, **ckwargs)))
return self.link_exe(dst, objs, libs=libs, **lkwargs)
# --------------------------------------------------------------------------
@contextlib.contextmanager
@platform.auto_platform_options()
def tempfile_link_exe(self, code='', *, quieter=1, ckwargs={}, **kwargs):
with self.tempfile(code) as src:
dst = src.parent / 'temp'
obj = self.uncached_compile(src, quieter=quieter, **ckwargs)
yield self.uncached_link_exe(dst, [obj], quieter=quieter, **kwargs)
@platform.auto_platform_options()
def try_link_exe(self, *args, **kwargs):
try:
with self.tempfile_link_exe(*args, **kwargs):
return True
except fbuild.ExecutionError:
return False
@platform.auto_platform_options()
def check_link_exe(self, code, msg, *args, **kwargs):
self.ctx.logger.check(msg)
if self.try_link_exe(code, *args, **kwargs):
self.ctx.logger.passed()
return True
else:
self.ctx.logger.failed()
return False
@platform.auto_platform_options()
def tempfile_run(self, *args, quieter=1, ckwargs={}, lkwargs={}, **kwargs):
with self.tempfile_link_exe(*args,
quieter=quieter,
ckwargs=ckwargs,
**lkwargs) as exe:
return self.ctx.execute([exe],
quieter=quieter,
cwd=exe.parent,
**kwargs)
# ------------------------------------------------------------------------------
class AbstractCompilerBuilder(AbstractLibLinker, AbstractExeLinker):
pass
| [((54, 19, 54, 41), 'fbuild.path.Path', 'fbuild.path.Path', ({(54, 36, 54, 40): 'name'}, {}), '(name)', False, 'import fbuild\n'), ((170, 15, 170, 58), 'fbuild.temp.tempfile', 'fbuild.temp.tempfile', ({(170, 36, 170, 40): 'code', (170, 42, 170, 57): 'self.src_suffix'}, {}), '(code, self.src_suffix)', False, 'import fbuild\n'), ((59, 19, 59, 41), 'fbuild.path.Path', 'fbuild.path.Path', ({(59, 36, 59, 40): 'name'}, {}), '(name)', False, 'import fbuild\n'), ((102, 18, 102, 42), 'fbuild.ConfigFailed', 'fbuild.ConfigFailed', ({(102, 38, 102, 41): 'msg'}, {}), '(msg)', False, 'import fbuild\n'), ((111, 18, 111, 42), 'fbuild.ConfigFailed', 'fbuild.ConfigFailed', ({(111, 38, 111, 41): 'msg'}, {}), '(msg)', False, 'import fbuild\n'), ((120, 18, 120, 42), 'fbuild.ConfigFailed', 'fbuild.ConfigFailed', ({(120, 38, 120, 41): 'msg'}, {}), '(msg)', False, 'import fbuild\n'), ((155, 16, 155, 59), 'functools.partial', 'partial', ({(155, 24, 155, 41): 'self.compile.call', (155, 43, 155, 48): '*args'}, {}), '(self.compile.call, *args, **kwargs)', False, 'from functools import partial\n'), ((62, 27, 62, 55), 'fbuild.path.Path', 'fbuild.path.Path', ({(62, 44, 62, 48): 'path', (62, 50, 62, 54): 'name'}, {}), '(path, name)', False, 'import fbuild\n'), ((65, 27, 65, 53), 'fbuild.path.Path', 'fbuild.path.Path', ({(65, 44, 65, 52): 'filename'}, {}), '(filename)', False, 'import fbuild\n')] |
i3uex/CompareML | WebServer.py | 3d53d58117507db11ad08ca0b1c883ec0997840e | import json
import cherrypy
import engine
class WebServer(object):
@cherrypy.expose
def index(self):
return open('public/index.html', encoding='utf-8')
@cherrypy.expose
class GetOptionsService(object):
@cherrypy.tools.accept(media='text/plain')
def GET(self):
return json.dumps({
'providers': engine.get_providers(),
'algorithms': engine.get_algorithms(),
'default_datasets': engine.get_all_default_datasets()
})
@cherrypy.expose
class SetOptionsService(object):
@cherrypy.tools.accept(media='text/plain')
def POST(self, options):
""" Use the options selected by the user to execute all algorithms
:param options: {
is_default_dataset: bool,
dataset: str,
providers: []
algorithms: []
target: str
}
if is_default_dataset is true, dataset will contain the name of the default_dataset"""
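        # Illustrative request body matching the schema above (the field values
        # are assumptions, not taken from the project):
        #
        #     {
        #         "is_default_dataset": true,
        #         "dataset": "iris",
        #         "providers": ["scikit-learn"],
        #         "algorithms": ["decision_tree"],
        #         "target": "species"
        #     }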
options_dic = json.loads(options)
try:
result = engine.execute(options_dic['is_default_dataset'], options_dic['dataset'], options_dic['providers'],
options_dic['algorithms'],
options_dic['target'])
except Exception as exception:
            message = str(exception)
raise cherrypy.HTTPError(500, message=message)
return result
@cherrypy.expose
@cherrypy.tools.json_out()
class GetDefaultDatasetHeadersService(object):
@cherrypy.tools.accept(media='text/plain')
def GET(self, default_dataset_name):
return {'headers': engine.get_default_dataset_headers(default_dataset_name)}
| [((53, 1, 53, 26), 'cherrypy.tools.json_out', 'cherrypy.tools.json_out', ({}, {}), '()', False, 'import cherrypy\n'), ((17, 5, 17, 46), 'cherrypy.tools.accept', 'cherrypy.tools.accept', (), '', False, 'import cherrypy\n'), ((28, 5, 28, 46), 'cherrypy.tools.accept', 'cherrypy.tools.accept', (), '', False, 'import cherrypy\n'), ((55, 5, 55, 46), 'cherrypy.tools.accept', 'cherrypy.tools.accept', (), '', False, 'import cherrypy\n'), ((40, 22, 40, 41), 'json.loads', 'json.loads', ({(40, 33, 40, 40): 'options'}, {}), '(options)', False, 'import json\n'), ((43, 21, 45, 58), 'engine.execute', 'engine.execute', ({(43, 36, 43, 69): "options_dic['is_default_dataset']", (43, 71, 43, 93): "options_dic['dataset']", (43, 95, 43, 119): "options_dic['providers']", (44, 36, 44, 61): "options_dic['algorithms']", (45, 36, 45, 57): "options_dic['target']"}, {}), "(options_dic['is_default_dataset'], options_dic['dataset'],\n options_dic['providers'], options_dic['algorithms'], options_dic['target'])", False, 'import engine\n'), ((57, 27, 57, 83), 'engine.get_default_dataset_headers', 'engine.get_default_dataset_headers', ({(57, 62, 57, 82): 'default_dataset_name'}, {}), '(default_dataset_name)', False, 'import engine\n'), ((20, 25, 20, 47), 'engine.get_providers', 'engine.get_providers', ({}, {}), '()', False, 'import engine\n'), ((21, 26, 21, 49), 'engine.get_algorithms', 'engine.get_algorithms', ({}, {}), '()', False, 'import engine\n'), ((22, 32, 22, 65), 'engine.get_all_default_datasets', 'engine.get_all_default_datasets', ({}, {}), '()', False, 'import engine\n'), ((48, 18, 48, 58), 'cherrypy.HTTPError', 'cherrypy.HTTPError', (), '', False, 'import cherrypy\n')] |
DavideEva/2ppy | tuprolog/solve/exception/error/existence/__init__.py | 55609415102f8116165a42c8e33e029c4906e160 | from typing import Union
from tuprolog import logger
# noinspection PyUnresolvedReferences
import jpype.imports
# noinspection PyUnresolvedReferences
import it.unibo.tuprolog.solve.exception.error as errors
from tuprolog.core import Term, Atom
from tuprolog.solve import ExecutionContext, Signature
ExistenceError = errors.ExistenceError
ObjectType = ExistenceError.ObjectType
OBJECT_PROCEDURE = ObjectType.PROCEDURE
OBJECT_SOURCE_SINK = ObjectType.SOURCE_SINK
OBJECT_RESOURCE = ObjectType.RESOURCE
OBJECT_STREAM = ObjectType.STREAM
OBJECT_OOP_ALIAS = ObjectType.OOP_ALIAS
OBJECT_OOP_METHOD = ObjectType.OOP_METHOD
OBJECT_OOP_CONSTRUCTOR = ObjectType.OOP_CONSTRUCTOR
OBJECT_OOP_PROPERTY = ObjectType.OOP_PROPERTY
def existence_error(
context: ExecutionContext,
type: ObjectType,
culprit: Term,
message: str
) -> ExistenceError:
return ExistenceError.of(context, type, culprit, message)
def existence_error_for_source_sink(
context: ExecutionContext,
alias: Union[Atom, str]
) -> ExistenceError:
return ExistenceError.forSourceSink(context, alias)
def existence_error_for_procedure(
context: ExecutionContext,
procedure: Signature
) -> ExistenceError:
return ExistenceError.forProcedure(context, procedure)
def existence_error_for_stream(
context: ExecutionContext,
stream: Term
) -> ExistenceError:
return ExistenceError.forStream(context, stream)
def existence_error_for_resource(
context: ExecutionContext,
name: str
) -> ExistenceError:
return ExistenceError.forResource(context, name)
def object_type(name: Union[str, Term]) -> ObjectType:
if isinstance(name, str):
return ObjectType.of(name)
else:
return ObjectType.fromTerm(name)
logger.debug("Loaded JVM classes from it.unibo.tuprolog.solve.exception.error.ExistenceError.*")
| [((75, 0, 75, 96), 'tuprolog.logger.debug', 'logger.debug', ({(75, 13, 75, 95): '"""Loaded JVM classes from it.unibo.tuprolog.solve.exception.error.ExistenceError.*"""'}, {}), "(\n 'Loaded JVM classes from it.unibo.tuprolog.solve.exception.error.ExistenceError.*'\n )", False, 'from tuprolog import logger\n')] |
RealA10N/cptk | cptk/core/fetcher.py | e500d948e91bb70661adc3c2539b149704c734a1 | from __future__ import annotations
from typing import TYPE_CHECKING
import pkg_resources
from bs4 import BeautifulSoup
from requests import session
from cptk.scrape import PageInfo
from cptk.scrape import Website
from cptk.utils import cptkException
if TYPE_CHECKING:
from cptk.scrape import Problem
class InvalidClone(cptkException):
""" Raised when the clone command is called with a 'PageInfo' instance that
doesn't describe anything that can be cloned. """
def __init__(self, info: PageInfo) -> None:
self.info = info
super().__init__(f"We don't know how to handle data from {info.url!r}")
class UnknownWebsite(cptkException):
""" Raised when trying to fetch information from a website that is not
    registered and can't be handled by cptk. """
def __init__(self, domain: str) -> None:
self.domain = domain
super().__init__(f"We don't know how to handle data from {domain!r}")
class Fetcher:
def __init__(self) -> None:
self.session = session()
self._load_websites()
def _load_websites(self) -> list[type[Website]]:
self._websites = [
point.load()()
for point in pkg_resources.iter_entry_points('cptk_sites')
]
self._domain_to_website = dict()
for website in self._websites:
domain = website.domain
if isinstance(domain, str):
self._domain_to_website[domain] = website
else:
for cur in domain:
self._domain_to_website[cur] = website
def page_to_problem(self, info: PageInfo) -> Problem:
""" Recives an arbitrary page info instance and tries to match it with
a Website class that knows how to handle this specific website. If cptk
doesn't find a way to parse the given webpage, it raises the
'InvalidClone' exception. """
for website in self._websites:
if website.is_problem(info):
return website.to_problem(info)
raise InvalidClone(info)
def to_page(self, url: str) -> PageInfo:
""" Makes an get http/s request to the given URL and returns the result
as a PageInfo instance. """
if not url.startswith('http'):
url = f'http://{url}'
res = self.session.get(url)
data = BeautifulSoup(res.content, 'lxml')
return PageInfo(url, data)
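# Minimal usage sketch (the URL is a placeholder, and the set of supported
# websites depends on which 'cptk_sites' entry points are installed):
#
#     fetcher = Fetcher()
#     info = fetcher.to_page('https://example.com/problem/1')
#     problem = fetcher.page_to_problem(info)  # raises InvalidClone if no
#                                              # registered Website matches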
| [((38, 23, 38, 32), 'requests.session', 'session', ({}, {}), '()', False, 'from requests import session\n'), ((75, 15, 75, 49), 'bs4.BeautifulSoup', 'BeautifulSoup', ({(75, 29, 75, 40): 'res.content', (75, 42, 75, 48): '"""lxml"""'}, {}), "(res.content, 'lxml')", False, 'from bs4 import BeautifulSoup\n'), ((76, 15, 76, 34), 'cptk.scrape.PageInfo', 'PageInfo', ({(76, 24, 76, 27): 'url', (76, 29, 76, 33): 'data'}, {}), '(url, data)', False, 'from cptk.scrape import PageInfo\n'), ((44, 25, 44, 70), 'pkg_resources.iter_entry_points', 'pkg_resources.iter_entry_points', ({(44, 57, 44, 69): '"""cptk_sites"""'}, {}), "('cptk_sites')", False, 'import pkg_resources\n')] |
Hinson-A/guyueclass | machine_learning/deep_reinforcement_learning_grasping/drlgrasp/drlgrasp/pybullet_envs/kuka_reach_with_visual.py | e59129526729542dccefa6c7232378a00dc0175a | import pybullet as p
import pybullet_data
import gym
from gym import spaces
from gym.utils import seeding
import numpy as np
from math import sqrt
import random
import time
import math
import cv2
import torch
import os
def random_crop(imgs, out):
"""
args:
imgs: shape (B,C,H,W)
out: output size (e.g. 84)
"""
n, c, h, w = imgs.shape
crop_max = h - out + 1
w1 = np.random.randint(0, crop_max, n)
h1 = np.random.randint(0, crop_max, n)
cropped = np.empty((n, c, out, out), dtype=imgs.dtype)
for i, (img, w11, h11) in enumerate(zip(imgs, w1, h1)):
cropped[i] = img[:, h11:h11 + out, w11:w11 + out]
return cropped
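# Shape sketch for random_crop (the batch below is a made-up example):
#
#     batch = np.zeros((8, 4, 96, 96), dtype=np.float32)  # (B, C, H, W)
#     crops = random_crop(batch, 84)                       # -> (8, 4, 84, 84)
#
# Each image in the batch gets its own randomly positioned 84x84 window.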
class KukaReachVisualEnv(gym.Env):
metadata = {
'render.modes': ['human', 'rgb_array'],
'video.frames_per_second': 50
}
kMaxEpisodeSteps = 700
kImageSize = {'width': 96, 'height': 96}
kFinalImageSize = {'width': 84, 'height': 84}
def __init__(self, is_render=False, is_good_view=False):
self.is_render = is_render
self.is_good_view = is_good_view
if self.is_render:
p.connect(p.GUI)
else:
p.connect(p.DIRECT)
self.x_low_obs = 0.2
self.x_high_obs = 0.7
self.y_low_obs = -0.3
self.y_high_obs = 0.3
self.z_low_obs = 0
self.z_high_obs = 0.55
self.x_low_action = -0.4
self.x_high_action = 0.4
self.y_low_action = -0.4
self.y_high_action = 0.4
self.z_low_action = -0.6
self.z_high_action = 0.3
self.step_counter = 0
self.urdf_root_path = pybullet_data.getDataPath()
# lower limits for null space
self.lower_limits = [-.967, -2, -2.96, 0.19, -2.96, -2.09, -3.05]
# upper limits for null space
self.upper_limits = [.967, 2, 2.96, 2.29, 2.96, 2.09, 3.05]
# joint ranges for null space
self.joint_ranges = [5.8, 4, 5.8, 4, 5.8, 4, 6]
# restposes for null space
self.rest_poses = [0, 0, 0, 0.5 * math.pi, 0, -math.pi * 0.5 * 0.66, 0]
# joint damping coefficents
self.joint_damping = [
0.00001, 0.00001, 0.00001, 0.00001, 0.00001, 0.00001, 0.00001
]
self.init_joint_positions = [
0.006418, 0.413184, -0.011401, -1.589317, 0.005379, 1.137684,
-0.006539
]
self.orientation = p.getQuaternionFromEuler(
[0., -math.pi, math.pi / 2.])
self.camera_parameters = {
'width': 960.,
'height': 720,
'fov': 60,
'near': 0.1,
'far': 100.,
'eye_position': [0.59, 0, 0.8],
'target_position': [0.55, 0, 0.05],
'camera_up_vector':
[1, 0, 0], # I really do not know the parameter's effect.
'light_direction': [
0.5, 0, 1
], # the direction is from the light source position to the origin of the world frame.
}
self.view_matrix = p.computeViewMatrixFromYawPitchRoll(
cameraTargetPosition=[0.55, 0, 0.05],
distance=.7,
yaw=90,
pitch=-70,
roll=0,
upAxisIndex=2)
self.projection_matrix = p.computeProjectionMatrixFOV(
fov=self.camera_parameters['fov'],
aspect=self.camera_parameters['width'] /
self.camera_parameters['height'],
nearVal=self.camera_parameters['near'],
farVal=self.camera_parameters['far'])
p.configureDebugVisualizer(lightPosition=[5, 0, 5])
p.resetDebugVisualizerCamera(cameraDistance=1.5,
cameraYaw=0,
cameraPitch=-40,
cameraTargetPosition=[0.55, -0.35, 0.2])
self.action_space = spaces.Box(low=np.array(
[self.x_low_action, self.y_low_action, self.z_low_action]),
high=np.array([
self.x_high_action,
self.y_high_action,
self.z_high_action
]),
dtype=np.float32)
self.observation_space = spaces.Box(low=0, high=1,
shape=(1, self.kFinalImageSize['width'], self.kFinalImageSize['height']))
self.seed()
self.reset()
def seed(self, seed=None):
self.np_random, seed = seeding.np_random(seed)
return [seed]
def reset(self):
self.step_counter = 0
p.resetSimulation()
# p.configureDebugVisualizer(p.COV_ENABLE_RENDERING, 0)
self.terminated = False
p.setGravity(0, 0, -10)
        # Draw the surrounding white debug lines that mark the observation-space boundary, so it is easy to see when the arm crosses it.
p.addUserDebugLine(
lineFromXYZ=[self.x_low_obs, self.y_low_obs, 0],
lineToXYZ=[self.x_low_obs, self.y_low_obs, self.z_high_obs])
p.addUserDebugLine(
lineFromXYZ=[self.x_low_obs, self.y_high_obs, 0],
lineToXYZ=[self.x_low_obs, self.y_high_obs, self.z_high_obs])
p.addUserDebugLine(
lineFromXYZ=[self.x_high_obs, self.y_low_obs, 0],
lineToXYZ=[self.x_high_obs, self.y_low_obs, self.z_high_obs])
p.addUserDebugLine(
lineFromXYZ=[self.x_high_obs, self.y_high_obs, 0],
lineToXYZ=[self.x_high_obs, self.y_high_obs, self.z_high_obs])
p.addUserDebugLine(
lineFromXYZ=[self.x_low_obs, self.y_low_obs, self.z_high_obs],
lineToXYZ=[self.x_high_obs, self.y_low_obs, self.z_high_obs])
p.addUserDebugLine(
lineFromXYZ=[self.x_low_obs, self.y_high_obs, self.z_high_obs],
lineToXYZ=[self.x_high_obs, self.y_high_obs, self.z_high_obs])
p.addUserDebugLine(
lineFromXYZ=[self.x_low_obs, self.y_low_obs, self.z_high_obs],
lineToXYZ=[self.x_low_obs, self.y_high_obs, self.z_high_obs])
p.addUserDebugLine(
lineFromXYZ=[self.x_high_obs, self.y_low_obs, self.z_high_obs],
lineToXYZ=[self.x_high_obs, self.y_high_obs, self.z_high_obs])
p.loadURDF(os.path.join(self.urdf_root_path, "plane.urdf"),
basePosition=[0, 0, -0.65])
self.kuka_id = p.loadURDF(os.path.join(self.urdf_root_path,
"kuka_iiwa/model.urdf"),
useFixedBase=True)
table_uid = p.loadURDF(os.path.join(self.urdf_root_path,
"table/table.urdf"),
basePosition=[0.5, 0, -0.65])
p.changeVisualShape(table_uid, -1, rgbaColor=[1, 1, 1, 1])
self.object_id = p.loadURDF(os.path.join(self.urdf_root_path,
"random_urdfs/000/000.urdf"),
basePosition=[
random.uniform(self.x_low_obs,
self.x_high_obs),
random.uniform(self.y_low_obs,
self.y_high_obs), 0.01
])
self.num_joints = p.getNumJoints(self.kuka_id)
for i in range(self.num_joints):
p.resetJointState(
bodyUniqueId=self.kuka_id,
jointIndex=i,
targetValue=self.init_joint_positions[i],
)
self.robot_pos_obs = p.getLinkState(self.kuka_id,
self.num_joints - 1)[4]
p.stepSimulation()
(_, _, px, _,
_) = p.getCameraImage(width=960,
height=960,
viewMatrix=self.view_matrix,
projectionMatrix=self.projection_matrix,
renderer=p.ER_BULLET_HARDWARE_OPENGL)
self.images = px
p.enableJointForceTorqueSensor(bodyUniqueId=self.kuka_id,
jointIndex=self.num_joints - 1,
enableSensor=True)
self.object_pos = p.getBasePositionAndOrientation(self.object_id)[0]
self.images = self.images[:, :, :
3] # the 4th channel is alpha channel, we do not need it.
return self._process_image(self.images)
def _process_image(self, image):
"""Convert the RGB pic to gray pic and add a channel 1
Args:
image ([type]): [description]
"""
if image is not None:
image = cv2.cvtColor(image, cv2.COLOR_RGB2GRAY)
image = cv2.resize(image, (self.kImageSize['width'], self.kImageSize['height']))[None, :, :] / 255.
return image
else:
return np.zeros((1, self.kImageSize['width'], self.kImageSize['height']))
def step(self, action):
dv = 0.005
dx = action[0] * dv
dy = action[1] * dv
dz = action[2] * dv
self.current_pos = p.getLinkState(self.kuka_id, self.num_joints - 1)[4]
self.new_robot_pos = [
self.current_pos[0] + dx, self.current_pos[1] + dy,
self.current_pos[2] + dz
]
self.robot_joint_positions = p.calculateInverseKinematics(
bodyUniqueId=self.kuka_id,
endEffectorLinkIndex=self.num_joints - 1,
targetPosition=[
self.new_robot_pos[0], self.new_robot_pos[1],
self.new_robot_pos[2]
],
targetOrientation=self.orientation,
jointDamping=self.joint_damping,
)
for i in range(self.num_joints):
p.resetJointState(
bodyUniqueId=self.kuka_id,
jointIndex=i,
targetValue=self.robot_joint_positions[i],
)
p.stepSimulation()
        # If is_good_view was enabled in the constructor, slow the motion down so it is easier to watch.
if self.is_good_view:
time.sleep(0.05)
self.step_counter += 1
return self._reward()
def _reward(self):
        # Be sure to take element index 4 (the world link frame position); see the pybullet manual for getLinkState's return values.
self.robot_state = p.getLinkState(self.kuka_id, self.num_joints - 1)[4]
self.object_state = np.array(
p.getBasePositionAndOrientation(self.object_id)[0]).astype(
np.float32)
square_dx = (self.robot_state[0] - self.object_state[0]) ** 2
square_dy = (self.robot_state[1] - self.object_state[1]) ** 2
square_dz = (self.robot_state[2] - self.object_state[2]) ** 2
        # The reward is based on the distance between the end effector and the object.
self.distance = sqrt(square_dx + square_dy + square_dz)
# print(self.distance)
x = self.robot_state[0]
y = self.robot_state[1]
z = self.robot_state[2]
        # If the end effector leaves the observation space, the episode also ends and a small penalty is given.
terminated = bool(x < self.x_low_obs or x > self.x_high_obs
or y < self.y_low_obs or y > self.y_high_obs
or z < self.z_low_obs or z > self.z_high_obs)
if terminated:
reward = -0.1
self.terminated = True
        # If the arm idles and still has not reached the object within the maximum number of steps, also give a small penalty.
elif self.step_counter > self.kMaxEpisodeSteps:
reward = -0.1
self.terminated = True
elif self.distance < 0.1:
reward = 1
self.terminated = True
else:
reward = 0
self.terminated = False
        info = {'distance': self.distance}
(_, _, px, _,
_) = p.getCameraImage(width=960,
height=960,
viewMatrix=self.view_matrix,
projectionMatrix=self.projection_matrix,
renderer=p.ER_BULLET_HARDWARE_OPENGL)
self.images = px
self.processed_image = self._process_image(self.images)
# self.observation=self.robot_state
self.observation = self.object_state
return self.processed_image, reward, self.terminated, info
def close(self):
p.disconnect()
def _get_force_sensor_value(self):
force_sensor_value = p.getJointState(bodyUniqueId=self.kuka_id,
jointIndex=self.num_joints -
1)[2][2]
        # The first [2] selects jointReactionForces and the second [2] selects Fz.
        # pybullet returns plain tuples, so the values cannot be indexed by name
        # the way a dict could be; returning a dict here would arguably be clearer.
return force_sensor_value
class CustomSkipFrame(gym.Wrapper):
""" Make a 4 frame skip, so the observation space will change to (4,84,84) from (1,84,84)
Args:
gym ([type]): [description]
"""
def __init__(self, env, skip=4):
super(CustomSkipFrame, self).__init__(env)
self.observation_space = spaces.Box(low=0,
high=1,
shape=(skip, self.kFinalImageSize['width'], self.kFinalImageSize['height']))
self.skip = skip
def step(self, action):
        total_reward = 0
        states = []
        state, reward, done, info = self.env.step(action)
        total_reward += reward
for i in range(self.skip):
if not done:
state, reward, done, info = self.env.step(action)
total_reward += reward
states.append(state)
else:
states.append(state)
states = np.concatenate(states, 0)[None, :, :, :]
        return random_crop(states.astype(np.float32), self.kFinalImageSize['width']), total_reward, done, info
def reset(self):
state = self.env.reset()
states = np.concatenate([state for _ in range(self.skip)],
0)[None, :, :, :]
return random_crop(states.astype(np.float32), self.kFinalImageSize['width'])
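# Shape sketch for the wrapper above (84 comes from kFinalImageSize):
#
#     env = CustomSkipFrame(KukaReachVisualEnv())
#     state = env.reset()                                   # -> (1, 4, 84, 84)
#     state, reward, done, info = env.step(env.action_space.sample())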
if __name__ == '__main__':
    # Baseline check: let the arm pick random actions and see what score it gets.
import matplotlib.pyplot as plt
env = KukaReachVisualEnv(is_render=False)
env = CustomSkipFrame(env)
print(env.observation_space.shape)
print(env.action_space.shape)
    print(env.action_space)
# for _ in range(20):
# action=env.action_space.sample()
# print(action)
# env.step(action)
#
# state = env.reset()
# print(state.shape)
# img = state[0][0]
# plt.imshow(img, cmap='gray')
# plt.show()
| [((24, 9, 24, 42), 'numpy.random.randint', 'np.random.randint', ({(24, 27, 24, 28): '0', (24, 30, 24, 38): 'crop_max', (24, 40, 24, 41): 'n'}, {}), '(0, crop_max, n)', True, 'import numpy as np\n'), ((25, 9, 25, 42), 'numpy.random.randint', 'np.random.randint', ({(25, 27, 25, 28): '0', (25, 30, 25, 38): 'crop_max', (25, 40, 25, 41): 'n'}, {}), '(0, crop_max, n)', True, 'import numpy as np\n'), ((26, 14, 26, 58), 'numpy.empty', 'np.empty', (), '', True, 'import numpy as np\n'), ((68, 30, 68, 57), 'pybullet_data.getDataPath', 'pybullet_data.getDataPath', ({}, {}), '()', False, 'import pybullet_data\n'), ((87, 27, 88, 41), 'pybullet.getQuaternionFromEuler', 'p.getQuaternionFromEuler', ({(88, 12, 88, 40): '[0.0, -math.pi, math.pi / 2.0]'}, {}), '([0.0, -math.pi, math.pi / 2.0])', True, 'import pybullet as p\n'), ((105, 27, 111, 26), 'pybullet.computeViewMatrixFromYawPitchRoll', 'p.computeViewMatrixFromYawPitchRoll', (), '', True, 'import pybullet as p\n'), ((113, 33, 118, 49), 'pybullet.computeProjectionMatrixFOV', 'p.computeProjectionMatrixFOV', (), '', True, 'import pybullet as p\n'), ((121, 8, 121, 59), 'pybullet.configureDebugVisualizer', 'p.configureDebugVisualizer', (), '', True, 'import pybullet as p\n'), ((122, 8, 125, 77), 'pybullet.resetDebugVisualizerCamera', 'p.resetDebugVisualizerCamera', (), '', True, 'import pybullet as p\n'), ((136, 33, 137, 117), 'gym.spaces.Box', 'spaces.Box', (), '', False, 'from gym import spaces\n'), ((144, 31, 144, 54), 'gym.utils.seeding.np_random', 'seeding.np_random', ({(144, 49, 144, 53): 'seed'}, {}), '(seed)', False, 'from gym.utils import seeding\n'), ((150, 8, 150, 27), 'pybullet.resetSimulation', 'p.resetSimulation', ({}, {}), '()', True, 'import pybullet as p\n'), ((153, 8, 153, 31), 'pybullet.setGravity', 'p.setGravity', ({(153, 21, 153, 22): '(0)', (153, 24, 153, 25): '(0)', (153, 27, 153, 30): '(-10)'}, {}), '(0, 0, -10)', True, 'import pybullet as p\n'), ((156, 8, 158, 72), 'pybullet.addUserDebugLine', 'p.addUserDebugLine', (), '', True, 'import pybullet as p\n'), ((159, 8, 161, 73), 'pybullet.addUserDebugLine', 'p.addUserDebugLine', (), '', True, 'import pybullet as p\n'), ((162, 8, 164, 73), 'pybullet.addUserDebugLine', 'p.addUserDebugLine', (), '', True, 'import pybullet as p\n'), ((165, 8, 167, 74), 'pybullet.addUserDebugLine', 'p.addUserDebugLine', (), '', True, 'import pybullet as p\n'), ((169, 8, 171, 73), 'pybullet.addUserDebugLine', 'p.addUserDebugLine', (), '', True, 'import pybullet as p\n'), ((172, 8, 174, 74), 'pybullet.addUserDebugLine', 'p.addUserDebugLine', (), '', True, 'import pybullet as p\n'), ((175, 8, 177, 73), 'pybullet.addUserDebugLine', 'p.addUserDebugLine', (), '', True, 'import pybullet as p\n'), ((178, 8, 180, 74), 'pybullet.addUserDebugLine', 'p.addUserDebugLine', (), '', True, 'import pybullet as p\n'), ((190, 8, 190, 66), 'pybullet.changeVisualShape', 'p.changeVisualShape', (), '', True, 'import pybullet as p\n'), ((200, 26, 200, 54), 'pybullet.getNumJoints', 'p.getNumJoints', ({(200, 41, 200, 53): 'self.kuka_id'}, {}), '(self.kuka_id)', True, 'import pybullet as p\n'), ((212, 8, 212, 26), 'pybullet.stepSimulation', 'p.stepSimulation', ({}, {}), '()', True, 'import pybullet as p\n'), ((216, 14, 220, 68), 'pybullet.getCameraImage', 'p.getCameraImage', (), '', True, 'import pybullet as p\n'), ((223, 8, 225, 57), 'pybullet.enableJointForceTorqueSensor', 'p.enableJointForceTorqueSensor', (), '', True, 'import pybullet as p\n'), ((262, 37, 271, 9), 'pybullet.calculateInverseKinematics', 
'p.calculateInverseKinematics', (), '', True, 'import pybullet as p\n'), ((278, 8, 278, 26), 'pybullet.stepSimulation', 'p.stepSimulation', ({}, {}), '()', True, 'import pybullet as p\n'), ((302, 24, 302, 63), 'math.sqrt', 'sqrt', ({(302, 29, 302, 62): 'square_dx + square_dy + square_dz'}, {}), '(square_dx + square_dy + square_dz)', False, 'from math import sqrt\n'), ((332, 14, 336, 68), 'pybullet.getCameraImage', 'p.getCameraImage', (), '', True, 'import pybullet as p\n'), ((344, 8, 344, 22), 'pybullet.disconnect', 'p.disconnect', ({}, {}), '()', True, 'import pybullet as p\n'), ((367, 33, 369, 120), 'gym.spaces.Box', 'spaces.Box', (), '', False, 'from gym import spaces\n'), ((48, 12, 48, 28), 'pybullet.connect', 'p.connect', ({(48, 22, 48, 27): 'p.GUI'}, {}), '(p.GUI)', True, 'import pybullet as p\n'), ((50, 12, 50, 31), 'pybullet.connect', 'p.connect', ({(50, 22, 50, 30): 'p.DIRECT'}, {}), '(p.DIRECT)', True, 'import pybullet as p\n'), ((182, 19, 182, 66), 'os.path.join', 'os.path.join', ({(182, 32, 182, 51): 'self.urdf_root_path', (182, 53, 182, 65): '"""plane.urdf"""'}, {}), "(self.urdf_root_path, 'plane.urdf')", False, 'import os\n'), ((184, 34, 185, 70), 'os.path.join', 'os.path.join', ({(184, 47, 184, 66): 'self.urdf_root_path', (185, 47, 185, 69): '"""kuka_iiwa/model.urdf"""'}, {}), "(self.urdf_root_path, 'kuka_iiwa/model.urdf')", False, 'import os\n'), ((187, 31, 188, 63), 'os.path.join', 'os.path.join', ({(187, 44, 187, 63): 'self.urdf_root_path', (188, 44, 188, 62): '"""table/table.urdf"""'}, {}), "(self.urdf_root_path, 'table/table.urdf')", False, 'import os\n'), ((191, 36, 192, 77), 'os.path.join', 'os.path.join', ({(191, 49, 191, 68): 'self.urdf_root_path', (192, 49, 192, 76): '"""random_urdfs/000/000.urdf"""'}, {}), "(self.urdf_root_path, 'random_urdfs/000/000.urdf')", False, 'import os\n'), ((203, 12, 207, 13), 'pybullet.resetJointState', 'p.resetJointState', (), '', True, 'import pybullet as p\n'), ((209, 29, 210, 64), 'pybullet.getLinkState', 'p.getLinkState', ({(209, 44, 209, 56): 'self.kuka_id', (210, 44, 210, 63): '(self.num_joints - 1)'}, {}), '(self.kuka_id, self.num_joints - 1)', True, 'import pybullet as p\n'), ((228, 26, 228, 73), 'pybullet.getBasePositionAndOrientation', 'p.getBasePositionAndOrientation', ({(228, 58, 228, 72): 'self.object_id'}, {}), '(self.object_id)', True, 'import pybullet as p\n'), ((244, 20, 244, 59), 'cv2.cvtColor', 'cv2.cvtColor', ({(244, 33, 244, 38): 'image', (244, 40, 244, 58): 'cv2.COLOR_RGB2GRAY'}, {}), '(image, cv2.COLOR_RGB2GRAY)', False, 'import cv2\n'), ((248, 19, 248, 85), 'numpy.zeros', 'np.zeros', ({(248, 28, 248, 84): "(1, self.kImageSize['width'], self.kImageSize['height'])"}, {}), "((1, self.kImageSize['width'], self.kImageSize['height']))", True, 'import numpy as np\n'), ((257, 27, 257, 76), 'pybullet.getLinkState', 'p.getLinkState', ({(257, 42, 257, 54): 'self.kuka_id', (257, 56, 257, 75): '(self.num_joints - 1)'}, {}), '(self.kuka_id, self.num_joints - 1)', True, 'import pybullet as p\n'), ((273, 12, 277, 13), 'pybullet.resetJointState', 'p.resetJointState', (), '', True, 'import pybullet as p\n'), ((282, 12, 282, 28), 'time.sleep', 'time.sleep', ({(282, 23, 282, 27): '(0.05)'}, {}), '(0.05)', False, 'import time\n'), ((291, 27, 291, 76), 'pybullet.getLinkState', 'p.getLinkState', ({(291, 42, 291, 54): 'self.kuka_id', (291, 56, 291, 75): '(self.num_joints - 1)'}, {}), '(self.kuka_id, self.num_joints - 1)', True, 'import pybullet as p\n'), ((384, 17, 384, 42), 'numpy.concatenate', 'np.concatenate', ({(384, 32, 384, 38): 
'states', (384, 40, 384, 41): '(0)'}, {}), '(states, 0)', True, 'import numpy as np\n'), ((127, 43, 128, 70), 'numpy.array', 'np.array', ({(128, 12, 128, 69): '[self.x_low_action, self.y_low_action, self.z_low_action]'}, {}), '([self.x_low_action, self.y_low_action, self.z_low_action])', True, 'import numpy as np\n'), ((129, 17, 133, 14), 'numpy.array', 'np.array', ({(129, 26, 133, 13): '[self.x_high_action, self.y_high_action, self.z_high_action]'}, {}), '([self.x_high_action, self.y_high_action, self.z_high_action])', True, 'import numpy as np\n'), ((348, 29, 350, 58), 'pybullet.getJointState', 'p.getJointState', (), '', True, 'import pybullet as p\n'), ((194, 40, 195, 71), 'random.uniform', 'random.uniform', ({(194, 55, 194, 69): 'self.x_low_obs', (195, 55, 195, 70): 'self.x_high_obs'}, {}), '(self.x_low_obs, self.x_high_obs)', False, 'import random\n'), ((196, 40, 197, 71), 'random.uniform', 'random.uniform', ({(196, 55, 196, 69): 'self.y_low_obs', (197, 55, 197, 70): 'self.y_high_obs'}, {}), '(self.y_low_obs, self.y_high_obs)', False, 'import random\n'), ((245, 20, 245, 92), 'cv2.resize', 'cv2.resize', ({(245, 31, 245, 36): 'image', (245, 38, 245, 91): "(self.kImageSize['width'], self.kImageSize['height'])"}, {}), "(image, (self.kImageSize['width'], self.kImageSize['height']))", False, 'import cv2\n'), ((294, 12, 294, 59), 'pybullet.getBasePositionAndOrientation', 'p.getBasePositionAndOrientation', ({(294, 44, 294, 58): 'self.object_id'}, {}), '(self.object_id)', True, 'import pybullet as p\n')] |
jrmarino/ravensource | bucket_4C/python-Pillow/patches/patch-setup.py | 91d599fd1f2af55270258d15e72c62774f36033e | --- setup.py.orig 2019-07-02 19:13:39 UTC
+++ setup.py
@@ -465,9 +465,7 @@ class pil_build_ext(build_ext):
_add_directory(include_dirs, "/usr/X11/include")
elif (
- sys.platform.startswith("linux")
- or sys.platform.startswith("gnu")
- or sys.platform.startswith("freebsd")
+ sys.platform.startswith("nothing")
):
for dirname in _find_library_dirs_ldconfig():
_add_directory(library_dirs, dirname)
| [] |
GeekHee/mindspore | tests/ut/cpp/python_input/gtest_input/pre_activate/ir_fusion_test.py | 896b8e5165dd0a900ed5a39e0fb23525524bf8b0 | # Copyright 2019 Huawei Technologies Co., Ltd
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ============================================================================
from mindspore.ops import Primitive
from mindspore.ops import operations as P
from mindspore.ops.operations import _grad_ops as G
from mindspore.ops import _constants as Constants
# pylint: disable=unused-variable
tuple_getitem = Primitive(Constants.kTupleGetItem)
add = P.Add()
allreduce = P.AllReduce()
allreduce.add_prim_attr('fusion', 1)
make_tuple = Primitive("make_tuple")
conv = P.Conv2D(out_channel=64, kernel_size=7, mode=1, pad_mode="valid", pad=0, stride=1, dilation=1, group=1)
bn = P.FusedBatchNorm()
relu = P.ReLU()
conv_bn1 = Primitive('ConvBN1')
bn2_add_relu = Primitive('BN2AddRelu')
bn2_relu = Primitive('BN2Relu')
fused_bn1 = Primitive('FusedBN1')
fused_bn2 = Primitive('FusedBN2')
fused_bn3 = Primitive('FusedBN3')
bn_grad = G.FusedBatchNormGrad()
bn_grad1 = Primitive('BNGrad1')
bn_grad2 = Primitive('BNGrad2')
bn_grad3 = Primitive('BNGrad3')
class FnDict:
def __init__(self):
self.fnDict = {}
def __call__(self, fn):
self.fnDict[fn.__name__] = fn
def __getitem__(self, name):
return self.fnDict[name]
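# FnDict is a small registry: decorating a function with an FnDict instance
# stores it under its own name, and indexing retrieves it again, e.g.
#
#     fns = FnDict()
#     @fns
#     def before(...): ...
#     fns['before']  # -> the decorated `before` graph builder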
def test_bn_split(tag):
""" test_split_bn_fusion """
fns = FnDict()
@fns
def before(x, scale, b, mean, variance):
bn_output = bn(x, scale, b, mean, variance)
item0 = tuple_getitem(bn_output, 0)
return item0
@fns
def after(x, scale, b, mean, variance):
fused_bn1_output = fused_bn1(x)
fused_bn2_input0 = tuple_getitem(fused_bn1_output, 0)
fused_bn2_input1 = tuple_getitem(fused_bn1_output, 1)
fused_bn2_output = fused_bn2(fused_bn2_input0, fused_bn2_input1, mean, variance)
fused_bn3_input1 = tuple_getitem(fused_bn2_output, 0)
fused_bn3_input2 = tuple_getitem(fused_bn2_output, 1)
fused_bn3_output = fused_bn3(x, fused_bn3_input1, fused_bn3_input2, scale, b)
output1 = tuple_getitem(fused_bn2_output, 2)
output2 = tuple_getitem(fused_bn2_output, 3)
output3 = tuple_getitem(fused_bn2_output, 0)
output4 = tuple_getitem(fused_bn2_output, 1)
output = make_tuple(fused_bn3_output, output1, output2, output3, output4)
item0 = tuple_getitem(output, 0)
return make_tuple(item0)
return fns[tag]
def test_bn_grad_split(tag):
""" test_bn_grad_split """
fns = FnDict()
@fns
def before(dy, x, scale, save_mean, save_inv_variance):
bn_grad_output = bn_grad(dy, x, scale, save_mean, save_inv_variance)
item0 = tuple_getitem(bn_grad_output, 0)
item1 = tuple_getitem(bn_grad_output, 1)
item2 = tuple_getitem(bn_grad_output, 2)
output = make_tuple(item0, item1, item2)
res = tuple_getitem(output, 0)
return res
@fns
def after(i0, i1, i2, i3, i4):
bn_grad1_output = bn_grad1(i0, i1, i3)
bn_grad1_item0 = tuple_getitem(bn_grad1_output, 0)
bn_grad1_item1 = tuple_getitem(bn_grad1_output, 1)
bn_grad1_item2 = tuple_getitem(bn_grad1_output, 2)
bn_grad2_output = bn_grad2(bn_grad1_item0, bn_grad1_item1, i4, i2)
bn_grad2_item0 = tuple_getitem(bn_grad2_output, 0)
bn_grad2_item1 = tuple_getitem(bn_grad2_output, 1)
bn_grad2_item2 = tuple_getitem(bn_grad2_output, 2)
bn_grad2_item3 = tuple_getitem(bn_grad2_output, 3)
bn_grad2_item4 = tuple_getitem(bn_grad2_output, 4)
bn_grad3_output = bn_grad3(i0, bn_grad2_item2, bn_grad2_item3, bn_grad2_item4, bn_grad1_item2)
bn_grad_make_tuple = make_tuple(bn_grad3_output, bn_grad2_item0, bn_grad2_item1)
item0 = tuple_getitem(bn_grad_make_tuple, 0)
item1 = tuple_getitem(bn_grad_make_tuple, 1)
item2 = tuple_getitem(bn_grad_make_tuple, 2)
output = make_tuple(item0, item1, item2)
return make_tuple(tuple_getitem(output, 0))
return fns[tag]
def test_all_reduce_fusion_all(tag):
""" test_all_reduce_fusion_all """
fns = FnDict()
@fns
def before(x1, x2, x3, x4, x5):
y1 = allreduce(x1)
y2 = allreduce(x2)
y3 = allreduce(x3)
y4 = allreduce(x4)
y5 = allreduce(x5)
return make_tuple(y1, y2, y3, y4, y5)
@fns
def after(x1, x2, x3, x4, x5):
ar = allreduce(x5, x4, x3, x2, x1)
y5 = tuple_getitem(ar, 0)
y4 = tuple_getitem(ar, 1)
y3 = tuple_getitem(ar, 2)
y2 = tuple_getitem(ar, 3)
y1 = tuple_getitem(ar, 4)
res = make_tuple(y1, y2, y3, y4, y5)
return make_tuple(res)
@fns
def after1(x1, x2, x3, x4, x5):
ar = allreduce(x1, x2, x3, x4, x5)
y1 = tuple_getitem(ar, 0)
y2 = tuple_getitem(ar, 1)
y3 = tuple_getitem(ar, 2)
y4 = tuple_getitem(ar, 3)
y5 = tuple_getitem(ar, 4)
res = make_tuple(y1, y2, y3, y4, y5)
return make_tuple(res)
return fns[tag]
def test_all_reduce_fusion_group(tag):
""" test_all_reduce_fusion_group """
fns = FnDict()
@fns
def before(x1, x2, x3, x4, x5):
y1 = allreduce(x1)
y2 = allreduce(x2)
y3 = allreduce(x3)
y4 = allreduce(x4)
y5 = allreduce(x5)
return make_tuple(y1, y2, y3, y4, y5)
@fns
def after1(x1, x2, x3, x4, x5):
ar1 = allreduce(x5, x4)
ar2 = allreduce(x3, x2, x1)
y4 = tuple_getitem(ar1, 1)
y5 = tuple_getitem(ar1, 0)
y1 = tuple_getitem(ar2, 2)
y2 = tuple_getitem(ar2, 1)
y3 = tuple_getitem(ar2, 0)
res = make_tuple(y1, y2, y3, y4, y5)
return make_tuple(res)
@fns
def after2(x1, x2, x3, x4, x5):
ar1 = allreduce(x1, x3, x5)
ar2 = allreduce(x2, x4)
y1 = tuple_getitem(ar1, 2)
y3 = tuple_getitem(ar1, 1)
y5 = tuple_getitem(ar1, 0)
y2 = tuple_getitem(ar2, 1)
y4 = tuple_getitem(ar2, 0)
output = make_tuple(y1, y2, y3, y4, y5)
return make_tuple(output)
return fns[tag]
| [((22, 16, 22, 50), 'mindspore.ops.Primitive', 'Primitive', ({(22, 26, 22, 49): 'Constants.kTupleGetItem'}, {}), '(Constants.kTupleGetItem)', False, 'from mindspore.ops import Primitive\n'), ((23, 6, 23, 13), 'mindspore.ops.operations.Add', 'P.Add', ({}, {}), '()', True, 'from mindspore.ops import operations as P\n'), ((24, 12, 24, 25), 'mindspore.ops.operations.AllReduce', 'P.AllReduce', ({}, {}), '()', True, 'from mindspore.ops import operations as P\n'), ((26, 13, 26, 36), 'mindspore.ops.Primitive', 'Primitive', ({(26, 23, 26, 35): '"""make_tuple"""'}, {}), "('make_tuple')", False, 'from mindspore.ops import Primitive\n'), ((27, 7, 27, 110), 'mindspore.ops.operations.Conv2D', 'P.Conv2D', (), '', True, 'from mindspore.ops import operations as P\n'), ((28, 5, 28, 23), 'mindspore.ops.operations.FusedBatchNorm', 'P.FusedBatchNorm', ({}, {}), '()', True, 'from mindspore.ops import operations as P\n'), ((29, 7, 29, 15), 'mindspore.ops.operations.ReLU', 'P.ReLU', ({}, {}), '()', True, 'from mindspore.ops import operations as P\n'), ((30, 11, 30, 31), 'mindspore.ops.Primitive', 'Primitive', ({(30, 21, 30, 30): '"""ConvBN1"""'}, {}), "('ConvBN1')", False, 'from mindspore.ops import Primitive\n'), ((31, 15, 31, 38), 'mindspore.ops.Primitive', 'Primitive', ({(31, 25, 31, 37): '"""BN2AddRelu"""'}, {}), "('BN2AddRelu')", False, 'from mindspore.ops import Primitive\n'), ((32, 11, 32, 31), 'mindspore.ops.Primitive', 'Primitive', ({(32, 21, 32, 30): '"""BN2Relu"""'}, {}), "('BN2Relu')", False, 'from mindspore.ops import Primitive\n'), ((33, 12, 33, 33), 'mindspore.ops.Primitive', 'Primitive', ({(33, 22, 33, 32): '"""FusedBN1"""'}, {}), "('FusedBN1')", False, 'from mindspore.ops import Primitive\n'), ((34, 12, 34, 33), 'mindspore.ops.Primitive', 'Primitive', ({(34, 22, 34, 32): '"""FusedBN2"""'}, {}), "('FusedBN2')", False, 'from mindspore.ops import Primitive\n'), ((35, 12, 35, 33), 'mindspore.ops.Primitive', 'Primitive', ({(35, 22, 35, 32): '"""FusedBN3"""'}, {}), "('FusedBN3')", False, 'from mindspore.ops import Primitive\n'), ((36, 10, 36, 32), 'mindspore.ops.operations._grad_ops.FusedBatchNormGrad', 'G.FusedBatchNormGrad', ({}, {}), '()', True, 'from mindspore.ops.operations import _grad_ops as G\n'), ((37, 11, 37, 31), 'mindspore.ops.Primitive', 'Primitive', ({(37, 21, 37, 30): '"""BNGrad1"""'}, {}), "('BNGrad1')", False, 'from mindspore.ops import Primitive\n'), ((38, 11, 38, 31), 'mindspore.ops.Primitive', 'Primitive', ({(38, 21, 38, 30): '"""BNGrad2"""'}, {}), "('BNGrad2')", False, 'from mindspore.ops import Primitive\n'), ((39, 11, 39, 31), 'mindspore.ops.Primitive', 'Primitive', ({(39, 21, 39, 30): '"""BNGrad3"""'}, {}), "('BNGrad3')", False, 'from mindspore.ops import Primitive\n')] |
TrainerDex/DiscordBot | tdx/abc.py | 7e7bb20c5ac76bed236a7458c31017b8ddd8b8be | from abc import ABC
from typing import Dict
from redbot.core import Config
from redbot.core.bot import Red
from trainerdex.client import Client
class MixinMeta(ABC):
"""
Base class for well behaved type hint detection with composite class.
Basically, to keep developers sane when not all attributes are defined in each mixin.
"""
def __init__(self, *_args):
self.bot: Red
self.config: Config
self.client: Client
self.emoji: Dict
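# Illustrative sketch (hypothetical names, not part of the original file) of how
# this base class is meant to be used: each feature mixin inherits from MixinMeta
# so editors/type checkers know about self.bot, self.config, self.client and
# self.emoji, and the real cog composes the mixins together, e.g.:
#
#     class ProfileMixin(MixinMeta):
#         async def whois(self, ctx, user):
#             trainer = await self.client.get_trainer(user.id)  # hypothetical call
#             ...
#
#     class TrainerDexCog(ProfileMixin, SettingsMixin):
#         ...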
| [] |
PolinaRomanchenko/Victorious_Secret_DSCI_532 | app.py | e83bc19169a1736618ac55f2ade40741583089fd | import dash
import dash_core_components as dcc
import dash_html_components as html
import dash_bootstrap_components as dbc
import pandas as pd
import numpy as np
import altair as alt
import vega_datasets
alt.data_transformers.enable('default')
alt.data_transformers.disable_max_rows()
app = dash.Dash(__name__, assets_folder='assets', external_stylesheets=[dbc.themes.BOOTSTRAP])
# Bootstrap CSS.
app.css.append_css({'external_url': 'https://codepen.io/amyoshino/pen/jzXypZ.css'}) # noqa: E501
server = app.server
app.title = 'Dash app with pure Altair HTML'
df = pd.read_csv('data/Police_Department_Incidents_-_Previous_Year__2016_.csv')
# df = pd.read_csv("https://raw.github.ubc.ca/MDS-2019-20/DSCI_531_lab4_anas017/master/data/Police_Department_Incidents_-_Previous_Year__2016_.csv?token=AAAHQ0dLxUd74i7Zhzh1SJ_UuOaFVI3_ks5d5dT3wA%3D%3D")
df['datetime'] = pd.to_datetime(df[["Date","Time"]].apply(lambda x: x[0].split()[0] +" "+x[1], axis=1), format="%m/%d/%Y %H:%M")
df['hour'] = df['datetime'].dt.hour
df.dropna(inplace=True)
top_4_crimes = df['Category'].value_counts()[:6].index.to_list()
top_4_crimes  # leftover notebook-style inspection of the list; has no effect when run as a script
top_4_crimes.remove("NON-CRIMINAL")
top_4_crimes.remove("OTHER OFFENSES")
# top 4 crimes df subset
df_t4 = df[df["Category"].isin(top_4_crimes)].copy()
def make_plot_top(df_new=df_t4):
# Create a plot of the Displacement and the Horsepower of the cars dataset
# making the slider
slider = alt.binding_range(min = 0, max = 23, step = 1)
select_hour = alt.selection_single(name='select', fields = ['hour'],
bind = slider, init={'hour': 0})
#begin of my code
# typeDict = {'ASSAULT':'quantitative',
# 'VANDALISM':'quantitative',
# 'LARCENY/THEFT':'quantitative',
# 'VEHICLE THEFT':'quantitative'
# }
# end
chart = alt.Chart(df_new).mark_bar(size=30).encode(
x=alt.X('Category',type='nominal', title='Category'),
y=alt.Y('count()', title = "Count" , scale = alt.Scale(domain = (0,3300))),
tooltip='count()'
).properties(
title = "Per hour crime occurrences for the top 4 crimes",
width=500,
height = 315
).add_selection(
select_hour
).transform_filter(
select_hour
)
return chart
def make_plot_bot(data=df_t4):
chart_1 = alt.Chart(data).mark_circle(size=3, opacity = 0.8).encode(
longitude='X:Q',
latitude='Y:Q',
color = alt.Color('PdDistrict:N', legend = alt.Legend(title = "District")),
tooltip = 'PdDistrict'
).project(
type='albersUsa'
).properties(
width=450,
height=350
)
chart_2 = alt.Chart(data).mark_bar().encode(
x=alt.X('PdDistrict:N', axis=None, title="District"),
y=alt.Y('count()', title="Count of reports"),
color=alt.Color('PdDistrict:N', legend=alt.Legend(title="District")),
tooltip=['PdDistrict', 'count()']
).properties(
width=450,
height=350
)
# A dropdown filter
crimes_dropdown = alt.binding_select(options=list(data['Category'].unique()))
crimes_select = alt.selection_single(fields=['Category'], bind=crimes_dropdown,
name="Pick\ Crime")
combine_chart = (chart_2 | chart_1)
filter_crimes = combine_chart.add_selection(
crimes_select
).transform_filter(
crimes_select
)
return filter_crimes
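# Note: the district bar chart and the dot map above are concatenated side by side
# and share the single "Pick Crime" dropdown selection, so choosing a crime
# category filters both views at once.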
body = dbc.Container(
[
dbc.Row(
[
dbc.Col(
[
html.H2("San Francisco Crime"),
html.P(
"""\
When looking for a place to live or visit, one important factor that people will consider
is the safety of the neighborhood. Searching that information district
                            by district could be time-consuming and exhausting. It is even more difficult to
compare specific crime statistics across districts such as the crime rate
at a certain time of day. It would be useful if people can look up crime
                            related information across districts in one application. Our app
aims to help people make decisions when considering their next trip or move to San Francisco, California
via visually exploring a dataset of crime statistics. The app provides an overview of the crime rate across
neighborhoods and allows users to focus on more specific information through
                            filtering of geographic location, crime rate, crime type or time of the
crime.
Use the box below to choose crimes of interest.
"""
),
dcc.Dropdown(
id = 'drop_selection_crime',
options=[{'label': i, 'value': i} for i in df_t4['Category'].unique()
],
style={'height': '20px',
'width': '400px'},
value=df_t4['Category'].unique(),
multi=True)
],
md=5,
),
dbc.Col(
[
dbc.Row(
[
html.Iframe(
sandbox = "allow-scripts",
id = "plot_top",
height = "500",
width = "650",
style = {"border-width": "0px"},
srcDoc = make_plot_top().to_html()
)
]
)
]
),
]
),
dbc.Row(
html.Iframe(
sandbox='allow-scripts',
id='plot_bot',
height='500',
width='1200',
style={'border-width': '0px'},
srcDoc= make_plot_bot().to_html()
)
)
],
className="mt-4",
)
app.layout = html.Div(body)
@app.callback([dash.dependencies.Output('plot_top', 'srcDoc'),
dash.dependencies.Output('plot_bot', 'srcDoc')],
[dash.dependencies.Input('drop_selection_crime', 'value')]
)
def update_df(chosen):
new_df = df_t4[(df_t4["Category"].isin(chosen))]
updated_plot_top = make_plot_top(new_df).to_html()
updated_plot_bottom = make_plot_bot(new_df).to_html()
return updated_plot_top, updated_plot_bottom
if __name__ == '__main__':
app.run_server(debug=False) | [((10, 0, 10, 39), 'altair.data_transformers.enable', 'alt.data_transformers.enable', ({(10, 29, 10, 38): '"""default"""'}, {}), "('default')", True, 'import altair as alt\n'), ((11, 0, 11, 40), 'altair.data_transformers.disable_max_rows', 'alt.data_transformers.disable_max_rows', ({}, {}), '()', True, 'import altair as alt\n'), ((12, 6, 12, 94), 'dash.Dash', 'dash.Dash', (), '', False, 'import dash\n'), ((18, 5, 18, 79), 'pandas.read_csv', 'pd.read_csv', ({(18, 17, 18, 78): '"""data/Police_Department_Incidents_-_Previous_Year__2016_.csv"""'}, {}), "('data/Police_Department_Incidents_-_Previous_Year__2016_.csv')", True, 'import pandas as pd\n'), ((167, 13, 167, 27), 'dash_html_components.Div', 'html.Div', ({(167, 22, 167, 26): 'body'}, {}), '(body)', True, 'import dash_html_components as html\n'), ((35, 13, 35, 59), 'altair.binding_range', 'alt.binding_range', (), '', True, 'import altair as alt\n'), ((36, 18, 37, 68), 'altair.selection_single', 'alt.selection_single', (), '', True, 'import altair as alt\n'), ((87, 20, 88, 65), 'altair.selection_single', 'alt.selection_single', (), '', True, 'import altair as alt\n'), ((169, 15, 169, 61), 'dash.dependencies.Output', 'dash.dependencies.Output', ({(169, 40, 169, 50): '"""plot_top"""', (169, 52, 169, 60): '"""srcDoc"""'}, {}), "('plot_top', 'srcDoc')", False, 'import dash\n'), ((170, 4, 170, 50), 'dash.dependencies.Output', 'dash.dependencies.Output', ({(170, 29, 170, 39): '"""plot_bot"""', (170, 41, 170, 49): '"""srcDoc"""'}, {}), "('plot_bot', 'srcDoc')", False, 'import dash\n'), ((171, 5, 171, 61), 'dash.dependencies.Input', 'dash.dependencies.Input', ({(171, 29, 171, 51): '"""drop_selection_crime"""', (171, 53, 171, 60): '"""value"""'}, {}), "('drop_selection_crime', 'value')", False, 'import dash\n'), ((76, 10, 76, 60), 'altair.X', 'alt.X', (), '', True, 'import altair as alt\n'), ((77, 10, 77, 52), 'altair.Y', 'alt.Y', (), '', True, 'import altair as alt\n'), ((106, 24, 106, 54), 'dash_html_components.H2', 'html.H2', ({(106, 32, 106, 53): '"""San Francisco Crime"""'}, {}), "('San Francisco Crime')", True, 'import dash_html_components as html\n'), ((107, 24, 123, 25), 'dash_html_components.P', 'html.P', ({(108, 28, 122, 31): '""" When looking for a place to live or visit, one important factor that people will consider\n is the safety of the neighborhood. Searching that information district\n by district could be time consuming and exhausting. It is even more difficult to\n compare specific crime statistics across districts such as the crime rate\n at a certain time of day. It would be useful if people can look up crime\n related information across district on one application. Our app\n aims to help people make decisions when considering their next trip or move to San Francisco, California\n via visually exploring a dataset of crime statistics. The app provides an overview of the crime rate across\n neighborhoods and allows users to focus on more specific information through\n filtering of geological location, crime rate, crime type or time of the\n crime.\n\n Use the box below to choose crimes of interest.\n """'}, {}), '(\n """ When looking for a place to live or visit, one important factor that people will consider\n is the safety of the neighborhood. Searching that information district\n by district could be time consuming and exhausting. It is even more difficult to\n compare specific crime statistics across districts such as the crime rate\n at a certain time of day. 
It would be useful if people can look up crime\n related information across district on one application. Our app\n aims to help people make decisions when considering their next trip or move to San Francisco, California\n via visually exploring a dataset of crime statistics. The app provides an overview of the crime rate across\n neighborhoods and allows users to focus on more specific information through\n filtering of geological location, crime rate, crime type or time of the\n crime.\n\n Use the box below to choose crimes of interest.\n """\n )', True, 'import dash_html_components as html\n'), ((75, 14, 75, 29), 'altair.Chart', 'alt.Chart', ({(75, 24, 75, 28): 'data'}, {}), '(data)', True, 'import altair as alt\n'), ((78, 47, 78, 75), 'altair.Legend', 'alt.Legend', (), '', True, 'import altair as alt\n'), ((48, 10, 48, 60), 'altair.X', 'alt.X', (), '', True, 'import altair as alt\n'), ((63, 14, 63, 29), 'altair.Chart', 'alt.Chart', ({(63, 24, 63, 28): 'data'}, {}), '(data)', True, 'import altair as alt\n'), ((66, 51, 66, 81), 'altair.Legend', 'alt.Legend', (), '', True, 'import altair as alt\n'), ((47, 12, 47, 29), 'altair.Chart', 'alt.Chart', ({(47, 22, 47, 28): 'df_new'}, {}), '(df_new)', True, 'import altair as alt\n'), ((49, 53, 49, 81), 'altair.Scale', 'alt.Scale', (), '', True, 'import altair as alt\n')] |
johnson880319/Software | catkin_ws/src:/opt/ros/kinetic/lib/python2.7/dist-packages:/home/bala/duckietown/catkin_ws/src:/home/bala/duckietown/catkin_ws/src/lib/python2.7/site-packages/geometry/subspaces/__init__.py | 045894227f359e0a3a3ec5b7a53f8d1ebc06acdd | # coding=utf-8
from .subspaces import *
| [] |
chika626/chainer_rep | detection/contor.py | a1d4fd32a8cfcab753269455d08c1918f273388d | import json
import math
from PIL import Image,ImageDraw
import pandas as pd
import glob
import argparse
import copy
import numpy as np
import matplotlib.pyplot as plt
import pickle
import cv2
from PIL import ImageEnhance
import chainer
from chainer.datasets import ConcatenatedDataset
from chainer.datasets import TransformDataset
from chainer.optimizer_hooks import WeightDecay
from chainer import serializers
from chainer import training
from chainer.training import extensions
from chainer.training import triggers
from chainercv.datasets import voc_bbox_label_names
from chainercv.datasets import VOCBboxDataset
from chainercv.extensions import DetectionVOCEvaluator
from chainercv.links.model.ssd import GradientScaling
from chainercv.links.model.ssd import multibox_loss
from chainercv.links import SSD300
from chainercv.links import SSD512
from chainercv import transforms
from chainercv.utils import read_image
from chainercv.links.model.ssd import random_crop_with_bbox_constraints
from chainercv.links.model.ssd import random_distort
from chainercv.links.model.ssd import resize_with_random_interpolation
import queue
def run(img):
# c , H , W = img.shape
    H, W = img.size  # note: PIL's Image.size is (width, height), so H is the width and W the height here
img = np.asarray(img)
    # output image that will hold the transformed (contour) data
transed = Image.new('RGB',(H,W))
for x in range(H):
for y in range(W):
transed.putpixel((x,y),(255,255,255))
for x in range(H):
for y in range(W):
if x + 1 == H or y + 1 == W:
break
if img[y][x][0] != img[y][x+1][0]:
transed.putpixel((x,y),(0,0,0))
for y in range(W):
for x in range(H):
if x + 1 == H or y + 1 == W:
break
if img[y][x][0] != img[y+1][x][0]:
transed.putpixel((x,y),(0,0,0))
return transed
def main():
    # # code for converting a single image
# img = Image.open('cont/transed/X.jpg')
# img=img.convert('L')
# img=np.asarray(img)
# ret2, img = cv2.threshold(img, 0, 255, cv2.THRESH_OTSU)
# img=Image.fromarray(img)
# img=img.convert('RGB')
# transed = run(img)
# transed.save('transec_0.png')
# return
    # batch conversion: process every cropped image in the folder
img_path=glob.glob("cont/crop/*")
counter=0
for path in img_path:
img = Image.open(path)
transed = run(img)
transed.save('transec_{}.png'.format(counter))
counter+=1
if __name__ == '__main__':
main() | [((42, 10, 42, 25), 'numpy.asarray', 'np.asarray', ({(42, 21, 42, 24): 'img'}, {}), '(img)', True, 'import numpy as np\n'), ((45, 14, 45, 36), 'PIL.Image.new', 'Image.new', ({(45, 24, 45, 29): '"""RGB"""', (45, 30, 45, 35): '(H, W)'}, {}), "('RGB', (H, W))", False, 'from PIL import Image, ImageDraw\n'), ((82, 13, 82, 37), 'glob.glob', 'glob.glob', ({(82, 23, 82, 36): '"""cont/crop/*"""'}, {}), "('cont/crop/*')", False, 'import glob\n'), ((85, 14, 85, 30), 'PIL.Image.open', 'Image.open', ({(85, 25, 85, 29): 'path'}, {}), '(path)', False, 'from PIL import Image, ImageDraw\n')] |
hjl-yul154/autodeeplab | train.py | 1bd8399ac830fcafd506a4207b75e05682d1e260 | import os
import pdb
import warnings
import numpy as np
import torch
import torch.nn as nn
import torch.utils.data
import torch.backends.cudnn
import torch.optim as optim
import dataloaders
from utils.utils import AverageMeter
from utils.loss import build_criterion
from utils.metrics import Evaluator
from utils.step_lr_scheduler import Iter_LR_Scheduler
from retrain_model.build_autodeeplab import Retrain_Autodeeplab
from config_utils.re_train_autodeeplab import obtain_retrain_autodeeplab_args
def main():
warnings.filterwarnings('ignore')
assert torch.cuda.is_available()
torch.backends.cudnn.benchmark = True
args = obtain_retrain_autodeeplab_args()
save_dir = os.path.join('./data/', args.save_path)
if not os.path.isdir(save_dir):
os.mkdir(save_dir)
model_fname = os.path.join(save_dir,
'deeplab_{0}_{1}_v3_{2}_epoch%d.pth'.format(args.backbone, args.dataset, args.exp))
record_name = os.path.join(save_dir, 'training_record.txt')
if args.dataset == 'pascal':
raise NotImplementedError
elif args.dataset == 'cityscapes':
kwargs = {'num_workers': args.workers, 'pin_memory': True, 'drop_last': True}
dataset_loader, num_classes, val_loader = dataloaders.make_data_loader(args, **kwargs)
args.num_classes = num_classes
else:
raise ValueError('Unknown dataset: {}'.format(args.dataset))
if args.backbone == 'autodeeplab':
model = Retrain_Autodeeplab(args)
else:
raise ValueError('Unknown backbone: {}'.format(args.backbone))
if args.criterion == 'Ohem':
args.thresh = 0.7
args.crop_size = [args.crop_size, args.crop_size] if isinstance(args.crop_size, int) else args.crop_size
args.n_min = int((args.batch_size / len(args.gpu) * args.crop_size[0] * args.crop_size[1]) // 16)
criterion = build_criterion(args)
model = nn.DataParallel(model).cuda()
model.train()
if args.freeze_bn:
for m in model.modules():
if isinstance(m, nn.BatchNorm2d):
m.eval()
m.weight.requires_grad = False
m.bias.requires_grad = False
optimizer = optim.SGD(model.module.parameters(), lr=args.base_lr, momentum=0.9, weight_decay=0.0001)
max_iteration = len(dataset_loader) * args.epochs
scheduler = Iter_LR_Scheduler(args, max_iteration, len(dataset_loader))
start_epoch = 0
evaluator=Evaluator(num_classes)
if args.resume:
if os.path.isfile(args.resume):
print('=> loading checkpoint {0}'.format(args.resume))
checkpoint = torch.load(args.resume)
start_epoch = checkpoint['epoch']
model.load_state_dict(checkpoint['state_dict'])
optimizer.load_state_dict(checkpoint['optimizer'])
print('=> loaded checkpoint {0} (epoch {1})'.format(args.resume, checkpoint['epoch']))
else:
raise ValueError('=> no checkpoint found at {0}'.format(args.resume))
for epoch in range(start_epoch, args.epochs):
losses = AverageMeter()
print('Training epoch {}'.format(epoch))
model.train()
for i, sample in enumerate(dataset_loader):
cur_iter = epoch * len(dataset_loader) + i
scheduler(optimizer, cur_iter)
inputs = sample['image'].cuda()
target = sample['label'].cuda()
outputs = model(inputs)
loss = criterion(outputs, target)
if np.isnan(loss.item()) or np.isinf(loss.item()):
pdb.set_trace()
losses.update(loss.item(), args.batch_size)
loss.backward()
optimizer.step()
optimizer.zero_grad()
if (i + 1) % 200 == 0:
print('epoch: {0}\t''iter: {1}/{2}\t''lr: {3:.6f}\t''loss: {loss.val:.4f} ({loss.ema:.4f})'.format(
epoch + 1, i + 1, len(dataset_loader), scheduler.get_lr(optimizer), loss=losses))
        # checkpoint every 5 epochs, and always save once more after the final epoch
        # (with `epoch < args.epochs` the else branch below could never be reached)
        if epoch < args.epochs - 1:
if (epoch+1) % 5 == 0:
torch.save({
'epoch': epoch + 1,
'state_dict': model.state_dict(),
'optimizer': optimizer.state_dict(),
}, model_fname % (epoch + 1))
else:
torch.save({
'epoch': epoch + 1,
'state_dict': model.state_dict(),
'optimizer': optimizer.state_dict(),
}, model_fname % (epoch + 1))
line0 = 'epoch: {0}\t''loss: {loss.val:.4f} ({loss.ema:.4f})'.format(
epoch, loss=losses)
with open(record_name, 'a') as f:
f.write(line0)
if line0[-1] != '\n':
f.write('\n')
if epoch%3!=0 and epoch <args.epochs-20:
continue
print('Validate epoch {}'.format(epoch))
model.eval()
evaluator.reset()
test_loss=0.0
for i,sample in enumerate(val_loader):
inputs = sample['image'].cuda()
target = sample['label'].cuda()
with torch.no_grad():
outputs = model(inputs)
# loss = criterion(outputs, target)
# test_loss+=loss.item()
pred=outputs.data.cpu().numpy()
target=target.cpu().numpy()
pred = np.argmax(pred, axis=1)
evaluator.add_batch(target,pred)
Acc = evaluator.Pixel_Accuracy()
Acc_class = evaluator.Pixel_Accuracy_Class()
mIoU = evaluator.Mean_Intersection_over_Union()
FWIoU = evaluator.Frequency_Weighted_Intersection_over_Union()
print("epoch: {}\t Acc:{:.3f}, Acc_class:{:.3f}, mIoU:{:.3f}, fwIoU: {:.3f}".format(epoch,Acc, Acc_class, mIoU, FWIoU))
line1='epoch: {}\t''mIoU: {:.3f}'.format(epoch,mIoU)
with open(record_name, 'a') as f:
f.write(line1)
if line1[-1] != '\n':
f.write('\n')
if __name__ == "__main__":
main()
| [((22, 4, 22, 37), 'warnings.filterwarnings', 'warnings.filterwarnings', ({(22, 28, 22, 36): '"""ignore"""'}, {}), "('ignore')", False, 'import warnings\n'), ((23, 11, 23, 36), 'torch.cuda.is_available', 'torch.cuda.is_available', ({}, {}), '()', False, 'import torch\n'), ((25, 11, 25, 44), 'config_utils.re_train_autodeeplab.obtain_retrain_autodeeplab_args', 'obtain_retrain_autodeeplab_args', ({}, {}), '()', False, 'from config_utils.re_train_autodeeplab import obtain_retrain_autodeeplab_args\n'), ((26, 15, 26, 54), 'os.path.join', 'os.path.join', ({(26, 28, 26, 37): '"""./data/"""', (26, 39, 26, 53): 'args.save_path'}, {}), "('./data/', args.save_path)", False, 'import os\n'), ((31, 18, 31, 63), 'os.path.join', 'os.path.join', ({(31, 31, 31, 39): 'save_dir', (31, 41, 31, 62): '"""training_record.txt"""'}, {}), "(save_dir, 'training_record.txt')", False, 'import os\n'), ((50, 16, 50, 37), 'utils.loss.build_criterion', 'build_criterion', ({(50, 32, 50, 36): 'args'}, {}), '(args)', False, 'from utils.loss import build_criterion\n'), ((66, 14, 66, 36), 'utils.metrics.Evaluator', 'Evaluator', ({(66, 24, 66, 35): 'num_classes'}, {}), '(num_classes)', False, 'from utils.metrics import Evaluator\n'), ((27, 11, 27, 34), 'os.path.isdir', 'os.path.isdir', ({(27, 25, 27, 33): 'save_dir'}, {}), '(save_dir)', False, 'import os\n'), ((28, 8, 28, 26), 'os.mkdir', 'os.mkdir', ({(28, 17, 28, 25): 'save_dir'}, {}), '(save_dir)', False, 'import os\n'), ((42, 16, 42, 41), 'retrain_model.build_autodeeplab.Retrain_Autodeeplab', 'Retrain_Autodeeplab', ({(42, 36, 42, 40): 'args'}, {}), '(args)', False, 'from retrain_model.build_autodeeplab import Retrain_Autodeeplab\n'), ((69, 11, 69, 38), 'os.path.isfile', 'os.path.isfile', ({(69, 26, 69, 37): 'args.resume'}, {}), '(args.resume)', False, 'import os\n'), ((80, 17, 80, 31), 'utils.utils.AverageMeter', 'AverageMeter', ({}, {}), '()', False, 'from utils.utils import AverageMeter\n'), ((36, 50, 36, 94), 'dataloaders.make_data_loader', 'dataloaders.make_data_loader', ({(36, 79, 36, 83): 'args'}, {}), '(args, **kwargs)', False, 'import dataloaders\n'), ((52, 12, 52, 34), 'torch.nn.DataParallel', 'nn.DataParallel', ({(52, 28, 52, 33): 'model'}, {}), '(model)', True, 'import torch.nn as nn\n'), ((71, 25, 71, 48), 'torch.load', 'torch.load', ({(71, 36, 71, 47): 'args.resume'}, {}), '(args.resume)', False, 'import torch\n'), ((137, 19, 137, 42), 'numpy.argmax', 'np.argmax', (), '', True, 'import numpy as np\n'), ((91, 16, 91, 31), 'pdb.set_trace', 'pdb.set_trace', ({}, {}), '()', False, 'import pdb\n'), ((131, 17, 131, 32), 'torch.no_grad', 'torch.no_grad', ({}, {}), '()', False, 'import torch\n')] |
xxaxdxcxx/miscellaneous-code | test.py | cdb88783f39e1b9a89fdb12f7cddfe62619e4357 | class Solution:
# dictionary keys are tuples, storing results
# structure of the tuple:
# (level, prev_sum, val_to_include)
# value is number of successful tuples
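    # e.g. with A=[1,2], B=[-2,-1], C=[-1,2], D=[0,2] (the second test below), the
    # cache ends up holding entries such as {(1, 1, -2): 1, (1, 1, -1): 0, ...},
    # meaning: at level 1 (list B), with a running sum of 1 carried over from A,
    # picking -2 leads to exactly one zero-sum tuple in the remaining lists.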
def fourSumCount(self, A, B, C, D, prev_sum=0, level=0, sums={}):
"""
:type A: List[int]
:type B: List[int]
:type C: List[int]
:type D: List[int]
:rtype: int
"""
        # reset the memoization dict at the start of each top-level call so that
        # cached results from a previous test cannot leak into the next one
        sums = {} if level == 0 else sums
# base case:
if level == 3:
total = 0
for num in D:
if prev_sum + num == 0:
print("At level 3, 0 total found using entry w/ value {0}".
format(num))
total += 1
return total
total = 0
lists = [A, B, C]
for num in lists[level]:
if level == 0:
print(str(sums))
if (level, prev_sum, num) in sums:
total += sums[(level, prev_sum, num)]
print("Used dictionary entry {0}, making total {1}".
format((level, prev_sum, num), total))
else:
print("Call from level {0} to level {1}; current sum is {2}".
format(level, level + 1, prev_sum + num))
result = self.fourSumCount(A, B, C, D, prev_sum + num,
level + 1, sums)
sums[(level, prev_sum, num)] = result
total += result
if level == 0:
sums = {}
print(sums)
return total
sol = Solution()
A = [1]
B = [-1]
C = [0]
D = [1]
result = sol.fourSumCount(A, B, C, D)
print("Test 1: {0}".format(result))
A = [1, 2]
B = [-2, -1]
C = [-1, 2]
D = [0, 2]
result = sol.fourSumCount(A, B, C, D)
print("Test 2: {0}".format(result))
| [] |
johngtrs/krux | src/boot.py | 7b6c6d410e29c16ab5d3c05a5aafab618f13a86f | # The MIT License (MIT)
# Copyright (c) 2021 Tom J. Sun
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
# THE SOFTWARE.
import machine
from pmu import axp192
from context import Context
from login import Login
from home import Home
import settings
pmu = axp192()
# Enable power management so that if power button is held down 6 secs,
# it shuts off as expected
pmu.enablePMICSleepMode(True)
ctx = Context()
ctx.display.flash_text(settings.load('splash', ( 'Krux' ), strip=False))
while True:
if not Login(ctx).run():
break
if not Home(ctx).run():
break
ctx.display.flash_text(( 'Shutting down..' ))
ctx.clear()
pmu.setEnterSleepMode()
machine.reset()
| [((29, 6, 29, 14), 'pmu.axp192', 'axp192', ({}, {}), '()', False, 'from pmu import axp192\n'), ((35, 6, 35, 15), 'context.Context', 'Context', ({}, {}), '()', False, 'from context import Context\n'), ((51, 0, 51, 15), 'machine.reset', 'machine.reset', ({}, {}), '()', False, 'import machine\n'), ((37, 23, 37, 71), 'settings.load', 'settings.load', (), '', False, 'import settings\n'), ((40, 11, 40, 21), 'login.Login', 'Login', ({(40, 17, 40, 20): 'ctx'}, {}), '(ctx)', False, 'from login import Login\n'), ((43, 11, 43, 20), 'home.Home', 'Home', ({(43, 16, 43, 19): 'ctx'}, {}), '(ctx)', False, 'from home import Home\n')] |
suhaibroomy/django-smartfields | smartfields/processors/video.py | e9331dc74f72d0254608526f8816aa4bb8f1fca4 | import re
import six
from smartfields.processors.base import ExternalFileProcessor
from smartfields.utils import ProcessingError
__all__ = [
'FFMPEGProcessor'
]
class FFMPEGProcessor(ExternalFileProcessor):
duration_re = re.compile(r'Duration: (?P<hours>\d+):(?P<minutes>\d+):(?P<seconds>\d+)')
progress_re = re.compile(r'time=(?P<hours>\d+):(?P<minutes>\d+):(?P<seconds>\d+)')
error_re = re.compile(r'Invalid data found when processing input')
cmd_template = "ffmpeg -i {input} -y -codec:v {vcodec} -b:v {vbitrate} " \
"-maxrate {maxrate} -bufsize {bufsize} -vf " \
"scale={width}:{height} -threads {threads} -c:a {acodec} {output}"
def stdout_handler(self, line, duration=None):
if duration is None:
duration_time = self.duration_re.search(line)
if duration_time:
duration = self.timedict_to_seconds(duration_time.groupdict())
elif duration != 0:
current_time = self.progress_re.search(line)
if current_time:
seconds = self.timedict_to_seconds(current_time.groupdict())
progress = float(seconds)/duration
progress = progress if progress < 1 else 0.99
self.set_progress(progress)
elif self.error_re.search(line):
raise ProcessingError("Invalid video file or unknown video format.")
return (duration,)
def timedict_to_seconds(self, timedict):
seconds = 0
for key, t in six.iteritems(timedict):
            if key == 'seconds':
                seconds += int(t)
            elif key == 'minutes':
                seconds += int(t) * 60
            elif key == 'hours':
                seconds += int(t) * 3600
return seconds
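    # Illustrative ffmpeg output (example values only) that the regexes above match:
    #   "  Duration: 00:01:30.05, start: 0.000000, bitrate: 1052 kb/s"
    #       -> duration_re captures 00:01:30, i.e. duration = 90 seconds
    #   "frame=  675 fps= 30 q=28.0 size= 2048kB time=00:00:45.00 bitrate=..."
    #       -> progress_re captures 00:00:45, i.e. progress = 45 / 90 = 0.5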
| [((12, 18, 12, 91), 're.compile', 're.compile', ({(12, 29, 12, 90): '"""Duration: (?P<hours>\\\\d+):(?P<minutes>\\\\d+):(?P<seconds>\\\\d+)"""'}, {}), "('Duration: (?P<hours>\\\\d+):(?P<minutes>\\\\d+):(?P<seconds>\\\\d+)')", False, 'import re\n'), ((13, 18, 13, 86), 're.compile', 're.compile', ({(13, 29, 13, 85): '"""time=(?P<hours>\\\\d+):(?P<minutes>\\\\d+):(?P<seconds>\\\\d+)"""'}, {}), "('time=(?P<hours>\\\\d+):(?P<minutes>\\\\d+):(?P<seconds>\\\\d+)')", False, 'import re\n'), ((14, 15, 14, 70), 're.compile', 're.compile', ({(14, 26, 14, 69): '"""Invalid data found when processing input"""'}, {}), "('Invalid data found when processing input')", False, 'import re\n'), ((37, 22, 37, 45), 'six.iteritems', 'six.iteritems', ({(37, 36, 37, 44): 'timedict'}, {}), '(timedict)', False, 'import six\n'), ((32, 18, 32, 80), 'smartfields.utils.ProcessingError', 'ProcessingError', ({(32, 34, 32, 79): '"""Invalid video file or unknown video format."""'}, {}), "('Invalid video file or unknown video format.')", False, 'from smartfields.utils import ProcessingError\n')] |
ramtingh/vmtk | tests/test_vmtkScripts/test_vmtksurfaceconnectivity.py | 4d6f58ce65d73628353ba2b110cbc29a2e7aa7b3 | ## Program: VMTK
## Language: Python
## Date: January 12, 2018
## Version: 1.4
## Copyright (c) Richard Izzo, Luca Antiga, All rights reserved.
## See LICENSE file for details.
## This software is distributed WITHOUT ANY WARRANTY; without even
## the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR
## PURPOSE. See the above copyright notices for more information.
## Note: this code was contributed by
## Richard Izzo (Github @rlizzo)
## University at Buffalo
import pytest
import vmtk.vmtksurfaceconnectivity as connectivity
import os
@pytest.fixture(scope='module')
def aorta_surface_two_segments(input_datadir):
import vmtk.vmtksurfacereader as surfacereader
reader = surfacereader.vmtkSurfaceReader()
reader.InputFileName = os.path.join(input_datadir, 'aorta-surface-two-segments.vtp')
reader.Execute()
return reader.Surface
def test_extract_largest_surface(aorta_surface_two_segments, compare_surfaces):
name = __name__ + '_test_extract_largest_surface.vtp'
connectiv = connectivity.vmtkSurfaceConnectivity()
connectiv.Surface = aorta_surface_two_segments
connectiv.Method = 'largest'
connectiv.CleanOutput = 1
connectiv.Execute()
assert compare_surfaces(connectiv.Surface, name) == True
def test_extract_closest_to_reference_surface(aorta_surface_two_segments, aorta_surface_reference, compare_surfaces):
name = __name__ + '_test_extract_closest_to_reference_surface.vtp'
connectiv = connectivity.vmtkSurfaceConnectivity()
connectiv.Surface = aorta_surface_two_segments
connectiv.Method = 'closest'
connectiv.ReferenceSurface = aorta_surface_reference
connectiv.Execute()
assert compare_surfaces(connectiv.Surface, name) == True
def test_extract_closest_to_point(aorta_surface_two_segments, compare_surfaces):
name = __name__ + '_test_extract_closest_to_point.vtp'
connectiv = connectivity.vmtkSurfaceConnectivity()
connectiv.Surface = aorta_surface_two_segments
connectiv.Method = 'closest'
connectiv.ClosestPoint = [0.0, 0.0, 0.0]
connectiv.Execute()
assert compare_surfaces(connectiv.Surface, name) == True
| [((22, 1, 22, 31), 'pytest.fixture', 'pytest.fixture', (), '', False, 'import pytest\n'), ((25, 13, 25, 46), 'vmtk.vmtksurfacereader.vmtkSurfaceReader', 'surfacereader.vmtkSurfaceReader', ({}, {}), '()', True, 'import vmtk.vmtksurfacereader as surfacereader\n'), ((26, 27, 26, 88), 'os.path.join', 'os.path.join', ({(26, 40, 26, 53): 'input_datadir', (26, 55, 26, 87): '"""aorta-surface-two-segments.vtp"""'}, {}), "(input_datadir, 'aorta-surface-two-segments.vtp')", False, 'import os\n'), ((33, 16, 33, 54), 'vmtk.vmtksurfaceconnectivity.vmtkSurfaceConnectivity', 'connectivity.vmtkSurfaceConnectivity', ({}, {}), '()', True, 'import vmtk.vmtksurfaceconnectivity as connectivity\n'), ((44, 16, 44, 54), 'vmtk.vmtksurfaceconnectivity.vmtkSurfaceConnectivity', 'connectivity.vmtkSurfaceConnectivity', ({}, {}), '()', True, 'import vmtk.vmtksurfaceconnectivity as connectivity\n'), ((55, 16, 55, 54), 'vmtk.vmtksurfaceconnectivity.vmtkSurfaceConnectivity', 'connectivity.vmtkSurfaceConnectivity', ({}, {}), '()', True, 'import vmtk.vmtksurfaceconnectivity as connectivity\n')] |
Kingpin-Apps/django-sssoon | sssoon/forms.py | 2a44d0d19e70dcd3127f9425c0ed4ba52355a1d2 | from django import forms
from nocaptcha_recaptcha.fields import NoReCaptchaField
class NewsletterForm(forms.Form):
email = forms.EmailField(label='Email', required=True,
widget=forms.TextInput(attrs={
'id': 'newsletter-email',
'type': 'email',
'title': 'Email',
'name': 'email',
'class': 'form-control transparent',
'placeholder': '[email protected]'
}))
captcha = NoReCaptchaField() | [((16, 14, 16, 32), 'nocaptcha_recaptcha.fields.NoReCaptchaField', 'NoReCaptchaField', ({}, {}), '()', False, 'from nocaptcha_recaptcha.fields import NoReCaptchaField\n'), ((7, 36, 14, 31), 'django.forms.TextInput', 'forms.TextInput', (), '', False, 'from django import forms\n')] |
william01110111/simple_run_menu | simple_run_menu.py | 804c6bb8d6c63c3a4d4c6d3377601bd44fb0eeea | #! /bin/python3
# simple run menu
import os
import stat
def is_file_executable(path):
executable = stat.S_IEXEC | stat.S_IXGRP | stat.S_IXOTH
if not os.path.isfile(path):
return False
st = os.stat(path)
mode = st.st_mode
if not mode & executable:
return False
return True
def get_files_in_dir(directory):
if directory == '':
directory = '.'
if directory[-1] != '/':
directory += '/'
return [directory + i for i in os.listdir(directory)]
def command_to_name(command):
filename_with_ext = os.path.basename(command)
filename = filename_with_ext.rsplit('.', 1)[0]
name = filename.replace('_', ' ')
capitalized = ' '.join([i[0].upper() + i[1:] for i in name.split()])
return capitalized
class Option:
options = {}
@staticmethod
def add(command):
		Option.options['a'] = Option(command, command, 'a')  # 'options' is a class attribute; an unqualified 'options' here would raise NameError
def __init__(self, name, command, trigger):
self.name = name
self.command = command
self.trigger = trigger
if __name__ == "__main__":
print([command_to_name(i) for i in get_files_in_dir('') if is_file_executable(i)])
| [((11, 6, 11, 19), 'os.stat', 'os.stat', ({(11, 14, 11, 18): 'path'}, {}), '(path)', False, 'import os\n'), ((25, 21, 25, 46), 'os.path.basename', 'os.path.basename', ({(25, 38, 25, 45): 'command'}, {}), '(command)', False, 'import os\n'), ((9, 8, 9, 28), 'os.path.isfile', 'os.path.isfile', ({(9, 23, 9, 27): 'path'}, {}), '(path)', False, 'import os\n'), ((22, 32, 22, 53), 'os.listdir', 'os.listdir', ({(22, 43, 22, 52): 'directory'}, {}), '(directory)', False, 'import os\n')] |
stevemats/mne-python | mne/io/cnt/tests/test_cnt.py | 47051833f21bb372d60afc3adbf4305648ac7f69 |
# Author: Jaakko Leppakangas <[email protected]>
# Joan Massich <[email protected]>
#
# License: BSD-3-Clause
import os.path as op
import numpy as np
from numpy.testing import assert_array_equal
import pytest
from mne import pick_types
from mne.datasets import testing
from mne.io.tests.test_raw import _test_raw_reader
from mne.io.cnt import read_raw_cnt
from mne.annotations import read_annotations
data_path = testing.data_path(download=False)
fname = op.join(data_path, 'CNT', 'scan41_short.cnt')
@testing.requires_testing_data
def test_data():
"""Test reading raw cnt files."""
with pytest.warns(RuntimeWarning, match='number of bytes'):
raw = _test_raw_reader(read_raw_cnt, input_fname=fname,
eog='auto', misc=['NA1', 'LEFT_EAR'])
# make sure we use annotations event if we synthesized stim
assert len(raw.annotations) == 6
eog_chs = pick_types(raw.info, eog=True, exclude=[])
assert len(eog_chs) == 2 # test eog='auto'
assert raw.info['bads'] == ['LEFT_EAR', 'VEOGR'] # test bads
# the data has "05/10/200 17:35:31" so it is set to None
assert raw.info['meas_date'] is None
@testing.requires_testing_data
def test_compare_events_and_annotations():
"""Test comparing annotations and events."""
with pytest.warns(RuntimeWarning, match='Could not parse meas date'):
raw = read_raw_cnt(fname)
events = np.array([[333, 0, 7],
[1010, 0, 7],
[1664, 0, 109],
[2324, 0, 7],
[2984, 0, 109]])
annot = read_annotations(fname)
assert len(annot) == 6
assert_array_equal(annot.onset[:-1], events[:, 0] / raw.info['sfreq'])
assert 'STI 014' not in raw.info['ch_names']
| [((19, 12, 19, 45), 'mne.datasets.testing.data_path', 'testing.data_path', (), '', False, 'from mne.datasets import testing\n'), ((20, 8, 20, 53), 'os.path.join', 'op.join', ({(20, 16, 20, 25): 'data_path', (20, 27, 20, 32): '"""CNT"""', (20, 34, 20, 52): '"""scan41_short.cnt"""'}, {}), "(data_path, 'CNT', 'scan41_short.cnt')", True, 'import os.path as op\n'), ((33, 14, 33, 56), 'mne.pick_types', 'pick_types', (), '', False, 'from mne import pick_types\n'), ((46, 13, 50, 39), 'numpy.array', 'np.array', ({(46, 22, 50, 38): '[[333, 0, 7], [1010, 0, 7], [1664, 0, 109], [2324, 0, 7], [2984, 0, 109]]'}, {}), '([[333, 0, 7], [1010, 0, 7], [1664, 0, 109], [2324, 0, 7], [2984, 0,\n 109]])', True, 'import numpy as np\n'), ((52, 12, 52, 35), 'mne.annotations.read_annotations', 'read_annotations', ({(52, 29, 52, 34): 'fname'}, {}), '(fname)', False, 'from mne.annotations import read_annotations\n'), ((54, 4, 54, 74), 'numpy.testing.assert_array_equal', 'assert_array_equal', ({(54, 23, 54, 39): 'annot.onset[:-1]', (54, 41, 54, 73): "(events[:, (0)] / raw.info['sfreq'])"}, {}), "(annot.onset[:-1], events[:, (0)] / raw.info['sfreq'])", False, 'from numpy.testing import assert_array_equal\n'), ((26, 9, 26, 62), 'pytest.warns', 'pytest.warns', (), '', False, 'import pytest\n'), ((27, 14, 28, 68), 'mne.io.tests.test_raw._test_raw_reader', '_test_raw_reader', (), '', False, 'from mne.io.tests.test_raw import _test_raw_reader\n'), ((44, 9, 44, 72), 'pytest.warns', 'pytest.warns', (), '', False, 'import pytest\n'), ((45, 14, 45, 33), 'mne.io.cnt.read_raw_cnt', 'read_raw_cnt', ({(45, 27, 45, 32): 'fname'}, {}), '(fname)', False, 'from mne.io.cnt import read_raw_cnt\n')] |
Track-your-parliament/track-your-parliament-data | parliament_proposal_fetcher.py | 1ab9d9fe5cf4921e4cc792d0e3db3263557daafd | import urllib.request, json
import pandas as pd
baseUrl = 'https://avoindata.eduskunta.fi/api/v1/tables/VaskiData'
parameters = 'rows?columnName=Eduskuntatunnus&columnValue=LA%25&perPage=100'
page = 0
df = ''  # placeholder; replaced with a DataFrame when the first page is processed
while True:
print(f'Fetching page number {page}')
with urllib.request.urlopen(f'{baseUrl}/{parameters}&page={page}') as url:
data = json.loads(url.read().decode())
if page == 0:
columns = data['columnNames']
df = pd.DataFrame(columns=columns)
dataRows = data['rowData']
df = df.append(pd.DataFrame(dataRows, columns=data['columnNames']), ignore_index=True)
if data['hasMore'] == False:
break
page = page + 1
df.to_csv('./data/parliament_proposals_raw.csv', sep=';', encoding='utf-8') | [((17, 17, 17, 46), 'pandas.DataFrame', 'pd.DataFrame', (), '', True, 'import pandas as pd\n'), ((20, 23, 20, 74), 'pandas.DataFrame', 'pd.DataFrame', (), '', True, 'import pandas as pd\n')] |
emaldonadocruz/UTuning | examples/Catboost_regression-scorer_usage.py | b32207bcbeb80e4c07e098bcbe4d5ce8b3fee778 | # -*- coding: utf-8 -*-
"""
Created on Mon Sep 20 16:15:37 2021
@author: em42363
"""
# In[1]: Import functions
'''
CatBoost is a high-performance open source library for gradient boosting
on decision trees
'''
from catboost import CatBoostRegressor
from sklearn.model_selection import train_test_split
import pandas as pd
import seaborn as sns
import numpy as np
import os
os.chdir(os.path.dirname(__file__))
import sys
sys.path.insert(0, r'C:\Users\eduar\OneDrive\PhD\UTuning')
sys.path.insert(0, r'C:\Users\em42363\OneDrive\PhD\UTuning')
from UTuning import scorer, plots
#df = pd.read_csv(r'C:\Users\eduar\OneDrive\PhD\UTuning\dataset\unconv_MV.csv')
df = pd.read_csv(r'C:\Users\em42363\OneDrive\PhD\UTuning\dataset\unconv_MV.csv')
import random
import matplotlib.pyplot as plt
# In[1]: Split train test
'''
Perform split train test
'''
y = df['Production'].values
X = df[['Por', 'LogPerm', 'Brittle', 'TOC']].values
X_train, X_test, y_train, y_test = train_test_split(X, y, test_size=0.33)
# In[6]: Regressor
'''
Define the regressor, fit the model and predict the estimates
'''
model = CatBoostRegressor(iterations=1000, learning_rate=0.2, loss_function='RMSEWithUncertainty',
verbose=False, random_seed=0)
model.fit(X_train, y_train)
estimates = model.predict(X_test)
# In[9]: Plot error line
'''
Use UTuning to plot error lines
'''
plots.error_line(estimates[:, 0], y_test, np.sqrt(estimates[:, 1]), Frac=1)
# %% Define the virtual ensemble
def virt_ensemble(X_train,y_train, num_samples=100, iters=1000, lr=0.1): # 100, .1
ens_preds = []
model = CatBoostRegressor(iterations=iters, learning_rate=lr, loss_function='RMSEWithUncertainty',
verbose=False, random_seed=1)
model.fit(X_train,y_train)
ens_preds = model.virtual_ensembles_predict(X_test, prediction_type='VirtEnsembles',
virtual_ensembles_count=num_samples,
thread_count=8)
return np.asarray(ens_preds)
# %%
n_quantiles = 11
perc = np.linspace(0.0, 1.00, n_quantiles)
Samples = 10
ens_preds=virt_ensemble(X_train,y_train, num_samples=Samples)
Pred_array = ens_preds[:,:,0]
Knowledge_u = np.sqrt(np.var(Pred_array, axis=1))  # Knowledge uncertainty
Data_u = np.sqrt(np.mean(ens_preds[:, :, 1], axis=1))  # Data uncertainty
Sigma = Knowledge_u + Data_u
# %%
'''
We use UTuning to return the Indicator Function and plot the
accuracy plot and diagnose our model.
'''
scorer = scorer.scorer(Pred_array, y_test, Sigma)
IF_array = scorer.IndicatorFunction()
avgIF = np.mean(IF_array,axis=0)
# % Second plot test
plots.error_accuracy_plot(perc,IF_array,Pred_array,y_test,Sigma)
# %
print('Accuracy = {0:2.2f}'.format(scorer.Accuracy()))
print('Precision = {0:2.2f}'.format(scorer.Precision()))
print('Goodness = {0:2.2f}'.format(scorer.Goodness()))
| [((24, 0, 24, 58), 'sys.path.insert', 'sys.path.insert', ({(24, 16, 24, 17): '(0)', (24, 19, 24, 57): '"""C:\\\\Users\\\\eduar\\\\OneDrive\\\\PhD\\\\UTuning"""'}, {}), "(0, 'C:\\\\Users\\\\eduar\\\\OneDrive\\\\PhD\\\\UTuning')", False, 'import sys\n'), ((25, 0, 25, 60), 'sys.path.insert', 'sys.path.insert', ({(25, 16, 25, 17): '(0)', (25, 19, 25, 59): '"""C:\\\\Users\\\\em42363\\\\OneDrive\\\\PhD\\\\UTuning"""'}, {}), "(0, 'C:\\\\Users\\\\em42363\\\\OneDrive\\\\PhD\\\\UTuning')", False, 'import sys\n'), ((30, 5, 30, 80), 'pandas.read_csv', 'pd.read_csv', ({(30, 17, 30, 79): '"""C:\\\\Users\\\\em42363\\\\OneDrive\\\\PhD\\\\UTuning\\\\dataset\\\\unconv_MV.csv"""'}, {}), "(\n 'C:\\\\Users\\\\em42363\\\\OneDrive\\\\PhD\\\\UTuning\\\\dataset\\\\unconv_MV.csv')", True, 'import pandas as pd\n'), ((42, 35, 42, 73), 'sklearn.model_selection.train_test_split', 'train_test_split', (), '', False, 'from sklearn.model_selection import train_test_split\n'), ((49, 8, 50, 55), 'catboost.CatBoostRegressor', 'CatBoostRegressor', (), '', False, 'from catboost import CatBoostRegressor\n'), ((81, 7, 81, 42), 'numpy.linspace', 'np.linspace', ({(81, 19, 81, 22): '0.0', (81, 24, 81, 28): '1.0', (81, 30, 81, 41): 'n_quantiles'}, {}), '(0.0, 1.0, n_quantiles)', True, 'import numpy as np\n'), ((101, 8, 101, 32), 'numpy.mean', 'np.mean', (), '', True, 'import numpy as np\n'), ((104, 0, 104, 64), 'UTuning.plots.error_accuracy_plot', 'plots.error_accuracy_plot', ({(104, 26, 104, 30): 'perc', (104, 31, 104, 39): 'IF_array', (104, 40, 104, 50): 'Pred_array', (104, 51, 104, 57): 'y_test', (104, 58, 104, 63): 'Sigma'}, {}), '(perc, IF_array, Pred_array, y_test, Sigma)', False, 'from UTuning import scorer, plots\n'), ((21, 9, 21, 34), 'os.path.dirname', 'os.path.dirname', ({(21, 25, 21, 33): '__file__'}, {}), '(__file__)', False, 'import os\n'), ((59, 42, 59, 66), 'numpy.sqrt', 'np.sqrt', ({(59, 50, 59, 65): 'estimates[:, (1)]'}, {}), '(estimates[:, (1)])', True, 'import numpy as np\n'), ((66, 12, 67, 55), 'catboost.CatBoostRegressor', 'CatBoostRegressor', (), '', False, 'from catboost import CatBoostRegressor\n'), ((77, 11, 77, 32), 'numpy.asarray', 'np.asarray', ({(77, 22, 77, 31): 'ens_preds'}, {}), '(ens_preds)', True, 'import numpy as np\n'), ((89, 20, 89, 45), 'numpy.var', 'np.var', (), '', True, 'import numpy as np\n'), ((90, 15, 90, 47), 'numpy.mean', 'np.mean', (), '', True, 'import numpy as np\n')] |
PotasnikM/translator-to-suJSON | sujson/_logger.py | abb2001c78d431bd2087754666bc896ba0543dfd | import logging
from platform import system
from tqdm import tqdm
from multiprocessing import Lock
loggers = {}
# https://stackoverflow.com/questions/38543506/
class TqdmLoggingHandler(logging.Handler):
def __init__(self, level=logging.NOTSET):
super(TqdmLoggingHandler, self).__init__(level)
def emit(self, record):
try:
msg = self.format(record)
tqdm.set_lock(Lock())
tqdm.write(msg)
self.flush()
except (KeyboardInterrupt, SystemExit):
raise
except:
self.handleError(record)
def setup_custom_logger(name):
"""
Create a logger with a certain name and level
"""
global loggers
if loggers.get(name):
return loggers.get(name)
formatter = logging.Formatter(
fmt='%(levelname)s: %(message)s'
)
handler = TqdmLoggingHandler()
handler.setFormatter(formatter)
if system() not in ['Windows', 'cli']:
logging.addLevelName(logging.ERROR, "\033[1;31m%s\033[1;0m" % logging.getLevelName(logging.ERROR))
logging.addLevelName(logging.WARNING, "\033[1;33m%s\033[1;0m" % logging.getLevelName(logging.WARNING))
logging.addLevelName(logging.INFO, "\033[1;34m%s\033[1;0m" % logging.getLevelName(logging.INFO))
logging.addLevelName(logging.DEBUG, "\033[1;35m%s\033[1;0m" % logging.getLevelName(logging.DEBUG))
logger = logging.getLogger(name)
logger.setLevel(logging.WARNING)
# if (logger.hasHandlers()):
# logger.handlers.clear()
if logger.handlers:
logger.handlers = []
logger.addHandler(handler)
    # key the cache by the actual logger name; dict(name=logger) would always use
    # the literal key "name", so the lookup at the top of this function never hit
    loggers[name] = logger
return logger
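# Illustrative usage (not part of the module):
#     logger = setup_custom_logger('sujson')
#     logger.setLevel(logging.DEBUG)   # the default level set above is WARNING
#     logger.debug('emitted through tqdm.write, so progress bars are not broken up')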
| [((34, 16, 36, 5), 'logging.Formatter', 'logging.Formatter', (), '', False, 'import logging\n'), ((47, 13, 47, 36), 'logging.getLogger', 'logging.getLogger', ({(47, 31, 47, 35): 'name'}, {}), '(name)', False, 'import logging\n'), ((41, 7, 41, 15), 'platform.system', 'system', ({}, {}), '()', False, 'from platform import system\n'), ((18, 12, 18, 27), 'tqdm.tqdm.write', 'tqdm.write', ({(18, 23, 18, 26): 'msg'}, {}), '(msg)', False, 'from tqdm import tqdm\n'), ((17, 26, 17, 32), 'multiprocessing.Lock', 'Lock', ({}, {}), '()', False, 'from multiprocessing import Lock\n'), ((42, 70, 42, 105), 'logging.getLevelName', 'logging.getLevelName', ({(42, 91, 42, 104): 'logging.ERROR'}, {}), '(logging.ERROR)', False, 'import logging\n'), ((43, 72, 43, 109), 'logging.getLevelName', 'logging.getLevelName', ({(43, 93, 43, 108): 'logging.WARNING'}, {}), '(logging.WARNING)', False, 'import logging\n'), ((44, 69, 44, 103), 'logging.getLevelName', 'logging.getLevelName', ({(44, 90, 44, 102): 'logging.INFO'}, {}), '(logging.INFO)', False, 'import logging\n'), ((45, 70, 45, 105), 'logging.getLevelName', 'logging.getLevelName', ({(45, 91, 45, 104): 'logging.DEBUG'}, {}), '(logging.DEBUG)', False, 'import logging\n')] |
Gicehajunior/face-recognition-detection-OpenCv-Python | face-detect.py | 6551285ce5b4532d8b6f3ad6b8e9a29564673ea9 | import cv2
import sys
import playsound
face_cascade = cv2.CascadeClassifier('cascades/haarcascade_frontalface_default.xml')
# capture video using cv2
video_capture = cv2.VideoCapture(0)
while True:
# capture frame by frame, i.e, one by one
ret, frame = video_capture.read()
gray = cv2.cvtColor(frame, cv2.COLOR_BGR2GRAY)
# for each face on the projected on the frame
faces = face_cascade.detectMultiScale(
gray,
scaleFactor = 1.1,
minNeighbors = 5,
# minSize(35, 35)
)
# loop through the video faces for detection
for (x, y, w, h) in faces:
point1 = x+w
point2 = y+h
frame_color = (50, 50, 200)
rectangleBox = cv2.rectangle(frame, (x, y), (point1, point2), frame_color, 2)
cv2.imshow('video', frame)
    # detectMultiScale returns an empty tuple when nothing is detected, and a
    # tuple has no .any(), so test the length instead
    if len(faces) > 0:
playsound.playsound('openDoorAlert.mp3', True)
if len(faces) > 1:
print("There are " + str(len(faces)) + " peoples at the gate")
else:
print("There is " + str(len(faces)) + " person at the gate")
else:
pass
if cv2.waitKey(1) & 0xFF == ord('q'):
sys.exit()
| [((5, 15, 5, 84), 'cv2.CascadeClassifier', 'cv2.CascadeClassifier', ({(5, 37, 5, 83): '"""cascades/haarcascade_frontalface_default.xml"""'}, {}), "('cascades/haarcascade_frontalface_default.xml')", False, 'import cv2\n'), ((8, 16, 8, 35), 'cv2.VideoCapture', 'cv2.VideoCapture', ({(8, 33, 8, 34): '0'}, {}), '(0)', False, 'import cv2\n'), ((14, 11, 14, 50), 'cv2.cvtColor', 'cv2.cvtColor', ({(14, 24, 14, 29): 'frame', (14, 31, 14, 49): 'cv2.COLOR_BGR2GRAY'}, {}), '(frame, cv2.COLOR_BGR2GRAY)', False, 'import cv2\n'), ((30, 23, 30, 85), 'cv2.rectangle', 'cv2.rectangle', ({(30, 37, 30, 42): 'frame', (30, 44, 30, 50): '(x, y)', (30, 52, 30, 68): '(point1, point2)', (30, 70, 30, 81): 'frame_color', (30, 83, 30, 84): '2'}, {}), '(frame, (x, y), (point1, point2), frame_color, 2)', False, 'import cv2\n'), ((31, 8, 31, 34), 'cv2.imshow', 'cv2.imshow', ({(31, 19, 31, 26): '"""video"""', (31, 28, 31, 33): 'frame'}, {}), "('video', frame)", False, 'import cv2\n'), ((33, 12, 33, 58), 'playsound.playsound', 'playsound.playsound', ({(33, 32, 33, 51): '"""openDoorAlert.mp3"""', (33, 53, 33, 57): '(True)'}, {}), "('openDoorAlert.mp3', True)", False, 'import playsound\n'), ((42, 12, 42, 22), 'sys.exit', 'sys.exit', ({}, {}), '()', False, 'import sys\n'), ((41, 11, 41, 25), 'cv2.waitKey', 'cv2.waitKey', ({(41, 23, 41, 24): '(1)'}, {}), '(1)', False, 'import cv2\n')] |
ryanlovett/sis-cli | sis/enrollments.py | 5efe5b9344b547c3f1365ef63a0ad33ec013fcca | # vim:set et sw=4 ts=4:
import logging
import sys
import jmespath
from . import sis, classes
# logging
logging.basicConfig(stream=sys.stdout, level=logging.WARNING)
logger = logging.getLogger(__name__)
# SIS endpoint
enrollments_uri = "https://apis.berkeley.edu/sis/v2/enrollments"
# apparently some courses have LAB without LEC (?)
section_codes = ['LEC', 'SES', 'WBL']
async def get_student_enrollments(app_id, app_key, identifier, term_id,
id_type='campus-uid', enrolled_only='true', primary_only='true',
course_attr='course-id'):
    '''Gets a student's enrollments.'''
uri = enrollments_uri + f"/students/{identifier}"
headers = {
"Accept": "application/json",
"app_id": app_id,
"app_key": app_key
}
params = {
"page-number": 1,
"page-size": 100, # maximum
"id-type": id_type,
"term-id": term_id,
"enrolled-only": enrolled_only,
"primary-only": primary_only,
}
enrollments = await sis.get_items(uri, params, headers, 'studentEnrollments')
logger.debug(f"enrollments: {enrollments}")
if course_attr == 'course-id':
flt = '[].classSection.class.course.identifiers[?type == `cs-course-id`].id[]'
elif course_attr == 'display-name':
flt = '[].classSection.class.course.displayName'
return jmespath.search(flt, enrollments)
async def get_section_enrollments(app_id, app_key, term_id, section_id):
'''Gets a course section's enrollments.'''
uri = enrollments_uri + f"/terms/{term_id}/classes/sections/{section_id}"
headers = {
"Accept": "application/json",
"app_id": app_id,
"app_key": app_key
}
params = {
"page-number": 1,
"page-size": 100, # maximum
}
enrollments = await sis.get_items(uri, params, headers, 'classSectionEnrollments')
logger.info(f"{section_id}: {len(enrollments)}")
return enrollments
def section_id(section):
    '''Return a section's ID, e.g. "15807".'''
return section['id']
def section_subject_area(section):
'''Return a section's subject area, e.g. "STAT".'''
return jmespath.search('class.course.subjectArea.code', section)
def section_catalog_number(section):
'''Return a section's formatted catalog number, e.g. "215B".'''
return jmespath.search('class.course.catalogNumber.formatted', section)
def section_display_name(section):
'''Return a section's displayName, e.g. "STAT 215B".'''
return jmespath.search('class.course.displayName', section)
def section_is_primary(section):
'''Return a section's primary status.'''
return jmespath.search('association.primary', section)
def enrollment_campus_uid(enrollment):
    '''Return an enrollment's campus UID.'''
expr = "student.identifiers[?disclose && type=='campus-uid'].id | [0]"
return jmespath.search(expr, enrollment)
def enrollment_campus_email(enrollment):
'''Return an enrollment's campus email if found, otherwise
return any other email.'''
expr = "student.emails[?type.code=='CAMP'].emailAddress | [0]"
email = jmespath.search(expr, enrollment)
if email: return email
expr = "student.emails[?type.code=='OTHR'].emailAddress | [0]"
return jmespath.search(expr, enrollment)
def get_enrollment_uids(enrollments):
'''Given an SIS enrollment, return the student's campus UID.'''
return list(map(lambda x: enrollment_campus_uid(x), enrollments))
def get_enrollment_emails(enrollments):
'''Given an SIS enrollment, return the student's campus email.'''
return list(map(lambda x: enrollment_campus_email(x), enrollments))
def enrollment_status(enrollment):
'''Return an enrollment's status, e.g. 'E', 'W', or 'D'.'''
return jmespath.search('enrollmentStatus.status.code', enrollment)
def filter_enrollment_status(enrollments, status):
return list(filter(lambda x: enrollment_status(x) == status, enrollments))
def status_code(constituents):
return {'enrolled':'E', 'waitlisted':'W', 'dropped':'D'}[constituents]
async def get_students(term_id, class_number, constituents, credentials, exact, identifier='campus-uid'):
'''Given a term and class section number, return the student ids.'''
if exact:
# get all enrollments for this section
enrollments = await get_section_enrollments(
credentials['enrollments_id'], credentials['enrollments_key'],
term_id, class_number
)
else:
# get the data for the specified section
section = await classes.get_sections_by_id(
credentials['classes_id'], credentials['classes_key'],
term_id, class_number, include_secondary='true'
)
# extract the subject area and catalog number, e.g. STAT C8
subject_area = section_subject_area(section)
catalog_number = section_catalog_number(section)
logger.info(f"{subject_area} {catalog_number}")
# get enrollments in all matching sections
enrollments = await get_enrollments(
credentials['enrollments_id'], credentials['enrollments_key'],
term_id, subject_area, catalog_number
)
if constituents == 'students':
constituent_enrollments = enrollments
else:
# filter for those enrollments with a specific status code
constituent_enrollments = filter_enrollment_status(
enrollments, status_code(constituents))
# function to extract an enrollment attribute
if identifier == 'campus-uid':
enrollment_attr_fn = enrollment_campus_uid
else:
enrollment_attr_fn = enrollment_campus_email
logger.debug(f"constituent_enrollments: {constituent_enrollments}")
# we convert to a set to collapse overlapping enrollments between
# lectures and labs (if not exact)
return set(map(lambda x: enrollment_attr_fn(x), constituent_enrollments))
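# Illustrative call (hypothetical term id, class number and credentials), run from
# inside an async context:
#     emails = await get_students('2228', '25344', 'enrolled', credentials,
#                                 exact=True, identifier='campus-email')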
def filter_lectures(sections, relevant_codes=section_codes):
'''
Given a list of SIS sections:
[{'code': '32227', 'description': '2019 Spring ASTRON 128 001 LAB 001'}]
return only the section codes which are lectures.
'''
codes = []
for section in sections:
if 'description' not in section: continue
desc_words = set(section['description'].split())
if len(set(desc_words) & set(relevant_codes)) > 0:
codes.append(section['code'])
return codes
async def get_lecture_section_ids(app_id, app_key, term_id, subject_area, catalog_number=None):
'''
Given a term, subject, and course number, return the lecture section ids.
We only care about the lecture enrollments since they contain a superset
of the enrollments of all other section types (lab, dis).
'''
uri = enrollments_uri + f'/terms/{term_id}/classes/sections/descriptors'
headers = {
"Accept": "application/json",
"app_id": app_id,
"app_key": app_key
}
params = {
'page-number': 1,
"subject-area-code": subject_area
}
if catalog_number:
params["catalog-number"] = catalog_number
# Retrieve the sections associated with the course which includes
# both lecture and sections.
sections = await sis.get_items(uri, params, headers, 'fieldValues')
return filter_lectures(sections)
async def get_enrollments(app_id, app_key, term_id, subject_area, catalog_number):
'''Gets a course's enrollments from the SIS.'''
logger.info(f"get_enrollments: {subject_area} {catalog_number}")
# get the lectures
lecture_codes = await get_lecture_section_ids(app_id, app_key, term_id,
subject_area, catalog_number)
# get the enrollments in each lecture
enrollments = []
for section_id in lecture_codes:
enrollments += await get_section_enrollments(app_id, app_key, term_id, section_id)
logger.info(f'enrollments: {len(enrollments)}')
return enrollments
| [((10, 0, 10, 61), 'logging.basicConfig', 'logging.basicConfig', (), '', False, 'import logging\n'), ((11, 9, 11, 36), 'logging.getLogger', 'logging.getLogger', ({(11, 27, 11, 35): '__name__'}, {}), '(__name__)', False, 'import logging\n'), ((43, 11, 43, 44), 'jmespath.search', 'jmespath.search', ({(43, 27, 43, 30): 'flt', (43, 32, 43, 43): 'enrollments'}, {}), '(flt, enrollments)', False, 'import jmespath\n'), ((67, 11, 67, 68), 'jmespath.search', 'jmespath.search', ({(67, 27, 67, 58): '"""class.course.subjectArea.code"""', (67, 60, 67, 67): 'section'}, {}), "('class.course.subjectArea.code', section)", False, 'import jmespath\n'), ((71, 11, 71, 75), 'jmespath.search', 'jmespath.search', ({(71, 27, 71, 65): '"""class.course.catalogNumber.formatted"""', (71, 67, 71, 74): 'section'}, {}), "('class.course.catalogNumber.formatted', section)", False, 'import jmespath\n'), ((75, 11, 75, 63), 'jmespath.search', 'jmespath.search', ({(75, 27, 75, 53): '"""class.course.displayName"""', (75, 55, 75, 62): 'section'}, {}), "('class.course.displayName', section)", False, 'import jmespath\n'), ((79, 11, 79, 58), 'jmespath.search', 'jmespath.search', ({(79, 27, 79, 48): '"""association.primary"""', (79, 50, 79, 57): 'section'}, {}), "('association.primary', section)", False, 'import jmespath\n'), ((84, 11, 84, 44), 'jmespath.search', 'jmespath.search', ({(84, 27, 84, 31): 'expr', (84, 33, 84, 43): 'enrollment'}, {}), '(expr, enrollment)', False, 'import jmespath\n'), ((90, 12, 90, 45), 'jmespath.search', 'jmespath.search', ({(90, 28, 90, 32): 'expr', (90, 34, 90, 44): 'enrollment'}, {}), '(expr, enrollment)', False, 'import jmespath\n'), ((93, 11, 93, 44), 'jmespath.search', 'jmespath.search', ({(93, 27, 93, 31): 'expr', (93, 33, 93, 43): 'enrollment'}, {}), '(expr, enrollment)', False, 'import jmespath\n'), ((105, 11, 105, 70), 'jmespath.search', 'jmespath.search', ({(105, 27, 105, 57): '"""enrollmentStatus.status.code"""', (105, 59, 105, 69): 'enrollment'}, {}), "('enrollmentStatus.status.code', enrollment)", False, 'import jmespath\n')] |
Nishanth-Gobi/Da-Vinci-Code | app.py | b44a2d0c553e4f9cf9e2bb3283ebb5f6eaecea4a | from flask import Flask, render_template, request, redirect, url_for
from os.path import join
from stego import Steganography
app = Flask(__name__)
UPLOAD_FOLDER = 'static/files/'
app.config['UPLOAD_FOLDER'] = UPLOAD_FOLDER
ALLOWED_EXTENSIONS = {'png', 'jpg', 'jpeg'}
@app.route("/")
def home():
return render_template('home.html')
@app.route("/encrypt", methods=['GET', 'POST'])
def get_image():
if request.method == 'GET':
return render_template('encrypt.html')
# Check if the user has entered the secret message
if 'file' in request.files and 'Secret' in request.values:
uploaded_image = request.files['file']
message = request.values.get('Secret')
password = request.values.get("key")
filepath = join(app.config['UPLOAD_FOLDER'], "cover_image.png")
uploaded_image.save(filepath)
im = Steganography(filepath=app.config['UPLOAD_FOLDER'], key=password)
im.encode(message=message)
return render_template('encrypt.html', value=filepath, image_flag=True, secret_flag=True)
    return redirect(url_for('get_image'))  # the endpoint is the view function name, not the URL rule
@app.route("/decrypt", methods=['GET', 'POST'])
def get_image_to_decrypt():
if request.method == 'GET':
return render_template('decrypt.html')
if 'key' in request.values:
password = request.values.get('key')
filepath = join(app.config['UPLOAD_FOLDER'], "stego_image.png")
im = Steganography(filepath=app.config['UPLOAD_FOLDER'], key=password)
message = im.decode()
return render_template('decrypt.html', value=filepath, message=message)
if 'file' in request.files:
uploaded_image = request.files['file']
filepath = join(app.config['UPLOAD_FOLDER'], "stego_image.png")
uploaded_image.save(filepath)
return render_template('decrypt.html', value=filepath)
if __name__ == '__main__':
app.run(debug=True)
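# Illustrative requests against the routes above (a sketch, not part of the original app;
# the form field names match the handlers and the port is Flask's default):
#   curl -F "file=@cover.png" -F "Secret=hello" -F "key=pass123" http://127.0.0.1:5000/encrypt
#   curl -F "file=@stego_image.png" http://127.0.0.1:5000/decrypt
#   curl -d "key=pass123" http://127.0.0.1:5000/decrypt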
| [((6, 6, 6, 21), 'flask.Flask', 'Flask', ({(6, 12, 6, 20): '__name__'}, {}), '(__name__)', False, 'from flask import Flask, render_template, request, redirect, url_for\n'), ((14, 11, 14, 39), 'flask.render_template', 'render_template', ({(14, 27, 14, 38): '"""home.html"""'}, {}), "('home.html')", False, 'from flask import Flask, render_template, request, redirect, url_for\n'), ((21, 15, 21, 46), 'flask.render_template', 'render_template', ({(21, 31, 21, 45): '"""encrypt.html"""'}, {}), "('encrypt.html')", False, 'from flask import Flask, render_template, request, redirect, url_for\n'), ((26, 18, 26, 46), 'flask.request.values.get', 'request.values.get', ({(26, 37, 26, 45): '"""Secret"""'}, {}), "('Secret')", False, 'from flask import Flask, render_template, request, redirect, url_for\n'), ((27, 19, 27, 44), 'flask.request.values.get', 'request.values.get', ({(27, 38, 27, 43): '"""key"""'}, {}), "('key')", False, 'from flask import Flask, render_template, request, redirect, url_for\n'), ((29, 19, 29, 71), 'os.path.join', 'join', ({(29, 24, 29, 51): "app.config['UPLOAD_FOLDER']", (29, 53, 29, 70): '"""cover_image.png"""'}, {}), "(app.config['UPLOAD_FOLDER'], 'cover_image.png')", False, 'from os.path import join\n'), ((32, 13, 32, 78), 'stego.Steganography', 'Steganography', (), '', False, 'from stego import Steganography\n'), ((34, 15, 34, 97), 'flask.render_template', 'render_template', (), '', False, 'from flask import Flask, render_template, request, redirect, url_for\n'), ((36, 20, 36, 38), 'flask.url_for', 'url_for', ({(36, 28, 36, 37): '"""encrypt"""'}, {}), "('encrypt')", False, 'from flask import Flask, render_template, request, redirect, url_for\n'), ((43, 15, 43, 46), 'flask.render_template', 'render_template', ({(43, 31, 43, 45): '"""decrypt.html"""'}, {}), "('decrypt.html')", False, 'from flask import Flask, render_template, request, redirect, url_for\n'), ((46, 19, 46, 44), 'flask.request.values.get', 'request.values.get', ({(46, 38, 46, 43): '"""key"""'}, {}), "('key')", False, 'from flask import Flask, render_template, request, redirect, url_for\n'), ((47, 19, 47, 71), 'os.path.join', 'join', ({(47, 24, 47, 51): "app.config['UPLOAD_FOLDER']", (47, 53, 47, 70): '"""stego_image.png"""'}, {}), "(app.config['UPLOAD_FOLDER'], 'stego_image.png')", False, 'from os.path import join\n'), ((48, 13, 48, 78), 'stego.Steganography', 'Steganography', (), '', False, 'from stego import Steganography\n'), ((50, 15, 50, 79), 'flask.render_template', 'render_template', (), '', False, 'from flask import Flask, render_template, request, redirect, url_for\n'), ((54, 19, 54, 71), 'os.path.join', 'join', ({(54, 24, 54, 51): "app.config['UPLOAD_FOLDER']", (54, 53, 54, 70): '"""stego_image.png"""'}, {}), "(app.config['UPLOAD_FOLDER'], 'stego_image.png')", False, 'from os.path import join\n'), ((56, 15, 56, 62), 'flask.render_template', 'render_template', (), '', False, 'from flask import Flask, render_template, request, redirect, url_for\n')] |
HaiDangDang/2020-flatland | imitation_learning/generate_demonstrations/gen_envs.py | abbf2f7f62fabf6da0937f80c2181f1c457ce24a | from flatland.envs.agent_utils import RailAgentStatus
from flatland.envs.malfunction_generators import malfunction_from_params, MalfunctionParameters
from flatland.envs.observations import GlobalObsForRailEnv
from flatland.envs.rail_env import RailEnv
from flatland.envs.rail_generators import sparse_rail_generator
from flatland.envs.schedule_generators import sparse_schedule_generator
from flatland.utils.rendertools import RenderTool
import random
import sys
import os
import time
import msgpack
import json
from PIL import Image
import argparse as ap
def RandomTestParams(tid):
seed = tid * 19997 + 997
random.seed(seed)
width = 50 + random.randint(0, 100)
height = 50 + random.randint(0, 100)
nr_cities = 4 + random.randint(0, (width + height) // 10)
nr_trains = min(nr_cities * 20, 100 + random.randint(0, 100))
max_rails_between_cities = 2
max_rails_in_cities = 3 + random.randint(0, 5)
malfunction_rate = 30 + random.randint(0, 100)
malfunction_min_duration = 3 + random.randint(0, 7)
malfunction_max_duration = 20 + random.randint(0, 80)
return (
seed, width, height,
nr_trains, nr_cities,
max_rails_between_cities, max_rails_in_cities,
malfunction_rate, malfunction_min_duration, malfunction_max_duration
)
def RandomTestParams_small(tid):
seed = tid * 19997 + 997
random.seed(seed)
nSize = random.randint(0,5)
width = 20 + nSize * 5
height = 20 + nSize * 5
nr_cities = 2 + nSize // 2 + random.randint(0,2)
nr_trains = min(nr_cities * 5, 5 + random.randint(0,5)) #, 10 + random.randint(0, 10))
max_rails_between_cities = 2
max_rails_in_cities = 3 + random.randint(0, nSize)
malfunction_rate = 30 + random.randint(0, 100)
malfunction_min_duration = 3 + random.randint(0, 7)
malfunction_max_duration = 20 + random.randint(0, 80)
return (
seed, width, height,
nr_trains, nr_cities,
max_rails_between_cities, max_rails_in_cities,
malfunction_rate, malfunction_min_duration, malfunction_max_duration
)
def ShouldRunTest(tid):
    # Alternative development settings: 'return tid >= 3' or 'return True'.
    return tid >= 7
def create_test_env(fnParams, nTest, sDir):
(seed, width, height,
nr_trains, nr_cities,
max_rails_between_cities, max_rails_in_cities,
malfunction_rate, malfunction_min_duration, malfunction_max_duration) = fnParams(nTest)
#if not ShouldRunTest(test_id):
# continue
rail_generator = sparse_rail_generator(
max_num_cities=nr_cities,
seed=seed,
grid_mode=False,
max_rails_between_cities=max_rails_between_cities,
max_rails_in_city=max_rails_in_cities,
)
#stochastic_data = {'malfunction_rate': malfunction_rate,
# 'min_duration': malfunction_min_duration,
# 'max_duration': malfunction_max_duration
# }
stochastic_data = MalfunctionParameters(malfunction_rate=malfunction_rate,
min_duration=malfunction_min_duration,
max_duration=malfunction_max_duration
)
observation_builder = GlobalObsForRailEnv()
DEFAULT_SPEED_RATIO_MAP = {
1.: 0.25,
1. / 2.: 0.25,
1. / 3.: 0.25,
1. / 4.: 0.25}
schedule_generator = sparse_schedule_generator(DEFAULT_SPEED_RATIO_MAP)
for iAttempt in range(5):
try:
env = RailEnv(
width=width,
height=height,
rail_generator=rail_generator,
schedule_generator=schedule_generator,
number_of_agents=nr_trains,
malfunction_generator_and_process_data=malfunction_from_params(stochastic_data),
obs_builder_object=observation_builder,
remove_agents_at_target=True
)
obs = env.reset(random_seed = seed)
break
except ValueError as oErr:
print("Error:", oErr)
width += 5
height += 5
print("Try again with larger env: (w,h):", width, height)
if not os.path.exists(sDir):
os.makedirs(sDir)
sfName = "{}/Level_{}.mpk".format(sDir, nTest)
if os.path.exists(sfName):
os.remove(sfName)
env.save(sfName)
sys.stdout.write(".")
sys.stdout.flush()
return env
#env = create_test_env(RandomTestParams_small, 0, "train-envs-small/Test_0")
def createEnvSet(nStart, nEnd, sDir, bSmall=True):
#print("Generate small envs in train-envs-small:")
print(f"Generate envs (small={bSmall}) in dir {sDir}:")
sDirImages = "train-envs-small/images/"
if not os.path.exists(sDirImages):
os.makedirs(sDirImages)
for test_id in range(nStart, nEnd, 1):
env = create_test_env(RandomTestParams_small, test_id, sDir)
oRender = RenderTool(env, gl="PILSVG")
#oRender.env = env
#oRender.set_new_rail()
oRender.render_env()
g2img = oRender.get_image()
imgPIL = Image.fromarray(g2img)
#imgPIL.show()
imgPIL.save(sDirImages + "Level_{}.png".format(test_id))
# print("Generate large envs in train-envs-1000:")
# for test_id in range(100):
# create_test_env(RandomTestParams, test_id, "train-envs-1000/Test_0")
def merge(sfEpisode, sfEnv, sfEnvOut, bJson=False):
if bJson:
with open(sfEpisode, "rb") as fEp:
oActions = json.load(fEp)
oEp = {"actions":oActions}
print("json oEp:", type(oEp), list(oEp.keys()))
else:
with open(sfEpisode, "rb") as fEp:
oEp = msgpack.load(fEp)
print("oEp:", type(oEp), list(oEp.keys()))
with open(sfEnv, "rb") as fEnv:
oEnv = msgpack.load(fEnv)
print("oEnv:", type(oEnv), list(oEnv.keys()))
# merge dicts
oEnv2 = {**oEp, **oEnv}
print("Merged keys:", list(oEnv2.keys()))
with open(sfEnvOut, "wb") as fEnv:
msgpack.dump(oEnv2, fEnv)
def printKeys1(sfEnv):
with open(sfEnv, "rb") as fEnv:
oEnv = msgpack.load(fEnv, encoding="utf-8")
print(sfEnv, "keys:", list(oEnv.keys()))
for sKey in oEnv.keys():
print("key", sKey, len(oEnv[sKey]))
if sKey == "shape":
print("shape: ", oEnv[sKey] )
def printKeys(sfEnvs):
try:
for sfEnv in sfEnvs:
printKeys1(sfEnv)
except:
# assume single env
printKeys1(sfEnvs)
def main2():
parser = ap.ArgumentParser(description='Generate envs, merge episodes into env files.')
parser.add_argument("-c", '--createEnvs', type=int, nargs=2, action="append",
metavar=("nStart", "nEnd"),
help='merge episode into env')
parser.add_argument("-d", "--outDir", type=str, nargs=1, default="./test-envs-tmp")
parser.add_argument("-m", '--merge', type=str, nargs=3, action="append",
metavar=("episode", "env", "output_env"),
help='merge episode into env')
parser.add_argument("-j", '--mergejson', type=str, nargs=3, action="append",
metavar=("json", "env", "output_env"),
help='merge json actions into env, with key actions')
parser.add_argument('-k', "--keys", type=str, action='append', nargs="+",
help='print the keys in a file')
args=parser.parse_args()
print(args)
if args.merge:
print("merge:", args.merge)
merge(*args.merge[0])
if args.mergejson:
print("merge json:", args.mergejson)
merge(*args.mergejson[0], bJson=True)
if args.keys:
print("keys:", args.keys)
printKeys(args.keys[0])
if args.outDir:
print("outDir", args.outDir)
if args.createEnvs:
print("create Envs - ", *args.createEnvs[0])
createEnvSet(*args.createEnvs[0], sDir=args.outDir)
if __name__=="__main__":
main2()
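# Illustrative invocations of the CLI defined in main2() (a sketch; the script name is
# assumed from the module path):
#   python gen_envs.py -c 0 10 -d ./test-envs-tmp
#   python gen_envs.py -m episode.mpk Level_0.mpk Level_0_merged.mpk
#   python gen_envs.py -j actions.json Level_0.mpk Level_0_merged.mpk
#   python gen_envs.py -k Level_0.mpk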
| [((22, 4, 22, 21), 'random.seed', 'random.seed', ({(22, 16, 22, 20): 'seed'}, {}), '(seed)', False, 'import random\n'), ((42, 4, 42, 21), 'random.seed', 'random.seed', ({(42, 16, 42, 20): 'seed'}, {}), '(seed)', False, 'import random\n'), ((44, 12, 44, 31), 'random.randint', 'random.randint', ({(44, 27, 44, 28): '0', (44, 29, 44, 30): '5'}, {}), '(0, 5)', False, 'import random\n'), ((79, 21, 85, 5), 'flatland.envs.rail_generators.sparse_rail_generator', 'sparse_rail_generator', (), '', False, 'from flatland.envs.rail_generators import sparse_rail_generator\n'), ((95, 22, 98, 21), 'flatland.envs.malfunction_generators.MalfunctionParameters', 'MalfunctionParameters', (), '', False, 'from flatland.envs.malfunction_generators import malfunction_from_params, MalfunctionParameters\n'), ((104, 26, 104, 47), 'flatland.envs.observations.GlobalObsForRailEnv', 'GlobalObsForRailEnv', ({}, {}), '()', False, 'from flatland.envs.observations import GlobalObsForRailEnv\n'), ((113, 25, 113, 75), 'flatland.envs.schedule_generators.sparse_schedule_generator', 'sparse_schedule_generator', ({(113, 51, 113, 74): 'DEFAULT_SPEED_RATIO_MAP'}, {}), '(DEFAULT_SPEED_RATIO_MAP)', False, 'from flatland.envs.schedule_generators import sparse_schedule_generator\n'), ((140, 7, 140, 29), 'os.path.exists', 'os.path.exists', ({(140, 22, 140, 28): 'sfName'}, {}), '(sfName)', False, 'import os\n'), ((144, 4, 144, 25), 'sys.stdout.write', 'sys.stdout.write', ({(144, 21, 144, 24): '"""."""'}, {}), "('.')", False, 'import sys\n'), ((145, 4, 145, 22), 'sys.stdout.flush', 'sys.stdout.flush', ({}, {}), '()', False, 'import sys\n'), ((228, 13, 228, 91), 'argparse.ArgumentParser', 'ap.ArgumentParser', (), '', True, 'import argparse as ap\n'), ((23, 17, 23, 39), 'random.randint', 'random.randint', ({(23, 32, 23, 33): '(0)', (23, 35, 23, 38): '(100)'}, {}), '(0, 100)', False, 'import random\n'), ((24, 18, 24, 40), 'random.randint', 'random.randint', ({(24, 33, 24, 34): '(0)', (24, 36, 24, 39): '(100)'}, {}), '(0, 100)', False, 'import random\n'), ((25, 20, 25, 61), 'random.randint', 'random.randint', ({(25, 35, 25, 36): '(0)', (25, 38, 25, 60): '((width + height) // 10)'}, {}), '(0, (width + height) // 10)', False, 'import random\n'), ((28, 30, 28, 50), 'random.randint', 'random.randint', ({(28, 45, 28, 46): '(0)', (28, 48, 28, 49): '(5)'}, {}), '(0, 5)', False, 'import random\n'), ((29, 28, 29, 50), 'random.randint', 'random.randint', ({(29, 43, 29, 44): '(0)', (29, 46, 29, 49): '(100)'}, {}), '(0, 100)', False, 'import random\n'), ((30, 35, 30, 55), 'random.randint', 'random.randint', ({(30, 50, 30, 51): '(0)', (30, 53, 30, 54): '(7)'}, {}), '(0, 7)', False, 'import random\n'), ((31, 36, 31, 57), 'random.randint', 'random.randint', ({(31, 51, 31, 52): '(0)', (31, 54, 31, 56): '(80)'}, {}), '(0, 80)', False, 'import random\n'), ((48, 33, 48, 52), 'random.randint', 'random.randint', ({(48, 48, 48, 49): '(0)', (48, 50, 48, 51): '(2)'}, {}), '(0, 2)', False, 'import random\n'), ((51, 30, 51, 54), 'random.randint', 'random.randint', ({(51, 45, 51, 46): '(0)', (51, 48, 51, 53): 'nSize'}, {}), '(0, nSize)', False, 'import random\n'), ((52, 28, 52, 50), 'random.randint', 'random.randint', ({(52, 43, 52, 44): '(0)', (52, 46, 52, 49): '(100)'}, {}), '(0, 100)', False, 'import random\n'), ((53, 35, 53, 55), 'random.randint', 'random.randint', ({(53, 50, 53, 51): '(0)', (53, 53, 53, 54): '(7)'}, {}), '(0, 7)', False, 'import random\n'), ((54, 36, 54, 57), 'random.randint', 'random.randint', ({(54, 51, 54, 52): '(0)', (54, 54, 54, 56): '(80)'}, 
{}), '(0, 80)', False, 'import random\n'), ((136, 11, 136, 31), 'os.path.exists', 'os.path.exists', ({(136, 26, 136, 30): 'sDir'}, {}), '(sDir)', False, 'import os\n'), ((137, 8, 137, 25), 'os.makedirs', 'os.makedirs', ({(137, 20, 137, 24): 'sDir'}, {}), '(sDir)', False, 'import os\n'), ((141, 8, 141, 25), 'os.remove', 'os.remove', ({(141, 18, 141, 24): 'sfName'}, {}), '(sfName)', False, 'import os\n'), ((158, 11, 158, 37), 'os.path.exists', 'os.path.exists', ({(158, 26, 158, 36): 'sDirImages'}, {}), '(sDirImages)', False, 'import os\n'), ((159, 8, 159, 31), 'os.makedirs', 'os.makedirs', ({(159, 20, 159, 30): 'sDirImages'}, {}), '(sDirImages)', False, 'import os\n'), ((164, 18, 164, 46), 'flatland.utils.rendertools.RenderTool', 'RenderTool', (), '', False, 'from flatland.utils.rendertools import RenderTool\n'), ((170, 17, 170, 39), 'PIL.Image.fromarray', 'Image.fromarray', ({(170, 33, 170, 38): 'g2img'}, {}), '(g2img)', False, 'from PIL import Image\n'), ((195, 15, 195, 33), 'msgpack.load', 'msgpack.load', ({(195, 28, 195, 32): 'fEnv'}, {}), '(fEnv)', False, 'import msgpack\n'), ((203, 8, 203, 33), 'msgpack.dump', 'msgpack.dump', ({(203, 21, 203, 26): 'oEnv2', (203, 28, 203, 32): 'fEnv'}, {}), '(oEnv2, fEnv)', False, 'import msgpack\n'), ((207, 15, 207, 51), 'msgpack.load', 'msgpack.load', (), '', False, 'import msgpack\n'), ((26, 42, 26, 64), 'random.randint', 'random.randint', ({(26, 57, 26, 58): '0', (26, 60, 26, 63): '100'}, {}), '(0, 100)', False, 'import random\n'), ((49, 39, 49, 58), 'random.randint', 'random.randint', ({(49, 54, 49, 55): '0', (49, 56, 49, 57): '5'}, {}), '(0, 5)', False, 'import random\n'), ((186, 23, 186, 37), 'json.load', 'json.load', ({(186, 33, 186, 36): 'fEp'}, {}), '(fEp)', False, 'import json\n'), ((191, 18, 191, 35), 'msgpack.load', 'msgpack.load', ({(191, 31, 191, 34): 'fEp'}, {}), '(fEp)', False, 'import msgpack\n'), ((123, 55, 123, 95), 'flatland.envs.malfunction_generators.malfunction_from_params', 'malfunction_from_params', ({(123, 79, 123, 94): 'stochastic_data'}, {}), '(stochastic_data)', False, 'from flatland.envs.malfunction_generators import malfunction_from_params, MalfunctionParameters\n')] |
Sruthi-Ganesh/postgres-django-queue | job-queue-portal/postgres_django_queue/djangoenv/lib/python3.8/site-packages/django_celery_results/migrations/0006_taskresult_date_created.py | 4ea8412c073ff8ceb0efbac48afc29456ae11346 | # -*- coding: utf-8 -*-
# Generated by Django 2.2.4 on 2019-08-21 19:53
# this file is auto-generated so don't do flake8 on it
# flake8: noqa
from __future__ import absolute_import, unicode_literals
from django.db import migrations, models
import django.utils.timezone
def copy_date_done_to_date_created(apps, schema_editor):
TaskResult = apps.get_model('django_celery_results', 'taskresult')
db_alias = schema_editor.connection.alias
TaskResult.objects.using(db_alias).all().update(
date_created=models.F('date_done')
)
def reverse_copy_date_done_to_date_created(app, schema_editor):
# the reverse of 'copy_date_done_to_date_created' is do nothing
# because the 'date_created' will be removed.
pass
class Migration(migrations.Migration):
dependencies = [
('django_celery_results', '0005_taskresult_worker'),
]
operations = [
migrations.AddField(
model_name='taskresult',
name='date_created',
field=models.DateTimeField(
auto_now_add=True,
db_index=True,
default=django.utils.timezone.now,
help_text='Datetime field when the task result was created in UTC',
verbose_name='Created DateTime'
),
preserve_default=False,
),
migrations.RunPython(copy_date_done_to_date_created,
reverse_copy_date_done_to_date_created),
]
| [((46, 8, 47, 68), 'django.db.migrations.RunPython', 'migrations.RunPython', ({(46, 29, 46, 59): 'copy_date_done_to_date_created', (47, 29, 47, 67): 'reverse_copy_date_done_to_date_created'}, {}), '(copy_date_done_to_date_created,\n reverse_copy_date_done_to_date_created)', False, 'from django.db import migrations, models\n'), ((17, 21, 17, 42), 'django.db.models.F', 'models.F', ({(17, 30, 17, 41): '"""date_done"""'}, {}), "('date_done')", False, 'from django.db import migrations, models\n'), ((37, 18, 43, 13), 'django.db.models.DateTimeField', 'models.DateTimeField', (), '', False, 'from django.db import migrations, models\n')] |
fabaff/remediar | remediar/modules/http/__init__.py | 014d7733b00cd40a45881c2729c04df5584476e7 | """Support for HTTP or web server issues."""
| [] |
AlanMorningLight/PyTorch-BayesianCNN | Image Recognition/utils/BayesianModels/Bayesian3Conv3FC.py | 5de7133f09dd10135bf605efbdd26c18f2a4df13 | import torch.nn as nn
from utils.BBBlayers import BBBConv2d, BBBLinearFactorial, FlattenLayer
class BBB3Conv3FC(nn.Module):
"""
Simple Neural Network having 3 Convolution
and 3 FC layers with Bayesian layers.
"""
def __init__(self, outputs, inputs):
super(BBB3Conv3FC, self).__init__()
self.conv1 = BBBConv2d(inputs, 32, 5, stride=1, padding=2)
self.soft1 = nn.Softplus()
self.pool1 = nn.MaxPool2d(kernel_size=3, stride=2)
self.conv2 = BBBConv2d(32, 64, 5, stride=1, padding=2)
self.soft2 = nn.Softplus()
self.pool2 = nn.MaxPool2d(kernel_size=3, stride=2)
self.conv3 = BBBConv2d(64, 128, 5, stride=1, padding=1)
self.soft3 = nn.Softplus()
self.pool3 = nn.MaxPool2d(kernel_size=3, stride=2)
self.flatten = FlattenLayer(2 * 2 * 128)
self.fc1 = BBBLinearFactorial(2 * 2 * 128, 1000)
self.soft5 = nn.Softplus()
self.fc2 = BBBLinearFactorial(1000, 1000)
self.soft6 = nn.Softplus()
self.fc3 = BBBLinearFactorial(1000, outputs)
layers = [self.conv1, self.soft1, self.pool1, self.conv2, self.soft2, self.pool2,
self.conv3, self.soft3, self.pool3, self.flatten, self.fc1, self.soft5,
self.fc2, self.soft6, self.fc3]
self.layers = nn.ModuleList(layers)
def probforward(self, x):
'Forward pass with Bayesian weights'
kl = 0
for layer in self.layers:
if hasattr(layer, 'convprobforward') and callable(layer.convprobforward):
                x, _kl = layer.convprobforward(x)
kl += _kl
elif hasattr(layer, 'fcprobforward') and callable(layer.fcprobforward):
                x, _kl = layer.fcprobforward(x)
kl += _kl
else:
x = layer(x)
logits = x
return logits, kl | [((12, 21, 12, 66), 'utils.BBBlayers.BBBConv2d', 'BBBConv2d', (), '', False, 'from utils.BBBlayers import BBBConv2d, BBBLinearFactorial, FlattenLayer\n'), ((13, 21, 13, 34), 'torch.nn.Softplus', 'nn.Softplus', ({}, {}), '()', True, 'import torch.nn as nn\n'), ((14, 21, 14, 58), 'torch.nn.MaxPool2d', 'nn.MaxPool2d', (), '', True, 'import torch.nn as nn\n'), ((16, 21, 16, 62), 'utils.BBBlayers.BBBConv2d', 'BBBConv2d', (), '', False, 'from utils.BBBlayers import BBBConv2d, BBBLinearFactorial, FlattenLayer\n'), ((17, 21, 17, 34), 'torch.nn.Softplus', 'nn.Softplus', ({}, {}), '()', True, 'import torch.nn as nn\n'), ((18, 21, 18, 58), 'torch.nn.MaxPool2d', 'nn.MaxPool2d', (), '', True, 'import torch.nn as nn\n'), ((20, 21, 20, 63), 'utils.BBBlayers.BBBConv2d', 'BBBConv2d', (), '', False, 'from utils.BBBlayers import BBBConv2d, BBBLinearFactorial, FlattenLayer\n'), ((21, 21, 21, 34), 'torch.nn.Softplus', 'nn.Softplus', ({}, {}), '()', True, 'import torch.nn as nn\n'), ((22, 21, 22, 58), 'torch.nn.MaxPool2d', 'nn.MaxPool2d', (), '', True, 'import torch.nn as nn\n'), ((24, 23, 24, 48), 'utils.BBBlayers.FlattenLayer', 'FlattenLayer', ({(24, 36, 24, 47): '2 * 2 * 128'}, {}), '(2 * 2 * 128)', False, 'from utils.BBBlayers import BBBConv2d, BBBLinearFactorial, FlattenLayer\n'), ((25, 19, 25, 56), 'utils.BBBlayers.BBBLinearFactorial', 'BBBLinearFactorial', ({(25, 38, 25, 49): '2 * 2 * 128', (25, 51, 25, 55): '1000'}, {}), '(2 * 2 * 128, 1000)', False, 'from utils.BBBlayers import BBBConv2d, BBBLinearFactorial, FlattenLayer\n'), ((26, 21, 26, 34), 'torch.nn.Softplus', 'nn.Softplus', ({}, {}), '()', True, 'import torch.nn as nn\n'), ((28, 19, 28, 49), 'utils.BBBlayers.BBBLinearFactorial', 'BBBLinearFactorial', ({(28, 38, 28, 42): '1000', (28, 44, 28, 48): '1000'}, {}), '(1000, 1000)', False, 'from utils.BBBlayers import BBBConv2d, BBBLinearFactorial, FlattenLayer\n'), ((29, 21, 29, 34), 'torch.nn.Softplus', 'nn.Softplus', ({}, {}), '()', True, 'import torch.nn as nn\n'), ((31, 19, 31, 52), 'utils.BBBlayers.BBBLinearFactorial', 'BBBLinearFactorial', ({(31, 38, 31, 42): '1000', (31, 44, 31, 51): 'outputs'}, {}), '(1000, outputs)', False, 'from utils.BBBlayers import BBBConv2d, BBBLinearFactorial, FlattenLayer\n'), ((37, 22, 37, 43), 'torch.nn.ModuleList', 'nn.ModuleList', ({(37, 36, 37, 42): 'layers'}, {}), '(layers)', True, 'import torch.nn as nn\n')] |
nphilou/influence-release | custom_scripts/load_animals.py | bcf3603705b6ff172bcb62123aef0248afa77a05 | import os
from tensorflow.contrib.learn.python.learn.datasets import base
import numpy as np
import IPython
from subprocess import call
from keras.preprocessing import image
from influence.dataset import DataSet
from influence.inception_v3 import preprocess_input
BASE_DIR = 'data' # TODO: change
def fill(X, Y, idx, label, img_path, img_side):
img = image.load_img(img_path, target_size=(img_side, img_side))
x = image.img_to_array(img)
X[idx, ...] = x
Y[idx] = label
def extract_and_rename_animals():
class_maps = [
('dog', 'n02084071'),
('cat', 'n02121808'),
('bird', 'n01503061'),
('fish', 'n02512053'),
('horse', 'n02374451'),
('monkey', 'n02484322'),
('zebra', 'n02391049'),
('panda', 'n02510455'),
('lemur', 'n02496913'),
('wombat', 'n01883070'),
]
for class_string, class_id in class_maps:
class_dir = os.path.join(BASE_DIR, class_string)
print(class_dir)
call('mkdir %s' % class_dir, shell=True)
call('tar -xf %s.tar -C %s' % (os.path.join(BASE_DIR, class_id), class_dir), shell=True)
for filename in os.listdir(class_dir):
file_idx = filename.split('_')[1].split('.')[0]
src_filename = os.path.join(class_dir, filename)
dst_filename = os.path.join(class_dir, '%s_%s.JPEG' % (class_string, file_idx))
os.rename(src_filename, dst_filename)
def load_animals(num_train_ex_per_class=300,
num_test_ex_per_class=100,
num_valid_ex_per_class=0,
classes=None,
):
num_channels = 3
img_side = 299
if num_valid_ex_per_class == 0:
valid_str = ''
else:
        valid_str = '_valid-%s' % num_valid_ex_per_class
if classes is None:
classes = ['dog', 'cat', 'bird', 'fish', 'horse', 'monkey', 'zebra', 'panda', 'lemur', 'wombat']
data_filename = os.path.join(BASE_DIR, 'dataset_train-%s_test-%s%s.npz' % (num_train_ex_per_class, num_test_ex_per_class, valid_str))
else:
data_filename = os.path.join(BASE_DIR, 'dataset_%s_train-%s_test-%s%s.npz' % ('-'.join(classes), num_train_ex_per_class, num_test_ex_per_class, valid_str))
num_classes = len(classes)
num_train_examples = num_train_ex_per_class * num_classes
num_test_examples = num_test_ex_per_class * num_classes
num_valid_examples = num_valid_ex_per_class * num_classes
if os.path.exists(data_filename):
print('Loading animals from disk...')
f = np.load(data_filename)
X_train = f['X_train']
X_test = f['X_test']
Y_train = f['Y_train']
Y_test = f['Y_test']
if 'X_valid' in f:
X_valid = f['X_valid']
else:
X_valid = None
if 'Y_valid' in f:
Y_valid = f['Y_valid']
else:
Y_valid = None
else:
print('Reading animals from raw images...')
X_train = np.zeros([num_train_examples, img_side, img_side, num_channels])
X_test = np.zeros([num_test_examples, img_side, img_side, num_channels])
# X_valid = np.zeros([num_valid_examples, img_side, img_side, num_channels])
X_valid = None
Y_train = np.zeros([num_train_examples])
Y_test = np.zeros([num_test_examples])
# Y_valid = np.zeros([num_valid_examples])
Y_valid = None
for class_idx, class_string in enumerate(classes):
print('class: %s' % class_string)
# For some reason, a lot of numbers are skipped.
i = 0
num_filled = 0
while num_filled < num_train_ex_per_class:
img_path = os.path.join(BASE_DIR, '%s/%s_%s.JPEG' % (class_string, class_string, i))
print(img_path)
if os.path.exists(img_path):
fill(X_train, Y_train, num_filled + (num_train_ex_per_class * class_idx), class_idx, img_path, img_side)
num_filled += 1
print(num_filled)
i += 1
num_filled = 0
while num_filled < num_test_ex_per_class:
img_path = os.path.join(BASE_DIR, '%s/%s_%s.JPEG' % (class_string, class_string, i))
if os.path.exists(img_path):
fill(X_test, Y_test, num_filled + (num_test_ex_per_class * class_idx), class_idx, img_path, img_side)
num_filled += 1
print(num_filled)
i += 1
num_filled = 0
while num_filled < num_valid_ex_per_class:
img_path = os.path.join(BASE_DIR, '%s/%s_%s.JPEG' % (class_string, class_string, i))
if os.path.exists(img_path):
fill(X_valid, Y_valid, num_filled + (num_valid_ex_per_class * class_idx), class_idx, img_path, img_side)
num_filled += 1
print(num_filled)
i += 1
X_train = preprocess_input(X_train)
X_test = preprocess_input(X_test)
        if X_valid is not None:
            X_valid = preprocess_input(X_valid)
np.random.seed(0)
permutation_idx = np.arange(num_train_examples)
np.random.shuffle(permutation_idx)
X_train = X_train[permutation_idx, :]
Y_train = Y_train[permutation_idx]
permutation_idx = np.arange(num_test_examples)
np.random.shuffle(permutation_idx)
X_test = X_test[permutation_idx, :]
Y_test = Y_test[permutation_idx]
        if (X_valid is not None) and (Y_valid is not None):
            permutation_idx = np.arange(num_valid_examples)
            np.random.shuffle(permutation_idx)
            X_valid = X_valid[permutation_idx, :]
            Y_valid = Y_valid[permutation_idx]
np.savez_compressed(data_filename, X_train=X_train, Y_train=Y_train, X_test=X_test, Y_test=Y_test, X_valid=X_valid, Y_valid=Y_valid)
train = DataSet(X_train, Y_train)
if (X_valid is not None) and (Y_valid is not None):
# validation = DataSet(X_valid, Y_valid)
validation = None
else:
validation = None
test = DataSet(X_test, Y_test)
return base.Datasets(train=train, validation=validation, test=test)
def load_koda():
num_channels = 3
img_side = 299
data_filename = os.path.join(BASE_DIR, 'dataset_koda.npz')
if os.path.exists(data_filename):
print('Loading Koda from disk...')
f = np.load(data_filename)
X = f['X']
Y = f['Y']
else:
# Returns all class 0
print('Reading Koda from raw images...')
image_files = [image_file for image_file in os.listdir(os.path.join(BASE_DIR, 'koda')) if (image_file.endswith('.jpg'))]
# Hack to get the image files in the right order
# image_files = [image_file for image_file in os.listdir(os.path.join(BASE_DIR, 'koda')) if (image_file.endswith('.jpg') and not image_file.startswith('124'))]
# image_files += [image_file for image_file in os.listdir(os.path.join(BASE_DIR, 'koda')) if (image_file.endswith('.jpg') and image_file.startswith('124'))]
num_examples = len(image_files)
X = np.zeros([num_examples, img_side, img_side, num_channels])
Y = np.zeros([num_examples])
class_idx = 0
for counter, image_file in enumerate(image_files):
img_path = os.path.join(BASE_DIR, 'koda', image_file)
fill(X, Y, counter, class_idx, img_path, img_side)
X = preprocess_input(X)
np.savez(data_filename, X=X, Y=Y)
return X, Y
def load_dogfish_with_koda():
classes = ['dog', 'fish']
X_test, Y_test = load_koda()
data_sets = load_animals(num_train_ex_per_class=900,
num_test_ex_per_class=300,
num_valid_ex_per_class=0,
classes=classes)
train = data_sets.train
validation = data_sets.validation
test = DataSet(X_test, Y_test)
return base.Datasets(train=train, validation=validation, test=test)
def load_dogfish_with_orig_and_koda():
classes = ['dog', 'fish']
X_test, Y_test = load_koda()
X_test = np.reshape(X_test, (X_test.shape[0], -1))
data_sets = load_animals(num_train_ex_per_class=900,
num_test_ex_per_class=300,
num_valid_ex_per_class=0,
classes=classes)
train = data_sets.train
validation = data_sets.validation
test = DataSet(
np.concatenate((data_sets.test.x, X_test), axis=0),
np.concatenate((data_sets.test.labels, Y_test), axis=0))
return base.Datasets(train=train, validation=validation, test=test)
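# Illustrative usage sketch (assumed counts; raw images or a cached .npz must exist under BASE_DIR):
#   data_sets = load_animals(num_train_ex_per_class=900, num_test_ex_per_class=300,
#                            classes=['dog', 'fish'])
#   print(data_sets.train.x.shape, data_sets.test.labels.shape)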
| [((17, 10, 17, 68), 'keras.preprocessing.image.load_img', 'image.load_img', (), '', False, 'from keras.preprocessing import image\n'), ((18, 8, 18, 31), 'keras.preprocessing.image.img_to_array', 'image.img_to_array', ({(18, 27, 18, 30): 'img'}, {}), '(img)', False, 'from keras.preprocessing import image\n'), ((79, 7, 79, 36), 'os.path.exists', 'os.path.exists', ({(79, 22, 79, 35): 'data_filename'}, {}), '(data_filename)', False, 'import os\n'), ((162, 12, 162, 37), 'influence.dataset.DataSet', 'DataSet', ({(162, 20, 162, 27): 'X_train', (162, 29, 162, 36): 'Y_train'}, {}), '(X_train, Y_train)', False, 'from influence.dataset import DataSet\n'), ((169, 11, 169, 34), 'influence.dataset.DataSet', 'DataSet', ({(169, 19, 169, 25): 'X_test', (169, 27, 169, 33): 'Y_test'}, {}), '(X_test, Y_test)', False, 'from influence.dataset import DataSet\n'), ((171, 11, 171, 71), 'tensorflow.contrib.learn.python.learn.datasets.base.Datasets', 'base.Datasets', (), '', False, 'from tensorflow.contrib.learn.python.learn.datasets import base\n'), ((178, 20, 178, 62), 'os.path.join', 'os.path.join', ({(178, 33, 178, 41): 'BASE_DIR', (178, 43, 178, 61): '"""dataset_koda.npz"""'}, {}), "(BASE_DIR, 'dataset_koda.npz')", False, 'import os\n'), ((180, 7, 180, 36), 'os.path.exists', 'os.path.exists', ({(180, 22, 180, 35): 'data_filename'}, {}), '(data_filename)', False, 'import os\n'), ((221, 11, 221, 34), 'influence.dataset.DataSet', 'DataSet', ({(221, 19, 221, 25): 'X_test', (221, 27, 221, 33): 'Y_test'}, {}), '(X_test, Y_test)', False, 'from influence.dataset import DataSet\n'), ((223, 11, 223, 71), 'tensorflow.contrib.learn.python.learn.datasets.base.Datasets', 'base.Datasets', (), '', False, 'from tensorflow.contrib.learn.python.learn.datasets import base\n'), ((229, 13, 229, 54), 'numpy.reshape', 'np.reshape', ({(229, 24, 229, 30): 'X_test', (229, 32, 229, 53): '(X_test.shape[0], -1)'}, {}), '(X_test, (X_test.shape[0], -1))', True, 'import numpy as np\n'), ((242, 11, 242, 71), 'tensorflow.contrib.learn.python.learn.datasets.base.Datasets', 'base.Datasets', (), '', False, 'from tensorflow.contrib.learn.python.learn.datasets import base\n'), ((40, 20, 40, 56), 'os.path.join', 'os.path.join', ({(40, 33, 40, 41): 'BASE_DIR', (40, 43, 40, 55): 'class_string'}, {}), '(BASE_DIR, class_string)', False, 'import os\n'), ((42, 8, 42, 48), 'subprocess.call', 'call', (), '', False, 'from subprocess import call\n'), ((45, 24, 45, 45), 'os.listdir', 'os.listdir', ({(45, 35, 45, 44): 'class_dir'}, {}), '(class_dir)', False, 'import os\n'), ((70, 24, 70, 141), 'os.path.join', 'os.path.join', ({(70, 37, 70, 45): 'BASE_DIR', (70, 47, 70, 140): "'dataset_train-%s_test-%s%s.npz' % (num_train_ex_per_class,\n num_test_ex_per_class, valid_str)"}, {}), "(BASE_DIR, 'dataset_train-%s_test-%s%s.npz' % (\n num_train_ex_per_class, num_test_ex_per_class, valid_str))", False, 'import os\n'), ((81, 12, 81, 34), 'numpy.load', 'np.load', ({(81, 20, 81, 33): 'data_filename'}, {}), '(data_filename)', True, 'import numpy as np\n'), ((99, 18, 99, 82), 'numpy.zeros', 'np.zeros', ({(99, 27, 99, 81): '[num_train_examples, img_side, img_side, num_channels]'}, {}), '([num_train_examples, img_side, img_side, num_channels])', True, 'import numpy as np\n'), ((100, 17, 100, 80), 'numpy.zeros', 'np.zeros', ({(100, 26, 100, 79): '[num_test_examples, img_side, img_side, num_channels]'}, {}), '([num_test_examples, img_side, img_side, num_channels])', True, 'import numpy as np\n'), ((104, 18, 104, 48), 'numpy.zeros', 'np.zeros', ({(104, 27, 104, 47): 
'[num_train_examples]'}, {}), '([num_train_examples])', True, 'import numpy as np\n'), ((105, 17, 105, 46), 'numpy.zeros', 'np.zeros', ({(105, 26, 105, 45): '[num_test_examples]'}, {}), '([num_test_examples])', True, 'import numpy as np\n'), ((141, 18, 141, 43), 'influence.inception_v3.preprocess_input', 'preprocess_input', ({(141, 35, 141, 42): 'X_train'}, {}), '(X_train)', False, 'from influence.inception_v3 import preprocess_input\n'), ((142, 17, 142, 41), 'influence.inception_v3.preprocess_input', 'preprocess_input', ({(142, 34, 142, 40): 'X_test'}, {}), '(X_test)', False, 'from influence.inception_v3 import preprocess_input\n'), ((143, 18, 143, 43), 'influence.inception_v3.preprocess_input', 'preprocess_input', ({(143, 35, 143, 42): 'X_valid'}, {}), '(X_valid)', False, 'from influence.inception_v3 import preprocess_input\n'), ((145, 8, 145, 25), 'numpy.random.seed', 'np.random.seed', ({(145, 23, 145, 24): '(0)'}, {}), '(0)', True, 'import numpy as np\n'), ((146, 26, 146, 55), 'numpy.arange', 'np.arange', ({(146, 36, 146, 54): 'num_train_examples'}, {}), '(num_train_examples)', True, 'import numpy as np\n'), ((147, 8, 147, 42), 'numpy.random.shuffle', 'np.random.shuffle', ({(147, 26, 147, 41): 'permutation_idx'}, {}), '(permutation_idx)', True, 'import numpy as np\n'), ((150, 26, 150, 54), 'numpy.arange', 'np.arange', ({(150, 36, 150, 53): 'num_test_examples'}, {}), '(num_test_examples)', True, 'import numpy as np\n'), ((151, 8, 151, 42), 'numpy.random.shuffle', 'np.random.shuffle', ({(151, 26, 151, 41): 'permutation_idx'}, {}), '(permutation_idx)', True, 'import numpy as np\n'), ((154, 26, 154, 55), 'numpy.arange', 'np.arange', ({(154, 36, 154, 54): 'num_valid_examples'}, {}), '(num_valid_examples)', True, 'import numpy as np\n'), ((155, 8, 155, 42), 'numpy.random.shuffle', 'np.random.shuffle', ({(155, 26, 155, 41): 'permutation_idx'}, {}), '(permutation_idx)', True, 'import numpy as np\n'), ((159, 8, 159, 140), 'numpy.savez_compressed', 'np.savez_compressed', (), '', True, 'import numpy as np\n'), ((182, 12, 182, 34), 'numpy.load', 'np.load', ({(182, 20, 182, 33): 'data_filename'}, {}), '(data_filename)', True, 'import numpy as np\n'), ((196, 12, 196, 70), 'numpy.zeros', 'np.zeros', ({(196, 21, 196, 69): '[num_examples, img_side, img_side, num_channels]'}, {}), '([num_examples, img_side, img_side, num_channels])', True, 'import numpy as np\n'), ((197, 12, 197, 36), 'numpy.zeros', 'np.zeros', ({(197, 21, 197, 35): '[num_examples]'}, {}), '([num_examples])', True, 'import numpy as np\n'), ((204, 12, 204, 31), 'influence.inception_v3.preprocess_input', 'preprocess_input', ({(204, 29, 204, 30): 'X'}, {}), '(X)', False, 'from influence.inception_v3 import preprocess_input\n'), ((206, 8, 206, 41), 'numpy.savez', 'np.savez', (), '', True, 'import numpy as np\n'), ((239, 8, 239, 58), 'numpy.concatenate', 'np.concatenate', (), '', True, 'import numpy as np\n'), ((240, 8, 240, 63), 'numpy.concatenate', 'np.concatenate', (), '', True, 'import numpy as np\n'), ((48, 27, 48, 60), 'os.path.join', 'os.path.join', ({(48, 40, 48, 49): 'class_dir', (48, 51, 48, 59): 'filename'}, {}), '(class_dir, filename)', False, 'import os\n'), ((49, 27, 49, 91), 'os.path.join', 'os.path.join', ({(49, 40, 49, 49): 'class_dir', (49, 51, 49, 90): "'%s_%s.JPEG' % (class_string, file_idx)"}, {}), "(class_dir, '%s_%s.JPEG' % (class_string, file_idx))", False, 'import os\n'), ((50, 12, 50, 49), 'os.rename', 'os.rename', ({(50, 22, 50, 34): 'src_filename', (50, 36, 50, 48): 'dst_filename'}, {}), '(src_filename, 
dst_filename)', False, 'import os\n'), ((201, 23, 201, 65), 'os.path.join', 'os.path.join', ({(201, 36, 201, 44): 'BASE_DIR', (201, 46, 201, 52): '"""koda"""', (201, 54, 201, 64): 'image_file'}, {}), "(BASE_DIR, 'koda', image_file)", False, 'import os\n'), ((115, 27, 115, 100), 'os.path.join', 'os.path.join', ({(115, 40, 115, 48): 'BASE_DIR', (115, 50, 115, 99): "'%s/%s_%s.JPEG' % (class_string, class_string, i)"}, {}), "(BASE_DIR, '%s/%s_%s.JPEG' % (class_string, class_string, i))", False, 'import os\n'), ((117, 19, 117, 43), 'os.path.exists', 'os.path.exists', ({(117, 34, 117, 42): 'img_path'}, {}), '(img_path)', False, 'import os\n'), ((125, 27, 125, 100), 'os.path.join', 'os.path.join', ({(125, 40, 125, 48): 'BASE_DIR', (125, 50, 125, 99): "'%s/%s_%s.JPEG' % (class_string, class_string, i)"}, {}), "(BASE_DIR, '%s/%s_%s.JPEG' % (class_string, class_string, i))", False, 'import os\n'), ((126, 19, 126, 43), 'os.path.exists', 'os.path.exists', ({(126, 34, 126, 42): 'img_path'}, {}), '(img_path)', False, 'import os\n'), ((134, 27, 134, 100), 'os.path.join', 'os.path.join', ({(134, 40, 134, 48): 'BASE_DIR', (134, 50, 134, 99): "'%s/%s_%s.JPEG' % (class_string, class_string, i)"}, {}), "(BASE_DIR, '%s/%s_%s.JPEG' % (class_string, class_string, i))", False, 'import os\n'), ((135, 19, 135, 43), 'os.path.exists', 'os.path.exists', ({(135, 34, 135, 42): 'img_path'}, {}), '(img_path)', False, 'import os\n'), ((43, 39, 43, 71), 'os.path.join', 'os.path.join', ({(43, 52, 43, 60): 'BASE_DIR', (43, 62, 43, 70): 'class_id'}, {}), '(BASE_DIR, class_id)', False, 'import os\n'), ((189, 63, 189, 93), 'os.path.join', 'os.path.join', ({(189, 76, 189, 84): 'BASE_DIR', (189, 86, 189, 92): '"""koda"""'}, {}), "(BASE_DIR, 'koda')", False, 'import os\n')] |
carstenblank/qiskit-aws-braket-provider | src/qiskit_aws_braket_provider/awsbackend.py | 539f0c75c2ccf1f6e5e981b92ea74f497fcba237 | # Copyright 2020 Carsten Blank
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import json
import logging
from datetime import datetime, timedelta
from braket.device_schema.device_service_properties_v1 import DeviceCost
from typing import List, Dict, Optional, Any, Union, Tuple
from botocore.response import StreamingBody
from braket.aws import AwsDevice, AwsQuantumTask, AwsSession
from braket.circuits import Circuit
from braket.device_schema import DeviceCapabilities
from braket.device_schema.ionq import IonqDeviceCapabilities
from braket.device_schema.rigetti import RigettiDeviceCapabilities
from braket.device_schema.simulators import GateModelSimulatorDeviceCapabilities
from qiskit.providers import BaseBackend, JobStatus
from qiskit.providers.models import QasmBackendConfiguration, BackendProperties, BackendStatus
from qiskit.qobj import QasmQobj
from . import awsjob
from . import awsprovider
from .conversions_configuration import aws_device_2_configuration
from .conversions_properties import aws_ionq_to_properties, aws_rigetti_to_properties, aws_simulator_to_properties
from .transpilation import convert_qasm_qobj
logger = logging.getLogger(__name__)
class AWSBackend(BaseBackend):
_aws_device: AwsDevice
_configuration: QasmBackendConfiguration
_provider: 'awsprovider.AWSProvider'
def __init__(self, aws_device: AwsDevice, provider: 'awsprovider.AWSProvider' = None):
super().__init__(aws_device_2_configuration(aws_device), provider)
self._aws_device = aws_device
self._run = aws_device.run
def properties(self) -> BackendProperties:
properties: DeviceCapabilities = self._aws_device.properties
if isinstance(properties, IonqDeviceCapabilities):
return aws_ionq_to_properties(properties, self._configuration)
if isinstance(properties, RigettiDeviceCapabilities):
return aws_rigetti_to_properties(properties, self._configuration)
if isinstance(properties, GateModelSimulatorDeviceCapabilities):
return aws_simulator_to_properties(properties, self._configuration)
def status(self) -> BackendStatus:
# now = datetime.now()
# windows = self._aws_device.properties.service.executionWindows
# is_in_execution_window = windows.
status: str = self._aws_device.status
backend_status: BackendStatus = BackendStatus(
backend_name=self.name(),
backend_version=self.version(),
operational=False,
pending_jobs=0, # TODO
status_msg=status
)
if status == 'ONLINE':
backend_status.operational = True
elif status == 'OFFLINE':
backend_status.operational = False
else:
backend_status.operational = False
return backend_status
def _get_job_data_s3_folder(self, job_id):
return f"results-{self.name()}-{job_id}"
@staticmethod
def _exists_file(s3_client, s3_bucket: str, file: str):
result: dict = s3_client.list_objects_v2(
Bucket=s3_bucket,
Prefix=file
)
# TODO: error handling
return result['KeyCount'] != 0
def _save_job_task_arns(self, job_id: str, task_arns: List[str],
s3_bucket: Optional[str] = None) -> AwsSession.S3DestinationFolder:
used_s3_bucket = s3_bucket or self._provider.get_default_bucket()
s3_client = self._provider.get_s3_client()
file = f'{self._get_job_data_s3_folder(job_id=job_id)}/task_arns.json'
if AWSBackend._exists_file(s3_client, used_s3_bucket, file):
raise ValueError(f"An object '{file}' does already exist in the bucket {used_s3_bucket}")
result = s3_client.put_object(Body=json.dumps(task_arns).encode(), Bucket=used_s3_bucket, Key=file)
# TODO: error handling
return used_s3_bucket, self._get_job_data_s3_folder(job_id=job_id)
def _delete_job_task_arns(self, job_id: str, s3_bucket: Optional[str] = None):
used_s3_bucket = s3_bucket or self._provider.get_default_bucket()
s3_client = self._provider.get_s3_client()
file = f'{self._get_job_data_s3_folder(job_id=job_id)}/task_arns.json'
if not AWSBackend._exists_file(s3_client, used_s3_bucket, file):
raise ValueError(f"An object '{file}' does not exist in the bucket {used_s3_bucket}")
result: dict = s3_client.delete_object(Bucket=used_s3_bucket, Key=file)
# TODO: error handling
def _load_job_task_arns(self, job_id: str, s3_bucket: Optional[str] = None) -> List[str]:
used_s3_bucket = s3_bucket or self._provider.get_default_bucket()
s3_client = self._provider.get_s3_client()
file = f'{self._get_job_data_s3_folder(job_id=job_id)}/task_arns.json'
if not AWSBackend._exists_file(s3_client, used_s3_bucket, file):
raise ValueError(f"An object '{file}' does not exist in the bucket {used_s3_bucket}")
result: dict = s3_client.get_object(Bucket=used_s3_bucket, Key=file)
# TODO: error handling
streaming_body: StreamingBody = result['Body']
data: bytes = streaming_body.read()
task_arns = json.loads(data.decode())
return task_arns
def _save_job_data_s3(self, qobj: QasmQobj, s3_bucket: Optional[str] = None,
extra_data: Optional[dict] = None) -> AwsSession.S3DestinationFolder:
used_s3_bucket: str = s3_bucket or self._provider.get_default_bucket()
s3_client = self._provider.get_s3_client()
file = f'{self._get_job_data_s3_folder(job_id=qobj.qobj_id)}/qiskit_qobj_data.json'
if AWSBackend._exists_file(s3_client, used_s3_bucket, file):
raise ValueError(f"An object '{file}' already exists at the bucket {used_s3_bucket}")
body = {
'qobj_id': qobj.qobj_id,
'qobj': qobj.to_dict()
}
if extra_data:
body['extra_data'] = extra_data
result = s3_client.put_object(Body=json.dumps(body).encode(), Bucket=used_s3_bucket, Key=file)
# TODO: error handling
return used_s3_bucket, self._get_job_data_s3_folder(job_id=qobj.qobj_id)
def _delete_job_data_s3(self, job_id: str, s3_bucket: Optional[str] = None):
used_s3_bucket = s3_bucket or self._provider.get_default_bucket()
s3_client = self._provider.get_s3_client()
file = f'{self._get_job_data_s3_folder(job_id=job_id)}/qiskit_qobj_data.json'
if not AWSBackend._exists_file(s3_client, used_s3_bucket, file):
raise ValueError(f"An object '{file}' does not exist in the bucket {used_s3_bucket}")
result: dict = s3_client.delete_object(Bucket=used_s3_bucket, Key=file)
# TODO: error handling
def _load_job_data_s3(self, job_id: str, s3_bucket: Optional[str] = None) -> Tuple[QasmQobj, dict]:
used_s3_bucket = s3_bucket or self._provider.get_default_bucket()
s3_client = self._provider.get_s3_client()
file = f'{self._get_job_data_s3_folder(job_id=job_id)}/qiskit_qobj_data.json'
if not AWSBackend._exists_file(s3_client, used_s3_bucket, file):
raise ValueError(f"An object '{file}' does not exist in the bucket {used_s3_bucket}")
result: dict = s3_client.get_object(Bucket=used_s3_bucket, Key=file)
# TODO: error handling
streaming_body: StreamingBody = result['Body']
data: bytes = streaming_body.read()
stored_experiment_data = json.loads(data.decode())
assert 'qobj' in stored_experiment_data
qobj_raw = stored_experiment_data['qobj']
qobj = QasmQobj.from_dict(qobj_raw)
extra_data = stored_experiment_data.get('extra_data', {})
return qobj, extra_data
def _create_task(self, job_id: str, qc: Circuit, shots: int, s3_bucket: Optional[str] = None) -> AwsQuantumTask:
used_s3_bucket: str = s3_bucket or self._provider.get_default_bucket()
task: AwsQuantumTask = self._aws_device.run(
task_specification=qc,
s3_destination_folder=(used_s3_bucket, self._get_job_data_s3_folder(job_id)),
shots=shots
)
return task
def jobs(
self,
limit: int = 10,
skip: int = 0,
status: Optional[Union[JobStatus, str, List[Union[JobStatus, str]]]] = None,
job_name: Optional[str] = None,
start_datetime: Optional[datetime] = None,
end_datetime: Optional[datetime] = None,
job_tags: Optional[List[str]] = None,
job_tags_operator: Optional[str] = "OR",
descending: bool = True,
db_filter: Optional[Dict[str, Any]] = None
) -> List['awsjob.AWSJob']:
# TODO: use job tags as meta data on s3, else use the method of active_jobs
pass
def active_jobs(self, limit: int = 10) -> List['awsjob.AWSJob']:
client = self._provider._aws_session.braket_client
task_arns = []
nextToken = 'init'
while nextToken is not None:
result: dict = client.search_quantum_tasks(
filters=[{
'name': self.name(),
'operator': 'EQUAL',
'values': ['CREATED', 'QUEUED', 'RUNNING']
}
],
maxResults=limit,
nextToken=None if nextToken == 'init' or nextToken is None else nextToken
)
# TODO: build all task_arns, query s3 for all keys with task_arns.json, see to which task a job associated, load the jobs via job_id
pass
def retrieve_job(self, job_id: str, s3_bucket: Optional[str] = None) -> 'awsjob.AWSJob':
qobj, extra_data = self._load_job_data_s3(job_id=job_id, s3_bucket=s3_bucket)
arns = self._load_job_task_arns(job_id=job_id, s3_bucket=s3_bucket)
tasks = [AwsQuantumTask(arn=arn) for arn in arns]
job = awsjob.AWSJob(
job_id=job_id,
qobj=qobj,
tasks=tasks,
extra_data=extra_data,
s3_bucket=s3_bucket,
backend=self
)
return job
def estimate_costs(self, qobj: QasmQobj) -> Optional[float]:
shots = qobj.config.shots
no_experiments = len(qobj.experiments)
cost: DeviceCost = self._aws_device.properties.service.deviceCost
if cost.unit == 'shot':
return shots * no_experiments * cost.price
elif cost.unit == 'hour':
time_per_experiment = timedelta(seconds=10) # TODO: make this a better estimate: depends on no_qubits and depth
total_time = shots * no_experiments * time_per_experiment
return total_time.total_seconds() / 60 / 60 * cost.price
else:
return None
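    # Worked example for the cost logic above (the prices are hypothetical, not
    # actual AWS Braket pricing): a per-shot device at 0.00035 per shot with
    # 1024 shots and 3 experiments costs 1024 * 3 * 0.00035 ~= 1.08, while a
    # per-hour device at 4.50 per hour, with the assumed 10 s per experiment,
    # runs 1024 * 3 * 10 s ~= 8.53 h and costs ~= 38.4.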
def run(self, qobj: QasmQobj, s3_bucket: Optional[str] = None, extra_data: Optional[dict] = None):
# If we get here, then we can continue with running, else ValueError!
circuits: List[Circuit] = list(convert_qasm_qobj(qobj))
shots = qobj.config.shots
tasks: List[AwsQuantumTask] = []
try:
s3_location: AwsSession.S3DestinationFolder = self._save_job_data_s3(qobj, s3_bucket=s3_bucket, extra_data=extra_data)
for circuit in circuits:
task = self._aws_device.run(
task_specification=circuit,
s3_destination_folder=s3_location,
shots=shots
)
tasks.append(task)
task_arns = [t.id for t in tasks]
self._save_job_task_arns(job_id=qobj.qobj_id, task_arns=task_arns, s3_bucket=s3_location[0])
except Exception as ex:
logger.error(f'During creation of tasks an error occurred: {ex}')
logger.error(f'Cancelling all tasks {len(tasks)}!')
for task in tasks:
logger.error(f'Attempt to cancel {task.id}...')
task.cancel()
logger.error(f'State of {task.id}: {task.state()}.')
self._delete_job_task_arns(qobj.qobj_id, s3_bucket=s3_bucket)
self._delete_job_data_s3(qobj.qobj_id, s3_bucket=s3_bucket)
raise ex
job = awsjob.AWSJob(
job_id=qobj.qobj_id,
qobj=qobj,
tasks=tasks,
extra_data=extra_data,
s3_bucket=s3_location[0],
backend=self
)
return job
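# Illustrative usage sketch (assumed bucket name; `backend` is a configured AWSBackend):
#   job = backend.run(qobj, s3_bucket='my-results-bucket')
#   print(backend.estimate_costs(qobj))
#   same_job = backend.retrieve_job(qobj.qobj_id, s3_bucket='my-results-bucket')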
| [((38, 9, 38, 36), 'logging.getLogger', 'logging.getLogger', ({(38, 27, 38, 35): '__name__'}, {}), '(__name__)', False, 'import logging\n'), ((172, 15, 172, 43), 'qiskit.qobj.QasmQobj.from_dict', 'QasmQobj.from_dict', ({(172, 34, 172, 42): 'qobj_raw'}, {}), '(qobj_raw)', False, 'from qiskit.qobj import QasmQobj\n'), ((223, 17, 223, 40), 'braket.aws.AwsQuantumTask', 'AwsQuantumTask', (), '', False, 'from braket.aws import AwsDevice, AwsQuantumTask, AwsSession\n'), ((242, 34, 242, 55), 'datetime.timedelta', 'timedelta', (), '', False, 'from datetime import datetime, timedelta\n'), ((101, 43, 101, 64), 'json.dumps', 'json.dumps', ({(101, 54, 101, 63): 'task_arns'}, {}), '(task_arns)', False, 'import json\n'), ((144, 43, 144, 59), 'json.dumps', 'json.dumps', ({(144, 54, 144, 58): 'body'}, {}), '(body)', False, 'import json\n')] |
thirtywang/OpenPNM | test/unit/Algorithms/GenericLinearTransportTest.py | e55ee7ae69a8be3e2b0e6bf24c9ff92b6d24e16a | import OpenPNM
import numpy as np
import OpenPNM.Physics.models as pm
class GenericLinearTransportTest:
def setup_class(self):
self.net = OpenPNM.Network.Cubic(shape=[5, 5, 5])
self.phase = OpenPNM.Phases.GenericPhase(network=self.net)
Ps = self.net.Ps
Ts = self.net.Ts
self.phys = OpenPNM.Physics.GenericPhysics(network=self.net,
phase=self.phase,
pores=Ps, throats=Ts)
self.phys['throat.cond'] = 5e-8
self.alg = OpenPNM.Algorithms.GenericLinearTransport(network=self.net,
phase=self.phase)
def test_set_BC_modes_pores(self):
BC1_pores = np.arange(25, 35)
self.alg.set_boundary_conditions(bctype='Dirichlet',
bcvalue=0.8,
pores=BC1_pores)
ptest = self.alg.pores('pore.Dirichlet')
assert np.all(ptest == BC1_pores)
BC2_pores = np.arange(43, 50)
self.alg.set_boundary_conditions(bctype='Dirichlet',
bcvalue=0.8,
pores=BC2_pores,
mode='merge')
ptest = self.alg.pores('pore.Dirichlet')
assert np.all(ptest == np.concatenate((BC1_pores, BC2_pores)))
BC3_pores = np.arange(4, 9)
self.alg.set_boundary_conditions(bctype='Dirichlet',
bcvalue=0.8,
pores=BC3_pores,
mode='overwrite')
ptest = self.alg.pores('pore.Dirichlet')
assert np.all(ptest == BC3_pores)
BC4_pores = [11, 90]
self.alg.set_boundary_conditions(bctype='Neumann',
bcvalue=0.5,
pores=BC4_pores,
mode='overwrite')
ptest = self.alg.pores('pore.Neumann')
assert np.all(ptest == BC4_pores)
self.alg.set_boundary_conditions(bctype='Dirichlet',
pores=BC1_pores,
bcvalue=0.3)
ptest = self.alg.pores('pore.Dirichlet')
self.alg.set_boundary_conditions(bctype='Dirichlet',
pores=self.alg.Ps,
mode='remove')
Dp = np.sum(self.alg['pore.Dirichlet'])
assert Dp == 0
self.alg.set_boundary_conditions(bctype='Neumann',
mode='remove')
label = 'pore.Neumann'
assert (label not in self.alg.labels())
def test_set_BC_modes_throats(self):
BC1_throats = np.arange(25, 35)
self.alg.set_boundary_conditions(bctype='Dirichlet',
bcvalue=0.8,
throats=BC1_throats)
t_test = self.alg.throats('throat.Dirichlet')
assert np.all(t_test == BC1_throats)
BC2_throats = np.arange(43, 50)
self.alg.set_boundary_conditions(bctype='Dirichlet',
bcvalue=0.8,
throats=BC2_throats,
mode='merge')
t_test = self.alg.throats('throat.Dirichlet')
assert np.all(t_test == np.concatenate((BC1_throats, BC2_throats)))
BC3_throats = np.arange(4, 9)
self.alg.set_boundary_conditions(bctype='Dirichlet',
bcvalue=0.8,
throats=BC3_throats,
mode='overwrite')
t_test = self.alg.throats('throat.Dirichlet')
assert np.all(t_test == BC3_throats)
BC4_throats = [11, 90]
self.alg.set_boundary_conditions(bctype='Neumann',
bcvalue=0.5,
throats=BC4_throats,
mode='overwrite')
t_test = self.alg.throats('throat.Neumann')
assert np.all(t_test == BC4_throats)
self.alg.set_boundary_conditions(bctype='Dirichlet',
throats=BC1_throats,
bcvalue=0.3)
t_test = self.alg.throats('throat.Dirichlet')
self.alg.set_boundary_conditions(bctype='Dirichlet',
throats=self.alg.Ts,
mode='remove')
Dp = np.sum(self.alg['throat.Dirichlet'])
assert Dp == 0
self.alg.set_boundary_conditions(bctype='Neumann',
mode='remove')
label = 'throat.Neumann'
assert (label not in self.alg.labels())
def test_set_BC_modes_with_boolean_masks_pores(self):
BC1_pores = np.zeros(self.alg.Np, dtype='bool')
BC1_pores[np.arange(25, 35)] = True
self.alg.set_boundary_conditions(bctype='Dirichlet',
bcvalue=0.8,
pores=BC1_pores)
ptest = self.alg.pores('pore.Dirichlet')
assert np.all(ptest == self.alg._parse_locations(BC1_pores))
BC2_pores = np.zeros(self.alg.Np, dtype='bool')
BC2_pores[np.arange(43, 50)] = True
self.alg.set_boundary_conditions(bctype='Dirichlet',
bcvalue=0.8,
pores=BC2_pores,
mode='merge')
ptest = self.alg.pores('pore.Dirichlet')
B1 = self.alg._parse_locations(BC1_pores)
B2 = self.alg._parse_locations(BC2_pores)
assert np.all(ptest == np.concatenate((B1, B2)))
BC3_pores = np.zeros(self.alg.Np, dtype='bool')
BC3_pores[np.arange(4, 9)] = True
self.alg.set_boundary_conditions(bctype='Dirichlet',
bcvalue=0.8,
pores=BC3_pores,
mode='overwrite')
ptest = self.alg.pores('pore.Dirichlet')
assert np.all(ptest == self.alg._parse_locations(BC3_pores))
BC4_pores = np.zeros(self.alg.Np, dtype='bool')
BC4_pores[[11, 90]] = True
self.alg.set_boundary_conditions(bctype='Neumann',
bcvalue=0.5,
pores=BC4_pores,
mode='overwrite')
ptest = self.alg.pores('pore.Neumann')
assert np.all(ptest == self.alg._parse_locations(BC4_pores))
self.alg.set_boundary_conditions(bctype='Dirichlet',
pores=BC1_pores,
bcvalue=0.3)
ptest = self.alg.pores('pore.Dirichlet')
removed_p = self.alg._parse_locations(self.alg.Ps)
self.alg.set_boundary_conditions(bctype='Dirichlet',
pores=removed_p,
mode='remove')
Dp = np.sum(self.alg['pore.Dirichlet'])
assert Dp == 0
self.alg.set_boundary_conditions(bctype='Neumann',
mode='remove')
label = 'pore.Neumann'
assert (label not in self.alg.labels())
def test_set_BC_modes_with_boolean_masks_throats(self):
BC1_throats = np.zeros(self.alg.Nt, dtype='bool')
BC1_throats[np.arange(25, 35)] = True
self.alg.set_boundary_conditions(bctype='Dirichlet',
bcvalue=0.8,
throats=BC1_throats)
t_test = self.alg.throats('throat.Dirichlet')
assert np.all(t_test == self.alg._parse_locations(BC1_throats))
BC2_throats = np.zeros(self.alg.Nt, dtype='bool')
BC2_throats[np.arange(43, 50)] = True
self.alg.set_boundary_conditions(bctype='Dirichlet',
bcvalue=0.8,
throats=BC2_throats,
mode='merge')
t_test = self.alg.throats('throat.Dirichlet')
B1 = self.alg._parse_locations(BC1_throats)
B2 = self.alg._parse_locations(BC2_throats)
assert np.all(t_test == np.concatenate((B1, B2)))
BC3_throats = np.zeros(self.alg.Nt, dtype='bool')
BC3_throats[np.arange(4, 9)] = True
self.alg.set_boundary_conditions(bctype='Dirichlet',
bcvalue=0.8,
throats=BC3_throats,
mode='overwrite')
t_test = self.alg.throats('throat.Dirichlet')
assert np.all(t_test == self.alg._parse_locations(BC3_throats))
BC4_throats = np.zeros(self.alg.Nt, dtype='bool')
BC4_throats[[11, 90]] = True
self.alg.set_boundary_conditions(bctype='Neumann',
bcvalue=0.5,
throats=BC4_throats,
mode='overwrite')
t_test = self.alg.throats('throat.Neumann')
assert np.all(t_test == self.alg._parse_locations(BC4_throats))
self.alg.set_boundary_conditions(bctype='Dirichlet',
throats=BC1_throats,
bcvalue=0.3)
t_test = self.alg.throats('throat.Dirichlet')
removed_t = self.alg._parse_locations(self.alg.Ts)
self.alg.set_boundary_conditions(bctype='Dirichlet',
throats=removed_t,
mode='remove')
Dp = np.sum(self.alg['throat.Dirichlet'])
assert Dp == 0
self.alg.set_boundary_conditions(bctype='Neumann',
mode='remove')
        label = 'throat.Neumann'
assert (label not in self.alg.labels())
def test_super_pore_conductance(self):
g_super = []
BC1_pores = np.arange(20, 30)
self.alg.set_boundary_conditions(bctype='Dirichlet',
bcvalue=0.4,
pores=BC1_pores)
BC2_pores = np.arange(45, 66)
self.alg.set_boundary_conditions(bctype='Neumann_group',
bcvalue=1.4e-10,
pores=BC2_pores)
g_super.append(2e-12)
BC3_pores = np.arange(87, 94)
self.alg.set_boundary_conditions(bctype='Neumann_group',
bcvalue=-0.9e-10,
pores=BC3_pores)
g_super.append(np.ones(len(BC3_pores)) * 1.5e-12)
BC4_pores = np.arange(3, 7)
self.alg.set_boundary_conditions(bctype='Neumann_group',
bcvalue=0.1e-10,
pores=BC4_pores)
g_super.append(np.array([6.42e-13]))
self.alg.run(conductance='throat.cond',
quantity='pore.mole_fraction',
super_pore_conductance=g_super)
self.alg.return_results()
r1 = self.alg.rate(BC1_pores)[0]
r2 = self.alg.rate(BC2_pores)[0]
r3 = self.alg.rate(BC3_pores)[0]
r4 = self.alg.rate(BC4_pores)[0]
assert np.absolute(r1 + r2 + r3 + r4) < 1e-20
assert np.size(self.alg.super_pore_conductance[0]) == 1
assert np.size(self.alg.super_pore_conductance[1]) == 7
assert np.size(self.alg.super_pore_conductance[2]) == 1
def test_source_term_modes(self):
self.phys['pore.item1'] = 0.5e-12
self.phys['pore.item2'] = 2.5
self.phys['pore.item3'] = -1.4e-11
self.phys.models.add(propname='pore.A',
model=pm.generic_source_term.power_law,
A1='pore.item1',
A2='pore.item2',
A3='pore.item3',
x='mole_fraction',
return_rate=False,
regen_mode='on_demand')
self.phys.models.add(propname='pore.B',
model=pm.generic_source_term.linear,
A1='pore.item1',
A2='pore.item3',
x='mole_fraction',
return_rate=False,
regen_mode='on_demand')
S1_pores = np.arange(25, 35)
self.alg.set_source_term(source_name=['pore.A', 'pore.B'],
pores=S1_pores)
mask1 = ~np.isnan(self.alg['pore.source_nonlinear_s1_A'])
mask2 = ~np.isnan(self.alg['pore.source_nonlinear_s2_A'])
assert np.all(self.alg.Ps[mask1] == S1_pores)
assert np.all(self.alg.Ps[mask2] == S1_pores)
self.alg.set_source_term(source_name='pore.A',
pores=[26], x0=np.ones(self.phys.Np),
mode='update')
assert self.alg['pore.source_nonlinear_s1_A'][26] == 1.25e-12
S2_pores = np.array([30, 31])
self.alg.set_source_term(source_name='pore.A',
pores=S2_pores,
mode='overwrite')
mask1 = ~np.isnan(self.alg['pore.source_nonlinear_s1_A'])
assert np.all(self.alg.Ps[mask1] == S2_pores)
self.alg.set_source_term(source_name='pore.B',
pores=S1_pores,
mode='remove')
mask1 = np.isnan(self.alg['pore.source_nonlinear_s1_B'])
assert np.all(self.alg.Ps[mask1] == self.alg.Ps)
self.alg.set_source_term(source_name=['pore.A', 'pore.B'],
pores=self.alg.Ps,
mode='remove')
assert ('pore.source_B' in self.alg.labels())
assert ('pore.source_A' in self.alg.labels())
self.alg.set_source_term(source_name=['pore.A', 'pore.B'],
mode='remove')
assert ('pore.source_B' not in self.alg.labels())
assert ('pore.source_A' not in self.alg.labels())
| [((8, 19, 8, 57), 'OpenPNM.Network.Cubic', 'OpenPNM.Network.Cubic', (), '', False, 'import OpenPNM\n'), ((9, 21, 9, 66), 'OpenPNM.Phases.GenericPhase', 'OpenPNM.Phases.GenericPhase', (), '', False, 'import OpenPNM\n'), ((12, 20, 14, 72), 'OpenPNM.Physics.GenericPhysics', 'OpenPNM.Physics.GenericPhysics', (), '', False, 'import OpenPNM\n'), ((16, 19, 17, 78), 'OpenPNM.Algorithms.GenericLinearTransport', 'OpenPNM.Algorithms.GenericLinearTransport', (), '', False, 'import OpenPNM\n'), ((20, 20, 20, 37), 'numpy.arange', 'np.arange', ({(20, 30, 20, 32): '25', (20, 34, 20, 36): '35'}, {}), '(25, 35)', True, 'import numpy as np\n'), ((25, 15, 25, 41), 'numpy.all', 'np.all', ({(25, 22, 25, 40): '(ptest == BC1_pores)'}, {}), '(ptest == BC1_pores)', True, 'import numpy as np\n'), ((26, 20, 26, 37), 'numpy.arange', 'np.arange', ({(26, 30, 26, 32): '43', (26, 34, 26, 36): '50'}, {}), '(43, 50)', True, 'import numpy as np\n'), ((33, 20, 33, 35), 'numpy.arange', 'np.arange', ({(33, 30, 33, 31): '4', (33, 33, 33, 34): '9'}, {}), '(4, 9)', True, 'import numpy as np\n'), ((39, 15, 39, 41), 'numpy.all', 'np.all', ({(39, 22, 39, 40): '(ptest == BC3_pores)'}, {}), '(ptest == BC3_pores)', True, 'import numpy as np\n'), ((46, 15, 46, 41), 'numpy.all', 'np.all', ({(46, 22, 46, 40): '(ptest == BC4_pores)'}, {}), '(ptest == BC4_pores)', True, 'import numpy as np\n'), ((54, 13, 54, 47), 'numpy.sum', 'np.sum', ({(54, 20, 54, 46): "self.alg['pore.Dirichlet']"}, {}), "(self.alg['pore.Dirichlet'])", True, 'import numpy as np\n'), ((62, 22, 62, 39), 'numpy.arange', 'np.arange', ({(62, 32, 62, 34): '25', (62, 36, 62, 38): '35'}, {}), '(25, 35)', True, 'import numpy as np\n'), ((67, 15, 67, 44), 'numpy.all', 'np.all', ({(67, 22, 67, 43): '(t_test == BC1_throats)'}, {}), '(t_test == BC1_throats)', True, 'import numpy as np\n'), ((68, 22, 68, 39), 'numpy.arange', 'np.arange', ({(68, 32, 68, 34): '43', (68, 36, 68, 38): '50'}, {}), '(43, 50)', True, 'import numpy as np\n'), ((75, 22, 75, 37), 'numpy.arange', 'np.arange', ({(75, 32, 75, 33): '4', (75, 35, 75, 36): '9'}, {}), '(4, 9)', True, 'import numpy as np\n'), ((81, 15, 81, 44), 'numpy.all', 'np.all', ({(81, 22, 81, 43): '(t_test == BC3_throats)'}, {}), '(t_test == BC3_throats)', True, 'import numpy as np\n'), ((88, 15, 88, 44), 'numpy.all', 'np.all', ({(88, 22, 88, 43): '(t_test == BC4_throats)'}, {}), '(t_test == BC4_throats)', True, 'import numpy as np\n'), ((96, 13, 96, 49), 'numpy.sum', 'np.sum', ({(96, 20, 96, 48): "self.alg['throat.Dirichlet']"}, {}), "(self.alg['throat.Dirichlet'])", True, 'import numpy as np\n'), ((104, 20, 104, 55), 'numpy.zeros', 'np.zeros', (), '', True, 'import numpy as np\n'), ((111, 20, 111, 55), 'numpy.zeros', 'np.zeros', (), '', True, 'import numpy as np\n'), ((121, 20, 121, 55), 'numpy.zeros', 'np.zeros', (), '', True, 'import numpy as np\n'), ((129, 20, 129, 55), 'numpy.zeros', 'np.zeros', (), '', True, 'import numpy as np\n'), ((145, 13, 145, 47), 'numpy.sum', 'np.sum', ({(145, 20, 145, 46): "self.alg['pore.Dirichlet']"}, {}), "(self.alg['pore.Dirichlet'])", True, 'import numpy as np\n'), ((153, 22, 153, 57), 'numpy.zeros', 'np.zeros', (), '', True, 'import numpy as np\n'), ((160, 22, 160, 57), 'numpy.zeros', 'np.zeros', (), '', True, 'import numpy as np\n'), ((170, 22, 170, 57), 'numpy.zeros', 'np.zeros', (), '', True, 'import numpy as np\n'), ((178, 22, 178, 57), 'numpy.zeros', 'np.zeros', (), '', True, 'import numpy as np\n'), ((194, 13, 194, 49), 'numpy.sum', 'np.sum', ({(194, 20, 194, 48): "self.alg['throat.Dirichlet']"}, {}), 
"(self.alg['throat.Dirichlet'])", True, 'import numpy as np\n'), ((203, 20, 203, 37), 'numpy.arange', 'np.arange', ({(203, 30, 203, 32): '20', (203, 34, 203, 36): '30'}, {}), '(20, 30)', True, 'import numpy as np\n'), ((207, 20, 207, 37), 'numpy.arange', 'np.arange', ({(207, 30, 207, 32): '45', (207, 34, 207, 36): '66'}, {}), '(45, 66)', True, 'import numpy as np\n'), ((212, 20, 212, 37), 'numpy.arange', 'np.arange', ({(212, 30, 212, 32): '87', (212, 34, 212, 36): '94'}, {}), '(87, 94)', True, 'import numpy as np\n'), ((217, 20, 217, 35), 'numpy.arange', 'np.arange', ({(217, 30, 217, 31): '3', (217, 33, 217, 34): '7'}, {}), '(3, 7)', True, 'import numpy as np\n'), ((254, 19, 254, 36), 'numpy.arange', 'np.arange', ({(254, 29, 254, 31): '25', (254, 33, 254, 35): '35'}, {}), '(25, 35)', True, 'import numpy as np\n'), ((259, 15, 259, 53), 'numpy.all', 'np.all', ({(259, 22, 259, 52): '(self.alg.Ps[mask1] == S1_pores)'}, {}), '(self.alg.Ps[mask1] == S1_pores)', True, 'import numpy as np\n'), ((260, 15, 260, 53), 'numpy.all', 'np.all', ({(260, 22, 260, 52): '(self.alg.Ps[mask2] == S1_pores)'}, {}), '(self.alg.Ps[mask2] == S1_pores)', True, 'import numpy as np\n'), ((265, 19, 265, 37), 'numpy.array', 'np.array', ({(265, 28, 265, 36): '[30, 31]'}, {}), '([30, 31])', True, 'import numpy as np\n'), ((270, 15, 270, 53), 'numpy.all', 'np.all', ({(270, 22, 270, 52): '(self.alg.Ps[mask1] == S2_pores)'}, {}), '(self.alg.Ps[mask1] == S2_pores)', True, 'import numpy as np\n'), ((274, 16, 274, 64), 'numpy.isnan', 'np.isnan', ({(274, 25, 274, 63): "self.alg['pore.source_nonlinear_s1_B']"}, {}), "(self.alg['pore.source_nonlinear_s1_B'])", True, 'import numpy as np\n'), ((275, 15, 275, 56), 'numpy.all', 'np.all', ({(275, 22, 275, 55): '(self.alg.Ps[mask1] == self.alg.Ps)'}, {}), '(self.alg.Ps[mask1] == self.alg.Ps)', True, 'import numpy as np\n'), ((221, 23, 221, 43), 'numpy.array', 'np.array', ({(221, 32, 221, 42): '[6.42e-13]'}, {}), '([6.42e-13])', True, 'import numpy as np\n'), ((230, 15, 230, 45), 'numpy.absolute', 'np.absolute', ({(230, 27, 230, 44): '(r1 + r2 + r3 + r4)'}, {}), '(r1 + r2 + r3 + r4)', True, 'import numpy as np\n'), ((231, 15, 231, 58), 'numpy.size', 'np.size', ({(231, 23, 231, 57): 'self.alg.super_pore_conductance[0]'}, {}), '(self.alg.super_pore_conductance[0])', True, 'import numpy as np\n'), ((232, 15, 232, 58), 'numpy.size', 'np.size', ({(232, 23, 232, 57): 'self.alg.super_pore_conductance[1]'}, {}), '(self.alg.super_pore_conductance[1])', True, 'import numpy as np\n'), ((233, 15, 233, 58), 'numpy.size', 'np.size', ({(233, 23, 233, 57): 'self.alg.super_pore_conductance[2]'}, {}), '(self.alg.super_pore_conductance[2])', True, 'import numpy as np\n'), ((257, 17, 257, 65), 'numpy.isnan', 'np.isnan', ({(257, 26, 257, 64): "self.alg['pore.source_nonlinear_s1_A']"}, {}), "(self.alg['pore.source_nonlinear_s1_A'])", True, 'import numpy as np\n'), ((258, 17, 258, 65), 'numpy.isnan', 'np.isnan', ({(258, 26, 258, 64): "self.alg['pore.source_nonlinear_s2_A']"}, {}), "(self.alg['pore.source_nonlinear_s2_A'])", True, 'import numpy as np\n'), ((269, 17, 269, 65), 'numpy.isnan', 'np.isnan', ({(269, 26, 269, 64): "self.alg['pore.source_nonlinear_s1_A']"}, {}), "(self.alg['pore.source_nonlinear_s1_A'])", True, 'import numpy as np\n'), ((32, 31, 32, 69), 'numpy.concatenate', 'np.concatenate', ({(32, 46, 32, 68): '(BC1_pores, BC2_pores)'}, {}), '((BC1_pores, BC2_pores))', True, 'import numpy as np\n'), ((74, 32, 74, 74), 'numpy.concatenate', 'np.concatenate', ({(74, 47, 74, 73): '(BC1_throats, 
BC2_throats)'}, {}), '((BC1_throats, BC2_throats))', True, 'import numpy as np\n'), ((105, 18, 105, 35), 'numpy.arange', 'np.arange', ({(105, 28, 105, 30): '(25)', (105, 32, 105, 34): '(35)'}, {}), '(25, 35)', True, 'import numpy as np\n'), ((112, 18, 112, 35), 'numpy.arange', 'np.arange', ({(112, 28, 112, 30): '(43)', (112, 32, 112, 34): '(50)'}, {}), '(43, 50)', True, 'import numpy as np\n'), ((120, 31, 120, 55), 'numpy.concatenate', 'np.concatenate', ({(120, 46, 120, 54): '(B1, B2)'}, {}), '((B1, B2))', True, 'import numpy as np\n'), ((122, 18, 122, 33), 'numpy.arange', 'np.arange', ({(122, 28, 122, 29): '(4)', (122, 31, 122, 32): '(9)'}, {}), '(4, 9)', True, 'import numpy as np\n'), ((154, 20, 154, 37), 'numpy.arange', 'np.arange', ({(154, 30, 154, 32): '(25)', (154, 34, 154, 36): '(35)'}, {}), '(25, 35)', True, 'import numpy as np\n'), ((161, 20, 161, 37), 'numpy.arange', 'np.arange', ({(161, 30, 161, 32): '(43)', (161, 34, 161, 36): '(50)'}, {}), '(43, 50)', True, 'import numpy as np\n'), ((169, 32, 169, 56), 'numpy.concatenate', 'np.concatenate', ({(169, 47, 169, 55): '(B1, B2)'}, {}), '((B1, B2))', True, 'import numpy as np\n'), ((171, 20, 171, 35), 'numpy.arange', 'np.arange', ({(171, 30, 171, 31): '(4)', (171, 33, 171, 34): '(9)'}, {}), '(4, 9)', True, 'import numpy as np\n'), ((262, 48, 262, 69), 'numpy.ones', 'np.ones', ({(262, 56, 262, 68): 'self.phys.Np'}, {}), '(self.phys.Np)', True, 'import numpy as np\n')] |
spartantri/aws-security-automation | EC2 Auto Clean Room Forensics/Lambda-Functions/snapshotForRemediation.py | a3904931220111022d12e71a3d79e4a85fc82173 | # MIT No Attribution
# Permission is hereby granted, free of charge, to any person obtaining a copy of this
# software and associated documentation files (the "Software"), to deal in the Software
# without restriction, including without limitation the rights to use, copy, modify,
# merge, publish, distribute, sublicense, and/or sell copies of the Software, and to
# permit persons to whom the Software is furnished to do so.
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED,
# INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A
# PARTICULAR PURPOSE AND NON-INFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
# HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE
# SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
import boto3
import os
def lambda_handler(event, context):
# TODO implement
print(event)
client = boto3.client('ec2')
instanceID = event.get('instanceID')
response = client.describe_instances(
InstanceIds=[
instanceID
]
)
volumeID = response['Reservations'][0]['Instances'][0]['BlockDeviceMappings'][0]['Ebs']['VolumeId']
print(volumeID)
SnapShotDetails = client.create_snapshot(
Description='Isolated Instance',
VolumeId=volumeID
)
client.create_tags(Resources=[SnapShotDetails['SnapshotId']], Tags=[{'Key': 'Name', 'Value': instanceID}])
# TODO Dump Response into S3 - response
# TODO Dump Response details into Snapshot - SnapShotDetails['SnapshotId']
print(response)
print(SnapShotDetails['SnapshotId'])
response = client.modify_instance_attribute(
Groups=[
os.environ['ISOLATED_SECUTRITYGROUP'],
],
InstanceId=instanceID
)
tagresponse = client.create_tags(
Resources=[
instanceID,
],
Tags=[
{
'Key': 'IsIsolated',
'Value': 'InstanceIsolated'
},
]
)
waiter = client.get_waiter('snapshot_completed')
waiter.wait(
SnapshotIds=[
SnapShotDetails['SnapshotId'],
]
)
# event['SnapshotId'] = SnapShotDetails['SnapshotId']
return SnapShotDetails['SnapshotId']
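# Illustrative invocation sketch (not part of the original handler): the event
# only needs the id of the instance being quarantined, e.g.
#
#     lambda_handler({'instanceID': 'i-0123456789abcdef0'}, None)
#
# The instance id above is a made-up example value.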
| [((23, 13, 23, 32), 'boto3.client', 'boto3.client', ({(23, 26, 23, 31): '"""ec2"""'}, {}), "('ec2')", False, 'import boto3\n')] |
VibhuJawa/gpu-bdb | gpu_bdb/queries/q26/gpu_bdb_query_26.py | 13987b4ef8b92db3b9d2905dec7bd2fd81f42ae9 | #
# Copyright (c) 2019-2022, NVIDIA CORPORATION.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
from bdb_tools.utils import (
benchmark,
gpubdb_argparser,
train_clustering_model,
run_query,
)
from bdb_tools.q26_utils import (
Q26_CATEGORY,
Q26_ITEM_COUNT,
N_CLUSTERS,
CLUSTER_ITERATIONS,
N_ITER,
read_tables
)
import numpy as np
from dask import delayed
def agg_count_distinct(df, group_key, counted_key):
"""Returns a Series that is the result of counting distinct instances of 'counted_key' within each 'group_key'.
The series' index will have one entry per unique 'group_key' value.
Workaround for lack of nunique aggregate function on Dask df.
"""
return (
df.drop_duplicates([group_key, counted_key])
.groupby(group_key)[counted_key]
.count()
)
def get_clusters(client, kmeans_input_df):
import dask_cudf
ml_tasks = [
delayed(train_clustering_model)(df, N_CLUSTERS, CLUSTER_ITERATIONS, N_ITER)
for df in kmeans_input_df.to_delayed()
]
results_dict = client.compute(*ml_tasks, sync=True)
output = kmeans_input_df.index.to_frame().reset_index(drop=True)
labels_final = dask_cudf.from_cudf(
results_dict["cid_labels"], npartitions=output.npartitions
)
output["label"] = labels_final.reset_index()[0]
# Sort based on CDH6.1 q26-result formatting
output = output.sort_values(["ss_customer_sk"])
# Based on CDH6.1 q26-result formatting
results_dict["cid_labels"] = output
return results_dict
def main(client, config):
import cudf
ss_ddf, items_ddf = benchmark(
read_tables,
config=config,
compute_result=config["get_read_time"],
)
items_filtered = items_ddf[items_ddf.i_category == Q26_CATEGORY].reset_index(
drop=True
)
items_filtered = items_filtered[["i_item_sk", "i_class_id"]]
f_ss_ddf = ss_ddf[ss_ddf["ss_customer_sk"].notnull()].reset_index(drop=True)
merged_ddf = f_ss_ddf.merge(
items_filtered, left_on="ss_item_sk", right_on="i_item_sk", how="inner"
)
keep_cols = ["ss_customer_sk", "i_class_id"]
merged_ddf = merged_ddf[keep_cols]
# One-Hot-Encode i_class_id
merged_ddf = merged_ddf.map_partitions(
cudf.get_dummies,
columns=["i_class_id"],
prefix="id",
cats={"i_class_id": np.arange(1, 16, dtype="int32")},
prefix_sep="",
dtype="float32",
)
merged_ddf["total"] = 1.0 # Will keep track of total count
all_categories = ["total"] + ["id%d" % i for i in range(1, 16)]
# Aggregate using agg to get sorted ss_customer_sk
agg_dict = dict.fromkeys(all_categories, "sum")
rollup_ddf = merged_ddf.groupby("ss_customer_sk").agg(agg_dict)
rollup_ddf = rollup_ddf[rollup_ddf.total > Q26_ITEM_COUNT][all_categories[1:]]
# Prepare data for KMeans clustering
rollup_ddf = rollup_ddf.astype("float64")
kmeans_input_df = rollup_ddf.persist()
results_dict = get_clusters(client=client, kmeans_input_df=kmeans_input_df)
return results_dict
if __name__ == "__main__":
from bdb_tools.cluster_startup import attach_to_cluster
config = gpubdb_argparser()
client, bc = attach_to_cluster(config)
run_query(config=config, client=client, query_func=main)
| [((57, 19, 59, 5), 'dask_cudf.from_cudf', 'dask_cudf.from_cudf', (), '', False, 'import dask_cudf\n'), ((73, 24, 77, 5), 'bdb_tools.utils.benchmark', 'benchmark', (), '', False, 'from bdb_tools.utils import benchmark, gpubdb_argparser, train_clustering_model, run_query\n'), ((120, 13, 120, 31), 'bdb_tools.utils.gpubdb_argparser', 'gpubdb_argparser', ({}, {}), '()', False, 'from bdb_tools.utils import benchmark, gpubdb_argparser, train_clustering_model, run_query\n'), ((121, 17, 121, 42), 'bdb_tools.cluster_startup.attach_to_cluster', 'attach_to_cluster', ({(121, 35, 121, 41): 'config'}, {}), '(config)', False, 'from bdb_tools.cluster_startup import attach_to_cluster\n'), ((122, 4, 122, 60), 'bdb_tools.utils.run_query', 'run_query', (), '', False, 'from bdb_tools.utils import benchmark, gpubdb_argparser, train_clustering_model, run_query\n'), ((50, 8, 50, 39), 'dask.delayed', 'delayed', ({(50, 16, 50, 38): 'train_clustering_model'}, {}), '(train_clustering_model)', False, 'from dask import delayed\n'), ((96, 28, 96, 59), 'numpy.arange', 'np.arange', (), '', True, 'import numpy as np\n')] |
alex/optimizer-model | tests/test_intbounds.py | 0e40a0763082f5fe0bd596e8e77ebccbcd7f4a98 | from optimizer.utils.intbounds import IntBounds
class TestIntBounds(object):
def test_make_gt(self):
i0 = IntBounds()
i1 = i0.make_gt(IntBounds(10, 10))
assert i1.lower == 11
def test_make_gt_already_bounded(self):
i0 = IntBounds()
i1 = i0.make_gt(IntBounds(10, 10)).make_gt(IntBounds(0, 0))
assert i1.lower == 11
def test_make_lt(self):
i0 = IntBounds()
i1 = i0.make_lt(IntBounds(10, 10))
assert i1.upper == 9
def test_make_lt_already_bounded(self):
i0 = IntBounds()
i1 = i0.make_lt(IntBounds(0, 0)).make_lt(IntBounds(10, 10))
assert i1.upper == -1
def test_both_bounds(self):
i0 = IntBounds()
i1 = i0.make_lt(IntBounds(10, 10)).make_gt(IntBounds(0, 0))
assert i1.upper == 9
assert i1.lower == 1
i2 = i0.make_gt(IntBounds(0, 0)).make_lt(IntBounds(10, 10))
assert i2.lower == 1
assert i2.upper == 9
def test_make_le_already_bounded(self):
i0 = IntBounds()
i1 = i0.make_le(IntBounds(0, 0)).make_le(IntBounds(2, 2))
assert i1.upper == 0
def test_make_ge_already_bounded(self):
i0 = IntBounds()
i1 = i0.make_ge(IntBounds(10, 10)).make_ge(IntBounds(0, 0))
assert i1.lower == 10
| [((6, 13, 6, 24), 'optimizer.utils.intbounds.IntBounds', 'IntBounds', ({}, {}), '()', False, 'from optimizer.utils.intbounds import IntBounds\n'), ((13, 13, 13, 24), 'optimizer.utils.intbounds.IntBounds', 'IntBounds', ({}, {}), '()', False, 'from optimizer.utils.intbounds import IntBounds\n'), ((20, 13, 20, 24), 'optimizer.utils.intbounds.IntBounds', 'IntBounds', ({}, {}), '()', False, 'from optimizer.utils.intbounds import IntBounds\n'), ((27, 13, 27, 24), 'optimizer.utils.intbounds.IntBounds', 'IntBounds', ({}, {}), '()', False, 'from optimizer.utils.intbounds import IntBounds\n'), ((34, 13, 34, 24), 'optimizer.utils.intbounds.IntBounds', 'IntBounds', ({}, {}), '()', False, 'from optimizer.utils.intbounds import IntBounds\n'), ((47, 13, 47, 24), 'optimizer.utils.intbounds.IntBounds', 'IntBounds', ({}, {}), '()', False, 'from optimizer.utils.intbounds import IntBounds\n'), ((53, 13, 53, 24), 'optimizer.utils.intbounds.IntBounds', 'IntBounds', ({}, {}), '()', False, 'from optimizer.utils.intbounds import IntBounds\n'), ((8, 24, 8, 41), 'optimizer.utils.intbounds.IntBounds', 'IntBounds', ({(8, 34, 8, 36): '10', (8, 38, 8, 40): '10'}, {}), '(10, 10)', False, 'from optimizer.utils.intbounds import IntBounds\n'), ((15, 51, 15, 66), 'optimizer.utils.intbounds.IntBounds', 'IntBounds', ({(15, 61, 15, 62): '0', (15, 64, 15, 65): '0'}, {}), '(0, 0)', False, 'from optimizer.utils.intbounds import IntBounds\n'), ((22, 24, 22, 41), 'optimizer.utils.intbounds.IntBounds', 'IntBounds', ({(22, 34, 22, 36): '10', (22, 38, 22, 40): '10'}, {}), '(10, 10)', False, 'from optimizer.utils.intbounds import IntBounds\n'), ((29, 49, 29, 66), 'optimizer.utils.intbounds.IntBounds', 'IntBounds', ({(29, 59, 29, 61): '10', (29, 63, 29, 65): '10'}, {}), '(10, 10)', False, 'from optimizer.utils.intbounds import IntBounds\n'), ((36, 51, 36, 66), 'optimizer.utils.intbounds.IntBounds', 'IntBounds', ({(36, 61, 36, 62): '0', (36, 64, 36, 65): '0'}, {}), '(0, 0)', False, 'from optimizer.utils.intbounds import IntBounds\n'), ((41, 49, 41, 66), 'optimizer.utils.intbounds.IntBounds', 'IntBounds', ({(41, 59, 41, 61): '10', (41, 63, 41, 65): '10'}, {}), '(10, 10)', False, 'from optimizer.utils.intbounds import IntBounds\n'), ((48, 49, 48, 64), 'optimizer.utils.intbounds.IntBounds', 'IntBounds', ({(48, 59, 48, 60): '2', (48, 62, 48, 63): '2'}, {}), '(2, 2)', False, 'from optimizer.utils.intbounds import IntBounds\n'), ((54, 51, 54, 66), 'optimizer.utils.intbounds.IntBounds', 'IntBounds', ({(54, 61, 54, 62): '0', (54, 64, 54, 65): '0'}, {}), '(0, 0)', False, 'from optimizer.utils.intbounds import IntBounds\n'), ((15, 24, 15, 41), 'optimizer.utils.intbounds.IntBounds', 'IntBounds', ({(15, 34, 15, 36): '10', (15, 38, 15, 40): '10'}, {}), '(10, 10)', False, 'from optimizer.utils.intbounds import IntBounds\n'), ((29, 24, 29, 39), 'optimizer.utils.intbounds.IntBounds', 'IntBounds', ({(29, 34, 29, 35): '0', (29, 37, 29, 38): '0'}, {}), '(0, 0)', False, 'from optimizer.utils.intbounds import IntBounds\n'), ((36, 24, 36, 41), 'optimizer.utils.intbounds.IntBounds', 'IntBounds', ({(36, 34, 36, 36): '10', (36, 38, 36, 40): '10'}, {}), '(10, 10)', False, 'from optimizer.utils.intbounds import IntBounds\n'), ((41, 24, 41, 39), 'optimizer.utils.intbounds.IntBounds', 'IntBounds', ({(41, 34, 41, 35): '0', (41, 37, 41, 38): '0'}, {}), '(0, 0)', False, 'from optimizer.utils.intbounds import IntBounds\n'), ((48, 24, 48, 39), 'optimizer.utils.intbounds.IntBounds', 'IntBounds', ({(48, 34, 48, 35): '0', (48, 37, 48, 38): '0'}, {}), '(0, 0)', False, 'from 
optimizer.utils.intbounds import IntBounds\n'), ((54, 24, 54, 41), 'optimizer.utils.intbounds.IntBounds', 'IntBounds', ({(54, 34, 54, 36): '10', (54, 38, 54, 40): '10'}, {}), '(10, 10)', False, 'from optimizer.utils.intbounds import IntBounds\n')] |
minchuang/td-client-python | tdclient/test/database_model_test.py | 6cf6dfbb60119f400274491d3e942d4f9fbcebd6 | #!/usr/bin/env python
from __future__ import print_function
from __future__ import unicode_literals
try:
from unittest import mock
except ImportError:
import mock
from tdclient import models
from tdclient.test.test_helper import *
def setup_function(function):
unset_environ()
def test_database():
client = mock.MagicMock()
database = models.Database(client, "sample_datasets", tables=["nasdaq", "www_access"], count=12345, created_at="created_at", updated_at="updated_at", org_name="org_name", permission="administrator")
assert database.org_name == "org_name"
assert database.permission == "administrator"
assert database.count == 12345
assert database.name == "sample_datasets"
assert database.tables() == ["nasdaq", "www_access"]
assert database.created_at == "created_at"
assert database.updated_at == "updated_at"
def test_database_update_tables():
client = mock.MagicMock()
client.tables = mock.MagicMock(return_value=[
models.Table(client, "sample_datasets", "foo", "type", "schema", "count"),
models.Table(client, "sample_datasets", "bar", "type", "schema", "count"),
models.Table(client, "sample_datasets", "baz", "type", "schema", "count"),
])
database = models.Database(client, "sample_datasets", tables=None, count=12345, created_at="created_at", updated_at="updated_at", org_name="org_name", permission="administrator")
tables = database.tables()
assert [ table.name for table in tables ] == ["foo", "bar", "baz"]
client.tables.assert_called_with("sample_datasets")
| [((18, 13, 18, 29), 'mock.MagicMock', 'mock.MagicMock', ({}, {}), '()', False, 'import mock\n'), ((19, 15, 19, 202), 'tdclient.models.Database', 'models.Database', (), '', False, 'from tdclient import models\n'), ((29, 13, 29, 29), 'mock.MagicMock', 'mock.MagicMock', ({}, {}), '()', False, 'import mock\n'), ((35, 15, 35, 182), 'tdclient.models.Database', 'models.Database', (), '', False, 'from tdclient import models\n'), ((31, 8, 31, 81), 'tdclient.models.Table', 'models.Table', ({(31, 21, 31, 27): 'client', (31, 29, 31, 46): '"""sample_datasets"""', (31, 48, 31, 53): '"""foo"""', (31, 55, 31, 61): '"""type"""', (31, 63, 31, 71): '"""schema"""', (31, 73, 31, 80): '"""count"""'}, {}), "(client, 'sample_datasets', 'foo', 'type', 'schema', 'count')", False, 'from tdclient import models\n'), ((32, 8, 32, 81), 'tdclient.models.Table', 'models.Table', ({(32, 21, 32, 27): 'client', (32, 29, 32, 46): '"""sample_datasets"""', (32, 48, 32, 53): '"""bar"""', (32, 55, 32, 61): '"""type"""', (32, 63, 32, 71): '"""schema"""', (32, 73, 32, 80): '"""count"""'}, {}), "(client, 'sample_datasets', 'bar', 'type', 'schema', 'count')", False, 'from tdclient import models\n'), ((33, 8, 33, 81), 'tdclient.models.Table', 'models.Table', ({(33, 21, 33, 27): 'client', (33, 29, 33, 46): '"""sample_datasets"""', (33, 48, 33, 53): '"""baz"""', (33, 55, 33, 61): '"""type"""', (33, 63, 33, 71): '"""schema"""', (33, 73, 33, 80): '"""count"""'}, {}), "(client, 'sample_datasets', 'baz', 'type', 'schema', 'count')", False, 'from tdclient import models\n')] |
ballcap231/fireTS | setup.py | 74cc89a14d67edabf31139d1552025d54791f2a9 | from setuptools import setup
dependencies = [
'numpy',
'scipy',
'scikit-learn',
]
setup(
name='fireTS',
version='0.0.7',
description='A python package for multi-variate time series prediction',
long_description=open('README.md').read(),
long_description_content_type="text/markdown",
url='https://github.com/jxx123/fireTS.git',
author='Jinyu Xie',
author_email='[email protected]',
license='MIT',
packages=['fireTS'],
install_requires=dependencies,
include_package_data=True,
zip_safe=False)
| [] |
heyihan/scodes | euler/py/project_019.py | 342518b548a723916c9273d8ebc1b345a0467e76 | # https://projecteuler.net/problem=19
def is_leap(year):
if year%4 != 0:
return False
if year%100 == 0 and year%400 != 0:
return False
return True
def year_days(year):
if is_leap(year):
return 366
return 365
def month_days(month, year):
if month == 4 or month == 6 or month == 9 or month == 11:
return 30
if month == 2:
if is_leap(year):
return 29
return 28
return 31
day_19000101 = 1  # 1 Jan 1900 was a Monday; weekdays are encoded 0=Sunday ... 6=Saturday
days_1900 = year_days(1900)
day_next_day1 = (day_19000101 + days_1900)%7  # weekday of 1 Jan 1901
print(day_19000101, days_1900, day_next_day1)
sum = 0
for i in range(1901, 2001):
for j in range(1, 13):
if day_next_day1 == 0:
print(i, j)
sum = sum + 1
days = month_days(j, i)
day_next_day1 = (day_next_day1 + days)%7
#print(i, j, days, day_next_day1)
print(sum)
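# Optional cross-check (not part of the original solution): Python's datetime
# module counts the same thing directly and should print the same total.
from datetime import date
check = 0
for y in range(1901, 2001):
	for m in range(1, 13):
		if date(y, m, 1).weekday() == 6:  # date.weekday(): Monday=0 ... Sunday=6
			check = check + 1
print(check)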
| [] |
wowsuchnamaste/address_book | address_book/address_book.py | 4877d16d795c54b750e151fa93e69c080717ae72 | """A simple address book."""
from ._tools import generate_uuid
class AddressBook:
"""
A simple address book.
"""
def __init__(self):
self._entries = []
def add_entry(self, entry):
"""Add an entry to the address book."""
self._entries.append(entry)
def get_entries(self):
"""Returns a list of all entries in the address book.
        :return: ``list`` of ``Entry`` objects.
"""
return self._entries
    def get_entry(self, name):
        """Return the first entry whose ``name`` attribute matches ``name``."""
        entry = [entry for entry in self._entries if entry.name == name]
return entry[0]
class Entry:
def __init__(
self,
name,
first_name=None,
last_name=None,
address=None,
phone_number=None,
email=None,
organization=None,
):
self._uuid = generate_uuid()
self.name = name
self.first_name = first_name
self.last_name = last_name
self._parse_name(name)
self.address = address
self.phone_number = phone_number
self.email = email
self.organization = organization
def __repr__(self):
return self.name
def _parse_name(self, name):
"""
Parse whatever is passed as ``name`` and update ``self.name`` from that.
:param name: A person's name as string or dictionary.
:return: The method doesn't return anything.
"""
if type(name) == dict:
self.first_name = name["first_name"]
self.last_name = name["last_name"]
self.name = self.first_name + " " + self.last_name
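# Illustrative usage sketch (not part of the original module); the name and
# email below are made-up example data.
def _demo_usage():
    book = AddressBook()
    book.add_entry(Entry({"first_name": "Ada", "last_name": "Lovelace"},
                         email="ada@example.org"))
    return book.get_entry("Ada Lovelace").email  # -> 'ada@example.org'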
| [] |
zzhang87/ChestXray | inference.py | eaafe2f7f5e91bb30fbed02dec1f77ff314434b5 | import keras
import numpy as np
import pandas as pd
import cv2
import os
import json
import pdb
import argparse
import math
import copy
from vis.visualization import visualize_cam, overlay, visualize_activation
from vis.utils.utils import apply_modifications
from shutil import rmtree
import matplotlib.cm as cm
from matplotlib import pyplot as plt
from sklearn import metrics
import keras.backend as K
from keras import activations
from keras.applications.inception_v3 import preprocess_input as inception_pre
from keras.applications.mobilenet import preprocess_input as mobilenet_pre
from keras.applications.resnet50 import preprocess_input as resnet_pre
from keras.applications.densenet import preprocess_input as densenet_pre
from datagenerator import ImageDataGenerator
from utils import load_model
def getCAM(model, image):
# weights of the final fully-connected layer
weights = model.layers[-1].get_weights()[0]
# activation before the last global pooling
for layer in reversed(model.layers):
if len(layer.output_shape) > 2:
break
function = K.function([model.layers[0].input, K.learning_phase()], [layer.output])
activation = np.squeeze(function([image, 0])[0])
# weighted sum of the activation map
CAM = np.dot(activation, weights)
return CAM
def main():
ap = argparse.ArgumentParser()
ap.add_argument('--ckpt_path', help = 'Path to the model checkpoint.')
ap.add_argument('--image_path', help = 'Path to the image to run inference on.')
ap.add_argument('--bnbox', help = 'Path to the bounding box annotation, if applies.')
	ap.add_argument('--threshold', type = float, default = 0.5, help = 'Threshold for displaying the Class Activation Map.')
args = ap.parse_args()
model_dir = os.path.dirname(args.ckpt_path)
with open(os.path.join(model_dir, 'label_map.json'), 'r') as f:
label_map = json.load(f)
num_class = len(list(label_map.keys()))
model, model_config = load_model(model_dir, args.ckpt_path)
model_name = model_config['model_name']
if model_name in ['inception']:
image_size = 299
else:
image_size = 224
preprocess_input = {
'inception': inception_pre,
'resnet': resnet_pre,
'mobilenet': mobilenet_pre,
'densenet': densenet_pre
}
	bnbox = {}  # stays empty when no bounding-box annotation is supplied
	if args.bnbox is not None:
		annotation = pd.read_csv(args.bnbox)
		image_index = os.path.basename(args.image_path)
		indices = np.where(annotation['Image Index'] == image_index)[0]
		for i in indices:
			disease = annotation['Finding Label'][i]
			x = int(annotation['Bbox [x'][i] + 0.5)
			y = int(annotation['y'][i] + 0.5)
			w = int(annotation['w'][i] + 0.5)
			h = int(annotation['h]'][i] + 0.5)
			bnbox[disease] = [x, y, x + w, y + h]
image = cv2.imread(args.image_path)
img = cv2.resize(image, (image_size, image_size))
img = preprocess_input[model_name](img.astype(np.float32))
img = np.expand_dims(img, axis = 0)
predictions = np.squeeze(model.predict(img))
CAM = getCAM(model, img)
cv2.namedWindow("ChestXray", cv2.WINDOW_NORMAL)
for key, value in label_map.items():
heatmap = CAM[:,:,int(key)]
heatmap -= heatmap.min()
heatmap *= 255.0 / heatmap.max()
heatmap[np.where(heatmap < args.threshold * 255)] *= 0.1
heatmap = cv2.applyColorMap(heatmap.astype(np.uint8), cv2.COLORMAP_JET)
heatmap = cv2.resize(heatmap, image.shape[:2], cv2.INTER_AREA)
overlay_img = overlay(heatmap, image, alpha = 0.4)
cv2.putText(overlay_img, "{}: {:.2%}".format(value, predictions[int(key)]),
(30,30), cv2.FONT_HERSHEY_DUPLEX, 1.0, (255,255,255), 2)
if value in bnbox.keys():
box = bnbox[value]
cv2.rectangle(overlay_img, (box[0], box[1]), (box[2], box[3]),
color = (0, 180, 0), thickness = 2)
cv2.imshow("ChestXray", overlay_img)
cv2.waitKey()
plt.show()
print('{}: {:.2%}'.format(value, predictions[int(key)]))
cv2.destroyAllWindows()
if __name__ == "__main__":
main() | [((41, 7, 41, 34), 'numpy.dot', 'np.dot', ({(41, 14, 41, 24): 'activation', (41, 26, 41, 33): 'weights'}, {}), '(activation, weights)', True, 'import numpy as np\n'), ((47, 6, 47, 31), 'argparse.ArgumentParser', 'argparse.ArgumentParser', ({}, {}), '()', False, 'import argparse\n'), ((55, 13, 55, 44), 'os.path.dirname', 'os.path.dirname', ({(55, 29, 55, 43): 'args.ckpt_path'}, {}), '(args.ckpt_path)', False, 'import os\n'), ((62, 23, 62, 60), 'utils.load_model', 'load_model', ({(62, 34, 62, 43): 'model_dir', (62, 45, 62, 59): 'args.ckpt_path'}, {}), '(model_dir, args.ckpt_path)', False, 'from utils import load_model\n'), ((82, 15, 82, 48), 'os.path.basename', 'os.path.basename', ({(82, 32, 82, 47): 'args.image_path'}, {}), '(args.image_path)', False, 'import os\n'), ((96, 9, 96, 36), 'cv2.imread', 'cv2.imread', ({(96, 20, 96, 35): 'args.image_path'}, {}), '(args.image_path)', False, 'import cv2\n'), ((98, 7, 98, 50), 'cv2.resize', 'cv2.resize', ({(98, 18, 98, 23): 'image', (98, 25, 98, 49): '(image_size, image_size)'}, {}), '(image, (image_size, image_size))', False, 'import cv2\n'), ((102, 7, 102, 36), 'numpy.expand_dims', 'np.expand_dims', (), '', True, 'import numpy as np\n'), ((108, 1, 108, 48), 'cv2.namedWindow', 'cv2.namedWindow', ({(108, 17, 108, 28): '"""ChestXray"""', (108, 30, 108, 47): 'cv2.WINDOW_NORMAL'}, {}), "('ChestXray', cv2.WINDOW_NORMAL)", False, 'import cv2\n'), ((138, 1, 138, 24), 'cv2.destroyAllWindows', 'cv2.destroyAllWindows', ({}, {}), '()', False, 'import cv2\n'), ((58, 14, 58, 26), 'json.load', 'json.load', ({(58, 24, 58, 25): 'f'}, {}), '(f)', False, 'import json\n'), ((80, 15, 80, 38), 'pandas.read_csv', 'pd.read_csv', ({(80, 27, 80, 37): 'args.bnbox'}, {}), '(args.bnbox)', True, 'import pandas as pd\n'), ((84, 11, 84, 61), 'numpy.where', 'np.where', ({(84, 20, 84, 60): "(annotation['Image Index'] == image_index)"}, {}), "(annotation['Image Index'] == image_index)", True, 'import numpy as np\n'), ((119, 12, 119, 64), 'cv2.resize', 'cv2.resize', ({(119, 23, 119, 30): 'heatmap', (119, 32, 119, 47): 'image.shape[:2]', (119, 49, 119, 63): 'cv2.INTER_AREA'}, {}), '(heatmap, image.shape[:2], cv2.INTER_AREA)', False, 'import cv2\n'), ((121, 16, 121, 52), 'vis.visualization.overlay', 'overlay', (), '', False, 'from vis.visualization import visualize_cam, overlay, visualize_activation\n'), ((131, 2, 131, 38), 'cv2.imshow', 'cv2.imshow', ({(131, 13, 131, 24): '"""ChestXray"""', (131, 26, 131, 37): 'overlay_img'}, {}), "('ChestXray', overlay_img)", False, 'import cv2\n'), ((132, 2, 132, 15), 'cv2.waitKey', 'cv2.waitKey', ({}, {}), '()', False, 'import cv2\n'), ((134, 2, 134, 12), 'matplotlib.pyplot.show', 'plt.show', ({}, {}), '()', True, 'from matplotlib import pyplot as plt\n'), ((37, 47, 37, 65), 'keras.backend.learning_phase', 'K.learning_phase', ({}, {}), '()', True, 'import keras.backend as K\n'), ((57, 11, 57, 52), 'os.path.join', 'os.path.join', ({(57, 24, 57, 33): 'model_dir', (57, 35, 57, 51): '"""label_map.json"""'}, {}), "(model_dir, 'label_map.json')", False, 'import os\n'), ((128, 3, 129, 42), 'cv2.rectangle', 'cv2.rectangle', (), '', False, 'import cv2\n'), ((115, 10, 115, 50), 'numpy.where', 'np.where', ({(115, 19, 115, 49): '(heatmap < args.threshold * 255)'}, {}), '(heatmap < args.threshold * 255)', True, 'import numpy as np\n')] |
MistSun-Chen/py_verifier | test/DQueueTest.py | 7e9161d1fdbb611fe4be5eeb2f89a6286fa7b555 | from libTask import Queue
from common import configParams
from common import common
def main():
cp = configParams.ConfigParams("config.json")
detectGeneralQueue = Queue.DQueue(cp, len(cp.detect_general_ids), cp.modelPath, common.GENERALDETECT_METHOD_ID,
cp.GPUDevices, cp.detect_general_ids)
print("Run Into Next step")
smokeQueue = Queue.DQueue(cp, len(cp.smoke_ids), cp.modelPath, common.PEOPLESMOKE_METHOD_ID,cp.GPUDevices, cp.smoke_ids)
if __name__ == '__main__':
main() | [((5, 9, 5, 49), 'common.configParams.ConfigParams', 'configParams.ConfigParams', ({(5, 35, 5, 48): '"""config.json"""'}, {}), "('config.json')", False, 'from common import configParams\n')] |
volgachen/Chinese-Tokenization | config.py | 467e08da6fe271b6e33258d5aa6682c0405a3f32 | class Config:
ngram = 2
train_set = "data/rmrb.txt"
modified_train_set = "data/rmrb_modified.txt"
test_set = ""
model_file = ""
param_file = ""
word_max_len = 10
proposals_keep_ratio = 1.0
use_re = 1
subseq_num = 15 | [] |
python-itb/knn-from-scratch | src/Knn-Tensor.py | dbc6fb53cffb245a76d35b9ff85ac8cb21877ca8 | #!/usr/bin/env python3
# -*- coding: utf-8 -*-
"""
Created on Tue Feb 13 18:52:28 2018
@author: amajidsinar
"""
from sklearn import datasets
import matplotlib.pyplot as plt
import numpy as np
plt.style.use('seaborn-white')
iris = datasets.load_iris()
dataset = iris.data
# only take the 0th and 1st columns (sepal length and width) as X
data_known = iris.data[:,:2]
# y
label_known = iris.target
# matplotlib does not label scatter points by class automatically,
# but every point drawn in a single plt.scatter call shares one colour,
# so plot each class separately to get one colour (and legend entry) per class
category = np.unique(label_known)
for i in category:
plt.scatter(data_known[label_known==i][:,0],data_known[label_known==i][:,1],label=i)
# Unknown class of a data
data_unknown = np.array([[5.7,3.3],[5.6,3.4],[6.4,3],[8.2,2.2]])
plt.scatter(data_unknown[:,0],data_unknown[:,1], label='?')
plt.legend()
#-------------
# Euclidean Distance
diff = data_known - data_unknown.reshape(data_unknown.shape[0],1,data_unknown.shape[1])
distance = (diff**2).sum(2)
#return sorted index of distance
dist_index = np.argsort(distance)
label = label_known[dist_index]
# number of nearest neighbours used in the majority vote (any k in 1..10 works here)
k = 10
label = label[:,:k]
label_predict = []
for i in range(data_unknown.shape[0]):
values,counts = np.unique(label[i], return_counts=True)
ind = np.argmax(counts)
label_predict.append(values[ind])
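# Optional sanity check (not in the original script): scikit-learn's
# KNeighborsClassifier with the same k and Euclidean distance should agree
# with the majority vote above (ties may be broken differently).
from sklearn.neighbors import KNeighborsClassifier
knn = KNeighborsClassifier(n_neighbors=k)
knn.fit(data_known, label_known)
print(label_predict)
print(knn.predict(data_unknown))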
| [((12, 0, 12, 30), 'matplotlib.pyplot.style.use', 'plt.style.use', ({(12, 14, 12, 29): '"""seaborn-white"""'}, {}), "('seaborn-white')", True, 'import matplotlib.pyplot as plt\n'), ((14, 7, 14, 27), 'sklearn.datasets.load_iris', 'datasets.load_iris', ({}, {}), '()', False, 'from sklearn import datasets\n'), ((26, 11, 26, 33), 'numpy.unique', 'np.unique', ({(26, 21, 26, 32): 'label_known'}, {}), '(label_known)', True, 'import numpy as np\n'), ((31, 15, 31, 64), 'numpy.array', 'np.array', ({(31, 24, 31, 63): '[[5.7, 3.3], [5.6, 3.4], [6.4, 3], [8.2, 2.2]]'}, {}), '([[5.7, 3.3], [5.6, 3.4], [6.4, 3], [8.2, 2.2]])', True, 'import numpy as np\n'), ((32, 0, 32, 59), 'matplotlib.pyplot.scatter', 'plt.scatter', (), '', True, 'import matplotlib.pyplot as plt\n'), ((33, 0, 33, 12), 'matplotlib.pyplot.legend', 'plt.legend', ({}, {}), '()', True, 'import matplotlib.pyplot as plt\n'), ((40, 13, 40, 33), 'numpy.argsort', 'np.argsort', ({(40, 24, 40, 32): 'distance'}, {}), '(distance)', True, 'import numpy as np\n'), ((28, 4, 28, 88), 'matplotlib.pyplot.scatter', 'plt.scatter', (), '', True, 'import matplotlib.pyplot as plt\n'), ((50, 21, 50, 60), 'numpy.unique', 'np.unique', (), '', True, 'import numpy as np\n'), ((51, 10, 51, 27), 'numpy.argmax', 'np.argmax', ({(51, 20, 51, 26): 'counts'}, {}), '(counts)', True, 'import numpy as np\n')] |
volpepe/detectron2-ResNeSt | de_test_tron2.py | 1481d50880baa615b873b7a18156c06a5606a85c | import torch, torchvision
import detectron2
from detectron2.utils.logger import setup_logger
setup_logger()
# import some common libraries
import numpy as np
import os, json, cv2, random
# import some common detectron2 utilities
from detectron2 import model_zoo
from detectron2.engine import DefaultPredictor
from detectron2.config import get_cfg
from detectron2.utils.visualizer import Visualizer
from detectron2.data import MetadataCatalog, DatasetCatalog
import argparse, time
def parse_args():
p = argparse.ArgumentParser()
p.add_argument("-i", "--image", type=str, help="Path to image to segment")
p.add_argument("-m", "--model", type=str, help="Model to use", default="COCO-InstanceSegmentation/mask_cascade_rcnn_ResNeSt_200_FPN_syncBN_all_tricks_3x.yaml")
p.add_argument("-t", "--threshold", type=float, help="Threshold for model detections", default=0.4)
p.add_argument("-rs", "--use_resnest", type=bool, help="Whether the selected model uses ResNeSt backbone or no", default=True)
return p.parse_args()
def start_segment(args):
img = args.image
model = args.model
thresh = args.threshold
use_resnest = args.use_resnest
im = cv2.imread(img)
# get default cfg file
cfg = get_cfg()
# replace cfg from specific model yaml file
cfg.merge_from_file(model_zoo.get_config_file(model))
cfg.MODEL.ROI_HEADS.SCORE_THRESH_TEST = thresh # set threshold for this model
# Find a model from detectron2's model zoo. You can use the https://dl.fbaipublicfiles... url as well
cfg.MODEL.WEIGHTS = model_zoo.get_checkpoint_url(model, resnest=use_resnest)
predictor = DefaultPredictor(cfg)
start = time.time()
outputs = predictor(im)
print("Time eplased: {}".format(time.time() - start))
v = Visualizer(im[:, :, ::-1], MetadataCatalog.get(cfg.DATASETS.TRAIN[0]), scale=1.2) #rgb image (::-1)
out = v.draw_instance_predictions(outputs["instances"].to("cpu"))
cv2.imwrite("output.jpg", out.get_image()[:, :, ::-1])
if __name__ == "__main__":
args = parse_args()
start_segment(args) | [((5, 0, 5, 14), 'detectron2.utils.logger.setup_logger', 'setup_logger', ({}, {}), '()', False, 'from detectron2.utils.logger import setup_logger\n'), ((21, 8, 21, 33), 'argparse.ArgumentParser', 'argparse.ArgumentParser', ({}, {}), '()', False, 'import argparse, time\n'), ((34, 9, 34, 24), 'cv2.imread', 'cv2.imread', ({(34, 20, 34, 23): 'img'}, {}), '(img)', False, 'import os, json, cv2, random\n'), ((37, 10, 37, 19), 'detectron2.config.get_cfg', 'get_cfg', ({}, {}), '()', False, 'from detectron2.config import get_cfg\n'), ((42, 24, 42, 80), 'detectron2.model_zoo.get_checkpoint_url', 'model_zoo.get_checkpoint_url', (), '', False, 'from detectron2 import model_zoo\n'), ((43, 16, 43, 37), 'detectron2.engine.DefaultPredictor', 'DefaultPredictor', ({(43, 33, 43, 36): 'cfg'}, {}), '(cfg)', False, 'from detectron2.engine import DefaultPredictor\n'), ((44, 12, 44, 23), 'time.time', 'time.time', ({}, {}), '()', False, 'import argparse, time\n'), ((39, 24, 39, 56), 'detectron2.model_zoo.get_config_file', 'model_zoo.get_config_file', ({(39, 50, 39, 55): 'model'}, {}), '(model)', False, 'from detectron2 import model_zoo\n'), ((47, 35, 47, 77), 'detectron2.data.MetadataCatalog.get', 'MetadataCatalog.get', ({(47, 55, 47, 76): 'cfg.DATASETS.TRAIN[0]'}, {}), '(cfg.DATASETS.TRAIN[0])', False, 'from detectron2.data import MetadataCatalog, DatasetCatalog\n'), ((46, 36, 46, 47), 'time.time', 'time.time', ({}, {}), '()', False, 'import argparse, time\n')] |
Pankrat/pika | pika/data.py | 9f62cbe032e9b4fa0fe1842587ce0702c3926a3d | """AMQP Table Encoding/Decoding"""
import struct
import decimal
import calendar
from datetime import datetime
from pika import exceptions
from pika.compat import unicode_type, PY2, long, as_bytes
def encode_short_string(pieces, value):
"""Encode a string value as short string and append it to pieces list
returning the size of the encoded value.
:param list pieces: Already encoded values
:param value: String value to encode
:type value: str or unicode
:rtype: int
"""
encoded_value = as_bytes(value)
length = len(encoded_value)
# 4.2.5.3
# Short strings, stored as an 8-bit unsigned integer length followed by zero
# or more octets of data. Short strings can carry up to 255 octets of UTF-8
# data, but may not contain binary zero octets.
# ...
# 4.2.5.5
# The server SHOULD validate field names and upon receiving an invalid field
# name, it SHOULD signal a connection exception with reply code 503 (syntax
# error).
# -> validate length (avoid truncated utf-8 / corrupted data), but skip null
# byte check.
if length > 255:
raise exceptions.ShortStringTooLong(encoded_value)
pieces.append(struct.pack('B', length))
pieces.append(encoded_value)
return 1 + length
if PY2:
def decode_short_string(encoded, offset):
"""Decode a short string value from ``encoded`` data at ``offset``.
"""
length = struct.unpack_from('B', encoded, offset)[0]
offset += 1
        # Purely for compatibility with the original python2 code; it is not
        # clear what this does or why.
value = encoded[offset:offset + length]
try:
value = bytes(value)
except UnicodeEncodeError:
pass
offset += length
return value, offset
else:
def decode_short_string(encoded, offset):
"""Decode a short string value from ``encoded`` data at ``offset``.
"""
length = struct.unpack_from('B', encoded, offset)[0]
offset += 1
value = encoded[offset:offset + length].decode('utf8')
offset += length
return value, offset
def encode_table(pieces, table):
"""Encode a dict as an AMQP table appending the encded table to the
pieces list passed in.
:param list pieces: Already encoded frame pieces
:param dict table: The dict to encode
:rtype: int
"""
table = table or {}
length_index = len(pieces)
pieces.append(None) # placeholder
tablesize = 0
for (key, value) in table.items():
tablesize += encode_short_string(pieces, key)
tablesize += encode_value(pieces, value)
pieces[length_index] = struct.pack('>I', tablesize)
return tablesize + 4
def encode_value(pieces, value):
"""Encode the value passed in and append it to the pieces list returning
    the size of the encoded value.
:param list pieces: Already encoded values
:param any value: The value to encode
:rtype: int
"""
if PY2:
if isinstance(value, basestring):
if isinstance(value, unicode_type):
value = value.encode('utf-8')
pieces.append(struct.pack('>cI', b'S', len(value)))
pieces.append(value)
return 5 + len(value)
else:
# support only str on Python 3
if isinstance(value, str):
value = value.encode('utf-8')
pieces.append(struct.pack('>cI', b'S', len(value)))
pieces.append(value)
return 5 + len(value)
if isinstance(value, bool):
pieces.append(struct.pack('>cB', b't', int(value)))
return 2
if isinstance(value, long):
pieces.append(struct.pack('>cq', b'l', value))
return 9
elif isinstance(value, int):
pieces.append(struct.pack('>ci', b'I', value))
return 5
elif isinstance(value, decimal.Decimal):
value = value.normalize()
if value.as_tuple().exponent < 0:
decimals = -value.as_tuple().exponent
raw = int(value * (decimal.Decimal(10) ** decimals))
pieces.append(struct.pack('>cBi', b'D', decimals, raw))
else:
# per spec, the "decimals" octet is unsigned (!)
pieces.append(struct.pack('>cBi', b'D', 0, int(value)))
return 6
elif isinstance(value, datetime):
pieces.append(struct.pack('>cQ', b'T',
calendar.timegm(value.utctimetuple())))
return 9
elif isinstance(value, dict):
pieces.append(struct.pack('>c', b'F'))
return 1 + encode_table(pieces, value)
elif isinstance(value, list):
p = []
for v in value:
encode_value(p, v)
piece = b''.join(p)
pieces.append(struct.pack('>cI', b'A', len(piece)))
pieces.append(piece)
return 5 + len(piece)
elif value is None:
pieces.append(struct.pack('>c', b'V'))
return 1
else:
raise exceptions.UnsupportedAMQPFieldException(pieces, value)
def decode_table(encoded, offset):
"""Decode the AMQP table passed in from the encoded value returning the
decoded result and the number of bytes read plus the offset.
:param str encoded: The binary encoded data to decode
:param int offset: The starting byte offset
:rtype: tuple
"""
result = {}
tablesize = struct.unpack_from('>I', encoded, offset)[0]
offset += 4
limit = offset + tablesize
while offset < limit:
key, offset = decode_short_string(encoded, offset)
value, offset = decode_value(encoded, offset)
result[key] = value
return result, offset
def decode_value(encoded, offset):
"""Decode the value passed in returning the decoded value and the number
of bytes read in addition to the starting offset.
:param str encoded: The binary encoded data to decode
:param int offset: The starting byte offset
:rtype: tuple
:raises: pika.exceptions.InvalidFieldTypeException
"""
# slice to get bytes in Python 3 and str in Python 2
kind = encoded[offset:offset + 1]
offset += 1
# Bool
if kind == b't':
value = struct.unpack_from('>B', encoded, offset)[0]
value = bool(value)
offset += 1
# Short-Short Int
elif kind == b'b':
        value = struct.unpack_from('>b', encoded, offset)[0]
offset += 1
# Short-Short Unsigned Int
elif kind == b'B':
        value = struct.unpack_from('>B', encoded, offset)[0]
offset += 1
# Short Int
elif kind == b'U':
value = struct.unpack_from('>h', encoded, offset)[0]
offset += 2
# Short Unsigned Int
elif kind == b'u':
value = struct.unpack_from('>H', encoded, offset)[0]
offset += 2
# Long Int
elif kind == b'I':
value = struct.unpack_from('>i', encoded, offset)[0]
offset += 4
# Long Unsigned Int
elif kind == b'i':
value = struct.unpack_from('>I', encoded, offset)[0]
offset += 4
# Long-Long Int
elif kind == b'L':
value = long(struct.unpack_from('>q', encoded, offset)[0])
offset += 8
# Long-Long Unsigned Int
elif kind == b'l':
value = long(struct.unpack_from('>Q', encoded, offset)[0])
offset += 8
# Float
elif kind == b'f':
        value = struct.unpack_from('>f', encoded, offset)[0]
offset += 4
# Double
elif kind == b'd':
        value = struct.unpack_from('>d', encoded, offset)[0]
offset += 8
# Decimal
elif kind == b'D':
decimals = struct.unpack_from('B', encoded, offset)[0]
offset += 1
raw = struct.unpack_from('>i', encoded, offset)[0]
offset += 4
value = decimal.Decimal(raw) * (decimal.Decimal(10) ** -decimals)
# Short String
elif kind == b's':
value, offset = decode_short_string(encoded, offset)
# Long String
elif kind == b'S':
length = struct.unpack_from('>I', encoded, offset)[0]
offset += 4
value = encoded[offset:offset + length].decode('utf8')
offset += length
# Field Array
elif kind == b'A':
length = struct.unpack_from('>I', encoded, offset)[0]
offset += 4
offset_end = offset + length
value = []
while offset < offset_end:
v, offset = decode_value(encoded, offset)
value.append(v)
# Timestamp
elif kind == b'T':
value = datetime.utcfromtimestamp(struct.unpack_from('>Q', encoded,
offset)[0])
offset += 8
# Field Table
elif kind == b'F':
(value, offset) = decode_table(encoded, offset)
# Null / Void
elif kind == b'V':
value = None
else:
raise exceptions.InvalidFieldTypeException(kind)
return value, offset
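if __name__ == '__main__':
    # Minimal round-trip sketch (not part of pika itself): encode a small
    # field table with the helpers above and decode it back; the field names
    # and values are made-up example data.
    demo_pieces = []
    encode_table(demo_pieces, {'retries': 3, 'label': 'demo'})
    demo_table, _offset = decode_table(b''.join(demo_pieces), 0)
    print(demo_table)  # expected: {'retries': 3, 'label': 'demo'}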
| [((21, 20, 21, 35), 'pika.compat.as_bytes', 'as_bytes', ({(21, 29, 21, 34): 'value'}, {}), '(value)', False, 'from pika.compat import unicode_type, PY2, long, as_bytes\n'), ((87, 27, 87, 55), 'struct.pack', 'struct.pack', ({(87, 39, 87, 43): '""">I"""', (87, 45, 87, 54): 'tablesize'}, {}), "('>I', tablesize)", False, 'import struct\n'), ((36, 14, 36, 58), 'pika.exceptions.ShortStringTooLong', 'exceptions.ShortStringTooLong', ({(36, 44, 36, 57): 'encoded_value'}, {}), '(encoded_value)', False, 'from pika import exceptions\n'), ((38, 18, 38, 42), 'struct.pack', 'struct.pack', ({(38, 30, 38, 33): '"""B"""', (38, 35, 38, 41): 'length'}, {}), "('B', length)", False, 'import struct\n'), ((166, 16, 166, 57), 'struct.unpack_from', 'struct.unpack_from', ({(166, 35, 166, 39): '""">I"""', (166, 41, 166, 48): 'encoded', (166, 50, 166, 56): 'offset'}, {}), "('>I', encoded, offset)", False, 'import struct\n'), ((47, 17, 47, 57), 'struct.unpack_from', 'struct.unpack_from', ({(47, 36, 47, 39): '"""B"""', (47, 41, 47, 48): 'encoded', (47, 50, 47, 56): 'offset'}, {}), "('B', encoded, offset)", False, 'import struct\n'), ((63, 17, 63, 57), 'struct.unpack_from', 'struct.unpack_from', ({(63, 36, 63, 39): '"""B"""', (63, 41, 63, 48): 'encoded', (63, 50, 63, 56): 'offset'}, {}), "('B', encoded, offset)", False, 'import struct\n'), ((119, 22, 119, 53), 'struct.pack', 'struct.pack', ({(119, 34, 119, 39): '""">cq"""', (119, 41, 119, 45): "b'l'", (119, 47, 119, 52): 'value'}, {}), "('>cq', b'l', value)", False, 'import struct\n'), ((192, 16, 192, 57), 'struct.unpack_from', 'struct.unpack_from', ({(192, 35, 192, 39): '""">B"""', (192, 41, 192, 48): 'encoded', (192, 50, 192, 56): 'offset'}, {}), "('>B', encoded, offset)", False, 'import struct\n'), ((122, 22, 122, 53), 'struct.pack', 'struct.pack', ({(122, 34, 122, 39): '""">ci"""', (122, 41, 122, 45): "b'I'", (122, 47, 122, 52): 'value'}, {}), "('>ci', b'I', value)", False, 'import struct\n'), ((198, 16, 198, 57), 'struct.unpack_from', 'struct.unpack_from', ({(198, 35, 198, 39): '""">B"""', (198, 41, 198, 48): 'encoded', (198, 50, 198, 56): 'offset'}, {}), "('>B', encoded, offset)", False, 'import struct\n'), ((203, 16, 203, 57), 'struct.unpack_from', 'struct.unpack_from', ({(203, 35, 203, 39): '""">b"""', (203, 41, 203, 48): 'encoded', (203, 50, 203, 56): 'offset'}, {}), "('>b', encoded, offset)", False, 'import struct\n'), ((129, 26, 129, 66), 'struct.pack', 'struct.pack', ({(129, 38, 129, 44): '""">cBi"""', (129, 46, 129, 50): "b'D'", (129, 52, 129, 60): 'decimals', (129, 62, 129, 65): 'raw'}, {}), "('>cBi', b'D', decimals, raw)", False, 'import struct\n'), ((208, 16, 208, 57), 'struct.unpack_from', 'struct.unpack_from', ({(208, 35, 208, 39): '""">h"""', (208, 41, 208, 48): 'encoded', (208, 50, 208, 56): 'offset'}, {}), "('>h', encoded, offset)", False, 'import struct\n'), ((139, 22, 139, 45), 'struct.pack', 'struct.pack', ({(139, 34, 139, 38): '""">c"""', (139, 40, 139, 44): "b'F'"}, {}), "('>c', b'F')", False, 'import struct\n'), ((213, 16, 213, 57), 'struct.unpack_from', 'struct.unpack_from', ({(213, 35, 213, 39): '""">H"""', (213, 41, 213, 48): 'encoded', (213, 50, 213, 56): 'offset'}, {}), "('>H', encoded, offset)", False, 'import struct\n'), ((128, 31, 128, 50), 'decimal.Decimal', 'decimal.Decimal', ({(128, 47, 128, 49): '10'}, {}), '(10)', False, 'import decimal\n'), ((153, 14, 153, 69), 'pika.exceptions.UnsupportedAMQPFieldException', 'exceptions.UnsupportedAMQPFieldException', ({(153, 55, 153, 61): 'pieces', (153, 63, 153, 68): 'value'}, {}), '(pieces, 
value)', False, 'from pika import exceptions\n'), ((218, 16, 218, 57), 'struct.unpack_from', 'struct.unpack_from', ({(218, 35, 218, 39): '""">i"""', (218, 41, 218, 48): 'encoded', (218, 50, 218, 56): 'offset'}, {}), "('>i', encoded, offset)", False, 'import struct\n'), ((150, 22, 150, 45), 'struct.pack', 'struct.pack', ({(150, 34, 150, 38): '""">c"""', (150, 40, 150, 44): "b'V'"}, {}), "('>c', b'V')", False, 'import struct\n'), ((223, 16, 223, 57), 'struct.unpack_from', 'struct.unpack_from', ({(223, 35, 223, 39): '""">I"""', (223, 41, 223, 48): 'encoded', (223, 50, 223, 56): 'offset'}, {}), "('>I', encoded, offset)", False, 'import struct\n'), ((228, 21, 228, 62), 'struct.unpack_from', 'struct.unpack_from', ({(228, 40, 228, 44): '""">q"""', (228, 46, 228, 53): 'encoded', (228, 55, 228, 61): 'offset'}, {}), "('>q', encoded, offset)", False, 'import struct\n'), ((233, 21, 233, 62), 'struct.unpack_from', 'struct.unpack_from', ({(233, 40, 233, 44): '""">Q"""', (233, 46, 233, 53): 'encoded', (233, 55, 233, 61): 'offset'}, {}), "('>Q', encoded, offset)", False, 'import struct\n'), ((238, 21, 238, 62), 'struct.unpack_from', 'struct.unpack_from', ({(238, 40, 238, 44): '""">f"""', (238, 46, 238, 53): 'encoded', (238, 55, 238, 61): 'offset'}, {}), "('>f', encoded, offset)", False, 'import struct\n'), ((243, 21, 243, 62), 'struct.unpack_from', 'struct.unpack_from', ({(243, 40, 243, 44): '""">d"""', (243, 46, 243, 53): 'encoded', (243, 55, 243, 61): 'offset'}, {}), "('>d', encoded, offset)", False, 'import struct\n'), ((248, 19, 248, 59), 'struct.unpack_from', 'struct.unpack_from', ({(248, 38, 248, 41): '"""B"""', (248, 43, 248, 50): 'encoded', (248, 52, 248, 58): 'offset'}, {}), "('B', encoded, offset)", False, 'import struct\n'), ((250, 14, 250, 55), 'struct.unpack_from', 'struct.unpack_from', ({(250, 33, 250, 37): '""">i"""', (250, 39, 250, 46): 'encoded', (250, 48, 250, 54): 'offset'}, {}), "('>i', encoded, offset)", False, 'import struct\n'), ((252, 16, 252, 36), 'decimal.Decimal', 'decimal.Decimal', ({(252, 32, 252, 35): 'raw'}, {}), '(raw)', False, 'import decimal\n'), ((252, 40, 252, 59), 'decimal.Decimal', 'decimal.Decimal', ({(252, 56, 252, 58): '(10)'}, {}), '(10)', False, 'import decimal\n'), ((260, 17, 260, 58), 'struct.unpack_from', 'struct.unpack_from', ({(260, 36, 260, 40): '""">I"""', (260, 42, 260, 49): 'encoded', (260, 51, 260, 57): 'offset'}, {}), "('>I', encoded, offset)", False, 'import struct\n'), ((267, 17, 267, 58), 'struct.unpack_from', 'struct.unpack_from', ({(267, 36, 267, 40): '""">I"""', (267, 42, 267, 49): 'encoded', (267, 51, 267, 57): 'offset'}, {}), "('>I', encoded, offset)", False, 'import struct\n'), ((277, 42, 278, 68), 'struct.unpack_from', 'struct.unpack_from', ({(277, 61, 277, 65): '""">Q"""', (277, 67, 277, 74): 'encoded', (278, 61, 278, 67): 'offset'}, {}), "('>Q', encoded, offset)", False, 'import struct\n'), ((289, 14, 289, 56), 'pika.exceptions.InvalidFieldTypeException', 'exceptions.InvalidFieldTypeException', ({(289, 51, 289, 55): 'kind'}, {}), '(kind)', False, 'from pika import exceptions\n')] |
Agi-dev/pylaas_core | tests/fixtures/data_sets/service/dummy/dummy_configurable.py | c44866b5e57eb6f05f5b2b8d731f22d62a8c01c2 | from pylaas_core.abstract.abstract_service import AbstractService
import time
from pylaas_core.interface.technical.container_configurable_aware_interface import ContainerConfigurableAwareInterface
class DummyConfigurable(AbstractService, ContainerConfigurableAwareInterface):
def __init__(self) -> None:
super().__init__()
self._microtime = int(round(time.time() * 1000))
self._configs = None
def set_configs(self, configurations):
self._configs = configurations
return self
| [((11, 36, 11, 47), 'time.time', 'time.time', ({}, {}), '()', False, 'import time\n')] |
IVAN-URBACZKA/django-blog | blogtech/src/blog/views.py | 7ef6050c0de2938791843c3ec93e6e6a1e683baa | from django.urls import reverse_lazy, reverse
from django.utils.decorators import method_decorator
from django.views.generic import ListView, DetailView, CreateView, DeleteView, UpdateView
from .models import BlogPost
from django.contrib.auth.decorators import login_required
class BlogPostHomeView(ListView):
model = BlogPost
context_object_name = "posts"
class BlogPostDetailsView(DetailView):
model = BlogPost
context_object_name = "post"
@method_decorator(login_required, name='dispatch')
class BlogPostCreateView(CreateView):
model = BlogPost
fields = ['title', 'image','author', 'category', 'content']
def get_success_url(self):
return reverse('posts:home')
@method_decorator(login_required, name='dispatch')
class BlogPostUpdateView(UpdateView):
model = BlogPost
fields = ['title', 'author', 'category', 'content']
template_name = 'blog/blogpost_update.html'
@method_decorator(login_required, name='dispatch')
class BlogPostDeleteView(DeleteView):
model = BlogPost
success_url = reverse_lazy('posts:home') | [((17, 1, 17, 50), 'django.utils.decorators.method_decorator', 'method_decorator', (), '', False, 'from django.utils.decorators import method_decorator\n'), ((25, 1, 25, 50), 'django.utils.decorators.method_decorator', 'method_decorator', (), '', False, 'from django.utils.decorators import method_decorator\n'), ((31, 1, 31, 50), 'django.utils.decorators.method_decorator', 'method_decorator', (), '', False, 'from django.utils.decorators import method_decorator\n'), ((34, 18, 34, 44), 'django.urls.reverse_lazy', 'reverse_lazy', ({(34, 31, 34, 43): '"""posts:home"""'}, {}), "('posts:home')", False, 'from django.urls import reverse_lazy, reverse\n'), ((23, 15, 23, 36), 'django.urls.reverse', 'reverse', ({(23, 23, 23, 35): '"""posts:home"""'}, {}), "('posts:home')", False, 'from django.urls import reverse_lazy, reverse\n')] |
Juxi/apb-baseline | apc_deep_vision/python/generate_data.py | fd47a5fd78cdfd75c68601a40ca4726d7d20c9ce | #! /usr/bin/env python
# ********************************************************************
# Software License Agreement (BSD License)
#
# Copyright (c) 2015, University of Colorado, Boulder
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions
# are met:
#
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above
# copyright notice, this list of conditions and the following
# disclaimer in the documentation and/or other materials provided
# with the distribution.
# * Neither the name of the University of Colorado Boulder
# nor the names of its contributors may be
# used to endorse or promote products derived
# from this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS
# FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE
# COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT,
# INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING,
# BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
# LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT
# LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN
# ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
# POSSIBILITY OF SUCH DAMAGE.
# ********************************************************************/
import cv2
import os
import numpy as np
if __name__ == '__main__':
import argparse
parser = argparse.ArgumentParser()
parser.add_argument("proposal_path", type=str,
help="relative path from python script to proposals, no slash")
parser.add_argument("--view", default=None,
help="true/1 shows each masked image")
args = parser.parse_args()
# args.proposal_path = "../test_proposals"
# args.proposal_path = args.proposal_path
included_extenstions = ['txt']
image_names = [fn[0:len(fn)-4] for fn in os.listdir(args.proposal_path)
if any(fn.endswith(ext) for ext in included_extenstions)]
for image_name in image_names:
load_path = args.proposal_path + '/' + image_name
image = cv2.imread(load_path + ".jpeg")
data = np.loadtxt(load_path + ".txt", str)
# If there is only one line, force data to be a list of lists anyway
# Note, only works for our data as first list item is a string
if isinstance(data[0], basestring):
data = [data]
# If any line does not conform to classification tl_x tl_y br_x br_y
# then forget about it
skip = False
for line in data:
if len(line) < 5:
skip = True
if skip:
continue
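        # (Editor's note: comment added for clarity, not part of the original file.)
        # Each proposal line is [label, tl_x, tl_y, br_x, br_y]; the loop below loads
        # the matching mask image, inverts and binarises it, crops the proposal box
        # out of the frame, applies the mask, and writes the masked and plain crops
        # into per-class 'masked' and 'cropped' folders.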
for i, proposal in zip(range(0,len(data)),data):
mask = cv2.imread(load_path + '_mask{0:04d}.jpeg'.format(i))
mask = np.invert(mask)
maskGray = cv2.cvtColor(mask, cv2.COLOR_BGR2GRAY)
ret, maskGray = cv2.threshold(maskGray,128,255,cv2.THRESH_BINARY)
print load_path + '_mask{0:04d}.jpeg'.format(i)
cropped = image[float(proposal[2]):float(proposal[4]), float(proposal[1]):float(proposal[3])]
masked = cv2.bitwise_and(cropped, cropped, mask = maskGray)
if args.view:
cv2.imshow("original", masked)
cv2.waitKey(0)
mask_directory = args.proposal_path + '/masked/' + proposal[0];
crop_directory = args.proposal_path + '/cropped/' + proposal[0];
if not os.path.exists(mask_directory):
os.makedirs(mask_directory)
if not os.path.exists(crop_directory):
os.makedirs(crop_directory)
cv2.imwrite(mask_directory + '/{}_{}.jpeg'.format(image_name,i), masked)
cv2.imwrite(crop_directory + '/{}_{}.jpeg'.format(image_name,i), cropped)
# item = data[]
# cropped = image[70:170, 440:540]
# startY:endY, startX:endX
# startX:startY, endX:endY
#
| [] |
shirshanka/fact-ory | stats.py | 9e6bae63ca7f8f534b811058efb8942004d6a37b | import numpy as np;
import sys
import matplotlib.pyplot as plt;
from matplotlib import cm;
from termcolor import colored;
class Stats():
def __init__(self, param1_range, param2_range):
self._total_times = 0;
self._total_time = 0.0;
self._wrong_answers = [];
self._time_dict = {};
self._param1_range = param1_range
self._param2_range = param2_range
self._param1_length = param1_range[1] - param1_range[0] + 1
self._param2_length = param2_range[1] - param2_range[0] + 1
self._red_color = 1.0
self._green_color = 0.3
self._cream_color = 0.6
self._default_color = np.nan
self._wrong_color = 1000.0
self._time_penalty = 2.0 # time penalty for wrong answer is 5 seconds
self._result_matrix = np.full((self._param1_length, self._param2_length), self._default_color)
def add_statistic(self, operator, param1,param2,ans,time_diff):
self.add_time_statistic(param1, param2, time_diff)
x_axis = param1 - self._param1_range[0]
y_axis = param2 - self._param2_range[0]
curr_value = self._result_matrix[x_axis][y_axis]
incr_value = time_diff
if (operator.evaluate(param1, param2) != ans):
# wrong answer
self.add_wrong_answer(param1,param2,ans)
incr_value = incr_value + self._time_penalty
else:
# right answer: do nothing
pass
if np.isnan(curr_value):
self._result_matrix[x_axis][y_axis] = incr_value
else:
self._result_matrix[x_axis][y_axis] = curr_value + incr_value
def add_time_statistic(self, param1, param2, time_diff):
self._total_times = self._total_times +1;
self._total_time = self._total_time + time_diff;
if not self._time_dict.has_key(param1):
self._time_dict[param1] = []
if not self._time_dict.has_key(param2):
self._time_dict[param2] = []
self._time_dict[param1].append(time_diff)
self._time_dict[param2].append(time_diff)
def add_wrong_answer(self, param1, param2, answer_given):
self._wrong_answers.append((param1,param2, answer_given))
def get_avg_time(self):
return (self._total_time / self._total_times);
def print_stats(self, operator):
sys.stdout.write("You took an average of %0.2f seconds to answer each question!\n" % self.get_avg_time());
if self._wrong_answers != []:
print("Here were the answers you got wrong...")
for (f1,f2,ans) in self._wrong_answers:
print ("%d %s %d = " % (f1,operator.symbol,f2)), colored("%d" % ans, "red"), "Correct answer is ", colored("%d" % operator.evaluate(f1,f2), "green")
row_labels = range(self._param1_range[0],self._param1_range[1]+1)
col_labels = range(self._param2_range[0],self._param2_range[1]+1)
#plt.matshow(self._result_matrix, cmap=cm.Spectral_r, vmin=0, vmax=1)
fig = plt.figure()
ax = fig.add_subplot(111)
cax = ax.matshow(self._result_matrix, interpolation='nearest', vmin=0)
fig.colorbar(cax)
plt.gca().set_aspect('auto')
row_ticks = range(len(row_labels))
col_ticks = range(len(col_labels))
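        # (Editor's note: comment added for clarity, not part of the original file.)
        # Thin out the tick labels so that at most roughly 10 appear on each axis.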
if (len(row_labels) > 10):
skip_every = int(len(row_labels) / 10);
row_labels = row_labels[0::skip_every]
row_ticks = row_ticks[0::skip_every]
if (len(col_labels) > 10):
skip_every = int(len(col_labels)/10)
col_labels = col_labels[0::skip_every]
col_ticks = col_ticks[0::skip_every]
plt.xticks(col_ticks, col_labels)
plt.yticks(row_ticks, row_labels)
plt.show()
if __name__=="__main__":
print "hello world"
| [] |
zjuchenyuan/EasyLogin | examples/peptidecutter/advanced.py | acc67187d902f20ec64d2d6b9eeb953e2a0ac77d | from EasyLogin import EasyLogin
from pprint import pprint
def peptidecutter(oneprotein):
a = EasyLogin(proxy="socks5://127.0.0.1:1080") #speed up by using proxy
a.post("http://web.expasy.org/cgi-bin/peptide_cutter/peptidecutter.pl",
"protein={}&enzyme_number=all_enzymes&special_enzyme=Chym&min_prob=&block_size=60&alphtable=alphtable&cleave_number=all&cleave_exactly=&cleave_range_min=&cleave_range_max=".format(oneprotein)
)
table=a.b.find("table",{"class":"proteomics2"})
tds=table.find_all("td")
result = []
oneline = []
i = 0
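    # (Editor's note: comment added for clarity, not part of the original file.)
    # The result table is read three cells per row: enzyme name, number of
    # cleavages (int), then the list of cleavage positions (ints).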
for td in tds:
i+=1
if i==1:
content = td.text
elif i==2:
content = int(td.text)
else:
content = [int(i) for i in td.text.split()]
oneline.append(content)
if i==3:
result.append(oneline)
oneline=[]
i=0
return result
def fasta_reader(filename):
filecontents = open(filename).read().split("\n")
name = ""
thedata = ""
result=[]
for line in filecontents:
if not len(line): continue
if line[0]=='>':
if len(thedata):
result.append([name,thedata])
thedata = ""
name = line
else:
thedata += line
result.append([name,thedata])#don't forget the last one
return result
def peptidecutter_more(filename):
return [ [name,peptidecutter(oneprotein)] for name,oneprotein in fasta_reader(filename) ]
if __name__ == "__main__":
#pprint(peptidecutter("SERVELAT"))
import sys
pprint(peptidecutter_more(sys.argv[1]))
| [((5, 8, 5, 50), 'EasyLogin.EasyLogin', 'EasyLogin', (), '', False, 'from EasyLogin import EasyLogin\n')] |
pointerish/pgn2fixture | pgn2fixture/tests/test_utils.py | 02039680acc37cbca22fb332738e34cd113831a4 | import unittest
from .. import utils
class TestUtils(unittest.TestCase):
def setUp(self) -> None:
self.pgn_string = '''
[Event "US Championship 1963/64"]
[Site "New York, NY USA"]
[Date "1964.01.01"]
[EventDate "1963.??.??"]
[Round "11"][Result "0-1"]
[White "Anthony Saidy"]
[Black "Robert James Fischer"]
[ECO "A33"]
[WhiteElo "?"]
[BlackElo "?"][PlyCount "112"]
1. c4 0-1'''
def test_clean(self):
result = ['Event "US Championship 1963/64"', 'Site "New York, NY USA"', 'Date "1964.01.01"', 'EventDate "1963.??.??"', 'Round "11"', 'Result "0-1"',
'White "Anthony Saidy"', 'Black "Robert James Fischer"', 'ECO "A33"', 'WhiteElo "?"', 'BlackElo "?"', 'PlyCount "112"', '1. c4 0-1']
self.assertEqual(utils.clean(self.pgn_string), result)
def test_extract_tag_roster(self):
result = {'event': 'US Championship 1963/64', 'site': 'New York, NY USA', 'date': '1964.01.01', 'eventdate': '1963.??.??', 'round': '11', 'result': '0-1',
'white': 'Anthony Saidy', 'black': 'Robert James Fischer', 'eco': 'A33', 'whiteelo': '?', 'blackelo': '?', 'plycount': '112', 'moves': '1. c4 0-1'}
self.assertEqual(utils.extract_tag_roster(self.pgn_string), result)
| [] |
gouthampacha/manila | manila/tests/share/test_snapshot_access.py | 4b7ba9b99d272663f519b495668715fbf979ffbc | # Copyright (c) 2016 Hitachi Data Systems, Inc.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import copy
import ddt
import mock
from manila.common import constants
from manila import context
from manila import db
from manila import exception
from manila.share import snapshot_access
from manila import test
from manila.tests import db_utils
from manila import utils
@ddt.ddt
class SnapshotAccessTestCase(test.TestCase):
def setUp(self):
super(SnapshotAccessTestCase, self).setUp()
self.driver = self.mock_class("manila.share.driver.ShareDriver",
mock.Mock())
self.snapshot_access = snapshot_access.ShareSnapshotInstanceAccess(
db, self.driver)
self.context = context.get_admin_context()
share = db_utils.create_share()
self.snapshot = db_utils.create_snapshot(share_id=share['id'])
self.snapshot_instance = db_utils.create_snapshot_instance(
snapshot_id=self.snapshot['id'],
share_instance_id=self.snapshot['share']['instance']['id'])
@ddt.data(constants.ACCESS_STATE_QUEUED_TO_APPLY,
constants.ACCESS_STATE_QUEUED_TO_DENY)
def test_update_access_rules(self, state):
rules = []
for i in range(2):
rules.append({
'id': 'id-%s' % i,
'state': state,
'access_id': 'rule_id%s' % i
})
all_rules = copy.deepcopy(rules)
all_rules.append({
'id': 'id-3',
'state': constants.ACCESS_STATE_ERROR,
'access_id': 'rule_id3'
})
snapshot_instance_get = self.mock_object(
db, 'share_snapshot_instance_get',
mock.Mock(return_value=self.snapshot_instance))
snap_get_all_for_snap_instance = self.mock_object(
db, 'share_snapshot_access_get_all_for_snapshot_instance',
mock.Mock(return_value=all_rules))
self.mock_object(db, 'share_snapshot_instance_access_update')
self.mock_object(self.driver, 'snapshot_update_access')
self.mock_object(self.snapshot_access, '_check_needs_refresh',
mock.Mock(return_value=False))
self.mock_object(db, 'share_snapshot_instance_access_delete')
self.snapshot_access.update_access_rules(self.context,
self.snapshot_instance['id'])
snapshot_instance_get.assert_called_once_with(
utils.IsAMatcher(context.RequestContext),
self.snapshot_instance['id'], with_share_data=True)
snap_get_all_for_snap_instance.assert_called_once_with(
utils.IsAMatcher(context.RequestContext),
self.snapshot_instance['id'])
if state == constants.ACCESS_STATE_QUEUED_TO_APPLY:
self.driver.snapshot_update_access.assert_called_once_with(
utils.IsAMatcher(context.RequestContext),
self.snapshot_instance, rules, add_rules=rules,
delete_rules=[], share_server=None)
else:
self.driver.snapshot_update_access.assert_called_once_with(
utils.IsAMatcher(context.RequestContext),
self.snapshot_instance, [], add_rules=[],
delete_rules=rules, share_server=None)
def test_update_access_rules_delete_all_rules(self):
rules = []
for i in range(2):
rules.append({
'id': 'id-%s' % i,
'state': constants.ACCESS_STATE_QUEUED_TO_DENY,
'access_id': 'rule_id%s' % i
})
snapshot_instance_get = self.mock_object(
db, 'share_snapshot_instance_get',
mock.Mock(return_value=self.snapshot_instance))
snap_get_all_for_snap_instance = self.mock_object(
db, 'share_snapshot_access_get_all_for_snapshot_instance',
mock.Mock(side_effect=[rules, []]))
self.mock_object(db, 'share_snapshot_instance_access_update')
self.mock_object(self.driver, 'snapshot_update_access')
self.mock_object(db, 'share_snapshot_instance_access_delete')
self.snapshot_access.update_access_rules(self.context,
self.snapshot_instance['id'],
delete_all_rules=True)
snapshot_instance_get.assert_called_once_with(
utils.IsAMatcher(context.RequestContext),
self.snapshot_instance['id'], with_share_data=True)
snap_get_all_for_snap_instance.assert_called_with(
utils.IsAMatcher(context.RequestContext),
self.snapshot_instance['id'])
self.driver.snapshot_update_access.assert_called_with(
utils.IsAMatcher(context.RequestContext), self.snapshot_instance,
[], add_rules=[], delete_rules=rules, share_server=None)
def test_update_access_rules_exception(self):
rules = []
for i in range(2):
rules.append({
'id': 'id-%s' % i,
'state': constants.ACCESS_STATE_APPLYING,
'access_id': 'rule_id%s' % i
})
snapshot_instance_get = self.mock_object(
db, 'share_snapshot_instance_get',
mock.Mock(return_value=self.snapshot_instance))
snap_get_all_for_snap_instance = self.mock_object(
db, 'share_snapshot_access_get_all_for_snapshot_instance',
mock.Mock(return_value=rules))
self.mock_object(db, 'share_snapshot_instance_access_update')
self.mock_object(self.driver, 'snapshot_update_access',
mock.Mock(side_effect=exception.NotFound))
self.assertRaises(exception.NotFound,
self.snapshot_access.update_access_rules,
self.context, self.snapshot_instance['id'])
snapshot_instance_get.assert_called_once_with(
utils.IsAMatcher(context.RequestContext),
self.snapshot_instance['id'], with_share_data=True)
snap_get_all_for_snap_instance.assert_called_once_with(
utils.IsAMatcher(context.RequestContext),
self.snapshot_instance['id'])
self.driver.snapshot_update_access.assert_called_once_with(
utils.IsAMatcher(context.RequestContext), self.snapshot_instance,
rules, add_rules=rules, delete_rules=[], share_server=None)
| [((45, 5, 46, 52), 'ddt.data', 'ddt.data', ({(45, 14, 45, 52): 'constants.ACCESS_STATE_QUEUED_TO_APPLY', (46, 14, 46, 51): 'constants.ACCESS_STATE_QUEUED_TO_DENY'}, {}), '(constants.ACCESS_STATE_QUEUED_TO_APPLY, constants.\n ACCESS_STATE_QUEUED_TO_DENY)', False, 'import ddt\n'), ((36, 31, 37, 28), 'manila.share.snapshot_access.ShareSnapshotInstanceAccess', 'snapshot_access.ShareSnapshotInstanceAccess', ({(37, 12, 37, 14): 'db', (37, 16, 37, 27): 'self.driver'}, {}), '(db, self.driver)', False, 'from manila.share import snapshot_access\n'), ((38, 23, 38, 50), 'manila.context.get_admin_context', 'context.get_admin_context', ({}, {}), '()', False, 'from manila import context\n'), ((39, 16, 39, 39), 'manila.tests.db_utils.create_share', 'db_utils.create_share', ({}, {}), '()', False, 'from manila.tests import db_utils\n'), ((40, 24, 40, 70), 'manila.tests.db_utils.create_snapshot', 'db_utils.create_snapshot', (), '', False, 'from manila.tests import db_utils\n'), ((41, 33, 43, 71), 'manila.tests.db_utils.create_snapshot_instance', 'db_utils.create_snapshot_instance', (), '', False, 'from manila.tests import db_utils\n'), ((56, 20, 56, 40), 'copy.deepcopy', 'copy.deepcopy', ({(56, 34, 56, 39): 'rules'}, {}), '(rules)', False, 'import copy\n'), ((35, 38, 35, 49), 'mock.Mock', 'mock.Mock', ({}, {}), '()', False, 'import mock\n'), ((65, 12, 65, 58), 'mock.Mock', 'mock.Mock', (), '', False, 'import mock\n'), ((69, 12, 69, 45), 'mock.Mock', 'mock.Mock', (), '', False, 'import mock\n'), ((74, 25, 74, 54), 'mock.Mock', 'mock.Mock', (), '', False, 'import mock\n'), ((81, 12, 81, 52), 'manila.utils.IsAMatcher', 'utils.IsAMatcher', ({(81, 29, 81, 51): 'context.RequestContext'}, {}), '(context.RequestContext)', False, 'from manila import utils\n'), ((84, 12, 84, 52), 'manila.utils.IsAMatcher', 'utils.IsAMatcher', ({(84, 29, 84, 51): 'context.RequestContext'}, {}), '(context.RequestContext)', False, 'from manila import utils\n'), ((109, 12, 109, 58), 'mock.Mock', 'mock.Mock', (), '', False, 'import mock\n'), ((113, 12, 113, 46), 'mock.Mock', 'mock.Mock', (), '', False, 'import mock\n'), ((124, 12, 124, 52), 'manila.utils.IsAMatcher', 'utils.IsAMatcher', ({(124, 29, 124, 51): 'context.RequestContext'}, {}), '(context.RequestContext)', False, 'from manila import utils\n'), ((127, 12, 127, 52), 'manila.utils.IsAMatcher', 'utils.IsAMatcher', ({(127, 29, 127, 51): 'context.RequestContext'}, {}), '(context.RequestContext)', False, 'from manila import utils\n'), ((130, 12, 130, 52), 'manila.utils.IsAMatcher', 'utils.IsAMatcher', ({(130, 29, 130, 51): 'context.RequestContext'}, {}), '(context.RequestContext)', False, 'from manila import utils\n'), ((145, 12, 145, 58), 'mock.Mock', 'mock.Mock', (), '', False, 'import mock\n'), ((149, 12, 149, 41), 'mock.Mock', 'mock.Mock', (), '', False, 'import mock\n'), ((153, 25, 153, 66), 'mock.Mock', 'mock.Mock', (), '', False, 'import mock\n'), ((160, 12, 160, 52), 'manila.utils.IsAMatcher', 'utils.IsAMatcher', ({(160, 29, 160, 51): 'context.RequestContext'}, {}), '(context.RequestContext)', False, 'from manila import utils\n'), ((163, 12, 163, 52), 'manila.utils.IsAMatcher', 'utils.IsAMatcher', ({(163, 29, 163, 51): 'context.RequestContext'}, {}), '(context.RequestContext)', False, 'from manila import utils\n'), ((167, 12, 167, 52), 'manila.utils.IsAMatcher', 'utils.IsAMatcher', ({(167, 29, 167, 51): 'context.RequestContext'}, {}), '(context.RequestContext)', False, 'from manila import utils\n'), ((88, 16, 88, 56), 'manila.utils.IsAMatcher', 'utils.IsAMatcher', ({(88, 33, 
88, 55): 'context.RequestContext'}, {}), '(context.RequestContext)', False, 'from manila import utils\n'), ((93, 16, 93, 56), 'manila.utils.IsAMatcher', 'utils.IsAMatcher', ({(93, 33, 93, 55): 'context.RequestContext'}, {}), '(context.RequestContext)', False, 'from manila import utils\n')] |
sasano8/pyright | packages/pyright-internal/src/tests/samples/unnecessaryCast1.py | e804f324ee5dbd25fd37a258791b3fd944addecd | # This sample tests the type checker's reportUnnecessaryCast feature.
from typing import cast, Union
def foo(a: int):
# This should generate an error if
# reportUnnecessaryCast is enabled.
b = cast(int, a)
c: Union[int, str] = "hello"
d = cast(int, c)
| [((13, 4, 13, 16), 'typing.cast', 'cast', ({(13, 9, 13, 12): 'int', (13, 14, 13, 15): 'c'}, {}), '(int, c)', False, 'from typing import cast, Union\n'), ((9, 8, 9, 20), 'typing.cast', 'cast', ({(9, 13, 9, 16): 'int', (9, 18, 9, 19): 'a'}, {}), '(int, a)', False, 'from typing import cast, Union\n')] |
ArikBartzadok/beecrowd-challenges | Python/1238.py | ddb0453d1caa75c87c4b3ed6a40309ab99da77f2 | def execucoes():
return int(input())
def entradas():
return input().split(' ')
def imprimir(v):
print(v)
def tamanho_a(a):
return len(a)
def tamanho_b(b):
return len(b)
def diferenca_tamanhos(a, b):
return (len(a) <= len(b))
def analisar(e, i, s):
a, b = e
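    # (Editor's note: comment added for clarity, not part of the original file.)
    # Interleave the characters of the two strings, then append the leftover
    # tail of the longer one, e.g. 'abc' and 'de' -> 'adbec'.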
if(diferenca_tamanhos(a, b)):
for i in range(tamanho_a(a)):
s += a[i]
s += b[i]
s += b[tamanho_a(a):]
else:
for i in range(tamanho_b(b)):
s += a[i]
s += b[i]
s += a[tamanho_b(b):]
return s
def combinador():
n = execucoes()
for i in range(n): imprimir(analisar(entradas(), i, ''))
combinador() | [] |
worldwise001/amundsenmetadatalibrary | metadata_service/api/popular_tables.py | 9914c8b51d38b8bd76d3249eb4f7fcce3e198d09 | from http import HTTPStatus
from typing import Iterable, Union, Mapping
from flask import request
from flask_restful import Resource, fields, marshal
from metadata_service.proxy import get_proxy_client
popular_table_fields = {
'database': fields.String,
'cluster': fields.String,
'schema': fields.String,
'table_name': fields.String(attribute='name'),
'table_description': fields.String(attribute='description'), # Optional
}
popular_tables_fields = {
'popular_tables': fields.List(fields.Nested(popular_table_fields))
}
class PopularTablesAPI(Resource):
"""
PopularTables API
"""
def __init__(self) -> None:
self.client = get_proxy_client()
def get(self) -> Iterable[Union[Mapping, int, None]]:
limit = request.args.get('limit', 10)
popular_tables = self.client.get_popular_tables(num_entries=limit)
return marshal({'popular_tables': popular_tables}, popular_tables_fields), HTTPStatus.OK
| [((13, 18, 13, 49), 'flask_restful.fields.String', 'fields.String', (), '', False, 'from flask_restful import Resource, fields, marshal\n'), ((14, 25, 14, 63), 'flask_restful.fields.String', 'fields.String', (), '', False, 'from flask_restful import Resource, fields, marshal\n'), ((18, 34, 18, 69), 'flask_restful.fields.Nested', 'fields.Nested', ({(18, 48, 18, 68): 'popular_table_fields'}, {}), '(popular_table_fields)', False, 'from flask_restful import Resource, fields, marshal\n'), ((27, 22, 27, 40), 'metadata_service.proxy.get_proxy_client', 'get_proxy_client', ({}, {}), '()', False, 'from metadata_service.proxy import get_proxy_client\n'), ((30, 16, 30, 45), 'flask.request.args.get', 'request.args.get', ({(30, 33, 30, 40): '"""limit"""', (30, 42, 30, 44): '10'}, {}), "('limit', 10)", False, 'from flask import request\n'), ((32, 15, 32, 81), 'flask_restful.marshal', 'marshal', ({(32, 23, 32, 57): "{'popular_tables': popular_tables}", (32, 59, 32, 80): 'popular_tables_fields'}, {}), "({'popular_tables': popular_tables}, popular_tables_fields)", False, 'from flask_restful import Resource, fields, marshal\n')] |
SaijC/manhwaDownloader | tests/test1.py | f6e97cfe25355598e42633a3796d84b666d5302f | import requests
import logging
import cfscrape
import os
from manhwaDownloader.constants import CONSTANTS as CONST
logging.basicConfig(level=logging.DEBUG)
folderPath = os.path.join(CONST.OUTPUTPATH, 'serious-taste-of-forbbiden-fruit')
logging.info(len([file for file in os.walk(folderPath)]))
walkList = [file for file in os.walk(folderPath)]
chapterDicts = dict()
for folder, _, files in walkList[1:]:
chapterDicts.update({folder: files})
print(chapterDicts) | [((7, 0, 7, 40), 'logging.basicConfig', 'logging.basicConfig', (), '', False, 'import logging\n'), ((9, 13, 9, 79), 'os.path.join', 'os.path.join', ({(9, 26, 9, 42): 'CONST.OUTPUTPATH', (9, 44, 9, 78): '"""serious-taste-of-forbbiden-fruit"""'}, {}), "(CONST.OUTPUTPATH, 'serious-taste-of-forbbiden-fruit')", False, 'import os\n'), ((12, 29, 12, 48), 'os.walk', 'os.walk', ({(12, 37, 12, 47): 'folderPath'}, {}), '(folderPath)', False, 'import os\n'), ((11, 35, 11, 54), 'os.walk', 'os.walk', ({(11, 43, 11, 53): 'folderPath'}, {}), '(folderPath)', False, 'import os\n')] |
rahasayantan/Work-For-Reference | others/Keras_custom_error.py | e052da538df84034ec5a0fe3b19c4287de307286 | # define custom R2 metrics for Keras backend
from keras import backend as K
def r2_keras(y_true, y_pred):
SS_res = K.sum(K.square( y_true - y_pred ))
SS_tot = K.sum(K.square( y_true - K.mean(y_true) ) )
return ( 1 - SS_res/(SS_tot + K.epsilon()) )
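# --- Editor's illustrative sketch (not part of the original file). A plain NumPy
# version of the same R^2 = 1 - SS_res / SS_tot formula, handy for sanity-checking
# r2_keras outside of a Keras session; the small constant mirrors K.epsilon() and
# guards against a zero denominator.
def r2_numpy(y_true, y_pred):
    import numpy as np
    y_true = np.asarray(y_true, dtype=float)
    y_pred = np.asarray(y_pred, dtype=float)
    ss_res = np.sum((y_true - y_pred) ** 2)
    ss_tot = np.sum((y_true - np.mean(y_true)) ** 2)
    return 1.0 - ss_res / (ss_tot + 1e-12)
# e.g. r2_numpy([3, -0.5, 2, 7], [2.5, 0.0, 2, 8]) gives roughly 0.9486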
# base model architecture definition
def model():
model = Sequential()
#input layer
model.add(Dense(input_dims, input_dim=input_dims))
model.add(BatchNormalization())
model.add(Activation('relu'))
model.add(Dropout(0.3))
# hidden layers
model.add(Dense(input_dims))
model.add(BatchNormalization())
model.add(Activation(act_func))
model.add(Dropout(0.3))
model.add(Dense(input_dims//2))
model.add(BatchNormalization())
model.add(Activation(act_func))
model.add(Dropout(0.3))
model.add(Dense(input_dims//4, activation=act_func))
# output layer (y_pred)
model.add(Dense(1, activation='linear'))
# compile this model
model.compile(loss='mean_squared_error', # one may use 'mean_absolute_error' as alternative
optimizer='adam',
metrics=[r2_keras] # you can add several if needed
)
# Visualize NN architecture
print(model.summary())
return model
################K2
import pandas as pd
import numpy as np
from sklearn.feature_selection import SelectFromModel
from sklearn.linear_model import LassoCV
from sklearn.model_selection import GridSearchCV
from sklearn.preprocessing import RobustScaler
from keras import backend as K
from keras.models import Sequential
from keras.layers import Dense, InputLayer, GaussianNoise
from keras.wrappers.scikit_learn import KerasRegressor
train = pd.read_csv('../input/train.csv')
test = pd.read_csv('../input/test.csv')
#
# Data preparation
#
y_train = train['y'].values
id_test = test['ID']
num_train = len(train)
df_all = pd.concat([train, test])
df_all.drop(['ID', 'y'], axis=1, inplace=True)
# One-hot encoding of categorical/strings
df_all = pd.get_dummies(df_all, drop_first=True)
# Scaling features
scaler = RobustScaler()
df_all = scaler.fit_transform(df_all)
train = df_all[:num_train]
test = df_all[num_train:]
# Keep only the most contributing features
sfm = SelectFromModel(LassoCV())
sfm.fit(train, y_train)
train = sfm.transform(train)
test = sfm.transform(test)
print ('Number of features : %d' % train.shape[1])
def r2_keras(y_true, y_pred):
SS_res = K.sum(K.square( y_true - y_pred ))
SS_tot = K.sum(K.square( y_true - K.mean(y_true) ) )
return ( 1 - SS_res/(SS_tot + K.epsilon()) )
def build_model_fn(neurons=20, noise=0.25):
model = Sequential()
model.add(InputLayer(input_shape=(train.shape[1],)))
model.add(GaussianNoise(noise))
model.add(Dense(neurons, activation='tanh'))
model.add(Dense(1, activation='linear'))
model.compile(loss='mean_squared_error', optimizer='nadam', metrics=[r2_keras])
return model
#
# Tuning model parameters
#
model = KerasRegressor(build_fn=build_model_fn, epochs=75, verbose=0)
gsc = GridSearchCV(
estimator=model,
param_grid={
#'neurons': range(18,31,4),
'noise': [x/20.0 for x in range(3, 7)],
},
#scoring='r2',
scoring='neg_mean_squared_error',
cv=5
)
grid_result = gsc.fit(train, y_train)
print("Best: %f using %s" % (grid_result.best_score_, grid_result.best_params_))
for test_mean, test_stdev, train_mean, train_stdev, param in zip(
grid_result.cv_results_['mean_test_score'],
grid_result.cv_results_['std_test_score'],
grid_result.cv_results_['mean_train_score'],
grid_result.cv_results_['std_train_score'],
grid_result.cv_results_['params']):
print("Train: %f (%f) // Test : %f (%f) with: %r" % (train_mean, train_stdev, test_mean, test_stdev, param))
#
# Train model with best params for submission
#
model = build_model_fn(**grid_result.best_params_)
model.fit(train, y_train, epochs=75, verbose=2)
y_test = model.predict(test).flatten()
df_sub = pd.DataFrame({'ID': id_test, 'y': y_test})
df_sub.to_csv('mercedes-submission.csv', index=False)
#########################
import pandas as pd
import numpy as np
from sklearn.svm import SVR
from sklearn.ensemble import RandomForestRegressor, ExtraTreesRegressor
from sklearn.decomposition import PCA, FastICA
from sklearn.preprocessing import RobustScaler
from sklearn.pipeline import make_pipeline, Pipeline, _name_estimators
from sklearn.linear_model import ElasticNet, ElasticNetCV
from sklearn.model_selection import cross_val_score, KFold
from sklearn.metrics import r2_score
from sklearn.base import BaseEstimator, TransformerMixin
import xgboost as xgb
train = pd.read_csv('../input/train.csv')
test = pd.read_csv('../input/test.csv')
y_train = train['y'].values
y_mean = np.mean(y_train)
id_test = test['ID']
num_train = len(train)
df_all = pd.concat([train, test])
df_all.drop(['ID', 'y'], axis=1, inplace=True)
# One-hot encoding of categorical/strings
df_all = pd.get_dummies(df_all, drop_first=True)
train = df_all[:num_train]
test = df_all[num_train:]
class AddColumns(BaseEstimator, TransformerMixin):
def __init__(self, transform_=None):
self.transform_ = transform_
def fit(self, X, y=None):
self.transform_.fit(X, y)
return self
def transform(self, X, y=None):
xform_data = self.transform_.transform(X, y)
return np.append(X, xform_data, axis=1)
class LogExpPipeline(Pipeline):
def fit(self, X, y):
super(LogExpPipeline, self).fit(X, np.log1p(y))
def predict(self, X):
return np.expm1(super(LogExpPipeline, self).predict(X))
#
# Model/pipeline with scaling,pca,svm
#
svm_pipe = LogExpPipeline(_name_estimators([RobustScaler(),
PCA(),
SVR(kernel='rbf', C=1.0, epsilon=0.05)]))
# results = cross_val_score(svm_pipe, train, y_train, cv=5, scoring='r2')
# print("SVM score: %.4f (%.4f)" % (results.mean(), results.std()))
# exit()
#
# Model/pipeline with scaling,pca,ElasticNet
#
en_pipe = LogExpPipeline(_name_estimators([RobustScaler(),
PCA(n_components=125),
ElasticNet(alpha=0.001, l1_ratio=0.1)]))
#
# XGBoost model
#
xgb_model = xgb.sklearn.XGBRegressor(max_depth=4, learning_rate=0.005, subsample=0.921,
objective='reg:linear', n_estimators=1300, base_score=y_mean)
xgb_pipe = Pipeline(_name_estimators([AddColumns(transform_=PCA(n_components=10)),
AddColumns(transform_=FastICA(n_components=10, max_iter=500)),
xgb_model]))
# results = cross_val_score(xgb_model, train, y_train, cv=5, scoring='r2')
# print("XGB score: %.4f (%.4f)" % (results.mean(), results.std()))
#
# Random Forest
#
rf_model = RandomForestRegressor(n_estimators=250, n_jobs=4, min_samples_split=25,
min_samples_leaf=25, max_depth=3)
# results = cross_val_score(rf_model, train, y_train, cv=5, scoring='r2')
# print("RF score: %.4f (%.4f)" % (results.mean(), results.std()))
#
# Now the training and stacking part.  In a previous version I just tried to train each model and
# find the best combination, which led to a horrible score (overfit?). Code below does out-of-fold
# training/predictions and then we combine the final results.
#
# Read here for more explanation (This code was borrowed/adapted) :
#
class Ensemble(object):
def __init__(self, n_splits, stacker, base_models):
self.n_splits = n_splits
self.stacker = stacker
self.base_models = base_models
def fit_predict(self, X, y, T):
X = np.array(X)
y = np.array(y)
T = np.array(T)
folds = list(KFold(n_splits=self.n_splits, shuffle=True, random_state=2016).split(X, y))
S_train = np.zeros((X.shape[0], len(self.base_models)))
S_test = np.zeros((T.shape[0], len(self.base_models)))
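        # (Editor's note: comments added for clarity, not part of the original file.)
        # S_train collects out-of-fold predictions for every training row, one column
        # per base model; S_test collects each base model's test-set predictions
        # averaged over the folds. The stacker is then fit on S_train against y and
        # applied to S_test.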
for i, clf in enumerate(self.base_models):
S_test_i = np.zeros((T.shape[0], self.n_splits))
for j, (train_idx, test_idx) in enumerate(folds):
X_train = X[train_idx]
y_train = y[train_idx]
X_holdout = X[test_idx]
y_holdout = y[test_idx]
clf.fit(X_train, y_train)
y_pred = clf.predict(X_holdout)[:]
print ("Model %d fold %d score %f" % (i, j, r2_score(y_holdout, y_pred)))
S_train[test_idx, i] = y_pred
S_test_i[:, j] = clf.predict(T)[:]
S_test[:, i] = S_test_i.mean(axis=1)
# results = cross_val_score(self.stacker, S_train, y, cv=5, scoring='r2')
# print("Stacker score: %.4f (%.4f)" % (results.mean(), results.std()))
# exit()
self.stacker.fit(S_train, y)
res = self.stacker.predict(S_test)[:]
return res
stack = Ensemble(n_splits=5,
#stacker=ElasticNetCV(l1_ratio=[x/10.0 for x in range(1,10)]),
stacker=ElasticNet(l1_ratio=0.1, alpha=1.4),
base_models=(svm_pipe, en_pipe, xgb_pipe, rf_model))
y_test = stack.fit_predict(train, y_train, test)
df_sub = pd.DataFrame({'ID': id_test, 'y': y_test})
df_sub.to_csv('submission.csv', index=False)
#############################
'''This example demonstrates the use of Convolution1D for text classification.
Gets to 0.89 test accuracy after 2 epochs.
90s/epoch on Intel i5 2.4Ghz CPU.
10s/epoch on Tesla K40 GPU.
'''
from __future__ import print_function
from keras.preprocessing import sequence
from keras.models import Sequential
from keras.layers import Dense, Dropout, Activation
from keras.layers import Embedding
from keras.layers import Conv1D, GlobalMaxPooling1D
from keras.datasets import imdb
# set parameters:
max_features = 5000
maxlen = 400
batch_size = 32
embedding_dims = 50
filters = 250
kernel_size = 3
hidden_dims = 250
epochs = 2
print('Loading data...')
(x_train, y_train), (x_test, y_test) = imdb.load_data(num_words=max_features)
print(len(x_train), 'train sequences')
print(len(x_test), 'test sequences')
print('Pad sequences (samples x time)')
x_train = sequence.pad_sequences(x_train, maxlen=maxlen)
x_test = sequence.pad_sequences(x_test, maxlen=maxlen)
print('x_train shape:', x_train.shape)
print('x_test shape:', x_test.shape)
print('Build model...')
model = Sequential()
# we start off with an efficient embedding layer which maps
# our vocab indices into embedding_dims dimensions
model.add(Embedding(max_features,
embedding_dims,
input_length=maxlen))
model.add(Dropout(0.2))
# we add a Convolution1D, which will learn filters
# word group filters of size filter_length:
model.add(Conv1D(filters,
kernel_size,
padding='valid',
activation='relu',
strides=1))
# we use max pooling:
model.add(GlobalMaxPooling1D())
# We add a vanilla hidden layer:
model.add(Dense(hidden_dims))
model.add(Dropout(0.2))
model.add(Activation('relu'))
# We project onto a single unit output layer, and squash it with a sigmoid:
model.add(Dense(1))
model.add(Activation('sigmoid'))
model.compile(loss='binary_crossentropy',
optimizer='adam',
metrics=['accuracy'])
model.fit(x_train, y_train,
batch_size=batch_size,
epochs=epochs,
validation_data=(x_test, y_test))
| [((55, 8, 55, 41), 'pandas.read_csv', 'pd.read_csv', ({(55, 20, 55, 40): '"""../input/train.csv"""'}, {}), "('../input/train.csv')", True, 'import pandas as pd\n'), ((56, 7, 56, 39), 'pandas.read_csv', 'pd.read_csv', ({(56, 19, 56, 38): '"""../input/test.csv"""'}, {}), "('../input/test.csv')", True, 'import pandas as pd\n'), ((66, 9, 66, 33), 'pandas.concat', 'pd.concat', ({(66, 19, 66, 32): '[train, test]'}, {}), '([train, test])', True, 'import pandas as pd\n'), ((70, 9, 70, 48), 'pandas.get_dummies', 'pd.get_dummies', (), '', True, 'import pandas as pd\n'), ((73, 9, 73, 23), 'sklearn.preprocessing.RobustScaler', 'RobustScaler', ({}, {}), '()', False, 'from sklearn.preprocessing import RobustScaler\n'), ((105, 8, 105, 69), 'keras.wrappers.scikit_learn.KerasRegressor', 'KerasRegressor', (), '', False, 'from keras.wrappers.scikit_learn import KerasRegressor\n'), ((139, 9, 139, 51), 'pandas.DataFrame', 'pd.DataFrame', ({(139, 22, 139, 50): "{'ID': id_test, 'y': y_test}"}, {}), "({'ID': id_test, 'y': y_test})", True, 'import pandas as pd\n'), ((157, 8, 157, 41), 'pandas.read_csv', 'pd.read_csv', ({(157, 20, 157, 40): '"""../input/train.csv"""'}, {}), "('../input/train.csv')", True, 'import pandas as pd\n'), ((158, 7, 158, 39), 'pandas.read_csv', 'pd.read_csv', ({(158, 19, 158, 38): '"""../input/test.csv"""'}, {}), "('../input/test.csv')", True, 'import pandas as pd\n'), ((161, 9, 161, 25), 'numpy.mean', 'np.mean', ({(161, 17, 161, 24): 'y_train'}, {}), '(y_train)', True, 'import numpy as np\n'), ((165, 9, 165, 33), 'pandas.concat', 'pd.concat', ({(165, 19, 165, 32): '[train, test]'}, {}), '([train, test])', True, 'import pandas as pd\n'), ((169, 9, 169, 48), 'pandas.get_dummies', 'pd.get_dummies', (), '', True, 'import pandas as pd\n'), ((216, 12, 217, 98), 'xgboost.sklearn.XGBRegressor', 'xgb.sklearn.XGBRegressor', (), '', True, 'import xgboost as xgb\n'), ((230, 11, 231, 66), 'sklearn.ensemble.RandomForestRegressor', 'RandomForestRegressor', (), '', False, 'from sklearn.ensemble import RandomForestRegressor, ExtraTreesRegressor\n'), ((294, 9, 294, 51), 'pandas.DataFrame', 'pd.DataFrame', ({(294, 22, 294, 50): "{'ID': id_test, 'y': y_test}"}, {}), "({'ID': id_test, 'y': y_test})", True, 'import pandas as pd\n'), ((324, 39, 324, 77), 'keras.datasets.imdb.load_data', 'imdb.load_data', (), '', False, 'from keras.datasets import imdb\n'), ((329, 10, 329, 56), 'keras.preprocessing.sequence.pad_sequences', 'sequence.pad_sequences', (), '', False, 'from keras.preprocessing import sequence\n'), ((330, 9, 330, 54), 'keras.preprocessing.sequence.pad_sequences', 'sequence.pad_sequences', (), '', False, 'from keras.preprocessing import sequence\n'), ((335, 8, 335, 20), 'keras.models.Sequential', 'Sequential', ({}, {}), '()', False, 'from keras.models import Sequential\n'), ((11, 12, 11, 24), 'keras.models.Sequential', 'Sequential', ({}, {}), '()', False, 'from keras.models import Sequential\n'), ((80, 22, 80, 31), 'sklearn.linear_model.LassoCV', 'LassoCV', ({}, {}), '()', False, 'from sklearn.linear_model import LassoCV\n'), ((93, 12, 93, 24), 'keras.models.Sequential', 'Sequential', ({}, {}), '()', False, 'from keras.models import Sequential\n'), ((339, 10, 341, 40), 'keras.layers.Embedding', 'Embedding', (), '', False, 'from keras.layers import Embedding\n'), ((342, 10, 342, 22), 'keras.layers.Dropout', 'Dropout', ({(342, 18, 342, 21): '(0.2)'}, {}), '(0.2)', False, 'from keras.layers import Dense, Dropout, Activation\n'), ((346, 10, 350, 27), 'keras.layers.Conv1D', 'Conv1D', (), '', False, 'from 
keras.layers import Conv1D, GlobalMaxPooling1D\n'), ((352, 10, 352, 30), 'keras.layers.GlobalMaxPooling1D', 'GlobalMaxPooling1D', ({}, {}), '()', False, 'from keras.layers import Conv1D, GlobalMaxPooling1D\n'), ((355, 10, 355, 28), 'keras.layers.Dense', 'Dense', ({(355, 16, 355, 27): 'hidden_dims'}, {}), '(hidden_dims)', False, 'from keras.layers import Dense, Dropout, Activation\n'), ((356, 10, 356, 22), 'keras.layers.Dropout', 'Dropout', ({(356, 18, 356, 21): '(0.2)'}, {}), '(0.2)', False, 'from keras.layers import Dense, Dropout, Activation\n'), ((357, 10, 357, 28), 'keras.layers.Activation', 'Activation', ({(357, 21, 357, 27): '"""relu"""'}, {}), "('relu')", False, 'from keras.layers import Dense, Dropout, Activation\n'), ((360, 10, 360, 18), 'keras.layers.Dense', 'Dense', ({(360, 16, 360, 17): '(1)'}, {}), '(1)', False, 'from keras.layers import Dense, Dropout, Activation\n'), ((361, 10, 361, 31), 'keras.layers.Activation', 'Activation', ({(361, 21, 361, 30): '"""sigmoid"""'}, {}), "('sigmoid')", False, 'from keras.layers import Dense, Dropout, Activation\n'), ((5, 20, 5, 47), 'keras.backend.square', 'K.square', ({(5, 30, 5, 45): 'y_true - y_pred'}, {}), '(y_true - y_pred)', True, 'from keras import backend as K\n'), ((13, 14, 13, 53), 'keras.layers.Dense', 'Dense', (), '', False, 'from keras.layers import Dense, Dropout, Activation\n'), ((15, 14, 15, 32), 'keras.layers.Activation', 'Activation', ({(15, 25, 15, 31): '"""relu"""'}, {}), "('relu')", False, 'from keras.layers import Dense, Dropout, Activation\n'), ((16, 14, 16, 26), 'keras.layers.Dropout', 'Dropout', ({(16, 22, 16, 25): '(0.3)'}, {}), '(0.3)', False, 'from keras.layers import Dense, Dropout, Activation\n'), ((18, 14, 18, 31), 'keras.layers.Dense', 'Dense', ({(18, 20, 18, 30): 'input_dims'}, {}), '(input_dims)', False, 'from keras.layers import Dense, Dropout, Activation\n'), ((20, 14, 20, 34), 'keras.layers.Activation', 'Activation', ({(20, 25, 20, 33): 'act_func'}, {}), '(act_func)', False, 'from keras.layers import Dense, Dropout, Activation\n'), ((21, 14, 21, 26), 'keras.layers.Dropout', 'Dropout', ({(21, 22, 21, 25): '(0.3)'}, {}), '(0.3)', False, 'from keras.layers import Dense, Dropout, Activation\n'), ((23, 14, 23, 34), 'keras.layers.Dense', 'Dense', ({(23, 20, 23, 33): '(input_dims // 2)'}, {}), '(input_dims // 2)', False, 'from keras.layers import Dense, Dropout, Activation\n'), ((25, 14, 25, 34), 'keras.layers.Activation', 'Activation', ({(25, 25, 25, 33): 'act_func'}, {}), '(act_func)', False, 'from keras.layers import Dense, Dropout, Activation\n'), ((26, 14, 26, 26), 'keras.layers.Dropout', 'Dropout', ({(26, 22, 26, 25): '(0.3)'}, {}), '(0.3)', False, 'from keras.layers import Dense, Dropout, Activation\n'), ((28, 14, 28, 55), 'keras.layers.Dense', 'Dense', (), '', False, 'from keras.layers import Dense, Dropout, Activation\n'), ((31, 14, 31, 43), 'keras.layers.Dense', 'Dense', (), '', False, 'from keras.layers import Dense, Dropout, Activation\n'), ((88, 20, 88, 47), 'keras.backend.square', 'K.square', ({(88, 30, 88, 45): 'y_true - y_pred'}, {}), '(y_true - y_pred)', True, 'from keras import backend as K\n'), ((94, 14, 94, 55), 'keras.layers.InputLayer', 'InputLayer', (), '', False, 'from keras.layers import Dense, InputLayer, GaussianNoise\n'), ((95, 14, 95, 34), 'keras.layers.GaussianNoise', 'GaussianNoise', ({(95, 28, 95, 33): 'noise'}, {}), '(noise)', False, 'from keras.layers import Dense, InputLayer, GaussianNoise\n'), ((96, 14, 96, 47), 'keras.layers.Dense', 'Dense', (), '', False, 'from keras.layers 
import Dense, Dropout, Activation\n'), ((97, 14, 97, 43), 'keras.layers.Dense', 'Dense', (), '', False, 'from keras.layers import Dense, Dropout, Activation\n'), ((185, 15, 185, 47), 'numpy.append', 'np.append', (), '', True, 'import numpy as np\n'), ((252, 12, 252, 23), 'numpy.array', 'np.array', ({(252, 21, 252, 22): 'X'}, {}), '(X)', True, 'import numpy as np\n'), ((253, 12, 253, 23), 'numpy.array', 'np.array', ({(253, 21, 253, 22): 'y'}, {}), '(y)', True, 'import numpy as np\n'), ((254, 12, 254, 23), 'numpy.array', 'np.array', ({(254, 21, 254, 22): 'T'}, {}), '(T)', True, 'import numpy as np\n'), ((289, 25, 289, 60), 'sklearn.linear_model.ElasticNet', 'ElasticNet', (), '', False, 'from sklearn.linear_model import ElasticNet, ElasticNetCV\n'), ((190, 43, 190, 54), 'numpy.log1p', 'np.log1p', ({(190, 52, 190, 53): 'y'}, {}), '(y)', True, 'import numpy as np\n'), ((198, 44, 198, 58), 'sklearn.preprocessing.RobustScaler', 'RobustScaler', ({}, {}), '()', False, 'from sklearn.preprocessing import RobustScaler\n'), ((199, 44, 199, 49), 'sklearn.decomposition.PCA', 'PCA', ({}, {}), '()', False, 'from sklearn.decomposition import PCA, FastICA\n'), ((200, 44, 200, 82), 'sklearn.svm.SVR', 'SVR', (), '', False, 'from sklearn.svm import SVR\n'), ((209, 43, 209, 57), 'sklearn.preprocessing.RobustScaler', 'RobustScaler', ({}, {}), '()', False, 'from sklearn.preprocessing import RobustScaler\n'), ((210, 43, 210, 64), 'sklearn.decomposition.PCA', 'PCA', (), '', False, 'from sklearn.decomposition import PCA, FastICA\n'), ((211, 43, 211, 80), 'sklearn.linear_model.ElasticNet', 'ElasticNet', (), '', False, 'from sklearn.linear_model import ElasticNet, ElasticNetCV\n'), ((262, 23, 262, 60), 'numpy.zeros', 'np.zeros', ({(262, 32, 262, 59): '(T.shape[0], self.n_splits)'}, {}), '((T.shape[0], self.n_splits))', True, 'import numpy as np\n'), ((6, 38, 6, 52), 'keras.backend.mean', 'K.mean', ({(6, 45, 6, 51): 'y_true'}, {}), '(y_true)', True, 'from keras import backend as K\n'), ((7, 34, 7, 45), 'keras.backend.epsilon', 'K.epsilon', ({}, {}), '()', True, 'from keras import backend as K\n'), ((89, 38, 89, 52), 'keras.backend.mean', 'K.mean', ({(89, 45, 89, 51): 'y_true'}, {}), '(y_true)', True, 'from keras import backend as K\n'), ((90, 34, 90, 45), 'keras.backend.epsilon', 'K.epsilon', ({}, {}), '()', True, 'from keras import backend as K\n'), ((219, 60, 219, 80), 'sklearn.decomposition.PCA', 'PCA', (), '', False, 'from sklearn.decomposition import PCA, FastICA\n'), ((220, 60, 220, 98), 'sklearn.decomposition.FastICA', 'FastICA', (), '', False, 'from sklearn.decomposition import PCA, FastICA\n'), ((256, 21, 256, 83), 'sklearn.model_selection.KFold', 'KFold', (), '', False, 'from sklearn.model_selection import cross_val_score, KFold\n'), ((273, 60, 273, 87), 'sklearn.metrics.r2_score', 'r2_score', ({(273, 69, 273, 78): 'y_holdout', (273, 80, 273, 86): 'y_pred'}, {}), '(y_holdout, y_pred)', False, 'from sklearn.metrics import r2_score\n')] |