# Repo: Majanao/pytorch-blender | tests/test_env.py @ eb5effb033094d037e7bdc2238c00806be7012ae

import pytest
from pathlib import Path

from blendtorch import btt

BLENDDIR = Path(__file__).parent / 'blender'


class MyEnv(btt.env.OpenAIRemoteEnv):
    def __init__(self, background=True, **kwargs):
        super().__init__(version='1.0.0')
        self.launch(scene=BLENDDIR / 'env.blend', script=BLENDDIR /
                    'env.blend.py', background=background, **kwargs)
        # For Blender 2.9, if we pass scene='', the tests below fail since
        # _env_post_step() is not called. It is currently unclear why this happens.


def _run_remote_env(background):
    env = MyEnv(background=background)

    obs = env.reset()
    assert obs == 0.
    obs, reward, done, info = env.step(0.1)
    assert obs == pytest.approx(0.1)
    assert reward == 0.
    assert not done
    assert info['count'] == 2  # 1 is already set by reset()
    obs, reward, done, info = env.step(0.6)
    assert obs == pytest.approx(0.6)
    assert reward == 1.
    assert not done
    assert info['count'] == 3
    for _ in range(8):
        obs, reward, done, info = env.step(0.6)
    assert done

    obs = env.reset()
    assert obs == 0.
    obs, reward, done, info = env.step(0.1)
    assert obs == pytest.approx(0.1)
    assert reward == 0.
    assert not done
    assert info['count'] == 2
    env.close()


@pytest.mark.background
def test_remote_env():
    _run_remote_env(background=True)


def test_remote_env_ui():
    _run_remote_env(background=False)
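
# A sketch of how these tests are typically run (pytest's -m flag is standard;
# the 'background' marker itself must be registered in the project's pytest
# configuration, which is not shown in this file):
#   pytest tests/test_env.py -m background   # headless (background=True) test only
#   pytest tests/test_env.py                 # all tests, including the UI variant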


# Repo: sitkatech/django-sitetree | sitetree/__init__.py @ 5d7e9d503f97ff021c5c04855e04e098b3d2488c

VERSION = (0, 9, 5)


# Repo: tahashmi/deepvariant | deepvariant/runtime_by_region_vis.py @ 441c1809d3290f4a20b29a0a0bbf8ecfb929a6e3

# Copyright 2020 Google LLC.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions
# are met:
#
# 1. Redistributions of source code must retain the above copyright notice,
# this list of conditions and the following disclaimer.
#
# 2. Redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in the
# documentation and/or other materials provided with the distribution.
#
# 3. Neither the name of the copyright holder nor the names of its
# contributors may be used to endorse or promote products derived from this
# software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
# ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE
# LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
# CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
# SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
# INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
# CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
# ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
# POSSIBILITY OF SUCH DAMAGE.
r"""Create a visual report of make_examples runtime by region.
Use this script to visualize the runtime-by-region data generated by running
make_examples with --runtime_by_region.
"""
from typing import Dict, Sequence, List, Tuple, Text, Any, Union
from absl import app
from absl import flags
import altair as alt
import pandas as pd
import tensorflow as tf
from third_party.nucleus.io import sharded_file_utils
# Altair uses a lot of method chaining, such as
# chart.mark_bar().encode(...).properties(...), so using backslash
# continuation to break this into separate lines makes the code more readable.
# pylint: disable=g-backslash-continuation
VEGA_URL = 'https://storage.googleapis.com/deepvariant/lib/vega'
FLAGS = flags.FLAGS

flags.DEFINE_string(
    'input', None, 'TSV file that was produced when running make_examples '
    'with --runtime_by_region. Can be sharded, e.g. /path/[email protected].')
flags.DEFINE_string(
    'title', None, 'Title will be shown at the top of the report and will '
    'be used as a prefix for downloaded image files.')
flags.DEFINE_string('output', 'runtime_by_region_report.html',
                    'Path for the output report, which will be an html file.')

RUNTIME_COLUMNS = [
    'get reads', 'find candidates', 'make pileup images', 'write outputs'
]
COUNT_COLUMNS = ['num reads', 'num candidates', 'num examples']
CSS_STYLES = """
<style>
body {
font-family: sans-serif;
}
.chart-container {
padding: 30px;
}
</style>
"""


def read_sharded_runtime_tsvs(path_string: str) -> pd.DataFrame:
  """Imports data from a single or sharded path into a pandas dataframe.

  Args:
    path_string: The path to the input file, which may be sharded.

  Returns:
    A dataframe matching the TSV file(s) but with added Task column.
  """
  if sharded_file_utils.is_sharded_file_spec(path_string):
    paths = sharded_file_utils.generate_sharded_filenames(path_string)
  else:
    paths = [path_string]
  list_of_dataframes = []
  for i, path in enumerate(paths):
    if path.startswith('gs://'):
      # Once pandas is updated to 0.24+, pd.read_csv will work for gs://
      # without this workaround.
      with tf.io.gfile.GFile(path) as f:
        d = pd.read_csv(f, sep='\t')
    else:
      d = pd.read_csv(path, sep='\t')
    d['Task'] = i
    list_of_dataframes.append(d)
  return pd.concat(list_of_dataframes, axis=0, ignore_index=True)
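
# Note on sharded specs: following the usual Nucleus convention, a spec such as
# '/tmp/[email protected]' expands via generate_sharded_filenames into
# '/tmp/runtime-00000-of-00003.tsv' ... '/tmp/runtime-00002-of-00003.tsv'
# (the five-digit zero padding is assumed from the convention, not this file).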


def format_runtime_string(raw_seconds: float) -> str:
  """Creates a nice format string from a potentially large number of seconds.

  Args:
    raw_seconds: A number of seconds.

  Returns:
    The seconds divided into hours, minutes, and remaining seconds, formatted
    nicely. For example, 2h3m5.012s.
  """
  minutes, seconds = divmod(raw_seconds, 60)
  hours, minutes = divmod(minutes, 60)
  seconds = round(seconds, 3)
  output = ''
  if hours > 0:
    output += f'{int(hours)}h'
  if minutes > 0:
    output += f'{int(minutes)}m'
  if seconds > 0 or not output:
    output += f'{seconds}s'
  return output
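
# A quick illustration (input values assumed for the example):
#   format_runtime_string(7385.012)  -> '2h3m5.012s'
#   format_runtime_string(0.0)       -> '0.0s'   (the `not output` branch)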


def calculate_totals(df: pd.DataFrame) -> pd.DataFrame:
  """Calculates total runtime, formats it nicely, and sorts by it.

  Args:
    df: A dataframe of runtime profiling numbers.

  Returns:
    The same dataframe with some additional summary columns.
  """
  # 'total runtime' is a simple sum of the runtime columns.
  df['total runtime'] = df[RUNTIME_COLUMNS].sum(axis=1)
  # Create a formatted runtime string for tooltips.
  df['Runtime'] = df['total runtime'].apply(format_runtime_string)
  # Sort by descending total region runtime.
  df.sort_values(by='total runtime', inplace=True, ascending=False)
  return df


def summarize_by_task(df: pd.DataFrame) -> pd.DataFrame:
  """Groups regions to get the total runtime for each task.

  Args:
    df: A dataframe of runtime profiling numbers.

  Returns:
    The dataframe grouped by task.
  """
  by_task = df.groupby(by=['Task']).sum()
  return by_task.reset_index()


def stage_histogram(d: pd.DataFrame, title: str = '') -> alt.Chart:
  """Plots a histogram of runtimes stacked by stage.

  Args:
    d: A dataframe of runtimes, either by region or by task.
    title: A title for the plot.

  Returns:
    An altair chart.
  """
  columns_used = RUNTIME_COLUMNS
  d = d[columns_used]
  return alt.Chart(d).transform_fold(
      RUNTIME_COLUMNS, as_=['Stage', 'runtime_by_stage']) \
      .mark_bar(opacity=0.3) \
      .encode(
          x=alt.X('runtime_by_stage:Q', bin=alt.Bin(maxbins=100),
                  title='Runtime (seconds)'),
          y=alt.Y('count()', title='Count of regions', stack=None),
          color=alt.Color('Stage:N', sort=None)
      ).properties(title=title)


def correlation_scatter_charts(d: pd.DataFrame, title: str = '') -> alt.Chart:
  """Produces a grid of scatter plots of runtimes of stages versus covariates.

  Args:
    d: A pandas dataframe of runtime by regions.
    title: A title for the plot.

  Returns:
    An altair chart.
  """
  columns_used = ['region', 'total runtime'] + RUNTIME_COLUMNS + COUNT_COLUMNS
  d = d[columns_used]
  return alt.Chart(d).mark_circle(opacity=0.1).encode(
      x=alt.X(alt.repeat('column'), type='quantitative',
              axis=alt.Axis(labelExpr="datum.value + 's'")),
      y=alt.Y(alt.repeat('row'), type='quantitative'),
      tooltip='region'
  ).properties(width=100, height=100) \
  .repeat(
      column=['total runtime'] + RUNTIME_COLUMNS,
      row=COUNT_COLUMNS,
  ).properties(title=title)


def totals_by_stage(d: pd.DataFrame) -> alt.Chart:
  """Plots total runtimes for each stage.

  Args:
    d: A dataframe of runtimes.

  Returns:
    An altair chart.
  """
  stage_totals_series = d.sum()[RUNTIME_COLUMNS]
  stage_totals = pd.DataFrame(
      stage_totals_series, columns=['Runtime (seconds)'])
  stage_totals.reset_index(inplace=True)
  stage_totals = stage_totals.rename(columns={'index': 'Stage'})
  stage_totals['Runtime'] = stage_totals['Runtime (seconds)'].apply(
      format_runtime_string)
  return alt.Chart(stage_totals).mark_bar().encode(
      x='Runtime (seconds)',
      y=alt.Y('Stage', sort=None),
      tooltip=['Runtime'],
      fill=alt.Fill('Stage',
                    sort=None)).properties(title='Overall runtime by stage')


def pareto_by_task_tooltip(row: pd.Series) -> str:
  """For one row of a dataframe, computes a tooltip description.

  Args:
    row: A Pandas Series, one row of a dataframe containing some specific
      cumulative sum columns.

  Returns:
    A string to show as the tooltip for a pareto curve.
  """
  return (f"{row['task cumsum order'] * 100:.2f}% of regions "
          f"account for {row['task cumsum fraction'] * 100:.2f}% of "
          f"the runtime in task {row['Task']}")


def calculate_pareto_metrics(df_subset: pd.DataFrame) -> pd.DataFrame:
  """Calculates cumulative sums for a subset of a dataframe.

  Args:
    df_subset: A dataframe subset of one task.

  Returns:
    The same dataframe subset with some additional columns.
  """
  # These are the same for all regions in the same task, for the scatter plot:
  df_subset['task total runtime'] = df_subset['total runtime'].sum()
  df_subset['Runtime for task'] = df_subset['task total runtime'].apply(
      format_runtime_string)
  df_subset['task num examples'] = df_subset['num examples'].sum()
  # These are cumulative sums for the pareto curves:
  df_subset['task cumsum fraction'] = df_subset['total runtime'].cumsum(
  ) / df_subset['total runtime'].sum()
  n = len(df_subset)
  df_subset['task cumsum order'] = list(map(lambda x: x / n, range(0, n)))
  df_subset['tooltip'] = df_subset.apply(pareto_by_task_tooltip, axis=1)
  return df_subset
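
# Worked example of the cumulative-sum columns (numbers assumed): for one task
# with per-region runtimes [6, 3, 1] (already sorted descending, total 10s),
# 'task cumsum fraction' is [0.6, 0.9, 1.0] and 'task cumsum order' is
# [0.0, 1/3, 2/3], i.e. "the longest 33.33% of regions account for 90% of the
# runtime", which is exactly what the Pareto curves below visualize.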


def pareto_and_runtimes_by_task(df: pd.DataFrame) -> alt.Chart:
  """Creates an interactive Pareto curve and scatter plot of task runtimes.

  Tracing each curve shows to what extent a small proportion of long-running
  regions contribute disproportionately to the overall runtime. That is,
  "The longest-running X% of regions account for Y% of the total runtime."
  There is a curve for each task.

  Args:
    df: A dataframe of all regions.

  Returns:
    An altair chart.
  """
  grouped = df.groupby(df['Task'], sort=False)
  df = grouped.apply(calculate_pareto_metrics)

  # Sample along the Pareto curve, ensuring the longest regions are shown.
  if len(df) > 5000:
    x = 1000
    df = pd.concat([df.nlargest(x, 'total runtime'), df.sample(5000 - x)])

  # Limit columns to greatly reduce the size of the html report.
  columns_used = [
      'task cumsum order', 'task cumsum fraction', 'tooltip', 'Task',
      'task total runtime', 'task num examples', 'Runtime for task'
  ]
  df = df[columns_used]

  # Brushing on the task_scatter plot highlights the same tasks in the Pareto
  # curve.
  brush = alt.selection_interval()
  pareto_by_task = alt.Chart(df).mark_line(size=2).encode(
      x=alt.X(
          'task cumsum order',
          title='The longest-runtime X% of regions',
          axis=alt.Axis(format='%')),
      y=alt.Y(
          'task cumsum fraction',
          title='Account for Y% of the total runtime',
          axis=alt.Axis(format='%')),
      tooltip='tooltip',
      color=alt.condition(brush, 'Task:N', alt.value('lightgray'))).properties(
          title='Pareto curve for each task').interactive()

  # This chart needs to use the same dataframe as the first chart to enable the
  # brushing on one to affect the other. Using max(task) for 'text' is a
  # trick that causes bundling by task to avoid showing multiple overlapping
  # points which otherwise make the text look funky.
  task_scatter = alt.Chart(df).mark_point(size=10).encode(
      x=alt.X('max(task total runtime)', title='Runtime (seconds)'),
      y=alt.Y('task num examples:Q', title='Number of examples'),
      color=alt.condition(brush, 'Task:N', alt.value('lightgray')),
      tooltip=['Task', 'Runtime for task']
  ) \
  .properties(title='Total runtime for each task (drag to highlight)') \
  .add_selection(brush)

  return pareto_by_task | task_scatter


def individual_region_bars(small_df: pd.DataFrame,
                           title: Union[str, Dict[str, str]] = '') -> alt.Chart:
  """Makes a stacked bar chart with runtime of each stage for individual regions.

  Args:
    small_df: A dataframe of regions, each of which will be shown as a bar.
    title: A title for the plot. If a dict, it should contain 'title' and/or
      'subtitle'.

  Returns:
    An altair chart.
  """
  columns_used = ['region', 'Runtime'] + RUNTIME_COLUMNS
  d = small_df[columns_used]
  return alt.Chart(d).transform_fold(
      RUNTIME_COLUMNS, as_=['Stage', 'runtime_by_stage']) \
      .mark_bar().encode(
          x=alt.X('region:N', sort=None),
          y=alt.Y('runtime_by_stage:Q', scale=alt.Scale(type='linear'),
                  title='Runtime (seconds)'),
          fill=alt.Fill('Stage:N', sort=None),
          tooltip='Runtime:N'
      ).properties(title=title)


def selected_longest_and_median_regions(df: pd.DataFrame) -> alt.Chart:
  """Creates stacked bar charts of the top 20 and median 20 regions.

  Args:
    df: A dataframe of all regions.

  Returns:
    An altair chart.
  """
  num_rows = len(df)
  mid = round(num_rows / 2)
  return individual_region_bars(df.iloc[0:20], 'Top runtime regions') \
      | individual_region_bars(df.iloc[mid - 10:mid + 11], 'Median runtime regions')


def top_regions_producing_zero_examples(df: pd.DataFrame) -> alt.Chart:
  """Creates a chart of the top regions that produced zero examples.

  Args:
    df: A dataframe of all regions.

  Returns:
    An altair chart.
  """
  regions_with_zero_examples = df[df['num examples'] == 0]
  runtime_of_zeros = regions_with_zero_examples['total runtime'].sum() / 3600
  total_runtime = df['total runtime'].sum() / 3600
  subtitle = (
      f'Spent {runtime_of_zeros:.2f} hours processing the '
      f'{len(regions_with_zero_examples)} regions that produced no examples, '
      f'which is {runtime_of_zeros / total_runtime * 100:.2f}% of the total '
      f'runtime of {total_runtime:.2f} hours.')
  return individual_region_bars(
      regions_with_zero_examples.nlargest(50, 'total runtime'),
      title={
          'text': 'The longest-running regions that produced no examples',
          'subtitle': subtitle
      })


def write_to_html_report(charts: List[Dict[Text, alt.Chart]], title: str,
                         subtitle: str, html_output: Any) -> None:
  """Makes the html report with all the charts inserted.

  Args:
    charts: A list of altair chart objects.
    title: The title to show at the top of the report.
    subtitle: The subtitle to show just below the title on the report.
    html_output: a writable file object.

  Returns:
    None. Writes into the html_output file object.
  """
  # Start the HTML document.
  html_output.write('<!DOCTYPE html>\n<html>\n<head>')
  # Add dependencies vega and vega-lite, which render the altair charts.
  html_output.write('<script type="text/javascript" src="{}/vega@5"></script>'
                    '\n'.format(VEGA_URL))
  html_output.write(
      '<script type="text/javascript" src="{}/[email protected]"></script>'
      '\n'.format(VEGA_URL))
  html_output.write(
      '<script type="text/javascript" src="{}/vega-embed@6"></script>'
      '\n'.format(VEGA_URL))
  # Add styles (CSS).
  html_output.write(CSS_STYLES)
  html_output.write('</head>\n<body>')
  html_output.write('<h1>{}</h1>\n'.format(title))
  html_output.write('<h2>{}</h2>\n'.format(subtitle))
  # Make a div containing all the charts.
  html_output.write('<div>')
  for chart in charts:
    html_output.write(
        '<div class="chart-container" id="vis_{}"></div>\n'.format(chart['id']))
  html_output.write('</div>')
  # Add JSON vega specs and hook them up to the divs with VegaEmbed.
  html_output.write('<script>\n')
  for chart in charts:
    html_output.write('var spec_{} = {};\n'.format(chart['id'],
                                                   chart['chart'].to_json()))
    download_filename = '{}_{}'.format(title.replace(' ', '_'), chart['id'])
    embed_options = {'mode': 'vega-lite', 'downloadFileName': download_filename}
    html_output.write('vegaEmbed("#vis_{}", spec_{}, {})\n'.format(
        chart['id'], chart['id'], embed_options))
  html_output.write('</script>\n')
  # Close HTML document.
  html_output.write('</body></html>')


def read_data_and_make_dataframes(
    input_path: str) -> Tuple[pd.DataFrame, pd.DataFrame]:
  """Loads data from a file into one dataframe as-is and one by task.

  Args:
    input_path: str, path of the input TSV file (may be sharded).

  Returns:
    df: A dataframe with one row per region.
    by_task: A dataframe with one row per task.
  """
  df = read_sharded_runtime_tsvs(input_path)
  df = calculate_totals(df)
  by_task = summarize_by_task(df)
  return df, by_task


def make_all_charts(
    df: pd.DataFrame,
    by_task: pd.DataFrame) -> List[Dict[Text, Union[str, alt.Chart]]]:
  """Creates charts and puts them in a list with their ID names.

  Args:
    df: A dataframe with one row per region.
    by_task: A dataframe with one row per task.

  Returns:
    list of dicts, each containing a chart and a descriptive ID.
  """
  charts = [{
      'id': 'total_by_stage',
      'chart': totals_by_stage(by_task)
  }, {
      'id': 'pareto_and_runtimes_by_task',
      'chart': pareto_and_runtimes_by_task(df)
  }, {
      'id': 'histogram_by_task',
      'chart': stage_histogram(by_task, title='Stage runtimes for each task')
  }, {
      'id': 'selected_longest_and_median_regions',
      'chart': selected_longest_and_median_regions(df)
  }, {
      'id': 'zero_examples',
      'chart': top_regions_producing_zero_examples(df)
  }]

  # Altair shows a max of 5000 data points.
  if len(df) <= 5000:
    # With up to 5000 points, just show them all.
    charts.extend([{
        'id': 'histogram',
        'chart': stage_histogram(df, title='Runtime by stage for all regions')
    }, {
        'id': 'scatter_grid',
        'chart': correlation_scatter_charts(df, title='Trends for all regions')
    }])
  else:
    # With too many points, make different subsets to show trends better.
    top_100 = df.nlargest(100, 'total runtime')
    top_5000 = df.nlargest(5000, 'total runtime')
    # Sample the bottom 99% to avoid outliers that obscure general trends.
    bottom_99_percent = df.nsmallest(int(len(df) * .99), 'total runtime')
    if len(bottom_99_percent) > 5000:
      bottom_99_percent = bottom_99_percent.sample(5000)
    charts.extend([{
        'id': 'histogram_bottom_99_percent',
        'chart': stage_histogram(
            bottom_99_percent,
            title='Runtime by stage for regions in the bottom 99%')
    }, {
        'id': 'histogram_top_100',
        'chart': stage_histogram(
            top_100, title='Runtime by stage for regions in the top 100')
    }, {
        'id': 'scatter_grid_top_5000',
        'chart': correlation_scatter_charts(
            top_5000, title='Trends for regions in the top 5000')
    }, {
        'id': 'scatter_grid_bottom_99_percent',
        'chart': correlation_scatter_charts(
            bottom_99_percent, title='Trends for regions in the bottom 99%')
    }])
  return charts


def make_report(input_path: str, title: str,
                html_output: tf.io.gfile.GFile) -> None:
  """Reads data, creates charts, and composes the charts into an HTML report.

  Args:
    input_path: Path of the input TSV file (or sharded files).
    title: Title to put at the top of the report.
    html_output: Writable file object where output will be written.
  """
  # Load data into pandas dataframes and add summary columns.
  df, by_task = read_data_and_make_dataframes(input_path)
  # Build all the charts.
  charts = make_all_charts(df, by_task)
  # Write a subtitle with some top-level stats.
  subtitle = (f'Runtime profiling for make_examples on {len(df)} regions '
              f'across {len(by_task)} task{"(s)" if len(by_task) > 1 else ""}')
  # Write the HTML report with all the charts.
  write_to_html_report(
      charts=charts, title=title, subtitle=subtitle, html_output=html_output)


def main(argv: Sequence[str]):
  if len(argv) > 1:
    raise app.UsageError(
        'Command line parsing failure: this script does not accept '
        'positional arguments, but found these extra arguments: "{}".'
        ''.format(str(argv[1:])))

  # Add html to the output path if that is not already the suffix.
  if FLAGS.output.endswith('html'):
    output_filename = FLAGS.output
  else:
    output_filename = f'{FLAGS.output}.html'

  # Start HTML document. Using GFile enables writing to GCS too.
  html_output = tf.io.gfile.GFile(output_filename, 'w')
  make_report(
      input_path=FLAGS.input, title=FLAGS.title, html_output=html_output)
  html_output.close()  # Abstracted out the file open/close to enable testing.
  print('Output written to:', output_filename)


if __name__ == '__main__':
  flags.mark_flags_as_required(['input', 'title'])
  app.run(main)


# Repo: wangyum/anaconda | pkgs/dynd-python-0.7.2-py27_0/lib/python2.7/site-packages/dynd/tests/test_nd_fields.py @ 6e5a0dbead3327661d73a61e85414cf92aa52be6

import sys
import unittest
from dynd import nd, ndt
"""
class TestFields(unittest.TestCase):
    def test_simple(self):
        a = nd.array([
                (1, 2, 'a', 'b'),
                (3, 4, 'ab', 'cd'),
                (5, 6, 'def', 'ghi')],
                type='3 * {x: int32, y: int32, z: string, w: string}')
        # Selecting a single field
        b = nd.fields(a, 'x')
        self.assertEqual(nd.dtype_of(b), ndt.make_struct(
                        [ndt.int32],
                        ['x']))
        self.assertEqual(nd.as_py(b.x), nd.as_py(a.x))
        # Selecting two fields
        b = nd.fields(a, 'z', 'y')
        self.assertEqual(nd.dtype_of(b), ndt.make_struct(
                        [ndt.string, ndt.int32],
                        ['z', 'y']))
        self.assertEqual(nd.as_py(b.z), nd.as_py(a.z))
        self.assertEqual(nd.as_py(b.y), nd.as_py(a.y))
        # Selecting three fields
        b = nd.fields(a, 'w', 'y', 'z')
        self.assertEqual(nd.dtype_of(b), ndt.make_struct(
                        [ndt.string, ndt.int32, ndt.string],
                        ['w', 'y', 'z']))
        self.assertEqual(nd.as_py(b.w), nd.as_py(a.w))
        self.assertEqual(nd.as_py(b.y), nd.as_py(a.y))
        self.assertEqual(nd.as_py(b.z), nd.as_py(a.z))
        # Reordering all four fields
        b = nd.fields(a, 'w', 'y', 'x', 'z')
        self.assertEqual(nd.dtype_of(b), ndt.make_struct(
                        [ndt.string, ndt.int32, ndt.int32, ndt.string],
                        ['w', 'y', 'x', 'z']))
        self.assertEqual(nd.as_py(b.w), nd.as_py(a.w))
        self.assertEqual(nd.as_py(b.y), nd.as_py(a.y))
        self.assertEqual(nd.as_py(b.x), nd.as_py(a.x))
        self.assertEqual(nd.as_py(b.z), nd.as_py(a.z))

    def test_fixed_var(self):
        a = nd.array([
                [(1, 2, 'a', 'b'),
                 (3, 4, 'ab', 'cd')],
                [(5, 6, 'def', 'ghi')],
                [(7, 8, 'alpha', 'beta'),
                 (9, 10, 'X', 'Y'),
                 (11, 12, 'the', 'end')]],
                type='3 * var * {x: int32, y: int32, z: string, w: string}')
        # Selecting a single field
        b = nd.fields(a, 'x')
        self.assertEqual(nd.type_of(b), ndt.make_fixed_dim(3,
                        ndt.make_var_dim(ndt.make_struct(
                            [ndt.int32],
                            ['x']))))
        self.assertEqual(nd.as_py(b.x), nd.as_py(a.x))
        # Selecting two fields
        b = nd.fields(a, 'z', 'y')
        self.assertEqual(nd.type_of(b), ndt.make_fixed_dim(3,
                        ndt.make_var_dim(ndt.make_struct(
                            [ndt.string, ndt.int32],
                            ['z', 'y']))))
        self.assertEqual(nd.as_py(b.z), nd.as_py(a.z))
        self.assertEqual(nd.as_py(b.y), nd.as_py(a.y))
        # Selecting three fields
        b = nd.fields(a, 'w', 'y', 'z')
        self.assertEqual(nd.type_of(b), ndt.make_fixed_dim(3,
                        ndt.make_var_dim(ndt.make_struct(
                            [ndt.string, ndt.int32, ndt.string],
                            ['w', 'y', 'z']))))
        self.assertEqual(nd.as_py(b.w), nd.as_py(a.w))
        self.assertEqual(nd.as_py(b.y), nd.as_py(a.y))
        self.assertEqual(nd.as_py(b.z), nd.as_py(a.z))
        # Reordering all four fields
        b = nd.fields(a, 'w', 'y', 'x', 'z')
        self.assertEqual(nd.type_of(b), ndt.make_fixed_dim(3,
                        ndt.make_var_dim(ndt.make_struct(
                            [ndt.string, ndt.int32, ndt.int32, ndt.string],
                            ['w', 'y', 'x', 'z']))))
        self.assertEqual(nd.as_py(b.w), nd.as_py(a.w))
        self.assertEqual(nd.as_py(b.y), nd.as_py(a.y))
        self.assertEqual(nd.as_py(b.x), nd.as_py(a.x))
        self.assertEqual(nd.as_py(b.z), nd.as_py(a.z))

    def test_bad_field_name(self):
        a = nd.array([
                (1, 2, 'a', 'b'),
                (3, 4, 'ab', 'cd'),
                (5, 6, 'def', 'ghi')],
                type='3 * {x: int32, y: int32, z: string, w: string}')
        self.assertRaises(RuntimeError, nd.fields, a, 'y', 'v')
"""

if __name__ == '__main__':
    unittest.main()


# Repo: nicrie/xeofs | xeofs/pandas/_transformer.py @ 4c0ed49b45794ce0abb641c98b82638b2faa4828

from typing import Union, Iterable, List

import numpy as np
import pandas as pd

from ..models._transformer import _ArrayTransformer, _MultiArrayTransformer


class _DataFrameTransformer(_ArrayTransformer):
    '''`_ArrayTransformer` wrapper for `pandas.DataFrame`.
    '''

    def __init__(self):
        super().__init__()

    def fit(self, X : pd.DataFrame, axis : Union[int, Iterable[int]] = 0):
        if not isinstance(X, pd.DataFrame):
            raise ValueError('This interface is for `pandas.DataFrame` only')
        if isinstance(axis, list):
            axis = axis[0]
        # Set sample and feature index
        if axis == 0:
            self.index_samples = X.index
            self.index_features = X.columns
        elif axis == 1:
            self.index_samples = X.columns
            self.index_features = X.index
        else:
            raise ValueError('axis must be either 0 or 1')
        # Fit the data
        try:
            super().fit(X=X.values, axis=axis)
        except AttributeError:
            err_msg = 'weights must be of type {:}.'.format(repr(pd.DataFrame))
            raise TypeError(err_msg)
        return self

    def transform(self, X : pd.DataFrame) -> np.ndarray:
        try:
            return super().transform(X.values)
        except AttributeError:
            err_msg = 'weights must be of type {:}.'.format(repr(pd.DataFrame))
            raise TypeError(err_msg)

    def fit_transform(self, X : pd.DataFrame, axis : int = 0) -> np.ndarray:
        return self.fit(X=X, axis=axis).transform(X)

    def transform_weights(self, weights : pd.DataFrame) -> np.ndarray:
        try:
            return super().transform_weights(weights.values)
        except AttributeError:
            return super().transform_weights(weights)

    def back_transform(self, X : np.ndarray) -> pd.DataFrame:
        df = super().back_transform(X)
        return pd.DataFrame(
            df,
            index=self.index_samples,
            columns=self.index_features
        )

    def back_transform_eofs(self, X : np.ndarray) -> pd.DataFrame:
        eofs = super().back_transform_eofs(X)
        return pd.DataFrame(
            eofs,
            index=self.index_features,
            columns=range(1, eofs.shape[-1] + 1)
        )

    def back_transform_pcs(self, X : np.ndarray) -> pd.DataFrame:
        pcs = super().back_transform_pcs(X)
        return pd.DataFrame(
            pcs,
            index=self.index_samples,
            columns=range(1, pcs.shape[-1] + 1)
        )


class _MultiDataFrameTransformer(_MultiArrayTransformer):
    'Transform multiple 2D ``pd.DataFrame`` to a single 2D ``np.ndarray``.'

    def __init__(self):
        super().__init__()

    def fit(self, X : Union[pd.DataFrame, List[pd.DataFrame]], axis : Union[int, Iterable[int]] = 0):
        X = self._convert2list(X)
        self.tfs = [_DataFrameTransformer().fit(x, axis=axis) for x in X]
        if len(set([tf.n_valid_samples for tf in self.tfs])) > 1:
            err_msg = 'All individual arrays must have same number of samples.'
            raise ValueError(err_msg)
        self.idx_array_sep = np.cumsum([tf.n_valid_features for tf in self.tfs])
        self.axis_samples = self.tfs[0].axis_samples
        return self

    def transform(self, X : Union[pd.DataFrame, List[pd.DataFrame]]) -> np.ndarray:
        return super().transform(X=X)

    def transform_weights(self, weights : Union[pd.DataFrame, List[pd.DataFrame]]) -> np.ndarray:
        return super().transform_weights(weights=weights)

    def fit_transform(
        self, X : Union[pd.DataFrame, List[pd.DataFrame]],
        axis : Union[int, Iterable[int]] = 0
    ) -> np.ndarray:
        return self.fit(X=X, axis=axis).transform(X)

    def back_transform(self, X : np.ndarray) -> pd.DataFrame:
        return super().back_transform(X=X)

    def back_transform_eofs(self, X : np.ndarray) -> pd.DataFrame:
        return super().back_transform_eofs(X=X)

    def back_transform_pcs(self, X : np.ndarray) -> pd.DataFrame:
        return super().back_transform_pcs(X=X)
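
# Minimal usage sketch (illustrative data; the leading-underscore classes are
# internal to xeofs, so this only shows the fit/transform round trip):
#   df = pd.DataFrame(np.random.rand(10, 3))   # 10 samples x 3 features
#   tf = _DataFrameTransformer().fit(df, axis=0)
#   arr = tf.transform(df)                     # 2D np.ndarray
#   df_back = tf.back_transform(arr)           # back to a DataFrame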


# Repo: FossilizedContainers/fossilized-controller | tests/bogus_python_model.py @ 5aa14112b3728a619a37233906366c1cda2a0a77

import os
import sys
import lipd

# import pythonAdapter, assumes in ../python-adapter/
tests_dir = os.path.dirname(os.path.realpath(__file__))
fc_dir = os.path.dirname(tests_dir)
python_adapter_dir = os.path.join(fc_dir, "python-adapter")
sys.path.append(python_adapter_dir)

import adapter


def fake_model(adapter):
    # check to see inside function
    print("\n---\nStart of the fake_model function\n---\n")

    # the parameters are handed to you by the adapter
    files = adapter.get_files()

    # use the parameters given by the adapter to get the binary data of the LiPD file
    lipd.readLipd(files['weldeab'])

    # get the binary data of the NetCDF file
    net_cdf_path = files['net_cdf']

    # mark the NetCDF file as an output file
    adapter.set_output_files(net_cdf_path)
    adapter.set_output_files("lipd-files\\")
    return


# have to call adapter in the adapter.py file as adapter.adapter
adapter = adapter.global_adapter
adapter.register(fake_model)
adapter.start_server()


# Repo: banne2266/UAV-autopilot-NCTU-2021 | tello_control_ui.py @ 1a25d4add2de9659516d045054935e3b6e04d06d

from PIL import Image
from PIL import ImageTk
import tkinter as tki
from tkinter import Toplevel, Scale
import threading
import datetime
import cv2
import os
import time
import platform


class TelloUI:
    """Wrapper class to enable the GUI."""

    def __init__(self, tello, outputpath):
        """
        Initialize all the elements of the GUI, supported by Tkinter.

        :param tello: class that interacts with the Tello drone.

        Raises:
            RuntimeError: If the Tello rejects the attempt to enter command mode.
        """
        self.tello = tello  # videostream device
        self.outputPath = outputpath  # the path that saves pictures created by clicking the takeSnapshot button
        self.frame = None  # frame read from h264decoder and used for pose recognition
        self.thread = None  # thread of the Tkinter mainloop
        self.stopEvent = None

        # control variables
        self.distance = 0.1  # default distance for 'move' cmd
        self.degree = 30  # default degree for 'cw' or 'ccw' cmd

        # if the flag is TRUE, the auto-takeoff thread will stop waiting for the response from tello
        self.quit_waiting_flag = False

        # initialize the root window and image panel
        self.root = tki.Tk()
        self.panel = None

        # create buttons
        self.btn_snapshot = tki.Button(self.root, text="Snapshot!",
                                       command=self.takeSnapshot)
        self.btn_snapshot.pack(side="bottom", fill="both",
                               expand="yes", padx=10, pady=5)

        self.btn_pause = tki.Button(self.root, text="Pause", relief="raised", command=self.pauseVideo)
        self.btn_pause.pack(side="bottom", fill="both",
                            expand="yes", padx=10, pady=5)

        self.btn_landing = tki.Button(
            self.root, text="Open Command Panel", relief="raised", command=self.openCmdWindow)
        self.btn_landing.pack(side="bottom", fill="both",
                              expand="yes", padx=10, pady=5)

        # start a thread that constantly polls the video sensor for
        # the most recently read frame
        self.stopEvent = threading.Event()
        self.thread = threading.Thread(target=self.videoLoop, args=())
        self.thread.start()

        # set a callback to handle when the window is closed
        self.root.wm_title("TELLO Controller")
        self.root.wm_protocol("WM_DELETE_WINDOW", self.onClose)

        # the sending_command thread will send 'command' to tello every 5 seconds
        self.sending_command_thread = threading.Thread(target=self._sendingCommand)

    def videoLoop(self):
        """
        The mainloop thread of Tkinter.

        Raises:
            RuntimeError: To get around a RunTime error that Tkinter throws due to threading.
        """
        try:
            # start the thread that gets the GUI image and draws the skeleton
            time.sleep(0.5)
            self.sending_command_thread.start()
            while not self.stopEvent.is_set():
                system = platform.system()

                # read the frame for GUI show
                self.frame = self.tello.read()
                if self.frame is None or self.frame.size == 0:
                    continue

                # transfer the format from frame to image
                image = Image.fromarray(self.frame)

                # we found a compatibility problem between Tkinter, PIL and macOS: it will
                # sometimes result in a very long period spent in the "ImageTk.PhotoImage"
                # function, so for macOS we start a new thread to execute the
                # _updateGUIImage function.
                if system == "Windows" or system == "Linux":
                    self._updateGUIImage(image)
                else:
                    thread_tmp = threading.Thread(target=self._updateGUIImage, args=(image,))
                    thread_tmp.start()
                    time.sleep(0.03)
        except RuntimeError as e:
            print("[INFO] caught a RuntimeError")

    def _updateGUIImage(self, image):
        """
        Main operation to initialize the image object and update the GUI panel.
        """
        image = ImageTk.PhotoImage(image)
        # if the panel is None, we need to initialize it
        if self.panel is None:
            self.panel = tki.Label(image=image)
            self.panel.image = image
            self.panel.pack(side="left", padx=10, pady=10)
        # otherwise, simply update the panel
        else:
            self.panel.configure(image=image)
            self.panel.image = image

    def _sendingCommand(self):
        """
        start a while loop that sends 'command' to tello every 5 seconds
        """
        while True:
            self.tello.send_command('command')
            time.sleep(5)

    def _setQuitWaitingFlag(self):
        """
        set the variable to TRUE; it will stop the computer waiting for a response from tello
        """
        self.quit_waiting_flag = True

    def openCmdWindow(self):
        """
        open the cmd window and initialize all the buttons and text
        """
        panel = Toplevel(self.root)
        panel.wm_title("Command Panel")

        # create text input entry
        text0 = tki.Label(panel,
                          text='This Controller maps keyboard inputs to Tello control commands\n'
                               'Adjust the trackbar to reset distance and degree parameters',
                          font='Helvetica 10 bold'
                          )
        text0.pack(side='top')

        text1 = tki.Label(panel, text=
                          'W - Move Tello Up\t\t\tArrow Up - Move Tello Forward\n'
                          'S - Move Tello Down\t\t\tArrow Down - Move Tello Backward\n'
                          'A - Rotate Tello Counter-Clockwise\tArrow Left - Move Tello Left\n'
                          'D - Rotate Tello Clockwise\t\tArrow Right - Move Tello Right',
                          justify="left")
        text1.pack(side="top")

        self.btn_landing = tki.Button(
            panel, text="Land", relief="raised", command=self.telloLanding)
        self.btn_landing.pack(side="bottom", fill="both",
                              expand="yes", padx=10, pady=5)

        self.btn_takeoff = tki.Button(
            panel, text="Takeoff", relief="raised", command=self.telloTakeOff)
        self.btn_takeoff.pack(side="bottom", fill="both",
                              expand="yes", padx=10, pady=5)

        # binding arrow keys to drone control
        self.tmp_f = tki.Frame(panel, width=100, height=2)
        self.tmp_f.bind('<KeyPress-w>', self.on_keypress_w)
        self.tmp_f.bind('<KeyPress-s>', self.on_keypress_s)
        self.tmp_f.bind('<KeyPress-a>', self.on_keypress_a)
        self.tmp_f.bind('<KeyPress-d>', self.on_keypress_d)
        self.tmp_f.bind('<KeyPress-Up>', self.on_keypress_up)
        self.tmp_f.bind('<KeyPress-Down>', self.on_keypress_down)
        self.tmp_f.bind('<KeyPress-Left>', self.on_keypress_left)
        self.tmp_f.bind('<KeyPress-Right>', self.on_keypress_right)
        self.tmp_f.pack(side="bottom")
        self.tmp_f.focus_set()

        self.btn_landing = tki.Button(
            panel, text="Flip", relief="raised", command=self.openFlipWindow)
        self.btn_landing.pack(side="bottom", fill="both",
                              expand="yes", padx=10, pady=5)

        self.distance_bar = Scale(panel, from_=0.02, to=5, tickinterval=0.01,
                                  digits=3, label='Distance(m)',
                                  resolution=0.01)
        self.distance_bar.set(0.2)
        self.distance_bar.pack(side="left")

        self.btn_distance = tki.Button(panel, text="Reset Distance", relief="raised",
                                       command=self.updateDistancebar,
                                       )
        self.btn_distance.pack(side="left", fill="both",
                               expand="yes", padx=10, pady=5)

        self.degree_bar = Scale(panel, from_=1, to=360, tickinterval=10, label='Degree')
        self.degree_bar.set(30)
        self.degree_bar.pack(side="right")

        self.btn_distance = tki.Button(panel, text="Reset Degree", relief="raised",
                                       command=self.updateDegreebar)
        self.btn_distance.pack(side="right", fill="both",
                               expand="yes", padx=10, pady=5)

    def openFlipWindow(self):
        """
        open the flip window and initialize all the buttons and text
        """
        panel = Toplevel(self.root)
        panel.wm_title("Gesture Recognition")

        self.btn_flipl = tki.Button(
            panel, text="Flip Left", relief="raised", command=self.telloFlip_l)
        self.btn_flipl.pack(side="bottom", fill="both",
                            expand="yes", padx=10, pady=5)

        self.btn_flipr = tki.Button(
            panel, text="Flip Right", relief="raised", command=self.telloFlip_r)
        self.btn_flipr.pack(side="bottom", fill="both",
                            expand="yes", padx=10, pady=5)

        self.btn_flipf = tki.Button(
            panel, text="Flip Forward", relief="raised", command=self.telloFlip_f)
        self.btn_flipf.pack(side="bottom", fill="both",
                            expand="yes", padx=10, pady=5)

        self.btn_flipb = tki.Button(
            panel, text="Flip Backward", relief="raised", command=self.telloFlip_b)
        self.btn_flipb.pack(side="bottom", fill="both",
                            expand="yes", padx=10, pady=5)

    def takeSnapshot(self):
        """
        save the current frame of the video as a jpg file and put it into outputpath
        """
        # grab the current timestamp and use it to construct the filename
        ts = datetime.datetime.now()
        filename = "{}.jpg".format(ts.strftime("%Y-%m-%d_%H-%M-%S"))
        p = os.path.sep.join((self.outputPath, filename))

        # save the file
        cv2.imwrite(p, cv2.cvtColor(self.frame, cv2.COLOR_RGB2BGR))
        print("[INFO] saved {}".format(filename))

    def pauseVideo(self):
        """
        Toggle the freeze/unfreeze of video
        """
        if self.btn_pause.config('relief')[-1] == 'sunken':
            self.btn_pause.config(relief="raised")
            self.tello.video_freeze(False)
        else:
            self.btn_pause.config(relief="sunken")
            self.tello.video_freeze(True)

    def telloTakeOff(self):
        return self.tello.takeoff()

    def telloLanding(self):
        return self.tello.land()

    def telloFlip_l(self):
        return self.tello.flip('l')

    def telloFlip_r(self):
        return self.tello.flip('r')

    def telloFlip_f(self):
        return self.tello.flip('f')

    def telloFlip_b(self):
        return self.tello.flip('b')

    def telloCW(self, degree):
        return self.tello.rotate_cw(degree)

    def telloCCW(self, degree):
        return self.tello.rotate_ccw(degree)

    def telloMoveForward(self, distance):
        return self.tello.move_forward(distance)

    def telloMoveBackward(self, distance):
        return self.tello.move_backward(distance)

    def telloMoveLeft(self, distance):
        return self.tello.move_left(distance)

    def telloMoveRight(self, distance):
        return self.tello.move_right(distance)

    def telloUp(self, dist):
        return self.tello.move_up(dist)

    def telloDown(self, dist):
        return self.tello.move_down(dist)

    def updateTrackBar(self):
        self.my_tello_hand.setThr(self.hand_thr_bar.get())

    def updateDistancebar(self):
        self.distance = self.distance_bar.get()
        print('reset distance to %.1f' % self.distance)

    def updateDegreebar(self):
        self.degree = self.degree_bar.get()
        print('reset degree to %d' % self.degree)

    def on_keypress_w(self, event):
        print("up %d m" % self.distance)
        self.telloUp(self.distance)

    def on_keypress_s(self, event):
        print("down %d m" % self.distance)
        self.telloDown(self.distance)

    def on_keypress_a(self, event):
        print("ccw %d degree" % self.degree)
        self.tello.rotate_ccw(self.degree)

    def on_keypress_d(self, event):
        print("cw %d degree" % self.degree)
        self.tello.rotate_cw(self.degree)

    def on_keypress_up(self, event):
        print("forward %d m" % self.distance)
        self.telloMoveForward(self.distance)

    def on_keypress_down(self, event):
        print("backward %d m" % self.distance)
        self.telloMoveBackward(self.distance)

    def on_keypress_left(self, event):
        print("left %d m" % self.distance)
        self.telloMoveLeft(self.distance)

    def on_keypress_right(self, event):
        print("right %d m" % self.distance)
        self.telloMoveRight(self.distance)

    def on_keypress_enter(self, event):
        if self.frame is not None:
            self.registerFace()
        self.tmp_f.focus_set()

    def onClose(self):
        """
        set the stop event, cleanup the camera, and allow the rest of
        the quit process to continue
        """
        print("[INFO] closing...")
        self.stopEvent.set()
        del self.tello
        self.root.quit()
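
# Minimal usage sketch (hypothetical: this file does not define the Tello class,
# it only expects an object with read()/send_command()/takeoff()/... methods):
#   from tello import Tello          # assumed wrapper module
#   ui = TelloUI(Tello(), outputpath='./snapshots/')
#   ui.root.mainloop()               # hand control to the Tkinter event loop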


# Repo: robin-gdwl/examples_topop-desc | __temp/examples/rhino/mesh-stanford-dragon.py @ 3a10dfc891c3e6998029c7baf8a5a7a501870fe2

import compas
import compas_rhino
from compas.datastructures import Mesh
mesh = Mesh.from_ply(compas.get('stanford_dragon.ply'))
compas_rhino.mesh_draw(mesh)
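
# Note (summarized from the COMPAS library, not this script): compas.get
# resolves a sample-data filename such as 'stanford_dragon.ply' to a local
# path of the data bundled with (or fetched by) COMPAS, so no manual download
# of the mesh is needed before Mesh.from_ply can read it.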


# Repo: vbsteja/code | Python/ML_DL/DL/Neural-Networks-Demystified-master/partOne.py @ 0c8f4dc579f5de21b6c55fe6e65c3c8eb5473687

# Neural Networks Demystified
# Part 1: Data + Architecture
#
# Supporting code for short YouTube series on artificial neural networks.
#
# Stephen Welch
# @stephencwelch
import numpy as np
# X = (hours sleeping, hours studying), y = Score on test
X = np.array(([3,5], [5,1], [10,2]), dtype=float)
y = np.array(([75], [82], [93]), dtype=float)
# Normalize
X = X/np.amax(X, axis=0)
y = y/100  # Max test score is 100
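
# Worked check of the normalization above: np.amax(X, axis=0) is [10., 5.],
# so the first sample [3, 5] becomes [0.3, 1.0]; every input now lies in
# [0, 1], matching y, which is divided by the maximum possible score of 100.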


# Repo: benstear/manubot | manubot/process/util.py @ df184a5c7e5eb98894a3edb43d9772d1ac3e01ab

import json
import logging
import os
import pathlib
import re
import textwrap
import warnings
from typing import List, Optional

import jinja2
import pandas
import requests
import requests_cache
import yaml

from manubot.util import read_serialized_data, read_serialized_dict
from manubot.process.bibliography import load_manual_references
from manubot.process.ci import get_continuous_integration_parameters
from manubot.process.metadata import (
    get_header_includes,
    get_thumbnail_url,
    get_manuscript_urls,
    get_software_versions,
)
from manubot.process.manuscript import (
    datetime_now,
    get_manuscript_stats,
    get_text,
)
from manubot.cite.citekey import (
    citekey_to_csl_item,
    shorten_citekey,
    is_valid_citekey,
    standardize_citekey,
)


def check_collisions(citekeys_df):
    """
    Check for short_citekey hash collisions
    """
    collision_df = citekeys_df[["standard_citekey", "short_citekey"]].drop_duplicates()
    collision_df = collision_df[collision_df.short_citekey.duplicated(keep=False)]
    if not collision_df.empty:
        logging.error(f"OMF! Hash collision. Congratulations.\n{collision_df}")
    return collision_df
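
# Context for the check above (inferred from the imports, not spelled out in
# this file): shorten_citekey derives a short hash-based key from each
# standard_citekey, so two different standard citekeys can, in principle, map
# to the same short_citekey; the duplicated() filter surfaces exactly those rows.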


def check_multiple_citation_strings(citekeys_df):
    """
    Identify different citation strings referring to the same reference.
    """
    message = textwrap.dedent(
        f"""\
        {len(citekeys_df)} unique citation strings extracted from text
        {citekeys_df.standard_citekey.nunique()} unique standard citations\
        """
    )
    logging.info(message)
    multi_df = citekeys_df[citekeys_df.standard_citekey.duplicated(keep=False)]
    if not multi_df.empty:
        table = multi_df.to_string(
            index=False, columns=["standard_citekey", "manuscript_citekey"]
        )
        logging.warning(f"Multiple citekeys detected for the same reference:\n{table}")
    return multi_df


def read_variable_files(paths: List[str], variables: Optional[dict] = None) -> dict:
    """
    Read multiple serialized data files into a user_variables dictionary.
    Provide `paths` (a list of URLs or local file paths).
    Paths can optionally have a namespace prepended.
    For example:

    ```python
    paths = [
        'https://git.io/vbkqm',  # update the dictionary's top-level
        'namespace_1=https://git.io/vbkqm',  # store under 'namespace_1' key
        'namespace_2=some_local_path.json',  # store under 'namespace_2' key
    ]
    ```

    If a namespace is not provided, the JSON must contain a dictionary as its
    top level. Namespaces should consist only of ASCII alphanumeric characters
    (includes underscores, first character cannot be numeric).

    Pass a dictionary to `variables` to update an existing dictionary rather
    than create a new dictionary.
    """
    if variables is None:
        variables = {}
    for path in paths:
        logging.info(f"Reading user-provided templating variables at {path!r}")
        # Match only namespaces that are valid jinja2 variable names
        # http://jinja.pocoo.org/docs/2.10/api/#identifier-naming
        match = re.match(r"([a-zA-Z_][a-zA-Z0-9_]*)=(.+)", path)
        if match:
            namespace, path = match.groups()
            logging.info(
                f"Using the {namespace!r} namespace for template variables from {path!r}"
            )
        try:
            if match:
                obj = {namespace: read_serialized_data(path)}
            else:
                obj = read_serialized_dict(path)
        except Exception:
            logging.exception(f"Error reading template variables from {path!r}")
            continue
        assert isinstance(obj, dict)
        conflicts = variables.keys() & obj.keys()
        if conflicts:
            logging.warning(
                f"Template variables in {path!r} overwrite existing "
                "values for the following keys:\n" + "\n".join(conflicts)
            )
        variables.update(obj)
    logging.debug(
        f"Reading user-provided templating variables complete:\n"
        f"{json.dumps(variables, indent=2, ensure_ascii=False)}"
    )
    return variables


def add_author_affiliations(variables: dict) -> dict:
    """
    Edit variables to contain numbered author affiliations. Specifically,
    add a list of affiliation_numbers for each author and add a list of
    affiliations to the top-level of variables. If no authors have any
    affiliations, variables is left unmodified.
    """
    rows = list()
    for author in variables["authors"]:
        if "affiliations" not in author:
            continue
        if not isinstance(author["affiliations"], list):
            warnings.warn(
                f"Expected list for {author['name']}'s affiliations. "
                f"Assuming multiple affiliations are `; ` separated. "
                f"Please switch affiliations to a list.",
                category=DeprecationWarning,
            )
            author["affiliations"] = author["affiliations"].split("; ")
        for affiliation in author["affiliations"]:
            rows.append((author["name"], affiliation))
    if not rows:
        return variables
    affil_map_df = pandas.DataFrame(rows, columns=["name", "affiliation"])
    affiliation_df = affil_map_df[["affiliation"]].drop_duplicates()
    affiliation_df["affiliation_number"] = range(1, 1 + len(affiliation_df))
    affil_map_df = affil_map_df.merge(affiliation_df)
    name_to_numbers = {
        name: sorted(df.affiliation_number) for name, df in affil_map_df.groupby("name")
    }
    for author in variables["authors"]:
        author["affiliation_numbers"] = name_to_numbers.get(author["name"], [])
    variables["affiliations"] = affiliation_df.to_dict(orient="records")
    return variables
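
# Illustrative input/output (author names invented for the example): given
#   variables = {"authors": [{"name": "A", "affiliations": ["X", "Y"]},
#                            {"name": "B", "affiliations": ["Y"]}]}
# the function assigns affiliation_numbers [1, 2] to A and [2] to B, and sets
#   variables["affiliations"] = [{"affiliation": "X", "affiliation_number": 1},
#                                {"affiliation": "Y", "affiliation_number": 2}]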
def load_variables(args) -> dict:
"""
Read `metadata.yaml` and files specified by `--template-variables-path` to generate
manuscript variables available for jinja2 templating.
Returns a dictionary, refered to as `variables`, with the following keys:
- `pandoc`: a dictionary for passing options to Pandoc via the `yaml_metadata_block`.
Fields in `pandoc` are either generated by Manubot or hard-coded by the user if `metadata.yaml`
includes a `pandoc` dictionary.
- `manubot`: a dictionary for manubot-related information and metadata.
Fields in `manubot` are either generated by Manubot or hard-coded by the user if `metadata.yaml`
includes a `manubot` dictionary.
- All fields from a manuscript's `metadata.yaml` that are not interpreted by Manubot are
copied to `variables`. Interpreted fields include `pandoc`, `manubot`, `title`,
`keywords`, `authors` (formerly `author_info`, now deprecated), `lang`, and `thumbnail`.
- User-specified fields inserted according to the `--template-variables-path` option.
User-specified variables take highest precedence and can overwrite values for existing
keys like `pandoc` or `manubot` (dangerous).
"""
# Generated manuscript variables
variables = {"pandoc": {}, "manubot": {}}
# Read metadata which contains pandoc_yaml_metadata
# as well as authors information.
if args.meta_yaml_path.is_file():
metadata = read_serialized_dict(args.meta_yaml_path)
else:
metadata = {}
logging.warning(
f"missing {args.meta_yaml_path} file with yaml_metadata_block for pandoc"
)
# Interpreted keys that are intended for pandoc
move_to_pandoc = "title", "keywords", "lang"
for key in move_to_pandoc:
if key in metadata:
variables["pandoc"][key] = metadata.pop(key)
# Add date to metadata
now = datetime_now()
logging.info(
f"Using {now:%Z} timezone.\n"
f"Dating manuscript with the current datetime: {now.isoformat()}"
)
variables["pandoc"]["date-meta"] = now.date().isoformat()
variables["manubot"]["date"] = f"{now:%B} {now.day}, {now.year}"
# Process authors metadata
if "author_info" in metadata:
authors = metadata.pop("author_info", [])
warnings.warn(
"metadata.yaml: 'author_info' is deprecated. Use 'authors' instead.",
category=DeprecationWarning,
)
else:
authors = metadata.pop("authors", [])
if authors is None:
authors = []
variables["pandoc"]["author-meta"] = [author["name"] for author in authors]
variables["manubot"]["authors"] = authors
add_author_affiliations(variables["manubot"])
# Set repository version metadata for CI builds
ci_params = get_continuous_integration_parameters()
if ci_params:
variables["manubot"]["ci_source"] = ci_params
# Add manuscript URLs
variables["manubot"].update(get_manuscript_urls(metadata.pop("html_url", None)))
# Add software versions
variables["manubot"].update(get_software_versions())
# Add thumbnail URL if present
thumbnail_url = get_thumbnail_url(metadata.pop("thumbnail", None))
if thumbnail_url:
variables["manubot"]["thumbnail_url"] = thumbnail_url
# Update variables with metadata.yaml pandoc/manubot dicts
for key in "pandoc", "manubot":
dict_ = metadata.pop(key, {})
if not isinstance(dict_, dict):
logging.warning(
f"load_variables expected metadata.yaml field {key!r} to be a dict."
f"Received a {dict_.__class__.__name__!r} instead."
)
continue
variables[key].update(dict_)
# Update variables with uninterpreted metadata.yaml fields
variables.update(metadata)
# Update variables with user-provided variables here
variables = read_variable_files(args.template_variables_path, variables)
# Add header-includes metadata with <meta> information for the HTML output's <head>
variables["pandoc"]["header-includes"] = get_header_includes(variables)
assert args.skip_citations
# Extend Pandoc's metadata.bibliography field with manual references paths
bibliographies = variables["pandoc"].get("bibliography", [])
if isinstance(bibliographies, str):
bibliographies = [bibliographies]
assert isinstance(bibliographies, list)
bibliographies.extend(args.manual_references_paths)
bibliographies = list(map(os.fspath, bibliographies))
variables["pandoc"]["bibliography"] = bibliographies
# enable pandoc-manubot-cite option to write bibliography to a file
variables["pandoc"]["manubot-output-bibliography"] = os.fspath(args.references_path)
variables["pandoc"]["manubot-output-citekeys"] = os.fspath(args.citations_path)
variables["pandoc"]["manubot-requests-cache-path"] = os.fspath(
args.requests_cache_path
)
variables["pandoc"]["manubot-clear-requests-cache"] = args.clear_requests_cache
return variables
def get_citekeys_df(citekeys: list, citekey_aliases: dict = {}):
"""
Generate and return citekeys_df.
citekeys_df is a pandas.DataFrame with the following columns:
- manuscript_citekey: citation keys extracted from the manuscript content files.
- detagged_citekey: manuscript_citekey but with tag citekeys dereferenced
- standard_citekey: detagged_citekey standardized
- short_citekey: standard_citekey hashed to create a shortened citekey
"""
citekeys_df = pandas.DataFrame(
{"manuscript_citekey": list(citekeys)}
).drop_duplicates()
citekeys_df["detagged_citekey"] = citekeys_df.manuscript_citekey.map(
lambda citekey: citekey_aliases.get(citekey, citekey)
)
for citation in citekeys_df.detagged_citekey:
is_valid_citekey(citation, allow_raw=True)
citekeys_df["standard_citekey"] = citekeys_df.detagged_citekey.map(
standardize_citekey
)
citekeys_df["short_citekey"] = citekeys_df.standard_citekey.map(shorten_citekey)
citekeys_df = citekeys_df.sort_values(["standard_citekey", "detagged_citekey"])
check_collisions(citekeys_df)
check_multiple_citation_strings(citekeys_df)
return citekeys_df
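# Illustrative flow for a single citekey (the alias target is hypothetical):
#   manuscript_citekey "tag:deep-review" --citekey_aliases--> detagged_citekey
#   "doi:10.1098/rsif.2017.0387" --standardize_citekey--> standard_citekey
#   --shorten_citekey--> a short hashed citekey used in the rendered manuscript.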
def read_citations_tsv(path) -> dict:
"""
Read citekey aliases from a citation-tags.tsv file.
"""
if not path.is_file():
logging.info(
f"no citation tags file at {path} "
"Not reading citekey_aliases from citation-tags.tsv."
)
return {}
tag_df = pandas.read_csv(path, sep="\t")
na_rows_df = tag_df[tag_df.isnull().any(axis="columns")]
if not na_rows_df.empty:
logging.error(
f"{path} contains rows with missing values:\n"
f"{na_rows_df}\n"
"This error can be caused by using spaces rather than tabs to delimit fields.\n"
"Proceeding to reread TSV with delim_whitespace=True."
)
tag_df = pandas.read_csv(path, delim_whitespace=True)
tag_df["manuscript_citekey"] = "tag:" + tag_df.tag
tag_df = tag_df.rename(columns={"citation": "detagged_citekey"})
citekey_aliases = dict(
zip(tag_df["manuscript_citekey"], tag_df["detagged_citekey"])
)
return citekey_aliases
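# Expected citation-tags.tsv layout (tab-delimited; the data row is illustrative):
#   tag            citation
#   deep-review    doi:10.1098/rsif.2017.0387
# which yields {"tag:deep-review": "doi:10.1098/rsif.2017.0387"}.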
def write_citekeys_tsv(citekeys_df, path):
if not path:
return
citekeys_df.to_csv(path, sep="\t", index=False)
def _citation_tags_to_reference_links(args) -> str:
"""
Convert citation-tags.tsv to markdown reference link syntax
"""
citekey_aliases = read_citations_tsv(args.citation_tags_path)
if not citekey_aliases:
return ""
text = "\n\n"
for key, value in citekey_aliases.items():
text += f"[@{key}]: {value}\n"
logging.warning(
"citation-tags.tsv is deprecated. "
f"Consider deleting citation-tags.tsv and inserting the following paragraph into your Markdown content:{text}"
)
return text
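# Continuing the example above, the emitted markdown reference link would be:
#   [@tag:deep-review]: doi:10.1098/rsif.2017.0387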
def generate_csl_items(
citekeys: list,
manual_refs: dict = {},
requests_cache_path: Optional[str] = None,
clear_requests_cache: Optional[bool] = False,
) -> list:
"""
    Generate CSL (citeproc) items for standard_citekeys in citekeys_df.
Parameters:
- citekeys: list of standard_citekeys
- manual_refs: mapping from standard_citekey to csl_item for manual references
- requests_cache_path: path for the requests cache database.
Passed as cache_name to `requests_cache.install_cache`.
requests_cache may append an extension to this path, so it is not always the exact
path to the cache. If None, do not use requests_cache.
- clear_requests_cache: If True, clear the requests cache before generating citekey metadata.
"""
# Deduplicate citations
citekeys = list(dict.fromkeys(citekeys))
# Install cache
if requests_cache_path is not None:
requests # require `import requests` in case this is essential for monkey patching by requests_cache.
requests_cache.install_cache(requests_cache_path, include_get_headers=True)
cache = requests_cache.get_cache()
if clear_requests_cache:
logging.info("Clearing requests-cache")
requests_cache.clear()
logging.info(
f"requests-cache starting with {len(cache.responses)} cached responses"
)
csl_items = list()
failures = list()
for standard_citekey in citekeys:
if standard_citekey in manual_refs:
csl_items.append(manual_refs[standard_citekey])
continue
elif standard_citekey.startswith("raw:"):
logging.error(
f"CSL JSON Data with a standard_citekey of {standard_citekey!r} not found in manual-references.json. "
"Metadata must be provided for raw citekeys."
)
failures.append(standard_citekey)
try:
csl_item = citekey_to_csl_item(standard_citekey)
csl_items.append(csl_item)
except Exception:
logging.exception(f"Citeproc retrieval failure for {standard_citekey!r}")
failures.append(standard_citekey)
# Uninstall cache
if requests_cache_path is not None:
logging.info(
f"requests-cache finished with {len(cache.responses)} cached responses"
)
requests_cache.uninstall_cache()
if failures:
message = "CSL JSON Data retrieval failed for the following standardized citation keys:\n{}".format(
"\n".join(failures)
)
logging.error(message)
return csl_items
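# Hedged usage sketch (the citekeys are illustrative):
#   csl_items = generate_csl_items(
#       ["doi:10.7717/peerj.338", "arxiv:1806.05726"],
#       requests_cache_path=None,  # skip the on-disk requests cache
#   )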
def _generate_csl_items(args, citekeys_df):
"""
    Generate CSL (citeproc) items for standard_citekeys in citekeys_df.
Writes references.json to disk and logs warnings for potential problems.
"""
# Read manual references (overrides) in JSON CSL
manual_refs = load_manual_references(args.manual_references_paths)
# Retrieve CSL Items
csl_items = generate_csl_items(
citekeys=citekeys_df.standard_citekey.unique(),
manual_refs=manual_refs,
requests_cache_path=args.requests_cache_path,
clear_requests_cache=args.clear_requests_cache,
)
# Write CSL JSON bibliography for Pandoc.
write_csl_json(csl_items, args.references_path)
return csl_items
def write_csl_json(csl_items, path):
"""
Write CSL Items to a JSON file at `path`.
If `path` evaluates as False, do nothing.
"""
if not path:
return
path = pathlib.Path(path)
with path.open("w", encoding="utf-8") as write_file:
json.dump(csl_items, write_file, indent=2, ensure_ascii=False)
write_file.write("\n")
def template_with_jinja2(text, variables):
"""
Template using jinja2 with the variables dictionary unpacked as keyword
arguments.
"""
jinja_environment = jinja2.Environment(
loader=jinja2.BaseLoader(),
undefined=jinja2.make_logging_undefined(logging.getLogger()),
autoescape=False,
comment_start_string="{##",
comment_end_string="##}",
extensions=["jinja2.ext.do", "jinja2.ext.loopcontrols"],
)
template = jinja_environment.from_string(text)
return template.render(**variables)
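# Example (illustrative), noting the non-default comment delimiters configured above:
#   template_with_jinja2("Hello {{ name }}!{## dropped ##}", {"name": "world"})
#   returns "Hello world!" with the {## ... ##} comment stripped.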
def prepare_manuscript(args):
"""
Compile manuscript, creating manuscript.md and references.json as inputs
for pandoc.
"""
text = get_text(args.content_directory)
assert args.skip_citations
text += _citation_tags_to_reference_links(args)
variables = load_variables(args)
variables["manubot"]["manuscript_stats"] = get_manuscript_stats(text)
with args.variables_path.open("w", encoding="utf-8") as write_file:
json.dump(variables, write_file, ensure_ascii=False, indent=2)
write_file.write("\n")
text = template_with_jinja2(text, variables)
# Write manuscript for pandoc
with args.manuscript_path.open("w", encoding="utf-8") as write_file:
yaml.dump(
variables["pandoc"],
write_file,
default_flow_style=False,
explicit_start=True,
explicit_end=True,
width=float("inf"),
)
write_file.write("\n")
write_file.write(text)
| [((59, 4, 59, 25), 'logging.info', 'logging.info', ({(59, 17, 59, 24): 'message'}, {}), '(message)', False, 'import logging\n'), ((149, 19, 149, 74), 'pandas.DataFrame', 'pandas.DataFrame', (), '', False, 'import pandas\n'), ((202, 10, 202, 24), 'manubot.process.manuscript.datetime_now', 'datetime_now', ({}, {}), '()', False, 'from manubot.process.manuscript import datetime_now, get_manuscript_stats, get_text\n'), ((226, 16, 226, 55), 'manubot.process.ci.get_continuous_integration_parameters', 'get_continuous_integration_parameters', ({}, {}), '()', False, 'from manubot.process.ci import get_continuous_integration_parameters\n'), ((259, 45, 259, 75), 'manubot.process.metadata.get_header_includes', 'get_header_includes', ({(259, 65, 259, 74): 'variables'}, {}), '(variables)', False, 'from manubot.process.metadata import get_header_includes, get_thumbnail_url, get_manuscript_urls, get_software_versions\n'), ((271, 57, 271, 88), 'os.fspath', 'os.fspath', ({(271, 67, 271, 87): 'args.references_path'}, {}), '(args.references_path)', False, 'import os\n'), ((272, 53, 272, 83), 'os.fspath', 'os.fspath', ({(272, 63, 272, 82): 'args.citations_path'}, {}), '(args.citations_path)', False, 'import os\n'), ((273, 57, 275, 5), 'os.fspath', 'os.fspath', ({(274, 8, 274, 32): 'args.requests_cache_path'}, {}), '(args.requests_cache_path)', False, 'import os\n'), ((318, 13, 318, 44), 'pandas.read_csv', 'pandas.read_csv', (), '', False, 'import pandas\n'), ((352, 4, 355, 5), 'logging.warning', 'logging.warning', ({(353, 8, 354, 118): 'f"""citation-tags.tsv is deprecated. Consider deleting citation-tags.tsv and inserting the following paragraph into your Markdown content:{text}"""'}, {}), "(\n f'citation-tags.tsv is deprecated. Consider deleting citation-tags.tsv and inserting the following paragraph into your Markdown content:{text}'\n )", False, 'import logging\n'), ((434, 18, 434, 70), 'manubot.process.bibliography.load_manual_references', 'load_manual_references', ({(434, 41, 434, 69): 'args.manual_references_paths'}, {}), '(args.manual_references_paths)', False, 'from manubot.process.bibliography import load_manual_references\n'), ((456, 11, 456, 29), 'pathlib.Path', 'pathlib.Path', ({(456, 24, 456, 28): 'path'}, {}), '(path)', False, 'import pathlib\n'), ((484, 11, 484, 43), 'manubot.process.manuscript.get_text', 'get_text', ({(484, 20, 484, 42): 'args.content_directory'}, {}), '(args.content_directory)', False, 'from manubot.process.manuscript import datetime_now, get_manuscript_stats, get_text\n'), ((489, 47, 489, 73), 'manubot.process.manuscript.get_manuscript_stats', 'get_manuscript_stats', ({(489, 68, 489, 72): 'text'}, {}), '(text)', False, 'from manubot.process.manuscript import datetime_now, get_manuscript_stats, get_text\n'), ((45, 8, 45, 79), 'logging.error', 'logging.error', ({(45, 22, 45, 78): 'f"""OMF! Hash collision. Congratulations.\n{collision_df}"""'}, {}), '(f"""OMF! Hash collision. 
Congratulations.\n{collision_df}""")', False, 'import logging\n'), ((65, 8, 65, 87), 'logging.warning', 'logging.warning', ({(65, 24, 65, 86): 'f"""Multiple citekeys detected for the same reference:\n{table}"""'}, {}), '(\n f"""Multiple citekeys detected for the same reference:\n{table}""")', False, 'import logging\n'), ((94, 8, 94, 79), 'logging.info', 'logging.info', ({(94, 21, 94, 78): 'f"""Reading user-provided templating variables at {path!r}"""'}, {}), "(f'Reading user-provided templating variables at {path!r}')", False, 'import logging\n'), ((97, 16, 97, 64), 're.match', 're.match', ({(97, 25, 97, 57): '"""([a-zA-Z_][a-zA-Z0-9_]*)=(.+)"""', (97, 59, 97, 63): 'path'}, {}), "('([a-zA-Z_][a-zA-Z0-9_]*)=(.+)', path)", False, 'import re\n'), ((188, 19, 188, 60), 'manubot.util.read_serialized_dict', 'read_serialized_dict', ({(188, 40, 188, 59): 'args.meta_yaml_path'}, {}), '(args.meta_yaml_path)', False, 'from manubot.util import read_serialized_data, read_serialized_dict\n'), ((191, 8, 193, 9), 'logging.warning', 'logging.warning', ({(192, 12, 192, 85): 'f"""missing {args.meta_yaml_path} file with yaml_metadata_block for pandoc"""'}, {}), "(\n f'missing {args.meta_yaml_path} file with yaml_metadata_block for pandoc')", False, 'import logging\n'), ((213, 8, 216, 9), 'warnings.warn', 'warnings.warn', (), '', False, 'import warnings\n'), ((234, 32, 234, 55), 'manubot.process.metadata.get_software_versions', 'get_software_versions', ({}, {}), '()', False, 'from manubot.process.metadata import get_header_includes, get_thumbnail_url, get_manuscript_urls, get_software_versions\n'), ((297, 8, 297, 50), 'manubot.cite.citekey.is_valid_citekey', 'is_valid_citekey', (), '', False, 'from manubot.cite.citekey import citekey_to_csl_item, shorten_citekey, is_valid_citekey, standardize_citekey\n'), ((313, 8, 316, 9), 'logging.info', 'logging.info', ({(314, 12, 315, 65): 'f"""no citation tags file at {path} Not reading citekey_aliases from citation-tags.tsv."""'}, {}), "(\n f'no citation tags file at {path} Not reading citekey_aliases from citation-tags.tsv.'\n )", False, 'import logging\n'), ((321, 8, 326, 9), 'logging.error', 'logging.error', ({(322, 12, 325, 66): 'f"""{path} contains rows with missing values:\n{na_rows_df}\nThis error can be caused by using spaces rather than tabs to delimit fields.\nProceeding to reread TSV with delim_whitespace=True."""'}, {}), '(\n f"""{path} contains rows with missing values:\n{na_rows_df}\nThis error can be caused by using spaces rather than tabs to delimit fields.\nProceeding to reread TSV with delim_whitespace=True."""\n )', False, 'import logging\n'), ((327, 17, 327, 61), 'pandas.read_csv', 'pandas.read_csv', (), '', False, 'import pandas\n'), ((384, 8, 384, 83), 'requests_cache.install_cache', 'requests_cache.install_cache', (), '', False, 'import requests_cache\n'), ((385, 16, 385, 42), 'requests_cache.get_cache', 'requests_cache.get_cache', ({}, {}), '()', False, 'import requests_cache\n'), ((417, 8, 417, 40), 'requests_cache.uninstall_cache', 'requests_cache.uninstall_cache', ({}, {}), '()', False, 'import requests_cache\n'), ((423, 8, 423, 30), 'logging.error', 'logging.error', ({(423, 22, 423, 29): 'message'}, {}), '(message)', False, 'import logging\n'), ((458, 8, 458, 70), 'json.dump', 'json.dump', (), '', False, 'import json\n'), ((491, 8, 491, 70), 'json.dump', 'json.dump', (), '', False, 'import json\n'), ((100, 12, 102, 13), 'logging.info', 'logging.info', ({(101, 16, 101, 89): 'f"""Using the {namespace!r} namespace for template variables from 
{path!r}"""'}, {}), "(\n f'Using the {namespace!r} namespace for template variables from {path!r}')", False, 'import logging\n'), ((138, 12, 143, 13), 'warnings.warn', 'warnings.warn', (), '', False, 'import warnings\n'), ((245, 12, 248, 13), 'logging.warning', 'logging.warning', ({(246, 16, 247, 67): 'f"""load_variables expected metadata.yaml field {key!r} to be a dict.Received a {dict_.__class__.__name__!r} instead."""'}, {}), "(\n f'load_variables expected metadata.yaml field {key!r} to be a dict.Received a {dict_.__class__.__name__!r} instead.'\n )", False, 'import logging\n'), ((387, 12, 387, 51), 'logging.info', 'logging.info', ({(387, 25, 387, 50): '"""Clearing requests-cache"""'}, {}), "('Clearing requests-cache')", False, 'import logging\n'), ((388, 12, 388, 34), 'requests_cache.clear', 'requests_cache.clear', ({}, {}), '()', False, 'import requests_cache\n'), ((406, 23, 406, 60), 'manubot.cite.citekey.citekey_to_csl_item', 'citekey_to_csl_item', ({(406, 43, 406, 59): 'standard_citekey'}, {}), '(standard_citekey)', False, 'from manubot.cite.citekey import citekey_to_csl_item, shorten_citekey, is_valid_citekey, standardize_citekey\n'), ((468, 15, 468, 34), 'jinja2.BaseLoader', 'jinja2.BaseLoader', ({}, {}), '()', False, 'import jinja2\n'), ((107, 22, 107, 48), 'manubot.util.read_serialized_dict', 'read_serialized_dict', ({(107, 43, 107, 47): 'path'}, {}), '(path)', False, 'from manubot.util import read_serialized_data, read_serialized_dict\n'), ((109, 12, 109, 80), 'logging.exception', 'logging.exception', ({(109, 30, 109, 79): 'f"""Error reading template variables from {path!r}"""'}, {}), "(f'Error reading template variables from {path!r}')", False, 'import logging\n'), ((120, 9, 120, 60), 'json.dumps', 'json.dumps', (), '', False, 'import json\n'), ((400, 12, 403, 13), 'logging.error', 'logging.error', ({(401, 16, 402, 61): 'f"""CSL JSON Data with a standard_citekey of {standard_citekey!r} not found in manual-references.json. Metadata must be provided for raw citekeys."""'}, {}), "(\n f'CSL JSON Data with a standard_citekey of {standard_citekey!r} not found in manual-references.json. Metadata must be provided for raw citekeys.'\n )", False, 'import logging\n'), ((409, 12, 409, 85), 'logging.exception', 'logging.exception', ({(409, 30, 409, 84): 'f"""Citeproc retrieval failure for {standard_citekey!r}"""'}, {}), "(f'Citeproc retrieval failure for {standard_citekey!r}')", False, 'import logging\n'), ((469, 48, 469, 67), 'logging.getLogger', 'logging.getLogger', ({}, {}), '()', False, 'import logging\n'), ((105, 34, 105, 60), 'manubot.util.read_serialized_data', 'read_serialized_data', ({(105, 55, 105, 59): 'path'}, {}), '(path)', False, 'from manubot.util import read_serialized_data, read_serialized_dict\n')] |
wmwilcox/mix-mind | iba_scrape.py | 02da016f314bb5f30f267f1f46c67c6d4a4c370c | #! /usr/bin/env python
# scrape the IBA pages for cocktail lists
import sys
import xml.etree.ElementTree as ET
from lxml import html
import requests
from pprint import pprint
from collections import OrderedDict
import json
url = 'http://iba-world.com/new-era-drinks/'
jsonfile = 'IBA_new_era_drinks.json'
url = 'http://iba-world.com/iba-cocktails/'
jsonfile = 'IBA_unforgettables.json'
url = 'http://iba-world.com/contemporary-classics/'
jsonfile = 'IBA_contemporary_classics.json'
jsonfile = 'IBA_.json'
recipes = OrderedDict()
page = requests.get(url)
tree = html.fromstring(page.content)
items = tree.findall(".//div[@class='blog_list_item_lists']")
for item in items:
name = item.find(".//h3").text
name = ' '.join([word.capitalize() for word in name.split()])
body = item.find(".//div[@class='blog_text']")
recipes[name] = {'unit': 'cL'}
print name
children = [c for c in body.iterchildren()]
n = 0
if children[1].tag == 'ul':
n = -1
style = children[n+1].text
if style is None:
try:
style = children[n+1].find('span').text
except:
pass
recipes[name]['style'] = style
recipes[name]['ingredients'] = OrderedDict()
if not children[n+2].tag == 'ul':
print "adapting <p> ingredients:", children[n+2].text
ing_list = ET.tostring(children[n+2]).lstrip('<p>').rstrip('</p>\n').split('<br />\n')
else:
ing_list = [i.text for i in children[n+2].iterchildren()]
for ingredient in ing_list:
if len(ingredient.split()) == 1:
recipes[name]['ingredients'][ingredient.lower()] = ''
continue
unit = ingredient.split()[1].lower()
if unit == 'cl':
recipes[name]['ingredients'][' '.join([w.lower() for w in ingredient.split()[2:]])] = float(ingredient.split()[0])
elif unit == 'bar' or unit == 'to': # bar spoon
recipes[name]['ingredients'][' '.join([w.lower() for w in ingredient.split()[3:]])] = ' '.join(ingredient.split()[:3])
elif unit == 'dashes' or unit == 'drops' or unit == 'with':
recipes[name]['ingredients'][' '.join([w.lower() for w in ingredient.split()[2:]])] = ' '.join(ingredient.split()[:2])
elif unit == 'dash':
recipes[name]['ingredients'][' '.join([w.lower() for w in ingredient.split()[2:]])] = 'dash'
else:
print "using literal: ", ingredient
literal = {'1': 'one', '2': 'two', 'A': 'one'}
try:
recipes[name]['ingredients'][' '.join([w.lower() for w in ingredient.split()[1:]])] = literal[ingredient.split()[0]]
except:
recipes[name]['ingredients'][ingredient.lower()] = ''
# Get full description from the link
ref_url = item.find(".//a[@class='top_hover_image']").attrib.get('href')
detail_page = requests.get(ref_url)
detail_tree = html.fromstring(detail_page.content)
use_next = False
for child in detail_tree.find(".//div[@class='col-sm-9']").iterchildren():
if use_next and child.tag == 'p':
recipes[name]['IBA_description'] = child.text
break
if child.tag =='ul':
use_next = True
with open(jsonfile, 'w') as fp:
json.dump(recipes, fp, indent=4, separators=(',', ': '))
print "Wrote out as {}".format(jsonfile)
sys.exit(0)
raw = sys.argv[1]
with open(raw) as fp:
for line in fp.readlines():
if line.lstrip().startswith(r'<h3>'):
print line.lstrip()
# super hax
if line.startswith(r'<p>'):
print line
if line.startswith(r'<li>'):
print line
if not line.lstrip().startswith('<'):
print line
| [] |
Royals-Aeo-Gamer/MyPyMods | Data Structures/Tree.py | be3a521e9f823ce0b704f925b19f6f34dcb5405d | class TreeNode:
def __init__(self, name, data, parent=None):
self.name = name
self.parent = parent
self.data = data
self.childs = {}
def add_child(self, name, data):
self.childs.update({name:(type(self))(name, data, self)})
    def rm_branch(self, name, ancestors_n: list = None):
        # Walk down the chain of ancestor names (if any) and delete the
        # child called `name` from the node that is reached.
        focus = self.childs
        while True:
            if not ancestors_n:  # no (more) ancestors to walk: delete here
                del focus[name]
                break
            elif ancestors_n[0] in focus:
                focus = (focus[ancestors_n[0]]).childs
                del ancestors_n[0]
            else:
                print(focus)
                raise NameError(f"couldn't find branch {ancestors_n[0]}")
def __getitem__(self, item):
return self.childs[item]
def __setitem__(self, key, value):
self.childs[key] = value
    def __delitem__(self, key, ancestors_n: list = None):
        self.rm_branch(key, ancestors_n)
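# Hedged usage sketch (illustrative values):
#   root = TreeNode("root", 0)
#   root.add_child("a", 1)
#   root["a"].add_child("b", 2)
#   root.rm_branch("b", ["a"])  # walks into "a", then deletes child "b"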
| [] |
ggiaquin16/GroupProject19 | config.py | f491abc4e8f127552dc7384f3378e14029da8008 | api_key = "9N7hvPP9yFrjBnELpBdthluBjiOWzJZw"
mongo_url = 'mongodb://localhost:27017'
mongo_db = 'CarPopularity'
mongo_collections = ['CarSalesByYear', 'PopularCarsByRegion']
years_data = ['2019', '2018', '2017', '2016', '2015']
test_mode = True | [] |
thu-ml/realsafe | pytorch_ares/pytorch_ares/attack_torch/mim.py | 474d549aa402b4cdd5e3629d23d035c31b60a360 | import torch
import torch.nn as nn
import numpy as np
import torch.nn.functional as F
from pytorch_ares.attack_torch.utils import loss_adv
class MIM(object):
    '''MIM: Momentum Iterative Method (MI-FGSM), a PGD-style attack with momentum'''
def __init__(self, net, epsilon, p, stepsize, steps, decay_factor, data_name,target, loss, device):
self.epsilon = epsilon
self.p = p
self.net = net
self.decay_factor = decay_factor
self.stepsize = stepsize
self.target = target
self.steps = steps
self.loss = loss
self.data_name = data_name
self.device = device
if self.data_name=="cifar10" and self.target:
raise AssertionError('cifar10 dont support targeted attack')
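    # Hedged usage sketch (hyperparameter values are illustrative, not from
    # the repo):
    #   attacker = MIM(net, epsilon=8/255, p=np.inf, stepsize=2/255, steps=10,
    #                  decay_factor=1.0, data_name="imagenet", target=False,
    #                  loss="ce", device="cuda")
    #   adv = attacker.forward(images, labels, None)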
def forward(self, image, label, target_labels):
image, label = image.to(self.device), label.to(self.device)
if target_labels is not None:
target_labels = target_labels.to(self.device)
batchsize = image.shape[0]
advimage = image
momentum = torch.zeros_like(image).detach()
        # momentum-iterative (MI-FGSM) loop to craft the adversarial example
for i in range(self.steps):
advimage = advimage.clone().detach().requires_grad_(True) # clone the advimage as the next iteration input
netOut = self.net(advimage)
loss = loss_adv(self.loss, netOut, label, target_labels, self.target, self.device)
grad = torch.autograd.grad(loss, [advimage])[0].detach()
grad_norm = torch.norm(nn.Flatten()(grad), p=1, dim=1)
grad = grad / grad_norm.view([-1]+[1]*(len(grad.shape)-1))
grad = grad + momentum*self.decay_factor
momentum = grad
if self.p==np.inf:
updates = grad.sign()
else:
normVal = torch.norm(grad.view(batchsize, -1), self.p, 1)
updates = grad/normVal.view(batchsize, 1, 1, 1)
updates = updates*self.stepsize
advimage = advimage+updates
# project the disturbed image to feasible set if needed
delta = advimage-image
if self.p==np.inf:
delta = torch.clamp(delta, -self.epsilon, self.epsilon)
else:
normVal = torch.norm(delta.view(batchsize, -1), self.p, 1)
mask = normVal<=self.epsilon
scaling = self.epsilon/normVal
scaling[mask] = 1
delta = delta*scaling.view(batchsize, 1, 1, 1)
advimage = image+delta
advimage = torch.clamp(advimage, 0, 1)#cifar10(-1,1)
return advimage | [((39, 19, 39, 94), 'pytorch_ares.attack_torch.utils.loss_adv', 'loss_adv', ({(39, 28, 39, 37): 'self.loss', (39, 39, 39, 45): 'netOut', (39, 47, 39, 52): 'label', (39, 54, 39, 67): 'target_labels', (39, 69, 39, 80): 'self.target', (39, 82, 39, 93): 'self.device'}, {}), '(self.loss, netOut, label, target_labels, self.target, self.device)', False, 'from pytorch_ares.attack_torch.utils import loss_adv\n'), ((65, 23, 65, 50), 'torch.clamp', 'torch.clamp', ({(65, 35, 65, 43): 'advimage', (65, 45, 65, 46): '0', (65, 48, 65, 49): '1'}, {}), '(advimage, 0, 1)', False, 'import torch\n'), ((32, 19, 32, 42), 'torch.zeros_like', 'torch.zeros_like', ({(32, 36, 32, 41): 'image'}, {}), '(image)', False, 'import torch\n'), ((56, 24, 56, 71), 'torch.clamp', 'torch.clamp', ({(56, 36, 56, 41): 'delta', (56, 43, 56, 56): '-self.epsilon', (56, 58, 56, 70): 'self.epsilon'}, {}), '(delta, -self.epsilon, self.epsilon)', False, 'import torch\n'), ((41, 35, 41, 47), 'torch.nn.Flatten', 'nn.Flatten', ({}, {}), '()', True, 'import torch.nn as nn\n'), ((40, 19, 40, 56), 'torch.autograd.grad', 'torch.autograd.grad', ({(40, 39, 40, 43): 'loss', (40, 45, 40, 55): '[advimage]'}, {}), '(loss, [advimage])', False, 'import torch\n')] |
pwelzel/bornhack-website | src/utils/templatetags/menubutton.py | af794e6a2fba06e09626259c7768feb30ff394be | from django import template
register = template.Library()
@register.simple_tag(takes_context=True)
def menubuttonclass(context, appname):
if appname == context['request'].resolver_match.func.view_class.__module__.split(".")[0]:
return "btn-primary"
else:
return "btn-default"
| [((3, 11, 3, 29), 'django.template.Library', 'template.Library', ({}, {}), '()', False, 'from django import template\n')] |
Prones94/Make_Wiki | wiki/tests.py | f8816eb31bb370f48affff8568a6b0d0ffaf7cd4 | from django.test import TestCase
from django.contrib.auth.models import User
from wiki.models import Page
from django.utils.text import slugify
# Create your tests here.
class WikiPageTest(TestCase):
def test_edit(self):
user = User.objects.create_user(username='admin', password='djangopony')
self.client.login(username='admin', password='djangopony')
page = Page.objects.create(title="My Test Page", content="test", author=user)
page.save()
edit = {
'title': 'testing title',
'content': 'testing content'
}
response = self.client.post('/%s/' %slugify(page.title), edit)
updated = Page.objects.get(title = edit['title'])
self.assertEqual(response.status_code, 302)
self.assertEqual(updated.title, edit['title'])
def test_page(self):
user = User.objects.create_user(username='admin', password='djangopony')
self.client.login(username='admin', password='djangopony')
page = Page.objects.create(title="My Test Page", content="test", author=user)
page.save()
response = self.client.get('/%s/' %slugify(page.title))
self.assertEqual(response.status_code, 200)
self.assertContains(response, 'test')
def test_create(self):
user = User.objects.create_user(username='admin', password='djangopony')
self.client.login(username='admin', password='djangopony')
new = {
'title': 'testing title',
'content': 'testing content'
}
response = self.client.post('/wiki/new/', new)
updated = Page.objects.get(title = new['title'])
self.assertEqual(response.status_code, 302)
self.assertEqual(updated.title, new['title'])
'''
Steps to writing a test
1. Set up your test data
2. Make a request (GET, POST)
3a. Check if response matches what we expect
3b. Check if database matches what we expect
''' | [((10, 15, 10, 80), 'django.contrib.auth.models.User.objects.create_user', 'User.objects.create_user', (), '', False, 'from django.contrib.auth.models import User\n'), ((13, 15, 13, 85), 'wiki.models.Page.objects.create', 'Page.objects.create', (), '', False, 'from wiki.models import Page\n'), ((21, 18, 21, 57), 'wiki.models.Page.objects.get', 'Page.objects.get', (), '', False, 'from wiki.models import Page\n'), ((27, 15, 27, 80), 'django.contrib.auth.models.User.objects.create_user', 'User.objects.create_user', (), '', False, 'from django.contrib.auth.models import User\n'), ((30, 15, 30, 85), 'wiki.models.Page.objects.create', 'Page.objects.create', (), '', False, 'from wiki.models import Page\n'), ((39, 15, 39, 80), 'django.contrib.auth.models.User.objects.create_user', 'User.objects.create_user', (), '', False, 'from django.contrib.auth.models import User\n'), ((48, 18, 48, 56), 'wiki.models.Page.objects.get', 'Page.objects.get', (), '', False, 'from wiki.models import Page\n'), ((20, 44, 20, 63), 'django.utils.text.slugify', 'slugify', ({(20, 52, 20, 62): 'page.title'}, {}), '(page.title)', False, 'from django.utils.text import slugify\n'), ((33, 43, 33, 62), 'django.utils.text.slugify', 'slugify', ({(33, 51, 33, 61): 'page.title'}, {}), '(page.title)', False, 'from django.utils.text import slugify\n')] |
tkiapril/birdy | birdy/__init__.py | cf6a8f8d31c9363dbf7398ae3d78fe3069a5a936 | __author__ = 'Mitja Pagon <[email protected]>'
__version__ = '0.2'
| [] |
KuanKuanQAQ/ares | ares/defense/randomization.py | 40dbefc18f6438e1812021fe6d6c3195f22ca295 | ''' The randomization defense method, which applies random rescaling and padding to inputs. '''
import tensorflow as tf
from ares.defense.input_transformation import input_transformation
def randomize(xs, scale_min=0.875, pad_value=0.0):
''' Apply random rescaling and padding to xs.
:param xs: A batch of inputs for some classifier.
:param scale_min: The random rescaling rate would be chosen between ``scale_min`` and 1.0.
:param pad_value: ``constant_values`` parameter for the ``tf.pad`` method.
:return: A new tensor with same shape and dtype as xs.
'''
ratio = tf.random.uniform((), minval=scale_min, maxval=1.0)
height, width = tf.cast(xs.shape[1].value * ratio, tf.int32), tf.cast(xs.shape[2].value * ratio, tf.int32)
xs_rescaled = tf.image.resize(xs, (height, width), method=tf.image.ResizeMethod.NEAREST_NEIGHBOR,
align_corners=True, preserve_aspect_ratio=False)
height_rem, width_rem = xs.shape[1].value - height, xs.shape[2].value - width
pad_left = tf.random_uniform((), 0, width_rem, dtype=tf.int32)
pad_right = width_rem - pad_left
pad_top = tf.random_uniform((), 0, height_rem, dtype=tf.int32)
pad_bottom = height_rem - pad_top
xs_padded = tf.pad(xs_rescaled, [[0, 0], [pad_top, pad_bottom], [pad_left, pad_right], [0, 0]],
constant_values=pad_value)
xs_padded.set_shape(xs.shape)
return xs_padded
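# Hedged usage sketch (TF1-style; the input shape is illustrative):
#   xs = tf.placeholder(tf.float32, (None, 299, 299, 3))
#   xs_rand = randomize(xs, scale_min=0.875)  # same static shape as xs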
def randomization(scale_min=0.875, pad_value=0.0):
''' A decorator to apply randomize rescaling and padding to input of the classifier.
:param scale_min: The random rescaling rate would be chosen between ``scale_min`` and 1.0.
:param pad_value: ``constant_values`` parameter for the ``tf.pad`` method.
'''
def args_fn(_):
return (scale_min, pad_value)
def kwargs_fn(_):
return {}
return lambda rs_class: input_transformation(rs_class, randomize, args_fn, kwargs_fn)
| [((16, 12, 16, 63), 'tensorflow.random.uniform', 'tf.random.uniform', (), '', True, 'import tensorflow as tf\n'), ((18, 18, 19, 82), 'tensorflow.image.resize', 'tf.image.resize', (), '', True, 'import tensorflow as tf\n'), ((21, 15, 21, 66), 'tensorflow.random_uniform', 'tf.random_uniform', (), '', True, 'import tensorflow as tf\n'), ((23, 14, 23, 66), 'tensorflow.random_uniform', 'tf.random_uniform', (), '', True, 'import tensorflow as tf\n'), ((25, 16, 26, 49), 'tensorflow.pad', 'tf.pad', (), '', True, 'import tensorflow as tf\n'), ((17, 20, 17, 64), 'tensorflow.cast', 'tf.cast', ({(17, 28, 17, 53): '(xs.shape[1].value * ratio)', (17, 55, 17, 63): 'tf.int32'}, {}), '(xs.shape[1].value * ratio, tf.int32)', True, 'import tensorflow as tf\n'), ((17, 66, 17, 110), 'tensorflow.cast', 'tf.cast', ({(17, 74, 17, 99): '(xs.shape[2].value * ratio)', (17, 101, 17, 109): 'tf.int32'}, {}), '(xs.shape[2].value * ratio, tf.int32)', True, 'import tensorflow as tf\n'), ((43, 28, 43, 89), 'ares.defense.input_transformation.input_transformation', 'input_transformation', ({(43, 49, 43, 57): 'rs_class', (43, 59, 43, 68): 'randomize', (43, 70, 43, 77): 'args_fn', (43, 79, 43, 88): 'kwargs_fn'}, {}), '(rs_class, randomize, args_fn, kwargs_fn)', False, 'from ares.defense.input_transformation import input_transformation\n')] |
hopeogbons/image-annotation | annotate/backend/admin.py | 2d8b1799bc791428fd3ab29d8052195996923130 | from django.contrib import admin
from annotate.backend.models import Image, Annotation
admin.site.register(Image)
admin.site.register(Annotation)
| [((4, 0, 4, 26), 'django.contrib.admin.site.register', 'admin.site.register', ({(4, 20, 4, 25): 'Image'}, {}), '(Image)', False, 'from django.contrib import admin\n'), ((5, 0, 5, 31), 'django.contrib.admin.site.register', 'admin.site.register', ({(5, 20, 5, 30): 'Annotation'}, {}), '(Annotation)', False, 'from django.contrib import admin\n')] |
Competitive-Programmers-Community/LeetCode | 34. Find First and Last Position of Element in Sorted Array/main.py | 841fdee805b1a626e9f1cd0e12398d25054638af | class Solution:
def searchRange(self, nums, target):
"""
:type nums: List[int]
:type target: int
:rtype: List[int]
"""
if not nums:
return [-1, -1]
low = 0
high = len(nums) - 1
f = 0
while low<=high:
mid = (low+high)//2
if nums[mid] == target:
f = 1
break
elif nums[mid] < target:
low = mid + 1
elif nums[mid] > target:
high = mid - 1
i, j = mid, mid
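        # widen the window around the matched index to cover all duplicates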
while i>=1 and nums[i-1] == target:
i = i-1
while j<len(nums)-1 and nums[j+1] == target:
j = j+1
if f == 1:
return [i, j]
else:
return [-1, -1]
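# Examples (not part of the original solution):
#   Solution().searchRange([5,7,7,8,8,10], 8) -> [3, 4]
#   Solution().searchRange([5,7,7,8,8,10], 6) -> [-1, -1]
# Note: the widening loops above make the worst case O(n) when many entries equal target.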
| [] |
LMS57/domato | c/create.py | 005739f55b49ead0ac47ea14b324decee05a7625 | data = open('./original').readlines()
alphabet = {
"<":"lt",
">":"gt",
"=":"=",
"-":'-',
"+":"+",
"-":"-",
"~":"~",
"!":"ex",
"%":"%",
"^":"^",
"&":"&",
"*":"*",
"(":"(",
")":"right_paran",
"[":"[",
"]":"]",
"{":"{",
"}":"}",
"[":"[",
"]":"]",
"|":"|",
";":";",
":":":",
",":",",
".":".",
"?":"?",
"/":"/",
}
def item(y):
if "'" in y:
tmp = y.split("'")[1]
test = 0
for x in alphabet:
if x in tmp:
test = 1
if test:
final = ''
for x in tmp:
final += item(alphabet[x])
return final
else:
return item(tmp)
else:
return "<"+y+">"
start = 0
current = ""
space = "<space>"
declared = []
referenced = []
for x in data:
x = x.strip()
if x == "":
continue
if '%%' == x:
start = 1
continue
elif start != 1:
continue
if x == "test":
break;
x = x.split(' ')
if len(x) == 1:#item declaration or end
if x[0] == ';':
current = ""
else:
current = x[0]
declared.append(item(x[0]))
print ""
else:
x = x[1:]
tmp = item(current)+'\t=\t'
for y in range(len(x)):
referenced.append(item(x[y]))
tmp += item(x[y])
if y != len(x)-1 and "'" not in x[y+1] and "'" not in x[y]:
tmp+=space
print tmp
referenced = set(referenced)
final = []
for x in referenced:
if x not in declared:
final.append(x)
print ""
for x in final:
tmp = x+'\t=\t'
x = x[1:-1]
print tmp + x.lower()
| [] |
isabella232/scale-safe | AppServer/google/appengine/api/memcache/memcache_distributed.py | 8b887726768106b6b67d7be6ea257bee5cd83f9a | #!/usr/bin/env python
#
# Copyright 2007 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
""" Non-stub version of the memcache API, keeping all data in memcached.
Uses the python-memcached library to interface with memcached.
"""
import base64
import cPickle
import logging
import memcache
import os
import time
from google.appengine.api import apiproxy_stub
from google.appengine.api.memcache import memcache_service_pb
from google.appengine.runtime import apiproxy_errors
MemcacheSetResponse = memcache_service_pb.MemcacheSetResponse
MemcacheSetRequest = memcache_service_pb.MemcacheSetRequest
MemcacheIncrementRequest = memcache_service_pb.MemcacheIncrementRequest
MemcacheIncrementResponse = memcache_service_pb.MemcacheIncrementResponse
MemcacheDeleteResponse = memcache_service_pb.MemcacheDeleteResponse
from google.appengine.api.memcache import TYPE_INT
from google.appengine.api.memcache import TYPE_LONG
class MemcacheService(apiproxy_stub.APIProxyStub):
"""Python only memcache service.
This service keeps all data in any external servers running memcached.
"""
# The memcached default port.
MEMCACHE_PORT = "11211"
# An AppScale file which has a list of IPs running memcached.
APPSCALE_MEMCACHE_FILE = "/etc/appscale/memcache_ips"
# The minimum frequency by which memcache clients will update their list of
# clients that they connect to (which can change if AppScale scales up or
# down).
UPDATE_WINDOW = 60 # seconds
def __init__(self, gettime=time.time, service_name='memcache'):
"""Initializer.
Args:
gettime: time.time()-like function used for testing.
service_name: Service name expected for all calls.
"""
super(MemcacheService, self).__init__(service_name)
self._gettime = gettime
self._memcache = None
self.setupMemcacheClient()
def setupMemcacheClient(self):
""" Sets up the memcache client. """
if os.path.exists(self.APPSCALE_MEMCACHE_FILE):
memcache_file = open(self.APPSCALE_MEMCACHE_FILE, "r")
all_ips = memcache_file.read().split("\n")
memcache_file.close()
else:
all_ips = ['localhost']
memcaches = [ip + ":" + self.MEMCACHE_PORT for ip in all_ips if ip != '']
memcaches.sort()
self._memcache = memcache.Client(memcaches, debug=0)
def _Dynamic_Get(self, request, response):
"""Implementation of gets for memcache.
Args:
request: A MemcacheGetRequest protocol buffer.
response: A MemcacheGetResponse protocol buffer.
"""
for key in set(request.key_list()):
internal_key = self._GetKey(request.name_space(), key)
value = self._memcache.get(internal_key)
if value is None:
continue
flags = 0
stored_flags, cas_id, stored_value = cPickle.loads(value)
flags |= stored_flags
item = response.add_item()
item.set_key(key)
item.set_value(stored_value)
item.set_flags(flags)
if request.for_cas():
item.set_cas_id(cas_id)
def _Dynamic_Set(self, request, response):
"""Implementation of sets for memcache.
Args:
request: A MemcacheSetRequest.
response: A MemcacheSetResponse.
"""
for item in request.item_list():
key = self._GetKey(request.name_space(), item.key())
set_policy = item.set_policy()
old_entry = self._memcache.get(key)
cas_id = 0
if old_entry:
_, cas_id, _ = cPickle.loads(old_entry)
set_status = MemcacheSetResponse.NOT_STORED
if ((set_policy == MemcacheSetRequest.SET) or
(set_policy == MemcacheSetRequest.ADD and old_entry is None) or
(set_policy == MemcacheSetRequest.REPLACE and
old_entry is not None)):
if (old_entry is None or set_policy == MemcacheSetRequest.SET):
set_status = MemcacheSetResponse.STORED
elif (set_policy == MemcacheSetRequest.CAS and item.for_cas() and
item.has_cas_id()):
if old_entry is None:
set_status = MemcacheSetResponse.NOT_STORED
elif cas_id != item.cas_id():
set_status = MemcacheSetResponse.EXISTS
else:
set_status = MemcacheSetResponse.STORED
if (set_status == MemcacheSetResponse.STORED
or set_policy == MemcacheSetRequest.REPLACE):
set_value = cPickle.dumps(
[item.flags(), cas_id + 1, item.value()])
if set_policy == MemcacheSetRequest.REPLACE:
self._memcache.replace(key, set_value)
else:
self._memcache.set(key, set_value, item.expiration_time())
response.add_set_status(set_status)
def _Dynamic_Delete(self, request, response):
"""Implementation of delete in memcache.
Args:
request: A MemcacheDeleteRequest protocol buffer.
response: A MemcacheDeleteResponse protocol buffer.
"""
for item in request.item_list():
key = self._GetKey(request.name_space(), item.key())
entry = self._memcache.get(key)
delete_status = MemcacheDeleteResponse.DELETED
if entry is None:
delete_status = MemcacheDeleteResponse.NOT_FOUND
else:
self._memcache.delete(key)
response.add_delete_status(delete_status)
def _Increment(self, namespace, request):
"""Internal function for incrementing from a MemcacheIncrementRequest.
Args:
namespace: A string containing the namespace for the request,
if any. Pass an empty string if there is no namespace.
request: A MemcacheIncrementRequest instance.
Returns:
An integer or long if the offset was successful, None on error.
"""
if not request.delta():
return None
cas_id = 0
key = self._GetKey(namespace, request.key())
value = self._memcache.get(key)
if value is None:
if not request.has_initial_value():
return None
flags, cas_id, stored_value = (
TYPE_INT, cas_id, str(request.initial_value()))
else:
flags, cas_id, stored_value = cPickle.loads(value)
if flags == TYPE_INT:
new_value = int(stored_value)
elif flags == TYPE_LONG:
new_value = long(stored_value)
if request.direction() == MemcacheIncrementRequest.INCREMENT:
new_value += request.delta()
elif request.direction() == MemcacheIncrementRequest.DECREMENT:
new_value -= request.delta()
new_stored_value = cPickle.dumps([flags, cas_id + 1, str(new_value)])
try:
self._memcache.cas(key, new_stored_value)
    except Exception as e:
logging.error(str(e))
return None
return new_value
def _Dynamic_Increment(self, request, response):
"""Implementation of increment for memcache.
Args:
request: A MemcacheIncrementRequest protocol buffer.
response: A MemcacheIncrementResponse protocol buffer.
"""
new_value = self._Increment(request.name_space(), request)
if new_value is None:
raise apiproxy_errors.ApplicationError(
memcache_service_pb.MemcacheServiceError.UNSPECIFIED_ERROR)
response.set_new_value(new_value)
def _Dynamic_BatchIncrement(self, request, response):
"""Implementation of batch increment for memcache.
Args:
request: A MemcacheBatchIncrementRequest protocol buffer.
response: A MemcacheBatchIncrementResponse protocol buffer.
"""
namespace = request.name_space()
for request_item in request.item_list():
new_value = self._Increment(namespace, request_item)
item = response.add_item()
if new_value is None:
item.set_increment_status(MemcacheIncrementResponse.NOT_CHANGED)
else:
item.set_increment_status(MemcacheIncrementResponse.OK)
item.set_new_value(new_value)
def _Dynamic_FlushAll(self, request, response):
"""Implementation of MemcacheService::FlushAll().
Args:
request: A MemcacheFlushRequest.
response: A MemcacheFlushResponse.
"""
self._memcache.flush_all()
def _Dynamic_Stats(self, request, response):
"""Implementation of MemcacheService::Stats().
Args:
request: A MemcacheStatsRequest.
response: A MemcacheStatsResponse.
"""
stats = response.mutable_stats()
num_servers = 0
hits_total = 0
misses_total = 0
byte_hits_total = 0
items_total = 0
bytes_total = 0
time_total = 0
def get_stats_value(stats_dict, key, _type=int):
""" Gets statisical values and makes sure the key is in the dict. """
if key not in stats_dict:
logging.warn("No stats for key '%s'." % key)
return _type(stats_dict.get(key, '0'))
for server, server_stats in self._memcache.get_stats():
num_servers += 1
hits_total += get_stats_value(server_stats, 'get_hits')
misses_total += get_stats_value(server_stats, 'get_misses')
byte_hits_total += get_stats_value(server_stats, 'bytes_read')
items_total += get_stats_value(server_stats, 'curr_items')
bytes_total += get_stats_value(server_stats, 'bytes')
time_total += get_stats_value(server_stats, 'time', float)
stats.set_hits(hits_total)
stats.set_misses(misses_total)
stats.set_byte_hits(byte_hits_total)
stats.set_items(items_total)
stats.set_bytes(bytes_total)
# With the Python 2.7 GAE runtime, it expects all fields here to be ints.
# Python 2.5 was fine with this being a float, so callers in that runtime
# may not be expecting an int.
stats.set_oldest_item_age(int(time.time() - time_total / num_servers))
def _GetKey(self, namespace, key):
"""Used to get the Memcache key. It is encoded because the sdk
allows special characters but the Memcache client does not.
Args:
namespace: The namespace as provided by the application.
key: The key as provided by the application.
Returns:
A base64 string __{appname}__{namespace}__{key}
"""
appname = os.environ['APPNAME']
internal_key = appname + "__" + namespace + "__" + key
return base64.b64encode(internal_key)
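  # Example (illustrative values): app "guestbook", namespace "ns1" and key
  # "counter" encode to base64.b64encode("guestbook__ns1__counter").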
| [] |
arpruss/inflatemesh | inflateutils/exportmesh.py | ab4abfc7794fd4cf96f41bb797e1b2a61f687a46 | from struct import pack
from .vector import *
from .formatdecimal import decimal
from numbers import Number
import os
import sys
try:
basestring
except:
basestring = str
def isColorTriangleList(polys):
return isinstance(polys[0][1][0][0], Number)
def toPolyhedra(polys):
if isColorTriangleList(polys):
return [ (polys[0][0], list(face for rgb,face in polys)) ]
else:
return polys
def toMesh(polys):
if isColorTriangleList(polys):
return polys
else:
output = []
for rgb,polyhedron in polys:
for face in polyhedron:
output.append((rgb,face))
return output
def describeColor(c):
if c is None:
return "undef";
elif isinstance(c, str):
return c
else:
return "[%s,%s,%s]" % tuple(decimal(component) for component in c)
def toSCADModule(polys, moduleName, digitsAfterDecimal=9, colorOverride=None):
"""
INPUT:
polys: list of (color,polyhedra) pairs (counterclockwise triangles), or a list of (color,triangle) pairs (TODO: currently uses first color for all in latter case)
moduleName: OpenSCAD module name
OUTPUT: string with OpenSCAD code implementing the polys
"""
polys = toPolyhedra(polys)
scad = []
scad.append("module " +moduleName+ "() {")
for rgb,poly in polys:
if colorOverride != "" and (colorOverride or rgb):
line = " color(%s) " % describeColor(colorOverride if colorOverride else tuple(min(max(c,0.),1.0) for c in rgb))
else:
line = " "
pointsDict = {}
i = 0
line += "polyhedron(points=["
points = []
for face in poly:
for v in reversed(face):
if tuple(v) not in pointsDict:
pointsDict[tuple(v)] = i
points.append( ("[%s,%s,%s]") % tuple(decimal(x,digitsAfterDecimal) for x in v) )
i += 1
line += ",".join(points)
line += "], faces=["
line += ",".join( "[" + ",".join(str(pointsDict[tuple(v)]) for v in reversed(face)) + "]" for face in poly ) + "]"
line += ");"
scad.append(line)
scad.append("}\n")
return "\n".join(scad)
def saveSCAD(filename, polys, moduleName="object1", quiet=False):
"""
filename: filename to write OpenSCAD file
polys: list of (color,polyhedra) pairs (counterclockwise triangles)
moduleName: OpenSCAD module name
quiet: give no status message if set
"""
if not quiet: sys.stderr.write("Saving %s\n" % filename)
if filename:
with open(filename, "w") as f:
f.write(toSCADModule(polys, moduleName))
f.write("\n" + moduleName + "();\n")
else:
sys.stdout.write(toSCADModule(polys, moduleName))
sys.stdout.write("\n" + moduleName + "();\n")
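# Hedged usage sketch (the geometry is illustrative):
#   tri = [(0,0,0), (10,0,0), (0,10,0)]
#   saveSCAD("out.scad", [((1.,0.,0.), [tri])])  # one red single-triangle polyhedron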
def saveSTL(filename, mesh, swapYZ=False, quiet=False):
"""
filename: filename to save STL file
mesh: list of (color,triangle) pairs (counterclockwise)
swapYZ: should Y/Z axes be swapped?
quiet: give no status message if set
"""
mesh = toMesh(mesh)
if not quiet: sys.stderr.write("Saving %s\n" % filename)
minY = float("inf")
minVector = Vector(float("inf"),float("inf"),float("inf"))
numTriangles = 0
if swapYZ:
matrix = Matrix( (1,0,0), (0,0,-1), (0,1,0) )
else:
matrix = Matrix.identity(3)
mono = True
for rgb,triangle in mesh:
if rgb is not None:
mono = False
numTriangles += 1
for vertex in triangle:
vertex = matrix*vertex
minVector = Vector(min(minVector[i], vertex[i]) for i in range(3))
minVector -= Vector(0.001,0.001,0.001) # make sure all STL coordinates are strictly positive as per Wikipedia
def writeSTL(write):
write(pack("80s",b''))
write(pack("<I",numTriangles))
for rgb,tri in mesh:
if mono:
color = 0
else:
if rgb is None:
rgb = (255,255,255)
else:
rgb = tuple(min(255,max(0,int(0.5 + 255 * comp))) for comp in rgb)
color = 0x8000 | ( (rgb[0] >> 3) << 10 ) | ( (rgb[1] >> 3) << 5 ) | ( (rgb[2] >> 3) << 0 )
normal = (Vector(tri[1])-Vector(tri[0])).cross(Vector(tri[2])-Vector(tri[0])).normalize()
write(pack("<3f", *(matrix*normal)))
for vertex in tri:
write(pack("<3f", *(matrix*(vertex-minVector))))
write(pack("<H", color))
if filename:
with open(filename, "wb") as f:
writeSTL(f.write)
else:
if sys.platform == "win32":
import msvcrt
msvcrt.setmode(sys.stdout.fileno(), os.O_BINARY)
writeSTL(lambda data : os.write(sys.stdout.fileno(), data))
| [((83, 18, 83, 60), 'sys.stderr.write', 'sys.stderr.write', ({(83, 35, 83, 59): "('Saving %s\\n' % filename)"}, {}), "('Saving %s\\n' % filename)", False, 'import sys\n'), ((90, 8, 90, 53), 'sys.stdout.write', 'sys.stdout.write', ({(90, 25, 90, 52): "('\\n' + moduleName + '();\\n')"}, {}), "('\\n' + moduleName + '();\\n')", False, 'import sys\n'), ((102, 18, 102, 60), 'sys.stderr.write', 'sys.stderr.write', ({(102, 35, 102, 59): "('Saving %s\\n' % filename)"}, {}), "('Saving %s\\n' % filename)", False, 'import sys\n'), ((122, 14, 122, 29), 'struct.pack', 'pack', ({(122, 19, 122, 24): '"""80s"""', (122, 25, 122, 28): "b''"}, {}), "('80s', b'')", False, 'from struct import pack\n'), ((123, 14, 123, 37), 'struct.pack', 'pack', ({(123, 19, 123, 23): '"""<I"""', (123, 24, 123, 36): 'numTriangles'}, {}), "('<I', numTriangles)", False, 'from struct import pack\n'), ((134, 18, 134, 47), 'struct.pack', 'pack', ({(134, 23, 134, 28): '"""<3f"""', (134, 30, 134, 45): '*(matrix * normal)'}, {}), "('<3f', *(matrix * normal))", False, 'from struct import pack\n'), ((137, 18, 137, 35), 'struct.pack', 'pack', ({(137, 23, 137, 27): '"""<H"""', (137, 29, 137, 34): 'color'}, {}), "('<H', color)", False, 'from struct import pack\n'), ((145, 27, 145, 46), 'sys.stdout.fileno', 'sys.stdout.fileno', ({}, {}), '()', False, 'import sys\n'), ((136, 22, 136, 63), 'struct.pack', 'pack', ({(136, 27, 136, 32): '"""<3f"""', (136, 34, 136, 61): '*(matrix * (vertex - minVector))'}, {}), "('<3f', *(matrix * (vertex - minVector)))", False, 'from struct import pack\n'), ((146, 40, 146, 59), 'sys.stdout.fileno', 'sys.stdout.fileno', ({}, {}), '()', False, 'import sys\n')] |
arywatt/FDS_2020_2021 | Assignment1/Identification/match_module.py | 392f360b219c6ef5e2c685da1f3c8aab7415ce32 | import numpy as np
from PIL import Image
import matplotlib.pyplot as plt
import histogram_module
import dist_module
def rgb2gray(rgb):
r, g, b = rgb[:, :, 0], rgb[:, :, 1], rgb[:, :, 2]
gray = 0.2989 * r + 0.5870 * g + 0.1140 * b
return gray
# model_images - list of file names of model images
# query_images - list of file names of query images
#
# dist_type - string which specifies distance type: 'chi2', 'l2', 'intersect'
# hist_type - string which specifies histogram type: 'grayvalue', 'dxdy', 'rgb', 'rg'
#
# note: use functions 'get_dist_by_name', 'get_hist_by_name' and 'is_grayvalue_hist' to obtain
# handles to distance and histogram functions, and to find out whether histogram function
# expects grayvalue or color image
def find_best_match(model_images, query_images, dist_type, hist_type, num_bins):
hist_isgray = histogram_module.is_grayvalue_hist(hist_type)
model_hists = compute_histograms(model_images, hist_type, hist_isgray, num_bins)
query_hists = compute_histograms(query_images, hist_type, hist_isgray, num_bins)
D = np.zeros((len(model_images), len(query_images)))
# compute distance for each couple of query - image
for j, query in enumerate(query_hists):
for i, model in enumerate(model_hists):
D[i, j] = dist_module.get_dist_by_name(model, query, dist_type)
best_match = [] # to save best matches
# for each query , find best model
for j in range(len(query_images)):
query_matches = D[:, j] # get query columns from matrix
argmin = np.argmin(query_matches) # get index with minimum distance
best_match.append(argmin) # save index for query
best_match = np.array(best_match) # array of best match for each query
return best_match, D
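# Hedged usage sketch (the file lists are illustrative):
#   best, D = find_best_match(model_images, query_images, 'chi2', 'rgb', 30)
#   best[j] is the index of the model image closest to query j; D[i, j] is the distance.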
def compute_histograms(image_list, hist_type, hist_isgray, num_bins):
image_hist = []
    # Compute the histogram for each image and append it to image_hist
# ... (your code here)
for img in image_list:
img_color = np.array(Image.open(img))
        # if the histogram type is grayvalue, use the gray image;
        # otherwise use the rgb image
img_to_process = rgb2gray(img_color) if hist_isgray else img_color.astype('double')
# We compute histogram for image
hist = histogram_module.get_hist_by_name(img=img_to_process,
num_bins_gray=num_bins,
hist_name=hist_type
)
image_hist.append(hist)
return image_hist
# For each image file from 'query_images' find and visualize the 5 nearest images from 'model_image'.
#
# Note: use the previously implemented function 'find_best_match'
# Note: use subplot command to show all the images in the same Python figure, one row per query image
def show_neighbors(model_images, query_images, dist_type, hist_type, num_bins):
plt.figure()
num_nearest = 5 # show the top-5 neighbors
# ... (your code here)
_, D = find_best_match(model_images=model_images,
query_images=query_images,
dist_type=dist_type,
hist_type=hist_type,
num_bins=num_bins
)
Q = len(query_images)
pos = 0
for j in range(Q):
query_matches = D[:, j]
best_args = np.argsort(query_matches)[:num_nearest]
query_img = query_images[j]
pos += 1
plt.subplot(Q, 6, pos);
plt.imshow(np.array(Image.open(query_img)), vmin=0, vmax=255);
plt.title(f'Q{j}')
for ind in range(len(best_args)):
pos += 1
model_ind = best_args[ind]
model_img = model_images[model_ind]
plt.subplot(Q, 6, pos);
plt.imshow(np.array(Image.open(model_img)), vmin=0, vmax=255);
plt.title(f'MO.{model_ind}')
plt.show()
| [((27, 18, 27, 63), 'histogram_module.is_grayvalue_hist', 'histogram_module.is_grayvalue_hist', ({(27, 53, 27, 62): 'hist_type'}, {}), '(hist_type)', False, 'import histogram_module\n'), ((47, 17, 47, 37), 'numpy.array', 'np.array', ({(47, 26, 47, 36): 'best_match'}, {}), '(best_match)', True, 'import numpy as np\n'), ((81, 4, 81, 16), 'matplotlib.pyplot.figure', 'plt.figure', ({}, {}), '()', True, 'import matplotlib.pyplot as plt\n'), ((114, 4, 114, 14), 'matplotlib.pyplot.show', 'plt.show', ({}, {}), '()', True, 'import matplotlib.pyplot as plt\n'), ((44, 17, 44, 41), 'numpy.argmin', 'np.argmin', ({(44, 27, 44, 40): 'query_matches'}, {}), '(query_matches)', True, 'import numpy as np\n'), ((66, 15, 69, 50), 'histogram_module.get_hist_by_name', 'histogram_module.get_hist_by_name', (), '', False, 'import histogram_module\n'), ((103, 8, 103, 30), 'matplotlib.pyplot.subplot', 'plt.subplot', ({(103, 20, 103, 21): 'Q', (103, 23, 103, 24): '(6)', (103, 26, 103, 29): 'pos'}, {}), '(Q, 6, pos)', True, 'import matplotlib.pyplot as plt\n'), ((105, 8, 105, 26), 'matplotlib.pyplot.title', 'plt.title', ({(105, 18, 105, 25): 'f"""Q{j}"""'}, {}), "(f'Q{j}')", True, 'import matplotlib.pyplot as plt\n'), ((37, 22, 37, 75), 'dist_module.get_dist_by_name', 'dist_module.get_dist_by_name', ({(37, 51, 37, 56): 'model', (37, 58, 37, 63): 'query', (37, 65, 37, 74): 'dist_type'}, {}), '(model, query, dist_type)', False, 'import dist_module\n'), ((59, 29, 59, 44), 'PIL.Image.open', 'Image.open', ({(59, 40, 59, 43): 'img'}, {}), '(img)', False, 'from PIL import Image\n'), ((98, 20, 98, 45), 'numpy.argsort', 'np.argsort', ({(98, 31, 98, 44): 'query_matches'}, {}), '(query_matches)', True, 'import numpy as np\n'), ((110, 12, 110, 34), 'matplotlib.pyplot.subplot', 'plt.subplot', ({(110, 24, 110, 25): 'Q', (110, 27, 110, 28): '(6)', (110, 30, 110, 33): 'pos'}, {}), '(Q, 6, pos)', True, 'import matplotlib.pyplot as plt\n'), ((112, 12, 112, 40), 'matplotlib.pyplot.title', 'plt.title', ({(112, 22, 112, 39): 'f"""MO.{model_ind}"""'}, {}), "(f'MO.{model_ind}')", True, 'import matplotlib.pyplot as plt\n'), ((104, 28, 104, 49), 'PIL.Image.open', 'Image.open', ({(104, 39, 104, 48): 'query_img'}, {}), '(query_img)', False, 'from PIL import Image\n'), ((111, 32, 111, 53), 'PIL.Image.open', 'Image.open', ({(111, 43, 111, 52): 'model_img'}, {}), '(model_img)', False, 'from PIL import Image\n')] |
rojalator/dycco | dycco/__main__.py | 84ace8727aef84bb3d886cdaa3d3aef1089f1935 | import argparse
import logging
import sys
from .dycco import document
def main(paths, output_dir, use_ascii:bool, escape_html:bool, single_file:bool):
try:
document(paths, output_dir, use_ascii, escape_html, single_file)
except IOError as e:
logging.error('Unable to open file: %s', e)
return 1
except Exception as e:
logging.error('An error occurred: %s', e)
return 1
else:
return 0
if __name__ == '__main__':
arg_parser = argparse.ArgumentParser(prog='dycco', description='Literate-style documentation generator.')
arg_parser.add_argument('source_file', nargs='+', default=sys.stdin, help='Source files to document')
arg_parser.add_argument('-o', '--output-dir', default='docs', help='Output directory (will be created if necessary)')
arg_parser.add_argument('-a', '--asciidoc3', action='store_true', default=False, dest='use_ascii',
help='Process with asciidoc3 instead of markdown (you will have to install asciidoc3, of course)')
arg_parser.add_argument('-e', '--escape-html', action='store_true', default=False, dest='escape_html',
help='Run the documentation through html.escape() before markdown or asciidoc3')
arg_parser.add_argument('-f', '--single-file', action='store_true', default=False, dest='single_file',
help='Just produce a .md or .adoc file in single-column to be processed externally')
args = arg_parser.parse_args()
sys.exit(main(args.source_file, args.output_dir, args.use_ascii, args.escape_html, args.single_file))
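# Example invocations (hypothetical paths; run from a shell):
#   python -m dycco src/*.py                 # HTML docs into ./docs
#   python -m dycco -a -f -o out src/app.py  # single asciidoc3 file into ./out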
| [((22, 17, 22, 109), 'argparse.ArgumentParser', 'argparse.ArgumentParser', (), '', False, 'import argparse\n'), ((12, 8, 12, 51), 'logging.error', 'logging.error', ({(12, 22, 12, 47): '"""Unable to open file: %s"""', (12, 49, 12, 50): 'e'}, {}), "('Unable to open file: %s', e)", False, 'import logging\n'), ((15, 8, 15, 49), 'logging.error', 'logging.error', ({(15, 22, 15, 45): '"""An error occurred: %s"""', (15, 47, 15, 48): 'e'}, {}), "('An error occurred: %s', e)", False, 'import logging\n')] |
rghwer/testdocs | jumpy/jumpy/ndarray.py | 8fafa40407411ed7a3f8216e691e42e0c7d32083 | ################################################################################
# Copyright (c) 2015-2018 Skymind, Inc.
#
# This program and the accompanying materials are made available under the
# terms of the Apache License, Version 2.0 which is available at
# https://www.apache.org/licenses/LICENSE-2.0.
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
#
# SPDX-License-Identifier: Apache-2.0
################################################################################
from .java_classes import *
import numpy as np
import ctypes
import warnings
native_ops = NativeOpsHolder.getInstance().getDeviceNativeOps()
# DATA TYPE MANAGEMENT
DOUBLE = DataType.DOUBLE
FLOAT = DataType.FLOAT
HALF = DataType.HALF
LONG = DataType.LONG
INT = DataType.INT
SHORT = DataType.SHORT
UBYTE = DataType.UBYTE
BYTE = DataType.BYTE
BOOL = DataType.BOOL
UTF8 = DataType.UTF8
COMPRESSED = DataType.COMPRESSED
UNKNOWN = DataType.UNKNOWN
SUPPORTED_JAVA_DTYPES = [
DOUBLE,
FLOAT,
HALF,
LONG,
INT,
SHORT,
BOOL
#UTF8
]
SUPPORTED_PYTHON_DTYPES = [
np.float64,
np.float32,
np.float16,
np.int64,
np.int32,
np.int16,
np.bool_
#np.str_
]
_PY2J = dict(zip(SUPPORTED_PYTHON_DTYPES, SUPPORTED_JAVA_DTYPES))
_J2PY = dict(zip(SUPPORTED_JAVA_DTYPES, SUPPORTED_PYTHON_DTYPES))
def _dtype_py2j(dtype):
if isinstance(dtype, str):
dtype = np.dtype(dtype).type
elif isinstance(dtype, np.dtype):
dtype = dtype.type
jtype = _PY2J.get(dtype)
if jtype is None:
raise NotImplementedError("Unsupported type: " + dtype.name)
return jtype
def _dtype_j2py(dtype):
pytype = _J2PY.get(dtype)
if pytype is None:
raise NotImplementedError("Unsupported type: " + (str(dtype)))
return pytype
def set_context_dtype(dtype):
'''
Sets the dtype for nd4j
# Arguments
dtype: 'float' or 'double'
'''
dtype_map = {
'float32': 'float',
'float64': 'double'
}
dtype = dtype_map.get(dtype, dtype)
if dtype not in ['float', 'double']:
raise ValueError("Invalid dtype '{}'. Available dtypes are 'float' and 'double'.".format(dtype))
dtype_ = DataTypeUtil.getDtypeFromContext(dtype)
DataTypeUtil.setDTypeForContext(dtype_)
if get_context_dtype() != dtype:
warnings.warn("Can not set context dtype now. Set it at the beginning of your program.")
def get_context_dtype():
'''
Returns the nd4j dtype
'''
dtype = DataTypeUtil.getDtypeFromContext()
return DataTypeUtil.getDTypeForName(dtype)
_refs = []
def _from_numpy(np_array):
'''
Convert numpy array to nd4j array
'''
pointer_address, _ = np_array.__array_interface__['data']
_refs.append(np_array)
pointer = native_ops.pointerForAddress(pointer_address)
size = np_array.size
pointer.limit(size)
jdtype = _dtype_py2j(np_array.dtype)
'''
mapping = {
DOUBLE: DoublePointer,
FLOAT: FloatPointer,
HALF: HalfPointer,
LONG: LongPointer,
INT: IntPointer,
SHORT: ShortPointer,
BOOL: BoolPointer
}
pc = mapping[jdtype]
#pointer = pc(pointer)
'''
buff = Nd4j.createBuffer(pointer, size, jdtype)
assert buff.address() == pointer_address
_refs.append(buff)
elem_size = buff.getElementSize()
assert elem_size == np_array.dtype.itemsize
    strides = np_array.strides
    # Integer division keeps the strides as ints (true division would yield floats).
    strides = [dim // elem_size for dim in strides]
shape = np_array.shape
nd4j_array = Nd4j.create(buff, shape, strides, 0)
assert buff.address() == nd4j_array.data().address()
return nd4j_array
def _to_numpy(nd4j_array):
'''
Convert nd4j array to numpy array
'''
buff = nd4j_array.data()
address = buff.pointer().address()
dtype = nd4j_array.dataType().toString()
mapping = {
'DOUBLE': ctypes.c_double,
'FLOAT': ctypes.c_float,
'HALF': ctypes.c_short,
'LONG': ctypes.c_long,
'INT': ctypes.c_int,
'SHORT': ctypes.c_short,
'BOOL': ctypes.c_bool
}
Pointer = ctypes.POINTER(mapping[dtype])
pointer = ctypes.cast(address, Pointer)
np_array = np.ctypeslib.as_array(pointer, tuple(nd4j_array.shape()))
return np_array
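# Minimal round-trip sketch (hypothetical; assumes the JVM/ND4J backend is up):
# a = np.arange(6, dtype='float32').reshape(2, 3)
# nd = _from_numpy(a)   # zero-copy: the INDArray wraps a's buffer via its pointer
# b = _to_numpy(nd)     # a ctypes view over the same memory, so b shares data with a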
def _indarray(x):
typ = type(x)
if typ is INDArray:
return x
elif typ is ndarray:
return x.array
elif 'numpy' in str(typ):
return _from_numpy(x)
elif typ in (list, tuple):
return _from_numpy(np.array(x))
elif typ in (int, float):
return Nd4j.scalar(x)
else:
        raise Exception('Data type not understood: ' + str(typ))
def _nparray(x):
typ = type(x)
if typ is INDArray:
return ndarray(x).numpy()
elif typ is ndarray:
return x.numpy()
elif 'numpy' in str(typ):
return x
elif typ in (list, tuple):
return np.array(x)
elif typ in (int, float):
return np.array(x)
else:
        raise Exception('Data type not understood: ' + str(typ))
def broadcast_like(y, x):
xs = x.shape()
ys = y.shape()
if xs == ys:
return y
_xs = tuple(xs)
_ys = tuple(ys)
nx = len(xs)
ny = len(ys)
if nx > ny:
diff = nx - ny
ys = ([1] * diff) + ys
y = y.reshape(ys)
ny = nx
elif ny > nx:
raise Exception('Unable to broadcast shapes ' + str(_xs) + ''
' and ' + str(_ys))
yt = []
rep_y = False
for xd, yd in zip(xs, ys):
if xd == yd:
yt.append(1)
elif xd == 1:
raise Exception('Unable to broadcast shapes ' + str(_xs) + ''
' and ' + str(_ys))
elif yd == 1:
yt.append(xd)
rep_y = True
else:
raise Exception('Unable to broadcast shapes ' + str(_xs) + ''
' and ' + str(_ys))
if rep_y:
y = y.repmat(*yt)
return y
def broadcast(x, y):
xs = x.shape()
ys = y.shape()
if xs == ys:
return x, y
_xs = tuple(xs)
_ys = tuple(ys)
nx = len(xs)
ny = len(ys)
if nx > ny:
diff = nx - ny
ys = ([1] * diff) + ys
y = y.reshape(*ys)
ny = nx
elif ny > nx:
diff = ny - nx
xs = ([1] * diff) + xs
x = x.reshape(*xs)
nx = ny
xt = []
yt = []
rep_x = False
rep_y = False
for xd, yd in zip(xs, ys):
if xd == yd:
xt.append(1)
yt.append(1)
elif xd == 1:
xt.append(yd)
yt.append(1)
rep_x = True
elif yd == 1:
xt.append(1)
yt.append(xd)
rep_y = True
else:
raise Exception('Unable to broadcast shapes ' + str(_xs) + ''
' and ' + str(_ys))
if rep_x:
x = Nd4j.tile(x, *xt)
if rep_y:
try:
y = Nd4j.tile(y, *yt)
except:
y = Nd4j.tile(y, *yt)
return x, y
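# Both helpers mirror numpy-style broadcasting: e.g. shapes (2, 1) and (1, 3)
# are tiled to a common (2, 3). broadcast_like() only ever expands `y` to match
# `x`, while broadcast() may tile either operand.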
class ndarray(object):
def __init__(self, data, dtype=None):
# we ignore dtype for now
typ = type(data)
if 'nd4j' in typ.__name__:
# Note that we don't make a copy here
self.array = data
elif typ is ndarray:
self.array = data.array.dup()
else:
if typ is not np.ndarray:
data = np.array(data)
self.array = _from_numpy(data)
def numpy(self):
try:
return self.np_array
except AttributeError:
self.np_array = _to_numpy(self.array)
return self.np_array
@property
def size(self):
return self.array.length()
@property
def shape(self):
return tuple(self.array.shape())
@shape.setter
def shape(self, value):
arr = self.reshape(value)
self.array = arr.array
@property
def ndim(self):
return len(self.array.shape())
    def __getitem__(self, key):
        # Fast path: delegate indexing to numpy. The INDArray-based path below
        # is kept for reference but is unreachable because of this early return.
        return ndarray(self.numpy()[key])
if type(key) is int:
return ndarray(self.array.get(NDArrayIndex.point(key)))
if type(key) is slice:
start = key.start
stop = key.stop
step = key.step
if start is None:
start = 0
if stop is None:
shape = self.array.shape()
if shape[0] == 1:
stop = shape[1]
else:
stop = shape[0]
if stop - start <= 0:
return None
if step is None or step == 1:
return ndarray(self.array.get(NDArrayIndex.interval(start, stop)))
else:
return ndarray(self.array.get(NDArrayIndex.interval(start, step, stop)))
if type(key) is list:
raise NotImplementedError(
'Sorry, this type of indexing is not supported yet.')
if type(key) is tuple:
key = list(key)
shape = self.array.shape()
ndim = len(shape)
nk = len(key)
key += [slice(None)] * (ndim - nk)
args = []
for i, dim in enumerate(key):
if type(dim) is int:
args.append(NDArrayIndex.point(dim))
elif type(dim) is slice:
if dim == slice(None):
args.append(NDArrayIndex.all())
else:
start = dim.start
stop = dim.stop
step = dim.step
if start is None:
start = 0
if stop is None:
stop = shape[i]
if stop - start <= 0:
return None
if step is None or step == 1:
args.append(NDArrayIndex.interval(start, stop))
else:
args.append(NDArrayIndex.interval(
start, step, stop))
elif type(dim) in (list, tuple):
raise NotImplementedError(
'Sorry, this type of indexing is not supported yet.')
return ndarray(self.array.get(*args))
    def __setitem__(self, key, other):
        # Fast path: assign through numpy; the INDArray-based path below is unreachable.
        self.numpy()[key] = _nparray(other)
        return
other = _indarray(other)
view = self[key]
if view is None:
return
view = view.array
other = broadcast_like(other, view)
view.assign(other)
    def __add__(self, other):
        # The arithmetic operators likewise short-circuit to numpy; the nd4j
        # fallbacks below them are currently unreachable.
        return ndarray(self.numpy() + _nparray(other))
other = _indarray(other)
x, y = broadcast(self.array, other)
return ndarray(x.add(y))
def __sub__(self, other):
return ndarray(self.numpy() - _nparray(other))
other = _indarray(other)
x, y = broadcast(self.array, other)
return ndarray(x.sub(y))
def __mul__(self, other):
return ndarray(self.numpy() * _nparray(other))
other = _indarray(other)
x, y = broadcast(self.array, other)
return ndarray(x.mul(y))
def __div__(self, other):
return ndarray(self.numpy() / _nparray(other))
other = _indarray(other)
x, y = broadcast(self.array, other)
return ndarray(x.div(y))
def __pow__(self, other):
return ndarray(self.numpy() ** _nparray(other))
other = _indarray(other)
x, y = broadcast(self.array, other)
return ndarray(Transforms.pow(x, y))
def __iadd__(self, other):
self.numpy().__iadd__(_nparray(other))
return self
other = _indarray(other)
if self.array.shape() == other.shape():
self.array = self.array.addi(other)
else:
x, y = broadcast(self.array, other)
self.array = x.add(y)
return self
def __isub__(self, other):
self.numpy().__isub__(_nparray(other))
return self
other = _indarray(other)
if self.array.shape() == other.shape():
self.array = self.array.subi(other)
else:
x, y = broadcast(self.array, other)
self.array = x.sub(y)
return self
def __imul__(self, other):
self.numpy().__imul__(_nparray(other))
return self
other = _indarray(other)
if self.array.shape() == other.shape():
self.array = self.array.muli(other)
else:
x, y = broadcast(self.array, other)
self.array = x.mul(y)
return self
def __idiv__(self, other):
self.numpy().__idiv__(_nparray(other))
return self
other = _indarray(other)
if self.array.shape() == other.shape():
self.array = self.array.divi(other)
else:
x, y = broadcast(self.array, other)
self.array = x.div(y)
return self
def __ipow__(self, other):
self.numpy().__ipow__(_nparray(other))
return self
        other = _indarray(other)
        if self.array.shape() == other.shape():
            # The original called divi() here, which looks like a copy-paste slip;
            # an in-place power should go through Transforms.pow.
            self.array = Transforms.pow(self.array, other)
else:
x, y = broadcast(self.array, other)
self.array = Transforms.pow(x, y)
return self
def __getattr__(self, attr):
import ops
f = getattr(ops, attr)
setattr(ndarray, attr, f)
return getattr(self, attr)
def __int__(self):
if self.array.length() == 1:
return self.array.getInt(0)
raise Exception('Applicable only for scalars')
def __float__(self):
if self.array.length() == 1:
return self.array.getDouble(0)
raise Exception('Applicable only for scalars')
@property
def T(self):
return self.transpose()
def array(*args, **kwargs):
return ndarray(*args, **kwargs)
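# Minimal usage sketch (hypothetical; assumes an initialized ND4J backend):
# x = array([[1., 2.], [3., 4.]])
# y = array([10., 20.])
# print((x + y).numpy())   # arithmetic currently runs through the numpy fast path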
| [((175, 14, 175, 44), 'ctypes.POINTER', 'ctypes.POINTER', ({(175, 29, 175, 43): 'mapping[dtype]'}, {}), '(mapping[dtype])', False, 'import ctypes\n'), ((176, 14, 176, 43), 'ctypes.cast', 'ctypes.cast', ({(176, 26, 176, 33): 'address', (176, 35, 176, 42): 'Pointer'}, {}), '(address, Pointer)', False, 'import ctypes\n'), ((110, 8, 110, 96), 'warnings.warn', 'warnings.warn', ({(110, 22, 110, 95): '"""Can not set context dtype now. Set it at the beginning of your program."""'}, {}), "(\n 'Can not set context dtype now. Set it at the beginning of your program.')", False, 'import warnings\n'), ((78, 16, 78, 31), 'numpy.dtype', 'np.dtype', ({(78, 25, 78, 30): 'dtype'}, {}), '(dtype)', True, 'import numpy as np\n'), ((206, 15, 206, 26), 'numpy.array', 'np.array', ({(206, 24, 206, 25): 'x'}, {}), '(x)', True, 'import numpy as np\n'), ((309, 23, 309, 37), 'numpy.array', 'np.array', ({(309, 32, 309, 36): 'data'}, {}), '(data)', True, 'import numpy as np\n'), ((190, 27, 190, 38), 'numpy.array', 'np.array', ({(190, 36, 190, 37): 'x'}, {}), '(x)', True, 'import numpy as np\n'), ((208, 15, 208, 26), 'numpy.array', 'np.array', ({(208, 24, 208, 25): 'x'}, {}), '(x)', True, 'import numpy as np\n')] |
hanayik/StimSync | Python/usec_mode.py | f08ec01a36c47b00bfe4937b5a6eb2a60af0713d | import serial
ser = serial.Serial('/dev/tty.usbmodem7071', 115200, timeout=10)
ser.write("\xb1\xa3\xb5\xb5") #set usec mode 177,163,181,181
ser.flush()
ser.flushInput()
obs = ser.read(8)
if len(obs) != 8:
    print('Error: no button presses detected')
print 'Observed data (as hex): ' + obs.encode('hex')  # Python 2 print statement
obsBin = [ord(c) for c in obs]
usec = (obsBin[3] << 24) + (obsBin[4] << 16) + (obsBin[5] << 8) + obsBin[6]  # bytes 3-6: 32-bit microsecond timestamp
keys = (obsBin[1] << 8) + obsBin[2]  # bytes 1-2: 16-bit button bitmask
print 'keys pressed %d at %d usec' % (keys, usec)
ser.write("\xb1\xa3\xa9\xa9") #turn off oscilloscope: set keyboard mode 177,163,169,169
ser.close() | [] |
hahaxun/audio | torchaudio/datasets/libritts.py | 87a1886ecfa83b398a2a9a09d9a94bd9dabc5cf5 | import os
from typing import Tuple
import torchaudio
from torch import Tensor
from torch.utils.data import Dataset
from torchaudio.datasets.utils import (
download_url,
extract_archive,
walk_files,
)
URL = "train-clean-100"
FOLDER_IN_ARCHIVE = "LibriTTS"
_CHECKSUMS = {
"http://www.openslr.org/60/dev-clean.tar.gz": "0c3076c1e5245bb3f0af7d82087ee207",
"http://www.openslr.org/60/dev-other.tar.gz": "815555d8d75995782ac3ccd7f047213d",
"http://www.openslr.org/60/test-clean.tar.gz": "7bed3bdb047c4c197f1ad3bc412db59f",
"http://www.openslr.org/60/test-other.tar.gz": "ae3258249472a13b5abef2a816f733e4",
"http://www.openslr.org/60/train-clean-100.tar.gz": "4a8c202b78fe1bc0c47916a98f3a2ea8",
"http://www.openslr.org/60/train-clean-360.tar.gz": "a84ef10ddade5fd25df69596a2767b2d",
"http://www.openslr.org/60/train-other-500.tar.gz": "7b181dd5ace343a5f38427999684aa6f",
}
def load_libritts_item(
fileid: str,
path: str,
ext_audio: str,
ext_original_txt: str,
ext_normalized_txt: str,
) -> Tuple[Tensor, int, str, str, int, int, str]:
speaker_id, chapter_id, segment_id, utterance_id = fileid.split("_")
utterance_id = fileid
normalized_text = utterance_id + ext_normalized_txt
normalized_text = os.path.join(path, speaker_id, chapter_id, normalized_text)
original_text = utterance_id + ext_original_txt
original_text = os.path.join(path, speaker_id, chapter_id, original_text)
file_audio = utterance_id + ext_audio
file_audio = os.path.join(path, speaker_id, chapter_id, file_audio)
# Load audio
waveform, sample_rate = torchaudio.load(file_audio)
# Load original text
with open(original_text) as ft:
original_text = ft.readline()
# Load normalized text
with open(normalized_text, "r") as ft:
normalized_text = ft.readline()
return (
waveform,
sample_rate,
original_text,
normalized_text,
int(speaker_id),
int(chapter_id),
utterance_id,
)
class LIBRITTS(Dataset):
"""Create a Dataset for LibriTTS.
Args:
root (str): Path to the directory where the dataset is found or downloaded.
url (str, optional): The URL to download the dataset from,
or the type of the dataset to dowload.
Allowed type values are ``"dev-clean"``, ``"dev-other"``, ``"test-clean"``,
``"test-other"``, ``"train-clean-100"``, ``"train-clean-360"`` and
``"train-other-500"``. (default: ``"train-clean-100"``)
folder_in_archive (str, optional):
The top-level directory of the dataset. (default: ``"LibriTTS"``)
download (bool, optional):
Whether to download the dataset if it is not found at root path. (default: ``False``).
"""
_ext_original_txt = ".original.txt"
_ext_normalized_txt = ".normalized.txt"
_ext_audio = ".wav"
def __init__(
self,
root: str,
url: str = URL,
folder_in_archive: str = FOLDER_IN_ARCHIVE,
download: bool = False,
) -> None:
if url in [
"dev-clean",
"dev-other",
"test-clean",
"test-other",
"train-clean-100",
"train-clean-360",
"train-other-500",
]:
ext_archive = ".tar.gz"
base_url = "http://www.openslr.org/resources/60/"
url = os.path.join(base_url, url + ext_archive)
basename = os.path.basename(url)
archive = os.path.join(root, basename)
basename = basename.split(".")[0]
folder_in_archive = os.path.join(folder_in_archive, basename)
self._path = os.path.join(root, folder_in_archive)
if download:
if not os.path.isdir(self._path):
if not os.path.isfile(archive):
checksum = _CHECKSUMS.get(url, None)
download_url(url, root, hash_value=checksum)
extract_archive(archive)
walker = walk_files(
self._path, suffix=self._ext_audio, prefix=False, remove_suffix=True
)
self._walker = list(walker)
def __getitem__(self, n: int) -> Tuple[Tensor, int, str, str, int, int, str]:
"""Load the n-th sample from the dataset.
Args:
n (int): The index of the sample to be loaded
Returns:
tuple: ``(waveform, sample_rate, original_text, normalized_text, speaker_id,
chapter_id, utterance_id)``
"""
fileid = self._walker[n]
return load_libritts_item(
fileid,
self._path,
self._ext_audio,
self._ext_original_txt,
self._ext_normalized_txt,
)
def __len__(self) -> int:
return len(self._walker)
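# Minimal usage sketch (hypothetical root path):
# dataset = LIBRITTS(root="./data", url="dev-clean", download=True)
# waveform, sample_rate, original, normalized, speaker, chapter, utt = dataset[0]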
| [((37, 22, 37, 81), 'os.path.join', 'os.path.join', ({(37, 35, 37, 39): 'path', (37, 41, 37, 51): 'speaker_id', (37, 53, 37, 63): 'chapter_id', (37, 65, 37, 80): 'normalized_text'}, {}), '(path, speaker_id, chapter_id, normalized_text)', False, 'import os\n'), ((40, 20, 40, 77), 'os.path.join', 'os.path.join', ({(40, 33, 40, 37): 'path', (40, 39, 40, 49): 'speaker_id', (40, 51, 40, 61): 'chapter_id', (40, 63, 40, 76): 'original_text'}, {}), '(path, speaker_id, chapter_id, original_text)', False, 'import os\n'), ((43, 17, 43, 71), 'os.path.join', 'os.path.join', ({(43, 30, 43, 34): 'path', (43, 36, 43, 46): 'speaker_id', (43, 48, 43, 58): 'chapter_id', (43, 60, 43, 70): 'file_audio'}, {}), '(path, speaker_id, chapter_id, file_audio)', False, 'import os\n'), ((46, 28, 46, 55), 'torchaudio.load', 'torchaudio.load', ({(46, 44, 46, 54): 'file_audio'}, {}), '(file_audio)', False, 'import torchaudio\n'), ((110, 19, 110, 40), 'os.path.basename', 'os.path.basename', ({(110, 36, 110, 39): 'url'}, {}), '(url)', False, 'import os\n'), ((111, 18, 111, 46), 'os.path.join', 'os.path.join', ({(111, 31, 111, 35): 'root', (111, 37, 111, 45): 'basename'}, {}), '(root, basename)', False, 'import os\n'), ((114, 28, 114, 69), 'os.path.join', 'os.path.join', ({(114, 41, 114, 58): 'folder_in_archive', (114, 60, 114, 68): 'basename'}, {}), '(folder_in_archive, basename)', False, 'import os\n'), ((116, 21, 116, 58), 'os.path.join', 'os.path.join', ({(116, 34, 116, 38): 'root', (116, 40, 116, 57): 'folder_in_archive'}, {}), '(root, folder_in_archive)', False, 'import os\n'), ((125, 17, 127, 9), 'torchaudio.datasets.utils.walk_files', 'walk_files', (), '', False, 'from torchaudio.datasets.utils import download_url, extract_archive, walk_files\n'), ((108, 18, 108, 59), 'os.path.join', 'os.path.join', ({(108, 31, 108, 39): 'base_url', (108, 41, 108, 58): 'url + ext_archive'}, {}), '(base_url, url + ext_archive)', False, 'import os\n'), ((119, 19, 119, 44), 'os.path.isdir', 'os.path.isdir', ({(119, 33, 119, 43): 'self._path'}, {}), '(self._path)', False, 'import os\n'), ((123, 16, 123, 40), 'torchaudio.datasets.utils.extract_archive', 'extract_archive', ({(123, 32, 123, 39): 'archive'}, {}), '(archive)', False, 'from torchaudio.datasets.utils import download_url, extract_archive, walk_files\n'), ((120, 23, 120, 46), 'os.path.isfile', 'os.path.isfile', ({(120, 38, 120, 45): 'archive'}, {}), '(archive)', False, 'import os\n'), ((122, 20, 122, 64), 'torchaudio.datasets.utils.download_url', 'download_url', (), '', False, 'from torchaudio.datasets.utils import download_url, extract_archive, walk_files\n')] |
silence0201/Learn-Python | Others/Source/19/19.2/barh_test.py | 662da7c0e74221cedb445ba17d5cb1cd3af41c86 | # coding: utf-8
#########################################################################
# Website: <a href="http://www.crazyit.org">疯狂Java联盟</a>                #
# author yeeku.H.lee [email protected] #
# #
# version 1.0 #
# #
# Copyright (C), 2001-2018, yeeku.H.Lee #
# #
# This program is protected by copyright laws. #
# #
# Program Name: #
# #
# <br>Date: #
#########################################################################
import matplotlib.pyplot as plt
import numpy as np
# Build the data
x_data = ['2011', '2012', '2013', '2014', '2015', '2016', '2017']
y_data = [58000, 60200, 63000, 71000, 84000, 90500, 107000]
y_data2 = [52000, 54200, 51500,58300, 56800, 59500, 62700]
bar_width=0.3
# Use range(len(x_data)) for the Y-axis positions, i.e. 0, 1, 2, ...
plt.barh(y=range(len(x_data)), width=y_data, label='疯狂Java讲义',
color='steelblue', alpha=0.8, height=bar_width)
# Use np.arange(len(x_data)) + bar_width for the Y positions, i.e. bar_width,
# 1 + bar_width, 2 + bar_width, ... so this series sits beside the first one
plt.barh(y=np.arange(len(x_data))+bar_width, width=y_data2,
label='疯狂Android讲义', color='indianred', alpha=0.8, height=bar_width)
# Show the exact value on each bar; ha sets horizontal alignment, va sets vertical alignment
for y, x in enumerate(y_data):
plt.text(x+5000, y-bar_width/2, '%s' % x, ha='center', va='bottom')
for y, x in enumerate(y_data2):
plt.text(x+5000, y+bar_width/2, '%s' % x, ha='center', va='bottom')
# Set the tick labels on the Y axis
plt.yticks(np.arange(len(x_data))+bar_width/2, x_data)
# Set the chart title
plt.title("Java与Android图书对比")
# Label both axes
plt.xlabel("销量")
plt.ylabel("年份")
# Show the legend
plt.legend()
plt.show()
| [((40, 0, 40, 39), 'matplotlib.pyplot.title', 'plt.title', ({(40, 10, 40, 38): '"""Java与Android图书对比"""'}, {}), "('Java与Android图书对比')", True, 'import matplotlib.pyplot as plt\n'), ((42, 0, 42, 20), 'matplotlib.pyplot.xlabel', 'plt.xlabel', ({(42, 11, 42, 19): '"""销量"""'}, {}), "('销量')", True, 'import matplotlib.pyplot as plt\n'), ((43, 0, 43, 20), 'matplotlib.pyplot.ylabel', 'plt.ylabel', ({(43, 11, 43, 19): '"""年份"""'}, {}), "('年份')", True, 'import matplotlib.pyplot as plt\n'), ((45, 0, 45, 12), 'matplotlib.pyplot.legend', 'plt.legend', ({}, {}), '()', True, 'import matplotlib.pyplot as plt\n'), ((46, 0, 46, 10), 'matplotlib.pyplot.show', 'plt.show', ({}, {}), '()', True, 'import matplotlib.pyplot as plt\n'), ((34, 4, 34, 71), 'matplotlib.pyplot.text', 'plt.text', (), '', True, 'import matplotlib.pyplot as plt\n'), ((36, 4, 36, 71), 'matplotlib.pyplot.text', 'plt.text', (), '', True, 'import matplotlib.pyplot as plt\n')] |
kendny/study_docker | 03_docker_compose/03_c_simple_case_with_mongodb_orm/app/app.py | edb376fb69319a78e05f60faa5dcc88d527602c4 | #!/usr/bin/env python3
# -*- coding: utf-8 -*-
"""
Created on Tue Dec 18 20:13:57 2018
@author: allen
"""
import random, os, json, datetime, time
from flask import Flask, Response
from pymongo import MongoClient
from bson import json_util
app = Flask(__name__)
MONGO_URI = "mongodb://mongodb:27017"  # i.e. "mongodb://<container_name>:27017"
mongodb_client = MongoClient(MONGO_URI)
random_numbers = mongodb_client.demo.random_numbers
time.sleep(5)  # crude wait for the MongoDB container to finish starting
######################
##
##########################
from pymodm.connection import connect
from pymongo.write_concern import WriteConcern
from pymodm import MongoModel, fields
# Connect to MongoDB and call the connection "my-app".
connect("mongodb://mongodb:27017/myDatabase", alias="my-app")
class User(MongoModel):
email = fields.EmailField(primary_key=True)
first_name = fields.CharField()
last_name = fields.CharField()
class Meta:
write_concern = WriteConcern(j=True)
connection_alias = 'my-app'
@app.route("/")
def hello():
html = "<h3> Hello world...</h3>"
#User('[email protected]', name, 'Ross').save()
return html
@app.route("/add_user/<name>")
def add_user(name):
#User('[email protected]', name, 'Ross').save()
html = "<h3> Hello </h3>"
User('[email protected]', name, 'Ross').save()
return "name {} save to database".format(name)
@app.route("/random/<int:lower>/<int:upper>")
def random_generator(lower, upper):
number = str(random.randint(lower, upper))
random_numbers.update(
{"_id" : "lasts"},
{"$push" : {
"items" : {
"$each": [{"value" : number, "date": datetime.datetime.utcnow()}],
"$sort" : {"date" : -1},
"$slice" : 5
}
}},
upsert=True
)
return Response(number, status=200, mimetype='application/json')
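# The $push/$each/$sort/$slice combination above keeps only the five most recent
# values. A stored document would look roughly like (hypothetical values):
#   {"_id": "lasts", "items": [{"value": "42", "date": ...}, ...]}  # newest first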
@app.route("/random-list")
def last_number_list():
last_numbers = list(random_numbers.find({"_id" : "lasts"}))
extracted = [d['value'] for d in last_numbers[0]['items']]
return Response(json.dumps(extracted, default=json_util.default), status=200, mimetype='application/json')
if __name__ == "__main__":
port = int(os.environ.get('PORT', 5000))
app.config['DEBUG'] = True
app.run(host='0.0.0.0', port=port)
| [((15, 6, 15, 21), 'flask.Flask', 'Flask', ({(15, 12, 15, 20): '__name__'}, {}), '(__name__)', False, 'from flask import Flask, Response\n'), ((18, 15, 18, 37), 'pymongo.MongoClient', 'MongoClient', ({(18, 27, 18, 36): 'MONGO_URI'}, {}), '(MONGO_URI)', False, 'from pymongo import MongoClient\n'), ((21, 0, 21, 13), 'time.sleep', 'time.sleep', ({(21, 11, 21, 12): '(5)'}, {}), '(5)', False, 'import random, os, json, datetime, time\n'), ((33, 0, 33, 61), 'pymodm.connection.connect', 'connect', (), '', False, 'from pymodm.connection import connect\n'), ((35, 12, 35, 47), 'pymodm.fields.EmailField', 'fields.EmailField', (), '', False, 'from pymodm import MongoModel, fields\n'), ((36, 17, 36, 35), 'pymodm.fields.CharField', 'fields.CharField', ({}, {}), '()', False, 'from pymodm import MongoModel, fields\n'), ((37, 16, 37, 34), 'pymodm.fields.CharField', 'fields.CharField', ({}, {}), '()', False, 'from pymodm import MongoModel, fields\n'), ((73, 11, 73, 68), 'flask.Response', 'Response', (), '', False, 'from flask import Flask, Response\n'), ((40, 24, 40, 44), 'pymongo.write_concern.WriteConcern', 'WriteConcern', (), '', False, 'from pymongo.write_concern import WriteConcern\n'), ((60, 17, 60, 45), 'random.randint', 'random.randint', ({(60, 32, 60, 37): 'lower', (60, 39, 60, 44): 'upper'}, {}), '(lower, upper)', False, 'import random, os, json, datetime, time\n'), ((81, 20, 81, 68), 'json.dumps', 'json.dumps', (), '', False, 'import random, os, json, datetime, time\n'), ((85, 15, 85, 43), 'os.environ.get', 'os.environ.get', ({(85, 30, 85, 36): '"""PORT"""', (85, 38, 85, 42): '5000'}, {}), "('PORT', 5000)", False, 'import random, os, json, datetime, time\n'), ((65, 53, 65, 79), 'datetime.datetime.utcnow', 'datetime.datetime.utcnow', ({}, {}), '()', False, 'import random, os, json, datetime, time\n')] |
HPI-DeepLearning/wort2vek | goethe/eval/analogy_space.py | bc91c2752a8516665d270c7a7a793ec484c970c4 | #!/usr/bin/env python
from gensim.models.keyedvectors import KeyedVectors
from scipy import spatial
from numpy import linalg
import argparse
import sys
if len(sys.argv) != 6:
    print('usage: analogy_space.py <vector_file> <word1> <word2> <word3> <word4>')
    print('got %d arguments' % (len(sys.argv) - 1))
    exit()
vector_file = sys.argv[1]
words = [sys.argv[2], sys.argv[3], sys.argv[4], sys.argv[5]]
print(words)
wvs = KeyedVectors.load_word2vec_format(vector_file, binary=True)
print('WVs loaded.')
for w in words:
if w not in wvs.vocab:
print('out of vocab!')
exit()
#print(wvs.most_similar(positive=[words[1], words[2]], negative=[words[0]], topn=3))
w1 = wvs[words[0]]
w2 = wvs[words[1]]
w3 = wvs[words[2]]
w4 = wvs[words[3]]
m1 = w1 / linalg.norm(w1)
m2 = w2 / linalg.norm(w2)
m3 = w3 / linalg.norm(w3)
m4 = w4 / linalg.norm(w4)
diff1 = w1 - w2
diff2 = w3 - w4
miff1 = m1 - m2
miff2 = m3 - m4
print('-------Word Space---------')
print('to word-4: ', 1-spatial.distance.cosine(m2+m3-m1, m4))
print('to word-3: ', 1-spatial.distance.cosine(m1+m4-m2, m3))
print('to word-2: ', 1-spatial.distance.cosine(m4+m1-m3, m2))
print('to word-1: ', 1-spatial.distance.cosine(m2+m3-m4, m1))
print('------Analogy Space-------')
print(' cosine: ', 1-spatial.distance.cosine(diff1, diff2))
print(' Euclidean: ', 1-linalg.norm(diff1-diff2)/(linalg.norm(diff1)+linalg.norm(diff2)))
print(' M-cosine: ', 1-spatial.distance.cosine(miff1, miff2))
print('M-Euclidean: ', 1-linalg.norm(miff1-miff2)/(linalg.norm(miff1)+linalg.norm(miff2)))
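# Example invocation (hypothetical file and words):
#   python analogy_space.py vectors.bin king man queen woman
# High word-space scores mean w2 + w3 - w1 lands near w4; the analogy-space
# scores compare the difference vectors w1 - w2 and w3 - w4 directly.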
| [((19, 6, 19, 65), 'gensim.models.keyedvectors.KeyedVectors.load_word2vec_format', 'KeyedVectors.load_word2vec_format', (), '', False, 'from gensim.models.keyedvectors import KeyedVectors\n'), ((33, 10, 33, 25), 'numpy.linalg.norm', 'linalg.norm', ({(33, 22, 33, 24): 'w1'}, {}), '(w1)', False, 'from numpy import linalg\n'), ((34, 10, 34, 25), 'numpy.linalg.norm', 'linalg.norm', ({(34, 22, 34, 24): 'w2'}, {}), '(w2)', False, 'from numpy import linalg\n'), ((35, 10, 35, 25), 'numpy.linalg.norm', 'linalg.norm', ({(35, 22, 35, 24): 'w3'}, {}), '(w3)', False, 'from numpy import linalg\n'), ((36, 10, 36, 25), 'numpy.linalg.norm', 'linalg.norm', ({(36, 22, 36, 24): 'w4'}, {}), '(w4)', False, 'from numpy import linalg\n'), ((44, 23, 44, 60), 'scipy.spatial.distance.cosine', 'spatial.distance.cosine', ({(44, 47, 44, 55): '(m2 + m3 - m1)', (44, 57, 44, 59): 'm4'}, {}), '(m2 + m3 - m1, m4)', False, 'from scipy import spatial\n'), ((45, 23, 45, 60), 'scipy.spatial.distance.cosine', 'spatial.distance.cosine', ({(45, 47, 45, 55): '(m1 + m4 - m2)', (45, 57, 45, 59): 'm3'}, {}), '(m1 + m4 - m2, m3)', False, 'from scipy import spatial\n'), ((46, 23, 46, 60), 'scipy.spatial.distance.cosine', 'spatial.distance.cosine', ({(46, 47, 46, 55): '(m4 + m1 - m3)', (46, 57, 46, 59): 'm2'}, {}), '(m4 + m1 - m3, m2)', False, 'from scipy import spatial\n'), ((47, 23, 47, 60), 'scipy.spatial.distance.cosine', 'spatial.distance.cosine', ({(47, 47, 47, 55): '(m2 + m3 - m4)', (47, 57, 47, 59): 'm1'}, {}), '(m2 + m3 - m4, m1)', False, 'from scipy import spatial\n'), ((49, 25, 49, 62), 'scipy.spatial.distance.cosine', 'spatial.distance.cosine', ({(49, 49, 49, 54): 'diff1', (49, 56, 49, 61): 'diff2'}, {}), '(diff1, diff2)', False, 'from scipy import spatial\n'), ((51, 25, 51, 62), 'scipy.spatial.distance.cosine', 'spatial.distance.cosine', ({(51, 49, 51, 54): 'miff1', (51, 56, 51, 61): 'miff2'}, {}), '(miff1, miff2)', False, 'from scipy import spatial\n'), ((50, 25, 50, 49), 'numpy.linalg.norm', 'linalg.norm', ({(50, 37, 50, 48): '(diff1 - diff2)'}, {}), '(diff1 - diff2)', False, 'from numpy import linalg\n'), ((52, 25, 52, 49), 'numpy.linalg.norm', 'linalg.norm', ({(52, 37, 52, 48): '(miff1 - miff2)'}, {}), '(miff1 - miff2)', False, 'from numpy import linalg\n'), ((50, 51, 50, 69), 'numpy.linalg.norm', 'linalg.norm', ({(50, 63, 50, 68): 'diff1'}, {}), '(diff1)', False, 'from numpy import linalg\n'), ((50, 70, 50, 88), 'numpy.linalg.norm', 'linalg.norm', ({(50, 82, 50, 87): 'diff2'}, {}), '(diff2)', False, 'from numpy import linalg\n'), ((52, 51, 52, 69), 'numpy.linalg.norm', 'linalg.norm', ({(52, 63, 52, 68): 'miff1'}, {}), '(miff1)', False, 'from numpy import linalg\n'), ((52, 70, 52, 88), 'numpy.linalg.norm', 'linalg.norm', ({(52, 82, 52, 87): 'miff2'}, {}), '(miff2)', False, 'from numpy import linalg\n')] |
vishalbelsare/LocalGraphClustering | localgraphclustering/algorithms/eig2_nL.py | a6325350997932d548a876deb259c2387fc2c809 | import numpy as np
import scipy as sp
import scipy.sparse.linalg as splinalg
def eig2_nL(g, tol_eigs = 1.0e-6, normalize:bool = True, dim:int=1):
"""
DESCRIPTION
-----------
    Computes the eigenvector that corresponds to the second smallest eigenvalue
    of the normalized Laplacian matrix (and further eigenvectors when dim > 1).
PARAMETERS (mandatory)
----------------------
g: graph object
PARAMETERS (optional)
---------------------
dim: positive, int
default == 1
The number of eigenvectors or dimensions to compute.
tol_eigs: positive float, double
default == 1.0e-6
Tolerance for computation of the eigenvector that corresponds to
the second smallest eigenvalue of the normalized Laplacian matrix.
normalize: bool,
default == True
True if we should return the eigenvectors of the generalized
eigenvalue problem associated with the normalized Laplacian.
This should be on unless you know what you are doing.
RETURNS
------
    p: Eigenvector or eigenvector matrix that
       corresponds to the second smallest eigenvalue of the
       normalized Laplacian matrix, plus further eigenvectors when dim > 1.
"""
n = g.adjacency_matrix.shape[0]
D_sqrt_neg = sp.sparse.spdiags(g.dn_sqrt.transpose(), 0, n, n)
L = sp.sparse.identity(n) - D_sqrt_neg.dot((g.adjacency_matrix.dot(D_sqrt_neg)))
emb_eig_val, p = splinalg.eigsh(L, which='SM', k=1+dim, tol = tol_eigs)
F = np.real(p[:,1:])
if normalize:
F *= g.dn_sqrt[:,np.newaxis]
return F, emb_eig_val
"""
Random walks and local cuts in graphs, Chung, LAA 2007
We just form the sub-matrix of the Laplacian and use the eigenvector there.
"""
def eig2nL_subgraph(g, ref_nodes, tol_eigs = 1.0e-6, normalize: bool = True):
A_sub = g.adjacency_matrix.tocsr()[ref_nodes, :].tocsc()[:, ref_nodes]
nref = len(ref_nodes)
D_sqrt_neg = sp.sparse.spdiags(g.dn_sqrt[ref_nodes].transpose(), 0, nref, nref)
L_sub = sp.sparse.identity(nref) - D_sqrt_neg.dot((A_sub.dot(D_sqrt_neg)))
emb_eig_val, emb_eig = splinalg.eigsh(L_sub, which='SM', k=1, tol=tol_eigs)
emb_eig *= -1 if max(emb_eig) < 0 else 1
f = emb_eig[:,0]
if normalize:
f *= g.dn_sqrt[ref_nodes]
return ((ref_nodes,f), emb_eig_val)
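# Hypothetical usage sketch (``g`` is a localgraphclustering graph object):
# F, lam = eig2_nL(g, dim=1)                      # global spectral embedding
# (nodes, f), lam_sub = eig2nL_subgraph(g, ref_nodes=[0, 1, 2, 3])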
| [((51, 21, 51, 75), 'scipy.sparse.linalg.eigsh', 'splinalg.eigsh', (), '', True, 'import scipy.sparse.linalg as splinalg\n'), ((53, 8, 53, 24), 'numpy.real', 'np.real', ({(53, 16, 53, 23): 'p[:, 1:]'}, {}), '(p[:, 1:])', True, 'import numpy as np\n'), ((69, 27, 69, 79), 'scipy.sparse.linalg.eigsh', 'splinalg.eigsh', (), '', True, 'import scipy.sparse.linalg as splinalg\n'), ((49, 8, 49, 29), 'scipy.sparse.identity', 'sp.sparse.identity', ({(49, 27, 49, 28): 'n'}, {}), '(n)', True, 'import scipy as sp\n'), ((68, 12, 68, 36), 'scipy.sparse.identity', 'sp.sparse.identity', ({(68, 31, 68, 35): 'nref'}, {}), '(nref)', True, 'import scipy as sp\n')] |
isabella232/nanos-nonsecure-firmware | build/common/hex2carray.py | d1ce2e0e01a8ed6d8840a24308e16f6560a626aa | """
*******************************************************************************
* Ledger Blue
* (c) 2016 Ledger
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
********************************************************************************
"""
from ledgerblue.hexParser import IntelHexParser
import argparse
parser = argparse.ArgumentParser()
parser.add_argument("--hex", help="Hex file to be converted as a C array")
args = parser.parse_args()
if args.hex == None:
raise Exception("Missing hex filename to sign")
parser = IntelHexParser(args.hex)
def hexU8(value):
    # Format a byte as exactly two lowercase hex digits (no '0x' prefix).
    return hex(0x100 | (value & 0xFF))[3:]
for a in parser.getAreas():
if (len(a.data) > 0x10000):
raise BaseException("data must be splitted in chunks of 64k")
print "0x" + hexU8(a.start >> 24) + ", 0x" + hexU8(a.start >> 16) + ", 0x" + hexU8(a.start >> 8) + ", 0x" + hexU8(a.start) + ", "
print "0x" + hexU8(len(a.data) >> 24) + ", 0x" + hexU8(len(a.data) >> 16) + ", 0x" + hexU8(len(a.data) >> 8) + ", 0x" + hexU8(len(a.data)) + ", "
    # emit the data bytes, low address to high address
offset = 0
while offset < len(a.data):
string = ""
for i in range(8):
if offset+i < len(a.data):
string += " 0x" + hexU8(a.data[offset+i]) + ","
print string
offset+=8
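# Example output for a 4-byte area at 0x08000000 (hypothetical):
#   0x08, 0x00, 0x00, 0x00,    <- start address, big-endian
#   0x00, 0x00, 0x00, 0x04,    <- length, big-endian
#    0x01, 0x02, 0x03, 0x04,   <- data bytes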
| [] |
xames3/vdoxa | setup.py | 8fa945449bb34447ded0c421214c0252ff523d4a | # Copyright 2020 XAMES3. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
# ======================================================================
"""
vdoXA is an open-source python package for trimming the videos.
It is built as a subsystem for < XXXXX Not to be named XXXXX > project.
Originally inspired by my colleague's work, I thought of improving the
concept and building a tool to simplify the process. I hope it comes with
strong support for continuous updates, reliable functions and overall
ease of use.
Read complete documentation at: <https://github.com/xames3/vdoxa>.
"""
from setuptools import find_packages, setup
from vdoxa.vars import dev
doclines = __doc__.split('\n')
def use_readme() -> str:
"""Use `README.md` for parsing long description."""
with open('README.md') as file:
return file.read()
with open('requirements.txt', 'r') as requirements:
required_packages = [package.rstrip() for package in requirements]
setup(
name=dev.PROJECT_NAME,
version=dev.PROJECT_VERSION,
url=dev.PROJECT_LINK,
download_url=dev.PROJECT_LINK,
author=dev.AUTHOR,
author_email=dev.AUTHOR_EMAIL,
maintainer=dev.AUTHOR,
maintainer_email=dev.AUTHOR_EMAIL,
classifiers=[
'Intended Audience :: Developers',
'Intended Audience :: End Users/Desktop',
'Intended Audience :: Information Technology',
'Intended Audience :: Science/Research',
'License :: OSI Approved :: Apache Software License',
'Natural Language :: English',
],
license=dev.PROJECT_LICENSE,
description=f'{doclines[1]}',
long_description=use_readme(),
long_description_content_type='text/markdown',
keywords='opencv2 cv2 moviepy',
zip_safe=False,
install_requires=required_packages,
python_requires='~=3.6',
include_package_data=True,
packages=find_packages(),
entry_points={
'console_scripts': [
'vdoxa = vdoxa.parser:main',
],
}
)
| [((71, 11, 71, 26), 'setuptools.find_packages', 'find_packages', ({}, {}), '()', False, 'from setuptools import find_packages, setup\n')] |
BaggerFast/Simple_votings | application/modules/login.py | 843769fa6fd2c04feb542e6b301b7b4810260d4e | from django.contrib import messages
from django.contrib.auth import login, authenticate
from django.shortcuts import render, redirect
from django.urls import reverse
from django.views import View
from application.forms import AuthenticateForm
from application.views import get_navbar, Page
class LoginView(View):
def __init__(self, **kwargs):
super().__init__(**kwargs)
self.context = {}
def get(self, request):
self.context['navbar'] = get_navbar(request)
self.context['form'] = AuthenticateForm()
return render(request, Page.login, self.context)
def post(self, request):
self.context['navbar'] = get_navbar(request)
data = request.POST
form = AuthenticateForm(data)
if form.is_valid():
user = authenticate(
username=data['username'],
password=data['password'],
)
if user:
login(request, user)
messages.success(request, 'You have successfully logged in!')
return redirect(reverse('main'))
messages.error(request, 'Invalid username and password pair.', extra_tags='danger')
else:
messages.error(request, 'Invalid username and password pair.', extra_tags='danger')
self.context['form'] = AuthenticateForm(data)
return render(request, Page.login, self.context)
| [((17, 33, 17, 52), 'application.views.get_navbar', 'get_navbar', ({(17, 44, 17, 51): 'request'}, {}), '(request)', False, 'from application.views import get_navbar, Page\n'), ((18, 31, 18, 49), 'application.forms.AuthenticateForm', 'AuthenticateForm', ({}, {}), '()', False, 'from application.forms import AuthenticateForm\n'), ((19, 15, 19, 57), 'django.shortcuts.render', 'render', ({(19, 22, 19, 29): 'request', (19, 32, 19, 42): 'Page.login', (19, 44, 19, 56): 'self.context'}, {}), '(request, Page.login, self.context)', False, 'from django.shortcuts import render, redirect\n'), ((22, 33, 22, 52), 'application.views.get_navbar', 'get_navbar', ({(22, 44, 22, 51): 'request'}, {}), '(request)', False, 'from application.views import get_navbar, Page\n'), ((24, 15, 24, 37), 'application.forms.AuthenticateForm', 'AuthenticateForm', ({(24, 32, 24, 36): 'data'}, {}), '(data)', False, 'from application.forms import AuthenticateForm\n'), ((37, 31, 37, 53), 'application.forms.AuthenticateForm', 'AuthenticateForm', ({(37, 48, 37, 52): 'data'}, {}), '(data)', False, 'from application.forms import AuthenticateForm\n'), ((38, 15, 38, 56), 'django.shortcuts.render', 'render', ({(38, 22, 38, 29): 'request', (38, 31, 38, 41): 'Page.login', (38, 43, 38, 55): 'self.context'}, {}), '(request, Page.login, self.context)', False, 'from django.shortcuts import render, redirect\n'), ((26, 19, 29, 13), 'django.contrib.auth.authenticate', 'authenticate', (), '', False, 'from django.contrib.auth import login, authenticate\n'), ((34, 12, 34, 95), 'django.contrib.messages.error', 'messages.error', (), '', False, 'from django.contrib import messages\n'), ((36, 12, 36, 95), 'django.contrib.messages.error', 'messages.error', (), '', False, 'from django.contrib import messages\n'), ((31, 16, 31, 36), 'django.contrib.auth.login', 'login', ({(31, 22, 31, 29): 'request', (31, 31, 31, 35): 'user'}, {}), '(request, user)', False, 'from django.contrib.auth import login, authenticate\n'), ((32, 16, 32, 77), 'django.contrib.messages.success', 'messages.success', ({(32, 33, 32, 40): 'request', (32, 42, 32, 76): '"""You have successfully logged in!"""'}, {}), "(request, 'You have successfully logged in!')", False, 'from django.contrib import messages\n'), ((33, 32, 33, 47), 'django.urls.reverse', 'reverse', ({(33, 40, 33, 46): '"""main"""'}, {}), "('main')", False, 'from django.urls import reverse\n')] |
HelloChatterbox/little_questions | little_questions/utils/log.py | 04bee86244b42fdaed9f8d010c2f83037ad753f6 | # Copyright 2017 Mycroft AI Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
import inspect
import logging
import sys
class LOG:
"""
Custom logger class that acts like logging.Logger
The logger name is automatically generated by the module of the caller
Usage:
>>> LOG.debug('My message: %s', debug_str)
13:12:43.673 - :<module>:1 - DEBUG - My message: hi
>>> LOG('custom_name').debug('Another message')
13:13:10.462 - custom_name - DEBUG - Another message
"""
base_path = "stdout"
fmt = '%(asctime)s.%(msecs)03d - ' \
'%(name)s - %(levelname)s - %(message)s'
datefmt = '%Y-%m-%d %H:%M:%S'
formatter = logging.Formatter(fmt, datefmt)
name = 'little_questions'
level = "DEBUG"
_loggers = {}
@classmethod
def set_level(cls, level="INFO"):
cls.level = level
for n in cls._loggers:
cls._loggers[n].setLevel(cls.level)
@classmethod
def create_logger(cls, name):
if name in cls._loggers:
return cls._loggers[name]
logger = logging.getLogger(name)
logger.propagate = False
stdout_handler = logging.StreamHandler(sys.stdout)
stdout_handler.setFormatter(cls.formatter)
logger.addHandler(stdout_handler)
logger.setLevel(cls.level)
cls._loggers[name] = logger
return logger
@classmethod
def _log(cls):
name = ""
if cls.name is not None:
name = cls.name + " - "
# Stack:
# [0] - _log()
# [1] - debug(), info(), warning(), or error()
# [2] - caller
stack = inspect.stack()
# Record:
# [0] - frame object
# [1] - filename
# [2] - line number
# [3] - function
# ...
record = stack[2]
name += record[3] + ':' + str(record[2])
logger = cls.create_logger(name)
return logger
@classmethod
def info(cls, *args, **kwargs):
cls._log().info(*args, **kwargs)
@classmethod
def debug(cls, *args, **kwargs):
cls._log().debug(*args, **kwargs)
@classmethod
def warning(cls, *args, **kwargs):
cls._log().warning(*args, **kwargs)
@classmethod
def error(cls, *args, **kwargs):
cls._log().error(*args, **kwargs)
@classmethod
def exception(cls, *args, **kwargs):
cls._log().exception(*args, **kwargs)
| [((35, 16, 35, 47), 'logging.Formatter', 'logging.Formatter', ({(35, 34, 35, 37): 'fmt', (35, 39, 35, 46): 'datefmt'}, {}), '(fmt, datefmt)', False, 'import logging\n'), ((50, 17, 50, 40), 'logging.getLogger', 'logging.getLogger', ({(50, 35, 50, 39): 'name'}, {}), '(name)', False, 'import logging\n'), ((52, 25, 52, 58), 'logging.StreamHandler', 'logging.StreamHandler', ({(52, 47, 52, 57): 'sys.stdout'}, {}), '(sys.stdout)', False, 'import logging\n'), ((69, 16, 69, 31), 'inspect.stack', 'inspect.stack', ({}, {}), '()', False, 'import inspect\n')] |
antopen/alipay-sdk-python-all | alipay/aop/api/response/AlipayOpenMiniVersionAuditApplyResponse.py | 8e51c54409b9452f8d46c7bb10eea7c8f7e8d30c | #!/usr/bin/env python
# -*- coding: utf-8 -*-
import json
from alipay.aop.api.response.AlipayResponse import AlipayResponse
class AlipayOpenMiniVersionAuditApplyResponse(AlipayResponse):
def __init__(self):
super(AlipayOpenMiniVersionAuditApplyResponse, self).__init__()
self._speed_up = None
self._speed_up_memo = None
@property
def speed_up(self):
return self._speed_up
@speed_up.setter
def speed_up(self, value):
self._speed_up = value
@property
def speed_up_memo(self):
return self._speed_up_memo
@speed_up_memo.setter
def speed_up_memo(self, value):
self._speed_up_memo = value
def parse_response_content(self, response_content):
response = super(AlipayOpenMiniVersionAuditApplyResponse, self).parse_response_content(response_content)
if 'speed_up' in response:
self.speed_up = response['speed_up']
if 'speed_up_memo' in response:
self.speed_up_memo = response['speed_up_memo']
| [] |
haruiz/PytorchCvStudio | cvstudio/view/widgets/loading_dialog/loading_dialog.py | ccf79dd0cc0d61f3fd01b1b5d96f7cda7b681eef | import os
from PyQt5 import QtCore
from PyQt5.QtCore import QRect, QPoint
from PyQt5.QtGui import QMovie, QCloseEvent, QShowEvent
from PyQt5.QtWidgets import QDialog, QLabel, QVBoxLayout, QApplication, QWidget
class QLoadingDialog(QDialog):
def __init__(self, parent=None):
super(QLoadingDialog, self).__init__()
self.setFixedSize(100, 100)
# self.setWindowOpacity(0.8)
self.setWindowFlags(QtCore.Qt.FramelessWindowHint)
self.setAttribute(QtCore.Qt.WA_TranslucentBackground)
app = QApplication.instance()
curr_theme = "light"
if app:
curr_theme = app.property("theme")
gif_file = os.path.abspath("./assets/icons/{}/loading.gif".format(curr_theme))
self.movie = QMovie(gif_file)
self.label = QLabel()
self.label.setMovie(self.movie)
self.layout = QVBoxLayout(self)
self.layout.addWidget(self.label)
def center(self, host: QWidget = None):
if host:
hostGeometry: QRect = host.geometry()
# dialogGeometry : QRect = self.geometry()
centerPoint: QPoint = hostGeometry.center()
centerPoint = host.mapToGlobal(centerPoint)
offset = 30
targetPoint = QPoint(centerPoint.x() - offset, centerPoint.y() - offset)
self.move(targetPoint)
        else:
            # Fall back to centering on the screen that currently contains the cursor.
            screen = QApplication.desktop().screenNumber(QApplication.desktop().cursor().pos())
            centerPoint = QApplication.desktop().screenGeometry(screen).center()
self.move(centerPoint)
return self
def showEvent(self, e: QShowEvent):
if self.movie.state() == QMovie.NotRunning:
self.movie.start()
def closeEvent(self, e: QCloseEvent):
if self.movie.state() == QMovie.Running:
self.movie.stop()
def exec_(self):
self.center()
return QDialog.exec_(self)
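# Minimal usage sketch (hypothetical; requires a running QApplication):
# dialog = QLoadingDialog()
# dialog.center(host=main_window)  # or rely on exec_() to center on the screen
# dialog.show()                    # spinner starts on show; close() stops it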
| [((16, 14, 16, 37), 'PyQt5.QtWidgets.QApplication.instance', 'QApplication.instance', ({}, {}), '()', False, 'from PyQt5.QtWidgets import QDialog, QLabel, QVBoxLayout, QApplication, QWidget\n'), ((21, 21, 21, 37), 'PyQt5.QtGui.QMovie', 'QMovie', ({(21, 28, 21, 36): 'gif_file'}, {}), '(gif_file)', False, 'from PyQt5.QtGui import QMovie, QCloseEvent, QShowEvent\n'), ((22, 21, 22, 29), 'PyQt5.QtWidgets.QLabel', 'QLabel', ({}, {}), '()', False, 'from PyQt5.QtWidgets import QDialog, QLabel, QVBoxLayout, QApplication, QWidget\n'), ((24, 22, 24, 39), 'PyQt5.QtWidgets.QVBoxLayout', 'QVBoxLayout', ({(24, 34, 24, 38): 'self'}, {}), '(self)', False, 'from PyQt5.QtWidgets import QDialog, QLabel, QVBoxLayout, QApplication, QWidget\n'), ((52, 15, 52, 34), 'PyQt5.QtWidgets.QDialog.exec_', 'QDialog.exec_', ({(52, 29, 52, 33): 'self'}, {}), '(self)', False, 'from PyQt5.QtWidgets import QDialog, QLabel, QVBoxLayout, QApplication, QWidget\n'), ((37, 21, 37, 43), 'PyQt5.QtWidgets.QApplication.desktop', 'QApplication.desktop', ({}, {}), '()', False, 'from PyQt5.QtWidgets import QDialog, QLabel, QVBoxLayout, QApplication, QWidget\n'), ((38, 26, 38, 48), 'PyQt5.QtWidgets.QApplication.desktop', 'QApplication.desktop', ({}, {}), '()', False, 'from PyQt5.QtWidgets import QDialog, QLabel, QVBoxLayout, QApplication, QWidget\n'), ((37, 57, 37, 79), 'PyQt5.QtWidgets.QApplication.desktop', 'QApplication.desktop', ({}, {}), '()', False, 'from PyQt5.QtWidgets import QDialog, QLabel, QVBoxLayout, QApplication, QWidget\n')] |
agustinhenze/mibs.snmplabs.com | pysnmp-with-texts/MWORKS-MIB.py | 1fc5c07860542b89212f4c8ab807057d9a9206c7 | #
# PySNMP MIB module MWORKS-MIB (http://snmplabs.com/pysmi)
# ASN.1 source file:///Users/davwang4/Dev/mibs.snmplabs.com/asn1/MWORKS-MIB
# Produced by pysmi-0.3.4 at Wed May 1 14:16:04 2019
# On host DAVWANG4-M-1475 platform Darwin version 18.5.0 by user davwang4
# Using Python version 3.7.3 (default, Mar 27 2019, 09:23:15)
#
ObjectIdentifier, OctetString, Integer = mibBuilder.importSymbols("ASN1", "ObjectIdentifier", "OctetString", "Integer")
NamedValues, = mibBuilder.importSymbols("ASN1-ENUMERATION", "NamedValues")
ValueRangeConstraint, SingleValueConstraint, ConstraintsUnion, ValueSizeConstraint, ConstraintsIntersection = mibBuilder.importSymbols("ASN1-REFINEMENT", "ValueRangeConstraint", "SingleValueConstraint", "ConstraintsUnion", "ValueSizeConstraint", "ConstraintsIntersection")
ModuleCompliance, NotificationGroup = mibBuilder.importSymbols("SNMPv2-CONF", "ModuleCompliance", "NotificationGroup")
Gauge32, Unsigned32, ObjectIdentity, IpAddress, Bits, MibIdentifier, Integer32, enterprises, ModuleIdentity, TimeTicks, Counter32, NotificationType, iso, Counter64, MibScalar, MibTable, MibTableRow, MibTableColumn = mibBuilder.importSymbols("SNMPv2-SMI", "Gauge32", "Unsigned32", "ObjectIdentity", "IpAddress", "Bits", "MibIdentifier", "Integer32", "enterprises", "ModuleIdentity", "TimeTicks", "Counter32", "NotificationType", "iso", "Counter64", "MibScalar", "MibTable", "MibTableRow", "MibTableColumn")
TextualConvention, DisplayString = mibBuilder.importSymbols("SNMPv2-TC", "TextualConvention", "DisplayString")
tecElite = MibIdentifier((1, 3, 6, 1, 4, 1, 217))
meterWorks = MibIdentifier((1, 3, 6, 1, 4, 1, 217, 16))
mw501 = MibIdentifier((1, 3, 6, 1, 4, 1, 217, 16, 1))
mwMem = MibIdentifier((1, 3, 6, 1, 4, 1, 217, 16, 1, 1))
mwHeap = MibIdentifier((1, 3, 6, 1, 4, 1, 217, 16, 1, 2))
mwMemCeiling = MibScalar((1, 3, 6, 1, 4, 1, 217, 16, 1, 1, 1), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: mwMemCeiling.setStatus('mandatory')
if mibBuilder.loadTexts: mwMemCeiling.setDescription('bytes of memory the agent memory manager will allow the agent to use.')
mwMemUsed = MibScalar((1, 3, 6, 1, 4, 1, 217, 16, 1, 1, 2), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: mwMemUsed.setStatus('mandatory')
if mibBuilder.loadTexts: mwMemUsed.setDescription("bytes of memory that meterworks has malloc'ed. some of this may be in free pools.")
mwHeapTotal = MibScalar((1, 3, 6, 1, 4, 1, 217, 16, 1, 2, 1), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: mwHeapTotal.setStatus('mandatory')
if mibBuilder.loadTexts: mwHeapTotal.setDescription('bytes of memory given to the heap manager.')
mwHeapUsed = MibScalar((1, 3, 6, 1, 4, 1, 217, 16, 1, 2, 2), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: mwHeapUsed.setStatus('mandatory')
if mibBuilder.loadTexts: mwHeapUsed.setDescription('bytes of available memory in the heap.')
mibBuilder.exportSymbols("MWORKS-MIB", mwHeap=mwHeap, mwHeapUsed=mwHeapUsed, mwMemCeiling=mwMemCeiling, meterWorks=meterWorks, tecElite=tecElite, mwMem=mwMem, mw501=mw501, mwHeapTotal=mwHeapTotal, mwMemUsed=mwMemUsed)
| [] |
mpire-nxus/nxus_unity_sdk | Assets/Editor/PostprocessBuildPlayer_MpireNxusMeasurementPostBuildiOS.py | 34a1ebfc588c47c1c71fae11f29e82c1172c6dc2 | #!/usr/bin/env python
import sys
import re
from subprocess import Popen, PIPE
import argparse
from pbxproj import XcodeProject, TreeType
from pbxproj import FileOptions
def main():
parser = argparse.ArgumentParser(description="MpireNxusMeasurement post build iOS script")
parser.add_argument('ios_project_path', help="path to the folder of the iOS project generated by unity3d")
with open('MpireNxusMeasurementPostBuildiOSLog.txt', 'w') as fileLog:
# Log function with file injected.
LogFunc = LogInput(fileLog)
# Path of the Xcode SDK on the system.
xcode_sdk_path = get_xcode_sdk_path(LogFunc)
# Path for unity iOS Xcode project and framework on the system.
unity_xcode_project_path, framework_path = get_paths(LogFunc, parser, xcode_sdk_path)
# Edit the Xcode project using mod_pbxproj:
# - Add the adSupport framework library.
# - Add the iAd framework library.
# - Change the compilation flags of the adjust project files to support non-ARC.
edit_unity_xcode_project(LogFunc, unity_xcode_project_path, framework_path)
# Removed.
# Change the Xcode project directly:
# - Allow objective-c exceptions
# rewrite_unity_xcode_project(LogFunc, unity_xcode_project_path)
sys.exit(0)
def LogInput(writeObject):
def Log(message, *args):
messageNLine = (message if message else "None") + "\n"
writeObject.write(messageNLine.format(*args))
return Log
def get_paths(Log, parser, xcode_sdk_path):
args, ignored_args = parser.parse_known_args()
ios_project_path = args.ios_project_path
unity_xcode_project_path = ios_project_path + "/Unity-iPhone.xcodeproj/project.pbxproj"
Log("Unity3d Xcode project path: {0}", unity_xcode_project_path)
framework_path = xcode_sdk_path + "/System/Library/Frameworks/"
Log("framework path: {0}", framework_path)
return unity_xcode_project_path, framework_path
def edit_unity_xcode_project(Log, unity_xcode_project_path, framework_path):
# load unity iOS pbxproj project file
unity_XcodeProject = XcodeProject.load(unity_xcode_project_path)
frameworks = unity_XcodeProject.get_or_create_group('Frameworks')
file_options_security_framework = FileOptions(embed_framework=False, weak=True)
unity_XcodeProject.add_file(framework_path + "Security.framework", parent=frameworks, tree='SDKROOT', force=False, file_options=file_options_security_framework)
Log("added Security framework")
# Add -ObjC to "Other Linker Flags" project settings.
unity_XcodeProject.add_other_ldflags('-ObjC')
# Save changes.
unity_XcodeProject.save()
def rewrite_unity_xcode_project(Log, unity_xcode_project_path):
unity_xcode_lines = []
# Allow objective-c exceptions
re_objc_excep = re.compile(r"\s*GCC_ENABLE_OBJC_EXCEPTIONS *= *NO.*")
with open(unity_xcode_project_path) as upf:
for line in upf:
if re_objc_excep.match(line):
#Log("matched line: {0}", re_objc_excep.match(line).group())
line = line.replace("NO","YES")
Log("Objective-c exceptions enabled")
unity_xcode_lines.append(line)
with open(unity_xcode_project_path, "w+") as upf:
upf.writelines(unity_xcode_lines)
def get_xcode_sdk_path(Log):
# Output all info from Xcode.
proc = Popen(["xcodebuild", "-version", "-sdk"], stdout=PIPE, stderr=PIPE)
out, err = proc.communicate()
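    # NOTE: on Python 3, communicate() returns bytes here (no text mode is
    # set), so `out` would need .decode("utf-8") before the regex search below.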
if proc.returncode not in [0, 66]:
Log("Could not retrieve Xcode sdk path. code: {0}, err: {1}", proc.returncode, err)
return None
match = re.search("iPhoneOS.*?Path: (?P<sdk_path>.*?)\n", out, re.DOTALL)
xcode_sdk_path = match.group('sdk_path') if match else None
Log("Xcode sdk path: {0}", xcode_sdk_path)
return xcode_sdk_path
if __name__ == "__main__":
main()
| [((12, 13, 12, 94), 'argparse.ArgumentParser', 'argparse.ArgumentParser', (), '', False, 'import argparse\n'), ((35, 4, 35, 15), 'sys.exit', 'sys.exit', ({(35, 13, 35, 14): '(0)'}, {}), '(0)', False, 'import sys\n'), ((57, 25, 57, 68), 'pbxproj.XcodeProject.load', 'XcodeProject.load', ({(57, 43, 57, 67): 'unity_xcode_project_path'}, {}), '(unity_xcode_project_path)', False, 'from pbxproj import XcodeProject, TreeType\n'), ((61, 38, 61, 83), 'pbxproj.FileOptions', 'FileOptions', (), '', False, 'from pbxproj import FileOptions\n'), ((75, 20, 75, 73), 're.compile', 're.compile', ({(75, 31, 75, 72): '"""\\\\s*GCC_ENABLE_OBJC_EXCEPTIONS *= *NO.*"""'}, {}), "('\\\\s*GCC_ENABLE_OBJC_EXCEPTIONS *= *NO.*')", False, 'import re\n'), ((88, 11, 88, 78), 'subprocess.Popen', 'Popen', (), '', False, 'from subprocess import Popen, PIPE\n'), ((95, 12, 95, 77), 're.search', 're.search', ({(95, 22, 95, 60): '"""iPhoneOS.*?Path: (?P<sdk_path>.*?)\n"""', (95, 62, 95, 65): 'out', (95, 67, 95, 76): 're.DOTALL'}, {}), "('iPhoneOS.*?Path: (?P<sdk_path>.*?)\\n', out, re.DOTALL)", False, 'import re\n')] |
klyusba/python-quiz | vars_in_python.py | 9f469417458f8ba6b21f9507cc860ca4547ea67b | # == 1 ==
bar = [1, 2]
def foo(bar):
bar = sum(bar)
return bar
print(foo(bar))
# == 2 ==
bar = [1, 2]
def foo(bar):
bar[0] = 1
return sum(bar)
print(foo(bar))
# == 3 ==
bar = [1, 2]
def foo():
bar = sum(bar)
return bar
print(foo())
# == 4 ==
bar = [1, 2]
def foo(bar):
bar = [1, 2, 3, ]
return sum(bar)
print(foo(bar), bar)
# == 5 ==
bar = [1, 2]
def foo(bar):
bar[:] = [1, 2, 3, ]
return sum(bar)
print(foo(bar), bar)
# == 6 ==
try:
bar = 1 / 0
print(bar)
except ZeroDivisionError as bar:
print(bar)
print(bar)
# == 7 ==
bar = [1, 2]
print(list(bar for bar in bar))
print(bar)
# == 8 ==
bar = [1, 2]
f = lambda: sum(bar)
print(f())
bar = [1, 2, 3, ]
print(f())
# == 9 ==
bar = [1, 2]
def foo(bar):
return lambda: sum(bar)
f = foo(bar)
print(f())
bar = [1, 2, 3, ]
print(f())
# == 10 ==
bar = [1, 2]
foo = []
for i in bar:
foo.append(lambda: i)
print([f() for f in foo])
# == 11 ==
bar = [1, 2]
foo = [
lambda: i
for i in bar
]
print(list(f() for f in foo))
# == 12 ==
bar = [1, 2]
foo = [
lambda: i
for i in bar
]
print(list(f() for f in foo))
bar = [1, 2, 3, ]
print(list(f() for f in foo))
bar[:] = [1, 2, 3, ]
print(list(f() for f in foo))
# == 13 ==
bar = [1, 2]
foo = [
lambda i=i: i
for i in bar
]
print(list(f() for f in foo))
| [] |
sm43/pipelines-as-code | hack/dev/gh-replay-events.py | bd21e48c96ab128d533701ecd1a2df7a0d136d65 | #!/usr/bin/env python3
# -*- coding: utf-8 -*-
# Author: Chmouel Boudjnah <[email protected]>
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
# See README.md for documentation
import typing
import argparse
import base64
import hashlib
import hmac
import json
import os
import subprocess
import sys
import time
import requests
import ghapp_token
NAMESPACE = "pipelines-as-code"
SECRET_NAME = "pipelines-as-code-secret"
ELNAME = "pipelines-as-code"
EXPIRE_MINUTES_AS_SECONDS = (
int(os.environ.get("GITHUBAPP_TOKEN_EXPIRATION_MINUTES", 10)) * 60
)
def get_controller_route():
elroute = subprocess.run(
f"kubectl get route -n {NAMESPACE} -l pipelines-as-code/route=controller -o json",
shell=True,
check=True,
capture_output=True,
)
return (
"https://"
+ json.loads(elroute.stdout)["items"][0]["status"]["ingress"][0]["host"]
)
def get_controller_ingress():
elroute = subprocess.run(
f"kubectl get ingress -n {NAMESPACE} -l pipelines-as-code/route=controller -o json",
shell=True,
check=True,
capture_output=True,
)
return (
"http://" + json.loads(elroute.stdout)["items"][0]["spec"]["rules"][0]["host"]
)
def get_token_secret(
github_api_url=ghapp_token.GITHUB_API_URL, expiration_time=EXPIRE_MINUTES_AS_SECONDS
):
secret = subprocess.run(
f"kubectl get secret {SECRET_NAME} -n{NAMESPACE} -o json",
shell=True,
check=True,
capture_output=True,
)
jeez = json.loads(secret.stdout)
private_key = base64.b64decode(jeez["data"]["github-private-key"])
app_id = base64.b64decode(jeez["data"]["github-application-id"])
webhook_secret = base64.b64decode(jeez["data"]["webhook.secret"]).decode()
if not private_key or not app_id or not webhook_secret:
print(
f"private_key={private_key[1:10]} or app_id={app_id} or webhook_secret={webhook_secret} are empty"
)
sys.exit(1)
gh = ghapp_token.GitHub(
private_key,
app_id,
expiration_time,
github_api_url,
)
return gh.token, webhook_secret, app_id
def _request_app_delivery(token, iid=None, api_url=ghapp_token.GITHUB_API_URL):
url = f"{api_url}/app/hook/deliveries"
if iid:
url += f"/{iid}"
headers = {
"Accept": "application/vnd.github.v3+json",
"Authorization": f"Bearer {token}",
}
return requests.request("GET", url, headers=headers)
def _request_webhooks_installed(
token: str,
owner_repo: str,
iid: typing.Union[int, None] = None,
api_url: str = ghapp_token.GITHUB_API_URL,
):
url = f"{api_url}/repos/{owner_repo}/hooks"
if iid:
url += f"/{iid}/deliveries"
headers = {
"Accept": "application/vnd.github.v3+json",
"Authorization": f"Bearer {token}",
}
return requests.request("GET", url, headers=headers)
def _request_webhooks_reattempt(
token: str,
owner_repo: str,
iid: int,
delivery_id: int,
api_url: str = ghapp_token.GITHUB_API_URL,
):
url = f"{api_url}/repos/{owner_repo}/hooks/{iid}/deliveries/{delivery_id}/attempts"
print(url)
headers = {
"Accept": "application/vnd.github.v3+json",
"Authorization": f"Bearer {token}",
}
return requests.request("POST", url, headers=headers)
def ask_which(token: str, api_url: str, last: bool, deliveries: dict) -> int:
dico = []
i = 1
if "message" in deliveries:
print(deliveries)
sys.exit(0)
for delivery in deliveries:
print(
f"{i}) Action={delivery['action']} Event={delivery['event']} Delivered at {delivery['delivered_at']}"
)
dico.append(delivery["id"])
if i == 10:
break
i += 1
chosen = input("Choose a delivery: ")
# return _request_app_delivery(token, dico[int(chosen) - 1], api_url=api_url).json()
return int(chosen) - 1
def webhook_get_delivery(
token: str,
owner_repo: str,
last: bool = False,
api_url: str = ghapp_token.GITHUB_API_URL,
) -> str:
r = _request_webhooks_installed(token, api_url=api_url, owner_repo=owner_repo)
r.raise_for_status()
webhooks = r.json()
if len(webhooks) == 1:
webhook_id = int(webhooks[0]["id"])
elif len(webhooks) > 1:
cnt = 1
for wh in webhooks:
print(f"{cnt}) {wh['name']} - {wh['config']['url']} ")
cnt += 1
chosen = input("Choose a delivery: ")
webhook_id = int(webhooks[int(chosen) - 1]["id"])
else:
print("could not find any webhook configuration on your repo {}")
sys.exit(1)
r = _request_webhooks_installed(
token, api_url=api_url, owner_repo=owner_repo, iid=webhook_id
)
r.raise_for_status()
deliveries = r.json()
if not deliveries:
print("no deliveries has been set ")
sys.exit(1)
if last:
delivery_id = deliveries[0]["id"]
else:
chosen = ask_which(token, api_url, last, r.json())
delivery_id = deliveries[chosen]["id"]
r = _request_webhooks_reattempt(
token=token,
owner_repo=owner_repo,
iid=webhook_id,
api_url=api_url,
delivery_id=delivery_id,
)
r.raise_for_status()
print(f"Delivery has been replayed, you can replay directly it with: ")
s = f"http POST {api_url}/repos/{owner_repo}/hooks/{webhook_id}/deliveries/{delivery_id}/attempts"
s += f' Authorization:"Bearer { os.environ.get("PASS_TOKEN", "$TOKEN") }"'
s += " Accept:application/vnd.github.v3+json"
print(s)
return s
def app_get_delivery(
token: str, last: bool = False, api_url: str = ghapp_token.GITHUB_API_URL
) -> dict:
r = _request_app_delivery(token, api_url=api_url)
r.raise_for_status()
deliveries = r.json()
if not deliveries:
print("no deliveries has been set ")
sys.exit(1)
if last:
return _request_app_delivery(token, deliveries[0]["id"], api_url=api_url).json()
chosen = ask_which(token, api_url, last, deliveries)
return _request_app_delivery(
token, deliveries[chosen]["id"], api_url=api_url
).json()
def save_script(target: str, el_route: str, headers: dict, payload: str):
s = f"""#!/usr/bin/env python3
import requests
import sys
payload = \"\"\"{json.dumps(payload)}\"\"\"
headers={headers}
el_route = "http://localhost:8080" if (len(sys.argv) > 1 and sys.argv[1] == "-l") else "{el_route}"
r = requests.request("POST",el_route,data=payload.encode("utf-8"),headers=headers)
r.raise_for_status()
print("Request has been replayed on " + el_route)
"""
with open(target, "w") as fp:
fp.write(s)
os.chmod(target, 0o755)
print(f"Request saved to {target}")
def main(args):
el = args.eroute
if not el:
try:
el = get_controller_route()
except subprocess.CalledProcessError:
try:
el = get_controller_ingress()
except subprocess.CalledProcessError:
print("Could not find an ingress or route")
sys.exit(1)
if args.webhook_repo:
token, webhook_secret = args.webhook_token, args.webhook_secret
replays = webhook_get_delivery(
token,
last=args.last_event,
api_url=args.api_url,
owner_repo=args.webhook_repo,
)
if args.save:
open(args.save, "w").write(f"""#!/usr/bin/env bash\n{replays}\n""")
os.chmod(args.save, 0o755)
print(f"Saved to {args.save}")
sys.exit(0)
else:
token, webhook_secret, app_id = get_token_secret(github_api_url=args.api_url)
delivery = app_get_delivery(token, args.last_event, args.api_url)
jeez = delivery["request"]["payload"]
headers = delivery["request"]["headers"]
payload = json.dumps(jeez)
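    # GitHub authenticates webhook deliveries with HMAC hex digests of the raw
    # body, keyed by the webhook secret; recomputing both the sha1 and sha256
    # signatures below lets the receiving controller accept the replayed
    # request as genuine.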
esha256 = hmac.new(
webhook_secret.encode("utf-8"),
msg=payload.encode("utf-8"),
digestmod=hashlib.sha256,
).hexdigest()
esha1 = hmac.new(
webhook_secret.encode("utf-8"),
msg=payload.encode("utf-8"),
digestmod=hashlib.sha1,
).hexdigest()
print("Replay event for repo " + jeez["repository"]["full_name"])
headers.update(
{
"X-Hub-Signature": "sha1=" + esha1,
"X-Hub-Signature-256": "sha256=" + esha256,
}
)
if args.save:
save_script(args.save, el, headers, jeez)
sys.exit(0)
for _ in range(args.retry):
try:
r = requests.request(
"POST", el, data=payload.encode("utf-8"), headers=headers
)
except requests.exceptions.ConnectionError:
print(f"sleeping until {el} is up")
time.sleep(5)
continue
print(f"Payload has been replayed on {el}: {r}")
return
print("You have reached the maximum number of retries")
def parse_args():
parser = argparse.ArgumentParser(description="Replay a webhook")
parser.add_argument(
"--installation-id",
"-i",
default=os.environ.get("INSTALLATION_ID"),
help="Installation ID",
)
parser.add_argument(
"--controller-route",
"-e",
dest="eroute",
help="Route hostname (default to detect on openshift/ingress)",
default=os.environ.get("EL_ROUTE"),
)
parser.add_argument("--last-event", "-L", action="store_true")
parser.add_argument(
"--webhook-repo", "-w", help="Use a webhook-repo instead of app"
)
parser.add_argument("--webhook-token", "-t", help="Use this token")
parser.add_argument("--webhook-secret", "-S", help="Use this webhook secret")
parser.add_argument(
"--save", "-s", help="save the request to a shell script to replay easily"
)
parser.add_argument(
"-a",
"--api-url",
help="Github API URL",
default=os.environ.get("GITHUB_API_URL", ghapp_token.GITHUB_API_URL),
)
parser.add_argument(
"--retry",
type=int,
default=1,
help="how many time to try to contact the el route",
)
return parser.parse_args()
if __name__ == "__main__":
main(parse_args())
| [((42, 14, 47, 5), 'subprocess.run', (), '', False, 'import subprocess\n'), ((55, 14, 60, 5), 'subprocess.run', (), '', False, 'import subprocess\n'), ((69, 13, 74, 5), 'subprocess.run', (), '', False, 'import subprocess\n'), ((75, 11, 75, 36), 'json.loads', ({(75, 22, 75, 35): 'secret.stdout'}, {}), '(secret.stdout)', False, 'import json\n'), ((76, 18, 76, 70), 'base64.b64decode', ({(76, 35, 76, 69): "jeez['data']['github-private-key']"}, {}), "(jeez['data']['github-private-key'])", False, 'import base64\n'), ((77, 13, 77, 68), 'base64.b64decode', ({(77, 30, 77, 67): "jeez['data']['github-application-id']"}, {}), "(jeez['data']['github-application-id'])", False, 'import base64\n'), ((85, 9, 90, 5), 'ghapp_token.GitHub', ({(86, 8, 86, 19): 'private_key', (87, 8, 87, 14): 'app_id', (88, 8, 88, 23): 'expiration_time', (89, 8, 89, 22): 'github_api_url'}, {}), '(private_key, app_id, expiration_time, github_api_url)', False, 'import ghapp_token\n'), ((102, 11, 102, 56), 'requests.request', (), '', False, 'import requests\n'), ((118, 11, 118, 56), 'requests.request', (), '', False, 'import requests\n'), ((134, 11, 134, 57), 'requests.request', (), '', False, 'import requests\n'), ((239, 4, 239, 27), 'os.chmod', ({(239, 13, 239, 19): 'target', (239, 21, 239, 26): '(493)'}, {}), '(target, 493)', False, 'import os\n'), ((272, 14, 272, 30), 'json.dumps', ({(272, 25, 272, 29): 'jeez'}, {}), '(jeez)', False, 'import json\n'), ((311, 13, 311, 68), 'argparse.ArgumentParser', (), '', False, 'import argparse\n'), ((37, 8, 37, 64), 'os.environ.get', ({(37, 23, 37, 59): '"""GITHUBAPP_TOKEN_EXPIRATION_MINUTES"""', (37, 61, 37, 63): '(10)'}, {}), "('GITHUBAPP_TOKEN_EXPIRATION_MINUTES', 10)", False, 'import os\n'), ((83, 8, 83, 19), 'sys.exit', ({(83, 17, 83, 18): '(1)'}, {}), '(1)', False, 'import sys\n'), ((142, 8, 142, 19), 'sys.exit', ({(142, 17, 142, 18): '(0)'}, {}), '(0)', False, 'import sys\n'), ((185, 8, 185, 19), 'sys.exit', ({(185, 17, 185, 18): '(1)'}, {}), '(1)', False, 'import sys\n'), ((216, 8, 216, 19), 'sys.exit', ({(216, 17, 216, 18): '(1)'}, {}), '(1)', False, 'import sys\n'), ((266, 8, 266, 19), 'sys.exit', ({(266, 17, 266, 18): '(0)'}, {}), '(0)', False, 'import sys\n'), ((294, 8, 294, 19), 'sys.exit', ({(294, 17, 294, 18): '(0)'}, {}), '(0)', False, 'import sys\n'), ((78, 21, 78, 69), 'base64.b64decode', ({(78, 38, 78, 68): "jeez['data']['webhook.secret']"}, {}), "(jeez['data']['webhook.secret'])", False, 'import base64\n'), ((176, 8, 176, 19), 'sys.exit', ({(176, 17, 176, 18): '(1)'}, {}), '(1)', False, 'import sys\n'), ((202, 36, 202, 74), 'os.environ.get', ({(202, 51, 202, 63): '"""PASS_TOKEN"""', (202, 65, 202, 73): '"""$TOKEN"""'}, {}), "('PASS_TOKEN', '$TOKEN')", False, 'import os\n'), ((230, 26, 230, 45), 'json.dumps', ({(230, 37, 230, 44): 'payload'}, {}), '(payload)', False, 'import json\n'), ((264, 12, 264, 38), 'os.chmod', ({(264, 21, 264, 30): 'args.save', (264, 32, 264, 37): '(493)'}, {}), '(args.save, 493)', False, 'import os\n'), ((315, 16, 315, 49), 'os.environ.get', ({(315, 31, 315, 48): '"""INSTALLATION_ID"""'}, {}), "('INSTALLATION_ID')", False, 'import os\n'), ((323, 16, 323, 42), 'os.environ.get', ({(323, 31, 323, 41): '"""EL_ROUTE"""'}, {}), "('EL_ROUTE')", False, 'import os\n'), ((340, 16, 340, 76), 'os.environ.get', ({(340, 31, 340, 47): '"""GITHUB_API_URL"""', (340, 49, 340, 75): 'ghapp_token.GITHUB_API_URL'}, {}), "('GITHUB_API_URL', ghapp_token.GITHUB_API_URL)", False, 'import os\n'), ((302, 12, 302, 25), 'time.sleep', ({(302, 23, 302, 24): '(5)'}, {}), '(5)', False, 'import time\n'), ((253, 16, 253, 27), 'sys.exit', ({(253, 25, 253, 26): '(1)'}, {}), '(1)', False, 'import sys\n'), ((50, 10, 50, 36), 'json.loads', ({(50, 21, 50, 35): 'elroute.stdout'}, {}), '(elroute.stdout)', False, 'import json\n'), ((62, 20, 62, 46), 'json.loads', ({(62, 31, 62, 45): 'elroute.stdout'}, {}), '(elroute.stdout)', False, 'import json\n')] |
Pooroomoo/nintendeals | nintendeals/noa/api/__init__.py | 993f4d159ff405ed82cd2bb023c7b75d921d0acb | from .algolia import search_by_nsuid
from .algolia import search_by_platform
from .algolia import search_by_query
| [] |
joshlyman/Josh-LeetCode | 076_Minimum_Window_Substring.py | cc9e2cc406d2cbd5a90ee579efbcaeffb842c5ed |
# Other solution
# V2
import collections
def minWindow(s, t):
need = collections.Counter(t) #hash table to store char frequency
missing = len(t) #total number of chars we care
start, end = 0, 0
i = 0
for j, char in enumerate(s, 1): #index j from 1
if need[char] > 0:
missing -= 1
need[char] -= 1
if missing == 0: #match all chars
while i < j and need[s[i]] < 0: #remove chars to find the real start
need[s[i]] += 1
i += 1
need[s[i]] += 1 #make sure the first appearing char satisfies need[char]>0
missing += 1 #we missed this first char, so add missing by 1
if end == 0 or j-i < end-start: #update window
start, end = i, j
i += 1 #update i to start+1 for next window
return s[start:end]
# Time: O(|S|+|T|)
# Space:O(|S|+|T|)
# Refer from:
# https://leetcode.com/problems/minimum-window-substring/solution/
# Sliding Window
# We start with two pointers, leftleft and rightright initially pointing to the first element of the string S.
# We use the rightright pointer to expand the window until we get a desirable window i.e. a window that contains all of the characters of T.
# Once we have a window with all the characters, we can move the left pointer ahead one by one. If the window is still a desirable one we keep on updating the minimum window size.
# If the window is not desirable any more, we repeat step 2 onwards.
# The current window is s[i:j] and the result window is s[I:J]. In need[c] I store how many times I
# need character c (can be negative) and missing tells how many characters are still missing.
# In the loop, first add the new character to the window. Then, if nothing is missing,
# remove as much as possible from the window start and then update the result.
class Solution:
def minWindow(self, s: str, t: str) -> str:
m = len(s)
n = len(t)
if m < n:
return ''
lt = {}
# put t into dict (lt) and count how many # for each char
for i in t:
if i not in lt:
lt[i] = 1
else:
lt[i] += 1
# missing is to count how many remaining char needed from substring
# finally get candidate substring which satisfy need of t
missing = n
i = I = J = 0
for j, c in enumerate(s, 1):
if c in lt and lt[c] > 0:
missing -= 1
if c in lt:
# lt can be negative
lt[c] -= 1
# i is index of candidate substring, remove as many as char from candidate
while i < j and not missing:
if not J or j-i < J-I:
I, J = i, j
if s[i] not in lt:
i += 1
continue
else:
# if lt contains s[i], then # of s[i] +1, might reach to 0
lt[s[i]] += 1
# if > 0, means we need more, then missing +1
if lt[s[i]] > 0:
missing += 1
i += 1
return s[I:J]
# Time: O(|S|+|T|)
# Space:O(|S|+|T|)
# Optimized Sliding Window
# A small improvement to the above approach can reduce the time complexity of the algorithm to O(2*|filtered_S| + |S| + |T|),
# where filtered(S) is the string formed from S by removing all the elements not present in T
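# Quick sanity check, using the classic LeetCode example (the minimum window
# of "ADOBECODEBANC" covering "ABC" is "BANC"):
if __name__ == "__main__":
    assert minWindow("ADOBECODEBANC", "ABC") == "BANC"
    assert Solution().minWindow("ADOBECODEBANC", "ABC") == "BANC"
    print("both implementations return 'BANC'")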
| [] |
Taywee/amberherbert.com | home/migrations/0002_auto_20171017_0412.py | 6bf384d7cdf18dc613252fe4dde38545150eabbc | # -*- coding: utf-8 -*-
# Generated by Django 1.11.5 on 2017-10-17 04:12
from __future__ import unicode_literals
from django.db import migrations
import wagtail.core.blocks
import wagtail.core.fields
class Migration(migrations.Migration):
dependencies = [
('home', '0001_initial'),
]
operations = [
migrations.AlterField(
model_name='homepage',
name='navigation',
field=wagtail.core.fields.StreamField((('item', wagtail.core.blocks.StructBlock((('text', wagtail.core.blocks.CharBlock(help_text='If this is left blank, the title of the linked page will be used instead', max_length=16, required=False)), ('page', wagtail.core.blocks.PageChooserBlock(required=True))))),), blank=True, help_text='The list of navigation items', null=True),
),
]
| [] |
chawins/entangle-rep | lib/adv_model.py | 3e9e0d6e7536b0de0e35d7f8717f2ccc8e887759 | import torch
import torch.nn as nn
import torch.nn.functional as F
class PGDModel(nn.Module):
"""
code adapted from
https://github.com/karandwivedi42/adversarial/blob/master/main.py
"""
def __init__(self, basic_net, config):
super(PGDModel, self).__init__()
self.basic_net = basic_net
self.rand = config['random_start']
self.step_size = config['step_size']
self.epsilon = config['epsilon']
self.num_steps = config['num_steps']
assert config['loss_func'] == 'xent', 'Only xent supported for now.'
def forward(self, inputs, targets, attack=False):
if not attack:
return self.basic_net(inputs)
x = inputs.clone()
if self.rand:
x = x + torch.zeros_like(x).uniform_(-self.epsilon, self.epsilon)
for _ in range(self.num_steps):
x.requires_grad_()
with torch.enable_grad():
logits = self.basic_net(x)
loss = F.cross_entropy(logits, targets, reduction='sum')
grad = torch.autograd.grad(loss, x)[0]
x = x.detach() + self.step_size * torch.sign(grad.detach())
x = torch.min(torch.max(x, inputs.detach() - self.epsilon),
inputs.detach() + self.epsilon)
x = torch.clamp(x, 0, 1)
return self.basic_net(x)
class PGDL2Model(nn.Module):
"""
code adapted from
https://github.com/karandwivedi42/adversarial/blob/master/main.py
"""
def __init__(self, basic_net, config):
super(PGDL2Model, self).__init__()
self.basic_net = basic_net
self.epsilon = config['epsilon']
self.rand = config['random_start']
self.step_size = config['step_size']
self.num_steps = config['num_steps']
assert config['loss_func'] == 'xent', 'Only xent supported for now.'
def forward(self, inputs, targets, attack=False):
if not attack:
return self.basic_net(inputs)
x = inputs.clone()
if self.rand:
x = x + torch.zeros_like(x).normal_(0, self.step_size)
for _ in range(self.num_steps):
x.requires_grad_()
with torch.enable_grad():
logits = self.basic_net(x)
loss = F.cross_entropy(logits, targets, reduction='sum')
grad = torch.autograd.grad(loss, x)[0].detach()
grad_norm = grad.view(x.size(0), -1).norm(2, 1)
delta = self.step_size * grad / grad_norm.view(x.size(0), 1, 1, 1)
x = x.detach() + delta
diff = (x - inputs).view(x.size(0), -1).renorm(2, 0, self.epsilon)
x = diff.view(x.size()) + inputs
x.clamp_(0, 1)
return self.basic_net(x)
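if __name__ == '__main__':
    # Minimal smoke-test sketch: the attack hyper-parameters and the toy
    # network below are illustrative assumptions, not values from this repo.
    config = {'random_start': True, 'step_size': 2 / 255,
              'epsilon': 8 / 255, 'num_steps': 10, 'loss_func': 'xent'}
    basic_net = nn.Sequential(nn.Flatten(), nn.Linear(3 * 32 * 32, 10))
    net = PGDModel(basic_net, config)
    x = torch.rand(4, 3, 32, 32)  # fake batch of [0, 1] images
    y = torch.randint(0, 10, (4,))
    logits = net(x, y, attack=True)  # forward pass on PGD-perturbed inputs
    print(logits.shape)  # expected: torch.Size([4, 10])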
| [((37, 16, 37, 36), 'torch.clamp', 'torch.clamp', ({(37, 28, 37, 29): 'x', (37, 31, 37, 32): '0', (37, 34, 37, 35): '1'}, {}), '(x, 0, 1)', False, 'import torch\n'), ((30, 17, 30, 36), 'torch.enable_grad', 'torch.enable_grad', ({}, {}), '()', False, 'import torch\n'), ((32, 23, 32, 72), 'torch.nn.functional.cross_entropy', 'F.cross_entropy', (), '', True, 'import torch.nn.functional as F\n'), ((33, 19, 33, 47), 'torch.autograd.grad', 'torch.autograd.grad', ({(33, 39, 33, 43): 'loss', (33, 45, 33, 46): 'x'}, {}), '(loss, x)', False, 'import torch\n'), ((67, 17, 67, 36), 'torch.enable_grad', 'torch.enable_grad', ({}, {}), '()', False, 'import torch\n'), ((69, 23, 69, 72), 'torch.nn.functional.cross_entropy', 'F.cross_entropy', (), '', True, 'import torch.nn.functional as F\n'), ((27, 20, 27, 39), 'torch.zeros_like', 'torch.zeros_like', ({(27, 37, 27, 38): 'x'}, {}), '(x)', False, 'import torch\n'), ((63, 20, 63, 39), 'torch.zeros_like', 'torch.zeros_like', ({(63, 37, 63, 38): 'x'}, {}), '(x)', False, 'import torch\n'), ((70, 19, 70, 47), 'torch.autograd.grad', 'torch.autograd.grad', ({(70, 39, 70, 43): 'loss', (70, 45, 70, 46): 'x'}, {}), '(loss, x)', False, 'import torch\n')] |
singhaditya28/fs_image | fs_image/rpm/storage/tests/storage_base_test.py | 3d122da48eab8b26e5add6754cc1f91296139c58 | #!/usr/bin/env python3
# Copyright (c) Facebook, Inc. and its affiliates.
#
# This source code is licensed under the MIT license found in the
# LICENSE file in the root directory of this source tree.
import unittest
from unittest.mock import patch, MagicMock
from typing import List, Tuple
from .. import Storage # Module import to ensure we get plugins
class StorageBaseTestCase(unittest.TestCase):
'A tiny test suite that can be used to check any Storage implementation.'
def _check_write_and_read(self, storage: Storage, writes: List[bytes]):
with storage.writer() as output:
for piece in writes:
output.write(piece)
sid = output.commit()
with storage.reader(sid) as input:
written = b''.join(writes)
partial_read = input.read(3)
if written:
self.assertGreater(len(partial_read), 0)
self.assertLessEqual(len(partial_read), 3)
self.assertEqual(written, partial_read + input.read())
return sid
def check_storage_impl(
self,
storage: Storage, *,
no_empty_blobs=False,
skip_empty_writes=False,
# To make testing more meaningful, it's useful to make sure that
# some writes fill up any output buffers. For filesystem writes
# from Python, this default is probably enough.
mul=314159, # just about 300KB
# If the blob-store has a read-through cache, we cannot effectively
# test that the remove actually happened.
remove_is_immediate=True,
) -> List[Tuple[List[str], str]]: # Writes + their storage ID
# Make sure nothing bad happens if an exception flies before a
# commit. Since we don't have an ID, we can't really test that the
# partial write got discarded.
with self.assertRaisesRegex(RuntimeError, '^humbug$'):
with storage.writer() as output:
output.write(b'bah')
raise RuntimeError('humbug')
with self.assertRaisesRegex(AssertionError, '^Cannot commit twice$'):
with storage.writer() as output:
output.write(b'foo')
output.commit(remove_on_exception=True) # Leave no litter
output.commit()
# Check that the `remove_on_exception` kwarg triggers `remove`.
mock_remove = MagicMock()
with patch.object(storage, 'remove', mock_remove):
with self.assertRaisesRegex(RuntimeError, '^remove_on_exception$'):
with storage.writer() as output:
output.write(b'foo')
id_to_remove = output.commit(remove_on_exception=True)
# Contract: committed blobs are available to read
with storage.reader(id_to_remove) as reader:
self.assertEqual(b'foo', reader.read())
raise RuntimeError('remove_on_exception')
# Check that `remove` would have been called, and then call it.
mock_remove.assert_called_once_with(id_to_remove)
storage.remove(id_to_remove) # Exercise the real `remove`
if remove_is_immediate:
# The removed ID should not longer be available.
with self.assertRaises(Exception):
with storage.reader(id_to_remove) as input:
# The reader may be a pipe from another Python process,
# let's consume its output to avoid BrokenPipe logspam.
input.read()
return [
(
writes,
self._check_write_and_read(
storage,
writes if i is None else [*writes[:i], b'', *writes[i:]],
),
) for writes in [
# Some large writes
[b'abcd' * mul, b'efgh' * mul],
[b'abc' * mul, b'defg' * mul],
[b'abc' * mul, b'def' * mul, b'g' * mul],
[b'abcd' * mul],
[b'abc' * mul, b'd' * mul],
# Some tiny writes without a multiplier
[b'a', b'b', b'c', b'd'],
[b'ab'],
[b'a', b'b'],
# While clowny, some blob storage systems refuse empty blobs.
*([] if no_empty_blobs else [
[b''],
[],
]),
]
# Test the given writes, optionally insert a blank at each pos
for i in [
None,
*([] if skip_empty_writes else range(len(writes) + 1)),
]
]
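    # Illustrative usage (hypothetical subclass and instance names): a concrete
    # suite for a storage backend would mix in this base case and exercise it:
    #
    #   class MyStorageTestCase(StorageBaseTestCase):
    #       def test_write_and_read(self):
    #           self.check_storage_impl(my_storage_instance)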
| [((60, 22, 60, 33), 'unittest.mock.MagicMock', 'MagicMock', ({}, {}), '()', False, 'from unittest.mock import patch, MagicMock\n'), ((61, 13, 61, 57), 'unittest.mock.patch.object', 'patch.object', ({(61, 26, 61, 33): 'storage', (61, 35, 61, 43): '"""remove"""', (61, 45, 61, 56): 'mock_remove'}, {}), "(storage, 'remove', mock_remove)", False, 'from unittest.mock import patch, MagicMock\n')] |
sisisin/pulumi-gcp | sdk/python/pulumi_gcp/kms/get_kms_crypto_key_version.py | af6681d70ea457843409110c1324817fe55f68ad | # coding=utf-8
# *** WARNING: this file was generated by the Pulumi Terraform Bridge (tfgen) Tool. ***
# *** Do not edit by hand unless you're certain you know what you are doing! ***
import warnings
import pulumi
import pulumi.runtime
from typing import Any, Mapping, Optional, Sequence, Union, overload
from .. import _utilities
from . import outputs
__all__ = [
'GetKMSCryptoKeyVersionResult',
'AwaitableGetKMSCryptoKeyVersionResult',
'get_kms_crypto_key_version',
'get_kms_crypto_key_version_output',
]
@pulumi.output_type
class GetKMSCryptoKeyVersionResult:
"""
A collection of values returned by getKMSCryptoKeyVersion.
"""
def __init__(__self__, algorithm=None, crypto_key=None, id=None, name=None, protection_level=None, public_keys=None, state=None, version=None):
if algorithm and not isinstance(algorithm, str):
raise TypeError("Expected argument 'algorithm' to be a str")
pulumi.set(__self__, "algorithm", algorithm)
if crypto_key and not isinstance(crypto_key, str):
raise TypeError("Expected argument 'crypto_key' to be a str")
pulumi.set(__self__, "crypto_key", crypto_key)
if id and not isinstance(id, str):
raise TypeError("Expected argument 'id' to be a str")
pulumi.set(__self__, "id", id)
if name and not isinstance(name, str):
raise TypeError("Expected argument 'name' to be a str")
pulumi.set(__self__, "name", name)
if protection_level and not isinstance(protection_level, str):
raise TypeError("Expected argument 'protection_level' to be a str")
pulumi.set(__self__, "protection_level", protection_level)
if public_keys and not isinstance(public_keys, list):
raise TypeError("Expected argument 'public_keys' to be a list")
pulumi.set(__self__, "public_keys", public_keys)
if state and not isinstance(state, str):
raise TypeError("Expected argument 'state' to be a str")
pulumi.set(__self__, "state", state)
if version and not isinstance(version, int):
raise TypeError("Expected argument 'version' to be a int")
pulumi.set(__self__, "version", version)
@property
@pulumi.getter
def algorithm(self) -> str:
"""
The CryptoKeyVersionAlgorithm that this CryptoKeyVersion supports.
"""
return pulumi.get(self, "algorithm")
@property
@pulumi.getter(name="cryptoKey")
def crypto_key(self) -> str:
return pulumi.get(self, "crypto_key")
@property
@pulumi.getter
def id(self) -> str:
"""
The provider-assigned unique ID for this managed resource.
"""
return pulumi.get(self, "id")
@property
@pulumi.getter
def name(self) -> str:
"""
The resource name for this CryptoKeyVersion in the format `projects/*/locations/*/keyRings/*/cryptoKeys/*/cryptoKeyVersions/*`
"""
return pulumi.get(self, "name")
@property
@pulumi.getter(name="protectionLevel")
def protection_level(self) -> str:
"""
The ProtectionLevel describing how crypto operations are performed with this CryptoKeyVersion. See the [protection_level reference](https://cloud.google.com/kms/docs/reference/rest/v1/ProtectionLevel) for possible outputs.
"""
return pulumi.get(self, "protection_level")
@property
@pulumi.getter(name="publicKeys")
def public_keys(self) -> Sequence['outputs.GetKMSCryptoKeyVersionPublicKeyResult']:
"""
If the enclosing CryptoKey has purpose `ASYMMETRIC_SIGN` or `ASYMMETRIC_DECRYPT`, this block contains details about the public key associated to this CryptoKeyVersion. Structure is documented below.
"""
return pulumi.get(self, "public_keys")
@property
@pulumi.getter
def state(self) -> str:
"""
The current state of the CryptoKeyVersion. See the [state reference](https://cloud.google.com/kms/docs/reference/rest/v1/projects.locations.keyRings.cryptoKeys.cryptoKeyVersions#CryptoKeyVersion.CryptoKeyVersionState) for possible outputs.
"""
return pulumi.get(self, "state")
@property
@pulumi.getter
def version(self) -> Optional[int]:
return pulumi.get(self, "version")
class AwaitableGetKMSCryptoKeyVersionResult(GetKMSCryptoKeyVersionResult):
# pylint: disable=using-constant-test
def __await__(self):
if False:
yield self
return GetKMSCryptoKeyVersionResult(
algorithm=self.algorithm,
crypto_key=self.crypto_key,
id=self.id,
name=self.name,
protection_level=self.protection_level,
public_keys=self.public_keys,
state=self.state,
version=self.version)
def get_kms_crypto_key_version(crypto_key: Optional[str] = None,
version: Optional[int] = None,
opts: Optional[pulumi.InvokeOptions] = None) -> AwaitableGetKMSCryptoKeyVersionResult:
"""
Provides access to a Google Cloud Platform KMS CryptoKeyVersion. For more information see
[the official documentation](https://cloud.google.com/kms/docs/object-hierarchy#key_version)
and
[API](https://cloud.google.com/kms/docs/reference/rest/v1/projects.locations.keyRings.cryptoKeys.cryptoKeyVersions).
A CryptoKeyVersion represents an individual cryptographic key, and the associated key material.
## Example Usage
```python
import pulumi
import pulumi_gcp as gcp
my_key_ring = gcp.kms.get_kms_key_ring(name="my-key-ring",
location="us-central1")
my_crypto_key = gcp.kms.get_kms_crypto_key(name="my-crypto-key",
key_ring=my_key_ring.id)
my_crypto_key_version = gcp.kms.get_kms_crypto_key_version(crypto_key=data["google_kms_key"]["my_key"]["id"])
```
:param str crypto_key: The `self_link` of the Google Cloud Platform CryptoKey to which the key version belongs. This is also the `id` field of the
`kms.CryptoKey` resource/datasource.
:param int version: The version number for this CryptoKeyVersion. Defaults to `1`.
"""
__args__ = dict()
__args__['cryptoKey'] = crypto_key
__args__['version'] = version
if opts is None:
opts = pulumi.InvokeOptions()
if opts.version is None:
opts.version = _utilities.get_version()
__ret__ = pulumi.runtime.invoke('gcp:kms/getKMSCryptoKeyVersion:getKMSCryptoKeyVersion', __args__, opts=opts, typ=GetKMSCryptoKeyVersionResult).value
return AwaitableGetKMSCryptoKeyVersionResult(
algorithm=__ret__.algorithm,
crypto_key=__ret__.crypto_key,
id=__ret__.id,
name=__ret__.name,
protection_level=__ret__.protection_level,
public_keys=__ret__.public_keys,
state=__ret__.state,
version=__ret__.version)
@_utilities.lift_output_func(get_kms_crypto_key_version)
def get_kms_crypto_key_version_output(crypto_key: Optional[pulumi.Input[str]] = None,
version: Optional[pulumi.Input[Optional[int]]] = None,
opts: Optional[pulumi.InvokeOptions] = None) -> pulumi.Output[GetKMSCryptoKeyVersionResult]:
"""
Provides access to a Google Cloud Platform KMS CryptoKeyVersion. For more information see
[the official documentation](https://cloud.google.com/kms/docs/object-hierarchy#key_version)
and
[API](https://cloud.google.com/kms/docs/reference/rest/v1/projects.locations.keyRings.cryptoKeys.cryptoKeyVersions).
A CryptoKeyVersion represents an individual cryptographic key, and the associated key material.
## Example Usage
```python
import pulumi
import pulumi_gcp as gcp
my_key_ring = gcp.kms.get_kms_key_ring(name="my-key-ring",
location="us-central1")
my_crypto_key = gcp.kms.get_kms_crypto_key(name="my-crypto-key",
key_ring=my_key_ring.id)
my_crypto_key_version = gcp.kms.get_kms_crypto_key_version(crypto_key=data["google_kms_key"]["my_key"]["id"])
```
:param str crypto_key: The `self_link` of the Google Cloud Platform CryptoKey to which the key version belongs. This is also the `id` field of the
`kms.CryptoKey` resource/datasource.
:param int version: The version number for this CryptoKeyVersion. Defaults to `1`.
"""
...
| [((59, 5, 59, 36), 'pulumi.getter', 'pulumi.getter', (), '', False, 'import pulumi\n'), ((80, 5, 80, 42), 'pulumi.getter', 'pulumi.getter', (), '', False, 'import pulumi\n'), ((88, 5, 88, 37), 'pulumi.getter', 'pulumi.getter', (), '', False, 'import pulumi\n'), ((27, 8, 27, 52), 'pulumi.set', 'pulumi.set', ({(27, 19, 27, 27): '__self__', (27, 29, 27, 40): '"""algorithm"""', (27, 42, 27, 51): 'algorithm'}, {}), "(__self__, 'algorithm', algorithm)", False, 'import pulumi\n'), ((30, 8, 30, 54), 'pulumi.set', 'pulumi.set', ({(30, 19, 30, 27): '__self__', (30, 29, 30, 41): '"""crypto_key"""', (30, 43, 30, 53): 'crypto_key'}, {}), "(__self__, 'crypto_key', crypto_key)", False, 'import pulumi\n'), ((33, 8, 33, 38), 'pulumi.set', 'pulumi.set', ({(33, 19, 33, 27): '__self__', (33, 29, 33, 33): '"""id"""', (33, 35, 33, 37): 'id'}, {}), "(__self__, 'id', id)", False, 'import pulumi\n'), ((36, 8, 36, 42), 'pulumi.set', 'pulumi.set', ({(36, 19, 36, 27): '__self__', (36, 29, 36, 35): '"""name"""', (36, 37, 36, 41): 'name'}, {}), "(__self__, 'name', name)", False, 'import pulumi\n'), ((39, 8, 39, 66), 'pulumi.set', 'pulumi.set', ({(39, 19, 39, 27): '__self__', (39, 29, 39, 47): '"""protection_level"""', (39, 49, 39, 65): 'protection_level'}, {}), "(__self__, 'protection_level', protection_level)", False, 'import pulumi\n'), ((42, 8, 42, 56), 'pulumi.set', 'pulumi.set', ({(42, 19, 42, 27): '__self__', (42, 29, 42, 42): '"""public_keys"""', (42, 44, 42, 55): 'public_keys'}, {}), "(__self__, 'public_keys', public_keys)", False, 'import pulumi\n'), ((45, 8, 45, 44), 'pulumi.set', 'pulumi.set', ({(45, 19, 45, 27): '__self__', (45, 29, 45, 36): '"""state"""', (45, 38, 45, 43): 'state'}, {}), "(__self__, 'state', state)", False, 'import pulumi\n'), ((48, 8, 48, 48), 'pulumi.set', 'pulumi.set', ({(48, 19, 48, 27): '__self__', (48, 29, 48, 38): '"""version"""', (48, 40, 48, 47): 'version'}, {}), "(__self__, 'version', version)", False, 'import pulumi\n'), ((56, 15, 56, 44), 'pulumi.get', 'pulumi.get', ({(56, 26, 56, 30): 'self', (56, 32, 56, 43): '"""algorithm"""'}, {}), "(self, 'algorithm')", False, 'import pulumi\n'), ((61, 15, 61, 45), 'pulumi.get', 'pulumi.get', ({(61, 26, 61, 30): 'self', (61, 32, 61, 44): '"""crypto_key"""'}, {}), "(self, 'crypto_key')", False, 'import pulumi\n'), ((69, 15, 69, 37), 'pulumi.get', 'pulumi.get', ({(69, 26, 69, 30): 'self', (69, 32, 69, 36): '"""id"""'}, {}), "(self, 'id')", False, 'import pulumi\n'), ((77, 15, 77, 39), 'pulumi.get', 'pulumi.get', ({(77, 26, 77, 30): 'self', (77, 32, 77, 38): '"""name"""'}, {}), "(self, 'name')", False, 'import pulumi\n'), ((85, 15, 85, 51), 'pulumi.get', 'pulumi.get', ({(85, 26, 85, 30): 'self', (85, 32, 85, 50): '"""protection_level"""'}, {}), "(self, 'protection_level')", False, 'import pulumi\n'), ((93, 15, 93, 46), 'pulumi.get', 'pulumi.get', ({(93, 26, 93, 30): 'self', (93, 32, 93, 45): '"""public_keys"""'}, {}), "(self, 'public_keys')", False, 'import pulumi\n'), ((101, 15, 101, 40), 'pulumi.get', 'pulumi.get', ({(101, 26, 101, 30): 'self', (101, 32, 101, 39): '"""state"""'}, {}), "(self, 'state')", False, 'import pulumi\n'), ((106, 15, 106, 42), 'pulumi.get', 'pulumi.get', ({(106, 26, 106, 30): 'self', (106, 32, 106, 41): '"""version"""'}, {}), "(self, 'version')", False, 'import pulumi\n'), ((158, 15, 158, 37), 'pulumi.InvokeOptions', 'pulumi.InvokeOptions', ({}, {}), '()', False, 'import pulumi\n'), ((161, 14, 161, 147), 'pulumi.runtime.invoke', 'pulumi.runtime.invoke', (), '', False, 'import pulumi\n')] |
nd-cse-30872-fa20/cse-30872-fa20-examples | lecture11/subsets.py | 7a991a0499e03bf91ac8ba40c99245d5d926e20c | #!/usr/bin/env python3
import itertools
# Constants
NUMBERS = range(0, 10)
# Main Execution
def main():
count = 0
for length in range(0, len(NUMBERS) + 1):
for subset in itertools.combinations(NUMBERS, length):
if sum(subset) % 3 == 0:
count += 1
print(count)
if __name__ == '__main__':
main()
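# Note: a roots-of-unity count gives (2**10 + 2 * 16) / 3 = 352 subsets of
# 0..9 with sum divisible by 3 (including the empty set), so this prints 352.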
| [((14, 22, 14, 61), 'itertools.combinations', 'itertools.combinations', ({(14, 45, 14, 52): 'NUMBERS', (14, 54, 14, 60): 'length'}, {}), '(NUMBERS, length)', False, 'import itertools\n')] |
Hexotical/toil | src/toil/batchSystems/abstractBatchSystem.py | 312b6e1f221ee7f7f187dd6dbfce1aecffd00e09 | # Copyright (C) 2015-2021 Regents of the University of California
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import enum
import logging
import os
import shutil
from abc import ABC, abstractmethod
from argparse import ArgumentParser, _ArgumentGroup
from contextlib import contextmanager
from typing import (Any,
Callable,
ContextManager,
Dict,
Iterator,
List,
Optional,
Tuple,
Type,
TypeVar,
Union,
NamedTuple)
from toil.common import Toil, cacheDirName, Config
from toil.deferred import DeferredFunctionManager
from toil.fileStores.abstractFileStore import AbstractFileStore
from toil.job import JobDescription
from toil.resource import Resource
logger = logging.getLogger(__name__)
# Value to use as exitStatus in UpdatedBatchJobInfo.exitStatus when status is not available.
EXIT_STATUS_UNAVAILABLE_VALUE = 255
class BatchJobExitReason(enum.Enum):
FINISHED: int = 1 # Successfully finished.
FAILED: int = 2 # Job finished, but failed.
LOST: int = 3 # Preemptable failure (job's executing host went away).
KILLED: int = 4 # Job killed before finishing.
ERROR: int = 5 # Internal error.
MEMLIMIT: int = 6 # Job hit batch system imposed memory limit
class UpdatedBatchJobInfo(NamedTuple):
jobID: int
exitStatus: int
"""
The exit status (integer value) of the job. 0 implies successful.
EXIT_STATUS_UNAVAILABLE_VALUE is used when the exit status is not available (e.g. job is lost).
"""
exitReason: Optional[BatchJobExitReason]
wallTime: Union[float, int, None]
# Information required for worker cleanup on shutdown of the batch system.
class WorkerCleanupInfo(NamedTuple):
workDir: str
"""workdir path (where the cache would go)"""
workflowID: str
"""used to identify files specific to this workflow"""
cleanWorkDir: str
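    """when to clean the workdir: 'always', 'never', 'onSuccess' or 'onError'"""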
class AbstractBatchSystem(ABC):
"""
An abstract (as far as Python currently allows) base class to represent the interface the batch
system must provide to Toil.
"""
@classmethod
@abstractmethod
def supportsAutoDeployment(cls) -> bool:
"""
Whether this batch system supports auto-deployment of the user script itself. If it does,
the :meth:`.setUserScript` can be invoked to set the resource object representing the user
script.
        Note to implementors: If your implementation returns True here, it should also override :meth:`setUserScript`.
"""
raise NotImplementedError()
@classmethod
@abstractmethod
def supportsWorkerCleanup(cls) -> bool:
"""
Indicates whether this batch system invokes
:meth:`BatchSystemSupport.workerCleanup` after the last job for a
particular workflow invocation finishes. Note that the term *worker*
refers to an entire node, not just a worker process. A worker process
may run more than one job sequentially, and more than one concurrent
worker process may exist on a worker node, for the same workflow. The
batch system is said to *shut down* after the last worker process
terminates.
"""
raise NotImplementedError()
def setUserScript(self, userScript: Resource) -> None:
"""
Set the user script for this workflow. This method must be called before the first job is
issued to this batch system, and only if :meth:`.supportsAutoDeployment` returns True,
otherwise it will raise an exception.
:param userScript: the resource object representing the user script
or module and the modules it depends on.
"""
raise NotImplementedError()
@abstractmethod
def issueBatchJob(self, jobDesc: JobDescription, job_environment: Optional[Dict[str, str]] = None) -> int:
"""
Issues a job with the specified command to the batch system and returns a unique jobID.
        :param jobDesc: a toil.job.JobDescription
:param job_environment: a collection of job-specific environment variables
to be set on the worker.
:return: a unique jobID that can be used to reference the newly issued job
"""
raise NotImplementedError()
@abstractmethod
def killBatchJobs(self, jobIDs: List[int]) -> None:
"""
Kills the given job IDs. After returning, the killed jobs will not
appear in the results of getRunningBatchJobIDs. The killed job will not
be returned from getUpdatedBatchJob.
:param jobIDs: list of IDs of jobs to kill
"""
raise NotImplementedError()
# FIXME: Return value should be a set (then also fix the tests)
@abstractmethod
def getIssuedBatchJobIDs(self) -> List[int]:
"""
Gets all currently issued jobs
:return: A list of jobs (as jobIDs) currently issued (may be running, or may be
waiting to be run). Despite the result being a list, the ordering should not
be depended upon.
"""
raise NotImplementedError()
@abstractmethod
def getRunningBatchJobIDs(self) -> Dict[int, float]:
"""
Gets a map of jobs as jobIDs that are currently running (not just waiting)
and how long they have been running, in seconds.
:return: dictionary with currently running jobID keys and how many seconds they have
been running as the value
"""
raise NotImplementedError()
@abstractmethod
def getUpdatedBatchJob(self, maxWait: int) -> Optional[UpdatedBatchJobInfo]:
"""
Returns information about job that has updated its status (i.e. ceased
running, either successfully or with an error). Each such job will be
returned exactly once.
Does not return info for jobs killed by killBatchJobs, although they
may cause None to be returned earlier than maxWait.
:param maxWait: the number of seconds to block, waiting for a result
:return: If a result is available, returns UpdatedBatchJobInfo.
Otherwise it returns None. wallTime is the number of seconds (a strictly
positive float) in wall-clock time the job ran for, or None if this
batch system does not support tracking wall time.
"""
raise NotImplementedError()
def getSchedulingStatusMessage(self) -> Optional[str]:
"""
Get a log message fragment for the user about anything that might be
going wrong in the batch system, if available.
If no useful message is available, return None.
This can be used to report what resource is the limiting factor when
scheduling jobs, for example. If the leader thinks the workflow is
stuck, the message can be displayed to the user to help them diagnose
why it might be stuck.
:return: User-directed message about scheduling state.
"""
# Default implementation returns None.
# Override to provide scheduling status information.
return None
@abstractmethod
def shutdown(self) -> None:
"""
Called at the completion of a toil invocation.
Should cleanly terminate all worker threads.
"""
raise NotImplementedError()
def setEnv(self, name: str, value: Optional[str] = None) -> None:
"""
Set an environment variable for the worker process before it is launched. The worker
process will typically inherit the environment of the machine it is running on but this
method makes it possible to override specific variables in that inherited environment
before the worker is launched. Note that this mechanism is different to the one used by
the worker internally to set up the environment of a job. A call to this method affects
all jobs issued after this method returns. Note to implementors: This means that you
would typically need to copy the variables before enqueuing a job.
If no value is provided it will be looked up from the current environment.
"""
raise NotImplementedError()
@classmethod
def add_options(cls, parser: Union[ArgumentParser, _ArgumentGroup]) -> None:
"""
If this batch system provides any command line options, add them to the given parser.
"""
pass
OptionType = TypeVar('OptionType')
@classmethod
def setOptions(cls, setOption: Callable[[str, Optional[Callable[[Any], OptionType]], Optional[Callable[[OptionType], None]], Optional[OptionType], Optional[List[str]]], None]) -> None:
"""
Process command line or configuration options relevant to this batch system.
:param setOption: A function with signature
setOption(option_name, parsing_function=None, check_function=None, default=None, env=None)
returning nothing, used to update run configuration as a side effect.
"""
# TODO: change type to a Protocol to express kwarg names, or else use a
# different interface (generator?)
pass
def getWorkerContexts(self) -> List[ContextManager[Any]]:
"""
Get a list of picklable context manager objects to wrap worker work in,
in order.
Can be used to ask the Toil worker to do things in-process (such as
configuring environment variables, hot-deploying user scripts, or
cleaning up a node) that would otherwise require a wrapping "executor"
process.
"""
return []
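# A minimal illustrative stub (added for exposition, not part of Toil): the
# smallest surface a new batch system must implement. It runs nothing and
# immediately reports every issued job as finished.
class NullBatchSystem(AbstractBatchSystem):
    @classmethod
    def supportsAutoDeployment(cls) -> bool:
        return False
    @classmethod
    def supportsWorkerCleanup(cls) -> bool:
        return False
    def __init__(self) -> None:
        self._next_id = 0
        self._updates: List[UpdatedBatchJobInfo] = []
    def issueBatchJob(self, jobDesc: JobDescription, job_environment: Optional[Dict[str, str]] = None) -> int:
        self._next_id += 1
        self._updates.append(UpdatedBatchJobInfo(jobID=self._next_id, exitStatus=0,
                                                 exitReason=BatchJobExitReason.FINISHED, wallTime=None))
        return self._next_id
    def killBatchJobs(self, jobIDs: List[int]) -> None:
        self._updates = [u for u in self._updates if u.jobID not in jobIDs]
    def getIssuedBatchJobIDs(self) -> List[int]:
        return [u.jobID for u in self._updates]
    def getRunningBatchJobIDs(self) -> Dict[int, float]:
        return {}
    def getUpdatedBatchJob(self, maxWait: int) -> Optional[UpdatedBatchJobInfo]:
        return self._updates.pop(0) if self._updates else None
    def shutdown(self) -> None:
        pass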
class BatchSystemSupport(AbstractBatchSystem):
"""
Partial implementation of AbstractBatchSystem, support methods.
"""
def __init__(self, config: Config, maxCores: float, maxMemory: int, maxDisk: int) -> None:
"""
Initializes initial state of the object
:param toil.common.Config config: object is setup by the toilSetup script and
has configuration parameters for the jobtree. You can add code
to that script to get parameters for your batch system.
:param float maxCores: the maximum number of cores the batch system can
request for any one job
:param int maxMemory: the maximum amount of memory the batch system can
request for any one job, in bytes
:param int maxDisk: the maximum amount of disk space the batch system can
request for any one job, in bytes
"""
super().__init__()
self.config = config
self.maxCores = maxCores
self.maxMemory = maxMemory
self.maxDisk = maxDisk
self.environment: Dict[str, str] = {}
self.workerCleanupInfo = WorkerCleanupInfo(workDir=self.config.workDir,
workflowID=self.config.workflowID,
cleanWorkDir=self.config.cleanWorkDir)
def checkResourceRequest(self, memory: int, cores: float, disk: int, job_name: str = '', detail: str = '') -> None:
"""
Check resource request is not greater than that available or allowed.
:param int memory: amount of memory being requested, in bytes
:param float cores: number of cores being requested
:param int disk: amount of disk space being requested, in bytes
:param str job_name: Name of the job being checked, for generating a useful error report.
:param str detail: Batch-system-specific message to include in the error.
:raise InsufficientSystemResources: raised when a resource is requested in an amount
greater than allowed
"""
batch_system = self.__class__.__name__ or 'this batch system'
for resource, requested, available in [('cores', cores, self.maxCores),
('memory', memory, self.maxMemory),
('disk', disk, self.maxDisk)]:
assert requested is not None
if requested > available:
unit = 'bytes of ' if resource in ('disk', 'memory') else ''
R = f'The job {job_name} is r' if job_name else 'R'
if resource == 'disk':
msg = (f'{R}equesting {requested} {unit}{resource} for temporary space, '
f'more than the maximum of {available} {unit}{resource} of free space on '
f'{self.config.workDir} that {batch_system} was configured with, or enforced '
f'by --max{resource.capitalize()}. Try setting/changing the toil option '
f'"--workDir" or changing the base temporary directory by setting TMPDIR.')
else:
msg = (f'{R}equesting {requested} {unit}{resource}, more than the maximum of '
f'{available} {unit}{resource} that {batch_system} was configured with, '
f'or enforced by --max{resource.capitalize()}.')
if detail:
msg += detail
raise InsufficientSystemResources(msg)
def setEnv(self, name: str, value: Optional[str] = None) -> None:
"""
Set an environment variable for the worker process before it is launched. The worker
process will typically inherit the environment of the machine it is running on but this
method makes it possible to override specific variables in that inherited environment
before the worker is launched. Note that this mechanism is different to the one used by
the worker internally to set up the environment of a job. A call to this method affects
all jobs issued after this method returns. Note to implementors: This means that you
would typically need to copy the variables before enqueuing a job.
If no value is provided it will be looked up from the current environment.
:param str name: the environment variable to be set on the worker.
:param str value: if given, the environment variable given by name will be set to this value.
if None, the variable's current value will be used as the value on the worker
:raise RuntimeError: if value is None and the name cannot be found in the environment
"""
if value is None:
try:
value = os.environ[name]
except KeyError:
raise RuntimeError(f"{name} does not exist in current environment")
self.environment[name] = value
def formatStdOutErrPath(self, toil_job_id: int, cluster_job_id: str, std: str) -> str:
"""
Format path for batch system standard output/error and other files
generated by the batch system itself.
Files will be written to the Toil work directory (which may
be on a shared file system) with names containing both the Toil and
batch system job IDs, for ease of debugging job failures.
:param: int toil_job_id : The unique id that Toil gives a job.
:param: cluster_job_id : What the cluster, for example, GridEngine, uses as its internal job id.
:param: string std : The provenance of the stream (for example: 'err' for 'stderr' or 'out' for 'stdout')
:rtype: string : Formatted filename; however if self.config.noStdOutErr is true,
returns '/dev/null' or equivalent.
"""
if self.config.noStdOutErr:
return os.devnull
fileName: str = f'toil_{self.config.workflowID}.{toil_job_id}.{cluster_job_id}.{std}.log'
workDir: str = Toil.getToilWorkDir(self.config.workDir)
return os.path.join(workDir, fileName)
@staticmethod
def workerCleanup(info: WorkerCleanupInfo) -> None:
"""
Cleans up the worker node on batch system shutdown. Also see :meth:`supportsWorkerCleanup`.
:param WorkerCleanupInfo info: A named tuple consisting of all the relevant information
for cleaning up the worker.
"""
assert isinstance(info, WorkerCleanupInfo)
workflowDir = Toil.getLocalWorkflowDir(info.workflowID, info.workDir)
DeferredFunctionManager.cleanupWorker(workflowDir)
workflowDirContents = os.listdir(workflowDir)
AbstractFileStore.shutdownFileStore(workflowDir, info.workflowID)
if (info.cleanWorkDir == 'always'
or info.cleanWorkDir in ('onSuccess', 'onError')
and workflowDirContents in ([], [cacheDirName(info.workflowID)])):
shutil.rmtree(workflowDir, ignore_errors=True)
class NodeInfo:
"""
The coresUsed attribute is a floating point value between 0 (all cores idle) and 1 (all cores
busy), reflecting the CPU load of the node.
The memoryUsed attribute is a floating point value between 0 (no memory used) and 1 (all memory
used), reflecting the memory pressure on the node.
The coresTotal and memoryTotal attributes are the node's resources, not just the used resources
The requestedCores and requestedMemory attributes are all the resources that Toil Jobs have reserved on the
node, regardless of whether the resources are actually being used by the Jobs.
The workers attribute is an integer reflecting the number of workers currently active workers
on the node.
"""
def __init__(self, coresUsed: float, memoryUsed: float,
coresTotal: float, memoryTotal: int,
requestedCores: float, requestedMemory: int,
workers: int) -> None:
self.coresUsed = coresUsed
self.memoryUsed = memoryUsed
self.coresTotal = coresTotal
self.memoryTotal = memoryTotal
self.requestedCores = requestedCores
self.requestedMemory = requestedMemory
self.workers = workers
class AbstractScalableBatchSystem(AbstractBatchSystem):
"""
A batch system that supports a variable number of worker nodes. Used by :class:`toil.
provisioners.clusterScaler.ClusterScaler` to scale the number of worker nodes in the cluster
up or down depending on overall load.
"""
@abstractmethod
def getNodes(self, preemptable: Optional[bool] = None) -> Dict[str, NodeInfo]:
"""
Returns a dictionary mapping node identifiers of preemptable or non-preemptable nodes to
NodeInfo objects, one for each node.
:param preemptable: If True (False) only (non-)preemptable nodes will be returned.
If None, all nodes will be returned.
"""
raise NotImplementedError()
@abstractmethod
def nodeInUse(self, nodeIP: str) -> bool:
"""
        Can be used to determine if a worker node is running any tasks. If the node doesn't
exist, this function should simply return False.
:param nodeIP: The worker nodes private IP address
:return: True if the worker node has been issued any tasks, else False
"""
raise NotImplementedError()
    # TODO: May be unused!
    @abstractmethod
    @contextmanager
    def nodeFiltering(self, filter: Optional[Callable[[NodeInfo], bool]]) -> Iterator[None]:
        """
        Used to prevent races in autoscaling where
        1) nodes have reported to the autoscaler as having no jobs,
        2) the scaler decides to terminate these nodes while, in parallel, the batch system
           assigns jobs to those same nodes, and
        3) the scaler terminates the nodes, resulting in job failures for all jobs on them.
        Call this method prior to node termination to ensure that nodes being considered for
        termination are not assigned new jobs. Call the method again passing None as the filter
        to disable the filtering after node termination is done.
        :param filter: This will be used as a filter on nodes considered when assigning new jobs.
            After this context manager exits the filter should be removed.
        """
        raise NotImplementedError()
    @abstractmethod
    def ignoreNode(self, nodeAddress: str) -> None:
        """
        Stop sending jobs to this node. Used in autoscaling
        when the autoscaler is ready to terminate a node, but
        jobs are still running. This allows the node to be terminated
        after the current jobs have finished.
        :param nodeAddress: IP address of node to ignore.
        """
        raise NotImplementedError()
    @abstractmethod
    def unignoreNode(self, nodeAddress: str) -> None:
        """
        Stop ignoring this address, presumably after
        a node with this address has been terminated. This allows for the
        possibility of a new node having the same address as a terminated one.
        """
        raise NotImplementedError()
class InsufficientSystemResources(Exception):
    pass
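# Sketch of the termination sequence an autoscaler is expected to follow
# (assumption: batch_system is a concrete AbstractScalableBatchSystem and
# terminate_instance is a hypothetical provisioner call):
#
#     batch_system.ignoreNode(node_address)    # stop scheduling onto the node
#     if not batch_system.nodeInUse(node_address):
#         terminate_instance(node_address)
#     batch_system.unignoreNode(node_address)  # the address may be reused later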
| [((40, 9, 40, 36), 'logging.getLogger', 'logging.getLogger', ({(40, 27, 40, 35): '__name__'}, {}), '(__name__)', False, 'import logging\n'), ((234, 17, 234, 38), 'typing.TypeVar', 'TypeVar', ({(234, 25, 234, 37): '"""OptionType"""'}, {}), "('OptionType')", False, 'from typing import Any, Callable, ContextManager, Dict, Iterator, List, Optional, Tuple, Type, TypeVar, Union, NamedTuple\n'), ((379, 23, 379, 63), 'toil.common.Toil.getToilWorkDir', 'Toil.getToilWorkDir', ({(379, 43, 379, 62): 'self.config.workDir'}, {}), '(self.config.workDir)', False, 'from toil.common import Toil, cacheDirName, Config\n'), ((380, 15, 380, 46), 'os.path.join', 'os.path.join', ({(380, 28, 380, 35): 'workDir', (380, 37, 380, 45): 'fileName'}, {}), '(workDir, fileName)', False, 'import os\n'), ((391, 22, 391, 77), 'toil.common.Toil.getLocalWorkflowDir', 'Toil.getLocalWorkflowDir', ({(391, 47, 391, 62): 'info.workflowID', (391, 64, 391, 76): 'info.workDir'}, {}), '(info.workflowID, info.workDir)', False, 'from toil.common import Toil, cacheDirName, Config\n'), ((392, 8, 392, 58), 'toil.deferred.DeferredFunctionManager.cleanupWorker', 'DeferredFunctionManager.cleanupWorker', ({(392, 46, 392, 57): 'workflowDir'}, {}), '(workflowDir)', False, 'from toil.deferred import DeferredFunctionManager\n'), ((393, 30, 393, 53), 'os.listdir', 'os.listdir', ({(393, 41, 393, 52): 'workflowDir'}, {}), '(workflowDir)', False, 'import os\n'), ((394, 8, 394, 73), 'toil.fileStores.abstractFileStore.AbstractFileStore.shutdownFileStore', 'AbstractFileStore.shutdownFileStore', ({(394, 44, 394, 55): 'workflowDir', (394, 57, 394, 72): 'info.workflowID'}, {}), '(workflowDir, info.workflowID)', False, 'from toil.fileStores.abstractFileStore import AbstractFileStore\n'), ((398, 12, 398, 58), 'shutil.rmtree', 'shutil.rmtree', (), '', False, 'import shutil\n'), ((397, 45, 397, 74), 'toil.common.cacheDirName', 'cacheDirName', ({(397, 58, 397, 73): 'info.workflowID'}, {}), '(info.workflowID)', False, 'from toil.common import Toil, cacheDirName, Config\n')] |
Heartfilia/lite_tools | demo/other_demo.py | b3432ba7cb60502ac64d45e23022e20555fb1588 | # -*- coding: utf-8 -*-
from lite_tools import get_md5, get_sha, get_sha3, get_b64e, get_b64d
# about hashlib ==> get_md5, get_sha, get_sha3 || default mode=256
s = "test_information"   # only strings can be passed in here
print(get_md5(s)) # 5414ffd88fcb58417e64ecec51bb3a6b
print(get_md5(s, upper=True)) # 5414FFD88FCB58417E64ECEC51BB3A6B
print(get_md5(s, to_bin=True)) # b'T\x14\xff\xd8\x8f\xcbXA~d\xec\xecQ\xbb:k' # 转成二进制的需求没什么用但是可以保留
print(get_sha(s)) # d09869fdf901465c8566f0e2debfa3f6a3d878a8157e199c7c4c6dd755617f33
print(get_sha(s, to_bin=True)) # b'\xd0\x98i\xfd\xf9\x01F\\\x85f\xf0\xe2\xde\xbf\xa3\xf6\xa3\xd8x\xa8\x15~\x19\x9c|Lm\xd7Ua\x7f3'
print(get_sha(s, mode=1)) # ada5dfdf0c9a76a84958310b838a70b6fd6d01f6 # default mode=256 // mode: 1 224 256 384 512
print(get_sha3(s)) # 9c539ca35c6719f546e67837ff37fe7791e53fe40715cd4da0167c78c9adc2e8
print(get_sha3(s, to_bin=True)) # b'\x9cS\x9c\xa3\\g\x19\xf5F\xe6x7\xff7\xfew\x91\xe5?\xe4\x07\x15\xcdM\xa0\x16|x\xc9\xad\xc2\xe8'
print(get_sha3(s, mode=1)) # return "" // SUPPORT: sha3_224 sha3_256 sha3_384 sha3_512// only need inputting: 224 256 384 512 # default mode=256 // mode: 224 256 384 512
print(get_sha3(s, mode=384)) # 95c09e20a139843eae877a64cd95d6a629b3c9ff383b5460557aab2612682d4228d05fe41606a79acf5ae1c4de35160c
# about base64 ==> get_b64e, get_b64d
res_b64_encode = get_b64e(s)
print(res_b64_encode) # dGVzdF9pbmZvcm1hdGlvbg==
res_b64_bin = get_b64e(s, to_bin=True)
print(res_b64_bin) # b'dGVzdF9pbmZvcm1hdGlvbg=='
res_b32_encode = get_b64e(s, mode=32) # default mode=64 // mode: 16 32 64 85
print(res_b32_encode) # ORSXG5C7NFXGM33SNVQXI2LPNY======
res_b64_decode = get_b64d(res_b64_encode)
print(res_b64_decode) # test_information
res_b32_decode = get_b64d(res_b32_encode, mode=32) # default mode=64 // mode: 16 32 64 85
print(res_b32_decode) # test_information
| [((20, 17, 20, 28), 'lite_tools.get_b64e', 'get_b64e', ({(20, 26, 20, 27): 's'}, {}), '(s)', False, 'from lite_tools import get_md5, get_sha, get_sha3, get_b64e, get_b64d\n'), ((23, 14, 23, 38), 'lite_tools.get_b64e', 'get_b64e', (), '', False, 'from lite_tools import get_md5, get_sha, get_sha3, get_b64e, get_b64d\n'), ((26, 17, 26, 37), 'lite_tools.get_b64e', 'get_b64e', (), '', False, 'from lite_tools import get_md5, get_sha, get_sha3, get_b64e, get_b64d\n'), ((29, 17, 29, 41), 'lite_tools.get_b64d', 'get_b64d', ({(29, 26, 29, 40): 'res_b64_encode'}, {}), '(res_b64_encode)', False, 'from lite_tools import get_md5, get_sha, get_sha3, get_b64e, get_b64d\n'), ((32, 17, 32, 50), 'lite_tools.get_b64d', 'get_b64d', (), '', False, 'from lite_tools import get_md5, get_sha, get_sha3, get_b64e, get_b64d\n'), ((7, 6, 7, 16), 'lite_tools.get_md5', 'get_md5', ({(7, 14, 7, 15): 's'}, {}), '(s)', False, 'from lite_tools import get_md5, get_sha, get_sha3, get_b64e, get_b64d\n'), ((8, 6, 8, 28), 'lite_tools.get_md5', 'get_md5', (), '', False, 'from lite_tools import get_md5, get_sha, get_sha3, get_b64e, get_b64d\n'), ((9, 6, 9, 29), 'lite_tools.get_md5', 'get_md5', (), '', False, 'from lite_tools import get_md5, get_sha, get_sha3, get_b64e, get_b64d\n'), ((10, 6, 10, 16), 'lite_tools.get_sha', 'get_sha', ({(10, 14, 10, 15): 's'}, {}), '(s)', False, 'from lite_tools import get_md5, get_sha, get_sha3, get_b64e, get_b64d\n'), ((11, 6, 11, 29), 'lite_tools.get_sha', 'get_sha', (), '', False, 'from lite_tools import get_md5, get_sha, get_sha3, get_b64e, get_b64d\n'), ((12, 6, 12, 24), 'lite_tools.get_sha', 'get_sha', (), '', False, 'from lite_tools import get_md5, get_sha, get_sha3, get_b64e, get_b64d\n'), ((13, 6, 13, 17), 'lite_tools.get_sha3', 'get_sha3', ({(13, 15, 13, 16): 's'}, {}), '(s)', False, 'from lite_tools import get_md5, get_sha, get_sha3, get_b64e, get_b64d\n'), ((14, 6, 14, 30), 'lite_tools.get_sha3', 'get_sha3', (), '', False, 'from lite_tools import get_md5, get_sha, get_sha3, get_b64e, get_b64d\n'), ((15, 6, 15, 25), 'lite_tools.get_sha3', 'get_sha3', (), '', False, 'from lite_tools import get_md5, get_sha, get_sha3, get_b64e, get_b64d\n'), ((16, 6, 16, 27), 'lite_tools.get_sha3', 'get_sha3', (), '', False, 'from lite_tools import get_md5, get_sha, get_sha3, get_b64e, get_b64d\n')] |
terrywqf/PreREISE | prereise/gather/solardata/tests/__init__.py | f8052dd37091eaa15024725d5c92a3ef0ee311ee | __all__ = ["mock_pv_info", "test_pv_tracking"]
| [] |
farr/arfit | arfit/cp_utils.py | 7ff6def331ef98f43f623da2d9867d1ac967448b | import carmcmc as cm
from gatspy.periodic import LombScargleFast
import matplotlib.pyplot as plt
import numpy as np
def csample_from_files(datafile, chainfile, p, q):
data = np.loadtxt(datafile)
times, tind = np.unique(data[:,0], return_index=True)
data = data[tind, :]
chain = np.loadtxt(chainfile)
assert chain.shape[1] == p + q + 5, 'dimension mismatch'
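    # Column layout assumed by the assertion above: the first p + q + 3 columns
    # are the CARMA parameter trace; the last two hold the log-likelihood and
    # log-posterior, which are split off again in the call below.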
return cm.CarmaSample(data[:,0], data[:,1], data[:,2], None, q=q, trace=chain[:,:-2], loglike=chain[:,-2], logpost=chain[:,-1])
def normalised_lombscargle(ts, ys, dys, oversampling=5, nyquist_factor=3):
model = LombScargleFast().fit(ts, ys, dys)
pers, pows = model.periodogram_auto(oversampling=oversampling, nyquist_factor=nyquist_factor)
fs = 1.0/pers
T = np.max(ts) - np.min(ts)
mu = 1/T*np.trapz(ys, ts)
s2 = 1/T*np.trapz(np.square(ys-mu), ts)
return fs, s2*pows/np.trapz(pows, fs)
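# The rescaling above normalises the periodogram so that its integral over
# frequency equals the (time-averaged) variance of the series, i.e.
# np.trapz(psd, fs) ~ s2. A quick self-check, assuming ts, ys, dys are
# already defined:
# fs, psd = normalised_lombscargle(ts, ys, dys)
# assert np.isclose(np.trapz(psd, fs), np.var(ys), rtol=0.1)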
def plot_psd_sample_data(sample, oversampling=5, nyquist_factor=3):
psd_low, psd_high, psd_med, fs = sample.plot_power_spectrum(doShow=False)
plt.clf()
plt.loglog(fs, psd_med, '-b', alpha=0.33)
plt.fill_between(fs, psd_low, psd_high, color='b', alpha=0.17)
fs, psd = normalised_lombscargle(sample.time, sample.y, sample.ysig, oversampling=oversampling, nyquist_factor=nyquist_factor)
bw = fs[-1] - fs[0]
T = sample.time[-1] - sample.time[0]
s2 = 1/T*np.trapz(np.square(sample.ysig), sample.time)
noise_level = s2/bw
levels = noise_level*np.sqrt(sample.get_samples('measerr_scale'))
plt.axhline(np.median(levels), color='g', alpha=0.33)
plt.fill_between(fs, np.percentile(levels, 84)+0*fs, np.percentile(levels, 16)+0*fs, color='g', alpha=0.17)
plt.loglog(fs, psd, '-r', alpha=0.33)
def plot_psd_sample_draw(sample, loc='upper left', oversampling=5, nyquist_factor=3):
fs, psd = normalised_lombscargle(sample.time, sample.y, sample.ysig, oversampling=oversampling, nyquist_factor=nyquist_factor)
ys_draw = sample.predict(sample.time, bestfit='random')[0]
fs, dpsd = normalised_lombscargle(sample.time, ys_draw, sample.ysig, oversampling=oversampling, nyquist_factor=nyquist_factor)
plt.loglog(fs, psd, '-k', label='Data', alpha=0.5)
plt.loglog(fs, dpsd, '-b', label='Prediction', alpha=0.5)
plt.legend(loc=loc)
| [((7, 11, 7, 31), 'numpy.loadtxt', 'np.loadtxt', ({(7, 22, 7, 30): 'datafile'}, {}), '(datafile)', True, 'import numpy as np\n'), ((9, 18, 9, 57), 'numpy.unique', 'np.unique', (), '', True, 'import numpy as np\n'), ((12, 12, 12, 33), 'numpy.loadtxt', 'np.loadtxt', ({(12, 23, 12, 32): 'chainfile'}, {}), '(chainfile)', True, 'import numpy as np\n'), ((15, 11, 15, 131), 'carmcmc.CarmaSample', 'cm.CarmaSample', (), '', True, 'import carmcmc as cm\n'), ((34, 4, 34, 13), 'matplotlib.pyplot.clf', 'plt.clf', ({}, {}), '()', True, 'import matplotlib.pyplot as plt\n'), ((36, 4, 36, 45), 'matplotlib.pyplot.loglog', 'plt.loglog', (), '', True, 'import matplotlib.pyplot as plt\n'), ((37, 4, 37, 66), 'matplotlib.pyplot.fill_between', 'plt.fill_between', (), '', True, 'import matplotlib.pyplot as plt\n'), ((50, 4, 50, 41), 'matplotlib.pyplot.loglog', 'plt.loglog', (), '', True, 'import matplotlib.pyplot as plt\n'), ((59, 4, 59, 54), 'matplotlib.pyplot.loglog', 'plt.loglog', (), '', True, 'import matplotlib.pyplot as plt\n'), ((60, 4, 60, 61), 'matplotlib.pyplot.loglog', 'plt.loglog', (), '', True, 'import matplotlib.pyplot as plt\n'), ((61, 4, 61, 23), 'matplotlib.pyplot.legend', 'plt.legend', (), '', True, 'import matplotlib.pyplot as plt\n'), ((23, 8, 23, 18), 'numpy.max', 'np.max', ({(23, 15, 23, 17): 'ts'}, {}), '(ts)', True, 'import numpy as np\n'), ((23, 21, 23, 31), 'numpy.min', 'np.min', ({(23, 28, 23, 30): 'ts'}, {}), '(ts)', True, 'import numpy as np\n'), ((25, 13, 25, 29), 'numpy.trapz', 'np.trapz', ({(25, 22, 25, 24): 'ys', (25, 26, 25, 28): 'ts'}, {}), '(ys, ts)', True, 'import numpy as np\n'), ((47, 16, 47, 33), 'numpy.median', 'np.median', ({(47, 26, 47, 32): 'levels'}, {}), '(levels)', True, 'import numpy as np\n'), ((18, 12, 18, 29), 'gatspy.periodic.LombScargleFast', 'LombScargleFast', ({}, {}), '()', False, 'from gatspy.periodic import LombScargleFast\n'), ((26, 22, 26, 38), 'numpy.square', 'np.square', ({(26, 32, 26, 37): '(ys - mu)'}, {}), '(ys - mu)', True, 'import numpy as np\n'), ((28, 23, 28, 41), 'numpy.trapz', 'np.trapz', ({(28, 32, 28, 36): 'pows', (28, 38, 28, 40): 'fs'}, {}), '(pows, fs)', True, 'import numpy as np\n'), ((44, 22, 44, 44), 'numpy.square', 'np.square', ({(44, 32, 44, 43): 'sample.ysig'}, {}), '(sample.ysig)', True, 'import numpy as np\n'), ((48, 25, 48, 50), 'numpy.percentile', 'np.percentile', ({(48, 39, 48, 45): 'levels', (48, 47, 48, 49): '(84)'}, {}), '(levels, 84)', True, 'import numpy as np\n'), ((48, 57, 48, 82), 'numpy.percentile', 'np.percentile', ({(48, 71, 48, 77): 'levels', (48, 79, 48, 81): '(16)'}, {}), '(levels, 16)', True, 'import numpy as np\n')] |
xolox/python-pdiffcopy | pdiffcopy/hashing.py | ed765af92c0c0823818d545e61384753912a5725 | # Fast large file synchronization inspired by rsync.
#
# Author: Peter Odding <[email protected]>
# Last Change: March 6, 2020
# URL: https://pdiffcopy.readthedocs.io
"""Parallel hashing of files using :mod:`multiprocessing` and :mod:`pdiffcopy.mp`."""
# Standard library modules.
import functools
import hashlib
import os
# External dependencies.
from six.moves import range
# Modules included in our package.
from pdiffcopy.mp import WorkerPool
# Public identifiers that require documentation.
__all__ = ("compute_hashes", "hash_worker")
def compute_hashes(filename, block_size, method, concurrency):
"""Compute checksums of a file in blocks (parallel)."""
with WorkerPool(
concurrency=concurrency,
generator_fn=functools.partial(range, 0, os.path.getsize(filename), block_size),
worker_fn=functools.partial(hash_worker, block_size=block_size, filename=filename, method=method),
) as pool:
for offset, digest in pool:
yield offset, digest
def hash_worker(offset, block_size, filename, method):
"""Worker function to be run in child processes."""
with open(filename, "rb") as handle:
handle.seek(offset)
context = hashlib.new(method)
context.update(handle.read(block_size))
return offset, context.hexdigest()
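# A minimal usage sketch (assumptions: 'example.bin' exists locally and the
# parameter values below are illustrative, not pdiffcopy defaults):
if __name__ == "__main__":
    for offset, digest in compute_hashes("example.bin", block_size=1024 * 1024,
                                         method="sha1", concurrency=4):
        print(offset, digest)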
| [((39, 18, 39, 37), 'hashlib.new', 'hashlib.new', ({(39, 30, 39, 36): 'method'}, {}), '(method)', False, 'import hashlib\n'), ((29, 18, 29, 105), 'functools.partial', 'functools.partial', (), '', False, 'import functools\n'), ((28, 49, 28, 74), 'os.path.getsize', 'os.path.getsize', ({(28, 65, 28, 73): 'filename'}, {}), '(filename)', False, 'import os\n')] |
robert-anderson/pyscf | pyscf/nao/test/test_0003_na2_nao.py | cdc56e168cb15f47e8cdc791a92d689fa9b655af | # Copyright 2014-2018 The PySCF Developers. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from __future__ import print_function
import unittest
from pyscf.nao.m_siesta_utils import get_siesta_command, get_pseudo
class KnowValues(unittest.TestCase):
def test_siesta2sv_df(self):
import subprocess
import os
siesta_fdf = """
xml.write .true.
PAO.EnergyShift 100 meV
%block ChemicalSpeciesLabel
1 11 Na
%endblock ChemicalSpeciesLabel
NumberOfAtoms 2
NumberOfSpecies 1
%block AtomicCoordinatesAndAtomicSpecies
0.77573521 0.00000000 0.00000000 1
-0.77573521 0.00000000 0.00000000 1
%endblock AtomicCoordinatesAndAtomicSpecies
MD.NumCGsteps 0
COOP.Write .true.
WriteDenchar .true.
"""
label = 'siesta'
fi = open(label+'.fdf', 'w')
print(siesta_fdf, file=fi)
fi.close()
for sp in ['Na']:
try:
os.remove(sp+'.psf')
      except OSError:
pass
try:
pppath = get_pseudo(sp)
except:
print('get_pseudo( '+sp+' ) is not working--> skip siesta run' )
return
os.symlink(pppath, sp+'.psf')
errorcode = subprocess.call(get_siesta_command(label), shell=True)
if errorcode: raise RuntimeError('siesta returned an error: {0}'.format(errorcode))
# run test system_vars
from pyscf.nao import mf
sv = mf(label=label)
self.assertEqual(sv.norbs, 10)
self.assertTrue( sv.diag_check() )
self.assertTrue( sv.overlap_check())
if __name__ == "__main__": unittest.main()
| [((68, 27, 68, 42), 'unittest.main', 'unittest.main', ({}, {}), '()', False, 'import unittest\n'), ((63, 10, 63, 25), 'pyscf.nao.mf', 'mf', (), '', False, 'from pyscf.nao import mf\n'), ((57, 6, 57, 35), 'os.symlink', 'os.symlink', ({(57, 17, 57, 23): 'pppath', (57, 25, 57, 34): "(sp + '.psf')"}, {}), "(pppath, sp + '.psf')", False, 'import os\n'), ((58, 32, 58, 57), 'pyscf.nao.m_siesta_utils.get_siesta_command', 'get_siesta_command', ({(58, 51, 58, 56): 'label'}, {}), '(label)', False, 'from pyscf.nao.m_siesta_utils import get_siesta_command, get_pseudo\n'), ((49, 8, 49, 28), 'os.remove', 'os.remove', ({(49, 18, 49, 27): "(sp + '.psf')"}, {}), "(sp + '.psf')", False, 'import os\n'), ((53, 17, 53, 31), 'pyscf.nao.m_siesta_utils.get_pseudo', 'get_pseudo', ({(53, 28, 53, 30): 'sp'}, {}), '(sp)', False, 'from pyscf.nao.m_siesta_utils import get_siesta_command, get_pseudo\n')] |
mozkzki/moz-library | tests/moz_library/rental_books_test.py | fb925414405a9fcba8bb7194cf983ba18c920e2f | import pytest
from moz_library.rental_books import RentalBooks
class TestRentalBooks:
@pytest.fixture()
def books1(self):
return RentalBooks()
def test_can_extend_period_1(self, books1):
assert books1._can_extend_period("延長できません") is False
def test_can_extend_period_2(self, books1):
assert books1._can_extend_period("すでに延長されています") is False
def test_can_extend_period_3(self, books1):
assert books1._can_extend_period("それ以外") is True
| [((6, 5, 6, 21), 'pytest.fixture', 'pytest.fixture', ({}, {}), '()', False, 'import pytest\n'), ((8, 15, 8, 28), 'moz_library.rental_books.RentalBooks', 'RentalBooks', ({}, {}), '()', False, 'from moz_library.rental_books import RentalBooks\n')] |
aspose-slides/Aspose.Slides-for-Python-via-.NET | examples/src/Charts/MultiCategoryChart.py | c55ad5c71f942598f1e67e22a52cbcd1cb286467 | import aspose.pydrawing as drawing
import aspose.slides as slides
def charts_multi_category_chart():
#ExStart:MultiCategoryChart
# The path to the documents directory.
outDir = "./examples/out/"
with slides.Presentation() as pres:
slide = pres.slides[0]
ch = pres.slides[0].shapes.add_chart(slides.charts.ChartType.CLUSTERED_COLUMN, 100, 100, 600, 450)
ch.chart_data.series.clear()
ch.chart_data.categories.clear()
fact = ch.chart_data.chart_data_workbook
fact.clear(0)
defaultWorksheetIndex = 0
category = ch.chart_data.categories.add(fact.get_cell(0, "c2", "A"))
category.grouping_levels.set_grouping_item(1, "Group1")
category = ch.chart_data.categories.add(fact.get_cell(0, "c3", "B"))
category = ch.chart_data.categories.add(fact.get_cell(0, "c4", "C"))
category.grouping_levels.set_grouping_item(1, "Group2")
category = ch.chart_data.categories.add(fact.get_cell(0, "c5", "D"))
category = ch.chart_data.categories.add(fact.get_cell(0, "c6", "E"))
category.grouping_levels.set_grouping_item(1, "Group3")
category = ch.chart_data.categories.add(fact.get_cell(0, "c7", "F"))
category = ch.chart_data.categories.add(fact.get_cell(0, "c8", "G"))
category.grouping_levels.set_grouping_item(1, "Group4")
category = ch.chart_data.categories.add(fact.get_cell(0, "c9", "H"))
# Adding Series
series = ch.chart_data.series.add(fact.get_cell(0, "D1", "Series 1"),
slides.charts.ChartType.CLUSTERED_COLUMN)
series.data_points.add_data_point_for_bar_series(fact.get_cell(defaultWorksheetIndex, "D2", 10))
series.data_points.add_data_point_for_bar_series(fact.get_cell(defaultWorksheetIndex, "D3", 20))
series.data_points.add_data_point_for_bar_series(fact.get_cell(defaultWorksheetIndex, "D4", 30))
series.data_points.add_data_point_for_bar_series(fact.get_cell(defaultWorksheetIndex, "D5", 40))
series.data_points.add_data_point_for_bar_series(fact.get_cell(defaultWorksheetIndex, "D6", 50))
series.data_points.add_data_point_for_bar_series(fact.get_cell(defaultWorksheetIndex, "D7", 60))
series.data_points.add_data_point_for_bar_series(fact.get_cell(defaultWorksheetIndex, "D8", 70))
series.data_points.add_data_point_for_bar_series(fact.get_cell(defaultWorksheetIndex, "D9", 80))
# Save presentation with chart
pres.save(outDir + "charts_multi_category_chart_out.pptx", slides.export.SaveFormat.PPTX)
    #ExEnd:MultiCategoryChart
 | []
cybarox/netbox | netbox/extras/forms/filtersets.py | ea197eff5f4fe925bb354d1375912decd81752bd | from django import forms
from django.contrib.auth.models import User
from django.contrib.contenttypes.models import ContentType
from django.utils.translation import gettext as _
from dcim.models import DeviceRole, DeviceType, Platform, Region, Site, SiteGroup
from extras.choices import *
from extras.models import *
from extras.utils import FeatureQuery
from netbox.forms.base import NetBoxModelFilterSetForm
from tenancy.models import Tenant, TenantGroup
from utilities.forms import (
add_blank_choice, APISelectMultiple, BOOLEAN_WITH_BLANK_CHOICES, ContentTypeChoiceField,
ContentTypeMultipleChoiceField, DateTimePicker, DynamicModelMultipleChoiceField, FilterForm, MultipleChoiceField,
StaticSelect, TagFilterField,
)
from virtualization.models import Cluster, ClusterGroup, ClusterType
__all__ = (
'ConfigContextFilterForm',
'CustomFieldFilterForm',
'CustomLinkFilterForm',
'ExportTemplateFilterForm',
'JournalEntryFilterForm',
'LocalConfigContextFilterForm',
'ObjectChangeFilterForm',
'TagFilterForm',
'WebhookFilterForm',
)
class CustomFieldFilterForm(FilterForm):
fieldsets = (
(None, ('q',)),
('Attributes', ('type', 'content_types', 'weight', 'required')),
)
content_types = ContentTypeMultipleChoiceField(
queryset=ContentType.objects.all(),
limit_choices_to=FeatureQuery('custom_fields'),
required=False
)
type = MultipleChoiceField(
choices=CustomFieldTypeChoices,
required=False,
label=_('Field type')
)
weight = forms.IntegerField(
required=False
)
required = forms.NullBooleanField(
required=False,
widget=StaticSelect(
choices=BOOLEAN_WITH_BLANK_CHOICES
)
)
class CustomLinkFilterForm(FilterForm):
fieldsets = (
(None, ('q',)),
('Attributes', ('content_type', 'enabled', 'new_window', 'weight')),
)
content_type = ContentTypeChoiceField(
queryset=ContentType.objects.all(),
limit_choices_to=FeatureQuery('custom_links'),
required=False
)
enabled = forms.NullBooleanField(
required=False,
widget=StaticSelect(
choices=BOOLEAN_WITH_BLANK_CHOICES
)
)
new_window = forms.NullBooleanField(
required=False,
widget=StaticSelect(
choices=BOOLEAN_WITH_BLANK_CHOICES
)
)
weight = forms.IntegerField(
required=False
)
class ExportTemplateFilterForm(FilterForm):
fieldsets = (
(None, ('q',)),
('Attributes', ('content_type', 'mime_type', 'file_extension', 'as_attachment')),
)
content_type = ContentTypeChoiceField(
queryset=ContentType.objects.all(),
limit_choices_to=FeatureQuery('export_templates'),
required=False
)
mime_type = forms.CharField(
required=False,
label=_('MIME type')
)
file_extension = forms.CharField(
required=False
)
as_attachment = forms.NullBooleanField(
required=False,
widget=StaticSelect(
choices=BOOLEAN_WITH_BLANK_CHOICES
)
)
class WebhookFilterForm(FilterForm):
fieldsets = (
(None, ('q',)),
('Attributes', ('content_types', 'http_method', 'enabled')),
('Events', ('type_create', 'type_update', 'type_delete')),
)
content_types = ContentTypeMultipleChoiceField(
queryset=ContentType.objects.all(),
limit_choices_to=FeatureQuery('webhooks'),
required=False
)
http_method = MultipleChoiceField(
choices=WebhookHttpMethodChoices,
required=False,
label=_('HTTP method')
)
enabled = forms.NullBooleanField(
required=False,
widget=StaticSelect(
choices=BOOLEAN_WITH_BLANK_CHOICES
)
)
type_create = forms.NullBooleanField(
required=False,
widget=StaticSelect(
choices=BOOLEAN_WITH_BLANK_CHOICES
)
)
type_update = forms.NullBooleanField(
required=False,
widget=StaticSelect(
choices=BOOLEAN_WITH_BLANK_CHOICES
)
)
type_delete = forms.NullBooleanField(
required=False,
widget=StaticSelect(
choices=BOOLEAN_WITH_BLANK_CHOICES
)
)
class TagFilterForm(FilterForm):
model = Tag
content_type_id = ContentTypeMultipleChoiceField(
queryset=ContentType.objects.filter(FeatureQuery('tags').get_query()),
required=False,
label=_('Tagged object type')
)
class ConfigContextFilterForm(FilterForm):
fieldsets = (
(None, ('q', 'tag_id')),
('Location', ('region_id', 'site_group_id', 'site_id')),
('Device', ('device_type_id', 'platform_id', 'role_id')),
('Cluster', ('cluster_type_id', 'cluster_group_id', 'cluster_id')),
('Tenant', ('tenant_group_id', 'tenant_id'))
)
region_id = DynamicModelMultipleChoiceField(
queryset=Region.objects.all(),
required=False,
label=_('Regions')
)
site_group_id = DynamicModelMultipleChoiceField(
queryset=SiteGroup.objects.all(),
required=False,
label=_('Site groups')
)
site_id = DynamicModelMultipleChoiceField(
queryset=Site.objects.all(),
required=False,
label=_('Sites')
)
device_type_id = DynamicModelMultipleChoiceField(
queryset=DeviceType.objects.all(),
required=False,
label=_('Device types')
)
role_id = DynamicModelMultipleChoiceField(
queryset=DeviceRole.objects.all(),
required=False,
label=_('Roles')
)
platform_id = DynamicModelMultipleChoiceField(
queryset=Platform.objects.all(),
required=False,
label=_('Platforms')
)
cluster_type_id = DynamicModelMultipleChoiceField(
queryset=ClusterType.objects.all(),
required=False,
label=_('Cluster types'),
fetch_trigger='open'
)
cluster_group_id = DynamicModelMultipleChoiceField(
queryset=ClusterGroup.objects.all(),
required=False,
label=_('Cluster groups')
)
cluster_id = DynamicModelMultipleChoiceField(
queryset=Cluster.objects.all(),
required=False,
label=_('Clusters')
)
tenant_group_id = DynamicModelMultipleChoiceField(
queryset=TenantGroup.objects.all(),
required=False,
label=_('Tenant groups')
)
tenant_id = DynamicModelMultipleChoiceField(
queryset=Tenant.objects.all(),
required=False,
label=_('Tenant')
)
tag_id = DynamicModelMultipleChoiceField(
queryset=Tag.objects.all(),
required=False,
label=_('Tags')
)
class LocalConfigContextFilterForm(forms.Form):
local_context_data = forms.NullBooleanField(
required=False,
label=_('Has local config context data'),
widget=StaticSelect(
choices=BOOLEAN_WITH_BLANK_CHOICES
)
)
class JournalEntryFilterForm(NetBoxModelFilterSetForm):
model = JournalEntry
fieldsets = (
(None, ('q', 'tag')),
('Creation', ('created_before', 'created_after', 'created_by_id')),
('Attributes', ('assigned_object_type_id', 'kind'))
)
created_after = forms.DateTimeField(
required=False,
label=_('After'),
widget=DateTimePicker()
)
created_before = forms.DateTimeField(
required=False,
label=_('Before'),
widget=DateTimePicker()
)
created_by_id = DynamicModelMultipleChoiceField(
queryset=User.objects.all(),
required=False,
label=_('User'),
widget=APISelectMultiple(
api_url='/api/users/users/',
)
)
assigned_object_type_id = DynamicModelMultipleChoiceField(
queryset=ContentType.objects.all(),
required=False,
label=_('Object Type'),
widget=APISelectMultiple(
api_url='/api/extras/content-types/',
)
)
kind = forms.ChoiceField(
choices=add_blank_choice(JournalEntryKindChoices),
required=False,
widget=StaticSelect()
)
tag = TagFilterField(model)
class ObjectChangeFilterForm(FilterForm):
model = ObjectChange
fieldsets = (
(None, ('q',)),
('Time', ('time_before', 'time_after')),
('Attributes', ('action', 'user_id', 'changed_object_type_id')),
)
time_after = forms.DateTimeField(
required=False,
label=_('After'),
widget=DateTimePicker()
)
time_before = forms.DateTimeField(
required=False,
label=_('Before'),
widget=DateTimePicker()
)
action = forms.ChoiceField(
choices=add_blank_choice(ObjectChangeActionChoices),
required=False,
widget=StaticSelect()
)
user_id = DynamicModelMultipleChoiceField(
queryset=User.objects.all(),
required=False,
label=_('User'),
widget=APISelectMultiple(
api_url='/api/users/users/',
)
)
changed_object_type_id = DynamicModelMultipleChoiceField(
queryset=ContentType.objects.all(),
required=False,
label=_('Object Type'),
widget=APISelectMultiple(
api_url='/api/extras/content-types/',
)
)
| [((47, 13, 49, 5), 'django.forms.IntegerField', 'forms.IntegerField', (), '', False, 'from django import forms\n'), ((80, 13, 82, 5), 'django.forms.IntegerField', 'forms.IntegerField', (), '', False, 'from django import forms\n'), ((99, 21, 101, 5), 'django.forms.CharField', 'forms.CharField', (), '', False, 'from django import forms\n'), ((280, 10, 280, 31), 'utilities.forms.TagFilterField', 'TagFilterField', ({(280, 25, 280, 30): 'model'}, {}), '(model)', False, 'from utilities.forms import add_blank_choice, APISelectMultiple, BOOLEAN_WITH_BLANK_CHOICES, ContentTypeChoiceField, ContentTypeMultipleChoiceField, DateTimePicker, DynamicModelMultipleChoiceField, FilterForm, MultipleChoiceField, StaticSelect, TagFilterField\n'), ((38, 17, 38, 42), 'django.contrib.contenttypes.models.ContentType.objects.all', 'ContentType.objects.all', ({}, {}), '()', False, 'from django.contrib.contenttypes.models import ContentType\n'), ((39, 25, 39, 54), 'extras.utils.FeatureQuery', 'FeatureQuery', ({(39, 38, 39, 53): '"""custom_fields"""'}, {}), "('custom_fields')", False, 'from extras.utils import FeatureQuery\n'), ((45, 14, 45, 29), 'django.utils.translation.gettext', '_', ({(45, 16, 45, 28): '"""Field type"""'}, {}), "('Field type')", True, 'from django.utils.translation import gettext as _\n'), ((52, 15, 54, 9), 'utilities.forms.StaticSelect', 'StaticSelect', (), '', False, 'from utilities.forms import add_blank_choice, APISelectMultiple, BOOLEAN_WITH_BLANK_CHOICES, ContentTypeChoiceField, ContentTypeMultipleChoiceField, DateTimePicker, DynamicModelMultipleChoiceField, FilterForm, MultipleChoiceField, StaticSelect, TagFilterField\n'), ((64, 17, 64, 42), 'django.contrib.contenttypes.models.ContentType.objects.all', 'ContentType.objects.all', ({}, {}), '()', False, 'from django.contrib.contenttypes.models import ContentType\n'), ((65, 25, 65, 53), 'extras.utils.FeatureQuery', 'FeatureQuery', ({(65, 38, 65, 52): '"""custom_links"""'}, {}), "('custom_links')", False, 'from extras.utils import FeatureQuery\n'), ((70, 15, 72, 9), 'utilities.forms.StaticSelect', 'StaticSelect', (), '', False, 'from utilities.forms import add_blank_choice, APISelectMultiple, BOOLEAN_WITH_BLANK_CHOICES, ContentTypeChoiceField, ContentTypeMultipleChoiceField, DateTimePicker, DynamicModelMultipleChoiceField, FilterForm, MultipleChoiceField, StaticSelect, TagFilterField\n'), ((76, 15, 78, 9), 'utilities.forms.StaticSelect', 'StaticSelect', (), '', False, 'from utilities.forms import add_blank_choice, APISelectMultiple, BOOLEAN_WITH_BLANK_CHOICES, ContentTypeChoiceField, ContentTypeMultipleChoiceField, DateTimePicker, DynamicModelMultipleChoiceField, FilterForm, MultipleChoiceField, StaticSelect, TagFilterField\n'), ((91, 17, 91, 42), 'django.contrib.contenttypes.models.ContentType.objects.all', 'ContentType.objects.all', ({}, {}), '()', False, 'from django.contrib.contenttypes.models import ContentType\n'), ((92, 25, 92, 57), 'extras.utils.FeatureQuery', 'FeatureQuery', ({(92, 38, 92, 56): '"""export_templates"""'}, {}), "('export_templates')", False, 'from extras.utils import FeatureQuery\n'), ((97, 14, 97, 28), 'django.utils.translation.gettext', '_', ({(97, 16, 97, 27): '"""MIME type"""'}, {}), "('MIME type')", True, 'from django.utils.translation import gettext as _\n'), ((104, 15, 106, 9), 'utilities.forms.StaticSelect', 'StaticSelect', (), '', False, 'from utilities.forms import add_blank_choice, APISelectMultiple, BOOLEAN_WITH_BLANK_CHOICES, ContentTypeChoiceField, ContentTypeMultipleChoiceField, DateTimePicker, 
DynamicModelMultipleChoiceField, FilterForm, MultipleChoiceField, StaticSelect, TagFilterField\n'), ((117, 17, 117, 42), 'django.contrib.contenttypes.models.ContentType.objects.all', 'ContentType.objects.all', ({}, {}), '()', False, 'from django.contrib.contenttypes.models import ContentType\n'), ((118, 25, 118, 49), 'extras.utils.FeatureQuery', 'FeatureQuery', ({(118, 38, 118, 48): '"""webhooks"""'}, {}), "('webhooks')", False, 'from extras.utils import FeatureQuery\n'), ((124, 14, 124, 30), 'django.utils.translation.gettext', '_', ({(124, 16, 124, 29): '"""HTTP method"""'}, {}), "('HTTP method')", True, 'from django.utils.translation import gettext as _\n'), ((128, 15, 130, 9), 'utilities.forms.StaticSelect', 'StaticSelect', (), '', False, 'from utilities.forms import add_blank_choice, APISelectMultiple, BOOLEAN_WITH_BLANK_CHOICES, ContentTypeChoiceField, ContentTypeMultipleChoiceField, DateTimePicker, DynamicModelMultipleChoiceField, FilterForm, MultipleChoiceField, StaticSelect, TagFilterField\n'), ((134, 15, 136, 9), 'utilities.forms.StaticSelect', 'StaticSelect', (), '', False, 'from utilities.forms import add_blank_choice, APISelectMultiple, BOOLEAN_WITH_BLANK_CHOICES, ContentTypeChoiceField, ContentTypeMultipleChoiceField, DateTimePicker, DynamicModelMultipleChoiceField, FilterForm, MultipleChoiceField, StaticSelect, TagFilterField\n'), ((140, 15, 142, 9), 'utilities.forms.StaticSelect', 'StaticSelect', (), '', False, 'from utilities.forms import add_blank_choice, APISelectMultiple, BOOLEAN_WITH_BLANK_CHOICES, ContentTypeChoiceField, ContentTypeMultipleChoiceField, DateTimePicker, DynamicModelMultipleChoiceField, FilterForm, MultipleChoiceField, StaticSelect, TagFilterField\n'), ((146, 15, 148, 9), 'utilities.forms.StaticSelect', 'StaticSelect', (), '', False, 'from utilities.forms import add_blank_choice, APISelectMultiple, BOOLEAN_WITH_BLANK_CHOICES, ContentTypeChoiceField, ContentTypeMultipleChoiceField, DateTimePicker, DynamicModelMultipleChoiceField, FilterForm, MultipleChoiceField, StaticSelect, TagFilterField\n'), ((157, 14, 157, 37), 'django.utils.translation.gettext', '_', ({(157, 16, 157, 36): '"""Tagged object type"""'}, {}), "('Tagged object type')", True, 'from django.utils.translation import gettext as _\n'), ((170, 17, 170, 37), 'dcim.models.Region.objects.all', 'Region.objects.all', ({}, {}), '()', False, 'from dcim.models import DeviceRole, DeviceType, Platform, Region, Site, SiteGroup\n'), ((172, 14, 172, 26), 'django.utils.translation.gettext', '_', ({(172, 16, 172, 25): '"""Regions"""'}, {}), "('Regions')", True, 'from django.utils.translation import gettext as _\n'), ((175, 17, 175, 40), 'dcim.models.SiteGroup.objects.all', 'SiteGroup.objects.all', ({}, {}), '()', False, 'from dcim.models import DeviceRole, DeviceType, Platform, Region, Site, SiteGroup\n'), ((177, 14, 177, 30), 'django.utils.translation.gettext', '_', ({(177, 16, 177, 29): '"""Site groups"""'}, {}), "('Site groups')", True, 'from django.utils.translation import gettext as _\n'), ((180, 17, 180, 35), 'dcim.models.Site.objects.all', 'Site.objects.all', ({}, {}), '()', False, 'from dcim.models import DeviceRole, DeviceType, Platform, Region, Site, SiteGroup\n'), ((182, 14, 182, 24), 'django.utils.translation.gettext', '_', ({(182, 16, 182, 23): '"""Sites"""'}, {}), "('Sites')", True, 'from django.utils.translation import gettext as _\n'), ((185, 17, 185, 41), 'dcim.models.DeviceType.objects.all', 'DeviceType.objects.all', ({}, {}), '()', False, 'from dcim.models import DeviceRole, DeviceType, 
Platform, Region, Site, SiteGroup\n'), ((187, 14, 187, 31), 'django.utils.translation.gettext', '_', ({(187, 16, 187, 30): '"""Device types"""'}, {}), "('Device types')", True, 'from django.utils.translation import gettext as _\n'), ((190, 17, 190, 41), 'dcim.models.DeviceRole.objects.all', 'DeviceRole.objects.all', ({}, {}), '()', False, 'from dcim.models import DeviceRole, DeviceType, Platform, Region, Site, SiteGroup\n'), ((192, 14, 192, 24), 'django.utils.translation.gettext', '_', ({(192, 16, 192, 23): '"""Roles"""'}, {}), "('Roles')", True, 'from django.utils.translation import gettext as _\n'), ((195, 17, 195, 39), 'dcim.models.Platform.objects.all', 'Platform.objects.all', ({}, {}), '()', False, 'from dcim.models import DeviceRole, DeviceType, Platform, Region, Site, SiteGroup\n'), ((197, 14, 197, 28), 'django.utils.translation.gettext', '_', ({(197, 16, 197, 27): '"""Platforms"""'}, {}), "('Platforms')", True, 'from django.utils.translation import gettext as _\n'), ((200, 17, 200, 42), 'virtualization.models.ClusterType.objects.all', 'ClusterType.objects.all', ({}, {}), '()', False, 'from virtualization.models import Cluster, ClusterGroup, ClusterType\n'), ((202, 14, 202, 32), 'django.utils.translation.gettext', '_', ({(202, 16, 202, 31): '"""Cluster types"""'}, {}), "('Cluster types')", True, 'from django.utils.translation import gettext as _\n'), ((206, 17, 206, 43), 'virtualization.models.ClusterGroup.objects.all', 'ClusterGroup.objects.all', ({}, {}), '()', False, 'from virtualization.models import Cluster, ClusterGroup, ClusterType\n'), ((208, 14, 208, 33), 'django.utils.translation.gettext', '_', ({(208, 16, 208, 32): '"""Cluster groups"""'}, {}), "('Cluster groups')", True, 'from django.utils.translation import gettext as _\n'), ((211, 17, 211, 38), 'virtualization.models.Cluster.objects.all', 'Cluster.objects.all', ({}, {}), '()', False, 'from virtualization.models import Cluster, ClusterGroup, ClusterType\n'), ((213, 14, 213, 27), 'django.utils.translation.gettext', '_', ({(213, 16, 213, 26): '"""Clusters"""'}, {}), "('Clusters')", True, 'from django.utils.translation import gettext as _\n'), ((216, 17, 216, 42), 'tenancy.models.TenantGroup.objects.all', 'TenantGroup.objects.all', ({}, {}), '()', False, 'from tenancy.models import Tenant, TenantGroup\n'), ((218, 14, 218, 32), 'django.utils.translation.gettext', '_', ({(218, 16, 218, 31): '"""Tenant groups"""'}, {}), "('Tenant groups')", True, 'from django.utils.translation import gettext as _\n'), ((221, 17, 221, 37), 'tenancy.models.Tenant.objects.all', 'Tenant.objects.all', ({}, {}), '()', False, 'from tenancy.models import Tenant, TenantGroup\n'), ((223, 14, 223, 25), 'django.utils.translation.gettext', '_', ({(223, 16, 223, 24): '"""Tenant"""'}, {}), "('Tenant')", True, 'from django.utils.translation import gettext as _\n'), ((228, 14, 228, 23), 'django.utils.translation.gettext', '_', ({(228, 16, 228, 22): '"""Tags"""'}, {}), "('Tags')", True, 'from django.utils.translation import gettext as _\n'), ((235, 14, 235, 48), 'django.utils.translation.gettext', '_', ({(235, 16, 235, 47): '"""Has local config context data"""'}, {}), "('Has local config context data')", True, 'from django.utils.translation import gettext as _\n'), ((236, 15, 238, 9), 'utilities.forms.StaticSelect', 'StaticSelect', (), '', False, 'from utilities.forms import add_blank_choice, APISelectMultiple, BOOLEAN_WITH_BLANK_CHOICES, ContentTypeChoiceField, ContentTypeMultipleChoiceField, DateTimePicker, DynamicModelMultipleChoiceField, FilterForm, 
MultipleChoiceField, StaticSelect, TagFilterField\n'), ((251, 14, 251, 24), 'django.utils.translation.gettext', '_', ({(251, 16, 251, 23): '"""After"""'}, {}), "('After')", True, 'from django.utils.translation import gettext as _\n'), ((252, 15, 252, 31), 'utilities.forms.DateTimePicker', 'DateTimePicker', ({}, {}), '()', False, 'from utilities.forms import add_blank_choice, APISelectMultiple, BOOLEAN_WITH_BLANK_CHOICES, ContentTypeChoiceField, ContentTypeMultipleChoiceField, DateTimePicker, DynamicModelMultipleChoiceField, FilterForm, MultipleChoiceField, StaticSelect, TagFilterField\n'), ((256, 14, 256, 25), 'django.utils.translation.gettext', '_', ({(256, 16, 256, 24): '"""Before"""'}, {}), "('Before')", True, 'from django.utils.translation import gettext as _\n'), ((257, 15, 257, 31), 'utilities.forms.DateTimePicker', 'DateTimePicker', ({}, {}), '()', False, 'from utilities.forms import add_blank_choice, APISelectMultiple, BOOLEAN_WITH_BLANK_CHOICES, ContentTypeChoiceField, ContentTypeMultipleChoiceField, DateTimePicker, DynamicModelMultipleChoiceField, FilterForm, MultipleChoiceField, StaticSelect, TagFilterField\n'), ((260, 17, 260, 35), 'django.contrib.auth.models.User.objects.all', 'User.objects.all', ({}, {}), '()', False, 'from django.contrib.auth.models import User\n'), ((262, 14, 262, 23), 'django.utils.translation.gettext', '_', ({(262, 16, 262, 22): '"""User"""'}, {}), "('User')", True, 'from django.utils.translation import gettext as _\n'), ((263, 15, 265, 9), 'utilities.forms.APISelectMultiple', 'APISelectMultiple', (), '', False, 'from utilities.forms import add_blank_choice, APISelectMultiple, BOOLEAN_WITH_BLANK_CHOICES, ContentTypeChoiceField, ContentTypeMultipleChoiceField, DateTimePicker, DynamicModelMultipleChoiceField, FilterForm, MultipleChoiceField, StaticSelect, TagFilterField\n'), ((268, 17, 268, 42), 'django.contrib.contenttypes.models.ContentType.objects.all', 'ContentType.objects.all', ({}, {}), '()', False, 'from django.contrib.contenttypes.models import ContentType\n'), ((270, 14, 270, 30), 'django.utils.translation.gettext', '_', ({(270, 16, 270, 29): '"""Object Type"""'}, {}), "('Object Type')", True, 'from django.utils.translation import gettext as _\n'), ((271, 15, 273, 9), 'utilities.forms.APISelectMultiple', 'APISelectMultiple', (), '', False, 'from utilities.forms import add_blank_choice, APISelectMultiple, BOOLEAN_WITH_BLANK_CHOICES, ContentTypeChoiceField, ContentTypeMultipleChoiceField, DateTimePicker, DynamicModelMultipleChoiceField, FilterForm, MultipleChoiceField, StaticSelect, TagFilterField\n'), ((276, 16, 276, 57), 'utilities.forms.add_blank_choice', 'add_blank_choice', ({(276, 33, 276, 56): 'JournalEntryKindChoices'}, {}), '(JournalEntryKindChoices)', False, 'from utilities.forms import add_blank_choice, APISelectMultiple, BOOLEAN_WITH_BLANK_CHOICES, ContentTypeChoiceField, ContentTypeMultipleChoiceField, DateTimePicker, DynamicModelMultipleChoiceField, FilterForm, MultipleChoiceField, StaticSelect, TagFilterField\n'), ((278, 15, 278, 29), 'utilities.forms.StaticSelect', 'StaticSelect', ({}, {}), '()', False, 'from utilities.forms import add_blank_choice, APISelectMultiple, BOOLEAN_WITH_BLANK_CHOICES, ContentTypeChoiceField, ContentTypeMultipleChoiceField, DateTimePicker, DynamicModelMultipleChoiceField, FilterForm, MultipleChoiceField, StaticSelect, TagFilterField\n'), ((292, 14, 292, 24), 'django.utils.translation.gettext', '_', ({(292, 16, 292, 23): '"""After"""'}, {}), "('After')", True, 'from django.utils.translation import gettext as 
_\n'), ((293, 15, 293, 31), 'utilities.forms.DateTimePicker', 'DateTimePicker', ({}, {}), '()', False, 'from utilities.forms import add_blank_choice, APISelectMultiple, BOOLEAN_WITH_BLANK_CHOICES, ContentTypeChoiceField, ContentTypeMultipleChoiceField, DateTimePicker, DynamicModelMultipleChoiceField, FilterForm, MultipleChoiceField, StaticSelect, TagFilterField\n'), ((297, 14, 297, 25), 'django.utils.translation.gettext', '_', ({(297, 16, 297, 24): '"""Before"""'}, {}), "('Before')", True, 'from django.utils.translation import gettext as _\n'), ((298, 15, 298, 31), 'utilities.forms.DateTimePicker', 'DateTimePicker', ({}, {}), '()', False, 'from utilities.forms import add_blank_choice, APISelectMultiple, BOOLEAN_WITH_BLANK_CHOICES, ContentTypeChoiceField, ContentTypeMultipleChoiceField, DateTimePicker, DynamicModelMultipleChoiceField, FilterForm, MultipleChoiceField, StaticSelect, TagFilterField\n'), ((301, 16, 301, 59), 'utilities.forms.add_blank_choice', 'add_blank_choice', ({(301, 33, 301, 58): 'ObjectChangeActionChoices'}, {}), '(ObjectChangeActionChoices)', False, 'from utilities.forms import add_blank_choice, APISelectMultiple, BOOLEAN_WITH_BLANK_CHOICES, ContentTypeChoiceField, ContentTypeMultipleChoiceField, DateTimePicker, DynamicModelMultipleChoiceField, FilterForm, MultipleChoiceField, StaticSelect, TagFilterField\n'), ((303, 15, 303, 29), 'utilities.forms.StaticSelect', 'StaticSelect', ({}, {}), '()', False, 'from utilities.forms import add_blank_choice, APISelectMultiple, BOOLEAN_WITH_BLANK_CHOICES, ContentTypeChoiceField, ContentTypeMultipleChoiceField, DateTimePicker, DynamicModelMultipleChoiceField, FilterForm, MultipleChoiceField, StaticSelect, TagFilterField\n'), ((306, 17, 306, 35), 'django.contrib.auth.models.User.objects.all', 'User.objects.all', ({}, {}), '()', False, 'from django.contrib.auth.models import User\n'), ((308, 14, 308, 23), 'django.utils.translation.gettext', '_', ({(308, 16, 308, 22): '"""User"""'}, {}), "('User')", True, 'from django.utils.translation import gettext as _\n'), ((309, 15, 311, 9), 'utilities.forms.APISelectMultiple', 'APISelectMultiple', (), '', False, 'from utilities.forms import add_blank_choice, APISelectMultiple, BOOLEAN_WITH_BLANK_CHOICES, ContentTypeChoiceField, ContentTypeMultipleChoiceField, DateTimePicker, DynamicModelMultipleChoiceField, FilterForm, MultipleChoiceField, StaticSelect, TagFilterField\n'), ((314, 17, 314, 42), 'django.contrib.contenttypes.models.ContentType.objects.all', 'ContentType.objects.all', ({}, {}), '()', False, 'from django.contrib.contenttypes.models import ContentType\n'), ((316, 14, 316, 30), 'django.utils.translation.gettext', '_', ({(316, 16, 316, 29): '"""Object Type"""'}, {}), "('Object Type')", True, 'from django.utils.translation import gettext as _\n'), ((317, 15, 319, 9), 'utilities.forms.APISelectMultiple', 'APISelectMultiple', (), '', False, 'from utilities.forms import add_blank_choice, APISelectMultiple, BOOLEAN_WITH_BLANK_CHOICES, ContentTypeChoiceField, ContentTypeMultipleChoiceField, DateTimePicker, DynamicModelMultipleChoiceField, FilterForm, MultipleChoiceField, StaticSelect, TagFilterField\n'), ((155, 44, 155, 64), 'extras.utils.FeatureQuery', 'FeatureQuery', ({(155, 57, 155, 63): '"""tags"""'}, {}), "('tags')", False, 'from extras.utils import FeatureQuery\n')] |
docdiesel/smartmetertools | bin/write2cly.py | 3b7449c7a9069696af078631aa5440f53d0f57bc | #!/usr/bin/python3
## write2cly.py - reads json (generated by sml_reader.py) from stdin
## - writes values to Corlysis time series InfluxDB
##
## Writes data from smart meter to time series database (InfluxDB)
## at Corlysis.com [1]. You need to configure your database and token
## in the config section.
##
## [1] https://corlysis.com/
##==== license section ========
## This code is under MIT License: Copyright (C) 2019 Bernd Künnen
## License details see https://choosealicense.com/licenses/mit/
##==== config section ========
# define corlysis settings here - set db and token at least
cly_base_url = 'https://corlysis.com:8086/write'
cly_parameters = {
"db": "energy",
"u" : "token",
"p" : "placeyourtokenhere",
"precision": "ms"}
# assign readable field names
config = {
"1.8.0": "Bezug",
"2.8.0": "Einspeisung",
"16.7.0": "Wirkleistung"
}
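# With the mapping above, a single reading expands to one InfluxDB line such
# as (field values and timestamp purely illustrative):
#   meter_data Bezug=123.4,Einspeisung=56.7,Wirkleistung=230 1577836800000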
##==== code section ==== no need to change lines below ====
##-- import libraries
import json
import sys
import time
import requests
# load json from stdin
try:
myjson = json.load(sys.stdin)
except ValueError:  # json.JSONDecodeError is a subclass of ValueError
sys.stderr.write('!! error loading json')
exit(1)
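# Expected stdin shape, as assumed by the decode loop below (values are
# illustrative):
#   {"data": {"1.8.0": 123.4, "2.8.0": 56.7, "16.7.0": 230}}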
# decode json
try:
line = "meter_data "
# add each meter value to line
for obis in myjson['data']:
key = config[obis] # set human readable field name
value = myjson['data'][obis] # get value from smart meter
line += key + '=' + str(value) + ',' # add key=value to insert line
# cut off last comma
line = line[:-1]
# add timestamp as unix timestamp in ms
line += ' ' + str(int(time.time()*1000)) #+ '\n'
# post data into time series database; http response should be 204
r = requests.post(cly_base_url, params=cly_parameters, data=line)
  if r.status_code != 204:
    sys.stderr.write(str(r.status_code))
    sys.stderr.write(r.text)
# catch if json lacks the expected data block or an OBIS code is unmapped
except KeyError:
sys.stderr.write('!!error: no data block in json')
exit(2)
| [((45, 11, 45, 31), 'json.load', 'json.load', ({(45, 21, 45, 30): 'sys.stdin'}, {}), '(sys.stdin)', False, 'import json, sys, requests\n'), ((66, 6, 66, 67), 'requests.post', 'requests.post', (), '', False, 'import requests\n'), ((47, 2, 47, 43), 'sys.stderr.write', 'sys.stderr.write', ({(47, 19, 47, 42): '"""!! error loading json"""'}, {}), "('!! error loading json')", False, 'import json, sys, requests\n'), ((68, 4, 68, 35), 'sys.stderr.write', 'sys.stderr.write', ({(68, 21, 68, 34): 'r.status_code'}, {}), '(r.status_code)', False, 'import json, sys, requests\n'), ((69, 4, 69, 31), 'sys.stderr.write', 'sys.stderr.write', ({(69, 21, 69, 30): 'r.content'}, {}), '(r.content)', False, 'import json, sys, requests\n'), ((73, 2, 73, 52), 'sys.stderr.write', 'sys.stderr.write', ({(73, 19, 73, 51): '"""!!error: no data block in json"""'}, {}), "('!!error: no data block in json')", False, 'import json, sys, requests\n'), ((63, 24, 63, 35), 'time.time', 'time.time', ({}, {}), '()', False, 'import time\n')] |
Ashiq5/dnspython | dns/rdtypes/ANY/__init__.py | 5449af5318d88bada34f661247f3bcb16f58f057 | # Copyright (C) Dnspython Contributors, see LICENSE for text of ISC license
# Copyright (C) 2003-2007, 2009-2011 Nominum, Inc.
#
# Permission to use, copy, modify, and distribute this software and its
# documentation for any purpose with or without fee is hereby granted,
# provided that the above copyright notice and this permission notice
# appear in all copies.
#
# THE SOFTWARE IS PROVIDED "AS IS" AND NOMINUM DISCLAIMS ALL WARRANTIES
# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL NOMINUM BE LIABLE FOR
# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT
# OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
"""Class ANY (generic) rdata type classes."""
__all__ = [
'AFSDB',
'AMTRELAY',
'AVC',
'CAA',
'CDNSKEY',
'CDS',
'CERT',
'CNAME',
'CSYNC',
'DLV',
'DNAME',
'DNSKEY',
'DS',
'EUI48',
'EUI64',
'GPOS',
'HINFO',
'HIP',
'ISDN',
'LOC',
'MX',
'NINFO',
'NS',
'NSEC',
'NSEC3',
'NSEC3PARAM',
'OPENPGPKEY',
'OPT',
'PTR',
'RP',
'RRSIG',
'RT',
'SMIMEA',
'SOA',
'SPF',
'SSHFP',
'TKEY',
'TLSA',
'TSIG',
'TXT',
'URI',
'X25',
]
| [] |
KhubbatulinMark/DCase2020-Task-2-on-Wigner-Ville-transform | 01_test.py | 6653d7abbaafe09fb17768d9902bb77db24945d4 | """
@file 01_test.py
@brief Script for test
@author Toshiki Nakamura, Yuki Nikaido, and Yohei Kawaguchi (Hitachi Ltd.)
 Copyright (C) 2020 Hitachi, Ltd. All rights reserved.
"""
########################################################################
# import default python-library
########################################################################
import os
import glob
import csv
import re
import itertools
import sys
########################################################################
########################################################################
# import additional python-library
########################################################################
import numpy
# from import
from tqdm import tqdm
from sklearn import metrics
# original lib
import common as com
import keras_model
########################################################################
########################################################################
# load parameter.yaml
########################################################################
param = com.yaml_load()
#######################################################################
########################################################################
# def
########################################################################
def save_csv(save_file_path,
save_data):
with open(save_file_path, "w", newline="") as f:
writer = csv.writer(f, lineterminator='\n')
writer.writerows(save_data)
def get_machine_id_list_for_test(target_dir,
dir_name="test",
ext="json"):
"""
target_dir : str
base directory path of "dev_data" or "eval_data"
test_dir_name : str (default="test")
directory containing test data
    ext : str (default="json")
        file extension of data files
return :
machine_id_list : list [ str ]
list of machine IDs extracted from the names of test files
"""
# create test files
dir_path = os.path.abspath("{dir}/{dir_name}/*.{ext}".format(dir=target_dir, dir_name=dir_name, ext=ext))
file_paths = sorted(glob.glob(dir_path))
# extract id
machine_id_list = sorted(list(set(itertools.chain.from_iterable(
[re.findall('id_[0-9][0-9]', ext_id) for ext_id in file_paths]))))
return machine_id_list
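# Illustrative example: test files named like "normal_id_01_00000000.json" and
# "anomaly_id_02_00000005.json" under <target_dir>/test would yield
# ['id_01', 'id_02'].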
def test_file_list_generator(target_dir,
id_name,
dir_name="test",
prefix_normal="normal",
prefix_anomaly="anomaly",
ext="json"):
"""
target_dir : str
base directory path of the dev_data or eval_data
id_name : str
id of wav file in <<test_dir_name>> directory
dir_name : str (default="test")
directory containing test data
prefix_normal : str (default="normal")
normal directory name
prefix_anomaly : str (default="anomaly")
anomaly directory name
    ext : str (default="json")
        file extension of data files
return :
if the mode is "development":
test_files : list [ str ]
file list for test
test_labels : list [ boolean ]
label info. list for test
* normal/anomaly = 0/1
if the mode is "evaluation":
test_files : list [ str ]
file list for test
"""
com.logger.info("target_dir : {}".format(target_dir+"_"+id_name))
# development
if mode:
normal_files = sorted(
glob.glob("{dir}/{dir_name}/{prefix_normal}_{id_name}*.{ext}".format(dir=target_dir,
dir_name=dir_name,
prefix_normal=prefix_normal,
id_name=id_name,
ext=ext)))
normal_labels = numpy.zeros(len(normal_files))
anomaly_files = sorted(
glob.glob("{dir}/{dir_name}/{prefix_anomaly}_{id_name}*.{ext}".format(dir=target_dir,
dir_name=dir_name,
prefix_anomaly=prefix_anomaly,
id_name=id_name,
ext=ext)))
anomaly_labels = numpy.ones(len(anomaly_files))
files = numpy.concatenate((normal_files, anomaly_files), axis=0)
labels = numpy.concatenate((normal_labels, anomaly_labels), axis=0)
com.logger.info("test_file num : {num}".format(num=len(files)))
if len(files) == 0:
com.logger.exception("no_wav_file!!")
print("\n========================================")
# evaluation
else:
files = sorted(
glob.glob("{dir}/{dir_name}/*{id_name}*.{ext}".format(dir=target_dir,
dir_name=dir_name,
id_name=id_name,
ext=ext)))
labels = None
com.logger.info("test_file num : {num}".format(num=len(files)))
if len(files) == 0:
com.logger.exception("no_wav_file!!")
print("\n=========================================")
return files, labels
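# In development mode the two return values are parallel arrays, e.g.
# files = [normal..., anomaly...] with labels = [0., ..., 0., 1., ..., 1.]
# (0 = normal, 1 = anomaly); in evaluation mode labels is None.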
########################################################################
########################################################################
# main 01_test.py
########################################################################
if __name__ == "__main__":
# check mode
# "development": mode == True
# "evaluation": mode == False
mode = com.command_line_chk()
if mode is None:
sys.exit(-1)
# make output result directory
os.makedirs(param["result_directory"], exist_ok=True)
# load base directory
dirs = com.select_dirs(param=param, mode=mode)
# initialize lines in csv for AUC and pAUC
csv_lines = []
# loop of the base directory
for idx, target_dir in enumerate(dirs):
print("\n===========================")
print("[{idx}/{total}] {dirname}".format(dirname=target_dir, idx=idx+1, total=len(dirs)))
machine_type = os.path.split(target_dir)[1]
print("============== MODEL LOAD ==============")
# set model path
model_file = "{model}/model_{machine_type}.hdf5".format(model=param["model_directory"],
machine_type=machine_type)
# load model file
if not os.path.exists(model_file):
com.logger.error("{} model not found ".format(machine_type))
sys.exit(-1)
model = keras_model.load_model(model_file)
model.summary()
if mode:
# results by type
csv_lines.append([machine_type])
csv_lines.append(["id", "AUC", "pAUC"])
performance = []
machine_id_list = get_machine_id_list_for_test(target_dir)
print(machine_id_list)
for id_str in machine_id_list:
# load test file
test_files, y_true = test_file_list_generator(target_dir, id_str)
# setup anomaly score file path
anomaly_score_csv = "{result}/anomaly_score_{machine_type}_{id_str}.csv".format(
result=param["result_directory"],
machine_type=machine_type,
id_str=id_str)
anomaly_score_list = []
print("\n============== BEGIN TEST FOR A MACHINE ID ==============")
y_pred = [0. for k in test_files]
for file_idx, file_path in tqdm(enumerate(test_files), total=len(test_files)):
try:
data = com.file_to_vector_array(file_path,
n_mels=param["feature"]["n_mels"],
frames=param["feature"]["frames"],
n_fft=param["feature"]["n_fft"],
hop_length=param["feature"]["hop_length"],
power=param["feature"]["power"])
errors = numpy.mean(numpy.square(data - model.predict(data)), axis=1)
y_pred[file_idx] = numpy.mean(errors)
anomaly_score_list.append([os.path.basename(file_path), y_pred[file_idx]])
                except Exception:
com.logger.error("file broken!!: {}".format(file_path))
# save anomaly score
save_csv(save_file_path=anomaly_score_csv, save_data=anomaly_score_list)
com.logger.info("anomaly score result -> {}".format(anomaly_score_csv))
if mode:
# append AUC and pAUC to lists
auc = metrics.roc_auc_score(y_true, y_pred)
p_auc = metrics.roc_auc_score(y_true, y_pred, max_fpr=param["max_fpr"])
csv_lines.append([id_str.split("_", 1)[1], auc, p_auc])
performance.append([auc, p_auc])
com.logger.info("AUC : {}".format(auc))
com.logger.info("pAUC : {}".format(p_auc))
print("\n============ END OF TEST FOR A MACHINE ID ============")
if mode:
# calculate averages for AUCs and pAUCs
averaged_performance = numpy.mean(numpy.array(performance, dtype=float), axis=0)
csv_lines.append(["Average"] + list(averaged_performance))
csv_lines.append([])
if mode:
# output results
result_path = "{result}/{file_name}".format(result=param["result_directory"], file_name=param["result_file"])
com.logger.info("AUC and pAUC results -> {}".format(result_path))
save_csv(save_file_path=result_path, save_data=csv_lines)
| [((36, 8, 36, 23), 'common.yaml_load', 'com.yaml_load', ({}, {}), '()', True, 'import common as com\n'), ((154, 11, 154, 33), 'common.command_line_chk', 'com.command_line_chk', ({}, {}), '()', True, 'import common as com\n'), ((159, 4, 159, 57), 'os.makedirs', 'os.makedirs', (), '', False, 'import os\n'), ((162, 11, 162, 50), 'common.select_dirs', 'com.select_dirs', (), '', True, 'import common as com\n'), ((46, 17, 46, 51), 'csv.writer', 'csv.writer', (), '', False, 'import csv\n'), ((67, 24, 67, 43), 'glob.glob', 'glob.glob', ({(67, 34, 67, 42): 'dir_path'}, {}), '(dir_path)', False, 'import glob\n'), ((123, 16, 123, 72), 'numpy.concatenate', 'numpy.concatenate', (), '', False, 'import numpy\n'), ((124, 17, 124, 75), 'numpy.concatenate', 'numpy.concatenate', (), '', False, 'import numpy\n'), ((156, 8, 156, 20), 'sys.exit', 'sys.exit', ({(156, 17, 156, 19): '(-1)'}, {}), '(-1)', False, 'import sys\n'), ((182, 16, 182, 50), 'keras_model.load_model', 'keras_model.load_model', ({(182, 39, 182, 49): 'model_file'}, {}), '(model_file)', False, 'import keras_model\n'), ((127, 12, 127, 49), 'common.logger.exception', 'com.logger.exception', ({(127, 33, 127, 48): '"""no_wav_file!!"""'}, {}), "('no_wav_file!!')", True, 'import common as com\n'), ((140, 12, 140, 49), 'common.logger.exception', 'com.logger.exception', ({(140, 33, 140, 48): '"""no_wav_file!!"""'}, {}), "('no_wav_file!!')", True, 'import common as com\n'), ((171, 23, 171, 48), 'os.path.split', 'os.path.split', ({(171, 37, 171, 47): 'target_dir'}, {}), '(target_dir)', False, 'import os\n'), ((179, 15, 179, 41), 'os.path.exists', 'os.path.exists', ({(179, 30, 179, 40): 'model_file'}, {}), '(model_file)', False, 'import os\n'), ((181, 12, 181, 24), 'sys.exit', 'sys.exit', ({(181, 21, 181, 23): '(-1)'}, {}), '(-1)', False, 'import sys\n'), ((227, 22, 227, 59), 'sklearn.metrics.roc_auc_score', 'metrics.roc_auc_score', ({(227, 44, 227, 50): 'y_true', (227, 52, 227, 58): 'y_pred'}, {}), '(y_true, y_pred)', False, 'from sklearn import metrics\n'), ((228, 24, 228, 87), 'sklearn.metrics.roc_auc_score', 'metrics.roc_auc_score', (), '', False, 'from sklearn import metrics\n'), ((238, 46, 238, 83), 'numpy.array', 'numpy.array', (), '', False, 'import numpy\n'), ((209, 27, 214, 84), 'common.file_to_vector_array', 'com.file_to_vector_array', (), '', True, 'import common as com\n'), ((216, 39, 216, 57), 'numpy.mean', 'numpy.mean', ({(216, 50, 216, 56): 'errors'}, {}), '(errors)', False, 'import numpy\n'), ((70, 9, 70, 44), 're.findall', 're.findall', ({(70, 20, 70, 35): '"""id_[0-9][0-9]"""', (70, 37, 70, 43): 'ext_id'}, {}), "('id_[0-9][0-9]', ext_id)", False, 'import re\n'), ((217, 47, 217, 74), 'os.path.basename', 'os.path.basename', ({(217, 64, 217, 73): 'file_path'}, {}), '(file_path)', False, 'import os\n')] |
i1123581321/word_split | src/text_split/split.py | 6401cdc37f58aa8718793dd7cb9bf4d3a4b690a4 | import argparse
import os
parser = argparse.ArgumentParser(description="a simple parser")
parser.add_argument("filename", type=str)
parser.add_argument("lineno", nargs="+", type=int)
parser.add_argument("--same_length", action=argparse.BooleanOptionalAction)
def main():
args = parser.parse_args()
filename = args.filename
linenos = args.lineno
same_length = args.same_length
linenos = list(map(lambda x: x - 1, linenos))
linenos.sort()
results = []
with open(filename, "r", encoding="utf-8") as f:
content = f.readlines()
if not same_length:
start = 0
for lineno in linenos:
results.append("".join(content[start:lineno]))
start = lineno
results.append("".join(content[start:]))
else:
lineno = linenos[0] + 1 if linenos[0] else 100000
start = 0
while start < len(content):
results.append("".join(content[start: start + lineno]))
start += lineno
name, ext = os.path.splitext(filename)
for i, result in enumerate(results):
with open(f"{name}-{i + 1:02}{ext}", "w", encoding="utf-8") as f:
f.write(result)
| [((4, 9, 4, 63), 'argparse.ArgumentParser', 'argparse.ArgumentParser', (), '', False, 'import argparse\n'), ((36, 20, 36, 46), 'os.path.splitext', 'os.path.splitext', ({(36, 37, 36, 45): 'filename'}, {}), '(filename)', False, 'import os\n')] |
Ali-Tahir/sentry | src/sentry/models/event.py | aa7b306c5ea671ac002a3524982563679557cb31 | from __future__ import absolute_import
import six
import string
import warnings
import pytz
from collections import OrderedDict
from dateutil.parser import parse as parse_date
from django.db import models
from django.utils import timezone
from django.utils.translation import ugettext_lazy as _
from hashlib import md5
from semaphore.processing import StoreNormalizer
from sentry import eventtypes
from sentry.db.models import (
BoundedBigIntegerField,
BoundedIntegerField,
Model,
NodeData,
NodeField,
sane_repr,
)
from sentry.db.models.manager import EventManager
from sentry.interfaces.base import get_interfaces
from sentry.utils import json
from sentry.utils.cache import memoize
from sentry.utils.canonical import CanonicalKeyDict, CanonicalKeyView
from sentry.utils.safe import get_path
from sentry.utils.strings import truncatechars
class EventDict(CanonicalKeyDict):
"""
Creating an instance of this dictionary will send the event through basic
(Rust-based) type/schema validation called "re-normalization".
This is used as a wrapper type for `Event.data` such that creating an event
object (or loading it from the DB) will ensure the data fits the type
schema.
"""
def __init__(self, data, skip_renormalization=False, **kwargs):
is_renormalized = isinstance(data, EventDict) or (
isinstance(data, NodeData) and isinstance(data.data, EventDict)
)
if not skip_renormalization and not is_renormalized:
normalizer = StoreNormalizer(is_renormalize=True, enable_trimming=False)
data = normalizer.normalize_event(dict(data))
CanonicalKeyDict.__init__(self, data, **kwargs)
class EventCommon(object):
"""
Methods and properties common to both Event and SnubaEvent.
"""
@classmethod
def generate_node_id(cls, project_id, event_id):
"""
Returns a deterministic node_id for this event based on the project_id
and event_id which together are globally unique. The event body should
be saved under this key in nodestore so it can be retrieved using the
same generated id when we only have project_id and event_id.
"""
return md5("{}:{}".format(project_id, event_id)).hexdigest()
# TODO (alex) We need a better way to cache these properties. functools32
# doesn't quite do the trick as there is a reference bug with unsaved
# models. But the current _group_cache thing is also clunky because these
# properties need to be stripped out in __getstate__.
@property
def group(self):
from sentry.models import Group
if not self.group_id:
return None
if not hasattr(self, "_group_cache"):
self._group_cache = Group.objects.get(id=self.group_id)
return self._group_cache
@group.setter
def group(self, group):
self.group_id = group.id
self._group_cache = group
@property
def project(self):
from sentry.models import Project
if not hasattr(self, "_project_cache"):
self._project_cache = Project.objects.get(id=self.project_id)
return self._project_cache
@project.setter
def project(self, project):
if project is None:
self.project_id = None
else:
self.project_id = project.id
self._project_cache = project
def get_interfaces(self):
return CanonicalKeyView(get_interfaces(self.data))
@memoize
def interfaces(self):
return self.get_interfaces()
def get_interface(self, name):
return self.interfaces.get(name)
def get_legacy_message(self):
# TODO(mitsuhiko): remove this code once it's unused. It's still
# being used by plugin code and once the message rename is through
# plugins should instead swithc to the actual message attribute or
# this method could return what currently is real_message.
return (
get_path(self.data, "logentry", "formatted")
or get_path(self.data, "logentry", "message")
or self.message
)
def get_event_type(self):
"""
Return the type of this event.
See ``sentry.eventtypes``.
"""
return self.data.get("type", "default")
def get_event_metadata(self):
"""
Return the metadata of this event.
See ``sentry.eventtypes``.
"""
# For some inexplicable reason we have some cases where the data
# is completely empty. In that case we want to hobble along
# further.
return self.data.get("metadata") or {}
def get_grouping_config(self):
"""Returns the event grouping config."""
from sentry.grouping.api import get_grouping_config_dict_for_event_data
return get_grouping_config_dict_for_event_data(self.data, self.project)
def get_hashes(self, force_config=None):
"""
Returns the calculated hashes for the event. This uses the stored
information if available. Grouping hashes will take into account
fingerprinting and checksums.
"""
# If we have hashes stored in the data we use them, otherwise we
# fall back to generating new ones from the data. We can only use
# this if we do not force a different config.
if force_config is None:
hashes = self.data.get("hashes")
if hashes is not None:
return hashes
return filter(
None, [x.get_hash() for x in self.get_grouping_variants(force_config).values()]
)
def get_grouping_variants(self, force_config=None, normalize_stacktraces=False):
"""
This is similar to `get_hashes` but will instead return the
grouping components for each variant in a dictionary.
If `normalize_stacktraces` is set to `True` then the event data will be
modified for `in_app` in addition to event variants being created. This
means that after calling that function the event data has been modified
in place.
"""
from sentry.grouping.api import get_grouping_variants_for_event, load_grouping_config
from sentry.stacktraces.processing import normalize_stacktraces_for_grouping
# Forcing configs has two separate modes. One is where just the
# config ID is given in which case it's merged with the stored or
# default config dictionary
if force_config is not None:
if isinstance(force_config, six.string_types):
stored_config = self.get_grouping_config()
config = dict(stored_config)
config["id"] = force_config
else:
config = force_config
# Otherwise we just use the same grouping config as stored. if
# this is None the `get_grouping_variants_for_event` will fill in
# the default.
else:
config = self.data.get("grouping_config")
config = load_grouping_config(config)
if normalize_stacktraces:
normalize_stacktraces_for_grouping(self.data, config)
return get_grouping_variants_for_event(self, config)
def get_primary_hash(self):
# TODO: This *might* need to be protected from an IndexError?
return self.get_hashes()[0]
@property
def title(self):
# also see event_manager.py which inserts this for snuba
et = eventtypes.get(self.get_event_type())()
return et.get_title(self.get_event_metadata())
@property
def culprit(self):
# For a while events did not save the culprit
if self.group_id:
return self.data.get("culprit") or self.group.culprit
return self.data.get("culprit")
@property
def location(self):
# also see event_manager.py which inserts this for snuba
et = eventtypes.get(self.get_event_type())()
return et.get_location(self.get_event_metadata())
@property
def real_message(self):
# XXX(mitsuhiko): this is a transitional attribute that should be
# removed. `message` will be renamed to `search_message` and this
# will become `message`.
return (
get_path(self.data, "logentry", "formatted")
or get_path(self.data, "logentry", "message")
or ""
)
@property
def organization(self):
return self.project.organization
@property
def version(self):
return self.data.get("version", "5")
@property
def ip_address(self):
ip_address = get_path(self.data, "user", "ip_address")
if ip_address:
return ip_address
remote_addr = get_path(self.data, "request", "env", "REMOTE_ADDR")
if remote_addr:
return remote_addr
return None
@property
def tags(self):
try:
rv = sorted(
[
(t, v)
for t, v in get_path(self.data, "tags", filter=True) or ()
if t is not None and v is not None
]
)
return rv
except ValueError:
# at one point Sentry allowed invalid tag sets such as (foo, bar)
# vs ((tag, foo), (tag, bar))
return []
# For compatibility, still used by plugins.
def get_tags(self):
return self.tags
def get_tag(self, key):
for t, v in self.get_tags():
if t == key:
return v
return None
@property
def release(self):
return self.get_tag("sentry:release")
@property
def dist(self):
return self.get_tag("sentry:dist")
def get_raw_data(self):
"""Returns the internal raw event data dict."""
return dict(self.data.items())
@property
def size(self):
return len(json.dumps(dict(self.data)))
@property
def transaction(self):
return self.get_tag("transaction")
def get_email_subject(self):
template = self.project.get_option("mail:subject_template")
if template:
template = EventSubjectTemplate(template)
else:
template = DEFAULT_SUBJECT_TEMPLATE
return truncatechars(template.safe_substitute(EventSubjectTemplateData(self)), 128).encode(
"utf-8"
)
def get_environment(self):
from sentry.models import Environment
if not hasattr(self, "_environment_cache"):
self._environment_cache = Environment.objects.get(
organization_id=self.project.organization_id,
name=Environment.get_name_or_default(self.get_tag("environment")),
)
return self._environment_cache
def get_minimal_user(self):
"""
A minimal 'User' interface object that gives us enough information
to render a user badge.
"""
return self.get_interface("user")
def as_dict(self):
"""Returns the data in normalized form for external consumers."""
# We use a OrderedDict to keep elements ordered for a potential JSON serializer
data = OrderedDict()
data["event_id"] = self.event_id
data["project"] = self.project_id
data["release"] = self.release
data["dist"] = self.dist
data["platform"] = self.platform
data["message"] = self.real_message
data["datetime"] = self.datetime
data["time_spent"] = self.time_spent
data["tags"] = [(k.split("sentry:", 1)[-1], v) for (k, v) in self.tags]
for k, v in sorted(six.iteritems(self.data)):
if k in data:
continue
if k == "sdk":
v = {v_k: v_v for v_k, v_v in six.iteritems(v) if v_k != "client_ip"}
data[k] = v
# for a long time culprit was not persisted. In those cases put
# the culprit in from the group.
if data.get("culprit") is None and self.group_id:
data["culprit"] = self.group.culprit
# Override title and location with dynamically generated data
data["title"] = self.title
data["location"] = self.location
return data
# ============================================
# DEPRECATED
# ============================================
@property
def level(self):
# we might want to move to this:
# return LOG_LEVELS_MAP.get(self.get_level_display()) or self.group.level
if self.group:
return self.group.level
else:
return None
def get_level_display(self):
# we might want to move to this:
# return self.get_tag('level') or self.group.get_level_display()
if self.group:
return self.group.get_level_display()
else:
return None
# deprecated accessors
@property
def logger(self):
warnings.warn("Event.logger is deprecated. Use Event.tags instead.", DeprecationWarning)
return self.get_tag("logger")
@property
def site(self):
warnings.warn("Event.site is deprecated. Use Event.tags instead.", DeprecationWarning)
return self.get_tag("site")
@property
def server_name(self):
warnings.warn(
"Event.server_name is deprecated. Use Event.tags instead.", DeprecationWarning
)
return self.get_tag("server_name")
@property
def checksum(self):
warnings.warn("Event.checksum is no longer used", DeprecationWarning)
return ""
def error(self): # TODO why is this not a property?
warnings.warn("Event.error is deprecated, use Event.title", DeprecationWarning)
return self.title
error.short_description = _("error")
@property
def message_short(self):
warnings.warn("Event.message_short is deprecated, use Event.title", DeprecationWarning)
return self.title
class SnubaEvent(EventCommon):
"""
An event backed by data stored in snuba.
This is a readonly event and does not support event creation or save.
The basic event data is fetched from snuba, and the event body is
fetched from nodestore and bound to the data property in the same way
as a regular Event.
"""
# The minimal list of columns we need to get from snuba to bootstrap an
# event. If the client is planning on loading the entire event body from
# nodestore anyway, we may as well only fetch the minimum from snuba to
# avoid duplicated work.
minimal_columns = ["event_id", "group_id", "project_id", "timestamp"]
# A list of all useful columns we can get from snuba.
selected_columns = minimal_columns + [
"culprit",
"location",
"message",
"platform",
"title",
"type",
# Required to provide snuba-only tags
"tags.key",
"tags.value",
# Required to provide snuba-only 'user' interface
"email",
"ip_address",
"user_id",
"username",
]
__repr__ = sane_repr("project_id", "group_id")
def __init__(self, snuba_values):
"""
When initializing a SnubaEvent, think about the attributes you
might need to access on it. If you only need a few properties, and
they are all available in snuba, then you should use
`SnubaEvent.selected_colums` (or a subset depending on your needs)
But if you know you are going to need the entire event body anyway
(which requires a nodestore lookup) you may as well just initialize
the event with `SnubaEvent.minimal_colums` and let the rest of of
the attributes come from nodestore.
"""
assert all(k in snuba_values for k in SnubaEvent.minimal_columns)
# self.snuba_data is a dict of all the stuff we got from snuba
self.snuba_data = snuba_values
# self.data is a (lazy) dict of everything we got from nodestore
node_id = SnubaEvent.generate_node_id(
self.snuba_data["project_id"], self.snuba_data["event_id"]
)
self.data = NodeData(None, node_id, data=None, wrapper=EventDict)
def __getattr__(self, name):
"""
Depending on what snuba data this event was initialized with, we may
have the data available to return, or we may have to look in the
`data` dict (which would force a nodestore load). All unresolved
self.foo type accesses will come through here.
"""
if name in ("_project_cache", "_group_cache", "_environment_cache"):
raise AttributeError()
if name in self.snuba_data:
return self.snuba_data[name]
else:
return self.data[name]
# ============================================
# Snuba-only implementations of properties that
# would otherwise require nodestore data.
# ============================================
@property
def tags(self):
"""
Override of tags property that uses tags from snuba rather than
the nodestore event body. This might be useful for implementing
tag deletions without having to rewrite nodestore blobs.
"""
if "tags.key" in self.snuba_data and "tags.value" in self.snuba_data:
keys = getattr(self, "tags.key")
values = getattr(self, "tags.value")
if keys and values and len(keys) == len(values):
return sorted(zip(keys, values))
else:
return []
else:
return super(SnubaEvent, self).tags
def get_minimal_user(self):
from sentry.interfaces.user import User
return User.to_python(
{
"id": self.user_id,
"email": self.email,
"username": self.username,
"ip_address": self.ip_address,
}
)
# If the data for these is available from snuba, we assume
# it was already normalized on the way in and we can just return
# it, otherwise we defer to EventCommon implementation.
def get_event_type(self):
if "type" in self.snuba_data:
return self.snuba_data["type"]
return super(SnubaEvent, self).get_event_type()
@property
def ip_address(self):
if "ip_address" in self.snuba_data:
return self.snuba_data["ip_address"]
return super(SnubaEvent, self).ip_address
@property
def title(self):
if "title" in self.snuba_data:
return self.snuba_data["title"]
return super(SnubaEvent, self).title
@property
def culprit(self):
if "culprit" in self.snuba_data:
return self.snuba_data["culprit"]
return super(SnubaEvent, self).culprit
@property
def location(self):
if "location" in self.snuba_data:
return self.snuba_data["location"]
return super(SnubaEvent, self).location
# ====================================================
# Snuba implementations of the django fields on Event
# ====================================================
@property
def datetime(self):
"""
Reconstruct the datetime of this event from the snuba timestamp
"""
# dateutil seems to use tzlocal() instead of UTC even though the string
# ends with '+00:00', so just replace the TZ with UTC because we know
# all timestamps from snuba are UTC.
return parse_date(self.timestamp).replace(tzinfo=pytz.utc)
@property
def time_spent(self):
return None
@property
def message(self):
if "message" in self.snuba_data:
return self.snuba_data["message"]
return self.data.get("message")
@property
def platform(self):
if "platform" in self.snuba_data:
return self.snuba_data["platform"]
return self.data.get("platform")
@property
def id(self):
# Because a snuba event will never have a django row id, just return
# the hex event_id here. We should be moving to a world where we never
# have to reference the row id anyway.
return self.event_id
def save(self):
raise NotImplementedError
class Event(EventCommon, Model):
"""
An event backed by data stored in postgres.
"""
__core__ = False
group_id = BoundedBigIntegerField(blank=True, null=True)
event_id = models.CharField(max_length=32, null=True, db_column="message_id")
project_id = BoundedBigIntegerField(blank=True, null=True)
message = models.TextField()
platform = models.CharField(max_length=64, null=True)
datetime = models.DateTimeField(default=timezone.now, db_index=True)
time_spent = BoundedIntegerField(null=True)
data = NodeField(
blank=True,
null=True,
ref_func=lambda x: x.project_id or x.project.id,
ref_version=2,
wrapper=EventDict,
)
objects = EventManager()
class Meta:
app_label = "sentry"
db_table = "sentry_message"
verbose_name = _("message")
verbose_name_plural = _("messages")
unique_together = (("project_id", "event_id"),)
index_together = (("group_id", "datetime"),)
__repr__ = sane_repr("project_id", "group_id")
def __getstate__(self):
state = Model.__getstate__(self)
# do not pickle cached info. We want to fetch this on demand
# again. In particular if we were to pickle interfaces we would
# pickle a CanonicalKeyView which old sentry workers do not know
# about
state.pop("_project_cache", None)
state.pop("_environment_cache", None)
state.pop("_group_cache", None)
state.pop("interfaces", None)
return state
class EventSubjectTemplate(string.Template):
idpattern = r"(tag:)?[_a-z][_a-z0-9]*"
class EventSubjectTemplateData(object):
tag_aliases = {"release": "sentry:release", "dist": "sentry:dist", "user": "sentry:user"}
def __init__(self, event):
self.event = event
def __getitem__(self, name):
if name.startswith("tag:"):
name = name[4:]
value = self.event.get_tag(self.tag_aliases.get(name, name))
if value is None:
raise KeyError
return six.text_type(value)
elif name == "project":
return self.event.project.get_full_name()
elif name == "projectID":
return self.event.project.slug
elif name == "shortID" and self.event.group_id:
return self.event.group.qualified_short_id
elif name == "orgID":
return self.event.organization.slug
elif name == "title":
return self.event.title
raise KeyError
DEFAULT_SUBJECT_TEMPLATE = EventSubjectTemplate("$shortID - $title")
| [((415, 30, 415, 40), 'django.utils.translation.ugettext_lazy', '_', ({(415, 32, 415, 39): '"""error"""'}, {}), "('error')", True, 'from django.utils.translation import ugettext_lazy as _\n'), ((457, 15, 457, 50), 'sentry.db.models.sane_repr', 'sane_repr', ({(457, 25, 457, 37): '"""project_id"""', (457, 39, 457, 49): '"""group_id"""'}, {}), "('project_id', 'group_id')", False, 'from sentry.db.models import BoundedBigIntegerField, BoundedIntegerField, Model, NodeData, NodeField, sane_repr\n'), ((609, 15, 609, 60), 'sentry.db.models.BoundedBigIntegerField', 'BoundedBigIntegerField', (), '', False, 'from sentry.db.models import BoundedBigIntegerField, BoundedIntegerField, Model, NodeData, NodeField, sane_repr\n'), ((610, 15, 610, 81), 'django.db.models.CharField', 'models.CharField', (), '', False, 'from django.db import models\n'), ((611, 17, 611, 62), 'sentry.db.models.BoundedBigIntegerField', 'BoundedBigIntegerField', (), '', False, 'from sentry.db.models import BoundedBigIntegerField, BoundedIntegerField, Model, NodeData, NodeField, sane_repr\n'), ((612, 14, 612, 32), 'django.db.models.TextField', 'models.TextField', ({}, {}), '()', False, 'from django.db import models\n'), ((613, 15, 613, 57), 'django.db.models.CharField', 'models.CharField', (), '', False, 'from django.db import models\n'), ((614, 15, 614, 72), 'django.db.models.DateTimeField', 'models.DateTimeField', (), '', False, 'from django.db import models\n'), ((615, 17, 615, 47), 'sentry.db.models.BoundedIntegerField', 'BoundedIntegerField', (), '', False, 'from sentry.db.models import BoundedBigIntegerField, BoundedIntegerField, Model, NodeData, NodeField, sane_repr\n'), ((616, 11, 622, 5), 'sentry.db.models.NodeField', 'NodeField', (), '', False, 'from sentry.db.models import BoundedBigIntegerField, BoundedIntegerField, Model, NodeData, NodeField, sane_repr\n'), ((624, 14, 624, 28), 'sentry.db.models.manager.EventManager', 'EventManager', ({}, {}), '()', False, 'from sentry.db.models.manager import EventManager\n'), ((634, 15, 634, 50), 'sentry.db.models.sane_repr', 'sane_repr', ({(634, 25, 634, 37): '"""project_id"""', (634, 39, 634, 49): '"""group_id"""'}, {}), "('project_id', 'group_id')", False, 'from sentry.db.models import BoundedBigIntegerField, BoundedIntegerField, Model, NodeData, NodeField, sane_repr\n'), ((54, 8, 54, 55), 'sentry.utils.canonical.CanonicalKeyDict.__init__', 'CanonicalKeyDict.__init__', ({(54, 34, 54, 38): 'self', (54, 40, 54, 44): 'data'}, {}), '(self, data, **kwargs)', False, 'from sentry.utils.canonical import CanonicalKeyDict, CanonicalKeyView\n'), ((151, 15, 151, 79), 'sentry.grouping.api.get_grouping_config_dict_for_event_data', 'get_grouping_config_dict_for_event_data', ({(151, 55, 151, 64): 'self.data', (151, 66, 151, 78): 'self.project'}, {}), '(self.data, self.project)', False, 'from sentry.grouping.api import get_grouping_config_dict_for_event_data\n'), ((201, 17, 201, 45), 'sentry.grouping.api.load_grouping_config', 'load_grouping_config', ({(201, 38, 201, 44): 'config'}, {}), '(config)', False, 'from sentry.grouping.api import get_grouping_variants_for_event, load_grouping_config\n'), ((205, 15, 205, 60), 'sentry.grouping.api.get_grouping_variants_for_event', 'get_grouping_variants_for_event', ({(205, 47, 205, 51): 'self', (205, 53, 205, 59): 'config'}, {}), '(self, config)', False, 'from sentry.grouping.api import get_grouping_variants_for_event, load_grouping_config\n'), ((251, 21, 251, 62), 'sentry.utils.safe.get_path', 'get_path', ({(251, 30, 251, 39): 'self.data', (251, 41, 251, 
47): '"""user"""', (251, 49, 251, 61): '"""ip_address"""'}, {}), "(self.data, 'user', 'ip_address')", False, 'from sentry.utils.safe import get_path\n'), ((255, 22, 255, 74), 'sentry.utils.safe.get_path', 'get_path', ({(255, 31, 255, 40): 'self.data', (255, 42, 255, 51): '"""request"""', (255, 53, 255, 58): '"""env"""', (255, 60, 255, 73): '"""REMOTE_ADDR"""'}, {}), "(self.data, 'request', 'env', 'REMOTE_ADDR')", False, 'from sentry.utils.safe import get_path\n'), ((338, 15, 338, 28), 'collections.OrderedDict', 'OrderedDict', ({}, {}), '()', False, 'from collections import OrderedDict\n'), ((391, 8, 391, 96), 'warnings.warn', 'warnings.warn', ({(391, 22, 391, 75): '"""Event.logger is deprecated. Use Event.tags instead."""', (391, 77, 391, 95): 'DeprecationWarning'}, {}), "('Event.logger is deprecated. Use Event.tags instead.',\n DeprecationWarning)", False, 'import warnings\n'), ((396, 8, 396, 94), 'warnings.warn', 'warnings.warn', ({(396, 22, 396, 73): '"""Event.site is deprecated. Use Event.tags instead."""', (396, 75, 396, 93): 'DeprecationWarning'}, {}), "('Event.site is deprecated. Use Event.tags instead.',\n DeprecationWarning)", False, 'import warnings\n'), ((401, 8, 403, 9), 'warnings.warn', 'warnings.warn', ({(402, 12, 402, 70): '"""Event.server_name is deprecated. Use Event.tags instead."""', (402, 72, 402, 90): 'DeprecationWarning'}, {}), "('Event.server_name is deprecated. Use Event.tags instead.',\n DeprecationWarning)", False, 'import warnings\n'), ((408, 8, 408, 77), 'warnings.warn', 'warnings.warn', ({(408, 22, 408, 56): '"""Event.checksum is no longer used"""', (408, 58, 408, 76): 'DeprecationWarning'}, {}), "('Event.checksum is no longer used', DeprecationWarning)", False, 'import warnings\n'), ((412, 8, 412, 87), 'warnings.warn', 'warnings.warn', ({(412, 22, 412, 66): '"""Event.error is deprecated, use Event.title"""', (412, 68, 412, 86): 'DeprecationWarning'}, {}), "('Event.error is deprecated, use Event.title', DeprecationWarning)", False, 'import warnings\n'), ((419, 8, 419, 95), 'warnings.warn', 'warnings.warn', ({(419, 22, 419, 74): '"""Event.message_short is deprecated, use Event.title"""', (419, 76, 419, 94): 'DeprecationWarning'}, {}), "('Event.message_short is deprecated, use Event.title',\n DeprecationWarning)", False, 'import warnings\n'), ((479, 20, 479, 73), 'sentry.db.models.NodeData', 'NodeData', (), '', False, 'from sentry.db.models import BoundedBigIntegerField, BoundedIntegerField, Model, NodeData, NodeField, sane_repr\n'), ((520, 15, 527, 9), 'sentry.interfaces.user.User.to_python', 'User.to_python', ({(521, 12, 526, 13): "{'id': self.user_id, 'email': self.email, 'username': self.username,\n 'ip_address': self.ip_address}"}, {}), "({'id': self.user_id, 'email': self.email, 'username': self.\n username, 'ip_address': self.ip_address})", False, 'from sentry.interfaces.user import User\n'), ((629, 23, 629, 35), 'django.utils.translation.ugettext_lazy', '_', ({(629, 25, 629, 34): '"""message"""'}, {}), "('message')", True, 'from django.utils.translation import ugettext_lazy as _\n'), ((630, 30, 630, 43), 'django.utils.translation.ugettext_lazy', '_', ({(630, 32, 630, 42): '"""messages"""'}, {}), "('messages')", True, 'from django.utils.translation import ugettext_lazy as _\n'), ((637, 16, 637, 40), 'sentry.db.models.Model.__getstate__', 'Model.__getstate__', ({(637, 35, 637, 39): 'self'}, {}), '(self)', False, 'from sentry.db.models import BoundedBigIntegerField, BoundedIntegerField, Model, NodeData, NodeField, sane_repr\n'), ((51, 25, 51, 84), 
'semaphore.processing.StoreNormalizer', 'StoreNormalizer', (), '', False, 'from semaphore.processing import StoreNormalizer\n'), ((83, 32, 83, 67), 'sentry.models.Group.objects.get', 'Group.objects.get', (), '', False, 'from sentry.models import Group\n'), ((96, 34, 96, 73), 'sentry.models.Project.objects.get', 'Project.objects.get', (), '', False, 'from sentry.models import Project\n'), ((108, 32, 108, 57), 'sentry.interfaces.base.get_interfaces', 'get_interfaces', ({(108, 47, 108, 56): 'self.data'}, {}), '(self.data)', False, 'from sentry.interfaces.base import get_interfaces\n'), ((123, 12, 123, 56), 'sentry.utils.safe.get_path', 'get_path', ({(123, 21, 123, 30): 'self.data', (123, 32, 123, 42): '"""logentry"""', (123, 44, 123, 55): '"""formatted"""'}, {}), "(self.data, 'logentry', 'formatted')", False, 'from sentry.utils.safe import get_path\n'), ((124, 15, 124, 57), 'sentry.utils.safe.get_path', 'get_path', ({(124, 24, 124, 33): 'self.data', (124, 35, 124, 45): '"""logentry"""', (124, 47, 124, 56): '"""message"""'}, {}), "(self.data, 'logentry', 'message')", False, 'from sentry.utils.safe import get_path\n'), ((203, 12, 203, 65), 'sentry.stacktraces.processing.normalize_stacktraces_for_grouping', 'normalize_stacktraces_for_grouping', ({(203, 47, 203, 56): 'self.data', (203, 58, 203, 64): 'config'}, {}), '(self.data, config)', False, 'from sentry.stacktraces.processing import normalize_stacktraces_for_grouping\n'), ((236, 12, 236, 56), 'sentry.utils.safe.get_path', 'get_path', ({(236, 21, 236, 30): 'self.data', (236, 32, 236, 42): '"""logentry"""', (236, 44, 236, 55): '"""formatted"""'}, {}), "(self.data, 'logentry', 'formatted')", False, 'from sentry.utils.safe import get_path\n'), ((237, 15, 237, 57), 'sentry.utils.safe.get_path', 'get_path', ({(237, 24, 237, 33): 'self.data', (237, 35, 237, 45): '"""logentry"""', (237, 47, 237, 56): '"""message"""'}, {}), "(self.data, 'logentry', 'message')", False, 'from sentry.utils.safe import get_path\n'), ((348, 27, 348, 51), 'six.iteritems', 'six.iteritems', ({(348, 41, 348, 50): 'self.data'}, {}), '(self.data)', False, 'import six\n'), ((667, 19, 667, 39), 'six.text_type', 'six.text_type', ({(667, 33, 667, 38): 'value'}, {}), '(value)', False, 'import six\n'), ((572, 15, 572, 41), 'dateutil.parser.parse', 'parse_date', ({(572, 26, 572, 40): 'self.timestamp'}, {}), '(self.timestamp)', True, 'from dateutil.parser import parse as parse_date\n'), ((352, 46, 352, 62), 'six.iteritems', 'six.iteritems', ({(352, 60, 352, 61): 'v'}, {}), '(v)', False, 'import six\n'), ((267, 32, 267, 72), 'sentry.utils.safe.get_path', 'get_path', (), '', False, 'from sentry.utils.safe import get_path\n')] |
shikashyam/BigDataSystemsCoursework | Assignment3/src/data/make_nowcast_dataset.py | d7f9cabbfb18b0e3303292b65af1ffd530e24ccc | """
Makes training and test dataset for nowcasting model using SEVIR
"""
# -*- coding: utf-8 -*-
import argparse
import logging
import os
import h5py
os.environ["HDF5_USE_FILE_LOCKING"]='FALSE'
import sys
import numpy as np
import tensorflow as tf
from nowcast_generator import get_nowcast_test_generator
# parser = argparse.ArgumentParser(description='Make nowcast training & test datasets using SEVIR')
# parser.add_argument('--sevir_data', type=str, help='location of SEVIR dataset',default='../../data/sevir')
# parser.add_argument('--sevir_catalog', type=str, help='location of SEVIR dataset',default='../../data/CATALOG.csv')
# parser.add_argument('--output_location', type=str, help='location of SEVIR dataset',default='../../data/interim')
# parser.add_argument('--n_chunks', type=int, help='Number of chucks to use (increase if memory limited)',default=10)
#args = parser.parse_args()
def generate_data(sevir_location,sevir_catalog,output_location,n_chunks=10):
"""
Runs data processing scripts to extract training set from SEVIR
"""
logger = logging.getLogger(__name__)
logger.info('making final data set from raw data')
#trn_generator = get_nowcast_train_generator(sevir_catalog=args.sevir_catalog,sevir_location=args.sevir_data)
tst_generator = get_nowcast_test_generator(sevir_catalog,sevir_location)
#ogger.info('Reading/writing training data to %s' % ('%s/nowcast_training.h5' % args.output_location))
#read_write_chunks('%s/nowcast_training.h5' % args.output_location,trn_generator,args.n_chunks)
logger.info('Reading/writing testing data to ' + output_location+'/nowcast_testing.h5')
read_write_chunks(output_location+'/nowcast_testing.h5',tst_generator,n_chunks)
def read_write_chunks( filename, generator, n_chunks ):
logger = logging.getLogger(__name__)
chunksize = len(generator)//n_chunks
# get first chunk
logger.info('Gathering chunk 0/%s:' % n_chunks)
X,Y=generator.load_batches(n_batches=chunksize,offset=0,progress_bar=True)
# Create datasets
with h5py.File(filename, 'w') as hf:
hf.create_dataset('IN', data=X[0], maxshape=(None,X[0].shape[1],X[0].shape[2],X[0].shape[3]))
hf.create_dataset('OUT', data=Y[0], maxshape=(None,Y[0].shape[1],Y[0].shape[2],Y[0].shape[3]))
# Gather other chunks
for c in range(1,n_chunks+1):
offset = c*chunksize
n_batches = min(chunksize,len(generator)-offset)
if n_batches<0: # all done
break
logger.info('Gathering chunk %d/%s:' % (c,n_chunks))
X,Y=generator.load_batches(n_batches=n_batches,offset=offset,progress_bar=True)
with h5py.File(filename, 'a') as hf:
hf['IN'].resize((hf['IN'].shape[0] + X[0].shape[0]), axis = 0)
hf['OUT'].resize((hf['OUT'].shape[0] + Y[0].shape[0]), axis = 0)
hf['IN'][-X[0].shape[0]:] = X[0]
hf['OUT'][-Y[0].shape[0]:] = Y[0]
| [((31, 13, 31, 40), 'logging.getLogger', 'logging.getLogger', ({(31, 31, 31, 39): '__name__'}, {}), '(__name__)', False, 'import logging\n'), ((34, 20, 34, 76), 'nowcast_generator.get_nowcast_test_generator', 'get_nowcast_test_generator', ({(34, 47, 34, 60): 'sevir_catalog', (34, 61, 34, 75): 'sevir_location'}, {}), '(sevir_catalog, sevir_location)', False, 'from nowcast_generator import get_nowcast_test_generator\n'), ((43, 13, 43, 40), 'logging.getLogger', 'logging.getLogger', ({(43, 31, 43, 39): '__name__'}, {}), '(__name__)', False, 'import logging\n'), ((49, 9, 49, 33), 'h5py.File', 'h5py.File', ({(49, 19, 49, 27): 'filename', (49, 29, 49, 32): '"""w"""'}, {}), "(filename, 'w')", False, 'import h5py\n'), ((60, 11, 60, 35), 'h5py.File', 'h5py.File', ({(60, 21, 60, 29): 'filename', (60, 31, 60, 34): '"""a"""'}, {}), "(filename, 'a')", False, 'import h5py\n')] |
rtasan/ApexCastImporter | blender-plugin/import_cast.py | 17f833ab8ff9757e295ca8eadb0cb210bfdd6476 |
# The Original importer was created by Nick
# Copyright (c) 2020 Nick
import bpy
import bmesh
import os
import array
import math
from mathutils import *
from bpy_extras.image_utils import load_image
from .cast import Cast, Model, Animation, Curve, NotificationTrack, Mesh, Skeleton, Bone, Material, File
def utilityBuildPath(root, asset):
if os.path.isabs(asset):
return asset
root = os.path.dirname(root)
return os.path.join(root, asset)
def utilityAssignBSDFMaterialSlots(material, slots, path):
material.node_tree.nodes.remove(
material.node_tree.nodes["Principled BSDF"])
shader = material.node_tree.nodes.new("ShaderNodeGroup")
output = material.node_tree.nodes['Material Output']
# グループシェーダーを作成
shader.node_tree = bpy.data.node_groups['S/G-Blender']
#テクスチャを定義
switcher = {
"albedo": "Diffuse map",
"diffuse": "Diffuse map",
"specular": "Specular map",
"ao": "AO map",
"cavity": "Cavity map",
"gloss": "Glossiness map",
"normal": "Normal map",
"emissive": "Emission input"
}
# Loop and connect the slots
for slot in slots:
connection = slots[slot]
if not connection.__class__ is File:
continue
if not slot in switcher:
continue
texture = material.node_tree.nodes.new("ShaderNodeTexImage") #画像ノードを作成
try:
texture.image = bpy.data.images.load(
utilityBuildPath(path, connection.Path())) #画像を読み込み
except RuntimeError:
pass
if texture.image:
material.node_tree.links.new(
shader.inputs[switcher[slot]], texture.outputs["Color"])
material.node_tree.links.new(shader.outputs[0], output.inputs[0]) # texture.outputのカラーとinputをつなげる(link)
else:
material.node_tree.nodes.remove(texture)
def importSkeletonNode(name, skeleton):
if skeleton is None:
return None
armature = bpy.data.armatures.new("Joints")
armature.display_type = "STICK"
skeletonObj = bpy.data.objects.new(name, armature)
skeletonObj.show_in_front = True
bpy.context.view_layer.active_layer_collection.collection.objects.link(
skeletonObj)
bpy.context.view_layer.objects.active = skeletonObj
bpy.ops.object.mode_set(mode='EDIT')
bones = skeleton.Bones()
handles = [None] * len(bones)
matrices = {}
for i, bone in enumerate(bones):
newBone = armature.edit_bones.new(bone.Name())
newBone.tail = 0, 0.05, 0 # I am sorry but blender sucks
tempQuat = bone.LocalRotation() # Also sucks, WXYZ? => XYZW master race
matRotation = Quaternion(
(tempQuat[3], tempQuat[0], tempQuat[1], tempQuat[2])).to_matrix().to_4x4()
matTranslation = Matrix.Translation(Vector(bone.LocalPosition()))
matrices[bone.Name()] = matTranslation @ matRotation
handles[i] = newBone
for i, bone in enumerate(bones):
if bone.ParentIndex() > -1:
handles[i].parent = handles[bone.ParentIndex()]
bpy.context.view_layer.objects.active = skeletonObj
bpy.ops.object.mode_set(mode='POSE')
for bone in skeletonObj.pose.bones:
bone.matrix_basis.identity()
bone.matrix = matrices[bone.name]
bpy.ops.pose.armature_apply()
return skeletonObj
def importMaterialNode(path, material):
# If you already created the material, ignore this
materialNew = bpy.data.materials.get(material.Name())
if materialNew is not None:
return material.Name(), materialNew
materialNew = bpy.data.materials.new(name=material.Name())
materialNew.use_nodes = True
# Blender really only wants a BSDF shader node
# so we're gonna give it one
utilityAssignBSDFMaterialSlots(materialNew, material.Slots(), path)
return material.Name(), materialNew
def importModelNode(model, path):
# Extract the name of this model from the path
modelName = os.path.splitext(os.path.basename(path))[0]
# Import skeleton for binds, materials for meshes
skeletonObj = importSkeletonNode(modelName, model.Skeleton())
materialArray = {key: value for (key, value) in (
importMaterialNode(path, x) for x in model.Materials())}
meshes = model.Meshes()
for mesh in meshes:
newMesh = bpy.data.meshes.new("polySurfaceMesh")
blendMesh = bmesh.new()
vertexColorLayer = blendMesh.loops.layers.color.new("color1")
vertexWeightLayer = blendMesh.verts.layers.deform.new()
vertexUVLayers = [blendMesh.loops.layers.uv.new(
"map%d" % x) for x in range(mesh.UVLayerCount())]
vertexPositions = mesh.VertexPositionBuffer()
for x in range(0, len(vertexPositions), 3):
blendMesh.verts.new(
Vector((vertexPositions[x], vertexPositions[x + 1], vertexPositions[x + 2])))
blendMesh.verts.ensure_lookup_table()
faceLookupMap = [1, 2, 0]
vertexNormalLayer = []
vertexNormals = mesh.VertexNormalBuffer()
vertexColors = mesh.VertexColorBuffer()
vertexUVs = [mesh.VertexUVLayerBuffer(
x) for x in range(mesh.UVLayerCount())]
def vertexToFaceVertex(face):
for x, loop in enumerate(face.loops):
vertexIndex = faces[faceStart + faceLookupMap[x]]
if vertexNormals is not None:
vertexNormalLayer.append((vertexNormals[vertexIndex * 3], vertexNormals[(
vertexIndex * 3) + 1], vertexNormals[(vertexIndex * 3) + 2]))
for uvLayer in range(mesh.UVLayerCount()):
uv = Vector(
(vertexUVs[uvLayer][vertexIndex * 2], vertexUVs[uvLayer][(vertexIndex * 2) + 1]))
uv.y = 1.0 - uv.y
loop[vertexUVLayers[uvLayer]].uv = uv
if vertexColors is not None:
loop[vertexColorLayer] = [
(vertexColors[vertexIndex] >> i & 0xff) / 255.0 for i in (24, 16, 8, 0)]
faces = mesh.FaceBuffer()
for faceStart in range(0, len(faces), 3):
indices = [blendMesh.verts[faces[faceStart + faceLookupMap[0]]],
blendMesh.verts[faces[faceStart + faceLookupMap[1]]], blendMesh.verts[faces[faceStart + faceLookupMap[2]]]]
try:
newLoop = blendMesh.faces.new(indices)
except ValueError:
continue
else:
vertexToFaceVertex(newLoop)
maximumInfluence = mesh.MaximumWeightInfluence()
if maximumInfluence > 0:
weightBoneBuffer = mesh.VertexWeightBoneBuffer()
weightValueBuffer = mesh.VertexWeightValueBuffer()
for x, vert in enumerate(blendMesh.verts):
if (weightValueBuffer[x * maximumInfluence] > 0.0):
vert[vertexWeightLayer][weightBoneBuffer[x * maximumInfluence]
] = weightValueBuffer[x * maximumInfluence]
blendMesh.to_mesh(newMesh)
newMesh.create_normals_split()
if len(vertexNormalLayer) > 0:
for x, _loop in enumerate(newMesh.loops):
newMesh.loops[x].normal = vertexNormalLayer[x]
newMesh.validate(clean_customdata=False)
clnors = array.array('f', [0.0] * (len(newMesh.loops) * 3))
newMesh.loops.foreach_get("normal", clnors)
newMesh.polygons.foreach_set(
"use_smooth", [True] * len(newMesh.polygons))
newMesh.normals_split_custom_set(tuple(zip(*(iter(clnors),) * 3)))
newMesh.use_auto_smooth = True
meshObj = bpy.data.objects.new("CastMesh", newMesh)
bpy.context.view_layer.active_layer_collection.collection.objects.link(
meshObj)
bpy.context.view_layer.objects.active = meshObj
meshMaterial = mesh.Material()
if meshMaterial is not None:
meshObj.data.materials.append(materialArray[meshMaterial.Name()])
for bone in skeletonObj.pose.bones:
meshObj.vertex_groups.new(name=bone.name)
meshObj.parent = skeletonObj
modifier = meshObj.modifiers.new('Armature Rig', 'ARMATURE')
modifier.object = skeletonObj
modifier.use_bone_envelopes = False
modifier.use_vertex_groups = True
def importRootNode(node, path):
for child in node.ChildrenOfType(Model):
importModelNode(child, path)
# for child in node.ChildrenOfType(Animation):
# importAnimationNode(child, path)
def importCast(path):
cast = Cast()
cast.load(path)
for root in cast.Roots():
importRootNode(root, path)
def load(self, context, filepath=""):
# シェーダーをアペンド
shader_path = bpy.context.preferences.addons[__package__].preferences.apex_sgshader_path
try:
file_path = shader_path
inner_path = 'NodeTree'
object_name = 'S/G-Blender'
bpy.ops.wm.append(
filepath=os.path.join(file_path, inner_path, object_name),
directory=os.path.join(file_path, inner_path),
filename=object_name
)
except:
self.report({'ERROR'}, 'Set the Shader path in AddonPreferences first.')
return False
# Parse and load cast nodes
importCast(filepath)
# Update the scene, reset view mode before returning.
bpy.context.view_layer.update()
bpy.ops.object.mode_set(mode="OBJECT")
return True
| [((16, 7, 16, 27), 'os.path.isabs', 'os.path.isabs', ({(16, 21, 16, 26): 'asset'}, {}), '(asset)', False, 'import os\n'), ((19, 11, 19, 32), 'os.path.dirname', 'os.path.dirname', ({(19, 27, 19, 31): 'root'}, {}), '(root)', False, 'import os\n'), ((20, 11, 20, 36), 'os.path.join', 'os.path.join', ({(20, 24, 20, 28): 'root', (20, 30, 20, 35): 'asset'}, {}), '(root, asset)', False, 'import os\n'), ((75, 15, 75, 47), 'bpy.data.armatures.new', 'bpy.data.armatures.new', ({(75, 38, 75, 46): '"""Joints"""'}, {}), "('Joints')", False, 'import bpy\n'), ((78, 18, 78, 54), 'bpy.data.objects.new', 'bpy.data.objects.new', ({(78, 39, 78, 43): 'name', (78, 45, 78, 53): 'armature'}, {}), '(name, armature)', False, 'import bpy\n'), ((81, 4, 82, 20), 'bpy.context.view_layer.active_layer_collection.collection.objects.link', 'bpy.context.view_layer.active_layer_collection.collection.objects.link', ({(82, 8, 82, 19): 'skeletonObj'}, {}), '(\n skeletonObj)', False, 'import bpy\n'), ((85, 4, 85, 40), 'bpy.ops.object.mode_set', 'bpy.ops.object.mode_set', (), '', False, 'import bpy\n'), ((108, 4, 108, 40), 'bpy.ops.object.mode_set', 'bpy.ops.object.mode_set', (), '', False, 'import bpy\n'), ((114, 4, 114, 33), 'bpy.ops.pose.armature_apply', 'bpy.ops.pose.armature_apply', ({}, {}), '()', False, 'import bpy\n'), ((279, 4, 279, 35), 'bpy.context.view_layer.update', 'bpy.context.view_layer.update', ({}, {}), '()', False, 'import bpy\n'), ((280, 4, 280, 42), 'bpy.ops.object.mode_set', 'bpy.ops.object.mode_set', (), '', False, 'import bpy\n'), ((145, 18, 145, 56), 'bpy.data.meshes.new', 'bpy.data.meshes.new', ({(145, 38, 145, 55): '"""polySurfaceMesh"""'}, {}), "('polySurfaceMesh')", False, 'import bpy\n'), ((146, 20, 146, 31), 'bmesh.new', 'bmesh.new', ({}, {}), '()', False, 'import bmesh\n'), ((223, 18, 223, 59), 'bpy.data.objects.new', 'bpy.data.objects.new', ({(223, 39, 223, 49): '"""CastMesh"""', (223, 51, 223, 58): 'newMesh'}, {}), "('CastMesh', newMesh)", False, 'import bpy\n'), ((224, 8, 225, 20), 'bpy.context.view_layer.active_layer_collection.collection.objects.link', 'bpy.context.view_layer.active_layer_collection.collection.objects.link', ({(225, 12, 225, 19): 'meshObj'}, {}), '(meshObj)', False, 'import bpy\n'), ((136, 33, 136, 55), 'os.path.basename', 'os.path.basename', ({(136, 50, 136, 54): 'path'}, {}), '(path)', False, 'import os\n'), ((267, 21, 267, 69), 'os.path.join', 'os.path.join', ({(267, 34, 267, 43): 'file_path', (267, 45, 267, 55): 'inner_path', (267, 57, 267, 68): 'object_name'}, {}), '(file_path, inner_path, object_name)', False, 'import os\n'), ((268, 22, 268, 57), 'os.path.join', 'os.path.join', ({(268, 35, 268, 44): 'file_path', (268, 46, 268, 56): 'inner_path'}, {}), '(file_path, inner_path)', False, 'import os\n')] |
dekuNukem/USB4VC | user_program/usb4vc_ui.py | 66c4f0b4a4acd7cec6654ea0dd4da026edf5d24c | # https://luma-oled.readthedocs.io/en/latest/software.html
import os
import sys
import time
import threading
import usb4vc_oled
from luma.core.render import canvas
import RPi.GPIO as GPIO
import usb4vc_usb_scan
import usb4vc_shared
import usb4vc_show_ev
import usb4vc_check_update
import json
import subprocess
from subprocess import Popen, PIPE
from usb4vc_shared import this_app_dir_path
from usb4vc_shared import config_dir_path
from usb4vc_shared import firmware_dir_path
from usb4vc_shared import temp_dir_path
from usb4vc_shared import ensure_dir
from usb4vc_shared import i2c_bootloader_pbid
from usb4vc_shared import usb_bootloader_pbid
config_file_path = os.path.join(config_dir_path, 'config.json')
ensure_dir(this_app_dir_path)
ensure_dir(config_dir_path)
ensure_dir(firmware_dir_path)
ensure_dir(temp_dir_path)
PLUS_BUTTON_PIN = 27
MINUS_BUTTON_PIN = 19
ENTER_BUTTON_PIN = 22
SHUTDOWN_BUTTON_PIN = 21
PBOARD_RESET_PIN = 25
PBOARD_BOOT0_PIN = 12
SLEEP_LED_PIN = 26
GPIO.setmode(GPIO.BCM)
GPIO.setup(PBOARD_RESET_PIN, GPIO.IN)
GPIO.setup(PBOARD_BOOT0_PIN, GPIO.IN)
GPIO.setup(SLEEP_LED_PIN, GPIO.OUT)
GPIO.output(SLEEP_LED_PIN, GPIO.LOW)
SPI_MOSI_MAGIC = 0xde
SPI_MOSI_MSG_TYPE_SET_PROTOCOL = 2
set_protocl_spi_msg_template = [SPI_MOSI_MAGIC, 0, SPI_MOSI_MSG_TYPE_SET_PROTOCOL] + [0]*29
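# Illustrative sketch: the header bytes above (magic at index 0, message type at
# index 2) come straight from this file; packing the requested protocol IDs into
# the payload starting at index 3 is an assumption for illustration only, not
# the authoritative wire format.
def _example_set_protocol_msg(protocol_ids):
    msg = list(set_protocl_spi_msg_template)
    msg[3:3 + len(protocol_ids)] = protocol_ids
    return msg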
class my_button(object):
def __init__(self, bcm_pin):
super(my_button, self).__init__()
self.pin_number = bcm_pin
GPIO.setup(self.pin_number, GPIO.IN, pull_up_down=GPIO.PUD_UP)
self.prev_state = GPIO.input(self.pin_number)
def is_pressed(self):
result = False
current_state = GPIO.input(self.pin_number)
if self.prev_state == 1 and current_state == 0:
result = True
self.prev_state = current_state
return result
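# Example usage (illustrative): is_pressed() reports only the falling edge of
# the active-low input, so polling it in a loop fires once per physical press.
# plus_button = my_button(PLUS_BUTTON_PIN)
# while True:
#     if plus_button.is_pressed():
#         pass  # e.g. advance to the next menu page
#     time.sleep(0.02)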
PBOARD_ID_UNKNOWN = 0
PBOARD_ID_IBMPC = 1
PBOARD_ID_ADB = 2
pboard_info_spi_msg = [0] * 32
this_pboard_id = PBOARD_ID_UNKNOWN
USBGP_BTN_SOUTH = 0x130
USBGP_BTN_EAST = 0x131
USBGP_BTN_C = 0x132
USBGP_BTN_NORTH = 0x133
USBGP_BTN_WEST = 0x134
USBGP_BTN_Z = 0x135
USBGP_BTN_TL = 0x136
USBGP_BTN_TR = 0x137
USBGP_BTN_TL2 = 0x138
USBGP_BTN_TR2 = 0x139
USBGP_BTN_SELECT = 0x13a
USBGP_BTN_START = 0x13b
USBGP_BTN_MODE = 0x13c
USBGP_BTN_THUMBL = 0x13d
USBGP_BTN_THUMBR = 0x13e
USBGP_BTN_A = USBGP_BTN_SOUTH
USBGP_BTN_B = USBGP_BTN_EAST
USBGP_BTN_X = USBGP_BTN_NORTH
USBGP_BTN_Y = USBGP_BTN_WEST
USBGP_ABS_X = 0x00 # left stick X
USBGP_ABS_Y = 0x01 # left stick Y
USBGP_ABS_Z = 0x02 # left analog trigger
USBGP_ABS_RX = 0x03 # right stick X
USBGP_ABS_RY = 0x04 # right stick Y
USBGP_ABS_RZ = 0x05 # right analog trigger
USBGP_ABS_HAT0X = 0x10 # D-pad X
USBGP_ABS_HAT0Y = 0x11 # D-pad Y
GENERIC_USB_GAMEPAD_TO_MOUSE_KB_DEAULT_MAPPING = {
"MAPPING_TYPE": "DEFAULT_MOUSE_KB",
'BTN_TL': {'code': 'BTN_LEFT'},
'BTN_TR': {'code': 'BTN_RIGHT'},
'BTN_TL2': {'code': 'BTN_LEFT'},
'BTN_TR2': {'code': 'BTN_RIGHT'},
'ABS_X': {'code': 'REL_X'},
'ABS_Y': {'code': 'REL_Y'},
'ABS_HAT0X': {'code': 'KEY_RIGHT', 'code_neg': 'KEY_LEFT'},
'ABS_HAT0Y': {'code': 'KEY_DOWN', 'code_neg': 'KEY_UP'}
}
IBM_GENERIC_USB_GAMEPAD_TO_15PIN_GAMEPORT_GAMEPAD_DEAULT_MAPPING = {
"MAPPING_TYPE": "DEFAULT_15PIN",
# buttons to buttons
'BTN_SOUTH': {'code':'IBM_GGP_BTN_1'},
'BTN_NORTH': {'code':'IBM_GGP_BTN_2'},
'BTN_EAST': {'code':'IBM_GGP_BTN_3'},
'BTN_WEST': {'code':'IBM_GGP_BTN_4'},
'BTN_TL': {'code':'IBM_GGP_BTN_1'},
'BTN_TR': {'code':'IBM_GGP_BTN_2'},
'BTN_Z': {'code':'IBM_GGP_BTN_3'},
'BTN_C': {'code':'IBM_GGP_BTN_4'},
'BTN_TL2': {'code':'IBM_GGP_BTN_1'},
'BTN_TR2': {'code':'IBM_GGP_BTN_2'},
# analog axes to analog axes
'ABS_X': {'code':'IBM_GGP_JS1_X'},
'ABS_Y': {'code':'IBM_GGP_JS1_Y'},
'ABS_HAT0X': {'code':'IBM_GGP_JS1_X'},
'ABS_HAT0Y': {'code':'IBM_GGP_JS1_Y'},
'ABS_RX': {'code':'IBM_GGP_JS2_X'},
'ABS_RY': {'code':'IBM_GGP_JS2_Y'},
}
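# Illustrative sketch (an assumption about how these mapping dicts are consumed
# elsewhere in the program): look up a gamepad event by its evdev name; a
# negative axis value prefers 'code_neg' when the entry defines one.
def _example_mapping_lookup(mapping, evdev_name, value):
    entry = mapping.get(evdev_name)
    if not isinstance(entry, dict):
        return None  # skips the "MAPPING_TYPE" marker and unmapped events
    if value < 0 and 'code_neg' in entry:
        return entry['code_neg']
    return entry.get('code')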
PROTOCOL_OFF = {'pid':0, 'display_name':"OFF"}
PROTOCOL_AT_PS2_KB = {'pid':1, 'display_name':"AT/PS2"}
PROTOCOL_XT_KB = {'pid':2, 'display_name':"PC XT"}
PROTOCOL_ADB_KB = {'pid':3, 'display_name':"ADB"}
PROTOCOL_PS2_MOUSE_NORMAL = {'pid':4, 'display_name':"PS/2"}
PROTOCOL_MICROSOFT_SERIAL_MOUSE = {'pid':5, 'display_name':"Microsoft Serial"}
PROTOCOL_ADB_MOUSE = {'pid':6, 'display_name':"ADB"}
PROTOCOL_15PIN_GAMEPORT_GAMEPAD = {'pid':7, 'display_name':"Generic 15-Pin", 'mapping':IBM_GENERIC_USB_GAMEPAD_TO_15PIN_GAMEPORT_GAMEPAD_DEAULT_MAPPING}
PROTOCOL_MOUSESYSTEMS_SERIAL_MOUSE = {'pid':8, 'display_name':"MouseSys Serial"}
PROTOCOL_USB_GP_TO_MOUSE_KB = {'pid':0, 'display_name':'Mouse & KB', 'mapping':GENERIC_USB_GAMEPAD_TO_MOUSE_KB_DEAULT_MAPPING}
PROTOCOL_RAW_KEYBOARD = {'pid':125, 'display_name':"Raw data"}
PROTOCOL_RAW_MOUSE = {'pid':126, 'display_name':"Raw data"}
PROTOCOL_RAW_GAMEPAD = {'pid':127, 'display_name':"Raw data"}
custom_profile_list = []
try:
onlyfiles = [f for f in os.listdir(config_dir_path) if os.path.isfile(os.path.join(config_dir_path, f))]
json_map_files = [os.path.join(config_dir_path, x) for x in onlyfiles if x.lower().startswith('usb4vc_map') and x.lower().endswith(".json")]
for item in json_map_files:
print('loading json file:', item)
with open(item) as json_file:
custom_profile_list.append(json.load(json_file))
except Exception as e:
print('exception json load:', e)
def get_list_of_usb_drive():
usb_drive_set = set()
try:
usb_drive_path = subprocess.getoutput("timeout 2 df -h | grep -i usb").replace('\r', '').split('\n')
for item in [x for x in usb_drive_path if len(x) > 2]:
usb_drive_set.add(os.path.join(item.split(' ')[-1], 'usb4vc'))
except Exception as e:
print("exception get_list_of_usb_drive:", e)
return usb_drive_set
def copy_debug_log():
usb_drive_set = get_list_of_usb_drive()
if len(usb_drive_set) == 0:
return False
for this_path in usb_drive_set:
if os.path.isdir(this_path):
print('copying debug log to', this_path)
os.system(f'sudo cp -v /home/pi/usb4vc/usb4vc_debug_log.txt {this_path}')
return True
def check_usb_drive():
usb_drive_set = get_list_of_usb_drive()
if len(usb_drive_set) == 0:
return False, 'USB Drive Not Found'
for this_path in usb_drive_set:
usb_config_path = os.path.join(this_path, 'config')
if not os.path.isdir(usb_config_path):
usb_config_path = None
if usb_config_path is not None:
return True, usb_config_path
return False, 'No Update Data Found'
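# Example usage (illustrative):
# ok, result = check_usb_drive()
# if ok:
#     update_from_usb(result)  # result is the usb4vc/config dir on the drive
# else:
#     print(result)            # result is an error message instead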
def get_pbid_and_version(dfu_file_name):
pbid = None
try:
pbid = int(dfu_file_name.split('PBID')[-1].split('_')[0])
except Exception as e:
print("exception fw pbid parse:", e)
fw_ver_tuple = None
try:
fw_ver = dfu_file_name.lower().split('_v')[-1].split('.')[0].split('_')
fw_ver_tuple = (int(fw_ver[0]), int(fw_ver[1]), int(fw_ver[2]))
except Exception as e:
print('exception fw ver parse:', e)
return pbid, fw_ver_tuple
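# Example (based on the parsing above): a file named "PBFW_PBID1_V0_9_2.dfu"
# yields pbid == 1 and fw_ver_tuple == (0, 9, 2).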
def reset_pboard():
print("resetting protocol board...")
GPIO.setup(PBOARD_BOOT0_PIN, GPIO.IN)
GPIO.setup(PBOARD_RESET_PIN, GPIO.OUT)
GPIO.output(PBOARD_RESET_PIN, GPIO.LOW)
time.sleep(0.05)
GPIO.setup(PBOARD_RESET_PIN, GPIO.IN)
time.sleep(0.05)
print("done")
def enter_dfu():
# RESET LOW: Enter reset
GPIO.setup(PBOARD_RESET_PIN, GPIO.OUT)
GPIO.output(PBOARD_RESET_PIN, GPIO.LOW)
time.sleep(0.05)
# BOOT0 HIGH: Boot into DFU mode
GPIO.setup(PBOARD_BOOT0_PIN, GPIO.OUT)
GPIO.output(PBOARD_BOOT0_PIN, GPIO.HIGH)
time.sleep(0.05)
# Release RESET, BOOT0 still HIGH, STM32 now in DFU mode
GPIO.setup(PBOARD_RESET_PIN, GPIO.IN)
time.sleep(1.5)
def exit_dfu():
# Release BOOT0
GPIO.setup(PBOARD_BOOT0_PIN, GPIO.IN)
# Activate RESET
GPIO.setup(PBOARD_RESET_PIN, GPIO.OUT)
GPIO.output(PBOARD_RESET_PIN, GPIO.LOW)
time.sleep(0.05)
# Release RESET, BOOT0 is LOW, STM32 boots in normal mode
GPIO.setup(PBOARD_RESET_PIN, GPIO.IN)
time.sleep(1.5)
def fw_update(fw_path, pbid):
is_updated = False
if pbid in i2c_bootloader_pbid and fw_path.lower().endswith('.hex'):
enter_dfu()
os.system(f'sudo stm32flash -w {fw_path} -a 0x3b /dev/i2c-1')
is_updated = True
elif pbid in usb_bootloader_pbid and fw_path.lower().endswith('.dfu'):
enter_dfu()
lsusb_str = subprocess.getoutput("lsusb")
if 'in DFU'.lower() not in lsusb_str.lower():
with canvas(usb4vc_oled.oled_device) as draw:
usb4vc_oled.oled_print_centered("Connect a USB cable", usb4vc_oled.font_regular, 0, draw)
usb4vc_oled.oled_print_centered("from P-Card to RPi", usb4vc_oled.font_regular, 10, draw)
usb4vc_oled.oled_print_centered("and try again", usb4vc_oled.font_regular, 20, draw)
time.sleep(4)
else:
os.system(f'sudo dfu-util --device ,0483:df11 -a 0 -D {fw_path}')
is_updated = True
exit_dfu()
return is_updated
def update_pboard_firmware(this_pid):
onlyfiles = [f for f in os.listdir(firmware_dir_path) if os.path.isfile(os.path.join(firmware_dir_path, f))]
firmware_files = [x for x in onlyfiles if x.startswith("PBFW_") and (x.lower().endswith(".dfu") or x.lower().endswith(".hex")) and "PBID" in x]
this_pboard_version_tuple = (pboard_info_spi_msg[5], pboard_info_spi_msg[6], pboard_info_spi_msg[7])
for item in firmware_files:
pbid, fw_ver_tuple = get_pbid_and_version(item)
if pbid is None or fw_ver_tuple is None:
continue
print('update_pboard_firmware:', this_pid, this_pboard_version_tuple, fw_ver_tuple)
if pbid == this_pid and fw_ver_tuple > this_pboard_version_tuple:
print("DOING IT NOW")
with canvas(usb4vc_oled.oled_device) as draw:
usb4vc_oled.oled_print_centered("Loading Firmware:", usb4vc_oled.font_medium, 0, draw)
                # str.strip() removes a *set* of characters and mangled names like
                # "PBID1"; drop the extension and the "PBFW_" prefix explicitly instead.
                usb4vc_oled.oled_print_centered(os.path.splitext(item)[0].replace("PBFW_", "", 1), usb4vc_oled.font_regular, 16, draw)
if fw_update(os.path.join(firmware_dir_path, item), this_pid):
return True
return False
def update_from_usb(usb_config_path):
if usb_config_path is not None:
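        # shuffle so user settings survive: back up config.json to the stick,
        # park a copy outside the config dir, wipe the dir, copy the custom
        # mapping files in from the stick, then restore config.json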
os.system(f'cp -v /home/pi/usb4vc/config/config.json {usb_config_path}')
os.system('mv -v /home/pi/usb4vc/config/config.json /home/pi/usb4vc/config.json')
os.system('rm -rfv /home/pi/usb4vc/config/*')
os.system(f"cp -v {os.path.join(usb_config_path, '*')} /home/pi/usb4vc/config")
os.system("mv -v /home/pi/usb4vc/config.json /home/pi/usb4vc/config/config.json")
ibmpc_keyboard_protocols = [PROTOCOL_OFF, PROTOCOL_AT_PS2_KB, PROTOCOL_XT_KB]
ibmpc_mouse_protocols = [PROTOCOL_OFF, PROTOCOL_PS2_MOUSE_NORMAL, PROTOCOL_MICROSOFT_SERIAL_MOUSE, PROTOCOL_MOUSESYSTEMS_SERIAL_MOUSE]
ibmpc_gamepad_protocols = [PROTOCOL_OFF, PROTOCOL_15PIN_GAMEPORT_GAMEPAD, PROTOCOL_USB_GP_TO_MOUSE_KB]
adb_keyboard_protocols = [PROTOCOL_OFF, PROTOCOL_ADB_KB]
adb_mouse_protocols = [PROTOCOL_OFF, PROTOCOL_ADB_MOUSE]
adb_gamepad_protocols = [PROTOCOL_OFF, PROTOCOL_USB_GP_TO_MOUSE_KB]
raw_keyboard_protocols = [PROTOCOL_OFF, PROTOCOL_RAW_KEYBOARD]
raw_mouse_protocols = [PROTOCOL_OFF, PROTOCOL_RAW_MOUSE]
raw_gamepad_protocols = [PROTOCOL_OFF, PROTOCOL_RAW_GAMEPAD]
mouse_sensitivity_list = [1, 1.25, 1.5, 1.75, 0.25, 0.5, 0.75]
"""
key is protocol card ID
conf_dict[pbid]:
hw revision
current keyboard protocol
current mouse protocol
current gamepad procotol
mouse sensitivity
"""
configuration_dict = {}
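# GNU coreutils timeout(1) exits with status 124 when the wrapped command is
# killed for overrunning its limit; os.system() returns that status in the
# high byte, hence the ">> 8" shifts wherever it is checked.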
LINUX_EXIT_CODE_TIMEOUT = 124
def bt_setup():
rfkill_str = subprocess.getoutput("/usr/sbin/rfkill -n")
if 'bluetooth' not in rfkill_str:
return 1, "no BT receiver found"
os.system('/usr/sbin/rfkill unblock bluetooth')
time.sleep(0.1)
exit_code = os.system('timeout 1 bluetoothctl agent NoInputNoOutput') >> 8
if exit_code == LINUX_EXIT_CODE_TIMEOUT:
return 2, 'bluetoothctl stuck'
return 0, ''
def scan_bt_devices(timeout_sec = 5):
exit_code = os.system(f"timeout {timeout_sec} bluetoothctl --agent NoInputNoOutput scan on") >> 8
if exit_code != LINUX_EXIT_CODE_TIMEOUT:
return None, 'scan error'
device_str = subprocess.getoutput("bluetoothctl --agent NoInputNoOutput devices")
dev_list = []
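    # each matching line from bluetoothctl is expected to look like
    # "Device AA:BB:CC:DD:EE:FF Some Device Name"; a "name" that is just the
    # MAC with dashes (five '-' characters) means no real name was reported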
for line in device_str.replace('\r', '').split('\n'):
if 'device' not in line.lower():
continue
line_split = line.split(' ', maxsplit=2)
# skip if device has no name
if len(line_split) < 3 or line_split[2].count('-') == 5:
continue
dev_list.append((line_split[1], line_split[2]))
return dev_list, ''
def pair_device(mac_addr):
is_ready = False
is_sent = False
fail_phrases = ['fail', 'error', 'not available', 'excep']
    # note: passing shell=True together with an argument list would hand the
    # arguments to the shell itself rather than to bluetoothctl, so spawn the
    # process directly
    with Popen(["bluetoothctl", "--agent", "NoInputNoOutput"], stdout=PIPE, stdin=PIPE, bufsize=1,
               universal_newlines=True) as p:
for line in p.stdout:
print(line, end='')
line_lo = line.lower()
if 'registered' in line_lo:
is_ready = True
if is_ready is False:
continue
            if '#' in line_lo and not is_sent:
p.stdin.write(f'pair {mac_addr}\n')
is_sent = True
if 'PIN code:' in line:
with canvas(usb4vc_oled.oled_device) as draw:
usb4vc_oled.oled_print_centered("Enter PIN code:", usb4vc_oled.font_medium, 0, draw)
usb4vc_oled.oled_print_centered(line.split('PIN code:')[-1], usb4vc_oled.font_medium, 15, draw)
if '(yes/no)' in line:
p.stdin.write('yes\n')
if 'number in 0-999999' in line:
return False, "Error: Passkey needed"
if 'successful' in line_lo:
p.stdin.write('exit\n')
return True, 'Success!'
for item in fail_phrases:
if item in line_lo:
p.stdin.write('exit\n')
return False, line
return False, "wtf"
def get_paired_devices():
dev_set = set()
try:
device_str = subprocess.getoutput(f"timeout 5 bluetoothctl --agent NoInputNoOutput paired-devices")
for line in device_str.replace('\r', '').split('\n'):
if 'device' not in line.lower():
continue
line_split = line.split(' ', maxsplit=2)
# skip if device has no name
if len(line_split) < 3 or line_split[2].count('-') == 5:
continue
dev_set.add((line_split[1], line_split[2]))
except Exception as e:
print('exception get_paired_devices:', e)
return dev_set
def load_config():
global configuration_dict
try:
with open(config_file_path) as json_file:
temp_dict = json.load(json_file)
# json dump all keys as strings, need to convert them back to ints
for key in temp_dict:
if key.isdigit():
configuration_dict[int(key)] = temp_dict[key]
else:
configuration_dict[key] = temp_dict[key]
except Exception as e:
print("exception config load failed!", e)
def get_ip_address():
ip_str = subprocess.getoutput("timeout 1 hostname -I")
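    # `hostname -I` prints every assigned address space-separated; keeping
    # only entries containing '.' filters the list down to IPv4 addresses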
ip_list = [x for x in ip_str.split(' ') if '.' in x]
if len(ip_list) == 0:
return "Offline"
    return ip_list[0]
def save_config():
try:
with open(config_file_path, 'w', encoding='utf-8') as save_file:
save_file.write(json.dumps(configuration_dict))
except Exception as e:
print("exception config save failed!", e)
curve_vertical_axis_x_pos = 80
curve_horizontal_axis_width = 32
curve_linear = {0: 0, 1: 1, 2: 2, 3: 3, 4: 4, 5: 5, 6: 6, 7: 7, 8: 8, 9: 9, 10: 10, 11: 11, 12: 12, 13: 13, 14: 14, 15: 15, 16: 16, 17: 17, 18: 18, 19: 19, 20: 20, 21: 21, 22: 22, 23: 23, 24: 24, 25: 25, 26: 26, 27: 27, 28: 28, 29: 29, 30: 30, 31: 31, 32: 32, 33: 33, 34: 34, 35: 35, 36: 36, 37: 37, 38: 38, 39: 39, 40: 40, 41: 41, 42: 42, 43: 43, 44: 44, 45: 45, 46: 46, 47: 47, 48: 48, 49: 49, 50: 50, 51: 51, 52: 52, 53: 53, 54: 54, 55: 55, 56: 56, 57: 57, 58: 58, 59: 59, 60: 60, 61: 61, 62: 62, 63: 63, 64: 64, 65: 65, 66: 66, 67: 67, 68: 68, 69: 69, 70: 70, 71: 71, 72: 72, 73: 73, 74: 74, 75: 75, 76: 76, 77: 77, 78: 78, 79: 79, 80: 80, 81: 81, 82: 82, 83: 83, 84: 84, 85: 85, 86: 86, 87: 87, 88: 88, 89: 89, 90: 90, 91: 91, 92: 92, 93: 93, 94: 94, 95: 95, 96: 96, 97: 97, 98: 98, 99: 99, 100: 100, 101: 101, 102: 102, 103: 103, 104: 104, 105: 105, 106: 106, 107: 107, 108: 108, 109: 109, 110: 110, 111: 111, 112: 112, 113: 113, 114: 114, 115: 115, 116: 116, 117: 117, 118: 118, 119: 119, 120: 120, 121: 121, 122: 122, 123: 123, 124: 124, 125: 125, 126: 126, 127: 127}
curve1 = {0: 1, 1: 1, 2: 2, 3: 2, 4: 3, 5: 4, 6: 4, 7: 5, 8: 5, 9: 6, 10: 6, 11: 7, 12: 7, 13: 8, 14: 8, 15: 9, 16: 9, 17: 10, 18: 11, 19: 11, 20: 12, 21: 12, 22: 13, 23: 13, 24: 14, 25: 15, 26: 15, 27: 16, 28: 16, 29: 17, 30: 18, 31: 18, 32: 19, 33: 19, 34: 20, 35: 21, 36: 21, 37: 22, 38: 22, 39: 23, 40: 24, 41: 24, 42: 25, 43: 26, 44: 26, 45: 27, 46: 28, 47: 28, 48: 29, 49: 30, 50: 30, 51: 31, 52: 32, 53: 33, 54: 33, 55: 34, 56: 35, 57: 36, 58: 36, 59: 37, 60: 38, 61: 39, 62: 39, 63: 40, 64: 41, 65: 42, 66: 43, 67: 44, 68: 45, 69: 46, 70: 46, 71: 47, 72: 48, 73: 49, 74: 50, 75: 51, 76: 52, 77: 53, 78: 55, 79: 56, 80: 57, 81: 58, 82: 59, 83: 60, 84: 61, 85: 62, 86: 63, 87: 65, 88: 66, 89: 67, 90: 68, 91: 70, 92: 71, 93: 72, 94: 73, 95: 75, 96: 76, 97: 77, 98: 79, 99: 80, 100: 81, 101: 83, 102: 84, 103: 86, 104: 87, 105: 89, 106: 90, 107: 92, 108: 93, 109: 95, 110: 96, 111: 98, 112: 100, 113: 101, 114: 103, 115: 105, 116: 106, 117: 108, 118: 110, 119: 112, 120: 113, 121: 115, 122: 117, 123: 119, 124: 121, 125: 123, 126: 125, 127: 127}
curve2 = {0: 1, 1: 1, 2: 1, 3: 1, 4: 2, 5: 2, 6: 2, 7: 2, 8: 2, 9: 3, 10: 3, 11: 3, 12: 3, 13: 4, 14: 4, 15: 4, 16: 4, 17: 5, 18: 5, 19: 5, 20: 5, 21: 6, 22: 6, 23: 6, 24: 7, 25: 7, 26: 7, 27: 8, 28: 8, 29: 8, 30: 8, 31: 9, 32: 9, 33: 9, 34: 10, 35: 10, 36: 10, 37: 11, 38: 11, 39: 12, 40: 12, 41: 12, 42: 13, 43: 13, 44: 13, 45: 14, 46: 14, 47: 15, 48: 15, 49: 15, 50: 16, 51: 16, 52: 17, 53: 17, 54: 18, 55: 18, 56: 19, 57: 19, 58: 20, 59: 20, 60: 21, 61: 21, 62: 22, 63: 22, 64: 23, 65: 23, 66: 24, 67: 24, 68: 25, 69: 26, 70: 26, 71: 27, 72: 28, 73: 28, 74: 29, 75: 30, 76: 30, 77: 31, 78: 32, 79: 33, 80: 34, 81: 35, 82: 36, 83: 37, 84: 38, 85: 39, 86: 40, 87: 41, 88: 42, 89: 43, 90: 44, 91: 45, 92: 47, 93: 48, 94: 49, 95: 51, 96: 52, 97: 53, 98: 55, 99: 56, 100: 58, 101: 59, 102: 61, 103: 63, 104: 64, 105: 66, 106: 68, 107: 70, 108: 71, 109: 73, 110: 75, 111: 78, 112: 80, 113: 82, 114: 84, 115: 86, 116: 89, 117: 92, 118: 94, 119: 96, 120: 100, 121: 102, 122: 106, 123: 110, 124: 112, 125: 116, 126: 120, 127: 125}
curve3 = {0: 1, 1: 1, 2: 1, 3: 1, 4: 1, 5: 1, 6: 1, 7: 1, 8: 1, 9: 1, 10: 1, 11: 1, 12: 1, 13: 1, 14: 1, 15: 1, 16: 1, 17: 1, 18: 1, 19: 1, 20: 1, 21: 2, 22: 2, 23: 2, 24: 2, 25: 2, 26: 2, 27: 2, 28: 2, 29: 2, 30: 2, 31: 3, 32: 3, 33: 3, 34: 3, 35: 3, 36: 3, 37: 3, 38: 4, 39: 4, 40: 4, 41: 4, 42: 4, 43: 4, 44: 5, 45: 5, 46: 5, 47: 5, 48: 5, 49: 6, 50: 6, 51: 6, 52: 6, 53: 7, 54: 7, 55: 7, 56: 7, 57: 8, 58: 8, 59: 8, 60: 8, 61: 9, 62: 9, 63: 9, 64: 10, 65: 10, 66: 10, 67: 11, 68: 11, 69: 11, 70: 12, 71: 12, 72: 12, 73: 13, 74: 13, 75: 14, 76: 14, 77: 15, 78: 15, 79: 16, 80: 16, 81: 17, 82: 17, 83: 18, 84: 19, 85: 19, 86: 20, 87: 21, 88: 21, 89: 22, 90: 23, 91: 24, 92: 25, 93: 26, 94: 27, 95: 28, 96: 29, 97: 30, 98: 32, 99: 33, 100: 34, 101: 35, 102: 37, 103: 38, 104: 40, 105: 41, 106: 43, 107: 45, 108: 46, 109: 48, 110: 50, 111: 52, 112: 54, 113: 56, 114: 59, 115: 61, 116: 64, 117: 66, 118: 69, 119: 72, 120: 76, 121: 79, 122: 83, 123: 87, 124: 92, 125: 99, 126: 104, 127: 118}
joystick_curve_list = [curve_linear, curve1, curve2, curve3]
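# The curves above are plain 0-127 -> 0-127 lookup tables with progressively
# stronger expo; curve_linear is the identity. A hedged sketch of how a raw
# axis reading might be shaped (the active table is exposed to the rest of
# the app via get_joystick_curve() further below):
#
#   def apply_joystick_curve(raw_value, curve):
#       return curve[max(0, min(127, raw_value))]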
class usb4vc_menu(object):
def cap_index(self, index, list_size):
if index >= list_size:
return 0
return index
def __init__(self, pboard, conf_dict):
super(usb4vc_menu, self).__init__()
self.current_level = 0
self.current_page = 0
self.level_size = 6
self.page_size = [7, 6, 4, 1, 1, 5]
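        # (the level 3 and 4 sizes above are placeholders; they are resized
        # at runtime to fit the scanned / paired Bluetooth device lists)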
self.kb_protocol_list = list(pboard['protocol_list_keyboard'])
self.mouse_protocol_list = list(pboard['protocol_list_mouse'])
self.gamepad_protocol_list = list(pboard['protocol_list_gamepad'])
self.pb_info = dict(pboard)
self.current_keyboard_protocol_index = self.cap_index(conf_dict.get('keyboard_protocol_index', 0), len(self.kb_protocol_list))
self.current_mouse_protocol_index = self.cap_index(conf_dict.get("mouse_protocol_index", 0), len(self.mouse_protocol_list))
self.current_mouse_sensitivity_offset_index = self.cap_index(conf_dict.get("mouse_sensitivity_index", 0), len(mouse_sensitivity_list))
self.current_gamepad_protocol_index = self.cap_index(conf_dict.get("gamepad_protocol_index", 0), len(self.gamepad_protocol_list))
self.current_keyboard_protocol = self.kb_protocol_list[self.current_keyboard_protocol_index]
self.current_mouse_protocol = self.mouse_protocol_list[self.current_mouse_protocol_index]
self.current_gamepad_protocol = self.gamepad_protocol_list[self.current_gamepad_protocol_index]
self.current_joystick_curve_index = self.cap_index(conf_dict.get("joystick_curve_index", 0), len(joystick_curve_list))
self.last_spi_message = []
self.bluetooth_device_list = None
self.error_message = ''
self.pairing_result = ''
self.bt_scan_timeout_sec = 10
self.paired_devices_list = []
self.send_protocol_set_spi_msg()
def switch_page(self, amount):
self.current_page = (self.current_page + amount) % self.page_size[self.current_level]
def goto_page(self, new_page):
if new_page < self.page_size[self.current_level]:
self.current_page = new_page
def goto_level(self, new_level):
if new_level < self.level_size:
self.current_level = new_level
self.current_page = 0
def draw_joystick_curve(self):
this_curve = joystick_curve_list[self.current_joystick_curve_index % len(joystick_curve_list)]
with canvas(usb4vc_oled.oled_device) as draw:
draw.text((0, 0), "Joystick", font=usb4vc_oled.font_medium, fill="white")
draw.text((0, 15), "Curve", font=usb4vc_oled.font_medium, fill="white")
            draw.line((curve_vertical_axis_x_pos, 0, curve_vertical_axis_x_pos, 31), fill="white")
            draw.line((curve_vertical_axis_x_pos, 31, curve_vertical_axis_x_pos+curve_horizontal_axis_width, 31), fill="white")
            for xxx in range(curve_horizontal_axis_width):
                dict_key = xxx*4
                this_point_x = xxx + curve_vertical_axis_x_pos
this_point_y = usb4vc_oled.OLED_HEIGHT - this_curve[dict_key]//4 - 1
draw.line((this_point_x,this_point_y,this_point_x,this_point_y), fill="white")
def display_page(self, level, page):
if level == 0:
if page == 0:
with canvas(usb4vc_oled.oled_device) as draw:
mouse_count, kb_count, gp_count = usb4vc_usb_scan.get_device_count()
draw.text((0, 0), f"KBD {kb_count} {self.current_keyboard_protocol['display_name']}", font=usb4vc_oled.font_regular, fill="white")
draw.text((0, 10), f"MOS {mouse_count} {self.current_mouse_protocol['display_name']}", font=usb4vc_oled.font_regular, fill="white")
draw.text((0, 20), f"GPD {gp_count} {self.current_gamepad_protocol['display_name']}", font=usb4vc_oled.font_regular, fill="white")
if page == 1:
with canvas(usb4vc_oled.oled_device) as draw:
if 'Unknown' in self.pb_info['full_name']:
draw.text((0, 0), f"{self.pb_info['full_name']} PID {this_pboard_id}", font=usb4vc_oled.font_regular, fill="white")
else:
draw.text((0, 0), f"{self.pb_info['full_name']}", font=usb4vc_oled.font_regular, fill="white")
draw.text((0, 10), f"PB {self.pb_info['fw_ver'][0]}.{self.pb_info['fw_ver'][1]}.{self.pb_info['fw_ver'][2]} RPi {usb4vc_shared.RPI_APP_VERSION_TUPLE[0]}.{usb4vc_shared.RPI_APP_VERSION_TUPLE[1]}.{usb4vc_shared.RPI_APP_VERSION_TUPLE[2]}", font=usb4vc_oled.font_regular, fill="white")
draw.text((0, 20), f"IP: {get_ip_address()}", font=usb4vc_oled.font_regular, fill="white")
if page == 2:
with canvas(usb4vc_oled.oled_device) as draw:
usb4vc_oled.oled_print_centered("Load Custom", usb4vc_oled.font_medium, 0, draw)
usb4vc_oled.oled_print_centered("Config from USB", usb4vc_oled.font_medium, 16, draw)
if page == 3:
with canvas(usb4vc_oled.oled_device) as draw:
usb4vc_oled.oled_print_centered("Internet Update", usb4vc_oled.font_medium, 10, draw)
if page == 4:
with canvas(usb4vc_oled.oled_device) as draw:
usb4vc_oled.oled_print_centered("Show Event Codes", usb4vc_oled.font_medium, 0, draw)
usb4vc_oled.oled_print_centered("(experimental)", usb4vc_oled.font_regular, 20, draw)
if page == 5:
with canvas(usb4vc_oled.oled_device) as draw:
usb4vc_oled.oled_print_centered("Remove BT Device", usb4vc_oled.font_medium, 10, draw)
if page == 6:
with canvas(usb4vc_oled.oled_device) as draw:
usb4vc_oled.oled_print_centered("Pair Bluetooth", usb4vc_oled.font_medium, 10, draw)
if level == 1:
if page == 0:
with canvas(usb4vc_oled.oled_device) as draw:
usb4vc_oled.oled_print_centered("Keyboard Protocol", usb4vc_oled.font_medium, 0, draw)
usb4vc_oled.oled_print_centered(self.kb_protocol_list[self.current_keyboard_protocol_index]['display_name'], usb4vc_oled.font_medium, 15, draw)
if page == 1:
with canvas(usb4vc_oled.oled_device) as draw:
usb4vc_oled.oled_print_centered("Mouse Protocol", usb4vc_oled.font_medium, 0, draw)
usb4vc_oled.oled_print_centered(self.mouse_protocol_list[self.current_mouse_protocol_index]['display_name'], usb4vc_oled.font_medium, 15, draw)
if page == 2:
with canvas(usb4vc_oled.oled_device) as draw:
usb4vc_oled.oled_print_centered("Gamepad Protocol", usb4vc_oled.font_medium, 0, draw)
usb4vc_oled.oled_print_centered(self.gamepad_protocol_list[self.current_gamepad_protocol_index]['display_name'], usb4vc_oled.font_medium, 15, draw)
if page == 3:
with canvas(usb4vc_oled.oled_device) as draw:
usb4vc_oled.oled_print_centered("Mouse Sensitivity", usb4vc_oled.font_medium, 0, draw)
usb4vc_oled.oled_print_centered(f"{mouse_sensitivity_list[self.current_mouse_sensitivity_offset_index]}", usb4vc_oled.font_medium, 15, draw)
if page == 4:
self.draw_joystick_curve()
if page == 5:
with canvas(usb4vc_oled.oled_device) as draw:
usb4vc_oled.oled_print_centered("Save & Quit", usb4vc_oled.font_medium, 10, draw)
if level == 2:
if page == 0:
with canvas(usb4vc_oled.oled_device) as draw:
usb4vc_oled.oled_print_centered("Put your device in", usb4vc_oled.font_regular, 0, draw)
usb4vc_oled.oled_print_centered("pairing mode now.", usb4vc_oled.font_regular, 10, draw)
usb4vc_oled.oled_print_centered("Press enter to start", usb4vc_oled.font_regular, 20, draw)
if page == 1:
with canvas(usb4vc_oled.oled_device) as draw:
usb4vc_oled.oled_print_centered("Scanning...", usb4vc_oled.font_medium, 0, draw)
usb4vc_oled.oled_print_centered("Please wait", usb4vc_oled.font_medium, 15, draw)
result, self.error_message = bt_setup()
if result != 0:
self.goto_page(3)
                    self.display_current_page()
return
paired_devices_set = get_paired_devices()
                self.bluetooth_device_list, self.error_message = scan_bt_devices(self.bt_scan_timeout_sec)
                if self.bluetooth_device_list is None:
                    # scan_bt_devices() failed outright, show the error page
                    self.goto_page(3)
                    self.display_current_page()
                    return
                self.bluetooth_device_list = list(set(self.bluetooth_device_list) - paired_devices_set)
                if len(self.bluetooth_device_list) == 0:
                    self.error_message = "Nothing was found"
                    self.goto_page(3)
                    self.display_current_page()
                    return
print("BT LIST:", self.bluetooth_device_list)
# set up level 3 menu structure
self.page_size[3] = len(self.bluetooth_device_list) + 1
self.goto_level(3)
                self.display_current_page()
if page == 2:
with canvas(usb4vc_oled.oled_device) as draw:
usb4vc_oled.oled_print_centered("Pairing result:", usb4vc_oled.font_medium, 0, draw)
usb4vc_oled.oled_print_centered(self.pairing_result, usb4vc_oled.font_regular, 20, draw)
if page == 3:
with canvas(usb4vc_oled.oled_device) as draw:
usb4vc_oled.oled_print_centered("Bluetooth Error!", usb4vc_oled.font_medium, 0, draw)
usb4vc_oled.oled_print_centered(self.error_message, usb4vc_oled.font_regular, 20, draw)
if level == 3:
if page == self.page_size[3] - 1:
with canvas(usb4vc_oled.oled_device) as draw:
usb4vc_oled.oled_print_centered("Exit", usb4vc_oled.font_medium, 10, draw)
else:
with canvas(usb4vc_oled.oled_device) as draw:
usb4vc_oled.oled_print_centered(f"Found {len(self.bluetooth_device_list)}. Pair this?", usb4vc_oled.font_regular, 0, draw)
usb4vc_oled.oled_print_centered(f"{self.bluetooth_device_list[page][1]}", usb4vc_oled.font_regular, 10, draw)
usb4vc_oled.oled_print_centered(f"{self.bluetooth_device_list[page][0]}", usb4vc_oled.font_regular, 20, draw)
if level == 4:
if page == self.page_size[4] - 1:
with canvas(usb4vc_oled.oled_device) as draw:
usb4vc_oled.oled_print_centered("Exit", usb4vc_oled.font_medium, 10, draw)
else:
with canvas(usb4vc_oled.oled_device) as draw:
usb4vc_oled.oled_print_centered(f"Remove this?", usb4vc_oled.font_regular, 0, draw)
usb4vc_oled.oled_print_centered(f"{self.paired_devices_list[page][1]}", usb4vc_oled.font_regular, 10, draw)
usb4vc_oled.oled_print_centered(f"{self.paired_devices_list[page][0]}", usb4vc_oled.font_regular, 20, draw)
if level == 5:
if page == 0:
with canvas(usb4vc_oled.oled_device) as draw:
usb4vc_oled.oled_print_centered("Power Down", usb4vc_oled.font_medium, 10, draw)
if page == 1:
with canvas(usb4vc_oled.oled_device) as draw:
usb4vc_oled.oled_print_centered("Relaunch", usb4vc_oled.font_medium, 10, draw)
if page == 2:
with canvas(usb4vc_oled.oled_device) as draw:
usb4vc_oled.oled_print_centered("Reboot", usb4vc_oled.font_medium, 10, draw)
if page == 3:
with canvas(usb4vc_oled.oled_device) as draw:
usb4vc_oled.oled_print_centered("Exit to Linux", usb4vc_oled.font_medium, 10, draw)
if page == 4:
with canvas(usb4vc_oled.oled_device) as draw:
usb4vc_oled.oled_print_centered("Cancel", usb4vc_oled.font_medium, 10, draw)
def send_protocol_set_spi_msg(self):
        status_dict = {}
        # enable exactly one protocol per device class; every other known
        # PID ends up marked disabled
        for protocol_list, current_index in (
                (self.kb_protocol_list, self.current_keyboard_protocol_index),
                (self.mouse_protocol_list, self.current_mouse_protocol_index),
                (self.gamepad_protocol_list, self.current_gamepad_protocol_index)):
            for index, item in enumerate(protocol_list):
                pid = item['pid'] & 0x7f
                if status_dict.get(pid) == 1:
                    continue
                status_dict[pid] = 1 if index == current_index else 0
protocol_bytes = []
for key in status_dict:
if key == PROTOCOL_OFF['pid']:
continue
if status_dict[key]:
protocol_bytes.append(key | 0x80)
else:
protocol_bytes.append(key)
this_msg = list(set_protocl_spi_msg_template)
this_msg[3:3+len(protocol_bytes)] = protocol_bytes
self.current_keyboard_protocol = self.kb_protocol_list[self.current_keyboard_protocol_index]
self.current_mouse_protocol = self.mouse_protocol_list[self.current_mouse_protocol_index]
self.current_gamepad_protocol = self.gamepad_protocol_list[self.current_gamepad_protocol_index]
if this_msg == self.last_spi_message:
print("SPI: no need to send")
return
print("set_protocol:", [hex(x) for x in this_msg])
usb4vc_usb_scan.set_protocol(this_msg)
print('new status:', [hex(x) for x in usb4vc_usb_scan.get_pboard_info()])
self.last_spi_message = list(this_msg)
def action(self, level, page):
if level == 0:
if page == 2:
usb_present, config_path = check_usb_drive()
if usb_present is False:
with canvas(usb4vc_oled.oled_device) as draw:
usb4vc_oled.oled_print_centered("Error:", usb4vc_oled.font_medium, 0, draw)
usb4vc_oled.oled_print_centered(str(config_path), usb4vc_oled.font_regular, 16, draw)
time.sleep(3)
self.goto_level(0)
else:
with canvas(usb4vc_oled.oled_device) as draw:
usb4vc_oled.oled_print_centered("Copying", usb4vc_oled.font_medium, 0, draw)
usb4vc_oled.oled_print_centered("Debug Log...", usb4vc_oled.font_medium, 16, draw)
copy_debug_log()
time.sleep(2)
with canvas(usb4vc_oled.oled_device) as draw:
usb4vc_oled.oled_print_centered("Copying custom", usb4vc_oled.font_medium, 0, draw)
usb4vc_oled.oled_print_centered("mapping...", usb4vc_oled.font_medium, 16, draw)
time.sleep(2)
update_from_usb(config_path)
with canvas(usb4vc_oled.oled_device) as draw:
usb4vc_oled.oled_print_centered("Update complete!", usb4vc_oled.font_medium, 0, draw)
usb4vc_oled.oled_print_centered("Relaunching...", usb4vc_oled.font_medium, 16, draw)
time.sleep(3)
usb4vc_oled.oled_device.clear()
os._exit(0)
self.goto_level(0)
elif page == 3:
with canvas(usb4vc_oled.oled_device) as draw:
usb4vc_oled.oled_print_centered("Updating...", usb4vc_oled.font_medium, 10, draw)
                fw_download_result = usb4vc_check_update.download_latest_firmware(this_pboard_id)
                if fw_download_result != 0:
                    with canvas(usb4vc_oled.oled_device) as draw:
                        usb4vc_oled.oled_print_centered("Unable to download", usb4vc_oled.font_medium, 0, draw)
                        usb4vc_oled.oled_print_centered(f"firmware: {fw_download_result}", usb4vc_oled.font_medium, 16, draw)
elif update_pboard_firmware(this_pboard_id):
with canvas(usb4vc_oled.oled_device) as draw:
usb4vc_oled.oled_print_centered("Firmware updated!", usb4vc_oled.font_medium, 10, draw)
else:
with canvas(usb4vc_oled.oled_device) as draw:
usb4vc_oled.oled_print_centered("FW update ERR or", usb4vc_oled.font_medium, 0, draw)
usb4vc_oled.oled_print_centered("already newest", usb4vc_oled.font_medium, 15, draw)
time.sleep(3)
with canvas(usb4vc_oled.oled_device) as draw:
usb4vc_oled.oled_print_centered("Updating code...", usb4vc_oled.font_medium, 10, draw)
time.sleep(1)
update_result = usb4vc_check_update.update(temp_dir_path)
if update_result[0] == 0:
with canvas(usb4vc_oled.oled_device) as draw:
usb4vc_oled.oled_print_centered("Update complete!", usb4vc_oled.font_medium, 0, draw)
usb4vc_oled.oled_print_centered("Relaunching...", usb4vc_oled.font_medium, 16, draw)
else:
with canvas(usb4vc_oled.oled_device) as draw:
usb4vc_oled.oled_print_centered("Update failed:", usb4vc_oled.font_medium, 0, draw)
usb4vc_oled.oled_print_centered(f"{update_result[-1]} {update_result[0]}", usb4vc_oled.font_regular, 16, draw)
time.sleep(4)
usb4vc_oled.oled_device.clear()
os._exit(0)
elif page == 4:
try:
usb4vc_show_ev.ev_loop([plus_button, minus_button, enter_button])
except Exception as e:
print('exception ev_loop:', e)
self.goto_level(0)
elif page == 5:
self.paired_devices_list = list(get_paired_devices())
self.page_size[4] = len(self.paired_devices_list) + 1
self.goto_level(4)
elif page == 6:
self.goto_level(2)
else:
self.goto_level(1)
if level == 1:
if page == 0:
self.current_keyboard_protocol_index = (self.current_keyboard_protocol_index + 1) % len(self.kb_protocol_list)
if page == 1:
self.current_mouse_protocol_index = (self.current_mouse_protocol_index + 1) % len(self.mouse_protocol_list)
if page == 2:
self.current_gamepad_protocol_index = (self.current_gamepad_protocol_index + 1) % len(self.gamepad_protocol_list)
if page == 3:
self.current_mouse_sensitivity_offset_index = (self.current_mouse_sensitivity_offset_index + 1) % len(mouse_sensitivity_list)
if page == 4:
self.current_joystick_curve_index = (self.current_joystick_curve_index + 1) % len(joystick_curve_list)
self.draw_joystick_curve()
if page == 5:
configuration_dict[this_pboard_id]["keyboard_protocol_index"] = self.current_keyboard_protocol_index
configuration_dict[this_pboard_id]["mouse_protocol_index"] = self.current_mouse_protocol_index
configuration_dict[this_pboard_id]["mouse_sensitivity_index"] = self.current_mouse_sensitivity_offset_index
configuration_dict[this_pboard_id]["gamepad_protocol_index"] = self.current_gamepad_protocol_index
configuration_dict[this_pboard_id]["joystick_curve_index"] = self.current_joystick_curve_index
save_config()
self.send_protocol_set_spi_msg()
self.goto_level(0)
if level == 2:
if page == 0:
self.switch_page(1)
if page == 2:
self.goto_level(0)
if page == 3:
self.goto_level(0)
if level == 3:
if page == self.page_size[3] - 1:
self.goto_level(0)
else:
with canvas(usb4vc_oled.oled_device) as draw:
usb4vc_oled.oled_print_centered("Pairing...", usb4vc_oled.font_medium, 0, draw)
usb4vc_oled.oled_print_centered("Please wait", usb4vc_oled.font_medium, 15, draw)
print("pairing", self.bluetooth_device_list[page])
bt_mac_addr = self.bluetooth_device_list[page][0]
is_successful, result_message = pair_device(bt_mac_addr)
self.pairing_result = result_message.split('.')[-1].strip()[-22:]
if is_successful:
os.system(f'timeout {self.bt_scan_timeout_sec} bluetoothctl --agent NoInputNoOutput trust {bt_mac_addr}')
os.system(f'timeout {self.bt_scan_timeout_sec} bluetoothctl --agent NoInputNoOutput connect {bt_mac_addr}')
self.goto_level(2)
self.goto_page(2)
if level == 4:
if page == self.page_size[4] - 1:
self.goto_level(0)
else:
os.system(f'timeout 5 bluetoothctl --agent NoInputNoOutput untrust {self.paired_devices_list[page][0]}')
os.system(f'timeout 5 bluetoothctl --agent NoInputNoOutput remove {self.paired_devices_list[page][0]}')
self.goto_level(0)
if level == 5:
if page == 0:
with canvas(usb4vc_oled.oled_device) as draw:
usb4vc_oled.oled_print_centered("Wait Until Green", usb4vc_oled.font_medium, 0, draw)
usb4vc_oled.oled_print_centered("LED Stops Blinking", usb4vc_oled.font_medium, 15, draw)
time.sleep(2)
os.system("sudo halt")
while 1:
time.sleep(1)
if page == 1:
usb4vc_oled.oled_device.clear()
os._exit(0)
if page == 2:
with canvas(usb4vc_oled.oled_device) as draw:
usb4vc_oled.oled_print_centered("Rebooting...", usb4vc_oled.font_medium, 0, draw)
usb4vc_oled.oled_print_centered("Unplug if stuck >10s", usb4vc_oled.font_regular, 16, draw)
os.system("sudo reboot")
while 1:
time.sleep(1)
if page == 3:
usb4vc_oled.oled_device.clear()
os._exit(169)
if page == 4:
self.goto_level(0)
        self.display_current_page()
def action_current_page(self):
self.action(self.current_level, self.current_page)
    def display_current_page(self):
self.display_page(self.current_level, self.current_page)
def update_usb_status(self):
if self.current_level == 0 and self.current_page == 0:
self.display_page(0, 0)
def update_board_status(self):
if self.current_level == 0 and self.current_page == 1:
self.display_page(0, 1)
pboard_database = {
PBOARD_ID_UNKNOWN:{'author':'Unknown', 'fw_ver':(0,0,0), 'full_name':'Unknown', 'hw_rev':0, 'protocol_list_keyboard':raw_keyboard_protocols, 'protocol_list_mouse':raw_mouse_protocols, 'protocol_list_gamepad':raw_gamepad_protocols},
PBOARD_ID_IBMPC:{'author':'dekuNukem', 'fw_ver':(0,0,0), 'full_name':'IBM PC Compatible', 'hw_rev':0, 'protocol_list_keyboard':ibmpc_keyboard_protocols, 'protocol_list_mouse':ibmpc_mouse_protocols, 'protocol_list_gamepad':ibmpc_gamepad_protocols},
PBOARD_ID_ADB:{'author':'dekuNukem', 'fw_ver':(0,0,0), 'full_name':'Apple Desktop Bus', 'hw_rev':0, 'protocol_list_keyboard':adb_keyboard_protocols, 'protocol_list_mouse':adb_mouse_protocols, 'protocol_list_gamepad':adb_gamepad_protocols},
}
def get_pboard_dict(pid):
if pid not in pboard_database:
pid = 0
return pboard_database[pid]
def get_mouse_sensitivity():
return mouse_sensitivity_list[configuration_dict[this_pboard_id]["mouse_sensitivity_index"]]
def ui_init():
global pboard_info_spi_msg
global this_pboard_id
load_config()
pboard_info_spi_msg = usb4vc_usb_scan.get_pboard_info()
print("PB INFO:", pboard_info_spi_msg)
this_pboard_id = pboard_info_spi_msg[3]
if this_pboard_id in pboard_database:
# load custom profile mapping into protocol list
for item in custom_profile_list:
this_mapping_bid = usb4vc_shared.board_id_lookup.get(item['protocol_board'], 0)
if this_mapping_bid == this_pboard_id and item['device_type'] in pboard_database[this_pboard_id]:
this_mapping_pid = usb4vc_shared.protocol_id_lookup.get(item['protocol_name'])
item['pid'] = this_mapping_pid
pboard_database[this_pboard_id][item['device_type']].append(item)
pboard_database[this_pboard_id]['hw_rev'] = pboard_info_spi_msg[4]
pboard_database[this_pboard_id]['fw_ver'] = (pboard_info_spi_msg[5], pboard_info_spi_msg[6], pboard_info_spi_msg[7])
if 'rpi_app_ver' not in configuration_dict:
configuration_dict['rpi_app_ver'] = usb4vc_shared.RPI_APP_VERSION_TUPLE
if this_pboard_id not in configuration_dict:
configuration_dict[this_pboard_id] = {"keyboard_protocol_index":1, "mouse_protocol_index":1, "mouse_sensitivity_index":0, "gamepad_protocol_index":1}
plus_button = my_button(PLUS_BUTTON_PIN)
minus_button = my_button(MINUS_BUTTON_PIN)
enter_button = my_button(ENTER_BUTTON_PIN)
shutdown_button = my_button(SHUTDOWN_BUTTON_PIN)
class oled_sleep_control(object):
def __init__(self):
super(oled_sleep_control, self).__init__()
self.is_sleeping = False
self.last_input_event = time.time()
self.ui_loop_count = 0
def sleep(self):
if self.is_sleeping is False:
print("sleeping!")
usb4vc_oled.oled_device.clear()
self.is_sleeping = True
# GPIO.output(SLEEP_LED_PIN, GPIO.HIGH)
def wakeup(self):
if self.is_sleeping:
print("waking up!")
            my_menu.display_current_page()
self.last_input_event = time.time()
self.is_sleeping = False
# GPIO.output(SLEEP_LED_PIN, GPIO.LOW)
def check_sleep(self):
# time.time() might jump ahead a lot when RPi gets its time from network
# this ensures OLED won't go to sleep too early
if self.ui_loop_count <= 1500:
return
if time.time() - self.last_input_event > 180:
self.sleep()
else:
self.wakeup()
def kick(self):
self.last_input_event = time.time()
my_oled = oled_sleep_control()
my_menu = None
def ui_worker():
global my_menu
print(configuration_dict)
print("ui_worker started")
my_menu = usb4vc_menu(get_pboard_dict(this_pboard_id), configuration_dict[this_pboard_id])
my_menu.display_page(0, 0)
for x in range(2):
GPIO.output(SLEEP_LED_PIN, GPIO.HIGH)
time.sleep(0.2)
GPIO.output(SLEEP_LED_PIN, GPIO.LOW)
time.sleep(0.2)
while 1:
time.sleep(0.1)
my_oled.ui_loop_count += 1
if my_oled.is_sleeping is False and my_oled.ui_loop_count % 5 == 0:
my_menu.update_usb_status()
my_menu.update_board_status()
if plus_button.is_pressed():
my_oled.kick()
if my_oled.is_sleeping:
my_oled.wakeup()
elif my_menu.current_level != 2:
my_menu.switch_page(1)
                my_menu.display_current_page()
if minus_button.is_pressed():
my_oled.kick()
if my_oled.is_sleeping:
my_oled.wakeup()
elif my_menu.current_level != 2:
my_menu.switch_page(-1)
                my_menu.display_current_page()
if enter_button.is_pressed():
my_oled.kick()
if my_oled.is_sleeping:
my_oled.wakeup()
else:
my_menu.action_current_page()
if shutdown_button.is_pressed():
my_oled.kick()
if my_oled.is_sleeping:
my_oled.wakeup()
else:
my_menu.goto_level(5)
                my_menu.display_current_page()
my_oled.check_sleep()
def get_gamepad_protocol():
return my_menu.current_gamepad_protocol
def get_joystick_curve():
return joystick_curve_list[my_menu.current_joystick_curve_index]
def oled_print_model_changed():
with canvas(usb4vc_oled.oled_device) as draw:
usb4vc_oled.oled_print_centered("RPi Model Changed!", usb4vc_oled.font_regular, 0, draw)
usb4vc_oled.oled_print_centered("Recompiling BT Driver", usb4vc_oled.font_regular, 10, draw)
usb4vc_oled.oled_print_centered("Might take a while...", usb4vc_oled.font_regular, 20, draw)
def oled_print_oneline(msg):
with canvas(usb4vc_oled.oled_device) as draw:
usb4vc_oled.oled_print_centered(msg, usb4vc_oled.font_medium, 10, draw)
def oled_print_reboot():
with canvas(usb4vc_oled.oled_device) as draw:
usb4vc_oled.oled_print_centered("Done! Rebooting..", usb4vc_oled.font_medium, 0, draw)
usb4vc_oled.oled_print_centered("Unplug if stuck >10s", usb4vc_oled.font_regular, 16, draw)
ui_thread = threading.Thread(target=ui_worker, daemon=True)
| [((26, 19, 26, 63), 'os.path.join', 'os.path.join', ({(26, 32, 26, 47): 'config_dir_path', (26, 49, 26, 62): '"""config.json"""'}, {}), "(config_dir_path, 'config.json')", False, 'import os\n'), ((28, 0, 28, 29), 'usb4vc_shared.ensure_dir', 'ensure_dir', ({(28, 11, 28, 28): 'this_app_dir_path'}, {}), '(this_app_dir_path)', False, 'from usb4vc_shared import ensure_dir\n'), ((29, 0, 29, 27), 'usb4vc_shared.ensure_dir', 'ensure_dir', ({(29, 11, 29, 26): 'config_dir_path'}, {}), '(config_dir_path)', False, 'from usb4vc_shared import ensure_dir\n'), ((30, 0, 30, 29), 'usb4vc_shared.ensure_dir', 'ensure_dir', ({(30, 11, 30, 28): 'firmware_dir_path'}, {}), '(firmware_dir_path)', False, 'from usb4vc_shared import ensure_dir\n'), ((31, 0, 31, 25), 'usb4vc_shared.ensure_dir', 'ensure_dir', ({(31, 11, 31, 24): 'temp_dir_path'}, {}), '(temp_dir_path)', False, 'from usb4vc_shared import ensure_dir\n'), ((42, 0, 42, 22), 'RPi.GPIO.setmode', 'GPIO.setmode', ({(42, 13, 42, 21): 'GPIO.BCM'}, {}), '(GPIO.BCM)', True, 'import RPi.GPIO as GPIO\n'), ((43, 0, 43, 37), 'RPi.GPIO.setup', 'GPIO.setup', ({(43, 11, 43, 27): 'PBOARD_RESET_PIN', (43, 29, 43, 36): 'GPIO.IN'}, {}), '(PBOARD_RESET_PIN, GPIO.IN)', True, 'import RPi.GPIO as GPIO\n'), ((44, 0, 44, 37), 'RPi.GPIO.setup', 'GPIO.setup', ({(44, 11, 44, 27): 'PBOARD_BOOT0_PIN', (44, 29, 44, 36): 'GPIO.IN'}, {}), '(PBOARD_BOOT0_PIN, GPIO.IN)', True, 'import RPi.GPIO as GPIO\n'), ((45, 0, 45, 35), 'RPi.GPIO.setup', 'GPIO.setup', ({(45, 11, 45, 24): 'SLEEP_LED_PIN', (45, 26, 45, 34): 'GPIO.OUT'}, {}), '(SLEEP_LED_PIN, GPIO.OUT)', True, 'import RPi.GPIO as GPIO\n'), ((46, 0, 46, 36), 'RPi.GPIO.output', 'GPIO.output', ({(46, 12, 46, 25): 'SLEEP_LED_PIN', (46, 27, 46, 35): 'GPIO.LOW'}, {}), '(SLEEP_LED_PIN, GPIO.LOW)', True, 'import RPi.GPIO as GPIO\n'), ((976, 12, 976, 59), 'threading.Thread', 'threading.Thread', (), '', False, 'import threading\n'), ((218, 4, 218, 41), 'RPi.GPIO.setup', 'GPIO.setup', ({(218, 15, 218, 31): 'PBOARD_BOOT0_PIN', (218, 33, 218, 40): 'GPIO.IN'}, {}), '(PBOARD_BOOT0_PIN, GPIO.IN)', True, 'import RPi.GPIO as GPIO\n'), ((219, 4, 219, 42), 'RPi.GPIO.setup', 'GPIO.setup', ({(219, 15, 219, 31): 'PBOARD_RESET_PIN', (219, 33, 219, 41): 'GPIO.OUT'}, {}), '(PBOARD_RESET_PIN, GPIO.OUT)', True, 'import RPi.GPIO as GPIO\n'), ((220, 4, 220, 43), 'RPi.GPIO.output', 'GPIO.output', ({(220, 16, 220, 32): 'PBOARD_RESET_PIN', (220, 34, 220, 42): 'GPIO.LOW'}, {}), '(PBOARD_RESET_PIN, GPIO.LOW)', True, 'import RPi.GPIO as GPIO\n'), ((221, 4, 221, 20), 'time.sleep', 'time.sleep', ({(221, 15, 221, 19): '(0.05)'}, {}), '(0.05)', False, 'import time\n'), ((222, 4, 222, 41), 'RPi.GPIO.setup', 'GPIO.setup', ({(222, 15, 222, 31): 'PBOARD_RESET_PIN', (222, 33, 222, 40): 'GPIO.IN'}, {}), '(PBOARD_RESET_PIN, GPIO.IN)', True, 'import RPi.GPIO as GPIO\n'), ((223, 4, 223, 20), 'time.sleep', 'time.sleep', ({(223, 15, 223, 19): '(0.05)'}, {}), '(0.05)', False, 'import time\n'), ((228, 4, 228, 42), 'RPi.GPIO.setup', 'GPIO.setup', ({(228, 15, 228, 31): 'PBOARD_RESET_PIN', (228, 33, 228, 41): 'GPIO.OUT'}, {}), '(PBOARD_RESET_PIN, GPIO.OUT)', True, 'import RPi.GPIO as GPIO\n'), ((229, 4, 229, 43), 'RPi.GPIO.output', 'GPIO.output', ({(229, 16, 229, 32): 'PBOARD_RESET_PIN', (229, 34, 229, 42): 'GPIO.LOW'}, {}), '(PBOARD_RESET_PIN, GPIO.LOW)', True, 'import RPi.GPIO as GPIO\n'), ((230, 4, 230, 20), 'time.sleep', 'time.sleep', ({(230, 15, 230, 19): '(0.05)'}, {}), '(0.05)', False, 'import time\n'), ((232, 4, 232, 42), 'RPi.GPIO.setup', 'GPIO.setup', ({(232, 15, 232, 31): 
'PBOARD_BOOT0_PIN', (232, 33, 232, 41): 'GPIO.OUT'}, {}), '(PBOARD_BOOT0_PIN, GPIO.OUT)', True, 'import RPi.GPIO as GPIO\n'), ((233, 4, 233, 44), 'RPi.GPIO.output', 'GPIO.output', ({(233, 16, 233, 32): 'PBOARD_BOOT0_PIN', (233, 34, 233, 43): 'GPIO.HIGH'}, {}), '(PBOARD_BOOT0_PIN, GPIO.HIGH)', True, 'import RPi.GPIO as GPIO\n'), ((234, 4, 234, 20), 'time.sleep', 'time.sleep', ({(234, 15, 234, 19): '(0.05)'}, {}), '(0.05)', False, 'import time\n'), ((236, 4, 236, 41), 'RPi.GPIO.setup', 'GPIO.setup', ({(236, 15, 236, 31): 'PBOARD_RESET_PIN', (236, 33, 236, 40): 'GPIO.IN'}, {}), '(PBOARD_RESET_PIN, GPIO.IN)', True, 'import RPi.GPIO as GPIO\n'), ((237, 4, 237, 19), 'time.sleep', 'time.sleep', ({(237, 15, 237, 18): '(1.5)'}, {}), '(1.5)', False, 'import time\n'), ((241, 4, 241, 41), 'RPi.GPIO.setup', 'GPIO.setup', ({(241, 15, 241, 31): 'PBOARD_BOOT0_PIN', (241, 33, 241, 40): 'GPIO.IN'}, {}), '(PBOARD_BOOT0_PIN, GPIO.IN)', True, 'import RPi.GPIO as GPIO\n'), ((243, 4, 243, 42), 'RPi.GPIO.setup', 'GPIO.setup', ({(243, 15, 243, 31): 'PBOARD_RESET_PIN', (243, 33, 243, 41): 'GPIO.OUT'}, {}), '(PBOARD_RESET_PIN, GPIO.OUT)', True, 'import RPi.GPIO as GPIO\n'), ((244, 4, 244, 43), 'RPi.GPIO.output', 'GPIO.output', ({(244, 16, 244, 32): 'PBOARD_RESET_PIN', (244, 34, 244, 42): 'GPIO.LOW'}, {}), '(PBOARD_RESET_PIN, GPIO.LOW)', True, 'import RPi.GPIO as GPIO\n'), ((245, 4, 245, 20), 'time.sleep', 'time.sleep', ({(245, 15, 245, 19): '(0.05)'}, {}), '(0.05)', False, 'import time\n'), ((247, 4, 247, 41), 'RPi.GPIO.setup', 'GPIO.setup', ({(247, 15, 247, 31): 'PBOARD_RESET_PIN', (247, 33, 247, 40): 'GPIO.IN'}, {}), '(PBOARD_RESET_PIN, GPIO.IN)', True, 'import RPi.GPIO as GPIO\n'), ((248, 4, 248, 19), 'time.sleep', 'time.sleep', ({(248, 15, 248, 18): '(1.5)'}, {}), '(1.5)', False, 'import time\n'), ((326, 17, 326, 60), 'subprocess.getoutput', 'subprocess.getoutput', ({(326, 38, 326, 59): '"""/usr/sbin/rfkill -n"""'}, {}), "('/usr/sbin/rfkill -n')", False, 'import subprocess\n'), ((329, 4, 329, 51), 'os.system', 'os.system', ({(329, 14, 329, 50): '"""/usr/sbin/rfkill unblock bluetooth"""'}, {}), "('/usr/sbin/rfkill unblock bluetooth')", False, 'import os\n'), ((330, 4, 330, 19), 'time.sleep', 'time.sleep', ({(330, 15, 330, 18): '(0.1)'}, {}), '(0.1)', False, 'import time\n'), ((340, 17, 340, 85), 'subprocess.getoutput', 'subprocess.getoutput', ({(340, 38, 340, 84): '"""bluetoothctl --agent NoInputNoOutput devices"""'}, {}), "('bluetoothctl --agent NoInputNoOutput devices')", False, 'import subprocess\n'), ((416, 13, 416, 58), 'subprocess.getoutput', 'subprocess.getoutput', ({(416, 34, 416, 57): '"""timeout 1 hostname -I"""'}, {}), "('timeout 1 hostname -I')", False, 'import subprocess\n'), ((849, 26, 849, 59), 'usb4vc_usb_scan.get_pboard_info', 'usb4vc_usb_scan.get_pboard_info', ({}, {}), '()', False, 'import usb4vc_usb_scan\n'), ((56, 8, 56, 70), 'RPi.GPIO.setup', 'GPIO.setup', (), '', True, 'import RPi.GPIO as GPIO\n'), ((57, 26, 57, 53), 'RPi.GPIO.input', 'GPIO.input', ({(57, 37, 57, 52): 'self.pin_number'}, {}), '(self.pin_number)', True, 'import RPi.GPIO as GPIO\n'), ((61, 24, 61, 51), 'RPi.GPIO.input', 'GPIO.input', ({(61, 35, 61, 50): 'self.pin_number'}, {}), '(self.pin_number)', True, 'import RPi.GPIO as GPIO\n'), ((158, 22, 158, 54), 'os.path.join', 'os.path.join', ({(158, 35, 158, 50): 'config_dir_path', (158, 52, 158, 53): 'x'}, {}), '(config_dir_path, x)', False, 'import os\n'), ((181, 11, 181, 35), 'os.path.isdir', 'os.path.isdir', ({(181, 25, 181, 34): 'this_path'}, {}), '(this_path)', False, 'import 
os\n'), ((193, 26, 193, 59), 'os.path.join', 'os.path.join', ({(193, 39, 193, 48): 'this_path', (193, 50, 193, 58): '"""config"""'}, {}), "(this_path, 'config')", False, 'import os\n'), ((254, 8, 254, 69), 'os.system', 'os.system', ({(254, 18, 254, 68): 'f"""sudo stm32flash -w {fw_path} -a 0x3b /dev/i2c-1"""'}, {}), "(f'sudo stm32flash -w {fw_path} -a 0x3b /dev/i2c-1')", False, 'import os\n'), ((291, 8, 291, 80), 'os.system', 'os.system', ({(291, 18, 291, 79): 'f"""cp -v /home/pi/usb4vc/config/config.json {usb_config_path}"""'}, {}), "(f'cp -v /home/pi/usb4vc/config/config.json {usb_config_path}')", False, 'import os\n'), ((292, 8, 292, 89), 'os.system', 'os.system', ({(292, 18, 292, 88): '"""mv -v /home/pi/usb4vc/config/config.json /home/pi/usb4vc/config.json"""'}, {}), "(\n 'mv -v /home/pi/usb4vc/config/config.json /home/pi/usb4vc/config.json')", False, 'import os\n'), ((293, 8, 293, 53), 'os.system', 'os.system', ({(293, 18, 293, 52): '"""rm -rfv /home/pi/usb4vc/config/*"""'}, {}), "('rm -rfv /home/pi/usb4vc/config/*')", False, 'import os\n'), ((295, 8, 295, 89), 'os.system', 'os.system', ({(295, 18, 295, 88): '"""mv -v /home/pi/usb4vc/config.json /home/pi/usb4vc/config/config.json"""'}, {}), "(\n 'mv -v /home/pi/usb4vc/config.json /home/pi/usb4vc/config/config.json')", False, 'import os\n'), ((331, 16, 331, 73), 'os.system', 'os.system', ({(331, 26, 331, 72): '"""timeout 1 bluetoothctl agent NoInputNoOutput"""'}, {}), "('timeout 1 bluetoothctl agent NoInputNoOutput')", False, 'import os\n'), ((337, 16, 337, 96), 'os.system', 'os.system', ({(337, 26, 337, 95): 'f"""timeout {timeout_sec} bluetoothctl --agent NoInputNoOutput scan on"""'}, {}), "(f'timeout {timeout_sec} bluetoothctl --agent NoInputNoOutput scan on'\n )", False, 'import os\n'), ((356, 9, 357, 51), 'subprocess.Popen', 'Popen', (), '', False, 'from subprocess import Popen, PIPE\n'), ((388, 21, 388, 107), 'subprocess.getoutput', 'subprocess.getoutput', ({(388, 42, 388, 106): 'f"""timeout 5 bluetoothctl --agent NoInputNoOutput paired-devices"""'}, {}), "(\n f'timeout 5 bluetoothctl --agent NoInputNoOutput paired-devices')", False, 'import subprocess\n'), ((663, 8, 663, 46), 'usb4vc_usb_scan.set_protocol', 'usb4vc_usb_scan.set_protocol', ({(663, 37, 663, 45): 'this_msg'}, {}), '(this_msg)', False, 'import usb4vc_usb_scan\n'), ((876, 32, 876, 43), 'time.time', 'time.time', ({}, {}), '()', False, 'import time\n'), ((901, 32, 901, 43), 'time.time', 'time.time', ({}, {}), '()', False, 'import time\n'), ((912, 8, 912, 45), 'RPi.GPIO.output', 'GPIO.output', ({(912, 20, 912, 33): 'SLEEP_LED_PIN', (912, 35, 912, 44): 'GPIO.HIGH'}, {}), '(SLEEP_LED_PIN, GPIO.HIGH)', True, 'import RPi.GPIO as GPIO\n'), ((913, 8, 913, 23), 'time.sleep', 'time.sleep', ({(913, 19, 913, 22): '(0.2)'}, {}), '(0.2)', False, 'import time\n'), ((914, 8, 914, 44), 'RPi.GPIO.output', 'GPIO.output', ({(914, 20, 914, 33): 'SLEEP_LED_PIN', (914, 35, 914, 43): 'GPIO.LOW'}, {}), '(SLEEP_LED_PIN, GPIO.LOW)', True, 'import RPi.GPIO as GPIO\n'), ((915, 8, 915, 23), 'time.sleep', 'time.sleep', ({(915, 19, 915, 22): '(0.2)'}, {}), '(0.2)', False, 'import time\n'), ((917, 8, 917, 23), 'time.sleep', 'time.sleep', ({(917, 19, 917, 22): '(0.1)'}, {}), '(0.1)', False, 'import time\n'), ((962, 9, 962, 40), 'luma.core.render.canvas', 'canvas', ({(962, 16, 962, 39): 'usb4vc_oled.oled_device'}, {}), '(usb4vc_oled.oled_device)', False, 'from luma.core.render import canvas\n'), ((963, 8, 963, 96), 'usb4vc_oled.oled_print_centered', 'usb4vc_oled.oled_print_centered', ({(963, 40, 963, 
60): '"""RPi Model Changed!"""', (963, 62, 963, 86): 'usb4vc_oled.font_regular', (963, 88, 963, 89): '(0)', (963, 91, 963, 95): 'draw'}, {}), "('RPi Model Changed!', usb4vc_oled.\n font_regular, 0, draw)", False, 'import usb4vc_oled\n'), ((964, 8, 964, 100), 'usb4vc_oled.oled_print_centered', 'usb4vc_oled.oled_print_centered', ({(964, 40, 964, 63): '"""Recompiling BT Driver"""', (964, 65, 964, 89): 'usb4vc_oled.font_regular', (964, 91, 964, 93): '(10)', (964, 95, 964, 99): 'draw'}, {}), "('Recompiling BT Driver', usb4vc_oled.\n font_regular, 10, draw)", False, 'import usb4vc_oled\n'), ((965, 8, 965, 100), 'usb4vc_oled.oled_print_centered', 'usb4vc_oled.oled_print_centered', ({(965, 40, 965, 63): '"""Might take a while..."""', (965, 65, 965, 89): 'usb4vc_oled.font_regular', (965, 91, 965, 93): '(20)', (965, 95, 965, 99): 'draw'}, {}), "('Might take a while...', usb4vc_oled.\n font_regular, 20, draw)", False, 'import usb4vc_oled\n'), ((968, 9, 968, 40), 'luma.core.render.canvas', 'canvas', ({(968, 16, 968, 39): 'usb4vc_oled.oled_device'}, {}), '(usb4vc_oled.oled_device)', False, 'from luma.core.render import canvas\n'), ((969, 8, 969, 79), 'usb4vc_oled.oled_print_centered', 'usb4vc_oled.oled_print_centered', ({(969, 40, 969, 43): 'msg', (969, 45, 969, 68): 'usb4vc_oled.font_medium', (969, 70, 969, 72): '(10)', (969, 74, 969, 78): 'draw'}, {}), '(msg, usb4vc_oled.font_medium, 10, draw)', False, 'import usb4vc_oled\n'), ((972, 9, 972, 40), 'luma.core.render.canvas', 'canvas', ({(972, 16, 972, 39): 'usb4vc_oled.oled_device'}, {}), '(usb4vc_oled.oled_device)', False, 'from luma.core.render import canvas\n'), ((973, 8, 973, 94), 'usb4vc_oled.oled_print_centered', 'usb4vc_oled.oled_print_centered', ({(973, 40, 973, 59): '"""Done! Rebooting.."""', (973, 61, 973, 84): 'usb4vc_oled.font_medium', (973, 86, 973, 87): '(0)', (973, 89, 973, 93): 'draw'}, {}), "('Done! 
Rebooting..', usb4vc_oled.\n font_medium, 0, draw)", False, 'import usb4vc_oled\n'), ((974, 8, 974, 99), 'usb4vc_oled.oled_print_centered', 'usb4vc_oled.oled_print_centered', ({(974, 40, 974, 62): '"""Unplug if stuck >10s"""', (974, 64, 974, 88): 'usb4vc_oled.font_regular', (974, 90, 974, 92): '(16)', (974, 94, 974, 98): 'draw'}, {}), "('Unplug if stuck >10s', usb4vc_oled.\n font_regular, 16, draw)", False, 'import usb4vc_oled\n'), ((157, 28, 157, 55), 'os.listdir', 'os.listdir', ({(157, 39, 157, 54): 'config_dir_path'}, {}), '(config_dir_path)', False, 'import os\n'), ((183, 12, 183, 85), 'os.system', 'os.system', ({(183, 22, 183, 84): 'f"""sudo cp -v /home/pi/usb4vc/usb4vc_debug_log.txt {this_path}"""'}, {}), "(f'sudo cp -v /home/pi/usb4vc/usb4vc_debug_log.txt {this_path}')", False, 'import os\n'), ((194, 15, 194, 45), 'os.path.isdir', 'os.path.isdir', ({(194, 29, 194, 44): 'usb_config_path'}, {}), '(usb_config_path)', False, 'import os\n'), ((258, 20, 258, 49), 'subprocess.getoutput', 'subprocess.getoutput', ({(258, 41, 258, 48): '"""lsusb"""'}, {}), "('lsusb')", False, 'import subprocess\n'), ((272, 28, 272, 57), 'os.listdir', 'os.listdir', ({(272, 39, 272, 56): 'firmware_dir_path'}, {}), '(firmware_dir_path)', False, 'import os\n'), ((405, 24, 405, 44), 'json.load', 'json.load', ({(405, 34, 405, 43): 'json_file'}, {}), '(json_file)', False, 'import json\n'), ((483, 13, 483, 44), 'luma.core.render.canvas', 'canvas', ({(483, 20, 483, 43): 'usb4vc_oled.oled_device'}, {}), '(usb4vc_oled.oled_device)', False, 'from luma.core.render import canvas\n'), ((855, 31, 855, 91), 'usb4vc_shared.board_id_lookup.get', 'usb4vc_shared.board_id_lookup.get', ({(855, 65, 855, 87): "item['protocol_board']", (855, 89, 855, 90): '0'}, {}), "(item['protocol_board'], 0)", False, 'import usb4vc_shared\n'), ((881, 12, 881, 43), 'usb4vc_oled.oled_device.clear', 'usb4vc_oled.oled_device.clear', ({}, {}), '()', False, 'import usb4vc_oled\n'), ((888, 36, 888, 47), 'time.time', 'time.time', ({}, {}), '()', False, 'import time\n'), ((157, 74, 157, 106), 'os.path.join', 'os.path.join', ({(157, 87, 157, 102): 'config_dir_path', (157, 104, 157, 105): 'f'}, {}), '(config_dir_path, f)', False, 'import os\n'), ((162, 39, 162, 59), 'json.load', 'json.load', ({(162, 49, 162, 58): 'json_file'}, {}), '(json_file)', False, 'import json\n'), ((264, 12, 264, 25), 'time.sleep', 'time.sleep', ({(264, 23, 264, 24): '(4)'}, {}), '(4)', False, 'import time\n'), ((266, 12, 266, 77), 'os.system', 'os.system', ({(266, 22, 266, 76): 'f"""sudo dfu-util --device ,0483:df11 -a 0 -D {fw_path}"""'}, {}), "(f'sudo dfu-util --device ,0483:df11 -a 0 -D {fw_path}')", False, 'import os\n'), ((272, 76, 272, 110), 'os.path.join', 'os.path.join', ({(272, 89, 272, 106): 'firmware_dir_path', (272, 108, 272, 109): 'f'}, {}), '(firmware_dir_path, f)', False, 'import os\n'), ((282, 17, 282, 48), 'luma.core.render.canvas', 'canvas', ({(282, 24, 282, 47): 'usb4vc_oled.oled_device'}, {}), '(usb4vc_oled.oled_device)', False, 'from luma.core.render import canvas\n'), ((283, 16, 283, 102), 'usb4vc_oled.oled_print_centered', 'usb4vc_oled.oled_print_centered', ({(283, 48, 283, 67): '"""Loading Firmware:"""', (283, 69, 283, 92): 'usb4vc_oled.font_medium', (283, 94, 283, 95): '(0)', (283, 97, 283, 101): 'draw'}, {}), "('Loading Firmware:', usb4vc_oled.\n font_medium, 0, draw)", False, 'import usb4vc_oled\n'), ((285, 25, 285, 62), 'os.path.join', 'os.path.join', ({(285, 38, 285, 55): 'firmware_dir_path', (285, 57, 285, 61): 'item'}, {}), '(firmware_dir_path, item)', 
False, 'import os\n'), ((425, 27, 425, 57), 'json.dumps', 'json.dumps', ({(425, 38, 425, 56): 'configuration_dict'}, {}), '(configuration_dict)', False, 'import json\n'), ((788, 16, 788, 120), 'os.system', 'os.system', ({(788, 26, 788, 119): 'f"""timeout 5 bluetoothctl --agent NoInputNoOutput untrust {self.paired_devices_list[page][0]}"""'}, {}), "(\n f'timeout 5 bluetoothctl --agent NoInputNoOutput untrust {self.paired_devices_list[page][0]}'\n )", False, 'import os\n'), ((789, 16, 789, 119), 'os.system', 'os.system', ({(789, 26, 789, 118): 'f"""timeout 5 bluetoothctl --agent NoInputNoOutput remove {self.paired_devices_list[page][0]}"""'}, {}), "(\n f'timeout 5 bluetoothctl --agent NoInputNoOutput remove {self.paired_devices_list[page][0]}'\n )", False, 'import os\n'), ((796, 16, 796, 29), 'time.sleep', 'time.sleep', ({(796, 27, 796, 28): '(2)'}, {}), '(2)', False, 'import time\n'), ((797, 16, 797, 38), 'os.system', 'os.system', ({(797, 26, 797, 37): '"""sudo halt"""'}, {}), "('sudo halt')", False, 'import os\n'), ((801, 16, 801, 47), 'usb4vc_oled.oled_device.clear', 'usb4vc_oled.oled_device.clear', ({}, {}), '()', False, 'import usb4vc_oled\n'), ((802, 16, 802, 27), 'os._exit', 'os._exit', ({(802, 25, 802, 26): '(0)'}, {}), '(0)', False, 'import os\n'), ((807, 16, 807, 40), 'os.system', 'os.system', ({(807, 26, 807, 39): '"""sudo reboot"""'}, {}), "('sudo reboot')", False, 'import os\n'), ((811, 16, 811, 47), 'usb4vc_oled.oled_device.clear', 'usb4vc_oled.oled_device.clear', ({}, {}), '()', False, 'import usb4vc_oled\n'), ((812, 16, 812, 29), 'os._exit', 'os._exit', ({(812, 25, 812, 28): '(169)'}, {}), '(169)', False, 'import os\n'), ((857, 35, 857, 94), 'usb4vc_shared.protocol_id_lookup.get', 'usb4vc_shared.protocol_id_lookup.get', ({(857, 72, 857, 93): "item['protocol_name']"}, {}), "(item['protocol_name'])", False, 'import usb4vc_shared\n'), ((896, 11, 896, 22), 'time.time', 'time.time', ({}, {}), '()', False, 'import time\n'), ((260, 17, 260, 48), 'luma.core.render.canvas', 'canvas', ({(260, 24, 260, 47): 'usb4vc_oled.oled_device'}, {}), '(usb4vc_oled.oled_device)', False, 'from luma.core.render import canvas\n'), ((261, 16, 261, 105), 'usb4vc_oled.oled_print_centered', 'usb4vc_oled.oled_print_centered', ({(261, 48, 261, 69): '"""Connect a USB cable"""', (261, 71, 261, 95): 'usb4vc_oled.font_regular', (261, 97, 261, 98): '(0)', (261, 100, 261, 104): 'draw'}, {}), "('Connect a USB cable', usb4vc_oled.\n font_regular, 0, draw)", False, 'import usb4vc_oled\n'), ((262, 16, 262, 105), 'usb4vc_oled.oled_print_centered', 'usb4vc_oled.oled_print_centered', ({(262, 48, 262, 68): '"""from P-Card to RPi"""', (262, 70, 262, 94): 'usb4vc_oled.font_regular', (262, 96, 262, 98): '(10)', (262, 100, 262, 104): 'draw'}, {}), "('from P-Card to RPi', usb4vc_oled.\n font_regular, 10, draw)", False, 'import usb4vc_oled\n'), ((263, 16, 263, 100), 'usb4vc_oled.oled_print_centered', 'usb4vc_oled.oled_print_centered', ({(263, 48, 263, 63): '"""and try again"""', (263, 65, 263, 89): 'usb4vc_oled.font_regular', (263, 91, 263, 93): '(20)', (263, 95, 263, 99): 'draw'}, {}), "('and try again', usb4vc_oled.font_regular, \n 20, draw)", False, 'import usb4vc_oled\n'), ((294, 27, 294, 61), 'os.path.join', 'os.path.join', ({(294, 40, 294, 55): 'usb_config_path', (294, 57, 294, 60): '"""*"""'}, {}), "(usb_config_path, '*')", False, 'import os\n'), ((369, 21, 369, 52), 'luma.core.render.canvas', 'canvas', ({(369, 28, 369, 51): 'usb4vc_oled.oled_device'}, {}), '(usb4vc_oled.oled_device)', False, 'from luma.core.render 
import canvas\n'), ((370, 20, 370, 104), 'usb4vc_oled.oled_print_centered', 'usb4vc_oled.oled_print_centered', ({(370, 52, 370, 69): '"""Enter PIN code:"""', (370, 71, 370, 94): 'usb4vc_oled.font_medium', (370, 96, 370, 97): '(0)', (370, 99, 370, 103): 'draw'}, {}), "('Enter PIN code:', usb4vc_oled.font_medium,\n 0, draw)", False, 'import usb4vc_oled\n'), ((497, 21, 497, 52), 'luma.core.render.canvas', 'canvas', ({(497, 28, 497, 51): 'usb4vc_oled.oled_device'}, {}), '(usb4vc_oled.oled_device)', False, 'from luma.core.render import canvas\n'), ((498, 54, 498, 88), 'usb4vc_usb_scan.get_device_count', 'usb4vc_usb_scan.get_device_count', ({}, {}), '()', False, 'import usb4vc_usb_scan\n'), ((503, 21, 503, 52), 'luma.core.render.canvas', 'canvas', ({(503, 28, 503, 51): 'usb4vc_oled.oled_device'}, {}), '(usb4vc_oled.oled_device)', False, 'from luma.core.render import canvas\n'), ((511, 21, 511, 52), 'luma.core.render.canvas', 'canvas', ({(511, 28, 511, 51): 'usb4vc_oled.oled_device'}, {}), '(usb4vc_oled.oled_device)', False, 'from luma.core.render import canvas\n'), ((512, 20, 512, 100), 'usb4vc_oled.oled_print_centered', 'usb4vc_oled.oled_print_centered', ({(512, 52, 512, 65): '"""Load Custom"""', (512, 67, 512, 90): 'usb4vc_oled.font_medium', (512, 92, 512, 93): '(0)', (512, 95, 512, 99): 'draw'}, {}), "('Load Custom', usb4vc_oled.font_medium, 0, draw\n )", False, 'import usb4vc_oled\n'), ((513, 20, 513, 105), 'usb4vc_oled.oled_print_centered', 'usb4vc_oled.oled_print_centered', ({(513, 52, 513, 69): '"""Config from USB"""', (513, 71, 513, 94): 'usb4vc_oled.font_medium', (513, 96, 513, 98): '(16)', (513, 100, 513, 104): 'draw'}, {}), "('Config from USB', usb4vc_oled.font_medium,\n 16, draw)", False, 'import usb4vc_oled\n'), ((515, 21, 515, 52), 'luma.core.render.canvas', 'canvas', ({(515, 28, 515, 51): 'usb4vc_oled.oled_device'}, {}), '(usb4vc_oled.oled_device)', False, 'from luma.core.render import canvas\n'), ((516, 20, 516, 105), 'usb4vc_oled.oled_print_centered', 'usb4vc_oled.oled_print_centered', ({(516, 52, 516, 69): '"""Internet Update"""', (516, 71, 516, 94): 'usb4vc_oled.font_medium', (516, 96, 516, 98): '(10)', (516, 100, 516, 104): 'draw'}, {}), "('Internet Update', usb4vc_oled.font_medium,\n 10, draw)", False, 'import usb4vc_oled\n'), ((518, 21, 518, 52), 'luma.core.render.canvas', 'canvas', ({(518, 28, 518, 51): 'usb4vc_oled.oled_device'}, {}), '(usb4vc_oled.oled_device)', False, 'from luma.core.render import canvas\n'), ((519, 20, 519, 105), 'usb4vc_oled.oled_print_centered', 'usb4vc_oled.oled_print_centered', ({(519, 52, 519, 70): '"""Show Event Codes"""', (519, 72, 519, 95): 'usb4vc_oled.font_medium', (519, 97, 519, 98): '(0)', (519, 100, 519, 104): 'draw'}, {}), "('Show Event Codes', usb4vc_oled.font_medium,\n 0, draw)", False, 'import usb4vc_oled\n'), ((520, 20, 520, 105), 'usb4vc_oled.oled_print_centered', 'usb4vc_oled.oled_print_centered', ({(520, 52, 520, 68): '"""(experimental)"""', (520, 70, 520, 94): 'usb4vc_oled.font_regular', (520, 96, 520, 98): '(20)', (520, 100, 520, 104): 'draw'}, {}), "('(experimental)', usb4vc_oled.font_regular,\n 20, draw)", False, 'import usb4vc_oled\n'), ((522, 21, 522, 52), 'luma.core.render.canvas', 'canvas', ({(522, 28, 522, 51): 'usb4vc_oled.oled_device'}, {}), '(usb4vc_oled.oled_device)', False, 'from luma.core.render import canvas\n'), ((523, 20, 523, 106), 'usb4vc_oled.oled_print_centered', 'usb4vc_oled.oled_print_centered', ({(523, 52, 523, 70): '"""Remove BT Device"""', (523, 72, 523, 95): 'usb4vc_oled.font_medium', (523, 97, 523, 99): 
'(10)', (523, 101, 523, 105): 'draw'}, {}), "('Remove BT Device', usb4vc_oled.font_medium,\n 10, draw)", False, 'import usb4vc_oled\n'), ((525, 21, 525, 52), 'luma.core.render.canvas', 'canvas', ({(525, 28, 525, 51): 'usb4vc_oled.oled_device'}, {}), '(usb4vc_oled.oled_device)', False, 'from luma.core.render import canvas\n'), ((526, 20, 526, 104), 'usb4vc_oled.oled_print_centered', 'usb4vc_oled.oled_print_centered', ({(526, 52, 526, 68): '"""Pair Bluetooth"""', (526, 70, 526, 93): 'usb4vc_oled.font_medium', (526, 95, 526, 97): '(10)', (526, 99, 526, 103): 'draw'}, {}), "('Pair Bluetooth', usb4vc_oled.font_medium, \n 10, draw)", False, 'import usb4vc_oled\n'), ((529, 21, 529, 52), 'luma.core.render.canvas', 'canvas', ({(529, 28, 529, 51): 'usb4vc_oled.oled_device'}, {}), '(usb4vc_oled.oled_device)', False, 'from luma.core.render import canvas\n'), ((530, 20, 530, 106), 'usb4vc_oled.oled_print_centered', 'usb4vc_oled.oled_print_centered', ({(530, 52, 530, 71): '"""Keyboard Protocol"""', (530, 73, 530, 96): 'usb4vc_oled.font_medium', (530, 98, 530, 99): '(0)', (530, 101, 530, 105): 'draw'}, {}), "('Keyboard Protocol', usb4vc_oled.\n font_medium, 0, draw)", False, 'import usb4vc_oled\n'), ((531, 20, 531, 163), 'usb4vc_oled.oled_print_centered', 'usb4vc_oled.oled_print_centered', ({(531, 52, 531, 127): "self.kb_protocol_list[self.current_keyboard_protocol_index]['display_name']", (531, 129, 531, 152): 'usb4vc_oled.font_medium', (531, 154, 531, 156): '(15)', (531, 158, 531, 162): 'draw'}, {}), "(self.kb_protocol_list[self.\n current_keyboard_protocol_index]['display_name'], usb4vc_oled.\n font_medium, 15, draw)", False, 'import usb4vc_oled\n'), ((533, 21, 533, 52), 'luma.core.render.canvas', 'canvas', ({(533, 28, 533, 51): 'usb4vc_oled.oled_device'}, {}), '(usb4vc_oled.oled_device)', False, 'from luma.core.render import canvas\n'), ((534, 20, 534, 103), 'usb4vc_oled.oled_print_centered', 'usb4vc_oled.oled_print_centered', ({(534, 52, 534, 68): '"""Mouse Protocol"""', (534, 70, 534, 93): 'usb4vc_oled.font_medium', (534, 95, 534, 96): '(0)', (534, 98, 534, 102): 'draw'}, {}), "('Mouse Protocol', usb4vc_oled.font_medium, \n 0, draw)", False, 'import usb4vc_oled\n'), ((535, 20, 535, 163), 'usb4vc_oled.oled_print_centered', 'usb4vc_oled.oled_print_centered', ({(535, 52, 535, 127): "self.mouse_protocol_list[self.current_mouse_protocol_index]['display_name']", (535, 129, 535, 152): 'usb4vc_oled.font_medium', (535, 154, 535, 156): '(15)', (535, 158, 535, 162): 'draw'}, {}), "(self.mouse_protocol_list[self.\n current_mouse_protocol_index]['display_name'], usb4vc_oled.font_medium,\n 15, draw)", False, 'import usb4vc_oled\n'), ((537, 21, 537, 52), 'luma.core.render.canvas', 'canvas', ({(537, 28, 537, 51): 'usb4vc_oled.oled_device'}, {}), '(usb4vc_oled.oled_device)', False, 'from luma.core.render import canvas\n'), ((538, 20, 538, 105), 'usb4vc_oled.oled_print_centered', 'usb4vc_oled.oled_print_centered', ({(538, 52, 538, 70): '"""Gamepad Protocol"""', (538, 72, 538, 95): 'usb4vc_oled.font_medium', (538, 97, 538, 98): '(0)', (538, 100, 538, 104): 'draw'}, {}), "('Gamepad Protocol', usb4vc_oled.font_medium,\n 0, draw)", False, 'import usb4vc_oled\n'), ((539, 20, 539, 167), 'usb4vc_oled.oled_print_centered', 'usb4vc_oled.oled_print_centered', ({(539, 52, 539, 131): "self.gamepad_protocol_list[self.current_gamepad_protocol_index]['display_name']", (539, 133, 539, 156): 'usb4vc_oled.font_medium', (539, 158, 539, 160): '(15)', (539, 162, 539, 166): 'draw'}, {}), "(self.gamepad_protocol_list[self.\n 
current_gamepad_protocol_index]['display_name'], usb4vc_oled.\n font_medium, 15, draw)", False, 'import usb4vc_oled\n'), ((541, 21, 541, 52), 'luma.core.render.canvas', 'canvas', ({(541, 28, 541, 51): 'usb4vc_oled.oled_device'}, {}), '(usb4vc_oled.oled_device)', False, 'from luma.core.render import canvas\n'), ((542, 20, 542, 106), 'usb4vc_oled.oled_print_centered', 'usb4vc_oled.oled_print_centered', ({(542, 52, 542, 71): '"""Mouse Sensitivity"""', (542, 73, 542, 96): 'usb4vc_oled.font_medium', (542, 98, 542, 99): '(0)', (542, 101, 542, 105): 'draw'}, {}), "('Mouse Sensitivity', usb4vc_oled.\n font_medium, 0, draw)", False, 'import usb4vc_oled\n'), ((543, 20, 543, 160), 'usb4vc_oled.oled_print_centered', 'usb4vc_oled.oled_print_centered', ({(543, 52, 543, 124): 'f"""{mouse_sensitivity_list[self.current_mouse_sensitivity_offset_index]}"""', (543, 126, 543, 149): 'usb4vc_oled.font_medium', (543, 151, 543, 153): '(15)', (543, 155, 543, 159): 'draw'}, {}), "(\n f'{mouse_sensitivity_list[self.current_mouse_sensitivity_offset_index]}',\n usb4vc_oled.font_medium, 15, draw)", False, 'import usb4vc_oled\n'), ((547, 21, 547, 52), 'luma.core.render.canvas', 'canvas', ({(547, 28, 547, 51): 'usb4vc_oled.oled_device'}, {}), '(usb4vc_oled.oled_device)', False, 'from luma.core.render import canvas\n'), ((548, 20, 548, 101), 'usb4vc_oled.oled_print_centered', 'usb4vc_oled.oled_print_centered', ({(548, 52, 548, 65): '"""Save & Quit"""', (548, 67, 548, 90): 'usb4vc_oled.font_medium', (548, 92, 548, 94): '(10)', (548, 96, 548, 100): 'draw'}, {}), "('Save & Quit', usb4vc_oled.font_medium, 10,\n draw)", False, 'import usb4vc_oled\n'), ((551, 21, 551, 52), 'luma.core.render.canvas', 'canvas', ({(551, 28, 551, 51): 'usb4vc_oled.oled_device'}, {}), '(usb4vc_oled.oled_device)', False, 'from luma.core.render import canvas\n'), ((552, 20, 552, 108), 'usb4vc_oled.oled_print_centered', 'usb4vc_oled.oled_print_centered', ({(552, 52, 552, 72): '"""Put your device in"""', (552, 74, 552, 98): 'usb4vc_oled.font_regular', (552, 100, 552, 101): '(0)', (552, 103, 552, 107): 'draw'}, {}), "('Put your device in', usb4vc_oled.\n font_regular, 0, draw)", False, 'import usb4vc_oled\n'), ((553, 20, 553, 108), 'usb4vc_oled.oled_print_centered', 'usb4vc_oled.oled_print_centered', ({(553, 52, 553, 71): '"""pairing mode now."""', (553, 73, 553, 97): 'usb4vc_oled.font_regular', (553, 99, 553, 101): '(10)', (553, 103, 553, 107): 'draw'}, {}), "('pairing mode now.', usb4vc_oled.\n font_regular, 10, draw)", False, 'import usb4vc_oled\n'), ((554, 20, 554, 111), 'usb4vc_oled.oled_print_centered', 'usb4vc_oled.oled_print_centered', ({(554, 52, 554, 74): '"""Press enter to start"""', (554, 76, 554, 100): 'usb4vc_oled.font_regular', (554, 102, 554, 104): '(20)', (554, 106, 554, 110): 'draw'}, {}), "('Press enter to start', usb4vc_oled.\n font_regular, 20, draw)", False, 'import usb4vc_oled\n'), ((556, 21, 556, 52), 'luma.core.render.canvas', 'canvas', ({(556, 28, 556, 51): 'usb4vc_oled.oled_device'}, {}), '(usb4vc_oled.oled_device)', False, 'from luma.core.render import canvas\n'), ((557, 20, 557, 100), 'usb4vc_oled.oled_print_centered', 'usb4vc_oled.oled_print_centered', ({(557, 52, 557, 65): '"""Scanning..."""', (557, 67, 557, 90): 'usb4vc_oled.font_medium', (557, 92, 557, 93): '(0)', (557, 95, 557, 99): 'draw'}, {}), "('Scanning...', usb4vc_oled.font_medium, 0, draw\n )", False, 'import usb4vc_oled\n'), ((558, 20, 558, 101), 'usb4vc_oled.oled_print_centered', 'usb4vc_oled.oled_print_centered', ({(558, 52, 558, 65): '"""Please wait"""', (558, 
67, 558, 90): 'usb4vc_oled.font_medium', (558, 92, 558, 94): '(15)', (558, 96, 558, 100): 'draw'}, {}), "('Please wait', usb4vc_oled.font_medium, 15,\n draw)", False, 'import usb4vc_oled\n'), ((578, 21, 578, 52), 'luma.core.render.canvas', 'canvas', ({(578, 28, 578, 51): 'usb4vc_oled.oled_device'}, {}), '(usb4vc_oled.oled_device)', False, 'from luma.core.render import canvas\n'), ((579, 20, 579, 104), 'usb4vc_oled.oled_print_centered', 'usb4vc_oled.oled_print_centered', ({(579, 52, 579, 69): '"""Pairing result:"""', (579, 71, 579, 94): 'usb4vc_oled.font_medium', (579, 96, 579, 97): '(0)', (579, 99, 579, 103): 'draw'}, {}), "('Pairing result:', usb4vc_oled.font_medium,\n 0, draw)", False, 'import usb4vc_oled\n'), ((580, 20, 580, 108), 'usb4vc_oled.oled_print_centered', 'usb4vc_oled.oled_print_centered', ({(580, 52, 580, 71): 'self.pairing_result', (580, 73, 580, 97): 'usb4vc_oled.font_regular', (580, 99, 580, 101): '(20)', (580, 103, 580, 107): 'draw'}, {}), '(self.pairing_result, usb4vc_oled.\n font_regular, 20, draw)', False, 'import usb4vc_oled\n'), ((582, 21, 582, 52), 'luma.core.render.canvas', 'canvas', ({(582, 28, 582, 51): 'usb4vc_oled.oled_device'}, {}), '(usb4vc_oled.oled_device)', False, 'from luma.core.render import canvas\n'), ((583, 20, 583, 105), 'usb4vc_oled.oled_print_centered', 'usb4vc_oled.oled_print_centered', ({(583, 52, 583, 70): '"""Bluetooth Error!"""', (583, 72, 583, 95): 'usb4vc_oled.font_medium', (583, 97, 583, 98): '(0)', (583, 100, 583, 104): 'draw'}, {}), "('Bluetooth Error!', usb4vc_oled.font_medium,\n 0, draw)", False, 'import usb4vc_oled\n'), ((584, 20, 584, 107), 'usb4vc_oled.oled_print_centered', 'usb4vc_oled.oled_print_centered', ({(584, 52, 584, 70): 'self.error_message', (584, 72, 584, 96): 'usb4vc_oled.font_regular', (584, 98, 584, 100): '(20)', (584, 102, 584, 106): 'draw'}, {}), '(self.error_message, usb4vc_oled.\n font_regular, 20, draw)', False, 'import usb4vc_oled\n'), ((587, 21, 587, 52), 'luma.core.render.canvas', 'canvas', ({(587, 28, 587, 51): 'usb4vc_oled.oled_device'}, {}), '(usb4vc_oled.oled_device)', False, 'from luma.core.render import canvas\n'), ((588, 20, 588, 94), 'usb4vc_oled.oled_print_centered', 'usb4vc_oled.oled_print_centered', ({(588, 52, 588, 58): '"""Exit"""', (588, 60, 588, 83): 'usb4vc_oled.font_medium', (588, 85, 588, 87): '(10)', (588, 89, 588, 93): 'draw'}, {}), "('Exit', usb4vc_oled.font_medium, 10, draw)", False, 'import usb4vc_oled\n'), ((590, 21, 590, 52), 'luma.core.render.canvas', 'canvas', ({(590, 28, 590, 51): 'usb4vc_oled.oled_device'}, {}), '(usb4vc_oled.oled_device)', False, 'from luma.core.render import canvas\n'), ((592, 20, 592, 129), 'usb4vc_oled.oled_print_centered', 'usb4vc_oled.oled_print_centered', ({(592, 52, 592, 92): 'f"""{self.bluetooth_device_list[page][1]}"""', (592, 94, 592, 118): 'usb4vc_oled.font_regular', (592, 120, 592, 122): '(10)', (592, 124, 592, 128): 'draw'}, {}), "(f'{self.bluetooth_device_list[page][1]}',\n usb4vc_oled.font_regular, 10, draw)", False, 'import usb4vc_oled\n'), ((593, 20, 593, 129), 'usb4vc_oled.oled_print_centered', 'usb4vc_oled.oled_print_centered', ({(593, 52, 593, 92): 'f"""{self.bluetooth_device_list[page][0]}"""', (593, 94, 593, 118): 'usb4vc_oled.font_regular', (593, 120, 593, 122): '(20)', (593, 124, 593, 128): 'draw'}, {}), "(f'{self.bluetooth_device_list[page][0]}',\n usb4vc_oled.font_regular, 20, draw)", False, 'import usb4vc_oled\n'), ((596, 21, 596, 52), 'luma.core.render.canvas', 'canvas', ({(596, 28, 596, 51): 'usb4vc_oled.oled_device'}, {}), 
'(usb4vc_oled.oled_device)', False, 'from luma.core.render import canvas\n'), ((597, 20, 597, 94), 'usb4vc_oled.oled_print_centered', 'usb4vc_oled.oled_print_centered', ({(597, 52, 597, 58): '"""Exit"""', (597, 60, 597, 83): 'usb4vc_oled.font_medium', (597, 85, 597, 87): '(10)', (597, 89, 597, 93): 'draw'}, {}), "('Exit', usb4vc_oled.font_medium, 10, draw)", False, 'import usb4vc_oled\n'), ((599, 21, 599, 52), 'luma.core.render.canvas', 'canvas', ({(599, 28, 599, 51): 'usb4vc_oled.oled_device'}, {}), '(usb4vc_oled.oled_device)', False, 'from luma.core.render import canvas\n'), ((600, 20, 600, 103), 'usb4vc_oled.oled_print_centered', 'usb4vc_oled.oled_print_centered', ({(600, 52, 600, 67): 'f"""Remove this?"""', (600, 69, 600, 93): 'usb4vc_oled.font_regular', (600, 95, 600, 96): '(0)', (600, 98, 600, 102): 'draw'}, {}), "(f'Remove this?', usb4vc_oled.font_regular, \n 0, draw)", False, 'import usb4vc_oled\n'), ((601, 20, 601, 127), 'usb4vc_oled.oled_print_centered', 'usb4vc_oled.oled_print_centered', ({(601, 52, 601, 90): 'f"""{self.paired_devices_list[page][1]}"""', (601, 92, 601, 116): 'usb4vc_oled.font_regular', (601, 118, 601, 120): '(10)', (601, 122, 601, 126): 'draw'}, {}), "(f'{self.paired_devices_list[page][1]}',\n usb4vc_oled.font_regular, 10, draw)", False, 'import usb4vc_oled\n'), ((602, 20, 602, 127), 'usb4vc_oled.oled_print_centered', 'usb4vc_oled.oled_print_centered', ({(602, 52, 602, 90): 'f"""{self.paired_devices_list[page][0]}"""', (602, 92, 602, 116): 'usb4vc_oled.font_regular', (602, 118, 602, 120): '(20)', (602, 122, 602, 126): 'draw'}, {}), "(f'{self.paired_devices_list[page][0]}',\n usb4vc_oled.font_regular, 20, draw)", False, 'import usb4vc_oled\n'), ((605, 21, 605, 52), 'luma.core.render.canvas', 'canvas', ({(605, 28, 605, 51): 'usb4vc_oled.oled_device'}, {}), '(usb4vc_oled.oled_device)', False, 'from luma.core.render import canvas\n'), ((606, 20, 606, 100), 'usb4vc_oled.oled_print_centered', 'usb4vc_oled.oled_print_centered', ({(606, 52, 606, 64): '"""Power Down"""', (606, 66, 606, 89): 'usb4vc_oled.font_medium', (606, 91, 606, 93): '(10)', (606, 95, 606, 99): 'draw'}, {}), "('Power Down', usb4vc_oled.font_medium, 10, draw\n )", False, 'import usb4vc_oled\n'), ((608, 21, 608, 52), 'luma.core.render.canvas', 'canvas', ({(608, 28, 608, 51): 'usb4vc_oled.oled_device'}, {}), '(usb4vc_oled.oled_device)', False, 'from luma.core.render import canvas\n'), ((609, 20, 609, 98), 'usb4vc_oled.oled_print_centered', 'usb4vc_oled.oled_print_centered', ({(609, 52, 609, 62): '"""Relaunch"""', (609, 64, 609, 87): 'usb4vc_oled.font_medium', (609, 89, 609, 91): '(10)', (609, 93, 609, 97): 'draw'}, {}), "('Relaunch', usb4vc_oled.font_medium, 10, draw)", False, 'import usb4vc_oled\n'), ((611, 21, 611, 52), 'luma.core.render.canvas', 'canvas', ({(611, 28, 611, 51): 'usb4vc_oled.oled_device'}, {}), '(usb4vc_oled.oled_device)', False, 'from luma.core.render import canvas\n'), ((612, 20, 612, 96), 'usb4vc_oled.oled_print_centered', 'usb4vc_oled.oled_print_centered', ({(612, 52, 612, 60): '"""Reboot"""', (612, 62, 612, 85): 'usb4vc_oled.font_medium', (612, 87, 612, 89): '(10)', (612, 91, 612, 95): 'draw'}, {}), "('Reboot', usb4vc_oled.font_medium, 10, draw)", False, 'import usb4vc_oled\n'), ((614, 21, 614, 52), 'luma.core.render.canvas', 'canvas', ({(614, 28, 614, 51): 'usb4vc_oled.oled_device'}, {}), '(usb4vc_oled.oled_device)', False, 'from luma.core.render import canvas\n'), ((615, 20, 615, 103), 'usb4vc_oled.oled_print_centered', 'usb4vc_oled.oled_print_centered', ({(615, 52, 615, 67): 
'"""Exit to Linux"""', (615, 69, 615, 92): 'usb4vc_oled.font_medium', (615, 94, 615, 96): '(10)', (615, 98, 615, 102): 'draw'}, {}), "('Exit to Linux', usb4vc_oled.font_medium, \n 10, draw)", False, 'import usb4vc_oled\n'), ((617, 21, 617, 52), 'luma.core.render.canvas', 'canvas', ({(617, 28, 617, 51): 'usb4vc_oled.oled_device'}, {}), '(usb4vc_oled.oled_device)', False, 'from luma.core.render import canvas\n'), ((618, 20, 618, 96), 'usb4vc_oled.oled_print_centered', 'usb4vc_oled.oled_print_centered', ({(618, 52, 618, 60): '"""Cancel"""', (618, 62, 618, 85): 'usb4vc_oled.font_medium', (618, 87, 618, 89): '(10)', (618, 91, 618, 95): 'draw'}, {}), "('Cancel', usb4vc_oled.font_medium, 10, draw)", False, 'import usb4vc_oled\n'), ((664, 46, 664, 79), 'usb4vc_usb_scan.get_pboard_info', 'usb4vc_usb_scan.get_pboard_info', ({}, {}), '()', False, 'import usb4vc_usb_scan\n'), ((675, 20, 675, 33), 'time.sleep', 'time.sleep', ({(675, 31, 675, 32): '(3)'}, {}), '(3)', False, 'import time\n'), ((682, 20, 682, 33), 'time.sleep', 'time.sleep', ({(682, 31, 682, 32): '(2)'}, {}), '(2)', False, 'import time\n'), ((686, 20, 686, 33), 'time.sleep', 'time.sleep', ({(686, 31, 686, 32): '(2)'}, {}), '(2)', False, 'import time\n'), ((691, 20, 691, 33), 'time.sleep', 'time.sleep', ({(691, 31, 691, 32): '(3)'}, {}), '(3)', False, 'import time\n'), ((692, 20, 692, 51), 'usb4vc_oled.oled_device.clear', 'usb4vc_oled.oled_device.clear', ({}, {}), '()', False, 'import usb4vc_oled\n'), ((693, 20, 693, 31), 'os._exit', 'os._exit', ({(693, 29, 693, 30): '(0)'}, {}), '(0)', False, 'import os\n'), ((698, 24, 698, 84), 'usb4vc_check_update.download_latest_firmware', 'usb4vc_check_update.download_latest_firmware', ({(698, 69, 698, 83): 'this_pboard_id'}, {}), '(this_pboard_id)', False, 'import usb4vc_check_update\n'), ((710, 16, 710, 29), 'time.sleep', 'time.sleep', ({(710, 27, 710, 28): '(3)'}, {}), '(3)', False, 'import time\n'), ((713, 16, 713, 29), 'time.sleep', 'time.sleep', ({(713, 27, 713, 28): '(1)'}, {}), '(1)', False, 'import time\n'), ((714, 32, 714, 73), 'usb4vc_check_update.update', 'usb4vc_check_update.update', ({(714, 59, 714, 72): 'temp_dir_path'}, {}), '(temp_dir_path)', False, 'import usb4vc_check_update\n'), ((723, 16, 723, 29), 'time.sleep', 'time.sleep', ({(723, 27, 723, 28): '(4)'}, {}), '(4)', False, 'import time\n'), ((724, 16, 724, 47), 'usb4vc_oled.oled_device.clear', 'usb4vc_oled.oled_device.clear', ({}, {}), '()', False, 'import usb4vc_oled\n'), ((725, 16, 725, 27), 'os._exit', 'os._exit', ({(725, 25, 725, 26): '(0)'}, {}), '(0)', False, 'import os\n'), ((772, 21, 772, 52), 'luma.core.render.canvas', 'canvas', ({(772, 28, 772, 51): 'usb4vc_oled.oled_device'}, {}), '(usb4vc_oled.oled_device)', False, 'from luma.core.render import canvas\n'), ((773, 20, 773, 99), 'usb4vc_oled.oled_print_centered', 'usb4vc_oled.oled_print_centered', ({(773, 52, 773, 64): '"""Pairing..."""', (773, 66, 773, 89): 'usb4vc_oled.font_medium', (773, 91, 773, 92): '(0)', (773, 94, 773, 98): 'draw'}, {}), "('Pairing...', usb4vc_oled.font_medium, 0, draw)", False, 'import usb4vc_oled\n'), ((774, 20, 774, 101), 'usb4vc_oled.oled_print_centered', 'usb4vc_oled.oled_print_centered', ({(774, 52, 774, 65): '"""Please wait"""', (774, 67, 774, 90): 'usb4vc_oled.font_medium', (774, 92, 774, 94): '(15)', (774, 96, 774, 100): 'draw'}, {}), "('Please wait', usb4vc_oled.font_medium, 15,\n draw)", False, 'import usb4vc_oled\n'), ((780, 20, 780, 125), 'os.system', 'os.system', ({(780, 30, 780, 124): 'f"""timeout {self.bt_scan_timeout_sec} 
bluetoothctl --agent NoInputNoOutput trust {bt_mac_addr}"""'}, {}), "(\n f'timeout {self.bt_scan_timeout_sec} bluetoothctl --agent NoInputNoOutput trust {bt_mac_addr}'\n )", False, 'import os\n'), ((781, 20, 781, 127), 'os.system', 'os.system', ({(781, 30, 781, 126): 'f"""timeout {self.bt_scan_timeout_sec} bluetoothctl --agent NoInputNoOutput connect {bt_mac_addr}"""'}, {}), "(\n f'timeout {self.bt_scan_timeout_sec} bluetoothctl --agent NoInputNoOutput connect {bt_mac_addr}'\n )", False, 'import os\n'), ((793, 21, 793, 52), 'luma.core.render.canvas', 'canvas', ({(793, 28, 793, 51): 'usb4vc_oled.oled_device'}, {}), '(usb4vc_oled.oled_device)', False, 'from luma.core.render import canvas\n'), ((794, 20, 794, 105), 'usb4vc_oled.oled_print_centered', 'usb4vc_oled.oled_print_centered', ({(794, 52, 794, 70): '"""Wait Until Green"""', (794, 72, 794, 95): 'usb4vc_oled.font_medium', (794, 97, 794, 98): '(0)', (794, 100, 794, 104): 'draw'}, {}), "('Wait Until Green', usb4vc_oled.font_medium,\n 0, draw)", False, 'import usb4vc_oled\n'), ((795, 20, 795, 108), 'usb4vc_oled.oled_print_centered', 'usb4vc_oled.oled_print_centered', ({(795, 52, 795, 72): '"""LED Stops Blinking"""', (795, 74, 795, 97): 'usb4vc_oled.font_medium', (795, 99, 795, 101): '(15)', (795, 103, 795, 107): 'draw'}, {}), "('LED Stops Blinking', usb4vc_oled.\n font_medium, 15, draw)", False, 'import usb4vc_oled\n'), ((799, 20, 799, 33), 'time.sleep', 'time.sleep', ({(799, 31, 799, 32): '(1)'}, {}), '(1)', False, 'import time\n'), ((804, 21, 804, 52), 'luma.core.render.canvas', 'canvas', ({(804, 28, 804, 51): 'usb4vc_oled.oled_device'}, {}), '(usb4vc_oled.oled_device)', False, 'from luma.core.render import canvas\n'), ((805, 20, 805, 101), 'usb4vc_oled.oled_print_centered', 'usb4vc_oled.oled_print_centered', ({(805, 52, 805, 66): '"""Rebooting..."""', (805, 68, 805, 91): 'usb4vc_oled.font_medium', (805, 93, 805, 94): '(0)', (805, 96, 805, 100): 'draw'}, {}), "('Rebooting...', usb4vc_oled.font_medium, 0,\n draw)", False, 'import usb4vc_oled\n'), ((806, 20, 806, 111), 'usb4vc_oled.oled_print_centered', 'usb4vc_oled.oled_print_centered', ({(806, 52, 806, 74): '"""Unplug if stuck >10s"""', (806, 76, 806, 100): 'usb4vc_oled.font_regular', (806, 102, 806, 104): '(16)', (806, 106, 806, 110): 'draw'}, {}), "('Unplug if stuck >10s', usb4vc_oled.\n font_regular, 16, draw)", False, 'import usb4vc_oled\n'), ((809, 20, 809, 33), 'time.sleep', 'time.sleep', ({(809, 31, 809, 32): '(1)'}, {}), '(1)', False, 'import time\n'), ((169, 25, 169, 79), 'subprocess.getoutput', 'subprocess.getoutput', ({(169, 46, 169, 78): 'f"""timeout 2 df -h | grep -i usb"""'}, {}), "(f'timeout 2 df -h | grep -i usb')", False, 'import subprocess\n'), ((672, 25, 672, 56), 'luma.core.render.canvas', 'canvas', ({(672, 32, 672, 55): 'usb4vc_oled.oled_device'}, {}), '(usb4vc_oled.oled_device)', False, 'from luma.core.render import canvas\n'), ((673, 24, 673, 99), 'usb4vc_oled.oled_print_centered', 'usb4vc_oled.oled_print_centered', ({(673, 56, 673, 64): '"""Error:"""', (673, 66, 673, 89): 'usb4vc_oled.font_medium', (673, 91, 673, 92): '(0)', (673, 94, 673, 98): 'draw'}, {}), "('Error:', usb4vc_oled.font_medium, 0, draw)", False, 'import usb4vc_oled\n'), ((678, 25, 678, 56), 'luma.core.render.canvas', 'canvas', ({(678, 32, 678, 55): 'usb4vc_oled.oled_device'}, {}), '(usb4vc_oled.oled_device)', False, 'from luma.core.render import canvas\n'), ((679, 24, 679, 100), 'usb4vc_oled.oled_print_centered', 'usb4vc_oled.oled_print_centered', ({(679, 56, 679, 65): '"""Copying"""', (679, 67, 
679, 90): 'usb4vc_oled.font_medium', (679, 92, 679, 93): '(0)', (679, 95, 679, 99): 'draw'}, {}), "('Copying', usb4vc_oled.font_medium, 0, draw)", False, 'import usb4vc_oled\n'), ((680, 24, 680, 106), 'usb4vc_oled.oled_print_centered', 'usb4vc_oled.oled_print_centered', ({(680, 56, 680, 70): '"""Debug Log..."""', (680, 72, 680, 95): 'usb4vc_oled.font_medium', (680, 97, 680, 99): '(16)', (680, 101, 680, 105): 'draw'}, {}), "('Debug Log...', usb4vc_oled.font_medium, 16,\n draw)", False, 'import usb4vc_oled\n'), ((683, 25, 683, 56), 'luma.core.render.canvas', 'canvas', ({(683, 32, 683, 55): 'usb4vc_oled.oled_device'}, {}), '(usb4vc_oled.oled_device)', False, 'from luma.core.render import canvas\n'), ((684, 24, 684, 107), 'usb4vc_oled.oled_print_centered', 'usb4vc_oled.oled_print_centered', ({(684, 56, 684, 72): '"""Copying custom"""', (684, 74, 684, 97): 'usb4vc_oled.font_medium', (684, 99, 684, 100): '(0)', (684, 102, 684, 106): 'draw'}, {}), "('Copying custom', usb4vc_oled.font_medium, \n 0, draw)", False, 'import usb4vc_oled\n'), ((685, 24, 685, 104), 'usb4vc_oled.oled_print_centered', 'usb4vc_oled.oled_print_centered', ({(685, 56, 685, 68): '"""mapping..."""', (685, 70, 685, 93): 'usb4vc_oled.font_medium', (685, 95, 685, 97): '(16)', (685, 99, 685, 103): 'draw'}, {}), "('mapping...', usb4vc_oled.font_medium, 16, draw\n )", False, 'import usb4vc_oled\n'), ((688, 25, 688, 56), 'luma.core.render.canvas', 'canvas', ({(688, 32, 688, 55): 'usb4vc_oled.oled_device'}, {}), '(usb4vc_oled.oled_device)', False, 'from luma.core.render import canvas\n'), ((689, 24, 689, 109), 'usb4vc_oled.oled_print_centered', 'usb4vc_oled.oled_print_centered', ({(689, 56, 689, 74): '"""Update complete!"""', (689, 76, 689, 99): 'usb4vc_oled.font_medium', (689, 101, 689, 102): '(0)', (689, 104, 689, 108): 'draw'}, {}), "('Update complete!', usb4vc_oled.font_medium,\n 0, draw)", False, 'import usb4vc_oled\n'), ((690, 24, 690, 108), 'usb4vc_oled.oled_print_centered', 'usb4vc_oled.oled_print_centered', ({(690, 56, 690, 72): '"""Relaunching..."""', (690, 74, 690, 97): 'usb4vc_oled.font_medium', (690, 99, 690, 101): '(16)', (690, 103, 690, 107): 'draw'}, {}), "('Relaunching...', usb4vc_oled.font_medium, \n 16, draw)", False, 'import usb4vc_oled\n'), ((696, 21, 696, 52), 'luma.core.render.canvas', 'canvas', ({(696, 28, 696, 51): 'usb4vc_oled.oled_device'}, {}), '(usb4vc_oled.oled_device)', False, 'from luma.core.render import canvas\n'), ((697, 20, 697, 101), 'usb4vc_oled.oled_print_centered', 'usb4vc_oled.oled_print_centered', ({(697, 52, 697, 65): '"""Updating..."""', (697, 67, 697, 90): 'usb4vc_oled.font_medium', (697, 92, 697, 94): '(10)', (697, 96, 697, 100): 'draw'}, {}), "('Updating...', usb4vc_oled.font_medium, 10,\n draw)", False, 'import usb4vc_oled\n'), ((711, 21, 711, 52), 'luma.core.render.canvas', 'canvas', ({(711, 28, 711, 51): 'usb4vc_oled.oled_device'}, {}), '(usb4vc_oled.oled_device)', False, 'from luma.core.render import canvas\n'), ((712, 20, 712, 106), 'usb4vc_oled.oled_print_centered', 'usb4vc_oled.oled_print_centered', ({(712, 52, 712, 70): '"""Updating code..."""', (712, 72, 712, 95): 'usb4vc_oled.font_medium', (712, 97, 712, 99): '(10)', (712, 101, 712, 105): 'draw'}, {}), "('Updating code...', usb4vc_oled.font_medium,\n 10, draw)", False, 'import usb4vc_oled\n'), ((700, 25, 700, 56), 'luma.core.render.canvas', 'canvas', ({(700, 32, 700, 55): 'usb4vc_oled.oled_device'}, {}), '(usb4vc_oled.oled_device)', False, 'from luma.core.render import canvas\n'), ((701, 24, 701, 111), 
'usb4vc_oled.oled_print_centered', 'usb4vc_oled.oled_print_centered', ({(701, 56, 701, 76): '"""Unable to download"""', (701, 78, 701, 101): 'usb4vc_oled.font_medium', (701, 103, 701, 104): '(0)', (701, 106, 701, 110): 'draw'}, {}), "('Unable to download', usb4vc_oled.\n font_medium, 0, draw)", False, 'import usb4vc_oled\n'), ((702, 24, 702, 112), 'usb4vc_oled.oled_print_centered', 'usb4vc_oled.oled_print_centered', ({(702, 56, 702, 76): 'f"""firmware: {fffff}"""', (702, 78, 702, 101): 'usb4vc_oled.font_medium', (702, 103, 702, 105): '(16)', (702, 107, 702, 111): 'draw'}, {}), "(f'firmware: {fffff}', usb4vc_oled.\n font_medium, 16, draw)", False, 'import usb4vc_oled\n'), ((716, 25, 716, 56), 'luma.core.render.canvas', 'canvas', ({(716, 32, 716, 55): 'usb4vc_oled.oled_device'}, {}), '(usb4vc_oled.oled_device)', False, 'from luma.core.render import canvas\n'), ((717, 24, 717, 109), 'usb4vc_oled.oled_print_centered', 'usb4vc_oled.oled_print_centered', ({(717, 56, 717, 74): '"""Update complete!"""', (717, 76, 717, 99): 'usb4vc_oled.font_medium', (717, 101, 717, 102): '(0)', (717, 104, 717, 108): 'draw'}, {}), "('Update complete!', usb4vc_oled.font_medium,\n 0, draw)", False, 'import usb4vc_oled\n'), ((718, 24, 718, 108), 'usb4vc_oled.oled_print_centered', 'usb4vc_oled.oled_print_centered', ({(718, 56, 718, 72): '"""Relaunching..."""', (718, 74, 718, 97): 'usb4vc_oled.font_medium', (718, 99, 718, 101): '(16)', (718, 103, 718, 107): 'draw'}, {}), "('Relaunching...', usb4vc_oled.font_medium, \n 16, draw)", False, 'import usb4vc_oled\n'), ((720, 25, 720, 56), 'luma.core.render.canvas', 'canvas', ({(720, 32, 720, 55): 'usb4vc_oled.oled_device'}, {}), '(usb4vc_oled.oled_device)', False, 'from luma.core.render import canvas\n'), ((721, 24, 721, 107), 'usb4vc_oled.oled_print_centered', 'usb4vc_oled.oled_print_centered', ({(721, 56, 721, 72): '"""Update failed:"""', (721, 74, 721, 97): 'usb4vc_oled.font_medium', (721, 99, 721, 100): '(0)', (721, 102, 721, 106): 'draw'}, {}), "('Update failed:', usb4vc_oled.font_medium, \n 0, draw)", False, 'import usb4vc_oled\n'), ((722, 24, 722, 134), 'usb4vc_oled.oled_print_centered', 'usb4vc_oled.oled_print_centered', ({(722, 56, 722, 97): 'f"""{update_result[-1]} {update_result[0]}"""', (722, 99, 722, 123): 'usb4vc_oled.font_regular', (722, 125, 722, 127): '(16)', (722, 129, 722, 133): 'draw'}, {}), "(f'{update_result[-1]} {update_result[0]}',\n usb4vc_oled.font_regular, 16, draw)", False, 'import usb4vc_oled\n'), ((728, 20, 728, 85), 'usb4vc_show_ev.ev_loop', 'usb4vc_show_ev.ev_loop', ({(728, 43, 728, 84): '[plus_button, minus_button, enter_button]'}, {}), '([plus_button, minus_button, enter_button])', False, 'import usb4vc_show_ev\n'), ((704, 29, 704, 60), 'luma.core.render.canvas', 'canvas', ({(704, 36, 704, 59): 'usb4vc_oled.oled_device'}, {}), '(usb4vc_oled.oled_device)', False, 'from luma.core.render import canvas\n'), ((705, 28, 705, 115), 'usb4vc_oled.oled_print_centered', 'usb4vc_oled.oled_print_centered', ({(705, 60, 705, 79): '"""Firmware updated!"""', (705, 81, 705, 104): 'usb4vc_oled.font_medium', (705, 106, 705, 108): '(10)', (705, 110, 705, 114): 'draw'}, {}), "('Firmware updated!', usb4vc_oled.\n font_medium, 10, draw)", False, 'import usb4vc_oled\n'), ((707, 25, 707, 56), 'luma.core.render.canvas', 'canvas', ({(707, 32, 707, 55): 'usb4vc_oled.oled_device'}, {}), '(usb4vc_oled.oled_device)', False, 'from luma.core.render import canvas\n'), ((708, 24, 708, 109), 'usb4vc_oled.oled_print_centered', 'usb4vc_oled.oled_print_centered', ({(708, 56, 708, 
74): '"""FW update ERR or"""', (708, 76, 708, 99): 'usb4vc_oled.font_medium', (708, 101, 708, 102): '(0)', (708, 104, 708, 108): 'draw'}, {}), "('FW update ERR or', usb4vc_oled.font_medium,\n 0, draw)", False, 'import usb4vc_oled\n'), ((709, 24, 709, 108), 'usb4vc_oled.oled_print_centered', 'usb4vc_oled.oled_print_centered', ({(709, 56, 709, 72): '"""already newest"""', (709, 74, 709, 97): 'usb4vc_oled.font_medium', (709, 99, 709, 101): '(15)', (709, 103, 709, 107): 'draw'}, {}), "('already newest', usb4vc_oled.font_medium, \n 15, draw)", False, 'import usb4vc_oled\n')] |
hpathipati/Quick-Tutor | study/migrations/0003_auto_20200224_2316.py | 17476d79b87f51b12a6c8fc435d1a6506bff1e04 | # Generated by Django 3.0.2 on 2020-02-24 23:16
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('study', '0002_student'),
]
operations = [
migrations.AlterField(
model_name='student',
name='bio',
field=models.CharField(blank=True, max_length=200),
),
]
| [((16, 18, 16, 62), 'django.db.models.CharField', 'models.CharField', (), '', False, 'from django.db import migrations, models\n')] |
gzecchi/oneview-python | tests/unit/resources/test_resource.py | 949bc67ca3eaed324a6dc058620145d9e067e25b | # -*- coding: utf-8 -*-
###
# (C) Copyright [2019] Hewlett Packard Enterprise Development LP
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
###
import io
import unittest
import mock
from mock import call
from tests.test_utils import mock_builtin
from hpOneView.connection import connection
from hpOneView import exceptions
from hpOneView.resources.resource import (ResourceClient, ResourceHelper, ResourceFileHandlerMixin,
ResourceZeroBodyMixin, ResourcePatchMixin, ResourceUtilizationMixin,
ResourceSchemaMixin, Resource,
RESOURCE_CLIENT_INVALID_ID, UNRECOGNIZED_URI, TaskMonitor,
RESOURCE_CLIENT_TASK_EXPECTED, RESOURCE_ID_OR_URI_REQUIRED,
transform_list_to_dict, extract_id_from_uri, merge_resources,
merge_default_values, unavailable_method)
class StubResourceFileHandler(ResourceFileHandlerMixin, Resource):
"""Stub class to test resource file operations"""
class StubResourceZeroBody(ResourceZeroBodyMixin, Resource):
"""Stub class to test resoruce zero body methods"""
class StubResourcePatch(ResourcePatchMixin, Resource):
"""Stub class to test resource patch operations"""
class StubResourceUtilization(ResourceUtilizationMixin, Resource):
"""Stub class to test resource utilization methods"""
class StubResourceSchema(ResourceSchemaMixin, Resource):
"""Stub class to test resource schema methods"""
class StubResource(Resource):
"""Stub class to test resource common methods"""
URI = "/rest/testuri"
class BaseTest(unittest.TestCase):
URI = "/rest/testuri"
TYPE_V200 = "typeV200"
TYPE_V300 = "typeV300"
DEFAULT_VALUES = {
"200": {"type": TYPE_V200},
"300": {"type": TYPE_V300}
}
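    # DEFAULT_VALUES is keyed by the connection's API version: the matching
    # entry is merged into request bodies, so apiVersion 200 yields type
    # "typeV200" and apiVersion 300 yields "typeV300" (see the create tests).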
def setUp(self, resource_client=None):
self.resource_client = resource_client
self.resource_client.URI = self.URI
self.resource_client.DEFAULT_VALUES = self.DEFAULT_VALUES
self.resource_client.data = {"uri": "/rest/testuri"}
self.resource_client._merge_default_values()
self.task = {"task": "task", "taskState": "Finished"}
self.response_body = {"body": "body"}
self.custom_headers = {"Accept-Language": "en_US"}
class ResourceFileHandlerMixinTest(BaseTest):
def setUp(self):
self.connection = connection('127.0.0.1', 300)
self.resource_client = StubResourceFileHandler(self.connection)
super(ResourceFileHandlerMixinTest, self).setUp(self.resource_client)
@mock.patch.object(connection, "post_multipart_with_response_handling")
def test_upload_should_call_post_multipart(self, mock_post_multipart):
uri = "/rest/testuri/"
filepath = "test/SPPgen9snap6.2015_0405.81.iso"
mock_post_multipart.return_value = None, mock.Mock()
self.resource_client.upload(filepath, uri)
mock_post_multipart.assert_called_once_with(uri, filepath, "SPPgen9snap6.2015_0405.81.iso")
@mock.patch.object(connection, "post_multipart_with_response_handling")
    def test_upload_should_call_post_multipart_with_resource_uri_when_no_uri_provided(self, mock_post_multipart):
filepath = "test/SPPgen9snap6.2015_0405.81.iso"
mock_post_multipart.return_value = None, mock.Mock()
self.resource_client.upload(filepath)
mock_post_multipart.assert_called_once_with("/rest/testuri", mock.ANY, mock.ANY)
@mock.patch.object(connection, "post_multipart_with_response_handling")
@mock.patch.object(TaskMonitor, "wait_for_task")
@mock.patch.object(connection, "get")
def test_upload_should_wait_for_task_when_response_is_task(self, mock_get, mock_wait4task, mock_post_multipart):
uri = "/rest/testuri/"
filepath = "test/SPPgen9snap6.2015_0405.81.iso"
mock_post_multipart.return_value = self.task, mock.Mock()
self.resource_client.upload(filepath, uri)
mock_wait4task.assert_called_once_with(self.task, -1)
@mock.patch.object(connection, "post_multipart_with_response_handling")
@mock.patch.object(TaskMonitor, "wait_for_task")
def test_upload_should_not_wait_for_task_when_response_is_not_task(self, mock_wait4task, mock_post_multipart):
uri = "/rest/testuri/"
filepath = "test/SPPgen9snap6.2015_0405.81.iso"
mock_post_multipart.return_value = None, mock.Mock()
self.resource_client.upload(filepath, uri)
        mock_wait4task.assert_not_called()
@mock.patch.object(connection, "post_multipart_with_response_handling")
@mock.patch.object(TaskMonitor, "wait_for_task")
@mock.patch.object(connection, "get")
def test_upload_should_return_associated_resource_when_response_is_task(self, mock_get, mock_wait4task,
mock_post_multipart):
        fake_associated_resource = mock.Mock()
        uri = "/rest/testuri/"
        filepath = "test/SPPgen9snap6.2015_0405.81.iso"
        mock_post_multipart.return_value = self.task, mock.Mock()
        mock_wait4task.return_value = fake_associated_resource
        result = self.resource_client.upload(filepath, uri)
        self.assertEqual(result, fake_associated_resource)
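    # When the upload response carries a task, the client waits on it and
    # returns the associated resource; when there is no task, the raw response
    # body is returned instead (see the next test).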
@mock.patch.object(connection, "post_multipart_with_response_handling")
@mock.patch.object(TaskMonitor, "wait_for_task")
def test_upload_should_return_resource_when_response_is_not_task(self, mock_wait4task, mock_post_multipart):
fake_response_body = mock.Mock()
uri = "/rest/testuri/"
filepath = "test/SPPgen9snap6.2015_0405.81.iso"
mock_post_multipart.return_value = None, fake_response_body
result = self.resource_client.upload(filepath, uri)
self.assertEqual(result, fake_response_body)
@mock.patch.object(connection, "download_to_stream")
@mock.patch(mock_builtin("open"))
def test_download_should_call_download_to_stream_with_given_uri(self, mock_open, mock_download_to_stream):
file_path = "~/archive.log"
uri = "/rest/testuri/3ec91dd2-0ebb-4484-8b2d-90d065114315"
mock_open.return_value = io.StringIO()
self.resource_client.download(uri, file_path)
mock_download_to_stream.assert_called_once_with(mock.ANY, uri)
@mock.patch.object(connection, "download_to_stream")
@mock.patch(mock_builtin("open"))
def test_download_should_call_download_to_stream_with_open_file(self, mock_open, mock_download_to_stream):
file_path = "~/archive.log"
uri = "/rest/testuri/3ec91dd2-0ebb-4484-8b2d-90d065114315"
fake_file = io.StringIO()
mock_open.return_value = fake_file
self.resource_client.download(uri, file_path)
mock_open.assert_called_once_with(file_path, 'wb')
mock_download_to_stream.assert_called_once_with(fake_file, mock.ANY)
@mock.patch.object(connection, "download_to_stream")
@mock.patch(mock_builtin("open"))
def test_download_should_return_true_when_success(self, mock_open, mock_download_to_stream):
file_path = "~/archive.log"
uri = "/rest/testuri/3ec91dd2-0ebb-4484-8b2d-90d065114315"
mock_download_to_stream.return_value = True
mock_open.return_value = io.StringIO()
result = self.resource_client.download(uri, file_path)
self.assertTrue(result)
@mock.patch.object(connection, "download_to_stream")
@mock.patch(mock_builtin("open"))
def test_download_should_return_false_when_error(self, mock_open, mock_download_to_stream):
file_path = "~/archive.log"
uri = "/rest/testuri/3ec91dd2-0ebb-4484-8b2d-90d065114315"
mock_download_to_stream.return_value = False
mock_open.return_value = io.StringIO()
result = self.resource_client.download(uri, file_path)
self.assertFalse(result)
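    # download() propagates the boolean returned by
    # connection.download_to_stream, so callers can tell whether the transfer
    # succeeded, as the two tests above demonstrate.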
class ResourceZeroBodyMixinTest(BaseTest):
def setUp(self):
self.connection = connection('127.0.0.1', 300)
self.resource_client = StubResourceZeroBody(self.connection)
super(ResourceZeroBodyMixinTest, self).setUp(self.resource_client)
@mock.patch.object(connection, "post")
@mock.patch.object(TaskMonitor, "wait_for_task")
def test_create_with_zero_body_called_once(self, mock_wait4task, mock_post):
mock_post.return_value = self.task, self.task
mock_wait4task.return_value = self.task
self.resource_client.create_with_zero_body()
mock_post.assert_called_once_with(
"/rest/testuri", {}, custom_headers=None)
@mock.patch.object(connection, "post")
@mock.patch.object(TaskMonitor, "wait_for_task")
def test_create_with_zero_body_called_once_without_uri(self, mock_wait4task, mock_post):
mock_post.return_value = self.task, self.task
mock_wait4task.return_value = self.task
self.resource_client.create_with_zero_body(timeout=-1)
mock_post.assert_called_once_with(
"/rest/testuri", {}, custom_headers=None)
@mock.patch.object(connection, "post")
@mock.patch.object(TaskMonitor, "wait_for_task")
def test_create_with_zero_body_and_custom_headers(self, mock_wait4task, mock_post):
mock_post.return_value = self.task, self.task
mock_wait4task.return_value = self.task
self.resource_client.create_with_zero_body(custom_headers=self.custom_headers)
mock_post.assert_called_once_with(mock.ANY, mock.ANY, custom_headers={"Accept-Language": "en_US"})
@mock.patch.object(connection, "post")
@mock.patch.object(TaskMonitor, "wait_for_task")
def test_create_with_zero_body_return_entity(self, mock_wait4task, mock_post):
response_body = {"resource_name": "name"}
mock_post.return_value = self.task, self.task
mock_wait4task.return_value = response_body
new_resource = self.resource_client.create_with_zero_body(timeout=-1)
self.assertNotEqual(new_resource, self.resource_client)
@mock.patch.object(connection, "post")
def test_create_with_zero_body_without_task(self, mock_post):
mock_post.return_value = None, self.response_body
new_resource = self.resource_client.create_with_zero_body()
self.assertNotEqual(new_resource, self.resource_client)
@mock.patch.object(Resource, "ensure_resource_data")
@mock.patch.object(connection, "put")
@mock.patch.object(TaskMonitor, "wait_for_task")
def test_update_with_zero_body_called_once(self, mock_wait4task, mock_update, mock_ensure_resource):
mock_update.return_value = self.task, self.task
mock_wait4task.return_value = self.task
self.resource_client.URI = "/rest/enclosures"
self.resource_client.update_with_zero_body("/rest/enclosures/09USE133E5H4/configuration",
timeout=-1)
mock_update.assert_called_once_with(
"/rest/enclosures/09USE133E5H4/configuration", None, custom_headers=None)
@mock.patch.object(Resource, "ensure_resource_data")
@mock.patch.object(connection, "put")
@mock.patch.object(TaskMonitor, "wait_for_task")
def test_update_with_zero_body_and_custom_headers(self, mock_wait4task, mock_update, mock_ensure_resource):
mock_update.return_value = self.task, self.task
mock_wait4task.return_value = self.task
self.resource_client.update_with_zero_body(uri="/rest/testuri", custom_headers=self.custom_headers)
mock_update.assert_called_once_with(mock.ANY, mock.ANY, custom_headers={"Accept-Language": "en_US"})
@mock.patch.object(Resource, "ensure_resource_data")
@mock.patch.object(connection, "put")
@mock.patch.object(TaskMonitor, "wait_for_task")
def test_update_with_zero_body_return_entity(self, mock_wait4task, mock_put, mock_ensure_resource):
response_body = {"resource_name": "name"}
self.resource_client.URI = "/rest/enclosures"
mock_put.return_value = self.task, self.task
mock_wait4task.return_value = response_body
result = self.resource_client.update_with_zero_body(
"/rest/enclosures/09USE133E5H4/configuration", timeout=-1)
self.assertEqual(result, response_body)
@mock.patch.object(connection, "put")
def test_update_with_zero_body_without_task(self, mock_put):
mock_put.return_value = None, self.response_body
self.resource_client.URI = "/rest/enclosures"
result = self.resource_client.update_with_zero_body(
"/rest/enclosures/09USE133E5H4/configuration", timeout=-1)
self.assertEqual(result, self.response_body)
class ResourcePatchMixinTest(BaseTest):
def setUp(self):
self.connection = connection('127.0.0.1', 300)
self.resource_client = StubResourcePatch(self.connection)
super(ResourcePatchMixinTest, self).setUp(self.resource_client)
@mock.patch.object(Resource, "ensure_resource_data")
@mock.patch.object(connection, "patch")
def test_patch_request_when_id_is_provided_v200(self, mock_patch, mock_ensure_resource):
uri = "/rest/testuri"
request_body = [{
"op": "replace",
"path": "/name",
"value": "new_name",
}]
mock_patch.return_value = {}, {}
self.connection._apiVersion = 200
self.resource_client.patch("replace", "/name", "new_name")
mock_patch.assert_called_once_with(uri, request_body, custom_headers={})
@mock.patch.object(Resource, "ensure_resource_data")
@mock.patch.object(connection, "patch")
def test_patch_request_when_id_is_provided_v300(self, mock_patch, mock_ensure_resource):
request_body = [{
"op": "replace",
"path": "/name",
"value": "new_name",
}]
mock_patch.return_value = {}, {}
self.resource_client.patch("replace", "/name", "new_name")
mock_patch.assert_called_once_with(
"/rest/testuri", request_body, custom_headers={"Content-Type": "application/json-patch+json"})
@mock.patch.object(Resource, "ensure_resource_data")
@mock.patch.object(connection, "patch")
def test_patch_request_when_uri_is_provided(self, mock_patch, mock_ensure_resource):
request_body = [{
"op": "replace",
"path": "/name",
"value": "new_name",
}]
mock_patch.return_value = {}, {}
self.resource_client.patch("replace", "/name", "new_name")
mock_patch.assert_called_once_with(
"/rest/testuri", request_body, custom_headers={"Content-Type": "application/json-patch+json"})
@mock.patch.object(Resource, "ensure_resource_data")
@mock.patch.object(connection, "patch")
def test_patch_with_custom_headers_v200(self, mock_patch, mock_ensure_resource):
mock_patch.return_value = {}, {}
self.connection._apiVersion = 200
self.resource_client.patch("operation", "/field", "value",
custom_headers=self.custom_headers)
mock_patch.assert_called_once_with(mock.ANY, mock.ANY, custom_headers={"Accept-Language": "en_US"})
@mock.patch.object(Resource, "ensure_resource_data")
@mock.patch.object(connection, "patch")
def test_patch_with_custom_headers_v300(self, mock_patch, mock_ensure_resource):
mock_patch.return_value = {}, {}
self.resource_client.patch("operation", "/field", "value",
custom_headers=self.custom_headers)
mock_patch.assert_called_once_with(mock.ANY,
mock.ANY,
custom_headers={"Accept-Language": "en_US",
"Content-Type": "application/json-patch+json"})
@mock.patch.object(Resource, "ensure_resource_data")
@mock.patch.object(connection, "patch")
@mock.patch.object(TaskMonitor, "wait_for_task")
def test_patch_return_entity(self, mock_wait4task, mock_patch, mock_ensure_resource):
entity = {"resource_id": "123a53cz"}
mock_patch.return_value = self.task, self.task
mock_wait4task.return_value = entity
self.resource_client.patch("replace", "/name", "new_name")
self.assertEqual(self.resource_client.data, entity)
@mock.patch.object(Resource, "ensure_resource_data")
@mock.patch.object(connection, "patch")
@mock.patch.object(TaskMonitor, "get_completed_task")
def test_patch_request_custom_headers_with_content_type(self, mock_task, mock_patch, mock_ensure_resource):
uri = "/rest/testuri"
dict_info = {"resource_name": "a name"}
mock_patch.return_value = {}, {}
headers = {"Content-Type": "application/json",
"Extra": "extra"}
self.connection._apiVersion = 300
self.resource_client.patch_request(uri, body=dict_info, custom_headers=headers)
mock_patch.assert_called_once_with(uri, dict_info, custom_headers=headers)
@mock.patch.object(Resource, "ensure_resource_data")
@mock.patch.object(connection, "patch")
@mock.patch.object(TaskMonitor, "get_completed_task")
def test_patch_request_custom_headers(self, mock_task, mock_patch, mock_ensure_resource):
uri = "/rest/testuri"
dict_info = {"resource_name": "a name"}
mock_patch.return_value = {}, {}
headers = {"Extra": "extra"}
self.connection._apiVersion = 300
self.resource_client.patch_request(uri, body=dict_info, custom_headers=headers)
mock_patch.assert_called_once_with(
uri,
dict_info,
custom_headers={"Extra": "extra",
"Content-Type": "application/json-patch+json"})
@mock.patch.object(Resource, "ensure_resource_data")
@mock.patch.object(connection, "patch")
@mock.patch.object(TaskMonitor, "wait_for_task")
def test_wait_for_activity_on_patch(self, mock_wait4task, mock_patch, mock_ensure_resource):
entity = {"resource_id": "123a53cz"}
mock_patch.return_value = self.task, self.task
mock_wait4task.return_value = entity
self.resource_client.patch("replace", "/name", "new_name")
mock_wait4task.assert_called_once_with(self.task, mock.ANY)
class ResourceUtilizationMixinTest(BaseTest):
def setUp(self):
self.connection = connection('127.0.0.1', 300)
self.resource_client = StubResourceUtilization(self.connection)
super(ResourceUtilizationMixinTest, self).setUp(self.resource_client)
@mock.patch.object(Resource, "ensure_resource_data")
@mock.patch.object(connection, "get")
def test_get_utilization_with_args(self, mock_get, mock_ensure_resource):
self.resource_client.get_utilization(fields="AmbientTemperature,AveragePower,PeakPower",
filter="startDate=2016-05-30T03:29:42.361Z",
refresh=True, view="day")
expected_uri = "/rest/testuri/utilization" \
"?filter=startDate%3D2016-05-30T03%3A29%3A42.361Z" \
"&fields=AmbientTemperature%2CAveragePower%2CPeakPower" \
"&refresh=true" \
"&view=day"
mock_get.assert_called_once_with(expected_uri)
@mock.patch.object(Resource, "ensure_resource_data")
@mock.patch.object(connection, "get")
def test_get_utilization_with_multiple_filters(self, mock_get, mock_ensure_resource):
self.resource_client.get_utilization(
fields="AmbientTemperature,AveragePower,PeakPower",
filter=["startDate=2016-05-30T03:29:42.361Z",
"endDate=2016-05-31T03:29:42.361Z"],
refresh=True,
view="day")
expected_uri = "/rest/testuri/utilization" \
"?filter=startDate%3D2016-05-30T03%3A29%3A42.361Z" \
"&filter=endDate%3D2016-05-31T03%3A29%3A42.361Z" \
"&fields=AmbientTemperature%2CAveragePower%2CPeakPower" \
"&refresh=true" \
"&view=day"
mock_get.assert_called_once_with(expected_uri)
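    # As the expected URIs above show, filter values are percent-encoded and a
    # list of filters is serialized as repeated "filter=" query parameters.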
@mock.patch.object(Resource, "ensure_resource_data")
@mock.patch.object(connection, "get")
def test_get_utilization_by_id_with_defaults(self, mock_get, mock_ensure_resource):
self.resource_client.get_utilization()
expected_uri = "/rest/testuri/utilization"
mock_get.assert_called_once_with(expected_uri)
@mock.patch.object(Resource, "ensure_resource_data")
@mock.patch.object(connection, "get")
def test_get_utilization_by_uri_with_defaults(self, mock_get, mock_ensure_resource):
self.resource_client.get_utilization()
expected_uri = "/rest/testuri/utilization"
mock_get.assert_called_once_with(expected_uri)
class ResourceSchemaMixinTest(BaseTest):
def setUp(self):
self.connection = connection('127.0.0.1', 300)
self.resource_client = StubResourceSchema(self.connection)
super(ResourceSchemaMixinTest, self).setUp(self.resource_client)
@mock.patch.object(connection, "get")
def test_get_schema_uri(self, mock_get):
self.resource_client.get_schema()
mock_get.assert_called_once_with(self.URI + "/schema")
class ResourceTest(BaseTest):
def setUp(self):
self.connection = connection('127.0.0.1', 300)
self.resource_client = StubResource(self.connection)
super(ResourceTest, self).setUp(self.resource_client)
self.resource_helper = ResourceHelper(self.URI, self.connection, None)
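    # Reminder for the tests below: stacked mock.patch decorators are applied
    # bottom-up, so the decorator closest to the function supplies the first
    # mock argument.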
@mock.patch.object(ResourceHelper, "do_put")
@mock.patch.object(Resource, "ensure_resource_data")
    def test_ensure_resource_should_call_once(self, mock_ensure_resource, mock_do_put):
self.resource_client.data = {"uri": "/rest/test"}
self.resource_client.update(data={"name": "test"})
mock_do_put.assert_called_once()
mock_ensure_resource.assert_called_once()
def test_ensure_resource_raise_unique_identifier_exception(self):
self.resource_client.data = []
self.assertRaises(exceptions.HPOneViewMissingUniqueIdentifiers,
self.resource_client.ensure_resource_data)
@mock.patch.object(ResourceHelper, "do_get")
def test_ensure_resource_raise_resource_not_found_exception_with_uri(self, mock_do_get):
self.resource_client.data = {"uri": "/uri/test"}
mock_do_get.return_value = []
with self.assertRaises(exceptions.HPOneViewResourceNotFound):
self.resource_client.ensure_resource_data(update_data=True)
@mock.patch.object(Resource, "get_by")
def test_ensure_resource_raise_resource_not_found_exception_without_uri(self, mock_get_by):
self.resource_client.data = {"name": "testname"}
mock_get_by.return_value = []
with self.assertRaises(exceptions.HPOneViewResourceNotFound):
self.resource_client.ensure_resource_data(update_data=True)
@mock.patch.object(ResourceHelper, "do_get")
@mock.patch.object(Resource, "get_by")
    def test_ensure_resource_should_update_resource_data(self, mock_get_by, mock_do_get):
        get_by_return_value = [{"name": "testname", "uri": "/rest/testuri"}]
        self.resource_client.data = {"name": "testname"}
        mock_get_by.return_value = get_by_return_value
self.resource_client.ensure_resource_data(update_data=True)
self.assertEqual(self.resource_client.data, get_by_return_value[0])
@mock.patch.object(Resource, "get_by")
def test_ensure_resource_without_data_update(self, mock_get_by):
mock_get_by.return_value = []
actual_result = self.resource_client.ensure_resource_data(update_data=False)
expected_result = None
self.assertEqual(actual_result, expected_result)
@mock.patch.object(connection, "get")
def test_get_all_called_once(self, mock_get):
filter = "'name'='OneViewSDK \"Test FC Network'"
sort = "name:ascending"
query = "name NE 'WrongName'"
mock_get.return_value = {"members": [{"member": "member"}]}
result = self.resource_helper.get_all(
1, 500, filter, query, sort)
uri = "{resource_uri}?start=1" \
"&count=500" \
"&filter=%27name%27%3D%27OneViewSDK%20%22Test%20FC%20Network%27" \
"&query=name%20NE%20%27WrongName%27" \
"&sort=name%3Aascending".format(resource_uri=self.URI)
self.assertEqual([{"member": "member"}], result)
mock_get.assert_called_once_with(uri)
@mock.patch.object(connection, "get")
def test_get_all_with_defaults(self, mock_get):
self.resource_client.get_all()
uri = "{resource_uri}?start=0&count=-1".format(resource_uri=self.URI)
mock_get.assert_called_once_with(uri)
@mock.patch.object(connection, "get")
def test_get_all_with_custom_uri(self, mock_get):
self.resource_helper.get_all(uri="/rest/testuri/12467836/subresources")
uri = "/rest/testuri/12467836/subresources?start=0&count=-1"
mock_get.assert_called_once_with(uri)
@mock.patch.object(connection, "get")
def test_get_all_with_custom_uri_and_query_string(self, mock_get):
self.resource_helper.get_all(uri="/rest/testuri/12467836/subresources?param=value")
uri = "/rest/testuri/12467836/subresources?param=value&start=0&count=-1"
mock_get.assert_called_once_with(uri)
@mock.patch.object(connection, "get")
def test_get_all_with_different_resource_uri_should_fail(self, mock_get):
try:
self.resource_helper.get_all(uri="/rest/other/resource/12467836/subresources")
except exceptions.HPOneViewUnknownType as e:
self.assertEqual(UNRECOGNIZED_URI, e.args[0])
else:
self.fail("Expected Exception was not raised")
@mock.patch.object(connection, "get")
def test_get_all_should_do_multi_requests_when_response_paginated(self, mock_get):
uri_list = ["/rest/testuri?start=0&count=-1",
"/rest/testuri?start=3&count=3",
"/rest/testuri?start=6&count=3"]
results = [{"nextPageUri": uri_list[1], "members": [{"id": "1"}, {"id": "2"}, {"id": "3"}]},
{"nextPageUri": uri_list[2], "members": [{"id": "4"}, {"id": "5"}, {"id": "6"}]},
{"nextPageUri": None, "members": [{"id": "7"}, {"id": "8"}]}]
mock_get.side_effect = results
self.resource_client.get_all()
expected_calls = [call(uri_list[0]), call(uri_list[1]), call(uri_list[2])]
self.assertEqual(mock_get.call_args_list, expected_calls)
@mock.patch.object(connection, "get")
def test_get_all_with_count_should_do_multi_requests_when_response_paginated(self, mock_get):
uri_list = ["/rest/testuri?start=0&count=15",
"/rest/testuri?start=3&count=3",
"/rest/testuri?start=6&count=3"]
results = [{"nextPageUri": uri_list[1], "members": [{"id": "1"}, {"id": "2"}, {"id": "3"}]},
{"nextPageUri": uri_list[2], "members": [{"id": "4"}, {"id": "5"}, {"id": "6"}]},
{'nextPageUri': None, "members": [{"id": "7"}, {"id": "8"}]}]
mock_get.side_effect = results
self.resource_client.get_all(count=15)
expected_calls = [call(uri_list[0]), call(uri_list[1]), call(uri_list[2])]
self.assertEqual(mock_get.call_args_list, expected_calls)
@mock.patch.object(connection, "get")
def test_get_all_should_return_all_items_when_response_paginated(self, mock_get):
uri_list = ["/rest/testuri?start=0&count=-1",
"/rest/testuri?start=3&count=3",
"/rest/testuri?start=6&count=1"]
results = [{"nextPageUri": uri_list[1], "members": [{"id": "1"}, {"id": "2"}, {"id": "3"}]},
{"nextPageUri": uri_list[2], "members": [{"id": "4"}, {"id": "5"}, {"id": "6"}]},
{"nextPageUri": None, "members": [{"id": "7"}]}]
mock_get.side_effect = results
result = self.resource_client.get_all()
expected_items = [{'id': '1'}, {'id': '2'}, {'id': '3'}, {'id': '4'}, {'id': '5'}, {'id': '6'}, {'id': '7'}]
self.assertSequenceEqual(result, expected_items)
@mock.patch.object(connection, 'get')
def test_get_all_should_limit_results_to_requested_count_when_response_is_paginated(self, mock_get):
uri_list = ['/rest/testuri?start=0&count=15',
'/rest/testuri?start=3&count=3',
'/rest/testuri?start=6&count=1']
results = [{"nextPageUri": uri_list[1], "members": [{"id": "1"}, {"id": "2"}, {"id": "3"}]},
{"nextPageUri": uri_list[2], "members": [{"id": "4"}, {"id": "5"}, {"id": "6"}]},
{"nextPageUri": None, "members": [{"id": "7"}]}]
mock_get.side_effect = results
result = self.resource_client.get_all(count=15)
expected_items = [{"id": "1"}, {"id": "2"}, {"id": "3"}, {"id": "4"}, {"id": "5"}, {"id": "6"}, {"id": "7"}]
self.assertSequenceEqual(result, expected_items)
@mock.patch.object(connection, "get")
def test_get_all_should_stop_requests_when_requested_count_reached(self, mock_get):
"""
In this case, the user provides a maximum number of results to be returned but for pagination purposes, a
nextPageUri is returned by OneView.
"""
uri_list = ["/rest/testuri?start=0&count=3",
"/rest/testuri?start=3&count=3",
"/rest/testuri?start=6&count=3"]
results = [{"nextPageUri": uri_list[1], "members": [{"id": "1"}, {"id": "2"}, {"id": "3"}]},
{"nextPageUri": uri_list[2], "members": [{"id": "4"}, {"id": "5"}, {"id": "6"}]},
{"nextPageUri": None, "members": [{"id": "7"}, {"id": "8"}]}]
mock_get.side_effect = results
self.resource_client.get_all(count=3)
mock_get.assert_called_once_with(uri_list[0])
@mock.patch.object(connection, "get")
def test_get_all_should_stop_requests_when_next_page_is_equal_to_current_page(self, mock_get):
uri = "/rest/testuri?start=0&count=-1"
members = [{"id": "1"}, {"id": "2"}, {"id": "3"}]
mock_get.return_value = {
"nextPageUri": uri,
"members": members,
"uri": uri
}
result = self.resource_client.get_all()
self.assertSequenceEqual(result, members)
mock_get.assert_called_once_with(uri)
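    # Taken together, the pagination tests above imply a loop roughly like the
    # following sketch (an illustration of the expected behavior, not the
    # library's actual implementation; "requested_count" is made up here):
    #
    #     members, response = [], connection.get(uri)
    #     while True:
    #         members.extend(response.get("members") or [])
    #         next_uri = response.get("nextPageUri")
    #         if not next_uri or next_uri == response.get("uri") \
    #                 or 0 < requested_count <= len(members):
    #             break
    #         response = connection.get(next_uri)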
@mock.patch.object(connection, "get")
def test_get_all_should_return_empty_list_when_response_has_no_items(self, mock_get):
mock_get.return_value = {"nextPageUri": None, "members": []}
result = self.resource_client.get_all()
self.assertEqual(result, [])
@mock.patch.object(connection, "get")
def test_get_all_should_return_empty_list_when_no_members(self, mock_get):
mock_get.return_value = {"nextPageUri": None, "members": None}
result = self.resource_client.get_all()
self.assertEqual(result, [])
@mock.patch.object(ResourceHelper, "do_get")
def test_refresh(self, mock_do_get):
updated_data = {"resource_name": "updated name"}
mock_do_get.return_value = updated_data
self.resource_client.refresh()
self.assertEqual(self.resource_client.data, updated_data)
@mock.patch.object(connection, "post")
def test_create_uri(self, mock_post):
dict_to_create = {"resource_name": "a name"}
mock_post.return_value = {}, {}
expected_dict = {"resource_name": "a name", "type": self.TYPE_V300}
self.resource_client.create(dict_to_create, timeout=-1)
mock_post.assert_called_once_with(self.URI, expected_dict, custom_headers=None)
@mock.patch.object(connection, "post")
def test_create_with_api_version_200(self, mock_post):
dict_to_create = {"resource_name": "a name"}
mock_post.return_value = {}, {}
self.connection._apiVersion = 200
self.resource_client._merge_default_values()
expected_dict = {"resource_name": "a name", "type": self.TYPE_V200}
self.resource_client.create(dict_to_create, timeout=-1)
mock_post.assert_called_once_with(self.URI, expected_dict, custom_headers=None)
@mock.patch.object(connection, "post")
def test_create_with_default_api_version_300(self, mock_post):
dict_to_create = {"resource_name": "a name"}
mock_post.return_value = {}, {}
expected_dict = {"resource_name": "a name", "type": self.TYPE_V300}
self.resource_client.create(dict_to_create, timeout=-1)
mock_post.assert_called_once_with(self.URI, expected_dict, custom_headers=None)
@mock.patch.object(connection, "post")
def test_create_should_not_override_resource_properties(self, mock_post):
dict_to_create = {"resource_name": "a name", "type": "anotherType"}
mock_post.return_value = {}, {}
expected = {"resource_name": "a name", "type": "anotherType"}
self.resource_client.create(dict_to_create)
mock_post.assert_called_once_with(self.URI, expected, custom_headers=None)
@mock.patch.object(connection, "post")
def test_create_without_default_values(self, mock_post):
dict_to_create = {"resource_name": "a name"}
mock_post.return_value = {}, {}
resource_client = ResourceClient(self.connection, self.URI)
resource_client.create(dict_to_create, timeout=-1)
mock_post.assert_called_once_with(self.URI, dict_to_create, custom_headers=None)
@mock.patch.object(connection, "post")
def test_create_with_custom_headers(self, mock_post):
dict_to_create = {"resource_name": "a name"}
mock_post.return_value = {}, {}
self.resource_client.create(dict_to_create, custom_headers=self.custom_headers)
mock_post.assert_called_once_with(mock.ANY, mock.ANY, custom_headers={"Accept-Language": "en_US"})
@mock.patch.object(connection, "post")
def test_create_should_return_new_resource_instance(self, mock_post):
mock_post.return_value = {}, {}
new_instance = self.resource_client.create({})
self.assertNotEqual(self.resource_client, new_instance)
@mock.patch.object(connection, "post")
@mock.patch.object(TaskMonitor, "wait_for_task")
def test_wait_for_activity_on_create(self, mock_wait4task, mock_post):
mock_post.return_value = self.task, {}
mock_wait4task.return_value = self.task
self.resource_client.create({"test": "test"}, timeout=60)
mock_wait4task.assert_called_once_with(self.task, 60)
@mock.patch.object(Resource, "ensure_resource_data")
@mock.patch.object(connection, "delete")
def test_delete_should_return_true(self, mock_delete, mock_ensure_resource):
mock_delete.return_value = None, self.response_body
self.resource_client.data = {"uri": "/rest/testuri"}
result = self.resource_client.delete()
self.assertTrue(result)
@mock.patch.object(connection, 'delete')
def test_helper_delete_all_should_return_true(self, mock_delete):
mock_delete.return_value = None, self.response_body
filter = "name='Exchange Server'"
result = self.resource_helper.delete_all(filter=filter, force=True, timeout=-1)
self.assertTrue(result)
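# Note: stacked @mock.patch decorators are applied bottom-up, so the mock for
# the innermost decorator (here TaskMonitor.wait_for_task) arrives as the
# first mock argument of the test method.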
@mock.patch.object(Resource, "ensure_resource_data")
@mock.patch.object(connection, "delete")
@mock.patch.object(TaskMonitor, "wait_for_task")
def test_delete_with_force(self, mock_wait4task, mock_delete, mock_ensure_resource):
mock_delete.return_value = self.task, self.response_body
mock_wait4task.return_value = self.task
self.resource_client.data = {"uri": "/rest/testuri"}
self.resource_client.delete(force=True)
mock_delete.assert_called_once_with("/rest/testuri?force=True", custom_headers=None)
@mock.patch.object(Resource, "ensure_resource_data")
@mock.patch.object(connection, "delete")
@mock.patch.object(TaskMonitor, "wait_for_task")
def test_delete_with_custom_headers(self, mock_wait4task, mock_delete, mock_ensure_resource):
mock_delete.return_value = self.task, self.response_body
mock_wait4task.return_value = self.task
self.resource_client.data = {"uri": "/rest/testuri"}
self.resource_client.delete(custom_headers=self.custom_headers)
mock_delete.assert_called_once_with(mock.ANY, custom_headers={"Accept-Language": "en_US"})
@mock.patch.object(Resource, "ensure_resource_data")
@mock.patch.object(connection, "put")
def test_update_with_uri_called_once(self, mock_put, mock_ensure_resource):
uri = "/rest/testuri"
dict_to_update = {"name": "test", "type": "typeV300"}
self.resource_client.data = {'uri': uri}
expected = {"name": "test", "type": "typeV300", "uri": uri}
mock_put.return_value = None, self.response_body
self.resource_client.update(dict_to_update)
self.assertEqual(self.response_body, self.resource_client.data)
mock_put.assert_called_once_with(uri, expected, custom_headers=None)
@mock.patch.object(Resource, "ensure_resource_data")
@mock.patch.object(connection, "put")
def test_update_with_custom_headers(self, mock_put, mock_ensure_resource):
dict_to_update = {"name": "test"}
mock_put.return_value = None, self.response_body
self.resource_client.update(dict_to_update, custom_headers=self.custom_headers)
mock_put.assert_called_once_with(mock.ANY, mock.ANY, custom_headers={"Accept-Language": "en_US"})
@mock.patch.object(Resource, "ensure_resource_data")
@mock.patch.object(connection, "put")
def test_update_with_force(self, mock_put, mock_ensure_resource):
dict_to_update = {"name": "test"}
uri = "/rest/testuri"
expected = {"name": "test", "uri": uri, "type": "typeV300"}
mock_put.return_value = None, self.response_body
self.resource_client.update(dict_to_update)
expected_uri = "/rest/testuri"
mock_put.assert_called_once_with(expected_uri, expected, custom_headers=None)
@mock.patch.object(Resource, "ensure_resource_data")
@mock.patch.object(connection, "put")
def test_update_with_default_api_version_300(self, mock_put, mock_ensure_resource):
dict_to_update = {"name": "test"}
uri = "/rest/testuri"
mock_put.return_value = None, self.response_body
expected_dict = {"name": "test", "type": self.TYPE_V300, "uri": uri}
self.resource_client._merge_default_values()
self.resource_client.update(dict_to_update)
mock_put.assert_called_once_with(uri, expected_dict, custom_headers=None)
@mock.patch.object(Resource, "ensure_resource_data")
@mock.patch.object(connection, "put")
def test_update_should_not_override_resource_properties(self, mock_put, mock_ensure_resource):
dict_to_update = {"name": "test", "type": "anotherType"}
uri = "/rest/testuri"
mock_put.return_value = None, self.response_body
expected = {"name": "test", "type": "anotherType", "uri": uri}
self.resource_client.update(dict_to_update)
mock_put.assert_called_once_with(uri, expected, custom_headers=None)
@mock.patch.object(Resource, "ensure_resource_data")
@mock.patch.object(connection, "put")
def test_update_without_default_values(self, mock_put, mock_ensure_resource):
uri = "/rest/testuri"
dict_to_update = {"name": "test"}
expected = {"name": "test", "uri": uri, "type": "typeV300"}
mock_put.return_value = None, self.response_body
self.resource_client.update(dict_to_update)
mock_put.assert_called_once_with(uri, expected, custom_headers=None)
@mock.patch.object(Resource, "ensure_resource_data")
@mock.patch.object(connection, "put")
@mock.patch.object(TaskMonitor, "wait_for_task")
def test_update_uri(self, mock_wait4task, mock_update, mock_ensure_resource):
uri = "/rest/testuri"
dict_to_update = {"resource_data": "resource_data", "uri": uri}
expected = {"resource_data": "resource_data", "uri": uri, "type": "typeV300"}
mock_update.return_value = self.task, self.response_body
mock_wait4task.return_value = self.task
self.resource_client.update(dict_to_update, False)
self.assertEqual(self.task, self.resource_client.data)
mock_update.assert_called_once_with(uri, expected, custom_headers=None)
@mock.patch.object(Resource, "ensure_resource_data")
@mock.patch.object(connection, "put")
@mock.patch.object(TaskMonitor, "wait_for_task")
def test_update_return_entity(self, mock_wait4task, mock_put, mock_ensure_resource):
uri = "/rest/testuri"
dict_to_update = {"resource_name": "a name", "uri": uri}
mock_put.return_value = self.task, {}
mock_wait4task.return_value = dict_to_update
self.resource_client.update(dict_to_update, timeout=-1)
self.assertEqual(self.resource_client.data, dict_to_update)
@mock.patch.object(Resource, "get_by")
def test_get_by_name_with_result(self, mock_get_by):
self.resource_client.get_by_name("Resource Name,")
mock_get_by.assert_called_once_with("name", "Resource Name,")
@mock.patch.object(Resource, "get_by")
def test_get_by_name_without_result(self, mock_get_by):
mock_get_by.return_value = []
response = self.resource_client.get_by_name("Resource Name,")
self.assertIsNone(response)
mock_get_by.assert_called_once_with("name", "Resource Name,")
@mock.patch.object(connection, "get")
def test_get_by_uri(self, mock_get):
self.resource_client.get_by_uri("/rest/testuri")
mock_get.assert_called_once_with('/rest/testuri')
@mock.patch.object(connection, "get")
def test_get_by_id_with_result(self, mock_get):
self.resource_client.get_by_id("123")
mock_get.assert_called_once_with("/rest/testuri/123")
@mock.patch.object(connection, "get")
def test_get_by_id_without_result(self, mock_get):
mock_get.return_value = []
response = self.resource_client.get_by_id("123")
self.assertIsNone(response)
mock_get.assert_called_once_with("/rest/testuri/123")
@mock.patch.object(connection, "get")
def test_get_collection_uri(self, mock_get):
mock_get.return_value = {"members": [{"key": "value"}, {"key": "value"}]}
self.resource_helper.get_collection()
mock_get.assert_called_once_with(self.URI)
@mock.patch.object(connection, "get")
def test_get_collection_with_filter(self, mock_get):
mock_get.return_value = {}
self.resource_helper.get_collection(filter="name=name")
mock_get.assert_called_once_with(self.URI + "?filter=name%3Dname")
@mock.patch.object(connection, "get")
def test_get_collection_with_path(self, mock_get):
mock_get.return_value = {}
self.resource_helper.get_collection(path="/test")
mock_get.assert_called_once_with(self.URI + "/test")
@mock.patch.object(connection, "get")
def test_get_collection_with_multiple_filters(self, mock_get):
mock_get.return_value = {}
self.resource_helper.get_collection(filter=["name1=one", "name2=two", "name=three"])
mock_get.assert_called_once_with(self.URI + "?filter=name1%3Done&filter=name2%3Dtwo&filter=name%3Dthree")
@mock.patch.object(connection, "get")
def test_get_collection_should_return_list(self, mock_get):
mock_get.return_value = {"members": [{"key": "value"}, {"key": "value"}]}
collection = self.resource_helper.get_collection()
self.assertEqual(len(collection), 2)
def test_build_uri_with_id_should_work(self):
input = "09USE7335NW35"
expected_output = "/rest/testuri/09USE7335NW35"
result = self.resource_client._helper.build_uri(input)
self.assertEqual(expected_output, result)
def test_build_uri_with_uri_should_work(self):
input = "/rest/testuri/09USE7335NW3"
expected_output = "/rest/testuri/09USE7335NW3"
result = self.resource_client._helper.build_uri(input)
self.assertEqual(expected_output, result)
def test_build_uri_with_none_should_raise_exception(self):
try:
self.resource_client._helper.build_uri(None)
except ValueError as exception:
self.assertEqual(RESOURCE_CLIENT_INVALID_ID, exception.args[0])
else:
self.fail("Expected Exception was not raised")
def test_build_uri_with_empty_str_should_raise_exception(self):
try:
self.resource_client._helper.build_uri('')
except ValueError as exception:
self.assertEqual(RESOURCE_CLIENT_INVALID_ID, exception.args[0])
else:
self.fail("Expected Exception was not raised")
def test_build_uri_with_different_resource_uri_should_raise_exception(self):
try:
self.resource_client._helper.build_uri(
"/rest/test/another/resource/uri/09USE7335NW3")
except exceptions.HPOneViewUnknownType as exception:
self.assertEqual(UNRECOGNIZED_URI, exception.args[0])
else:
self.fail("Expected Exception was not raised")
def test_build_uri_with_incomplete_uri_should_raise_exception(self):
try:
self.resource_client._helper.build_uri("/rest/")
except exceptions.HPOneViewUnknownType as exception:
self.assertEqual(UNRECOGNIZED_URI, exception.args[0])
else:
self.fail("Expected Exception was not raised")
def test_build_subresource_uri(self):
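# Each option pairs a resource and a subresource given either as a bare ID or
# a full URI; build_subresource_uri must normalize every combination to 'uri'.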
options = [
dict(
resource="1",
subresource="2",
path="sub",
uri="/rest/testuri/1/sub/2"),
dict(
resource="/rest/testuri/3",
subresource="4",
path="sub",
uri="/rest/testuri/3/sub/4"),
dict(
resource="5",
subresource="/rest/testuri/5/sub/6",
path="sub",
uri="/rest/testuri/5/sub/6"),
dict(
resource="/rest/testuri/7",
subresource="/rest/testuri/7/sub/8",
path="sub",
uri="/rest/testuri/7/sub/8"),
dict(
resource=None,
subresource="/rest/testuri/9/sub/10",
path="sub",
uri="/rest/testuri/9/sub/10"),
dict(
resource="/rest/testuri/11",
subresource="12",
path="/sub/",
uri="/rest/testuri/11/sub/12"),
dict(
resource="/rest/testuri/13",
subresource=None,
path="/sub/",
uri="/rest/testuri/13/sub"),
]
for option in options:
uri = self.resource_client._helper.build_subresource_uri(option["resource"], option["subresource"], option["path"])
self.assertEqual(uri, option["uri"])
def test_build_subresource_uri_with_subresourceid_and_without_resource_should_fail(self):
try:
self.resource_client._helper.build_subresource_uri(None, "123456", "sub-path")
except exceptions.HPOneViewValueError as exception:
self.assertEqual(RESOURCE_ID_OR_URI_REQUIRED, exception.args[0])
else:
self.fail("Expected Exception was not raised")
def test_merge_resources(self):
resource1 = {"name": "resource1", "type": "resource"}
resource2 = {"name": "resource2", "port": "1"}
expected_resource = {"name": "resource2", "type": "resource", "port": "1"}
merged_resource = merge_resources(resource1, resource2)
self.assertEqual(merged_resource, expected_resource)
def test_merge_default_values(self):
default_type = {"type": "type1"}
resource1 = {"name": "resource1"}
resource2 = {"name": "resource2"}
result_list = merge_default_values([resource1, resource2], default_type)
expected_list = [
{"name": "resource1", "type": "type1"},
{"name": "resource2", "type": "type1"}
]
self.assertEqual(result_list, expected_list)
def test_raise_unavailable_method_exception(self):
self.assertRaises(exceptions.HPOneViewUnavailableMethod,
unavailable_method)
class FakeResource(object):
def __init__(self, con):
self._connection = con
self._client = ResourceClient(con, "/rest/fake/resource")
def get_fake(self, uri):
return self._client.get(uri)
class ResourceClientTest(unittest.TestCase):
URI = "/rest/testuri"
TYPE_V200 = 'typeV200'
TYPE_V300 = 'typeV300'
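# Maps an API version to the default 'type' value merged into request bodies
# when default_values is passed to create/update.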
DEFAULT_VALUES = {
'200': {'type': TYPE_V200},
'300': {'type': TYPE_V300}
}
def setUp(self):
super(ResourceClientTest, self).setUp()
self.host = '127.0.0.1'
self.connection = connection(self.host, 300)
self.resource_client = ResourceClient(self.connection, self.URI)
self.task = {"task": "task", "taskState": "Finished"}
self.response_body = {"body": "body"}
self.custom_headers = {'Accept-Language': 'en_US'}
@mock.patch.object(connection, 'get')
def test_get_all_called_once(self, mock_get):
filter = "'name'='OneViewSDK \"Test FC Network'"
sort = 'name:ascending'
query = "name NE 'WrongName'"
view = '"{view-name}"'
scope_uris = '/rest/scopes/cd237b60-09e2-45c4-829e-082e318a6d2a'
mock_get.return_value = {"members": [{"member": "member"}]}
result = self.resource_client.get_all(
1, 500, filter, query, sort, view, 'name,owner,modified', scope_uris=scope_uris)
uri = '{resource_uri}?start=1' \
'&count=500' \
'&filter=%27name%27%3D%27OneViewSDK%20%22Test%20FC%20Network%27' \
'&query=name%20NE%20%27WrongName%27' \
'&sort=name%3Aascending' \
'&view=%22%7Bview-name%7D%22' \
'&fields=name%2Cowner%2Cmodified' \
'&scopeUris=/rest/scopes/cd237b60-09e2-45c4-829e-082e318a6d2a'.format(resource_uri=self.URI)
self.assertEqual([{'member': 'member'}], result)
mock_get.assert_called_once_with(uri)
@mock.patch.object(connection, 'get')
def test_get_all_with_defaults(self, mock_get):
self.resource_client.get_all()
uri = "{resource_uri}?start=0&count=-1".format(resource_uri=self.URI)
mock_get.assert_called_once_with(uri)
@mock.patch.object(connection, 'get')
def test_get_all_with_custom_uri(self, mock_get):
self.resource_client.get_all(uri='/rest/testuri/12467836/subresources')
uri = "/rest/testuri/12467836/subresources?start=0&count=-1"
mock_get.assert_called_once_with(uri)
@mock.patch.object(connection, 'get')
def test_get_all_with_custom_uri_and_query_string(self, mock_get):
self.resource_client.get_all(uri='/rest/testuri/12467836/subresources?param=value')
uri = "/rest/testuri/12467836/subresources?param=value&start=0&count=-1"
mock_get.assert_called_once_with(uri)
@mock.patch.object(connection, 'get')
def test_get_all_with_different_resource_uri_should_fail(self, mock_get):
try:
self.resource_client.get_all(uri='/rest/other/resource/12467836/subresources')
except exceptions.HPOneViewUnknownType as e:
self.assertEqual(UNRECOGNIZED_URI, e.args[0])
else:
self.fail('Expected Exception was not raised')
@mock.patch.object(connection, 'get')
def test_get_all_should_do_multi_requests_when_response_paginated(self, mock_get):
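# side_effect feeds one page per call; get_all keeps following nextPageUri
# until it is None, issuing three GET requests in total — roughly:
#   while next_page_uri:
#       page = connection.get(next_page_uri)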
uri_list = ['/rest/testuri?start=0&count=-1',
'/rest/testuri?start=3&count=3',
'/rest/testuri?start=6&count=3']
results = [{'nextPageUri': uri_list[1], 'members': [{'id': '1'}, {'id': '2'}, {'id': '3'}]},
{'nextPageUri': uri_list[2], 'members': [{'id': '4'}, {'id': '5'}, {'id': '6'}]},
{'nextPageUri': None, 'members': [{'id': '7'}, {'id': '8'}]}]
mock_get.side_effect = results
self.resource_client.get_all()
expected_calls = [call(uri_list[0]), call(uri_list[1]), call(uri_list[2])]
self.assertEqual(mock_get.call_args_list, expected_calls)
@mock.patch.object(connection, 'get')
def test_get_all_with_count_should_do_multi_requests_when_response_paginated(self, mock_get):
uri_list = ['/rest/testuri?start=0&count=15',
'/rest/testuri?start=3&count=3',
'/rest/testuri?start=6&count=3']
results = [{'nextPageUri': uri_list[1], 'members': [{'id': '1'}, {'id': '2'}, {'id': '3'}]},
{'nextPageUri': uri_list[2], 'members': [{'id': '4'}, {'id': '5'}, {'id': '6'}]},
{'nextPageUri': None, 'members': [{'id': '7'}, {'id': '8'}]}]
mock_get.side_effect = results
self.resource_client.get_all(count=15)
expected_calls = [call(uri_list[0]), call(uri_list[1]), call(uri_list[2])]
self.assertEqual(mock_get.call_args_list, expected_calls)
@mock.patch.object(connection, 'get')
def test_get_all_should_return_all_items_when_response_paginated(self, mock_get):
uri_list = ['/rest/testuri?start=0&count=-1',
'/rest/testuri?start=3&count=3',
'/rest/testuri?start=6&count=1']
results = [{'nextPageUri': uri_list[1], 'members': [{'id': '1'}, {'id': '2'}, {'id': '3'}]},
{'nextPageUri': uri_list[2], 'members': [{'id': '4'}, {'id': '5'}, {'id': '6'}]},
{'nextPageUri': None, 'members': [{'id': '7'}]}]
mock_get.side_effect = results
result = self.resource_client.get_all()
expected_items = [{'id': '1'}, {'id': '2'}, {'id': '3'}, {'id': '4'}, {'id': '5'}, {'id': '6'}, {'id': '7'}]
self.assertSequenceEqual(result, expected_items)
@mock.patch.object(connection, 'get')
def test_get_all_should_limit_results_to_requested_count_when_response_is_paginated(self, mock_get):
uri_list = ['/rest/testuri?start=0&count=15',
'/rest/testuri?start=3&count=3',
'/rest/testuri?start=6&count=1']
results = [{'nextPageUri': uri_list[1], 'members': [{'id': '1'}, {'id': '2'}, {'id': '3'}]},
{'nextPageUri': uri_list[2], 'members': [{'id': '4'}, {'id': '5'}, {'id': '6'}]},
{'nextPageUri': None, 'members': [{'id': '7'}]}]
mock_get.side_effect = results
result = self.resource_client.get_all(count=15)
expected_items = [{'id': '1'}, {'id': '2'}, {'id': '3'}, {'id': '4'}, {'id': '5'}, {'id': '6'}, {'id': '7'}]
self.assertSequenceEqual(result, expected_items)
@mock.patch.object(connection, 'get')
def test_get_all_should_stop_requests_when_requested_count_reached(self, mock_get):
"""
In this case, the user provides a maximum number of results to be returned but for pagination purposes, a
nextPageUri is returned by OneView.
"""
uri_list = ['/rest/testuri?start=0&count=3',
'/rest/testuri?start=3&count=3',
'/rest/testuri?start=6&count=3']
results = [{'nextPageUri': uri_list[1], 'members': [{'id': '1'}, {'id': '2'}, {'id': '3'}]},
{'nextPageUri': uri_list[2], 'members': [{'id': '4'}, {'id': '5'}, {'id': '6'}]},
{'nextPageUri': None, 'members': [{'id': '7'}, {'id': '8'}]}]
mock_get.side_effect = results
self.resource_client.get_all(count=3)
mock_get.assert_called_once_with(uri_list[0])
@mock.patch.object(connection, 'get')
def test_get_all_should_stop_requests_when_next_page_is_equal_to_current_page(self, mock_get):
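# Guards against an infinite pagination loop: when nextPageUri points back at
# the current page, get_all must stop after a single request.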
uri = '/rest/testuri?start=0&count=-1'
members = [{'id': '1'}, {'id': '2'}, {'id': '3'}]
mock_get.return_value = {
'nextPageUri': uri,
'members': members,
'uri': uri
}
result = self.resource_client.get_all()
self.assertSequenceEqual(result, members)
mock_get.assert_called_once_with(uri)
@mock.patch.object(connection, 'get')
def test_get_all_should_return_empty_list_when_response_has_no_items(self, mock_get):
mock_get.return_value = {'nextPageUri': None, 'members': []}
result = self.resource_client.get_all()
self.assertEqual(result, [])
@mock.patch.object(connection, 'get')
def test_get_all_should_return_empty_list_when_no_members(self, mock_get):
mock_get.return_value = {'nextPageUri': None, 'members': None}
result = self.resource_client.get_all()
self.assertEqual(result, [])
@mock.patch.object(connection, 'delete')
@mock.patch.object(TaskMonitor, 'wait_for_task')
def test_delete_all_called_once(self, mock_wait4task, mock_delete):
mock_delete.return_value = self.task, self.response_body
mock_wait4task.return_value = self.task
filter = "name='Exchange Server'"
uri = "/rest/testuri?filter=name%3D%27Exchange%20Server%27&force=True"
self.resource_client.delete_all(filter=filter, force=True, timeout=-1)
mock_delete.assert_called_once_with(uri)
@mock.patch.object(connection, 'delete')
def test_delete_all_should_return_true(self, mock_delete):
mock_delete.return_value = None, self.response_body
filter = "name='Exchange Server'"
result = self.resource_client.delete_all(filter=filter, force=True, timeout=-1)
self.assertTrue(result)
@mock.patch.object(connection, 'delete')
@mock.patch.object(TaskMonitor, 'wait_for_task')
def test_delete_all_should_wait_for_task(self, mock_wait4task, mock_delete):
mock_delete.return_value = self.task, self.response_body
mock_wait4task.return_value = self.task
filter = "name='Exchange Server'"
delete_task = self.resource_client.delete_all(filter=filter, force=True, timeout=-1)
mock_wait4task.assert_called_with(self.task, timeout=-1)
self.assertEqual(self.task, delete_task)
@mock.patch.object(connection, 'delete')
@mock.patch.object(TaskMonitor, 'wait_for_task')
def test_delete_by_id_called_once(self, mock_wait4task, mock_delete):
mock_delete.return_value = self.task, self.response_body
mock_wait4task.return_value = self.task
delete_task = self.resource_client.delete('1', force=True, timeout=-1)
self.assertEqual(self.task, delete_task)
mock_delete.assert_called_once_with(self.URI + "/1?force=True", custom_headers=None)
@mock.patch.object(connection, 'delete')
@mock.patch.object(TaskMonitor, 'wait_for_task')
def test_delete_with_custom_headers(self, mock_wait4task, mock_delete):
mock_delete.return_value = self.task, self.response_body
mock_wait4task.return_value = self.task
self.resource_client.delete('1', custom_headers=self.custom_headers)
mock_delete.assert_called_once_with(mock.ANY, custom_headers={'Accept-Language': 'en_US'})
def test_delete_dict_invalid_uri(self):
dict_to_delete = {"task": "task",
"uri": ""}
try:
self.resource_client.delete(dict_to_delete, False, -1)
except exceptions.HPOneViewUnknownType as e:
self.assertEqual("Unknown object type", e.args[0])
else:
self.fail()
@mock.patch.object(connection, 'get')
def test_get_schema_uri(self, mock_get):
self.resource_client.get_schema()
mock_get.assert_called_once_with(self.URI + "/schema")
@mock.patch.object(connection, 'get')
def test_get_by_id_uri(self, mock_get):
self.resource_client.get('12345')
mock_get.assert_called_once_with(self.URI + "/12345")
@mock.patch.object(ResourceClient, 'get_by')
def test_get_by_name_with_result(self, mock_get_by):
mock_get_by.return_value = [{"name": "value"}]
response = self.resource_client.get_by_name('Resource Name,')
self.assertEqual(response, {"name": "value"})
mock_get_by.assert_called_once_with("name", 'Resource Name,')
@mock.patch.object(ResourceClient, 'get_by')
def test_get_by_name_without_result(self, mock_get_by):
mock_get_by.return_value = []
response = self.resource_client.get_by_name('Resource Name,')
self.assertIsNone(response)
mock_get_by.assert_called_once_with("name", 'Resource Name,')
@mock.patch.object(connection, 'get')
def test_get_collection_uri(self, mock_get):
mock_get.return_value = {"members": [{"key": "value"}, {"key": "value"}]}
self.resource_client.get_collection('12345')
mock_get.assert_called_once_with(self.URI + "/12345")
@mock.patch.object(connection, 'get')
def test_get_collection_with_filter(self, mock_get):
mock_get.return_value = {}
self.resource_client.get_collection('12345', 'name=name')
mock_get.assert_called_once_with(self.URI + "/12345?filter=name%3Dname")
@mock.patch.object(connection, 'get')
def test_get_collection_with_multiple_filters(self, mock_get):
mock_get.return_value = {}
self.resource_client.get_collection('12345', ['name1=one', 'name2=two', 'name=three'])
mock_get.assert_called_once_with(self.URI + "/12345?filter=name1%3Done&filter=name2%3Dtwo&filter=name%3Dthree")
@mock.patch.object(connection, 'get')
def test_get_collection_should_return_list(self, mock_get):
mock_get.return_value = {"members": [{"key": "value"}, {"key": "value"}]}
collection = self.resource_client.get_collection('12345')
self.assertEqual(len(collection), 2)
@mock.patch.object(ResourceClient, 'get_all')
def test_get_by_property(self, mock_get_all):
self.resource_client.get_by('name', 'MyFibreNetwork')
mock_get_all.assert_called_once_with(filter="\"name='MyFibreNetwork'\"", uri='/rest/testuri')
@mock.patch.object(ResourceClient, 'get_all')
def test_get_by_with_incorrect_result_autofix(self, mock_get_all):
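# get_by matches simple fields case-insensitively on the client side, so the
# mixed-case query still returns only the matching member.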
mock_get_all.return_value = [{"name": "EXpected"},
{"name": "not expected"}]
response = self.resource_client.get_by('name', 'exPEcted')
self.assertEqual(response, [{"name": "EXpected"}])
mock_get_all.assert_called_once_with(filter="\"name='exPEcted'\"", uri='/rest/testuri')
@mock.patch.object(ResourceClient, 'get_all')
def test_get_by_with_incorrect_result_skip_autofix(self, mock_get_all):
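# Dotted (nested) field names skip the client-side filter, so every member
# returned by the server is passed through unchanged.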
mock_get_all.return_value = [{"name": "expected"},
{"name": "not expected"}]
response = self.resource_client.get_by('connection.name', 'expected')
self.assertEqual(response, [{'name': 'expected'}, {'name': 'not expected'}])
mock_get_all.assert_called_once_with(filter="\"connection.name='expected'\"", uri='/rest/testuri')
@mock.patch.object(ResourceClient, 'get_all')
def test_get_by_property_with_uri(self, mock_get_all):
self.resource_client.get_by('name', 'MyFibreNetwork', uri='/rest/testuri/5435534/sub')
mock_get_all.assert_called_once_with(filter="\"name='MyFibreNetwork'\"", uri='/rest/testuri/5435534/sub')
@mock.patch.object(ResourceClient, 'get_all')
def test_get_by_property_with_invalid_uri(self, mock_get_all):
try:
self.resource_client.get_by('name', 'MyFibreNetwork', uri='/rest/other/5435534/sub')
except exceptions.HPOneViewUnknownType as e:
self.assertEqual('Unrecognized URI for this resource', e.args[0])
else:
self.fail()
@mock.patch.object(connection, 'put')
@mock.patch.object(TaskMonitor, 'wait_for_task')
def test_update_with_zero_body_called_once(self, mock_wait4task, mock_update):
mock_update.return_value = self.task, self.task
mock_wait4task.return_value = self.task
self.resource_client.update_with_zero_body('/rest/enclosures/09USE133E5H4/configuration',
timeout=-1)
mock_update.assert_called_once_with(
"/rest/enclosures/09USE133E5H4/configuration", None, custom_headers=None)
@mock.patch.object(connection, 'put')
@mock.patch.object(TaskMonitor, 'wait_for_task')
def test_update_with_zero_body_and_custom_headers(self, mock_wait4task, mock_update):
mock_update.return_value = self.task, self.task
mock_wait4task.return_value = self.task
self.resource_client.update_with_zero_body('1', custom_headers=self.custom_headers)
mock_update.assert_called_once_with(mock.ANY, mock.ANY, custom_headers={'Accept-Language': 'en_US'})
@mock.patch.object(connection, 'put')
@mock.patch.object(TaskMonitor, 'wait_for_task')
def test_update_with_zero_body_return_entity(self, mock_wait4task, mock_put):
response_body = {"resource_name": "name"}
mock_put.return_value = self.task, self.task
mock_wait4task.return_value = response_body
result = self.resource_client.update_with_zero_body(
'/rest/enclosures/09USE133E5H4/configuration', timeout=-1)
self.assertEqual(result, response_body)
@mock.patch.object(connection, 'put')
def test_update_with_zero_body_without_task(self, mock_put):
mock_put.return_value = None, self.response_body
result = self.resource_client.update_with_zero_body(
'/rest/enclosures/09USE133E5H4/configuration', timeout=-1)
self.assertEqual(result, self.response_body)
@mock.patch.object(connection, 'put')
def test_update_with_uri_called_once(self, mock_put):
dict_to_update = {"name": "test"}
uri = "/rest/resource/test"
mock_put.return_value = None, self.response_body
response = self.resource_client.update(dict_to_update, uri=uri)
self.assertEqual(self.response_body, response)
mock_put.assert_called_once_with(uri, dict_to_update, custom_headers=None)
@mock.patch.object(connection, 'put')
def test_update_with_custom_headers(self, mock_put):
dict_to_update = {"name": "test"}
mock_put.return_value = None, self.response_body
self.resource_client.update(dict_to_update, uri="/path", custom_headers=self.custom_headers)
mock_put.assert_called_once_with(mock.ANY, mock.ANY, custom_headers={'Accept-Language': 'en_US'})
@mock.patch.object(connection, 'put')
def test_update_with_force(self, mock_put):
dict_to_update = {"name": "test"}
uri = "/rest/resource/test"
mock_put.return_value = None, self.response_body
self.resource_client.update(dict_to_update, uri=uri, force=True)
expected_uri = "/rest/resource/test?force=True"
mock_put.assert_called_once_with(expected_uri, dict_to_update, custom_headers=None)
@mock.patch.object(connection, 'put')
def test_update_with_api_version_200(self, mock_put):
dict_to_update = {"name": "test"}
uri = "/rest/resource/test"
mock_put.return_value = None, self.response_body
self.connection._apiVersion = 200
expected_dict = {"name": "test", "type": self.TYPE_V200}
self.resource_client.update(dict_to_update, uri=uri, default_values=self.DEFAULT_VALUES)
mock_put.assert_called_once_with(uri, expected_dict, custom_headers=None)
@mock.patch.object(connection, 'put')
def test_update_with_default_api_version_300(self, mock_put):
dict_to_update = {"name": "test"}
uri = "/rest/resource/test"
mock_put.return_value = None, self.response_body
expected_dict = {"name": "test", "type": self.TYPE_V300}
self.resource_client.update(dict_to_update, uri=uri, default_values=self.DEFAULT_VALUES)
mock_put.assert_called_once_with(uri, expected_dict, custom_headers=None)
@mock.patch.object(connection, 'put')
def test_update_should_not_override_resource_properties(self, mock_put):
dict_to_update = {"name": "test", "type": "anotherType"}
uri = "/rest/resource/test"
mock_put.return_value = None, self.response_body
self.resource_client.update(dict_to_update, uri=uri, default_values=self.DEFAULT_VALUES)
mock_put.assert_called_once_with(uri, dict_to_update, custom_headers=None)
@mock.patch.object(connection, 'put')
def test_update_without_default_values(self, mock_put):
dict_to_update = {"name": "test"}
uri = "/rest/resource/test"
mock_put.return_value = None, self.response_body
resource_client = ResourceClient(self.connection, self.URI)
resource_client.update(dict_to_update, uri=uri)
mock_put.assert_called_once_with(uri, dict_to_update, custom_headers=None)
@mock.patch.object(connection, 'put')
@mock.patch.object(TaskMonitor, 'wait_for_task')
def test_update_uri(self, mock_wait4task, mock_update):
dict_to_update = {"resource_data": "resource_data",
"uri": "a_uri"}
mock_update.return_value = self.task, self.response_body
mock_wait4task.return_value = self.task
update_task = self.resource_client.update(dict_to_update, False)
self.assertEqual(self.task, update_task)
mock_update.assert_called_once_with("a_uri", dict_to_update, custom_headers=None)
@mock.patch.object(connection, 'put')
@mock.patch.object(TaskMonitor, 'wait_for_task')
def test_update_return_entity(self, mock_wait4task, mock_put):
dict_to_update = {
"resource_name": "a name",
"uri": "a_uri",
}
mock_put.return_value = self.task, {}
mock_wait4task.return_value = dict_to_update
result = self.resource_client.update(dict_to_update, timeout=-1)
self.assertEqual(result, dict_to_update)
@mock.patch.object(connection, 'post')
@mock.patch.object(TaskMonitor, 'wait_for_task')
def test_create_with_zero_body_called_once(self, mock_wait4task, mock_post):
mock_post.return_value = self.task, self.task
mock_wait4task.return_value = self.task
self.resource_client.create_with_zero_body('/rest/enclosures/09USE133E5H4/configuration',
timeout=-1)
mock_post.assert_called_once_with(
"/rest/enclosures/09USE133E5H4/configuration", {}, custom_headers=None)
@mock.patch.object(connection, 'post')
@mock.patch.object(TaskMonitor, 'wait_for_task')
def test_create_with_zero_body_called_once_without_uri(self, mock_wait4task, mock_post):
mock_post.return_value = self.task, self.task
mock_wait4task.return_value = self.task
self.resource_client.create_with_zero_body(timeout=-1)
mock_post.assert_called_once_with(
'/rest/testuri', {}, custom_headers=None)
@mock.patch.object(connection, 'post')
@mock.patch.object(TaskMonitor, 'wait_for_task')
def test_create_with_zero_body_and_custom_headers(self, mock_wait4task, mock_post):
mock_post.return_value = self.task, self.task
mock_wait4task.return_value = self.task
self.resource_client.create_with_zero_body('1', custom_headers=self.custom_headers)
mock_post.assert_called_once_with(mock.ANY, mock.ANY, custom_headers={'Accept-Language': 'en_US'})
@mock.patch.object(connection, 'post')
@mock.patch.object(TaskMonitor, 'wait_for_task')
def test_create_with_zero_body_return_entity(self, mock_wait4task, mock_post):
response_body = {"resource_name": "name"}
mock_post.return_value = self.task, self.task
mock_wait4task.return_value = response_body
result = self.resource_client.create_with_zero_body(
'/rest/enclosures/09USE133E5H4/configuration', timeout=-1)
self.assertEqual(result, response_body)
@mock.patch.object(connection, 'post')
def test_create_with_zero_body_without_task(self, mock_post):
mock_post.return_value = None, self.response_body
result = self.resource_client.create_with_zero_body(
'/rest/enclosures/09USE133E5H4/configuration', timeout=-1)
self.assertEqual(result, self.response_body)
@mock.patch.object(connection, 'post')
def test_create_uri(self, mock_post):
dict_to_create = {"resource_name": "a name"}
mock_post.return_value = {}, {}
self.resource_client.create(dict_to_create, timeout=-1)
mock_post.assert_called_once_with(self.URI, dict_to_create, custom_headers=None)
@mock.patch.object(connection, 'post')
def test_create_uri_with_force(self, mock_post):
dict_to_create = {"resource_name": "a name", "force": "yes"}
mock_post.return_value = {}, {}
self.resource_client.create(dict_to_create, timeout=-1)
expected_uri = "/rest/testuri"
mock_post.assert_called_once_with(expected_uri, dict_to_create, custom_headers=None)
@mock.patch.object(connection, 'post')
def test_create_with_api_version_200(self, mock_post):
dict_to_create = {"resource_name": "a name"}
mock_post.return_value = {}, {}
self.connection._apiVersion = 200
expected_dict = {"resource_name": "a name", "type": self.TYPE_V200}
self.resource_client.create(dict_to_create, timeout=-1, default_values=self.DEFAULT_VALUES)
mock_post.assert_called_once_with(self.URI, expected_dict, custom_headers=None)
@mock.patch.object(connection, 'post')
def test_create_with_default_api_version_300(self, mock_post):
dict_to_create = {"resource_name": "a name"}
mock_post.return_value = {}, {}
expected_dict = {"resource_name": "a name", "type": self.TYPE_V300}
self.resource_client.create(dict_to_create, timeout=-1, default_values=self.DEFAULT_VALUES)
mock_post.assert_called_once_with(self.URI, expected_dict, custom_headers=None)
@mock.patch.object(connection, 'post')
def test_create_should_not_override_resource_properties(self, mock_post):
dict_to_create = {"resource_name": "a name", "type": "anotherType"}
mock_post.return_value = {}, {}
self.resource_client.create(dict_to_create, default_values=self.DEFAULT_VALUES)
mock_post.assert_called_once_with(self.URI, dict_to_create, custom_headers=None)
@mock.patch.object(connection, 'post')
def test_create_without_default_values(self, mock_post):
dict_to_create = {"resource_name": "a name"}
mock_post.return_value = {}, {}
resource_client = ResourceClient(self.connection, self.URI)
resource_client.create(dict_to_create, timeout=-1)
mock_post.assert_called_once_with(self.URI, dict_to_create, custom_headers=None)
@mock.patch.object(connection, 'post')
def test_create_with_custom_headers(self, mock_post):
dict_to_create = {"resource_name": "a name"}
mock_post.return_value = {}, {}
self.resource_client.create(dict_to_create, custom_headers=self.custom_headers)
mock_post.assert_called_once_with(mock.ANY, mock.ANY, custom_headers={'Accept-Language': 'en_US'})
@mock.patch.object(connection, 'post')
@mock.patch.object(TaskMonitor, 'wait_for_task')
def test_create_return_entity(self, mock_wait4task, mock_post):
dict_to_create = {
"resource_name": "a name",
}
created_resource = {
"resource_id": "123",
"resource_name": "a name",
}
mock_post.return_value = self.task, {}
mock_wait4task.return_value = created_resource
result = self.resource_client.create(dict_to_create, -1)
self.assertEqual(result, created_resource)
@mock.patch.object(connection, 'post')
@mock.patch.object(TaskMonitor, 'wait_for_task')
def test_wait_for_activity_on_create(self, mock_wait4task, mock_post):
mock_post.return_value = self.task, {}
mock_wait4task.return_value = self.task
self.resource_client.create({"test": "test"}, timeout=60)
mock_wait4task.assert_called_once_with(self.task, 60)
@mock.patch.object(connection, 'patch')
def test_patch_request_when_id_is_provided_v200(self, mock_patch):
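# JSON Patch (RFC 6902) style body: a single 'replace' operation on /name.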
request_body = [{
'op': 'replace',
'path': '/name',
'value': 'new_name',
}]
mock_patch.return_value = {}, {}
self.connection._apiVersion = 200
self.resource_client.patch(
'123a53cz', 'replace', '/name', 'new_name', 70)
mock_patch.assert_called_once_with(
'/rest/testuri/123a53cz', request_body, custom_headers={})
@mock.patch.object(connection, 'patch')
def test_patch_request_when_id_is_provided_v300(self, mock_patch):
request_body = [{
'op': 'replace',
'path': '/name',
'value': 'new_name',
}]
mock_patch.return_value = {}, {}
resource_client = ResourceClient(self.connection, self.URI)
resource_client.patch(
'123a53cz', 'replace', '/name', 'new_name', 70)
mock_patch.assert_called_once_with(
'/rest/testuri/123a53cz', request_body, custom_headers={'Content-Type': 'application/json-patch+json'})
@mock.patch.object(connection, 'patch')
def test_patch_request_when_uri_is_provided(self, mock_patch):
request_body = [{
'op': 'replace',
'path': '/name',
'value': 'new_name',
}]
mock_patch.return_value = {}, {}
self.resource_client.patch(
'/rest/testuri/123a53cz', 'replace', '/name', 'new_name', 60)
mock_patch.assert_called_once_with(
'/rest/testuri/123a53cz', request_body, custom_headers={'Content-Type': 'application/json-patch+json'})
@mock.patch.object(connection, 'patch')
def test_patch_with_custom_headers_v200(self, mock_patch):
mock_patch.return_value = {}, {}
self.connection._apiVersion = 200
self.resource_client.patch('/rest/testuri/123', 'operation', '/field', 'value',
custom_headers=self.custom_headers)
mock_patch.assert_called_once_with(mock.ANY, mock.ANY, custom_headers={'Accept-Language': 'en_US'})
@mock.patch.object(connection, 'patch')
def test_patch_with_custom_headers_v300(self, mock_patch):
mock_patch.return_value = {}, {}
resource_client = ResourceClient(self.connection, self.URI)
resource_client.patch('/rest/testuri/123', 'operation', '/field', 'value',
custom_headers=self.custom_headers)
mock_patch.assert_called_once_with(mock.ANY,
mock.ANY,
custom_headers={'Accept-Language': 'en_US',
'Content-Type': 'application/json-patch+json'})
@mock.patch.object(connection, 'patch')
@mock.patch.object(TaskMonitor, 'wait_for_task')
def test_patch_return_entity(self, mock_wait4task, mock_patch):
entity = {"resource_id": "123a53cz"}
mock_patch.return_value = self.task, self.task
mock_wait4task.return_value = entity
result = self.resource_client.patch(
'123a53cz', 'replace', '/name', 'new_name', -1)
self.assertEqual(result, entity)
@mock.patch.object(connection, 'patch')
@mock.patch.object(TaskMonitor, 'get_completed_task')
def test_patch_request_custom_headers_with_content_type(self, mock_task, mock_patch):
dict_info = {"resource_name": "a name"}
mock_patch.return_value = {}, {}
headers = {'Content-Type': 'application/json',
'Extra': 'extra'}
self.connection._apiVersion = 300
resource_client = ResourceClient(self.connection, self.URI)
resource_client.patch_request('/rest/testuri/id', body=dict_info, custom_headers=headers)
mock_patch.assert_called_once_with('/rest/testuri/id', dict_info, custom_headers=headers)
@mock.patch.object(connection, 'patch')
@mock.patch.object(TaskMonitor, 'get_completed_task')
def test_patch_request_custom_headers(self, mock_task, mock_patch):
dict_info = {"resource_name": "a name"}
mock_patch.return_value = {}, {}
headers = {'Extra': 'extra'}
self.connection._apiVersion = 300
resource_client = ResourceClient(self.connection, self.URI)
resource_client.patch_request('/rest/testuri/id', body=dict_info, custom_headers=headers)
mock_patch.assert_called_once_with(
'/rest/testuri/id',
dict_info,
custom_headers={'Extra': 'extra',
'Content-Type': 'application/json-patch+json'})
@mock.patch.object(connection, 'patch')
@mock.patch.object(TaskMonitor, 'wait_for_task')
def test_wait_for_activity_on_patch(self, mock_wait4task, mock_patch):
entity = {"resource_id": "123a53cz"}
mock_patch.return_value = self.task, self.task
mock_wait4task.return_value = entity
self.resource_client.patch(
'123a53cz', 'replace', '/name', 'new_name', -1)
mock_wait4task.assert_called_once_with(self.task, mock.ANY)
def test_delete_with_none(self):
try:
self.resource_client.delete(None)
except ValueError as e:
self.assertTrue("Resource" in e.args[0])
else:
self.fail()
@mock.patch.object(connection, 'delete')
def test_delete_with_dict_uri(self, mock_delete):
resource = {"uri": "uri"}
mock_delete.return_value = {}, {}
delete_result = self.resource_client.delete(resource)
self.assertTrue(delete_result)
mock_delete.assert_called_once_with("uri", custom_headers=None)
def test_delete_with_empty_dict(self):
try:
self.resource_client.delete({})
except ValueError as e:
self.assertTrue("Resource" in e.args[0])
else:
self.fail()
def test_get_with_none(self):
try:
self.resource_client.get(None)
except ValueError as e:
self.assertTrue("id" in e.args[0])
else:
self.fail()
def test_get_collection_with_none(self):
try:
self.resource_client.get_collection(None)
except ValueError as e:
self.assertTrue("id" in e.args[0])
else:
self.fail()
def test_create_with_none(self):
try:
self.resource_client.create(None)
except ValueError as e:
self.assertTrue("Resource" in e.args[0])
else:
self.fail()
def test_create_with_empty_dict(self):
try:
self.resource_client.create({})
except ValueError as e:
self.assertTrue("Resource" in e.args[0])
else:
self.fail()
def test_update_with_none(self):
try:
self.resource_client.update(None)
except ValueError as e:
self.assertTrue("Resource" in e.args[0])
else:
self.fail()
def test_update_with_empty_dict(self):
try:
self.resource_client.update({})
except ValueError as e:
self.assertTrue("Resource" in e.args[0])
else:
self.fail()
def test_get_by_with_name_none(self):
try:
self.resource_client.get_by(None, None)
except ValueError as e:
self.assertTrue("field" in e.args[0])
else:
self.fail()
@mock.patch.object(connection, 'get')
def test_get_with_uri_should_work(self, mock_get):
mock_get.return_value = {}
uri = self.URI + "/ad28cf21-8b15-4f92-bdcf-51cb2042db32"
self.resource_client.get(uri)
mock_get.assert_called_once_with(uri)
def test_get_with_uri_with_incompatible_url_should_fail(self):
message = "Unrecognized URI for this resource"
uri = "/rest/interconnects/ad28cf21-8b15-4f92-bdcf-51cb2042db32"
try:
self.resource_client.get(uri)
except exceptions.HPOneViewUnknownType as exception:
self.assertEqual(message, exception.args[0])
else:
self.fail("Expected Exception was not raised")
def test_get_with_uri_from_another_resource_with_incompatible_url_should_fail(self):
message = "Unrecognized URI for this resource"
uri = "/rest/interconnects/ad28cf21-8b15-4f92-bdcf-51cb2042db32"
fake_resource = FakeResource(None)
try:
fake_resource.get_fake(uri)
except exceptions.HPOneViewUnknownType as exception:
self.assertEqual(message, exception.args[0])
else:
self.fail("Expected Exception was not raised")
@mock.patch.object(connection, 'get')
def test_get_utilization_with_args(self, mock_get):
self.resource_client.get_utilization('09USE7335NW3', fields='AmbientTemperature,AveragePower,PeakPower',
filter='startDate=2016-05-30T03:29:42.361Z',
refresh=True, view='day')
expected_uri = '/rest/testuri/09USE7335NW3/utilization' \
'?filter=startDate%3D2016-05-30T03%3A29%3A42.361Z' \
'&fields=AmbientTemperature%2CAveragePower%2CPeakPower' \
'&refresh=true' \
'&view=day'
mock_get.assert_called_once_with(expected_uri)
@mock.patch.object(connection, 'get')
def test_get_utilization_with_multiple_filters(self, mock_get):
self.resource_client.get_utilization(
'09USE7335NW3',
fields='AmbientTemperature,AveragePower,PeakPower',
filter=['startDate=2016-05-30T03:29:42.361Z',
'endDate=2016-05-31T03:29:42.361Z'],
refresh=True,
view='day')
expected_uri = '/rest/testuri/09USE7335NW3/utilization' \
'?filter=startDate%3D2016-05-30T03%3A29%3A42.361Z' \
'&filter=endDate%3D2016-05-31T03%3A29%3A42.361Z' \
'&fields=AmbientTemperature%2CAveragePower%2CPeakPower' \
'&refresh=true' \
'&view=day'
mock_get.assert_called_once_with(expected_uri)
@mock.patch.object(connection, 'get')
def test_get_utilization_by_id_with_defaults(self, mock_get):
self.resource_client.get_utilization('09USE7335NW3')
expected_uri = '/rest/testuri/09USE7335NW3/utilization'
mock_get.assert_called_once_with(expected_uri)
@mock.patch.object(connection, 'get')
def test_get_utilization_by_uri_with_defaults(self, mock_get):
self.resource_client.get_utilization('/rest/testuri/09USE7335NW3')
expected_uri = '/rest/testuri/09USE7335NW3/utilization'
mock_get.assert_called_once_with(expected_uri)
def test_get_utilization_with_empty(self):
try:
self.resource_client.get_utilization('')
except ValueError as exception:
self.assertEqual(RESOURCE_CLIENT_INVALID_ID, exception.args[0])
else:
self.fail("Expected Exception was not raised")
def test_build_uri_with_id_should_work(self):
input = '09USE7335NW35'
expected_output = '/rest/testuri/09USE7335NW35'
result = self.resource_client.build_uri(input)
self.assertEqual(expected_output, result)
def test_build_uri_with_uri_should_work(self):
input = '/rest/testuri/09USE7335NW3'
expected_output = '/rest/testuri/09USE7335NW3'
result = self.resource_client.build_uri(input)
self.assertEqual(expected_output, result)
def test_build_uri_with_none_should_raise_exception(self):
try:
self.resource_client.build_uri(None)
except ValueError as exception:
self.assertEqual(RESOURCE_CLIENT_INVALID_ID, exception.args[0])
else:
self.fail("Expected Exception was not raised")
def test_build_uri_with_empty_str_should_raise_exception(self):
try:
self.resource_client.build_uri('')
except ValueError as exception:
self.assertEqual(RESOURCE_CLIENT_INVALID_ID, exception.args[0])
else:
self.fail("Expected Exception was not raised")
def test_build_uri_with_different_resource_uri_should_raise_exception(self):
try:
self.resource_client.build_uri(
'/rest/test/another/resource/uri/09USE7335NW3')
except exceptions.HPOneViewUnknownType as exception:
self.assertEqual(UNRECOGNIZED_URI, exception.args[0])
else:
self.fail("Expected Exception was not raised")
def test_build_uri_with_incomplete_uri_should_raise_exception(self):
try:
self.resource_client.build_uri('/rest/')
except exceptions.HPOneViewUnknownType as exception:
self.assertEqual(UNRECOGNIZED_URI, exception.args[0])
else:
self.fail("Expected Exception was not raised")
def test_build_subresource_uri(self):
options = [
dict(
resource='1',
subresource='2',
path='sub',
uri='/rest/testuri/1/sub/2'),
dict(
resource='/rest/testuri/3',
subresource='4',
path='sub',
uri='/rest/testuri/3/sub/4'),
dict(
resource='5',
subresource='/rest/testuri/5/sub/6',
path='sub',
uri='/rest/testuri/5/sub/6'),
dict(
resource='/rest/testuri/7',
subresource='/rest/testuri/7/sub/8',
path='sub',
uri='/rest/testuri/7/sub/8'),
dict(
resource=None,
subresource='/rest/testuri/9/sub/10',
path='sub',
uri='/rest/testuri/9/sub/10'),
dict(
resource='/rest/testuri/11',
subresource='12',
path='/sub/',
uri='/rest/testuri/11/sub/12'),
dict(
resource='/rest/testuri/13',
subresource=None,
path='/sub/',
uri='/rest/testuri/13/sub'),
]
for option in options:
uri = self.resource_client.build_subresource_uri(option['resource'], option['subresource'], option['path'])
self.assertEqual(uri, option['uri'])
def test_build_subresource_uri_with_subresourceid_and_without_resource_should_fail(self):
try:
self.resource_client.build_subresource_uri(None, "123456", 'sub-path')
except exceptions.HPOneViewValueError as exception:
self.assertEqual(RESOURCE_ID_OR_URI_REQUIRED, exception.args[0])
else:
self.fail("Expected Exception was not raised")
@mock.patch.object(connection, 'post')
@mock.patch.object(TaskMonitor, 'get_completed_task')
def test_create_report_should_do_post_request(self, mock_get_completed_task, mock_post):
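# create_report POSTs to the given URI and reads the report rows from the
# completed task's 'taskOutput' field.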
task_with_output = self.task.copy()
task_with_output['taskOutput'] = []
mock_post.return_value = self.task, {}
mock_get_completed_task.return_value = task_with_output
self.resource_client.create_report("/rest/path/create-report")
mock_post.assert_called_once_with("/rest/path/create-report", {})
@mock.patch.object(connection, 'post')
@mock.patch.object(TaskMonitor, 'get_completed_task')
def test_create_report_should_wait_task_completion(self, mock_get_completed_task, mock_post):
task_with_output = self.task.copy()
task_with_output['taskOutput'] = []
mock_post.return_value = self.task, {}
mock_get_completed_task.return_value = task_with_output
self.resource_client.create_report("/rest/path/create-report", timeout=60)
mock_get_completed_task.assert_called_once_with(self.task, 60)
@mock.patch.object(connection, 'post')
@mock.patch.object(TaskMonitor, 'get_completed_task')
def test_create_report_should_return_output_list_when_results(self, mock_get_completed_task, mock_post):
task_output = [
{"type": "FCIssueResponseV2", "created": "2015-03-24T15: 32: 50.889Z"},
{"type": "FCIssueResponseV2", "created": "2015-03-13T14: 10: 50.322Z"}
]
task_with_output = self.task.copy()
task_with_output['taskOutput'] = task_output
mock_post.return_value = self.task, {}
mock_get_completed_task.return_value = task_with_output
result = self.resource_client.create_report("/rest/path/create-report")
self.assertEqual(result, task_output)
@mock.patch.object(connection, 'post')
@mock.patch.object(TaskMonitor, 'get_completed_task')
def test_create_report_should_return_empty_list_when_output_is_empty(self, mock_get_completed_task, mock_post):
task_with_output = self.task.copy()
task_with_output['taskOutput'] = []
mock_post.return_value = self.task, {}
mock_get_completed_task.return_value = task_with_output
result = self.resource_client.create_report("/rest/path/create-report")
self.assertEqual(result, [])
@mock.patch.object(connection, 'post')
def test_create_report_should_raise_exception_when_not_task(self, mock_post):
task_with_output = self.task.copy()
task_with_output['taskOutput'] = []
mock_post.return_value = None, {}
try:
self.resource_client.create_report("/rest/path/create-report")
except exceptions.HPOneViewException as exception:
self.assertEqual(RESOURCE_CLIENT_TASK_EXPECTED, exception.args[0])
else:
self.fail("Expected Exception was not raised")
@mock.patch.object(connection, 'post')
def test_create_when_the_resource_is_a_list(self, mock_post):
dict_to_create = [{"resource_name": "a name"}]
mock_post.return_value = {}, {}
resource_client = ResourceClient(self.connection, self.URI)
resource_client.create(dict_to_create, timeout=-1)
mock_post.assert_called_once_with(self.URI, dict_to_create, custom_headers=None)
def test_merge_api_default_values(self):
resource = {'name': 'resource1'}
default_values = {
'200': {"type": "EnclosureGroupV200"},
'300': {"type": "EnclosureGroupV300"}
}
expected = {'name': 'resource1', "type": "EnclosureGroupV300"}
resource_client = ResourceClient(self.connection, self.URI)
result = resource_client.merge_default_values(resource, default_values)
self.assertEqual(result, expected)
def test_should_not_merge_when_default_values_not_defined(self):
resource = {'name': 'resource1'}
default_values = {}
expected = {'name': 'resource1'}
resource_client = ResourceClient(self.connection, self.URI)
result = resource_client.merge_default_values(resource, default_values)
self.assertEqual(result, expected)
@mock.patch.object(connection, 'post_multipart_with_response_handling')
def test_upload_should_call_post_multipart(self, mock_post_multipart):
uri = '/rest/testuri/'
filepath = "test/SPPgen9snap6.2015_0405.81.iso"
mock_post_multipart.return_value = None, mock.Mock()
self.resource_client.upload(filepath, uri)
mock_post_multipart.assert_called_once_with(uri, filepath, 'SPPgen9snap6.2015_0405.81.iso')
@mock.patch.object(connection, 'post_multipart_with_response_handling')
def test_upload_should_call_post_multipart_with_resource_uri_when_not_uri_provided(self, mock_post_multipart):
filepath = "test/SPPgen9snap6.2015_0405.81.iso"
mock_post_multipart.return_value = None, mock.Mock()
self.resource_client.upload(filepath)
mock_post_multipart.assert_called_once_with('/rest/testuri', mock.ANY, mock.ANY)
@mock.patch.object(connection, 'post_multipart_with_response_handling')
@mock.patch.object(TaskMonitor, 'wait_for_task')
@mock.patch.object(connection, 'get')
def test_upload_should_wait_for_task_when_response_is_task(self, mock_get, mock_wait4task, mock_post_multipart):
uri = '/rest/testuri/'
filepath = "test/SPPgen9snap6.2015_0405.81.iso"
mock_post_multipart.return_value = self.task, mock.Mock()
self.resource_client.upload(filepath, uri)
mock_wait4task.assert_called_once_with(self.task, -1)
@mock.patch.object(connection, 'post_multipart_with_response_handling')
@mock.patch.object(TaskMonitor, 'wait_for_task')
def test_upload_should_not_wait_for_task_when_response_is_not_task(self, mock_wait4task, mock_post_multipart):
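# When the response carries no task, upload must return immediately without
# polling the TaskMonitor.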
uri = '/rest/testuri/'
filepath = "test/SPPgen9snap6.2015_0405.81.iso"
mock_post_multipart.return_value = None, mock.Mock()
self.resource_client.upload(filepath, uri)
mock_wait4task.assert_not_called()
@mock.patch.object(connection, 'post_multipart_with_response_handling')
@mock.patch.object(TaskMonitor, 'wait_for_task')
@mock.patch.object(connection, 'get')
def test_upload_should_return_associated_resource_when_response_is_task(self, mock_get, mock_wait4task,
mock_post_multipart):
fake_associated_resource = mock.Mock()
uri = '/rest/testuri/'
filepath = "test/SPPgen9snap6.2015_0405.81.iso"
mock_post_multipart.return_value = self.task, mock.Mock()
mock_wait4task.return_value = fake_associated_resource
result = self.resource_client.upload(filepath, uri)
self.assertEqual(result, fake_associated_resource)
@mock.patch.object(connection, 'post_multipart_with_response_handling')
@mock.patch.object(TaskMonitor, 'wait_for_task')
def test_upload_should_return_resource_when_response_is_not_task(self, mock_wait4task, mock_post_multipart):
fake_response_body = mock.Mock()
uri = '/rest/testuri/'
filepath = "test/SPPgen9snap6.2015_0405.81.iso"
mock_post_multipart.return_value = None, fake_response_body
result = self.resource_client.upload(filepath, uri)
self.assertEqual(result, fake_response_body)
@mock.patch.object(connection, 'download_to_stream')
@mock.patch(mock_builtin('open'))
def test_download_should_call_download_to_stream_with_given_uri(self, mock_open, mock_download_to_stream):
file_path = "~/archive.log"
uri = '/rest/testuri/3ec91dd2-0ebb-4484-8b2d-90d065114315'
mock_open.return_value = io.StringIO()
self.resource_client.download(uri, file_path)
mock_download_to_stream.assert_called_once_with(mock.ANY, uri, custom_headers=mock.ANY)
@mock.patch.object(connection, 'download_to_stream')
@mock.patch(mock_builtin('open'))
def test_download_should_call_download_to_stream_with_open_file(self, mock_open, mock_download_to_stream):
file_path = "~/archive.log"
uri = '/rest/testuri/3ec91dd2-0ebb-4484-8b2d-90d065114315'
fake_file = io.StringIO()
mock_open.return_value = fake_file
self.resource_client.download(uri, file_path)
mock_open.assert_called_once_with(file_path, 'wb')
mock_download_to_stream.assert_called_once_with(fake_file, uri, custom_headers=mock.ANY)
@mock.patch.object(connection, 'download_to_stream')
@mock.patch(mock_builtin('open'))
def test_download_should_return_true_when_success(self, mock_open, mock_download_to_stream):
file_path = "~/archive.log"
uri = '/rest/testuri/3ec91dd2-0ebb-4484-8b2d-90d065114315'
mock_download_to_stream.return_value = True
mock_open.return_value = io.StringIO()
result = self.resource_client.download(uri, file_path)
self.assertTrue(result)
@mock.patch.object(connection, 'download_to_stream')
@mock.patch(mock_builtin('open'))
def test_download_should_return_false_when_error(self, mock_open, mock_download_to_stream):
file_path = "~/archive.log"
uri = '/rest/testuri/3ec91dd2-0ebb-4484-8b2d-90d065114315'
mock_download_to_stream.return_value = False
mock_open.return_value = io.StringIO()
result = self.resource_client.download(uri, file_path)
self.assertFalse(result)
def test_transform_list_to_dict(self):
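# Scalar items become string keys mapped to True; dict items are merged in
# unchanged.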
list = ['one', 'two', {'tree': 3}, 'four', 5]
dict_transformed = transform_list_to_dict(list=list)
self.assertEqual(dict_transformed,
{'5': True,
'four': True,
'one': True,
'tree': 3,
'two': True})
def test_extract_id_from_uri(self):
uri = '/rest/plan-scripts/3518be0e-17c1-4189-8f81-83f3724f6155'
id = '3518be0e-17c1-4189-8f81-83f3724f6155'
extracted_id = extract_id_from_uri(uri)
self.assertEqual(id, extracted_id)
def test_extract_id_from_uri_with_extra_slash(self):
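# The helper takes the last path segment, so a trailing slash yields ''.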
uri = '/rest/plan-scripts/3518be0e-17c1-4189-8f81-83f3724f6155/'
extracted_id = extract_id_from_uri(uri)
self.assertEqual(extracted_id, '')
def test_extract_id_from_uri_passing_id(self):
uri = '3518be0e-17c1-4189-8f81-83f3724f6155'
extracted_id = extract_id_from_uri(uri)
self.assertEqual(extracted_id, '3518be0e-17c1-4189-8f81-83f3724f6155')
def test_extract_id_from_uri_unsupported(self):
# URIs with an extra segment after the ID are not supported yet; the helper
# currently returns the last path segment instead of the ID.
uri = '/rest/plan-scripts/3518be0e-17c1-4189-8f81-83f3724f6155/otherthing'
extracted_id = extract_id_from_uri(uri)
self.assertEqual(extracted_id, 'otherthing')
'mock.patch.object', ({(412, 23, 412, 31): 'Resource', (412, 33, 412, 55): '"""ensure_resource_data"""'}, {}), "(Resource, 'ensure_resource_data')", False, 'import mock\n'), ((413, 5, 413, 43), 'mock.patch.object', 'mock.patch.object', ({(413, 23, 413, 33): 'connection', (413, 35, 413, 42): '"""patch"""'}, {}), "(connection, 'patch')", False, 'import mock\n'), ((414, 5, 414, 57), 'mock.patch.object', 'mock.patch.object', ({(414, 23, 414, 34): 'TaskMonitor', (414, 36, 414, 56): '"""get_completed_task"""'}, {}), "(TaskMonitor, 'get_completed_task')", False, 'import mock\n'), ((430, 5, 430, 56), 'mock.patch.object', 'mock.patch.object', ({(430, 23, 430, 31): 'Resource', (430, 33, 430, 55): '"""ensure_resource_data"""'}, {}), "(Resource, 'ensure_resource_data')", False, 'import mock\n'), ((431, 5, 431, 43), 'mock.patch.object', 'mock.patch.object', ({(431, 23, 431, 33): 'connection', (431, 35, 431, 42): '"""patch"""'}, {}), "(connection, 'patch')", False, 'import mock\n'), ((432, 5, 432, 52), 'mock.patch.object', 'mock.patch.object', ({(432, 23, 432, 34): 'TaskMonitor', (432, 36, 432, 51): '"""wait_for_task"""'}, {}), "(TaskMonitor, 'wait_for_task')", False, 'import mock\n'), ((449, 5, 449, 56), 'mock.patch.object', 'mock.patch.object', ({(449, 23, 449, 31): 'Resource', (449, 33, 449, 55): '"""ensure_resource_data"""'}, {}), "(Resource, 'ensure_resource_data')", False, 'import mock\n'), ((450, 5, 450, 41), 'mock.patch.object', 'mock.patch.object', ({(450, 23, 450, 33): 'connection', (450, 35, 450, 40): '"""get"""'}, {}), "(connection, 'get')", False, 'import mock\n'), ((464, 5, 464, 56), 'mock.patch.object', 'mock.patch.object', ({(464, 23, 464, 31): 'Resource', (464, 33, 464, 55): '"""ensure_resource_data"""'}, {}), "(Resource, 'ensure_resource_data')", False, 'import mock\n'), ((465, 5, 465, 41), 'mock.patch.object', 'mock.patch.object', ({(465, 23, 465, 33): 'connection', (465, 35, 465, 40): '"""get"""'}, {}), "(connection, 'get')", False, 'import mock\n'), ((482, 5, 482, 56), 'mock.patch.object', 'mock.patch.object', ({(482, 23, 482, 31): 'Resource', (482, 33, 482, 55): '"""ensure_resource_data"""'}, {}), "(Resource, 'ensure_resource_data')", False, 'import mock\n'), ((483, 5, 483, 41), 'mock.patch.object', 'mock.patch.object', ({(483, 23, 483, 33): 'connection', (483, 35, 483, 40): '"""get"""'}, {}), "(connection, 'get')", False, 'import mock\n'), ((491, 5, 491, 56), 'mock.patch.object', 'mock.patch.object', ({(491, 23, 491, 31): 'Resource', (491, 33, 491, 55): '"""ensure_resource_data"""'}, {}), "(Resource, 'ensure_resource_data')", False, 'import mock\n'), ((492, 5, 492, 41), 'mock.patch.object', 'mock.patch.object', ({(492, 23, 492, 33): 'connection', (492, 35, 492, 40): '"""get"""'}, {}), "(connection, 'get')", False, 'import mock\n'), ((508, 5, 508, 41), 'mock.patch.object', 'mock.patch.object', ({(508, 23, 508, 33): 'connection', (508, 35, 508, 40): '"""get"""'}, {}), "(connection, 'get')", False, 'import mock\n'), ((522, 5, 522, 48), 'mock.patch.object', 'mock.patch.object', ({(522, 23, 522, 37): 'ResourceHelper', (522, 39, 522, 47): '"""do_put"""'}, {}), "(ResourceHelper, 'do_put')", False, 'import mock\n'), ((523, 5, 523, 56), 'mock.patch.object', 'mock.patch.object', ({(523, 23, 523, 31): 'Resource', (523, 33, 523, 55): '"""ensure_resource_data"""'}, {}), "(Resource, 'ensure_resource_data')", False, 'import mock\n'), ((535, 5, 535, 48), 'mock.patch.object', 'mock.patch.object', ({(535, 23, 535, 37): 'ResourceHelper', (535, 39, 535, 47): '"""do_get"""'}, {}), "(ResourceHelper, 
'do_get')", False, 'import mock\n'), ((542, 5, 542, 42), 'mock.patch.object', 'mock.patch.object', ({(542, 23, 542, 31): 'Resource', (542, 33, 542, 41): '"""get_by"""'}, {}), "(Resource, 'get_by')", False, 'import mock\n'), ((549, 5, 549, 48), 'mock.patch.object', 'mock.patch.object', ({(549, 23, 549, 37): 'ResourceHelper', (549, 39, 549, 47): '"""do_get"""'}, {}), "(ResourceHelper, 'do_get')", False, 'import mock\n'), ((550, 5, 550, 42), 'mock.patch.object', 'mock.patch.object', ({(550, 23, 550, 31): 'Resource', (550, 33, 550, 41): '"""get_by"""'}, {}), "(Resource, 'get_by')", False, 'import mock\n'), ((559, 5, 559, 42), 'mock.patch.object', 'mock.patch.object', ({(559, 23, 559, 31): 'Resource', (559, 33, 559, 41): '"""get_by"""'}, {}), "(Resource, 'get_by')", False, 'import mock\n'), ((566, 5, 566, 41), 'mock.patch.object', 'mock.patch.object', ({(566, 23, 566, 33): 'connection', (566, 35, 566, 40): '"""get"""'}, {}), "(connection, 'get')", False, 'import mock\n'), ((586, 5, 586, 41), 'mock.patch.object', 'mock.patch.object', ({(586, 23, 586, 33): 'connection', (586, 35, 586, 40): '"""get"""'}, {}), "(connection, 'get')", False, 'import mock\n'), ((593, 5, 593, 41), 'mock.patch.object', 'mock.patch.object', ({(593, 23, 593, 33): 'connection', (593, 35, 593, 40): '"""get"""'}, {}), "(connection, 'get')", False, 'import mock\n'), ((600, 5, 600, 41), 'mock.patch.object', 'mock.patch.object', ({(600, 23, 600, 33): 'connection', (600, 35, 600, 40): '"""get"""'}, {}), "(connection, 'get')", False, 'import mock\n'), ((607, 5, 607, 41), 'mock.patch.object', 'mock.patch.object', ({(607, 23, 607, 33): 'connection', (607, 35, 607, 40): '"""get"""'}, {}), "(connection, 'get')", False, 'import mock\n'), ((616, 5, 616, 41), 'mock.patch.object', 'mock.patch.object', ({(616, 23, 616, 33): 'connection', (616, 35, 616, 40): '"""get"""'}, {}), "(connection, 'get')", False, 'import mock\n'), ((633, 5, 633, 41), 'mock.patch.object', 'mock.patch.object', ({(633, 23, 633, 33): 'connection', (633, 35, 633, 40): '"""get"""'}, {}), "(connection, 'get')", False, 'import mock\n'), ((650, 5, 650, 41), 'mock.patch.object', 'mock.patch.object', ({(650, 23, 650, 33): 'connection', (650, 35, 650, 40): '"""get"""'}, {}), "(connection, 'get')", False, 'import mock\n'), ((667, 5, 667, 41), 'mock.patch.object', 'mock.patch.object', ({(667, 23, 667, 33): 'connection', (667, 35, 667, 40): '"""get"""'}, {}), "(connection, 'get')", False, 'import mock\n'), ((684, 5, 684, 41), 'mock.patch.object', 'mock.patch.object', ({(684, 23, 684, 33): 'connection', (684, 35, 684, 40): '"""get"""'}, {}), "(connection, 'get')", False, 'import mock\n'), ((704, 5, 704, 41), 'mock.patch.object', 'mock.patch.object', ({(704, 23, 704, 33): 'connection', (704, 35, 704, 40): '"""get"""'}, {}), "(connection, 'get')", False, 'import mock\n'), ((720, 5, 720, 41), 'mock.patch.object', 'mock.patch.object', ({(720, 23, 720, 33): 'connection', (720, 35, 720, 40): '"""get"""'}, {}), "(connection, 'get')", False, 'import mock\n'), ((728, 5, 728, 41), 'mock.patch.object', 'mock.patch.object', ({(728, 23, 728, 33): 'connection', (728, 35, 728, 40): '"""get"""'}, {}), "(connection, 'get')", False, 'import mock\n'), ((736, 5, 736, 48), 'mock.patch.object', 'mock.patch.object', ({(736, 23, 736, 37): 'ResourceHelper', (736, 39, 736, 47): '"""do_get"""'}, {}), "(ResourceHelper, 'do_get')", False, 'import mock\n'), ((743, 5, 743, 42), 'mock.patch.object', 'mock.patch.object', ({(743, 23, 743, 33): 'connection', (743, 35, 743, 41): '"""post"""'}, {}), "(connection, 
'post')", False, 'import mock\n'), ((752, 5, 752, 42), 'mock.patch.object', 'mock.patch.object', ({(752, 23, 752, 33): 'connection', (752, 35, 752, 41): '"""post"""'}, {}), "(connection, 'post')", False, 'import mock\n'), ((764, 5, 764, 42), 'mock.patch.object', 'mock.patch.object', ({(764, 23, 764, 33): 'connection', (764, 35, 764, 41): '"""post"""'}, {}), "(connection, 'post')", False, 'import mock\n'), ((773, 5, 773, 42), 'mock.patch.object', 'mock.patch.object', ({(773, 23, 773, 33): 'connection', (773, 35, 773, 41): '"""post"""'}, {}), "(connection, 'post')", False, 'import mock\n'), ((782, 5, 782, 42), 'mock.patch.object', 'mock.patch.object', ({(782, 23, 782, 33): 'connection', (782, 35, 782, 41): '"""post"""'}, {}), "(connection, 'post')", False, 'import mock\n'), ((792, 5, 792, 42), 'mock.patch.object', 'mock.patch.object', ({(792, 23, 792, 33): 'connection', (792, 35, 792, 41): '"""post"""'}, {}), "(connection, 'post')", False, 'import mock\n'), ((801, 5, 801, 42), 'mock.patch.object', 'mock.patch.object', ({(801, 23, 801, 33): 'connection', (801, 35, 801, 41): '"""post"""'}, {}), "(connection, 'post')", False, 'import mock\n'), ((807, 5, 807, 42), 'mock.patch.object', 'mock.patch.object', ({(807, 23, 807, 33): 'connection', (807, 35, 807, 41): '"""post"""'}, {}), "(connection, 'post')", False, 'import mock\n'), ((808, 5, 808, 52), 'mock.patch.object', 'mock.patch.object', ({(808, 23, 808, 34): 'TaskMonitor', (808, 36, 808, 51): '"""wait_for_task"""'}, {}), "(TaskMonitor, 'wait_for_task')", False, 'import mock\n'), ((817, 5, 817, 56), 'mock.patch.object', 'mock.patch.object', ({(817, 23, 817, 31): 'Resource', (817, 33, 817, 55): '"""ensure_resource_data"""'}, {}), "(Resource, 'ensure_resource_data')", False, 'import mock\n'), ((818, 5, 818, 44), 'mock.patch.object', 'mock.patch.object', ({(818, 23, 818, 33): 'connection', (818, 35, 818, 43): '"""delete"""'}, {}), "(connection, 'delete')", False, 'import mock\n'), ((825, 5, 825, 44), 'mock.patch.object', 'mock.patch.object', ({(825, 23, 825, 33): 'connection', (825, 35, 825, 43): '"""delete"""'}, {}), "(connection, 'delete')", False, 'import mock\n'), ((833, 5, 833, 56), 'mock.patch.object', 'mock.patch.object', ({(833, 23, 833, 31): 'Resource', (833, 33, 833, 55): '"""ensure_resource_data"""'}, {}), "(Resource, 'ensure_resource_data')", False, 'import mock\n'), ((834, 5, 834, 44), 'mock.patch.object', 'mock.patch.object', ({(834, 23, 834, 33): 'connection', (834, 35, 834, 43): '"""delete"""'}, {}), "(connection, 'delete')", False, 'import mock\n'), ((835, 5, 835, 52), 'mock.patch.object', 'mock.patch.object', ({(835, 23, 835, 34): 'TaskMonitor', (835, 36, 835, 51): '"""wait_for_task"""'}, {}), "(TaskMonitor, 'wait_for_task')", False, 'import mock\n'), ((844, 5, 844, 56), 'mock.patch.object', 'mock.patch.object', ({(844, 23, 844, 31): 'Resource', (844, 33, 844, 55): '"""ensure_resource_data"""'}, {}), "(Resource, 'ensure_resource_data')", False, 'import mock\n'), ((845, 5, 845, 44), 'mock.patch.object', 'mock.patch.object', ({(845, 23, 845, 33): 'connection', (845, 35, 845, 43): '"""delete"""'}, {}), "(connection, 'delete')", False, 'import mock\n'), ((846, 5, 846, 52), 'mock.patch.object', 'mock.patch.object', ({(846, 23, 846, 34): 'TaskMonitor', (846, 36, 846, 51): '"""wait_for_task"""'}, {}), "(TaskMonitor, 'wait_for_task')", False, 'import mock\n'), ((855, 5, 855, 56), 'mock.patch.object', 'mock.patch.object', ({(855, 23, 855, 31): 'Resource', (855, 33, 855, 55): '"""ensure_resource_data"""'}, {}), "(Resource, 
'ensure_resource_data')", False, 'import mock\n'), ((856, 5, 856, 41), 'mock.patch.object', 'mock.patch.object', ({(856, 23, 856, 33): 'connection', (856, 35, 856, 40): '"""put"""'}, {}), "(connection, 'put')", False, 'import mock\n'), ((869, 5, 869, 56), 'mock.patch.object', 'mock.patch.object', ({(869, 23, 869, 31): 'Resource', (869, 33, 869, 55): '"""ensure_resource_data"""'}, {}), "(Resource, 'ensure_resource_data')", False, 'import mock\n'), ((870, 5, 870, 41), 'mock.patch.object', 'mock.patch.object', ({(870, 23, 870, 33): 'connection', (870, 35, 870, 40): '"""put"""'}, {}), "(connection, 'put')", False, 'import mock\n'), ((878, 5, 878, 56), 'mock.patch.object', 'mock.patch.object', ({(878, 23, 878, 31): 'Resource', (878, 33, 878, 55): '"""ensure_resource_data"""'}, {}), "(Resource, 'ensure_resource_data')", False, 'import mock\n'), ((879, 5, 879, 41), 'mock.patch.object', 'mock.patch.object', ({(879, 23, 879, 33): 'connection', (879, 35, 879, 40): '"""put"""'}, {}), "(connection, 'put')", False, 'import mock\n'), ((891, 5, 891, 56), 'mock.patch.object', 'mock.patch.object', ({(891, 23, 891, 31): 'Resource', (891, 33, 891, 55): '"""ensure_resource_data"""'}, {}), "(Resource, 'ensure_resource_data')", False, 'import mock\n'), ((892, 5, 892, 41), 'mock.patch.object', 'mock.patch.object', ({(892, 23, 892, 33): 'connection', (892, 35, 892, 40): '"""put"""'}, {}), "(connection, 'put')", False, 'import mock\n'), ((902, 5, 902, 56), 'mock.patch.object', 'mock.patch.object', ({(902, 23, 902, 31): 'Resource', (902, 33, 902, 55): '"""ensure_resource_data"""'}, {}), "(Resource, 'ensure_resource_data')", False, 'import mock\n'), ((903, 5, 903, 41), 'mock.patch.object', 'mock.patch.object', ({(903, 23, 903, 33): 'connection', (903, 35, 903, 40): '"""put"""'}, {}), "(connection, 'put')", False, 'import mock\n'), ((913, 5, 913, 56), 'mock.patch.object', 'mock.patch.object', ({(913, 23, 913, 31): 'Resource', (913, 33, 913, 55): '"""ensure_resource_data"""'}, {}), "(Resource, 'ensure_resource_data')", False, 'import mock\n'), ((914, 5, 914, 41), 'mock.patch.object', 'mock.patch.object', ({(914, 23, 914, 33): 'connection', (914, 35, 914, 40): '"""put"""'}, {}), "(connection, 'put')", False, 'import mock\n'), ((924, 5, 924, 56), 'mock.patch.object', 'mock.patch.object', ({(924, 23, 924, 31): 'Resource', (924, 33, 924, 55): '"""ensure_resource_data"""'}, {}), "(Resource, 'ensure_resource_data')", False, 'import mock\n'), ((925, 5, 925, 41), 'mock.patch.object', 'mock.patch.object', ({(925, 23, 925, 33): 'connection', (925, 35, 925, 40): '"""put"""'}, {}), "(connection, 'put')", False, 'import mock\n'), ((926, 5, 926, 52), 'mock.patch.object', 'mock.patch.object', ({(926, 23, 926, 34): 'TaskMonitor', (926, 36, 926, 51): '"""wait_for_task"""'}, {}), "(TaskMonitor, 'wait_for_task')", False, 'import mock\n'), ((938, 5, 938, 56), 'mock.patch.object', 'mock.patch.object', ({(938, 23, 938, 31): 'Resource', (938, 33, 938, 55): '"""ensure_resource_data"""'}, {}), "(Resource, 'ensure_resource_data')", False, 'import mock\n'), ((939, 5, 939, 41), 'mock.patch.object', 'mock.patch.object', ({(939, 23, 939, 33): 'connection', (939, 35, 939, 40): '"""put"""'}, {}), "(connection, 'put')", False, 'import mock\n'), ((940, 5, 940, 52), 'mock.patch.object', 'mock.patch.object', ({(940, 23, 940, 34): 'TaskMonitor', (940, 36, 940, 51): '"""wait_for_task"""'}, {}), "(TaskMonitor, 'wait_for_task')", False, 'import mock\n'), ((951, 5, 951, 42), 'mock.patch.object', 'mock.patch.object', ({(951, 23, 951, 31): 'Resource', (951, 
33, 951, 41): '"""get_by"""'}, {}), "(Resource, 'get_by')", False, 'import mock\n'), ((956, 5, 956, 42), 'mock.patch.object', 'mock.patch.object', ({(956, 23, 956, 31): 'Resource', (956, 33, 956, 41): '"""get_by"""'}, {}), "(Resource, 'get_by')", False, 'import mock\n'), ((963, 5, 963, 41), 'mock.patch.object', 'mock.patch.object', ({(963, 23, 963, 33): 'connection', (963, 35, 963, 40): '"""get"""'}, {}), "(connection, 'get')", False, 'import mock\n'), ((968, 5, 968, 41), 'mock.patch.object', 'mock.patch.object', ({(968, 23, 968, 33): 'connection', (968, 35, 968, 40): '"""get"""'}, {}), "(connection, 'get')", False, 'import mock\n'), ((973, 5, 973, 41), 'mock.patch.object', 'mock.patch.object', ({(973, 23, 973, 33): 'connection', (973, 35, 973, 40): '"""get"""'}, {}), "(connection, 'get')", False, 'import mock\n'), ((980, 5, 980, 41), 'mock.patch.object', 'mock.patch.object', ({(980, 23, 980, 33): 'connection', (980, 35, 980, 40): '"""get"""'}, {}), "(connection, 'get')", False, 'import mock\n'), ((988, 5, 988, 41), 'mock.patch.object', 'mock.patch.object', ({(988, 23, 988, 33): 'connection', (988, 35, 988, 40): '"""get"""'}, {}), "(connection, 'get')", False, 'import mock\n'), ((996, 5, 996, 41), 'mock.patch.object', 'mock.patch.object', ({(996, 23, 996, 33): 'connection', (996, 35, 996, 40): '"""get"""'}, {}), "(connection, 'get')", False, 'import mock\n'), ((1004, 5, 1004, 41), 'mock.patch.object', 'mock.patch.object', ({(1004, 23, 1004, 33): 'connection', (1004, 35, 1004, 40): '"""get"""'}, {}), "(connection, 'get')", False, 'import mock\n'), ((1012, 5, 1012, 41), 'mock.patch.object', 'mock.patch.object', ({(1012, 23, 1012, 33): 'connection', (1012, 35, 1012, 40): '"""get"""'}, {}), "(connection, 'get')", False, 'import mock\n'), ((1173, 5, 1173, 41), 'mock.patch.object', 'mock.patch.object', ({(1173, 23, 1173, 33): 'connection', (1173, 35, 1173, 40): '"""get"""'}, {}), "(connection, 'get')", False, 'import mock\n'), ((1198, 5, 1198, 41), 'mock.patch.object', 'mock.patch.object', ({(1198, 23, 1198, 33): 'connection', (1198, 35, 1198, 40): '"""get"""'}, {}), "(connection, 'get')", False, 'import mock\n'), ((1205, 5, 1205, 41), 'mock.patch.object', 'mock.patch.object', ({(1205, 23, 1205, 33): 'connection', (1205, 35, 1205, 40): '"""get"""'}, {}), "(connection, 'get')", False, 'import mock\n'), ((1212, 5, 1212, 41), 'mock.patch.object', 'mock.patch.object', ({(1212, 23, 1212, 33): 'connection', (1212, 35, 1212, 40): '"""get"""'}, {}), "(connection, 'get')", False, 'import mock\n'), ((1219, 5, 1219, 41), 'mock.patch.object', 'mock.patch.object', ({(1219, 23, 1219, 33): 'connection', (1219, 35, 1219, 40): '"""get"""'}, {}), "(connection, 'get')", False, 'import mock\n'), ((1228, 5, 1228, 41), 'mock.patch.object', 'mock.patch.object', ({(1228, 23, 1228, 33): 'connection', (1228, 35, 1228, 40): '"""get"""'}, {}), "(connection, 'get')", False, 'import mock\n'), ((1245, 5, 1245, 41), 'mock.patch.object', 'mock.patch.object', ({(1245, 23, 1245, 33): 'connection', (1245, 35, 1245, 40): '"""get"""'}, {}), "(connection, 'get')", False, 'import mock\n'), ((1262, 5, 1262, 41), 'mock.patch.object', 'mock.patch.object', ({(1262, 23, 1262, 33): 'connection', (1262, 35, 1262, 40): '"""get"""'}, {}), "(connection, 'get')", False, 'import mock\n'), ((1279, 5, 1279, 41), 'mock.patch.object', 'mock.patch.object', ({(1279, 23, 1279, 33): 'connection', (1279, 35, 1279, 40): '"""get"""'}, {}), "(connection, 'get')", False, 'import mock\n'), ((1296, 5, 1296, 41), 'mock.patch.object', 'mock.patch.object', 
({(1296, 23, 1296, 33): 'connection', (1296, 35, 1296, 40): '"""get"""'}, {}), "(connection, 'get')", False, 'import mock\n'), ((1316, 5, 1316, 41), 'mock.patch.object', 'mock.patch.object', ({(1316, 23, 1316, 33): 'connection', (1316, 35, 1316, 40): '"""get"""'}, {}), "(connection, 'get')", False, 'import mock\n'), ((1332, 5, 1332, 41), 'mock.patch.object', 'mock.patch.object', ({(1332, 23, 1332, 33): 'connection', (1332, 35, 1332, 40): '"""get"""'}, {}), "(connection, 'get')", False, 'import mock\n'), ((1340, 5, 1340, 41), 'mock.patch.object', 'mock.patch.object', ({(1340, 23, 1340, 33): 'connection', (1340, 35, 1340, 40): '"""get"""'}, {}), "(connection, 'get')", False, 'import mock\n'), ((1348, 5, 1348, 44), 'mock.patch.object', 'mock.patch.object', ({(1348, 23, 1348, 33): 'connection', (1348, 35, 1348, 43): '"""delete"""'}, {}), "(connection, 'delete')", False, 'import mock\n'), ((1349, 5, 1349, 52), 'mock.patch.object', 'mock.patch.object', ({(1349, 23, 1349, 34): 'TaskMonitor', (1349, 36, 1349, 51): '"""wait_for_task"""'}, {}), "(TaskMonitor, 'wait_for_task')", False, 'import mock\n'), ((1360, 5, 1360, 44), 'mock.patch.object', 'mock.patch.object', ({(1360, 23, 1360, 33): 'connection', (1360, 35, 1360, 43): '"""delete"""'}, {}), "(connection, 'delete')", False, 'import mock\n'), ((1369, 5, 1369, 44), 'mock.patch.object', 'mock.patch.object', ({(1369, 23, 1369, 33): 'connection', (1369, 35, 1369, 43): '"""delete"""'}, {}), "(connection, 'delete')", False, 'import mock\n'), ((1370, 5, 1370, 52), 'mock.patch.object', 'mock.patch.object', ({(1370, 23, 1370, 34): 'TaskMonitor', (1370, 36, 1370, 51): '"""wait_for_task"""'}, {}), "(TaskMonitor, 'wait_for_task')", False, 'import mock\n'), ((1381, 5, 1381, 44), 'mock.patch.object', 'mock.patch.object', ({(1381, 23, 1381, 33): 'connection', (1381, 35, 1381, 43): '"""delete"""'}, {}), "(connection, 'delete')", False, 'import mock\n'), ((1382, 5, 1382, 52), 'mock.patch.object', 'mock.patch.object', ({(1382, 23, 1382, 34): 'TaskMonitor', (1382, 36, 1382, 51): '"""wait_for_task"""'}, {}), "(TaskMonitor, 'wait_for_task')", False, 'import mock\n'), ((1392, 5, 1392, 44), 'mock.patch.object', 'mock.patch.object', ({(1392, 23, 1392, 33): 'connection', (1392, 35, 1392, 43): '"""delete"""'}, {}), "(connection, 'delete')", False, 'import mock\n'), ((1393, 5, 1393, 52), 'mock.patch.object', 'mock.patch.object', ({(1393, 23, 1393, 34): 'TaskMonitor', (1393, 36, 1393, 51): '"""wait_for_task"""'}, {}), "(TaskMonitor, 'wait_for_task')", False, 'import mock\n'), ((1412, 5, 1412, 41), 'mock.patch.object', 'mock.patch.object', ({(1412, 23, 1412, 33): 'connection', (1412, 35, 1412, 40): '"""get"""'}, {}), "(connection, 'get')", False, 'import mock\n'), ((1417, 5, 1417, 41), 'mock.patch.object', 'mock.patch.object', ({(1417, 23, 1417, 33): 'connection', (1417, 35, 1417, 40): '"""get"""'}, {}), "(connection, 'get')", False, 'import mock\n'), ((1422, 5, 1422, 48), 'mock.patch.object', 'mock.patch.object', ({(1422, 23, 1422, 37): 'ResourceClient', (1422, 39, 1422, 47): '"""get_by"""'}, {}), "(ResourceClient, 'get_by')", False, 'import mock\n'), ((1429, 5, 1429, 48), 'mock.patch.object', 'mock.patch.object', ({(1429, 23, 1429, 37): 'ResourceClient', (1429, 39, 1429, 47): '"""get_by"""'}, {}), "(ResourceClient, 'get_by')", False, 'import mock\n'), ((1436, 5, 1436, 41), 'mock.patch.object', 'mock.patch.object', ({(1436, 23, 1436, 33): 'connection', (1436, 35, 1436, 40): '"""get"""'}, {}), "(connection, 'get')", False, 'import mock\n'), ((1444, 5, 1444, 41), 
'mock.patch.object', 'mock.patch.object', ({(1444, 23, 1444, 33): 'connection', (1444, 35, 1444, 40): '"""get"""'}, {}), "(connection, 'get')", False, 'import mock\n'), ((1452, 5, 1452, 41), 'mock.patch.object', 'mock.patch.object', ({(1452, 23, 1452, 33): 'connection', (1452, 35, 1452, 40): '"""get"""'}, {}), "(connection, 'get')", False, 'import mock\n'), ((1460, 5, 1460, 41), 'mock.patch.object', 'mock.patch.object', ({(1460, 23, 1460, 33): 'connection', (1460, 35, 1460, 40): '"""get"""'}, {}), "(connection, 'get')", False, 'import mock\n'), ((1468, 5, 1468, 49), 'mock.patch.object', 'mock.patch.object', ({(1468, 23, 1468, 37): 'ResourceClient', (1468, 39, 1468, 48): '"""get_all"""'}, {}), "(ResourceClient, 'get_all')", False, 'import mock\n'), ((1473, 5, 1473, 49), 'mock.patch.object', 'mock.patch.object', ({(1473, 23, 1473, 37): 'ResourceClient', (1473, 39, 1473, 48): '"""get_all"""'}, {}), "(ResourceClient, 'get_all')", False, 'import mock\n'), ((1483, 5, 1483, 49), 'mock.patch.object', 'mock.patch.object', ({(1483, 23, 1483, 37): 'ResourceClient', (1483, 39, 1483, 48): '"""get_all"""'}, {}), "(ResourceClient, 'get_all')", False, 'import mock\n'), ((1493, 5, 1493, 49), 'mock.patch.object', 'mock.patch.object', ({(1493, 23, 1493, 37): 'ResourceClient', (1493, 39, 1493, 48): '"""get_all"""'}, {}), "(ResourceClient, 'get_all')", False, 'import mock\n'), ((1498, 5, 1498, 49), 'mock.patch.object', 'mock.patch.object', ({(1498, 23, 1498, 37): 'ResourceClient', (1498, 39, 1498, 48): '"""get_all"""'}, {}), "(ResourceClient, 'get_all')", False, 'import mock\n'), ((1507, 5, 1507, 41), 'mock.patch.object', 'mock.patch.object', ({(1507, 23, 1507, 33): 'connection', (1507, 35, 1507, 40): '"""put"""'}, {}), "(connection, 'put')", False, 'import mock\n'), ((1508, 5, 1508, 52), 'mock.patch.object', 'mock.patch.object', ({(1508, 23, 1508, 34): 'TaskMonitor', (1508, 36, 1508, 51): '"""wait_for_task"""'}, {}), "(TaskMonitor, 'wait_for_task')", False, 'import mock\n'), ((1518, 5, 1518, 41), 'mock.patch.object', 'mock.patch.object', ({(1518, 23, 1518, 33): 'connection', (1518, 35, 1518, 40): '"""put"""'}, {}), "(connection, 'put')", False, 'import mock\n'), ((1519, 5, 1519, 52), 'mock.patch.object', 'mock.patch.object', ({(1519, 23, 1519, 34): 'TaskMonitor', (1519, 36, 1519, 51): '"""wait_for_task"""'}, {}), "(TaskMonitor, 'wait_for_task')", False, 'import mock\n'), ((1527, 5, 1527, 41), 'mock.patch.object', 'mock.patch.object', ({(1527, 23, 1527, 33): 'connection', (1527, 35, 1527, 40): '"""put"""'}, {}), "(connection, 'put')", False, 'import mock\n'), ((1528, 5, 1528, 52), 'mock.patch.object', 'mock.patch.object', ({(1528, 23, 1528, 34): 'TaskMonitor', (1528, 36, 1528, 51): '"""wait_for_task"""'}, {}), "(TaskMonitor, 'wait_for_task')", False, 'import mock\n'), ((1540, 5, 1540, 41), 'mock.patch.object', 'mock.patch.object', ({(1540, 23, 1540, 33): 'connection', (1540, 35, 1540, 40): '"""put"""'}, {}), "(connection, 'put')", False, 'import mock\n'), ((1549, 5, 1549, 41), 'mock.patch.object', 'mock.patch.object', ({(1549, 23, 1549, 33): 'connection', (1549, 35, 1549, 40): '"""put"""'}, {}), "(connection, 'put')", False, 'import mock\n'), ((1560, 5, 1560, 41), 'mock.patch.object', 'mock.patch.object', ({(1560, 23, 1560, 33): 'connection', (1560, 35, 1560, 40): '"""put"""'}, {}), "(connection, 'put')", False, 'import mock\n'), ((1569, 5, 1569, 41), 'mock.patch.object', 'mock.patch.object', ({(1569, 23, 1569, 33): 'connection', (1569, 35, 1569, 40): '"""put"""'}, {}), "(connection, 'put')", False, 'import 
mock\n'), ((1580, 5, 1580, 41), 'mock.patch.object', 'mock.patch.object', ({(1580, 23, 1580, 33): 'connection', (1580, 35, 1580, 40): '"""put"""'}, {}), "(connection, 'put')", False, 'import mock\n'), ((1593, 5, 1593, 41), 'mock.patch.object', 'mock.patch.object', ({(1593, 23, 1593, 33): 'connection', (1593, 35, 1593, 40): '"""put"""'}, {}), "(connection, 'put')", False, 'import mock\n'), ((1605, 5, 1605, 41), 'mock.patch.object', 'mock.patch.object', ({(1605, 23, 1605, 33): 'connection', (1605, 35, 1605, 40): '"""put"""'}, {}), "(connection, 'put')", False, 'import mock\n'), ((1615, 5, 1615, 41), 'mock.patch.object', 'mock.patch.object', ({(1615, 23, 1615, 33): 'connection', (1615, 35, 1615, 40): '"""put"""'}, {}), "(connection, 'put')", False, 'import mock\n'), ((1627, 5, 1627, 41), 'mock.patch.object', 'mock.patch.object', ({(1627, 23, 1627, 33): 'connection', (1627, 35, 1627, 40): '"""put"""'}, {}), "(connection, 'put')", False, 'import mock\n'), ((1628, 5, 1628, 52), 'mock.patch.object', 'mock.patch.object', ({(1628, 23, 1628, 34): 'TaskMonitor', (1628, 36, 1628, 51): '"""wait_for_task"""'}, {}), "(TaskMonitor, 'wait_for_task')", False, 'import mock\n'), ((1640, 5, 1640, 41), 'mock.patch.object', 'mock.patch.object', ({(1640, 23, 1640, 33): 'connection', (1640, 35, 1640, 40): '"""put"""'}, {}), "(connection, 'put')", False, 'import mock\n'), ((1641, 5, 1641, 52), 'mock.patch.object', 'mock.patch.object', ({(1641, 23, 1641, 34): 'TaskMonitor', (1641, 36, 1641, 51): '"""wait_for_task"""'}, {}), "(TaskMonitor, 'wait_for_task')", False, 'import mock\n'), ((1654, 5, 1654, 42), 'mock.patch.object', 'mock.patch.object', ({(1654, 23, 1654, 33): 'connection', (1654, 35, 1654, 41): '"""post"""'}, {}), "(connection, 'post')", False, 'import mock\n'), ((1655, 5, 1655, 52), 'mock.patch.object', 'mock.patch.object', ({(1655, 23, 1655, 34): 'TaskMonitor', (1655, 36, 1655, 51): '"""wait_for_task"""'}, {}), "(TaskMonitor, 'wait_for_task')", False, 'import mock\n'), ((1665, 5, 1665, 42), 'mock.patch.object', 'mock.patch.object', ({(1665, 23, 1665, 33): 'connection', (1665, 35, 1665, 41): '"""post"""'}, {}), "(connection, 'post')", False, 'import mock\n'), ((1666, 5, 1666, 52), 'mock.patch.object', 'mock.patch.object', ({(1666, 23, 1666, 34): 'TaskMonitor', (1666, 36, 1666, 51): '"""wait_for_task"""'}, {}), "(TaskMonitor, 'wait_for_task')", False, 'import mock\n'), ((1675, 5, 1675, 42), 'mock.patch.object', 'mock.patch.object', ({(1675, 23, 1675, 33): 'connection', (1675, 35, 1675, 41): '"""post"""'}, {}), "(connection, 'post')", False, 'import mock\n'), ((1676, 5, 1676, 52), 'mock.patch.object', 'mock.patch.object', ({(1676, 23, 1676, 34): 'TaskMonitor', (1676, 36, 1676, 51): '"""wait_for_task"""'}, {}), "(TaskMonitor, 'wait_for_task')", False, 'import mock\n'), ((1684, 5, 1684, 42), 'mock.patch.object', 'mock.patch.object', ({(1684, 23, 1684, 33): 'connection', (1684, 35, 1684, 41): '"""post"""'}, {}), "(connection, 'post')", False, 'import mock\n'), ((1685, 5, 1685, 52), 'mock.patch.object', 'mock.patch.object', ({(1685, 23, 1685, 34): 'TaskMonitor', (1685, 36, 1685, 51): '"""wait_for_task"""'}, {}), "(TaskMonitor, 'wait_for_task')", False, 'import mock\n'), ((1697, 5, 1697, 42), 'mock.patch.object', 'mock.patch.object', ({(1697, 23, 1697, 33): 'connection', (1697, 35, 1697, 41): '"""post"""'}, {}), "(connection, 'post')", False, 'import mock\n'), ((1706, 5, 1706, 42), 'mock.patch.object', 'mock.patch.object', ({(1706, 23, 1706, 33): 'connection', (1706, 35, 1706, 41): '"""post"""'}, {}), 
"(connection, 'post')", False, 'import mock\n'), ((1714, 5, 1714, 42), 'mock.patch.object', 'mock.patch.object', ({(1714, 23, 1714, 33): 'connection', (1714, 35, 1714, 41): '"""post"""'}, {}), "(connection, 'post')", False, 'import mock\n'), ((1723, 5, 1723, 42), 'mock.patch.object', 'mock.patch.object', ({(1723, 23, 1723, 33): 'connection', (1723, 35, 1723, 41): '"""post"""'}, {}), "(connection, 'post')", False, 'import mock\n'), ((1734, 5, 1734, 42), 'mock.patch.object', 'mock.patch.object', ({(1734, 23, 1734, 33): 'connection', (1734, 35, 1734, 41): '"""post"""'}, {}), "(connection, 'post')", False, 'import mock\n'), ((1744, 5, 1744, 42), 'mock.patch.object', 'mock.patch.object', ({(1744, 23, 1744, 33): 'connection', (1744, 35, 1744, 41): '"""post"""'}, {}), "(connection, 'post')", False, 'import mock\n'), ((1753, 5, 1753, 42), 'mock.patch.object', 'mock.patch.object', ({(1753, 23, 1753, 33): 'connection', (1753, 35, 1753, 41): '"""post"""'}, {}), "(connection, 'post')", False, 'import mock\n'), ((1763, 5, 1763, 42), 'mock.patch.object', 'mock.patch.object', ({(1763, 23, 1763, 33): 'connection', (1763, 35, 1763, 41): '"""post"""'}, {}), "(connection, 'post')", False, 'import mock\n'), ((1772, 5, 1772, 42), 'mock.patch.object', 'mock.patch.object', ({(1772, 23, 1772, 33): 'connection', (1772, 35, 1772, 41): '"""post"""'}, {}), "(connection, 'post')", False, 'import mock\n'), ((1773, 5, 1773, 52), 'mock.patch.object', 'mock.patch.object', ({(1773, 23, 1773, 34): 'TaskMonitor', (1773, 36, 1773, 51): '"""wait_for_task"""'}, {}), "(TaskMonitor, 'wait_for_task')", False, 'import mock\n'), ((1790, 5, 1790, 42), 'mock.patch.object', 'mock.patch.object', ({(1790, 23, 1790, 33): 'connection', (1790, 35, 1790, 41): '"""post"""'}, {}), "(connection, 'post')", False, 'import mock\n'), ((1791, 5, 1791, 52), 'mock.patch.object', 'mock.patch.object', ({(1791, 23, 1791, 34): 'TaskMonitor', (1791, 36, 1791, 51): '"""wait_for_task"""'}, {}), "(TaskMonitor, 'wait_for_task')", False, 'import mock\n'), ((1800, 5, 1800, 43), 'mock.patch.object', 'mock.patch.object', ({(1800, 23, 1800, 33): 'connection', (1800, 35, 1800, 42): '"""patch"""'}, {}), "(connection, 'patch')", False, 'import mock\n'), ((1817, 5, 1817, 43), 'mock.patch.object', 'mock.patch.object', ({(1817, 23, 1817, 33): 'connection', (1817, 35, 1817, 42): '"""patch"""'}, {}), "(connection, 'patch')", False, 'import mock\n'), ((1833, 5, 1833, 43), 'mock.patch.object', 'mock.patch.object', ({(1833, 23, 1833, 33): 'connection', (1833, 35, 1833, 42): '"""patch"""'}, {}), "(connection, 'patch')", False, 'import mock\n'), ((1848, 5, 1848, 43), 'mock.patch.object', 'mock.patch.object', ({(1848, 23, 1848, 33): 'connection', (1848, 35, 1848, 42): '"""patch"""'}, {}), "(connection, 'patch')", False, 'import mock\n'), ((1859, 5, 1859, 43), 'mock.patch.object', 'mock.patch.object', ({(1859, 23, 1859, 33): 'connection', (1859, 35, 1859, 42): '"""patch"""'}, {}), "(connection, 'patch')", False, 'import mock\n'), ((1872, 5, 1872, 43), 'mock.patch.object', 'mock.patch.object', ({(1872, 23, 1872, 33): 'connection', (1872, 35, 1872, 42): '"""patch"""'}, {}), "(connection, 'patch')", False, 'import mock\n'), ((1873, 5, 1873, 52), 'mock.patch.object', 'mock.patch.object', ({(1873, 23, 1873, 34): 'TaskMonitor', (1873, 36, 1873, 51): '"""wait_for_task"""'}, {}), "(TaskMonitor, 'wait_for_task')", False, 'import mock\n'), ((1884, 5, 1884, 43), 'mock.patch.object', 'mock.patch.object', ({(1884, 23, 1884, 33): 'connection', (1884, 35, 1884, 42): '"""patch"""'}, {}), 
"(connection, 'patch')", False, 'import mock\n'), ((1885, 5, 1885, 57), 'mock.patch.object', 'mock.patch.object', ({(1885, 23, 1885, 34): 'TaskMonitor', (1885, 36, 1885, 56): '"""get_completed_task"""'}, {}), "(TaskMonitor, 'get_completed_task')", False, 'import mock\n'), ((1901, 5, 1901, 43), 'mock.patch.object', 'mock.patch.object', ({(1901, 23, 1901, 33): 'connection', (1901, 35, 1901, 42): '"""patch"""'}, {}), "(connection, 'patch')", False, 'import mock\n'), ((1902, 5, 1902, 57), 'mock.patch.object', 'mock.patch.object', ({(1902, 23, 1902, 34): 'TaskMonitor', (1902, 36, 1902, 56): '"""get_completed_task"""'}, {}), "(TaskMonitor, 'get_completed_task')", False, 'import mock\n'), ((1920, 5, 1920, 43), 'mock.patch.object', 'mock.patch.object', ({(1920, 23, 1920, 33): 'connection', (1920, 35, 1920, 42): '"""patch"""'}, {}), "(connection, 'patch')", False, 'import mock\n'), ((1921, 5, 1921, 52), 'mock.patch.object', 'mock.patch.object', ({(1921, 23, 1921, 34): 'TaskMonitor', (1921, 36, 1921, 51): '"""wait_for_task"""'}, {}), "(TaskMonitor, 'wait_for_task')", False, 'import mock\n'), ((1940, 5, 1940, 44), 'mock.patch.object', 'mock.patch.object', ({(1940, 23, 1940, 33): 'connection', (1940, 35, 1940, 43): '"""delete"""'}, {}), "(connection, 'delete')", False, 'import mock\n'), ((2015, 5, 2015, 41), 'mock.patch.object', 'mock.patch.object', ({(2015, 23, 2015, 33): 'connection', (2015, 35, 2015, 40): '"""get"""'}, {}), "(connection, 'get')", False, 'import mock\n'), ((2044, 5, 2044, 41), 'mock.patch.object', 'mock.patch.object', ({(2044, 23, 2044, 33): 'connection', (2044, 35, 2044, 40): '"""get"""'}, {}), "(connection, 'get')", False, 'import mock\n'), ((2058, 5, 2058, 41), 'mock.patch.object', 'mock.patch.object', ({(2058, 23, 2058, 33): 'connection', (2058, 35, 2058, 40): '"""get"""'}, {}), "(connection, 'get')", False, 'import mock\n'), ((2077, 5, 2077, 41), 'mock.patch.object', 'mock.patch.object', ({(2077, 23, 2077, 33): 'connection', (2077, 35, 2077, 40): '"""get"""'}, {}), "(connection, 'get')", False, 'import mock\n'), ((2085, 5, 2085, 41), 'mock.patch.object', 'mock.patch.object', ({(2085, 23, 2085, 33): 'connection', (2085, 35, 2085, 40): '"""get"""'}, {}), "(connection, 'get')", False, 'import mock\n'), ((2198, 5, 2198, 42), 'mock.patch.object', 'mock.patch.object', ({(2198, 23, 2198, 33): 'connection', (2198, 35, 2198, 41): '"""post"""'}, {}), "(connection, 'post')", False, 'import mock\n'), ((2199, 5, 2199, 57), 'mock.patch.object', 'mock.patch.object', ({(2199, 23, 2199, 34): 'TaskMonitor', (2199, 36, 2199, 56): '"""get_completed_task"""'}, {}), "(TaskMonitor, 'get_completed_task')", False, 'import mock\n'), ((2211, 5, 2211, 42), 'mock.patch.object', 'mock.patch.object', ({(2211, 23, 2211, 33): 'connection', (2211, 35, 2211, 41): '"""post"""'}, {}), "(connection, 'post')", False, 'import mock\n'), ((2212, 5, 2212, 57), 'mock.patch.object', 'mock.patch.object', ({(2212, 23, 2212, 34): 'TaskMonitor', (2212, 36, 2212, 56): '"""get_completed_task"""'}, {}), "(TaskMonitor, 'get_completed_task')", False, 'import mock\n'), ((2224, 5, 2224, 42), 'mock.patch.object', 'mock.patch.object', ({(2224, 23, 2224, 33): 'connection', (2224, 35, 2224, 41): '"""post"""'}, {}), "(connection, 'post')", False, 'import mock\n'), ((2225, 5, 2225, 57), 'mock.patch.object', 'mock.patch.object', ({(2225, 23, 2225, 34): 'TaskMonitor', (2225, 36, 2225, 56): '"""get_completed_task"""'}, {}), "(TaskMonitor, 'get_completed_task')", False, 'import mock\n'), ((2241, 5, 2241, 42), 'mock.patch.object', 
'mock.patch.object', ({(2241, 23, 2241, 33): 'connection', (2241, 35, 2241, 41): '"""post"""'}, {}), "(connection, 'post')", False, 'import mock\n'), ((2242, 5, 2242, 57), 'mock.patch.object', 'mock.patch.object', ({(2242, 23, 2242, 34): 'TaskMonitor', (2242, 36, 2242, 56): '"""get_completed_task"""'}, {}), "(TaskMonitor, 'get_completed_task')", False, 'import mock\n'), ((2254, 5, 2254, 42), 'mock.patch.object', 'mock.patch.object', ({(2254, 23, 2254, 33): 'connection', (2254, 35, 2254, 41): '"""post"""'}, {}), "(connection, 'post')", False, 'import mock\n'), ((2268, 5, 2268, 42), 'mock.patch.object', 'mock.patch.object', ({(2268, 23, 2268, 33): 'connection', (2268, 35, 2268, 41): '"""post"""'}, {}), "(connection, 'post')", False, 'import mock\n'), ((2303, 5, 2303, 75), 'mock.patch.object', 'mock.patch.object', ({(2303, 23, 2303, 33): 'connection', (2303, 35, 2303, 74): '"""post_multipart_with_response_handling"""'}, {}), "(connection, 'post_multipart_with_response_handling')", False, 'import mock\n'), ((2313, 5, 2313, 75), 'mock.patch.object', 'mock.patch.object', ({(2313, 23, 2313, 33): 'connection', (2313, 35, 2313, 74): '"""post_multipart_with_response_handling"""'}, {}), "(connection, 'post_multipart_with_response_handling')", False, 'import mock\n'), ((2322, 5, 2322, 75), 'mock.patch.object', 'mock.patch.object', ({(2322, 23, 2322, 33): 'connection', (2322, 35, 2322, 74): '"""post_multipart_with_response_handling"""'}, {}), "(connection, 'post_multipart_with_response_handling')", False, 'import mock\n'), ((2323, 5, 2323, 52), 'mock.patch.object', 'mock.patch.object', ({(2323, 23, 2323, 34): 'TaskMonitor', (2323, 36, 2323, 51): '"""wait_for_task"""'}, {}), "(TaskMonitor, 'wait_for_task')", False, 'import mock\n'), ((2324, 5, 2324, 41), 'mock.patch.object', 'mock.patch.object', ({(2324, 23, 2324, 33): 'connection', (2324, 35, 2324, 40): '"""get"""'}, {}), "(connection, 'get')", False, 'import mock\n'), ((2334, 5, 2334, 75), 'mock.patch.object', 'mock.patch.object', ({(2334, 23, 2334, 33): 'connection', (2334, 35, 2334, 74): '"""post_multipart_with_response_handling"""'}, {}), "(connection, 'post_multipart_with_response_handling')", False, 'import mock\n'), ((2335, 5, 2335, 52), 'mock.patch.object', 'mock.patch.object', ({(2335, 23, 2335, 34): 'TaskMonitor', (2335, 36, 2335, 51): '"""wait_for_task"""'}, {}), "(TaskMonitor, 'wait_for_task')", False, 'import mock\n'), ((2345, 5, 2345, 75), 'mock.patch.object', 'mock.patch.object', ({(2345, 23, 2345, 33): 'connection', (2345, 35, 2345, 74): '"""post_multipart_with_response_handling"""'}, {}), "(connection, 'post_multipart_with_response_handling')", False, 'import mock\n'), ((2346, 5, 2346, 52), 'mock.patch.object', 'mock.patch.object', ({(2346, 23, 2346, 34): 'TaskMonitor', (2346, 36, 2346, 51): '"""wait_for_task"""'}, {}), "(TaskMonitor, 'wait_for_task')", False, 'import mock\n'), ((2347, 5, 2347, 41), 'mock.patch.object', 'mock.patch.object', ({(2347, 23, 2347, 33): 'connection', (2347, 35, 2347, 40): '"""get"""'}, {}), "(connection, 'get')", False, 'import mock\n'), ((2360, 5, 2360, 75), 'mock.patch.object', 'mock.patch.object', ({(2360, 23, 2360, 33): 'connection', (2360, 35, 2360, 74): '"""post_multipart_with_response_handling"""'}, {}), "(connection, 'post_multipart_with_response_handling')", False, 'import mock\n'), ((2361, 5, 2361, 52), 'mock.patch.object', 'mock.patch.object', ({(2361, 23, 2361, 34): 'TaskMonitor', (2361, 36, 2361, 51): '"""wait_for_task"""'}, {}), "(TaskMonitor, 'wait_for_task')", False, 'import mock\n'), ((2372, 
5, 2372, 56), 'mock.patch.object', 'mock.patch.object', ({(2372, 23, 2372, 33): 'connection', (2372, 35, 2372, 55): '"""download_to_stream"""'}, {}), "(connection, 'download_to_stream')", False, 'import mock\n'), ((2383, 5, 2383, 56), 'mock.patch.object', 'mock.patch.object', ({(2383, 23, 2383, 33): 'connection', (2383, 35, 2383, 55): '"""download_to_stream"""'}, {}), "(connection, 'download_to_stream')", False, 'import mock\n'), ((2396, 5, 2396, 56), 'mock.patch.object', 'mock.patch.object', ({(2396, 23, 2396, 33): 'connection', (2396, 35, 2396, 55): '"""download_to_stream"""'}, {}), "(connection, 'download_to_stream')", False, 'import mock\n'), ((2408, 5, 2408, 56), 'mock.patch.object', 'mock.patch.object', ({(2408, 23, 2408, 33): 'connection', (2408, 35, 2408, 55): '"""download_to_stream"""'}, {}), "(connection, 'download_to_stream')", False, 'import mock\n'), ((84, 26, 84, 54), 'hpOneView.connection.connection', 'connection', ({(84, 37, 84, 48): '"""127.0.0.1"""', (84, 50, 84, 53): '300'}, {}), "('127.0.0.1', 300)", False, 'from hpOneView.connection import connection\n'), ((135, 34, 135, 45), 'mock.Mock', 'mock.Mock', ({}, {}), '()', False, 'import mock\n'), ((148, 29, 148, 40), 'mock.Mock', 'mock.Mock', ({}, {}), '()', False, 'import mock\n'), ((162, 33, 162, 46), 'io.StringIO', 'io.StringIO', ({}, {}), '()', False, 'import io\n'), ((158, 16, 158, 36), 'tests.test_utils.mock_builtin', 'mock_builtin', ({(158, 29, 158, 35): '"""open"""'}, {}), "('open')", False, 'from tests.test_utils import mock_builtin\n'), ((173, 20, 173, 33), 'io.StringIO', 'io.StringIO', ({}, {}), '()', False, 'import io\n'), ((169, 16, 169, 36), 'tests.test_utils.mock_builtin', 'mock_builtin', ({(169, 29, 169, 35): '"""open"""'}, {}), "('open')", False, 'from tests.test_utils import mock_builtin\n'), ((187, 33, 187, 46), 'io.StringIO', 'io.StringIO', ({}, {}), '()', False, 'import io\n'), ((182, 16, 182, 36), 'tests.test_utils.mock_builtin', 'mock_builtin', ({(182, 29, 182, 35): '"""open"""'}, {}), "('open')", False, 'from tests.test_utils import mock_builtin\n'), ((199, 33, 199, 46), 'io.StringIO', 'io.StringIO', ({}, {}), '()', False, 'import io\n'), ((194, 16, 194, 36), 'tests.test_utils.mock_builtin', 'mock_builtin', ({(194, 29, 194, 35): '"""open"""'}, {}), "('open')", False, 'from tests.test_utils import mock_builtin\n'), ((209, 26, 209, 54), 'hpOneView.connection.connection', 'connection', ({(209, 37, 209, 48): '"""127.0.0.1"""', (209, 50, 209, 53): '300'}, {}), "('127.0.0.1', 300)", False, 'from hpOneView.connection import connection\n'), ((315, 26, 315, 54), 'hpOneView.connection.connection', 'connection', ({(315, 37, 315, 48): '"""127.0.0.1"""', (315, 50, 315, 53): '300'}, {}), "('127.0.0.1', 300)", False, 'from hpOneView.connection import connection\n'), ((445, 26, 445, 54), 'hpOneView.connection.connection', 'connection', ({(445, 37, 445, 48): '"""127.0.0.1"""', (445, 50, 445, 53): '300'}, {}), "('127.0.0.1', 300)", False, 'from hpOneView.connection import connection\n'), ((504, 26, 504, 54), 'hpOneView.connection.connection', 'connection', ({(504, 37, 504, 48): '"""127.0.0.1"""', (504, 50, 504, 53): '300'}, {}), "('127.0.0.1', 300)", False, 'from hpOneView.connection import connection\n'), ((517, 26, 517, 54), 'hpOneView.connection.connection', 'connection', ({(517, 37, 517, 48): '"""127.0.0.1"""', (517, 50, 517, 53): '300'}, {}), "('127.0.0.1', 300)", False, 'from hpOneView.connection import connection\n'), ((520, 31, 520, 78), 'hpOneView.resources.resource.ResourceHelper', 'ResourceHelper', ({(520, 
46, 520, 54): 'self.URI', (520, 56, 520, 71): 'self.connection', (520, 73, 520, 77): 'None'}, {}), '(self.URI, self.connection, None)', False, 'from hpOneView.resources.resource import ResourceClient, ResourceHelper, ResourceFileHandlerMixin, ResourceZeroBodyMixin, ResourcePatchMixin, ResourceUtilizationMixin, ResourceSchemaMixin, Resource, RESOURCE_CLIENT_INVALID_ID, UNRECOGNIZED_URI, TaskMonitor, RESOURCE_CLIENT_TASK_EXPECTED, RESOURCE_ID_OR_URI_REQUIRED, transform_list_to_dict, extract_id_from_uri, merge_resources, merge_default_values, unavailable_method\n'), ((787, 26, 787, 67), 'hpOneView.resources.resource.ResourceClient', 'ResourceClient', ({(787, 41, 787, 56): 'self.connection', (787, 58, 787, 66): 'self.URI'}, {}), '(self.connection, self.URI)', False, 'from hpOneView.resources.resource import ResourceClient, ResourceHelper, ResourceFileHandlerMixin, ResourceZeroBodyMixin, ResourcePatchMixin, ResourceUtilizationMixin, ResourceSchemaMixin, Resource, RESOURCE_CLIENT_INVALID_ID, UNRECOGNIZED_URI, TaskMonitor, RESOURCE_CLIENT_TASK_EXPECTED, RESOURCE_ID_OR_URI_REQUIRED, transform_list_to_dict, extract_id_from_uri, merge_resources, merge_default_values, unavailable_method\n'), ((1122, 26, 1122, 63), 'hpOneView.resources.resource.merge_resources', 'merge_resources', ({(1122, 42, 1122, 51): 'resource1', (1122, 53, 1122, 62): 'resource2'}, {}), '(resource1, resource2)', False, 'from hpOneView.resources.resource import ResourceClient, ResourceHelper, ResourceFileHandlerMixin, ResourceZeroBodyMixin, ResourcePatchMixin, ResourceUtilizationMixin, ResourceSchemaMixin, Resource, RESOURCE_CLIENT_INVALID_ID, UNRECOGNIZED_URI, TaskMonitor, RESOURCE_CLIENT_TASK_EXPECTED, RESOURCE_ID_OR_URI_REQUIRED, transform_list_to_dict, extract_id_from_uri, merge_resources, merge_default_values, unavailable_method\n'), ((1130, 22, 1130, 80), 'hpOneView.resources.resource.merge_default_values', 'merge_default_values', ({(1130, 43, 1130, 65): '[resource1, resource2]', (1130, 67, 1130, 79): 'default_type'}, {}), '([resource1, resource2], default_type)', False, 'from hpOneView.resources.resource import ResourceClient, ResourceHelper, ResourceFileHandlerMixin, ResourceZeroBodyMixin, ResourcePatchMixin, ResourceUtilizationMixin, ResourceSchemaMixin, Resource, RESOURCE_CLIENT_INVALID_ID, UNRECOGNIZED_URI, TaskMonitor, RESOURCE_CLIENT_TASK_EXPECTED, RESOURCE_ID_OR_URI_REQUIRED, transform_list_to_dict, extract_id_from_uri, merge_resources, merge_default_values, unavailable_method\n'), ((1147, 23, 1147, 65), 'hpOneView.resources.resource.ResourceClient', 'ResourceClient', ({(1147, 38, 1147, 41): 'con', (1147, 43, 1147, 64): '"""/rest/fake/resource"""'}, {}), "(con, '/rest/fake/resource')", False, 'from hpOneView.resources.resource import ResourceClient, ResourceHelper, ResourceFileHandlerMixin, ResourceZeroBodyMixin, ResourcePatchMixin, ResourceUtilizationMixin, ResourceSchemaMixin, Resource, RESOURCE_CLIENT_INVALID_ID, UNRECOGNIZED_URI, TaskMonitor, RESOURCE_CLIENT_TASK_EXPECTED, RESOURCE_ID_OR_URI_REQUIRED, transform_list_to_dict, extract_id_from_uri, merge_resources, merge_default_values, unavailable_method\n'), ((1167, 26, 1167, 52), 'hpOneView.connection.connection', 'connection', ({(1167, 37, 1167, 46): 'self.host', (1167, 48, 1167, 51): '300'}, {}), '(self.host, 300)', False, 'from hpOneView.connection import connection\n'), ((1168, 31, 1168, 72), 'hpOneView.resources.resource.ResourceClient', 'ResourceClient', ({(1168, 46, 1168, 61): 'self.connection', (1168, 63, 1168, 71): 'self.URI'}, {}), '(self.connection, 
self.URI)', False, 'from hpOneView.resources.resource import ResourceClient, ResourceHelper, ResourceFileHandlerMixin, ResourceZeroBodyMixin, ResourcePatchMixin, ResourceUtilizationMixin, ResourceSchemaMixin, Resource, RESOURCE_CLIENT_INVALID_ID, UNRECOGNIZED_URI, TaskMonitor, RESOURCE_CLIENT_TASK_EXPECTED, RESOURCE_ID_OR_URI_REQUIRED, transform_list_to_dict, extract_id_from_uri, merge_resources, merge_default_values, unavailable_method\n'), ((1622, 26, 1622, 67), 'hpOneView.resources.resource.ResourceClient', 'ResourceClient', ({(1622, 41, 1622, 56): 'self.connection', (1622, 58, 1622, 66): 'self.URI'}, {}), '(self.connection, self.URI)', False, 'from hpOneView.resources.resource import ResourceClient, ResourceHelper, ResourceFileHandlerMixin, ResourceZeroBodyMixin, ResourcePatchMixin, ResourceUtilizationMixin, ResourceSchemaMixin, Resource, RESOURCE_CLIENT_INVALID_ID, UNRECOGNIZED_URI, TaskMonitor, RESOURCE_CLIENT_TASK_EXPECTED, RESOURCE_ID_OR_URI_REQUIRED, transform_list_to_dict, extract_id_from_uri, merge_resources, merge_default_values, unavailable_method\n'), ((1758, 26, 1758, 67), 'hpOneView.resources.resource.ResourceClient', 'ResourceClient', ({(1758, 41, 1758, 56): 'self.connection', (1758, 58, 1758, 66): 'self.URI'}, {}), '(self.connection, self.URI)', False, 'from hpOneView.resources.resource import ResourceClient, ResourceHelper, ResourceFileHandlerMixin, ResourceZeroBodyMixin, ResourcePatchMixin, ResourceUtilizationMixin, ResourceSchemaMixin, Resource, RESOURCE_CLIENT_INVALID_ID, UNRECOGNIZED_URI, TaskMonitor, RESOURCE_CLIENT_TASK_EXPECTED, RESOURCE_ID_OR_URI_REQUIRED, transform_list_to_dict, extract_id_from_uri, merge_resources, merge_default_values, unavailable_method\n'), ((1826, 26, 1826, 67), 'hpOneView.resources.resource.ResourceClient', 'ResourceClient', ({(1826, 41, 1826, 56): 'self.connection', (1826, 58, 1826, 66): 'self.URI'}, {}), '(self.connection, self.URI)', False, 'from hpOneView.resources.resource import ResourceClient, ResourceHelper, ResourceFileHandlerMixin, ResourceZeroBodyMixin, ResourcePatchMixin, ResourceUtilizationMixin, ResourceSchemaMixin, Resource, RESOURCE_CLIENT_INVALID_ID, UNRECOGNIZED_URI, TaskMonitor, RESOURCE_CLIENT_TASK_EXPECTED, RESOURCE_ID_OR_URI_REQUIRED, transform_list_to_dict, extract_id_from_uri, merge_resources, merge_default_values, unavailable_method\n'), ((1863, 26, 1863, 67), 'hpOneView.resources.resource.ResourceClient', 'ResourceClient', ({(1863, 41, 1863, 56): 'self.connection', (1863, 58, 1863, 66): 'self.URI'}, {}), '(self.connection, self.URI)', False, 'from hpOneView.resources.resource import ResourceClient, ResourceHelper, ResourceFileHandlerMixin, ResourceZeroBodyMixin, ResourcePatchMixin, ResourceUtilizationMixin, ResourceSchemaMixin, Resource, RESOURCE_CLIENT_INVALID_ID, UNRECOGNIZED_URI, TaskMonitor, RESOURCE_CLIENT_TASK_EXPECTED, RESOURCE_ID_OR_URI_REQUIRED, transform_list_to_dict, extract_id_from_uri, merge_resources, merge_default_values, unavailable_method\n'), ((1896, 26, 1896, 67), 'hpOneView.resources.resource.ResourceClient', 'ResourceClient', ({(1896, 41, 1896, 56): 'self.connection', (1896, 58, 1896, 66): 'self.URI'}, {}), '(self.connection, self.URI)', False, 'from hpOneView.resources.resource import ResourceClient, ResourceHelper, ResourceFileHandlerMixin, ResourceZeroBodyMixin, ResourcePatchMixin, ResourceUtilizationMixin, ResourceSchemaMixin, Resource, RESOURCE_CLIENT_INVALID_ID, UNRECOGNIZED_URI, TaskMonitor, RESOURCE_CLIENT_TASK_EXPECTED, RESOURCE_ID_OR_URI_REQUIRED, transform_list_to_dict, 
extract_id_from_uri, merge_resources, merge_default_values, unavailable_method\n'), ((1911, 26, 1911, 67), 'hpOneView.resources.resource.ResourceClient', 'ResourceClient', ({(1911, 41, 1911, 56): 'self.connection', (1911, 58, 1911, 66): 'self.URI'}, {}), '(self.connection, self.URI)', False, 'from hpOneView.resources.resource import ResourceClient, ResourceHelper, ResourceFileHandlerMixin, ResourceZeroBodyMixin, ResourcePatchMixin, ResourceUtilizationMixin, ResourceSchemaMixin, Resource, RESOURCE_CLIENT_INVALID_ID, UNRECOGNIZED_URI, TaskMonitor, RESOURCE_CLIENT_TASK_EXPECTED, RESOURCE_ID_OR_URI_REQUIRED, transform_list_to_dict, extract_id_from_uri, merge_resources, merge_default_values, unavailable_method\n'), ((2273, 26, 2273, 67), 'hpOneView.resources.resource.ResourceClient', 'ResourceClient', ({(2273, 41, 2273, 56): 'self.connection', (2273, 58, 2273, 66): 'self.URI'}, {}), '(self.connection, self.URI)', False, 'from hpOneView.resources.resource import ResourceClient, ResourceHelper, ResourceFileHandlerMixin, ResourceZeroBodyMixin, ResourcePatchMixin, ResourceUtilizationMixin, ResourceSchemaMixin, Resource, RESOURCE_CLIENT_INVALID_ID, UNRECOGNIZED_URI, TaskMonitor, RESOURCE_CLIENT_TASK_EXPECTED, RESOURCE_ID_OR_URI_REQUIRED, transform_list_to_dict, extract_id_from_uri, merge_resources, merge_default_values, unavailable_method\n'), ((2287, 26, 2287, 67), 'hpOneView.resources.resource.ResourceClient', 'ResourceClient', ({(2287, 41, 2287, 56): 'self.connection', (2287, 58, 2287, 66): 'self.URI'}, {}), '(self.connection, self.URI)', False, 'from hpOneView.resources.resource import ResourceClient, ResourceHelper, ResourceFileHandlerMixin, ResourceZeroBodyMixin, ResourcePatchMixin, ResourceUtilizationMixin, ResourceSchemaMixin, Resource, RESOURCE_CLIENT_INVALID_ID, UNRECOGNIZED_URI, TaskMonitor, RESOURCE_CLIENT_TASK_EXPECTED, RESOURCE_ID_OR_URI_REQUIRED, transform_list_to_dict, extract_id_from_uri, merge_resources, merge_default_values, unavailable_method\n'), ((2298, 26, 2298, 67), 'hpOneView.resources.resource.ResourceClient', 'ResourceClient', ({(2298, 41, 2298, 56): 'self.connection', (2298, 58, 2298, 66): 'self.URI'}, {}), '(self.connection, self.URI)', False, 'from hpOneView.resources.resource import ResourceClient, ResourceHelper, ResourceFileHandlerMixin, ResourceZeroBodyMixin, ResourcePatchMixin, ResourceUtilizationMixin, ResourceSchemaMixin, Resource, RESOURCE_CLIENT_INVALID_ID, UNRECOGNIZED_URI, TaskMonitor, RESOURCE_CLIENT_TASK_EXPECTED, RESOURCE_ID_OR_URI_REQUIRED, transform_list_to_dict, extract_id_from_uri, merge_resources, merge_default_values, unavailable_method\n'), ((2350, 34, 2350, 45), 'mock.Mock', 'mock.Mock', ({}, {}), '()', False, 'import mock\n'), ((2363, 29, 2363, 40), 'mock.Mock', 'mock.Mock', ({}, {}), '()', False, 'import mock\n'), ((2377, 33, 2377, 46), 'io.StringIO', 'io.StringIO', ({}, {}), '()', False, 'import io\n'), ((2373, 16, 2373, 36), 'tests.test_utils.mock_builtin', 'mock_builtin', ({(2373, 29, 2373, 35): '"""open"""'}, {}), "('open')", False, 'from tests.test_utils import mock_builtin\n'), ((2388, 20, 2388, 33), 'io.StringIO', 'io.StringIO', ({}, {}), '()', False, 'import io\n'), ((2384, 16, 2384, 36), 'tests.test_utils.mock_builtin', 'mock_builtin', ({(2384, 29, 2384, 35): '"""open"""'}, {}), "('open')", False, 'from tests.test_utils import mock_builtin\n'), ((2402, 33, 2402, 46), 'io.StringIO', 'io.StringIO', ({}, {}), '()', False, 'import io\n'), ((2397, 16, 2397, 36), 'tests.test_utils.mock_builtin', 'mock_builtin', ({(2397, 29, 2397, 35): 
'"""open"""'}, {}), "('open')", False, 'from tests.test_utils import mock_builtin\n'), ((2414, 33, 2414, 46), 'io.StringIO', 'io.StringIO', ({}, {}), '()', False, 'import io\n'), ((2409, 16, 2409, 36), 'tests.test_utils.mock_builtin', 'mock_builtin', ({(2409, 29, 2409, 35): '"""open"""'}, {}), "('open')", False, 'from tests.test_utils import mock_builtin\n'), ((2423, 27, 2423, 60), 'hpOneView.resources.resource.transform_list_to_dict', 'transform_list_to_dict', (), '', False, 'from hpOneView.resources.resource import ResourceClient, ResourceHelper, ResourceFileHandlerMixin, ResourceZeroBodyMixin, ResourcePatchMixin, ResourceUtilizationMixin, ResourceSchemaMixin, Resource, RESOURCE_CLIENT_INVALID_ID, UNRECOGNIZED_URI, TaskMonitor, RESOURCE_CLIENT_TASK_EXPECTED, RESOURCE_ID_OR_URI_REQUIRED, transform_list_to_dict, extract_id_from_uri, merge_resources, merge_default_values, unavailable_method\n'), ((2435, 23, 2435, 47), 'hpOneView.resources.resource.extract_id_from_uri', 'extract_id_from_uri', ({(2435, 43, 2435, 46): 'uri'}, {}), '(uri)', False, 'from hpOneView.resources.resource import ResourceClient, ResourceHelper, ResourceFileHandlerMixin, ResourceZeroBodyMixin, ResourcePatchMixin, ResourceUtilizationMixin, ResourceSchemaMixin, Resource, RESOURCE_CLIENT_INVALID_ID, UNRECOGNIZED_URI, TaskMonitor, RESOURCE_CLIENT_TASK_EXPECTED, RESOURCE_ID_OR_URI_REQUIRED, transform_list_to_dict, extract_id_from_uri, merge_resources, merge_default_values, unavailable_method\n'), ((2440, 23, 2440, 47), 'hpOneView.resources.resource.extract_id_from_uri', 'extract_id_from_uri', ({(2440, 43, 2440, 46): 'uri'}, {}), '(uri)', False, 'from hpOneView.resources.resource import ResourceClient, ResourceHelper, ResourceFileHandlerMixin, ResourceZeroBodyMixin, ResourcePatchMixin, ResourceUtilizationMixin, ResourceSchemaMixin, Resource, RESOURCE_CLIENT_INVALID_ID, UNRECOGNIZED_URI, TaskMonitor, RESOURCE_CLIENT_TASK_EXPECTED, RESOURCE_ID_OR_URI_REQUIRED, transform_list_to_dict, extract_id_from_uri, merge_resources, merge_default_values, unavailable_method\n'), ((2445, 23, 2445, 47), 'hpOneView.resources.resource.extract_id_from_uri', 'extract_id_from_uri', ({(2445, 43, 2445, 46): 'uri'}, {}), '(uri)', False, 'from hpOneView.resources.resource import ResourceClient, ResourceHelper, ResourceFileHandlerMixin, ResourceZeroBodyMixin, ResourcePatchMixin, ResourceUtilizationMixin, ResourceSchemaMixin, Resource, RESOURCE_CLIENT_INVALID_ID, UNRECOGNIZED_URI, TaskMonitor, RESOURCE_CLIENT_TASK_EXPECTED, RESOURCE_ID_OR_URI_REQUIRED, transform_list_to_dict, extract_id_from_uri, merge_resources, merge_default_values, unavailable_method\n'), ((2451, 23, 2451, 47), 'hpOneView.resources.resource.extract_id_from_uri', 'extract_id_from_uri', ({(2451, 43, 2451, 46): 'uri'}, {}), '(uri)', False, 'from hpOneView.resources.resource import ResourceClient, ResourceHelper, ResourceFileHandlerMixin, ResourceZeroBodyMixin, ResourcePatchMixin, ResourceUtilizationMixin, ResourceSchemaMixin, Resource, RESOURCE_CLIENT_INVALID_ID, UNRECOGNIZED_URI, TaskMonitor, RESOURCE_CLIENT_TASK_EXPECTED, RESOURCE_ID_OR_URI_REQUIRED, transform_list_to_dict, extract_id_from_uri, merge_resources, merge_default_values, unavailable_method\n'), ((92, 49, 92, 60), 'mock.Mock', 'mock.Mock', ({}, {}), '()', False, 'import mock\n'), ((101, 49, 101, 60), 'mock.Mock', 'mock.Mock', ({}, {}), '()', False, 'import mock\n'), ((113, 54, 113, 65), 'mock.Mock', 'mock.Mock', ({}, {}), '()', False, 'import mock\n'), ((124, 49, 124, 60), 'mock.Mock', 'mock.Mock', ({}, {}), '()', False, 
'import mock\n'), ((138, 54, 138, 65), 'mock.Mock', 'mock.Mock', ({}, {}), '()', False, 'import mock\n'), ((630, 26, 630, 43), 'mock.call', 'call', ({(630, 31, 630, 42): 'uri_list[0]'}, {}), '(uri_list[0])', False, 'from mock import call\n'), ((630, 45, 630, 62), 'mock.call', 'call', ({(630, 50, 630, 61): 'uri_list[1]'}, {}), '(uri_list[1])', False, 'from mock import call\n'), ((630, 64, 630, 81), 'mock.call', 'call', ({(630, 69, 630, 80): 'uri_list[2]'}, {}), '(uri_list[2])', False, 'from mock import call\n'), ((647, 26, 647, 43), 'mock.call', 'call', ({(647, 31, 647, 42): 'uri_list[0]'}, {}), '(uri_list[0])', False, 'from mock import call\n'), ((647, 45, 647, 62), 'mock.call', 'call', ({(647, 50, 647, 61): 'uri_list[1]'}, {}), '(uri_list[1])', False, 'from mock import call\n'), ((647, 64, 647, 81), 'mock.call', 'call', ({(647, 69, 647, 80): 'uri_list[2]'}, {}), '(uri_list[2])', False, 'from mock import call\n'), ((1242, 26, 1242, 43), 'mock.call', 'call', ({(1242, 31, 1242, 42): 'uri_list[0]'}, {}), '(uri_list[0])', False, 'from mock import call\n'), ((1242, 45, 1242, 62), 'mock.call', 'call', ({(1242, 50, 1242, 61): 'uri_list[1]'}, {}), '(uri_list[1])', False, 'from mock import call\n'), ((1242, 64, 1242, 81), 'mock.call', 'call', ({(1242, 69, 1242, 80): 'uri_list[2]'}, {}), '(uri_list[2])', False, 'from mock import call\n'), ((1259, 26, 1259, 43), 'mock.call', 'call', ({(1259, 31, 1259, 42): 'uri_list[0]'}, {}), '(uri_list[0])', False, 'from mock import call\n'), ((1259, 45, 1259, 62), 'mock.call', 'call', ({(1259, 50, 1259, 61): 'uri_list[1]'}, {}), '(uri_list[1])', False, 'from mock import call\n'), ((1259, 64, 1259, 81), 'mock.call', 'call', ({(1259, 69, 1259, 80): 'uri_list[2]'}, {}), '(uri_list[2])', False, 'from mock import call\n'), ((2307, 49, 2307, 60), 'mock.Mock', 'mock.Mock', ({}, {}), '()', False, 'import mock\n'), ((2316, 49, 2316, 60), 'mock.Mock', 'mock.Mock', ({}, {}), '()', False, 'import mock\n'), ((2328, 54, 2328, 65), 'mock.Mock', 'mock.Mock', ({}, {}), '()', False, 'import mock\n'), ((2339, 49, 2339, 60), 'mock.Mock', 'mock.Mock', ({}, {}), '()', False, 'import mock\n'), ((2353, 54, 2353, 65), 'mock.Mock', 'mock.Mock', ({}, {}), '()', False, 'import mock\n')] |
vanadium23/workalendar | workalendar/usa/colorado.py | 4c67b5a7900fa56d7a93b767c6cbd8f1cc6b70a7 | # -*- coding: utf-8 -*-
from __future__ import (absolute_import, division, print_function,
unicode_literals)
from .core import UnitedStates
class Colorado(UnitedStates):
"""Colorado"""
    # Colorado observes only the federal holidays.
# NOTE: Cesar Chavez Day is an optional holiday
| [] |
thierrypin/gei-pool | dataloaders/augmentation.py | 0a9e79b01148735f0e975c50d2476e41ba20af4f | #!/usr/bin/env python
# -*- coding: utf-8 -*-
import random
import numpy as np
# Generic data augmentation
class Augmenter:
""" Generic data augmentation class with chained operations
"""
    def __init__(self, ops=None):
        # Avoid a shared mutable default argument: each instance gets its own list.
        if ops is None:
            ops = []
        if not isinstance(ops, list):
            raise TypeError("ops must be a list of functions")
        self.ops = ops
def add(self, op):
self.ops.append(op)
def augment(self, img):
aug = img.copy()
for op in self.ops:
aug = op(aug)
return aug
def __call__(self, img):
return self.augment(img)
##########
# Images #
##########
def horizontal_flip(p=0.5):
def fc(img):
if random.random() < p:
return img[..., ::-1]
else:
return img
return fc
def vertical_flip(p=0.5):
def fc(img):
if random.random() < p:
return img[..., ::-1, :]
else:
return img
return fc
def gaussian_noise(p=0.5, mean=0, sigma=0.02):
def fc(img):
if random.random() < p:
gauss = np.random.normal(mean, sigma, img.shape).astype(np.float32)
return img + gauss
else:
return img
return fc
def black_vstripe(p=0.5, size=10):
def fc(img):
if random.random() < p:
            j = int(random.random() * (img.shape[-1] - size))  # width is the last axis, as in the _vid variant
img[..., j:j+size] = 0
return img
else:
return img
return fc
def black_hstripe(p=0.5, size=10):
def fc(img):
if random.random() < p:
            j = int(random.random() * (img.shape[-2] - size))  # height is the second-to-last axis, as in the _vid variant
img[..., j:j+size, :] = 0
return img
else:
return img
return fc
def default_augmenter(p=0.5, strip_size=3, mean=0, sigma=0.02):
"""Default data augmentation with horizontal flip, vertical flip, gaussian noise, black hstripe, and black vstripe.
Returns:
Augmenter object. Use as: aug.augment(img)
"""
print("Using default image augmenter")
return Augmenter([ horizontal_flip(p), gaussian_noise(p, mean, sigma), black_hstripe(p, size=strip_size), black_vstripe(p, size=strip_size) ])
##########
# Videos #
##########
def horizontal_flip_vid(p=0.5):
def fc(vid):
if random.random() < p:
return vid[..., ::-1]
else:
return vid
return fc
def black_vstripe_vid(p=0.5, size=10):
def fc(batch):
if random.random() < p:
j = int(random.random() * (batch.shape[-1]-size))
batch[..., j:j+size] = 0
return batch
else:
return batch
return fc
def black_hstripe_vid(p=0.5, size=10):
def fc(batch):
if random.random() < p:
j = int(random.random() * (batch.shape[-2]-size))
batch[..., j:j+size, :] = 0
return batch
else:
return batch
return fc
def default_augmenter_vid(p=0.5, strip_size=3, mean=0, sigma=0.02):
"""Default data augmentation with horizontal flip, gaussian noise, black hstripe, and black vstripe.
Returns:
        Augmenter object. Use as: aug.augment(vid)
"""
return Augmenter([ horizontal_flip_vid(p), gaussian_noise(p, mean, sigma), black_hstripe_vid(p, size=strip_size), black_vstripe_vid(p, size=strip_size) ])
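

# Illustrative usage (added sketch, not part of the original module): run the
# default image pipeline once on a random grayscale frame. The 64x64 shape and
# p=0.5 are assumptions chosen only for this demo.
if __name__ == "__main__":
    demo_img = np.random.rand(64, 64).astype(np.float32)
    aug = default_augmenter(p=0.5, strip_size=3)
    print("augmented shape:", aug(demo_img).shape)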
| [((36, 11, 36, 26), 'random.random', 'random.random', ({}, {}), '()', False, 'import random\n'), ((44, 11, 44, 26), 'random.random', 'random.random', ({}, {}), '()', False, 'import random\n'), ((52, 11, 52, 26), 'random.random', 'random.random', ({}, {}), '()', False, 'import random\n'), ((61, 11, 61, 26), 'random.random', 'random.random', ({}, {}), '()', False, 'import random\n'), ((71, 11, 71, 26), 'random.random', 'random.random', ({}, {}), '()', False, 'import random\n'), ((96, 11, 96, 26), 'random.random', 'random.random', ({}, {}), '()', False, 'import random\n'), ((104, 11, 104, 26), 'random.random', 'random.random', ({}, {}), '()', False, 'import random\n'), ((114, 11, 114, 26), 'random.random', 'random.random', ({}, {}), '()', False, 'import random\n'), ((53, 20, 53, 60), 'numpy.random.normal', 'np.random.normal', ({(53, 37, 53, 41): 'mean', (53, 43, 53, 48): 'sigma', (53, 50, 53, 59): 'img.shape'}, {}), '(mean, sigma, img.shape)', True, 'import numpy as np\n'), ((62, 20, 62, 35), 'random.random', 'random.random', ({}, {}), '()', False, 'import random\n'), ((72, 20, 72, 35), 'random.random', 'random.random', ({}, {}), '()', False, 'import random\n'), ((105, 20, 105, 35), 'random.random', 'random.random', ({}, {}), '()', False, 'import random\n'), ((115, 20, 115, 35), 'random.random', 'random.random', ({}, {}), '()', False, 'import random\n')] |
CzechInvest/ciis | cigeo/admin.py | c6102598f564a717472e5e31e7eb894bba2c8104 | from django.contrib import admin
from django.contrib.gis import geos
from leaflet.admin import LeafletGeoAdmin, LeafletGeoAdminMixin
from .models import Lau1
from .models import Nuts3
from .models import Airport
from .models import Road
from .models import PublicTransportStop
from .models import RailwayStation
from django.urls import reverse
from django.utils.translation import ugettext_lazy as _
import nested_admin
import uuid
import json
class AirportAdmin(LeafletGeoAdmin):
default_zoom = 7
default_lon = 1730000
default_lat = 6430000
#readonly_fields = ("code", "name",)
class RoadAdmin(LeafletGeoAdmin):
default_zoom = 7
default_lon = 1730000
default_lat = 6430000
#readonly_fields = ("code", "name",)
class RailwayStationAdmin(LeafletGeoAdmin):
default_zoom = 7
default_lon = 1730000
default_lat = 6430000
#readonly_fields = ("code", "name",)
class PublicTransportStopAdmin(LeafletGeoAdmin):
default_zoom = 7
default_lon = 1730000
default_lat = 6430000
#readonly_fields = ("code", "name",)
class LAU1Admin(LeafletGeoAdmin):
default_zoom = 7
default_lon = 1730000
default_lat = 6430000
#readonly_fields = ("code", "name",)
class NUTS3Admin(LeafletGeoAdmin):
default_zoom = 7
default_lon = 1730000
default_lat = 6430000
#readonly_fields = ("code", "name",)
class NUTS3AdminInline(LeafletGeoAdminMixin, admin.StackedInline):
model = Nuts3
class LAU1AdminInline(LeafletGeoAdminMixin, admin.StackedInline):
model = Lau1
class NUTS3Filter(admin.SimpleListFilter):
"""Filter for admin interface of NUTS3 regions (Kraje)
"""
title = _('NUTS3 regions')
parameter_name = 'nuts3#'
def lookups(self, request, model_admin):
nuts3 = Nuts3.objects.all()
return (
(obj.id, obj.name) for obj in nuts3
)
def queryset(self, request, queryset):
val = self.value()
if val:
nuts3 = Nuts3.objects.get(pk=val)
results = queryset.filter(
location__geometry__intersects=nuts3.geometry)
else:
results = queryset
return results
class ArealFieldAdmin(nested_admin.NestedModelAdmin):
geojson_attributes = []
def get_place(self, obj):
if hasattr(obj.location, "address") and \
obj.location.address is not None:
return obj.location.address.city
else:
return ", ".join(
                [str(region) for region in Nuts3.objects.filter(
                    geometry__intersects=obj.location.geometry)])
def get_search_results(self, request, queryset, search_term):
"""Add NUTS3 (by name) search and area size search (using `<>` operator)
"""
result, use_distinct = super(
ArealFieldAdmin, self).get_search_results(
request, queryset, search_term)
if search_term:
if len(result) == 0 or len(result) == len(queryset):
result = self._search_lay1_nuts3_by_name(
queryset, search_term)
if len(result) == 0 or len(result) == len(queryset):
result = self._search_area(queryset, search_term)
return (result, use_distinct)
def _search_lay1_nuts3_by_name(self, queryset, search_term):
"""Search NUTS3 (kraje) and LAU1 (okresy) region according to name
"""
filtered = queryset.none()
for cls in (Lau1, Nuts3):
objs = cls.objects.filter(name__startswith=search_term)
for o in objs:
objects = queryset.filter(
location__geometry__intersects=o.geometry)
filtered |= objects
return filtered
def _search_area(self, queryset, search_term):
"""Search all features, where MIN < area.total < MAX
"""
filtered = queryset.none()
if search_term.find("<>") > -1:
area_min, area_max = [float(x) for x in search_term.split("<>")]
filtered = queryset.filter(
areal__area__total__gte=area_min,
areal__area__total__lte=area_max)
return filtered
def changelist_view(self, request, extra_context=None):
"""Adjust change list view
add GeoJSON encoded data for the queryset
"""
extra_context = extra_context or {}
response = super().changelist_view(
request, extra_context=extra_context,
)
if hasattr(response, "context_data"):
filtered_query_set = response.context_data["cl"].queryset
extra_context['objects_data'] = \
json.dumps(self.as_geojson(filtered_query_set))
response.context_data.update(extra_context)
return response
def as_geojson(self, queryset):
if self.geojson_attributes:
attributes = self.geojson_attributes
else:
attributes = []
data = {
"type": "FeatureCollection",
"features": []
}
for obj in queryset:
geom = None
if hasattr(obj, "location_set"):
multipoint = geos.MultiPoint(
[loc.address.coordinates for loc in obj.location_set.all()])
geom = multipoint.centroid
elif hasattr(obj, "location"):
geom = obj.location.geometry.centroid
elif hasattr(obj, "geom"):
geom = obj.geom
elif hasattr(obj, "address"):
geom = obj.address.coordinates
if geom:
title = None
if hasattr(obj, "title"):
title = obj.title
elif hasattr(obj, "name"):
title = obj.name
if type(obj.pk) == uuid.UUID:
id = str(obj.pk)
else:
id = obj.pk
feature = {
"type": "Feature",
"properties": {
"name": title,
"object_url":
reverse('admin:{}_{}_change'.format(
obj._meta.app_label,
obj._meta.model_name), args=(obj.pk,)),
},
"geometry": json.loads(geom.json),
"id": id
}
for attribute in attributes:
if hasattr(obj, attribute):
value = getattr(obj, attribute.__str__())
if type(value) == uuid.UUID:
feature[attribute] = str(value)
else:
feature[attribute] = value
data["features"].append(feature)
return data
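
    # Shape of one emitted feature, for reference (illustrative values only):
    # {"type": "Feature",
    #  "properties": {"name": "...", "object_url": "/admin/<app>/<model>/<pk>/change/"},
    #  "geometry": {"type": "Point", "coordinates": [...]},
    #  "id": <pk>}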
# Register your models here.
admin.site.register(Lau1, LAU1Admin)
admin.site.register(Nuts3, NUTS3Admin)
admin.site.register(Road, RoadAdmin)
admin.site.register(PublicTransportStop, PublicTransportStopAdmin)
admin.site.register(RailwayStation, RailwayStationAdmin)
admin.site.register(Airport, AirportAdmin)
| [((229, 0, 229, 36), 'django.contrib.admin.site.register', 'admin.site.register', ({(229, 20, 229, 24): 'Lau1', (229, 26, 229, 35): 'LAU1Admin'}, {}), '(Lau1, LAU1Admin)', False, 'from django.contrib import admin\n'), ((230, 0, 230, 38), 'django.contrib.admin.site.register', 'admin.site.register', ({(230, 20, 230, 25): 'Nuts3', (230, 27, 230, 37): 'NUTS3Admin'}, {}), '(Nuts3, NUTS3Admin)', False, 'from django.contrib import admin\n'), ((231, 0, 231, 36), 'django.contrib.admin.site.register', 'admin.site.register', ({(231, 20, 231, 24): 'Road', (231, 26, 231, 35): 'RoadAdmin'}, {}), '(Road, RoadAdmin)', False, 'from django.contrib import admin\n'), ((232, 0, 232, 66), 'django.contrib.admin.site.register', 'admin.site.register', ({(232, 20, 232, 39): 'PublicTransportStop', (232, 41, 232, 65): 'PublicTransportStopAdmin'}, {}), '(PublicTransportStop, PublicTransportStopAdmin)', False, 'from django.contrib import admin\n'), ((233, 0, 233, 56), 'django.contrib.admin.site.register', 'admin.site.register', ({(233, 20, 233, 34): 'RailwayStation', (233, 36, 233, 55): 'RailwayStationAdmin'}, {}), '(RailwayStation, RailwayStationAdmin)', False, 'from django.contrib import admin\n'), ((234, 0, 234, 42), 'django.contrib.admin.site.register', 'admin.site.register', ({(234, 20, 234, 27): 'Airport', (234, 29, 234, 41): 'AirportAdmin'}, {}), '(Airport, AirportAdmin)', False, 'from django.contrib import admin\n'), ((68, 12, 68, 30), 'django.utils.translation.ugettext_lazy', '_', ({(68, 14, 68, 29): '"""NUTS3 regions"""'}, {}), "('NUTS3 regions')", True, 'from django.utils.translation import ugettext_lazy as _\n'), ((209, 32, 209, 53), 'json.loads', 'json.loads', ({(209, 43, 209, 52): 'geom.json'}, {}), '(geom.json)', False, 'import json\n')] |
RafaelAPB/umbra | umbra/monitor/main.py | cf075bbe73e46540e9edee25f9ec3d0828620d5f | import logging
import json
import asyncio
from google.protobuf import json_format
from umbra.common.protobuf.umbra_grpc import MonitorBase
from umbra.common.protobuf.umbra_pb2 import Instruction, Snapshot
from umbra.monitor.tools import Tools
logger = logging.getLogger(__name__)
logging.getLogger("hpack").setLevel(logging.WARNING)
class Monitor(MonitorBase):
def __init__(self, info):
self.tools = Tools()
async def Listen(self, stream):
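        """Receive an Instruction message, dispatch it to Tools, and reply with a Snapshot."""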
logging.debug("Instruction Received")
instruction: Instruction = await stream.recv_message()
instruction_dict = json_format.MessageToDict(instruction, preserving_proto_field_name=True)
snapshot_dict = await self.tools.handle(instruction_dict)
snapshot = json_format.ParseDict(snapshot_dict, Snapshot())
await stream.send_message(snapshot)
| [((12, 9, 12, 36), 'logging.getLogger', 'logging.getLogger', ({(12, 27, 12, 35): '__name__'}, {}), '(__name__)', False, 'import logging\n'), ((13, 0, 13, 26), 'logging.getLogger', 'logging.getLogger', ({(13, 18, 13, 25): '"""hpack"""'}, {}), "('hpack')", False, 'import logging\n'), ((18, 21, 18, 28), 'umbra.monitor.tools.Tools', 'Tools', ({}, {}), '()', False, 'from umbra.monitor.tools import Tools\n'), ((21, 8, 21, 45), 'logging.debug', 'logging.debug', ({(21, 22, 21, 44): '"""Instruction Received"""'}, {}), "('Instruction Received')", False, 'import logging\n'), ((23, 27, 23, 99), 'google.protobuf.json_format.MessageToDict', 'json_format.MessageToDict', (), '', False, 'from google.protobuf import json_format\n'), ((25, 56, 25, 66), 'umbra.common.protobuf.umbra_pb2.Snapshot', 'Snapshot', ({}, {}), '()', False, 'from umbra.common.protobuf.umbra_pb2 import Instruction, Snapshot\n')] |
dwagon/pycs | pycs/spells/hunters_mark.py | 4d02acbf380526d3bf0380f6bb8b757a827024b8 | """https://www.dndbeyond.com/spells/hunters-mark"""
from unittest.mock import patch
import dice
from pycs.constant import ActionCategory
from pycs.constant import SpellType
from pycs.creature import Creature
from pycs.effect import Effect
from pycs.gear import Shortbow
from pycs.spell import SpellAction
from pycs.spells.spelltest import SpellTest
##############################################################################
##############################################################################
##############################################################################
class HuntersMark(SpellAction):
"""You choose a creature you can see within range and mystically
mark it as your quarry. Until the spell ends, you deal an extra 1d6
damage to the target whenever you hit it with a weapon attack, and
you have advantage on any Wisdom (Perception) or Wisdom (Survival)
check you make to find it. If the target drops to 0 hit points
before this spell ends, you can use a bonus action on a subsequent
turn of yours to mark a new creature.
At Higher Levels. When you cast this spell using a spell slot of
3rd or 4th level, you can maintain your concentration on the spell
for up to 8 hours. When you use a spell slot of 5th level or higher,
you can maintain your concentration on the spell for up to 24
hours."""
##########################################################################
def __init__(self, **kwargs):
name = "Hunters Mark"
kwargs.update(
{
"category": ActionCategory.BONUS,
"concentration": SpellType.CONCENTRATION,
"level": 1,
"reach": 90,
"type": SpellType.BUFF,
}
)
super().__init__(name, **kwargs)
self._victim = None
##########################################################################
def heuristic(self):
"""Should we do the spell"""
if self.pick_target():
return 6
print("No enemy in range")
return 0
##########################################################################
def pick_target(self):
"""Who should we do the spell to"""
for enemy in self.owner.pick_closest_enemy():
if self.owner.distance(enemy) > self.range()[0]:
continue
if enemy.has_effect("Hunters Mark"):
continue
self.target = enemy
return enemy
return None
##########################################################################
def cast(self):
"""Do the spell"""
self._victim = self.target
self._victim.add_effect(HuntersMarkEffect(caster=self.owner))
print(f"Cast Hunters Mark on {self._victim}")
##########################################################################
def end_concentration(self):
"""What happens when we stop concentrating"""
if self._victim:
print(f"Removing Hunters Mark from {self._victim}")
self._victim.remove_effect("Hunters Mark")
self._victim = None
##############################################################################
##############################################################################
##############################################################################
class HuntersMarkEffect(Effect):
"""Hunters Mark Effect"""
##########################################################################
def __init__(self, **kwargs):
"""Initialise"""
super().__init__("Hunters Mark", **kwargs)
##########################################################################
def hook_target_additional_damage(self, _, source, target):
"""More damage"""
if source == self.caster:
return ("1d6", 0, None)
return ("", 0, None)
##############################################################################
##############################################################################
##############################################################################
class TestHuntersMark(SpellTest):
"""Test Spell"""
##########################################################################
def setUp(self):
"""test setup"""
super().setUp()
self.caster.add_action(HuntersMark())
##########################################################################
def test_cast(self):
"""test casting"""
self.caster.options_this_turn = [ActionCategory.BONUS]
self.assertFalse(self.enemy.has_effect("Hunters Mark"))
self.caster.do_stuff(categ=ActionCategory.BONUS, moveto=False)
self.assertTrue(self.enemy.has_effect("Hunters Mark"))
##########################################################################
def test_effect(self):
"""Test the effect of casting the spell"""
print(self.caster.arena)
self.caster.moves = 99
self.caster.options_this_turn = [ActionCategory.BONUS, ActionCategory.ACTION]
self.caster.do_stuff(categ=ActionCategory.BONUS, moveto=True)
self.assertTrue(self.enemy.has_effect("Hunters Mark"))
self.caster.add_gear(Shortbow())
self.assertEqual(len(self.enemy.damage_this_turn), 0)
with patch.object(Creature, "rolld20") as mock:
mock.return_value = 18
with patch.object(dice, "roll") as mock_dice:
mock_dice.return_value = 5
self.caster.do_stuff(categ=ActionCategory.ACTION, moveto=True)
print(f"{self.enemy.damage_this_turn=}")
self.assertEqual(len(self.enemy.damage_this_turn), 2)
##########################################################################
def test_removal(self):
"""Test the effect gets removed"""
self.caster.options_this_turn = [ActionCategory.BONUS]
self.caster.do_stuff(categ=ActionCategory.BONUS, moveto=False)
self.assertTrue(self.enemy.has_effect("Hunters Mark"))
self.caster.remove_concentration()
self.assertFalse(self.enemy.has_effect("Hunters Mark"))
# EOF
| [((130, 29, 130, 39), 'pycs.gear.Shortbow', 'Shortbow', ({}, {}), '()', False, 'from pycs.gear import Shortbow\n'), ((132, 13, 132, 46), 'unittest.mock.patch.object', 'patch.object', ({(132, 26, 132, 34): 'Creature', (132, 36, 132, 45): '"""rolld20"""'}, {}), "(Creature, 'rolld20')", False, 'from unittest.mock import patch\n'), ((134, 17, 134, 43), 'unittest.mock.patch.object', 'patch.object', ({(134, 30, 134, 34): 'dice', (134, 36, 134, 42): '"""roll"""'}, {}), "(dice, 'roll')", False, 'from unittest.mock import patch\n')] |
armandok/pySLAM-D | utilities.py | ef7398806e021885b29702adf55acbedaf544ce6 | import numpy as np
def rot_to_angle(rot):
    # Clip for numerical safety: rounding can push the cosine just outside [-1, 1].
    return np.arccos(np.clip(0.5*np.trace(rot) - 0.5, -1.0, 1.0))
def rot_to_heading(rot):
    # Calculate the heading (yaw) angle of the rotation matrix about the y-axis.
new_rot = rot[0:3:2, 0:3:2] # remove the mid row and column corresponding to the y-axis
new_rot = new_rot/np.linalg.det(new_rot)
return np.arctan2(new_rot[1, 0], new_rot[0, 0])
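

# Illustrative sanity check (added sketch): for a pure rotation about the
# y-axis the heading magnitude should equal the rotation angle; pi/3 is an
# arbitrary choice for this demo.
if __name__ == "__main__":
    theta = np.pi / 3
    rot_y = np.array([[np.cos(theta), 0.0, np.sin(theta)],
                      [0.0, 1.0, 0.0],
                      [-np.sin(theta), 0.0, np.cos(theta)]])
    print(rot_to_angle(rot_y), rot_to_heading(rot_y))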
| [((12, 11, 12, 51), 'numpy.arctan2', 'np.arctan2', ({(12, 22, 12, 35): 'new_rot[1, 0]', (12, 37, 12, 50): 'new_rot[0, 0]'}, {}), '(new_rot[1, 0], new_rot[0, 0])', True, 'import numpy as np\n'), ((11, 22, 11, 44), 'numpy.linalg.det', 'np.linalg.det', ({(11, 36, 11, 43): 'new_rot'}, {}), '(new_rot)', True, 'import numpy as np\n'), ((5, 25, 5, 38), 'numpy.trace', 'np.trace', ({(5, 34, 5, 37): 'rot'}, {}), '(rot)', True, 'import numpy as np\n')] |
kyungjaelee/robosuite | robosuite/models/grippers/__init__.py | 0d73fcca9ed8e638632f4bd7b0f1b8ebf4640fb1 | from .gripper_model import GripperModel
from .gripper_factory import gripper_factory
from .gripper_tester import GripperTester
from .panda_gripper import PandaGripper
from .rethink_gripper import RethinkGripper
from .robotiq_85_gripper import Robotiq85Gripper
from .robotiq_three_finger_gripper import RobotiqThreeFingerGripper, RobotiqThreeFingerDexterousGripper
from .jaco_three_finger_gripper import JacoThreeFingerGripper, JacoThreeFingerDexterousGripper
from .robotiq_140_gripper import Robotiq140Gripper
from .wiping_gripper import WipingGripper
from .null_gripper import NullGripper
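
# Map gripper type names (None means "no gripper") to their model classes.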
GRIPPER_MAPPING = {
"RethinkGripper": RethinkGripper,
"PandaGripper": PandaGripper,
"JacoThreeFingerGripper": JacoThreeFingerGripper,
"JacoThreeFingerDexterousGripper": JacoThreeFingerDexterousGripper,
"WipingGripper": WipingGripper,
"Robotiq85Gripper": Robotiq85Gripper,
"Robotiq140Gripper": Robotiq140Gripper,
"RobotiqThreeFingerGripper": RobotiqThreeFingerGripper,
"RobotiqThreeFingerDexterousGripper": RobotiqThreeFingerDexterousGripper,
None: NullGripper,
}
ALL_GRIPPERS = GRIPPER_MAPPING.keys()
| [] |
Idein/chainer-hand-pose | src/pose/visualizations/visualizations.py | 45c7b629a74bf13da8cc9b47d0ded7099c139e9b | import logging
logger = logging.getLogger(__name__)
import random
import chainercv
import numpy as np
from matplotlib import pyplot as plt
from mpl_toolkits.mplot3d import Axes3D # NOQA
from pose.hand_dataset.geometry_utils import normalize_joint_zyx
from pose.hand_dataset.image_utils import normalize_depth
# Decimal Code (R,G,B)
BASE_COLOR = {
"RED": (255, 0, 0),
"GREEN": (0, 255, 0),
"BLUE": (0, 0, 255),
"YELLOW": (255, 255, 0),
"CYAN": (0, 255, 255),
"MAGENTA": (255, 0, 255),
}
def vis_image(img, ax=None):
"""
extend chainercv.visualizations.vis_image
"""
C, H, W = img.shape
if C == 1:
if ax is None:
fig = plt.figure()
ax = fig.add_subplot(1, 1, 1)
        # remove the channel dimension
ax.imshow(img.squeeze())
else:
ax = chainercv.visualizations.vis_image(img, ax)
return ax
def preprocess(point, ax, img):
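    """Normalize `point` to shape (n_inst, K, N), where N is 2 for (y, x)
    points or 3 for (z, y, x); create or reuse a matplotlib axis (3D when
    N == 3) and return the image height/width when an image is given."""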
input_point = np.asarray(point)
if input_point.ndim == 2:
input_point = np.expand_dims(point, axis=0)
H, W = None, None
if ax is None:
fig = plt.figure()
if input_point.shape[-1] == 3:
ax = fig.add_subplot(1, 1, 1, projection="3d")
else:
ax = fig.add_subplot(1, 1, 1)
if img is not None:
ax = vis_image(img, ax=ax)
_, H, W = img.shape
return input_point, ax, H, W
def vis_point(point, img=None, color=None, ax=None):
"""
Visualize points in an image, customized to our purpose.
Base implementation is taken from chainercv.visualizations.vis_image
"""
point, ax, H, W = preprocess(point, ax, img)
n_inst = len(point)
c = np.asarray(color) / 255. if color is not None else None
for i in range(n_inst):
        # `point[i]` has shape (K, N); each point is (y, x) or (z, y, x).
        pts = point[i].transpose()  # (K, N) -> (N, K)
# resort coordinate order : yx -> xy or zyx -> xyz
pts = pts[::-1]
ax.scatter(*pts, c=c)
if W is not None:
ax.set_xlim(left=0, right=W)
if H is not None:
ax.set_ylim(bottom=H - 1, top=0)
return ax
def vis_edge(point, indices, img=None, color=None, ax=None):
"""
Visualize edges in an image
"""
point, ax, H, W = preprocess(point, ax, img)
n_inst = len(point)
if color is not None:
color = np.asarray(color) / 255.
else:
color = [None] * len(indices)
for i in range(n_inst):
# note that the shape of `point[i]` is (K,N) and the format of one is (y, x) or (z,y,x).
pts = point[i]
for ((s, t), c) in zip(indices, color):
# Select point which consists edge. It is a pair or point (start, target).
# Note that [::-1] does resort coordinate order: yx -> xy or zyx -> xyz
edge = pts[[s, t]].transpose()
edge = edge[::-1]
ax.plot(*edge, c=c)
if W is not None:
ax.set_xlim(left=0, right=W)
if H is not None:
ax.set_ylim(bottom=H - 1, top=0)
return ax
def vis_pose(point, indices, img=None, point_color=None, edge_color=None, ax=None):
ax = vis_point(point, img=img, color=point_color, ax=ax)
    vis_edge(point, indices, img=img, color=edge_color, ax=ax)
    return ax
def visualize_both(dataset, keypoint_names, edges, color_map, normalize=False):
idx = random.randint(0, len(dataset) - 1)
logger.info("get example")
example = dataset.get_example(idx)
logger.info("Done get example")
fig = plt.figure(figsize=(8, 8))
ax1 = fig.add_subplot(221)
ax2 = fig.add_subplot(222)
ax3 = fig.add_subplot(223, projection="3d")
ax4 = fig.add_subplot(224, projection="3d")
color = [color_map[k] for k in keypoint_names]
edge_color = [color_map[s, t] for s, t in edges]
depth = example["depth"].astype(np.float32)
depth_joint = example["depth_joint"]
depth_camera = example["depth_camera"]
depth_vu, depth_z = depth_camera.zyx2vu(depth_joint, return_z=True)
z_size = example["param"]["z_size"]
if normalize:
depth = normalize_depth(depth, z_com=depth_z.mean(), z_size=z_size)
depth_joint = normalize_joint_zyx(depth_joint, depth_camera, z_size)
rgb = example["rgb"]
rgb_joint = example["rgb_joint"]
rgb_camera = example["rgb_camera"]
rgb_vu = rgb_camera.zyx2vu(rgb_joint)
rgb_joint = normalize_joint_zyx(rgb_joint, rgb_camera, z_size)
print(example["param"])
vis_point(rgb_vu, img=rgb, color=color, ax=ax1)
vis_edge(rgb_vu, indices=edges, color=edge_color, ax=ax1)
vis_point(rgb_joint, color=color, ax=ax3)
vis_edge(rgb_joint, indices=edges, color=edge_color, ax=ax3)
vis_point(depth_vu, img=depth, color=color, ax=ax2)
vis_edge(depth_vu, indices=edges, color=edge_color, ax=ax2)
vis_point(depth_joint, color=color, ax=ax4)
vis_edge(depth_joint, indices=edges, color=edge_color, ax=ax4)
for ax in [ax3, ax4]:
ax.set_xlabel("x")
ax.set_ylabel("y")
ax.set_zlabel("z")
ax.view_init(-65, -90)
plt.savefig("output.png")
plt.show()
def visualize_rgb(dataset, keypoint_names, edges, color_map, idx=None):
if idx is None:
idx = random.randint(0, len(dataset) - 1)
logger.info("get example")
example = dataset.get_example(idx)
logger.info("Done get example")
fig = plt.figure(figsize=(5, 10))
ax1 = fig.add_subplot(211)
ax3 = fig.add_subplot(212, projection="3d")
color = [color_map[k] for k in keypoint_names]
edge_color = [color_map[s, t] for s, t in edges]
rgb = example["rgb"]
rgb_joint = example["rgb_joint"]
rgb_camera = example["rgb_camera"]
rgb_vu = rgb_camera.zyx2vu(rgb_joint)
vis_point(rgb_vu, img=rgb, color=color, ax=ax1)
vis_edge(rgb_vu, indices=edges, color=edge_color, ax=ax1)
vis_point(rgb_joint, color=color, ax=ax3)
vis_edge(rgb_joint, indices=edges, color=edge_color, ax=ax3)
for ax in [ax3]:
ax.set_xlabel("x")
ax.set_ylabel("y")
ax.set_zlabel("z")
ax.view_init(-65, -90)
plt.savefig("output.png")
plt.show()
def visualize_depth(dataset, keypoint_names, edges, color_map, normalize=False):
idx = random.randint(0, len(dataset) - 1)
logger.info("get example")
example = dataset.get_example(idx)
logger.info("Done get example")
fig = plt.figure(figsize=(5, 10))
ax2 = fig.add_subplot(211)
ax4 = fig.add_subplot(212, projection="3d")
color = [color_map[k] for k in keypoint_names]
edge_color = [color_map[s, t] for s, t in edges]
depth = example["depth"].astype(np.float32)
depth_joint = example["depth_joint"]
depth_camera = example["depth_camera"]
depth_vu, depth_z = depth_camera.zyx2vu(depth_joint, return_z=True)
z_size = example["param"]["z_size"]
if normalize:
depth = normalize_depth(depth, z_com=depth_z.mean(), z_size=z_size)
depth_joint = normalize_joint_zyx(depth_joint, depth_camera, z_size)
print(example["param"])
vis_point(depth_vu, img=depth, color=color, ax=ax2)
vis_edge(depth_vu, indices=edges, color=edge_color, ax=ax2)
vis_point(depth_joint, color=color, ax=ax4)
vis_edge(depth_joint, indices=edges, color=edge_color, ax=ax4)
for ax in [ax4]:
ax.set_xlabel("x")
ax.set_ylabel("y")
ax.set_zlabel("z")
ax.view_init(-65, -90)
plt.savefig("output.png")
plt.show()
| [((3, 9, 3, 36), 'logging.getLogger', 'logging.getLogger', ({(3, 27, 3, 35): '__name__'}, {}), '(__name__)', False, 'import logging\n'), ((43, 18, 43, 35), 'numpy.asarray', 'np.asarray', ({(43, 29, 43, 34): 'point'}, {}), '(point)', True, 'import numpy as np\n'), ((119, 10, 119, 36), 'matplotlib.pyplot.figure', 'plt.figure', (), '', True, 'from matplotlib import pyplot as plt\n'), ((140, 16, 140, 66), 'pose.hand_dataset.geometry_utils.normalize_joint_zyx', 'normalize_joint_zyx', ({(140, 36, 140, 45): 'rgb_joint', (140, 47, 140, 57): 'rgb_camera', (140, 59, 140, 65): 'z_size'}, {}), '(rgb_joint, rgb_camera, z_size)', False, 'from pose.hand_dataset.geometry_utils import normalize_joint_zyx\n'), ((162, 4, 162, 29), 'matplotlib.pyplot.savefig', 'plt.savefig', ({(162, 16, 162, 28): '"""output.png"""'}, {}), "('output.png')", True, 'from matplotlib import pyplot as plt\n'), ((163, 4, 163, 14), 'matplotlib.pyplot.show', 'plt.show', ({}, {}), '()', True, 'from matplotlib import pyplot as plt\n'), ((173, 10, 173, 37), 'matplotlib.pyplot.figure', 'plt.figure', (), '', True, 'from matplotlib import pyplot as plt\n'), ((196, 4, 196, 29), 'matplotlib.pyplot.savefig', 'plt.savefig', ({(196, 16, 196, 28): '"""output.png"""'}, {}), "('output.png')", True, 'from matplotlib import pyplot as plt\n'), ((197, 4, 197, 14), 'matplotlib.pyplot.show', 'plt.show', ({}, {}), '()', True, 'from matplotlib import pyplot as plt\n'), ((205, 10, 205, 37), 'matplotlib.pyplot.figure', 'plt.figure', (), '', True, 'from matplotlib import pyplot as plt\n'), ((218, 18, 218, 72), 'pose.hand_dataset.geometry_utils.normalize_joint_zyx', 'normalize_joint_zyx', ({(218, 38, 218, 49): 'depth_joint', (218, 51, 218, 63): 'depth_camera', (218, 65, 218, 71): 'z_size'}, {}), '(depth_joint, depth_camera, z_size)', False, 'from pose.hand_dataset.geometry_utils import normalize_joint_zyx\n'), ((233, 4, 233, 29), 'matplotlib.pyplot.savefig', 'plt.savefig', ({(233, 16, 233, 28): '"""output.png"""'}, {}), "('output.png')", True, 'from matplotlib import pyplot as plt\n'), ((234, 4, 234, 14), 'matplotlib.pyplot.show', 'plt.show', ({}, {}), '()', True, 'from matplotlib import pyplot as plt\n'), ((38, 13, 38, 56), 'chainercv.visualizations.vis_image', 'chainercv.visualizations.vis_image', ({(38, 48, 38, 51): 'img', (38, 53, 38, 55): 'ax'}, {}), '(img, ax)', False, 'import chainercv\n'), ((46, 22, 46, 51), 'numpy.expand_dims', 'np.expand_dims', (), '', True, 'import numpy as np\n'), ((49, 14, 49, 26), 'matplotlib.pyplot.figure', 'plt.figure', ({}, {}), '()', True, 'from matplotlib import pyplot as plt\n'), ((134, 22, 134, 76), 'pose.hand_dataset.geometry_utils.normalize_joint_zyx', 'normalize_joint_zyx', ({(134, 42, 134, 53): 'depth_joint', (134, 55, 134, 67): 'depth_camera', (134, 69, 134, 75): 'z_size'}, {}), '(depth_joint, depth_camera, z_size)', False, 'from pose.hand_dataset.geometry_utils import normalize_joint_zyx\n'), ((33, 18, 33, 30), 'matplotlib.pyplot.figure', 'plt.figure', ({}, {}), '()', True, 'from matplotlib import pyplot as plt\n'), ((67, 8, 67, 25), 'numpy.asarray', 'np.asarray', ({(67, 19, 67, 24): 'color'}, {}), '(color)', True, 'import numpy as np\n'), ((89, 16, 89, 33), 'numpy.asarray', 'np.asarray', ({(89, 27, 89, 32): 'color'}, {}), '(color)', True, 'import numpy as np\n')] |
ticapix/automated-tasks | publication-erdf/flask_service.py | a0c73ad2939c6f1a2d91aea6fd309b5005455191 | #!/usr/bin/env python3
from flask import Flask
app = Flask(__name__)
@app.route('/process-email')
def process_email():
return "Hello World!"
if __name__ == "__main__":
app.run()
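
# With app.run()'s default settings this serves on http://127.0.0.1:5000, so
# the endpoint can be exercised with: curl http://127.0.0.1:5000/process-email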
| [((4, 6, 4, 21), 'flask.Flask', 'Flask', ({(4, 12, 4, 20): '__name__'}, {}), '(__name__)', False, 'from flask import Flask\n')] |
mikiec84/speaking_detection | tools/nn/speaker.py | ed680138627c156e1f7b0af20d6517e2bea754cc | import os
import skimage.io
from torch.nn import Module
import torch.nn
from torchvision.models import resnet18
from nn.speaker_dataset import Dataset # @UnusedImport
os.environ['TORCH_MODEL_ZOO'] = '../data/'
VIDTIMIT_PATH = '../data/vidtimit/'
skimage.io.use_plugin('pil')
class Net(Module):
def __init__(self):
super().__init__()
resnet = resnet18(pretrained=True)
self.features = torch.nn.Sequential(*list(resnet.children())[:-1])
self.classifier = torch.nn.Sequential(
torch.nn.Linear(512, 2)
)
for p in list(self.features.parameters())[:20]:
p.requires_grad = False
def forward(self, x, **kw):
f = self.features(x)
f = f.view(f.size(0), -1)
y = self.classifier(f)
return y
def get_speaking_detector_final():
m = torch.load('../data/speaker.pt')
    m = m.eval()
return m
def get_speaking_detector(e):
m = torch.load('../data/speaker/model.e{}.pt'.format(e))
    m = m.eval()
return m
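

# Illustrative smoke test (added sketch): the 3x224x224 input shape is an
# assumption based on ResNet-18's standard input size; running this downloads
# the pretrained backbone weights.
if __name__ == "__main__":
    net = Net().eval()
    logits = net(torch.zeros(1, 3, 224, 224))
    print(logits.shape)  # expected: torch.Size([1, 2])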
| [((21, 17, 21, 42), 'torchvision.models.resnet18', 'resnet18', (), '', False, 'from torchvision.models import resnet18\n')] |
mdreves/model-analysis | tensorflow_model_analysis/util_test.py | 73760b27b763e322a92ea80ff0a768ad9ef74526 | # Copyright 2018 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# https://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Simple tests for util."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import tensorflow as tf
from tensorflow_model_analysis import util
class UtilTest(tf.test.TestCase):
def testKwargsOnly(self):
@util.kwargs_only
def fn(a, b, c, d=None, e=5):
if d is None:
d = 100
if e is None:
e = 1000
return a + b + c + d + e
self.assertEqual(1 + 2 + 3 + 100 + 5, fn(a=1, b=2, c=3))
self.assertEqual(1 + 2 + 3 + 100 + 1000, fn(a=1, b=2, c=3, e=None))
with self.assertRaisesRegexp(TypeError, 'keyword-arguments only'):
fn(1, 2, 3)
with self.assertRaisesRegexp(TypeError, 'with c specified'):
fn(a=1, b=2, e=5) # pylint: disable=no-value-for-parameter
with self.assertRaisesRegexp(TypeError, 'with extraneous kwargs'):
fn(a=1, b=2, c=3, f=11) # pylint: disable=unexpected-keyword-arg
if __name__ == '__main__':
tf.test.main()
| [((47, 2, 47, 16), 'tensorflow.test.main', 'tf.test.main', ({}, {}), '()', True, 'import tensorflow as tf\n')] |
eivtho/PyLaia | laia/data/transforms/vision/random_beta_morphology.py | 2a7a6e2eeb9b5af68c0faed0c564b02063e72be0 | from typing import List, Tuple, Union
import numpy as np
import scipy.special
from PIL import Image, ImageFilter
class RandomBetaMorphology:
def __init__(
self, filter_size_min: int, filter_size_max: int, alpha: float, beta: float
) -> None:
assert filter_size_min % 2 != 0, "Filter size must be odd"
assert filter_size_max % 2 != 0, "Filter size must be odd"
self.filter_size_min = filter_size_min
self.filter_size_max = filter_size_max
self.alpha = alpha
self.beta = beta
self.filter_sizes, self.filter_probs = self._create_filter_distribution(
filter_size_min, filter_size_max, alpha, beta
)
@staticmethod
def _create_filter_distribution(
filter_size_min: int, filter_size_max: int, alpha: float, beta: float
) -> Tuple[List[int], Union[List[float], np.ndarray]]:
n = (filter_size_max - filter_size_min) // 2 + 1
if n < 2:
return [filter_size_min], np.asarray([1.0], dtype=np.float32)
filter_sizes = []
filter_probs = []
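        # Accumulate unnormalized beta-binomial-style weights over the n candidate odd sizes.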
for k in range(n):
filter_sizes.append(filter_size_min + 2 * k)
filter_probs.append(
scipy.special.comb(n, k) * scipy.special.beta(alpha + k, n - k + beta)
)
np_filter_probs = np.asarray(filter_probs, dtype=np.float32)
        np_filter_probs = np_filter_probs / np_filter_probs.sum()  # normalize to a probability distribution
return filter_sizes, np_filter_probs
def sample_filter_size(self):
filter_size = np.random.choice(self.filter_sizes, p=self.filter_probs)
return filter_size
def __call__(self, *args, **kwargs):
        raise NotImplementedError  # subclasses (Dilate, Erode) implement __call__
def __repr__(self) -> str:
return (
f"vision.{self.__class__.__name__}("
f"filter_size_min={self.filter_size_min}, "
f"filter_size_max={self.filter_size_max}, "
f"alpha={self.alpha}, beta={self.beta})"
)
class Dilate(RandomBetaMorphology):
def __init__(
self,
filter_size_min: int = 3,
filter_size_max: int = 7,
alpha: float = 1,
beta: float = 3,
) -> None:
super().__init__(filter_size_min, filter_size_max, alpha, beta)
def __call__(self, img: Image) -> Image:
filter_size = self.sample_filter_size()
return img.filter(ImageFilter.MaxFilter(filter_size))
class Erode(RandomBetaMorphology):
def __init__(
self,
filter_size_min: int = 3,
filter_size_max: int = 5,
alpha: float = 1,
beta: float = 3,
) -> None:
super().__init__(filter_size_min, filter_size_max, alpha, beta)
def __call__(self, img: Image) -> Image:
filter_size = self.sample_filter_size()
return img.filter(ImageFilter.MinFilter(filter_size))
if __name__ == "__main__":
import argparse
from PIL import ImageOps
parser = argparse.ArgumentParser()
parser.add_argument("--operation", choices=("dilate", "erode"), default="dilate")
parser.add_argument("images", type=argparse.FileType("rb"), nargs="+")
args = parser.parse_args()
transformer = Dilate() if args.operation == "dilate" else Erode()
for f in args.images:
x = Image.open(f, "r").convert("L")
x = ImageOps.invert(x)
y = transformer(x)
w, h = x.size
z = Image.new("L", (w, 2 * h))
z.paste(x, (0, 0))
z.paste(y, (0, h))
z = z.resize(size=(w // 2, h), resample=Image.BICUBIC)
z.show()
input()
| [((91, 13, 91, 38), 'argparse.ArgumentParser', 'argparse.ArgumentParser', ({}, {}), '()', False, 'import argparse\n'), ((36, 26, 36, 68), 'numpy.asarray', 'np.asarray', (), '', True, 'import numpy as np\n'), ((41, 22, 41, 78), 'numpy.random.choice', 'np.random.choice', (), '', True, 'import numpy as np\n'), ((100, 12, 100, 30), 'PIL.ImageOps.invert', 'ImageOps.invert', ({(100, 28, 100, 29): 'x'}, {}), '(x)', False, 'from PIL import ImageOps\n'), ((104, 12, 104, 38), 'PIL.Image.new', 'Image.new', ({(104, 22, 104, 25): '"""L"""', (104, 27, 104, 37): '(w, 2 * h)'}, {}), "('L', (w, 2 * h))", False, 'from PIL import Image, ImageFilter\n'), ((68, 26, 68, 60), 'PIL.ImageFilter.MaxFilter', 'ImageFilter.MaxFilter', ({(68, 48, 68, 59): 'filter_size'}, {}), '(filter_size)', False, 'from PIL import Image, ImageFilter\n'), ((83, 26, 83, 60), 'PIL.ImageFilter.MinFilter', 'ImageFilter.MinFilter', ({(83, 48, 83, 59): 'filter_size'}, {}), '(filter_size)', False, 'from PIL import Image, ImageFilter\n'), ((93, 39, 93, 62), 'argparse.FileType', 'argparse.FileType', ({(93, 57, 93, 61): '"""rb"""'}, {}), "('rb')", False, 'import argparse\n'), ((28, 38, 28, 73), 'numpy.asarray', 'np.asarray', (), '', True, 'import numpy as np\n'), ((99, 12, 99, 30), 'PIL.Image.open', 'Image.open', ({(99, 23, 99, 24): 'f', (99, 26, 99, 29): '"""r"""'}, {}), "(f, 'r')", False, 'from PIL import Image, ImageFilter\n')] |
Addvilz/hemp | hemp/internal/utils.py | 2cd1d437fc59a8f7b24f5d150c623bf75c3b6747 | import sys
from fabric.utils import error, puts
from git import RemoteProgress
def print_err(message, func=None, exception=None, stdout=None, stderr=None):
error('[Hemp] ' + message, func, exception, stdout, stderr)
def print_info(text, show_prefix=None, end="\n", flush=True):
puts('[Hemp] ' + text, show_prefix, end, flush)
def print_git_output(stdout):
for line in stdout.split('\n'):
sys.stdout.write('[GIT] ' + line + '\n')
sys.stdout.flush()
class SimpleProgressPrinter(RemoteProgress):
def _parse_progress_line(self, line):
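        """Prefix each git progress line with '[GIT] ', including carriage-return progress updates."""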
if '\r' in line:
line = line.replace('\r', '\r[GIT] ')
sys.stdout.write('[GIT] ' + line + '\n')
sys.stdout.flush()
| [((8, 4, 8, 63), 'fabric.utils.error', 'error', ({(8, 10, 8, 29): "('[Hemp] ' + message)", (8, 31, 8, 35): 'func', (8, 37, 8, 46): 'exception', (8, 48, 8, 54): 'stdout', (8, 56, 8, 62): 'stderr'}, {}), "('[Hemp] ' + message, func, exception, stdout, stderr)", False, 'from fabric.utils import error, puts\n'), ((12, 4, 12, 51), 'fabric.utils.puts', 'puts', ({(12, 9, 12, 25): "('[Hemp] ' + text)", (12, 27, 12, 38): 'show_prefix', (12, 40, 12, 43): 'end', (12, 45, 12, 50): 'flush'}, {}), "('[Hemp] ' + text, show_prefix, end, flush)", False, 'from fabric.utils import error, puts\n'), ((17, 8, 17, 48), 'sys.stdout.write', 'sys.stdout.write', ({(17, 25, 17, 47): "('[GIT] ' + line + '\\n')"}, {}), "('[GIT] ' + line + '\\n')", False, 'import sys\n'), ((18, 8, 18, 26), 'sys.stdout.flush', 'sys.stdout.flush', ({}, {}), '()', False, 'import sys\n'), ((25, 8, 25, 48), 'sys.stdout.write', 'sys.stdout.write', ({(25, 25, 25, 47): "('[GIT] ' + line + '\\n')"}, {}), "('[GIT] ' + line + '\\n')", False, 'import sys\n'), ((26, 8, 26, 26), 'sys.stdout.flush', 'sys.stdout.flush', ({}, {}), '()', False, 'import sys\n')] |
dla1635/hyLink | backend/links/sentence.py | 8f3d1b6b0cad57ce2f6861583eb2b523f9fceee7 | # -*- coding: utf-8 -*-
from collections import Counter
from konlpy.tag import Okt
class Sentence(object):
okt = Okt()
def __init__(self, text, index=0):
self.index = index
self.text = text.strip()
self.tokens = self.okt.phrases(self.text)
self.bow = Counter(self.tokens)
def __str__(self):
return self.text
def __hash__(self):
return self.index
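

# Illustrative usage (added sketch): the sample text is an arbitrary Korean
# phrase ("natural language processing is fun"); KoNLPy's Okt backend must be
# installed for this to run.
if __name__ == "__main__":
    s = Sentence("자연어 처리는 재미있다", index=0)
    print(s, dict(s.bow))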
| [((9, 10, 9, 15), 'konlpy.tag.Okt', 'Okt', ({}, {}), '()', False, 'from konlpy.tag import Okt\n'), ((15, 19, 15, 39), 'collections.Counter', 'Counter', ({(15, 27, 15, 38): 'self.tokens'}, {}), '(self.tokens)', False, 'from collections import Counter\n')] |
IMULMUL/barf-project | tests/arch/x86/test_x86parser.py | 9547ef843b8eb021c2c32c140e36173c0b4eafa3 | # Copyright (c) 2014, Fundacion Dr. Manuel Sadosky
# All rights reserved.
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
# 1. Redistributions of source code must retain the above copyright notice, this
# list of conditions and the following disclaimer.
# 2. Redistributions in binary form must reproduce the above copyright notice,
# this list of conditions and the following disclaimer in the documentation
# and/or other materials provided with the distribution.
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
# DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE
# FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
# DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
# SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
# OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
from __future__ import absolute_import
import unittest
from barf.arch import ARCH_X86_MODE_32
from barf.arch import ARCH_X86_MODE_64
from barf.arch.x86.parser import X86Parser
class X86Parser32BitsTests(unittest.TestCase):
def setUp(self):
self._parser = X86Parser(ARCH_X86_MODE_32)
def test_two_oprnd_reg_reg(self):
asm = self._parser.parse("add eax, ebx")
self.assertEqual(str(asm), "add eax, ebx")
def test_two_oprnd_reg_imm(self):
asm = self._parser.parse("add eax, 0x12345678")
self.assertEqual(str(asm), "add eax, 0x12345678")
def test_two_oprnd_reg_mem(self):
asm = self._parser.parse("add eax, [ebx + edx * 4 + 0x10]")
self.assertEqual(str(asm), "add eax, [ebx+edx*4+0x10]")
def test_two_oprnd_mem_reg(self):
asm = self._parser.parse("add [ebx + edx * 4 + 0x10], eax")
self.assertEqual(str(asm), "add [ebx+edx*4+0x10], eax")
def test_one_oprnd_reg(self):
asm = self._parser.parse("inc eax")
self.assertEqual(str(asm), "inc eax")
def test_one_oprnd_imm(self):
asm = self._parser.parse("jmp 0x12345678")
self.assertEqual(str(asm), "jmp 0x12345678")
def test_one_oprnd_mem(self):
asm = self._parser.parse("inc dword ptr [ebx+edx*4+0x10]")
self.assertEqual(str(asm), "inc dword ptr [ebx+edx*4+0x10]")
def test_zero_oprnd(self):
asm = self._parser.parse("nop")
self.assertEqual(str(asm), "nop")
# Misc
# ======================================================================== #
def test_misc_1(self):
asm = self._parser.parse("mov dword ptr [-0x21524111], ecx")
self.assertEqual(str(asm), "mov dword ptr [-0x21524111], ecx")
self.assertNotEqual(str(asm), "mov dword ptr [0xdeadbeef], ecx")
def test_misc_2(self):
asm = self._parser.parse("fucompi st(1)")
self.assertEqual(str(asm), "fucompi st1")
class X86Parser64BitsTests(unittest.TestCase):
def setUp(self):
self._parser = X86Parser(ARCH_X86_MODE_64)
def test_64_two_oprnd_reg_reg(self):
asm = self._parser.parse("add rax, rbx")
self.assertEqual(str(asm), "add rax, rbx")
def test_64_two_oprnd_reg_reg_2(self):
asm = self._parser.parse("add rax, r8")
self.assertEqual(str(asm), "add rax, r8")
def test_64_two_oprnd_reg_mem(self):
asm = self._parser.parse("add rax, [rbx + r15 * 4 + 0x10]")
self.assertEqual(str(asm), "add rax, [rbx+r15*4+0x10]")
# Misc
# ======================================================================== #
def test_misc_offset_1(self):
asm = self._parser.parse("add byte ptr [rax+0xffffff89], cl")
self.assertEqual(str(asm), "add byte ptr [rax+0xffffff89], cl")
def main():
unittest.main()
if __name__ == '__main__':
main()
| [((122, 4, 122, 19), 'unittest.main', 'unittest.main', ({}, {}), '()', False, 'import unittest\n'), ((37, 23, 37, 50), 'barf.arch.x86.parser.X86Parser', 'X86Parser', ({(37, 33, 37, 49): 'ARCH_X86_MODE_32'}, {}), '(ARCH_X86_MODE_32)', False, 'from barf.arch.x86.parser import X86Parser\n'), ((96, 23, 96, 50), 'barf.arch.x86.parser.X86Parser', 'X86Parser', ({(96, 33, 96, 49): 'ARCH_X86_MODE_64'}, {}), '(ARCH_X86_MODE_64)', False, 'from barf.arch.x86.parser import X86Parser\n')] |
Chyi341152/pyConPaper | Concurrency/codeSample/Part4_Thread_Synchronuzation_Primitives/sema_signal.py | 851190d59f8dc85b4f2a0b47c6505edd0367a6fe | #!/usr/bin/env python3
# -*- coding:utf-8 -*-
# sema_signal.py
#
# An example of using a semaphore for signaling between threads
import threading
import time
done = threading.Semaphore(0)  # Count starts at 0 so the consumer blocks until signaled
item = None
def producer():
global item
print("I'm the producer and I produce data.")
print("Producer is going to sleep.")
time.sleep(5)
item = "Hello"
print("Producer is alive. Signaling the consumer.")
done.release() # Increments the count and signals waiting threads
def consumer():
print("I'm a consumer and I want for date.")
print("Consumer is waiting.")
    done.acquire() # Blocks while the count is 0; otherwise decrements the count and continues
print("Consumer got", item)
t1 = threading.Thread(target=producer)
t2 = threading.Thread(target=consumer)
t1.start()
t2.start()
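
# Wait for both threads so the script exits only after the hand-off completes.
t1.join()
t2.join()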
"""
Semaphore Uses:
1. Resource control
You can limit the number of threads performing certain operations. For example, performing database queries or making network connections
2. Signaling
Semaphores can be used to send "signals" between threads. For example, having one thread wake up another thread
"""
| [((11, 7, 11, 29), 'threading.Semaphore', 'threading.Semaphore', ({(11, 27, 11, 28): '0'}, {}), '(0)', False, 'import threading\n'), ((30, 5, 30, 38), 'threading.Thread', 'threading.Thread', (), '', False, 'import threading\n'), ((31, 5, 31, 38), 'threading.Thread', 'threading.Thread', (), '', False, 'import threading\n'), ((18, 4, 18, 17), 'time.sleep', 'time.sleep', ({(18, 15, 18, 16): '(5)'}, {}), '(5)', False, 'import time\n')] |
KarrLab/wc_sim | temp_wc_analysis/analysis.py | 5b0ee03c3d19193fa67a3797d4258b753e6bc576 | '''Analysis utility functions.
:Author: Jonathan Karr <[email protected]>
:Date: 2016-03-26
:Copyright: 2016-2018, Karr Lab
:License: MIT
'''
# TODO(Arthur): IMPORTANT: refactor and replace
from matplotlib import pyplot
from matplotlib import ticker
from wc_lang import Model, Submodel
from scipy.constants import Avogadro
import numpy as np
import re
def plot(model, time = np.zeros(0),
species_counts = None, volume = np.zeros(0), extracellular_volume = np.zeros(0),
selected_species_compartments = [],
yDatas = {},
units = 'mM', title = '', fileName = ''):
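    """Plot species counts or concentrations over time (a sketch of the
    expected inputs, inferred from the body below): `time` is in seconds and
    is converted to hours, `units` selects the concentration scaling, and
    `fileName`, when given, saves the figure."""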
#convert time to hours
time = time.copy() / 3600
#create figure
fig = pyplot.figure()
#extract data to plot
if not yDatas:
yDatas = {}
for species_compartment_id in selected_species_compartments:
#extract data
match = re.match('^(?P<speciesId>[a-z0-9\-_]+)\[(?P<compartmentId>[a-z0-9\-_]+)\]$',
species_compartment_id, re.I).groupdict()
speciesId = match['speciesId']
compartmentId = match['compartmentId']
if isinstance(model, Model):
species = model.get_component_by_id(speciesId, 'species')
compartment = model.get_component_by_id(compartmentId, 'compartments')
yData = species_counts[species.index, compartment.index, :]
elif isinstance(model, Submodel):
yData = species_counts[species_compartment_id]
else:
raise Exception('Invalid model type %s' % model.__class__.__name__)
#scale
if compartmentId == 'c':
V = volume
else:
V = extracellular_volume
if units == 'pM':
scale = 1 / Avogadro / V * 1e12
elif units == 'nM':
scale = 1 / Avogadro / V * 1e9
elif units == 'uM':
scale = 1 / Avogadro / V * 1e6
elif units == 'mM':
scale = 1 / Avogadro / V * 1e3
elif units == 'M':
scale = 1 / Avogadro / V * 1e0
elif units == 'molecules':
scale = 1
else:
raise Exception('Invalid units "%s"' % units)
yData *= scale
yDatas[species_compartment_id] = yData
#plot results
yMin = 1e12
yMax = -1e12
for label, yData in yDatas.items():
#update range
yMin = min(yMin, np.min(yData))
yMax = max(yMax, np.max(yData))
#add to plot
pyplot.plot(time, yData, label=label)
#set axis limits
pyplot.xlim((0, time[-1]))
pyplot.ylim((yMin, yMax))
#add axis labels and legend
if title:
pyplot.title(title)
pyplot.xlabel('Time (h)')
if units == 'molecules':
pyplot.ylabel('Copy number')
else:
pyplot.ylabel('Concentration (%s)' % units)
y_formatter = ticker.ScalarFormatter(useOffset=False)
pyplot.gca().get_yaxis().set_major_formatter(y_formatter)
if len(selected_species_compartments) > 1:
pyplot.legend()
#save
if fileName:
fig.savefig(fileName)
pyplot.close(fig)
| [((18, 23, 18, 34), 'numpy.zeros', 'np.zeros', ({(18, 32, 18, 33): '(0)'}, {}), '(0)', True, 'import numpy as np\n'), ((19, 36, 19, 47), 'numpy.zeros', 'np.zeros', ({(19, 45, 19, 46): '(0)'}, {}), '(0)', True, 'import numpy as np\n'), ((19, 72, 19, 83), 'numpy.zeros', 'np.zeros', ({(19, 81, 19, 82): '(0)'}, {}), '(0)', True, 'import numpy as np\n'), ((28, 10, 28, 25), 'matplotlib.pyplot.figure', 'pyplot.figure', ({}, {}), '()', False, 'from matplotlib import pyplot\n'), ((86, 4, 86, 30), 'matplotlib.pyplot.xlim', 'pyplot.xlim', ({(86, 16, 86, 29): '(0, time[-1])'}, {}), '((0, time[-1]))', False, 'from matplotlib import pyplot\n'), ((87, 4, 87, 29), 'matplotlib.pyplot.ylim', 'pyplot.ylim', ({(87, 16, 87, 28): '(yMin, yMax)'}, {}), '((yMin, yMax))', False, 'from matplotlib import pyplot\n'), ((93, 4, 93, 29), 'matplotlib.pyplot.xlabel', 'pyplot.xlabel', ({(93, 18, 93, 28): '"""Time (h)"""'}, {}), "('Time (h)')", False, 'from matplotlib import pyplot\n'), ((100, 18, 100, 57), 'matplotlib.ticker.ScalarFormatter', 'ticker.ScalarFormatter', (), '', False, 'from matplotlib import ticker\n'), ((83, 8, 83, 45), 'matplotlib.pyplot.plot', 'pyplot.plot', (), '', False, 'from matplotlib import pyplot\n'), ((91, 8, 91, 27), 'matplotlib.pyplot.title', 'pyplot.title', ({(91, 21, 91, 26): 'title'}, {}), '(title)', False, 'from matplotlib import pyplot\n'), ((96, 8, 96, 36), 'matplotlib.pyplot.ylabel', 'pyplot.ylabel', ({(96, 22, 96, 35): '"""Copy number"""'}, {}), "('Copy number')", False, 'from matplotlib import pyplot\n'), ((98, 8, 98, 51), 'matplotlib.pyplot.ylabel', 'pyplot.ylabel', ({(98, 22, 98, 50): "('Concentration (%s)' % units)"}, {}), "('Concentration (%s)' % units)", False, 'from matplotlib import pyplot\n'), ((104, 8, 104, 23), 'matplotlib.pyplot.legend', 'pyplot.legend', ({}, {}), '()', False, 'from matplotlib import pyplot\n'), ((109, 8, 109, 25), 'matplotlib.pyplot.close', 'pyplot.close', ({(109, 21, 109, 24): 'fig'}, {}), '(fig)', False, 'from matplotlib import pyplot\n'), ((79, 25, 79, 38), 'numpy.min', 'np.min', ({(79, 32, 79, 37): 'yData'}, {}), '(yData)', True, 'import numpy as np\n'), ((80, 25, 80, 38), 'numpy.max', 'np.max', ({(80, 32, 80, 37): 'yData'}, {}), '(yData)', True, 'import numpy as np\n'), ((35, 20, 36, 45), 're.match', 're.match', ({(35, 29, 35, 95): '"""^(?P<speciesId>[a-z0-9\\\\-_]+)\\\\[(?P<compartmentId>[a-z0-9\\\\-_]+)\\\\]$"""', (36, 16, 36, 38): 'species_compartment_id', (36, 40, 36, 44): 're.I'}, {}), "('^(?P<speciesId>[a-z0-9\\\\-_]+)\\\\[(?P<compartmentId>[a-z0-9\\\\-_]+)\\\\]$'\n , species_compartment_id, re.I)", False, 'import re\n'), ((101, 4, 101, 16), 'matplotlib.pyplot.gca', 'pyplot.gca', ({}, {}), '()', False, 'from matplotlib import pyplot\n')] |
bstuddard/bonsai | setup.py | 3610fc50a3b24818288d850048c2a23306215367 | from setuptools import setup, find_packages
with open("README.md", "r") as readme_file:
readme = readme_file.read()
requirements = [
'xgboost>=0.90',
'catboost>=0.26',
'bayesian-optimization>=1.2.0',
'numpy>=1.19.5',
'pandas>=1.1.5',
'matplotlib>=3.2.2',
'seaborn>=0.11.1',
'plotly>=4.4.1',
'pyyaml>=5.4.1'
]
setup(
name="bonsai-tree",
version="1.2",
author="Landon Buechner",
author_email="[email protected]",
description="Bayesian Optimization + Gradient Boosted Trees",
long_description=readme,
url="https://github.com/magi-1/bonsai",
packages=find_packages(),
package_data={'': ['*.yml']},
install_requires=requirements,
license = 'MIT',
classifiers=[
"Programming Language :: Python :: 3.6",
"Programming Language :: Python :: 3.7",
"Programming Language :: Python :: 3.8",
],
) | [((26, 13, 26, 28), 'setuptools.find_packages', 'find_packages', ({}, {}), '()', False, 'from setuptools import setup, find_packages\n')] |
clockhart/pathogen | _scripts/increment_version.py | 1764d4a7d2dd7c1f5dcc08afc016ec4edf809c36 | """
increment_version.py
written in Python3
author: C. Lockhart <[email protected]>
"""
import yaml
# Read in version
with open('version.yml', 'r') as f:
version = yaml.safe_load(f.read())
# Strip "dev" out of micro
version['micro'] = int(str(version['micro']).replace('dev', ''))
# Update patch
version['micro'] += 1
# Add "dev" back to patch
if version['micro'] != 0:
version['micro'] = 'dev' + str(version['micro'])
# Output version
with open('version.yml', 'w') as f:
yaml.safe_dump(version, f, sort_keys=False)
# Transform version dict to string
version = '.'.join([str(version[key]) for key in ['major', 'minor', 'micro']])
# Write version string to pathogen/_version.py
with open('pathogen/version.py', 'w') as f:
f.write("__version__ = '{}'\n".format(version))
# Return
print(version)
| [((26, 4, 26, 47), 'yaml.safe_dump', 'yaml.safe_dump', (), '', False, 'import yaml\n')] |
TheoMathurin/holoviews | holoviews/core/data/ibis.py | 0defcef994d6dd6d2054f75a0e332d02d121f8b0 | import sys
import numpy
try:
from collections.abc import Iterable
except ImportError:
from collections import Iterable
from .. import util
from ..element import Element
from ..ndmapping import NdMapping, item_check, sorted_context
from .interface import Interface
from . import pandas
from .util import cached
class IbisInterface(Interface):
types = ()
datatype = "ibis"
default_partitions = 100
zero_indexed_backend_modules = [
'ibis.backends.omniscidb.client',
]
# the rowid is needed until ibis updates versions
@classmethod
def has_rowid(cls):
import ibis.expr.operations
return hasattr(ibis.expr.operations, "RowID")
@classmethod
def is_rowid_zero_indexed(cls, data):
try:
from ibis.client import find_backends, validate_backends
(backend,) = validate_backends(list(find_backends(data)))
except Exception:
backend = data._find_backend()
return type(backend).__module__ in cls.zero_indexed_backend_modules
@classmethod
def loaded(cls):
return "ibis" in sys.modules
@classmethod
def applies(cls, obj):
if not cls.loaded():
return False
from ibis.expr.types import Expr
return isinstance(obj, Expr)
@classmethod
def init(cls, eltype, data, keys, values):
params = eltype.param.objects()
index = params["kdims"]
columns = params["vdims"]
if isinstance(index.bounds[1], int):
ndim = min([index.bounds[1], len(index.default)])
else:
ndim = None
nvdim = columns.bounds[1] if isinstance(columns.bounds[1], int) else None
if keys and values is None:
values = [c for c in data.columns if c not in keys]
elif values and keys is None:
keys = [c for c in data.columns if c not in values][:ndim]
elif keys is None:
keys = list(data.columns[:ndim])
if values is None:
values = [
key
for key in data.columns[ndim : ((ndim + nvdim) if nvdim else None)]
if key not in keys
]
elif keys == [] and values is None:
values = list(data.columns[: nvdim if nvdim else None])
return data, dict(kdims=keys, vdims=values), {}
@classmethod
def compute(cls, dataset):
return dataset.clone(dataset.data.execute())
@classmethod
def persist(cls, dataset):
return cls.compute(dataset)
@classmethod
@cached
def length(self, dataset):
# Get the length by counting the length of an empty query.
return dataset.data[[]].count().execute()
@classmethod
@cached
def nonzero(cls, dataset):
# Make an empty query to see if a row is returned.
return bool(len(dataset.data[[]].head(1).execute()))
@classmethod
@cached
def range(cls, dataset, dimension):
dimension = dataset.get_dimension(dimension, strict=True)
if cls.dtype(dataset, dimension).kind in 'SUO':
return None, None
if dimension.nodata is not None:
return Interface.range(dataset, dimension)
column = dataset.data[dimension.name]
return tuple(
dataset.data.aggregate([column.min(), column.max()]).execute().values[0, :]
)
@classmethod
@cached
def values(
cls,
dataset,
dimension,
expanded=True,
flat=True,
compute=True,
keep_index=False,
):
dimension = dataset.get_dimension(dimension, strict=True)
data = dataset.data[dimension.name]
if not expanded:
data = data.distinct()
return data if keep_index or not compute else data.execute().values
@classmethod
def histogram(cls, expr, bins, density=True, weights=None):
bins = numpy.asarray(bins)
bins = [int(v) if bins.dtype.kind in 'iu' else float(v) for v in bins]
binned = expr.bucket(bins).name('bucket')
hist = numpy.zeros(len(bins)-1)
hist_bins = binned.value_counts().sort_by('bucket').execute()
for b, v in zip(hist_bins['bucket'], hist_bins['count']):
if numpy.isnan(b):
continue
hist[int(b)] = v
if weights is not None:
raise NotImplementedError("Weighted histograms currently "
"not implemented for IbisInterface.")
if density:
hist = hist/expr.count().execute()
return hist, bins
@classmethod
@cached
def shape(cls, dataset):
return cls.length(dataset), len(dataset.data.columns)
@classmethod
@cached
def dtype(cls, dataset, dimension):
dimension = dataset.get_dimension(dimension)
return dataset.data.head(0).execute().dtypes[dimension.name]
dimension_type = dtype
@classmethod
def sort(cls, dataset, by=[], reverse=False):
return dataset.data.sort_by([(dataset.get_dimension(x).name, not reverse) for x in by])
@classmethod
def redim(cls, dataset, dimensions):
return dataset.data.mutate(
**{v.name: dataset.data[k] for k, v in dimensions.items()}
)
validate = pandas.PandasInterface.validate
reindex = pandas.PandasInterface.reindex
@classmethod
def _index_ibis_table(cls, data):
import ibis
if not cls.has_rowid():
raise ValueError(
"iloc expressions are not supported for ibis version %s."
% ibis.__version__
)
if "hv_row_id__" in data.columns:
return data
if cls.is_rowid_zero_indexed(data):
return data.mutate(hv_row_id__=data.rowid())
else:
return data.mutate(hv_row_id__=data.rowid() - 1)
@classmethod
def iloc(cls, dataset, index):
rows, columns = index
scalar = all(map(util.isscalar, index))
if isinstance(columns, slice):
columns = [x.name for x in dataset.dimensions()[columns]]
elif numpy.isscalar(columns):
columns = [dataset.get_dimension(columns).name]
else:
columns = [dataset.get_dimension(d).name for d in columns]
data = cls._index_ibis_table(dataset.data[columns])
if scalar:
return (
data.filter(data.hv_row_id__ == rows)[columns]
.head(1)
.execute()
.iloc[0, 0]
)
if isinstance(rows, slice):
# We should use a pseudo column for the row number but i think that is still awaiting
# a pr on ibis
if any(x is not None for x in (rows.start, rows.stop, rows.step)):
predicates = []
if rows.start:
predicates += [data.hv_row_id__ >= rows.start]
if rows.stop:
predicates += [data.hv_row_id__ < rows.stop]
return data.filter(predicates).drop(["hv_row_id__"])
else:
if not isinstance(rows, Iterable):
rows = [rows]
return data.filter([data.hv_row_id__.isin(rows)]).drop(["hv_row_id__"])
return data.drop(["hv_row_id__"])
@classmethod
def unpack_scalar(cls, dataset, data):
"""
Given a dataset object and data in the appropriate format for
the interface, return a simple scalar.
"""
if len(data.columns) > 1 or data[[]].count().execute() != 1:
return data
return data.execute().iat[0, 0]
@classmethod
def groupby(cls, dataset, dimensions, container_type, group_type, **kwargs):
# aggregate the necesary dimensions
index_dims = [dataset.get_dimension(d, strict=True) for d in dimensions]
element_dims = [kdim for kdim in dataset.kdims if kdim not in index_dims]
group_kwargs = {}
if group_type != "raw" and issubclass(group_type, Element):
group_kwargs = dict(util.get_param_values(dataset), kdims=element_dims)
group_kwargs.update(kwargs)
group_kwargs["dataset"] = dataset.dataset
group_by = [d.name for d in index_dims]
# execute a query against the table to find the unique groups.
groups = dataset.data.groupby(group_by).aggregate().execute()
# filter each group based on the predicate defined.
data = [
(
tuple(s.values.tolist()),
group_type(
dataset.data.filter(
[dataset.data[k] == v for k, v in s.to_dict().items()]
),
**group_kwargs
),
)
for i, s in groups.iterrows()
]
if issubclass(container_type, NdMapping):
with item_check(False), sorted_context(False):
return container_type(data, kdims=index_dims)
else:
return container_type(data)
@classmethod
def assign(cls, dataset, new_data):
return dataset.data.mutate(**new_data)
@classmethod
def add_dimension(cls, dataset, dimension, dim_pos, values, vdim):
import ibis
data = dataset.data
if dimension.name not in data.columns:
if not isinstance(values, ibis.Expr) and not numpy.isscalar(values):
raise ValueError("Cannot assign %s type as a Ibis table column, "
"expecting either ibis.Expr or scalar."
% type(values).__name__)
data = data.mutate(**{dimension.name: values})
return data
@classmethod
@cached
def isscalar(cls, dataset, dim):
return (
dataset.data[dataset.get_dimension(dim, strict=True).name]
.distinct()
.count()
.compute()
== 1
)
@classmethod
def select(cls, dataset, selection_mask=None, **selection):
if selection_mask is None:
selection_mask = cls.select_mask(dataset, selection)
indexed = cls.indexed(dataset, selection)
data = dataset.data
if isinstance(selection_mask, numpy.ndarray):
data = cls._index_ibis_table(data)
if selection_mask.dtype == numpy.dtype("bool"):
selection_mask = numpy.where(selection_mask)[0]
data = data.filter(
data["hv_row_id__"].isin(list(map(int, selection_mask)))
).drop(["hv_row_id__"])
elif selection_mask is not None and not (isinstance(selection_mask, list) and not selection_mask):
data = data.filter(selection_mask)
if indexed and data.count().execute() == 1 and len(dataset.vdims) == 1:
return data[dataset.vdims[0].name].execute().iloc[0]
return data
@classmethod
def select_mask(cls, dataset, selection):
import ibis
predicates = []
for dim, object in selection.items():
if isinstance(object, tuple):
object = slice(*object)
alias = dataset.get_dimension(dim).name
column = dataset.data[alias]
if isinstance(object, slice):
if object.start is not None:
# Workaround for dask issue #3392
bound = util.numpy_scalar_to_python(object.start)
predicates.append(bound <= column)
if object.stop is not None:
bound = util.numpy_scalar_to_python(object.stop)
predicates.append(column < bound)
elif isinstance(object, (set, list)):
# rowid conditions
condition = None
for id in object:
predicate = column == id
condition = (
predicate if condition is None else condition | predicate
)
if condition is not None:
predicates.append(condition)
elif callable(object):
predicates.append(object(column))
elif isinstance(object, ibis.Expr):
predicates.append(object)
else:
predicates.append(column == object)
return predicates
@classmethod
def sample(cls, dataset, samples=[]):
import ibis
dims = dataset.dimensions()
data = dataset.data
if all(util.isscalar(s) or len(s) == 1 for s in samples):
items = [s[0] if isinstance(s, tuple) else s for s in samples]
return data[data[dims[0].name].isin(items)]
predicates = None
for sample in samples:
if util.isscalar(sample):
sample = [sample]
if not sample:
continue
predicate = None
for i, v in enumerate(sample):
p = data[dims[i].name] == ibis.literal(util.numpy_scalar_to_python(v))
if predicate is None:
predicate = p
else:
predicate &= p
if predicates is None:
predicates = predicate
else:
predicates |= predicate
return data if predicates is None else data.filter(predicates)
@classmethod
def aggregate(cls, dataset, dimensions, function, **kwargs):
import ibis.expr.operations
data = dataset.data
columns = [d.name for d in dataset.kdims if d in dimensions]
values = dataset.dimensions("value", label="name")
new = data[columns + values]
function = {
numpy.min: ibis.expr.operations.Min,
numpy.nanmin: ibis.expr.operations.Min,
numpy.max: ibis.expr.operations.Max,
numpy.nanmax: ibis.expr.operations.Max,
numpy.mean: ibis.expr.operations.Mean,
numpy.nanmean: ibis.expr.operations.Mean,
numpy.std: ibis.expr.operations.StandardDev,
numpy.nanstd: ibis.expr.operations.StandardDev,
numpy.sum: ibis.expr.operations.Sum,
numpy.nansum: ibis.expr.operations.Sum,
numpy.var: ibis.expr.operations.Variance,
numpy.nanvar: ibis.expr.operations.Variance,
len: ibis.expr.operations.Count,
}.get(function, function)
if len(dimensions):
selection = new.groupby(columns)
if function is numpy.count_nonzero:
aggregation = selection.aggregate(
**{
x: ibis.expr.operations.Count(new[x], where=new[x] != 0).to_expr()
for x in new.columns
if x not in columns
}
)
else:
aggregation = selection.aggregate(
**{
x: function(new[x]).to_expr()
for x in new.columns
if x not in columns
}
)
else:
aggregation = new.aggregate(
**{x: function(new[x]).to_expr() for x in new.columns}
)
dropped = [x for x in values if x not in data.columns]
return aggregation, dropped
@classmethod
@cached
def mask(cls, dataset, mask, mask_value=numpy.nan):
raise NotImplementedError('Mask is not implemented for IbisInterface.')
@classmethod
@cached
def dframe(cls, dataset, dimensions):
return dataset.data[dimensions].execute()
Interface.register(IbisInterface)
| [((134, 15, 134, 34), 'numpy.asarray', 'numpy.asarray', ({(134, 29, 134, 33): 'bins'}, {}), '(bins)', False, 'import numpy\n'), ((140, 15, 140, 29), 'numpy.isnan', 'numpy.isnan', ({(140, 27, 140, 28): 'b'}, {}), '(b)', False, 'import numpy\n'), ((199, 13, 199, 36), 'numpy.isscalar', 'numpy.isscalar', ({(199, 28, 199, 35): 'columns'}, {}), '(columns)', False, 'import numpy\n'), ((313, 39, 313, 58), 'numpy.dtype', 'numpy.dtype', ({(313, 51, 313, 57): '"""bool"""'}, {}), "('bool')", False, 'import numpy\n'), ((39, 48, 39, 67), 'ibis.client.find_backends', 'find_backends', ({(39, 62, 39, 66): 'data'}, {}), '(data)', False, 'from ibis.client import find_backends, validate_backends\n'), ((286, 57, 286, 79), 'numpy.isscalar', 'numpy.isscalar', ({(286, 72, 286, 78): 'values'}, {}), '(values)', False, 'import numpy\n'), ((314, 33, 314, 60), 'numpy.where', 'numpy.where', ({(314, 45, 314, 59): 'selection_mask'}, {}), '(selection_mask)', False, 'import numpy\n'), ((417, 27, 417, 80), 'ibis.expr.operations.Count', 'ibis.expr.operations.Count', (), '', False, 'import ibis\n')] |
S0Imyr/Projet-4 | chess/models/tournament.py | 1d93e125bc6e44bc560f3ffc9b11e14e35291c98 | # -*- coding: utf-8 -*-
"""
Handles the tournament logic
"""
import datetime
from chess.utils.utils import get_new_id
from chess.models.actors import Player
from chess.models.round import Round
TOURNAMENT_ID_WIDTH = 8
NB_ROUND = 4
NB_PLAYERS = 8
NB_MATCH = 4
class Tournament:
""" The class Tournament is the central piece of the models. """
last_tournament_id = "0" * TOURNAMENT_ID_WIDTH
def __init__(self, name, location, timer_type, description):
Tournament.last_tournament_id = get_new_id(Tournament.last_tournament_id, TOURNAMENT_ID_WIDTH)
self.tournament_id = Tournament.last_tournament_id
self.name = name
self.location = location
self.start_date = None
self.end_date = None
self.timer_type = timer_type
self.description = description
self.number_of_rounds = NB_ROUND
self.rounds = []
self.list_of_players = []
self.players_assigned = False
self.finished = False
def define_players(self, actors):
""" Defines the list of identifier of the players who join the tournament.
:param actors:
:return: None
"""
for num_player in range(NB_PLAYERS):
self.list_of_players.append(Player(actors[num_player],
self.tournament_id,
num_player))
def init_round(self, num_round):
""" Launches the round number "num_round".
:param num_round: number of the round played
:return: None
"""
tour = Round(num_round, self.tournament_id, self.list_of_players)
tour.start_date = datetime.date.today()
tour.rank_players()
tour.define_matches()
self.rounds.append(tour)
def register_round_results(self, num_round, winner):
""" Registers the results of the round.
:param num_round: the round number.
:param winner: the list of the winners.
:return: None.
"""
self.rounds[num_round].register_results(winner)
self.rounds[num_round].assign_points()
self.rounds[num_round].finished = True
self.rounds[num_round].memorize_opponents()
self.rounds[num_round].rank_players()
self.rounds[num_round].end_date = datetime.date.today()
def tournament_to_dict(self):
""" Converts the tournament into a dictionary
:return: dictionary of the tournament instance.
"""
string_attributes = ['tournament_id',
'name',
'location',
'timer_type',
'description',
'number_of_rounds',
'players_assigned']
serialized_tournament = {}
for attribute in string_attributes:
serialized_tournament[attribute] = getattr(self, attribute)
serialized_tournament['rounds'] = []
for r0und in self.rounds:
serialized_tournament['rounds'].append(r0und.round_to_dict())
serialized_tournament['list_of_players'] = []
for player in self.list_of_players:
serialized_tournament['list_of_players'].append(player.player_to_dict())
serialized_tournament['start_date'] = str(self.start_date)
serialized_tournament['end_date'] = str(self.end_date)
return serialized_tournament
def end_tournament(self):
""" Handles the end of the tournament.
Adds the tournament_id to the players list of tournaments.
Defines the attribute finished and the end date of the tournament.
"""
for player in self.list_of_players:
player.actor.list_of_tournaments_played.append(self.tournament_id)
self.finished = True
self.end_date = datetime.date.today()
| [((28, 40, 28, 102), 'chess.utils.utils.get_new_id', 'get_new_id', ({(28, 51, 28, 80): 'Tournament.last_tournament_id', (28, 82, 28, 101): 'TOURNAMENT_ID_WIDTH'}, {}), '(Tournament.last_tournament_id, TOURNAMENT_ID_WIDTH)', False, 'from chess.utils.utils import get_new_id\n'), ((59, 15, 59, 73), 'chess.models.round.Round', 'Round', ({(59, 21, 59, 30): 'num_round', (59, 32, 59, 50): 'self.tournament_id', (59, 52, 59, 72): 'self.list_of_players'}, {}), '(num_round, self.tournament_id, self.list_of_players)', False, 'from chess.models.round import Round\n'), ((60, 26, 60, 47), 'datetime.date.today', 'datetime.date.today', ({}, {}), '()', False, 'import datetime\n'), ((78, 42, 78, 63), 'datetime.date.today', 'datetime.date.today', ({}, {}), '()', False, 'import datetime\n'), ((116, 24, 116, 45), 'datetime.date.today', 'datetime.date.today', ({}, {}), '()', False, 'import datetime\n'), ((49, 40, 51, 58), 'chess.models.actors.Player', 'Player', ({(49, 47, 49, 65): 'actors[num_player]', (50, 47, 50, 65): 'self.tournament_id', (51, 47, 51, 57): 'num_player'}, {}), '(actors[num_player], self.tournament_id, num_player)', False, 'from chess.models.actors import Player\n')] |
astubenazy/vrops-metric-collection | set-config.py | c4e5b8d7058759aa5eded74cc619d1dedcbc821a | # !/usr/bin python
"""
#
# set-config - a small python program to setup the configuration environment for data-collect.py
# data-collect.py contain the python program to gather Metrics from vROps
# Author Sajal Debnath <[email protected]>
#
"""
# Importing the required modules
import json
import base64
import os,sys
# Getting the absolute path from where the script is being run
def get_script_path():
return os.path.dirname(os.path.realpath(sys.argv[0]))
def get_the_inputs():
adapterkind = raw_input("Please enter Adapter Kind: ")
resourceKind = raw_input("Please enter Resource Kind: ")
servername = raw_input("Enter enter Server IP/FQDN: ")
serveruid = raw_input("Please enter user id: ")
serverpasswd = raw_input("Please enter vRops password: ")
encryptedvar = base64.b64encode(serverpasswd)
maxsamples = raw_input("Please enter the maximum number of samples to collect: ")
keys_to_monitor = raw_input("Please enter the number of keys to monitor: ")
keys = []
for i in range(int(keys_to_monitor)):
keys.append(raw_input("Enter the key: "))
data = {}
if int(maxsamples) < 1:
maxsamples = 1
data["adapterKind"] = adapterkind
data["resourceKind"] = resourceKind
data["sampleno"] = int(maxsamples)
serverdetails = {}
serverdetails["name"] = servername
serverdetails["userid"] = serveruid
serverdetails["password"] = encryptedvar
data["server"] = serverdetails
data["keys"] = keys
return data
# Getting the path where config.json file should be kept
path = get_script_path()
fullpath = path+"/"+"config.json"
# Getting the data for the config.json file
final_data = get_the_inputs()
# Saving the data to config.json file
with open(fullpath, 'w') as outfile:
json.dump(final_data, outfile, sort_keys = True, indent = 2, separators=(',', ':'), ensure_ascii=False) | [((27, 19, 27, 49), 'base64.b64encode', 'base64.b64encode', ({(27, 36, 27, 48): 'serverpasswd'}, {}), '(serverpasswd)', False, 'import base64\n'), ((65, 4, 65, 107), 'json.dump', 'json.dump', (), '', False, 'import json\n'), ((19, 27, 19, 56), 'os.path.realpath', 'os.path.realpath', ({(19, 44, 19, 55): 'sys.argv[0]'}, {}), '(sys.argv[0])', False, 'import os, sys\n')] |
inmanta/inmanta-core | tests/test_app.py | ae2153d57f124d00ad1b58e6d4bc6818364be4a8 | """
Copyright 2018 Inmanta
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
Contact: [email protected]
"""
import os
import re
import signal
import subprocess
import sys
from subprocess import TimeoutExpired
from threading import Timer
import pytest
import inmanta.util
from inmanta import const
def get_command(
tmp_dir,
stdout_log_level=None,
log_file=None,
log_level_log_file=None,
timed=False,
dbport=None,
dbname="inmanta",
dbhost=None,
dbuser=None,
dbpass=None,
config_dir=None,
server_extensions=[],
version=False,
):
root_dir = tmp_dir.mkdir("root").strpath
log_dir = os.path.join(root_dir, "log")
state_dir = os.path.join(root_dir, "data")
for directory in [log_dir, state_dir]:
os.mkdir(directory)
config_file = os.path.join(root_dir, "inmanta.cfg")
if dbport is not None:
port = dbport
else:
port = inmanta.util.get_free_tcp_port()
with open(config_file, "w+", encoding="utf-8") as f:
f.write("[config]\n")
f.write("log-dir=" + log_dir + "\n")
f.write("state-dir=" + state_dir + "\n")
f.write("[database]\n")
f.write("port=" + str(port) + "\n")
f.write("name=" + dbname + "\n")
if dbhost:
f.write(f"host={dbhost}\n")
if dbuser:
f.write(f"username={dbuser}\n")
if dbpass:
f.write(f"password={dbpass}\n")
f.write("[server]\n")
f.write(f"enabled_extensions={', '.join(server_extensions)}\n")
args = [sys.executable, "-m", "inmanta.app"]
if stdout_log_level:
args.append("-" + "v" * stdout_log_level)
if log_file:
log_file = os.path.join(log_dir, log_file)
args += ["--log-file", log_file]
if log_file and log_level_log_file:
args += ["--log-file-level", str(log_level_log_file)]
if timed:
args += ["--timed-logs"]
if config_dir:
args += ["--config-dir", config_dir]
if version:
args += ["--version"]
args += ["-c", config_file, "server"]
return (args, log_dir)
def do_run(args, env={}, cwd=None):
baseenv = os.environ.copy()
baseenv.update(env)
process = subprocess.Popen(args, cwd=cwd, stdout=subprocess.PIPE, stderr=subprocess.PIPE, env=baseenv)
return process
def convert_to_ascii(text):
return [line for line in text.decode("ascii").split("\n") if line != ""]
def do_kill(process, killtime=3, termtime=2):
def do_and_log(func, msg):
def w():
print(msg)
func()
return w
t1 = Timer(killtime, do_and_log(process.kill, "killed process"))
t2 = Timer(termtime, do_and_log(process.terminate, "terminated process"))
t1.start()
t2.start()
out, err = process.communicate()
t1.cancel()
t2.cancel()
stdout = convert_to_ascii(out)
stderr = convert_to_ascii(err)
return (stdout, stderr, process.returncode)
def run_without_tty(args, env={}, killtime=3, termtime=2):
process = do_run(args, env)
return do_kill(process, killtime, termtime)
def run_with_tty(args, killtime=3, termtime=2):
"""Could not get code for actual tty to run stable in docker, so we are faking it """
env = {const.ENVIRON_FORCE_TTY: "true"}
return run_without_tty(args, env=env, killtime=killtime, termtime=termtime)
def get_timestamp_regex():
return r"[\d]{4}\-[\d]{2}\-[\d]{2} [\d]{2}\:[\d]{2}\:[\d]{2}\,[\d]{3}"
def get_compiled_regexes(regexes, timed):
result = []
for regex in regexes:
if timed:
regex = get_timestamp_regex() + " " + regex
compiled_regex = re.compile(regex)
result.append(compiled_regex)
return result
def is_colorama_package_available():
try:
import colorama # noqa: F401
except ModuleNotFoundError:
return False
return True
def test_verify_that_colorama_package_is_not_present():
"""
The colorama package turns the colored characters in TTY-based terminal into uncolored characters.
As such, this package should not be present.
"""
assert not is_colorama_package_available()
@pytest.mark.parametrize_any(
"log_level, timed, with_tty, regexes_required_lines, regexes_forbidden_lines",
[
(
3,
False,
False,
[r"[a-z.]*[ ]*INFO[\s]+Starting server endpoint", r"[a-z.]*[ ]*DEBUG[\s]+Starting Server Rest Endpoint"],
[],
),
(
2,
False,
False,
[r"[a-z.]*[ ]*INFO[\s]+Starting server endpoint"],
[r"[a-z.]*[ ]*DEBUG[\s]+Starting Server Rest Endpoint"],
),
(
3,
False,
True,
[
r"\x1b\[32m[a-z.]*[ ]*INFO[\s]*\x1b\[0m \x1b\[34mStarting server endpoint",
r"\x1b\[36m[a-z.]*[ ]*DEBUG[\s]*\x1b\[0m \x1b\[34mStarting Server Rest Endpoint",
],
[],
),
(
2,
False,
True,
[r"\x1b\[32m[a-z.]*[ ]*INFO[\s]*\x1b\[0m \x1b\[34mStarting server endpoint"],
[r"\x1b\[36m[a-z.]*[ ]*DEBUG[\s]*\x1b\[0m \x1b\[34mStarting Server Rest Endpoint"],
),
(
3,
True,
False,
[r"[a-z.]*[ ]*INFO[\s]+Starting server endpoint", r"[a-z.]*[ ]*DEBUG[\s]+Starting Server Rest Endpoint"],
[],
),
(
2,
True,
False,
[r"[a-z.]*[ ]*INFO[\s]+Starting server endpoint"],
[r"[a-z.]*[ ]*DEBUG[\s]+Starting Server Rest Endpoint"],
),
(
3,
True,
True,
[
r"\x1b\[32m[a-z.]*[ ]*INFO[\s]*\x1b\[0m \x1b\[34mStarting server endpoint",
r"\x1b\[36m[a-z.]*[ ]*DEBUG[\s]*\x1b\[0m \x1b\[34mStarting Server Rest Endpoint",
],
[],
),
(
2,
True,
True,
[r"\x1b\[32m[a-z.]*[ ]*INFO[\s]*\x1b\[0m \x1b\[34mStarting server endpoint"],
[r"\x1b\[36m[a-z.]*[ ]*DEBUG[\s]*\x1b\[0m \x1b\[34mStarting Server Rest Endpoint"],
),
],
)
@pytest.mark.timeout(20)
def test_no_log_file_set(tmpdir, log_level, timed, with_tty, regexes_required_lines, regexes_forbidden_lines):
if is_colorama_package_available() and with_tty:
pytest.skip("Colorama is present")
(args, log_dir) = get_command(tmpdir, stdout_log_level=log_level, timed=timed)
if with_tty:
(stdout, _, _) = run_with_tty(args)
else:
(stdout, _, _) = run_without_tty(args)
log_file = "server.log"
assert log_file not in os.listdir(log_dir)
assert len(stdout) != 0
check_logs(stdout, regexes_required_lines, regexes_forbidden_lines, timed)
@pytest.mark.parametrize_any(
"log_level, with_tty, regexes_required_lines, regexes_forbidden_lines",
[
(
3,
False,
[
r"[a-z.]*[ ]*INFO[\s]+[a-x\.A-Z]*[\s]Starting server endpoint",
r"[a-z.]*[ ]*DEBUG[\s]+[a-x\.A-Z]*[\s]Starting Server Rest Endpoint",
],
[],
),
(
2,
False,
[r"[a-z.]*[ ]*INFO[\s]+[a-x\.A-Z]*[\s]Starting server endpoint"],
[r"[a-z.]*[ ]*DEBUG[\s]+[a-x\.A-Z]*[\s]Starting Server Rest Endpoint"],
),
(
3,
True,
[
r"[a-z.]*[ ]*INFO[\s]+[a-x\.A-Z]*[\s]Starting server endpoint",
r"[a-z.]*[ ]*DEBUG[\s]+[a-x\.A-Z]*[\s]Starting Server Rest Endpoint",
],
[],
),
(
2,
True,
[r"[a-z.]*[ ]*INFO[\s]+[a-x\.A-Z]*[\s]Starting server endpoint"],
[r"[a-z.]*[ ]*DEBUG[\s]+[a-x\.A-Z]*[\s]Starting Server Rest Endpoint"],
),
],
)
@pytest.mark.timeout(60)
def test_log_file_set(tmpdir, log_level, with_tty, regexes_required_lines, regexes_forbidden_lines):
if is_colorama_package_available() and with_tty:
pytest.skip("Colorama is present")
log_file = "server.log"
(args, log_dir) = get_command(tmpdir, stdout_log_level=log_level, log_file=log_file, log_level_log_file=log_level)
if with_tty:
(stdout, _, _) = run_with_tty(args)
else:
(stdout, _, _) = run_without_tty(args)
assert log_file in os.listdir(log_dir)
log_file = os.path.join(log_dir, log_file)
with open(log_file, "r") as f:
log_lines = f.readlines()
check_logs(log_lines, regexes_required_lines, regexes_forbidden_lines, timed=True)
check_logs(stdout, [], regexes_required_lines, timed=True)
check_logs(stdout, [], regexes_required_lines, timed=False)
def check_logs(log_lines, regexes_required_lines, regexes_forbidden_lines, timed):
compiled_regexes_requires_lines = get_compiled_regexes(regexes_required_lines, timed)
compiled_regexes_forbidden_lines = get_compiled_regexes(regexes_forbidden_lines, timed)
for line in log_lines:
print(line)
for regex in compiled_regexes_requires_lines:
if not any(regex.match(line) for line in log_lines):
pytest.fail("Required pattern was not found in log lines: %s" % (regex.pattern,))
for regex in compiled_regexes_forbidden_lines:
if any(regex.match(line) for line in log_lines):
pytest.fail("Forbidden pattern found in log lines: %s" % (regex.pattern,))
def test_check_shutdown():
process = do_run([sys.executable, os.path.join(os.path.dirname(__file__), "miniapp.py")])
# wait for handler to be in place
try:
process.communicate(timeout=2)
except TimeoutExpired:
pass
process.send_signal(signal.SIGUSR1)
out, err, code = do_kill(process, killtime=3, termtime=1)
print(out, err)
assert code == 0
assert "----- Thread Dump ----" in out
assert "STOP" in out
assert "SHUTDOWN COMPLETE" in out
def test_check_bad_shutdown():
print([sys.executable, os.path.join(os.path.dirname(__file__), "miniapp.py"), "bad"])
process = do_run([sys.executable, os.path.join(os.path.dirname(__file__), "miniapp.py"), "bad"])
out, err, code = do_kill(process, killtime=5, termtime=2)
print(out, err)
assert code == 3
assert "----- Thread Dump ----" in out
assert "STOP" not in out
assert "SHUTDOWN COMPLETE" not in out
assert not err
def test_startup_failure(tmpdir, postgres_db, database_name):
(args, log_dir) = get_command(
tmpdir,
dbport=postgres_db.port,
dbname=database_name,
dbhost=postgres_db.host,
dbuser=postgres_db.user,
dbpass=postgres_db.password,
server_extensions=["badplugin"],
)
pp = ":".join(sys.path)
# Add a bad module
extrapath = os.path.join(os.path.dirname(__file__), "data", "bad_module_path")
(stdout, stderr, code) = run_without_tty(args, env={"PYTHONPATH": pp + ":" + extrapath}, killtime=15, termtime=10)
assert "inmanta ERROR Server setup failed" in stdout
assert (
"inmanta.server.protocol.SliceStartupException: "
"Slice badplugin.badslice failed to start because: Too bad, this plugin is broken"
) in stdout
assert code == 4
def test_compiler_exception_output(snippetcompiler):
snippetcompiler.setup_for_snippet(
"""
entity Test:
number attr
end
implement Test using std::none
o = Test(attr="1234")
"""
)
output = (
"""Could not set attribute `attr` on instance `__config__::Test (instantiated at ./main.cf:8)` """
"""(reported in Construct(Test) (./main.cf:8))
caused by:
Invalid value '1234', expected Number (reported in Construct(Test) (./main.cf:8))
"""
)
def exec(*cmd):
process = do_run([sys.executable, "-m", "inmanta.app"] + list(cmd), cwd=snippetcompiler.project_dir)
out, err = process.communicate(timeout=30)
assert out.decode() == ""
assert err.decode() == output
exec("compile")
exec("export", "-J", "out.json")
@pytest.mark.timeout(15)
@pytest.mark.parametrize_any(
"cmd", [(["-X", "compile"]), (["compile", "-X"]), (["compile"]), (["export", "-X"]), (["-X", "export"]), (["export"])]
)
def test_minus_x_option(snippetcompiler, cmd):
snippetcompiler.setup_for_snippet(
"""
entity Test:
nuber attr
end
"""
)
process = do_run([sys.executable, "-m", "inmanta.app"] + cmd, cwd=snippetcompiler.project_dir)
out, err = process.communicate(timeout=30)
assert out.decode() == ""
if "-X" in cmd:
assert "inmanta.ast.TypeNotFoundException: could not find type nuber in namespace" in str(err)
else:
assert "inmanta.ast.TypeNotFoundException: could not find type nuber in namespace" not in str(err)
@pytest.mark.timeout(20)
def test_warning_config_dir_option_on_server_command(tmpdir):
non_existing_dir = os.path.join(tmpdir, "non_existing_dir")
assert not os.path.isdir(non_existing_dir)
(args, _) = get_command(tmpdir, stdout_log_level=3, config_dir=non_existing_dir)
(stdout, _, _) = run_without_tty(args)
stdout = "".join(stdout)
assert "Starting server endpoint" in stdout
assert f"Config directory {non_existing_dir} doesn't exist" in stdout
@pytest.mark.timeout(20)
def test_warning_min_c_option_file_doesnt_exist(snippetcompiler, tmpdir):
non_existing_config_file = os.path.join(tmpdir, "non_existing_config_file")
snippetcompiler.setup_for_snippet(
"""
entity Test:
number attr
end
"""
)
config_options = ["-c", non_existing_config_file, "-vvv"]
args = [sys.executable, "-m", "inmanta.app"] + config_options + ["compile"]
process = do_run(args, cwd=snippetcompiler.project_dir)
out, err = process.communicate(timeout=30)
assert process.returncode == 0
out = out.decode()
err = err.decode()
all_output = out + err
assert "Starting compile" in all_output
assert "Compile done" in all_output
assert f"Config file {non_existing_config_file} doesn't exist" in all_output
@pytest.mark.parametrize_any(
"with_tty, version_should_be_shown, regexes_required_lines, regexes_forbidden_lines",
[
(False, True, [r"Inmanta Service Orchestrator", r"Compiler version: ", r"Extensions:", r"\s*\* core:"], []),
(True, True, [r"Inmanta Service Orchestrator", r"Compiler version: ", r"Extensions:", r"\s*\* core:"], []),
(False, False, [], [r"Inmanta Service Orchestrator", r"Compiler version: ", r"Extensions:", r"\s*\* core:"]),
(True, False, [], [r"Inmanta Service Orchestrator", r"Compiler version: ", r"Extensions:", r"\s*\* core:"]),
],
)
@pytest.mark.timeout(20)
def test_version_argument_is_set(tmpdir, with_tty, version_should_be_shown, regexes_required_lines, regexes_forbidden_lines):
(args, log_dir) = get_command(tmpdir, version=version_should_be_shown)
if with_tty:
(stdout, _, _) = run_with_tty(args, killtime=15, termtime=10)
else:
(stdout, _, _) = run_without_tty(args, killtime=15, termtime=10)
assert len(stdout) != 0
check_logs(stdout, regexes_required_lines, regexes_forbidden_lines, False)
def test_init_project(tmpdir):
args = [sys.executable, "-m", "inmanta.app", "project", "init", "-n", "test-project", "-o", tmpdir, "--default"]
(stdout, stderr, return_code) = run_without_tty(args, killtime=15, termtime=10)
test_project_path = os.path.join(tmpdir, "test-project")
assert return_code == 0
assert os.path.exists(test_project_path)
(stdout, stderr, return_code) = run_without_tty(args, killtime=15, termtime=10)
assert return_code != 0
assert len(stderr) == 1
assert "already exists" in stderr[0]
| [((169, 1, 235, 1), 'pytest.mark.parametrize_any', 'pytest.mark.parametrize_any', ({(170, 4, 170, 81): '"""log_level, timed, with_tty, regexes_required_lines, regexes_forbidden_lines"""', (171, 4, 234, 5): "[(3, False, False, ['[a-z.]*[ ]*INFO[\\\\s]+Starting server endpoint',\n '[a-z.]*[ ]*DEBUG[\\\\s]+Starting Server Rest Endpoint'], []), (2, False,\n False, ['[a-z.]*[ ]*INFO[\\\\s]+Starting server endpoint'], [\n '[a-z.]*[ ]*DEBUG[\\\\s]+Starting Server Rest Endpoint']), (3, False, \n True, [\n '\\\\x1b\\\\[32m[a-z.]*[ ]*INFO[\\\\s]*\\\\x1b\\\\[0m \\\\x1b\\\\[34mStarting server endpoint'\n ,\n '\\\\x1b\\\\[36m[a-z.]*[ ]*DEBUG[\\\\s]*\\\\x1b\\\\[0m \\\\x1b\\\\[34mStarting Server Rest Endpoint'\n ], []), (2, False, True, [\n '\\\\x1b\\\\[32m[a-z.]*[ ]*INFO[\\\\s]*\\\\x1b\\\\[0m \\\\x1b\\\\[34mStarting server endpoint'\n ], [\n '\\\\x1b\\\\[36m[a-z.]*[ ]*DEBUG[\\\\s]*\\\\x1b\\\\[0m \\\\x1b\\\\[34mStarting Server Rest Endpoint'\n ]), (3, True, False, ['[a-z.]*[ ]*INFO[\\\\s]+Starting server endpoint',\n '[a-z.]*[ ]*DEBUG[\\\\s]+Starting Server Rest Endpoint'], []), (2, True, \n False, ['[a-z.]*[ ]*INFO[\\\\s]+Starting server endpoint'], [\n '[a-z.]*[ ]*DEBUG[\\\\s]+Starting Server Rest Endpoint']), (3, True, True,\n [\n '\\\\x1b\\\\[32m[a-z.]*[ ]*INFO[\\\\s]*\\\\x1b\\\\[0m \\\\x1b\\\\[34mStarting server endpoint'\n ,\n '\\\\x1b\\\\[36m[a-z.]*[ ]*DEBUG[\\\\s]*\\\\x1b\\\\[0m \\\\x1b\\\\[34mStarting Server Rest Endpoint'\n ], []), (2, True, True, [\n '\\\\x1b\\\\[32m[a-z.]*[ ]*INFO[\\\\s]*\\\\x1b\\\\[0m \\\\x1b\\\\[34mStarting server endpoint'\n ], [\n '\\\\x1b\\\\[36m[a-z.]*[ ]*DEBUG[\\\\s]*\\\\x1b\\\\[0m \\\\x1b\\\\[34mStarting Server Rest Endpoint'\n ])]"}, {}), "(\n 'log_level, timed, with_tty, regexes_required_lines, regexes_forbidden_lines'\n , [(3, False, False, ['[a-z.]*[ ]*INFO[\\\\s]+Starting server endpoint',\n '[a-z.]*[ ]*DEBUG[\\\\s]+Starting Server Rest Endpoint'], []), (2, False,\n False, ['[a-z.]*[ ]*INFO[\\\\s]+Starting server endpoint'], [\n '[a-z.]*[ ]*DEBUG[\\\\s]+Starting Server Rest Endpoint']), (3, False, \n True, [\n '\\\\x1b\\\\[32m[a-z.]*[ ]*INFO[\\\\s]*\\\\x1b\\\\[0m \\\\x1b\\\\[34mStarting server endpoint'\n ,\n '\\\\x1b\\\\[36m[a-z.]*[ ]*DEBUG[\\\\s]*\\\\x1b\\\\[0m \\\\x1b\\\\[34mStarting Server Rest Endpoint'\n ], []), (2, False, True, [\n '\\\\x1b\\\\[32m[a-z.]*[ ]*INFO[\\\\s]*\\\\x1b\\\\[0m \\\\x1b\\\\[34mStarting server endpoint'\n ], [\n '\\\\x1b\\\\[36m[a-z.]*[ ]*DEBUG[\\\\s]*\\\\x1b\\\\[0m \\\\x1b\\\\[34mStarting Server Rest Endpoint'\n ]), (3, True, False, ['[a-z.]*[ ]*INFO[\\\\s]+Starting server endpoint',\n '[a-z.]*[ ]*DEBUG[\\\\s]+Starting Server Rest Endpoint'], []), (2, True, \n False, ['[a-z.]*[ ]*INFO[\\\\s]+Starting server endpoint'], [\n '[a-z.]*[ ]*DEBUG[\\\\s]+Starting Server Rest Endpoint']), (3, True, True,\n [\n '\\\\x1b\\\\[32m[a-z.]*[ ]*INFO[\\\\s]*\\\\x1b\\\\[0m \\\\x1b\\\\[34mStarting server endpoint'\n ,\n '\\\\x1b\\\\[36m[a-z.]*[ ]*DEBUG[\\\\s]*\\\\x1b\\\\[0m \\\\x1b\\\\[34mStarting Server Rest Endpoint'\n ], []), (2, True, True, [\n '\\\\x1b\\\\[32m[a-z.]*[ ]*INFO[\\\\s]*\\\\x1b\\\\[0m \\\\x1b\\\\[34mStarting server endpoint'\n ], [\n '\\\\x1b\\\\[36m[a-z.]*[ ]*DEBUG[\\\\s]*\\\\x1b\\\\[0m \\\\x1b\\\\[34mStarting Server Rest Endpoint'\n ])])", False, 'import pytest\n'), ((236, 1, 236, 24), 'pytest.mark.timeout', 'pytest.mark.timeout', ({(236, 21, 236, 23): '(20)'}, {}), '(20)', False, 'import pytest\n'), ((252, 1, 286, 1), 'pytest.mark.parametrize_any', 'pytest.mark.parametrize_any', ({(253, 4, 253, 74): '"""log_level, with_tty, 
regexes_required_lines, regexes_forbidden_lines"""', (254, 4, 285, 5): "[(3, False, [\n '[a-z.]*[ ]*INFO[\\\\s]+[a-x\\\\.A-Z]*[\\\\s]Starting server endpoint',\n '[a-z.]*[ ]*DEBUG[\\\\s]+[a-x\\\\.A-Z]*[\\\\s]Starting Server Rest Endpoint'],\n []), (2, False, [\n '[a-z.]*[ ]*INFO[\\\\s]+[a-x\\\\.A-Z]*[\\\\s]Starting server endpoint'], [\n '[a-z.]*[ ]*DEBUG[\\\\s]+[a-x\\\\.A-Z]*[\\\\s]Starting Server Rest Endpoint']\n ), (3, True, [\n '[a-z.]*[ ]*INFO[\\\\s]+[a-x\\\\.A-Z]*[\\\\s]Starting server endpoint',\n '[a-z.]*[ ]*DEBUG[\\\\s]+[a-x\\\\.A-Z]*[\\\\s]Starting Server Rest Endpoint'],\n []), (2, True, [\n '[a-z.]*[ ]*INFO[\\\\s]+[a-x\\\\.A-Z]*[\\\\s]Starting server endpoint'], [\n '[a-z.]*[ ]*DEBUG[\\\\s]+[a-x\\\\.A-Z]*[\\\\s]Starting Server Rest Endpoint'])]"}, {}), "(\n 'log_level, with_tty, regexes_required_lines, regexes_forbidden_lines',\n [(3, False, [\n '[a-z.]*[ ]*INFO[\\\\s]+[a-x\\\\.A-Z]*[\\\\s]Starting server endpoint',\n '[a-z.]*[ ]*DEBUG[\\\\s]+[a-x\\\\.A-Z]*[\\\\s]Starting Server Rest Endpoint'],\n []), (2, False, [\n '[a-z.]*[ ]*INFO[\\\\s]+[a-x\\\\.A-Z]*[\\\\s]Starting server endpoint'], [\n '[a-z.]*[ ]*DEBUG[\\\\s]+[a-x\\\\.A-Z]*[\\\\s]Starting Server Rest Endpoint']\n ), (3, True, [\n '[a-z.]*[ ]*INFO[\\\\s]+[a-x\\\\.A-Z]*[\\\\s]Starting server endpoint',\n '[a-z.]*[ ]*DEBUG[\\\\s]+[a-x\\\\.A-Z]*[\\\\s]Starting Server Rest Endpoint'],\n []), (2, True, [\n '[a-z.]*[ ]*INFO[\\\\s]+[a-x\\\\.A-Z]*[\\\\s]Starting server endpoint'], [\n '[a-z.]*[ ]*DEBUG[\\\\s]+[a-x\\\\.A-Z]*[\\\\s]Starting Server Rest Endpoint'])])", False, 'import pytest\n'), ((287, 1, 287, 24), 'pytest.mark.timeout', 'pytest.mark.timeout', ({(287, 21, 287, 23): '(60)'}, {}), '(60)', False, 'import pytest\n'), ((401, 1, 401, 24), 'pytest.mark.timeout', 'pytest.mark.timeout', ({(401, 21, 401, 23): '(15)'}, {}), '(15)', False, 'import pytest\n'), ((402, 1, 404, 1), 'pytest.mark.parametrize_any', 'pytest.mark.parametrize_any', ({(403, 4, 403, 9): '"""cmd"""', (403, 11, 403, 122): "[['-X', 'compile'], ['compile', '-X'], ['compile'], ['export', '-X'], ['-X',\n 'export'], ['export']]"}, {}), "('cmd', [['-X', 'compile'], ['compile', '-X'], [\n 'compile'], ['export', '-X'], ['-X', 'export'], ['export']])", False, 'import pytest\n'), ((423, 1, 423, 24), 'pytest.mark.timeout', 'pytest.mark.timeout', ({(423, 21, 423, 23): '(20)'}, {}), '(20)', False, 'import pytest\n'), ((434, 1, 434, 24), 'pytest.mark.timeout', 'pytest.mark.timeout', ({(434, 21, 434, 23): '(20)'}, {}), '(20)', False, 'import pytest\n'), ((459, 1, 467, 1), 'pytest.mark.parametrize_any', 'pytest.mark.parametrize_any', ({(460, 4, 460, 88): '"""with_tty, version_should_be_shown, regexes_required_lines, regexes_forbidden_lines"""', (461, 4, 466, 5): "[(False, True, ['Inmanta Service Orchestrator', 'Compiler version: ',\n 'Extensions:', '\\\\s*\\\\* core:'], []), (True, True, [\n 'Inmanta Service Orchestrator', 'Compiler version: ', 'Extensions:',\n '\\\\s*\\\\* core:'], []), (False, False, [], [\n 'Inmanta Service Orchestrator', 'Compiler version: ', 'Extensions:',\n '\\\\s*\\\\* core:']), (True, False, [], ['Inmanta Service Orchestrator',\n 'Compiler version: ', 'Extensions:', '\\\\s*\\\\* core:'])]"}, {}), "(\n 'with_tty, version_should_be_shown, regexes_required_lines, regexes_forbidden_lines'\n , [(False, True, ['Inmanta Service Orchestrator', 'Compiler version: ',\n 'Extensions:', '\\\\s*\\\\* core:'], []), (True, True, [\n 'Inmanta Service Orchestrator', 'Compiler version: ', 'Extensions:',\n '\\\\s*\\\\* core:'], []), (False, False, [], [\n 'Inmanta 
Service Orchestrator', 'Compiler version: ', 'Extensions:',\n '\\\\s*\\\\* core:']), (True, False, [], ['Inmanta Service Orchestrator',\n 'Compiler version: ', 'Extensions:', '\\\\s*\\\\* core:'])])", False, 'import pytest\n'), ((468, 1, 468, 24), 'pytest.mark.timeout', 'pytest.mark.timeout', ({(468, 21, 468, 23): '(20)'}, {}), '(20)', False, 'import pytest\n'), ((49, 14, 49, 43), 'os.path.join', 'os.path.join', ({(49, 27, 49, 35): 'root_dir', (49, 37, 49, 42): '"""log"""'}, {}), "(root_dir, 'log')", False, 'import os\n'), ((50, 16, 50, 46), 'os.path.join', 'os.path.join', ({(50, 29, 50, 37): 'root_dir', (50, 39, 50, 45): '"""data"""'}, {}), "(root_dir, 'data')", False, 'import os\n'), ((53, 18, 53, 55), 'os.path.join', 'os.path.join', ({(53, 31, 53, 39): 'root_dir', (53, 41, 53, 54): '"""inmanta.cfg"""'}, {}), "(root_dir, 'inmanta.cfg')", False, 'import os\n'), ((95, 14, 95, 31), 'os.environ.copy', 'os.environ.copy', ({}, {}), '()', False, 'import os\n'), ((97, 14, 97, 106), 'subprocess.Popen', 'subprocess.Popen', (), '', False, 'import subprocess\n'), ((299, 15, 299, 46), 'os.path.join', 'os.path.join', ({(299, 28, 299, 35): 'log_dir', (299, 37, 299, 45): 'log_file'}, {}), '(log_dir, log_file)', False, 'import os\n'), ((425, 23, 425, 63), 'os.path.join', 'os.path.join', ({(425, 36, 425, 42): 'tmpdir', (425, 44, 425, 62): '"""non_existing_dir"""'}, {}), "(tmpdir, 'non_existing_dir')", False, 'import os\n'), ((436, 31, 436, 79), 'os.path.join', 'os.path.join', ({(436, 44, 436, 50): 'tmpdir', (436, 52, 436, 78): '"""non_existing_config_file"""'}, {}), "(tmpdir, 'non_existing_config_file')", False, 'import os\n'), ((482, 24, 482, 60), 'os.path.join', 'os.path.join', ({(482, 37, 482, 43): 'tmpdir', (482, 45, 482, 59): '"""test-project"""'}, {}), "(tmpdir, 'test-project')", False, 'import os\n'), ((484, 11, 484, 44), 'os.path.exists', 'os.path.exists', ({(484, 26, 484, 43): 'test_project_path'}, {}), '(test_project_path)', False, 'import os\n'), ((52, 8, 52, 27), 'os.mkdir', 'os.mkdir', ({(52, 17, 52, 26): 'directory'}, {}), '(directory)', False, 'import os\n'), ((80, 19, 80, 50), 'os.path.join', 'os.path.join', ({(80, 32, 80, 39): 'log_dir', (80, 41, 80, 49): 'log_file'}, {}), '(log_dir, log_file)', False, 'import os\n'), ((148, 25, 148, 42), 're.compile', 're.compile', ({(148, 36, 148, 41): 'regex'}, {}), '(regex)', False, 'import re\n'), ((239, 8, 239, 42), 'pytest.skip', 'pytest.skip', ({(239, 20, 239, 41): '"""Colorama is present"""'}, {}), "('Colorama is present')", False, 'import pytest\n'), ((247, 27, 247, 46), 'os.listdir', 'os.listdir', ({(247, 38, 247, 45): 'log_dir'}, {}), '(log_dir)', False, 'import os\n'), ((290, 8, 290, 42), 'pytest.skip', 'pytest.skip', ({(290, 20, 290, 41): '"""Colorama is present"""'}, {}), "('Colorama is present')", False, 'import pytest\n'), ((298, 23, 298, 42), 'os.listdir', 'os.listdir', ({(298, 34, 298, 41): 'log_dir'}, {}), '(log_dir)', False, 'import os\n'), ((360, 29, 360, 54), 'os.path.dirname', 'os.path.dirname', ({(360, 45, 360, 53): '__file__'}, {}), '(__file__)', False, 'import os\n'), ((426, 15, 426, 46), 'os.path.isdir', 'os.path.isdir', ({(426, 29, 426, 45): 'non_existing_dir'}, {}), '(non_existing_dir)', False, 'import os\n'), ((314, 12, 314, 93), 'pytest.fail', 'pytest.fail', ({(314, 24, 314, 92): "('Required pattern was not found in log lines: %s' % (regex.pattern,))"}, {}), "('Required pattern was not found in log lines: %s' % (regex.\n pattern,))", False, 'import pytest\n'), ((317, 12, 317, 86), 'pytest.fail', 'pytest.fail', ({(317, 24, 
317, 85): "('Forbidden pattern found in log lines: %s' % (regex.pattern,))"}, {}), "('Forbidden pattern found in log lines: %s' % (regex.pattern,))", False, 'import pytest\n'), ((321, 51, 321, 76), 'os.path.dirname', 'os.path.dirname', ({(321, 67, 321, 75): '__file__'}, {}), '(__file__)', False, 'import os\n'), ((337, 40, 337, 65), 'os.path.dirname', 'os.path.dirname', ({(337, 56, 337, 64): '__file__'}, {}), '(__file__)', False, 'import os\n'), ((338, 51, 338, 76), 'os.path.dirname', 'os.path.dirname', ({(338, 67, 338, 75): '__file__'}, {}), '(__file__)', False, 'import os\n')] |
yosho-18/AtCoder | AtC_Beg_Con_021-030/ABC027/C.py | 50f6d5c92a01792552c31ac912ce1cd557b06fb0 | n = int(input())
row = 0
for i in range(100):
if 2 ** i <= n <= 2 ** (i + 1) - 1:
row = i
break
def seki(k, n):
for _ in range(n):
k = 4 * k + 2
return k
k = 0
if row % 2 != 0:
k = 2
cri = seki(k, row // 2)
if n < cri:
print("Aoki")
else:
print("Takahashi")
else:
k = 1
cri = seki(k, row // 2)
if n < cri:
print("Takahashi")
else:
print("Aoki")
| [] |
Desi-Boyz/cod4x-server-B3-configs | extplugins/codvote.py | 03a323d7ea293efe1831ed315001391b9aaf532a | # CoDVote plugin for BigBrotherBot(B3) (www.bigbrotherbot.net)
# Copyright (C) 2015 ph03n1x
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA
#
# Changelog:
# v1.0.1 - Fixed a vote staying "in progress" when the requirements for the vote were not met.
# v1.0.2 - Added "!vote maps" to show what maps can be called into vote.
# - Fixed issue where person who called vote needed to vote as well. Changed to automatic yes vote.
__version__ = '1.0.2'
__author__ = 'ph03n1x'
import b3, threading
import b3.plugin
import b3.events
class CodvotePlugin(b3.plugin.Plugin):
adminPlugin = None
_vote = None # Stores which vote is currently in progress
_value = None # Stores the value of the vote
_votetime = 30 # Time before a vote will be canceled for not passing
_aVotes = {} # All votes allowed. Imported from "votes" section in config
_aMaps = {} # All vote allowed maps. Imported from "votemaps" section in config
_amt_yes = [] # Amount of players who voted yes. Checked against amount of players in game
_amt_no = []
_allplayers = [] # Amount of players in game
_mapRequested = None # Stores which map is being voted for
_kickRequested = None # Stores which player will be kicked if vote passed
_default_messages = {
        'tovote': '^7Use ^2!yes ^7or ^2!no ^7to vote',
'map': "Map vote in progress: Change map to ^3$s^7?",
'nextmap': "Next map vote in progress. Change next map to ^3$s^7?",
'kick': "Kick vote in progress: Kick ^2$s^7?",
'maprotate': "Rotate map vote in progress. Go to next map?",
'maprestart': "Maprestart vote in progress. Restart current map?",
'friendlyfire': "Friendlyfire vote in progress. Change friendlyfire mode to ^2$s^7?",
'killcam': "Killcam vote in progress. Turn killcam ^2$s^7?",
'scorelimit': "Scorelimit vote in progress. Change score limit to ^2$s^7?",
'timelimit': "Timelimit vote in progress. Change time limit to ^2$s^7?",
'roundlength': "Round length vote in progress. Change round length to ^2$s^7?",
'roundlimit': "Round limit vote in progress. Change round limit to ^2$s^7?",
}
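
    # The $s placeholders are filled via B3's getMessage(name, params) - see
    # sendBroadcast() below. Server owners can override any of these defaults
    # from a "messages" section in the plugin config file.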
def onStartup(self):
self.adminPlugin = self.console.getPlugin('admin')
if not self.adminPlugin:
self.error('Could not find admin plugin')
return
# Register commands
if 'commands' in self.config.sections():
for cmd in self.config.options('commands'):
level = self.config.get('commands', cmd)
sp = cmd.split('-')
alias = None
if len(sp) == 2:
cmd, alias = sp
func = self.getCmd(cmd)
if func:
self.adminPlugin.registerCommand(self, cmd, level, func, alias)
        # Re-register these built-in commands so this plugin's own handlers are used for them
self.adminPlugin.registerCommand(self, 'nextmap', 1, self.cmd_nextmap, 'nm')
self.adminPlugin.registerCommand(self, 'maprotate', 20, self.cmd_maprotate, None)
self.adminPlugin.registerCommand(self, 'allvotes', 1, self.cmd_allvotes, None)
# Register events
self.registerEvent('EVT_GAME_EXIT', self.onGameEnd)
def onLoadConfig(self):
# Load settings section
        try:
            self._votetime = self.config.getint('settings', 'votetime')
        except Exception:
            self.debug('Unable to get [votetime] from settings. Using default: %s' % self._votetime)
# Load votemaps section
if self.config.has_section('votemaps'):
for (mapname, consolename) in self.config.items('votemaps'):
if mapname:
self._aMaps[mapname] = consolename
self.debug('Successfully entered maps for voting: %s' % self._aMaps)
# Load votes section
if self.config.has_section('votes'):
adLvl = {'guest': 0,
'user': 1,
'reg': 2,
'mod': 20,
'admin': 40,
'fulladmin': 60,
'senioradmin': 80,
'superadmin': 100}
for (entry, value) in self.config.items('votes'):
try:
value = int(value)
self._aVotes[entry.lower()] = value
except ValueError:
self._aVotes[entry.lower()] = adLvl[value]
self.debug('Allowed votes are: %s' % self._aVotes)
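
    # A hypothetical config snippet matching the sections read above (B3 XML
    # format; the map and level names are illustrative only):
    #
    #   <configuration plugin="codvote">
    #       <settings name="settings">
    #           <set name="votetime">30</set>
    #       </settings>
    #       <settings name="votemaps">
    #           <set name="crossfire">mp_crossfire</set>
    #           <set name="crash">mp_crash</set>
    #       </settings>
    #       <settings name="votes">
    #           <set name="map">user</set>
    #           <set name="kick">mod</set>
    #           <set name="maprotate">reg</set>
    #       </settings>
    #   </configuration>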
def getCmd(self, cmd):
cmd = 'cmd_%s' % cmd
if hasattr(self, cmd):
func = getattr(self, cmd)
return func
return None
######################### VOTE TIMING ##############################
    def voteTimer(self):
        # Schedule the warning 10 seconds before the deadline so the vote
        # ends exactly _votetime seconds after it starts.
        t1 = threading.Timer((self._votetime - 10), self.voteMessage)
        t1.start()
def voteMessage(self):
if self._vote:
self.console.say('^110 seconds until vote end!')
t2 = threading.Timer(10, self.denyVote)
t2.start()
######################### MAP HANDLING ##############################
def _search(self, maplist, partial):
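        """Return the vote-map names whose friendly or console name contains
        `partial`, e.g. _search({'crossfire': 'mp_crossfire'}, 'cross') returns
        ['crossfire'] (example data, not a shipped map list)."""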
a = []
for mapname, consolename in maplist.iteritems():
if partial in mapname:
a.append(mapname)
elif partial in consolename:
a.append(mapname)
return a
def mapvote(self, client, wantedMap):
# Find if map is in allowed list
match = self._search(self._aMaps, wantedMap)
if len(match) == 1:
self._mapRequested = match[0]
self._value = match[0]
return True
        elif len(match) > 1:
            client.message('^1ABORTED! ^7Multiple matches: %s' % ', '.join(match))
            return False
        else:
            client.message('^1ABORTED! ^7No maps matching your request')
            return False
############### NEXTMAP FUNCTIONING ################
def onGameEnd(self, event):
"""
        Handle EVT_GAME_EXIT: apply a passed nextmap vote once the current map ends.
"""
if self._mapRequested:
self.confirmMap()
self._mapRequested = None
############### CONFIRM VOTES ######################
def confirmVote(self):
self.console.say('^3Vote passed!^7')
if self._vote == 'map':
self.confirmMap()
elif self._vote == 'nextmap':
self.debug('nextmap vote passed. Params already stored')
elif self._vote == 'kick':
self.confirmKick()
elif self._vote == 'maprotate':
if self._mapRequested:
self.confirmMap()
else:
self.console.rotateMap()
elif self._vote == 'maprestart':
self.confirmMaprestart()
elif self._vote == 'friendlyfire':
self.confirmFriendlyFire()
elif self._vote == 'killcam':
self.confirmKillCam()
elif self._vote == 'scorelimit':
self.confirmScoreLimit()
elif self._vote == 'timelimit':
self.confirmTimeLimit()
elif self._vote == 'roundlength':
self.confirmRoundLength()
elif self._vote == 'roundlimit':
self.confirmRoundLimit()
else:
self.error('Unable to commit. Vote: %s, Value: %s' % (self._vote, self._value))
self._vote = None
self._value = None
self._amt_no = []
self._amt_yes = []
self._allplayers = []
def denyVote(self):
if self._vote:
self.console.say('^3Vote failed!')
self._vote = None
self._value = None
self._amt_no = []
self._amt_yes = []
self._allplayers = []
def confirmKick(self):
# Note - to kick someone we need: client.kick(reason, keyword, admin, silent=True/False, data)
s = self._kickRequested
self.debug('Kick vote passed. Kicking %s' % s.name)
s.kick('Voted against', '', None, True, '')
self._kickRequested = None
def confirmMap(self):
# This will cycle to next map when needed.
self.console.write('map %s' % self._aMaps[self._mapRequested])
self._mapRequested = None
def confirmMaprestart(self):
# This will restart the current map
self.console.write('fast_restart')
def confirmFriendlyFire(self):
# This will toggle friendly fire on and off
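        # scr_team_fftype values on stock CoD4 (assumed): 0 = off, 1 = on,
        # 2 = reflect damage, 3 = shared damage.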
setting = self._value
if not isinstance(setting, int):
if self._value == 'on':
setting = 1
elif self._value == 'off':
setting = 0
else:
self.debug('Unknown wanted setting for Friendlyfire. Toggling to next mode')
now = self.console.getCvar('scr_team_fftype').getInt()
if now >= 1:
setting = 0
elif now == 0:
setting = 1
self.console.setCvar('scr_team_fftype', int(setting))
def confirmKillCam(self):
# rcon for killcam: scr_game_allowkillcam - 0 or 1
setting = self._value
if self._value == 'on':
setting = 1
elif self._value == 'off':
setting = 0
if not isinstance(setting, int):
try:
setting = int(setting)
except ValueError:
now = self.console.getCvar('scr_game_allowkillcam').getInt()
self.debug('Setting being voted for is not valid. Toggling to next mode. Killcam currently: %s' % now)
if now == 0:
setting = 1
else:
setting = 0
self.console.setCvar('scr_game_allowkillcam', int(setting))
def confirmScoreLimit(self):
# CVAR to write is scr_<gametype>_scorelimit <number>
setting = self._value
gt = self.getGameType()
if not isinstance(setting, int):
try:
setting = int(setting)
except ValueError:
self.debug('ERROR: Could not set new scorelimit. Voted value is not integer')
return
cparams = 'scr_' + gt + '_scorelimit'
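        # e.g. this writes "scr_war_scorelimit" when g_gametype is "war"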
self.console.setCvar(cparams, setting)
def confirmTimeLimit(self):
setting = self._value
gt = self.getGameType()
if not isinstance(setting, int):
try:
setting = int(setting)
except ValueError:
self.debug('ERROR: Could not set new timelimit. Voted value is not integer')
return
cparams = 'scr_' + gt + '_timelimit'
self.console.setCvar(cparams, setting)
def confirmRoundLength(self):
setting = self._value
amodes = ['ctf', 'sd', 're', 'bas', 'dom']
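        # round-based gametypes only; other modes have no round length to change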
gt = self.getGameType()
if not isinstance(setting, int):
try:
setting = int(setting)
except ValueError:
self.debug('ERROR: Could not set new round length. Voted value is not integer')
return
if gt in amodes:
cparams = 'scr_' + gt + '_roundlength'
self.console.setCvar(cparams, setting)
def confirmRoundLimit(self):
setting = self._value
amodes = ['ctf', 'sd', 're', 'bas', 'dom']
gt = self.getGameType()
if not isinstance(setting, int):
try:
setting = int(setting)
except ValueError:
self.debug('Could not set new round limit. Voted value is not integer')
return
if gt in amodes:
cparams = 'scr_' + gt + '_roundlimit'
self.console.setCvar(cparams, setting)
else:
self.debug('Could not set round limit as gametype do not have rounds')
def getGameType(self):
gametype = self.console.getCvar('g_gametype').getString()
if gametype:
return gametype
else:
self.debug('Error getting gametype. Response is %s' % gametype)
return False
def sendBroadcast(self):
        # This will broadcast the vote message to the server.
a = self._value
        if a in ('maprestart', 'maprotate'):
            self.console.say(self.getMessage(self._vote))
        else:
            param = {'s': a}
self.console.say(self.getMessage(self._vote, param))
self.console.say(self.getMessage('tovote'))
def aquireCmdLock2(self, cmd, client, delay, all=True):
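        # Simple anti-spam gate: admins (level >= 20) always pass; everyone
        # else must wait for the lock window after the command was last used.
        # Note: the `delay` argument is currently unused; a fixed 5-second
        # window is applied below.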
if client.maxLevel >= 20:
return True
elif cmd.time + 5 <= self.console.time():
return True
else:
return False
def checkIfAllowed(self, client, voteType):
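        # Permission gate: the caller's B3 level must meet the minimum level
        # configured for this vote type in self._aVotes.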
        return client.maxLevel >= self._aVotes[voteType]
#################################################################################
# COMMANDS #
#################################################################################
def cmd_vote(self, data, client, cmd=None):
"""\
!vote <setting> <value> - vote to change setting or cvar on server.
"""
# Check if vote already in progress
if self._vote:
client.message('^1ERROR^7: Vote already in progress')
return
# Check if we have enough data for vote
data = data.split()
        if len(data) == 1 and data[0] in ('maprotate', 'maprestart', 'maps'):
            self._vote = data[0]
            self._value = data[0]
        elif len(data) == 2:
            self._vote = data[0]
            self._value = data[1]
else:
client.message('^1ERROR^7: Invalid usage. Type ^2!help vote ^7for info')
return
# Check if player is asking what maps can be voted on
if self._vote == 'maps':
v1 = self.checkIfAllowed(client, 'map')
v2 = self.checkIfAllowed(client, 'nextmap')
if v1 or v2:
cmd.sayLoudOrPM(client, 'Vote enabled maps: ^2%s' % (('^7, ^2').join(self._aMaps.keys())))
self._vote = None
self._value = None
return
else:
client.message('^2You do not have permission to call map votes')
self._vote = None
self._value = None
return
# Check if enough players in game to vote and store present players. Only players present at vote call can vote
playersInGame = 0
self._allplayers = []
for c in self.console.clients.getList():
if c.team != b3.TEAM_SPEC:
playersInGame += 1
self._allplayers.insert(0, c)
if playersInGame <= 1 and client.maxLevel < 100:
client.message('^1ABORT^7: Not enough players in game to vote.')
self._vote = None
return
# Check if type of vote is allowed
if self._vote not in self._aVotes:
client.message('Vote type not allowed. Use ^2!allvotes ^7for available votes.')
self._vote = None
return
# Check if player has permission to call vote type
v = self.checkIfAllowed(client, self._vote)
if not v:
client.message('You do not have permission to call this vote')
self._vote = None
return
# Get further info for proper processing
if self._vote == 'map' or self._vote == 'nextmap':
q = self.mapvote(client, self._value)
if not q:
                self.debug('Vote aborted: map vote validation failed')
self._vote = None
return
if self._vote == 'kick':
self._kickRequested = self.adminPlugin.findClientPrompt(self._value, client)
if self._kickRequested:
if self._kickRequested.maxLevel >= 20:
client.message('^1ABORTED^7: Cannot vote to kick admin!')
self._vote = None
self._value = None
self._kickRequested = None
return
self._value = self._kickRequested.name
else:
self.debug('could not get the person to kick')
self._vote = None
self._value = None
self._kickRequested = None
return
# Seems like vote is ok. Broadcast to server
self.sendBroadcast()
# Start timer
self.voteTimer()
# Set person who called vote as yes vote
self._amt_yes.insert(0, client)
if len(self._amt_yes) > (len(self._allplayers) / 2):
self.confirmVote()
def cmd_allvotes(self, data, client, cmd=None):
"""\
Show all the votes you are allowed to call
"""
allowed = []
for k in self._aVotes.keys():
if client.maxLevel >= self._aVotes[k]:
allowed.insert(0, k)
        if allowed:
            client.message('Allowed votes are: %s' % ', '.join(sorted(allowed)))
        else:
            client.message('You are not allowed to call any votes')
def cmd_yes(self, data, client, cmd=None):
"""\
Vote yes to the vote in progress
"""
# Check if there is a vote in progress
if not self._vote:
client.message('No vote in progress')
return
# Check if player is allowed to vote
if client not in self._allplayers:
client.message('Sorry, you cannot enter current vote')
return
# Check if the player already voted. If not, register vote
if client in self._amt_yes or client in self._amt_no:
client.message('Are you drunk? You already voted!')
return
        else:
            self._amt_yes.insert(0, client)
# Let player know that vote is registered
client.message('^3Your vote has been entered')
# Check if majority of players voted already
vYes = len(self._amt_yes)
vPass = len(self._allplayers) / 2
if vYes > vPass:
self.confirmVote()
def cmd_no(self, data, client=None, cmd=None):
"""\
Vote NO to the current vote
"""
# Check if there is a vote in progress
if not self._vote:
client.message('No vote in progress')
return
# Check if player is allowed to vote
if client not in self._allplayers:
client.message('Sorry, you cannot enter current vote')
return
# Check if the player already voted
if client in self._amt_yes or client in self._amt_no:
client.message('Are you drunk? You already voted!')
return
        else:
            self._amt_no.insert(0, client)
# Let player know that vote is registered
client.message('^3Your vote has been entered')
# Check if majority of players voted
vNo = len(self._amt_no)
vPass = len(self._allplayers) / 2
if vNo > vPass:
self.denyVote()
def cmd_nextmap(self, data, client=None, cmd=None):
"""\
- list the next map in rotation
"""
if not self.aquireCmdLock2(cmd, client, 60, True):
client.message('^7Do not spam commands')
return
if self._mapRequested:
cmd.sayLoudOrPM(client, '^7Next Map: ^2%s' % self._mapRequested.title())
return
mapname = self.console.getNextMap()
if mapname:
cmd.sayLoudOrPM(client, '^7Next Map: ^2%s' % mapname)
else:
client.message('^1Error:^7 could not get map list')
def cmd_maprotate(self, data, client, cmd=None):
"""\
Cycle to next map in rotation
"""
if self._mapRequested:
self.confirmMap()
else:
self.console.rotateMap()
def cmd_veto(self, data, client, cmd=None):
"""\
Cancel a vote in progress
"""
if self._vote:
client.message('^3Vote canceled')
self.denyVote()
elif not self._vote:
client.message('^3No vote in progress')
| [((128, 13, 128, 68), 'threading.Timer', 'threading.Timer', ({(128, 30, 128, 48): 'self._votetime - 5', (128, 51, 128, 67): 'self.voteMessage'}, {}), '(self._votetime - 5, self.voteMessage)', False, 'import b3, threading\n'), ((134, 17, 134, 51), 'threading.Timer', 'threading.Timer', ({(134, 33, 134, 35): '10', (134, 37, 134, 50): 'self.denyVote'}, {}), '(10, self.denyVote)', False, 'import b3, threading\n')] |
bianan/cfl | utils.py | e09043d213c7330d5410e27ba90c943d4323dbe8 | # Copyright 2018 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Utility functions for manipulating variables in Federated personalization."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import tensorflow as tf
TRAIN_NAME = "Train"
VALIDATION_NAME = "Validation"
TEST_NAME = "Test"
LOSS_NAME = "loss"
LOSS_SUMMARY_NAME = "perplexity"
# Vars type.
VARS_TYPE_ALL = "all"
VARS_TYPE_SHARED = "shared"
VARS_TYPE_PERSONAL = "personal"
def get_train_name_scope(var_scope):
return "/".join((var_scope, TRAIN_NAME))
def get_validation_name_scope(var_scope):
return "/".join((var_scope, VALIDATION_NAME))
def get_test_name_scope(var_scope):
return "/".join((var_scope, TEST_NAME))
def get_model_name_scope(var_scope):
return "/".join((var_scope, "Model"))
def get_update_name_scope(var_scope):
return "/".join((var_scope, "Update"))
def get_var_dict(vars_):
"""Gets a dict of var base_name (e.g. 'w') to the variable."""
var_dict = {}
for v in vars_:
var_base_name = get_base_name(v)
var_dict[var_base_name] = v
return var_dict
def get_var_value_ops(var_dict):
return {k: v.value() for k, v in var_dict.items()}
def get_base_name(var):
return var.name.split("/")[-1].split(":")[0]
def get_update_name(var, var_scope):
var_base_name = get_base_name(var)
var_update_name = "update_%s_%s" % (var_scope, var_base_name)
return var_update_name
def get_update_placeholder_name(var):
var_base_name = get_base_name(var)
placeholder_name = "placeholder_%s" % var_base_name
return placeholder_name
def generate_update_ops(vars_):
"""Generates update ops and placeholders.
For each var, it generates a placeholder to feed in the new values.
Then it takes the mean of the inputs along dimension 0.
Args:
vars_: Vars for which the update ops will be generated.
Returns:
update_ops: A list of update ops.
dict_update_placeholders: A dict of var base name to its update-placeholder.
"""
update_ops = []
dict_update_placeholders = {}
for v in vars_:
# For every var in the scope, add a placeholder to feed in the new values.
# The placeholder may need to hold multiple values, this happens
# when updating the server from many clients.
var_in_shape = [None] + v.shape.as_list()
var_in_name = get_update_placeholder_name(v)
var_in = tf.placeholder(v.dtype, shape=var_in_shape, name=var_in_name)
var_in_mean = tf.reduce_mean(var_in, 0)
update_op = v.assign(var_in_mean)
update_ops.append(update_op)
dict_update_placeholders[get_base_name(v)] = var_in
return update_ops, dict_update_placeholders
def print_vars_on_clients(clients, sess):
for c in clients.values():
print("client %d:" % c.id)
print(sess.run(c.read_ops_all_vars))
def add_prefix(prefix, name):
"""Adds prefix to name."""
return "/".join((prefix, name))
def add_suffix(suffix, name):
"""Adds subfix to name."""
return "/".join((name, suffix))
def get_attribute_dict(class_instance):
"""Gets a dict of attributeds of a class instance."""
# first start by grabbing the Class items
attribute_dict = dict((x, y)
for x, y in class_instance.__class__.__dict__.items()
if x[:2] != "__")
# then update the class items with the instance items
attribute_dict.update(class_instance.__dict__)
return attribute_dict
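# Minimal usage sketch (illustrative, not part of the original module): wires
# generate_update_ops() into a TF1-style session and averages two fake
# "client" updates into a shared variable. The variable name "w" and the fed
# values are assumptions made for this demo.
def _demo_generate_update_ops():
  v = tf.get_variable("w", shape=[2], initializer=tf.zeros_initializer())
  update_ops, placeholders = generate_update_ops([v])
  with tf.Session() as sess:
    sess.run(tf.global_variables_initializer())
    # Feed two client updates; the op assigns their element-wise mean.
    sess.run(update_ops,
             feed_dict={placeholders["w"]: [[1.0, 2.0], [3.0, 4.0]]})
    return sess.run(v)  # -> [2., 3.]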
| [((105, 13, 105, 74), 'tensorflow.placeholder', 'tf.placeholder', (), '', True, 'import tensorflow as tf\n'), ((106, 18, 106, 43), 'tensorflow.reduce_mean', 'tf.reduce_mean', ({(106, 33, 106, 39): 'var_in', (106, 41, 106, 42): '0'}, {}), '(var_in, 0)', True, 'import tensorflow as tf\n')] |
mueller/mysql-shell | unittest/scripts/py_devapi/scripts/mysqlx_collection_remove.py | 29bafc5692bd536a12c4e41c54cb587375fe52cf | # Assumptions: validate_crud_functions available
# Assumes __uripwd is defined as <user>:<pwd>@<host>:<plugin_port>
from __future__ import print_function
from mysqlsh import mysqlx
mySession = mysqlx.get_session(__uripwd)
ensure_schema_does_not_exist(mySession, 'js_shell_test')
schema = mySession.create_schema('js_shell_test')
# Creates a test collection and inserts data into it
collection = schema.create_collection('collection1')
result = collection.add({"_id": "3C514FF38144B714E7119BCF48B4CA01", "name": 'jack', "age": 17, "gender": 'male'}).execute()
result = collection.add({"_id": "3C514FF38144B714E7119BCF48B4CA02", "name": 'adam', "age": 15, "gender": 'male'}).execute()
result = collection.add({"_id": "3C514FF38144B714E7119BCF48B4CA03", "name": 'brian', "age": 14, "gender": 'male'}).execute()
result = collection.add({"_id": "3C514FF38144B714E7119BCF48B4CA04", "name": 'alma', "age": 13, "gender": 'female'}).execute()
result = collection.add({"_id": "3C514FF38144B714E7119BCF48B4CA05", "name": 'carol', "age": 14, "gender": 'female'}).execute()
result = collection.add({"_id": "3C514FF38144B714E7119BCF48B4CA06", "name": 'donna', "age": 16, "gender": 'female'}).execute()
result = collection.add({"_id": "3C514FF38144B714E7119BCF48B4CA07", "name": 'angel', "age": 14, "gender": 'male'}).execute()
# ------------------------------------------------
# collection.remove Unit Testing: Dynamic Behavior
# ------------------------------------------------
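# Each chained call returns a CRUD handler that exposes only the next legal
# operations; validate_crud_functions() (assumed available per the header
# note above) asserts exactly that set at every step.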
#@ CollectionRemove: valid operations after remove
crud = collection.remove('some_condition')
validate_crud_functions(crud, ['sort', 'limit', 'bind', 'execute'])
#@ CollectionRemove: valid operations after sort
crud = crud.sort(['name'])
validate_crud_functions(crud, ['limit', 'bind', 'execute'])
#@ CollectionRemove: valid operations after limit
crud = crud.limit(1)
validate_crud_functions(crud, ['bind', 'execute'])
#@ CollectionRemove: valid operations after bind
crud = collection.remove('name = :data').bind('data', 'donna')
validate_crud_functions(crud, ['bind', 'execute'])
#@ CollectionRemove: valid operations after execute
result = crud.execute()
validate_crud_functions(crud, ['limit', 'bind', 'execute'])
#@ Reusing CRUD with binding
print('Deleted donna:', result.affected_items_count, '\n')
result=crud.bind('data', 'alma').execute()
print('Deleted alma:', result.affected_items_count, '\n')
# ----------------------------------------------
# collection.remove Unit Testing: Error Conditions
# ----------------------------------------------
#@# CollectionRemove: Error conditions on remove
crud = collection.remove()
crud = collection.remove(' ')
crud = collection.remove(5)
crud = collection.remove('test = "2')
#@# CollectionRemove: Error conditions sort
crud = collection.remove('some_condition').sort()
crud = collection.remove('some_condition').sort(5)
crud = collection.remove('some_condition').sort([])
crud = collection.remove('some_condition').sort(['name', 5])
crud = collection.remove('some_condition').sort('name', 5)
#@# CollectionRemove: Error conditions on limit
crud = collection.remove('some_condition').limit()
crud = collection.remove('some_condition').limit('')
#@# CollectionRemove: Error conditions on bind
crud = collection.remove('name = :data and age > :years').bind()
crud = collection.remove('name = :data and age > :years').bind(5, 5)
crud = collection.remove('name = :data and age > :years').bind('another', 5)
#@# CollectionRemove: Error conditions on execute
crud = collection.remove('name = :data and age > :years').execute()
crud = collection.remove('name = :data and age > :years').bind('years', 5).execute()
# ---------------------------------------
# collection.remove Unit Testing: Execution
# ---------------------------------------
#@ CollectionRemove: remove under condition
#! [CollectionRemove: remove under condition]
result = collection.remove('age = 15').execute()
print('Affected Rows:', result.affected_items_count, '\n')
docs = collection.find().execute().fetch_all()
print('Records Left:', len(docs), '\n')
#! [CollectionRemove: remove under condition]
#@ CollectionRemove: remove with binding
#! [CollectionRemove: remove with binding]
result = collection.remove('gender = :heorshe').limit(2).bind('heorshe', 'male').execute()
print('Affected Rows:', result.affected_items_count, '\n')
#! [CollectionRemove: remove with binding]
docs = collection.find().execute().fetch_all()
print('Records Left:', len(docs), '\n')
#@ CollectionRemove: full remove
#! [CollectionRemove: full remove]
result = collection.remove('1').execute()
print('Affected Rows:', result.affected_items_count, '\n')
docs = collection.find().execute().fetch_all()
print('Records Left:', len(docs), '\n')
#! [CollectionRemove: full remove]
# Cleanup
mySession.drop_schema('js_shell_test')
mySession.close()
| [] |
aditya270520/indian-flag | indian-flag.py | 65851eefdd229cca150d2bbe8fa61c9e06e120e0 | import turtle
turtle.bgcolor('black')
wn=turtle.Screen()
tr=turtle.Turtle()
move=1
tr.speed("fastest")
for i in range(360):
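    # Each pass redraws the whole figure rotated one degree further
    # (move grows by 1 per iteration), producing a spinning animation.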
tr.write("ADITYA",'false','center',font=('Showcard gothic',50))
tr.penup()
tr.goto(-200,100)
tr.pendown()
tr.color("orange")
tr.right(move)
tr.forward(100)
tr.penup()
tr.color("white")
tr.pendown()
tr.right(30)
tr.forward(60)
tr.pendown()
tr.color("light green")
tr.left(10)
tr.forward(50)
tr.right(70)
tr.penup()
tr.pendown()
tr.color('light blue')
tr.forward(50)
tr.color('light green')
tr.pu()
tr.pd()
tr.color("light blue")
tr.forward(100)
tr.color('brown')
tr.forward(200)
tr.pu()
tr.pd()
tr.color('light green')
tr.circle(2)
tr.color('light blue')
tr.circle(4)
tr.pu()
tr.fd(20)
tr.pd()
tr.circle(6)
tr.pu()
tr.fd(40)
tr.pd()
tr.circle(8)
tr.pu()
tr.fd(80)
tr.pd()
tr.circle(10)
tr.pu()
tr.fd(120)
tr.pd()
tr.circle(20)
tr.color('yellow')
tr.circle(10)
tr.pu()
tr.pd()
tr.color('white')
tr.forward(150)
tr.color('red')
tr.fd(50)
    tr.color('blue')
    tr.begin_fill()
    tr.penup()
    tr.home()
    tr.end_fill()  # close the fill region; begin_fill() without end_fill() never draws
move=move+1
tr.penup()
tr.forward(50)
turtle.done() | [((2, 0, 2, 23), 'turtle.bgcolor', 'turtle.bgcolor', ({(2, 15, 2, 22): '"""black"""'}, {}), "('black')", False, 'import turtle\n'), ((3, 3, 3, 18), 'turtle.Screen', 'turtle.Screen', ({}, {}), '()', False, 'import turtle\n'), ((4, 3, 4, 18), 'turtle.Turtle', 'turtle.Turtle', ({}, {}), '()', False, 'import turtle\n'), ((75, 0, 75, 13), 'turtle.done', 'turtle.done', ({}, {}), '()', False, 'import turtle\n')] |
guiqiqi/leaf | leaf/rbac/model/__init__.py | 79e34f4b8fba8c6fd208b5a3049103dca2064ab5 | """Users, groups, and related authentication database models."""
from .group import Group
from .user import User
from .user import UserIndex
from .auth import Authentication
from .accesspoint import AccessPoint
| [] |
m-brito/Neps-Academy | programacao basica/7.py | 0d962fb921d74c5f97f10fcdd8a0f464c0ccdb14 | bino = int(input())
cino = int(input())
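# Parity puzzle: if the sum of the two numbers is even "Bino" wins,
# otherwise "Cino" does.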
if (bino+cino)%2==0:
print("Bino")
else:
print("Cino")
| [] |
CalmScout/LeetCode | update_readme.py | 62720934b5906e6b255c7e91d3a6fa1d713e4391 | """
Script updates `README.md` with respect to files at ./easy and ./medium folders.
"""
import os
curr_dir = os.path.dirname(__file__)
with open(os.path.join(curr_dir, "README.md"), 'w') as readme:
readme.write("# LeetCode\nDeliberate practice in coding.\n")
langs = [l for l in os.listdir(curr_dir) if os.path.isdir(os.path.join(curr_dir, l)) and l[0] != '.']
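    # Each top-level, non-hidden directory is treated as a language folder
    # containing "easy" and "medium" subfolders of solutions.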
for lang in langs:
readme.write("## {}\n".format(lang))
readme.write("### Easy\n")
        # Sort numerically by problem id (file names follow "<id>_<slug>").
        easy = sorted(os.listdir(f"{curr_dir}/{lang}/easy"), key=lambda x: int(x.split("_")[0]))
        easy = [x.split("_")[0] for x in easy]
easy_solved = ""
for el in easy:
easy_solved += "{}, ".format(el)
readme.write(easy_solved[:-2] + "\n")
readme.write("### Medium\n")
        medium = sorted(os.listdir(f"{curr_dir}/{lang}/medium"), key=lambda x: int(x.split("_")[0]))
        medium = [x.split("_")[0] for x in medium]
medium_solved = ""
for el in medium:
medium_solved += "{}, ".format(el)
readme.write(medium_solved[:-2] + '\n')
| [((6, 11, 6, 36), 'os.path.dirname', 'os.path.dirname', ({(6, 27, 6, 35): '__file__'}, {}), '(__file__)', False, 'import os\n'), ((7, 10, 7, 45), 'os.path.join', 'os.path.join', ({(7, 23, 7, 31): 'curr_dir', (7, 33, 7, 44): '"""README.md"""'}, {}), "(curr_dir, 'README.md')", False, 'import os\n'), ((9, 24, 9, 44), 'os.listdir', 'os.listdir', ({(9, 35, 9, 43): 'curr_dir'}, {}), '(curr_dir)', False, 'import os\n'), ((13, 22, 13, 59), 'os.listdir', 'os.listdir', ({(13, 33, 13, 58): 'f"""{curr_dir}/{lang}/easy"""'}, {}), "(f'{curr_dir}/{lang}/easy')", False, 'import os\n'), ((20, 24, 20, 63), 'os.listdir', 'os.listdir', ({(20, 35, 20, 62): 'f"""{curr_dir}/{lang}/medium"""'}, {}), "(f'{curr_dir}/{lang}/medium')", False, 'import os\n'), ((9, 62, 9, 87), 'os.path.join', 'os.path.join', ({(9, 75, 9, 83): 'curr_dir', (9, 85, 9, 86): 'l'}, {}), '(curr_dir, l)', False, 'import os\n')] |
Jay-Iam/retriever | scripts/biotimesql.py | 26e321cdb86fcb4cb78184c4bf5c0c6902a97d2c | # -*- coding: utf-8 -*-
#retriever
import csv
from pkg_resources import parse_version
from retriever.lib.models import Table
from retriever.lib.templates import Script
try:
from retriever.lib.defaults import VERSION
try:
from retriever.lib.tools import open_fr, open_fw, open_csvw
except ImportError:
from retriever.lib.scripts import open_fr, open_fw
except ImportError:
from retriever import open_fr, open_fw, VERSION
class main(Script):
def __init__(self, **kwargs):
Script.__init__(self, **kwargs)
self.title = "Commercial Fisheries Monthly Trade Data by Product, Country/Association"
self.name = "biotimesql"
self.retriever_minimum_version = "2.2.0"
self.urls = {
"sql_file": "https://zenodo.org/record/2602708/files/BioTIMESQL02_04_2018.sql?download=1",
}
self.version = "1.0.1"
self.ref = "https://zenodo.org/record/1095628#.WskN7dPwYyn"
self.citation = "Dornelas M, Antão LH, Moyes F, et al. BioTIME: A database of biodiversity time series for the Anthropocene. Global Ecology & Biogeography. 2018; 00:1 - 26. https://doi.org/10.1111/geb.12729."
self.description = "The BioTIME database has species identities and abundances in ecological assemblages through time."
self.keywords = ["Time series", "Anthropocene", "Global"]
self.licenses = [{"name": "CC BY 4.0"}]
self.encoding = "latin1"
if parse_version(VERSION) <= parse_version("2.0.0"):
self.shortname = self.name
self.name = self.title
self.tags = self.keywords
def download(self, engine=None, debug=False):
Script.download(self, engine, debug)
engine = self.engine
original_sql_file = "BioTIMESQL02_04_2018.sql"
engine.download_file(self.urls["sql_file"], original_sql_file)
sql_data = open_fr(self.engine.format_filename(original_sql_file))
set_open = False
csv_writer = None
csv_file = None
table_name = None
NULL = None
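        # SQL dumps contain bare NULLs; defining NULL here lets the eval()
        # below translate them to Python None.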
for line in sql_data:
table_indicator = "-- Table structure for table "
if line.startswith(table_indicator):
st = line[len(table_indicator):].replace("`", "")
table_name = st.strip()
                if set_open:
                    # Close the previous table's CSV before starting a new one.
                    csv_file.close()
                out_file = "{name}.csv".format(name=table_name)
                csv_file = open_fw(engine.format_filename(out_file))
                csv_writer = csv.writer(csv_file, quoting=csv.QUOTE_ALL)
                set_open = True
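            # Each "INSERT INTO `t` VALUES (...),(...);" line carries every
            # row for the table; split it into individual value tuples below.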
if line.startswith("INSERT INTO `{table_name}`".format(table_name=table_name)):
row_val = line[line.index("VALUES (") + 8:-3]
table_rows = row_val.replace("\r\n","").split("),(")
for i_row in table_rows:
v = eval('[' + str(i_row) + ']')
csv_writer.writerows([v])
if csv_file:
csv_file.close()
# Create abundance table
table = Table("ID_ABUNDANCE", delimiter=",", header_rows=0, contains_pk=False)
table.columns = [
("ID_ABUNDANCE", ("int",)),
("ABUNDANCE_TYPE", ("char", "100")),
]
engine.table = table
engine.create_table()
engine.insert_data_from_file(engine.format_filename("abundance.csv"))
# Create allrawdata table
table = Table("allrawdata", delimiter=",", header_rows=0, contains_pk=False)
table.columns = [
("ID_ALL_RAW_DATA", ("int",)),
("ABUNDANCE", ("double",)),
("BIOMASS", ("double",)),
("ID_SPECIES", ("int",)),
("SAMPLE_DESC", ("char", 200)),
("PLOT", ("char", 150)),
("LATITUDE", ("double",)),
("LONGITUDE", ("double",)),
("DEPTH", ("double",)),
("DAY", ("int",)),
("MONTH", ("int",)),
("YEAR", ("int",)),
("STUDY_ID", ("int",)),
]
engine.table = table
engine.create_table()
engine.insert_data_from_file(engine.format_filename("allrawdata.csv"))
# Create biomass table
table = Table("biomass", delimiter=",", header_rows=0, contains_pk=False)
table.columns = [("ID_BIOMASS", ("int",)), ("BIOMASS_TYPE", ("char", "100"))]
engine.table = table
engine.create_table()
engine.insert_data_from_file(engine.format_filename("biomass.csv"))
# Create citation1 table
table = Table("citation1", delimiter=",", header_rows=0, contains_pk=False)
table.columns = [
("ID_CITATION1", ("int",)),
("STUDY_ID", ("int",)),
("CITATION_LINE", ("char",)),
]
engine.table = table
engine.create_table()
engine.insert_data_from_file(engine.format_filename("citation1.csv"))
# Create contacts table
table = Table("contacts", delimiter=",", header_rows=0, contains_pk=False)
table.columns = [
("ID_CONTACTS", ("int",)),
("STUDY_ID", ("int",)),
("CONTACT_1", ("char", 500)),
("CONTACT_2", ("char", 500)),
("CONT_1_MAIL", ("char", 60)),
("CONT_2_MAIL", ("char", 60)),
("LICENSE", ("char", 200)),
("WEB_LINK", ("char", 200)),
("DATA_SOURCE", ("char", 250)),
]
engine.table = table
engine.create_table()
engine.insert_data_from_file(engine.format_filename("contacts.csv"))
# Create countries table
table = Table("countries", delimiter=",", header_rows=0, contains_pk=False)
table.columns = [("COUNT_ID", ("int",)), ("COUNTRY_NAME", ("char", 200))]
engine.table = table
engine.create_table()
engine.insert_data_from_file(engine.format_filename("countries.csv"))
# Create curation table
table = Table("curation", delimiter=",", header_rows=0, contains_pk=False)
table.columns = [
("ID_CURATION", ("int",)),
("STUDY_ID", ("int",)),
("LINK_ID", ("int",)),
("COMMENTS", ("char",)),
("DATE_STUDY_ADDED", ("char", 50)),
]
engine.table = table
engine.create_table()
engine.insert_data_from_file(engine.format_filename("curation.csv"))
# Create datasets table
table = Table("datasets", delimiter=",", header_rows=0, contains_pk=False)
table.columns = [
("ID_DATASETS", ("int",)),
("STUDY_ID", ("int",)),
("TAXA", ("char", 50)),
("ORGANISMS", ("char", 200)),
("TITLE", ("char",800)),
("AB_BIO", ("char", 2)),
("HAS_PLOT", ("char", 10)),
("DATA_POINTS", ("char",)),
("START_YEAR", ("char",)),
("END_YEAR", ("char",)),
("CENT_LAT", ("double",)),
("CENT_LONG", ("double",)),
("NUMBER_OF_SPECIES", ("char",)),
("NUMBER_OF_SAMPLES", ("char",)),
("NUMBER_LAT_LONG", ("char",)),
("TOTAL", ("char",)),
("GRAIN_SIZE_TEXT", ("char",)),
("GRAIN_SQ_KM", ("double",)),
("AREA_SQ_KM", ("double",)),
("AB_TYPE", ("char", )),
("BIO_TYPE", ("char",)),
("SAMPLE_TYPE", ("char",)),
]
engine.table = table
engine.create_table()
engine.insert_data_from_file(engine.format_filename("datasets.csv"))
# Create downloads table
table = Table("downloads", delimiter=",", header_rows=0, contains_pk=False)
table.columns = [
("D_ID", ("int",)),
("STUDY", ("char", 25)),
("NAME", ("char", 150)),
("EMAIL", ("char", 150)),
("COUNTRY", ("char", 200)),
("ROLE", ("char", 150)),
("PURPOSE", ("char", 500)),
("LOCATION", ("char", 250)),
("DATE_STAMP", ("char",)),
]
engine.table = table
engine.create_table()
engine.insert_data_from_file(engine.format_filename("downloads.csv"))
# Create methods table
table = Table("methods", delimiter=",", header_rows=0, contains_pk=False)
table.columns = [
("ID_METHODS", ("int",)),
("STUDY_ID", ("int",)),
("METHODS", ("char",)),
("SUMMARY_METHODS", ("char", 500)),
]
engine.table = table
engine.create_table()
engine.insert_data_from_file(engine.format_filename("methods.csv"))
# Create sample table
table = Table("sample", delimiter=",", header_rows=0, contains_pk=False)
table.columns = [
("ID_SAMPLE", ("int",)),
("ID_TREAT", ("int",)),
("SAMPLE_DESC_NAME", ("char", 200)),
]
engine.table = table
engine.create_table()
engine.insert_data_from_file(engine.format_filename("sample.csv"))
# Create site table
table = Table("site", delimiter=",", header_rows=0, contains_pk=False)
table.columns = [
("ID_SITE", ("int",)),
("STUDY_ID", ("int",)),
("REALM", ("char", 11)),
("CLIMATE", ("char", 20)),
("GENERAL_TREAT", ("char", 200)),
("TREATMENT", ("char", 200)),
("TREAT_COMMENTS", ("char", 250)),
("TREAT_DATE", ("char", 100)),
("CEN_LATITUDE", ("double",)),
("CEN_LONGITUDE", ("double",)),
("HABITAT", ("char", 100)),
("PROTECTED_AREA", ("char", 50)),
("AREA", ("double",)),
("BIOME_MAP", ("char", 500))
]
engine.table = table
engine.create_table()
engine.insert_data_from_file(engine.format_filename("site.csv"))
# Create species table
table = Table("species", delimiter=",", header_rows=0, contains_pk=False)
table.columns = [
("ID_SPECIES", ("int",)),
("GENUS", ("char", 100)),
("SPECIES", ("char", 100)),
("GENUS_SPECIES", ("char", 100))
]
engine.table = table
engine.create_table()
engine.insert_data_from_file(engine.format_filename("species.csv"))
SCRIPT = main()
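# The retriever CLI discovers this module via the "#retriever" tag at the top
# and calls SCRIPT.download() with the selected engine, e.g. (assuming the
# standard CLI): retriever install sqlite biotimesql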
| [((23, 8, 23, 39), 'retriever.lib.templates.Script.__init__', 'Script.__init__', ({(23, 24, 23, 28): 'self'}, {}), '(self, **kwargs)', False, 'from retriever.lib.templates import Script\n'), ((44, 8, 44, 44), 'retriever.lib.templates.Script.download', 'Script.download', ({(44, 24, 44, 28): 'self', (44, 30, 44, 36): 'engine', (44, 38, 44, 43): 'debug'}, {}), '(self, engine, debug)', False, 'from retriever.lib.templates import Script\n'), ((81, 16, 81, 86), 'retriever.lib.models.Table', 'Table', (), '', False, 'from retriever.lib.models import Table\n'), ((91, 16, 91, 84), 'retriever.lib.models.Table', 'Table', (), '', False, 'from retriever.lib.models import Table\n'), ((112, 16, 112, 81), 'retriever.lib.models.Table', 'Table', (), '', False, 'from retriever.lib.models import Table\n'), ((119, 16, 119, 83), 'retriever.lib.models.Table', 'Table', (), '', False, 'from retriever.lib.models import Table\n'), ((130, 16, 130, 82), 'retriever.lib.models.Table', 'Table', (), '', False, 'from retriever.lib.models import Table\n'), ((147, 16, 147, 83), 'retriever.lib.models.Table', 'Table', (), '', False, 'from retriever.lib.models import Table\n'), ((154, 16, 154, 82), 'retriever.lib.models.Table', 'Table', (), '', False, 'from retriever.lib.models import Table\n'), ((167, 16, 167, 82), 'retriever.lib.models.Table', 'Table', (), '', False, 'from retriever.lib.models import Table\n'), ((197, 16, 197, 83), 'retriever.lib.models.Table', 'Table', (), '', False, 'from retriever.lib.models import Table\n'), ((214, 16, 214, 81), 'retriever.lib.models.Table', 'Table', (), '', False, 'from retriever.lib.models import Table\n'), ((226, 16, 226, 80), 'retriever.lib.models.Table', 'Table', (), '', False, 'from retriever.lib.models import Table\n'), ((237, 16, 237, 78), 'retriever.lib.models.Table', 'Table', (), '', False, 'from retriever.lib.models import Table\n'), ((259, 16, 259, 81), 'retriever.lib.models.Table', 'Table', (), '', False, 'from retriever.lib.models import Table\n'), ((38, 11, 38, 33), 'pkg_resources.parse_version', 'parse_version', ({(38, 25, 38, 32): 'VERSION'}, {}), '(VERSION)', False, 'from pkg_resources import parse_version\n'), ((38, 37, 38, 59), 'pkg_resources.parse_version', 'parse_version', ({(38, 51, 38, 58): '"""2.0.0"""'}, {}), "('2.0.0')", False, 'from pkg_resources import parse_version\n'), ((68, 33, 68, 76), 'csv.writer', 'csv.writer', (), '', False, 'import csv\n')] |