file_name
large_stringlengths 4
140
| prefix
large_stringlengths 0
12.1k
| suffix
large_stringlengths 0
12k
| middle
large_stringlengths 0
7.51k
| fim_type
large_stringclasses 4
values |
---|---|---|---|---|
lc804-unique-morse-code-words.py | # coding=utf-8
import unittest
"""804. Unique Morse Code Words
https://leetcode.com/problems/unique-morse-code-words/description/
International Morse Code defines a standard encoding where each letter is
mapped to a series of dots and dashes, as follows: `"a"` maps to `".-"`, `"b"`
maps to `"-..."`, `"c"` maps to `"-.-."`, and so on.
For convenience, the full table for the 26 letters of the English alphabet is
given below:
[".-","-...","-.-.","-..",".","..-.","--.","....","..",".---","-.-",".-..","--","-.","---",".--.","--.-",".-.","...","-","..-","...-",".--","-..-","-.--","--.."]
Now, given a list of words, each word can be written as a concatenation of the
Morse code of each letter. For example, "cab" can be written as "-.-.-....-",
(which is the concatenation "-.-." \+ "-..." \+ ".-"). We'll call such a
concatenation, the transformation of a word.
Return the number of different transformations among all words we have.
**Example:**
**Input:** words = ["gin", "zen", "gig", "msg"]
**Output:** 2
**Explanation:**
The transformation of each word is:
"gin" -> "--...-."
"zen" -> "--...-."
"gig" -> "--...--."
"msg" -> "--...--."
There are 2 different transformations, "--...-." and "--...--.".
**Note:**
* The length of `words` will be at most `100`.
* Each `words[i]` will have length in range `[1, 12]`.
* `words[i]` will only consist of lowercase letters.
Similar Questions:
"""
class Solution(object):
def | (self, words):
"""
:type words: List[str]
:rtype: int
"""
self.CODE = [
".-", "-...", "-.-.", "-..", ".",
"..-.", "--.", "....", "..", ".---",
"-.-", ".-..", "--", "-.", "---",
".--.", "--.-", ".-.", "...", "-",
"..-", "...-", ".--", "-..-", "-.--", "--.."]
cache = {self._trans(i) for i in words}
return len(cache)
def _trans(self, w):
return ''.join(self.CODE[ord(i) - ord('a')] for i in w)
class T(unittest.TestCase):
def test(self):
s = Solution()
self.assertEqual(s.uniqueMorseRepresentations(["gin", "zen", "gig", "msg"]), 2)
if __name__ == "__main__":
unittest.main()
| uniqueMorseRepresentations | identifier_name |
lc804-unique-morse-code-words.py | # coding=utf-8
import unittest
"""804. Unique Morse Code Words
https://leetcode.com/problems/unique-morse-code-words/description/
International Morse Code defines a standard encoding where each letter is
mapped to a series of dots and dashes, as follows: `"a"` maps to `".-"`, `"b"`
maps to `"-..."`, `"c"` maps to `"-.-."`, and so on.
For convenience, the full table for the 26 letters of the English alphabet is
given below:
[".-","-...","-.-.","-..",".","..-.","--.","....","..",".---","-.-",".-..","--","-.","---",".--.","--.-",".-.","...","-","..-","...-",".--","-..-","-.--","--.."]
Now, given a list of words, each word can be written as a concatenation of the
Morse code of each letter. For example, "cab" can be written as "-.-.-....-",
(which is the concatenation "-.-." \+ "-..." \+ ".-"). We'll call such a
concatenation, the transformation of a word.
Return the number of different transformations among all words we have.
**Example:**
**Input:** words = ["gin", "zen", "gig", "msg"]
**Output:** 2
**Explanation:**
The transformation of each word is:
"gin" -> "--...-."
"zen" -> "--...-."
"gig" -> "--...--."
"msg" -> "--...--."
There are 2 different transformations, "--...-." and "--...--.".
**Note:**
* The length of `words` will be at most `100`.
* Each `words[i]` will have length in range `[1, 12]`.
* `words[i]` will only consist of lowercase letters.
Similar Questions:
"""
class Solution(object):
def uniqueMorseRepresentations(self, words):
"""
:type words: List[str]
:rtype: int
"""
self.CODE = [
".-", "-...", "-.-.", "-..", ".",
"..-.", "--.", "....", "..", ".---",
"-.-", ".-..", "--", "-.", "---",
".--.", "--.-", ".-.", "...", "-",
"..-", "...-", ".--", "-..-", "-.--", "--.."]
cache = {self._trans(i) for i in words}
return len(cache)
def _trans(self, w):
return ''.join(self.CODE[ord(i) - ord('a')] for i in w)
class T(unittest.TestCase):
def test(self):
s = Solution()
self.assertEqual(s.uniqueMorseRepresentations(["gin", "zen", "gig", "msg"]), 2)
if __name__ == "__main__":
| unittest.main() | conditional_block |
|
SourcePositionTest.py | import unittest
from os.path import relpath
from coalib.results.SourcePosition import SourcePosition
from coala_utils.ContextManagers import prepare_file
class SourcePositionTest(unittest.TestCase):
def test_initialization(self):
with self.assertRaises(TypeError):
SourcePosition(None, 0)
with self.assertRaises(ValueError):
SourcePosition('file', None, 1)
# However these should work:
SourcePosition('file', None, None)
SourcePosition('file', 4, None)
SourcePosition('file', 4, 5)
def test_string_conversion(self):
|
def test_json(self):
with prepare_file([''], None) as (_, filename):
uut = SourcePosition(filename, 1)
self.assertEqual(uut.__json__(use_relpath=True)
['file'], relpath(filename))
def assert_equal(self, first, second):
self.assertGreaterEqual(first, second)
self.assertEqual(first, second)
self.assertLessEqual(first, second)
def assert_ordering(self, greater, lesser):
self.assertGreater(greater, lesser)
self.assertGreaterEqual(greater, lesser)
self.assertNotEqual(greater, lesser)
self.assertLessEqual(lesser, greater)
self.assertLess(lesser, greater)
| uut = SourcePosition('filename', 1)
self.assertRegex(
repr(uut),
"<SourcePosition object\\(file='.*filename', line=1, "
'column=None\\) at 0x[0-9a-fA-F]+>')
self.assertEqual(str(uut), 'filename:1')
uut = SourcePosition('None', None)
self.assertRegex(
repr(uut),
"<SourcePosition object\\(file='.*None', line=None, column=None\\) "
'at 0x[0-9a-fA-F]+>')
self.assertEqual(str(uut), 'None')
uut = SourcePosition('filename', 3, 2)
self.assertEqual(str(uut), 'filename:3:2') | identifier_body |
SourcePositionTest.py | import unittest
from os.path import relpath
from coalib.results.SourcePosition import SourcePosition
from coala_utils.ContextManagers import prepare_file
class SourcePositionTest(unittest.TestCase):
def test_initialization(self):
with self.assertRaises(TypeError):
SourcePosition(None, 0)
with self.assertRaises(ValueError):
SourcePosition('file', None, 1)
# However these should work:
SourcePosition('file', None, None)
SourcePosition('file', 4, None)
SourcePosition('file', 4, 5)
def test_string_conversion(self):
uut = SourcePosition('filename', 1)
self.assertRegex(
repr(uut),
"<SourcePosition object\\(file='.*filename', line=1, "
'column=None\\) at 0x[0-9a-fA-F]+>')
self.assertEqual(str(uut), 'filename:1')
uut = SourcePosition('None', None)
self.assertRegex(
repr(uut),
"<SourcePosition object\\(file='.*None', line=None, column=None\\) "
'at 0x[0-9a-fA-F]+>')
self.assertEqual(str(uut), 'None')
uut = SourcePosition('filename', 3, 2)
self.assertEqual(str(uut), 'filename:3:2')
def test_json(self):
with prepare_file([''], None) as (_, filename):
uut = SourcePosition(filename, 1)
self.assertEqual(uut.__json__(use_relpath=True)
['file'], relpath(filename))
def assert_equal(self, first, second):
self.assertGreaterEqual(first, second)
self.assertEqual(first, second)
self.assertLessEqual(first, second)
def | (self, greater, lesser):
self.assertGreater(greater, lesser)
self.assertGreaterEqual(greater, lesser)
self.assertNotEqual(greater, lesser)
self.assertLessEqual(lesser, greater)
self.assertLess(lesser, greater)
| assert_ordering | identifier_name |
SourcePositionTest.py | import unittest
from os.path import relpath
from coalib.results.SourcePosition import SourcePosition
from coala_utils.ContextManagers import prepare_file
class SourcePositionTest(unittest.TestCase):
def test_initialization(self):
with self.assertRaises(TypeError): | SourcePosition(None, 0)
with self.assertRaises(ValueError):
SourcePosition('file', None, 1)
# However these should work:
SourcePosition('file', None, None)
SourcePosition('file', 4, None)
SourcePosition('file', 4, 5)
def test_string_conversion(self):
uut = SourcePosition('filename', 1)
self.assertRegex(
repr(uut),
"<SourcePosition object\\(file='.*filename', line=1, "
'column=None\\) at 0x[0-9a-fA-F]+>')
self.assertEqual(str(uut), 'filename:1')
uut = SourcePosition('None', None)
self.assertRegex(
repr(uut),
"<SourcePosition object\\(file='.*None', line=None, column=None\\) "
'at 0x[0-9a-fA-F]+>')
self.assertEqual(str(uut), 'None')
uut = SourcePosition('filename', 3, 2)
self.assertEqual(str(uut), 'filename:3:2')
def test_json(self):
with prepare_file([''], None) as (_, filename):
uut = SourcePosition(filename, 1)
self.assertEqual(uut.__json__(use_relpath=True)
['file'], relpath(filename))
def assert_equal(self, first, second):
self.assertGreaterEqual(first, second)
self.assertEqual(first, second)
self.assertLessEqual(first, second)
def assert_ordering(self, greater, lesser):
self.assertGreater(greater, lesser)
self.assertGreaterEqual(greater, lesser)
self.assertNotEqual(greater, lesser)
self.assertLessEqual(lesser, greater)
self.assertLess(lesser, greater) | random_line_split |
|
uk.js | (function($) {
$.Redactor.opts.langs['uk'] = {
html: 'Код',
video: 'Відео',
image: 'Зображення',
table: 'Таблиця',
link: 'Посилання',
link_insert: 'Вставити посилання ...',
link_edit: 'Edit link',
unlink: 'Видалити посилання',
formatting: 'Стилі',
paragraph: 'Звичайний текст',
quote: 'Цитата',
code: 'Код',
header1: 'Заголовок 1',
header2: 'Заголовок 2',
header3: 'Заголовок 3',
header4: 'Заголовок 4',
header5: 'Заголовок 5',
bold: 'Жирний',
italic: 'Похилий',
fontcolor: 'Колір тексту',
backcolor: 'Заливка тексту',
unorderedlist: 'Звичайний список',
orderedlist: 'Нумерований список',
outdent: 'Зменшити відступ',
indent: 'Збільшити відступ',
cancel: 'Скасувати',
insert: 'Вставити',
save: 'Зберегти',
_delete: 'Видалити',
insert_table: 'Вставити таблицю',
insert_row_above: 'Додати рядок зверху',
insert_row_below: 'Додати рядок знизу',
insert_column_left: 'Додати стовпець ліворуч',
insert_column_right: 'Додати стовпець праворуч',
delete_column: 'Видалити стовпець',
delete_row: 'Видалити рядок',
delete_table: 'Видалити таблицю',
rows: 'Рядки',
columns: 'Стовпці',
add_head: 'Додати заголовок',
delete_head: 'Видалити заголовок',
title: 'Підказка',
image_view: 'Завантажити зображення',
image_position: 'Обтікання текстом',
none: 'ні',
left: 'ліворуч',
right: 'праворуч',
image_web_link: 'Посилання на зображення',
text: 'Текст',
mailto: 'Ел. пошта',
web: 'URL',
video_html_code: 'Код відео ролика',
file: 'Файл',
upload: 'Завантажити',
download: 'Завантажити',
choose: 'Вибрати',
or_choose: 'Або виберіть',
drop_file_here: 'Перетягніть файл сюди',
align_left: 'По лівому краю',
align_center: 'По центру',
align_right: 'По правому краю',
align_justify: 'Вирівняти текст по ширині',
horizontalrule: 'Горизонтальная лінійка',
fullscreen: 'На весь екран', | alignment: 'Alignment',
filename: 'Name (optional)',
edit: 'Edit',
center: 'Center'
};
})(jQuery); | deleted: 'Закреслений',
anchor: 'Anchor',
link_new_tab: 'Open link in new tab',
underline: 'Underline', | random_line_split |
test_jenkins.py | # stdlib
from collections import defaultdict
import datetime
import logging
import os
import shutil
import tempfile
# 3p
import xml.etree.ElementTree as ET
# project
from tests.checks.common import AgentCheckTest
logger = logging.getLogger(__file__)
DATETIME_FORMAT = '%Y-%m-%d_%H-%M-%S'
LOG_DATA = 'Finished: SUCCESS'
SUCCESSFUL_BUILD = {'number': '99', 'result': 'SUCCESS', 'duration': '60'}
NO_RESULTS_YET = {'number': '99', 'duration': '60'}
UNSUCCESSFUL_BUILD = {'number': '99', 'result': 'ABORTED', 'duration': '60'}
CONFIG = """
init_config:
instances:
- name: default
jenkins_home: <JENKINS_HOME>
"""
def dict_to_xml(metadata_dict):
""" Convert a dict to xml for use in a build.xml file """
build = ET.Element('build')
for k, v in metadata_dict.iteritems():
node = ET.SubElement(build, k)
node.text = v
return ET.tostring(build)
def write_file(file_name, log_data):
with open(file_name, 'w') as log_file:
log_file.write(log_data)
class TestJenkins(AgentCheckTest):
CHECK_NAME = 'jenkins'
def setUp(self):
super(TestJenkins, self).setUp()
self.tmp_dir = tempfile.mkdtemp()
self.config = {
'init_config': {},
'instances': [{
'name': 'default',
'jenkins_home': self.tmp_dir
}]
}
self.instance = self.config['instances'][0]
self.config_yaml = CONFIG.replace('<JENKINS_HOME>', self.tmp_dir)
self._create_old_build()
def tearDown(self):
super(TestJenkins, self).tearDown()
# Clean up the temp directory
shutil.rmtree(self.tmp_dir)
def _create_old_build(self):
# As coded, the jenkins dd agent needs more than one result
# in order to get the last valid build.
# Create one for yesterday.
metadata = dict_to_xml(SUCCESSFUL_BUILD)
yesterday = datetime.date.today() - datetime.timedelta(days=1)
self._populate_build_dir(metadata, yesterday)
def _create_check(self):
# Create the jenkins check
self.load_check(self.config)
def _populate_build_dir(self, metadata, time=None):
# The jenkins dd agent requires the build metadata file and a log file of results
time = time or datetime.datetime.now()
datestring = time.strftime(DATETIME_FORMAT)
build_dir = os.path.join(self.tmp_dir, 'jobs', 'foo', 'builds', datestring)
os.makedirs(build_dir)
log_file = os.path.join(build_dir, 'log')
log_data = LOG_DATA
write_file(log_file, log_data)
metadata_file = os.path.join(build_dir, 'build.xml')
build_metadata = metadata
write_file(metadata_file, build_metadata)
def testParseBuildLog(self):
"""
Test doing a jenkins check. This will parse the logs but since there was no
previous high watermark no event will be created.
"""
metadata = dict_to_xml(SUCCESSFUL_BUILD)
self._populate_build_dir(metadata)
self._create_check()
self.run_check(self.config)
# The check method does not return anything, so this testcase passes
# if the high_watermark was set and no exceptions were raised.
self.assertTrue(self.check.high_watermarks[self.instance['name']]['foo'] > 0)
def testCheckSuccessfulEvent(self):
"""
Test that a successful build will create the correct metrics.
"""
metadata = dict_to_xml(SUCCESSFUL_BUILD)
self._populate_build_dir(metadata)
self._create_check()
# Set the high_water mark so that the next check will create events
self.check.high_watermarks['default'] = defaultdict(lambda: 0)
self.run_check(self.config)
metrics_names = [m[0] for m in self.metrics]
assert len(metrics_names) == 2
assert 'jenkins.job.success' in metrics_names
assert 'jenkins.job.duration' in metrics_names
metrics_tags = [m[3] for m in self.metrics]
for tag in metrics_tags:
assert 'job_name:foo' in tag.get('tags')
assert 'result:SUCCESS' in tag.get('tags')
assert 'build_number:99' in tag.get('tags')
def | (self):
"""
Test that an unsuccessful build will create the correct metrics.
"""
metadata = dict_to_xml(UNSUCCESSFUL_BUILD)
self._populate_build_dir(metadata)
self._create_check()
# Set the high_water mark so that the next check will create events
self.check.high_watermarks['default'] = defaultdict(lambda: 0)
self.run_check(self.config)
metrics_names = [m[0] for m in self.metrics]
assert len(metrics_names) == 2
assert 'jenkins.job.failure' in metrics_names
assert 'jenkins.job.duration' in metrics_names
metrics_tags = [m[3] for m in self.metrics]
for tag in metrics_tags:
assert 'job_name:foo' in tag.get('tags')
assert 'result:ABORTED' in tag.get('tags')
assert 'build_number:99' in tag.get('tags')
def testCheckWithRunningBuild(self):
"""
Test under the conditions of a jenkins build still running.
The build.xml file will exist but it will not yet have a result.
"""
metadata = dict_to_xml(NO_RESULTS_YET)
self._populate_build_dir(metadata)
self._create_check()
# Set the high_water mark so that the next check will create events
self.check.high_watermarks['default'] = defaultdict(lambda: 0)
self.run_check(self.config)
# The check method does not return anything, so this testcase passes
# if the high_watermark was NOT updated and no exceptions were raised.
assert self.check.high_watermarks[self.instance['name']]['foo'] == 0
| testCheckUnsuccessfulEvent | identifier_name |
test_jenkins.py | # stdlib
from collections import defaultdict
import datetime
import logging
import os
import shutil
import tempfile
# 3p
import xml.etree.ElementTree as ET
# project
from tests.checks.common import AgentCheckTest
logger = logging.getLogger(__file__)
DATETIME_FORMAT = '%Y-%m-%d_%H-%M-%S'
LOG_DATA = 'Finished: SUCCESS'
SUCCESSFUL_BUILD = {'number': '99', 'result': 'SUCCESS', 'duration': '60'}
NO_RESULTS_YET = {'number': '99', 'duration': '60'}
UNSUCCESSFUL_BUILD = {'number': '99', 'result': 'ABORTED', 'duration': '60'}
CONFIG = """
init_config:
instances:
- name: default
jenkins_home: <JENKINS_HOME>
"""
def dict_to_xml(metadata_dict):
|
def write_file(file_name, log_data):
with open(file_name, 'w') as log_file:
log_file.write(log_data)
class TestJenkins(AgentCheckTest):
CHECK_NAME = 'jenkins'
def setUp(self):
super(TestJenkins, self).setUp()
self.tmp_dir = tempfile.mkdtemp()
self.config = {
'init_config': {},
'instances': [{
'name': 'default',
'jenkins_home': self.tmp_dir
}]
}
self.instance = self.config['instances'][0]
self.config_yaml = CONFIG.replace('<JENKINS_HOME>', self.tmp_dir)
self._create_old_build()
def tearDown(self):
super(TestJenkins, self).tearDown()
# Clean up the temp directory
shutil.rmtree(self.tmp_dir)
def _create_old_build(self):
# As coded, the jenkins dd agent needs more than one result
# in order to get the last valid build.
# Create one for yesterday.
metadata = dict_to_xml(SUCCESSFUL_BUILD)
yesterday = datetime.date.today() - datetime.timedelta(days=1)
self._populate_build_dir(metadata, yesterday)
def _create_check(self):
# Create the jenkins check
self.load_check(self.config)
def _populate_build_dir(self, metadata, time=None):
# The jenkins dd agent requires the build metadata file and a log file of results
time = time or datetime.datetime.now()
datestring = time.strftime(DATETIME_FORMAT)
build_dir = os.path.join(self.tmp_dir, 'jobs', 'foo', 'builds', datestring)
os.makedirs(build_dir)
log_file = os.path.join(build_dir, 'log')
log_data = LOG_DATA
write_file(log_file, log_data)
metadata_file = os.path.join(build_dir, 'build.xml')
build_metadata = metadata
write_file(metadata_file, build_metadata)
def testParseBuildLog(self):
"""
Test doing a jenkins check. This will parse the logs but since there was no
previous high watermark no event will be created.
"""
metadata = dict_to_xml(SUCCESSFUL_BUILD)
self._populate_build_dir(metadata)
self._create_check()
self.run_check(self.config)
# The check method does not return anything, so this testcase passes
# if the high_watermark was set and no exceptions were raised.
self.assertTrue(self.check.high_watermarks[self.instance['name']]['foo'] > 0)
def testCheckSuccessfulEvent(self):
"""
Test that a successful build will create the correct metrics.
"""
metadata = dict_to_xml(SUCCESSFUL_BUILD)
self._populate_build_dir(metadata)
self._create_check()
# Set the high_water mark so that the next check will create events
self.check.high_watermarks['default'] = defaultdict(lambda: 0)
self.run_check(self.config)
metrics_names = [m[0] for m in self.metrics]
assert len(metrics_names) == 2
assert 'jenkins.job.success' in metrics_names
assert 'jenkins.job.duration' in metrics_names
metrics_tags = [m[3] for m in self.metrics]
for tag in metrics_tags:
assert 'job_name:foo' in tag.get('tags')
assert 'result:SUCCESS' in tag.get('tags')
assert 'build_number:99' in tag.get('tags')
def testCheckUnsuccessfulEvent(self):
"""
Test that an unsuccessful build will create the correct metrics.
"""
metadata = dict_to_xml(UNSUCCESSFUL_BUILD)
self._populate_build_dir(metadata)
self._create_check()
# Set the high_water mark so that the next check will create events
self.check.high_watermarks['default'] = defaultdict(lambda: 0)
self.run_check(self.config)
metrics_names = [m[0] for m in self.metrics]
assert len(metrics_names) == 2
assert 'jenkins.job.failure' in metrics_names
assert 'jenkins.job.duration' in metrics_names
metrics_tags = [m[3] for m in self.metrics]
for tag in metrics_tags:
assert 'job_name:foo' in tag.get('tags')
assert 'result:ABORTED' in tag.get('tags')
assert 'build_number:99' in tag.get('tags')
def testCheckWithRunningBuild(self):
"""
Test under the conditions of a jenkins build still running.
The build.xml file will exist but it will not yet have a result.
"""
metadata = dict_to_xml(NO_RESULTS_YET)
self._populate_build_dir(metadata)
self._create_check()
# Set the high_water mark so that the next check will create events
self.check.high_watermarks['default'] = defaultdict(lambda: 0)
self.run_check(self.config)
# The check method does not return anything, so this testcase passes
# if the high_watermark was NOT updated and no exceptions were raised.
assert self.check.high_watermarks[self.instance['name']]['foo'] == 0
| """ Convert a dict to xml for use in a build.xml file """
build = ET.Element('build')
for k, v in metadata_dict.iteritems():
node = ET.SubElement(build, k)
node.text = v
return ET.tostring(build) | identifier_body |
test_jenkins.py | # stdlib
from collections import defaultdict
import datetime
import logging
import os
import shutil
import tempfile
# 3p
import xml.etree.ElementTree as ET
# project
from tests.checks.common import AgentCheckTest
logger = logging.getLogger(__file__)
DATETIME_FORMAT = '%Y-%m-%d_%H-%M-%S'
LOG_DATA = 'Finished: SUCCESS'
SUCCESSFUL_BUILD = {'number': '99', 'result': 'SUCCESS', 'duration': '60'}
NO_RESULTS_YET = {'number': '99', 'duration': '60'}
UNSUCCESSFUL_BUILD = {'number': '99', 'result': 'ABORTED', 'duration': '60'}
CONFIG = """
init_config:
instances:
- name: default
jenkins_home: <JENKINS_HOME>
"""
def dict_to_xml(metadata_dict):
""" Convert a dict to xml for use in a build.xml file """
build = ET.Element('build')
for k, v in metadata_dict.iteritems():
node = ET.SubElement(build, k)
node.text = v
return ET.tostring(build)
def write_file(file_name, log_data):
with open(file_name, 'w') as log_file:
log_file.write(log_data)
class TestJenkins(AgentCheckTest):
CHECK_NAME = 'jenkins'
def setUp(self):
super(TestJenkins, self).setUp()
self.tmp_dir = tempfile.mkdtemp()
self.config = {
'init_config': {},
'instances': [{
'name': 'default',
'jenkins_home': self.tmp_dir
}]
}
self.instance = self.config['instances'][0]
self.config_yaml = CONFIG.replace('<JENKINS_HOME>', self.tmp_dir)
self._create_old_build()
def tearDown(self):
super(TestJenkins, self).tearDown()
# Clean up the temp directory
shutil.rmtree(self.tmp_dir)
def _create_old_build(self):
# As coded, the jenkins dd agent needs more than one result
# in order to get the last valid build.
# Create one for yesterday.
metadata = dict_to_xml(SUCCESSFUL_BUILD)
yesterday = datetime.date.today() - datetime.timedelta(days=1)
self._populate_build_dir(metadata, yesterday)
def _create_check(self):
# Create the jenkins check
self.load_check(self.config)
def _populate_build_dir(self, metadata, time=None):
# The jenkins dd agent requires the build metadata file and a log file of results
time = time or datetime.datetime.now()
datestring = time.strftime(DATETIME_FORMAT)
build_dir = os.path.join(self.tmp_dir, 'jobs', 'foo', 'builds', datestring)
os.makedirs(build_dir)
log_file = os.path.join(build_dir, 'log')
log_data = LOG_DATA
write_file(log_file, log_data)
metadata_file = os.path.join(build_dir, 'build.xml')
build_metadata = metadata
write_file(metadata_file, build_metadata)
def testParseBuildLog(self):
"""
Test doing a jenkins check. This will parse the logs but since there was no
previous high watermark no event will be created.
"""
metadata = dict_to_xml(SUCCESSFUL_BUILD)
self._populate_build_dir(metadata)
self._create_check()
self.run_check(self.config)
# The check method does not return anything, so this testcase passes
# if the high_watermark was set and no exceptions were raised.
self.assertTrue(self.check.high_watermarks[self.instance['name']]['foo'] > 0)
def testCheckSuccessfulEvent(self):
"""
Test that a successful build will create the correct metrics.
"""
metadata = dict_to_xml(SUCCESSFUL_BUILD)
self._populate_build_dir(metadata)
self._create_check()
# Set the high_water mark so that the next check will create events
self.check.high_watermarks['default'] = defaultdict(lambda: 0)
self.run_check(self.config)
metrics_names = [m[0] for m in self.metrics]
assert len(metrics_names) == 2
assert 'jenkins.job.success' in metrics_names
assert 'jenkins.job.duration' in metrics_names
metrics_tags = [m[3] for m in self.metrics]
for tag in metrics_tags:
assert 'job_name:foo' in tag.get('tags')
assert 'result:SUCCESS' in tag.get('tags')
assert 'build_number:99' in tag.get('tags')
def testCheckUnsuccessfulEvent(self):
"""
Test that an unsuccessful build will create the correct metrics.
"""
metadata = dict_to_xml(UNSUCCESSFUL_BUILD)
self._populate_build_dir(metadata)
self._create_check()
# Set the high_water mark so that the next check will create events
self.check.high_watermarks['default'] = defaultdict(lambda: 0)
self.run_check(self.config)
metrics_names = [m[0] for m in self.metrics]
assert len(metrics_names) == 2
assert 'jenkins.job.failure' in metrics_names
assert 'jenkins.job.duration' in metrics_names
metrics_tags = [m[3] for m in self.metrics]
for tag in metrics_tags:
|
def testCheckWithRunningBuild(self):
"""
Test under the conditions of a jenkins build still running.
The build.xml file will exist but it will not yet have a result.
"""
metadata = dict_to_xml(NO_RESULTS_YET)
self._populate_build_dir(metadata)
self._create_check()
# Set the high_water mark so that the next check will create events
self.check.high_watermarks['default'] = defaultdict(lambda: 0)
self.run_check(self.config)
# The check method does not return anything, so this testcase passes
# if the high_watermark was NOT updated and no exceptions were raised.
assert self.check.high_watermarks[self.instance['name']]['foo'] == 0
| assert 'job_name:foo' in tag.get('tags')
assert 'result:ABORTED' in tag.get('tags')
assert 'build_number:99' in tag.get('tags') | conditional_block |
test_jenkins.py | # stdlib
from collections import defaultdict
import datetime
import logging
import os
import shutil
import tempfile
# 3p
import xml.etree.ElementTree as ET
# project
from tests.checks.common import AgentCheckTest
logger = logging.getLogger(__file__)
DATETIME_FORMAT = '%Y-%m-%d_%H-%M-%S'
LOG_DATA = 'Finished: SUCCESS'
SUCCESSFUL_BUILD = {'number': '99', 'result': 'SUCCESS', 'duration': '60'}
NO_RESULTS_YET = {'number': '99', 'duration': '60'}
UNSUCCESSFUL_BUILD = {'number': '99', 'result': 'ABORTED', 'duration': '60'}
CONFIG = """
init_config:
instances:
- name: default
jenkins_home: <JENKINS_HOME>
"""
def dict_to_xml(metadata_dict):
""" Convert a dict to xml for use in a build.xml file """
build = ET.Element('build')
for k, v in metadata_dict.iteritems():
node = ET.SubElement(build, k)
node.text = v
return ET.tostring(build)
def write_file(file_name, log_data):
with open(file_name, 'w') as log_file:
log_file.write(log_data)
class TestJenkins(AgentCheckTest):
CHECK_NAME = 'jenkins'
def setUp(self):
super(TestJenkins, self).setUp()
self.tmp_dir = tempfile.mkdtemp()
self.config = {
'init_config': {},
'instances': [{
'name': 'default',
'jenkins_home': self.tmp_dir
}]
}
self.instance = self.config['instances'][0]
self.config_yaml = CONFIG.replace('<JENKINS_HOME>', self.tmp_dir)
self._create_old_build()
def tearDown(self):
super(TestJenkins, self).tearDown()
# Clean up the temp directory
shutil.rmtree(self.tmp_dir)
def _create_old_build(self):
# As coded, the jenkins dd agent needs more than one result
# in order to get the last valid build.
# Create one for yesterday.
metadata = dict_to_xml(SUCCESSFUL_BUILD)
yesterday = datetime.date.today() - datetime.timedelta(days=1)
self._populate_build_dir(metadata, yesterday)
def _create_check(self):
# Create the jenkins check
self.load_check(self.config)
def _populate_build_dir(self, metadata, time=None):
# The jenkins dd agent requires the build metadata file and a log file of results
time = time or datetime.datetime.now()
datestring = time.strftime(DATETIME_FORMAT)
build_dir = os.path.join(self.tmp_dir, 'jobs', 'foo', 'builds', datestring)
os.makedirs(build_dir)
log_file = os.path.join(build_dir, 'log')
log_data = LOG_DATA | build_metadata = metadata
write_file(metadata_file, build_metadata)
def testParseBuildLog(self):
"""
Test doing a jenkins check. This will parse the logs but since there was no
previous high watermark no event will be created.
"""
metadata = dict_to_xml(SUCCESSFUL_BUILD)
self._populate_build_dir(metadata)
self._create_check()
self.run_check(self.config)
# The check method does not return anything, so this testcase passes
# if the high_watermark was set and no exceptions were raised.
self.assertTrue(self.check.high_watermarks[self.instance['name']]['foo'] > 0)
def testCheckSuccessfulEvent(self):
"""
Test that a successful build will create the correct metrics.
"""
metadata = dict_to_xml(SUCCESSFUL_BUILD)
self._populate_build_dir(metadata)
self._create_check()
# Set the high_water mark so that the next check will create events
self.check.high_watermarks['default'] = defaultdict(lambda: 0)
self.run_check(self.config)
metrics_names = [m[0] for m in self.metrics]
assert len(metrics_names) == 2
assert 'jenkins.job.success' in metrics_names
assert 'jenkins.job.duration' in metrics_names
metrics_tags = [m[3] for m in self.metrics]
for tag in metrics_tags:
assert 'job_name:foo' in tag.get('tags')
assert 'result:SUCCESS' in tag.get('tags')
assert 'build_number:99' in tag.get('tags')
def testCheckUnsuccessfulEvent(self):
"""
Test that an unsuccessful build will create the correct metrics.
"""
metadata = dict_to_xml(UNSUCCESSFUL_BUILD)
self._populate_build_dir(metadata)
self._create_check()
# Set the high_water mark so that the next check will create events
self.check.high_watermarks['default'] = defaultdict(lambda: 0)
self.run_check(self.config)
metrics_names = [m[0] for m in self.metrics]
assert len(metrics_names) == 2
assert 'jenkins.job.failure' in metrics_names
assert 'jenkins.job.duration' in metrics_names
metrics_tags = [m[3] for m in self.metrics]
for tag in metrics_tags:
assert 'job_name:foo' in tag.get('tags')
assert 'result:ABORTED' in tag.get('tags')
assert 'build_number:99' in tag.get('tags')
def testCheckWithRunningBuild(self):
"""
Test under the conditions of a jenkins build still running.
The build.xml file will exist but it will not yet have a result.
"""
metadata = dict_to_xml(NO_RESULTS_YET)
self._populate_build_dir(metadata)
self._create_check()
# Set the high_water mark so that the next check will create events
self.check.high_watermarks['default'] = defaultdict(lambda: 0)
self.run_check(self.config)
# The check method does not return anything, so this testcase passes
# if the high_watermark was NOT updated and no exceptions were raised.
assert self.check.high_watermarks[self.instance['name']]['foo'] == 0 | write_file(log_file, log_data)
metadata_file = os.path.join(build_dir, 'build.xml') | random_line_split |
block_metadata.rs | // Copyright (c) The Diem Core Contributors
// SPDX-License-Identifier: Apache-2.0
use crate::{config::global::Config as GlobalConfig, errors::*};
use diem_crypto::HashValue;
use diem_types::{account_address::AccountAddress, block_metadata::BlockMetadata};
use std::str::FromStr;
#[derive(Debug)]
pub enum | {
Account(String),
Address(AccountAddress),
}
#[derive(Debug)]
pub enum Entry {
Proposer(Proposer),
Timestamp(u64),
}
impl FromStr for Entry {
    type Err = Error;

    /// Parses a `//!` directive line into an [`Entry`].
    ///
    /// Recognized forms (all whitespace is collapsed before matching):
    ///   - `//! proposer: <account-name>`
    ///   - `//! proposer-address: <hex-address>`
    ///   - `//! block-time: <u64>`
    fn from_str(s: &str) -> Result<Self> {
        // Collapse all whitespace so directives may be written with arbitrary spacing.
        let s = s.split_whitespace().collect::<String>();
        let s = &s
            .strip_prefix("//!")
            .ok_or_else(|| ErrorKind::Other("txn config entry must start with //!".to_string()))?
            .trim_start();
        if let Some(s) = s.strip_prefix("proposer:") {
            if s.is_empty() {
                // Fixed message: this entry names the proposer, not a sender.
                return Err(ErrorKind::Other("proposer cannot be empty".to_string()).into());
            }
            return Ok(Entry::Proposer(Proposer::Account(s.to_string())));
        }
        if let Some(s) = s.strip_prefix("proposer-address:") {
            if s.is_empty() {
                // Fixed message: this entry names the proposer address, not a sender address.
                return Err(
                    ErrorKind::Other("proposer address cannot be empty".to_string()).into(),
                );
            }
            return Ok(Entry::Proposer(Proposer::Address(
                AccountAddress::from_hex_literal(s)?,
            )));
        }
        if let Some(s) = s.strip_prefix("block-time:") {
            return Ok(Entry::Timestamp(s.parse::<u64>()?));
        }
        Err(ErrorKind::Other(format!(
            "failed to parse '{}' as transaction config entry",
            s
        ))
        .into())
    }
}
/// Checks whether a line denotes the start of a new block-prologue transaction.
pub fn is_new_block(s: &str) -> bool {
    // A block marker is a `//!` directive whose payload is exactly "block-prologue".
    match s.trim().strip_prefix("//!") {
        Some(rest) => rest.trim_start() == "block-prologue",
        None => false,
    }
}
impl Entry {
    /// Parses `s` as an [`Entry`] when it is a `//!` directive line;
    /// returns `Ok(None)` for any other (non-directive) line.
    pub fn try_parse(s: &str) -> Result<Option<Self>> {
        if s.starts_with("//!") {
            Ok(Some(s.parse::<Entry>()?))
        } else {
            Ok(None)
        }
    }
}
/// Assembles a [`BlockMetadata`] from the parsed `Entry` directives.
///
/// Requires both a `block-time` timestamp and a proposer (by account name or
/// literal address); otherwise an error is returned. If the same kind of
/// entry appears more than once, the last one wins.
pub fn build_block_metadata(config: &GlobalConfig, entries: &[Entry]) -> Result<BlockMetadata> {
    let mut timestamp = None;
    let mut proposer = None;
    for entry in entries {
        match entry {
            Entry::Proposer(s) => {
                // Account names are resolved through the global config;
                // literal addresses are used as-is.
                proposer = match s {
                    Proposer::Account(s) => Some(*config.get_account_for_name(s)?.address()),
                    Proposer::Address(addr) => Some(*addr),
                };
            }
            Entry::Timestamp(new_timestamp) => timestamp = Some(new_timestamp),
        }
    }
    if let (Some(t), Some(addr)) = (timestamp, proposer) {
        // TODO: Add parser for hash value and vote maps.
        Ok(BlockMetadata::new(HashValue::zero(), 0, *t, vec![], addr))
    } else {
        Err(ErrorKind::Other("Cannot generate block metadata".to_string()).into())
    }
}
| Proposer | identifier_name |
block_metadata.rs | // Copyright (c) The Diem Core Contributors
// SPDX-License-Identifier: Apache-2.0
use crate::{config::global::Config as GlobalConfig, errors::*};
use diem_crypto::HashValue;
use diem_types::{account_address::AccountAddress, block_metadata::BlockMetadata};
use std::str::FromStr;
/// Identifies the block proposer either by a named account from the global
/// config or by a literal account address.
#[derive(Debug)]
pub enum Proposer {
    /// Account name, resolved via `GlobalConfig::get_account_for_name`.
    Account(String),
    /// Literal on-chain account address.
    Address(AccountAddress),
}
#[derive(Debug)]
pub enum Entry {
Proposer(Proposer),
Timestamp(u64),
}
impl FromStr for Entry {
type Err = Error;
fn from_str(s: &str) -> Result<Self> {
let s = s.split_whitespace().collect::<String>();
let s = &s
.strip_prefix("//!")
.ok_or_else(|| ErrorKind::Other("txn config entry must start with //!".to_string()))?
.trim_start();
if let Some(s) = s.strip_prefix("proposer:") {
if s.is_empty() {
return Err(ErrorKind::Other("sender cannot be empty".to_string()).into());
}
return Ok(Entry::Proposer(Proposer::Account(s.to_string())));
}
if let Some(s) = s.strip_prefix("proposer-address:") {
if s.is_empty() {
return Err(ErrorKind::Other("sender address cannot be empty".to_string()).into());
}
return Ok(Entry::Proposer(Proposer::Address(
AccountAddress::from_hex_literal(s)?,
)));
}
if let Some(s) = s.strip_prefix("block-time:") {
return Ok(Entry::Timestamp(s.parse::<u64>()?));
}
Err(ErrorKind::Other(format!(
"failed to parse '{}' as transaction config entry",
s
))
.into())
}
}
/// Checks whether a line denotes the start of a new transaction.
pub fn is_new_block(s: &str) -> bool {
let s = s.trim();
if !s.starts_with("//!") {
return false;
}
s[3..].trim_start() == "block-prologue"
}
impl Entry {
pub fn try_parse(s: &str) -> Result<Option<Self>> |
}
pub fn build_block_metadata(config: &GlobalConfig, entries: &[Entry]) -> Result<BlockMetadata> {
let mut timestamp = None;
let mut proposer = None;
for entry in entries {
match entry {
Entry::Proposer(s) => {
proposer = match s {
Proposer::Account(s) => Some(*config.get_account_for_name(s)?.address()),
Proposer::Address(addr) => Some(*addr),
};
}
Entry::Timestamp(new_timestamp) => timestamp = Some(new_timestamp),
}
}
if let (Some(t), Some(addr)) = (timestamp, proposer) {
// TODO: Add parser for hash value and vote maps.
Ok(BlockMetadata::new(HashValue::zero(), 0, *t, vec![], addr))
} else {
Err(ErrorKind::Other("Cannot generate block metadata".to_string()).into())
}
}
| {
if s.starts_with("//!") {
Ok(Some(s.parse::<Entry>()?))
} else {
Ok(None)
}
} | identifier_body |
block_metadata.rs | // Copyright (c) The Diem Core Contributors
// SPDX-License-Identifier: Apache-2.0
use crate::{config::global::Config as GlobalConfig, errors::*};
use diem_crypto::HashValue;
use diem_types::{account_address::AccountAddress, block_metadata::BlockMetadata};
use std::str::FromStr;
#[derive(Debug)]
pub enum Proposer {
Account(String),
Address(AccountAddress),
}
#[derive(Debug)]
pub enum Entry {
Proposer(Proposer),
Timestamp(u64),
}
impl FromStr for Entry {
type Err = Error;
fn from_str(s: &str) -> Result<Self> {
let s = s.split_whitespace().collect::<String>();
let s = &s
.strip_prefix("//!")
.ok_or_else(|| ErrorKind::Other("txn config entry must start with //!".to_string()))?
.trim_start();
if let Some(s) = s.strip_prefix("proposer:") {
if s.is_empty() {
return Err(ErrorKind::Other("sender cannot be empty".to_string()).into());
}
return Ok(Entry::Proposer(Proposer::Account(s.to_string())));
}
if let Some(s) = s.strip_prefix("proposer-address:") {
if s.is_empty() {
return Err(ErrorKind::Other("sender address cannot be empty".to_string()).into());
}
return Ok(Entry::Proposer(Proposer::Address(
AccountAddress::from_hex_literal(s)?,
)));
}
if let Some(s) = s.strip_prefix("block-time:") {
return Ok(Entry::Timestamp(s.parse::<u64>()?));
}
Err(ErrorKind::Other(format!(
"failed to parse '{}' as transaction config entry",
s
))
.into())
}
}
/// Checks whether a line denotes the start of a new transaction.
pub fn is_new_block(s: &str) -> bool {
let s = s.trim();
if !s.starts_with("//!") {
return false;
}
s[3..].trim_start() == "block-prologue"
}
impl Entry {
pub fn try_parse(s: &str) -> Result<Option<Self>> {
if s.starts_with("//!") {
Ok(Some(s.parse::<Entry>()?))
} else {
Ok(None)
}
}
}
pub fn build_block_metadata(config: &GlobalConfig, entries: &[Entry]) -> Result<BlockMetadata> {
let mut timestamp = None;
let mut proposer = None;
for entry in entries {
match entry {
Entry::Proposer(s) => {
proposer = match s {
Proposer::Account(s) => Some(*config.get_account_for_name(s)?.address()),
Proposer::Address(addr) => Some(*addr),
};
}
Entry::Timestamp(new_timestamp) => timestamp = Some(new_timestamp),
}
}
if let (Some(t), Some(addr)) = (timestamp, proposer) {
// TODO: Add parser for hash value and vote maps.
Ok(BlockMetadata::new(HashValue::zero(), 0, *t, vec![], addr))
} else |
}
| {
Err(ErrorKind::Other("Cannot generate block metadata".to_string()).into())
} | conditional_block |
block_metadata.rs | // Copyright (c) The Diem Core Contributors
// SPDX-License-Identifier: Apache-2.0
use crate::{config::global::Config as GlobalConfig, errors::*};
use diem_crypto::HashValue;
use diem_types::{account_address::AccountAddress, block_metadata::BlockMetadata};
use std::str::FromStr;
#[derive(Debug)]
pub enum Proposer {
Account(String),
Address(AccountAddress),
}
#[derive(Debug)]
pub enum Entry {
Proposer(Proposer),
Timestamp(u64),
}
impl FromStr for Entry {
type Err = Error;
fn from_str(s: &str) -> Result<Self> {
let s = s.split_whitespace().collect::<String>();
let s = &s
.strip_prefix("//!")
.ok_or_else(|| ErrorKind::Other("txn config entry must start with //!".to_string()))?
.trim_start();
if let Some(s) = s.strip_prefix("proposer:") {
if s.is_empty() {
return Err(ErrorKind::Other("sender cannot be empty".to_string()).into());
}
return Ok(Entry::Proposer(Proposer::Account(s.to_string())));
}
if let Some(s) = s.strip_prefix("proposer-address:") {
if s.is_empty() {
return Err(ErrorKind::Other("sender address cannot be empty".to_string()).into());
} | return Ok(Entry::Proposer(Proposer::Address(
AccountAddress::from_hex_literal(s)?,
)));
}
if let Some(s) = s.strip_prefix("block-time:") {
return Ok(Entry::Timestamp(s.parse::<u64>()?));
}
Err(ErrorKind::Other(format!(
"failed to parse '{}' as transaction config entry",
s
))
.into())
}
}
/// Checks whether a line denotes the start of a new transaction.
pub fn is_new_block(s: &str) -> bool {
let s = s.trim();
if !s.starts_with("//!") {
return false;
}
s[3..].trim_start() == "block-prologue"
}
impl Entry {
pub fn try_parse(s: &str) -> Result<Option<Self>> {
if s.starts_with("//!") {
Ok(Some(s.parse::<Entry>()?))
} else {
Ok(None)
}
}
}
pub fn build_block_metadata(config: &GlobalConfig, entries: &[Entry]) -> Result<BlockMetadata> {
let mut timestamp = None;
let mut proposer = None;
for entry in entries {
match entry {
Entry::Proposer(s) => {
proposer = match s {
Proposer::Account(s) => Some(*config.get_account_for_name(s)?.address()),
Proposer::Address(addr) => Some(*addr),
};
}
Entry::Timestamp(new_timestamp) => timestamp = Some(new_timestamp),
}
}
if let (Some(t), Some(addr)) = (timestamp, proposer) {
// TODO: Add parser for hash value and vote maps.
Ok(BlockMetadata::new(HashValue::zero(), 0, *t, vec![], addr))
} else {
Err(ErrorKind::Other("Cannot generate block metadata".to_string()).into())
}
} | random_line_split |
|
cast.py | """
Provide functionality to interact with Cast devices on the network.
For more details about this platform, please refer to the documentation at
https://home-assistant.io/components/media_player.cast/
"""
# pylint: disable=import-error
import logging
import voluptuous as vol
from homeassistant.components.media_player import (
MEDIA_TYPE_MUSIC, MEDIA_TYPE_TVSHOW, MEDIA_TYPE_VIDEO, SUPPORT_NEXT_TRACK,
SUPPORT_PAUSE, SUPPORT_PLAY_MEDIA, SUPPORT_PREVIOUS_TRACK,
SUPPORT_TURN_OFF, SUPPORT_TURN_ON, SUPPORT_VOLUME_MUTE, SUPPORT_VOLUME_SET,
SUPPORT_STOP, MediaPlayerDevice, PLATFORM_SCHEMA)
from homeassistant.const import (
CONF_HOST, STATE_IDLE, STATE_OFF, STATE_PAUSED, STATE_PLAYING,
STATE_UNKNOWN)
import homeassistant.helpers.config_validation as cv
REQUIREMENTS = ['pychromecast==0.7.4']
_LOGGER = logging.getLogger(__name__)
CONF_IGNORE_CEC = 'ignore_cec'
CAST_SPLASH = 'https://home-assistant.io/images/cast/splash.png'
DEFAULT_PORT = 8009
SUPPORT_CAST = SUPPORT_PAUSE | SUPPORT_VOLUME_SET | SUPPORT_VOLUME_MUTE | \
SUPPORT_TURN_ON | SUPPORT_TURN_OFF | SUPPORT_PREVIOUS_TRACK | \
SUPPORT_NEXT_TRACK | SUPPORT_PLAY_MEDIA | SUPPORT_STOP
KNOWN_HOSTS = []
PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend({
vol.Optional(CONF_HOST): cv.string,
})
# pylint: disable=unused-argument
def setup_platform(hass, config, add_devices, discovery_info=None):
    """Setup the cast platform.

    Hosts are taken from (in priority order) the discovery info, an explicit
    ``host`` config entry, or network discovery via pychromecast.
    """
    import pychromecast

    # import CEC IGNORE attributes
    ignore_cec = config.get(CONF_IGNORE_CEC, [])
    if isinstance(ignore_cec, list):
        pychromecast.IGNORE_CEC += ignore_cec
    else:
        _LOGGER.error('CEC config "%s" must be a list.', CONF_IGNORE_CEC)

    # Every branch below assigns `hosts` (or returns), so no empty
    # initializer is needed.
    if discovery_info and discovery_info in KNOWN_HOSTS:
        # Already set up via a previous discovery event.
        return
    elif discovery_info:
        hosts = [discovery_info]
    elif CONF_HOST in config:
        hosts = [(config.get(CONF_HOST), DEFAULT_PORT)]
    else:
        # Fall back to network discovery; skip hosts we already know.
        hosts = [tuple(dev[:2]) for dev in pychromecast.discover_chromecasts()
                 if tuple(dev[:2]) not in KNOWN_HOSTS]

    casts = []
    for host in hosts:
        try:
            casts.append(CastDevice(*host))
            KNOWN_HOSTS.append(host)
        except pychromecast.ChromecastConnectionError:
            # Device unreachable right now -- best effort, skip it.
            pass
    add_devices(casts)
class CastDevice(MediaPlayerDevice):
"""Representation of a Cast device on the network."""
# pylint: disable=abstract-method
# pylint: disable=too-many-public-methods
    def __init__(self, host, port):
        """Initialize the Cast device and subscribe to status updates."""
        import pychromecast
        self.cast = pychromecast.Chromecast(host, port)
        # Register self as listener so new_cast_status / new_media_status
        # are invoked on receiver and media state changes.
        self.cast.socket_client.receiver_controller.register_status_listener(
            self)
        self.cast.socket_client.media_controller.register_status_listener(self)
        # Snapshot the current state; refreshed later by listener callbacks.
        self.cast_status = self.cast.status
        self.media_status = self.cast.media_controller.status
    @property
    def should_poll(self):
        """No polling needed -- state is pushed via the status listeners."""
        return False
@property
def name(self):
"""Return the name of the device."""
return self.cast.device.friendly_name
# MediaPlayerDevice properties and methods
@property
def state(self):
"""Return the state of the player."""
if self.media_status is None:
return STATE_UNKNOWN
elif self.media_status.player_is_playing:
return STATE_PLAYING
elif self.media_status.player_is_paused:
return STATE_PAUSED
elif self.media_status.player_is_idle:
return STATE_IDLE
elif self.cast.is_idle:
|
else:
return STATE_UNKNOWN
@property
def volume_level(self):
"""Volume level of the media player (0..1)."""
return self.cast_status.volume_level if self.cast_status else None
@property
def is_volume_muted(self):
"""Boolean if volume is currently muted."""
return self.cast_status.volume_muted if self.cast_status else None
@property
def media_content_id(self):
"""Content ID of current playing media."""
return self.media_status.content_id if self.media_status else None
    @property
    def media_content_type(self):
        """Content type (tv show / video / music) of current playing media."""
        if self.media_status is None:
            return None
        elif self.media_status.media_is_tvshow:
            return MEDIA_TYPE_TVSHOW
        elif self.media_status.media_is_movie:
            return MEDIA_TYPE_VIDEO
        elif self.media_status.media_is_musictrack:
            return MEDIA_TYPE_MUSIC
        # Unknown or unclassified media.
        return None
@property
def media_duration(self):
"""Duration of current playing media in seconds."""
return self.media_status.duration if self.media_status else None
@property
def media_image_url(self):
"""Image url of current playing media."""
if self.media_status is None:
return None
images = self.media_status.images
return images[0].url if images else None
@property
def media_title(self):
"""Title of current playing media."""
return self.media_status.title if self.media_status else None
@property
def media_artist(self):
"""Artist of current playing media (Music track only)."""
return self.media_status.artist if self.media_status else None
@property
def media_album(self):
"""Album of current playing media (Music track only)."""
return self.media_status.album_name if self.media_status else None
@property
def media_album_artist(self):
"""Album arist of current playing media (Music track only)."""
return self.media_status.album_artist if self.media_status else None
@property
def media_track(self):
"""Track number of current playing media (Music track only)."""
return self.media_status.track if self.media_status else None
@property
def media_series_title(self):
"""The title of the series of current playing media (TV Show only)."""
return self.media_status.series_title if self.media_status else None
@property
def media_season(self):
"""Season of current playing media (TV Show only)."""
return self.media_status.season if self.media_status else None
@property
def media_episode(self):
"""Episode of current playing media (TV Show only)."""
return self.media_status.episode if self.media_status else None
@property
def app_id(self):
"""Return the ID of the current running app."""
return self.cast.app_id
@property
def app_name(self):
"""Name of the current running app."""
return self.cast.app_display_name
@property
def supported_media_commands(self):
"""Flag of media commands that are supported."""
return SUPPORT_CAST
    def turn_on(self):
        """Turn on the ChromeCast."""
        # The only way we can turn the Chromecast on is by launching an app.
        if not self.cast.status or not self.cast.status.is_active_input:
            import pychromecast
            if self.cast.app_id:
                # Quit whatever is running so the splash app can launch.
                self.cast.quit_app()
            self.cast.play_media(
                CAST_SPLASH, pychromecast.STREAM_TYPE_BUFFERED)
def turn_off(self):
"""Turn Chromecast off."""
self.cast.quit_app()
def mute_volume(self, mute):
"""Mute the volume."""
self.cast.set_volume_muted(mute)
def set_volume_level(self, volume):
"""Set volume level, range 0..1."""
self.cast.set_volume(volume)
def media_play(self):
"""Send play commmand."""
self.cast.media_controller.play()
def media_pause(self):
"""Send pause command."""
self.cast.media_controller.pause()
def media_stop(self):
"""Send stop command."""
self.cast.media_controller.stop()
def media_previous_track(self):
"""Send previous track command."""
self.cast.media_controller.rewind()
def media_next_track(self):
"""Send next track command."""
self.cast.media_controller.skip()
def media_seek(self, position):
"""Seek the media to a specific location."""
self.cast.media_controller.seek(position)
    def play_media(self, media_type, media_id, **kwargs):
        """Play media from a URL.

        Note: pychromecast's play_media takes (url, content_type), hence the
        swapped argument order below.
        """
        self.cast.media_controller.play_media(media_id, media_type)
# Implementation of chromecast status_listener methods
    def new_cast_status(self, status):
        """Called when a new cast status is received."""
        self.cast_status = status
        # Push the updated state to Home Assistant immediately.
        self.update_ha_state()
    def new_media_status(self, status):
        """Called when a new media status is received."""
        self.media_status = status
        # Push the updated state to Home Assistant immediately.
        self.update_ha_state()
| return STATE_OFF | conditional_block |
cast.py | """
Provide functionality to interact with Cast devices on the network.
For more details about this platform, please refer to the documentation at
https://home-assistant.io/components/media_player.cast/
"""
# pylint: disable=import-error
import logging
import voluptuous as vol
from homeassistant.components.media_player import (
MEDIA_TYPE_MUSIC, MEDIA_TYPE_TVSHOW, MEDIA_TYPE_VIDEO, SUPPORT_NEXT_TRACK,
SUPPORT_PAUSE, SUPPORT_PLAY_MEDIA, SUPPORT_PREVIOUS_TRACK,
SUPPORT_TURN_OFF, SUPPORT_TURN_ON, SUPPORT_VOLUME_MUTE, SUPPORT_VOLUME_SET,
SUPPORT_STOP, MediaPlayerDevice, PLATFORM_SCHEMA)
from homeassistant.const import (
CONF_HOST, STATE_IDLE, STATE_OFF, STATE_PAUSED, STATE_PLAYING,
STATE_UNKNOWN)
import homeassistant.helpers.config_validation as cv
REQUIREMENTS = ['pychromecast==0.7.4']
_LOGGER = logging.getLogger(__name__)
CONF_IGNORE_CEC = 'ignore_cec'
CAST_SPLASH = 'https://home-assistant.io/images/cast/splash.png'
DEFAULT_PORT = 8009
SUPPORT_CAST = SUPPORT_PAUSE | SUPPORT_VOLUME_SET | SUPPORT_VOLUME_MUTE | \
SUPPORT_TURN_ON | SUPPORT_TURN_OFF | SUPPORT_PREVIOUS_TRACK | \
SUPPORT_NEXT_TRACK | SUPPORT_PLAY_MEDIA | SUPPORT_STOP
KNOWN_HOSTS = []
PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend({
vol.Optional(CONF_HOST): cv.string,
})
# pylint: disable=unused-argument
def setup_platform(hass, config, add_devices, discovery_info=None):
"""Setup the cast platform."""
import pychromecast
# import CEC IGNORE attributes
ignore_cec = config.get(CONF_IGNORE_CEC, [])
if isinstance(ignore_cec, list):
pychromecast.IGNORE_CEC += ignore_cec
else:
_LOGGER.error('CEC config "%s" must be a list.', CONF_IGNORE_CEC)
hosts = []
if discovery_info and discovery_info in KNOWN_HOSTS:
return
elif discovery_info:
hosts = [discovery_info]
elif CONF_HOST in config:
hosts = [(config.get(CONF_HOST), DEFAULT_PORT)]
else:
hosts = [tuple(dev[:2]) for dev in pychromecast.discover_chromecasts()
if tuple(dev[:2]) not in KNOWN_HOSTS]
casts = []
for host in hosts:
try:
casts.append(CastDevice(*host))
KNOWN_HOSTS.append(host)
except pychromecast.ChromecastConnectionError:
pass
add_devices(casts)
class CastDevice(MediaPlayerDevice):
"""Representation of a Cast device on the network."""
# pylint: disable=abstract-method
# pylint: disable=too-many-public-methods
def __init__(self, host, port):
"""Initialize the Cast device."""
import pychromecast
self.cast = pychromecast.Chromecast(host, port)
self.cast.socket_client.receiver_controller.register_status_listener(
self)
self.cast.socket_client.media_controller.register_status_listener(self)
self.cast_status = self.cast.status
self.media_status = self.cast.media_controller.status
@property
def should_poll(self):
"""No polling needed."""
return False
@property
def name(self):
"""Return the name of the device."""
return self.cast.device.friendly_name
# MediaPlayerDevice properties and methods
@property
def state(self):
"""Return the state of the player."""
if self.media_status is None:
return STATE_UNKNOWN
elif self.media_status.player_is_playing:
return STATE_PLAYING
elif self.media_status.player_is_paused:
return STATE_PAUSED
elif self.media_status.player_is_idle:
return STATE_IDLE
elif self.cast.is_idle:
return STATE_OFF
else:
return STATE_UNKNOWN
@property
def volume_level(self):
"""Volume level of the media player (0..1)."""
return self.cast_status.volume_level if self.cast_status else None
@property
def is_volume_muted(self):
"""Boolean if volume is currently muted."""
return self.cast_status.volume_muted if self.cast_status else None
@property
def media_content_id(self):
"""Content ID of current playing media."""
return self.media_status.content_id if self.media_status else None
| @property
def media_content_type(self):
"""Content type of current playing media."""
if self.media_status is None:
return None
elif self.media_status.media_is_tvshow:
return MEDIA_TYPE_TVSHOW
elif self.media_status.media_is_movie:
return MEDIA_TYPE_VIDEO
elif self.media_status.media_is_musictrack:
return MEDIA_TYPE_MUSIC
return None
@property
def media_duration(self):
"""Duration of current playing media in seconds."""
return self.media_status.duration if self.media_status else None
@property
def media_image_url(self):
"""Image url of current playing media."""
if self.media_status is None:
return None
images = self.media_status.images
return images[0].url if images else None
@property
def media_title(self):
"""Title of current playing media."""
return self.media_status.title if self.media_status else None
@property
def media_artist(self):
"""Artist of current playing media (Music track only)."""
return self.media_status.artist if self.media_status else None
@property
def media_album(self):
"""Album of current playing media (Music track only)."""
return self.media_status.album_name if self.media_status else None
@property
def media_album_artist(self):
"""Album arist of current playing media (Music track only)."""
return self.media_status.album_artist if self.media_status else None
@property
def media_track(self):
"""Track number of current playing media (Music track only)."""
return self.media_status.track if self.media_status else None
@property
def media_series_title(self):
"""The title of the series of current playing media (TV Show only)."""
return self.media_status.series_title if self.media_status else None
@property
def media_season(self):
"""Season of current playing media (TV Show only)."""
return self.media_status.season if self.media_status else None
@property
def media_episode(self):
"""Episode of current playing media (TV Show only)."""
return self.media_status.episode if self.media_status else None
@property
def app_id(self):
"""Return the ID of the current running app."""
return self.cast.app_id
@property
def app_name(self):
"""Name of the current running app."""
return self.cast.app_display_name
@property
def supported_media_commands(self):
"""Flag of media commands that are supported."""
return SUPPORT_CAST
def turn_on(self):
"""Turn on the ChromeCast."""
# The only way we can turn the Chromecast is on is by launching an app
if not self.cast.status or not self.cast.status.is_active_input:
import pychromecast
if self.cast.app_id:
self.cast.quit_app()
self.cast.play_media(
CAST_SPLASH, pychromecast.STREAM_TYPE_BUFFERED)
def turn_off(self):
"""Turn Chromecast off."""
self.cast.quit_app()
def mute_volume(self, mute):
"""Mute the volume."""
self.cast.set_volume_muted(mute)
def set_volume_level(self, volume):
"""Set volume level, range 0..1."""
self.cast.set_volume(volume)
def media_play(self):
"""Send play commmand."""
self.cast.media_controller.play()
def media_pause(self):
"""Send pause command."""
self.cast.media_controller.pause()
def media_stop(self):
"""Send stop command."""
self.cast.media_controller.stop()
def media_previous_track(self):
"""Send previous track command."""
self.cast.media_controller.rewind()
def media_next_track(self):
"""Send next track command."""
self.cast.media_controller.skip()
def media_seek(self, position):
"""Seek the media to a specific location."""
self.cast.media_controller.seek(position)
def play_media(self, media_type, media_id, **kwargs):
"""Play media from a URL."""
self.cast.media_controller.play_media(media_id, media_type)
# Implementation of chromecast status_listener methods
def new_cast_status(self, status):
"""Called when a new cast status is received."""
self.cast_status = status
self.update_ha_state()
def new_media_status(self, status):
"""Called when a new media status is received."""
self.media_status = status
self.update_ha_state() | random_line_split |
|
cast.py | """
Provide functionality to interact with Cast devices on the network.
For more details about this platform, please refer to the documentation at
https://home-assistant.io/components/media_player.cast/
"""
# pylint: disable=import-error
import logging
import voluptuous as vol
from homeassistant.components.media_player import (
MEDIA_TYPE_MUSIC, MEDIA_TYPE_TVSHOW, MEDIA_TYPE_VIDEO, SUPPORT_NEXT_TRACK,
SUPPORT_PAUSE, SUPPORT_PLAY_MEDIA, SUPPORT_PREVIOUS_TRACK,
SUPPORT_TURN_OFF, SUPPORT_TURN_ON, SUPPORT_VOLUME_MUTE, SUPPORT_VOLUME_SET,
SUPPORT_STOP, MediaPlayerDevice, PLATFORM_SCHEMA)
from homeassistant.const import (
CONF_HOST, STATE_IDLE, STATE_OFF, STATE_PAUSED, STATE_PLAYING,
STATE_UNKNOWN)
import homeassistant.helpers.config_validation as cv
REQUIREMENTS = ['pychromecast==0.7.4']
_LOGGER = logging.getLogger(__name__)
CONF_IGNORE_CEC = 'ignore_cec'
CAST_SPLASH = 'https://home-assistant.io/images/cast/splash.png'
DEFAULT_PORT = 8009
SUPPORT_CAST = SUPPORT_PAUSE | SUPPORT_VOLUME_SET | SUPPORT_VOLUME_MUTE | \
SUPPORT_TURN_ON | SUPPORT_TURN_OFF | SUPPORT_PREVIOUS_TRACK | \
SUPPORT_NEXT_TRACK | SUPPORT_PLAY_MEDIA | SUPPORT_STOP
KNOWN_HOSTS = []
PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend({
vol.Optional(CONF_HOST): cv.string,
})
# pylint: disable=unused-argument
def setup_platform(hass, config, add_devices, discovery_info=None):
"""Setup the cast platform."""
import pychromecast
# import CEC IGNORE attributes
ignore_cec = config.get(CONF_IGNORE_CEC, [])
if isinstance(ignore_cec, list):
pychromecast.IGNORE_CEC += ignore_cec
else:
_LOGGER.error('CEC config "%s" must be a list.', CONF_IGNORE_CEC)
hosts = []
if discovery_info and discovery_info in KNOWN_HOSTS:
return
elif discovery_info:
hosts = [discovery_info]
elif CONF_HOST in config:
hosts = [(config.get(CONF_HOST), DEFAULT_PORT)]
else:
hosts = [tuple(dev[:2]) for dev in pychromecast.discover_chromecasts()
if tuple(dev[:2]) not in KNOWN_HOSTS]
casts = []
for host in hosts:
try:
casts.append(CastDevice(*host))
KNOWN_HOSTS.append(host)
except pychromecast.ChromecastConnectionError:
pass
add_devices(casts)
class CastDevice(MediaPlayerDevice):
"""Representation of a Cast device on the network."""
# pylint: disable=abstract-method
# pylint: disable=too-many-public-methods
def __init__(self, host, port):
"""Initialize the Cast device."""
import pychromecast
self.cast = pychromecast.Chromecast(host, port)
self.cast.socket_client.receiver_controller.register_status_listener(
self)
self.cast.socket_client.media_controller.register_status_listener(self)
self.cast_status = self.cast.status
self.media_status = self.cast.media_controller.status
@property
def should_poll(self):
"""No polling needed."""
return False
@property
def name(self):
"""Return the name of the device."""
return self.cast.device.friendly_name
# MediaPlayerDevice properties and methods
@property
def state(self):
"""Return the state of the player."""
if self.media_status is None:
return STATE_UNKNOWN
elif self.media_status.player_is_playing:
return STATE_PLAYING
elif self.media_status.player_is_paused:
return STATE_PAUSED
elif self.media_status.player_is_idle:
return STATE_IDLE
elif self.cast.is_idle:
return STATE_OFF
else:
return STATE_UNKNOWN
@property
def volume_level(self):
"""Volume level of the media player (0..1)."""
return self.cast_status.volume_level if self.cast_status else None
@property
def is_volume_muted(self):
"""Boolean if volume is currently muted."""
return self.cast_status.volume_muted if self.cast_status else None
@property
def media_content_id(self):
"""Content ID of current playing media."""
return self.media_status.content_id if self.media_status else None
@property
def media_content_type(self):
"""Content type of current playing media."""
if self.media_status is None:
return None
elif self.media_status.media_is_tvshow:
return MEDIA_TYPE_TVSHOW
elif self.media_status.media_is_movie:
return MEDIA_TYPE_VIDEO
elif self.media_status.media_is_musictrack:
return MEDIA_TYPE_MUSIC
return None
@property
def media_duration(self):
"""Duration of current playing media in seconds."""
return self.media_status.duration if self.media_status else None
@property
def media_image_url(self):
"""Image url of current playing media."""
if self.media_status is None:
return None
images = self.media_status.images
return images[0].url if images else None
@property
def media_title(self):
"""Title of current playing media."""
return self.media_status.title if self.media_status else None
@property
def media_artist(self):
"""Artist of current playing media (Music track only)."""
return self.media_status.artist if self.media_status else None
@property
def media_album(self):
"""Album of current playing media (Music track only)."""
return self.media_status.album_name if self.media_status else None
@property
def media_album_artist(self):
"""Album arist of current playing media (Music track only)."""
return self.media_status.album_artist if self.media_status else None
@property
def media_track(self):
"""Track number of current playing media (Music track only)."""
return self.media_status.track if self.media_status else None
@property
def media_series_title(self):
"""The title of the series of current playing media (TV Show only)."""
return self.media_status.series_title if self.media_status else None
@property
def media_season(self):
"""Season of current playing media (TV Show only)."""
return self.media_status.season if self.media_status else None
@property
def media_episode(self):
"""Episode of current playing media (TV Show only)."""
return self.media_status.episode if self.media_status else None
@property
def app_id(self):
"""Return the ID of the current running app."""
return self.cast.app_id
@property
def app_name(self):
"""Name of the current running app."""
return self.cast.app_display_name
@property
def supported_media_commands(self):
"""Flag of media commands that are supported."""
return SUPPORT_CAST
def turn_on(self):
"""Turn on the ChromeCast."""
# The only way we can turn the Chromecast is on is by launching an app
if not self.cast.status or not self.cast.status.is_active_input:
import pychromecast
if self.cast.app_id:
self.cast.quit_app()
self.cast.play_media(
CAST_SPLASH, pychromecast.STREAM_TYPE_BUFFERED)
def turn_off(self):
"""Turn Chromecast off."""
self.cast.quit_app()
def mute_volume(self, mute):
"""Mute the volume."""
self.cast.set_volume_muted(mute)
def set_volume_level(self, volume):
"""Set volume level, range 0..1."""
self.cast.set_volume(volume)
def media_play(self):
|
def media_pause(self):
"""Send pause command."""
self.cast.media_controller.pause()
def media_stop(self):
"""Send stop command."""
self.cast.media_controller.stop()
def media_previous_track(self):
"""Send previous track command."""
self.cast.media_controller.rewind()
def media_next_track(self):
"""Send next track command."""
self.cast.media_controller.skip()
def media_seek(self, position):
"""Seek the media to a specific location."""
self.cast.media_controller.seek(position)
def play_media(self, media_type, media_id, **kwargs):
"""Play media from a URL."""
self.cast.media_controller.play_media(media_id, media_type)
# Implementation of chromecast status_listener methods
def new_cast_status(self, status):
"""Called when a new cast status is received."""
self.cast_status = status
self.update_ha_state()
def new_media_status(self, status):
"""Called when a new media status is received."""
self.media_status = status
self.update_ha_state()
| """Send play commmand."""
self.cast.media_controller.play() | identifier_body |
cast.py | """
Provide functionality to interact with Cast devices on the network.
For more details about this platform, please refer to the documentation at
https://home-assistant.io/components/media_player.cast/
"""
# pylint: disable=import-error
import logging
import voluptuous as vol
from homeassistant.components.media_player import (
MEDIA_TYPE_MUSIC, MEDIA_TYPE_TVSHOW, MEDIA_TYPE_VIDEO, SUPPORT_NEXT_TRACK,
SUPPORT_PAUSE, SUPPORT_PLAY_MEDIA, SUPPORT_PREVIOUS_TRACK,
SUPPORT_TURN_OFF, SUPPORT_TURN_ON, SUPPORT_VOLUME_MUTE, SUPPORT_VOLUME_SET,
SUPPORT_STOP, MediaPlayerDevice, PLATFORM_SCHEMA)
from homeassistant.const import (
CONF_HOST, STATE_IDLE, STATE_OFF, STATE_PAUSED, STATE_PLAYING,
STATE_UNKNOWN)
import homeassistant.helpers.config_validation as cv
REQUIREMENTS = ['pychromecast==0.7.4']
_LOGGER = logging.getLogger(__name__)
CONF_IGNORE_CEC = 'ignore_cec'
CAST_SPLASH = 'https://home-assistant.io/images/cast/splash.png'
DEFAULT_PORT = 8009
SUPPORT_CAST = SUPPORT_PAUSE | SUPPORT_VOLUME_SET | SUPPORT_VOLUME_MUTE | \
SUPPORT_TURN_ON | SUPPORT_TURN_OFF | SUPPORT_PREVIOUS_TRACK | \
SUPPORT_NEXT_TRACK | SUPPORT_PLAY_MEDIA | SUPPORT_STOP
KNOWN_HOSTS = []
PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend({
vol.Optional(CONF_HOST): cv.string,
})
# pylint: disable=unused-argument
def setup_platform(hass, config, add_devices, discovery_info=None):
"""Setup the cast platform."""
import pychromecast
# import CEC IGNORE attributes
ignore_cec = config.get(CONF_IGNORE_CEC, [])
if isinstance(ignore_cec, list):
pychromecast.IGNORE_CEC += ignore_cec
else:
_LOGGER.error('CEC config "%s" must be a list.', CONF_IGNORE_CEC)
hosts = []
if discovery_info and discovery_info in KNOWN_HOSTS:
return
elif discovery_info:
hosts = [discovery_info]
elif CONF_HOST in config:
hosts = [(config.get(CONF_HOST), DEFAULT_PORT)]
else:
hosts = [tuple(dev[:2]) for dev in pychromecast.discover_chromecasts()
if tuple(dev[:2]) not in KNOWN_HOSTS]
casts = []
for host in hosts:
try:
casts.append(CastDevice(*host))
KNOWN_HOSTS.append(host)
except pychromecast.ChromecastConnectionError:
pass
add_devices(casts)
class CastDevice(MediaPlayerDevice):
"""Representation of a Cast device on the network."""
# pylint: disable=abstract-method
# pylint: disable=too-many-public-methods
def __init__(self, host, port):
"""Initialize the Cast device."""
import pychromecast
self.cast = pychromecast.Chromecast(host, port)
self.cast.socket_client.receiver_controller.register_status_listener(
self)
self.cast.socket_client.media_controller.register_status_listener(self)
self.cast_status = self.cast.status
self.media_status = self.cast.media_controller.status
@property
def should_poll(self):
"""No polling needed."""
return False
@property
def name(self):
"""Return the name of the device."""
return self.cast.device.friendly_name
# MediaPlayerDevice properties and methods
@property
def state(self):
"""Return the state of the player."""
if self.media_status is None:
return STATE_UNKNOWN
elif self.media_status.player_is_playing:
return STATE_PLAYING
elif self.media_status.player_is_paused:
return STATE_PAUSED
elif self.media_status.player_is_idle:
return STATE_IDLE
elif self.cast.is_idle:
return STATE_OFF
else:
return STATE_UNKNOWN
@property
def volume_level(self):
"""Volume level of the media player (0..1)."""
return self.cast_status.volume_level if self.cast_status else None
@property
def is_volume_muted(self):
"""Boolean if volume is currently muted."""
return self.cast_status.volume_muted if self.cast_status else None
@property
def media_content_id(self):
"""Content ID of current playing media."""
return self.media_status.content_id if self.media_status else None
@property
def media_content_type(self):
"""Content type of current playing media."""
if self.media_status is None:
return None
elif self.media_status.media_is_tvshow:
return MEDIA_TYPE_TVSHOW
elif self.media_status.media_is_movie:
return MEDIA_TYPE_VIDEO
elif self.media_status.media_is_musictrack:
return MEDIA_TYPE_MUSIC
return None
@property
def media_duration(self):
"""Duration of current playing media in seconds."""
return self.media_status.duration if self.media_status else None
@property
def media_image_url(self):
"""Image url of current playing media."""
if self.media_status is None:
return None
images = self.media_status.images
return images[0].url if images else None
@property
def media_title(self):
"""Title of current playing media."""
return self.media_status.title if self.media_status else None
@property
def media_artist(self):
"""Artist of current playing media (Music track only)."""
return self.media_status.artist if self.media_status else None
@property
def media_album(self):
"""Album of current playing media (Music track only)."""
return self.media_status.album_name if self.media_status else None
@property
def media_album_artist(self):
"""Album arist of current playing media (Music track only)."""
return self.media_status.album_artist if self.media_status else None
@property
def media_track(self):
"""Track number of current playing media (Music track only)."""
return self.media_status.track if self.media_status else None
@property
def media_series_title(self):
"""The title of the series of current playing media (TV Show only)."""
return self.media_status.series_title if self.media_status else None
@property
def media_season(self):
"""Season of current playing media (TV Show only)."""
return self.media_status.season if self.media_status else None
@property
def media_episode(self):
"""Episode of current playing media (TV Show only)."""
return self.media_status.episode if self.media_status else None
@property
def app_id(self):
"""Return the ID of the current running app."""
return self.cast.app_id
@property
def app_name(self):
"""Name of the current running app."""
return self.cast.app_display_name
@property
def | (self):
"""Flag of media commands that are supported."""
return SUPPORT_CAST
def turn_on(self):
"""Turn on the ChromeCast."""
# The only way we can turn the Chromecast is on is by launching an app
if not self.cast.status or not self.cast.status.is_active_input:
import pychromecast
if self.cast.app_id:
self.cast.quit_app()
self.cast.play_media(
CAST_SPLASH, pychromecast.STREAM_TYPE_BUFFERED)
def turn_off(self):
"""Turn Chromecast off."""
self.cast.quit_app()
def mute_volume(self, mute):
"""Mute the volume."""
self.cast.set_volume_muted(mute)
def set_volume_level(self, volume):
"""Set volume level, range 0..1."""
self.cast.set_volume(volume)
def media_play(self):
"""Send play commmand."""
self.cast.media_controller.play()
def media_pause(self):
"""Send pause command."""
self.cast.media_controller.pause()
def media_stop(self):
"""Send stop command."""
self.cast.media_controller.stop()
def media_previous_track(self):
"""Send previous track command."""
self.cast.media_controller.rewind()
def media_next_track(self):
"""Send next track command."""
self.cast.media_controller.skip()
def media_seek(self, position):
"""Seek the media to a specific location."""
self.cast.media_controller.seek(position)
def play_media(self, media_type, media_id, **kwargs):
"""Play media from a URL."""
self.cast.media_controller.play_media(media_id, media_type)
# Implementation of chromecast status_listener methods
def new_cast_status(self, status):
"""Called when a new cast status is received."""
self.cast_status = status
self.update_ha_state()
def new_media_status(self, status):
"""Called when a new media status is received."""
self.media_status = status
self.update_ha_state()
| supported_media_commands | identifier_name |
parser.py | import json
from datetime import datetime
from dojo.models import Finding
class MeterianParser(object):
def get_scan_types(self):
return ["Meterian Scan"]
def get_label_for_scan_types(self, scan_type):
return scan_type
def get_description_for_scan_types(self, scan_type):
return "Meterian JSON report output file can be imported."
def get_findings(self, report, test):
findings = []
report_json = json.load(report)
security_reports = self.get_security_reports(report_json)
scan_date = str(datetime.fromisoformat(report_json["timestamp"]).date())
for single_security_report in security_reports:
findings += self.do_get_findings(single_security_report, scan_date, test)
return findings
def get_security_reports(self, report_json):
if "reports" in report_json:
if "security" in report_json["reports"]:
if "reports" in report_json["reports"]["security"]:
return report_json["reports"]["security"]["reports"]
raise ValueError("Malformed report: the security reports are missing.")
def do_get_findings(self, single_security_report, scan_date, test):
findings = []
language = single_security_report["language"]
for dependency_report in single_security_report["reports"]:
lib_name = dependency_report["dependency"]["name"]
lib_ver = dependency_report["dependency"]["version"]
finding_title = lib_name + ":" + lib_ver
for advisory in dependency_report["advices"]:
severity = self.get_severity(advisory)
finding = Finding(
title=finding_title,
date=scan_date,
test=test,
severity=severity,
severity_justification="Issue severity of: **" + severity + "** from a base " +
"CVSS score of: **" + str(advisory.get('cvss')) + "**",
description=advisory['description'],
component_name=lib_name,
component_version=lib_ver,
false_p=False,
duplicate=False,
out_of_scope=False,
impact=severity,
static_finding=True,
dynamic_finding=False,
file_path="Manifest file",
unique_id_from_tool=advisory['id'],
tags=[language]
)
if 'cve' in advisory:
if "N/A" != advisory["cve"]:
finding.cve = advisory["cve"]
if "cwe" in advisory:
finding.cwe = int(advisory["cwe"].replace("CWE-", ""))
mitigation_msg = "## Remediation\n"
safe_versions = dependency_report["safeVersions"]
if "latestPatch" in safe_versions:
mitigation_msg += "Upgrade " + lib_name + " to version " + safe_versions["latestPatch"] + " or higher."
elif "latestMinor" in safe_versions:
mitigation_msg += "Upgrade " + lib_name + " to version " + safe_versions["latestMinor"] + " or higher."
elif "latestMajor" in safe_versions:
mitigation_msg += "Upgrade " + lib_name + " to version " + safe_versions["latestMajor"] + "."
else:
mitigation_msg = "We were not able to provide a safe version for this library.\nYou should consider replacing this component as it could be an issue for the safety of your application."
finding.mitigation = mitigation_msg
references = ""
for link in advisory["links"]:
ref_link = self.get_reference_url(link)
if ref_link is not None:
references += "- " + ref_link + "\n"
if references != "":
finding.references = references
findings.append(finding)
return findings
def get_severity(self, advisory):
# Following the CVSS Scoring per https://nvd.nist.gov/vuln-metrics/cvss
if 'cvss' in advisory: | severity = "High"
else:
severity = "Critical"
else:
if advisory["severity"] == "SUGGEST" or advisory["severity"] == "NA" or advisory["severity"] == "NONE":
severity = "Info"
else:
severity = advisory["severity"].title()
return severity
def get_reference_url(self, link_obj):
url = link_obj["url"]
if link_obj["type"] == "CVE":
url = "https://cve.mitre.org/cgi-bin/cvename.cgi?name=" + link_obj["url"]
elif link_obj["type"] == "NVD":
url = "https://nvd.nist.gov/vuln/detail/" + link_obj["url"]
return url | if advisory['cvss'] <= 3.9:
severity = "Low"
elif advisory['cvss'] >= 4.0 and advisory['cvss'] <= 6.9:
severity = "Medium"
elif advisory['cvss'] >= 7.0 and advisory['cvss'] <= 8.9: | random_line_split |
parser.py | import json
from datetime import datetime
from dojo.models import Finding
class MeterianParser(object):
def get_scan_types(self):
return ["Meterian Scan"]
def get_label_for_scan_types(self, scan_type):
return scan_type
def get_description_for_scan_types(self, scan_type):
return "Meterian JSON report output file can be imported."
def get_findings(self, report, test):
findings = []
report_json = json.load(report)
security_reports = self.get_security_reports(report_json)
scan_date = str(datetime.fromisoformat(report_json["timestamp"]).date())
for single_security_report in security_reports:
findings += self.do_get_findings(single_security_report, scan_date, test)
return findings
def get_security_reports(self, report_json):
if "reports" in report_json:
if "security" in report_json["reports"]:
if "reports" in report_json["reports"]["security"]:
return report_json["reports"]["security"]["reports"]
raise ValueError("Malformed report: the security reports are missing.")
def do_get_findings(self, single_security_report, scan_date, test):
findings = []
language = single_security_report["language"]
for dependency_report in single_security_report["reports"]:
lib_name = dependency_report["dependency"]["name"]
lib_ver = dependency_report["dependency"]["version"]
finding_title = lib_name + ":" + lib_ver
for advisory in dependency_report["advices"]:
severity = self.get_severity(advisory)
finding = Finding(
title=finding_title,
date=scan_date,
test=test,
severity=severity,
severity_justification="Issue severity of: **" + severity + "** from a base " +
"CVSS score of: **" + str(advisory.get('cvss')) + "**",
description=advisory['description'],
component_name=lib_name,
component_version=lib_ver,
false_p=False,
duplicate=False,
out_of_scope=False,
impact=severity,
static_finding=True,
dynamic_finding=False,
file_path="Manifest file",
unique_id_from_tool=advisory['id'],
tags=[language]
)
if 'cve' in advisory:
if "N/A" != advisory["cve"]:
finding.cve = advisory["cve"]
if "cwe" in advisory:
finding.cwe = int(advisory["cwe"].replace("CWE-", ""))
mitigation_msg = "## Remediation\n"
safe_versions = dependency_report["safeVersions"]
if "latestPatch" in safe_versions:
mitigation_msg += "Upgrade " + lib_name + " to version " + safe_versions["latestPatch"] + " or higher."
elif "latestMinor" in safe_versions:
mitigation_msg += "Upgrade " + lib_name + " to version " + safe_versions["latestMinor"] + " or higher."
elif "latestMajor" in safe_versions:
mitigation_msg += "Upgrade " + lib_name + " to version " + safe_versions["latestMajor"] + "."
else:
mitigation_msg = "We were not able to provide a safe version for this library.\nYou should consider replacing this component as it could be an issue for the safety of your application."
finding.mitigation = mitigation_msg
references = ""
for link in advisory["links"]:
ref_link = self.get_reference_url(link)
if ref_link is not None:
references += "- " + ref_link + "\n"
if references != "":
finding.references = references
findings.append(finding)
return findings
def get_severity(self, advisory):
# Following the CVSS Scoring per https://nvd.nist.gov/vuln-metrics/cvss
if 'cvss' in advisory:
if advisory['cvss'] <= 3.9:
severity = "Low"
elif advisory['cvss'] >= 4.0 and advisory['cvss'] <= 6.9:
severity = "Medium"
elif advisory['cvss'] >= 7.0 and advisory['cvss'] <= 8.9:
severity = "High"
else:
severity = "Critical"
else:
if advisory["severity"] == "SUGGEST" or advisory["severity"] == "NA" or advisory["severity"] == "NONE":
|
else:
severity = advisory["severity"].title()
return severity
def get_reference_url(self, link_obj):
url = link_obj["url"]
if link_obj["type"] == "CVE":
url = "https://cve.mitre.org/cgi-bin/cvename.cgi?name=" + link_obj["url"]
elif link_obj["type"] == "NVD":
url = "https://nvd.nist.gov/vuln/detail/" + link_obj["url"]
return url
| severity = "Info" | conditional_block |
parser.py | import json
from datetime import datetime
from dojo.models import Finding
class MeterianParser(object):
def get_scan_types(self):
return ["Meterian Scan"]
def get_label_for_scan_types(self, scan_type):
return scan_type
def get_description_for_scan_types(self, scan_type):
return "Meterian JSON report output file can be imported."
def get_findings(self, report, test):
findings = []
report_json = json.load(report)
security_reports = self.get_security_reports(report_json)
scan_date = str(datetime.fromisoformat(report_json["timestamp"]).date())
for single_security_report in security_reports:
findings += self.do_get_findings(single_security_report, scan_date, test)
return findings
def get_security_reports(self, report_json):
if "reports" in report_json:
if "security" in report_json["reports"]:
if "reports" in report_json["reports"]["security"]:
return report_json["reports"]["security"]["reports"]
raise ValueError("Malformed report: the security reports are missing.")
def do_get_findings(self, single_security_report, scan_date, test):
findings = []
language = single_security_report["language"]
for dependency_report in single_security_report["reports"]:
lib_name = dependency_report["dependency"]["name"]
lib_ver = dependency_report["dependency"]["version"]
finding_title = lib_name + ":" + lib_ver
for advisory in dependency_report["advices"]:
severity = self.get_severity(advisory)
finding = Finding(
title=finding_title,
date=scan_date,
test=test,
severity=severity,
severity_justification="Issue severity of: **" + severity + "** from a base " +
"CVSS score of: **" + str(advisory.get('cvss')) + "**",
description=advisory['description'],
component_name=lib_name,
component_version=lib_ver,
false_p=False,
duplicate=False,
out_of_scope=False,
impact=severity,
static_finding=True,
dynamic_finding=False,
file_path="Manifest file",
unique_id_from_tool=advisory['id'],
tags=[language]
)
if 'cve' in advisory:
if "N/A" != advisory["cve"]:
finding.cve = advisory["cve"]
if "cwe" in advisory:
finding.cwe = int(advisory["cwe"].replace("CWE-", ""))
mitigation_msg = "## Remediation\n"
safe_versions = dependency_report["safeVersions"]
if "latestPatch" in safe_versions:
mitigation_msg += "Upgrade " + lib_name + " to version " + safe_versions["latestPatch"] + " or higher."
elif "latestMinor" in safe_versions:
mitigation_msg += "Upgrade " + lib_name + " to version " + safe_versions["latestMinor"] + " or higher."
elif "latestMajor" in safe_versions:
mitigation_msg += "Upgrade " + lib_name + " to version " + safe_versions["latestMajor"] + "."
else:
mitigation_msg = "We were not able to provide a safe version for this library.\nYou should consider replacing this component as it could be an issue for the safety of your application."
finding.mitigation = mitigation_msg
references = ""
for link in advisory["links"]:
ref_link = self.get_reference_url(link)
if ref_link is not None:
references += "- " + ref_link + "\n"
if references != "":
finding.references = references
findings.append(finding)
return findings
def get_severity(self, advisory):
# Following the CVSS Scoring per https://nvd.nist.gov/vuln-metrics/cvss
if 'cvss' in advisory:
if advisory['cvss'] <= 3.9:
severity = "Low"
elif advisory['cvss'] >= 4.0 and advisory['cvss'] <= 6.9:
severity = "Medium"
elif advisory['cvss'] >= 7.0 and advisory['cvss'] <= 8.9:
severity = "High"
else:
severity = "Critical"
else:
if advisory["severity"] == "SUGGEST" or advisory["severity"] == "NA" or advisory["severity"] == "NONE":
severity = "Info"
else:
severity = advisory["severity"].title()
return severity
def | (self, link_obj):
url = link_obj["url"]
if link_obj["type"] == "CVE":
url = "https://cve.mitre.org/cgi-bin/cvename.cgi?name=" + link_obj["url"]
elif link_obj["type"] == "NVD":
url = "https://nvd.nist.gov/vuln/detail/" + link_obj["url"]
return url
| get_reference_url | identifier_name |
parser.py | import json
from datetime import datetime
from dojo.models import Finding
class MeterianParser(object):
def get_scan_types(self):
return ["Meterian Scan"]
def get_label_for_scan_types(self, scan_type):
return scan_type
def get_description_for_scan_types(self, scan_type):
|
def get_findings(self, report, test):
findings = []
report_json = json.load(report)
security_reports = self.get_security_reports(report_json)
scan_date = str(datetime.fromisoformat(report_json["timestamp"]).date())
for single_security_report in security_reports:
findings += self.do_get_findings(single_security_report, scan_date, test)
return findings
def get_security_reports(self, report_json):
if "reports" in report_json:
if "security" in report_json["reports"]:
if "reports" in report_json["reports"]["security"]:
return report_json["reports"]["security"]["reports"]
raise ValueError("Malformed report: the security reports are missing.")
def do_get_findings(self, single_security_report, scan_date, test):
findings = []
language = single_security_report["language"]
for dependency_report in single_security_report["reports"]:
lib_name = dependency_report["dependency"]["name"]
lib_ver = dependency_report["dependency"]["version"]
finding_title = lib_name + ":" + lib_ver
for advisory in dependency_report["advices"]:
severity = self.get_severity(advisory)
finding = Finding(
title=finding_title,
date=scan_date,
test=test,
severity=severity,
severity_justification="Issue severity of: **" + severity + "** from a base " +
"CVSS score of: **" + str(advisory.get('cvss')) + "**",
description=advisory['description'],
component_name=lib_name,
component_version=lib_ver,
false_p=False,
duplicate=False,
out_of_scope=False,
impact=severity,
static_finding=True,
dynamic_finding=False,
file_path="Manifest file",
unique_id_from_tool=advisory['id'],
tags=[language]
)
if 'cve' in advisory:
if "N/A" != advisory["cve"]:
finding.cve = advisory["cve"]
if "cwe" in advisory:
finding.cwe = int(advisory["cwe"].replace("CWE-", ""))
mitigation_msg = "## Remediation\n"
safe_versions = dependency_report["safeVersions"]
if "latestPatch" in safe_versions:
mitigation_msg += "Upgrade " + lib_name + " to version " + safe_versions["latestPatch"] + " or higher."
elif "latestMinor" in safe_versions:
mitigation_msg += "Upgrade " + lib_name + " to version " + safe_versions["latestMinor"] + " or higher."
elif "latestMajor" in safe_versions:
mitigation_msg += "Upgrade " + lib_name + " to version " + safe_versions["latestMajor"] + "."
else:
mitigation_msg = "We were not able to provide a safe version for this library.\nYou should consider replacing this component as it could be an issue for the safety of your application."
finding.mitigation = mitigation_msg
references = ""
for link in advisory["links"]:
ref_link = self.get_reference_url(link)
if ref_link is not None:
references += "- " + ref_link + "\n"
if references != "":
finding.references = references
findings.append(finding)
return findings
def get_severity(self, advisory):
# Following the CVSS Scoring per https://nvd.nist.gov/vuln-metrics/cvss
if 'cvss' in advisory:
if advisory['cvss'] <= 3.9:
severity = "Low"
elif advisory['cvss'] >= 4.0 and advisory['cvss'] <= 6.9:
severity = "Medium"
elif advisory['cvss'] >= 7.0 and advisory['cvss'] <= 8.9:
severity = "High"
else:
severity = "Critical"
else:
if advisory["severity"] == "SUGGEST" or advisory["severity"] == "NA" or advisory["severity"] == "NONE":
severity = "Info"
else:
severity = advisory["severity"].title()
return severity
def get_reference_url(self, link_obj):
url = link_obj["url"]
if link_obj["type"] == "CVE":
url = "https://cve.mitre.org/cgi-bin/cvename.cgi?name=" + link_obj["url"]
elif link_obj["type"] == "NVD":
url = "https://nvd.nist.gov/vuln/detail/" + link_obj["url"]
return url
| return "Meterian JSON report output file can be imported." | identifier_body |
urls.py | from django.conf.urls import patterns, url
from django.views.generic import RedirectView
from django.conf import settings
from . import views
products = r'/products/(?P<product>\w+)'
versions = r'/versions/(?P<versions>[;\w\.()]+)'
version = r'/versions/(?P<version>[;\w\.()]+)'
perm_legacy_redirect = settings.PERMANENT_LEGACY_REDIRECTS
urlpatterns = patterns(
'', # prefix
url('^robots\.txt$',
views.robots_txt,
name='robots_txt'),
url(r'^status/json/$',
views.status_json,
name='status_json'),
url(r'^status/revision/$',
views.status_revision,
name='status_revision'),
url(r'^crontabber-state/$',
views.crontabber_state,
name='crontabber_state'),
url('^crashes-per-day/$',
views.crashes_per_day,
name='crashes_per_day'),
url(r'^exploitability/$',
views.exploitability_report,
name='exploitability_report'),
url(r'^report/index/(?P<crash_id>[\w-]+)$',
views.report_index,
name='report_index'),
url(r'^search/quick/$',
views.quick_search,
name='quick_search'),
url(r'^buginfo/bug', views.buginfo,
name='buginfo'),
url(r'^rawdumps/(?P<crash_id>[\w-]{36})-(?P<name>\w+)\.'
r'(?P<extension>json|dmp|json\.gz)$',
views.raw_data,
name='raw_data_named'),
url(r'^rawdumps/(?P<crash_id>[\w-]{36}).(?P<extension>json|dmp)$',
views.raw_data,
name='raw_data'),
url(r'^login/$',
views.login,
name='login'),
url(r'^graphics_report/$',
views.graphics_report,
name='graphics_report'),
url(r'^about/throttling/$',
views.about_throttling,
name='about_throttling'),
# if we do a permanent redirect, the browser will "cache" the redirect and
# it will make it very hard to ever change the DEFAULT_PRODUCT
url(r'^$',
RedirectView.as_view(
url='/home/product/%s' % settings.DEFAULT_PRODUCT,
permanent=False # this is not a legacy URL
)),
# redirect deceased Advanced Search URL to Super Search
url(r'^query/$',
RedirectView.as_view(
url='/search/',
query_string=True,
permanent=True
)),
# redirect deceased Report List URL to Signature report
url(r'^report/list$',
RedirectView.as_view(
pattern_name='signature:signature_report',
query_string=True,
permanent=True
)),
# redirect deceased Daily Crashes URL to Crasher per Day
url(r'^daily$',
RedirectView.as_view(
pattern_name='crashstats:crashes_per_day',
query_string=True, | )),
# Redirect old independant pages to the unified Profile page.
url(r'^your-crashes/$',
RedirectView.as_view(
url='/profile/',
permanent=perm_legacy_redirect
)),
url(r'^permissions/$',
RedirectView.as_view(
url='/profile/',
permanent=perm_legacy_redirect
)),
# Redirect deleted status page to monitoring page.
url(
r'^status/$',
RedirectView.as_view(
pattern_name='monitoring:index',
permanent=not settings.DEBUG,
),
name='status_redirect',
),
# handle old-style URLs
url(r'^products/(?P<product>\w+)/$',
RedirectView.as_view(
url='/home/products/%(product)s',
permanent=perm_legacy_redirect
)),
url(r'^products/(?P<product>\w+)/versions/(?P<versions>[;\w\.()]+)/$',
RedirectView.as_view(
url='/home/products/%(product)s/versions/%(versions)s',
permanent=perm_legacy_redirect
)),
url('^home' + products + '/versions/$',
RedirectView.as_view(
url='/home/products/%(product)s',
permanent=perm_legacy_redirect
)),
) | permanent=True | random_line_split |
position.rs | * License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
//! Generic types for CSS handling of specified and computed values of
//! [`position`](https://drafts.csswg.org/css-backgrounds-3/#position)
#[derive(Clone, Copy, Debug, HasViewportPercentage, PartialEq, ToComputedValue)]
#[cfg_attr(feature = "servo", derive(HeapSizeOf))]
/// A generic type for representing a CSS [position](https://drafts.csswg.org/css-values/#position).
pub struct Position<H, V> {
/// The horizontal component of position.
pub horizontal: H,
/// The vertical component of position.
pub vertical: V,
}
impl<H, V> Position<H, V> {
/// Returns a new position.
pub fn new(horizontal: H, vertical: V) -> Self {
Self {
horizontal: horizontal,
vertical: vertical,
}
}
} | /* This Source Code Form is subject to the terms of the Mozilla Public | random_line_split |
|
position.rs | /* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
//! Generic types for CSS handling of specified and computed values of
//! [`position`](https://drafts.csswg.org/css-backgrounds-3/#position)
#[derive(Clone, Copy, Debug, HasViewportPercentage, PartialEq, ToComputedValue)]
#[cfg_attr(feature = "servo", derive(HeapSizeOf))]
/// A generic type for representing a CSS [position](https://drafts.csswg.org/css-values/#position).
pub struct | <H, V> {
/// The horizontal component of position.
pub horizontal: H,
/// The vertical component of position.
pub vertical: V,
}
impl<H, V> Position<H, V> {
/// Returns a new position.
pub fn new(horizontal: H, vertical: V) -> Self {
Self {
horizontal: horizontal,
vertical: vertical,
}
}
}
| Position | identifier_name |
queue.ts | import { QueueAction } from './QueueAction';
import { QueueScheduler } from './QueueScheduler';
/**
*
* 队列调度器
*
* <span class="informal">将每个任务都放到队列里,而不是立刻执行它们</span>
*
* `queue` 调度器, 当和延时一起使用的时候, 和 {@link async} 调度器行为一样。
*
* 当和延时一起使用, 它同步地调用当前任务,即调度的时候执行。然而当递归调用的时候,即在调度的任务内,
* 另一个任务由调度队列调度,而不是立即执行,该任务将被放在队列中,等待当前一个完成。
*
* 这意味着当你用 `queue` 调度程序执行任务时,你确信它会在调度程序启动之前的任何其他任务结束之前结束。
*
* @examples <caption>首先递归调度, 然后做一些事情</caption>
*
* Rx.Scheduler.queue.schedule(() => {
* Rx.Scheduler.queue.schedule(() => console.log('second')); // 不会立马执行,但是会放到队列里
*
* console.log('first');
* });
*
* // 日志:
* // "first"
* // "second"
*
*
* @example <caption>递归的重新调度自身</caption>
*
* Rx.Scheduler.queue.schedule(function(state) {
* if (state !== 0) {
* console.log('before', state);
* this.schedule(state - 1); // `this` 指向当前执行的 Action,
* // 我们使用新的状态重新调度
* console.log('after', state);
* }
* }, 0, 3);
*
* // 递归运行的调度器, 你的期望:
* // "before", 3 | * // "after", 2
* // "after", 3
*
* // 但实际使用队列的输入:
* // "before", 3
* // "after", 3
* // "before", 2
* // "after", 2
* // "before", 1
* // "after", 1
*
*
* @static true
* @name queue
* @owner Scheduler
*/
export const queue = new QueueScheduler(QueueAction); | * // "before", 2
* // "before", 1
* // "after", 1 | random_line_split |
NagiosConnector.ts | ///ts:ref=jquery.d.ts
/// <reference path="../../vendor/jquery.d.ts"/> ///ts:ref:generated
///ts:ref=NagiosResponse.d.ts
/// <reference path="../../jsonInterfaces/NagiosResponse.d.ts"/> ///ts:ref:generated
///ts:import=Connector
import Connector = require('../../connector/Connector'); ///ts:import:generated
///ts:import=ConnectorBase
import ConnectorBase = require('../../connector/ConnectorBase'); ///ts:import:generated
///ts:import=NagiosMonitorModel
import NagiosMonitorModel = require('../model/NagiosMonitorModel'); ///ts:import:generated
import jQuery = require('jquery');
import NagiosJsonResponse = require('NagiosJsonResponse');
/**
* Get data from Nagios {@link http://www.nagios.org/}
*/
class NagiosConnector extends ConnectorBase implements Connector {
//the nagios host
public static NAGIOS_PREFIX: string = '/cgi-bin/status-json.cgi?';
//suffix used to access json output by host
public static NAGIOS_HOST_SUFFIX: string = 'host=';
//suffix used to access JSON output by host group
public static NAGIOS_HOSTGROUP_SUFFIX: string = 'hostgroup=';
//key to pass to nagios if data for all hosts should be displayed
public static DISPLAY_ALL_HOSTS: string = 'all';
//suffix to tell nagios to send all service information for a hostgroup
public static SHOW_SERVICE_DETAILS: string = '&style=detail';
//suffix to add to a JSON call to make jQuery automatically use JSONP
public static NAGIOS_JSONP_SUFFIX: string = '&callback=?';
private static NAGIOS_HOSTINFO_PREFIX: string = '/cgi-bin/extinfo.cgi?type=1&host=';
//==================================================================================================================
// Functionality
//==================================================================================================================
public getRemoteData(model: NagiosMonitorModel): void {
jQuery.getJSON(this.getApiUrl(model),
(json: NagiosJsonResponse.NagiosServices) => {
NagiosConnector.updateModel(json, model);
}
).fail((jqXHR, textStatus, errorThrown) => {
if (console) |
});
//reload data periodically.
setTimeout(() => this.getRemoteData(model), ConnectorBase.getRandomTimeout());
}
public static updateModel(json: NagiosJsonResponse.NagiosServices, model: NagiosMonitorModel): void {
model.setData(json);
}
public getApiUrl(model: NagiosMonitorModel): string {
return this.getUrl(model.getHostname(),
NagiosConnector.NAGIOS_PREFIX +
NagiosConnector.NAGIOS_HOST_SUFFIX +
NagiosConnector.DISPLAY_ALL_HOSTS +
NagiosConnector.NAGIOS_JSONP_SUFFIX);
}
public getHostInfoUrl(nagiosHostname: string, hostname: string): string {
return this.getUrl(nagiosHostname, NagiosConnector.NAGIOS_HOSTINFO_PREFIX + hostname);
}
}
export = NagiosConnector;
| {
console.log(jqXHR, textStatus, errorThrown, this.getApiUrl(model));
} | conditional_block |
NagiosConnector.ts | ///ts:ref=jquery.d.ts
/// <reference path="../../vendor/jquery.d.ts"/> ///ts:ref:generated
///ts:ref=NagiosResponse.d.ts
/// <reference path="../../jsonInterfaces/NagiosResponse.d.ts"/> ///ts:ref:generated
///ts:import=Connector
import Connector = require('../../connector/Connector'); ///ts:import:generated
///ts:import=ConnectorBase | import NagiosMonitorModel = require('../model/NagiosMonitorModel'); ///ts:import:generated
import jQuery = require('jquery');
import NagiosJsonResponse = require('NagiosJsonResponse');
/**
* Get data from Nagios {@link http://www.nagios.org/}
*/
class NagiosConnector extends ConnectorBase implements Connector {
//the nagios host
public static NAGIOS_PREFIX: string = '/cgi-bin/status-json.cgi?';
//suffix used to access json output by host
public static NAGIOS_HOST_SUFFIX: string = 'host=';
//suffix used to access JSON output by host group
public static NAGIOS_HOSTGROUP_SUFFIX: string = 'hostgroup=';
//key to pass to nagios if data for all hosts should be displayed
public static DISPLAY_ALL_HOSTS: string = 'all';
//suffix to tell nagios to send all service information for a hostgroup
public static SHOW_SERVICE_DETAILS: string = '&style=detail';
//suffix to add to a JSON call to make jQuery automatically use JSONP
public static NAGIOS_JSONP_SUFFIX: string = '&callback=?';
private static NAGIOS_HOSTINFO_PREFIX: string = '/cgi-bin/extinfo.cgi?type=1&host=';
//==================================================================================================================
// Functionality
//==================================================================================================================
public getRemoteData(model: NagiosMonitorModel): void {
jQuery.getJSON(this.getApiUrl(model),
(json: NagiosJsonResponse.NagiosServices) => {
NagiosConnector.updateModel(json, model);
}
).fail((jqXHR, textStatus, errorThrown) => {
if (console) {
console.log(jqXHR, textStatus, errorThrown, this.getApiUrl(model));
}
});
//reload data periodically.
setTimeout(() => this.getRemoteData(model), ConnectorBase.getRandomTimeout());
}
public static updateModel(json: NagiosJsonResponse.NagiosServices, model: NagiosMonitorModel): void {
model.setData(json);
}
public getApiUrl(model: NagiosMonitorModel): string {
return this.getUrl(model.getHostname(),
NagiosConnector.NAGIOS_PREFIX +
NagiosConnector.NAGIOS_HOST_SUFFIX +
NagiosConnector.DISPLAY_ALL_HOSTS +
NagiosConnector.NAGIOS_JSONP_SUFFIX);
}
public getHostInfoUrl(nagiosHostname: string, hostname: string): string {
return this.getUrl(nagiosHostname, NagiosConnector.NAGIOS_HOSTINFO_PREFIX + hostname);
}
}
export = NagiosConnector; | import ConnectorBase = require('../../connector/ConnectorBase'); ///ts:import:generated
///ts:import=NagiosMonitorModel | random_line_split |
NagiosConnector.ts | ///ts:ref=jquery.d.ts
/// <reference path="../../vendor/jquery.d.ts"/> ///ts:ref:generated
///ts:ref=NagiosResponse.d.ts
/// <reference path="../../jsonInterfaces/NagiosResponse.d.ts"/> ///ts:ref:generated
///ts:import=Connector
import Connector = require('../../connector/Connector'); ///ts:import:generated
///ts:import=ConnectorBase
import ConnectorBase = require('../../connector/ConnectorBase'); ///ts:import:generated
///ts:import=NagiosMonitorModel
import NagiosMonitorModel = require('../model/NagiosMonitorModel'); ///ts:import:generated
import jQuery = require('jquery');
import NagiosJsonResponse = require('NagiosJsonResponse');
/**
* Get data from Nagios {@link http://www.nagios.org/}
*/
class NagiosConnector extends ConnectorBase implements Connector {
//the nagios host
public static NAGIOS_PREFIX: string = '/cgi-bin/status-json.cgi?';
//suffix used to access json output by host
public static NAGIOS_HOST_SUFFIX: string = 'host=';
//suffix used to access JSON output by host group
public static NAGIOS_HOSTGROUP_SUFFIX: string = 'hostgroup=';
//key to pass to nagios if data for all hosts should be displayed
public static DISPLAY_ALL_HOSTS: string = 'all';
//suffix to tell nagios to send all service information for a hostgroup
public static SHOW_SERVICE_DETAILS: string = '&style=detail';
//suffix to add to a JSON call to make jQuery automatically use JSONP
public static NAGIOS_JSONP_SUFFIX: string = '&callback=?';
private static NAGIOS_HOSTINFO_PREFIX: string = '/cgi-bin/extinfo.cgi?type=1&host=';
//==================================================================================================================
// Functionality
//==================================================================================================================
public getRemoteData(model: NagiosMonitorModel): void {
jQuery.getJSON(this.getApiUrl(model),
(json: NagiosJsonResponse.NagiosServices) => {
NagiosConnector.updateModel(json, model);
}
).fail((jqXHR, textStatus, errorThrown) => {
if (console) {
console.log(jqXHR, textStatus, errorThrown, this.getApiUrl(model));
}
});
//reload data periodically.
setTimeout(() => this.getRemoteData(model), ConnectorBase.getRandomTimeout());
}
public static | (json: NagiosJsonResponse.NagiosServices, model: NagiosMonitorModel): void {
model.setData(json);
}
public getApiUrl(model: NagiosMonitorModel): string {
return this.getUrl(model.getHostname(),
NagiosConnector.NAGIOS_PREFIX +
NagiosConnector.NAGIOS_HOST_SUFFIX +
NagiosConnector.DISPLAY_ALL_HOSTS +
NagiosConnector.NAGIOS_JSONP_SUFFIX);
}
public getHostInfoUrl(nagiosHostname: string, hostname: string): string {
return this.getUrl(nagiosHostname, NagiosConnector.NAGIOS_HOSTINFO_PREFIX + hostname);
}
}
export = NagiosConnector;
| updateModel | identifier_name |
NagiosConnector.ts | ///ts:ref=jquery.d.ts
/// <reference path="../../vendor/jquery.d.ts"/> ///ts:ref:generated
///ts:ref=NagiosResponse.d.ts
/// <reference path="../../jsonInterfaces/NagiosResponse.d.ts"/> ///ts:ref:generated
///ts:import=Connector
import Connector = require('../../connector/Connector'); ///ts:import:generated
///ts:import=ConnectorBase
import ConnectorBase = require('../../connector/ConnectorBase'); ///ts:import:generated
///ts:import=NagiosMonitorModel
import NagiosMonitorModel = require('../model/NagiosMonitorModel'); ///ts:import:generated
import jQuery = require('jquery');
import NagiosJsonResponse = require('NagiosJsonResponse');
/**
* Get data from Nagios {@link http://www.nagios.org/}
*/
class NagiosConnector extends ConnectorBase implements Connector {
//the nagios host
public static NAGIOS_PREFIX: string = '/cgi-bin/status-json.cgi?';
//suffix used to access json output by host
public static NAGIOS_HOST_SUFFIX: string = 'host=';
//suffix used to access JSON output by host group
public static NAGIOS_HOSTGROUP_SUFFIX: string = 'hostgroup=';
//key to pass to nagios if data for all hosts should be displayed
public static DISPLAY_ALL_HOSTS: string = 'all';
//suffix to tell nagios to send all service information for a hostgroup
public static SHOW_SERVICE_DETAILS: string = '&style=detail';
//suffix to add to a JSON call to make jQuery automatically use JSONP
public static NAGIOS_JSONP_SUFFIX: string = '&callback=?';
private static NAGIOS_HOSTINFO_PREFIX: string = '/cgi-bin/extinfo.cgi?type=1&host=';
//==================================================================================================================
// Functionality
//==================================================================================================================
public getRemoteData(model: NagiosMonitorModel): void {
jQuery.getJSON(this.getApiUrl(model),
(json: NagiosJsonResponse.NagiosServices) => {
NagiosConnector.updateModel(json, model);
}
).fail((jqXHR, textStatus, errorThrown) => {
if (console) {
console.log(jqXHR, textStatus, errorThrown, this.getApiUrl(model));
}
});
//reload data periodically.
setTimeout(() => this.getRemoteData(model), ConnectorBase.getRandomTimeout());
}
public static updateModel(json: NagiosJsonResponse.NagiosServices, model: NagiosMonitorModel): void {
model.setData(json);
}
public getApiUrl(model: NagiosMonitorModel): string |
public getHostInfoUrl(nagiosHostname: string, hostname: string): string {
return this.getUrl(nagiosHostname, NagiosConnector.NAGIOS_HOSTINFO_PREFIX + hostname);
}
}
export = NagiosConnector;
| {
return this.getUrl(model.getHostname(),
NagiosConnector.NAGIOS_PREFIX +
NagiosConnector.NAGIOS_HOST_SUFFIX +
NagiosConnector.DISPLAY_ALL_HOSTS +
NagiosConnector.NAGIOS_JSONP_SUFFIX);
} | identifier_body |
run.py | (15) # test subprocess not responding
try:
assert(len(sys.argv) > 1)
port = int(sys.argv[-1])
except:
print>>sys.stderr, "IDLE Subprocess: no IP port passed in sys.argv."
return
capture_warnings(True)
sys.argv[:] = [""]
sockthread = threading.Thread(target=manage_socket,
name='SockThread',
args=((LOCALHOST, port),))
sockthread.setDaemon(True)
sockthread.start()
while 1:
try:
if exit_now:
try:
exit()
except KeyboardInterrupt:
# exiting but got an extra KBI? Try again!
continue
try:
seq, request = rpc.request_queue.get(block=True, timeout=0.05)
except Queue.Empty:
continue
method, args, kwargs = request
ret = method(*args, **kwargs)
rpc.response_queue.put((seq, ret))
except KeyboardInterrupt:
if quitting:
exit_now = True
continue
except SystemExit:
capture_warnings(False)
raise
except:
type, value, tb = sys.exc_info()
try:
print_exception()
rpc.response_queue.put((seq, None))
except:
# Link didn't work, print same exception to __stderr__
traceback.print_exception(type, value, tb, file=sys.__stderr__)
exit()
else:
continue
def manage_socket(address):
for i in range(3):
time.sleep(i)
try:
server = MyRPCServer(address, MyHandler)
break
except socket.error as err:
print>>sys.__stderr__,"IDLE Subprocess: socket error: "\
+ err.args[1] + ", retrying...."
else:
print>>sys.__stderr__, "IDLE Subprocess: Connection to "\
"IDLE GUI failed, exiting."
show_socket_error(err, address)
global exit_now
exit_now = True
return
server.handle_request() # A single request only
def show_socket_error(err, address):
import Tkinter
import tkMessageBox
root = Tkinter.Tk()
root.withdraw()
if err.args[0] == 61: # connection refused
msg = "IDLE's subprocess can't connect to %s:%d. This may be due "\
"to your personal firewall configuration. It is safe to "\
"allow this internal connection because no data is visible on "\
"external ports." % address
tkMessageBox.showerror("IDLE Subprocess Error", msg, parent=root)
else:
tkMessageBox.showerror("IDLE Subprocess Error",
"Socket Error: %s" % err.args[1], parent=root)
root.destroy()
def print_exception():
import linecache
linecache.checkcache()
flush_stdout()
efile = sys.stderr
typ, val, tb = excinfo = sys.exc_info()
sys.last_type, sys.last_value, sys.last_traceback = excinfo
tbe = traceback.extract_tb(tb)
print>>efile, '\nTraceback (most recent call last):'
exclude = ("run.py", "rpc.py", "threading.py", "Queue.py",
"RemoteDebugger.py", "bdb.py")
cleanup_traceback(tbe, exclude)
traceback.print_list(tbe, file=efile)
lines = traceback.format_exception_only(typ, val)
for line in lines:
print>>efile, line,
def cleanup_traceback(tb, exclude):
"Remove excluded traces from beginning/end of tb; get cached lines"
orig_tb = tb[:]
while tb:
for rpcfile in exclude:
if tb[0][0].count(rpcfile):
break # found an exclude, break for: and delete tb[0]
else:
break # no excludes, have left RPC code, break while:
del tb[0]
while tb:
for rpcfile in exclude:
if tb[-1][0].count(rpcfile):
break
else:
break
del tb[-1]
if len(tb) == 0:
# exception was in IDLE internals, don't prune!
tb[:] = orig_tb[:]
print>>sys.stderr, "** IDLE Internal Exception: "
rpchandler = rpc.objecttable['exec'].rpchandler
for i in range(len(tb)):
fn, ln, nm, line = tb[i]
if nm == '?':
nm = "-toplevel-"
if fn.startswith("<pyshell#") and IOBinding.encoding != 'utf-8':
ln -= 1 # correction for coding cookie
if not line and fn.startswith("<pyshell#"):
line = rpchandler.remotecall('linecache', 'getline',
(fn, ln), {})
tb[i] = fn, ln, nm, line
def flush_stdout():
try:
if sys.stdout.softspace:
sys.stdout.softspace = 0
sys.stdout.write("\n")
except (AttributeError, EOFError):
pass
def exit():
"""Exit subprocess, possibly after first deleting sys.exitfunc
If config-main.cfg/.def 'General' 'delete-exitfunc' is True, then any
sys.exitfunc will be removed before exiting. (VPython support)
"""
if no_exitfunc:
try:
del sys.exitfunc
except AttributeError:
pass
capture_warnings(False)
sys.exit(0)
class MyRPCServer(rpc.RPCServer):
def handle_error(self, request, client_address):
"""Override RPCServer method for IDLE
Interrupt the MainThread and exit server if link is dropped.
"""
global quitting
try:
raise
except SystemExit:
raise
except EOFError:
global exit_now
exit_now = True
thread.interrupt_main()
except:
erf = sys.__stderr__
print>>erf, '\n' + '-'*40
print>>erf, 'Unhandled server exception!'
print>>erf, 'Thread: %s' % threading.currentThread().getName()
print>>erf, 'Client Address: ', client_address
print>>erf, 'Request: ', repr(request)
traceback.print_exc(file=erf)
print>>erf, '\n*** Unrecoverable, server exiting!'
print>>erf, '-'*40
quitting = True
thread.interrupt_main()
class MyHandler(rpc.RPCHandler):
def handle(self):
"""Override base method"""
executive = Executive(self)
self.register("exec", executive)
self.console = self.get_remote_proxy("console")
sys.stdin = PyShell.PseudoInputFile(self.console, "stdin",
IOBinding.encoding)
sys.stdout = PyShell.PseudoOutputFile(self.console, "stdout",
IOBinding.encoding)
sys.stderr = PyShell.PseudoOutputFile(self.console, "stderr",
IOBinding.encoding)
# Keep a reference to stdin so that it won't try to exit IDLE if
# sys.stdin gets changed from within IDLE's shell. See issue17838.
self._keep_stdin = sys.stdin
self.interp = self.get_remote_proxy("interp")
rpc.RPCHandler.getresponse(self, myseq=None, wait=0.05)
def exithook(self):
"override SocketIO method - wait for MainThread to shut us down"
time.sleep(10)
def EOFhook(self):
"Override SocketIO method - terminate wait on callback and exit thread"
global quitting
quitting = True
thread.interrupt_main()
def decode_interrupthook(self):
"interrupt awakened thread"
global quitting
quitting = True
thread.interrupt_main()
class Executive(object):
def __init__(self, rpchandler):
self.rpchandler = rpchandler
self.locals = __main__.__dict__
self.calltip = CallTips.CallTips()
self.autocomplete = AutoComplete.AutoComplete()
def runcode(self, code):
global interruptable
try:
self.usr_exc_info = None
interruptable = True
try:
exec code in self.locals
finally:
interruptable = False
except SystemExit:
# Scripts that raise SystemExit should just
# return to the interactive prompt
pass
except:
self.usr_exc_info = sys.exc_info()
if quitting:
exit()
print_exception()
jit = self.rpchandler.console.getvar("<<toggle-jit-stack-viewer>>")
if jit:
self.rpchandler.interp.open_remote_stack_viewer()
else:
flush_stdout()
def interrupt_the_server(self):
if interruptable:
thread.interrupt_main()
def start_the_debugger(self, gui_adap_oid):
return RemoteDebugger.start_debugger(self.rpchandler, gui_adap_oid)
def stop_the_debugger(self, idb_adap_oid):
"Unregister the Idb Adapter. Link objects and Idb then subject to GC"
self.rpchandler.unregister(idb_adap_oid)
def get_the_calltip(self, name):
return self.calltip.fetch_tip(name)
def get_the_completion_list(self, what, mode):
return self.autocomplete.fetch_completions(what, mode)
def | stackviewer | identifier_name |
|
run.py | . The Executive methods have access to the reference and
can pass it on to entities that they command
(e.g. RemoteDebugger.Debugger.start_debugger()). The latter, in turn, can
call MyHandler(SocketIO) register/unregister methods via the reference to
register and unregister themselves.
"""
global exit_now
global quitting
global no_exitfunc
no_exitfunc = del_exitfunc
#time.sleep(15) # test subprocess not responding
try:
assert(len(sys.argv) > 1)
port = int(sys.argv[-1])
except:
print>>sys.stderr, "IDLE Subprocess: no IP port passed in sys.argv."
return
capture_warnings(True)
sys.argv[:] = [""]
sockthread = threading.Thread(target=manage_socket,
name='SockThread',
args=((LOCALHOST, port),))
sockthread.setDaemon(True)
sockthread.start()
while 1:
try:
if exit_now:
try:
exit()
except KeyboardInterrupt:
# exiting but got an extra KBI? Try again!
continue
try:
seq, request = rpc.request_queue.get(block=True, timeout=0.05)
except Queue.Empty:
continue
method, args, kwargs = request
ret = method(*args, **kwargs)
rpc.response_queue.put((seq, ret))
except KeyboardInterrupt:
if quitting:
exit_now = True
continue
except SystemExit:
capture_warnings(False)
raise
except:
type, value, tb = sys.exc_info()
try:
print_exception()
rpc.response_queue.put((seq, None))
except:
# Link didn't work, print same exception to __stderr__
traceback.print_exception(type, value, tb, file=sys.__stderr__)
exit()
else:
continue
def manage_socket(address):
for i in range(3):
time.sleep(i)
try:
server = MyRPCServer(address, MyHandler)
break
except socket.error as err:
print>>sys.__stderr__,"IDLE Subprocess: socket error: "\
+ err.args[1] + ", retrying...."
else:
print>>sys.__stderr__, "IDLE Subprocess: Connection to "\
"IDLE GUI failed, exiting."
show_socket_error(err, address)
global exit_now
exit_now = True
return
server.handle_request() # A single request only
def show_socket_error(err, address):
import Tkinter
import tkMessageBox
root = Tkinter.Tk()
root.withdraw()
if err.args[0] == 61: # connection refused
msg = "IDLE's subprocess can't connect to %s:%d. This may be due "\
"to your personal firewall configuration. It is safe to "\
"allow this internal connection because no data is visible on "\
"external ports." % address
tkMessageBox.showerror("IDLE Subprocess Error", msg, parent=root)
else:
tkMessageBox.showerror("IDLE Subprocess Error",
"Socket Error: %s" % err.args[1], parent=root)
root.destroy()
def print_exception():
import linecache
linecache.checkcache()
flush_stdout()
efile = sys.stderr
typ, val, tb = excinfo = sys.exc_info()
sys.last_type, sys.last_value, sys.last_traceback = excinfo
tbe = traceback.extract_tb(tb)
print>>efile, '\nTraceback (most recent call last):'
exclude = ("run.py", "rpc.py", "threading.py", "Queue.py",
"RemoteDebugger.py", "bdb.py")
cleanup_traceback(tbe, exclude)
traceback.print_list(tbe, file=efile)
lines = traceback.format_exception_only(typ, val)
for line in lines:
print>>efile, line,
def cleanup_traceback(tb, exclude):
"Remove excluded traces from beginning/end of tb; get cached lines"
orig_tb = tb[:]
while tb:
for rpcfile in exclude:
if tb[0][0].count(rpcfile):
break # found an exclude, break for: and delete tb[0]
else:
break # no excludes, have left RPC code, break while:
del tb[0]
while tb:
for rpcfile in exclude:
if tb[-1][0].count(rpcfile):
break
else:
break
del tb[-1]
if len(tb) == 0:
# exception was in IDLE internals, don't prune!
tb[:] = orig_tb[:]
print>>sys.stderr, "** IDLE Internal Exception: "
rpchandler = rpc.objecttable['exec'].rpchandler
for i in range(len(tb)):
fn, ln, nm, line = tb[i]
if nm == '?':
nm = "-toplevel-"
if fn.startswith("<pyshell#") and IOBinding.encoding != 'utf-8':
ln -= 1 # correction for coding cookie
if not line and fn.startswith("<pyshell#"):
line = rpchandler.remotecall('linecache', 'getline',
(fn, ln), {})
tb[i] = fn, ln, nm, line
def flush_stdout():
try:
if sys.stdout.softspace:
sys.stdout.softspace = 0
sys.stdout.write("\n")
except (AttributeError, EOFError):
pass
def exit():
"""Exit subprocess, possibly after first deleting sys.exitfunc
If config-main.cfg/.def 'General' 'delete-exitfunc' is True, then any
sys.exitfunc will be removed before exiting. (VPython support)
"""
if no_exitfunc:
try:
del sys.exitfunc
except AttributeError:
pass
capture_warnings(False)
sys.exit(0)
class MyRPCServer(rpc.RPCServer):
def handle_error(self, request, client_address):
"""Override RPCServer method for IDLE
Interrupt the MainThread and exit server if link is dropped.
"""
global quitting
try:
raise
except SystemExit:
raise
except EOFError:
global exit_now
exit_now = True
thread.interrupt_main()
except:
erf = sys.__stderr__
print>>erf, '\n' + '-'*40
print>>erf, 'Unhandled server exception!'
print>>erf, 'Thread: %s' % threading.currentThread().getName()
print>>erf, 'Client Address: ', client_address
print>>erf, 'Request: ', repr(request)
traceback.print_exc(file=erf)
print>>erf, '\n*** Unrecoverable, server exiting!'
print>>erf, '-'*40
quitting = True
thread.interrupt_main()
class MyHandler(rpc.RPCHandler):
def handle(self):
"""Override base method"""
executive = Executive(self)
self.register("exec", executive)
self.console = self.get_remote_proxy("console")
sys.stdin = PyShell.PseudoInputFile(self.console, "stdin",
IOBinding.encoding)
sys.stdout = PyShell.PseudoOutputFile(self.console, "stdout",
IOBinding.encoding)
sys.stderr = PyShell.PseudoOutputFile(self.console, "stderr",
IOBinding.encoding)
# Keep a reference to stdin so that it won't try to exit IDLE if
# sys.stdin gets changed from within IDLE's shell. See issue17838.
self._keep_stdin = sys.stdin
self.interp = self.get_remote_proxy("interp")
rpc.RPCHandler.getresponse(self, myseq=None, wait=0.05)
def exithook(self):
"override SocketIO method - wait for MainThread to shut us down"
time.sleep(10)
def EOFhook(self):
"Override SocketIO method - terminate wait on callback and exit thread"
global quitting
quitting = True
thread.interrupt_main()
def decode_interrupthook(self):
"interrupt awakened thread"
global quitting
quitting = True
thread.interrupt_main()
class Executive(object):
def __init__(self, rpchandler):
self.rpchandler = rpchandler
self.locals = __main__.__dict__
self.calltip = CallTips.CallTips()
self.autocomplete = AutoComplete.AutoComplete()
def runcode(self, code):
global interruptable
try:
self.usr_exc_info = None
interruptable = True
try:
exec code in self.locals
finally:
interruptable = False
except SystemExit:
# Scripts that raise SystemExit should just
# return to the interactive prompt
pass
except:
self.usr_exc_info = sys.exc_info()
if quitting:
exit()
print_exception()
jit = self.rpchandler.console.getvar("<<toggle-jit-stack-viewer>>")
if jit:
self.rpchandler.interp.open_remote_stack_viewer()
else:
flush_stdout()
def interrupt_the_server(self):
if interruptable:
thread.interrupt_main()
def start_the_debugger(self, gui_adap_oid):
| return RemoteDebugger.start_debugger(self.rpchandler, gui_adap_oid) | identifier_body |
|
run.py | MyHandler, which inherits register/unregister methods from RPCHandler via
the mix-in class SocketIO.
When the RPCServer 'server' is instantiated, the TCPServer initialization
creates an instance of run.MyHandler and calls its handle() method.
handle() instantiates a run.Executive object, passing it a reference to the
MyHandler object. That reference is saved as attribute rpchandler of the
Executive instance. The Executive methods have access to the reference and
can pass it on to entities that they command
(e.g. RemoteDebugger.Debugger.start_debugger()). The latter, in turn, can
call MyHandler(SocketIO) register/unregister methods via the reference to
register and unregister themselves.
"""
global exit_now
global quitting
global no_exitfunc
no_exitfunc = del_exitfunc
#time.sleep(15) # test subprocess not responding
try:
assert(len(sys.argv) > 1)
port = int(sys.argv[-1])
except:
print>>sys.stderr, "IDLE Subprocess: no IP port passed in sys.argv."
return
capture_warnings(True)
sys.argv[:] = [""]
sockthread = threading.Thread(target=manage_socket,
name='SockThread',
args=((LOCALHOST, port),))
sockthread.setDaemon(True)
sockthread.start()
while 1:
try:
if exit_now:
try:
exit()
except KeyboardInterrupt:
# exiting but got an extra KBI? Try again!
continue
try:
seq, request = rpc.request_queue.get(block=True, timeout=0.05)
except Queue.Empty:
continue
method, args, kwargs = request
ret = method(*args, **kwargs)
rpc.response_queue.put((seq, ret))
except KeyboardInterrupt:
if quitting:
exit_now = True
continue
except SystemExit:
capture_warnings(False)
raise
except:
type, value, tb = sys.exc_info()
try:
print_exception()
rpc.response_queue.put((seq, None))
except:
# Link didn't work, print same exception to __stderr__
traceback.print_exception(type, value, tb, file=sys.__stderr__)
exit()
else:
continue
def manage_socket(address):
for i in range(3):
time.sleep(i)
try:
server = MyRPCServer(address, MyHandler)
break
except socket.error as err:
print>>sys.__stderr__,"IDLE Subprocess: socket error: "\
+ err.args[1] + ", retrying...."
else:
print>>sys.__stderr__, "IDLE Subprocess: Connection to "\
"IDLE GUI failed, exiting."
show_socket_error(err, address)
global exit_now
exit_now = True
return
server.handle_request() # A single request only
def show_socket_error(err, address):
import Tkinter
import tkMessageBox
root = Tkinter.Tk()
root.withdraw()
if err.args[0] == 61: # connection refused
msg = "IDLE's subprocess can't connect to %s:%d. This may be due "\
"to your personal firewall configuration. It is safe to "\
"allow this internal connection because no data is visible on "\
"external ports." % address
tkMessageBox.showerror("IDLE Subprocess Error", msg, parent=root)
else:
tkMessageBox.showerror("IDLE Subprocess Error",
"Socket Error: %s" % err.args[1], parent=root)
root.destroy()
def print_exception():
import linecache
linecache.checkcache()
flush_stdout()
efile = sys.stderr
typ, val, tb = excinfo = sys.exc_info()
sys.last_type, sys.last_value, sys.last_traceback = excinfo
tbe = traceback.extract_tb(tb)
print>>efile, '\nTraceback (most recent call last):'
exclude = ("run.py", "rpc.py", "threading.py", "Queue.py",
"RemoteDebugger.py", "bdb.py")
cleanup_traceback(tbe, exclude)
traceback.print_list(tbe, file=efile)
lines = traceback.format_exception_only(typ, val)
for line in lines:
print>>efile, line,
def cleanup_traceback(tb, exclude):
"Remove excluded traces from beginning/end of tb; get cached lines"
orig_tb = tb[:]
while tb:
for rpcfile in exclude:
if tb[0][0].count(rpcfile):
break # found an exclude, break for: and delete tb[0]
else:
break # no excludes, have left RPC code, break while:
del tb[0]
while tb:
for rpcfile in exclude:
if tb[-1][0].count(rpcfile):
break
else:
break
del tb[-1]
if len(tb) == 0:
# exception was in IDLE internals, don't prune!
tb[:] = orig_tb[:]
print>>sys.stderr, "** IDLE Internal Exception: "
rpchandler = rpc.objecttable['exec'].rpchandler
for i in range(len(tb)):
fn, ln, nm, line = tb[i]
if nm == '?':
nm = "-toplevel-"
if fn.startswith("<pyshell#") and IOBinding.encoding != 'utf-8':
ln -= 1 # correction for coding cookie
if not line and fn.startswith("<pyshell#"):
line = rpchandler.remotecall('linecache', 'getline',
(fn, ln), {})
tb[i] = fn, ln, nm, line
def flush_stdout():
try:
if sys.stdout.softspace:
sys.stdout.softspace = 0
sys.stdout.write("\n")
except (AttributeError, EOFError):
pass
def exit():
"""Exit subprocess, possibly after first deleting sys.exitfunc
If config-main.cfg/.def 'General' 'delete-exitfunc' is True, then any
sys.exitfunc will be removed before exiting. (VPython support)
"""
if no_exitfunc:
try:
del sys.exitfunc
except AttributeError:
pass
capture_warnings(False)
sys.exit(0)
class MyRPCServer(rpc.RPCServer):
def handle_error(self, request, client_address):
"""Override RPCServer method for IDLE
Interrupt the MainThread and exit server if link is dropped.
"""
global quitting
try:
raise
except SystemExit:
raise
except EOFError:
global exit_now
exit_now = True
thread.interrupt_main()
except:
erf = sys.__stderr__
print>>erf, '\n' + '-'*40
print>>erf, 'Unhandled server exception!'
print>>erf, 'Thread: %s' % threading.currentThread().getName()
print>>erf, 'Client Address: ', client_address
print>>erf, 'Request: ', repr(request)
traceback.print_exc(file=erf)
print>>erf, '\n*** Unrecoverable, server exiting!'
print>>erf, '-'*40
quitting = True
thread.interrupt_main()
class MyHandler(rpc.RPCHandler):
def handle(self):
"""Override base method"""
executive = Executive(self)
self.register("exec", executive)
self.console = self.get_remote_proxy("console")
sys.stdin = PyShell.PseudoInputFile(self.console, "stdin",
IOBinding.encoding)
sys.stdout = PyShell.PseudoOutputFile(self.console, "stdout",
IOBinding.encoding)
sys.stderr = PyShell.PseudoOutputFile(self.console, "stderr",
IOBinding.encoding)
# Keep a reference to stdin so that it won't try to exit IDLE if
# sys.stdin gets changed from within IDLE's shell. See issue17838.
self._keep_stdin = sys.stdin
self.interp = self.get_remote_proxy("interp")
rpc.RPCHandler.getresponse(self, myseq=None, wait=0.05)
def exithook(self):
"override SocketIO method - wait for MainThread to shut us down"
time.sleep(10)
def EOFhook(self):
"Override SocketIO method - terminate wait on callback and exit thread"
global quitting
quitting = True
thread.interrupt_main()
def decode_interrupthook(self):
"interrupt awakened thread"
global quitting
quitting = True
thread.interrupt_main()
class Executive(object):
def __init__(self, rpchandler):
self.rpchandler = rpchandler
self.locals = __main__.__dict__
self.calltip = CallTips.CallTips()
self.autocomplete = AutoComplete.AutoComplete()
def runcode(self, code):
global interruptable
try:
self.usr_exc_info = None
interruptable = True
try:
exec code in self.locals
finally:
interruptable = False
except SystemExit:
# Scripts that raise SystemExit should just
# return to the interactive prompt
pass
except:
self.usr_exc_info = sys.exc_info()
if quitting:
| exit() | conditional_block |
|
run.py | import socket
import traceback
import thread
import threading
import Queue
from idlelib import CallTips
from idlelib import AutoComplete
from idlelib import RemoteDebugger
from idlelib import RemoteObjectBrowser
from idlelib import StackViewer
from idlelib import rpc
from idlelib import PyShell
from idlelib import IOBinding
import __main__
LOCALHOST = '127.0.0.1'
import warnings
def idle_showwarning_subproc(
message, category, filename, lineno, file=None, line=None):
"""Show Idle-format warning after replacing warnings.showwarning.
The only difference is the formatter called.
"""
if file is None:
file = sys.stderr
try:
file.write(PyShell.idle_formatwarning(
message, category, filename, lineno, line))
except IOError:
pass # the file (probably stderr) is invalid - this warning gets lost.
_warnings_showwarning = None
def capture_warnings(capture):
"Replace warning.showwarning with idle_showwarning_subproc, or reverse."
global _warnings_showwarning
if capture:
if _warnings_showwarning is None:
_warnings_showwarning = warnings.showwarning
warnings.showwarning = idle_showwarning_subproc
else:
if _warnings_showwarning is not None:
warnings.showwarning = _warnings_showwarning
_warnings_showwarning = None
capture_warnings(True)
# Thread shared globals: Establish a queue between a subthread (which handles
# the socket) and the main thread (which runs user code), plus global
# completion, exit and interruptable (the main thread) flags:
exit_now = False
quitting = False
interruptable = False
def main(del_exitfunc=False):
"""Start the Python execution server in a subprocess
In the Python subprocess, RPCServer is instantiated with handlerclass
MyHandler, which inherits register/unregister methods from RPCHandler via
the mix-in class SocketIO.
When the RPCServer 'server' is instantiated, the TCPServer initialization
creates an instance of run.MyHandler and calls its handle() method.
handle() instantiates a run.Executive object, passing it a reference to the
MyHandler object. That reference is saved as attribute rpchandler of the
Executive instance. The Executive methods have access to the reference and
can pass it on to entities that they command
(e.g. RemoteDebugger.Debugger.start_debugger()). The latter, in turn, can
call MyHandler(SocketIO) register/unregister methods via the reference to
register and unregister themselves.
"""
global exit_now
global quitting
global no_exitfunc
no_exitfunc = del_exitfunc
#time.sleep(15) # test subprocess not responding
try:
assert(len(sys.argv) > 1)
port = int(sys.argv[-1])
except:
print>>sys.stderr, "IDLE Subprocess: no IP port passed in sys.argv."
return
capture_warnings(True)
sys.argv[:] = [""]
sockthread = threading.Thread(target=manage_socket,
name='SockThread',
args=((LOCALHOST, port),))
sockthread.setDaemon(True)
sockthread.start()
while 1:
try:
if exit_now:
try:
exit()
except KeyboardInterrupt:
# exiting but got an extra KBI? Try again!
continue
try:
seq, request = rpc.request_queue.get(block=True, timeout=0.05)
except Queue.Empty:
continue
method, args, kwargs = request
ret = method(*args, **kwargs)
rpc.response_queue.put((seq, ret))
except KeyboardInterrupt:
if quitting:
exit_now = True
continue
except SystemExit:
capture_warnings(False)
raise
except:
type, value, tb = sys.exc_info()
try:
print_exception()
rpc.response_queue.put((seq, None))
except:
# Link didn't work, print same exception to __stderr__
traceback.print_exception(type, value, tb, file=sys.__stderr__)
exit()
else:
continue
def manage_socket(address):
for i in range(3):
time.sleep(i)
try:
server = MyRPCServer(address, MyHandler)
break
except socket.error as err:
print>>sys.__stderr__,"IDLE Subprocess: socket error: "\
+ err.args[1] + ", retrying...."
else:
print>>sys.__stderr__, "IDLE Subprocess: Connection to "\
"IDLE GUI failed, exiting."
show_socket_error(err, address)
global exit_now
exit_now = True
return
server.handle_request() # A single request only
def show_socket_error(err, address):
import Tkinter
import tkMessageBox
root = Tkinter.Tk()
root.withdraw()
if err.args[0] == 61: # connection refused
msg = "IDLE's subprocess can't connect to %s:%d. This may be due "\
"to your personal firewall configuration. It is safe to "\
"allow this internal connection because no data is visible on "\
"external ports." % address
tkMessageBox.showerror("IDLE Subprocess Error", msg, parent=root)
else:
tkMessageBox.showerror("IDLE Subprocess Error",
"Socket Error: %s" % err.args[1], parent=root)
root.destroy()
def print_exception():
import linecache
linecache.checkcache()
flush_stdout()
efile = sys.stderr
typ, val, tb = excinfo = sys.exc_info()
sys.last_type, sys.last_value, sys.last_traceback = excinfo
tbe = traceback.extract_tb(tb)
print>>efile, '\nTraceback (most recent call last):'
exclude = ("run.py", "rpc.py", "threading.py", "Queue.py",
"RemoteDebugger.py", "bdb.py")
cleanup_traceback(tbe, exclude)
traceback.print_list(tbe, file=efile)
lines = traceback.format_exception_only(typ, val)
for line in lines:
print>>efile, line,
def cleanup_traceback(tb, exclude):
"Remove excluded traces from beginning/end of tb; get cached lines"
orig_tb = tb[:]
while tb:
for rpcfile in exclude:
if tb[0][0].count(rpcfile):
break # found an exclude, break for: and delete tb[0]
else:
break # no excludes, have left RPC code, break while:
del tb[0]
while tb:
for rpcfile in exclude:
if tb[-1][0].count(rpcfile):
break
else:
break
del tb[-1]
if len(tb) == 0:
# exception was in IDLE internals, don't prune!
tb[:] = orig_tb[:]
print>>sys.stderr, "** IDLE Internal Exception: "
rpchandler = rpc.objecttable['exec'].rpchandler
for i in range(len(tb)):
fn, ln, nm, line = tb[i]
if nm == '?':
nm = "-toplevel-"
if fn.startswith("<pyshell#") and IOBinding.encoding != 'utf-8':
ln -= 1 # correction for coding cookie
if not line and fn.startswith("<pyshell#"):
line = rpchandler.remotecall('linecache', 'getline',
(fn, ln), {})
tb[i] = fn, ln, nm, line
def flush_stdout():
try:
if sys.stdout.softspace:
sys.stdout.softspace = 0
sys.stdout.write("\n")
except (AttributeError, EOFError):
pass
def exit():
"""Exit subprocess, possibly after first deleting sys.exitfunc
If config-main.cfg/.def 'General' 'delete-exitfunc' is True, then any
sys.exitfunc will be removed before exiting. (VPython support)
"""
if no_exitfunc:
try:
del sys.exitfunc
except AttributeError:
pass
capture_warnings(False)
sys.exit(0)
class MyRPCServer(rpc.RPCServer):
def handle_error(self, request, client_address):
"""Override RPCServer method for IDLE
Interrupt the MainThread and exit server if link is dropped.
"""
global quitting
try:
raise
except SystemExit:
raise
except EOFError:
global exit_now
exit_now = True
thread.interrupt_main()
except:
erf = sys.__stderr__
print>>erf, '\n' + '-'*40
print>>erf, 'Unhandled server exception!'
print>>erf, 'Thread: %s' % threading.currentThread().getName()
print>>erf, 'Client Address: ', client_address
print>>erf, 'Request: ', repr(request)
traceback.print_exc(file=erf)
print>>erf, '\n*** Unrecoverable, server exiting!'
print>>erf, '-'*40
quitting = True
thread.interrupt_main()
class MyHandler(rpc.RPCHandler):
def handle(self):
"""Override base method"""
executive = Executive(self)
self.register("exec", executive)
| import sys
import linecache
import time
| random_line_split |
|
bdgbroadcall_cmd.py | # Time-stamp: <2019-09-25 10:04:48 taoliu>
"""Description: Fine-tuning script to call broad peaks from a single
bedGraph track for scores.
This code is free software; you can redistribute it and/or modify it
under the terms of the BSD License (see the file LICENSE included with
the distribution).
"""
# ------------------------------------
# python modules
# ------------------------------------
import sys
import os
import logging
from MACS2.IO import BedGraphIO
# ------------------------------------
# constants
# ------------------------------------
logging.basicConfig(level=20,
format='%(levelname)-5s @ %(asctime)s: %(message)s ',
datefmt='%a, %d %b %Y %H:%M:%S',
stream=sys.stderr,
filemode="w"
)
# ------------------------------------
# Misc functions
# ------------------------------------
error = logging.critical # function alias
warn = logging.warning
debug = logging.debug
info = logging.info
# ------------------------------------
# Classes
# ------------------------------------
# ------------------------------------
# Main function
# ------------------------------------
def | ( options ):
info("Read and build bedGraph...")
bio = BedGraphIO.bedGraphIO(options.ifile)
btrack = bio.build_bdgtrack(baseline_value=0)
info("Call peaks from bedGraph...")
bpeaks = btrack.call_broadpeaks (lvl1_cutoff=options.cutoffpeak, lvl2_cutoff=options.cutofflink, min_length=options.minlen, lvl1_max_gap=options.lvl1maxgap, lvl2_max_gap=options.lvl2maxgap)
info("Write peaks...")
if options.ofile:
bf = open( os.path.join( options.outdir, options.ofile ), "w" )
options.oprefix = options.ofile
else:
bf = open ( os.path.join( options.outdir, "%s_c%.1f_C%.2f_l%d_g%d_G%d_broad.bed12" % (options.oprefix,options.cutoffpeak,options.cutofflink,options.minlen,options.lvl1maxgap,options.lvl2maxgap)), "w" )
bpeaks.write_to_gappedPeak(bf, name_prefix=(options.oprefix+"_broadRegion").encode(), score_column="score", trackline=options.trackline)
info("Done")
| run | identifier_name |
bdgbroadcall_cmd.py | # Time-stamp: <2019-09-25 10:04:48 taoliu>
"""Description: Fine-tuning script to call broad peaks from a single
bedGraph track for scores.
This code is free software; you can redistribute it and/or modify it
under the terms of the BSD License (see the file LICENSE included with
the distribution).
"""
# ------------------------------------
# python modules
# ------------------------------------
import sys
import os
import logging
from MACS2.IO import BedGraphIO
# ------------------------------------
# constants
# ------------------------------------
logging.basicConfig(level=20,
format='%(levelname)-5s @ %(asctime)s: %(message)s ',
datefmt='%a, %d %b %Y %H:%M:%S',
stream=sys.stderr,
filemode="w"
)
# ------------------------------------
# Misc functions
# ------------------------------------
error = logging.critical # function alias
warn = logging.warning
debug = logging.debug
info = logging.info
# ------------------------------------
# Classes
# ------------------------------------
# ------------------------------------ | def run( options ):
info("Read and build bedGraph...")
bio = BedGraphIO.bedGraphIO(options.ifile)
btrack = bio.build_bdgtrack(baseline_value=0)
info("Call peaks from bedGraph...")
bpeaks = btrack.call_broadpeaks (lvl1_cutoff=options.cutoffpeak, lvl2_cutoff=options.cutofflink, min_length=options.minlen, lvl1_max_gap=options.lvl1maxgap, lvl2_max_gap=options.lvl2maxgap)
info("Write peaks...")
if options.ofile:
bf = open( os.path.join( options.outdir, options.ofile ), "w" )
options.oprefix = options.ofile
else:
bf = open ( os.path.join( options.outdir, "%s_c%.1f_C%.2f_l%d_g%d_G%d_broad.bed12" % (options.oprefix,options.cutoffpeak,options.cutofflink,options.minlen,options.lvl1maxgap,options.lvl2maxgap)), "w" )
bpeaks.write_to_gappedPeak(bf, name_prefix=(options.oprefix+"_broadRegion").encode(), score_column="score", trackline=options.trackline)
info("Done") | # Main function
# ------------------------------------ | random_line_split |
bdgbroadcall_cmd.py | # Time-stamp: <2019-09-25 10:04:48 taoliu>
"""Description: Fine-tuning script to call broad peaks from a single
bedGraph track for scores.
This code is free software; you can redistribute it and/or modify it
under the terms of the BSD License (see the file LICENSE included with
the distribution).
"""
# ------------------------------------
# python modules
# ------------------------------------
import sys
import os
import logging
from MACS2.IO import BedGraphIO
# ------------------------------------
# constants
# ------------------------------------
logging.basicConfig(level=20,
format='%(levelname)-5s @ %(asctime)s: %(message)s ',
datefmt='%a, %d %b %Y %H:%M:%S',
stream=sys.stderr,
filemode="w"
)
# ------------------------------------
# Misc functions
# ------------------------------------
error = logging.critical # function alias
warn = logging.warning
debug = logging.debug
info = logging.info
# ------------------------------------
# Classes
# ------------------------------------
# ------------------------------------
# Main function
# ------------------------------------
def run( options ):
| info("Read and build bedGraph...")
bio = BedGraphIO.bedGraphIO(options.ifile)
btrack = bio.build_bdgtrack(baseline_value=0)
info("Call peaks from bedGraph...")
bpeaks = btrack.call_broadpeaks (lvl1_cutoff=options.cutoffpeak, lvl2_cutoff=options.cutofflink, min_length=options.minlen, lvl1_max_gap=options.lvl1maxgap, lvl2_max_gap=options.lvl2maxgap)
info("Write peaks...")
if options.ofile:
bf = open( os.path.join( options.outdir, options.ofile ), "w" )
options.oprefix = options.ofile
else:
bf = open ( os.path.join( options.outdir, "%s_c%.1f_C%.2f_l%d_g%d_G%d_broad.bed12" % (options.oprefix,options.cutoffpeak,options.cutofflink,options.minlen,options.lvl1maxgap,options.lvl2maxgap)), "w" )
bpeaks.write_to_gappedPeak(bf, name_prefix=(options.oprefix+"_broadRegion").encode(), score_column="score", trackline=options.trackline)
info("Done") | identifier_body |
|
bdgbroadcall_cmd.py | # Time-stamp: <2019-09-25 10:04:48 taoliu>
"""Description: Fine-tuning script to call broad peaks from a single
bedGraph track for scores.
This code is free software; you can redistribute it and/or modify it
under the terms of the BSD License (see the file LICENSE included with
the distribution).
"""
# ------------------------------------
# python modules
# ------------------------------------
import sys
import os
import logging
from MACS2.IO import BedGraphIO
# ------------------------------------
# constants
# ------------------------------------
logging.basicConfig(level=20,
format='%(levelname)-5s @ %(asctime)s: %(message)s ',
datefmt='%a, %d %b %Y %H:%M:%S',
stream=sys.stderr,
filemode="w"
)
# ------------------------------------
# Misc functions
# ------------------------------------
error = logging.critical # function alias
warn = logging.warning
debug = logging.debug
info = logging.info
# ------------------------------------
# Classes
# ------------------------------------
# ------------------------------------
# Main function
# ------------------------------------
def run( options ):
info("Read and build bedGraph...")
bio = BedGraphIO.bedGraphIO(options.ifile)
btrack = bio.build_bdgtrack(baseline_value=0)
info("Call peaks from bedGraph...")
bpeaks = btrack.call_broadpeaks (lvl1_cutoff=options.cutoffpeak, lvl2_cutoff=options.cutofflink, min_length=options.minlen, lvl1_max_gap=options.lvl1maxgap, lvl2_max_gap=options.lvl2maxgap)
info("Write peaks...")
if options.ofile:
bf = open( os.path.join( options.outdir, options.ofile ), "w" )
options.oprefix = options.ofile
else:
|
bpeaks.write_to_gappedPeak(bf, name_prefix=(options.oprefix+"_broadRegion").encode(), score_column="score", trackline=options.trackline)
info("Done")
| bf = open ( os.path.join( options.outdir, "%s_c%.1f_C%.2f_l%d_g%d_G%d_broad.bed12" % (options.oprefix,options.cutoffpeak,options.cutofflink,options.minlen,options.lvl1maxgap,options.lvl2maxgap)), "w" ) | conditional_block |
ticker.ts | import { State } from './core/enum/state'
import { ITicker } from './core/interfaces/ITicker'
/**
* Main Fatina Ticker
* Parent of all the normal tween and sequence
*
* @export
* @class Ticker
* @extends {EventList}
* @implements {ITicker}
*/
export class Ticker implements ITicker {
public state = State.Idle
/**
* @private
*/
private timescale = 1
public elapsed = 0
public duration = 0
/**
* @private
*/
private tickCb: (dt: number) => void | undefined
/**
* @private
*/
private readonly ticks: Set<(dt: number) => void> = new Set()
/**
* @private
*/
private readonly newTicks: Set<(dt: number) => void> = new Set()
/**
* @private
*/
private parent: ITicker
/**
* @private
*/
private dt = 0
public setParent(parent: ITicker, tick: (dt: number) => void) {
this.tickCb = tick
this.parent = parent
}
/**
* Method used to change the timescale
*
* @param {number} scale
*/
public setTimescale(scale: number): void {
this.timescale = scale
}
/**
* Method used by the child to be updated
*
* @param {(dt: number) => void} cb
*/
public addTick(cb: (dt: number) => void): void {
this.newTicks.add(cb)
}
/**
* Method used by the child to not receive update anymore
*
* @param {(dt: number) => void} cb
*/
public removeTick(cb: (dt: number) => void): void {
if (!this.ticks.delete(cb)) |
}
/**
* Method used to tick all the child (tick listeners)
*
* @param {number} dt
* @returns
*/
public tick(dt: number) {
if (this.state !== State.Run) {
return
}
this.dt = dt * this.timescale
if (this.newTicks.size > 0) {
this.newTicks.forEach((tick) => this.ticks.add(tick))
this.newTicks.clear()
}
this.ticks.forEach((tick) => tick(this.dt))
this.elapsed += this.dt
}
public start(): void {
if (this.state === State.Idle) {
this.state = State.Run
}
}
public pause(): void {
if (this.state === State.Run) {
this.state = State.Pause
}
}
public resume(): void {
if (this.state === State.Pause) {
this.state = State.Run
}
}
public kill(): void {
if (this.state >= 3) {
return
}
if (this.parent && this.tickCb) {
this.parent.removeTick(this.tickCb)
}
this.state = State.Killed
}
public skip(): void {}
public reset(): void {
this.state = State.Idle
}
public get isIdle(): boolean {
return this.state === State.Idle
}
public get isRunning(): boolean {
return this.state === State.Run
}
public get isFinished(): boolean {
return this.state >= 3
}
public get isPaused(): boolean {
return this.state === State.Pause
}
}
| {
this.newTicks.delete(cb)
} | conditional_block |
ticker.ts | import { State } from './core/enum/state'
import { ITicker } from './core/interfaces/ITicker'
/**
* Main Fatina Ticker
* Parent of all the normal tween and sequence
*
* @export
* @class Ticker
* @extends {EventList}
* @implements {ITicker}
*/
export class Ticker implements ITicker {
public state = State.Idle
/**
* @private
*/
private timescale = 1
public elapsed = 0
public duration = 0
/**
* @private
*/
private tickCb: (dt: number) => void | undefined
/**
* @private
*/
private readonly ticks: Set<(dt: number) => void> = new Set()
/**
* @private
*/
private readonly newTicks: Set<(dt: number) => void> = new Set()
/**
* @private
*/
private parent: ITicker
/**
* @private
*/
private dt = 0
public setParent(parent: ITicker, tick: (dt: number) => void) {
this.tickCb = tick
this.parent = parent
}
/**
* Method used to change the timescale
*
* @param {number} scale
*/
public setTimescale(scale: number): void {
this.timescale = scale
}
/**
* Method used by the child to be updated
*
* @param {(dt: number) => void} cb
*/
public addTick(cb: (dt: number) => void): void {
this.newTicks.add(cb)
}
/**
* Method used by the child to not receive update anymore
*
* @param {(dt: number) => void} cb
*/
public removeTick(cb: (dt: number) => void): void {
if (!this.ticks.delete(cb)) {
this.newTicks.delete(cb)
}
}
/**
* Method used to tick all the child (tick listeners)
*
* @param {number} dt
* @returns
*/
public tick(dt: number) {
if (this.state !== State.Run) {
return
}
this.dt = dt * this.timescale
if (this.newTicks.size > 0) {
this.newTicks.forEach((tick) => this.ticks.add(tick))
this.newTicks.clear()
}
this.ticks.forEach((tick) => tick(this.dt))
this.elapsed += this.dt | }
public start(): void {
if (this.state === State.Idle) {
this.state = State.Run
}
}
public pause(): void {
if (this.state === State.Run) {
this.state = State.Pause
}
}
public resume(): void {
if (this.state === State.Pause) {
this.state = State.Run
}
}
public kill(): void {
if (this.state >= 3) {
return
}
if (this.parent && this.tickCb) {
this.parent.removeTick(this.tickCb)
}
this.state = State.Killed
}
public skip(): void {}
public reset(): void {
this.state = State.Idle
}
public get isIdle(): boolean {
return this.state === State.Idle
}
public get isRunning(): boolean {
return this.state === State.Run
}
public get isFinished(): boolean {
return this.state >= 3
}
public get isPaused(): boolean {
return this.state === State.Pause
}
} | random_line_split |
|
ticker.ts | import { State } from './core/enum/state'
import { ITicker } from './core/interfaces/ITicker'
/**
* Main Fatina Ticker
* Parent of all the normal tween and sequence
*
* @export
* @class Ticker
* @extends {EventList}
* @implements {ITicker}
*/
export class Ticker implements ITicker {
public state = State.Idle
/**
* @private
*/
private timescale = 1
public elapsed = 0
public duration = 0
/**
* @private
*/
private tickCb: (dt: number) => void | undefined
/**
* @private
*/
private readonly ticks: Set<(dt: number) => void> = new Set()
/**
* @private
*/
private readonly newTicks: Set<(dt: number) => void> = new Set()
/**
* @private
*/
private parent: ITicker
/**
* @private
*/
private dt = 0
public setParent(parent: ITicker, tick: (dt: number) => void) {
this.tickCb = tick
this.parent = parent
}
/**
* Method used to change the timescale
*
* @param {number} scale
*/
public setTimescale(scale: number): void {
this.timescale = scale
}
/**
* Method used by the child to be updated
*
* @param {(dt: number) => void} cb
*/
public addTick(cb: (dt: number) => void): void {
this.newTicks.add(cb)
}
/**
* Method used by the child to not receive update anymore
*
* @param {(dt: number) => void} cb
*/
public removeTick(cb: (dt: number) => void): void {
if (!this.ticks.delete(cb)) {
this.newTicks.delete(cb)
}
}
/**
* Method used to tick all the child (tick listeners)
*
* @param {number} dt
* @returns
*/
public tick(dt: number) {
if (this.state !== State.Run) {
return
}
this.dt = dt * this.timescale
if (this.newTicks.size > 0) {
this.newTicks.forEach((tick) => this.ticks.add(tick))
this.newTicks.clear()
}
this.ticks.forEach((tick) => tick(this.dt))
this.elapsed += this.dt
}
public start(): void {
if (this.state === State.Idle) {
this.state = State.Run
}
}
public pause(): void {
if (this.state === State.Run) {
this.state = State.Pause
}
}
public resume(): void {
if (this.state === State.Pause) {
this.state = State.Run
}
}
public kill(): void {
if (this.state >= 3) {
return
}
if (this.parent && this.tickCb) {
this.parent.removeTick(this.tickCb)
}
this.state = State.Killed
}
public skip(): void {}
public reset(): void {
this.state = State.Idle
}
public get isIdle(): boolean {
return this.state === State.Idle
}
public get isRunning(): boolean {
return this.state === State.Run
}
public get isFinished(): boolean {
return this.state >= 3
}
public get | (): boolean {
return this.state === State.Pause
}
}
| isPaused | identifier_name |
driver.ts |
import { createDecorator } from 'vs/platform/instantiation/common/instantiation';
// !! Do not remove the following START and END markers, they are parsed by the smoketest build
//*START
export interface IElement {
tagName: string;
className: string;
textContent: string;
attributes: { [name: string]: string; };
children: IElement[];
top: number;
left: number;
}
export interface IDriver {
readonly _serviceBrand: undefined;
getWindowIds(): Promise<number[]>;
capturePage(windowId: number): Promise<string>;
reloadWindow(windowId: number): Promise<void>;
exitApplication(): Promise<void>;
dispatchKeybinding(windowId: number, keybinding: string): Promise<void>;
click(windowId: number, selector: string, xoffset?: number | undefined, yoffset?: number | undefined): Promise<void>;
doubleClick(windowId: number, selector: string): Promise<void>;
setValue(windowId: number, selector: string, text: string): Promise<void>;
getTitle(windowId: number): Promise<string>;
isActiveElement(windowId: number, selector: string): Promise<boolean>;
getElements(windowId: number, selector: string, recursive?: boolean): Promise<IElement[]>;
getElementXY(windowId: number, selector: string, xoffset?: number, yoffset?: number): Promise<{ x: number; y: number; }>;
typeInEditor(windowId: number, selector: string, text: string): Promise<void>;
getTerminalBuffer(windowId: number, selector: string): Promise<string[]>;
writeInTerminal(windowId: number, selector: string, text: string): Promise<void>;
}
//*END
export const ID = 'driverService';
export const IDriver = createDecorator<IDriver>(ID);
export interface IWindowDriver {
click(selector: string, xoffset?: number | undefined, yoffset?: number | undefined): Promise<void>;
doubleClick(selector: string): Promise<void>;
setValue(selector: string, text: string): Promise<void>;
getTitle(): Promise<string>;
isActiveElement(selector: string): Promise<boolean>;
getElements(selector: string, recursive: boolean): Promise<IElement[]>;
getElementXY(selector: string, xoffset?: number, yoffset?: number): Promise<{ x: number; y: number; }>;
typeInEditor(selector: string, text: string): Promise<void>;
getTerminalBuffer(selector: string): Promise<string[]>;
writeInTerminal(selector: string, text: string): Promise<void>;
}
export interface IDriverOptions {
verbose: boolean;
}
export interface IWindowDriverRegistry {
registerWindowDriver(windowId: number): Promise<IDriverOptions>;
reloadWindowDriver(windowId: number): Promise<void>;
} | /*---------------------------------------------------------------------------------------------
* Copyright (c) Microsoft Corporation. All rights reserved.
* Licensed under the MIT License. See License.txt in the project root for license information.
*--------------------------------------------------------------------------------------------*/ | random_line_split |
|
wss.js | }
var Excp = require('excp.js');
var Session = require('session.js');
var Table = require('table.js');
function | ( option ) {
this.isOpen = false;
this.events = {};
this.conn_events = {};
this.host = option['wss'] || option['host'];
this.ss = new Session( option );
this.ss.start();
this.cid = option.app || '';
this.prefix= option['table.prefix'] || '';
this.table_name = option['ws.table'] || 'message';
this.user_table = option['user.table'] || 'user';
this.tab = new Table( option, this.table_name );
/**
* 读取当前线上用户
* @return Promise
*/
this.liveUsers = function() {
var that = this;
return new _P(function (resolve, reject) {
var eventBack = null;
if ( typeof that.events['getConnections'] == 'function' ) {
eventBack = that.events['getConnections'];
}
that.events['getConnections'] = function( res, status ) {
if ( eventBack != null ) {
that.events['getConnections'] = eventBack;
}
if ( status == 'success' ) {
resolve( res.response );
return;
} else {
var error = res.error || '读取当前线上用户失败';
reject(new Excp( error ,500, {
'status': status,
'res':res
}));
}
}
that.send('getConnections').catch( function( e ) {reject(e)} );
});
}
/**
* 查询用户是否在线 ( xpm-server 1.0rc4 以上 )
* @param string uid 用户ID
* @return Promise 在线 true 不在线 false
*/
this.isOnline = function( uid ) {
var that = this;
return new _P(function (resolve, reject) {
var eventBack = null;
if ( typeof that.events['ping'] == 'function' ) {
eventBack = that.events['ping'];
}
that.events['ping'] = function( res, status ) {
if ( eventBack != null ) {
that.events['ping'] = eventBack;
}
if ( status == 'success' ) {
if ( res.response.resp == 'pong' ) {
resolve(true);
return;
}
resolve(false);
return;
} else {
var error = res.error || '读取用户在线信息失败';
reject(new Excp( error ,500, {
'status': status,
'res':res
}));
}
}
that.send('ping','user online', uid).catch( function( e ) {reject(e)} );
});
}
/**
* 接收 WebSocket Sever 事件
* @param string evnet 事件名称,有效值 ( open/close/message/error )
* @param function callback( res ) 回调函数
* @return this
*/
this.bind = function ( evnet, callback ) {
this.conn_events[evnet] = callback;
return this;
}
/**
* 接收指令并响应
* @param string command 指令名称
* @param function callback( res, status ) 指定响应函数
* callback 参数表:
* object res.request 请求参数 {"c": "command","b": {"info": "你好!"},"t": 2 }
* object res.response 请求用户
* 登录用户: {"_id": 2, "nickName": "柳敏", "gender": 0, "avatarUrl": "..","language": "zh_CN","id": "e8e989e04627b7b5d73f019456f65d7d"}
* 未登录用户: {"id": "e8e989e04627b7b5d73f019456f65d7d"}
*
* string res.error 错误描述
* string status 返回状态 succes/error
* @return this
*/
this.listen = function ( command, callback ) {
this.events[command] = callback;
return this;
}
/**
* 发送指令
* @param string command 指令名称
* @param object params 请求参数
* @param string/int receiver 接受者 ( 连接 id 或者用户 id )
* @return Promise
*/
this.send = function ( command, params, receiver ) {
var that = this;
return new _P(function (resolve, reject) {
if (that.isOpen !== true ) {
reject(new Excp('WebSocket未连接', 401, {
'command':command,
'params': params,
'receiver': receiver
}));
}
receiver = receiver || null;
wx.sendSocketMessage({
data:JSON.stringify({'c':command, 'b':params, 't':receiver }),
success: function( res ){
resolve( true );
},
fail: function( res ){
reject(new Excp('消息发送失败',500,
{
'command':command,
'params': params,
'res':res
}));
}
});
});
}
/**
* 打开 Websocket 信道
* @param string channel 信道地址
* @param boolen ignore true: 如果信道已连接反返回正确信息,默认为 true
* @return Promise
*/
this.open = function( channel, ignore ) {
var that = this;
if ( typeof ignore == 'undefined') ignore = true;
return new _P(function (resolve, reject) {
if ( ignore && that.isOpen) {
resolve(true);
return
}
// wx.connectSocket BUG Android success (函数返回值不正确 )
wx.connectSocket({
url: 'wss://' + that.host + channel + '?_sid=' + that.ss.id() + '&_prefix=' + that.prefix + '&_table=' + that.table_name + '&_user=' + that.user_table+ '&_cid=' + that.cid,
success:function( res, status ){},
fail: function( res ){
// console.log( 'wx.connectSocket fail', res);
reject( new Excp(
'WebSocket Error', 500, {
'res':res,
'isOpen': that.isOpen,
'channel':channel,
'host':that.host
}
));
return;
}
});
wx.onSocketOpen(function(res) {
that.isOpen = true;
if ( typeof that.conn_events['open'] == 'function' ) {
try {
that.conn_events['open']( res );
} catch(e){}
}
resolve( res );
return;
});
// BUG Android 接收不到错误通知
wx.onSocketError(function(res){
// that.isOpen = false;
if ( typeof that.conn_events['error'] == 'function' ) {
try {
that.conn_events['error']( res );
} catch(e){}
}
reject( new Excp(
'WebSocket Error', 500, {
'res':res,
'isOpen': that.isOpen,
'channel':channel,
'host':that.host
}
));
return;
});
wx.onSocketClose(function(res) {
that.isOpen = false;
if ( typeof that.conn_events['close'] == 'function' ) {
try {
that.conn_events['close']( res );
} catch(e){}
}
});
wx.onSocketMessage(function( res ){
if ( typeof that.conn_events['message'] == 'function' ) {
try {
that.conn_events['message']( res );
} catch(e){}
}
if ( typeof res.data !== 'string' ) {
return;
}
var resp = JSON.parse( res.data );
resp['data'] = resp['data'] || {};
var code = resp['code'];
var req = resp['data']['request'] || {};
var res = resp['data']['response'] || {};
var error = resp['data']['error'] || null;
var cmd = req['c'] || null;
if ( code !== 0 ) {
if ( typeof that.events[cmd] == 'function' ) {
that.events[cmd]( {'request':req, 'response':res,'error':error}, 'error');
}
return;
}
if ( typeof that.events[cmd] == 'function' ) {
that.events[cmd]( {'request':req, 'response':res}, 'success');
}
});
});
| Wss | identifier_name |
wss.js | }
var Excp = require('excp.js');
var Session = require('session.js');
var Table = require('table.js');
function Wss( option ) | */
this.liveUsers = function() {
var that = this;
return new _P(function (resolve, reject) {
var eventBack = null;
if ( typeof that.events['getConnections'] == 'function' ) {
eventBack = that.events['getConnections'];
}
that.events['getConnections'] = function( res, status ) {
if ( eventBack != null ) {
that.events['getConnections'] = eventBack;
}
if ( status == 'success' ) {
resolve( res.response );
return;
} else {
var error = res.error || '读取当前线上用户失败';
reject(new Excp( error ,500, {
'status': status,
'res':res
}));
}
}
that.send('getConnections').catch( function( e ) {reject(e)} );
});
}
/**
* 查询用户是否在线 ( xpm-server 1.0rc4 以上 )
* @param string uid 用户ID
* @return Promise 在线 true 不在线 false
*/
this.isOnline = function( uid ) {
var that = this;
return new _P(function (resolve, reject) {
var eventBack = null;
if ( typeof that.events['ping'] == 'function' ) {
eventBack = that.events['ping'];
}
that.events['ping'] = function( res, status ) {
if ( eventBack != null ) {
that.events['ping'] = eventBack;
}
if ( status == 'success' ) {
if ( res.response.resp == 'pong' ) {
resolve(true);
return;
}
resolve(false);
return;
} else {
var error = res.error || '读取用户在线信息失败';
reject(new Excp( error ,500, {
'status': status,
'res':res
}));
}
}
that.send('ping','user online', uid).catch( function( e ) {reject(e)} );
});
}
/**
* 接收 WebSocket Sever 事件
* @param string evnet 事件名称,有效值 ( open/close/message/error )
* @param function callback( res ) 回调函数
* @return this
*/
this.bind = function ( evnet, callback ) {
this.conn_events[evnet] = callback;
return this;
}
/**
* 接收指令并响应
* @param string command 指令名称
* @param function callback( res, status ) 指定响应函数
* callback 参数表:
* object res.request 请求参数 {"c": "command","b": {"info": "你好!"},"t": 2 }
* object res.response 请求用户
* 登录用户: {"_id": 2, "nickName": "柳敏", "gender": 0, "avatarUrl": "..","language": "zh_CN","id": "e8e989e04627b7b5d73f019456f65d7d"}
* 未登录用户: {"id": "e8e989e04627b7b5d73f019456f65d7d"}
*
* string res.error 错误描述
* string status 返回状态 succes/error
* @return this
*/
this.listen = function ( command, callback ) {
this.events[command] = callback;
return this;
}
/**
* 发送指令
* @param string command 指令名称
* @param object params 请求参数
* @param string/int receiver 接受者 ( 连接 id 或者用户 id )
* @return Promise
*/
this.send = function ( command, params, receiver ) {
var that = this;
return new _P(function (resolve, reject) {
if (that.isOpen !== true ) {
reject(new Excp('WebSocket未连接', 401, {
'command':command,
'params': params,
'receiver': receiver
}));
}
receiver = receiver || null;
wx.sendSocketMessage({
data:JSON.stringify({'c':command, 'b':params, 't':receiver }),
success: function( res ){
resolve( true );
},
fail: function( res ){
reject(new Excp('消息发送失败',500,
{
'command':command,
'params': params,
'res':res
}));
}
});
});
}
/**
* 打开 Websocket 信道
* @param string channel 信道地址
* @param boolen ignore true: 如果信道已连接反返回正确信息,默认为 true
* @return Promise
*/
this.open = function( channel, ignore ) {
var that = this;
if ( typeof ignore == 'undefined') ignore = true;
return new _P(function (resolve, reject) {
if ( ignore && that.isOpen) {
resolve(true);
return
}
// wx.connectSocket BUG Android success (函数返回值不正确 )
wx.connectSocket({
url: 'wss://' + that.host + channel + '?_sid=' + that.ss.id() + '&_prefix=' + that.prefix + '&_table=' + that.table_name + '&_user=' + that.user_table+ '&_cid=' + that.cid,
success:function( res, status ){},
fail: function( res ){
// console.log( 'wx.connectSocket fail', res);
reject( new Excp(
'WebSocket Error', 500, {
'res':res,
'isOpen': that.isOpen,
'channel':channel,
'host':that.host
}
));
return;
}
});
wx.onSocketOpen(function(res) {
that.isOpen = true;
if ( typeof that.conn_events['open'] == 'function' ) {
try {
that.conn_events['open']( res );
} catch(e){}
}
resolve( res );
return;
});
// BUG Android 接收不到错误通知
wx.onSocketError(function(res){
// that.isOpen = false;
if ( typeof that.conn_events['error'] == 'function' ) {
try {
that.conn_events['error']( res );
} catch(e){}
}
reject( new Excp(
'WebSocket Error', 500, {
'res':res,
'isOpen': that.isOpen,
'channel':channel,
'host':that.host
}
));
return;
});
wx.onSocketClose(function(res) {
that.isOpen = false;
if ( typeof that.conn_events['close'] == 'function' ) {
try {
that.conn_events['close']( res );
} catch(e){}
}
});
wx.onSocketMessage(function( res ){
if ( typeof that.conn_events['message'] == 'function' ) {
try {
that.conn_events['message']( res );
} catch(e){}
}
if ( typeof res.data !== 'string' ) {
return;
}
var resp = JSON.parse( res.data );
resp['data'] = resp['data'] || {};
var code = resp['code'];
var req = resp['data']['request'] || {};
var res = resp['data']['response'] || {};
var error = resp['data']['error'] || null;
var cmd = req['c'] || null;
if ( code !== 0 ) {
if ( typeof that.events[cmd] == 'function' ) {
that.events[cmd]( {'request':req, 'response':res,'error':error}, 'error');
}
return;
}
if ( typeof that.events[cmd] == 'function' ) {
that.events[cmd]( {'request':req, 'response':res}, 'success');
}
});
});
| {
this.isOpen = false;
this.events = {};
this.conn_events = {};
this.host = option['wss'] || option['host'];
this.ss = new Session( option );
this.ss.start();
this.cid = option.app || '';
this.prefix= option['table.prefix'] || '';
this.table_name = option['ws.table'] || 'message';
this.user_table = option['user.table'] || 'user';
this.tab = new Table( option, this.table_name );
/**
* 读取当前线上用户
* @return Promise | identifier_body |
wss.js | option.app || '';
this.prefix= option['table.prefix'] || '';
this.table_name = option['ws.table'] || 'message';
this.user_table = option['user.table'] || 'user';
this.tab = new Table( option, this.table_name );
/**
* 读取当前线上用户
* @return Promise
*/
this.liveUsers = function() {
var that = this;
return new _P(function (resolve, reject) {
var eventBack = null;
if ( typeof that.events['getConnections'] == 'function' ) {
eventBack = that.events['getConnections'];
}
that.events['getConnections'] = function( res, status ) {
if ( eventBack != null ) {
that.events['getConnections'] = eventBack;
}
if ( status == 'success' ) {
resolve( res.response );
return;
} else {
var error = res.error || '读取当前线上用户失败';
reject(new Excp( error ,500, {
'status': status,
'res':res
}));
}
}
that.send('getConnections').catch( function( e ) {reject(e)} );
});
}
/**
* 查询用户是否在线 ( xpm-server 1.0rc4 以上 )
* @param string uid 用户ID
* @return Promise 在线 true 不在线 false
*/
this.isOnline = function( uid ) {
var that = this;
return new _P(function (resolve, reject) {
var eventBack = null;
if ( typeof that.events['ping'] == 'function' ) {
eventBack = that.events['ping'];
}
that.events['ping'] = function( res, status ) {
if ( eventBack != null ) {
that.events['ping'] = eventBack;
}
if ( status == 'success' ) {
if ( res.response.resp == 'pong' ) {
resolve(true);
return;
}
resolve(false);
return;
} else {
var error = res.error || '读取用户在线信息失败';
reject(new Excp( error ,500, {
'status': status,
'res':res
}));
}
}
that.send('ping','user online', uid).catch( function( e ) {reject(e)} );
});
}
/**
* 接收 WebSocket Sever 事件
* @param string evnet 事件名称,有效值 ( open/close/message/error )
* @param function callback( res ) 回调函数
* @return this
*/
this.bind = function ( evnet, callback ) {
this.conn_events[evnet] = callback;
return this;
}
/**
* 接收指令并响应
* @param string command 指令名称
* @param function callback( res, status ) 指定响应函数
* callback 参数表:
* object res.request 请求参数 {"c": "command","b": {"info": "你好!"},"t": 2 }
* object res.response 请求用户
* 登录用户: {"_id": 2, "nickName": "柳敏", "gender": 0, "avatarUrl": "..","language": "zh_CN","id": "e8e989e04627b7b5d73f019456f65d7d"}
* 未登录用户: {"id": "e8e989e04627b7b5d73f019456f65d7d"}
*
* string res.error 错误描述
* string status 返回状态 succes/error
* @return this
*/
this.listen = function ( command, callback ) {
this.events[command] = callback;
return this;
}
/**
* 发送指令
* @param string command 指令名称
* @param object params 请求参数
* @param string/int receiver 接受者 ( 连接 id 或者用户 id )
* @return Promise
*/
this.send = function ( command, params, receiver ) {
var that = this;
return new _P(function (resolve, reject) {
if (that.isOpen !== true ) {
reject(new Excp('WebSocket未连接', 401, {
'command':command,
'params': params,
'receiver': receiver
}));
}
receiver = receiver || null;
wx.sendSocketMessage({
data:JSON.stringify({'c':command, 'b':params, 't':receiver }),
success: function( res ){
resolve( true );
},
fail: function( res ){
reject(new Excp('消息发送失败',500,
{
'command':command,
'params': params,
'res':res
}));
}
});
});
}
/**
* 打开 Websocket 信道
* @param string channel 信道地址
* @param boolen ignore true: 如果信道已连接反返回正确信息,默认为 true
* @return Promise
*/
this.open = function( channel, ignore ) {
var that = this;
if ( typeof ignore == 'undefined') ignore = true;
return new _P(function (resolve, reject) {
if ( ignore && that.isOpen) {
resolve(true);
return
}
// wx.connectSocket BUG Android success (函数返回值不正确 )
wx.connectSocket({
url: 'wss://' + that.host + channel + '?_sid=' + that.ss.id() + '&_prefix=' + that.prefix + '&_table=' + that.table_name + '&_user=' + that.user_table+ '&_cid=' + that.cid,
success:function( res, status ){},
fail: function( res ){
// console.log( 'wx.connectSocket fail', res);
reject( new Excp(
'WebSocket Error', 500, {
'res':res,
'isOpen': that.isOpen,
'channel':channel,
'host':that.host
}
));
return;
}
});
wx.onSocketOpen(function(res) {
that.isOpen = true;
if ( typeof that.conn_events['open'] == 'function' ) {
try {
that.conn_events['open']( res );
} catch(e){}
}
resolve( res );
return;
});
// BUG Android 接收不到错误通知
wx.onSocketError(function(res){
// that.isOpen = false;
if ( typeof that.conn_events['error'] == 'function' ) {
try {
that.conn_events['error']( res );
} catch(e){}
}
reject( new Excp(
'WebSocket Error', 500, {
'res':res,
'isOpen': that.isOpen,
'channel':channel,
'host':that.host
}
));
return;
});
wx.onSocketClose(function(res) {
that.isOpen = false;
if ( typeof that.conn_events['close'] == 'function' ) {
try {
that.conn_events['close']( res );
} catch(e){}
}
});
wx.onSocketMessage(function( res ){
if ( typeof that.conn_events['message'] == 'function' ) {
try {
that.conn_events['message']( res );
} catch(e){}
}
if ( typeof res.data !== 'string' ) {
return;
}
var resp = JSON.parse( res.data );
resp['data'] = resp['data'] || {};
var code = resp['code'];
var req = resp['data']['request'] || {};
var res = resp['data']['response'] || {};
var error = resp['data']['error'] || null;
var cmd = req['c'] || null;
if ( code !== 0 ) {
if ( typeof that.events[cmd] == 'function' ) {
that.events[cmd]( {'request':req, 'response':res,'error':error}, 'error');
}
return;
}
if ( typeof that.events[cmd] == 'function' ) {
that.events[cmd]( {'request':req, 'response':res}, 'success');
}
});
});
}
/**
* 关闭 Websocket 信道
* @return null
*/
this.close = function() {
wx.closeSocket();
}
/**
* 设定/查询信道 | 鉴权 ( 需要管理员权限 )
* @return Promise
*/
this._acl = function() {
}
}
module.exports = W | conditional_block |
|
wss.js | }
var Excp = require('excp.js');
var Session = require('session.js');
var Table = require('table.js');
function Wss( option ) {
this.isOpen = false;
this.events = {};
this.conn_events = {};
this.host = option['wss'] || option['host'];
this.ss = new Session( option );
this.ss.start();
this.cid = option.app || '';
this.prefix= option['table.prefix'] || '';
this.table_name = option['ws.table'] || 'message';
this.user_table = option['user.table'] || 'user';
this.tab = new Table( option, this.table_name );
/**
* 读取当前线上用户
* @return Promise
*/
this.liveUsers = function() {
var that = this;
return new _P(function (resolve, reject) {
var eventBack = null;
if ( typeof that.events['getConnections'] == 'function' ) {
eventBack = that.events['getConnections'];
}
that.events['getConnections'] = function( res, status ) {
if ( eventBack != null ) {
that.events['getConnections'] = eventBack;
}
if ( status == 'success' ) {
resolve( res.response );
return;
} else {
var error = res.error || '读取当前线上用户失败';
reject(new Excp( error ,500, {
'status': status,
'res':res
}));
}
}
that.send('getConnections').catch( function( e ) {reject(e)} );
});
}
/**
* 查询用户是否在线 ( xpm-server 1.0rc4 以上 )
* @param string uid 用户ID
* @return Promise 在线 true 不在线 false
*/
this.isOnline = function( uid ) {
var that = this;
return new _P(function (resolve, reject) {
var eventBack = null;
if ( typeof that.events['ping'] == 'function' ) {
eventBack = that.events['ping'];
}
that.events['ping'] = function( res, status ) {
if ( eventBack != null ) {
that.events['ping'] = eventBack;
}
if ( status == 'success' ) {
if ( res.response.resp == 'pong' ) {
resolve(true);
return;
}
resolve(false);
return;
} else {
var error = res.error || '读取用户在线信息失败';
reject(new Excp( error ,500, {
'status': status,
'res':res
}));
}
}
that.send('ping','user online', uid).catch( function( e ) {reject(e)} );
});
}
/**
* 接收 WebSocket Sever 事件
* @param string evnet 事件名称,有效值 ( open/close/message/error )
* @param function callback( res ) 回调函数
* @return this
*/
this.bind = function ( evnet, callback ) {
this.conn_events[evnet] = callback;
return this;
}
/**
* 接收指令并响应
* @param string command 指令名称
* @param function callback( res, status ) 指定响应函数
* callback 参数表:
* object res.request 请求参数 {"c": "command","b": {"info": "你好!"},"t": 2 }
* object res.response 请求用户
* 登录用户: {"_id": 2, "nickName": "柳敏", "gender": 0, "avatarUrl": "..","language": "zh_CN","id": "e8e989e04627b7b5d73f019456f65d7d"}
* 未登录用户: {"id": "e8e989e04627b7b5d73f019456f65d7d"}
*
* string res.error 错误描述
* string status 返回状态 succes/error
* @return this
*/
this.listen = function ( command, callback ) {
this.events[command] = callback;
return this;
}
/**
* 发送指令
* @param string command 指令名称
* @param object params 请求参数
* @param string/int receiver 接受者 ( 连接 id 或者用户 id )
* @return Promise
*/
this.send = function ( command, params, receiver ) {
var that = this;
return new _P(function (resolve, reject) {
if (that.isOpen !== true ) {
reject(new Excp('WebSocket未连接', 401, {
'command':command,
'params': params,
'receiver': receiver
}));
}
receiver = receiver || null;
wx.sendSocketMessage({
data:JSON.stringify({'c':command, 'b':params, 't':receiver }),
success: function( res ){
resolve( true );
},
fail: function( res ){
reject(new Excp('消息发送失败',500,
{
'command':command,
'params': params,
'res':res
}));
}
});
});
}
/**
* 打开 Websocket 信道
* @param string channel 信道地址
* @param boolen ignore true: 如果信道已连接反返回正确信息,默认为 true
* @return Promise
*/
this.open = function( channel, ignore ) {
var that = this;
if ( typeof ignore == 'undefined') ignore = true;
return new _P(function (resolve, reject) {
if ( ignore && that.isOpen) {
resolve(true);
return
}
// wx.connectSocket BUG Android success (函数返回值不正确 )
wx.connectSocket({
url: 'wss://' + that.host + channel + '?_sid=' + that.ss.id() + '&_prefix=' + that.prefix + '&_table=' + that.table_name + '&_user=' + that.user_table+ '&_cid=' + that.cid,
success:function( res, status ){},
fail: function( res ){
// console.log( 'wx.connectSocket fail', res);
reject( new Excp(
'WebSocket Error', 500, {
'res':res,
'isOpen': that.isOpen,
'channel':channel,
'host':that.host
}
));
return;
}
});
wx.onSocketOpen(function(res) {
that.isOpen = true;
if ( typeof that.conn_events['open'] == 'function' ) {
try {
that.conn_events['open']( res );
} catch(e){}
}
resolve( res );
return;
});
// BUG Android 接收不到错误通知
wx.onSocketError(function(res){
// that.isOpen = false;
if ( typeof that.conn_events['error'] == 'function' ) {
try {
that.conn_events['error']( res );
} catch(e){}
}
reject( new Excp(
'WebSocket Error', 500, {
'res':res,
'isOpen': that.isOpen,
'channel':channel,
'host':that.host
}
));
return;
});
wx.onSocketClose(function(res) {
that.isOpen = false;
if ( typeof that.conn_events['close'] == 'function' ) {
try {
that.conn_events['close']( res );
} catch(e){}
}
});
wx.onSocketMessage(function( res ){
if ( typeof that.conn_events['message'] == 'function' ) {
try { | return;
}
var resp = JSON.parse( res.data );
resp['data'] = resp['data'] || {};
var code = resp['code'];
var req = resp['data']['request'] || {};
var res = resp['data']['response'] || {};
var error = resp['data']['error'] || null;
var cmd = req['c'] || null;
if ( code !== 0 ) {
if ( typeof that.events[cmd] == 'function' ) {
that.events[cmd]( {'request':req, 'response':res,'error':error}, 'error');
}
return;
}
if ( typeof that.events[cmd] == 'function' ) {
that.events[cmd]( {'request':req, 'response':res}, 'success');
}
});
});
| that.conn_events['message']( res );
} catch(e){}
}
if ( typeof res.data !== 'string' ) { | random_line_split |
hateoas.py | # -*- coding: utf8 -*-
# This file is part of PyBossa.
#
# Copyright (C) 2013 SF Isle of Man Limited
#
# PyBossa is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# PyBossa is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with PyBossa. If not, see <http://www.gnu.org/licenses/>.
from flask import url_for
class Hateoas(object):
def link(self, rel, title, href):
return "<link rel='%s' title='%s' href='%s'/>" % (rel, title, href)
def create_link(self, item, rel='self'):
title = item.__class__.__name__.lower()
method = ".api_%s" % title
href = url_for(method, id=item.id, _external=True)
return self.link(rel, title, href)
def create_links(self, item):
| if item.category_id is not None:
links.append(self.create_link(item.category, rel='category'))
return links, link
else:
return False
def remove_links(self, item):
"""Remove HATEOAS link and links from item"""
if item.get('link'):
item.pop('link')
if item.get('links'):
item.pop('links')
return item
| cls = item.__class__.__name__.lower()
if cls == 'taskrun':
link = self.create_link(item)
links = []
if item.app_id is not None:
links.append(self.create_link(item.app, rel='parent'))
if item.task_id is not None:
links.append(self.create_link(item.task, rel='parent'))
return links, link
elif cls == 'task':
link = self.create_link(item)
links = []
if item.app_id is not None:
links = [self.create_link(item.app, rel='parent')]
return links, link
elif cls == 'category':
return None, self.create_link(item)
elif cls == 'app':
link = self.create_link(item)
links = [] | identifier_body |
hateoas.py | # -*- coding: utf8 -*-
# This file is part of PyBossa.
#
# Copyright (C) 2013 SF Isle of Man Limited
#
# PyBossa is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# PyBossa is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with PyBossa. If not, see <http://www.gnu.org/licenses/>.
from flask import url_for
class Hateoas(object):
def link(self, rel, title, href):
return "<link rel='%s' title='%s' href='%s'/>" % (rel, title, href)
def create_link(self, item, rel='self'):
title = item.__class__.__name__.lower()
method = ".api_%s" % title
href = url_for(method, id=item.id, _external=True)
return self.link(rel, title, href)
def create_links(self, item):
cls = item.__class__.__name__.lower()
if cls == 'taskrun':
|
elif cls == 'task':
link = self.create_link(item)
links = []
if item.app_id is not None:
links = [self.create_link(item.app, rel='parent')]
return links, link
elif cls == 'category':
return None, self.create_link(item)
elif cls == 'app':
link = self.create_link(item)
links = []
if item.category_id is not None:
links.append(self.create_link(item.category, rel='category'))
return links, link
else:
return False
def remove_links(self, item):
"""Remove HATEOAS link and links from item"""
if item.get('link'):
item.pop('link')
if item.get('links'):
item.pop('links')
return item
| link = self.create_link(item)
links = []
if item.app_id is not None:
links.append(self.create_link(item.app, rel='parent'))
if item.task_id is not None:
links.append(self.create_link(item.task, rel='parent'))
return links, link | conditional_block |
hateoas.py | # -*- coding: utf8 -*-
# This file is part of PyBossa.
#
# Copyright (C) 2013 SF Isle of Man Limited
#
# PyBossa is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# PyBossa is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with PyBossa. If not, see <http://www.gnu.org/licenses/>.
|
class Hateoas(object):
def link(self, rel, title, href):
return "<link rel='%s' title='%s' href='%s'/>" % (rel, title, href)
def create_link(self, item, rel='self'):
title = item.__class__.__name__.lower()
method = ".api_%s" % title
href = url_for(method, id=item.id, _external=True)
return self.link(rel, title, href)
def create_links(self, item):
cls = item.__class__.__name__.lower()
if cls == 'taskrun':
link = self.create_link(item)
links = []
if item.app_id is not None:
links.append(self.create_link(item.app, rel='parent'))
if item.task_id is not None:
links.append(self.create_link(item.task, rel='parent'))
return links, link
elif cls == 'task':
link = self.create_link(item)
links = []
if item.app_id is not None:
links = [self.create_link(item.app, rel='parent')]
return links, link
elif cls == 'category':
return None, self.create_link(item)
elif cls == 'app':
link = self.create_link(item)
links = []
if item.category_id is not None:
links.append(self.create_link(item.category, rel='category'))
return links, link
else:
return False
def remove_links(self, item):
"""Remove HATEOAS link and links from item"""
if item.get('link'):
item.pop('link')
if item.get('links'):
item.pop('links')
return item | from flask import url_for
| random_line_split |
hateoas.py | # -*- coding: utf8 -*-
# This file is part of PyBossa.
#
# Copyright (C) 2013 SF Isle of Man Limited
#
# PyBossa is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# PyBossa is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with PyBossa. If not, see <http://www.gnu.org/licenses/>.
from flask import url_for
class Hateoas(object):
def | (self, rel, title, href):
return "<link rel='%s' title='%s' href='%s'/>" % (rel, title, href)
def create_link(self, item, rel='self'):
title = item.__class__.__name__.lower()
method = ".api_%s" % title
href = url_for(method, id=item.id, _external=True)
return self.link(rel, title, href)
def create_links(self, item):
cls = item.__class__.__name__.lower()
if cls == 'taskrun':
link = self.create_link(item)
links = []
if item.app_id is not None:
links.append(self.create_link(item.app, rel='parent'))
if item.task_id is not None:
links.append(self.create_link(item.task, rel='parent'))
return links, link
elif cls == 'task':
link = self.create_link(item)
links = []
if item.app_id is not None:
links = [self.create_link(item.app, rel='parent')]
return links, link
elif cls == 'category':
return None, self.create_link(item)
elif cls == 'app':
link = self.create_link(item)
links = []
if item.category_id is not None:
links.append(self.create_link(item.category, rel='category'))
return links, link
else:
return False
def remove_links(self, item):
"""Remove HATEOAS link and links from item"""
if item.get('link'):
item.pop('link')
if item.get('links'):
item.pop('links')
return item
| link | identifier_name |
socialnetwork.py | # -*- coding: utf-8 -*-
from canaimagnulinux.wizard.interfaces import IChat
from canaimagnulinux.wizard.interfaces import ISocialNetwork
from canaimagnulinux.wizard.utils import CanaimaGnuLinuxWizardMF as _
from collective.beaker.interfaces import ISession
from collective.z3cform.wizard import wizard
from plone import api
from plone.z3cform.fieldsets import group
from z3c.form import field
try:
from zope.browserpage import viewpagetemplatefile
except ImportError:
# Plone < 4.1
from zope.app.pagetemplate import viewpagetemplatefile
import logging
logger = logging.getLogger(__name__)
class ChatGroup(group.Group):
prefix = 'chats'
label = _(u'Chats Information')
fields = field.Fields(IChat)
class SocialNetworkGroup(group.Group):
prefix = 'socialnetwork'
label = _(u'Social Network Information')
fields = field.Fields(ISocialNetwork)
class SocialNetworkStep(wizard.GroupStep):
prefix = 'Social'
label = _(u'Social Network accounts')
description = _(u'Input your social networks details')
template = viewpagetemplatefile.ViewPageTemplateFile('templates/socialnetwork.pt')
fields = field.Fields()
groups = [ChatGroup, SocialNetworkGroup]
def | (self, context, request, wizard):
# Use collective.beaker for session managment
session = ISession(request, None)
self.sessionmanager = session
super(SocialNetworkStep, self).__init__(context, request, wizard)
def load(self, context):
member = api.user.get_current()
data = self.getContent()
# Chats group
if not data.get('irc', None):
irc = member.getProperty('irc')
if type(irc).__name__ == 'object':
irc = None
data['irc'] = irc
if not data.get('telegram', None):
telegram = member.getProperty('telegram')
if type(telegram).__name__ == 'object':
telegram = None
data['telegram'] = telegram
if not data.get('skype', None):
skype = member.getProperty('skype')
if type(skype).__name__ == 'object':
skype = None
data['skype'] = skype
# Social Network group
if not data.get('twitter', None):
twitter = member.getProperty('twitter')
if type(twitter).__name__ == 'object':
twitter = None
data['twitter'] = twitter
if not data.get('instagram', None):
instagram = member.getProperty('instagram')
if type(instagram).__name__ == 'object':
instagram = None
data['instagram'] = instagram
if not data.get('facebook', None):
facebook = member.getProperty('facebook')
if type(facebook).__name__ == 'object':
facebook = None
data['facebook'] = facebook
def apply(self, context, initial_finish=False):
data = self.getContent()
return data
def applyChanges(self, data):
member = api.user.get_current()
member.setMemberProperties(mapping={
'irc': data['irc'],
'telegram': data['telegram'],
'skype': data['skype'],
'twitter': data['twitter'],
'instagram': data['instagram'],
'facebook': data['facebook']}
)
| __init__ | identifier_name |
socialnetwork.py | # -*- coding: utf-8 -*-
from canaimagnulinux.wizard.interfaces import IChat
from canaimagnulinux.wizard.interfaces import ISocialNetwork
from canaimagnulinux.wizard.utils import CanaimaGnuLinuxWizardMF as _
from collective.beaker.interfaces import ISession
from collective.z3cform.wizard import wizard
from plone import api
from plone.z3cform.fieldsets import group
from z3c.form import field
try:
from zope.browserpage import viewpagetemplatefile
except ImportError: | # Plone < 4.1
from zope.app.pagetemplate import viewpagetemplatefile
import logging
logger = logging.getLogger(__name__)
class ChatGroup(group.Group):
prefix = 'chats'
label = _(u'Chats Information')
fields = field.Fields(IChat)
class SocialNetworkGroup(group.Group):
prefix = 'socialnetwork'
label = _(u'Social Network Information')
fields = field.Fields(ISocialNetwork)
class SocialNetworkStep(wizard.GroupStep):
prefix = 'Social'
label = _(u'Social Network accounts')
description = _(u'Input your social networks details')
template = viewpagetemplatefile.ViewPageTemplateFile('templates/socialnetwork.pt')
fields = field.Fields()
groups = [ChatGroup, SocialNetworkGroup]
def __init__(self, context, request, wizard):
# Use collective.beaker for session managment
session = ISession(request, None)
self.sessionmanager = session
super(SocialNetworkStep, self).__init__(context, request, wizard)
def load(self, context):
member = api.user.get_current()
data = self.getContent()
# Chats group
if not data.get('irc', None):
irc = member.getProperty('irc')
if type(irc).__name__ == 'object':
irc = None
data['irc'] = irc
if not data.get('telegram', None):
telegram = member.getProperty('telegram')
if type(telegram).__name__ == 'object':
telegram = None
data['telegram'] = telegram
if not data.get('skype', None):
skype = member.getProperty('skype')
if type(skype).__name__ == 'object':
skype = None
data['skype'] = skype
# Social Network group
if not data.get('twitter', None):
twitter = member.getProperty('twitter')
if type(twitter).__name__ == 'object':
twitter = None
data['twitter'] = twitter
if not data.get('instagram', None):
instagram = member.getProperty('instagram')
if type(instagram).__name__ == 'object':
instagram = None
data['instagram'] = instagram
if not data.get('facebook', None):
facebook = member.getProperty('facebook')
if type(facebook).__name__ == 'object':
facebook = None
data['facebook'] = facebook
def apply(self, context, initial_finish=False):
data = self.getContent()
return data
def applyChanges(self, data):
member = api.user.get_current()
member.setMemberProperties(mapping={
'irc': data['irc'],
'telegram': data['telegram'],
'skype': data['skype'],
'twitter': data['twitter'],
'instagram': data['instagram'],
'facebook': data['facebook']}
) | random_line_split |
|
socialnetwork.py | # -*- coding: utf-8 -*-
from canaimagnulinux.wizard.interfaces import IChat
from canaimagnulinux.wizard.interfaces import ISocialNetwork
from canaimagnulinux.wizard.utils import CanaimaGnuLinuxWizardMF as _
from collective.beaker.interfaces import ISession
from collective.z3cform.wizard import wizard
from plone import api
from plone.z3cform.fieldsets import group
from z3c.form import field
try:
from zope.browserpage import viewpagetemplatefile
except ImportError:
# Plone < 4.1
from zope.app.pagetemplate import viewpagetemplatefile
import logging
logger = logging.getLogger(__name__)
class ChatGroup(group.Group):
prefix = 'chats'
label = _(u'Chats Information')
fields = field.Fields(IChat)
class SocialNetworkGroup(group.Group):
prefix = 'socialnetwork'
label = _(u'Social Network Information')
fields = field.Fields(ISocialNetwork)
class SocialNetworkStep(wizard.GroupStep):
prefix = 'Social'
label = _(u'Social Network accounts')
description = _(u'Input your social networks details')
template = viewpagetemplatefile.ViewPageTemplateFile('templates/socialnetwork.pt')
fields = field.Fields()
groups = [ChatGroup, SocialNetworkGroup]
def __init__(self, context, request, wizard):
# Use collective.beaker for session managment
session = ISession(request, None)
self.sessionmanager = session
super(SocialNetworkStep, self).__init__(context, request, wizard)
def load(self, context):
member = api.user.get_current()
data = self.getContent()
# Chats group
if not data.get('irc', None):
irc = member.getProperty('irc')
if type(irc).__name__ == 'object':
irc = None
data['irc'] = irc
if not data.get('telegram', None):
|
if not data.get('skype', None):
skype = member.getProperty('skype')
if type(skype).__name__ == 'object':
skype = None
data['skype'] = skype
# Social Network group
if not data.get('twitter', None):
twitter = member.getProperty('twitter')
if type(twitter).__name__ == 'object':
twitter = None
data['twitter'] = twitter
if not data.get('instagram', None):
instagram = member.getProperty('instagram')
if type(instagram).__name__ == 'object':
instagram = None
data['instagram'] = instagram
if not data.get('facebook', None):
facebook = member.getProperty('facebook')
if type(facebook).__name__ == 'object':
facebook = None
data['facebook'] = facebook
def apply(self, context, initial_finish=False):
data = self.getContent()
return data
def applyChanges(self, data):
member = api.user.get_current()
member.setMemberProperties(mapping={
'irc': data['irc'],
'telegram': data['telegram'],
'skype': data['skype'],
'twitter': data['twitter'],
'instagram': data['instagram'],
'facebook': data['facebook']}
)
| telegram = member.getProperty('telegram')
if type(telegram).__name__ == 'object':
telegram = None
data['telegram'] = telegram | conditional_block |
socialnetwork.py | # -*- coding: utf-8 -*-
from canaimagnulinux.wizard.interfaces import IChat
from canaimagnulinux.wizard.interfaces import ISocialNetwork
from canaimagnulinux.wizard.utils import CanaimaGnuLinuxWizardMF as _
from collective.beaker.interfaces import ISession
from collective.z3cform.wizard import wizard
from plone import api
from plone.z3cform.fieldsets import group
from z3c.form import field
try:
from zope.browserpage import viewpagetemplatefile
except ImportError:
# Plone < 4.1
from zope.app.pagetemplate import viewpagetemplatefile
import logging
logger = logging.getLogger(__name__)
class ChatGroup(group.Group):
prefix = 'chats'
label = _(u'Chats Information')
fields = field.Fields(IChat)
class SocialNetworkGroup(group.Group):
prefix = 'socialnetwork'
label = _(u'Social Network Information')
fields = field.Fields(ISocialNetwork)
class SocialNetworkStep(wizard.GroupStep):
| if not data.get('irc', None):
irc = member.getProperty('irc')
if type(irc).__name__ == 'object':
irc = None
data['irc'] = irc
if not data.get('telegram', None):
telegram = member.getProperty('telegram')
if type(telegram).__name__ == 'object':
telegram = None
data['telegram'] = telegram
if not data.get('skype', None):
skype = member.getProperty('skype')
if type(skype).__name__ == 'object':
skype = None
data['skype'] = skype
# Social Network group
if not data.get('twitter', None):
twitter = member.getProperty('twitter')
if type(twitter).__name__ == 'object':
twitter = None
data['twitter'] = twitter
if not data.get('instagram', None):
instagram = member.getProperty('instagram')
if type(instagram).__name__ == 'object':
instagram = None
data['instagram'] = instagram
if not data.get('facebook', None):
facebook = member.getProperty('facebook')
if type(facebook).__name__ == 'object':
facebook = None
data['facebook'] = facebook
def apply(self, context, initial_finish=False):
data = self.getContent()
return data
def applyChanges(self, data):
member = api.user.get_current()
member.setMemberProperties(mapping={
'irc': data['irc'],
'telegram': data['telegram'],
'skype': data['skype'],
'twitter': data['twitter'],
'instagram': data['instagram'],
'facebook': data['facebook']}
)
| prefix = 'Social'
label = _(u'Social Network accounts')
description = _(u'Input your social networks details')
template = viewpagetemplatefile.ViewPageTemplateFile('templates/socialnetwork.pt')
fields = field.Fields()
groups = [ChatGroup, SocialNetworkGroup]
def __init__(self, context, request, wizard):
# Use collective.beaker for session managment
session = ISession(request, None)
self.sessionmanager = session
super(SocialNetworkStep, self).__init__(context, request, wizard)
def load(self, context):
member = api.user.get_current()
data = self.getContent()
# Chats group | identifier_body |
index.js | date line processing
createTile = require('./tile'); // final simplified tile generation
function geojsonvt(data, options) {
return new GeoJSONVT(data, options);
}
function GeoJSONVT(data, options) {
options = this.options = extend(Object.create(this.options), options);
var debug = options.debug;
if (debug) console.time('preprocess data');
var z2 = 1 << options.maxZoom, // 2^z
features = convert(data, options.tolerance / (z2 * options.extent));
this.tiles = {};
this.tileCoords = [];
if (debug) {
console.timeEnd('preprocess data');
console.log('index: maxZoom: %d, maxPoints: %d', options.indexMaxZoom, options.indexMaxPoints);
console.time('generate tiles');
this.stats = {};
this.total = 0;
}
features = wrap(features, options.buffer / options.extent, intersectX);
// start slicing from the top tile down
if (features.length) this.splitTile(features, 0, 0, 0);
if (debug) {
if (features.length) console.log('features: %d, points: %d', this.tiles[0].numFeatures, this.tiles[0].numPoints);
console.timeEnd('generate tiles');
console.log('tiles generated:', this.total, JSON.stringify(this.stats));
}
}
GeoJSONVT.prototype.options = {
maxZoom: 14, // max zoom to preserve detail on
indexMaxZoom: 5, // max zoom in the tile index
indexMaxPoints: 100000, // max number of points per tile in the tile index
solidChildren: false, // whether to tile solid square tiles further
tolerance: 3, // simplification tolerance (higher means simpler)
extent: 4096, // tile extent
buffer: 64, // tile buffer on each side
debug: 0 // logging level (0, 1 or 2)
}; | var stack = [features, z, x, y],
options = this.options,
debug = options.debug;
// avoid recursion by using a processing queue
while (stack.length) {
y = stack.pop();
x = stack.pop();
z = stack.pop();
features = stack.pop();
var z2 = 1 << z,
id = toID(z, x, y),
tile = this.tiles[id],
tileTolerance = z === options.maxZoom ? 0 : options.tolerance / (z2 * options.extent);
if (!tile) {
if (debug > 1) console.time('creation');
tile = this.tiles[id] = createTile(features, z2, x, y, tileTolerance, z === options.maxZoom);
this.tileCoords.push({z: z, x: x, y: y});
if (debug) {
if (debug > 1) {
console.log('tile z%d-%d-%d (features: %d, points: %d, simplified: %d)',
z, x, y, tile.numFeatures, tile.numPoints, tile.numSimplified);
console.timeEnd('creation');
}
var key = 'z' + z;
this.stats[key] = (this.stats[key] || 0) + 1;
this.total++;
}
}
// save reference to original geometry in tile so that we can drill down later if we stop now
tile.source = features;
// stop tiling if the tile is solid clipped square
if (!options.solidChildren && isClippedSquare(tile, options.extent, options.buffer)) continue;
// if it's the first-pass tiling
if (!cz) {
// stop tiling if we reached max zoom, or if the tile is too simple
if (z === options.indexMaxZoom || tile.numPoints <= options.indexMaxPoints) continue;
// if a drilldown to a specific tile
} else {
// stop tiling if we reached base zoom or our target tile zoom
if (z === options.maxZoom || z === cz) continue;
// stop tiling if it's not an ancestor of the target tile
var m = 1 << (cz - z);
if (x !== Math.floor(cx / m) || y !== Math.floor(cy / m)) continue;
}
// if we slice further down, no need to keep source geometry
tile.source = null;
if (debug > 1) console.time('clipping');
// values we'll use for clipping
var k1 = 0.5 * options.buffer / options.extent,
k2 = 0.5 - k1,
k3 = 0.5 + k1,
k4 = 1 + k1,
tl, bl, tr, br, left, right;
tl = bl = tr = br = null;
left = clip(features, z2, x - k1, x + k3, 0, intersectX, tile.min[0], tile.max[0]);
right = clip(features, z2, x + k2, x + k4, 0, intersectX, tile.min[0], tile.max[0]);
if (left) {
tl = clip(left, z2, y - k1, y + k3, 1, intersectY, tile.min[1], tile.max[1]);
bl = clip(left, z2, y + k2, y + k4, 1, intersectY, tile.min[1], tile.max[1]);
}
if (right) {
tr = clip(right, z2, y - k1, y + k3, 1, intersectY, tile.min[1], tile.max[1]);
br = clip(right, z2, y + k2, y + k4, 1, intersectY, tile.min[1], tile.max[1]);
}
if (debug > 1) console.timeEnd('clipping');
if (tl) stack.push(tl, z + 1, x * 2, y * 2);
if (bl) stack.push(bl, z + 1, x * 2, y * 2 + 1);
if (tr) stack.push(tr, z + 1, x * 2 + 1, y * 2);
if (br) stack.push(br, z + 1, x * 2 + 1, y * 2 + 1);
}
};
GeoJSONVT.prototype.getTile = function (z, x, y) {
var options = this.options,
extent = options.extent,
debug = options.debug;
var z2 = 1 << z;
x = ((x % z2) + z2) % z2; // wrap tile x coordinate
var id = toID(z, x, y);
if (this.tiles[id]) return transform.tile(this.tiles[id], extent);
if (debug > 1) console.log('drilling down to z%d-%d-%d', z, x, y);
var z0 = z,
x0 = x,
y0 = y,
parent;
while (!parent && z0 > 0) {
z0--;
x0 = Math.floor(x0 / 2);
y0 = Math.floor(y0 / 2);
parent = this.tiles[toID(z0, x0, y0)];
}
if (!parent) return null;
if (debug > 1) console.log('found parent tile z%d-%d-%d', z0, x0, y0);
// if we found a parent tile containing the original geometry, we can drill down from it
if (parent.source) {
if (isClippedSquare(parent, extent, options.buffer)) return transform.tile(parent, extent);
if (debug > 1) console.time('drilling down');
this.splitTile(parent.source, z0, x0, y0, z, x, y);
if (debug > 1) console.timeEnd('drilling down');
}
if (!this.tiles[id]) return null;
return transform.tile(this.tiles[id], extent);
};
function toID(z, x, y) {
return (((1 << z) * y + x) * 32) + z;
}
function intersectX(a, b, x) {
return [x, (x - a[0]) * (b[1] - a[1]) / (b[0] - a[0]) + a[1], 1];
}
function intersectY(a, b, y) {
return [(y - a[1]) * (b[0] - a[0]) / (b[1] - a[1]) + a[0], y, 1];
}
function extend(dest, src) {
for (var i in src) dest[i] = src[i];
return dest;
}
// checks whether a tile is a whole-area fill after clipping; if it is, there's no sense slicing it further
function isClippedSquare(tile, extent, buffer) {
var features = tile.source;
if (features.length !== 1) return false;
var feature = |
GeoJSONVT.prototype.splitTile = function (features, z, x, y, cz, cx, cy) {
| random_line_split |
index.js | date line processing
createTile = require('./tile'); // final simplified tile generation
function geojsonvt(data, options) {
return new GeoJSONVT(data, options);
}
function GeoJSONVT(data, options) {
options = this.options = extend(Object.create(this.options), options);
var debug = options.debug;
if (debug) console.time('preprocess data');
var z2 = 1 << options.maxZoom, // 2^z
features = convert(data, options.tolerance / (z2 * options.extent));
this.tiles = {};
this.tileCoords = [];
if (debug) {
console.timeEnd('preprocess data');
console.log('index: maxZoom: %d, maxPoints: %d', options.indexMaxZoom, options.indexMaxPoints);
console.time('generate tiles');
this.stats = {};
this.total = 0;
}
features = wrap(features, options.buffer / options.extent, intersectX);
// start slicing from the top tile down
if (features.length) this.splitTile(features, 0, 0, 0);
if (debug) {
if (features.length) console.log('features: %d, points: %d', this.tiles[0].numFeatures, this.tiles[0].numPoints);
console.timeEnd('generate tiles');
console.log('tiles generated:', this.total, JSON.stringify(this.stats));
}
}
GeoJSONVT.prototype.options = {
maxZoom: 14, // max zoom to preserve detail on
indexMaxZoom: 5, // max zoom in the tile index
indexMaxPoints: 100000, // max number of points per tile in the tile index
solidChildren: false, // whether to tile solid square tiles further
tolerance: 3, // simplification tolerance (higher means simpler)
extent: 4096, // tile extent
buffer: 64, // tile buffer on each side
debug: 0 // logging level (0, 1 or 2)
};
GeoJSONVT.prototype.splitTile = function (features, z, x, y, cz, cx, cy) {
var stack = [features, z, x, y],
options = this.options,
debug = options.debug;
// avoid recursion by using a processing queue
while (stack.length) {
y = stack.pop();
x = stack.pop();
z = stack.pop();
features = stack.pop();
var z2 = 1 << z,
id = toID(z, x, y),
tile = this.tiles[id],
tileTolerance = z === options.maxZoom ? 0 : options.tolerance / (z2 * options.extent);
if (!tile) {
if (debug > 1) console.time('creation');
tile = this.tiles[id] = createTile(features, z2, x, y, tileTolerance, z === options.maxZoom);
this.tileCoords.push({z: z, x: x, y: y});
if (debug) {
if (debug > 1) {
console.log('tile z%d-%d-%d (features: %d, points: %d, simplified: %d)',
z, x, y, tile.numFeatures, tile.numPoints, tile.numSimplified);
console.timeEnd('creation');
}
var key = 'z' + z;
this.stats[key] = (this.stats[key] || 0) + 1;
this.total++;
}
}
// save reference to original geometry in tile so that we can drill down later if we stop now
tile.source = features;
// stop tiling if the tile is solid clipped square
if (!options.solidChildren && isClippedSquare(tile, options.extent, options.buffer)) continue;
// if it's the first-pass tiling
if (!cz) {
// stop tiling if we reached max zoom, or if the tile is too simple
if (z === options.indexMaxZoom || tile.numPoints <= options.indexMaxPoints) continue;
// if a drilldown to a specific tile
} else {
// stop tiling if we reached base zoom or our target tile zoom
if (z === options.maxZoom || z === cz) continue;
// stop tiling if it's not an ancestor of the target tile
var m = 1 << (cz - z);
if (x !== Math.floor(cx / m) || y !== Math.floor(cy / m)) continue;
}
// if we slice further down, no need to keep source geometry
tile.source = null;
if (debug > 1) console.time('clipping');
// values we'll use for clipping
var k1 = 0.5 * options.buffer / options.extent,
k2 = 0.5 - k1,
k3 = 0.5 + k1,
k4 = 1 + k1,
tl, bl, tr, br, left, right;
tl = bl = tr = br = null;
left = clip(features, z2, x - k1, x + k3, 0, intersectX, tile.min[0], tile.max[0]);
right = clip(features, z2, x + k2, x + k4, 0, intersectX, tile.min[0], tile.max[0]);
if (left) {
tl = clip(left, z2, y - k1, y + k3, 1, intersectY, tile.min[1], tile.max[1]);
bl = clip(left, z2, y + k2, y + k4, 1, intersectY, tile.min[1], tile.max[1]);
}
if (right) {
tr = clip(right, z2, y - k1, y + k3, 1, intersectY, tile.min[1], tile.max[1]);
br = clip(right, z2, y + k2, y + k4, 1, intersectY, tile.min[1], tile.max[1]);
}
if (debug > 1) console.timeEnd('clipping');
if (tl) stack.push(tl, z + 1, x * 2, y * 2);
if (bl) stack.push(bl, z + 1, x * 2, y * 2 + 1);
if (tr) stack.push(tr, z + 1, x * 2 + 1, y * 2);
if (br) stack.push(br, z + 1, x * 2 + 1, y * 2 + 1);
}
};
GeoJSONVT.prototype.getTile = function (z, x, y) {
var options = this.options,
extent = options.extent,
debug = options.debug;
var z2 = 1 << z;
x = ((x % z2) + z2) % z2; // wrap tile x coordinate
var id = toID(z, x, y);
if (this.tiles[id]) return transform.tile(this.tiles[id], extent);
if (debug > 1) console.log('drilling down to z%d-%d-%d', z, x, y);
var z0 = z,
x0 = x,
y0 = y,
parent;
while (!parent && z0 > 0) {
z0--;
x0 = Math.floor(x0 / 2);
y0 = Math.floor(y0 / 2);
parent = this.tiles[toID(z0, x0, y0)];
}
if (!parent) return null;
if (debug > 1) console.log('found parent tile z%d-%d-%d', z0, x0, y0);
// if we found a parent tile containing the original geometry, we can drill down from it
if (parent.source) {
if (isClippedSquare(parent, extent, options.buffer)) return transform.tile(parent, extent);
if (debug > 1) console.time('drilling down');
this.splitTile(parent.source, z0, x0, y0, z, x, y);
if (debug > 1) console.timeEnd('drilling down');
}
if (!this.tiles[id]) return null;
return transform.tile(this.tiles[id], extent);
};
function toID(z, x, y) {
return (((1 << z) * y + x) * 32) + z;
}
function intersectX(a, b, x) {
return [x, (x - a[0]) * (b[1] - a[1]) / (b[0] - a[0]) + a[1], 1];
}
function | (a, b, y) {
return [(y - a[1]) * (b[0] - a[0]) / (b[1] - a[1]) + a[0], y, 1];
}
function extend(dest, src) {
for (var i in src) dest[i] = src[i];
return dest;
}
// checks whether a tile is a whole-area fill after clipping; if it is, there's no sense slicing it further
function isClippedSquare(tile, extent, buffer) {
var features = tile.source;
if (features.length !== 1) return false;
var feature = | intersectY | identifier_name |
index.js | date line processing
createTile = require('./tile'); // final simplified tile generation
function geojsonvt(data, options) {
return new GeoJSONVT(data, options);
}
function GeoJSONVT(data, options) {
options = this.options = extend(Object.create(this.options), options);
var debug = options.debug;
if (debug) console.time('preprocess data');
var z2 = 1 << options.maxZoom, // 2^z
features = convert(data, options.tolerance / (z2 * options.extent));
this.tiles = {};
this.tileCoords = [];
if (debug) {
console.timeEnd('preprocess data');
console.log('index: maxZoom: %d, maxPoints: %d', options.indexMaxZoom, options.indexMaxPoints);
console.time('generate tiles');
this.stats = {};
this.total = 0;
}
features = wrap(features, options.buffer / options.extent, intersectX);
// start slicing from the top tile down
if (features.length) this.splitTile(features, 0, 0, 0);
if (debug) {
if (features.length) console.log('features: %d, points: %d', this.tiles[0].numFeatures, this.tiles[0].numPoints);
console.timeEnd('generate tiles');
console.log('tiles generated:', this.total, JSON.stringify(this.stats));
}
}
GeoJSONVT.prototype.options = {
maxZoom: 14, // max zoom to preserve detail on
indexMaxZoom: 5, // max zoom in the tile index
indexMaxPoints: 100000, // max number of points per tile in the tile index
solidChildren: false, // whether to tile solid square tiles further
tolerance: 3, // simplification tolerance (higher means simpler)
extent: 4096, // tile extent
buffer: 64, // tile buffer on each side
debug: 0 // logging level (0, 1 or 2)
};
GeoJSONVT.prototype.splitTile = function (features, z, x, y, cz, cx, cy) {
var stack = [features, z, x, y],
options = this.options,
debug = options.debug;
// avoid recursion by using a processing queue
while (stack.length) {
y = stack.pop();
x = stack.pop();
z = stack.pop();
features = stack.pop();
var z2 = 1 << z,
id = toID(z, x, y),
tile = this.tiles[id],
tileTolerance = z === options.maxZoom ? 0 : options.tolerance / (z2 * options.extent);
if (!tile) {
if (debug > 1) console.time('creation');
tile = this.tiles[id] = createTile(features, z2, x, y, tileTolerance, z === options.maxZoom);
this.tileCoords.push({z: z, x: x, y: y});
if (debug) {
if (debug > 1) {
console.log('tile z%d-%d-%d (features: %d, points: %d, simplified: %d)',
z, x, y, tile.numFeatures, tile.numPoints, tile.numSimplified);
console.timeEnd('creation');
}
var key = 'z' + z;
this.stats[key] = (this.stats[key] || 0) + 1;
this.total++;
}
}
// save reference to original geometry in tile so that we can drill down later if we stop now
tile.source = features;
// stop tiling if the tile is solid clipped square
if (!options.solidChildren && isClippedSquare(tile, options.extent, options.buffer)) continue;
// if it's the first-pass tiling
if (!cz) {
// stop tiling if we reached max zoom, or if the tile is too simple
if (z === options.indexMaxZoom || tile.numPoints <= options.indexMaxPoints) continue;
// if a drilldown to a specific tile
} else {
// stop tiling if we reached base zoom or our target tile zoom
if (z === options.maxZoom || z === cz) continue;
// stop tiling if it's not an ancestor of the target tile
var m = 1 << (cz - z);
if (x !== Math.floor(cx / m) || y !== Math.floor(cy / m)) continue;
}
// if we slice further down, no need to keep source geometry
tile.source = null;
if (debug > 1) console.time('clipping');
// values we'll use for clipping
var k1 = 0.5 * options.buffer / options.extent,
k2 = 0.5 - k1,
k3 = 0.5 + k1,
k4 = 1 + k1,
tl, bl, tr, br, left, right;
tl = bl = tr = br = null;
left = clip(features, z2, x - k1, x + k3, 0, intersectX, tile.min[0], tile.max[0]);
right = clip(features, z2, x + k2, x + k4, 0, intersectX, tile.min[0], tile.max[0]);
if (left) {
tl = clip(left, z2, y - k1, y + k3, 1, intersectY, tile.min[1], tile.max[1]);
bl = clip(left, z2, y + k2, y + k4, 1, intersectY, tile.min[1], tile.max[1]);
}
if (right) {
tr = clip(right, z2, y - k1, y + k3, 1, intersectY, tile.min[1], tile.max[1]);
br = clip(right, z2, y + k2, y + k4, 1, intersectY, tile.min[1], tile.max[1]);
}
if (debug > 1) console.timeEnd('clipping');
if (tl) stack.push(tl, z + 1, x * 2, y * 2);
if (bl) stack.push(bl, z + 1, x * 2, y * 2 + 1);
if (tr) stack.push(tr, z + 1, x * 2 + 1, y * 2);
if (br) stack.push(br, z + 1, x * 2 + 1, y * 2 + 1);
}
};
GeoJSONVT.prototype.getTile = function (z, x, y) {
var options = this.options,
extent = options.extent,
debug = options.debug;
var z2 = 1 << z;
x = ((x % z2) + z2) % z2; // wrap tile x coordinate
var id = toID(z, x, y);
if (this.tiles[id]) return transform.tile(this.tiles[id], extent);
if (debug > 1) console.log('drilling down to z%d-%d-%d', z, x, y);
var z0 = z,
x0 = x,
y0 = y,
parent;
while (!parent && z0 > 0) {
z0--;
x0 = Math.floor(x0 / 2);
y0 = Math.floor(y0 / 2);
parent = this.tiles[toID(z0, x0, y0)];
}
if (!parent) return null;
if (debug > 1) console.log('found parent tile z%d-%d-%d', z0, x0, y0);
// if we found a parent tile containing the original geometry, we can drill down from it
if (parent.source) {
if (isClippedSquare(parent, extent, options.buffer)) return transform.tile(parent, extent);
if (debug > 1) console.time('drilling down');
this.splitTile(parent.source, z0, x0, y0, z, x, y);
if (debug > 1) console.timeEnd('drilling down');
}
if (!this.tiles[id]) return null;
return transform.tile(this.tiles[id], extent);
};
function toID(z, x, y) {
return (((1 << z) * y + x) * 32) + z;
}
function intersectX(a, b, x) {
return [x, (x - a[0]) * (b[1] - a[1]) / (b[0] - a[0]) + a[1], 1];
}
function intersectY(a, b, y) {
return [(y - a[1]) * (b[0] - a[0]) / (b[1] - a[1]) + a[0], y, 1];
}
function extend(dest, src) |
// checks whether a tile is a whole-area fill after clipping; if it is, there's no sense slicing it further
function isClippedSquare(tile, extent, buffer) {
var features = tile.source;
if (features.length !== 1) return false;
var feature | {
for (var i in src) dest[i] = src[i];
return dest;
} | identifier_body |
IncTemplate.js | 'use strict';
var util = require('util');
var GtReq = require('../GtReq');
var BaseTemplate = require('./BaseTemplate');
function | (options) {
BaseTemplate.call(this, options);
options = util._extend({
transmissionContent: '',
incAppId: ''
}, options);
util._extend(this, options);
}
util.inherits(IncTemplate, BaseTemplate);
IncTemplate.prototype.getActionChain = function() {
var actionChain1 = new GtReq.ActionChain({
actionId: 1,
type: GtReq.ActionChain.Type.Goto,
next: 10030
});
var appStartUp = new GtReq.AppStartUp({
android: '',
symbia: '',
ios: ''
});
// 启动app
// Start the app
var actionChain2 = new GtReq.ActionChain({
actionId: 10030,
type: GtReq.ActionChain.Type.startapp,
appid: this.incAppId,
autostart: 1 === this.transmissionType,
appstartupid: appStartUp,
failedAction: 100,
next: 100
});
// 结束
// Finish
var actionChain3 = new GtReq.ActionChain({
actionId: 100,
type: GtReq.ActionChain.Type.eoa
});
var actionChains = [actionChain1, actionChain2, actionChain3];
return actionChains;
};
IncTemplate.prototype.getTransmissionContent = function() {
return this.transmissionContent;
};
IncTemplate.prototype.getPushType = function() {
return 'TransmissionMsg';
};
/**
* 设置 透传消息类型 1:收到通知立即启动应用 2:收到通知不启动应用
* Set direct display message type 1:Start the app once gets notification. 2:Not to start the app once gets notification
* @param transmissionType
*/
IncTemplate.prototype.setTransmissionType = function(transmissionType) {
this.transmissionType = transmissionType;
return this;
};
IncTemplate.prototype.setTransmissionContent = function(transmissionContent) {
this.transmissionContent = transmissionContent;
return this;
};
IncTemplate.prototype.setIncAppId = function(incAppId) {
this.incAppId = incAppId;
return this;
};
module.exports = IncTemplate; | IncTemplate | identifier_name |
IncTemplate.js | 'use strict';
var util = require('util');
var GtReq = require('../GtReq');
var BaseTemplate = require('./BaseTemplate');
function IncTemplate(options) {
BaseTemplate.call(this, options);
options = util._extend({
transmissionContent: '',
incAppId: ''
}, options);
util._extend(this, options);
}
util.inherits(IncTemplate, BaseTemplate);
IncTemplate.prototype.getActionChain = function() {
var actionChain1 = new GtReq.ActionChain({
actionId: 1,
type: GtReq.ActionChain.Type.Goto,
next: 10030
});
var appStartUp = new GtReq.AppStartUp({
| // 启动app
// Start the app
var actionChain2 = new GtReq.ActionChain({
actionId: 10030,
type: GtReq.ActionChain.Type.startapp,
appid: this.incAppId,
autostart: 1 === this.transmissionType,
appstartupid: appStartUp,
failedAction: 100,
next: 100
});
// 结束
// Finish
var actionChain3 = new GtReq.ActionChain({
actionId: 100,
type: GtReq.ActionChain.Type.eoa
});
var actionChains = [actionChain1, actionChain2, actionChain3];
return actionChains;
};
IncTemplate.prototype.getTransmissionContent = function() {
return this.transmissionContent;
};
IncTemplate.prototype.getPushType = function() {
return 'TransmissionMsg';
};
/**
* 设置 透传消息类型 1:收到通知立即启动应用 2:收到通知不启动应用
* Set direct display message type 1:Start the app once gets notification. 2:Not to start the app once gets notification
* @param transmissionType
*/
IncTemplate.prototype.setTransmissionType = function(transmissionType) {
this.transmissionType = transmissionType;
return this;
};
IncTemplate.prototype.setTransmissionContent = function(transmissionContent) {
this.transmissionContent = transmissionContent;
return this;
};
IncTemplate.prototype.setIncAppId = function(incAppId) {
this.incAppId = incAppId;
return this;
};
module.exports = IncTemplate; | android: '',
symbia: '',
ios: ''
});
| random_line_split |
IncTemplate.js | 'use strict';
var util = require('util');
var GtReq = require('../GtReq');
var BaseTemplate = require('./BaseTemplate');
function IncTemplate(options) |
util.inherits(IncTemplate, BaseTemplate);
IncTemplate.prototype.getActionChain = function() {
var actionChain1 = new GtReq.ActionChain({
actionId: 1,
type: GtReq.ActionChain.Type.Goto,
next: 10030
});
var appStartUp = new GtReq.AppStartUp({
android: '',
symbia: '',
ios: ''
});
// 启动app
// Start the app
var actionChain2 = new GtReq.ActionChain({
actionId: 10030,
type: GtReq.ActionChain.Type.startapp,
appid: this.incAppId,
autostart: 1 === this.transmissionType,
appstartupid: appStartUp,
failedAction: 100,
next: 100
});
// 结束
// Finish
var actionChain3 = new GtReq.ActionChain({
actionId: 100,
type: GtReq.ActionChain.Type.eoa
});
var actionChains = [actionChain1, actionChain2, actionChain3];
return actionChains;
};
IncTemplate.prototype.getTransmissionContent = function() {
return this.transmissionContent;
};
IncTemplate.prototype.getPushType = function() {
return 'TransmissionMsg';
};
/**
* 设置 透传消息类型 1:收到通知立即启动应用 2:收到通知不启动应用
* Set direct display message type 1:Start the app once gets notification. 2:Not to start the app once gets notification
* @param transmissionType
*/
IncTemplate.prototype.setTransmissionType = function(transmissionType) {
this.transmissionType = transmissionType;
return this;
};
IncTemplate.prototype.setTransmissionContent = function(transmissionContent) {
this.transmissionContent = transmissionContent;
return this;
};
IncTemplate.prototype.setIncAppId = function(incAppId) {
this.incAppId = incAppId;
return this;
};
module.exports = IncTemplate; | {
BaseTemplate.call(this, options);
options = util._extend({
transmissionContent: '',
incAppId: ''
}, options);
util._extend(this, options);
} | identifier_body |
applayerframetype.rs | /* Copyright (C) 2021 Open Information Security Foundation
*
* You can copy, redistribute or modify this Program under the terms of
* the GNU General Public License version 2 as published by the Free
* Software Foundation.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* version 2 along with this program; if not, write to the Free Software
* Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA
* 02110-1301, USA.
*/
extern crate proc_macro;
use proc_macro::TokenStream;
use quote::quote;
use syn::{self, parse_macro_input, DeriveInput};
pub fn | (input: TokenStream) -> TokenStream {
let input = parse_macro_input!(input as DeriveInput);
let name = input.ident;
let mut fields = Vec::new();
let mut vals = Vec::new();
let mut cstrings = Vec::new();
let mut names = Vec::new();
match input.data {
syn::Data::Enum(ref data) => {
for (i, v) in (&data.variants).into_iter().enumerate() {
fields.push(v.ident.clone());
let name = transform_name(&v.ident.to_string());
let cname = format!("{}\0", name);
names.push(name);
cstrings.push(cname);
vals.push(i as u8);
}
}
_ => panic!("AppLayerFrameType can only be derived for enums"),
}
let expanded = quote! {
impl crate::applayer::AppLayerFrameType for #name {
fn from_u8(val: u8) -> Option<Self> {
match val {
#( #vals => Some(#name::#fields) ,)*
_ => None,
}
}
fn as_u8(&self) -> u8 {
match *self {
#( #name::#fields => #vals ,)*
}
}
fn to_cstring(&self) -> *const std::os::raw::c_char {
let s = match *self {
#( #name::#fields => #cstrings ,)*
};
s.as_ptr() as *const std::os::raw::c_char
}
fn from_str(s: &str) -> Option<#name> {
match s {
#( #names => Some(#name::#fields) ,)*
_ => None
}
}
}
};
proc_macro::TokenStream::from(expanded)
}
fn transform_name(name: &str) -> String {
let mut xname = String::new();
let chars: Vec<char> = name.chars().collect();
for i in 0..chars.len() {
if i > 0 && i < chars.len() - 1 && chars[i].is_uppercase() && chars[i + 1].is_lowercase() {
xname.push('.');
}
xname.push_str(&chars[i].to_lowercase().to_string());
}
xname
}
#[cfg(test)]
mod test {
use super::*;
#[test]
fn test_transform_name() {
assert_eq!(transform_name("One"), "one");
assert_eq!(transform_name("OneTwo"), "one.two");
assert_eq!(transform_name("OneTwoThree"), "one.two.three");
assert_eq!(transform_name("NBSS"), "nbss");
assert_eq!(transform_name("NBSSHdr"), "nbss.hdr");
assert_eq!(transform_name("SMB3Data"), "smb3.data");
}
}
| derive_app_layer_frame_type | identifier_name |
applayerframetype.rs | /* Copyright (C) 2021 Open Information Security Foundation
*
* You can copy, redistribute or modify this Program under the terms of
* the GNU General Public License version 2 as published by the Free
* Software Foundation.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* version 2 along with this program; if not, write to the Free Software
* Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA
* 02110-1301, USA.
*/
extern crate proc_macro;
use proc_macro::TokenStream;
use quote::quote;
use syn::{self, parse_macro_input, DeriveInput};
pub fn derive_app_layer_frame_type(input: TokenStream) -> TokenStream {
let input = parse_macro_input!(input as DeriveInput);
let name = input.ident;
let mut fields = Vec::new();
let mut vals = Vec::new();
let mut cstrings = Vec::new();
let mut names = Vec::new();
match input.data {
syn::Data::Enum(ref data) => {
for (i, v) in (&data.variants).into_iter().enumerate() {
fields.push(v.ident.clone());
let name = transform_name(&v.ident.to_string());
let cname = format!("{}\0", name);
names.push(name);
cstrings.push(cname);
vals.push(i as u8);
}
}
_ => panic!("AppLayerFrameType can only be derived for enums"),
}
let expanded = quote! {
impl crate::applayer::AppLayerFrameType for #name {
fn from_u8(val: u8) -> Option<Self> {
match val {
#( #vals => Some(#name::#fields) ,)*
_ => None,
}
}
fn as_u8(&self) -> u8 {
match *self {
#( #name::#fields => #vals ,)*
}
}
fn to_cstring(&self) -> *const std::os::raw::c_char {
let s = match *self {
#( #name::#fields => #cstrings ,)*
};
s.as_ptr() as *const std::os::raw::c_char
}
fn from_str(s: &str) -> Option<#name> {
match s {
#( #names => Some(#name::#fields) ,)*
_ => None
}
}
}
};
proc_macro::TokenStream::from(expanded)
}
fn transform_name(name: &str) -> String {
let mut xname = String::new();
let chars: Vec<char> = name.chars().collect();
for i in 0..chars.len() {
if i > 0 && i < chars.len() - 1 && chars[i].is_uppercase() && chars[i + 1].is_lowercase() |
xname.push_str(&chars[i].to_lowercase().to_string());
}
xname
}
#[cfg(test)]
mod test {
use super::*;
#[test]
fn test_transform_name() {
assert_eq!(transform_name("One"), "one");
assert_eq!(transform_name("OneTwo"), "one.two");
assert_eq!(transform_name("OneTwoThree"), "one.two.three");
assert_eq!(transform_name("NBSS"), "nbss");
assert_eq!(transform_name("NBSSHdr"), "nbss.hdr");
assert_eq!(transform_name("SMB3Data"), "smb3.data");
}
}
| {
xname.push('.');
} | conditional_block |
applayerframetype.rs | /* Copyright (C) 2021 Open Information Security Foundation
*
* You can copy, redistribute or modify this Program under the terms of
* the GNU General Public License version 2 as published by the Free
* Software Foundation.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* version 2 along with this program; if not, write to the Free Software
* Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA
* 02110-1301, USA.
*/
extern crate proc_macro;
use proc_macro::TokenStream;
use quote::quote;
use syn::{self, parse_macro_input, DeriveInput};
pub fn derive_app_layer_frame_type(input: TokenStream) -> TokenStream {
let input = parse_macro_input!(input as DeriveInput);
let name = input.ident;
let mut fields = Vec::new();
let mut vals = Vec::new();
let mut cstrings = Vec::new();
let mut names = Vec::new();
match input.data {
syn::Data::Enum(ref data) => {
for (i, v) in (&data.variants).into_iter().enumerate() {
fields.push(v.ident.clone());
let name = transform_name(&v.ident.to_string());
let cname = format!("{}\0", name);
names.push(name);
cstrings.push(cname);
vals.push(i as u8);
}
}
_ => panic!("AppLayerFrameType can only be derived for enums"),
}
let expanded = quote! {
impl crate::applayer::AppLayerFrameType for #name {
fn from_u8(val: u8) -> Option<Self> {
match val {
#( #vals => Some(#name::#fields) ,)*
_ => None,
}
}
fn as_u8(&self) -> u8 {
match *self {
#( #name::#fields => #vals ,)*
}
}
fn to_cstring(&self) -> *const std::os::raw::c_char {
let s = match *self {
#( #name::#fields => #cstrings ,)*
};
s.as_ptr() as *const std::os::raw::c_char
}
fn from_str(s: &str) -> Option<#name> {
match s {
#( #names => Some(#name::#fields) ,)*
_ => None
}
}
}
};
proc_macro::TokenStream::from(expanded)
}
fn transform_name(name: &str) -> String {
let mut xname = String::new();
let chars: Vec<char> = name.chars().collect();
for i in 0..chars.len() {
if i > 0 && i < chars.len() - 1 && chars[i].is_uppercase() && chars[i + 1].is_lowercase() {
xname.push('.');
}
xname.push_str(&chars[i].to_lowercase().to_string());
}
xname
}
#[cfg(test)]
mod test {
use super::*;
#[test]
fn test_transform_name() |
}
| {
assert_eq!(transform_name("One"), "one");
assert_eq!(transform_name("OneTwo"), "one.two");
assert_eq!(transform_name("OneTwoThree"), "one.two.three");
assert_eq!(transform_name("NBSS"), "nbss");
assert_eq!(transform_name("NBSSHdr"), "nbss.hdr");
assert_eq!(transform_name("SMB3Data"), "smb3.data");
} | identifier_body |
applayerframetype.rs | /* Copyright (C) 2021 Open Information Security Foundation
*
* You can copy, redistribute or modify this Program under the terms of
* the GNU General Public License version 2 as published by the Free
* Software Foundation.
* | * You should have received a copy of the GNU General Public License
* version 2 along with this program; if not, write to the Free Software
* Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA
* 02110-1301, USA.
*/
extern crate proc_macro;
use proc_macro::TokenStream;
use quote::quote;
use syn::{self, parse_macro_input, DeriveInput};
pub fn derive_app_layer_frame_type(input: TokenStream) -> TokenStream {
let input = parse_macro_input!(input as DeriveInput);
let name = input.ident;
let mut fields = Vec::new();
let mut vals = Vec::new();
let mut cstrings = Vec::new();
let mut names = Vec::new();
match input.data {
syn::Data::Enum(ref data) => {
for (i, v) in (&data.variants).into_iter().enumerate() {
fields.push(v.ident.clone());
let name = transform_name(&v.ident.to_string());
let cname = format!("{}\0", name);
names.push(name);
cstrings.push(cname);
vals.push(i as u8);
}
}
_ => panic!("AppLayerFrameType can only be derived for enums"),
}
let expanded = quote! {
impl crate::applayer::AppLayerFrameType for #name {
fn from_u8(val: u8) -> Option<Self> {
match val {
#( #vals => Some(#name::#fields) ,)*
_ => None,
}
}
fn as_u8(&self) -> u8 {
match *self {
#( #name::#fields => #vals ,)*
}
}
fn to_cstring(&self) -> *const std::os::raw::c_char {
let s = match *self {
#( #name::#fields => #cstrings ,)*
};
s.as_ptr() as *const std::os::raw::c_char
}
fn from_str(s: &str) -> Option<#name> {
match s {
#( #names => Some(#name::#fields) ,)*
_ => None
}
}
}
};
proc_macro::TokenStream::from(expanded)
}
fn transform_name(name: &str) -> String {
let mut xname = String::new();
let chars: Vec<char> = name.chars().collect();
for i in 0..chars.len() {
if i > 0 && i < chars.len() - 1 && chars[i].is_uppercase() && chars[i + 1].is_lowercase() {
xname.push('.');
}
xname.push_str(&chars[i].to_lowercase().to_string());
}
xname
}
#[cfg(test)]
mod test {
use super::*;
#[test]
fn test_transform_name() {
assert_eq!(transform_name("One"), "one");
assert_eq!(transform_name("OneTwo"), "one.two");
assert_eq!(transform_name("OneTwoThree"), "one.two.three");
assert_eq!(transform_name("NBSS"), "nbss");
assert_eq!(transform_name("NBSSHdr"), "nbss.hdr");
assert_eq!(transform_name("SMB3Data"), "smb3.data");
}
} | * This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
* | random_line_split |
appointment_tags.py | #
# Newfies-Dialer License
# http://www.newfies-dialer.org
#
# This Source Code Form is subject to the terms of the Mozilla Public
# License, v. 2.0. If a copy of the MPL was not distributed with this file,
# You can obtain one at http://mozilla.org/MPL/2.0/.
#
# Copyright (C) 2011-2014 Star2Billing S.L.
#
# The Initial Developer of the Original Code is
# Arezqui Belaid <[email protected]>
#
from django.template.defaultfilters import register
from appointment.constants import EVENT_STATUS, ALARM_STATUS, ALARM_METHOD
@register.filter(name='event_status')
def event_status(value):
"""Event Status Templatetag"""
if not value:
return ''
STATUS = dict(EVENT_STATUS)
try:
return STATUS[value].encode('utf-8')
except:
return ''
@register.filter(name='alarm_status')
def alarm_status(value):
"""Alarm Status Templatetag"""
if not value:
return ''
STATUS = dict(ALARM_STATUS)
try:
return STATUS[value].encode('utf-8')
except:
return ''
@register.filter(name='alarm_method')
def alarm_method(value):
"""Alarm Method Templatetag"""
if not value:
|
METHOD = dict(ALARM_METHOD)
try:
return METHOD[value].encode('utf-8')
except:
return ''
| return '' | conditional_block |
appointment_tags.py | #
# Newfies-Dialer License
# http://www.newfies-dialer.org
#
# This Source Code Form is subject to the terms of the Mozilla Public
# License, v. 2.0. If a copy of the MPL was not distributed with this file,
# You can obtain one at http://mozilla.org/MPL/2.0/.
#
# Copyright (C) 2011-2014 Star2Billing S.L.
#
# The Initial Developer of the Original Code is
# Arezqui Belaid <[email protected]>
#
from django.template.defaultfilters import register
from appointment.constants import EVENT_STATUS, ALARM_STATUS, ALARM_METHOD
@register.filter(name='event_status')
def event_status(value):
"""Event Status Templatetag"""
if not value:
return ''
STATUS = dict(EVENT_STATUS)
try:
return STATUS[value].encode('utf-8')
except:
return ''
@register.filter(name='alarm_status')
def alarm_status(value):
"""Alarm Status Templatetag"""
if not value:
return ''
STATUS = dict(ALARM_STATUS)
try:
return STATUS[value].encode('utf-8')
except:
return ''
@register.filter(name='alarm_method')
def alarm_method(value):
"""Alarm Method Templatetag"""
if not value:
return ''
METHOD = dict(ALARM_METHOD)
try:
return METHOD[value].encode('utf-8')
except: | return '' | random_line_split |
|
appointment_tags.py | #
# Newfies-Dialer License
# http://www.newfies-dialer.org
#
# This Source Code Form is subject to the terms of the Mozilla Public
# License, v. 2.0. If a copy of the MPL was not distributed with this file,
# You can obtain one at http://mozilla.org/MPL/2.0/.
#
# Copyright (C) 2011-2014 Star2Billing S.L.
#
# The Initial Developer of the Original Code is
# Arezqui Belaid <[email protected]>
#
from django.template.defaultfilters import register
from appointment.constants import EVENT_STATUS, ALARM_STATUS, ALARM_METHOD
@register.filter(name='event_status')
def | (value):
"""Event Status Templatetag"""
if not value:
return ''
STATUS = dict(EVENT_STATUS)
try:
return STATUS[value].encode('utf-8')
except:
return ''
@register.filter(name='alarm_status')
def alarm_status(value):
"""Alarm Status Templatetag"""
if not value:
return ''
STATUS = dict(ALARM_STATUS)
try:
return STATUS[value].encode('utf-8')
except:
return ''
@register.filter(name='alarm_method')
def alarm_method(value):
"""Alarm Method Templatetag"""
if not value:
return ''
METHOD = dict(ALARM_METHOD)
try:
return METHOD[value].encode('utf-8')
except:
return ''
| event_status | identifier_name |
appointment_tags.py | #
# Newfies-Dialer License
# http://www.newfies-dialer.org
#
# This Source Code Form is subject to the terms of the Mozilla Public
# License, v. 2.0. If a copy of the MPL was not distributed with this file,
# You can obtain one at http://mozilla.org/MPL/2.0/.
#
# Copyright (C) 2011-2014 Star2Billing S.L.
#
# The Initial Developer of the Original Code is
# Arezqui Belaid <[email protected]>
#
from django.template.defaultfilters import register
from appointment.constants import EVENT_STATUS, ALARM_STATUS, ALARM_METHOD
@register.filter(name='event_status')
def event_status(value):
"""Event Status Templatetag"""
if not value:
return ''
STATUS = dict(EVENT_STATUS)
try:
return STATUS[value].encode('utf-8')
except:
return ''
@register.filter(name='alarm_status')
def alarm_status(value):
"""Alarm Status Templatetag"""
if not value:
return ''
STATUS = dict(ALARM_STATUS)
try:
return STATUS[value].encode('utf-8')
except:
return ''
@register.filter(name='alarm_method')
def alarm_method(value):
| """Alarm Method Templatetag"""
if not value:
return ''
METHOD = dict(ALARM_METHOD)
try:
return METHOD[value].encode('utf-8')
except:
return '' | identifier_body |
|
fetch.rs | /* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
use dom::bindings::codegen::Bindings::RequestBinding::RequestInfo;
use dom::bindings::codegen::Bindings::RequestBinding::RequestInit;
use dom::bindings::codegen::Bindings::ResponseBinding::ResponseBinding::ResponseMethods;
use dom::bindings::codegen::Bindings::ResponseBinding::ResponseType as DOMResponseType;
use dom::bindings::error::Error;
use dom::bindings::inheritance::Castable;
use dom::bindings::refcounted::{Trusted, TrustedPromise};
use dom::bindings::reflector::DomObject;
use dom::bindings::root::DomRoot;
use dom::bindings::trace::RootedTraceableBox;
use dom::globalscope::GlobalScope;
use dom::headers::Guard;
use dom::promise::Promise;
use dom::request::Request;
use dom::response::Response;
use dom::serviceworkerglobalscope::ServiceWorkerGlobalScope;
use ipc_channel::ipc;
use ipc_channel::router::ROUTER;
use js::jsapi::JSAutoCompartment;
use net_traits::{FetchChannels, FetchResponseListener, NetworkError};
use net_traits::{FilteredMetadata, FetchMetadata, Metadata};
use net_traits::CoreResourceMsg::Fetch as NetTraitsFetch;
use net_traits::request::{Request as NetTraitsRequest, ServiceWorkersMode};
use net_traits::request::RequestInit as NetTraitsRequestInit;
use network_listener::{NetworkListener, PreInvoke};
use servo_url::ServoUrl;
use std::mem;
use std::rc::Rc;
use std::sync::{Arc, Mutex};
use task_source::TaskSourceName;
struct FetchContext {
fetch_promise: Option<TrustedPromise>,
response_object: Trusted<Response>,
body: Vec<u8>,
}
/// RAII fetch canceller object. By default initialized to not having a canceller
/// in it, however you can ask it for a cancellation receiver to send to Fetch
/// in which case it will store the sender. You can manually cancel it
/// or let it cancel on Drop in that case.
#[derive(Default, JSTraceable, MallocSizeOf)]
pub struct FetchCanceller {
#[ignore_malloc_size_of = "channels are hard"]
cancel_chan: Option<ipc::IpcSender<()>>
}
impl FetchCanceller {
/// Create an empty FetchCanceller
pub fn new() -> Self {
Default::default()
}
/// Obtain an IpcReceiver to send over to Fetch, and initialize
/// the internal sender
pub fn initialize(&mut self) -> ipc::IpcReceiver<()> {
// cancel previous fetch
self.cancel();
let (rx, tx) = ipc::channel().unwrap();
self.cancel_chan = Some(rx);
tx
}
/// Cancel a fetch if it is ongoing
pub fn cancel(&mut self) {
if let Some(chan) = self.cancel_chan.take() {
// stop trying to make fetch happen
// it's not going to happen
// The receiver will be destroyed if the request has already completed;
// so we throw away the error. Cancellation is a courtesy call,
// we don't actually care if the other side heard.
let _ = chan.send(());
}
}
/// Use this if you don't want it to send a cancellation request
/// on drop (e.g. if the fetch completes)
pub fn ignore(&mut self) {
let _ = self.cancel_chan.take();
}
}
impl Drop for FetchCanceller {
fn drop(&mut self) {
self.cancel()
}
}
fn from_referrer_to_referrer_url(request: &NetTraitsRequest) -> Option<ServoUrl> {
request.referrer.to_url().map(|url| url.clone())
}
fn request_init_from_request(request: NetTraitsRequest) -> NetTraitsRequestInit {
NetTraitsRequestInit {
method: request.method.clone(),
url: request.url(),
headers: request.headers.clone(),
unsafe_request: request.unsafe_request,
body: request.body.clone(),
destination: request.destination,
synchronous: request.synchronous,
mode: request.mode.clone(),
use_cors_preflight: request.use_cors_preflight,
credentials_mode: request.credentials_mode,
use_url_credentials: request.use_url_credentials,
origin: GlobalScope::current().expect("No current global object").origin().immutable().clone(),
referrer_url: from_referrer_to_referrer_url(&request),
referrer_policy: request.referrer_policy,
pipeline_id: request.pipeline_id,
redirect_mode: request.redirect_mode,
cache_mode: request.cache_mode,
..NetTraitsRequestInit::default()
}
}
// https://fetch.spec.whatwg.org/#fetch-method
#[allow(unrooted_must_root)]
pub fn Fetch(global: &GlobalScope, input: RequestInfo, init: RootedTraceableBox<RequestInit>) -> Rc<Promise> {
let core_resource_thread = global.core_resource_thread();
// Step 1
let promise = Promise::new(global);
let response = Response::new(global);
// Step 2
let request = match Request::Constructor(global, input, init) {
Err(e) => {
promise.reject_error(e);
return promise;
},
Ok(r) => r.get_request(),
};
let mut request_init = request_init_from_request(request);
// Step 3
if global.downcast::<ServiceWorkerGlobalScope>().is_some() {
request_init.service_workers_mode = ServiceWorkersMode::Foreign;
}
// Step 4
response.Headers().set_guard(Guard::Immutable);
// Step 5
let (action_sender, action_receiver) = ipc::channel().unwrap();
let fetch_context = Arc::new(Mutex::new(FetchContext {
fetch_promise: Some(TrustedPromise::new(promise.clone())),
response_object: Trusted::new(&*response),
body: vec![],
}));
let listener = NetworkListener {
context: fetch_context,
task_source: global.networking_task_source(),
canceller: Some(global.task_canceller(TaskSourceName::Networking))
};
ROUTER.add_route(action_receiver.to_opaque(), Box::new(move |message| {
listener.notify_fetch(message.to().unwrap());
}));
core_resource_thread.send(
NetTraitsFetch(request_init, FetchChannels::ResponseMsg(action_sender, None))).unwrap();
promise
}
impl PreInvoke for FetchContext {}
impl FetchResponseListener for FetchContext {
fn process_request_body(&mut self) {
// TODO
}
fn process_request_eof(&mut self) {
// TODO
}
#[allow(unrooted_must_root)]
fn | (&mut self, fetch_metadata: Result<FetchMetadata, NetworkError>) {
let promise = self.fetch_promise.take().expect("fetch promise is missing").root();
// JSAutoCompartment needs to be manually made.
// Otherwise, Servo will crash.
let promise_cx = promise.global().get_cx();
let _ac = JSAutoCompartment::new(promise_cx, promise.reflector().get_jsobject().get());
match fetch_metadata {
// Step 4.1
Err(_) => {
promise.reject_error(Error::Type("Network error occurred".to_string()));
self.fetch_promise = Some(TrustedPromise::new(promise));
self.response_object.root().set_type(DOMResponseType::Error);
return;
},
// Step 4.2
Ok(metadata) => {
match metadata {
FetchMetadata::Unfiltered(m) => {
fill_headers_with_metadata(self.response_object.root(), m);
self.response_object.root().set_type(DOMResponseType::Default);
},
FetchMetadata::Filtered { filtered, .. } => match filtered {
FilteredMetadata::Basic(m) => {
fill_headers_with_metadata(self.response_object.root(), m);
self.response_object.root().set_type(DOMResponseType::Basic);
},
FilteredMetadata::Cors(m) => {
fill_headers_with_metadata(self.response_object.root(), m);
self.response_object.root().set_type(DOMResponseType::Cors);
},
FilteredMetadata::Opaque =>
self.response_object.root().set_type(DOMResponseType::Opaque),
FilteredMetadata::OpaqueRedirect =>
self.response_object.root().set_type(DOMResponseType::Opaqueredirect)
}
}
}
}
// Step 4.3
promise.resolve_native(&self.response_object.root());
self.fetch_promise = Some(TrustedPromise::new(promise));
}
fn process_response_chunk(&mut self, mut chunk: Vec<u8>) {
self.body.append(&mut chunk);
}
fn process_response_eof(&mut self, _response: Result<(), NetworkError>) {
let response = self.response_object.root();
let global = response.global();
let cx = global.get_cx();
let _ac = JSAutoCompartment::new(cx, global.reflector().get_jsobject().get());
response.finish(mem::replace(&mut self.body, vec![]));
// TODO
// ... trailerObject is not supported in Servo yet.
}
}
fn fill_headers_with_metadata(r: DomRoot<Response>, m: Metadata) {
r.set_headers(m.headers);
r.set_raw_status(m.status);
r.set_final_url(m.final_url | process_response | identifier_name |
fetch.rs | /* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
use dom::bindings::codegen::Bindings::RequestBinding::RequestInfo;
use dom::bindings::codegen::Bindings::RequestBinding::RequestInit;
use dom::bindings::codegen::Bindings::ResponseBinding::ResponseBinding::ResponseMethods;
use dom::bindings::codegen::Bindings::ResponseBinding::ResponseType as DOMResponseType;
use dom::bindings::error::Error;
use dom::bindings::inheritance::Castable;
use dom::bindings::refcounted::{Trusted, TrustedPromise};
use dom::bindings::reflector::DomObject;
use dom::bindings::root::DomRoot;
use dom::bindings::trace::RootedTraceableBox;
use dom::globalscope::GlobalScope;
use dom::headers::Guard;
use dom::promise::Promise;
use dom::request::Request;
use dom::response::Response;
use dom::serviceworkerglobalscope::ServiceWorkerGlobalScope;
use ipc_channel::ipc;
use ipc_channel::router::ROUTER;
use js::jsapi::JSAutoCompartment;
use net_traits::{FetchChannels, FetchResponseListener, NetworkError};
use net_traits::{FilteredMetadata, FetchMetadata, Metadata};
use net_traits::CoreResourceMsg::Fetch as NetTraitsFetch;
use net_traits::request::{Request as NetTraitsRequest, ServiceWorkersMode};
use net_traits::request::RequestInit as NetTraitsRequestInit;
use network_listener::{NetworkListener, PreInvoke};
use servo_url::ServoUrl;
use std::mem;
use std::rc::Rc;
use std::sync::{Arc, Mutex};
use task_source::TaskSourceName;
struct FetchContext {
fetch_promise: Option<TrustedPromise>,
response_object: Trusted<Response>,
body: Vec<u8>,
}
/// RAII fetch canceller object. By default initialized to not having a canceller
/// in it, however you can ask it for a cancellation receiver to send to Fetch
/// in which case it will store the sender. You can manually cancel it
/// or let it cancel on Drop in that case.
#[derive(Default, JSTraceable, MallocSizeOf)]
pub struct FetchCanceller {
#[ignore_malloc_size_of = "channels are hard"]
cancel_chan: Option<ipc::IpcSender<()>>
}
impl FetchCanceller {
/// Create an empty FetchCanceller
pub fn new() -> Self {
Default::default()
}
/// Obtain an IpcReceiver to send over to Fetch, and initialize
/// the internal sender
pub fn initialize(&mut self) -> ipc::IpcReceiver<()> {
// cancel previous fetch
self.cancel();
let (rx, tx) = ipc::channel().unwrap();
self.cancel_chan = Some(rx);
tx
}
/// Cancel a fetch if it is ongoing
pub fn cancel(&mut self) {
if let Some(chan) = self.cancel_chan.take() {
// stop trying to make fetch happen
// it's not going to happen
// The receiver will be destroyed if the request has already completed;
// so we throw away the error. Cancellation is a courtesy call,
// we don't actually care if the other side heard.
let _ = chan.send(());
}
}
/// Use this if you don't want it to send a cancellation request
/// on drop (e.g. if the fetch completes)
pub fn ignore(&mut self) {
let _ = self.cancel_chan.take();
}
}
impl Drop for FetchCanceller {
fn drop(&mut self) {
self.cancel()
}
}
fn from_referrer_to_referrer_url(request: &NetTraitsRequest) -> Option<ServoUrl> {
request.referrer.to_url().map(|url| url.clone())
}
fn request_init_from_request(request: NetTraitsRequest) -> NetTraitsRequestInit {
NetTraitsRequestInit {
method: request.method.clone(),
url: request.url(),
headers: request.headers.clone(),
unsafe_request: request.unsafe_request,
body: request.body.clone(),
destination: request.destination,
synchronous: request.synchronous,
mode: request.mode.clone(),
use_cors_preflight: request.use_cors_preflight,
credentials_mode: request.credentials_mode,
use_url_credentials: request.use_url_credentials,
origin: GlobalScope::current().expect("No current global object").origin().immutable().clone(),
referrer_url: from_referrer_to_referrer_url(&request),
referrer_policy: request.referrer_policy,
pipeline_id: request.pipeline_id,
redirect_mode: request.redirect_mode,
cache_mode: request.cache_mode,
..NetTraitsRequestInit::default()
}
}
// https://fetch.spec.whatwg.org/#fetch-method
#[allow(unrooted_must_root)]
pub fn Fetch(global: &GlobalScope, input: RequestInfo, init: RootedTraceableBox<RequestInit>) -> Rc<Promise> {
let core_resource_thread = global.core_resource_thread();
// Step 1
let promise = Promise::new(global);
let response = Response::new(global);
// Step 2
let request = match Request::Constructor(global, input, init) {
Err(e) => {
promise.reject_error(e);
return promise;
},
Ok(r) => r.get_request(),
};
let mut request_init = request_init_from_request(request);
// Step 3
if global.downcast::<ServiceWorkerGlobalScope>().is_some() {
request_init.service_workers_mode = ServiceWorkersMode::Foreign;
}
// Step 4
response.Headers().set_guard(Guard::Immutable);
// Step 5
let (action_sender, action_receiver) = ipc::channel().unwrap();
let fetch_context = Arc::new(Mutex::new(FetchContext {
fetch_promise: Some(TrustedPromise::new(promise.clone())),
response_object: Trusted::new(&*response),
body: vec![],
}));
let listener = NetworkListener {
context: fetch_context,
task_source: global.networking_task_source(),
canceller: Some(global.task_canceller(TaskSourceName::Networking))
};
ROUTER.add_route(action_receiver.to_opaque(), Box::new(move |message| {
listener.notify_fetch(message.to().unwrap());
}));
core_resource_thread.send(
NetTraitsFetch(request_init, FetchChannels::ResponseMsg(action_sender, None))).unwrap();
promise
}
impl PreInvoke for FetchContext {}
impl FetchResponseListener for FetchContext {
fn process_request_body(&mut self) {
// TODO
}
fn process_request_eof(&mut self) {
// TODO
}
#[allow(unrooted_must_root)]
fn process_response(&mut self, fetch_metadata: Result<FetchMetadata, NetworkError>) | self.response_object.root().set_type(DOMResponseType::Default);
},
FetchMetadata::Filtered { filtered, .. } => match filtered {
FilteredMetadata::Basic(m) => {
fill_headers_with_metadata(self.response_object.root(), m);
self.response_object.root().set_type(DOMResponseType::Basic);
},
FilteredMetadata::Cors(m) => {
fill_headers_with_metadata(self.response_object.root(), m);
self.response_object.root().set_type(DOMResponseType::Cors);
},
FilteredMetadata::Opaque =>
self.response_object.root().set_type(DOMResponseType::Opaque),
FilteredMetadata::OpaqueRedirect =>
self.response_object.root().set_type(DOMResponseType::Opaqueredirect)
}
}
}
}
// Step 4.3
promise.resolve_native(&self.response_object.root());
self.fetch_promise = Some(TrustedPromise::new(promise));
}
fn process_response_chunk(&mut self, mut chunk: Vec<u8>) {
self.body.append(&mut chunk);
}
fn process_response_eof(&mut self, _response: Result<(), NetworkError>) {
let response = self.response_object.root();
let global = response.global();
let cx = global.get_cx();
let _ac = JSAutoCompartment::new(cx, global.reflector().get_jsobject().get());
response.finish(mem::replace(&mut self.body, vec![]));
// TODO
// ... trailerObject is not supported in Servo yet.
}
}
fn fill_headers_with_metadata(r: DomRoot<Response>, m: Metadata) {
r.set_headers(m.headers);
r.set_raw_status(m.status);
r.set_final_url(m.final_url | {
let promise = self.fetch_promise.take().expect("fetch promise is missing").root();
// JSAutoCompartment needs to be manually made.
// Otherwise, Servo will crash.
let promise_cx = promise.global().get_cx();
let _ac = JSAutoCompartment::new(promise_cx, promise.reflector().get_jsobject().get());
match fetch_metadata {
// Step 4.1
Err(_) => {
promise.reject_error(Error::Type("Network error occurred".to_string()));
self.fetch_promise = Some(TrustedPromise::new(promise));
self.response_object.root().set_type(DOMResponseType::Error);
return;
},
// Step 4.2
Ok(metadata) => {
match metadata {
FetchMetadata::Unfiltered(m) => {
fill_headers_with_metadata(self.response_object.root(), m); | identifier_body |
fetch.rs | /* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
use dom::bindings::codegen::Bindings::RequestBinding::RequestInfo;
use dom::bindings::codegen::Bindings::RequestBinding::RequestInit;
use dom::bindings::codegen::Bindings::ResponseBinding::ResponseBinding::ResponseMethods;
use dom::bindings::codegen::Bindings::ResponseBinding::ResponseType as DOMResponseType;
use dom::bindings::error::Error;
use dom::bindings::inheritance::Castable;
use dom::bindings::refcounted::{Trusted, TrustedPromise};
use dom::bindings::reflector::DomObject;
use dom::bindings::root::DomRoot;
use dom::bindings::trace::RootedTraceableBox;
use dom::globalscope::GlobalScope;
use dom::headers::Guard;
use dom::promise::Promise;
use dom::request::Request;
use dom::response::Response;
use dom::serviceworkerglobalscope::ServiceWorkerGlobalScope;
use ipc_channel::ipc;
use ipc_channel::router::ROUTER;
use js::jsapi::JSAutoCompartment;
use net_traits::{FetchChannels, FetchResponseListener, NetworkError};
use net_traits::{FilteredMetadata, FetchMetadata, Metadata};
use net_traits::CoreResourceMsg::Fetch as NetTraitsFetch;
use net_traits::request::{Request as NetTraitsRequest, ServiceWorkersMode};
use net_traits::request::RequestInit as NetTraitsRequestInit;
use network_listener::{NetworkListener, PreInvoke};
use servo_url::ServoUrl;
use std::mem;
use std::rc::Rc;
use std::sync::{Arc, Mutex};
use task_source::TaskSourceName;
struct FetchContext {
fetch_promise: Option<TrustedPromise>,
response_object: Trusted<Response>,
body: Vec<u8>,
}
/// RAII fetch canceller object. By default initialized to not having a canceller
/// in it, however you can ask it for a cancellation receiver to send to Fetch
/// in which case it will store the sender. You can manually cancel it
/// or let it cancel on Drop in that case.
#[derive(Default, JSTraceable, MallocSizeOf)]
pub struct FetchCanceller {
#[ignore_malloc_size_of = "channels are hard"]
cancel_chan: Option<ipc::IpcSender<()>>
}
impl FetchCanceller {
/// Create an empty FetchCanceller
pub fn new() -> Self {
Default::default()
}
/// Obtain an IpcReceiver to send over to Fetch, and initialize
/// the internal sender
pub fn initialize(&mut self) -> ipc::IpcReceiver<()> {
// cancel previous fetch
self.cancel();
let (rx, tx) = ipc::channel().unwrap();
self.cancel_chan = Some(rx);
tx
}
/// Cancel a fetch if it is ongoing
pub fn cancel(&mut self) {
if let Some(chan) = self.cancel_chan.take() {
// stop trying to make fetch happen
// it's not going to happen
// The receiver will be destroyed if the request has already completed;
// so we throw away the error. Cancellation is a courtesy call,
// we don't actually care if the other side heard.
let _ = chan.send(());
}
}
/// Use this if you don't want it to send a cancellation request
/// on drop (e.g. if the fetch completes)
pub fn ignore(&mut self) {
let _ = self.cancel_chan.take();
}
}
impl Drop for FetchCanceller {
fn drop(&mut self) {
self.cancel()
}
}
fn from_referrer_to_referrer_url(request: &NetTraitsRequest) -> Option<ServoUrl> {
request.referrer.to_url().map(|url| url.clone())
}
fn request_init_from_request(request: NetTraitsRequest) -> NetTraitsRequestInit {
NetTraitsRequestInit {
method: request.method.clone(),
url: request.url(), | mode: request.mode.clone(),
use_cors_preflight: request.use_cors_preflight,
credentials_mode: request.credentials_mode,
use_url_credentials: request.use_url_credentials,
origin: GlobalScope::current().expect("No current global object").origin().immutable().clone(),
referrer_url: from_referrer_to_referrer_url(&request),
referrer_policy: request.referrer_policy,
pipeline_id: request.pipeline_id,
redirect_mode: request.redirect_mode,
cache_mode: request.cache_mode,
..NetTraitsRequestInit::default()
}
}
// https://fetch.spec.whatwg.org/#fetch-method
#[allow(unrooted_must_root)]
pub fn Fetch(global: &GlobalScope, input: RequestInfo, init: RootedTraceableBox<RequestInit>) -> Rc<Promise> {
let core_resource_thread = global.core_resource_thread();
// Step 1
let promise = Promise::new(global);
let response = Response::new(global);
// Step 2
let request = match Request::Constructor(global, input, init) {
Err(e) => {
promise.reject_error(e);
return promise;
},
Ok(r) => r.get_request(),
};
let mut request_init = request_init_from_request(request);
// Step 3
if global.downcast::<ServiceWorkerGlobalScope>().is_some() {
request_init.service_workers_mode = ServiceWorkersMode::Foreign;
}
// Step 4
response.Headers().set_guard(Guard::Immutable);
// Step 5
let (action_sender, action_receiver) = ipc::channel().unwrap();
let fetch_context = Arc::new(Mutex::new(FetchContext {
fetch_promise: Some(TrustedPromise::new(promise.clone())),
response_object: Trusted::new(&*response),
body: vec![],
}));
let listener = NetworkListener {
context: fetch_context,
task_source: global.networking_task_source(),
canceller: Some(global.task_canceller(TaskSourceName::Networking))
};
ROUTER.add_route(action_receiver.to_opaque(), Box::new(move |message| {
listener.notify_fetch(message.to().unwrap());
}));
core_resource_thread.send(
NetTraitsFetch(request_init, FetchChannels::ResponseMsg(action_sender, None))).unwrap();
promise
}
impl PreInvoke for FetchContext {}
impl FetchResponseListener for FetchContext {
fn process_request_body(&mut self) {
// TODO
}
fn process_request_eof(&mut self) {
// TODO
}
#[allow(unrooted_must_root)]
fn process_response(&mut self, fetch_metadata: Result<FetchMetadata, NetworkError>) {
let promise = self.fetch_promise.take().expect("fetch promise is missing").root();
// JSAutoCompartment needs to be manually made.
// Otherwise, Servo will crash.
let promise_cx = promise.global().get_cx();
let _ac = JSAutoCompartment::new(promise_cx, promise.reflector().get_jsobject().get());
match fetch_metadata {
// Step 4.1
Err(_) => {
promise.reject_error(Error::Type("Network error occurred".to_string()));
self.fetch_promise = Some(TrustedPromise::new(promise));
self.response_object.root().set_type(DOMResponseType::Error);
return;
},
// Step 4.2
Ok(metadata) => {
match metadata {
FetchMetadata::Unfiltered(m) => {
fill_headers_with_metadata(self.response_object.root(), m);
self.response_object.root().set_type(DOMResponseType::Default);
},
FetchMetadata::Filtered { filtered, .. } => match filtered {
FilteredMetadata::Basic(m) => {
fill_headers_with_metadata(self.response_object.root(), m);
self.response_object.root().set_type(DOMResponseType::Basic);
},
FilteredMetadata::Cors(m) => {
fill_headers_with_metadata(self.response_object.root(), m);
self.response_object.root().set_type(DOMResponseType::Cors);
},
FilteredMetadata::Opaque =>
self.response_object.root().set_type(DOMResponseType::Opaque),
FilteredMetadata::OpaqueRedirect =>
self.response_object.root().set_type(DOMResponseType::Opaqueredirect)
}
}
}
}
// Step 4.3
promise.resolve_native(&self.response_object.root());
self.fetch_promise = Some(TrustedPromise::new(promise));
}
fn process_response_chunk(&mut self, mut chunk: Vec<u8>) {
self.body.append(&mut chunk);
}
fn process_response_eof(&mut self, _response: Result<(), NetworkError>) {
let response = self.response_object.root();
let global = response.global();
let cx = global.get_cx();
let _ac = JSAutoCompartment::new(cx, global.reflector().get_jsobject().get());
response.finish(mem::replace(&mut self.body, vec![]));
// TODO
// ... trailerObject is not supported in Servo yet.
}
}
fn fill_headers_with_metadata(r: DomRoot<Response>, m: Metadata) {
r.set_headers(m.headers);
r.set_raw_status(m.status);
r.set_final_url(m.final_url);
| headers: request.headers.clone(),
unsafe_request: request.unsafe_request,
body: request.body.clone(),
destination: request.destination,
synchronous: request.synchronous, | random_line_split |
fetch.rs | /* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
use dom::bindings::codegen::Bindings::RequestBinding::RequestInfo;
use dom::bindings::codegen::Bindings::RequestBinding::RequestInit;
use dom::bindings::codegen::Bindings::ResponseBinding::ResponseBinding::ResponseMethods;
use dom::bindings::codegen::Bindings::ResponseBinding::ResponseType as DOMResponseType;
use dom::bindings::error::Error;
use dom::bindings::inheritance::Castable;
use dom::bindings::refcounted::{Trusted, TrustedPromise};
use dom::bindings::reflector::DomObject;
use dom::bindings::root::DomRoot;
use dom::bindings::trace::RootedTraceableBox;
use dom::globalscope::GlobalScope;
use dom::headers::Guard;
use dom::promise::Promise;
use dom::request::Request;
use dom::response::Response;
use dom::serviceworkerglobalscope::ServiceWorkerGlobalScope;
use ipc_channel::ipc;
use ipc_channel::router::ROUTER;
use js::jsapi::JSAutoCompartment;
use net_traits::{FetchChannels, FetchResponseListener, NetworkError};
use net_traits::{FilteredMetadata, FetchMetadata, Metadata};
use net_traits::CoreResourceMsg::Fetch as NetTraitsFetch;
use net_traits::request::{Request as NetTraitsRequest, ServiceWorkersMode};
use net_traits::request::RequestInit as NetTraitsRequestInit;
use network_listener::{NetworkListener, PreInvoke};
use servo_url::ServoUrl;
use std::mem;
use std::rc::Rc;
use std::sync::{Arc, Mutex};
use task_source::TaskSourceName;
struct FetchContext {
fetch_promise: Option<TrustedPromise>,
response_object: Trusted<Response>,
body: Vec<u8>,
}
/// RAII fetch canceller object. By default initialized to not having a canceller
/// in it, however you can ask it for a cancellation receiver to send to Fetch
/// in which case it will store the sender. You can manually cancel it
/// or let it cancel on Drop in that case.
#[derive(Default, JSTraceable, MallocSizeOf)]
pub struct FetchCanceller {
#[ignore_malloc_size_of = "channels are hard"]
cancel_chan: Option<ipc::IpcSender<()>>
}
impl FetchCanceller {
/// Create an empty FetchCanceller
pub fn new() -> Self {
Default::default()
}
/// Obtain an IpcReceiver to send over to Fetch, and initialize
/// the internal sender
pub fn initialize(&mut self) -> ipc::IpcReceiver<()> {
// cancel previous fetch
self.cancel();
let (rx, tx) = ipc::channel().unwrap();
self.cancel_chan = Some(rx);
tx
}
/// Cancel a fetch if it is ongoing
pub fn cancel(&mut self) {
if let Some(chan) = self.cancel_chan.take() |
}
/// Use this if you don't want it to send a cancellation request
/// on drop (e.g. if the fetch completes)
pub fn ignore(&mut self) {
let _ = self.cancel_chan.take();
}
}
impl Drop for FetchCanceller {
fn drop(&mut self) {
self.cancel()
}
}
fn from_referrer_to_referrer_url(request: &NetTraitsRequest) -> Option<ServoUrl> {
request.referrer.to_url().map(|url| url.clone())
}
fn request_init_from_request(request: NetTraitsRequest) -> NetTraitsRequestInit {
NetTraitsRequestInit {
method: request.method.clone(),
url: request.url(),
headers: request.headers.clone(),
unsafe_request: request.unsafe_request,
body: request.body.clone(),
destination: request.destination,
synchronous: request.synchronous,
mode: request.mode.clone(),
use_cors_preflight: request.use_cors_preflight,
credentials_mode: request.credentials_mode,
use_url_credentials: request.use_url_credentials,
origin: GlobalScope::current().expect("No current global object").origin().immutable().clone(),
referrer_url: from_referrer_to_referrer_url(&request),
referrer_policy: request.referrer_policy,
pipeline_id: request.pipeline_id,
redirect_mode: request.redirect_mode,
cache_mode: request.cache_mode,
..NetTraitsRequestInit::default()
}
}
// https://fetch.spec.whatwg.org/#fetch-method
#[allow(unrooted_must_root)]
pub fn Fetch(global: &GlobalScope, input: RequestInfo, init: RootedTraceableBox<RequestInit>) -> Rc<Promise> {
let core_resource_thread = global.core_resource_thread();
// Step 1
let promise = Promise::new(global);
let response = Response::new(global);
// Step 2
let request = match Request::Constructor(global, input, init) {
Err(e) => {
promise.reject_error(e);
return promise;
},
Ok(r) => r.get_request(),
};
let mut request_init = request_init_from_request(request);
// Step 3
if global.downcast::<ServiceWorkerGlobalScope>().is_some() {
request_init.service_workers_mode = ServiceWorkersMode::Foreign;
}
// Step 4
response.Headers().set_guard(Guard::Immutable);
// Step 5
let (action_sender, action_receiver) = ipc::channel().unwrap();
let fetch_context = Arc::new(Mutex::new(FetchContext {
fetch_promise: Some(TrustedPromise::new(promise.clone())),
response_object: Trusted::new(&*response),
body: vec![],
}));
let listener = NetworkListener {
context: fetch_context,
task_source: global.networking_task_source(),
canceller: Some(global.task_canceller(TaskSourceName::Networking))
};
ROUTER.add_route(action_receiver.to_opaque(), Box::new(move |message| {
listener.notify_fetch(message.to().unwrap());
}));
core_resource_thread.send(
NetTraitsFetch(request_init, FetchChannels::ResponseMsg(action_sender, None))).unwrap();
promise
}
impl PreInvoke for FetchContext {}
impl FetchResponseListener for FetchContext {
fn process_request_body(&mut self) {
// TODO
}
fn process_request_eof(&mut self) {
// TODO
}
#[allow(unrooted_must_root)]
fn process_response(&mut self, fetch_metadata: Result<FetchMetadata, NetworkError>) {
let promise = self.fetch_promise.take().expect("fetch promise is missing").root();
// JSAutoCompartment needs to be manually made.
// Otherwise, Servo will crash.
let promise_cx = promise.global().get_cx();
let _ac = JSAutoCompartment::new(promise_cx, promise.reflector().get_jsobject().get());
match fetch_metadata {
// Step 4.1
Err(_) => {
promise.reject_error(Error::Type("Network error occurred".to_string()));
self.fetch_promise = Some(TrustedPromise::new(promise));
self.response_object.root().set_type(DOMResponseType::Error);
return;
},
// Step 4.2
Ok(metadata) => {
match metadata {
FetchMetadata::Unfiltered(m) => {
fill_headers_with_metadata(self.response_object.root(), m);
self.response_object.root().set_type(DOMResponseType::Default);
},
FetchMetadata::Filtered { filtered, .. } => match filtered {
FilteredMetadata::Basic(m) => {
fill_headers_with_metadata(self.response_object.root(), m);
self.response_object.root().set_type(DOMResponseType::Basic);
},
FilteredMetadata::Cors(m) => {
fill_headers_with_metadata(self.response_object.root(), m);
self.response_object.root().set_type(DOMResponseType::Cors);
},
FilteredMetadata::Opaque =>
self.response_object.root().set_type(DOMResponseType::Opaque),
FilteredMetadata::OpaqueRedirect =>
self.response_object.root().set_type(DOMResponseType::Opaqueredirect)
}
}
}
}
// Step 4.3
promise.resolve_native(&self.response_object.root());
self.fetch_promise = Some(TrustedPromise::new(promise));
}
fn process_response_chunk(&mut self, mut chunk: Vec<u8>) {
self.body.append(&mut chunk);
}
fn process_response_eof(&mut self, _response: Result<(), NetworkError>) {
let response = self.response_object.root();
let global = response.global();
let cx = global.get_cx();
let _ac = JSAutoCompartment::new(cx, global.reflector().get_jsobject().get());
response.finish(mem::replace(&mut self.body, vec![]));
// TODO
// ... trailerObject is not supported in Servo yet.
}
}
fn fill_headers_with_metadata(r: DomRoot<Response>, m: Metadata) {
r.set_headers(m.headers);
r.set_raw_status(m.status);
r.set_final_url(m.final | {
// stop trying to make fetch happen
// it's not going to happen
// The receiver will be destroyed if the request has already completed;
// so we throw away the error. Cancellation is a courtesy call,
// we don't actually care if the other side heard.
let _ = chan.send(());
} | conditional_block |
nb.js | /*! Select2 4.0.0 | https://github.com/select2/select2/blob/master/LICENSE.md */
(function () {
if (jQuery && jQuery.fn && jQuery.fn.select2 && jQuery.fn.select2.amd)var e = jQuery.fn.select2.amd;
return e.define("select2/i18n/nb", [], function () {
return {
inputTooLong: function (e) {
var t = e.input.length - e.maximum;
return "Vennligst fjern " + t + " tegn"
}, inputTooShort: function (e) {
var t = e.minimum - e.input.length, n = "Vennligst skriv inn ";
return t > 1 ? n += " flere tegn" : n += " tegn til", n
}, loadingMore: function () {
return "Laster flere resultater…"
}, maximumSelected: function (e) {
return "Du kan velge maks " + e.maximum + " elementer"
}, noResults: function () {
return "Ingen treff"
}, searching: function () {
return "Søker…"
}
} | }), {define: e.define, require: e.require}
})(); | random_line_split |
|
path.ts | import { each } from '@antv/util';
import MaskBase from './base';
/**
* @ignore
* 多个点构成的 Path 辅助框 Action
*/
class PathMask extends MaskBase {
// 生成 mask 的路径
protected getMaskPath() {
const points = this.points;
const path = [];
if (points.length) {
each(points, (point, index) => {
if (index === 0) {
path.push(['M', point.x, point.y]);
} else {
path.push(['L', | 'L', points[0].x, points[0].y]);
}
return path;
}
protected getMaskAttrs() {
return {
path: this.getMaskPath(),
};
}
/**
* 添加一个点
*/
public addPoint() {
this.resize();
}
}
export default PathMask;
| point.x, point.y]);
}
});
path.push([ | conditional_block |
path.ts | import { each } from '@antv/util';
import MaskBase from './base'; | class PathMask extends MaskBase {
// 生成 mask 的路径
protected getMaskPath() {
const points = this.points;
const path = [];
if (points.length) {
each(points, (point, index) => {
if (index === 0) {
path.push(['M', point.x, point.y]);
} else {
path.push(['L', point.x, point.y]);
}
});
path.push(['L', points[0].x, points[0].y]);
}
return path;
}
protected getMaskAttrs() {
return {
path: this.getMaskPath(),
};
}
/**
* 添加一个点
*/
public addPoint() {
this.resize();
}
}
export default PathMask; |
/**
* @ignore
* 多个点构成的 Path 辅助框 Action
*/ | random_line_split |
path.ts | import { each } from '@antv/util';
import MaskBase from './base';
/**
* @ignore
* 多个点构成的 Path 辅助框 Action
*/
class PathMask extends MaskBase {
// 生成 mask 的路径
protected getMaskPath() {
const points = this.po | {
return {
path: this.getMaskPath(),
};
}
/**
* 添加一个点
*/
public addPoint() {
this.resize();
}
}
export default PathMask;
| ints;
const path = [];
if (points.length) {
each(points, (point, index) => {
if (index === 0) {
path.push(['M', point.x, point.y]);
} else {
path.push(['L', point.x, point.y]);
}
});
path.push(['L', points[0].x, points[0].y]);
}
return path;
}
protected getMaskAttrs() | identifier_body |
path.ts | import { each } from '@antv/util';
import MaskBase from './base';
/**
* @ignore
* 多个点构成的 Path 辅助框 Action
*/
class PathMask extends MaskBase {
// 生成 mask 的路径
protected getMaskPath() {
const points = this.points;
const path = [];
if (points.length) {
each(points, (point, index) => {
if (index === 0) {
path.push(['M', point.x, point.y]);
} else {
path.push(['L', point.x, point.y]);
}
});
path.push(['L', points[0].x, points[0].y]);
}
return path;
}
protected getMaskAttrs() {
return | : this.getMaskPath(),
};
}
/**
* 添加一个点
*/
public addPoint() {
this.resize();
}
}
export default PathMask;
| {
path | identifier_name |
passport-local-mongoose-tests.ts | /// <reference types="express" />
/**
* Created by Linus Brolin <https://github.com/linusbrolin/>.
*/
import {
Schema,
model,
Document,
PassportLocalDocument,
PassportLocalSchema,
PassportLocalModel,
PassportLocalOptions, | import * as passport from 'passport';
import { Strategy as LocalStrategy } from 'passport-local';
//#region Test Models
interface User extends PassportLocalDocument {
_id: string;
username: string;
hash: string;
salt: string;
attempts: number;
last: Date;
}
const UserSchema = new Schema({
username: String,
hash: String,
salt: String,
attempts: Number,
last: Date
}) as PassportLocalSchema;
let options: PassportLocalOptions = <PassportLocalOptions>{};
options.iterations = 25000;
options.keylen = 512;
options.digestAlgorithm = 'sha256';
options.interval = 100;
options.usernameField = 'username';
options.usernameUnique = true;
options.usernameLowerCase = true;
options.hashField = 'hash';
options.saltField = 'salt';
options.saltlen = 32;
options.attemptsField = 'attempts';
options.lastLoginField = 'last';
options.selectFields = 'undefined';
options.populateFields = 'undefined';
options.encoding = 'hex';
options.limitAttempts = false;
options.maxAttempts = Infinity;
options.passwordValidator = function(password: string, cb: (err: any) => void): void {};
options.usernameQueryFields = [];
let errorMessages: PassportLocalErrorMessages = {};
errorMessages.MissingPasswordError = 'No password was given';
errorMessages.AttemptTooSoonError = 'Account is currently locked. Try again later';
errorMessages.TooManyAttemptsError = 'Account locked due to too many failed login attempts';
errorMessages.NoSaltValueStoredError = 'Authentication not possible. No salt value stored';
errorMessages.IncorrectPasswordError = 'Password or username are incorrect';
errorMessages.IncorrectUsernameError = 'Password or username are incorrect';
errorMessages.MissingUsernameError = 'No username was given';
errorMessages.UserExistsError = 'A user with the given username is already registered';
options.errorMessages = errorMessages;
UserSchema.plugin(passportLocalMongoose, options);
interface UserModel<T extends Document> extends PassportLocalModel<T> {}
let UserModel: UserModel<User> = model<User>('User', UserSchema);
//#endregion
//#region Test Passport/Passport-Local
passport.use(UserModel.createStrategy());
passport.use('login', new LocalStrategy({
passReqToCallback: true,
usernameField: 'username',
passwordField: 'password'
},
(req: any, username: string, password: string, done: (err: any, res: any, msg?: any) => void) => {
process.nextTick(() => {
UserModel
.findOne({ 'username': username })
.exec((err: any, user: User) => {
if (err) {
console.log(err);
return done(err, null);
}
if (!user) {
console.log(errorMessages.IncorrectUsernameError);
return done(null, false, errorMessages.IncorrectUsernameError);
}
user.authenticate(password, function(autherr: any, authuser: User, autherrmsg: any) {
if (autherr) {
console.log(autherr);
return done(autherr, null);
}
if (!authuser) {
console.log(errorMessages.IncorrectPasswordError);
return done(null, false, errorMessages.IncorrectPasswordError);
}
return done(null, authuser);
});
});
});
})
);
passport.serializeUser(UserModel.serializeUser());
passport.deserializeUser(UserModel.deserializeUser());
let router: Router = Router();
router.post('/login', passport.authenticate('local'), function(req: Request, res: Response) {
res.redirect('/');
});
//#endregion | PassportLocalErrorMessages
} from 'mongoose';
import * as passportLocalMongoose from 'passport-local-mongoose';
import { Router, Request, Response } from 'express'; | random_line_split |
passport-local-mongoose-tests.ts | /// <reference types="express" />
/**
* Created by Linus Brolin <https://github.com/linusbrolin/>.
*/
import {
Schema,
model,
Document,
PassportLocalDocument,
PassportLocalSchema,
PassportLocalModel,
PassportLocalOptions,
PassportLocalErrorMessages
} from 'mongoose';
import * as passportLocalMongoose from 'passport-local-mongoose';
import { Router, Request, Response } from 'express';
import * as passport from 'passport';
import { Strategy as LocalStrategy } from 'passport-local';
//#region Test Models
interface User extends PassportLocalDocument {
_id: string;
username: string;
hash: string;
salt: string;
attempts: number;
last: Date;
}
const UserSchema = new Schema({
username: String,
hash: String,
salt: String,
attempts: Number,
last: Date
}) as PassportLocalSchema;
let options: PassportLocalOptions = <PassportLocalOptions>{};
options.iterations = 25000;
options.keylen = 512;
options.digestAlgorithm = 'sha256';
options.interval = 100;
options.usernameField = 'username';
options.usernameUnique = true;
options.usernameLowerCase = true;
options.hashField = 'hash';
options.saltField = 'salt';
options.saltlen = 32;
options.attemptsField = 'attempts';
options.lastLoginField = 'last';
options.selectFields = 'undefined';
options.populateFields = 'undefined';
options.encoding = 'hex';
options.limitAttempts = false;
options.maxAttempts = Infinity;
options.passwordValidator = function(password: string, cb: (err: any) => void): void {};
options.usernameQueryFields = [];
let errorMessages: PassportLocalErrorMessages = {};
errorMessages.MissingPasswordError = 'No password was given';
errorMessages.AttemptTooSoonError = 'Account is currently locked. Try again later';
errorMessages.TooManyAttemptsError = 'Account locked due to too many failed login attempts';
errorMessages.NoSaltValueStoredError = 'Authentication not possible. No salt value stored';
errorMessages.IncorrectPasswordError = 'Password or username are incorrect';
errorMessages.IncorrectUsernameError = 'Password or username are incorrect';
errorMessages.MissingUsernameError = 'No username was given';
errorMessages.UserExistsError = 'A user with the given username is already registered';
options.errorMessages = errorMessages;
UserSchema.plugin(passportLocalMongoose, options);
interface UserModel<T extends Document> extends PassportLocalModel<T> {}
let UserModel: UserModel<User> = model<User>('User', UserSchema);
//#endregion
//#region Test Passport/Passport-Local
passport.use(UserModel.createStrategy());
passport.use('login', new LocalStrategy({
passReqToCallback: true,
usernameField: 'username',
passwordField: 'password'
},
(req: any, username: string, password: string, done: (err: any, res: any, msg?: any) => void) => {
process.nextTick(() => {
UserModel
.findOne({ 'username': username })
.exec((err: any, user: User) => {
if (err) {
console.log(err);
return done(err, null);
}
if (!user) {
console.log(errorMessages.IncorrectUsernameError);
return done(null, false, errorMessages.IncorrectUsernameError);
}
user.authenticate(password, function(autherr: any, authuser: User, autherrmsg: any) {
if (autherr) {
console.log(autherr);
return done(autherr, null);
}
if (!authuser) |
return done(null, authuser);
});
});
});
})
);
passport.serializeUser(UserModel.serializeUser());
passport.deserializeUser(UserModel.deserializeUser());
let router: Router = Router();
router.post('/login', passport.authenticate('local'), function(req: Request, res: Response) {
res.redirect('/');
});
//#endregion
| {
console.log(errorMessages.IncorrectPasswordError);
return done(null, false, errorMessages.IncorrectPasswordError);
} | conditional_block |
autoregressive_layers.py | import numpy as np
import six
import tensorflow as tf
from tensorflow_probability.python.bijectors.masked_autoregressive import (
AutoregressiveNetwork, _create_degrees, _create_input_order,
_make_dense_autoregressive_masks, _make_masked_constraint,
_make_masked_initializer)
from tensorflow_probability.python.internal import prefer_static as ps
from tensorflow_probability.python.internal import tensorshape_util
__all__ = ['AutoregressiveDense']
class AutoregressiveDense(AutoregressiveNetwork):
""" Masked autoregressive network - a generalized version of MADE.
MADE is autoencoder which require equality in the number of dimensions
between input and output.
MAN enables these numbers to be different.
"""
def build(self, input_shape):
"""See tfkl.Layer.build."""
assert self._event_shape is not None, \
'Unlike MADE, MAN require specified event_shape at __init__'
# `event_shape` wasn't specied at __init__, so infer from `input_shape`.
self._input_size = input_shape[-1]
# Construct the masks.
self._input_order = _create_input_order(
self._input_size,
self._input_order_param,
)
units = [] if self._hidden_units is None else list(self._hidden_units)
units.append(self._event_size)
masks = _make_dense_autoregressive_masks(
params=self._params,
event_size=self._input_size,
hidden_units=units,
input_order=self._input_order,
hidden_degrees=self._hidden_degrees,
)
masks = masks[:-1]
masks[-1] = np.reshape(
np.tile(masks[-1][..., tf.newaxis], [1, 1, self._params]),
[masks[-1].shape[0], self._event_size * self._params])
self._masks = masks
# create placeholder for ouput
inputs = tf.keras.Input((self._input_size,), dtype=self.dtype)
outputs = [inputs]
if self._conditional:
conditional_input = tf.keras.Input((self._conditional_size,),
dtype=self.dtype)
inputs = [inputs, conditional_input]
# Input-to-hidden, hidden-to-hidden, and hidden-to-output layers:
# [..., self._event_size] -> [..., self._hidden_units[0]].
# [..., self._hidden_units[k-1]] -> [..., self._hidden_units[k]].
# [..., self._hidden_units[-1]] -> [..., event_size * self._params].
layer_output_sizes = list(
self._hidden_units) + [self._event_size * self._params]
for k in range(len(self._masks)):
autoregressive_output = tf.keras.layers.Dense(
layer_output_sizes[k],
activation=None,
use_bias=self._use_bias,
kernel_initializer=_make_masked_initializer(self._masks[k],
self._kernel_initializer),
bias_initializer=self._bias_initializer,
kernel_regularizer=self._kernel_regularizer,
bias_regularizer=self._bias_regularizer,
kernel_constraint=_make_masked_constraint(self._masks[k],
self._kernel_constraint),
bias_constraint=self._bias_constraint,
dtype=self.dtype)(outputs[-1])
if (self._conditional and
((self._conditional_layers == 'all_layers') or
((self._conditional_layers == 'first_layer') and (k == 0)))):
conditional_output = tf.keras.layers.Dense(
layer_output_sizes[k],
activation=None,
use_bias=False,
kernel_initializer=self._kernel_initializer,
bias_initializer=None,
kernel_regularizer=self._kernel_regularizer,
bias_regularizer=None,
kernel_constraint=self._kernel_constraint,
bias_constraint=None,
dtype=self.dtype)(conditional_input)
outputs.append(
tf.keras.layers.Add()([autoregressive_output, conditional_output]))
else:
outputs.append(autoregressive_output)
# last hidden layer, activation | if k + 1 < len(self._masks):
outputs.append(
tf.keras.layers.Activation(self._activation)(outputs[-1]))
self._network = tf.keras.models.Model(inputs=inputs, outputs=outputs[-1])
# Allow network to be called with inputs of shapes that don't match
# the specs of the network's input layers.
self._network.input_spec = None
# Record that the layer has been built.
super(AutoregressiveNetwork, self).build(input_shape)
def call(self, x, conditional_input=None):
"""Transforms the inputs and returns the outputs.
Suppose `x` has shape `batch_shape + event_shape` and `conditional_input`
has shape `conditional_batch_shape + conditional_event_shape`. Then, the
output shape is:
`broadcast(batch_shape, conditional_batch_shape) + event_shape + [params]`.
Also see `tfkl.Layer.call` for some generic discussion about Layer calling.
Args:
x: A `Tensor`. Primary input to the layer.
conditional_input: A `Tensor. Conditional input to the layer. This is
required iff the layer is conditional.
Returns:
y: A `Tensor`. The output of the layer. Note that the leading dimensions
follow broadcasting rules described above.
"""
with tf.name_scope(self.name or 'MaskedAutoregressiveNetwork_call'):
x = tf.convert_to_tensor(x, dtype=self.dtype, name='x')
input_shape = ps.shape(x)
if tensorshape_util.rank(x.shape) == 1:
x = x[tf.newaxis, ...]
if self._conditional:
if conditional_input is None:
raise ValueError('`conditional_input` must be passed as a named '
'argument')
conditional_input = tf.convert_to_tensor(conditional_input,
dtype=self.dtype,
name='conditional_input')
conditional_batch_shape = ps.shape(conditional_input)[:-1]
if tensorshape_util.rank(conditional_input.shape) == 1:
conditional_input = conditional_input[tf.newaxis, ...]
x = [x, conditional_input]
output_shape = ps.concat([
ps.broadcast_shape(conditional_batch_shape, input_shape[:-1]),
(self._event_size,)
],
axis=0)
else:
output_shape = ps.concat([input_shape[:-1], (self._event_size,)],
axis=0)
return tf.reshape(self._network(x),
tf.concat([output_shape, [self._params]], axis=0)) | random_line_split |
|
autoregressive_layers.py | import numpy as np
import six
import tensorflow as tf
from tensorflow_probability.python.bijectors.masked_autoregressive import (
AutoregressiveNetwork, _create_degrees, _create_input_order,
_make_dense_autoregressive_masks, _make_masked_constraint,
_make_masked_initializer)
from tensorflow_probability.python.internal import prefer_static as ps
from tensorflow_probability.python.internal import tensorshape_util
__all__ = ['AutoregressiveDense']
class AutoregressiveDense(AutoregressiveNetwork):
""" Masked autoregressive network - a generalized version of MADE.
MADE is autoencoder which require equality in the number of dimensions
between input and output.
MAN enables these numbers to be different.
"""
def build(self, input_shape):
"""See tfkl.Layer.build."""
assert self._event_shape is not None, \
'Unlike MADE, MAN require specified event_shape at __init__'
# `event_shape` wasn't specied at __init__, so infer from `input_shape`.
self._input_size = input_shape[-1]
# Construct the masks.
self._input_order = _create_input_order(
self._input_size,
self._input_order_param,
)
units = [] if self._hidden_units is None else list(self._hidden_units)
units.append(self._event_size)
masks = _make_dense_autoregressive_masks(
params=self._params,
event_size=self._input_size,
hidden_units=units,
input_order=self._input_order,
hidden_degrees=self._hidden_degrees,
)
masks = masks[:-1]
masks[-1] = np.reshape(
np.tile(masks[-1][..., tf.newaxis], [1, 1, self._params]),
[masks[-1].shape[0], self._event_size * self._params])
self._masks = masks
# create placeholder for ouput
inputs = tf.keras.Input((self._input_size,), dtype=self.dtype)
outputs = [inputs]
if self._conditional:
conditional_input = tf.keras.Input((self._conditional_size,),
dtype=self.dtype)
inputs = [inputs, conditional_input]
# Input-to-hidden, hidden-to-hidden, and hidden-to-output layers:
# [..., self._event_size] -> [..., self._hidden_units[0]].
# [..., self._hidden_units[k-1]] -> [..., self._hidden_units[k]].
# [..., self._hidden_units[-1]] -> [..., event_size * self._params].
layer_output_sizes = list(
self._hidden_units) + [self._event_size * self._params]
for k in range(len(self._masks)):
autoregressive_output = tf.keras.layers.Dense(
layer_output_sizes[k],
activation=None,
use_bias=self._use_bias,
kernel_initializer=_make_masked_initializer(self._masks[k],
self._kernel_initializer),
bias_initializer=self._bias_initializer,
kernel_regularizer=self._kernel_regularizer,
bias_regularizer=self._bias_regularizer,
kernel_constraint=_make_masked_constraint(self._masks[k],
self._kernel_constraint),
bias_constraint=self._bias_constraint,
dtype=self.dtype)(outputs[-1])
if (self._conditional and
((self._conditional_layers == 'all_layers') or
((self._conditional_layers == 'first_layer') and (k == 0)))):
conditional_output = tf.keras.layers.Dense(
layer_output_sizes[k],
activation=None,
use_bias=False,
kernel_initializer=self._kernel_initializer,
bias_initializer=None,
kernel_regularizer=self._kernel_regularizer,
bias_regularizer=None,
kernel_constraint=self._kernel_constraint,
bias_constraint=None,
dtype=self.dtype)(conditional_input)
outputs.append(
tf.keras.layers.Add()([autoregressive_output, conditional_output]))
else:
outputs.append(autoregressive_output)
# last hidden layer, activation
if k + 1 < len(self._masks):
outputs.append(
tf.keras.layers.Activation(self._activation)(outputs[-1]))
self._network = tf.keras.models.Model(inputs=inputs, outputs=outputs[-1])
# Allow network to be called with inputs of shapes that don't match
# the specs of the network's input layers.
self._network.input_spec = None
# Record that the layer has been built.
super(AutoregressiveNetwork, self).build(input_shape)
def call(self, x, conditional_input=None):
"""Transforms the inputs and returns the outputs.
Suppose `x` has shape `batch_shape + event_shape` and `conditional_input`
has shape `conditional_batch_shape + conditional_event_shape`. Then, the
output shape is:
`broadcast(batch_shape, conditional_batch_shape) + event_shape + [params]`.
Also see `tfkl.Layer.call` for some generic discussion about Layer calling.
Args:
x: A `Tensor`. Primary input to the layer.
conditional_input: A `Tensor. Conditional input to the layer. This is
required iff the layer is conditional.
Returns:
y: A `Tensor`. The output of the layer. Note that the leading dimensions
follow broadcasting rules described above.
"""
with tf.name_scope(self.name or 'MaskedAutoregressiveNetwork_call'):
x = tf.convert_to_tensor(x, dtype=self.dtype, name='x')
input_shape = ps.shape(x)
if tensorshape_util.rank(x.shape) == 1:
x = x[tf.newaxis, ...]
if self._conditional:
|
else:
output_shape = ps.concat([input_shape[:-1], (self._event_size,)],
axis=0)
return tf.reshape(self._network(x),
tf.concat([output_shape, [self._params]], axis=0))
| if conditional_input is None:
raise ValueError('`conditional_input` must be passed as a named '
'argument')
conditional_input = tf.convert_to_tensor(conditional_input,
dtype=self.dtype,
name='conditional_input')
conditional_batch_shape = ps.shape(conditional_input)[:-1]
if tensorshape_util.rank(conditional_input.shape) == 1:
conditional_input = conditional_input[tf.newaxis, ...]
x = [x, conditional_input]
output_shape = ps.concat([
ps.broadcast_shape(conditional_batch_shape, input_shape[:-1]),
(self._event_size,)
],
axis=0) | conditional_block |
autoregressive_layers.py | import numpy as np
import six
import tensorflow as tf
from tensorflow_probability.python.bijectors.masked_autoregressive import (
AutoregressiveNetwork, _create_degrees, _create_input_order,
_make_dense_autoregressive_masks, _make_masked_constraint,
_make_masked_initializer)
from tensorflow_probability.python.internal import prefer_static as ps
from tensorflow_probability.python.internal import tensorshape_util
__all__ = ['AutoregressiveDense']
class | (AutoregressiveNetwork):
""" Masked autoregressive network - a generalized version of MADE.
MADE is autoencoder which require equality in the number of dimensions
between input and output.
MAN enables these numbers to be different.
"""
def build(self, input_shape):
"""See tfkl.Layer.build."""
assert self._event_shape is not None, \
'Unlike MADE, MAN require specified event_shape at __init__'
# `event_shape` wasn't specied at __init__, so infer from `input_shape`.
self._input_size = input_shape[-1]
# Construct the masks.
self._input_order = _create_input_order(
self._input_size,
self._input_order_param,
)
units = [] if self._hidden_units is None else list(self._hidden_units)
units.append(self._event_size)
masks = _make_dense_autoregressive_masks(
params=self._params,
event_size=self._input_size,
hidden_units=units,
input_order=self._input_order,
hidden_degrees=self._hidden_degrees,
)
masks = masks[:-1]
masks[-1] = np.reshape(
np.tile(masks[-1][..., tf.newaxis], [1, 1, self._params]),
[masks[-1].shape[0], self._event_size * self._params])
self._masks = masks
# create placeholder for ouput
inputs = tf.keras.Input((self._input_size,), dtype=self.dtype)
outputs = [inputs]
if self._conditional:
conditional_input = tf.keras.Input((self._conditional_size,),
dtype=self.dtype)
inputs = [inputs, conditional_input]
# Input-to-hidden, hidden-to-hidden, and hidden-to-output layers:
# [..., self._event_size] -> [..., self._hidden_units[0]].
# [..., self._hidden_units[k-1]] -> [..., self._hidden_units[k]].
# [..., self._hidden_units[-1]] -> [..., event_size * self._params].
layer_output_sizes = list(
self._hidden_units) + [self._event_size * self._params]
for k in range(len(self._masks)):
autoregressive_output = tf.keras.layers.Dense(
layer_output_sizes[k],
activation=None,
use_bias=self._use_bias,
kernel_initializer=_make_masked_initializer(self._masks[k],
self._kernel_initializer),
bias_initializer=self._bias_initializer,
kernel_regularizer=self._kernel_regularizer,
bias_regularizer=self._bias_regularizer,
kernel_constraint=_make_masked_constraint(self._masks[k],
self._kernel_constraint),
bias_constraint=self._bias_constraint,
dtype=self.dtype)(outputs[-1])
if (self._conditional and
((self._conditional_layers == 'all_layers') or
((self._conditional_layers == 'first_layer') and (k == 0)))):
conditional_output = tf.keras.layers.Dense(
layer_output_sizes[k],
activation=None,
use_bias=False,
kernel_initializer=self._kernel_initializer,
bias_initializer=None,
kernel_regularizer=self._kernel_regularizer,
bias_regularizer=None,
kernel_constraint=self._kernel_constraint,
bias_constraint=None,
dtype=self.dtype)(conditional_input)
outputs.append(
tf.keras.layers.Add()([autoregressive_output, conditional_output]))
else:
outputs.append(autoregressive_output)
# last hidden layer, activation
if k + 1 < len(self._masks):
outputs.append(
tf.keras.layers.Activation(self._activation)(outputs[-1]))
self._network = tf.keras.models.Model(inputs=inputs, outputs=outputs[-1])
# Allow network to be called with inputs of shapes that don't match
# the specs of the network's input layers.
self._network.input_spec = None
# Record that the layer has been built.
super(AutoregressiveNetwork, self).build(input_shape)
def call(self, x, conditional_input=None):
"""Transforms the inputs and returns the outputs.
Suppose `x` has shape `batch_shape + event_shape` and `conditional_input`
has shape `conditional_batch_shape + conditional_event_shape`. Then, the
output shape is:
`broadcast(batch_shape, conditional_batch_shape) + event_shape + [params]`.
Also see `tfkl.Layer.call` for some generic discussion about Layer calling.
Args:
x: A `Tensor`. Primary input to the layer.
conditional_input: A `Tensor. Conditional input to the layer. This is
required iff the layer is conditional.
Returns:
y: A `Tensor`. The output of the layer. Note that the leading dimensions
follow broadcasting rules described above.
"""
with tf.name_scope(self.name or 'MaskedAutoregressiveNetwork_call'):
x = tf.convert_to_tensor(x, dtype=self.dtype, name='x')
input_shape = ps.shape(x)
if tensorshape_util.rank(x.shape) == 1:
x = x[tf.newaxis, ...]
if self._conditional:
if conditional_input is None:
raise ValueError('`conditional_input` must be passed as a named '
'argument')
conditional_input = tf.convert_to_tensor(conditional_input,
dtype=self.dtype,
name='conditional_input')
conditional_batch_shape = ps.shape(conditional_input)[:-1]
if tensorshape_util.rank(conditional_input.shape) == 1:
conditional_input = conditional_input[tf.newaxis, ...]
x = [x, conditional_input]
output_shape = ps.concat([
ps.broadcast_shape(conditional_batch_shape, input_shape[:-1]),
(self._event_size,)
],
axis=0)
else:
output_shape = ps.concat([input_shape[:-1], (self._event_size,)],
axis=0)
return tf.reshape(self._network(x),
tf.concat([output_shape, [self._params]], axis=0))
| AutoregressiveDense | identifier_name |
autoregressive_layers.py | import numpy as np
import six
import tensorflow as tf
from tensorflow_probability.python.bijectors.masked_autoregressive import (
AutoregressiveNetwork, _create_degrees, _create_input_order,
_make_dense_autoregressive_masks, _make_masked_constraint,
_make_masked_initializer)
from tensorflow_probability.python.internal import prefer_static as ps
from tensorflow_probability.python.internal import tensorshape_util
__all__ = ['AutoregressiveDense']
class AutoregressiveDense(AutoregressiveNetwork):
""" Masked autoregressive network - a generalized version of MADE.
MADE is autoencoder which require equality in the number of dimensions
between input and output.
MAN enables these numbers to be different.
"""
def build(self, input_shape):
| masks[-1] = np.reshape(
np.tile(masks[-1][..., tf.newaxis], [1, 1, self._params]),
[masks[-1].shape[0], self._event_size * self._params])
self._masks = masks
# create placeholder for ouput
inputs = tf.keras.Input((self._input_size,), dtype=self.dtype)
outputs = [inputs]
if self._conditional:
conditional_input = tf.keras.Input((self._conditional_size,),
dtype=self.dtype)
inputs = [inputs, conditional_input]
# Input-to-hidden, hidden-to-hidden, and hidden-to-output layers:
# [..., self._event_size] -> [..., self._hidden_units[0]].
# [..., self._hidden_units[k-1]] -> [..., self._hidden_units[k]].
# [..., self._hidden_units[-1]] -> [..., event_size * self._params].
layer_output_sizes = list(
self._hidden_units) + [self._event_size * self._params]
for k in range(len(self._masks)):
autoregressive_output = tf.keras.layers.Dense(
layer_output_sizes[k],
activation=None,
use_bias=self._use_bias,
kernel_initializer=_make_masked_initializer(self._masks[k],
self._kernel_initializer),
bias_initializer=self._bias_initializer,
kernel_regularizer=self._kernel_regularizer,
bias_regularizer=self._bias_regularizer,
kernel_constraint=_make_masked_constraint(self._masks[k],
self._kernel_constraint),
bias_constraint=self._bias_constraint,
dtype=self.dtype)(outputs[-1])
if (self._conditional and
((self._conditional_layers == 'all_layers') or
((self._conditional_layers == 'first_layer') and (k == 0)))):
conditional_output = tf.keras.layers.Dense(
layer_output_sizes[k],
activation=None,
use_bias=False,
kernel_initializer=self._kernel_initializer,
bias_initializer=None,
kernel_regularizer=self._kernel_regularizer,
bias_regularizer=None,
kernel_constraint=self._kernel_constraint,
bias_constraint=None,
dtype=self.dtype)(conditional_input)
outputs.append(
tf.keras.layers.Add()([autoregressive_output, conditional_output]))
else:
outputs.append(autoregressive_output)
# last hidden layer, activation
if k + 1 < len(self._masks):
outputs.append(
tf.keras.layers.Activation(self._activation)(outputs[-1]))
self._network = tf.keras.models.Model(inputs=inputs, outputs=outputs[-1])
# Allow network to be called with inputs of shapes that don't match
# the specs of the network's input layers.
self._network.input_spec = None
# Record that the layer has been built.
super(AutoregressiveNetwork, self).build(input_shape)
def call(self, x, conditional_input=None):
"""Transforms the inputs and returns the outputs.
Suppose `x` has shape `batch_shape + event_shape` and `conditional_input`
has shape `conditional_batch_shape + conditional_event_shape`. Then, the
output shape is:
`broadcast(batch_shape, conditional_batch_shape) + event_shape + [params]`.
Also see `tfkl.Layer.call` for some generic discussion about Layer calling.
Args:
x: A `Tensor`. Primary input to the layer.
conditional_input: A `Tensor. Conditional input to the layer. This is
required iff the layer is conditional.
Returns:
y: A `Tensor`. The output of the layer. Note that the leading dimensions
follow broadcasting rules described above.
"""
with tf.name_scope(self.name or 'MaskedAutoregressiveNetwork_call'):
x = tf.convert_to_tensor(x, dtype=self.dtype, name='x')
input_shape = ps.shape(x)
if tensorshape_util.rank(x.shape) == 1:
x = x[tf.newaxis, ...]
if self._conditional:
if conditional_input is None:
raise ValueError('`conditional_input` must be passed as a named '
'argument')
conditional_input = tf.convert_to_tensor(conditional_input,
dtype=self.dtype,
name='conditional_input')
conditional_batch_shape = ps.shape(conditional_input)[:-1]
if tensorshape_util.rank(conditional_input.shape) == 1:
conditional_input = conditional_input[tf.newaxis, ...]
x = [x, conditional_input]
output_shape = ps.concat([
ps.broadcast_shape(conditional_batch_shape, input_shape[:-1]),
(self._event_size,)
],
axis=0)
else:
output_shape = ps.concat([input_shape[:-1], (self._event_size,)],
axis=0)
return tf.reshape(self._network(x),
tf.concat([output_shape, [self._params]], axis=0))
| """See tfkl.Layer.build."""
assert self._event_shape is not None, \
'Unlike MADE, MAN require specified event_shape at __init__'
# `event_shape` wasn't specied at __init__, so infer from `input_shape`.
self._input_size = input_shape[-1]
# Construct the masks.
self._input_order = _create_input_order(
self._input_size,
self._input_order_param,
)
units = [] if self._hidden_units is None else list(self._hidden_units)
units.append(self._event_size)
masks = _make_dense_autoregressive_masks(
params=self._params,
event_size=self._input_size,
hidden_units=units,
input_order=self._input_order,
hidden_degrees=self._hidden_degrees,
)
masks = masks[:-1] | identifier_body |
zippy.component.ts | import {Component, Input} from '@angular/core';
@Component({
selector: 'zippy',
//templateUrl:'../app/html/zippyComponent.html',
template: `
<div class="zippy">
<div class="zippyHeader" (click)="chevronClick()">
{{title}}
<i class="pull-right glyphicon"
[ngClass]= "{
'glyphicon-chevron-down':!isSelected,
'glyphicon-chevron-up':isSelected
}"
></i>
</div>
<div *ngIf="isSelected" class="zippyBody">
<ng-content></ng-content>
</div>
</div> `,
styles: [``],
styleUrls: ['../../assets/stylesheets/styles.css'] | })
/**
*
*/
export class ZippyComponent{
constructor(){
console.log(this.title + '..........' + this.isSelected);
}
ngOnInit(){
this.isSelected = true ? this.priority == 1 : this.isSelected;
console.log(this.title + '..........' + this.isSelected + '....' + this.priority);
}
isSelected= false;
@Input() title: string;
@Input() priority: Number;
chevronClick(){
this.isSelected = !this.isSelected;
}
} | , providers: []
| random_line_split |
zippy.component.ts | import {Component, Input} from '@angular/core';
@Component({
selector: 'zippy',
//templateUrl:'../app/html/zippyComponent.html',
template: `
<div class="zippy">
<div class="zippyHeader" (click)="chevronClick()">
{{title}}
<i class="pull-right glyphicon"
[ngClass]= "{
'glyphicon-chevron-down':!isSelected,
'glyphicon-chevron-up':isSelected
}"
></i>
</div>
<div *ngIf="isSelected" class="zippyBody">
<ng-content></ng-content>
</div>
</div> `,
styles: [``],
styleUrls: ['../../assets/stylesheets/styles.css']
, providers: []
})
/**
*
*/
export class ZippyComponent{
| (){
console.log(this.title + '..........' + this.isSelected);
}
ngOnInit(){
this.isSelected = true ? this.priority == 1 : this.isSelected;
console.log(this.title + '..........' + this.isSelected + '....' + this.priority);
}
isSelected= false;
@Input() title: string;
@Input() priority: Number;
chevronClick(){
this.isSelected = !this.isSelected;
}
} | constructor | identifier_name |
zippy.component.ts | import {Component, Input} from '@angular/core';
@Component({
selector: 'zippy',
//templateUrl:'../app/html/zippyComponent.html',
template: `
<div class="zippy">
<div class="zippyHeader" (click)="chevronClick()">
{{title}}
<i class="pull-right glyphicon"
[ngClass]= "{
'glyphicon-chevron-down':!isSelected,
'glyphicon-chevron-up':isSelected
}"
></i>
</div>
<div *ngIf="isSelected" class="zippyBody">
<ng-content></ng-content>
</div>
</div> `,
styles: [``],
styleUrls: ['../../assets/stylesheets/styles.css']
, providers: []
})
/**
*
*/
export class ZippyComponent{
constructor() |
ngOnInit(){
this.isSelected = true ? this.priority == 1 : this.isSelected;
console.log(this.title + '..........' + this.isSelected + '....' + this.priority);
}
isSelected= false;
@Input() title: string;
@Input() priority: Number;
chevronClick(){
this.isSelected = !this.isSelected;
}
} | {
console.log(this.title + '..........' + this.isSelected);
} | identifier_body |
query-language.ts | import {customElement, bindable} from 'aurelia-templating';
import {inject} from 'aurelia-dependency-injection';
import {Utils, DomUtils} from 'marvelous-aurelia-core/utils';
import {AureliaUtils} from 'marvelous-aurelia-core/aureliaUtils';
@customElement('m-query-language')
@inject(Element, AureliaUtils)
export class QueryLanguage {
@bindable({ attribute: 'options' }) options: IQueryLanguageOptions;
autoCompletionResult: IAutoCompletionResult;
selectedCompletionIndex: number = 0;
errors: string[] = [];
private _subs = [];
private _queryInputElement: HTMLInputElement;
private _preventFromFocusOut = false;
private _loading: boolean;
private _lastSubmittedQuery: string;
query = '';
constructor(private _element: Element, private _aureliaUtils: AureliaUtils) {
}
| () {
this.validateOptions();
this.createOptions();
this.registerInputHandlers();
}
detached() {
this._subs.forEach(x => x());
this._subs = [];
}
submit() {
if (this._lastSubmittedQuery === this.query) {
// submits only if query has some changes
return;
}
let promise = this.options.onSubmit();
if (!promise || !(promise.then instanceof Function)) {
return;
}
this._lastSubmittedQuery = this.query;
this._loading = true;
promise.then((x) => {
this._loading = false;
if (!x) {
return;
}
// if wrapped with DataSourceResult<T>
// then uses `queryLanguage`
// otherwise result is assumed to be QueryLanguageFilterResult<T>
let result = x.queryLanguage || x;
this.errors = result.errors || [];
}, () => this._loading = false);
}
createOptions() {
let o = this.options;
o.inlineButton = o.inlineButton === undefined ? true : o.inlineButton;
o.inlineButtonText = o.inlineButtonText || 'Apply';
o.submitOnFocusOut = o.submitOnFocusOut === undefined ? false : o.submitOnFocusOut;
o.onSubmit = o.onSubmit || Utils.noop;
}
validateOptions() {
if (!this.options) {
throw new Error('`options` attribute is required.');
}
}
autoComplete() {
let result = this.autoCompletionResult;
let current = result.Completions[this.selectedCompletionIndex];
let newQuery = this.query.substr(0, result.StartPosition);
newQuery += current.Text;
let caretPosition = newQuery.length;
newQuery += this.query.substr(result.StartPosition + result.Length);
this.query = newQuery;
this.hideCompletions();
DomUtils.setCaretPosition(this._queryInputElement, caretPosition);
}
anyCompletion() {
if (!this.autoCompletionResult || !this.autoCompletionResult.Completions) {
return false;
}
return this.autoCompletionResult.Completions.length != 0;
}
hideCompletions() {
this.selectedCompletionIndex = 0;
if (this.autoCompletionResult)
this.autoCompletionResult.Completions = [];
}
select(completion: IAutoCompletionRow) {
this.selectedCompletionIndex = this.autoCompletionResult.Completions.indexOf(completion);
}
selectNext() {
if (this.selectedCompletionIndex == this.autoCompletionResult.Completions.length - 1) {
this.selectedCompletionIndex = 0;
return;
}
this.selectedCompletionIndex++;
}
selectPrevious() {
if (this.selectedCompletionIndex == 0) {
this.selectedCompletionIndex = this.autoCompletionResult.Completions.length - 1;
return;
}
this.selectedCompletionIndex--;
}
refreshCompletions(caretPosition = DomUtils.getCaretPosition(this._queryInputElement)) {
// TODO: debaunce
if (!this.options.autoComplete) {
return;
}
let promise = undefined;
let params = {
query: this.query,
caretPosition: caretPosition,
skip: 0
}
let func = Utils.createReadFunction(this.options.autoComplete, {
allowData: false,
dataMissingError: '`autoComplete` has to be either url or a function.',
shouldReturnUrlOrPromiseError: '`autoComplete` function should return url or promise.'
});
// TODO: race condition! only last one should resolve
func(params).then((x: IAutoCompletionResult) => {
this.selectedCompletionIndex = 0;
this.autoCompletionResult = x;
});
}
onCompletionClick(ev) {
Utils.preventDefaultAndPropagation(ev);
this.autoComplete();
}
private registerInputHandlers() {
let isInputClick = false;
this._subs.push(DomUtils.addEventListener(this._queryInputElement, "keyup", (ev: KeyboardEvent) => {
switch (ev.which) {
case 37: // Left
case 39: // Right
case 36: // Home
case 35: // End
this.refreshCompletions();
break;
case 38: // Up
case 40: // Down
if (!this.anyCompletion()) {
this.refreshCompletions();
}
break;
case 27: // Esc
this.hideCompletions();
break;
case 16: // Shift
case 17: // Ctrl
case 18: // Alt
case 255: // Fn
case 13: // Enter
case 9: // Tab
break;
default:
this.refreshCompletions();
break;
}
}));
this._subs.push(DomUtils.addEventListener(this._queryInputElement, "keydown", (ev: KeyboardEvent) => {
if (!this.anyCompletion()) {
if (ev.which === 13) { // Enter
this.submit();
Utils.preventDefaultAndPropagation(ev);
}
return;
}
switch (ev.which) {
case 38: // Up
this.selectPrevious();
Utils.preventDefaultAndPropagation(ev);
break;
case 40: // Down
this.selectNext();
Utils.preventDefaultAndPropagation(ev);
break;
case 13: // Enter
case 9: // Tab
this.autoComplete();
Utils.preventDefaultAndPropagation(ev);
break;
}
}));
this._subs.push(DomUtils.addEventListener(this._queryInputElement, "mouseup", (ev: KeyboardEvent) => {
this.refreshCompletions();
}));
this._subs.push(DomUtils.addEventListener(this._queryInputElement, "mousedown", (ev: KeyboardEvent) => {
isInputClick = true;
}));
this._subs.push(DomUtils.addEventListener(this._queryInputElement, "focus", (ev) => {
if (!isInputClick && !this._preventFromFocusOut) {
this.refreshCompletions();
}
isInputClick = false;
}));
this._subs.push(DomUtils.addEventListener(this._queryInputElement, "blur", (ev) => {
if (this._preventFromFocusOut) {
Utils.preventDefaultAndPropagation(ev);
return;
}
this.hideCompletions();
isInputClick = false;
if (this.options.submitOnFocusOut) {
this.submit();
}
}));
}
}
export interface IQueryLanguageOptions {
autoComplete?: ((IAutoCompletionParams) => any) | string;
inlineButton?: boolean;
inlineButtonText?: string;
submitOnFocusOut?: boolean;
onSubmit?: () => any;
}
export interface IAutoCompletionParams {
query: string,
caretPosition: number,
skip: number
}
export interface IAutoCompletionResult {
StartPosition: number,
Length: number,
Completions: IAutoCompletionRow[],
IsNextPageAvailable: boolean,
Errors: string[],
HasErrors: boolean
}
export interface IAutoCompletionRow {
Text: string,
Group: string
} | attached | identifier_name |
query-language.ts | import {customElement, bindable} from 'aurelia-templating';
import {inject} from 'aurelia-dependency-injection';
import {Utils, DomUtils} from 'marvelous-aurelia-core/utils';
import {AureliaUtils} from 'marvelous-aurelia-core/aureliaUtils';
@customElement('m-query-language')
@inject(Element, AureliaUtils)
export class QueryLanguage {
@bindable({ attribute: 'options' }) options: IQueryLanguageOptions;
autoCompletionResult: IAutoCompletionResult;
selectedCompletionIndex: number = 0;
errors: string[] = [];
private _subs = [];
private _queryInputElement: HTMLInputElement;
private _preventFromFocusOut = false;
private _loading: boolean;
private _lastSubmittedQuery: string;
query = '';
constructor(private _element: Element, private _aureliaUtils: AureliaUtils) {
}
attached() {
this.validateOptions();
this.createOptions();
this.registerInputHandlers();
}
detached() {
this._subs.forEach(x => x());
this._subs = [];
}
submit() {
if (this._lastSubmittedQuery === this.query) {
// submits only if query has some changes
return;
}
let promise = this.options.onSubmit();
if (!promise || !(promise.then instanceof Function)) {
return;
}
this._lastSubmittedQuery = this.query;
this._loading = true;
promise.then((x) => {
this._loading = false;
if (!x) {
return;
}
// if wrapped with DataSourceResult<T>
// then uses `queryLanguage`
// otherwise result is assumed to be QueryLanguageFilterResult<T>
let result = x.queryLanguage || x;
this.errors = result.errors || [];
}, () => this._loading = false);
}
createOptions() {
let o = this.options;
o.inlineButton = o.inlineButton === undefined ? true : o.inlineButton;
o.inlineButtonText = o.inlineButtonText || 'Apply';
o.submitOnFocusOut = o.submitOnFocusOut === undefined ? false : o.submitOnFocusOut;
o.onSubmit = o.onSubmit || Utils.noop;
}
validateOptions() {
if (!this.options) {
throw new Error('`options` attribute is required.');
}
}
autoComplete() {
let result = this.autoCompletionResult;
let current = result.Completions[this.selectedCompletionIndex];
let newQuery = this.query.substr(0, result.StartPosition);
newQuery += current.Text;
let caretPosition = newQuery.length;
newQuery += this.query.substr(result.StartPosition + result.Length);
this.query = newQuery;
this.hideCompletions();
DomUtils.setCaretPosition(this._queryInputElement, caretPosition);
}
anyCompletion() {
if (!this.autoCompletionResult || !this.autoCompletionResult.Completions) {
return false;
}
return this.autoCompletionResult.Completions.length != 0;
}
hideCompletions() |
select(completion: IAutoCompletionRow) {
this.selectedCompletionIndex = this.autoCompletionResult.Completions.indexOf(completion);
}
selectNext() {
if (this.selectedCompletionIndex == this.autoCompletionResult.Completions.length - 1) {
this.selectedCompletionIndex = 0;
return;
}
this.selectedCompletionIndex++;
}
selectPrevious() {
if (this.selectedCompletionIndex == 0) {
this.selectedCompletionIndex = this.autoCompletionResult.Completions.length - 1;
return;
}
this.selectedCompletionIndex--;
}
refreshCompletions(caretPosition = DomUtils.getCaretPosition(this._queryInputElement)) {
// TODO: debaunce
if (!this.options.autoComplete) {
return;
}
let promise = undefined;
let params = {
query: this.query,
caretPosition: caretPosition,
skip: 0
}
let func = Utils.createReadFunction(this.options.autoComplete, {
allowData: false,
dataMissingError: '`autoComplete` has to be either url or a function.',
shouldReturnUrlOrPromiseError: '`autoComplete` function should return url or promise.'
});
// TODO: race condition! only last one should resolve
func(params).then((x: IAutoCompletionResult) => {
this.selectedCompletionIndex = 0;
this.autoCompletionResult = x;
});
}
onCompletionClick(ev) {
Utils.preventDefaultAndPropagation(ev);
this.autoComplete();
}
private registerInputHandlers() {
let isInputClick = false;
this._subs.push(DomUtils.addEventListener(this._queryInputElement, "keyup", (ev: KeyboardEvent) => {
switch (ev.which) {
case 37: // Left
case 39: // Right
case 36: // Home
case 35: // End
this.refreshCompletions();
break;
case 38: // Up
case 40: // Down
if (!this.anyCompletion()) {
this.refreshCompletions();
}
break;
case 27: // Esc
this.hideCompletions();
break;
case 16: // Shift
case 17: // Ctrl
case 18: // Alt
case 255: // Fn
case 13: // Enter
case 9: // Tab
break;
default:
this.refreshCompletions();
break;
}
}));
this._subs.push(DomUtils.addEventListener(this._queryInputElement, "keydown", (ev: KeyboardEvent) => {
if (!this.anyCompletion()) {
if (ev.which === 13) { // Enter
this.submit();
Utils.preventDefaultAndPropagation(ev);
}
return;
}
switch (ev.which) {
case 38: // Up
this.selectPrevious();
Utils.preventDefaultAndPropagation(ev);
break;
case 40: // Down
this.selectNext();
Utils.preventDefaultAndPropagation(ev);
break;
case 13: // Enter
case 9: // Tab
this.autoComplete();
Utils.preventDefaultAndPropagation(ev);
break;
}
}));
this._subs.push(DomUtils.addEventListener(this._queryInputElement, "mouseup", (ev: KeyboardEvent) => {
this.refreshCompletions();
}));
this._subs.push(DomUtils.addEventListener(this._queryInputElement, "mousedown", (ev: KeyboardEvent) => {
isInputClick = true;
}));
this._subs.push(DomUtils.addEventListener(this._queryInputElement, "focus", (ev) => {
if (!isInputClick && !this._preventFromFocusOut) {
this.refreshCompletions();
}
isInputClick = false;
}));
this._subs.push(DomUtils.addEventListener(this._queryInputElement, "blur", (ev) => {
if (this._preventFromFocusOut) {
Utils.preventDefaultAndPropagation(ev);
return;
}
this.hideCompletions();
isInputClick = false;
if (this.options.submitOnFocusOut) {
this.submit();
}
}));
}
}
export interface IQueryLanguageOptions {
autoComplete?: ((IAutoCompletionParams) => any) | string;
inlineButton?: boolean;
inlineButtonText?: string;
submitOnFocusOut?: boolean;
onSubmit?: () => any;
}
export interface IAutoCompletionParams {
query: string,
caretPosition: number,
skip: number
}
export interface IAutoCompletionResult {
StartPosition: number,
Length: number,
Completions: IAutoCompletionRow[],
IsNextPageAvailable: boolean,
Errors: string[],
HasErrors: boolean
}
export interface IAutoCompletionRow {
Text: string,
Group: string
} | {
this.selectedCompletionIndex = 0;
if (this.autoCompletionResult)
this.autoCompletionResult.Completions = [];
} | identifier_body |
query-language.ts | import {customElement, bindable} from 'aurelia-templating';
import {inject} from 'aurelia-dependency-injection';
import {Utils, DomUtils} from 'marvelous-aurelia-core/utils';
import {AureliaUtils} from 'marvelous-aurelia-core/aureliaUtils';
@customElement('m-query-language')
@inject(Element, AureliaUtils)
export class QueryLanguage {
@bindable({ attribute: 'options' }) options: IQueryLanguageOptions;
autoCompletionResult: IAutoCompletionResult;
selectedCompletionIndex: number = 0;
errors: string[] = [];
private _subs = [];
private _queryInputElement: HTMLInputElement;
private _preventFromFocusOut = false;
private _loading: boolean;
private _lastSubmittedQuery: string;
query = '';
constructor(private _element: Element, private _aureliaUtils: AureliaUtils) {
}
attached() {
this.validateOptions();
this.createOptions();
this.registerInputHandlers();
}
detached() {
this._subs.forEach(x => x());
this._subs = [];
}
submit() {
if (this._lastSubmittedQuery === this.query) {
// submits only if query has some changes
return;
}
let promise = this.options.onSubmit();
if (!promise || !(promise.then instanceof Function)) {
return;
} |
this._loading = true;
promise.then((x) => {
this._loading = false;
if (!x) {
return;
}
// if wrapped with DataSourceResult<T>
// then uses `queryLanguage`
// otherwise result is assumed to be QueryLanguageFilterResult<T>
let result = x.queryLanguage || x;
this.errors = result.errors || [];
}, () => this._loading = false);
}
createOptions() {
let o = this.options;
o.inlineButton = o.inlineButton === undefined ? true : o.inlineButton;
o.inlineButtonText = o.inlineButtonText || 'Apply';
o.submitOnFocusOut = o.submitOnFocusOut === undefined ? false : o.submitOnFocusOut;
o.onSubmit = o.onSubmit || Utils.noop;
}
validateOptions() {
if (!this.options) {
throw new Error('`options` attribute is required.');
}
}
autoComplete() {
let result = this.autoCompletionResult;
let current = result.Completions[this.selectedCompletionIndex];
let newQuery = this.query.substr(0, result.StartPosition);
newQuery += current.Text;
let caretPosition = newQuery.length;
newQuery += this.query.substr(result.StartPosition + result.Length);
this.query = newQuery;
this.hideCompletions();
DomUtils.setCaretPosition(this._queryInputElement, caretPosition);
}
anyCompletion() {
if (!this.autoCompletionResult || !this.autoCompletionResult.Completions) {
return false;
}
return this.autoCompletionResult.Completions.length != 0;
}
hideCompletions() {
this.selectedCompletionIndex = 0;
if (this.autoCompletionResult)
this.autoCompletionResult.Completions = [];
}
select(completion: IAutoCompletionRow) {
this.selectedCompletionIndex = this.autoCompletionResult.Completions.indexOf(completion);
}
selectNext() {
if (this.selectedCompletionIndex == this.autoCompletionResult.Completions.length - 1) {
this.selectedCompletionIndex = 0;
return;
}
this.selectedCompletionIndex++;
}
selectPrevious() {
if (this.selectedCompletionIndex == 0) {
this.selectedCompletionIndex = this.autoCompletionResult.Completions.length - 1;
return;
}
this.selectedCompletionIndex--;
}
refreshCompletions(caretPosition = DomUtils.getCaretPosition(this._queryInputElement)) {
// TODO: debaunce
if (!this.options.autoComplete) {
return;
}
let promise = undefined;
let params = {
query: this.query,
caretPosition: caretPosition,
skip: 0
}
let func = Utils.createReadFunction(this.options.autoComplete, {
allowData: false,
dataMissingError: '`autoComplete` has to be either url or a function.',
shouldReturnUrlOrPromiseError: '`autoComplete` function should return url or promise.'
});
// TODO: race condition! only last one should resolve
func(params).then((x: IAutoCompletionResult) => {
this.selectedCompletionIndex = 0;
this.autoCompletionResult = x;
});
}
onCompletionClick(ev) {
Utils.preventDefaultAndPropagation(ev);
this.autoComplete();
}
private registerInputHandlers() {
let isInputClick = false;
this._subs.push(DomUtils.addEventListener(this._queryInputElement, "keyup", (ev: KeyboardEvent) => {
switch (ev.which) {
case 37: // Left
case 39: // Right
case 36: // Home
case 35: // End
this.refreshCompletions();
break;
case 38: // Up
case 40: // Down
if (!this.anyCompletion()) {
this.refreshCompletions();
}
break;
case 27: // Esc
this.hideCompletions();
break;
case 16: // Shift
case 17: // Ctrl
case 18: // Alt
case 255: // Fn
case 13: // Enter
case 9: // Tab
break;
default:
this.refreshCompletions();
break;
}
}));
this._subs.push(DomUtils.addEventListener(this._queryInputElement, "keydown", (ev: KeyboardEvent) => {
if (!this.anyCompletion()) {
if (ev.which === 13) { // Enter
this.submit();
Utils.preventDefaultAndPropagation(ev);
}
return;
}
switch (ev.which) {
case 38: // Up
this.selectPrevious();
Utils.preventDefaultAndPropagation(ev);
break;
case 40: // Down
this.selectNext();
Utils.preventDefaultAndPropagation(ev);
break;
case 13: // Enter
case 9: // Tab
this.autoComplete();
Utils.preventDefaultAndPropagation(ev);
break;
}
}));
this._subs.push(DomUtils.addEventListener(this._queryInputElement, "mouseup", (ev: KeyboardEvent) => {
this.refreshCompletions();
}));
this._subs.push(DomUtils.addEventListener(this._queryInputElement, "mousedown", (ev: KeyboardEvent) => {
isInputClick = true;
}));
this._subs.push(DomUtils.addEventListener(this._queryInputElement, "focus", (ev) => {
if (!isInputClick && !this._preventFromFocusOut) {
this.refreshCompletions();
}
isInputClick = false;
}));
this._subs.push(DomUtils.addEventListener(this._queryInputElement, "blur", (ev) => {
if (this._preventFromFocusOut) {
Utils.preventDefaultAndPropagation(ev);
return;
}
this.hideCompletions();
isInputClick = false;
if (this.options.submitOnFocusOut) {
this.submit();
}
}));
}
}
export interface IQueryLanguageOptions {
autoComplete?: ((IAutoCompletionParams) => any) | string;
inlineButton?: boolean;
inlineButtonText?: string;
submitOnFocusOut?: boolean;
onSubmit?: () => any;
}
export interface IAutoCompletionParams {
query: string,
caretPosition: number,
skip: number
}
export interface IAutoCompletionResult {
StartPosition: number,
Length: number,
Completions: IAutoCompletionRow[],
IsNextPageAvailable: boolean,
Errors: string[],
HasErrors: boolean
}
export interface IAutoCompletionRow {
Text: string,
Group: string
} |
this._lastSubmittedQuery = this.query; | random_line_split |
query-language.ts | import {customElement, bindable} from 'aurelia-templating';
import {inject} from 'aurelia-dependency-injection';
import {Utils, DomUtils} from 'marvelous-aurelia-core/utils';
import {AureliaUtils} from 'marvelous-aurelia-core/aureliaUtils';
@customElement('m-query-language')
@inject(Element, AureliaUtils)
export class QueryLanguage {
@bindable({ attribute: 'options' }) options: IQueryLanguageOptions;
autoCompletionResult: IAutoCompletionResult;
selectedCompletionIndex: number = 0;
errors: string[] = [];
private _subs = [];
private _queryInputElement: HTMLInputElement;
private _preventFromFocusOut = false;
private _loading: boolean;
private _lastSubmittedQuery: string;
query = '';
constructor(private _element: Element, private _aureliaUtils: AureliaUtils) {
}
attached() {
this.validateOptions();
this.createOptions();
this.registerInputHandlers();
}
detached() {
this._subs.forEach(x => x());
this._subs = [];
}
submit() {
if (this._lastSubmittedQuery === this.query) {
// submits only if query has some changes
return;
}
let promise = this.options.onSubmit();
if (!promise || !(promise.then instanceof Function)) {
return;
}
this._lastSubmittedQuery = this.query;
this._loading = true;
promise.then((x) => {
this._loading = false;
if (!x) {
return;
}
// if wrapped with DataSourceResult<T>
// then uses `queryLanguage`
// otherwise result is assumed to be QueryLanguageFilterResult<T>
let result = x.queryLanguage || x;
this.errors = result.errors || [];
}, () => this._loading = false);
}
createOptions() {
let o = this.options;
o.inlineButton = o.inlineButton === undefined ? true : o.inlineButton;
o.inlineButtonText = o.inlineButtonText || 'Apply';
o.submitOnFocusOut = o.submitOnFocusOut === undefined ? false : o.submitOnFocusOut;
o.onSubmit = o.onSubmit || Utils.noop;
}
validateOptions() {
if (!this.options) {
throw new Error('`options` attribute is required.');
}
}
autoComplete() {
let result = this.autoCompletionResult;
let current = result.Completions[this.selectedCompletionIndex];
let newQuery = this.query.substr(0, result.StartPosition);
newQuery += current.Text;
let caretPosition = newQuery.length;
newQuery += this.query.substr(result.StartPosition + result.Length);
this.query = newQuery;
this.hideCompletions();
DomUtils.setCaretPosition(this._queryInputElement, caretPosition);
}
anyCompletion() {
if (!this.autoCompletionResult || !this.autoCompletionResult.Completions) {
return false;
}
return this.autoCompletionResult.Completions.length != 0;
}
hideCompletions() {
this.selectedCompletionIndex = 0;
if (this.autoCompletionResult)
this.autoCompletionResult.Completions = [];
}
select(completion: IAutoCompletionRow) {
this.selectedCompletionIndex = this.autoCompletionResult.Completions.indexOf(completion);
}
selectNext() {
if (this.selectedCompletionIndex == this.autoCompletionResult.Completions.length - 1) |
this.selectedCompletionIndex++;
}
selectPrevious() {
if (this.selectedCompletionIndex == 0) {
this.selectedCompletionIndex = this.autoCompletionResult.Completions.length - 1;
return;
}
this.selectedCompletionIndex--;
}
refreshCompletions(caretPosition = DomUtils.getCaretPosition(this._queryInputElement)) {
// TODO: debaunce
if (!this.options.autoComplete) {
return;
}
let promise = undefined;
let params = {
query: this.query,
caretPosition: caretPosition,
skip: 0
}
let func = Utils.createReadFunction(this.options.autoComplete, {
allowData: false,
dataMissingError: '`autoComplete` has to be either url or a function.',
shouldReturnUrlOrPromiseError: '`autoComplete` function should return url or promise.'
});
// TODO: race condition! only last one should resolve
func(params).then((x: IAutoCompletionResult) => {
this.selectedCompletionIndex = 0;
this.autoCompletionResult = x;
});
}
onCompletionClick(ev) {
Utils.preventDefaultAndPropagation(ev);
this.autoComplete();
}
private registerInputHandlers() {
let isInputClick = false;
this._subs.push(DomUtils.addEventListener(this._queryInputElement, "keyup", (ev: KeyboardEvent) => {
switch (ev.which) {
case 37: // Left
case 39: // Right
case 36: // Home
case 35: // End
this.refreshCompletions();
break;
case 38: // Up
case 40: // Down
if (!this.anyCompletion()) {
this.refreshCompletions();
}
break;
case 27: // Esc
this.hideCompletions();
break;
case 16: // Shift
case 17: // Ctrl
case 18: // Alt
case 255: // Fn
case 13: // Enter
case 9: // Tab
break;
default:
this.refreshCompletions();
break;
}
}));
this._subs.push(DomUtils.addEventListener(this._queryInputElement, "keydown", (ev: KeyboardEvent) => {
if (!this.anyCompletion()) {
if (ev.which === 13) { // Enter
this.submit();
Utils.preventDefaultAndPropagation(ev);
}
return;
}
switch (ev.which) {
case 38: // Up
this.selectPrevious();
Utils.preventDefaultAndPropagation(ev);
break;
case 40: // Down
this.selectNext();
Utils.preventDefaultAndPropagation(ev);
break;
case 13: // Enter
case 9: // Tab
this.autoComplete();
Utils.preventDefaultAndPropagation(ev);
break;
}
}));
this._subs.push(DomUtils.addEventListener(this._queryInputElement, "mouseup", (ev: KeyboardEvent) => {
this.refreshCompletions();
}));
this._subs.push(DomUtils.addEventListener(this._queryInputElement, "mousedown", (ev: KeyboardEvent) => {
isInputClick = true;
}));
this._subs.push(DomUtils.addEventListener(this._queryInputElement, "focus", (ev) => {
if (!isInputClick && !this._preventFromFocusOut) {
this.refreshCompletions();
}
isInputClick = false;
}));
this._subs.push(DomUtils.addEventListener(this._queryInputElement, "blur", (ev) => {
if (this._preventFromFocusOut) {
Utils.preventDefaultAndPropagation(ev);
return;
}
this.hideCompletions();
isInputClick = false;
if (this.options.submitOnFocusOut) {
this.submit();
}
}));
}
}
export interface IQueryLanguageOptions {
autoComplete?: ((IAutoCompletionParams) => any) | string;
inlineButton?: boolean;
inlineButtonText?: string;
submitOnFocusOut?: boolean;
onSubmit?: () => any;
}
export interface IAutoCompletionParams {
query: string,
caretPosition: number,
skip: number
}
export interface IAutoCompletionResult {
StartPosition: number,
Length: number,
Completions: IAutoCompletionRow[],
IsNextPageAvailable: boolean,
Errors: string[],
HasErrors: boolean
}
export interface IAutoCompletionRow {
Text: string,
Group: string
} | {
this.selectedCompletionIndex = 0;
return;
} | conditional_block |
helper.py | from __future__ import division
from __future__ import unicode_literals
from builtins import range
from past.utils import old_div
import hashlib
import os
import random
import string
import tempfile
import re
import time
import urllib
from datetime import datetime
from datetime import timedelta
from elodie.compatability import _rename
from elodie.external.pyexiftool import ExifTool
from elodie.dependencies import get_exiftool
from elodie import constants
def checksum(file_path, blocksize=65536):
hasher = hashlib.sha256()
with open(file_path, 'rb') as f:
buf = f.read(blocksize)
while len(buf) > 0:
hasher.update(buf)
buf = f.read(blocksize)
return hasher.hexdigest()
return None
def create_working_folder(format=None):
temporary_folder = tempfile.gettempdir()
folder = os.path.join(temporary_folder, random_string(10, format), random_string(10, format))
os.makedirs(folder)
return (temporary_folder, folder)
def download_file(name, destination):
try:
url_to_file = 'https://s3.amazonaws.com/jmathai/github/elodie/{}'.format(name)
# urlretrieve works differently for python 2 and 3
if constants.python_version < 3:
final_name = '{}/{}{}'.format(destination, random_string(10), os.path.splitext(name)[1])
urllib.urlretrieve(
url_to_file, | final_name, headers = urllib.request.urlretrieve(url_to_file)
return final_name
except Exception as e:
return False
def get_file(name):
file_path = get_file_path(name)
if not os.path.isfile(file_path):
return False
return file_path
def get_file_path(name):
current_folder = os.path.dirname(os.path.realpath(__file__))
return os.path.join(current_folder, 'files', name)
def get_test_location():
return (61.013710, 99.196656, 'Siberia')
def populate_folder(number_of_files, include_invalid=False):
folder = '%s/%s' % (tempfile.gettempdir(), random_string(10))
os.makedirs(folder)
for x in range(0, number_of_files):
ext = 'jpg' if x % 2 == 0 else 'txt'
fname = '%s/%s.%s' % (folder, x, ext)
with open(fname, 'a'):
os.utime(fname, None)
if include_invalid:
fname = '%s/%s' % (folder, 'invalid.invalid')
with open(fname, 'a'):
os.utime(fname, None)
return folder
def random_string(length, format=None):
format_choice = string.ascii_uppercase + string.digits
if format == 'int':
format_choice = string.digits
elif format == 'str':
format_choice = string.asci_uppercase
return ''.join(random.SystemRandom().choice(format_choice) for _ in range(length))
def random_decimal():
return random.random()
def random_coordinate(coordinate, precision):
# Here we add to the decimal section of the coordinate by a given precision
return coordinate + ((old_div(10.0, (10.0**precision))) * random_decimal())
def temp_dir():
return tempfile.gettempdir()
def is_windows():
return os.name == 'nt'
# path_tz_fix(file_name)
# Change timestamp in file_name by the offset
# between UTC and local time, i.e.
# 2015-12-05_00-59-26-with-title-some-title.jpg ->
# 2015-12-04_20-59-26-with-title-some-title.jpg
# (Windows only)
def path_tz_fix(file_name):
if is_windows():
# Calculate the offset between UTC and local time
tz_shift = old_div((datetime.fromtimestamp(0) -
datetime.utcfromtimestamp(0)).seconds,3600)
# replace timestamp in file_name
m = re.search('(\d{4}-\d{2}-\d{2}_\d{2}-\d{2}-\d{2})',file_name)
t_date = datetime.fromtimestamp(time.mktime(time.strptime(m.group(0), '%Y-%m-%d_%H-%M-%S')))
s_date_fix = (t_date-timedelta(hours=tz_shift)).strftime('%Y-%m-%d_%H-%M-%S')
return re.sub('\d{4}-\d{2}-\d{2}_\d{2}-\d{2}-\d{2}',s_date_fix,file_name)
else:
return file_name
# time_convert(s_time)
# Change s_time (struct_time) by the offset
# between UTC and local time
# (Windows only)
def time_convert(s_time):
if is_windows():
return time.gmtime((time.mktime(s_time)))
else:
return s_time
# isclose(a,b,rel_tol)
# To compare float coordinates a and b
# with relative tolerance c
def isclose(a, b, rel_tol = 1e-8):
if not isinstance(a, (int, float)) or not isinstance(b, (int, float)):
return False
diff = abs(a - b)
return (diff <= abs(rel_tol * a) and
diff <= abs(rel_tol * b))
def reset_dbs():
""" Back up hash_db and location_db """
# This is no longer needed. See gh-322
# https://github.com/jmathai/elodie/issues/322
pass
def restore_dbs():
""" Restore back ups of hash_db and location_db """
# This is no longer needed. See gh-322
# https://github.com/jmathai/elodie/issues/322
pass
def setup_module():
exiftool_addedargs = [
u'-config',
u'"{}"'.format(constants.exiftool_config)
]
ExifTool(executable_=get_exiftool(), addedargs=exiftool_addedargs).start()
def teardown_module():
ExifTool().terminate | final_name
)
else: | random_line_split |
helper.py | from __future__ import division
from __future__ import unicode_literals
from builtins import range
from past.utils import old_div
import hashlib
import os
import random
import string
import tempfile
import re
import time
import urllib
from datetime import datetime
from datetime import timedelta
from elodie.compatability import _rename
from elodie.external.pyexiftool import ExifTool
from elodie.dependencies import get_exiftool
from elodie import constants
def checksum(file_path, blocksize=65536):
hasher = hashlib.sha256()
with open(file_path, 'rb') as f:
buf = f.read(blocksize)
while len(buf) > 0:
hasher.update(buf)
buf = f.read(blocksize)
return hasher.hexdigest()
return None
def create_working_folder(format=None):
temporary_folder = tempfile.gettempdir()
folder = os.path.join(temporary_folder, random_string(10, format), random_string(10, format))
os.makedirs(folder)
return (temporary_folder, folder)
def download_file(name, destination):
try:
url_to_file = 'https://s3.amazonaws.com/jmathai/github/elodie/{}'.format(name)
# urlretrieve works differently for python 2 and 3
if constants.python_version < 3:
final_name = '{}/{}{}'.format(destination, random_string(10), os.path.splitext(name)[1])
urllib.urlretrieve(
url_to_file,
final_name
)
else:
final_name, headers = urllib.request.urlretrieve(url_to_file)
return final_name
except Exception as e:
return False
def get_file(name):
file_path = get_file_path(name)
if not os.path.isfile(file_path):
return False
return file_path
def get_file_path(name):
current_folder = os.path.dirname(os.path.realpath(__file__))
return os.path.join(current_folder, 'files', name)
def get_test_location():
return (61.013710, 99.196656, 'Siberia')
def populate_folder(number_of_files, include_invalid=False):
folder = '%s/%s' % (tempfile.gettempdir(), random_string(10))
os.makedirs(folder)
for x in range(0, number_of_files):
ext = 'jpg' if x % 2 == 0 else 'txt'
fname = '%s/%s.%s' % (folder, x, ext)
with open(fname, 'a'):
os.utime(fname, None)
if include_invalid:
fname = '%s/%s' % (folder, 'invalid.invalid')
with open(fname, 'a'):
os.utime(fname, None)
return folder
def random_string(length, format=None):
format_choice = string.ascii_uppercase + string.digits
if format == 'int':
format_choice = string.digits
elif format == 'str':
format_choice = string.asci_uppercase
return ''.join(random.SystemRandom().choice(format_choice) for _ in range(length))
def random_decimal():
return random.random()
def random_coordinate(coordinate, precision):
# Here we add to the decimal section of the coordinate by a given precision
return coordinate + ((old_div(10.0, (10.0**precision))) * random_decimal())
def temp_dir():
return tempfile.gettempdir()
def is_windows():
return os.name == 'nt'
# path_tz_fix(file_name)
# Change timestamp in file_name by the offset
# between UTC and local time, i.e.
# 2015-12-05_00-59-26-with-title-some-title.jpg ->
# 2015-12-04_20-59-26-with-title-some-title.jpg
# (Windows only)
def path_tz_fix(file_name):
if is_windows():
# Calculate the offset between UTC and local time
|
else:
return file_name
# time_convert(s_time)
# Change s_time (struct_time) by the offset
# between UTC and local time
# (Windows only)
def time_convert(s_time):
if is_windows():
return time.gmtime((time.mktime(s_time)))
else:
return s_time
# isclose(a,b,rel_tol)
# To compare float coordinates a and b
# with relative tolerance c
def isclose(a, b, rel_tol = 1e-8):
if not isinstance(a, (int, float)) or not isinstance(b, (int, float)):
return False
diff = abs(a - b)
return (diff <= abs(rel_tol * a) and
diff <= abs(rel_tol * b))
def reset_dbs():
""" Back up hash_db and location_db """
# This is no longer needed. See gh-322
# https://github.com/jmathai/elodie/issues/322
pass
def restore_dbs():
""" Restore back ups of hash_db and location_db """
# This is no longer needed. See gh-322
# https://github.com/jmathai/elodie/issues/322
pass
def setup_module():
exiftool_addedargs = [
u'-config',
u'"{}"'.format(constants.exiftool_config)
]
ExifTool(executable_=get_exiftool(), addedargs=exiftool_addedargs).start()
def teardown_module():
ExifTool().terminate
| tz_shift = old_div((datetime.fromtimestamp(0) -
datetime.utcfromtimestamp(0)).seconds,3600)
# replace timestamp in file_name
m = re.search('(\d{4}-\d{2}-\d{2}_\d{2}-\d{2}-\d{2})',file_name)
t_date = datetime.fromtimestamp(time.mktime(time.strptime(m.group(0), '%Y-%m-%d_%H-%M-%S')))
s_date_fix = (t_date-timedelta(hours=tz_shift)).strftime('%Y-%m-%d_%H-%M-%S')
return re.sub('\d{4}-\d{2}-\d{2}_\d{2}-\d{2}-\d{2}',s_date_fix,file_name) | conditional_block |
helper.py | from __future__ import division
from __future__ import unicode_literals
from builtins import range
from past.utils import old_div
import hashlib
import os
import random
import string
import tempfile
import re
import time
import urllib
from datetime import datetime
from datetime import timedelta
from elodie.compatability import _rename
from elodie.external.pyexiftool import ExifTool
from elodie.dependencies import get_exiftool
from elodie import constants
def checksum(file_path, blocksize=65536):
hasher = hashlib.sha256()
with open(file_path, 'rb') as f:
buf = f.read(blocksize)
while len(buf) > 0:
hasher.update(buf)
buf = f.read(blocksize)
return hasher.hexdigest()
return None
def create_working_folder(format=None):
temporary_folder = tempfile.gettempdir()
folder = os.path.join(temporary_folder, random_string(10, format), random_string(10, format))
os.makedirs(folder)
return (temporary_folder, folder)
def download_file(name, destination):
try:
url_to_file = 'https://s3.amazonaws.com/jmathai/github/elodie/{}'.format(name)
# urlretrieve works differently for python 2 and 3
if constants.python_version < 3:
final_name = '{}/{}{}'.format(destination, random_string(10), os.path.splitext(name)[1])
urllib.urlretrieve(
url_to_file,
final_name
)
else:
final_name, headers = urllib.request.urlretrieve(url_to_file)
return final_name
except Exception as e:
return False
def get_file(name):
file_path = get_file_path(name)
if not os.path.isfile(file_path):
return False
return file_path
def get_file_path(name):
current_folder = os.path.dirname(os.path.realpath(__file__))
return os.path.join(current_folder, 'files', name)
def get_test_location():
return (61.013710, 99.196656, 'Siberia')
def populate_folder(number_of_files, include_invalid=False):
folder = '%s/%s' % (tempfile.gettempdir(), random_string(10))
os.makedirs(folder)
for x in range(0, number_of_files):
ext = 'jpg' if x % 2 == 0 else 'txt'
fname = '%s/%s.%s' % (folder, x, ext)
with open(fname, 'a'):
os.utime(fname, None)
if include_invalid:
fname = '%s/%s' % (folder, 'invalid.invalid')
with open(fname, 'a'):
os.utime(fname, None)
return folder
def random_string(length, format=None):
format_choice = string.ascii_uppercase + string.digits
if format == 'int':
format_choice = string.digits
elif format == 'str':
format_choice = string.asci_uppercase
return ''.join(random.SystemRandom().choice(format_choice) for _ in range(length))
def | ():
return random.random()
def random_coordinate(coordinate, precision):
# Here we add to the decimal section of the coordinate by a given precision
return coordinate + ((old_div(10.0, (10.0**precision))) * random_decimal())
def temp_dir():
return tempfile.gettempdir()
def is_windows():
return os.name == 'nt'
# path_tz_fix(file_name)
# Change timestamp in file_name by the offset
# between UTC and local time, i.e.
# 2015-12-05_00-59-26-with-title-some-title.jpg ->
# 2015-12-04_20-59-26-with-title-some-title.jpg
# (Windows only)
def path_tz_fix(file_name):
if is_windows():
# Calculate the offset between UTC and local time
tz_shift = old_div((datetime.fromtimestamp(0) -
datetime.utcfromtimestamp(0)).seconds,3600)
# replace timestamp in file_name
m = re.search('(\d{4}-\d{2}-\d{2}_\d{2}-\d{2}-\d{2})',file_name)
t_date = datetime.fromtimestamp(time.mktime(time.strptime(m.group(0), '%Y-%m-%d_%H-%M-%S')))
s_date_fix = (t_date-timedelta(hours=tz_shift)).strftime('%Y-%m-%d_%H-%M-%S')
return re.sub('\d{4}-\d{2}-\d{2}_\d{2}-\d{2}-\d{2}',s_date_fix,file_name)
else:
return file_name
# time_convert(s_time)
# Change s_time (struct_time) by the offset
# between UTC and local time
# (Windows only)
def time_convert(s_time):
if is_windows():
return time.gmtime((time.mktime(s_time)))
else:
return s_time
# isclose(a,b,rel_tol)
# To compare float coordinates a and b
# with relative tolerance c
def isclose(a, b, rel_tol = 1e-8):
if not isinstance(a, (int, float)) or not isinstance(b, (int, float)):
return False
diff = abs(a - b)
return (diff <= abs(rel_tol * a) and
diff <= abs(rel_tol * b))
def reset_dbs():
""" Back up hash_db and location_db """
# This is no longer needed. See gh-322
# https://github.com/jmathai/elodie/issues/322
pass
def restore_dbs():
""" Restore back ups of hash_db and location_db """
# This is no longer needed. See gh-322
# https://github.com/jmathai/elodie/issues/322
pass
def setup_module():
exiftool_addedargs = [
u'-config',
u'"{}"'.format(constants.exiftool_config)
]
ExifTool(executable_=get_exiftool(), addedargs=exiftool_addedargs).start()
def teardown_module():
ExifTool().terminate
| random_decimal | identifier_name |
helper.py | from __future__ import division
from __future__ import unicode_literals
from builtins import range
from past.utils import old_div
import hashlib
import os
import random
import string
import tempfile
import re
import time
import urllib
from datetime import datetime
from datetime import timedelta
from elodie.compatability import _rename
from elodie.external.pyexiftool import ExifTool
from elodie.dependencies import get_exiftool
from elodie import constants
def checksum(file_path, blocksize=65536):
hasher = hashlib.sha256()
with open(file_path, 'rb') as f:
buf = f.read(blocksize)
while len(buf) > 0:
hasher.update(buf)
buf = f.read(blocksize)
return hasher.hexdigest()
return None
def create_working_folder(format=None):
temporary_folder = tempfile.gettempdir()
folder = os.path.join(temporary_folder, random_string(10, format), random_string(10, format))
os.makedirs(folder)
return (temporary_folder, folder)
def download_file(name, destination):
try:
url_to_file = 'https://s3.amazonaws.com/jmathai/github/elodie/{}'.format(name)
# urlretrieve works differently for python 2 and 3
if constants.python_version < 3:
final_name = '{}/{}{}'.format(destination, random_string(10), os.path.splitext(name)[1])
urllib.urlretrieve(
url_to_file,
final_name
)
else:
final_name, headers = urllib.request.urlretrieve(url_to_file)
return final_name
except Exception as e:
return False
def get_file(name):
file_path = get_file_path(name)
if not os.path.isfile(file_path):
return False
return file_path
def get_file_path(name):
current_folder = os.path.dirname(os.path.realpath(__file__))
return os.path.join(current_folder, 'files', name)
def get_test_location():
return (61.013710, 99.196656, 'Siberia')
def populate_folder(number_of_files, include_invalid=False):
folder = '%s/%s' % (tempfile.gettempdir(), random_string(10))
os.makedirs(folder)
for x in range(0, number_of_files):
ext = 'jpg' if x % 2 == 0 else 'txt'
fname = '%s/%s.%s' % (folder, x, ext)
with open(fname, 'a'):
os.utime(fname, None)
if include_invalid:
fname = '%s/%s' % (folder, 'invalid.invalid')
with open(fname, 'a'):
os.utime(fname, None)
return folder
def random_string(length, format=None):
format_choice = string.ascii_uppercase + string.digits
if format == 'int':
format_choice = string.digits
elif format == 'str':
format_choice = string.asci_uppercase
return ''.join(random.SystemRandom().choice(format_choice) for _ in range(length))
def random_decimal():
return random.random()
def random_coordinate(coordinate, precision):
# Here we add to the decimal section of the coordinate by a given precision
return coordinate + ((old_div(10.0, (10.0**precision))) * random_decimal())
def temp_dir():
return tempfile.gettempdir()
def is_windows():
return os.name == 'nt'
# path_tz_fix(file_name)
# Change timestamp in file_name by the offset
# between UTC and local time, i.e.
# 2015-12-05_00-59-26-with-title-some-title.jpg ->
# 2015-12-04_20-59-26-with-title-some-title.jpg
# (Windows only)
def path_tz_fix(file_name):
if is_windows():
# Calculate the offset between UTC and local time
tz_shift = old_div((datetime.fromtimestamp(0) -
datetime.utcfromtimestamp(0)).seconds,3600)
# replace timestamp in file_name
m = re.search('(\d{4}-\d{2}-\d{2}_\d{2}-\d{2}-\d{2})',file_name)
t_date = datetime.fromtimestamp(time.mktime(time.strptime(m.group(0), '%Y-%m-%d_%H-%M-%S')))
s_date_fix = (t_date-timedelta(hours=tz_shift)).strftime('%Y-%m-%d_%H-%M-%S')
return re.sub('\d{4}-\d{2}-\d{2}_\d{2}-\d{2}-\d{2}',s_date_fix,file_name)
else:
return file_name
# time_convert(s_time)
# Change s_time (struct_time) by the offset
# between UTC and local time
# (Windows only)
def time_convert(s_time):
|
# isclose(a,b,rel_tol)
# To compare float coordinates a and b
# with relative tolerance c
def isclose(a, b, rel_tol = 1e-8):
if not isinstance(a, (int, float)) or not isinstance(b, (int, float)):
return False
diff = abs(a - b)
return (diff <= abs(rel_tol * a) and
diff <= abs(rel_tol * b))
def reset_dbs():
""" Back up hash_db and location_db """
# This is no longer needed. See gh-322
# https://github.com/jmathai/elodie/issues/322
pass
def restore_dbs():
""" Restore back ups of hash_db and location_db """
# This is no longer needed. See gh-322
# https://github.com/jmathai/elodie/issues/322
pass
def setup_module():
exiftool_addedargs = [
u'-config',
u'"{}"'.format(constants.exiftool_config)
]
ExifTool(executable_=get_exiftool(), addedargs=exiftool_addedargs).start()
def teardown_module():
ExifTool().terminate
| if is_windows():
return time.gmtime((time.mktime(s_time)))
else:
return s_time | identifier_body |
main.py | #!/usr/bin/python
# MinerLite - A client side miner controller.
# This will launch cgminer with a few delay seconds and
# retrieve the local data and post it into somewhere!
#
# Author: Yanxiang Wu
# Release Under GPL 3
# Used code from cgminer python API example
import socket
import json
import sys
import subprocess
import time
import os
path = "/home/ltcminer/mining/cgminer/cgminer"
log_file = "/home/ltcminer/mining/minerlite.log"
def linesplit(socket):
buffer = socket.recv(4096)
done = False
while not done:
more = socket.recv(4096)
if not more:
done = True
else:
buffer = buffer+more
if buffer:
|
def retrieve_cgminer_info(command, parameter):
"""retrieve status of devices from cgminer
"""
api_ip = '127.0.0.1'
api_port = 4028
s = socket.socket(socket.AF_INET,socket.SOCK_STREAM)
s.connect((api_ip,int(api_port)))
if not parameter:
s.send(json.dumps({"command":command,"parameter":parameter}))
else:
s.send(json.dumps({"command":command}))
response = linesplit(s)
response = response.replace('\x00','')
return_val = response
response = json.loads(response)
# print response
s.close()
return return_val
def run_cgminer(path):
subprocess.Popen([path, "--api-listen"])
print "Starting cgminer in 2 seconds"
time.sleep(2)
print "Running cgminer ..."
run_cgminer(path)
time.sleep(15)
with open(log_file, 'a') as logfile:
try:
logfile.write( retrieve_cgminer_info("devs", None) )
except socket.error:
pass | return buffer | conditional_block |
main.py | #!/usr/bin/python
# MinerLite - A client side miner controller.
# This will launch cgminer with a few delay seconds and
# retrieve the local data and post it into somewhere!
#
# Author: Yanxiang Wu
# Release Under GPL 3
# Used code from cgminer python API example
import socket
import json
import sys
import subprocess
import time
import os
path = "/home/ltcminer/mining/cgminer/cgminer"
log_file = "/home/ltcminer/mining/minerlite.log"
def linesplit(socket):
buffer = socket.recv(4096)
done = False
while not done:
more = socket.recv(4096)
if not more:
done = True
else:
buffer = buffer+more
if buffer:
return buffer
def retrieve_cgminer_info(command, parameter):
"""retrieve status of devices from cgminer
"""
api_ip = '127.0.0.1'
api_port = 4028
s = socket.socket(socket.AF_INET,socket.SOCK_STREAM)
s.connect((api_ip,int(api_port)))
if not parameter:
s.send(json.dumps({"command":command,"parameter":parameter}))
else:
s.send(json.dumps({"command":command}))
response = linesplit(s)
response = response.replace('\x00','')
return_val = response
response = json.loads(response)
# print response
s.close()
return return_val
def run_cgminer(path):
subprocess.Popen([path, "--api-listen"])
print "Starting cgminer in 2 seconds"
time.sleep(2)
print "Running cgminer ..."
run_cgminer(path)
time.sleep(15)
with open(log_file, 'a') as logfile:
try: | except socket.error:
pass | logfile.write( retrieve_cgminer_info("devs", None) ) | random_line_split |
main.py | #!/usr/bin/python
# MinerLite - A client side miner controller.
# This will launch cgminer with a few delay seconds and
# retrieve the local data and post it into somewhere!
#
# Author: Yanxiang Wu
# Release Under GPL 3
# Used code from cgminer python API example
import socket
import json
import sys
import subprocess
import time
import os
path = "/home/ltcminer/mining/cgminer/cgminer"
log_file = "/home/ltcminer/mining/minerlite.log"
def linesplit(socket):
buffer = socket.recv(4096)
done = False
while not done:
more = socket.recv(4096)
if not more:
done = True
else:
buffer = buffer+more
if buffer:
return buffer
def retrieve_cgminer_info(command, parameter):
"""retrieve status of devices from cgminer
"""
api_ip = '127.0.0.1'
api_port = 4028
s = socket.socket(socket.AF_INET,socket.SOCK_STREAM)
s.connect((api_ip,int(api_port)))
if not parameter:
s.send(json.dumps({"command":command,"parameter":parameter}))
else:
s.send(json.dumps({"command":command}))
response = linesplit(s)
response = response.replace('\x00','')
return_val = response
response = json.loads(response)
# print response
s.close()
return return_val
def | (path):
subprocess.Popen([path, "--api-listen"])
print "Starting cgminer in 2 seconds"
time.sleep(2)
print "Running cgminer ..."
run_cgminer(path)
time.sleep(15)
with open(log_file, 'a') as logfile:
try:
logfile.write( retrieve_cgminer_info("devs", None) )
except socket.error:
pass | run_cgminer | identifier_name |
main.py | #!/usr/bin/python
# MinerLite - A client side miner controller.
# This will launch cgminer with a few delay seconds and
# retrieve the local data and post it into somewhere!
#
# Author: Yanxiang Wu
# Release Under GPL 3
# Used code from cgminer python API example
import socket
import json
import sys
import subprocess
import time
import os
path = "/home/ltcminer/mining/cgminer/cgminer"
log_file = "/home/ltcminer/mining/minerlite.log"
def linesplit(socket):
|
def retrieve_cgminer_info(command, parameter):
"""retrieve status of devices from cgminer
"""
api_ip = '127.0.0.1'
api_port = 4028
s = socket.socket(socket.AF_INET,socket.SOCK_STREAM)
s.connect((api_ip,int(api_port)))
if not parameter:
s.send(json.dumps({"command":command,"parameter":parameter}))
else:
s.send(json.dumps({"command":command}))
response = linesplit(s)
response = response.replace('\x00','')
return_val = response
response = json.loads(response)
# print response
s.close()
return return_val
def run_cgminer(path):
subprocess.Popen([path, "--api-listen"])
print "Starting cgminer in 2 seconds"
time.sleep(2)
print "Running cgminer ..."
run_cgminer(path)
time.sleep(15)
with open(log_file, 'a') as logfile:
try:
logfile.write( retrieve_cgminer_info("devs", None) )
except socket.error:
pass | buffer = socket.recv(4096)
done = False
while not done:
more = socket.recv(4096)
if not more:
done = True
else:
buffer = buffer+more
if buffer:
return buffer | identifier_body |
Paginator.test.tsx | import { render, screen } from '@testing-library/react'
import userEvent from '@testing-library/user-event'
import Paginator from '../Paginator'
import type { PaginatorProps } from '../Paginator'
const mockPageChange = jest.fn()
| [1, 2, 6],
[1, 3, 7],
[1, 4, 8],
[1, 5, 9],
[2, 5, 9],
[3, 5, 9],
[1, 6, 8],
[2, 6, 9],
[3, 6, 10],
[4, 6, 10],
[1, 7, 8],
[2, 7, 9],
[3, 7, 10],
[4, 7, 11],
[5, 7, 10],
[6, 7, 9],
[7, 7, 8],
[1, 8, 8],
[1, 9, 8],
[1, 10, 8],
[1, 20, 8],
[1, 100, 8],
[2, 100, 9],
[3, 100, 10],
[4, 100, 11],
[5, 100, 11],
[50, 100, 11],
[500, 1000, 11],
[5000, 10000, 11],
[97, 100, 11],
[98, 100, 10],
[99, 100, 9],
[100, 100, 8],
]
const setup = ({ activePage, totalPages, onPageChange }: PaginatorProps) => {
const utils = render(
<Paginator
activePage={activePage}
totalPages={totalPages}
onPageChange={onPageChange}
/>,
)
return {
...utils,
}
}
describe('Paginator', () => {
it.each(testCases)(
'renders correctly the inner elements for active %s and %s pages',
(activePage, totalPages, renderedItems) => {
setup({ activePage, totalPages, onPageChange: mockPageChange })
expect(screen.getAllByTestId('paginator-item').length).toBe(renderedItems)
},
)
it('activePage has active prop', () => {
const [activePage, totalPages, , activeItem] = testCases[0]
setup({ activePage, totalPages, onPageChange: mockPageChange })
expect(screen.getAllByTestId('paginator-item')[activeItem]).toHaveClass(
'active',
)
})
it('calls onPageChange when clicking PaginatorItem', () => {
const [activePage, totalPages, , activeItem] = testCases[0]
setup({ activePage, totalPages, onPageChange: mockPageChange })
userEvent.click(screen.getAllByTestId('paginator-item')[activeItem])
expect(mockPageChange).toHaveBeenCalled()
})
}) | const testCases = [
// activePage, totalPages, renderedItems, activeItem
[3, 10, 10, 4],
[1, 1, 5], | random_line_split |
ball.js | function Ball() {
// ball id
this.ballId = 'B_'+Math.floor(Math.random() * 100000000);
// at first - we pick the direction randomlly -
// there are 4 possible directions (1,2,3,4)
// 1: up left, 2: up right, 3: down right, 4: down left
this.direction = Math.floor(Math.random() * 2) + 1;
// ball speed
this.ballSpeed = 1;
// ball size
this.ballSize = 15; // in px
// ball interval
this.ballInterval = undefined;
// ball class name
this.ballClass = 'ball';
// init the ball
this.init();
}
Ball.prototype.init = function() {
this.appendBall();
}
Ball.prototype.getBall = function() {
return document.getElementById(this.ballId);
}
Ball.prototype.appendBall = function() {
var b = document.createElement('div');
b.setAttribute('id',this.ballId);
b.setAttribute('class',this.ballClass);
document.body.appendChild(b);
}
Ball.prototype.move = function() {
var that = this;
that.ballInterval = setInterval(function() {
switch (that.direction) {
case 1:
that.getBall().style.left = that.getBall().offsetLeft - that.ballSpeed + 'px';
that.getBall().style.top = that.getBall().offsetTop - that.ballSpeed + 'px';
break;
case 2:
that.getBall().style.left = that.getBall().offsetLeft + that.ballSpeed + 'px'; | case 3:
that.getBall().style.left = that.getBall().offsetLeft + that.ballSpeed + 'px';
that.getBall().style.top = that.getBall().offsetTop + that.ballSpeed + 'px';
break;
case 4:
that.getBall().style.left = that.getBall().offsetLeft - that.ballSpeed + 'px';
that.getBall().style.top = that.getBall().offsetTop + that.ballSpeed + 'px';
break;
}
},1);
}
Ball.prototype.stop = function() {
clearInterval(this.ballInterval);
} | that.getBall().style.top = that.getBall().offsetTop - that.ballSpeed + 'px';
break; | random_line_split |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.