| file_name (large_string, lengths 4–140) | prefix (large_string, lengths 0–12.1k) | suffix (large_string, lengths 0–12k) | middle (large_string, lengths 0–7.51k) | fim_type (large_string, 4 classes) |
---|---|---|---|---|
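Each row pairs a source file with a fill-in-the-middle (FIM) split: the file's text is divided into a `prefix`, a held-out `middle`, and a `suffix`, and `fim_type` records how the held-out span was chosen — the four classes appearing below are `conditional_block`, `identifier_body`, `identifier_name`, and `random_line_split`. In the flattened rows below, `|` separates the columns, so each row reads `file_name | prefix | suffix | middle | fim_type`. The following sketch shows one way a row like this could be reassembled into a prefix-suffix-middle (PSM) training string; the `<|fim_*|>` sentinel tokens are an assumption borrowed from common FIM setups, not something this dataset defines:

```python
# Sketch: reassemble one flattened row into a FIM training string.
# Assumption: the <|fim_prefix|>/<|fim_suffix|>/<|fim_middle|> sentinels
# follow the common PSM convention; this dataset itself does not define
# any sentinel tokens, so substitute your tokenizer's actual ones.

def build_fim_sample(row: dict) -> str:
    """Emit prefix-suffix-middle (PSM) order: the model sees the
    surrounding context first and generates the middle last."""
    return (
        "<|fim_prefix|>" + row["prefix"]
        + "<|fim_suffix|>" + row["suffix"]
        + "<|fim_middle|>" + row["middle"]
    )

# Toy row mirroring the first record below.
row = {
    "file_name": "test_msvc9compiler.py",
    "prefix": 'if sys.platform=="win32":\n    ',
    "suffix": '\nelse:\n    SKIP_MESSAGE = "These tests are only for win32"',
    "middle": "from distutils.msvccompiler import get_build_version",
    "fim_type": "conditional_block",
}
print(build_fim_sample(row))
```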
test_msvc9compiler.py | """Tests for distutils.msvc9compiler."""
import sys
import unittest
import os
from distutils.errors import DistutilsPlatformError
from distutils.tests import support
from test.support import run_unittest
_MANIFEST = """\
<?xml version="1.0" encoding="UTF-8" standalone="yes"?>
<assembly xmlns="urn:schemas-microsoft-com:asm.v1"
manifestVersion="1.0">
<trustInfo xmlns="urn:schemas-microsoft-com:asm.v3">
<security>
<requestedPrivileges>
<requestedExecutionLevel level="asInvoker" uiAccess="false">
</requestedExecutionLevel>
</requestedPrivileges>
</security>
</trustInfo>
<dependency>
<dependentAssembly>
<assemblyIdentity type="win32" name="Microsoft.VC90.CRT"
version="9.0.21022.8" processorArchitecture="x86"
publicKeyToken="XXXX">
</assemblyIdentity>
</dependentAssembly>
</dependency>
<dependency>
<dependentAssembly>
<assemblyIdentity type="win32" name="Microsoft.VC90.MFC"
version="9.0.21022.8" processorArchitecture="x86"
publicKeyToken="XXXX"></assemblyIdentity>
</dependentAssembly>
</dependency>
</assembly>
"""
_CLEANED_MANIFEST = """\
<?xml version="1.0" encoding="UTF-8" standalone="yes"?>
<assembly xmlns="urn:schemas-microsoft-com:asm.v1"
manifestVersion="1.0">
<trustInfo xmlns="urn:schemas-microsoft-com:asm.v3">
<security>
<requestedPrivileges>
<requestedExecutionLevel level="asInvoker" uiAccess="false">
</requestedExecutionLevel>
</requestedPrivileges>
</security>
</trustInfo>
<dependency>
</dependency>
<dependency>
<dependentAssembly>
<assemblyIdentity type="win32" name="Microsoft.VC90.MFC"
version="9.0.21022.8" processorArchitecture="x86"
publicKeyToken="XXXX"></assemblyIdentity>
</dependentAssembly>
</dependency>
</assembly>"""
if sys.platform=="win32":
|
else:
SKIP_MESSAGE = "These tests are only for win32"
@unittest.skipUnless(SKIP_MESSAGE is None, SKIP_MESSAGE)
class msvc9compilerTestCase(support.TempdirManager,
unittest.TestCase):
def test_no_compiler(self):
# makes sure query_vcvarsall throws
# a DistutilsPlatformError if the compiler
# is not found
from distutils.msvc9compiler import query_vcvarsall
def _find_vcvarsall(version):
return None
from distutils import msvc9compiler
old_find_vcvarsall = msvc9compiler.find_vcvarsall
msvc9compiler.find_vcvarsall = _find_vcvarsall
try:
self.assertRaises(DistutilsPlatformError, query_vcvarsall,
'wont find this version')
finally:
msvc9compiler.find_vcvarsall = old_find_vcvarsall
def test_reg_class(self):
from distutils.msvc9compiler import Reg
self.assertRaises(KeyError, Reg.get_value, 'xxx', 'xxx')
# looking for values that should exist in all
# Windows registry versions.
path = r'Control Panel\Desktop'
v = Reg.get_value(path, 'dragfullwindows')
self.assertTrue(v in ('0', '1', '2'))
import winreg
HKCU = winreg.HKEY_CURRENT_USER
keys = Reg.read_keys(HKCU, 'xxxx')
self.assertEqual(keys, None)
keys = Reg.read_keys(HKCU, r'Control Panel')
self.assertTrue('Desktop' in keys)
def test_remove_visual_c_ref(self):
from distutils.msvc9compiler import MSVCCompiler
tempdir = self.mkdtemp()
manifest = os.path.join(tempdir, 'manifest')
f = open(manifest, 'w')
try:
f.write(_MANIFEST)
finally:
f.close()
compiler = MSVCCompiler()
compiler._remove_visual_c_ref(manifest)
# see what we got
f = open(manifest)
try:
# removing trailing spaces
content = '\n'.join([line.rstrip() for line in f.readlines()])
finally:
f.close()
# makes sure the manifest was properly cleaned
self.assertEqual(content, _CLEANED_MANIFEST)
def test_suite():
return unittest.makeSuite(msvc9compilerTestCase)
if __name__ == "__main__":
run_unittest(test_suite())
| from distutils.msvccompiler import get_build_version
if get_build_version()>=8.0:
SKIP_MESSAGE = None
else:
SKIP_MESSAGE = "These tests are only for MSVC8.0 or above" | conditional_block |
test_msvc9compiler.py | """Tests for distutils.msvc9compiler."""
import sys
import unittest
import os
from distutils.errors import DistutilsPlatformError
from distutils.tests import support
from test.support import run_unittest
_MANIFEST = """\
<?xml version="1.0" encoding="UTF-8" standalone="yes"?>
<assembly xmlns="urn:schemas-microsoft-com:asm.v1"
manifestVersion="1.0">
<trustInfo xmlns="urn:schemas-microsoft-com:asm.v3">
<security>
<requestedPrivileges>
<requestedExecutionLevel level="asInvoker" uiAccess="false">
</requestedExecutionLevel>
</requestedPrivileges>
</security>
</trustInfo>
<dependency>
<dependentAssembly>
<assemblyIdentity type="win32" name="Microsoft.VC90.CRT"
version="9.0.21022.8" processorArchitecture="x86"
publicKeyToken="XXXX">
</assemblyIdentity>
</dependentAssembly>
</dependency>
<dependency>
<dependentAssembly>
<assemblyIdentity type="win32" name="Microsoft.VC90.MFC"
version="9.0.21022.8" processorArchitecture="x86"
publicKeyToken="XXXX"></assemblyIdentity>
</dependentAssembly>
</dependency>
</assembly>
"""
_CLEANED_MANIFEST = """\
<?xml version="1.0" encoding="UTF-8" standalone="yes"?>
<assembly xmlns="urn:schemas-microsoft-com:asm.v1"
manifestVersion="1.0">
<trustInfo xmlns="urn:schemas-microsoft-com:asm.v3">
<security>
<requestedPrivileges>
<requestedExecutionLevel level="asInvoker" uiAccess="false">
</requestedExecutionLevel>
</requestedPrivileges>
</security>
</trustInfo>
<dependency>
</dependency>
<dependency>
<dependentAssembly>
<assemblyIdentity type="win32" name="Microsoft.VC90.MFC"
version="9.0.21022.8" processorArchitecture="x86"
publicKeyToken="XXXX"></assemblyIdentity>
</dependentAssembly>
</dependency>
</assembly>"""
if sys.platform=="win32":
from distutils.msvccompiler import get_build_version
if get_build_version()>=8.0:
SKIP_MESSAGE = None
else:
SKIP_MESSAGE = "These tests are only for MSVC8.0 or above"
else:
SKIP_MESSAGE = "These tests are only for win32"
@unittest.skipUnless(SKIP_MESSAGE is None, SKIP_MESSAGE)
class msvc9compilerTestCase(support.TempdirManager,
unittest.TestCase):
def test_no_compiler(self):
# makes sure query_vcvarsall throws
# a DistutilsPlatformError if the compiler
# is not found
from distutils.msvc9compiler import query_vcvarsall
def _find_vcvarsall(version):
|
from distutils import msvc9compiler
old_find_vcvarsall = msvc9compiler.find_vcvarsall
msvc9compiler.find_vcvarsall = _find_vcvarsall
try:
self.assertRaises(DistutilsPlatformError, query_vcvarsall,
'wont find this version')
finally:
msvc9compiler.find_vcvarsall = old_find_vcvarsall
def test_reg_class(self):
from distutils.msvc9compiler import Reg
self.assertRaises(KeyError, Reg.get_value, 'xxx', 'xxx')
# looking for values that should exist in all
# Windows registry versions.
path = r'Control Panel\Desktop'
v = Reg.get_value(path, 'dragfullwindows')
self.assertTrue(v in ('0', '1', '2'))
import winreg
HKCU = winreg.HKEY_CURRENT_USER
keys = Reg.read_keys(HKCU, 'xxxx')
self.assertEqual(keys, None)
keys = Reg.read_keys(HKCU, r'Control Panel')
self.assertTrue('Desktop' in keys)
def test_remove_visual_c_ref(self):
from distutils.msvc9compiler import MSVCCompiler
tempdir = self.mkdtemp()
manifest = os.path.join(tempdir, 'manifest')
f = open(manifest, 'w')
try:
f.write(_MANIFEST)
finally:
f.close()
compiler = MSVCCompiler()
compiler._remove_visual_c_ref(manifest)
# see what we got
f = open(manifest)
try:
# removing trailing spaces
content = '\n'.join([line.rstrip() for line in f.readlines()])
finally:
f.close()
# makes sure the manifest was properly cleaned
self.assertEqual(content, _CLEANED_MANIFEST)
def test_suite():
return unittest.makeSuite(msvc9compilerTestCase)
if __name__ == "__main__":
run_unittest(test_suite())
| return None | identifier_body |
test_survey_integration.py | # coding=utf-8
from datetime import datetime
from euphorie.client import model
from euphorie.client.tests.utils import addAccount
from euphorie.client.tests.utils import addSurvey
from euphorie.content.tests.utils import BASIC_SURVEY
from euphorie.testing import EuphorieIntegrationTestCase
from lxml import html
from plone import api
from Products.Five.browser.metaconfigure import ViewNotCallableError
from time import sleep
from zope.event import notify
from zope.lifecycleevent import ObjectModifiedEvent
class TestSurveyViews(EuphorieIntegrationTestCase):
def | (self):
"""We have some views to display and set the published column
for a survey session
"""
with api.env.adopt_user("admin"):
survey = addSurvey(self.portal, BASIC_SURVEY)
account = addAccount(password="secret")
survey_session = model.SurveySession(
id=123,
title=u"Dummy session",
created=datetime(2012, 4, 22, 23, 5, 12),
modified=datetime(2012, 4, 23, 11, 50, 30),
zodb_path="nl/ict/software-development",
account=account,
company=model.Company(country="nl", employees="1-9", referer="other"),
)
model.Session.add(survey_session)
survey = self.portal.client.nl.ict["software-development"]
session_id = "++session++%d" % survey_session.id
traversed_survey_session = survey.restrictedTraverse(session_id)
with api.env.adopt_user(user=survey_session.account):
with self._get_view(
"publication_date", traversed_survey_session, survey_session
) as view:
# The view is not callable but
# has traversable allowed attributes
self.assertRaises(ViewNotCallableError, view)
# We have some default values that will be changed
# when publishing/unpublishing the session
self.assertEqual(survey_session.last_publisher, None)
self.assertEqual(survey_session.published, None)
self.assertEqual(survey_session.last_modifier, None)
self.assertEqual(survey_session.review_state, "private")
# Calling set_date will result in having this session published
# and the publication time and the publisher will be recorded
# If no referer is set,
# the methods will redirect to the context url
self.assertEqual(
view.set_date(),
"{url}/{session_id}".format(
url=survey.absolute_url(), session_id=session_id
),
)
self.assertEqual(survey_session.last_publisher, survey_session.account)
self.assertIsInstance(survey_session.published, datetime)
self.assertEqual(survey_session.review_state, "published")
old_modified = survey_session.modified
old_published = survey_session.published
old_modifier = survey_session.last_modifier
# Changing the HTTP_REFERER will redirect there
# and calling reset_date will update the published date
view.request.set("HTTP_REFERER", "foo")
# We need to wait at least one second because the datetime
# is stored with that accuracy
sleep(1)
self.assertEqual(view.reset_date(), "foo")
self.assertEqual(survey_session.last_publisher, survey_session.account)
# The publisher and publication dates are set. The modification date
# is not touched.
self.assertEqual(survey_session.modified, old_modified)
self.assertEqual(survey_session.last_modifier, old_modifier)
self.assertTrue(survey_session.published > old_published)
# Calling unset_date will restore the publication info
self.assertEqual(view.unset_date(), "foo")
self.assertEqual(survey_session.last_publisher, None)
self.assertEqual(survey_session.published, None)
self.assertEqual(survey_session.review_state, "private")
# We also have a menu view
with self._get_view(
"publication_menu", traversed_survey_session, survey_session
) as view:
soup = html.fromstring(view())
self.assertListEqual(
["publication_date/set_date#content"],
[
el.attrib["action"].rpartition("@@")[-1]
for el in soup.cssselect("form")
],
)
# We trigger the session to be private
survey_session.published = "foo"
soup = html.fromstring(view())
self.assertListEqual(
[
"publication_date/unset_date#content",
"publication_date/reset_date#content",
],
[
el.attrib["action"].rpartition("@@")[-1]
for el in soup.cssselect("form")
],
)
def test_modify_updates_last_modifier(self):
account = addAccount(password="secret")
survey_session = model.SurveySession(
title=u"Dummy session", account=account, zodb_path=""
)
self.assertEqual(survey_session.modified, None)
self.assertEqual(survey_session.last_modifier, None)
with api.env.adopt_user(user=account):
notify(ObjectModifiedEvent(survey_session))
self.assertIsInstance(survey_session.modified, datetime)
self.assertEqual(survey_session.last_modifier, account)
| test_survey_publication_date_views | identifier_name |
test_survey_integration.py | # coding=utf-8
from datetime import datetime
from euphorie.client import model
from euphorie.client.tests.utils import addAccount
from euphorie.client.tests.utils import addSurvey
from euphorie.content.tests.utils import BASIC_SURVEY
from euphorie.testing import EuphorieIntegrationTestCase
from lxml import html
from plone import api
from Products.Five.browser.metaconfigure import ViewNotCallableError
from time import sleep
from zope.event import notify
from zope.lifecycleevent import ObjectModifiedEvent
class TestSurveyViews(EuphorieIntegrationTestCase):
def test_survey_publication_date_views(self):
|
with api.env.adopt_user(user=survey_session.account):
with self._get_view(
"publication_date", traversed_survey_session, survey_session
) as view:
# The view is not callable but
# has traversable allowed attributes
self.assertRaises(ViewNotCallableError, view)
# We have some default values that will be changed
# when publishing/unpublishing the session
self.assertEqual(survey_session.last_publisher, None)
self.assertEqual(survey_session.published, None)
self.assertEqual(survey_session.last_modifier, None)
self.assertEqual(survey_session.review_state, "private")
# Calling set_date will result in having this session published
# and the publication time and the publisher will be recorded
# If no referer is set,
# the methods will redirect to the context url
self.assertEqual(
view.set_date(),
"{url}/{session_id}".format(
url=survey.absolute_url(), session_id=session_id
),
)
self.assertEqual(survey_session.last_publisher, survey_session.account)
self.assertIsInstance(survey_session.published, datetime)
self.assertEqual(survey_session.review_state, "published")
old_modified = survey_session.modified
old_published = survey_session.published
old_modifier = survey_session.last_modifier
# Changing the HTTP_REFERER will redirect there
# and calling reset_date will update the published date
view.request.set("HTTP_REFERER", "foo")
# We need to wait at least one second because the datetime
# is stored with that accuracy
sleep(1)
self.assertEqual(view.reset_date(), "foo")
self.assertEqual(survey_session.last_publisher, survey_session.account)
# The publisher and publication dates are set. The modification date
# is not touched.
self.assertEqual(survey_session.modified, old_modified)
self.assertEqual(survey_session.last_modifier, old_modifier)
self.assertTrue(survey_session.published > old_published)
# Calling unset_date will restore the publication info
self.assertEqual(view.unset_date(), "foo")
self.assertEqual(survey_session.last_publisher, None)
self.assertEqual(survey_session.published, None)
self.assertEqual(survey_session.review_state, "private")
# We also have a menu view
with self._get_view(
"publication_menu", traversed_survey_session, survey_session
) as view:
soup = html.fromstring(view())
self.assertListEqual(
["publication_date/set_date#content"],
[
el.attrib["action"].rpartition("@@")[-1]
for el in soup.cssselect("form")
],
)
# We trigger the session to be private
survey_session.published = "foo"
soup = html.fromstring(view())
self.assertListEqual(
[
"publication_date/unset_date#content",
"publication_date/reset_date#content",
],
[
el.attrib["action"].rpartition("@@")[-1]
for el in soup.cssselect("form")
],
)
def test_modify_updates_last_modifier(self):
account = addAccount(password="secret")
survey_session = model.SurveySession(
title=u"Dummy session", account=account, zodb_path=""
)
self.assertEqual(survey_session.modified, None)
self.assertEqual(survey_session.last_modifier, None)
with api.env.adopt_user(user=account):
notify(ObjectModifiedEvent(survey_session))
self.assertIsInstance(survey_session.modified, datetime)
self.assertEqual(survey_session.last_modifier, account)
| """We have some views to display and set the published column
for a survey session
"""
with api.env.adopt_user("admin"):
survey = addSurvey(self.portal, BASIC_SURVEY)
account = addAccount(password="secret")
survey_session = model.SurveySession(
id=123,
title=u"Dummy session",
created=datetime(2012, 4, 22, 23, 5, 12),
modified=datetime(2012, 4, 23, 11, 50, 30),
zodb_path="nl/ict/software-development",
account=account,
company=model.Company(country="nl", employees="1-9", referer="other"),
)
model.Session.add(survey_session)
survey = self.portal.client.nl.ict["software-development"]
session_id = "++session++%d" % survey_session.id
traversed_survey_session = survey.restrictedTraverse(session_id) | identifier_body |
test_survey_integration.py | # coding=utf-8
from datetime import datetime
from euphorie.client import model
from euphorie.client.tests.utils import addAccount
from euphorie.client.tests.utils import addSurvey
from euphorie.content.tests.utils import BASIC_SURVEY
from euphorie.testing import EuphorieIntegrationTestCase
from lxml import html
from plone import api
from Products.Five.browser.metaconfigure import ViewNotCallableError
from time import sleep
from zope.event import notify
from zope.lifecycleevent import ObjectModifiedEvent
class TestSurveyViews(EuphorieIntegrationTestCase):
def test_survey_publication_date_views(self):
"""We have some views to display and set the published column
for a survey session
"""
with api.env.adopt_user("admin"):
survey = addSurvey(self.portal, BASIC_SURVEY)
account = addAccount(password="secret")
survey_session = model.SurveySession(
id=123,
title=u"Dummy session",
created=datetime(2012, 4, 22, 23, 5, 12),
modified=datetime(2012, 4, 23, 11, 50, 30),
zodb_path="nl/ict/software-development",
account=account,
company=model.Company(country="nl", employees="1-9", referer="other"),
)
model.Session.add(survey_session)
survey = self.portal.client.nl.ict["software-development"]
session_id = "++session++%d" % survey_session.id
traversed_survey_session = survey.restrictedTraverse(session_id)
with api.env.adopt_user(user=survey_session.account):
with self._get_view(
"publication_date", traversed_survey_session, survey_session
) as view: | self.assertEqual(survey_session.last_publisher, None)
self.assertEqual(survey_session.published, None)
self.assertEqual(survey_session.last_modifier, None)
self.assertEqual(survey_session.review_state, "private")
# Calling set_date will result in having this session published
# and the publication time and the publisher will be recorded
# If no referer is set,
# the methods will redirect to the context url
self.assertEqual(
view.set_date(),
"{url}/{session_id}".format(
url=survey.absolute_url(), session_id=session_id
),
)
self.assertEqual(survey_session.last_publisher, survey_session.account)
self.assertIsInstance(survey_session.published, datetime)
self.assertEqual(survey_session.review_state, "published")
old_modified = survey_session.modified
old_published = survey_session.published
old_modifier = survey_session.last_modifier
# Changing the HTTP_REFERER will redirect there
# and calling reset_date will update the published date
view.request.set("HTTP_REFERER", "foo")
# We need to wait at least one second because the datetime
# is stored with that accuracy
sleep(1)
self.assertEqual(view.reset_date(), "foo")
self.assertEqual(survey_session.last_publisher, survey_session.account)
# The publisher and publication dates are set. The modification date
# is not touched.
self.assertEqual(survey_session.modified, old_modified)
self.assertEqual(survey_session.last_modifier, old_modifier)
self.assertTrue(survey_session.published > old_published)
# Calling unset_date will restore the publication info
self.assertEqual(view.unset_date(), "foo")
self.assertEqual(survey_session.last_publisher, None)
self.assertEqual(survey_session.published, None)
self.assertEqual(survey_session.review_state, "private")
# We also have a menu view
with self._get_view(
"publication_menu", traversed_survey_session, survey_session
) as view:
soup = html.fromstring(view())
self.assertListEqual(
["publication_date/set_date#content"],
[
el.attrib["action"].rpartition("@@")[-1]
for el in soup.cssselect("form")
],
)
# We trigger the session to be private
survey_session.published = "foo"
soup = html.fromstring(view())
self.assertListEqual(
[
"publication_date/unset_date#content",
"publication_date/reset_date#content",
],
[
el.attrib["action"].rpartition("@@")[-1]
for el in soup.cssselect("form")
],
)
def test_modify_updates_last_modifier(self):
account = addAccount(password="secret")
survey_session = model.SurveySession(
title=u"Dummy session", account=account, zodb_path=""
)
self.assertEqual(survey_session.modified, None)
self.assertEqual(survey_session.last_modifier, None)
with api.env.adopt_user(user=account):
notify(ObjectModifiedEvent(survey_session))
self.assertIsInstance(survey_session.modified, datetime)
self.assertEqual(survey_session.last_modifier, account) | # The view is not callable but
# has traversable allowed attributes
self.assertRaises(ViewNotCallableError, view)
# We have some default values that will be changed
# when publishing/unpublishing the session | random_line_split |
cd_hit.py | Path=True),
# sequence identity threshold, default 0.9
# this is the default cd-hit's "global sequence identity" calc'd as :
# number of identical amino acids in alignment
# divided by the full length of the shorter sequence
'-c':ValuedParameter('-',Name='c',Delimiter=' '),
# use global sequence identity, default 1
# if set to 0, then use local sequence identity, calculated as :
# number of identical amino acids in alignment
# divided by the length of the alignment
# NOTE!!! don't use -G 0 unless you use alignment coverage controls
# see options -aL, -AL, -aS, -AS
'-G':ValuedParameter('-',Name='G',Delimiter=' '),
# band_width of alignment, default 20
'-b':ValuedParameter('-',Name='b',Delimiter=' '),
# max available memory (Mbyte), default 400
'-M':ValuedParameter('-',Name='M',Delimiter=' '),
# word_length, default 8, see user's guide for choosing it
'-n':ValuedParameter('-',Name='n',Delimiter=' '),
# length of throw_away_sequences, default 10
'-l':ValuedParameter('-',Name='l',Delimiter=' '),
# tolerance for redundance, default 2
'-t':ValuedParameter('-',Name='t',Delimiter=' '),
# length of description in .clstr file, default 20
# if set to 0, it takes the fasta defline and stops at first space
'-d':ValuedParameter('-',Name='d',Delimiter=' '),
# length difference cutoff, default 0.0
# if set to 0.9, the shorter sequences need to be
# at least 90% length of the representative of the cluster
'-s':ValuedParameter('-',Name='s',Delimiter=' '),
# length difference cutoff in amino acid, default 999999
# if set to 60, the length difference between the shorter sequences
# and the representative of the cluster can not be bigger than 60
'-S':ValuedParameter('-',Name='S',Delimiter=' '),
# alignment coverage for the longer sequence, default 0.0
# if set to 0.9, the alignment must covers 90% of the sequence
'-aL':ValuedParameter('-',Name='aL',Delimiter=' '),
# alignment coverage control for the longer sequence, default 99999999
# if set to 60, and the length of the sequence is 400,
# then the alignment must be >= 340 (400-60) residues
'-AL':ValuedParameter('-',Name='AL',Delimiter=' '),
# alignment coverage for the shorter sequence, default 0.0
# if set to 0.9, the alignment must covers 90% of the sequence
'-aS':ValuedParameter('-',Name='aS',Delimiter=' '),
# alignment coverage control for the shorter sequence, default 99999999
# if set to 60, and the length of the sequence is 400,
# then the alignment must be >= 340 (400-60) residues
'-AS':ValuedParameter('-',Name='AS',Delimiter=' '),
# 1 or 0, default 0, by default, sequences are stored in RAM
# if set to 1, sequence are stored on hard drive
# it is recommended to use -B 1 for huge databases
'-B':ValuedParameter('-',Name='B',Delimiter=' '),
# 1 or 0, default 0
# if set to 1, print alignment overlap in .clstr file
'-p':ValuedParameter('-',Name='p',Delimiter=' '),
# 1 or 0, default 0
# by cd-hit's default algorithm, a sequence is clustered to the first
# cluster that meet the threshold (fast cluster). If set to 1, the program
# will cluster it into the most similar cluster that meet the threshold
# (accurate but slow mode)
# but either 1 or 0 won't change the representatives of final clusters
'-g':ValuedParameter('-',Name='g',Delimiter=' '),
# print this help
'-h':ValuedParameter('-',Name='h',Delimiter=' ')
}
_synonyms = {'Similarity':'-c'}
def getHelp(self):
|
def _input_as_multiline_string(self, data):
"""Writes data to tempfile and sets -i parameter
data -- list of lines
"""
if data:
self.Parameters['-i']\
.on(super(CD_HIT,self)._input_as_multiline_string(data))
return ''
def _input_as_lines(self, data):
"""Writes data to tempfile and sets -i parameter
data -- list of lines, ready to be written to file
"""
if data:
self.Parameters['-i']\
.on(super(CD_HIT,self)._input_as_lines(data))
return ''
def _input_as_seqs(self, data):
"""Creates a list of seqs to pass to _input_as_lines
data -- list like object of sequences
"""
lines = []
for i,s in enumerate(data):
# will number the sequences 1,2,3, etc...
lines.append(''.join(['>',str(i+1)]))
lines.append(s)
return self._input_as_lines(lines)
def _input_as_string(self, data):
"""Makes data the value of a specific parameter"""
if data:
self.Parameters['-i'].on(str(data))
return ''
def _get_seqs_outfile(self):
"""Returns the absolute path to the seqs outfile"""
if self.Parameters['-o'].isOn():
return self.Parameters['-o'].Value
else:
raise ValueError, "No output file specified"
def _get_clstr_outfile(self):
"""Returns the absolute path to the clstr outfile"""
if self.Parameters['-o'].isOn():
return ''.join([self.Parameters['-o'].Value, '.clstr'])
else:
raise ValueError, "No output file specified"
def _get_result_paths(self, data):
"""Return dict of {key: ResultPath}"""
result = {}
result['FASTA'] = ResultPath(Path=self._get_seqs_outfile())
result['CLSTR'] = ResultPath(Path=self._get_clstr_outfile())
return result
class CD_HIT_EST(CD_HIT):
"""cd-hit Application Controller
Use this version of CD-HIT if your MolType is DNA or RNA
"""
_command = 'cd-hit-est'
_input_handler = '_input_as_multiline_string'
_parameters = CD_HIT._parameters
_parameters.update({\
# 1 or 0, default 0, by default only +/+ strand alignment
# if set to 1, do both +/+ & +/- alignments
'-r':ValuedParameter('-',Name='r',Delimiter=' ')
})
def cdhit_clusters_from_seqs(seqs, moltype, params=None):
"""Returns the CD-HIT clusters given seqs
seqs : dict like collection of sequences
moltype : cogent.core.moltype object
params : cd-hit parameters
NOTE: This method will call CD_HIT if moltype is PROTEIN,
CD_HIT_EST if moltype is RNA/DNA, and raise if any other
moltype is passed.
"""
# keys are not remapped. Tested against seq_ids of 100char length
seqs = SequenceCollection(seqs, MolType=moltype)
#Create mapping between abbreviated IDs and full IDs
int_map, int_keys = seqs.getIntMap()
#Create SequenceCollection from int_map.
int_map = SequenceCollection(int_map,MolType=moltype)
# setup params and make sure the output argument is set
if params is None:
params = {}
if '-o' not in params:
params['-o'] = get_tmp_filename()
# call the correct version of cd-hit based on moltype
working_dir = get_tmp_filename()
if moltype is PROTEIN:
app = CD_HIT(WorkingDir=working_dir, params=params)
| """Method that points to documentation"""
help_str =\
"""
CD-HIT is hosted as an open source project at:
http://www.bioinformatics.org/cd-hit/
The following papers should be cited if this resource is used:
"Clustering of highly homologous sequences to reduce the size of large
protein databases", Weizhong Li, Lukasz Jaroszewski & Adam Godzik
Bioinformatics, (2001) 17:282-283
"Tolerating some redundancy significantly speeds up clustering of large
protein databases", Weizhong Li, Lukasz Jaroszewski & Adam Godzik
Bioinformatics, (2002) 18:77-82
"""
return help_str | identifier_body |
cd_hit.py | Path=True),
# sequence identity threshold, default 0.9
# this is the default cd-hit's "global sequence identity" calc'd as :
# number of identical amino acids in alignment
# divided by the full length of the shorter sequence
'-c':ValuedParameter('-',Name='c',Delimiter=' '),
# use global sequence identity, default 1
# if set to 0, then use local sequence identity, calculated as :
# number of identical amino acids in alignment
# divided by the length of the alignment
# NOTE!!! don't use -G 0 unless you use alignment coverage controls
# see options -aL, -AL, -aS, -AS
'-G':ValuedParameter('-',Name='G',Delimiter=' '),
# band_width of alignment, default 20
'-b':ValuedParameter('-',Name='b',Delimiter=' '),
# max available memory (Mbyte), default 400
'-M':ValuedParameter('-',Name='M',Delimiter=' '),
# word_length, default 8, see user's guide for choosing it
'-n':ValuedParameter('-',Name='n',Delimiter=' '),
# length of throw_away_sequences, default 10
'-l':ValuedParameter('-',Name='l',Delimiter=' '),
# tolerance for redundance, default 2
'-t':ValuedParameter('-',Name='t',Delimiter=' '),
# length of description in .clstr file, default 20
# if set to 0, it takes the fasta defline and stops at first space
'-d':ValuedParameter('-',Name='d',Delimiter=' '),
# length difference cutoff, default 0.0
# if set to 0.9, the shorter sequences need to be
# at least 90% length of the representative of the cluster
'-s':ValuedParameter('-',Name='s',Delimiter=' '),
# length difference cutoff in amino acid, default 999999
# if set to 60, the length difference between the shorter sequences
# and the representative of the cluster can not be bigger than 60
'-S':ValuedParameter('-',Name='S',Delimiter=' '),
# alignment coverage for the longer sequence, default 0.0
# if set to 0.9, the alignment must covers 90% of the sequence
'-aL':ValuedParameter('-',Name='aL',Delimiter=' '),
# alignment coverage control for the longer sequence, default 99999999
# if set to 60, and the length of the sequence is 400,
# then the alignment must be >= 340 (400-60) residues
'-AL':ValuedParameter('-',Name='AL',Delimiter=' '),
# alignment coverage for the shorter sequence, default 0.0
# if set to 0.9, the alignment must covers 90% of the sequence
'-aS':ValuedParameter('-',Name='aS',Delimiter=' '),
# alignment coverage control for the shorter sequence, default 99999999
# if set to 60, and the length of the sequence is 400,
# then the alignment must be >= 340 (400-60) residues
'-AS':ValuedParameter('-',Name='AS',Delimiter=' '),
# 1 or 0, default 0, by default, sequences are stored in RAM
# if set to 1, sequence are stored on hard drive
# it is recommended to use -B 1 for huge databases
'-B':ValuedParameter('-',Name='B',Delimiter=' '),
# 1 or 0, default 0
# if set to 1, print alignment overlap in .clstr file
'-p':ValuedParameter('-',Name='p',Delimiter=' '),
# 1 or 0, default 0
# by cd-hit's default algorithm, a sequence is clustered to the first
# cluster that meet the threshold (fast cluster). If set to 1, the program
# will cluster it into the most similar cluster that meet the threshold
# (accurate but slow mode)
# but either 1 or 0 won't change the representatives of final clusters
'-g':ValuedParameter('-',Name='g',Delimiter=' '),
# print this help
'-h':ValuedParameter('-',Name='h',Delimiter=' ')
}
_synonyms = {'Similarity':'-c'}
def getHelp(self):
"""Method that points to documentation"""
help_str =\
"""
CD-HIT is hosted as an open source project at:
http://www.bioinformatics.org/cd-hit/
The following papers should be cited if this resource is used:
"Clustering of highly homologous sequences to reduce the size of large
protein databases", Weizhong Li, Lukasz Jaroszewski & Adam Godzik
Bioinformatics, (2001) 17:282-283
"Tolerating some redundancy significantly speeds up clustering of large
protein databases", Weizhong Li, Lukasz Jaroszewski & Adam Godzik
Bioinformatics, (2002) 18:77-82
"""
return help_str
def _input_as_multiline_string(self, data):
"""Writes data to tempfile and sets -i parameter
data -- list of lines
"""
if data:
self.Parameters['-i']\
.on(super(CD_HIT,self)._input_as_multiline_string(data))
return ''
def _input_as_lines(self, data):
"""Writes data to tempfile and sets -i parameter
data -- list of lines, ready to be written to file
"""
if data:
self.Parameters['-i']\
.on(super(CD_HIT,self)._input_as_lines(data))
return ''
def _input_as_seqs(self, data):
"""Creates a list of seqs to pass to _input_as_lines
data -- list like object of sequences
"""
lines = []
for i,s in enumerate(data):
# will number the sequences 1,2,3, etc...
lines.append(''.join(['>',str(i+1)]))
lines.append(s)
return self._input_as_lines(lines)
def _input_as_string(self, data):
"""Makes data the value of a specific parameter"""
if data:
self.Parameters['-i'].on(str(data))
return ''
def _get_seqs_outfile(self):
"""Returns the absolute path to the seqs outfile"""
if self.Parameters['-o'].isOn():
return self.Parameters['-o'].Value
else:
raise ValueError, "No output file specified"
def _get_clstr_outfile(self):
"""Returns the absolute path to the clstr outfile"""
if self.Parameters['-o'].isOn():
return ''.join([self.Parameters['-o'].Value, '.clstr'])
else:
raise ValueError, "No output file specified"
def _get_result_paths(self, data):
"""Return dict of {key: ResultPath}"""
result = {}
result['FASTA'] = ResultPath(Path=self._get_seqs_outfile())
result['CLSTR'] = ResultPath(Path=self._get_clstr_outfile())
return result
class CD_HIT_EST(CD_HIT):
"""cd-hit Application Controller
Use this version of CD-HIT if your MolType is DNA or RNA
"""
_command = 'cd-hit-est'
_input_handler = '_input_as_multiline_string'
_parameters = CD_HIT._parameters
_parameters.update({\
# 1 or 0, default 0, by default only +/+ strand alignment
# if set to 1, do both +/+ & +/- alignments
'-r':ValuedParameter('-',Name='r',Delimiter=' ')
})
def cdhit_clusters_from_seqs(seqs, moltype, params=None):
"""Returns the CD-HIT clusters given seqs
seqs : dict like collection of sequences
moltype : cogent.core.moltype object
params : cd-hit parameters
NOTE: This method will call CD_HIT if moltype is PROTEIN,
CD_HIT_EST if moltype is RNA/DNA, and raise if any other
moltype is passed.
"""
# keys are not remapped. Tested against seq_ids of 100char length
seqs = SequenceCollection(seqs, MolType=moltype)
#Create mapping between abbreviated IDs and full IDs
int_map, int_keys = seqs.getIntMap()
#Create SequenceCollection from int_map.
int_map = SequenceCollection(int_map,MolType=moltype)
# setup params and make sure the output argument is set | # call the correct version of cd-hit based on moltype
working_dir = get_tmp_filename()
if moltype is PROTEIN:
app = CD_HIT(WorkingDir=working_dir, params=params)
elif | if params is None:
params = {}
if '-o' not in params:
params['-o'] = get_tmp_filename()
| random_line_split |
cd_hit.py | Path=True),
# sequence identity threshold, default 0.9
# this is the default cd-hit's "global sequence identity" calc'd as :
# number of identical amino acids in alignment
# divided by the full length of the shorter sequence
'-c':ValuedParameter('-',Name='c',Delimiter=' '),
# use global sequence identity, default 1
# if set to 0, then use local sequence identity, calculated as :
# number of identical amino acids in alignment
# divided by the length of the alignment
# NOTE!!! don't use -G 0 unless you use alignment coverage controls
# see options -aL, -AL, -aS, -AS
'-G':ValuedParameter('-',Name='G',Delimiter=' '),
# band_width of alignment, default 20
'-b':ValuedParameter('-',Name='b',Delimiter=' '),
# max available memory (Mbyte), default 400
'-M':ValuedParameter('-',Name='M',Delimiter=' '),
# word_length, default 8, see user's guide for choosing it
'-n':ValuedParameter('-',Name='n',Delimiter=' '),
# length of throw_away_sequences, default 10
'-l':ValuedParameter('-',Name='l',Delimiter=' '),
# tolerance for redundance, default 2
'-t':ValuedParameter('-',Name='t',Delimiter=' '),
# length of description in .clstr file, default 20
# if set to 0, it takes the fasta defline and stops at first space
'-d':ValuedParameter('-',Name='d',Delimiter=' '),
# length difference cutoff, default 0.0
# if set to 0.9, the shorter sequences need to be
# at least 90% length of the representative of the cluster
'-s':ValuedParameter('-',Name='s',Delimiter=' '),
# length difference cutoff in amino acid, default 999999
# if set to 60, the length difference between the shorter sequences
# and the representative of the cluster can not be bigger than 60
'-S':ValuedParameter('-',Name='S',Delimiter=' '),
# alignment coverage for the longer sequence, default 0.0
# if set to 0.9, the alignment must covers 90% of the sequence
'-aL':ValuedParameter('-',Name='aL',Delimiter=' '),
# alignment coverage control for the longer sequence, default 99999999
# if set to 60, and the length of the sequence is 400,
# then the alignment must be >= 340 (400-60) residues
'-AL':ValuedParameter('-',Name='AL',Delimiter=' '),
# alignment coverage for the shorter sequence, default 0.0
# if set to 0.9, the alignment must covers 90% of the sequence
'-aS':ValuedParameter('-',Name='aS',Delimiter=' '),
# alignment coverage control for the shorter sequence, default 99999999
# if set to 60, and the length of the sequence is 400,
# then the alignment must be >= 340 (400-60) residues
'-AS':ValuedParameter('-',Name='AS',Delimiter=' '),
# 1 or 0, default 0, by default, sequences are stored in RAM
# if set to 1, sequence are stored on hard drive
# it is recommended to use -B 1 for huge databases
'-B':ValuedParameter('-',Name='B',Delimiter=' '),
# 1 or 0, default 0
# if set to 1, print alignment overlap in .clstr file
'-p':ValuedParameter('-',Name='p',Delimiter=' '),
# 1 or 0, default 0
# by cd-hit's default algorithm, a sequence is clustered to the first
# cluster that meet the threshold (fast cluster). If set to 1, the program
# will cluster it into the most similar cluster that meet the threshold
# (accurate but slow mode)
# but either 1 or 0 won't change the representatives of final clusters
'-g':ValuedParameter('-',Name='g',Delimiter=' '),
# print this help
'-h':ValuedParameter('-',Name='h',Delimiter=' ')
}
_synonyms = {'Similarity':'-c'}
def getHelp(self):
"""Method that points to documentation"""
help_str =\
"""
CD-HIT is hosted as an open source project at:
http://www.bioinformatics.org/cd-hit/
The following papers should be cited if this resource is used:
"Clustering of highly homologous sequences to reduce the size of large
protein databases", Weizhong Li, Lukasz Jaroszewski & Adam Godzik
Bioinformatics, (2001) 17:282-283
"Tolerating some redundancy significantly speeds up clustering of large
protein databases", Weizhong Li, Lukasz Jaroszewski & Adam Godzik
Bioinformatics, (2002) 18:77-82
"""
return help_str
def _input_as_multiline_string(self, data):
"""Writes data to tempfile and sets -i parameter
data -- list of lines
"""
if data:
self.Parameters['-i']\
.on(super(CD_HIT,self)._input_as_multiline_string(data))
return ''
def _input_as_lines(self, data):
"""Writes data to tempfile and sets -i parameter
data -- list of lines, ready to be written to file
"""
if data:
self.Parameters['-i']\
.on(super(CD_HIT,self)._input_as_lines(data))
return ''
def _input_as_seqs(self, data):
"""Creates a list of seqs to pass to _input_as_lines
data -- list like object of sequences
"""
lines = []
for i,s in enumerate(data):
# will number the sequences 1,2,3, etc...
lines.append(''.join(['>',str(i+1)]))
lines.append(s)
return self._input_as_lines(lines)
def | (self, data):
"""Makes data the value of a specific parameter"""
if data:
self.Parameters['-i'].on(str(data))
return ''
def _get_seqs_outfile(self):
"""Returns the absolute path to the seqs outfile"""
if self.Parameters['-o'].isOn():
return self.Parameters['-o'].Value
else:
raise ValueError, "No output file specified"
def _get_clstr_outfile(self):
"""Returns the absolute path to the clstr outfile"""
if self.Parameters['-o'].isOn():
return ''.join([self.Parameters['-o'].Value, '.clstr'])
else:
raise ValueError, "No output file specified"
def _get_result_paths(self, data):
"""Return dict of {key: ResultPath}"""
result = {}
result['FASTA'] = ResultPath(Path=self._get_seqs_outfile())
result['CLSTR'] = ResultPath(Path=self._get_clstr_outfile())
return result
class CD_HIT_EST(CD_HIT):
"""cd-hit Application Controller
Use this version of CD-HIT if your MolType is DNA or RNA
"""
_command = 'cd-hit-est'
_input_handler = '_input_as_multiline_string'
_parameters = CD_HIT._parameters
_parameters.update({\
# 1 or 0, default 0, by default only +/+ strand alignment
# if set to 1, do both +/+ & +/- alignments
'-r':ValuedParameter('-',Name='r',Delimiter=' ')
})
def cdhit_clusters_from_seqs(seqs, moltype, params=None):
"""Returns the CD-HIT clusters given seqs
seqs : dict like collection of sequences
moltype : cogent.core.moltype object
params : cd-hit parameters
NOTE: This method will call CD_HIT if moltype is PROTEIN,
CD_HIT_EST if moltype is RNA/DNA, and raise if any other
moltype is passed.
"""
# keys are not remapped. Tested against seq_ids of 100char length
seqs = SequenceCollection(seqs, MolType=moltype)
#Create mapping between abbreviated IDs and full IDs
int_map, int_keys = seqs.getIntMap()
#Create SequenceCollection from int_map.
int_map = SequenceCollection(int_map,MolType=moltype)
# setup params and make sure the output argument is set
if params is None:
params = {}
if '-o' not in params:
params['-o'] = get_tmp_filename()
# call the correct version of cd-hit based on moltype
working_dir = get_tmp_filename()
if moltype is PROTEIN:
app = CD_HIT(WorkingDir=working_dir, params=params)
| _input_as_string | identifier_name |
cd_hit.py | =True),
# sequence identity threshold, default 0.9
# this is the default cd-hit's "global sequence identity" calc'd as :
# number of identical amino acids in alignment
# divided by the full length of the shorter sequence
'-c':ValuedParameter('-',Name='c',Delimiter=' '),
# use global sequence identity, default 1
# if set to 0, then use local sequence identity, calculated as :
# number of identical amino acids in alignment
# divided by the length of the alignment
# NOTE!!! don't use -G 0 unless you use alignment coverage controls
# see options -aL, -AL, -aS, -AS
'-G':ValuedParameter('-',Name='G',Delimiter=' '),
# band_width of alignment, default 20
'-b':ValuedParameter('-',Name='b',Delimiter=' '),
# max available memory (Mbyte), default 400
'-M':ValuedParameter('-',Name='M',Delimiter=' '),
# word_length, default 8, see user's guide for choosing it
'-n':ValuedParameter('-',Name='n',Delimiter=' '),
# length of throw_away_sequences, default 10
'-l':ValuedParameter('-',Name='l',Delimiter=' '),
# tolerance for redundance, default 2
'-t':ValuedParameter('-',Name='t',Delimiter=' '),
# length of description in .clstr file, default 20
# if set to 0, it takes the fasta defline and stops at first space
'-d':ValuedParameter('-',Name='d',Delimiter=' '),
# length difference cutoff, default 0.0
# if set to 0.9, the shorter sequences need to be
# at least 90% length of the representative of the cluster
'-s':ValuedParameter('-',Name='s',Delimiter=' '),
# length difference cutoff in amino acid, default 999999
# if set to 60, the length difference between the shorter sequences
# and the representative of the cluster can not be bigger than 60
'-S':ValuedParameter('-',Name='S',Delimiter=' '),
# alignment coverage for the longer sequence, default 0.0
# if set to 0.9, the alignment must covers 90% of the sequence
'-aL':ValuedParameter('-',Name='aL',Delimiter=' '),
# alignment coverage control for the longer sequence, default 99999999
# if set to 60, and the length of the sequence is 400,
# then the alignment must be >= 340 (400-60) residues
'-AL':ValuedParameter('-',Name='AL',Delimiter=' '),
# alignment coverage for the shorter sequence, default 0.0
# if set to 0.9, the alignment must covers 90% of the sequence
'-aS':ValuedParameter('-',Name='aS',Delimiter=' '),
# alignment coverage control for the shorter sequence, default 99999999
# if set to 60, and the length of the sequence is 400,
# then the alignment must be >= 340 (400-60) residues
'-AS':ValuedParameter('-',Name='AS',Delimiter=' '),
# 1 or 0, default 0, by default, sequences are stored in RAM
# if set to 1, sequence are stored on hard drive
# it is recommended to use -B 1 for huge databases
'-B':ValuedParameter('-',Name='B',Delimiter=' '),
# 1 or 0, default 0
# if set to 1, print alignment overlap in .clstr file
'-p':ValuedParameter('-',Name='p',Delimiter=' '),
# 1 or 0, default 0
# by cd-hit's default algorithm, a sequence is clustered to the first
# cluster that meet the threshold (fast cluster). If set to 1, the program
# will cluster it into the most similar cluster that meet the threshold
# (accurate but slow mode)
# but either 1 or 0 won't change the representatives of final clusters
'-g':ValuedParameter('-',Name='g',Delimiter=' '),
# print this help
'-h':ValuedParameter('-',Name='h',Delimiter=' ')
}
_synonyms = {'Similarity':'-c'}
def getHelp(self):
"""Method that points to documentation"""
help_str =\
"""
CD-HIT is hosted as an open source project at:
http://www.bioinformatics.org/cd-hit/
The following papers should be cited if this resource is used:
"Clustering of highly homologous sequences to reduce the size of large
protein databases", Weizhong Li, Lukasz Jaroszewski & Adam Godzik
Bioinformatics, (2001) 17:282-283
"Tolerating some redundancy significantly speeds up clustering of large
protein databases", Weizhong Li, Lukasz Jaroszewski & Adam Godzik
Bioinformatics, (2002) 18:77-82
"""
return help_str
def _input_as_multiline_string(self, data):
"""Writes data to tempfile and sets -i parameter
data -- list of lines
"""
if data:
self.Parameters['-i']\
.on(super(CD_HIT,self)._input_as_multiline_string(data))
return ''
def _input_as_lines(self, data):
"""Writes data to tempfile and sets -i parameter
data -- list of lines, ready to be written to file
"""
if data:
self.Parameters['-i']\
.on(super(CD_HIT,self)._input_as_lines(data))
return ''
def _input_as_seqs(self, data):
"""Creates a list of seqs to pass to _input_as_lines
data -- list like object of sequences
"""
lines = []
for i,s in enumerate(data):
# will number the sequences 1,2,3, etc...
lines.append(''.join(['>',str(i+1)]))
lines.append(s)
return self._input_as_lines(lines)
def _input_as_string(self, data):
"""Makes data the value of a specific parameter"""
if data:
self.Parameters['-i'].on(str(data))
return ''
def _get_seqs_outfile(self):
"""Returns the absolute path to the seqs outfile"""
if self.Parameters['-o'].isOn():
return self.Parameters['-o'].Value
else:
raise ValueError, "No output file specified"
def _get_clstr_outfile(self):
"""Returns the absolute path to the clstr outfile"""
if self.Parameters['-o'].isOn():
return ''.join([self.Parameters['-o'].Value, '.clstr'])
else:
|
def _get_result_paths(self, data):
"""Return dict of {key: ResultPath}"""
result = {}
result['FASTA'] = ResultPath(Path=self._get_seqs_outfile())
result['CLSTR'] = ResultPath(Path=self._get_clstr_outfile())
return result
class CD_HIT_EST(CD_HIT):
"""cd-hit Application Controller
Use this version of CD-HIT if your MolType is DNA or RNA
"""
_command = 'cd-hit-est'
_input_handler = '_input_as_multiline_string'
_parameters = CD_HIT._parameters
_parameters.update({\
# 1 or 0, default 0, by default only +/+ strand alignment
# if set to 1, do both +/+ & +/- alignments
'-r':ValuedParameter('-',Name='r',Delimiter=' ')
})
def cdhit_clusters_from_seqs(seqs, moltype, params=None):
"""Returns the CD-HIT clusters given seqs
seqs : dict like collection of sequences
moltype : cogent.core.moltype object
params : cd-hit parameters
NOTE: This method will call CD_HIT if moltype is PROTEIN,
CD_HIT_EST if moltype is RNA/DNA, and raise if any other
moltype is passed.
"""
# keys are not remapped. Tested against seq_ids of 100char length
seqs = SequenceCollection(seqs, MolType=moltype)
#Create mapping between abbreviated IDs and full IDs
int_map, int_keys = seqs.getIntMap()
#Create SequenceCollection from int_map.
int_map = SequenceCollection(int_map,MolType=moltype)
# setup params and make sure the output argument is set
if params is None:
params = {}
if '-o' not in params:
params['-o'] = get_tmp_filename()
# call the correct version of cd-hit based on moltype
working_dir = get_tmp_filename()
if moltype is PROTEIN:
app = CD_HIT(WorkingDir=working_dir, params=params)
| raise ValueError, "No output file specified" | conditional_block |
multiple-identity-card-addition-model.service.ts | import {Injectable} from '@angular/core';
import {DatabaseColumn, MultipleAdditionModel} from '../multiple-addition.model';
import {SnotifyService} from 'ng-snotify';
import {IdentityCard} from '../../interfaces/server/identity-card.interface';
import {IdentityCardService} from '../../core/identity-card.service';
import {AddIdentityCardsResponse} from '../../interfaces/server/add-identity-cards-response.interface';
import {HotTableRegisterer} from '@handsontable/angular';
import {createToBarcodeConverter} from "../../util/converters";
import {identityCardBarcodeValidator} from "../../util/validators";
/**
* A model class used for the insertion of multiple identity cards from a table file into the database
*/
@Injectable()
export class MultipleIdentityCardAdditionModelService extends MultipleAdditionModel<IdentityCard> {
/**
* An array containing all database columns used for identity cards
*/
public readonly allDatabaseHeaders: Array<DatabaseColumn<IdentityCard>> = [
{
title: 'Barcode',
required: true,
multiple: false,
convert(value: string, entity: IdentityCard) {
entity.barcode = value;
}
},
{
title: 'Unbenutzt',
required: false,
multiple: true,
convert(value: string, entity: IdentityCard) {
// do nothing
}
}];
/**
* An array containing all handsontable columns used for identity cards
*/
public readonly columns: Array<any> = [
{
data: 'barcode',
type: 'text',
validator: identityCardBarcodeValidator
}
];
/**
* An array containing all column names/labels for identity cards in the same order as in `columns`
*/
public readonly columnHeaders: Array<string> = [
'Barcode'
];
/**
* The content menu settings for the handsontable in the confirmation step
*/
public readonly contextMenuItems: object = {
items: {
'row_above': {},
'row_below': {},
'hsep1': '---------',
'remove_row': {},
'hsep2': '---------',
'converter1': {
name: 'Zu Ausweis-Barcodes',
callback: createToBarcodeConverter("33", () => this.hotRegisterer.getInstance('confirmation-hot-table'), () => this.items)
}
}
};
/**
* Constructor
*
* @param identityCardService A service used to query the server for identity card relevant information
* @param hotRegisterer A service used to interact with handsontable instances
* @param snotifyService A service used to work with snotify
*/
| (private identityCardService: IdentityCardService,
private hotRegisterer: HotTableRegisterer,
private snotifyService: SnotifyService) {
super();
}
/**
* Verifies the given identity cards with the server.
* Afterwards the handsontable will be rerendered
*
* @param identityCards A list of identity cards to be verified
*/
public verifyItems(identityCards: Array<IdentityCard>): void {
this.identityCardService.verifyIdentityCards(identityCards, result => {
const hot = this.hotRegisterer.getInstance('confirmation-hot-table');
if (!result) {
this.verificationResult = {
verified: false
};
this.snotifyService.error('Es ist ein unerwarteter Fehler aufgetreten', {timeout: 0});
} else {
this.verificationResult = {
verified: result.valid,
badBarcodes: [],
duplicateBarcodes: []
};
if (result.alreadyExistingBarcodes && result.alreadyExistingBarcodes.length > 0) {
this.snotifyService.warning(
`Bei ${result.alreadyExistingBarcodes.length} Einträgen existiert der Barcode bereits`,
{timeout: 0}
);
this.verificationResult.badBarcodes.push(...result.alreadyExistingBarcodes);
}
if (result.duplicateBarcodes && result.duplicateBarcodes.length > 0) {
this.snotifyService.warning(
`${result.duplicateBarcodes.length} Einträgen haben einen mehrfach existierenden Barcode`,
{timeout: 0}
);
this.verificationResult.duplicateBarcodes.push(...result.duplicateBarcodes);
}
if (result.valid) {
this.snotifyService.success('Alle Einträge sind valide', {timeout: 0});
}
}
hot.render();
});
}
/**
* Inserts the given list of identity cards in the database
*
* @param items The identity cards to be inserted
*/
public insertItems(items: Array<IdentityCard>): void {
this.insertionResult = null;
this.identityCardService.addIdentityCards(items, (response: AddIdentityCardsResponse) => {
if (response.success) {
this.insertionResult = {
success: true,
message: `${this.items.length} Ausweise wurden erfolgreich der Datenbank hinzufügt!`
};
} else {
this.insertionResult = {
success: false,
message: 'Es ist ein Fehler beim Hinzufügen der Ausweise aufgetreten!'
};
}
});
}
}
| constructor | identifier_name |
multiple-identity-card-addition-model.service.ts | import {Injectable} from '@angular/core';
import {DatabaseColumn, MultipleAdditionModel} from '../multiple-addition.model';
import {SnotifyService} from 'ng-snotify';
import {IdentityCard} from '../../interfaces/server/identity-card.interface';
import {IdentityCardService} from '../../core/identity-card.service';
import {AddIdentityCardsResponse} from '../../interfaces/server/add-identity-cards-response.interface';
import {HotTableRegisterer} from '@handsontable/angular';
import {createToBarcodeConverter} from "../../util/converters";
import {identityCardBarcodeValidator} from "../../util/validators";
/**
* A model class used for the insertion of multiple identity cards from a table file into the database
*/
@Injectable()
export class MultipleIdentityCardAdditionModelService extends MultipleAdditionModel<IdentityCard> {
/**
* An array containing all database columns used for identity cards
*/
public readonly allDatabaseHeaders: Array<DatabaseColumn<IdentityCard>> = [
{
title: 'Barcode',
required: true,
multiple: false,
convert(value: string, entity: IdentityCard) {
entity.barcode = value;
}
},
{
title: 'Unbenutzt',
required: false,
multiple: true,
convert(value: string, entity: IdentityCard) {
// do nothing
}
}];
/**
* An array containing all handsontable columns used for identity cards
*/
public readonly columns: Array<any> = [
{
data: 'barcode',
type: 'text',
validator: identityCardBarcodeValidator
}
];
/**
* An array containing all column names/labels for identity cards in the same order as in `columns`
*/
public readonly columnHeaders: Array<string> = [
'Barcode'
];
/**
* The content menu settings for the handsontable in the confirmation step
*/
public readonly contextMenuItems: object = {
items: {
'row_above': {},
'row_below': {},
'hsep1': '---------',
'remove_row': {},
'hsep2': '---------',
'converter1': {
name: 'Zu Ausweis-Barcodes',
callback: createToBarcodeConverter("33", () => this.hotRegisterer.getInstance('confirmation-hot-table'), () => this.items)
}
}
};
/**
* Constructor
*
* @param identityCardService A service used to query the server for identity card relevant information
* @param hotRegisterer A service used to interact with handsontable instances
* @param snotifyService A service used to work with snotify
*/
constructor(private identityCardService: IdentityCardService,
private hotRegisterer: HotTableRegisterer,
private snotifyService: SnotifyService) {
super();
}
/**
* Verifies the given identity cards with the server.
* Afterwards the handsontable will be rerendered
*
* @param identityCards A list of identity cards to be verified
*/
public verifyItems(identityCards: Array<IdentityCard>): void {
this.identityCardService.verifyIdentityCards(identityCards, result => {
const hot = this.hotRegisterer.getInstance('confirmation-hot-table');
if (!result) {
this.verificationResult = {
verified: false
};
this.snotifyService.error('Es ist ein unerwarteter Fehler aufgetreten', {timeout: 0});
} else {
this.verificationResult = {
verified: result.valid,
badBarcodes: [],
duplicateBarcodes: []
};
if (result.alreadyExistingBarcodes && result.alreadyExistingBarcodes.length > 0) {
this.snotifyService.warning(
`Bei ${result.alreadyExistingBarcodes.length} Einträgen existiert der Barcode bereits`,
{timeout: 0}
);
this.verificationResult.badBarcodes.push(...result.alreadyExistingBarcodes);
}
if (result.duplicateBarcodes && result.duplicateBarcodes.length > 0) { | if (result.valid) {
this.snotifyService.success('Alle Einträge sind valide', {timeout: 0});
}
}
hot.render();
});
}
/**
* Inserts the given list of identity cards in the database
*
* @param items The identity cards to be inserted
*/
public insertItems(items: Array<IdentityCard>): void {
this.insertionResult = null;
this.identityCardService.addIdentityCards(items, (response: AddIdentityCardsResponse) => {
if (response.success) {
this.insertionResult = {
success: true,
message: `${this.items.length} Ausweise wurden erfolgreich der Datenbank hinzufügt!`
};
} else {
this.insertionResult = {
success: false,
message: 'Es ist ein Fehler beim Hinzufügen der Ausweise aufgetreten!'
};
}
});
}
}
|
this.snotifyService.warning(
                        `${result.duplicateBarcodes.length} Einträge haben einen mehrfach existierenden Barcode`,
{timeout: 0}
);
this.verificationResult.duplicateBarcodes.push(...result.duplicateBarcodes);
}
| conditional_block |
multiple-identity-card-addition-model.service.ts | import {Injectable} from '@angular/core';
import {DatabaseColumn, MultipleAdditionModel} from '../multiple-addition.model';
import {SnotifyService} from 'ng-snotify';
import {IdentityCard} from '../../interfaces/server/identity-card.interface';
import {IdentityCardService} from '../../core/identity-card.service';
import {AddIdentityCardsResponse} from '../../interfaces/server/add-identity-cards-response.interface';
import {HotTableRegisterer} from '@handsontable/angular';
import {createToBarcodeConverter} from "../../util/converters";
import {identityCardBarcodeValidator} from "../../util/validators";
/**
* A model class used for the insertion of multiple identity cards from a table file into the database
*/
@Injectable()
export class MultipleIdentityCardAdditionModelService extends MultipleAdditionModel<IdentityCard> {
/**
* An array containing all database columns used for identity cards
*/
public readonly allDatabaseHeaders: Array<DatabaseColumn<IdentityCard>> = [
{
title: 'Barcode',
required: true,
multiple: false,
convert(value: string, entity: IdentityCard) {
entity.barcode = value;
}
},
{
title: 'Unbenutzt',
required: false,
multiple: true,
convert(value: string, entity: IdentityCard) {
// do nothing
}
}];
/**
* An array containing all handsontable columns used for identity cards
*/
public readonly columns: Array<any> = [
{
data: 'barcode',
type: 'text',
validator: identityCardBarcodeValidator
}
];
/**
* An array containing all column names/labels for identity cards in the same order as in `columns`
*/
public readonly columnHeaders: Array<string> = [
'Barcode'
];
/**
     * The context menu settings for the handsontable in the confirmation step
*/
public readonly contextMenuItems: object = {
items: {
'row_above': {},
'row_below': {},
'hsep1': '---------',
'remove_row': {},
'hsep2': '---------',
'converter1': {
name: 'Zu Ausweis-Barcodes',
callback: createToBarcodeConverter("33", () => this.hotRegisterer.getInstance('confirmation-hot-table'), () => this.items)
}
}
};
/**
* Constructor
*
* @param identityCardService A service used to query the server for identity card relevant information
* @param hotRegisterer A service used to interact with handsontable instances
* @param snotifyService A service used to work with snotify
*/
constructor(private identityCardService: IdentityCardService,
private hotRegisterer: HotTableRegisterer,
private snotifyService: SnotifyService) {
super();
}
/**
* Verifies the given identity cards with the server.
     * Afterwards, the handsontable will be re-rendered.
*
* @param identityCards A list of identity cards to be verified
*/
public verifyItems(identityCards: Array<IdentityCard>): void {
this.identityCardService.verifyIdentityCards(identityCards, result => {
const hot = this.hotRegisterer.getInstance('confirmation-hot-table');
if (!result) {
this.verificationResult = {
verified: false
};
this.snotifyService.error('Es ist ein unerwarteter Fehler aufgetreten', {timeout: 0});
} else {
this.verificationResult = {
verified: result.valid,
badBarcodes: [], | duplicateBarcodes: []
};
if (result.alreadyExistingBarcodes && result.alreadyExistingBarcodes.length > 0) {
this.snotifyService.warning(
`Bei ${result.alreadyExistingBarcodes.length} Einträgen existiert der Barcode bereits`,
{timeout: 0}
);
this.verificationResult.badBarcodes.push(...result.alreadyExistingBarcodes);
}
if (result.duplicateBarcodes && result.duplicateBarcodes.length > 0) {
this.snotifyService.warning(
                        `${result.duplicateBarcodes.length} Einträge haben einen mehrfach existierenden Barcode`,
{timeout: 0}
);
this.verificationResult.duplicateBarcodes.push(...result.duplicateBarcodes);
}
if (result.valid) {
this.snotifyService.success('Alle Einträge sind valide', {timeout: 0});
}
}
hot.render();
});
}
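    /*
     * A hedged usage sketch (the `model` instance and the barcode value are
     * illustrative assumptions, not part of this service):
     *
     *   model.verifyItems([{barcode: '33000001'} as IdentityCard]);
     *   // -> reports the outcome via snotify and re-renders 'confirmation-hot-table'
     */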
/**
     * Inserts the given list of identity cards into the database
*
* @param items The identity cards to be inserted
*/
public insertItems(items: Array<IdentityCard>): void {
this.insertionResult = null;
this.identityCardService.addIdentityCards(items, (response: AddIdentityCardsResponse) => {
if (response.success) {
this.insertionResult = {
success: true,
                    message: `${this.items.length} Ausweise wurden erfolgreich der Datenbank hinzugefügt!`
};
} else {
this.insertionResult = {
success: false,
message: 'Es ist ein Fehler beim Hinzufügen der Ausweise aufgetreten!'
};
}
});
}
} | random_line_split |
|
vertex_outside.rs | use crate::mock_graph::{
arbitrary::{GuidedArbGraph, Limit},
MockVertex, TestGraph,
};
use graphene::{
core::{Ensure, Graph},
impl_ensurer,
};
use quickcheck::{Arbitrary, Gen};
use std::collections::HashSet;
/// An arbitrary graph and a vertex that is guaranteed to not be in it.
#[derive(Clone, Debug)]
pub struct VertexOutside<G>(pub G, pub MockVertex)
where
G: GuidedArbGraph,
G::Graph: TestGraph;
impl<G> Ensure for VertexOutside<G>
where
G: GuidedArbGraph,
G::Graph: TestGraph,
{
fn ensure_unvalidated(_c: Self::Ensured, _: ()) -> Self
{
unimplemented!()
}
fn validate(_c: &Self::Ensured, _: &()) -> bool
{
unimplemented!()
}
}
impl_ensurer! {
use<G> VertexOutside<G>: Ensure
as (self.0): G
where
G: GuidedArbGraph,
G::Graph: TestGraph,
}
impl<Gr> GuidedArbGraph for VertexOutside<Gr>
where
Gr: GuidedArbGraph,
Gr::Graph: TestGraph,
{
fn choose_size<G: Gen>(
g: &mut G,
v_min: usize,
v_max: usize,
e_min: usize,
e_max: usize,
) -> (usize, usize)
{
Gr::choose_size(g, v_min, v_max, e_min, e_max)
}
fn | <G: Gen>(g: &mut G, v_count: usize, e_count: usize) -> Self
{
let graph = Gr::arbitrary_fixed(g, v_count, e_count);
// Find a vertex that isn't in the graph
let mut v = MockVertex::arbitrary(g);
while graph.graph().contains_vertex(v)
{
v = MockVertex::arbitrary(g);
}
Self(graph, v)
}
fn shrink_guided(&self, limits: HashSet<Limit>) -> Box<dyn Iterator<Item = Self>>
{
let mut result = Vec::new();
// First shrink the graph, keeping only the shrunk ones where the vertex
// stays invalid
result.extend(
self.0
.shrink_guided(limits)
.filter(|g| !g.graph().contains_vertex(self.1))
.map(|g| Self(g, self.1)),
);
// We then shrink the vertex, keeping only the shrunk values
// that are invalid in the graph
result.extend(
self.1
.shrink()
				.filter(|&v| !self.0.graph().contains_vertex(v))
.map(|v| Self(self.0.clone(), v)),
);
Box::new(result.into_iter())
}
}
| arbitrary_fixed | identifier_name |
vertex_outside.rs | use crate::mock_graph::{
arbitrary::{GuidedArbGraph, Limit},
MockVertex, TestGraph,
};
use graphene::{
core::{Ensure, Graph},
impl_ensurer,
};
use quickcheck::{Arbitrary, Gen};
use std::collections::HashSet;
/// An arbitrary graph and a vertex that is guaranteed to not be in it.
#[derive(Clone, Debug)]
pub struct VertexOutside<G>(pub G, pub MockVertex)
where
G: GuidedArbGraph,
G::Graph: TestGraph;
impl<G> Ensure for VertexOutside<G>
where
G: GuidedArbGraph, | {
fn ensure_unvalidated(_c: Self::Ensured, _: ()) -> Self
{
unimplemented!()
}
fn validate(_c: &Self::Ensured, _: &()) -> bool
{
unimplemented!()
}
}
impl_ensurer! {
use<G> VertexOutside<G>: Ensure
as (self.0): G
where
G: GuidedArbGraph,
G::Graph: TestGraph,
}
impl<Gr> GuidedArbGraph for VertexOutside<Gr>
where
Gr: GuidedArbGraph,
Gr::Graph: TestGraph,
{
fn choose_size<G: Gen>(
g: &mut G,
v_min: usize,
v_max: usize,
e_min: usize,
e_max: usize,
) -> (usize, usize)
{
Gr::choose_size(g, v_min, v_max, e_min, e_max)
}
fn arbitrary_fixed<G: Gen>(g: &mut G, v_count: usize, e_count: usize) -> Self
{
let graph = Gr::arbitrary_fixed(g, v_count, e_count);
// Find a vertex that isn't in the graph
let mut v = MockVertex::arbitrary(g);
while graph.graph().contains_vertex(v)
{
v = MockVertex::arbitrary(g);
}
Self(graph, v)
}
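	// `arbitrary_fixed` establishes the invariant that the chosen vertex is
	// absent from the graph. A hedged property-test sketch (the concrete
	// `MockGraph` type name is an assumption about this crate's test helpers):
	//
	//     fn prop(arb: VertexOutside<MockGraph>) -> bool {
	//         !arb.0.graph().contains_vertex(arb.1)
	//     }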
fn shrink_guided(&self, limits: HashSet<Limit>) -> Box<dyn Iterator<Item = Self>>
{
let mut result = Vec::new();
// First shrink the graph, keeping only the shrunk ones where the vertex
// stays invalid
result.extend(
self.0
.shrink_guided(limits)
.filter(|g| !g.graph().contains_vertex(self.1))
.map(|g| Self(g, self.1)),
);
// We then shrink the vertex, keeping only the shrunk values
// that are invalid in the graph
result.extend(
self.1
.shrink()
				.filter(|&v| !self.0.graph().contains_vertex(v))
.map(|v| Self(self.0.clone(), v)),
);
Box::new(result.into_iter())
}
} | G::Graph: TestGraph, | random_line_split |
vertex_outside.rs | use crate::mock_graph::{
arbitrary::{GuidedArbGraph, Limit},
MockVertex, TestGraph,
};
use graphene::{
core::{Ensure, Graph},
impl_ensurer,
};
use quickcheck::{Arbitrary, Gen};
use std::collections::HashSet;
/// An arbitrary graph and a vertex that is guaranteed to not be in it.
#[derive(Clone, Debug)]
pub struct VertexOutside<G>(pub G, pub MockVertex)
where
G: GuidedArbGraph,
G::Graph: TestGraph;
impl<G> Ensure for VertexOutside<G>
where
G: GuidedArbGraph,
G::Graph: TestGraph,
{
fn ensure_unvalidated(_c: Self::Ensured, _: ()) -> Self
{
unimplemented!()
}
fn validate(_c: &Self::Ensured, _: &()) -> bool
{
unimplemented!()
}
}
impl_ensurer! {
use<G> VertexOutside<G>: Ensure
as (self.0): G
where
G: GuidedArbGraph,
G::Graph: TestGraph,
}
impl<Gr> GuidedArbGraph for VertexOutside<Gr>
where
Gr: GuidedArbGraph,
Gr::Graph: TestGraph,
{
fn choose_size<G: Gen>(
g: &mut G,
v_min: usize,
v_max: usize,
e_min: usize,
e_max: usize,
) -> (usize, usize)
{
Gr::choose_size(g, v_min, v_max, e_min, e_max)
}
fn arbitrary_fixed<G: Gen>(g: &mut G, v_count: usize, e_count: usize) -> Self
|
fn shrink_guided(&self, limits: HashSet<Limit>) -> Box<dyn Iterator<Item = Self>>
{
let mut result = Vec::new();
// First shrink the graph, keeping only the shrunk ones where the vertex
// stays invalid
result.extend(
self.0
.shrink_guided(limits)
.filter(|g| !g.graph().contains_vertex(self.1))
.map(|g| Self(g, self.1)),
);
// We then shrink the vertex, keeping only the shrunk values
// that are invalid in the graph
result.extend(
self.1
.shrink()
				.filter(|&v| !self.0.graph().contains_vertex(v))
.map(|v| Self(self.0.clone(), v)),
);
Box::new(result.into_iter())
}
}
| {
let graph = Gr::arbitrary_fixed(g, v_count, e_count);
// Find a vertex that isn't in the graph
let mut v = MockVertex::arbitrary(g);
while graph.graph().contains_vertex(v)
{
v = MockVertex::arbitrary(g);
}
Self(graph, v)
} | identifier_body |
glib_gobject.rs | // GObject Introspection Rust bindings.
// Copyright (C) 2014 Luis Araujo <[email protected]>
// This library is free software; you can redistribute it and/or
// modify it under the terms of the GNU Lesser General Public
// License as published by the Free Software Foundation; either
// version 2.1 of the License, or (at your option) any later version.
// This library is distributed in the hope that it will be useful,
// but WITHOUT ANY WARRANTY; without even the implied warranty of
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
// Lesser General Public License for more details.
// You should have received a copy of the GNU Lesser General Public
// License along with this library; if not, write to the Free Software
// Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
extern crate libc;
use glib_gobject::libc
::{c_void, c_char, c_int, c_uint, c_float,
c_long, c_ulong, c_double, size_t};
/* GObject */
pub type GType = size_t;
pub type GBoolean = c_int;
pub type GPointer = *c_void;
pub type GQuark = c_uint;
pub enum GData {}
#[deriving(Show, FromPrimitive)]
pub enum GParamFlags {
GParamReadable = 1 << 0,
GParamWritable = 1 << 1,
GParamConstruct = 1 << 2,
GParamConstructOnly = 1 << 3,
GParamLaxValidation = 1 << 4,
GParamStaticName = 1 << 5,
GParamStaticNick = 1 << 6,
    GParamStaticBlurb = 1 << 7,
/* User defined flags go up to 30 */
GParamDeprecated = 1 << 31
}
#[deriving(Show, FromPrimitive)]
pub enum GSignalFlags {
GSignalRunFirst = 1 << 0,
GSignalRunLast = 1 << 1,
GSignalRunCleanup = 1 << 2,
GSignalNoRecurse = 1 << 3,
GSignalDetailed = 1 << 4,
GSignalAction = 1 << 5,
GSignalNoHooks = 1 << 6,
GSignalMustCollect = 1 << 7,
GSignalDeprecated = 1 << 8
}
pub struct GObject {
g_type_instance: GTypeInstance,
/*< private >*/
// volatile guint ref_count;
ref_count: c_uint,
qdata: *GData
}
struct GTypeInstance {
/*< private >*/
g_class: *GTypeClass
}
struct GTypeClass {
/*< private >*/
g_type: GType
}
pub enum GValueData {
GValueDataVInt(c_int),
GValueDataVUInt(c_uint),
GValueDataVLong(c_long),
GValueDataVULong(c_ulong),
GValueDataVInt64(i64),
GValueDataVUInt64(u64),
GValueDataVFloat(c_float),
GValueDataVDouble(c_double),
GValueDataVPointer(GPointer)
}
pub struct GValue {
/*< private >*/
g_type: GType,
/* public for GTypeValueTable methods */
data: [GValueData, ..2]
}
/* GLib */
pub enum | {}
pub enum GMappedFile {}
/* TODO: Get higher level structs for lists using generics */
pub struct GSList
{
data: GPointer,
next: *GSList
}
pub struct GList {
data: GPointer,
next: *GList,
prev: *GList
}
pub struct GError {
domain: GQuark,
code: c_int,
message: *c_char
}
| GOptionGroup | identifier_name |
glib_gobject.rs | // GObject Introspection Rust bindings.
// Copyright (C) 2014 Luis Araujo <[email protected]>
// This library is free software; you can redistribute it and/or
// modify it under the terms of the GNU Lesser General Public
// License as published by the Free Software Foundation; either
// version 2.1 of the License, or (at your option) any later version.
// This library is distributed in the hope that it will be useful,
// but WITHOUT ANY WARRANTY; without even the implied warranty of
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
// Lesser General Public License for more details.
// You should have received a copy of the GNU Lesser General Public
// License along with this library; if not, write to the Free Software
// Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
extern crate libc;
use glib_gobject::libc
::{c_void, c_char, c_int, c_uint, c_float,
c_long, c_ulong, c_double, size_t};
/* GObject */
pub type GType = size_t;
pub type GBoolean = c_int;
pub type GPointer = *c_void;
pub type GQuark = c_uint;
pub enum GData {}
#[deriving(Show, FromPrimitive)]
pub enum GParamFlags {
GParamReadable = 1 << 0,
GParamWritable = 1 << 1,
GParamConstruct = 1 << 2,
GParamConstructOnly = 1 << 3,
GParamLaxValidation = 1 << 4,
GParamStaticName = 1 << 5,
GParamStaticNick = 1 << 6,
    GParamStaticBlurb = 1 << 7,
/* User defined flags go up to 30 */
GParamDeprecated = 1 << 31
}
#[deriving(Show, FromPrimitive)]
pub enum GSignalFlags {
GSignalRunFirst = 1 << 0,
GSignalRunLast = 1 << 1,
GSignalRunCleanup = 1 << 2,
GSignalNoRecurse = 1 << 3,
GSignalDetailed = 1 << 4,
GSignalAction = 1 << 5,
GSignalNoHooks = 1 << 6,
GSignalMustCollect = 1 << 7,
GSignalDeprecated = 1 << 8
}
pub struct GObject {
g_type_instance: GTypeInstance,
/*< private >*/
// volatile guint ref_count;
ref_count: c_uint,
qdata: *GData
}
struct GTypeInstance {
/*< private >*/
g_class: *GTypeClass
}
struct GTypeClass {
/*< private >*/
g_type: GType
}
pub enum GValueData {
GValueDataVInt(c_int),
GValueDataVUInt(c_uint),
GValueDataVLong(c_long),
GValueDataVULong(c_ulong),
GValueDataVInt64(i64), |
pub struct GValue {
/*< private >*/
g_type: GType,
/* public for GTypeValueTable methods */
data: [GValueData, ..2]
}
/* GLib */
pub enum GOptionGroup {}
pub enum GMappedFile {}
/* TODO: Get higher level structs for lists using generics */
pub struct GSList
{
data: GPointer,
next: *GSList
}
pub struct GList {
data: GPointer,
next: *GList,
prev: *GList
}
pub struct GError {
domain: GQuark,
code: c_int,
message: *c_char
} | GValueDataVUInt64(u64),
GValueDataVFloat(c_float),
GValueDataVDouble(c_double),
GValueDataVPointer(GPointer)
} | random_line_split |
animationFrame.d.ts | import { AnimationFrameScheduler } from './AnimationFrameScheduler';
/**
*
* Animation Frame Scheduler
*
* <span class="informal">Perform task when `window.requestAnimationFrame` would fire</span>
*
* When `animationFrame` scheduler is used with delay, it will fall back to {@link asyncScheduler} scheduler
* behaviour.
*
* Without delay, `animationFrame` scheduler can be used to create smooth browser animations.
* It makes sure scheduled task will happen just before next browser content repaint,
* thus performing animations as efficiently as possible.
* | * import { animationFrameScheduler } from 'rxjs';
*
* const div = document.querySelector('div');
*
* animationFrameScheduler.schedule(function(height) {
* div.style.height = height + "px";
*
* this.schedule(height + 1); // `this` references currently executing Action,
* // which we reschedule with new state
* }, 0, 0);
*
* // You will see a div element growing in height
* ```
*
* @static true
* @name animationFrame
* @owner Scheduler
*/
export declare const animationFrame: AnimationFrameScheduler; | * ## Example
* Schedule div height animation
* ```javascript
* // html: <div style="background: #0ff;"></div> | random_line_split |
labeled-break.rs | // Copyright 2012-2014 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
// pretty-expanded FIXME #23616
pub fn main() {
'foo: loop {
loop {
break 'foo;
}
}
'bar: for _ in 0..100 {
loop {
break 'bar; | break 'foobar;
}
}
} | }
}
'foobar: while 1 + 1 == 2 {
loop { | random_line_split |
labeled-break.rs | // Copyright 2012-2014 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
// pretty-expanded FIXME #23616
pub fn main() | {
'foo: loop {
loop {
break 'foo;
}
}
'bar: for _ in 0..100 {
loop {
break 'bar;
}
}
'foobar: while 1 + 1 == 2 {
loop {
break 'foobar;
}
}
} | identifier_body |
|
labeled-break.rs | // Copyright 2012-2014 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
// pretty-expanded FIXME #23616
pub fn | () {
'foo: loop {
loop {
break 'foo;
}
}
'bar: for _ in 0..100 {
loop {
break 'bar;
}
}
'foobar: while 1 + 1 == 2 {
loop {
break 'foobar;
}
}
}
| main | identifier_name |
reinigung-ff.spec.ts | import { browser, element, by, $ } from 'protractor';
import { NavBarPage } from './../page-objects/jhi-page-objects';
const path = require('path');
describe('Reinigung e2e test', () => {
let navBarPage: NavBarPage;
let reinigungDialogPage: ReinigungDialogPage;
let reinigungComponentsPage: ReinigungComponentsPage;
const fileToUpload = '../../../../main/webapp/content/images/logo-jhipster.png';
const absolutePath = path.resolve(__dirname, fileToUpload);
beforeAll(() => {
browser.get('/');
browser.waitForAngular();
navBarPage = new NavBarPage();
navBarPage.getSignInPage().autoSignInUsing('admin', 'admin');
browser.waitForAngular();
});
it('should load Reinigungs', () => {
navBarPage.goToEntity('reinigung-ff');
reinigungComponentsPage = new ReinigungComponentsPage();
expect(reinigungComponentsPage.getTitle()).toMatch(/ffManagementSuiteApp.reinigung.home.title/);
});
it('should load create Reinigung dialog', () => {
reinigungComponentsPage.clickOnCreateButton();
reinigungDialogPage = new ReinigungDialogPage();
expect(reinigungDialogPage.getModalTitle()).toMatch(/ffManagementSuiteApp.reinigung.home.createOrEditLabel/);
reinigungDialogPage.close(); | it('should create and save Reinigungs', () => {
reinigungComponentsPage.clickOnCreateButton();
reinigungDialogPage.setDurchfuehrungInput('2000-12-31');
expect(reinigungDialogPage.getDurchfuehrungInput()).toMatch('2000-12-31');
reinigungDialogPage.save();
expect(reinigungDialogPage.getSaveButton().isPresent()).toBeFalsy();
});
afterAll(() => {
navBarPage.autoSignOut();
});
});
export class ReinigungComponentsPage {
createButton = element(by.css('.jh-create-entity'));
title = element.all(by.css('jhi-reinigung-ff div h2 span')).first();
clickOnCreateButton() {
return this.createButton.click();
}
getTitle() {
return this.title.getAttribute('jhiTranslate');
}
}
export class ReinigungDialogPage {
modalTitle = element(by.css('h4#myReinigungLabel'));
saveButton = element(by.css('.modal-footer .btn.btn-primary'));
closeButton = element(by.css('button.close'));
durchfuehrungInput = element(by.css('input#field_durchfuehrung'));
getModalTitle() {
return this.modalTitle.getAttribute('jhiTranslate');
}
setDurchfuehrungInput = function (durchfuehrung) {
this.durchfuehrungInput.sendKeys(durchfuehrung);
}
getDurchfuehrungInput = function () {
return this.durchfuehrungInput.getAttribute('value');
}
save() {
this.saveButton.click();
}
close() {
this.closeButton.click();
}
getSaveButton() {
return this.saveButton;
}
} | });
| random_line_split |
reinigung-ff.spec.ts | import { browser, element, by, $ } from 'protractor';
import { NavBarPage } from './../page-objects/jhi-page-objects';
const path = require('path');
describe('Reinigung e2e test', () => {
let navBarPage: NavBarPage;
let reinigungDialogPage: ReinigungDialogPage;
let reinigungComponentsPage: ReinigungComponentsPage;
const fileToUpload = '../../../../main/webapp/content/images/logo-jhipster.png';
const absolutePath = path.resolve(__dirname, fileToUpload);
beforeAll(() => {
browser.get('/');
browser.waitForAngular();
navBarPage = new NavBarPage();
navBarPage.getSignInPage().autoSignInUsing('admin', 'admin');
browser.waitForAngular();
});
it('should load Reinigungs', () => {
navBarPage.goToEntity('reinigung-ff');
reinigungComponentsPage = new ReinigungComponentsPage();
expect(reinigungComponentsPage.getTitle()).toMatch(/ffManagementSuiteApp.reinigung.home.title/);
});
it('should load create Reinigung dialog', () => {
reinigungComponentsPage.clickOnCreateButton();
reinigungDialogPage = new ReinigungDialogPage();
expect(reinigungDialogPage.getModalTitle()).toMatch(/ffManagementSuiteApp.reinigung.home.createOrEditLabel/);
reinigungDialogPage.close();
});
it('should create and save Reinigungs', () => {
reinigungComponentsPage.clickOnCreateButton();
reinigungDialogPage.setDurchfuehrungInput('2000-12-31');
expect(reinigungDialogPage.getDurchfuehrungInput()).toMatch('2000-12-31');
reinigungDialogPage.save();
expect(reinigungDialogPage.getSaveButton().isPresent()).toBeFalsy();
});
afterAll(() => {
navBarPage.autoSignOut();
});
});
export class ReinigungComponentsPage {
createButton = element(by.css('.jh-create-entity'));
title = element.all(by.css('jhi-reinigung-ff div h2 span')).first();
clickOnCreateButton() {
return this.createButton.click();
}
getTitle() {
return this.title.getAttribute('jhiTranslate');
}
}
export class ReinigungDialogPage {
modalTitle = element(by.css('h4#myReinigungLabel'));
saveButton = element(by.css('.modal-footer .btn.btn-primary'));
closeButton = element(by.css('button.close'));
durchfuehrungInput = element(by.css('input#field_durchfuehrung'));
getModalTitle() {
return this.modalTitle.getAttribute('jhiTranslate');
}
setDurchfuehrungInput = function (durchfuehrung) {
this.durchfuehrungInput.sendKeys(durchfuehrung);
}
getDurchfuehrungInput = function () {
return this.durchfuehrungInput.getAttribute('value');
}
save() |
close() {
this.closeButton.click();
}
getSaveButton() {
return this.saveButton;
}
}
| {
this.saveButton.click();
} | identifier_body |
reinigung-ff.spec.ts | import { browser, element, by, $ } from 'protractor';
import { NavBarPage } from './../page-objects/jhi-page-objects';
const path = require('path');
describe('Reinigung e2e test', () => {
let navBarPage: NavBarPage;
let reinigungDialogPage: ReinigungDialogPage;
let reinigungComponentsPage: ReinigungComponentsPage;
const fileToUpload = '../../../../main/webapp/content/images/logo-jhipster.png';
const absolutePath = path.resolve(__dirname, fileToUpload);
beforeAll(() => {
browser.get('/');
browser.waitForAngular();
navBarPage = new NavBarPage();
navBarPage.getSignInPage().autoSignInUsing('admin', 'admin');
browser.waitForAngular();
});
it('should load Reinigungs', () => {
navBarPage.goToEntity('reinigung-ff');
reinigungComponentsPage = new ReinigungComponentsPage();
expect(reinigungComponentsPage.getTitle()).toMatch(/ffManagementSuiteApp.reinigung.home.title/);
});
it('should load create Reinigung dialog', () => {
reinigungComponentsPage.clickOnCreateButton();
reinigungDialogPage = new ReinigungDialogPage();
expect(reinigungDialogPage.getModalTitle()).toMatch(/ffManagementSuiteApp.reinigung.home.createOrEditLabel/);
reinigungDialogPage.close();
});
it('should create and save Reinigungs', () => {
reinigungComponentsPage.clickOnCreateButton();
reinigungDialogPage.setDurchfuehrungInput('2000-12-31');
expect(reinigungDialogPage.getDurchfuehrungInput()).toMatch('2000-12-31');
reinigungDialogPage.save();
expect(reinigungDialogPage.getSaveButton().isPresent()).toBeFalsy();
});
afterAll(() => {
navBarPage.autoSignOut();
});
});
export class ReinigungComponentsPage {
createButton = element(by.css('.jh-create-entity'));
title = element.all(by.css('jhi-reinigung-ff div h2 span')).first();
clickOnCreateButton() {
return this.createButton.click();
}
getTitle() {
return this.title.getAttribute('jhiTranslate');
}
}
export class ReinigungDialogPage {
modalTitle = element(by.css('h4#myReinigungLabel'));
saveButton = element(by.css('.modal-footer .btn.btn-primary'));
closeButton = element(by.css('button.close'));
durchfuehrungInput = element(by.css('input#field_durchfuehrung'));
getModalTitle() {
return this.modalTitle.getAttribute('jhiTranslate');
}
setDurchfuehrungInput = function (durchfuehrung) {
this.durchfuehrungInput.sendKeys(durchfuehrung);
}
getDurchfuehrungInput = function () {
return this.durchfuehrungInput.getAttribute('value');
}
save() {
this.saveButton.click();
}
| () {
this.closeButton.click();
}
getSaveButton() {
return this.saveButton;
}
}
| close | identifier_name |
Local_Select.spec.ts | import { createStore } from 'test/support/Helpers'
import Model from '@/model/Model'
describe('Feature – Hooks – Local Select', () => {
it('can process beforeSelect hook', async () => {
class User extends Model {
static entity = 'users'
// @Attribute
id!: number
// @Attribute('')
role!: string
static fields() {
return {
id: this.attr(null),
role: this.attr('') | }
}
createStore([{ model: User }])
await User.create({
data: [
{ id: 1, role: 'admin' },
{ id: 2, role: 'admin' },
{ id: 3, role: 'user' }
]
})
const users = User.query().get()
expect(users.length).toBe(2)
expect(users[0].role).toBe('admin')
expect(users[1].role).toBe('admin')
})
}) | }
}
static beforeSelect(records: any) {
return records.filter((record: User) => record.role === 'admin') | random_line_split |
Local_Select.spec.ts | import { createStore } from 'test/support/Helpers'
import Model from '@/model/Model'
describe('Feature – Hooks – Local Select', () => {
it('can process beforeSelect hook', async () => {
class User extends Model {
static entity = 'users'
// @Attribute
id!: number
// @Attribute('')
role!: string
static fiel |
return {
id: this.attr(null),
role: this.attr('')
}
}
static beforeSelect(records: any) {
return records.filter((record: User) => record.role === 'admin')
}
}
createStore([{ model: User }])
await User.create({
data: [
{ id: 1, role: 'admin' },
{ id: 2, role: 'admin' },
{ id: 3, role: 'user' }
]
})
const users = User.query().get()
expect(users.length).toBe(2)
expect(users[0].role).toBe('admin')
expect(users[1].role).toBe('admin')
})
})
| ds() { | identifier_name |
setup.py | #!/usr/bin/env python2
# References:
# https://fedoraproject.org/wiki/Koji/ServerHowTo
# https://github.com/sbadakhc/kojak/blob/master/scripts/install/install
import util.cfg as cfg
import util.pkg as pkg
import util.cred as cred | #
log.info("General update")
pkg.clean()
pkg.update()
log.info("Install EPEL")
pkg.install("https://dl.fedoraproject.org/pub/epel/6/x86_64/epel-release-6-8.noarch.rpm")
#
# Kojid (Koji Builder)
#
log.info("Install Koji Builder")
pkg.install("koji-builder")
koji_url = dict()
koji_url["web"] = "http://koji/koji"
koji_url["top"] = "http://koji/kojifiles"
koji_url["hub"] = "http://koji/kojihub"
log.info("Configure Koji Builder")
with cfg.mod_ini("/etc/kojid/kojid.conf") as i:
i.kojid.sleeptime = 2
i.kojid.maxjobs = 20
i.kojid.server = koji_url["hub"]
i.kojid.topurl = koji_url["top"]
# i.kojid.cert is set at runtime
i.kojid.ca = cred.ca_crt
i.kojid.serverca = cred.ca_crt
i.kojid.smtphost = "koji"
i.kojid.from_addr = "Koji Build System <buildsys@kojibuilder>"
#
# Koji CLI
#
log.info("Configure Koji CLI")
with cfg.mod_ini("/etc/koji.conf") as i:
i.koji.server = koji_url["hub"]
i.koji.weburl = koji_url["web"]
i.koji.topurl = koji_url["top"]
i.koji.topdir = "/mnt/koji"
i.koji.cert = cred.user["kojiadmin"].pem
i.koji.ca = cred.ca_crt
i.koji.serverca = cred.ca_crt
pkg.clean() | from util.log import log
#
# Setup | random_line_split |
list.js | import { combineReducers } from 'redux';
export function error(state = null, action) {
switch (action.type) {
case 'PRODUCT_LIST_ERROR':
return action.error;
case 'PRODUCT_LIST_MERCURE_DELETED':
return `${action.retrieved['@id']} has been deleted by another user.`;
case 'PRODUCT_LIST_RESET':
return null;
default:
return state;
}
}
export function loading(state = false, action) {
switch (action.type) {
case 'PRODUCT_LIST_LOADING':
return action.loading;
case 'PRODUCT_LIST_RESET':
return false;
default:
return state;
}
}
export function retrieved(state = null, action) {
switch (action.type) {
case 'PRODUCT_LIST_SUCCESS':
return action.retrieved;
case 'PRODUCT_LIST_RESET':
return null;
case 'PRODUCT_LIST_MERCURE_MESSAGE':
return {
...state,
'hydra:member': state['hydra:member'].map(item =>
item['@id'] === action.retrieved['@id'] ? action.retrieved : item
)
};
case 'PRODUCT_LIST_MERCURE_DELETED':
return {
...state,
'hydra:member': state['hydra:member'].filter(
item => item['@id'] !== action.retrieved['@id']
)
};
default:
return state;
}
}
export function eventSource(state = null, action) |
export default combineReducers({ error, loading, retrieved, eventSource });
| {
switch (action.type) {
case 'PRODUCT_LIST_MERCURE_OPEN':
return action.eventSource;
case 'PRODUCT_LIST_RESET':
return null;
default:
return state;
}
} | identifier_body |
list.js | import { combineReducers } from 'redux';
export function error(state = null, action) {
switch (action.type) {
case 'PRODUCT_LIST_ERROR':
return action.error;
case 'PRODUCT_LIST_MERCURE_DELETED':
return `${action.retrieved['@id']} has been deleted by another user.`;
case 'PRODUCT_LIST_RESET':
return null;
default:
return state;
}
}
export function loading(state = false, action) {
switch (action.type) {
case 'PRODUCT_LIST_LOADING':
return action.loading;
case 'PRODUCT_LIST_RESET':
return false;
default:
return state;
}
}
export function retrieved(state = null, action) {
switch (action.type) {
case 'PRODUCT_LIST_SUCCESS':
return action.retrieved;
case 'PRODUCT_LIST_RESET':
return null;
case 'PRODUCT_LIST_MERCURE_MESSAGE':
return {
...state,
'hydra:member': state['hydra:member'].map(item =>
item['@id'] === action.retrieved['@id'] ? action.retrieved : item
)
};
case 'PRODUCT_LIST_MERCURE_DELETED':
return {
...state,
'hydra:member': state['hydra:member'].filter(
item => item['@id'] !== action.retrieved['@id']
)
};
default:
return state;
}
}
export function | (state = null, action) {
switch (action.type) {
case 'PRODUCT_LIST_MERCURE_OPEN':
return action.eventSource;
case 'PRODUCT_LIST_RESET':
return null;
default:
return state;
}
}
export default combineReducers({ error, loading, retrieved, eventSource });
| eventSource | identifier_name |
list.js | import { combineReducers } from 'redux';
export function error(state = null, action) {
switch (action.type) {
case 'PRODUCT_LIST_ERROR':
return action.error;
case 'PRODUCT_LIST_MERCURE_DELETED':
return `${action.retrieved['@id']} has been deleted by another user.`;
case 'PRODUCT_LIST_RESET':
return null;
default:
return state;
}
}
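// A quick sketch of the contract above as plain function calls, with no store
// attached (the action payloads are illustrative):
//
//   error(null, { type: 'PRODUCT_LIST_ERROR', error: 'Boom' }); // => 'Boom'
//   error('Boom', { type: 'PRODUCT_LIST_RESET' });              // => null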
export function loading(state = false, action) {
switch (action.type) {
case 'PRODUCT_LIST_LOADING':
return action.loading;
case 'PRODUCT_LIST_RESET':
return false; |
export function retrieved(state = null, action) {
switch (action.type) {
case 'PRODUCT_LIST_SUCCESS':
return action.retrieved;
case 'PRODUCT_LIST_RESET':
return null;
case 'PRODUCT_LIST_MERCURE_MESSAGE':
return {
...state,
'hydra:member': state['hydra:member'].map(item =>
item['@id'] === action.retrieved['@id'] ? action.retrieved : item
)
};
case 'PRODUCT_LIST_MERCURE_DELETED':
return {
...state,
'hydra:member': state['hydra:member'].filter(
item => item['@id'] !== action.retrieved['@id']
)
};
default:
return state;
}
}
export function eventSource(state = null, action) {
switch (action.type) {
case 'PRODUCT_LIST_MERCURE_OPEN':
return action.eventSource;
case 'PRODUCT_LIST_RESET':
return null;
default:
return state;
}
}
export default combineReducers({ error, loading, retrieved, eventSource }); |
default:
return state;
}
} | random_line_split |
0001_initial.py | # -*- coding: utf-8 -*-
# Generated by Django 1.11.5 on 2017-10-27 19:05
from __future__ import unicode_literals
import django.contrib.auth.models
import django.contrib.auth.validators
from django.db import migrations, models
import django.utils.timezone
class Migration(migrations.Migration):
| ('last_name', models.CharField(max_length=50, verbose_name='Last Name')),
('phone', models.CharField(max_length=20, verbose_name='Phone')),
('groups', models.ManyToManyField(blank=True, help_text='The groups this user belongs to. A user will get all permissions granted to each of their groups.', related_name='user_set', related_query_name='user', to='auth.Group', verbose_name='groups')),
('user_permissions', models.ManyToManyField(blank=True, help_text='Specific permissions for this user.', related_name='user_set', related_query_name='user', to='auth.Permission', verbose_name='user permissions')),
],
options={
'verbose_name': 'user',
'verbose_name_plural': 'users',
'abstract': False,
},
managers=[
('objects', django.contrib.auth.models.UserManager()),
],
),
]
| initial = True
dependencies = [
('auth', '0008_alter_user_username_max_length'),
]
operations = [
migrations.CreateModel(
name='User',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('password', models.CharField(max_length=128, verbose_name='password')),
('last_login', models.DateTimeField(blank=True, null=True, verbose_name='last login')),
('is_superuser', models.BooleanField(default=False, help_text='Designates that this user has all permissions without explicitly assigning them.', verbose_name='superuser status')),
('username', models.CharField(error_messages={'unique': 'A user with that username already exists.'}, help_text='Required. 150 characters or fewer. Letters, digits and @/./+/-/_ only.', max_length=150, unique=True, validators=[django.contrib.auth.validators.UnicodeUsernameValidator()], verbose_name='username')),
('is_staff', models.BooleanField(default=False, help_text='Designates whether the user can log into this admin site.', verbose_name='staff status')),
('is_active', models.BooleanField(default=True, help_text='Designates whether this user should be treated as active. Unselect this instead of deleting accounts.', verbose_name='active')),
('date_joined', models.DateTimeField(default=django.utils.timezone.now, verbose_name='date joined')),
('email', models.EmailField(max_length=254, verbose_name='Email')),
('first_name', models.CharField(max_length=50, verbose_name='First Name')), | identifier_body |
0001_initial.py | # -*- coding: utf-8 -*-
# Generated by Django 1.11.5 on 2017-10-27 19:05
from __future__ import unicode_literals
import django.contrib.auth.models
import django.contrib.auth.validators
from django.db import migrations, models
import django.utils.timezone
class Migration(migrations.Migration):
initial = True
dependencies = [
('auth', '0008_alter_user_username_max_length'),
]
operations = [
migrations.CreateModel(
name='User',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('password', models.CharField(max_length=128, verbose_name='password')),
('last_login', models.DateTimeField(blank=True, null=True, verbose_name='last login')),
('is_superuser', models.BooleanField(default=False, help_text='Designates that this user has all permissions without explicitly assigning them.', verbose_name='superuser status')),
('username', models.CharField(error_messages={'unique': 'A user with that username already exists.'}, help_text='Required. 150 characters or fewer. Letters, digits and @/./+/-/_ only.', max_length=150, unique=True, validators=[django.contrib.auth.validators.UnicodeUsernameValidator()], verbose_name='username')),
('is_staff', models.BooleanField(default=False, help_text='Designates whether the user can log into this admin site.', verbose_name='staff status')),
('is_active', models.BooleanField(default=True, help_text='Designates whether this user should be treated as active. Unselect this instead of deleting accounts.', verbose_name='active')),
('date_joined', models.DateTimeField(default=django.utils.timezone.now, verbose_name='date joined')),
('email', models.EmailField(max_length=254, verbose_name='Email')),
('first_name', models.CharField(max_length=50, verbose_name='First Name')),
('last_name', models.CharField(max_length=50, verbose_name='Last Name')),
('phone', models.CharField(max_length=20, verbose_name='Phone')),
('groups', models.ManyToManyField(blank=True, help_text='The groups this user belongs to. A user will get all permissions granted to each of their groups.', related_name='user_set', related_query_name='user', to='auth.Group', verbose_name='groups')),
('user_permissions', models.ManyToManyField(blank=True, help_text='Specific permissions for this user.', related_name='user_set', related_query_name='user', to='auth.Permission', verbose_name='user permissions')),
],
options={
'verbose_name': 'user',
'verbose_name_plural': 'users',
'abstract': False,
},
managers=[
('objects', django.contrib.auth.models.UserManager()),
], | ),
] | random_line_split |
|
0001_initial.py | # -*- coding: utf-8 -*-
# Generated by Django 1.11.5 on 2017-10-27 19:05
from __future__ import unicode_literals
import django.contrib.auth.models
import django.contrib.auth.validators
from django.db import migrations, models
import django.utils.timezone
class | (migrations.Migration):
initial = True
dependencies = [
('auth', '0008_alter_user_username_max_length'),
]
operations = [
migrations.CreateModel(
name='User',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('password', models.CharField(max_length=128, verbose_name='password')),
('last_login', models.DateTimeField(blank=True, null=True, verbose_name='last login')),
('is_superuser', models.BooleanField(default=False, help_text='Designates that this user has all permissions without explicitly assigning them.', verbose_name='superuser status')),
('username', models.CharField(error_messages={'unique': 'A user with that username already exists.'}, help_text='Required. 150 characters or fewer. Letters, digits and @/./+/-/_ only.', max_length=150, unique=True, validators=[django.contrib.auth.validators.UnicodeUsernameValidator()], verbose_name='username')),
('is_staff', models.BooleanField(default=False, help_text='Designates whether the user can log into this admin site.', verbose_name='staff status')),
('is_active', models.BooleanField(default=True, help_text='Designates whether this user should be treated as active. Unselect this instead of deleting accounts.', verbose_name='active')),
('date_joined', models.DateTimeField(default=django.utils.timezone.now, verbose_name='date joined')),
('email', models.EmailField(max_length=254, verbose_name='Email')),
('first_name', models.CharField(max_length=50, verbose_name='First Name')),
('last_name', models.CharField(max_length=50, verbose_name='Last Name')),
('phone', models.CharField(max_length=20, verbose_name='Phone')),
('groups', models.ManyToManyField(blank=True, help_text='The groups this user belongs to. A user will get all permissions granted to each of their groups.', related_name='user_set', related_query_name='user', to='auth.Group', verbose_name='groups')),
('user_permissions', models.ManyToManyField(blank=True, help_text='Specific permissions for this user.', related_name='user_set', related_query_name='user', to='auth.Permission', verbose_name='user permissions')),
],
options={
'verbose_name': 'user',
'verbose_name_plural': 'users',
'abstract': False,
},
managers=[
('objects', django.contrib.auth.models.UserManager()),
],
),
]
| Migration | identifier_name |
init.js | /*
* Copyright 2015 Caleb Brose, Chris Fogerty, Rob Sheehy, Zach Taylor, Nick Miller
* | * Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
// auth/init.js
// Handles user session data and view access control.
var authService = require('./authService'),
sessionService = require('./sessionService'),
loginController = require('./loginController');
var auth = angular.module('lighthouse.auth', []);
auth.controller('loginController', loginController);
auth.factory('authService', authService);
auth.factory('sessionService', sessionService);
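// Hedged consumption sketch — the name of the consuming application module is
// an assumption; everything else comes from this file:
//
//   var auth = require('./auth/init');
//   angular.module('lighthouse', [auth.name]); // auth.name === 'lighthouse.auth'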
module.exports = auth; | random_line_split |
|
objdetect.rs | -> Vec<Rect> {
self.detect_with_params(mat, 1.1, 3, Size2i::default(), Size2i::default())
}
/// Detects the object using parameters specified.
///
/// * `mat` - Matrix of the type CV_8U containing an image where objects are
/// detected.
/// * `scale_factor` - Parameter specifying how much the image size is
/// reduced at each image scale.
/// * `min_neighbors` - Parameter specifying how many neighbors each
/// candidate rectangle should have to retain it.
/// * `min_size` - Minimum possible object size. Objects smaller than that
/// are ignored.
/// * `max_size` - Maximum possible object size. Objects larger than that
    /// are ignored.
///
/// OpenCV has a parameter (`flags`) that's not used at all.
pub fn detect_with_params(
&self,
mat: &Mat,
scale_factor: f32,
min_neighbors: c_int,
min_size: Size2i,
max_size: Size2i,
) -> Vec<Rect> {
let mut c_result = CVec::<Rect>::default();
unsafe {
cv_cascade_classifier_detect(
self.inner,
mat.inner,
&mut c_result,
scale_factor as c_double,
min_neighbors,
0,
min_size,
max_size,
)
}
c_result.unpack()
}
}
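/// A minimal usage sketch built only from items in this module: detect while
/// ignoring objects smaller than 30x30 pixels. How the `Mat` is obtained
/// (file, camera, ...) is left to the caller.
#[allow(dead_code)]
fn cascade_example(classifier: &CascadeClassifier, image: &Mat) -> Vec<Rect> {
    classifier.detect_with_params(image, 1.1, 3, Size2i::new(30, 30), Size2i::default())
}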
impl Drop for CascadeClassifier {
fn drop(&mut self) {
unsafe {
cv_cascade_classifier_drop(self.inner);
}
}
}
#[derive(Debug, Clone, Copy)]
/// Opaque type for C/C++ SvmDetector object
pub enum CSvmDetector {}
/// SvmDetector
#[derive(Debug)]
pub struct SvmDetector {
/// Pointer to the inner data structure
pub(crate) inner: *mut CSvmDetector,
}
extern "C" {
fn cv_hog_default_people_detector() -> *mut CSvmDetector;
fn cv_hog_daimler_people_detector() -> *mut CSvmDetector;
fn cv_hog_detector_drop(d: *mut CSvmDetector);
}
impl SvmDetector {
/// The built-in people detector.
///
    /// The size of the default people detector is 64x128, which means that the
    /// people you would want to detect have to be at least 64x128.
pub fn default_people_detector() -> SvmDetector {
SvmDetector {
inner: unsafe { cv_hog_default_people_detector() },
}
}
/// Returns the Daimler people detector.
pub fn daimler_people_detector() -> SvmDetector {
SvmDetector {
inner: unsafe { cv_hog_daimler_people_detector() },
}
}
}
impl Drop for SvmDetector {
fn drop(&mut self) {
unsafe {
cv_hog_detector_drop(self.inner);
}
}
}
/// Parameters that controls the behavior of HOG.
#[derive(Debug, Clone, Copy)]
pub struct HogParams {
/// Detection window size. Align to block size and block stride. The default
/// is 64x128, trained the same as original paper.
pub win_size: Size2i,
/// Block size in pixels. Align to cell size. Only (16,16) is supported for
/// now (at least for GPU).
pub block_size: Size2i,
/// Block stride. It must be a multiple of cell size.
pub block_stride: Size2i,
/// Cell size. Only (8, 8) is supported for now.
pub cell_size: Size2i,
/// Number of bins. Only 9 bins per cell are supported for now.
pub nbins: c_int,
/// Gaussian smoothing window parameter. Default -1 for CPU and 4.0 for GPU.
pub win_sigma: f64,
/// L2-Hys normalization method shrinkage. Default 0.2.
pub l2hys_threshold: f64,
/// Flag to specify whether the gamma correction preprocessing is required
/// or not. Default false.
pub gamma_correction: bool,
/// Maximum number of detection window increases (HOG scales). Default: 64.
pub nlevels: usize,
// =======================================================================
// Functions from detect function
// =======================================================================
/// Threshold for the distance between features and SVM classifying
    /// plane. Usually it is 0 and should be specified in the detector
/// coefficients (as the last free coefficient). But if the free coefficient
/// is omitted (which is allowed), you can specify it manually here.
pub hit_threshold: f64,
/// Window stride. It must be a multiple of block stride.
pub win_stride: Size2i,
/// Padding
pub padding: Size2i,
/// Coefficient of the detection window increase.
pub scale: f64,
/// Coefficient to regulate the similarity threshold. When detected, some
/// objects can be covered by many rectangles. 0 means not to perform
/// grouping.
pub group_threshold: c_int,
/// The useMeanShiftGrouping parameter is a boolean indicating whether or
/// not mean-shift grouping should be performed to handle potential
/// overlapping bounding boxes. While this value should not be set and users
/// should employ non-maxima suppression instead, we support setting it as a
/// library function.
pub use_meanshift_grouping: bool,
/// The `finalThreshold` parameter is mainly used to select the clusters
/// that have at least `finalThreshold + 1` rectangles. This parameter is
/// passed when meanShift is enabled; the function rejects the small
/// clusters containing less than or equal to `finalThreshold` rectangles,
/// computes the average rectangle size for the rest of the accepted
/// clusters and adds those to the output rectangle list.
pub final_threshold: f64,
}
const DEFAULT_WIN_SIGMA: f64 = -1f64;
const DEFAULT_NLEVELS: usize = 64;
impl Default for HogParams {
fn default() -> HogParams {
let win_sigma = {
if cfg!(feature = "cuda") {
4.0
} else {
DEFAULT_WIN_SIGMA
}
};
HogParams {
win_size: Size2i::new(64, 128),
block_size: Size2i::new(16, 16),
block_stride: Size2i::new(8, 8),
cell_size: Size2i::new(8, 8),
nbins: 9,
            win_sigma,
l2hys_threshold: 0.2,
gamma_correction: false,
nlevels: DEFAULT_NLEVELS,
hit_threshold: 0f64,
win_stride: Size2i::new(8, 8),
padding: Size2i::default(),
scale: 1.05,
group_threshold: 2,
final_threshold: 2.0,
use_meanshift_grouping: false,
}
}
}
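// A small tuning sketch: struct-update syntax over `Default` changes a couple
// of fields and inherits the rest, so nothing here is assumed beyond this
// module's own definitions.
#[allow(dead_code)]
fn wide_stride_params() -> HogParams {
    HogParams {
        win_stride: Size2i::new(16, 16), // coarser scan: faster, less precise
        group_threshold: 0,              // 0 disables rectangle grouping
        ..HogParams::default()
    }
}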
enum CHogDescriptor {}
/// `HogDescriptor` implements Histogram of Oriented Gradients.
#[derive(Debug)]
pub struct HogDescriptor {
inner: *mut CHogDescriptor,
/// Hog parameters.
pub params: HogParams,
}
unsafe impl Send for HogDescriptor {}
extern "C" {
fn cv_hog_new() -> *mut CHogDescriptor;
fn cv_hog_drop(hog: *mut CHogDescriptor);
fn cv_hog_set_svm_detector(hog: *mut CHogDescriptor, svm: *mut CSvmDetector);
fn cv_hog_detect(
hog: *mut CHogDescriptor,
image: *mut CMat,
objs: *mut CVec<Rect>,
weights: *mut CVec<c_double>,
win_stride: Size2i,
padding: Size2i,
scale: c_double,
final_threshold: c_double,
use_means_shift: bool,
);
}
impl Default for HogDescriptor {
fn default() -> HogDescriptor {
HogDescriptor {
inner: unsafe { cv_hog_new() },
params: HogParams::default(),
}
}
}
impl ObjectDetect for HogDescriptor {
fn detect(&self, image: &Mat) -> Vec<(Rect, f64)> {
let mut detected = CVec::<Rect>::default();
let mut weights = CVec::<c_double>::default();
unsafe {
cv_hog_detect(
self.inner,
image.inner,
&mut detected,
&mut weights,
self.params.win_stride,
self.params.padding,
self.params.scale,
self.params.final_threshold,
self.params.use_meanshift_grouping,
)
}
let results = detected.unpack();
let weights = weights.unpack();
results.into_iter().zip(weights).collect::<Vec<_>>()
}
}
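// End-to-end sketch using only items from this module: default parameters plus
// the built-in people detector. The input is assumed to be a valid CV_8U image
// supplied by the caller.
#[allow(dead_code)]
fn hog_people_example(image: &Mat) -> Vec<(Rect, f64)> {
    let mut hog = HogDescriptor::with_params(HogParams::default());
    hog.set_svm_detector(SvmDetector::default_people_detector());
    hog.detect(image)
}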
impl HogDescriptor {
/// Creates a HogDescriptor with provided parameters.
pub fn with_params(params: HogParams) -> HogDescriptor {
HogDescriptor {
inner: unsafe { cv_hog_new() },
            params,
}
}
| /// Sets the SVM detector.
pub fn set_svm_detector(&mut self, detector: SvmDetector) {
unsafe { cv_hog_set_svm_detector(self.inner, detector.inner) }
}
} | random_line_split |
|
objdetect.rs | c_int,
min_size: Size2i,
max_size: Size2i,
);
}
/// We can safely send the classifier (a mutable pointer) to a different thread
unsafe impl Send for CascadeClassifier {}
/// An object detect trait.
pub trait ObjectDetect {
/// Detects the object inside this image and returns a list of detections
/// with their confidence.
fn detect(&self, image: &Mat) -> Vec<(Rect, f64)>;
}
/// Cascade classifier class for object detection.
#[derive(Debug)]
pub struct CascadeClassifier {
inner: *mut CCascadeClassifier,
}
impl ObjectDetect for CascadeClassifier {
fn detect(&self, image: &Mat) -> Vec<(Rect, f64)> {
self.detect_multiscale(image)
.into_iter()
.map(|r| (r, 0f64))
.collect::<Vec<_>>()
}
}
impl CascadeClassifier {
/// Creates a cascade classifier, uninitialized. Before use, call load.
pub fn new() -> CascadeClassifier {
CascadeClassifier {
inner: unsafe { cv_cascade_classifier_new() },
}
}
/// Creates a cascade classifier using the model specified.
pub fn from_path<P: AsRef<Path>>(path: P) -> Result<Self, Error> {
let cc = CascadeClassifier::new();
cc.load(path)?;
Ok(cc)
}
/// Loads the classifier model from a path.
pub fn load<P: AsRef<Path>>(&self, path: P) -> Result<(), Error> {
if let Some(p) = path.as_ref().to_str() {
let s = CString::new(p)?;
if unsafe { cv_cascade_classifier_load(self.inner, (&s).as_ptr()) } {
return Ok(());
}
}
Err(CvError::InvalidPath(path.as_ref().to_path_buf()).into())
}
/// The default detection uses scale factor 1.1, minNeighbors 3, no min size
/// or max size.
pub fn detect_multiscale(&self, mat: &Mat) -> Vec<Rect> {
self.detect_with_params(mat, 1.1, 3, Size2i::default(), Size2i::default())
}
/// Detects the object using parameters specified.
///
/// * `mat` - Matrix of the type CV_8U containing an image where objects are
/// detected.
/// * `scale_factor` - Parameter specifying how much the image size is
/// reduced at each image scale.
/// * `min_neighbors` - Parameter specifying how many neighbors each
/// candidate rectangle should have to retain it.
/// * `min_size` - Minimum possible object size. Objects smaller than that
/// are ignored.
/// * `max_size` - Maximum possible object size. Objects larger than that
    /// are ignored.
///
/// OpenCV has a parameter (`flags`) that's not used at all.
pub fn detect_with_params(
&self,
mat: &Mat,
scale_factor: f32,
min_neighbors: c_int,
min_size: Size2i,
max_size: Size2i,
) -> Vec<Rect> {
let mut c_result = CVec::<Rect>::default();
unsafe {
cv_cascade_classifier_detect(
self.inner,
mat.inner,
&mut c_result,
scale_factor as c_double,
min_neighbors,
0,
min_size,
max_size,
)
}
c_result.unpack()
}
}
impl Drop for CascadeClassifier {
fn drop(&mut self) {
unsafe {
cv_cascade_classifier_drop(self.inner);
}
}
}
#[derive(Debug, Clone, Copy)]
/// Opaque type for C/C++ SvmDetector object
pub enum CSvmDetector {}
/// SvmDetector
#[derive(Debug)]
pub struct SvmDetector {
/// Pointer to the inner data structure
pub(crate) inner: *mut CSvmDetector,
}
extern "C" {
fn cv_hog_default_people_detector() -> *mut CSvmDetector;
fn cv_hog_daimler_people_detector() -> *mut CSvmDetector;
fn cv_hog_detector_drop(d: *mut CSvmDetector);
}
impl SvmDetector {
/// The built-in people detector.
///
    /// The size of the default people detector is 64x128, which means that the
    /// people you would want to detect have to be at least 64x128.
pub fn default_people_detector() -> SvmDetector {
SvmDetector {
inner: unsafe { cv_hog_default_people_detector() },
}
}
/// Returns the Daimler people detector.
pub fn daimler_people_detector() -> SvmDetector {
SvmDetector {
inner: unsafe { cv_hog_daimler_people_detector() },
}
}
}
impl Drop for SvmDetector {
fn drop(&mut self) {
unsafe {
cv_hog_detector_drop(self.inner);
}
}
}
/// Parameters that controls the behavior of HOG.
#[derive(Debug, Clone, Copy)]
pub struct HogParams {
/// Detection window size. Align to block size and block stride. The default
/// is 64x128, trained the same as original paper.
pub win_size: Size2i,
/// Block size in pixels. Align to cell size. Only (16,16) is supported for
/// now (at least for GPU).
pub block_size: Size2i,
/// Block stride. It must be a multiple of cell size.
pub block_stride: Size2i,
/// Cell size. Only (8, 8) is supported for now.
pub cell_size: Size2i,
/// Number of bins. Only 9 bins per cell are supported for now.
pub nbins: c_int,
/// Gaussian smoothing window parameter. Default -1 for CPU and 4.0 for GPU.
pub win_sigma: f64,
/// L2-Hys normalization method shrinkage. Default 0.2.
pub l2hys_threshold: f64,
/// Flag to specify whether the gamma correction preprocessing is required
/// or not. Default false.
pub gamma_correction: bool,
/// Maximum number of detection window increases (HOG scales). Default: 64.
pub nlevels: usize,
// =======================================================================
// Functions from detect function
// =======================================================================
/// Threshold for the distance between features and SVM classifying
    /// plane. Usually it is 0 and should be specified in the detector
/// coefficients (as the last free coefficient). But if the free coefficient
/// is omitted (which is allowed), you can specify it manually here.
pub hit_threshold: f64,
/// Window stride. It must be a multiple of block stride.
pub win_stride: Size2i,
/// Padding
pub padding: Size2i,
/// Coefficient of the detection window increase.
pub scale: f64,
/// Coefficient to regulate the similarity threshold. When detected, some
/// objects can be covered by many rectangles. 0 means not to perform
/// grouping.
pub group_threshold: c_int,
/// The useMeanShiftGrouping parameter is a boolean indicating whether or
/// not mean-shift grouping should be performed to handle potential
/// overlapping bounding boxes. While this value should not be set and users
/// should employ non-maxima suppression instead, we support setting it as a
/// library function.
pub use_meanshift_grouping: bool,
/// The `finalThreshold` parameter is mainly used to select the clusters
/// that have at least `finalThreshold + 1` rectangles. This parameter is
/// passed when meanShift is enabled; the function rejects the small
/// clusters containing less than or equal to `finalThreshold` rectangles,
/// computes the average rectangle size for the rest of the accepted
/// clusters and adds those to the output rectangle list.
pub final_threshold: f64,
}
const DEFAULT_WIN_SIGMA: f64 = -1f64;
const DEFAULT_NLEVELS: usize = 64;
impl Default for HogParams {
fn default() -> HogParams {
let win_sigma = {
if cfg!(feature = "cuda") {
4.0
} else {
DEFAULT_WIN_SIGMA
}
};
HogParams {
win_size: Size2i::new(64, 128),
block_size: Size2i::new(16, 16),
block_stride: Size2i::new(8, 8),
cell_size: Size2i::new(8, 8),
nbins: 9,
            win_sigma,
l2hys_threshold: 0.2,
gamma_correction: false,
nlevels: DEFAULT_NLEVELS,
hit_threshold: 0f64,
win_stride: Size2i::new(8, 8),
padding: Size2i::default(),
scale: 1.05,
group_threshold: 2,
final_threshold: 2.0,
use_meanshift_grouping: false,
}
}
}
enum CHogDescriptor {}
/// `HogDescriptor` implements Histogram of Oriented Gradients.
#[derive(Debug)]
pub struct | {
inner: *mut CHogDescriptor,
| HogDescriptor | identifier_name |
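The HogParams defaults above (64x128 window, 8x8 stride, scale 1.05, hit_threshold 0) are the stock OpenCV HOG people-detection settings. As a rough illustration of the same values in action — via OpenCV's Python bindings rather than these Rust wrappers, and with a made-up image path — a sketch:

import cv2

# Sketch only: default people detector with the parameter values
# documented by HogParams above.
hog = cv2.HOGDescriptor()
hog.setSVMDetector(cv2.HOGDescriptor_getDefaultPeopleDetector())
img = cv2.imread('people.jpg')  # hypothetical test image
rects, weights = hog.detectMultiScale(
    img, hitThreshold=0, winStride=(8, 8), padding=(0, 0), scale=1.05)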
objdetect.rs | c_int,
min_size: Size2i,
max_size: Size2i,
);
}
/// We can safely send the classifier (a mutable pointer) to a different thread
unsafe impl Send for CascadeClassifier {}
/// An object detect trait.
pub trait ObjectDetect {
/// Detects the object inside this image and returns a list of detections
/// with their confidence.
fn detect(&self, image: &Mat) -> Vec<(Rect, f64)>;
}
/// Cascade classifier class for object detection.
#[derive(Debug)]
pub struct CascadeClassifier {
inner: *mut CCascadeClassifier,
}
impl ObjectDetect for CascadeClassifier {
fn detect(&self, image: &Mat) -> Vec<(Rect, f64)> {
self.detect_multiscale(image)
.into_iter()
.map(|r| (r, 0f64))
.collect::<Vec<_>>()
}
}
impl CascadeClassifier {
/// Creates a cascade classifier, uninitialized. Before use, call load.
pub fn new() -> CascadeClassifier {
CascadeClassifier {
inner: unsafe { cv_cascade_classifier_new() },
}
}
/// Creates a cascade classifier using the model specified.
pub fn from_path<P: AsRef<Path>>(path: P) -> Result<Self, Error> {
let cc = CascadeClassifier::new();
cc.load(path)?;
Ok(cc)
}
/// Loads the classifier model from a path.
pub fn load<P: AsRef<Path>>(&self, path: P) -> Result<(), Error> |
/// The default detection uses scale factor 1.1, minNeighbors 3, no min size
/// or max size.
pub fn detect_multiscale(&self, mat: &Mat) -> Vec<Rect> {
self.detect_with_params(mat, 1.1, 3, Size2i::default(), Size2i::default())
}
/// Detects the object using parameters specified.
///
/// * `mat` - Matrix of the type CV_8U containing an image where objects are
/// detected.
/// * `scale_factor` - Parameter specifying how much the image size is
/// reduced at each image scale.
/// * `min_neighbors` - Parameter specifying how many neighbors each
/// candidate rectangle should have to retain it.
/// * `min_size` - Minimum possible object size. Objects smaller than that
/// are ignored.
/// * `max_size` - Maximum possible object size. Objects larger than that
/// are ignored.
///
/// OpenCV has a parameter (`flags`) that's not used at all.
pub fn detect_with_params(
&self,
mat: &Mat,
scale_factor: f32,
min_neighbors: c_int,
min_size: Size2i,
max_size: Size2i,
) -> Vec<Rect> {
let mut c_result = CVec::<Rect>::default();
unsafe {
cv_cascade_classifier_detect(
self.inner,
mat.inner,
&mut c_result,
scale_factor as c_double,
min_neighbors,
0,
min_size,
max_size,
)
}
c_result.unpack()
}
}
impl Drop for CascadeClassifier {
fn drop(&mut self) {
unsafe {
cv_cascade_classifier_drop(self.inner);
}
}
}
#[derive(Debug, Clone, Copy)]
/// Opaque type for C/C++ SvmDetector object
pub enum CSvmDetector {}
/// SvmDetector
#[derive(Debug)]
pub struct SvmDetector {
/// Pointer to the inner data structure
pub(crate) inner: *mut CSvmDetector,
}
extern "C" {
fn cv_hog_default_people_detector() -> *mut CSvmDetector;
fn cv_hog_daimler_people_detector() -> *mut CSvmDetector;
fn cv_hog_detector_drop(d: *mut CSvmDetector);
}
impl SvmDetector {
/// The built-in people detector.
///
/// The size of the default people detector is 64x128, which means that the
/// people you want to detect have to be at least 64x128.
pub fn default_people_detector() -> SvmDetector {
SvmDetector {
inner: unsafe { cv_hog_default_people_detector() },
}
}
/// Returns the Daimler people detector.
pub fn daimler_people_detector() -> SvmDetector {
SvmDetector {
inner: unsafe { cv_hog_daimler_people_detector() },
}
}
}
impl Drop for SvmDetector {
fn drop(&mut self) {
unsafe {
cv_hog_detector_drop(self.inner);
}
}
}
/// Parameters that control the behavior of HOG.
#[derive(Debug, Clone, Copy)]
pub struct HogParams {
/// Detection window size. Align to block size and block stride. The default
/// is 64x128, trained the same way as the original paper.
pub win_size: Size2i,
/// Block size in pixels. Align to cell size. Only (16,16) is supported for
/// now (at least for GPU).
pub block_size: Size2i,
/// Block stride. It must be a multiple of cell size.
pub block_stride: Size2i,
/// Cell size. Only (8, 8) is supported for now.
pub cell_size: Size2i,
/// Number of bins. Only 9 bins per cell are supported for now.
pub nbins: c_int,
/// Gaussian smoothing window parameter. Default -1 for CPU and 4.0 for GPU.
pub win_sigma: f64,
/// L2-Hys normalization method shrinkage. Default 0.2.
pub l2hys_threshold: f64,
/// Flag to specify whether the gamma correction preprocessing is required
/// or not. Default false.
pub gamma_correction: bool,
/// Maximum number of detection window increases (HOG scales). Default: 64.
pub nlevels: usize,
// =======================================================================
// Functions from detect function
// =======================================================================
/// Threshold for the distance between features and SVM classifying
/// plane. Usually it is 0 and should be specified in the detector
/// coefficients (as the last free coefficient). But if the free coefficient
/// is omitted (which is allowed), you can specify it manually here.
pub hit_threshold: f64,
/// Window stride. It must be a multiple of block stride.
pub win_stride: Size2i,
/// Padding
pub padding: Size2i,
/// Coefficient of the detection window increase.
pub scale: f64,
/// Coefficient to regulate the similarity threshold. When detected, some
/// objects can be covered by many rectangles. 0 means not to perform
/// grouping.
pub group_threshold: c_int,
/// The useMeanShiftGrouping parameter is a boolean indicating whether or
/// not mean-shift grouping should be performed to handle potential
/// overlapping bounding boxes. While this value should not be set and users
/// should employ non-maxima suppression instead, we support setting it as a
/// library function.
pub use_meanshift_grouping: bool,
/// The `finalThreshold` parameter is mainly used to select the clusters
/// that have at least `finalThreshold + 1` rectangles. This parameter is
/// passed when meanShift is enabled; the function rejects the small
/// clusters containing less than or equal to `finalThreshold` rectangles,
/// computes the average rectangle size for the rest of the accepted
/// clusters and adds those to the output rectangle list.
pub final_threshold: f64,
}
const DEFAULT_WIN_SIGMA: f64 = -1f64;
const DEFAULT_NLEVELS: usize = 64;
impl Default for HogParams {
fn default() -> HogParams {
let win_sigma = {
if cfg!(feature = "cuda") {
4.0
} else {
DEFAULT_WIN_SIGMA
}
};
HogParams {
win_size: Size2i::new(64, 128),
block_size: Size2i::new(16, 16),
block_stride: Size2i::new(8, 8),
cell_size: Size2i::new(8, 8),
nbins: 9,
win_sigma: win_sigma,
l2hys_threshold: 0.2,
gamma_correction: false,
nlevels: DEFAULT_NLEVELS,
hit_threshold: 0f64,
win_stride: Size2i::new(8, 8),
padding: Size2i::default(),
scale: 1.05,
group_threshold: 2,
final_threshold: 2.0,
use_meanshift_grouping: false,
}
}
}
enum CHogDescriptor {}
/// `HogDescriptor` implements Histogram of Oriented Gradients.
#[derive(Debug)]
pub struct HogDescriptor {
inner: *mut CHogDescriptor,
| {
if let Some(p) = path.as_ref().to_str() {
let s = CString::new(p)?;
if unsafe { cv_cascade_classifier_load(self.inner, (&s).as_ptr()) } {
return Ok(());
}
}
Err(CvError::InvalidPath(path.as_ref().to_path_buf()).into())
} | identifier_body |
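The load/detect flow above maps directly onto OpenCV's CascadeClassifier. For reference, the equivalent calls through OpenCV's Python bindings (not this crate's API; model and image paths are hypothetical):

import cv2

# Sketch: counterpart of CascadeClassifier::from_path + detect_multiscale
# with the same defaults (scale factor 1.1, min_neighbors 3, no size caps).
cc = cv2.CascadeClassifier('haarcascade_frontalface_default.xml')
if cc.empty():  # load failure, analogous to the Err(...) branch above
    raise IOError('could not load cascade model')
gray = cv2.cvtColor(cv2.imread('faces.jpg'), cv2.COLOR_BGR2GRAY)
rects = cc.detectMultiScale(gray, scaleFactor=1.1, minNeighbors=3)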
objdetect.rs | c_int,
min_size: Size2i,
max_size: Size2i,
);
}
/// We can safely send the classifier (a mutable pointer) to a different thread
unsafe impl Send for CascadeClassifier {}
/// An object detect trait.
pub trait ObjectDetect {
/// Detects the object inside this image and returns a list of detections
/// with their confidence.
fn detect(&self, image: &Mat) -> Vec<(Rect, f64)>;
}
/// Cascade classifier class for object detection.
#[derive(Debug)]
pub struct CascadeClassifier {
inner: *mut CCascadeClassifier,
}
impl ObjectDetect for CascadeClassifier {
fn detect(&self, image: &Mat) -> Vec<(Rect, f64)> {
self.detect_multiscale(image)
.into_iter()
.map(|r| (r, 0f64))
.collect::<Vec<_>>()
}
}
impl CascadeClassifier {
/// Creates a cascade classifier, uninitialized. Before use, call load.
pub fn new() -> CascadeClassifier {
CascadeClassifier {
inner: unsafe { cv_cascade_classifier_new() },
}
}
/// Creates a cascade classifier using the model specified.
pub fn from_path<P: AsRef<Path>>(path: P) -> Result<Self, Error> {
let cc = CascadeClassifier::new();
cc.load(path)?;
Ok(cc)
}
/// Loads the classifier model from a path.
pub fn load<P: AsRef<Path>>(&self, path: P) -> Result<(), Error> {
if let Some(p) = path.as_ref().to_str() |
Err(CvError::InvalidPath(path.as_ref().to_path_buf()).into())
}
/// The default detection uses scale factor 1.1, minNeighbors 3, no min size
/// or max size.
pub fn detect_multiscale(&self, mat: &Mat) -> Vec<Rect> {
self.detect_with_params(mat, 1.1, 3, Size2i::default(), Size2i::default())
}
/// Detects the object using parameters specified.
///
/// * `mat` - Matrix of the type CV_8U containing an image where objects are
/// detected.
/// * `scale_factor` - Parameter specifying how much the image size is
/// reduced at each image scale.
/// * `min_neighbors` - Parameter specifying how many neighbors each
/// candidate rectangle should have to retain it.
/// * `min_size` - Minimum possible object size. Objects smaller than that
/// are ignored.
/// * `max_size` - Maximum possible object size. Objects larger than that
/// are ignored.
///
/// OpenCV has a parameter (`flags`) that's not used at all.
pub fn detect_with_params(
&self,
mat: &Mat,
scale_factor: f32,
min_neighbors: c_int,
min_size: Size2i,
max_size: Size2i,
) -> Vec<Rect> {
let mut c_result = CVec::<Rect>::default();
unsafe {
cv_cascade_classifier_detect(
self.inner,
mat.inner,
&mut c_result,
scale_factor as c_double,
min_neighbors,
0,
min_size,
max_size,
)
}
c_result.unpack()
}
}
impl Drop for CascadeClassifier {
fn drop(&mut self) {
unsafe {
cv_cascade_classifier_drop(self.inner);
}
}
}
#[derive(Debug, Clone, Copy)]
/// Opaque type for C/C++ SvmDetector object
pub enum CSvmDetector {}
/// SvmDetector
#[derive(Debug)]
pub struct SvmDetector {
/// Pointer to the inner data structure
pub(crate) inner: *mut CSvmDetector,
}
extern "C" {
fn cv_hog_default_people_detector() -> *mut CSvmDetector;
fn cv_hog_daimler_people_detector() -> *mut CSvmDetector;
fn cv_hog_detector_drop(d: *mut CSvmDetector);
}
impl SvmDetector {
/// The built-in people detector.
///
/// The size of the default people detector is 64x128, which means that the
/// people you want to detect have to be at least 64x128.
pub fn default_people_detector() -> SvmDetector {
SvmDetector {
inner: unsafe { cv_hog_default_people_detector() },
}
}
/// Returns the Daimler people detector.
pub fn daimler_people_detector() -> SvmDetector {
SvmDetector {
inner: unsafe { cv_hog_daimler_people_detector() },
}
}
}
impl Drop for SvmDetector {
fn drop(&mut self) {
unsafe {
cv_hog_detector_drop(self.inner);
}
}
}
/// Parameters that control the behavior of HOG.
#[derive(Debug, Clone, Copy)]
pub struct HogParams {
/// Detection window size. Align to block size and block stride. The default
/// is 64x128, trained the same way as the original paper.
pub win_size: Size2i,
/// Block size in pixels. Align to cell size. Only (16,16) is supported for
/// now (at least for GPU).
pub block_size: Size2i,
/// Block stride. It must be a multiple of cell size.
pub block_stride: Size2i,
/// Cell size. Only (8, 8) is supported for now.
pub cell_size: Size2i,
/// Number of bins. Only 9 bins per cell are supported for now.
pub nbins: c_int,
/// Gaussian smoothing window parameter. Default -1 for CPU and 4.0 for GPU.
pub win_sigma: f64,
/// L2-Hys normalization method shrinkage. Default 0.2.
pub l2hys_threshold: f64,
/// Flag to specify whether the gamma correction preprocessing is required
/// or not. Default false.
pub gamma_correction: bool,
/// Maximum number of detection window increases (HOG scales). Default: 64.
pub nlevels: usize,
// =======================================================================
// Functions from detect function
// =======================================================================
/// Threshold for the distance between features and SVM classifying
/// plane. Usually it is 0 and should be specified in the detector
/// coefficients (as the last free coefficient). But if the free coefficient
/// is omitted (which is allowed), you can specify it manually here.
pub hit_threshold: f64,
/// Window stride. It must be a multiple of block stride.
pub win_stride: Size2i,
/// Padding
pub padding: Size2i,
/// Coefficient of the detection window increase.
pub scale: f64,
/// Coefficient to regulate the similarity threshold. When detected, some
/// objects can be covered by many rectangles. 0 means not to perform
/// grouping.
pub group_threshold: c_int,
/// The useMeanShiftGrouping parameter is a boolean indicating whether or
/// not mean-shift grouping should be performed to handle potential
/// overlapping bounding boxes. While this value should not be set and users
/// should employ non-maxima suppression instead, we support setting it as a
/// library function.
pub use_meanshift_grouping: bool,
/// The `finalThreshold` parameter is mainly used to select the clusters
/// that have at least `finalThreshold + 1` rectangles. This parameter is
/// passed when meanShift is enabled; the function rejects the small
/// clusters containing less than or equal to `finalThreshold` rectangles,
/// computes the average rectangle size for the rest of the accepted
/// clusters and adds those to the output rectangle list.
pub final_threshold: f64,
}
const DEFAULT_WIN_SIGMA: f64 = -1f64;
const DEFAULT_NLEVELS: usize = 64;
impl Default for HogParams {
fn default() -> HogParams {
let win_sigma = {
if cfg!(feature = "cuda") {
4.0
} else {
DEFAULT_WIN_SIGMA
}
};
HogParams {
win_size: Size2i::new(64, 128),
block_size: Size2i::new(16, 16),
block_stride: Size2i::new(8, 8),
cell_size: Size2i::new(8, 8),
nbins: 9,
win_sigma: win_sigma,
l2hys_threshold: 0.2,
gamma_correction: false,
nlevels: DEFAULT_NLEVELS,
hit_threshold: 0f64,
win_stride: Size2i::new(8, 8),
padding: Size2i::default(),
scale: 1.05,
group_threshold: 2,
final_threshold: 2.0,
use_meanshift_grouping: false,
}
}
}
enum CHogDescriptor {}
/// `HogDescriptor` implements Histogram of Oriented Gradients.
#[derive(Debug)]
pub struct HogDescriptor {
inner: *mut CHogDescriptor,
| {
let s = CString::new(p)?;
if unsafe { cv_cascade_classifier_load(self.inner, (&s).as_ptr()) } {
return Ok(());
}
} | conditional_block |
git.py | License as published by the Free Software Foundation; either
# version 2 of the License, or (at your option) any later version.
#
# This library is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Library General Public License for more details.
#
# You should have received a copy of the GNU Library General Public
# License along with this library; if not, write to the
# Free Software Foundation, Inc., 59 Temple Place - Suite 330,
# Boston, MA 02111-1307, USA.
import os
import shutil
from cerbero.config import Platform
from cerbero.utils import shell
# Clean-up LD environment to avoid library version mismatches while running
# the system git
CLEAN_ENV = os.environ.copy()
if CLEAN_ENV.has_key('LD_LIBRARY_PATH'):
CLEAN_ENV.pop('LD_LIBRARY_PATH')
GIT = 'git'
def init(git_dir):
'''
Initialize a git repository with 'git init'
@param git_dir: path of the git repository
@type git_dir: str
'''
shell.call('mkdir -p %s' % git_dir)
shell.call('%s init' % GIT, git_dir, env=CLEAN_ENV)
def clean(git_dir):
'''
Clean a git repository with clean -dfx
@param git_dir: path of the git repository
@type git_dir: str
'''
return shell.call('%s clean -dfx' % GIT, git_dir, env=CLEAN_ENV)
def list_tags(git_dir, fail=True):
|
def create_tag(git_dir, tagname, tagdescription, commit, fail=True):
'''
Create a tag using commit
@param git_dir: path of the git repository
@type git_dir: str
@param tagname: name of the tag to create
@type tagname: str
@param tagdescription: the tag description
@type tagdescription: str
@param commit: the tag commit to use
@type commit: str
@param fail: raise an error if the command failed
@type fail: bool
'''
shell.call('%s tag -s %s -m "%s" %s' %
(GIT, tagname, tagdescription, commit), git_dir, fail=fail,
env=CLEAN_ENV)
return shell.call('%s push origin %s' % (GIT, tagname), git_dir, fail=fail,
env=CLEAN_ENV)
def delete_tag(git_dir, tagname, fail=True):
'''
Delete a tag
@param git_dir: path of the git repository
@type git_dir: str
@param tagname: name of the tag to delete
@type tagname: str
@param fail: raise an error if the command failed
@type fail: bool
'''
return shell.call('%s tag -d %s' % (GIT, tagname), git_dir, fail=fail,
env=CLEAN_ENV)
def fetch(git_dir, fail=True):
'''
Fetch all refs from all the remotes
@param git_dir: path of the git repository
@type git_dir: str
@param fail: raise an error if the command failed
@type fail: bool
'''
return shell.call('%s fetch --all' % GIT, git_dir, fail=fail, env=CLEAN_ENV)
def submodules_update(git_dir, src_dir=None, fail=True):
'''
Update submodules from a local directory
@param git_dir: path of the git repository
@type git_dir: str
@param src_dir: path or base URI of the source directory
@type src_dir: str
@param fail: raise an error if the command failed
@type fail: bool
'''
if src_dir:
config = shell.check_call('%s config --file=.gitmodules --list' % GIT,
git_dir)
config_array = [s.split('=', 1) for s in config.split('\n')]
for c in config_array:
if c[0].startswith('submodule.') and c[0].endswith('.path'):
submodule = c[0][len('submodule.'):-len('.path')]
shell.call("%s config --file=.gitmodules submodule.%s.url %s" %
(GIT, submodule, os.path.join(src_dir, c[1])),
git_dir)
shell.call("%s submodule init" % GIT, git_dir)
shell.call("%s submodule sync" % GIT, git_dir)
shell.call("%s submodule update" % GIT, git_dir, fail=fail)
if src_dir:
for c in config_array:
if c[0].startswith('submodule.') and c[0].endswith('.url'):
shell.call("%s config --file=.gitmodules %s %s" %
(GIT, c[0], c[1]), git_dir)
shell.call("%s submodule sync" % GIT, git_dir)
def checkout(git_dir, commit):
'''
Reset a git repository to a given commit
@param git_dir: path of the git repository
@type git_dir: str
@param commit: the commit to checkout
@type commit: str
'''
return shell.call('%s reset --hard %s' % (GIT, commit), git_dir,
env=CLEAN_ENV)
def get_hash(git_dir, commit):
'''
Get a commit hash from a valid commit.
Can be used to check if a commit exists
@param git_dir: path of the git repository
@type git_dir: str
@param commit: the commit to log
@type commit: str
'''
return shell.check_call('%s show -s --pretty=%%H %s' %
(GIT, commit), git_dir, env=CLEAN_ENV)
def local_checkout(git_dir, local_git_dir, commit):
'''
Clone a repository for a given commit in a different location
@param git_dir: destination path of the git repository
@type git_dir: str
@param local_git_dir: path of the source git repository
@type local_git_dir: str
@param commit: the commit to checkout
@type commit: str
'''
# reset to a commit in case it's the first checkout and the master branch is
# missing
branch_name = 'cerbero_build'
shell.call('%s reset --hard %s' % (GIT, commit), local_git_dir,
env=CLEAN_ENV)
shell.call('%s branch %s' % (GIT, branch_name), local_git_dir, fail=False,
env=CLEAN_ENV)
shell.call('%s checkout %s' % (GIT, branch_name), local_git_dir,
env=CLEAN_ENV)
shell.call('%s reset --hard %s' % (GIT, commit), local_git_dir,
env=CLEAN_ENV)
shell.call('%s clone %s -s -b %s .' % (GIT, local_git_dir,
branch_name),
git_dir, env=CLEAN_ENV)
submodules_update(git_dir, local_git_dir)
def add_remote(git_dir, name, url):
'''
Add a remote to a git repository
@param git_dir: destination path of the git repository
@type git_dir: str
@param name: name of the remote
@type name: str
@param url: url of the remote
@type url: str
'''
try:
shell.call('%s remote add %s %s' % (GIT, name, url), git_dir,
env=CLEAN_ENV)
except:
shell.call('%s remote set-url %s %s' % (GIT, name, url), git_dir,
env=CLEAN_ENV)
def check_line_endings(platform):
'''
Checks that on Windows we don't use the automatic line-ending conversion,
as it breaks everything
@param platform: the host platform
@type platform: L{cerbero.config.Platform}
@return: true if git config is core.autocrlf=false
@rtype: bool
'''
if platform != Platform.WINDOWS:
return True
val = shell.check_call('%s config --get core.autocrlf' % GIT, env=CLEAN_ENV)
if ('false' in val.lower()):
return True
return False
def init_directory(git_dir):
'''
Initialize a git repository with the contents
of a directory
@param git_dir: path of the git repository
@type git_dir: str
'''
init(git_dir)
try:
| '''
List all tags
@param git_dir: path of the git repository
@type git_dir: str
@param fail: raise an error if the command failed
@type fail: bool
@return: list of tag names (str)
@rtype: list
'''
tags = shell.check_call('%s tag -l' % GIT, git_dir, fail=fail, env=CLEAN_ENV)
tags = tags.strip()
if tags:
tags = tags.split('\n')
return tags | identifier_body |
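A minimal driver for the helpers above (repository path and tag name are invented; fail=False is used because a fresh repository has no origin to push the tag to):

repo = '/tmp/scratch-repo'  # hypothetical path
init(repo)
for tag in list_tags(repo, fail=False):  # empty list on a fresh repo
    print('existing tag: %s' % tag)
create_tag(repo, '1.0', 'First release', 'HEAD', fail=False)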
git.py | # License as published by the Free Software Foundation; either
# version 2 of the License, or (at your option) any later version.
#
# This library is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Library General Public License for more details.
#
# You should have received a copy of the GNU Library General Public
# License along with this library; if not, write to the
# Free Software Foundation, Inc., 59 Temple Place - Suite 330,
# Boston, MA 02111-1307, USA.
import os
import shutil
from cerbero.config import Platform
from cerbero.utils import shell
# Clean-up LD environment to avoid library version mismatches while running
# the system git
CLEAN_ENV = os.environ.copy()
if CLEAN_ENV.has_key('LD_LIBRARY_PATH'):
CLEAN_ENV.pop('LD_LIBRARY_PATH')
GIT = 'git'
def init(git_dir):
'''
Initialize a git repository with 'git init'
@param git_dir: path of the git repository
@type git_dir: str
'''
shell.call('mkdir -p %s' % git_dir)
shell.call('%s init' % GIT, git_dir, env=CLEAN_ENV)
def clean(git_dir):
'''
Clean a git repository with clean -dfx | '''
return shell.call('%s clean -dfx' % GIT, git_dir, env=CLEAN_ENV)
def list_tags(git_dir, fail=True):
'''
List all tags
@param git_dir: path of the git repository
@type git_dir: str
@param fail: raise an error if the command failed
@type fail: bool
@return: list of tag names (str)
@rtype: list
'''
tags = shell.check_call('%s tag -l' % GIT, git_dir, fail=fail, env=CLEAN_ENV)
tags = tags.strip()
if tags:
tags = tags.split('\n')
return tags
def create_tag(git_dir, tagname, tagdescription, commit, fail=True):
'''
Create a tag using commit
@param git_dir: path of the git repository
@type git_dir: str
@param tagname: name of the tag to create
@type tagname: str
@param tagdescription: the tag description
@type tagdescription: str
@param commit: the tag commit to use
@type commit: str
@param fail: raise an error if the command failed
@type fail: bool
'''
shell.call('%s tag -s %s -m "%s" %s' %
(GIT, tagname, tagdescription, commit), git_dir, fail=fail,
env=CLEAN_ENV)
return shell.call('%s push origin %s' % (GIT, tagname), git_dir, fail=fail,
env=CLEAN_ENV)
def delete_tag(git_dir, tagname, fail=True):
'''
Delete a tag
@param git_dir: path of the git repository
@type git_dir: str
@param tagname: name of the tag to delete
@type tagname: str
@param fail: raise an error if the command failed
@type fail: bool
'''
return shell.call('%s tag -d %s' % (GIT, tagname), git_dir, fail=fail,
env=CLEAN_ENV)
def fetch(git_dir, fail=True):
'''
Fetch all refs from all the remotes
@param git_dir: path of the git repository
@type git_dir: str
@param fail: raise an error if the command failed
@type fail: bool
'''
return shell.call('%s fetch --all' % GIT, git_dir, fail=fail, env=CLEAN_ENV)
def submodules_update(git_dir, src_dir=None, fail=True):
'''
Update submodules from a local directory
@param git_dir: path of the git repository
@type git_dir: str
@param src_dir: path or base URI of the source directory
@type src_dir: str
@param fail: raise an error if the command failed
@type fail: bool
'''
if src_dir:
config = shell.check_call('%s config --file=.gitmodules --list' % GIT,
git_dir)
config_array = [s.split('=', 1) for s in config.split('\n')]
for c in config_array:
if c[0].startswith('submodule.') and c[0].endswith('.path'):
submodule = c[0][len('submodule.'):-len('.path')]
shell.call("%s config --file=.gitmodules submodule.%s.url %s" %
(GIT, submodule, os.path.join(src_dir, c[1])),
git_dir)
shell.call("%s submodule init" % GIT, git_dir)
shell.call("%s submodule sync" % GIT, git_dir)
shell.call("%s submodule update" % GIT, git_dir, fail=fail)
if src_dir:
for c in config_array:
if c[0].startswith('submodule.') and c[0].endswith('.url'):
shell.call("%s config --file=.gitmodules %s %s" %
(GIT, c[0], c[1]), git_dir)
shell.call("%s submodule sync" % GIT, git_dir)
def checkout(git_dir, commit):
'''
Reset a git repository to a given commit
@param git_dir: path of the git repository
@type git_dir: str
@param commit: the commit to checkout
@type commit: str
'''
return shell.call('%s reset --hard %s' % (GIT, commit), git_dir,
env=CLEAN_ENV)
def get_hash(git_dir, commit):
'''
Get a commit hash from a valid commit.
Can be used to check if a commit exists
@param git_dir: path of the git repository
@type git_dir: str
@param commit: the commit to log
@type commit: str
'''
return shell.check_call('%s show -s --pretty=%%H %s' %
(GIT, commit), git_dir, env=CLEAN_ENV)
def local_checkout(git_dir, local_git_dir, commit):
'''
Clone a repository for a given commit in a different location
@param git_dir: destination path of the git repository
@type git_dir: str
@param local_git_dir: path of the source git repository
@type local_git_dir: str
@param commit: the commit to checkout
@type commit: str
'''
# reset to a commit in case it's the first checkout and the master branch is
# missing
branch_name = 'cerbero_build'
shell.call('%s reset --hard %s' % (GIT, commit), local_git_dir,
env=CLEAN_ENV)
shell.call('%s branch %s' % (GIT, branch_name), local_git_dir, fail=False,
env=CLEAN_ENV)
shell.call('%s checkout %s' % (GIT, branch_name), local_git_dir,
env=CLEAN_ENV)
shell.call('%s reset --hard %s' % (GIT, commit), local_git_dir,
env=CLEAN_ENV)
shell.call('%s clone %s -s -b %s .' % (GIT, local_git_dir,
branch_name),
git_dir, env=CLEAN_ENV)
submodules_update(git_dir, local_git_dir)
def add_remote(git_dir, name, url):
'''
Add a remote to a git repository
@param git_dir: destination path of the git repository
@type git_dir: str
@param name: name of the remote
@type name: str
@param url: url of the remote
@type url: str
'''
try:
shell.call('%s remote add %s %s' % (GIT, name, url), git_dir,
env=CLEAN_ENV)
except:
shell.call('%s remote set-url %s %s' % (GIT, name, url), git_dir,
env=CLEAN_ENV)
def check_line_endings(platform):
'''
Checks that on Windows we don't use the automatic line-ending conversion,
as it breaks everything
@param platform: the host platform
@type platform: L{cerbero.config.Platform}
@return: true if git config is core.autocrlf=false
@rtype: bool
'''
if platform != Platform.WINDOWS:
return True
val = shell.check_call('%s config --get core.autocrlf' % GIT, env=CLEAN_ENV)
if ('false' in val.lower()):
return True
return False
def init_directory(git_dir):
'''
Initialize a git repository with the contents
of a directory
@param git_dir: path of the git repository
@type git_dir: str
'''
init(git_dir)
try:
|
@param git_dir: path of the git repository
@type git_dir: str | random_line_split |
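A typical update sequence composes fetch, get_hash and checkout; a sketch with an invented clone path:

repo = '/var/cache/sources/glib'       # hypothetical clone
fetch(repo)                            # git fetch --all
sha = get_hash(repo, 'origin/master')  # resolve a ref to a full hash
checkout(repo, sha)                    # git reset --hard <sha>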
git.py | License as published by the Free Software Foundation; either
# version 2 of the License, or (at your option) any later version.
#
# This library is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Library General Public License for more details.
#
# You should have received a copy of the GNU Library General Public
# License along with this library; if not, write to the
# Free Software Foundation, Inc., 59 Temple Place - Suite 330,
# Boston, MA 02111-1307, USA.
import os
import shutil
from cerbero.config import Platform
from cerbero.utils import shell
# Clean-up LD environment to avoid library version mismatches while running
# the system git
CLEAN_ENV = os.environ.copy()
if CLEAN_ENV.has_key('LD_LIBRARY_PATH'):
CLEAN_ENV.pop('LD_LIBRARY_PATH')
GIT = 'git'
def init(git_dir):
'''
Initialize a git repository with 'git init'
@param git_dir: path of the git repository
@type git_dir: str
'''
shell.call('mkdir -p %s' % git_dir)
shell.call('%s init' % GIT, git_dir, env=CLEAN_ENV)
def clean(git_dir):
'''
Clean a git repository with clean -dfx
@param git_dir: path of the git repository
@type git_dir: str
'''
return shell.call('%s clean -dfx' % GIT, git_dir, env=CLEAN_ENV)
def list_tags(git_dir, fail=True):
'''
List all tags
@param git_dir: path of the git repository
@type git_dir: str
@param fail: raise an error if the command failed
@type fail: bool
@return: list of tag names (str)
@rtype: list
'''
tags = shell.check_call('%s tag -l' % GIT, git_dir, fail=fail, env=CLEAN_ENV)
tags = tags.strip()
if tags:
tags = tags.split('\n')
return tags
def create_tag(git_dir, tagname, tagdescription, commit, fail=True):
'''
Create a tag using commit
@param git_dir: path of the git repository
@type git_dir: str
@param tagname: name of the tag to create
@type tagname: str
@param tagdescription: the tag description
@type tagdescription: str
@param commit: the tag commit to use
@type commit: str
@param fail: raise an error if the command failed
@type fail: bool
'''
shell.call('%s tag -s %s -m "%s" %s' %
(GIT, tagname, tagdescription, commit), git_dir, fail=fail,
env=CLEAN_ENV)
return shell.call('%s push origin %s' % (GIT, tagname), git_dir, fail=fail,
env=CLEAN_ENV)
def delete_tag(git_dir, tagname, fail=True):
'''
Delete a tag
@param git_dir: path of the git repository
@type git_dir: str
@param tagname: name of the tag to delete
@type tagname: str
@param fail: raise an error if the command failed
@type fail: bool
'''
return shell.call('%s tag -d %s' % (GIT, tagname), git_dir, fail=fail,
env=CLEAN_ENV)
def fetch(git_dir, fail=True):
'''
Fetch all refs from all the remotes
@param git_dir: path of the git repository
@type git_dir: str
@param fail: raise an error if the command failed
@type fail: bool
'''
return shell.call('%s fetch --all' % GIT, git_dir, fail=fail, env=CLEAN_ENV)
def submodules_update(git_dir, src_dir=None, fail=True):
'''
Update submodules from a local directory
@param git_dir: path of the git repository
@type git_dir: str
@param src_dir: path or base URI of the source directory
@type src_dir: str
@param fail: raise an error if the command failed
@type fail: bool
'''
if src_dir:
config = shell.check_call('%s config --file=.gitmodules --list' % GIT,
git_dir)
config_array = [s.split('=', 1) for s in config.split('\n')]
for c in config_array:
if c[0].startswith('submodule.') and c[0].endswith('.path'):
submodule = c[0][len('submodule.'):-len('.path')]
shell.call("%s config --file=.gitmodules submodule.%s.url %s" %
(GIT, submodule, os.path.join(src_dir, c[1])),
git_dir)
shell.call("%s submodule init" % GIT, git_dir)
shell.call("%s submodule sync" % GIT, git_dir)
shell.call("%s submodule update" % GIT, git_dir, fail=fail)
if src_dir:
for c in config_array:
|
shell.call("%s submodule sync" % GIT, git_dir)
def checkout(git_dir, commit):
'''
Reset a git repository to a given commit
@param git_dir: path of the git repository
@type git_dir: str
@param commit: the commit to checkout
@type commit: str
'''
return shell.call('%s reset --hard %s' % (GIT, commit), git_dir,
env=CLEAN_ENV)
def get_hash(git_dir, commit):
'''
Get a commit hash from a valid commit.
Can be used to check if a commit exists
@param git_dir: path of the git repository
@type git_dir: str
@param commit: the commit to log
@type commit: str
'''
return shell.check_call('%s show -s --pretty=%%H %s' %
(GIT, commit), git_dir, env=CLEAN_ENV)
def local_checkout(git_dir, local_git_dir, commit):
'''
Clone a repository for a given commit in a different location
@param git_dir: destination path of the git repository
@type git_dir: str
@param local_git_dir: path of the source git repository
@type local_git_dir: str
@param commit: the commit to checkout
@type commit: str
'''
# reset to a commit in case it's the first checkout and the master branch is
# missing
branch_name = 'cerbero_build'
shell.call('%s reset --hard %s' % (GIT, commit), local_git_dir,
env=CLEAN_ENV)
shell.call('%s branch %s' % (GIT, branch_name), local_git_dir, fail=False,
env=CLEAN_ENV)
shell.call('%s checkout %s' % (GIT, branch_name), local_git_dir,
env=CLEAN_ENV)
shell.call('%s reset --hard %s' % (GIT, commit), local_git_dir,
env=CLEAN_ENV)
shell.call('%s clone %s -s -b %s .' % (GIT, local_git_dir,
branch_name),
git_dir, env=CLEAN_ENV)
submodules_update(git_dir, local_git_dir)
def add_remote(git_dir, name, url):
'''
Add a remote to a git repository
@param git_dir: destination path of the git repository
@type git_dir: str
@param name: name of the remote
@type name: str
@param url: url of the remote
@type url: str
'''
try:
shell.call('%s remote add %s %s' % (GIT, name, url), git_dir,
env=CLEAN_ENV)
except:
shell.call('%s remote set-url %s %s' % (GIT, name, url), git_dir,
env=CLEAN_ENV)
def check_line_endings(platform):
'''
Checks that on Windows we don't use the automatic line-ending conversion,
as it breaks everything
@param platform: the host platform
@type platform: L{cerbero.config.Platform}
@return: true if git config is core.autocrlf=false
@rtype: bool
'''
if platform != Platform.WINDOWS:
return True
val = shell.check_call('%s config --get core.autocrlf' % GIT, env=CLEAN_ENV)
if ('false' in val.lower()):
return True
return False
def init_directory(git_dir):
'''
Initialize a git repository with the contents
of a directory
@param git_dir: path of the git repository
@type git_dir: str
'''
init(git_dir)
try:
| if c[0].startswith('submodule.') and c[0].endswith('.url'):
shell.call("%s config --file=.gitmodules %s %s" %
(GIT, c[0], c[1]), git_dir) | conditional_block |
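local_checkout above is what lets a build tree be created cheaply from an already-fetched mirror (git clone -s shares the object store instead of copying it). A sketch of how it might be driven (paths hypothetical; the destination must exist because the clone runs inside it):

import os

mirror = '/var/cache/sources/gstreamer'  # hypothetical local mirror
build_dir = '/tmp/build/gstreamer'
os.makedirs(build_dir)
local_checkout(build_dir, mirror, 'HEAD')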
git.py | See the GNU
# Library General Public License for more details.
#
# You should have received a copy of the GNU Library General Public
# License along with this library; if not, write to the
# Free Software Foundation, Inc., 59 Temple Place - Suite 330,
# Boston, MA 02111-1307, USA.
import os
import shutil
from cerbero.config import Platform
from cerbero.utils import shell
# Clean-up LD environment to avoid library version mismatches while running
# the system git
CLEAN_ENV = os.environ.copy()
if CLEAN_ENV.has_key('LD_LIBRARY_PATH'):
CLEAN_ENV.pop('LD_LIBRARY_PATH')
GIT = 'git'
def init(git_dir):
'''
Initialize a git repository with 'git init'
@param git_dir: path of the git repository
@type git_dir: str
'''
shell.call('mkdir -p %s' % git_dir)
shell.call('%s init' % GIT, git_dir, env=CLEAN_ENV)
def clean(git_dir):
'''
Clean a git repository with clean -dfx
@param git_dir: path of the git repository
@type git_dir: str
'''
return shell.call('%s clean -dfx' % GIT, git_dir, env=CLEAN_ENV)
def list_tags(git_dir, fail=True):
'''
List all tags
@param git_dir: path of the git repository
@type git_dir: str
@param fail: raise an error if the command failed
@type fail: bool
@return: list of tag names (str)
@rtype: list
'''
tags = shell.check_call('%s tag -l' % GIT, git_dir, fail=fail, env=CLEAN_ENV)
tags = tags.strip()
if tags:
tags = tags.split('\n')
return tags
def create_tag(git_dir, tagname, tagdescription, commit, fail=True):
'''
Create a tag using commit
@param git_dir: path of the git repository
@type git_dir: str
@param tagname: name of the tag to create
@type tagname: str
@param tagdescription: the tag description
@type tagdescription: str
@param commit: the tag commit to use
@type commit: str
@param fail: raise an error if the command failed
@type fail: bool
'''
shell.call('%s tag -s %s -m "%s" %s' %
(GIT, tagname, tagdescription, commit), git_dir, fail=fail,
env=CLEAN_ENV)
return shell.call('%s push origin %s' % (GIT, tagname), git_dir, fail=fail,
env=CLEAN_ENV)
def delete_tag(git_dir, tagname, fail=True):
'''
Delete a tag
@param git_dir: path of the git repository
@type git_dir: str
@param tagname: name of the tag to delete
@type tagname: str
@param fail: raise an error if the command failed
@type fail: bool
'''
return shell.call('%s tag -d %s' % (GIT, tagname), git_dir, fail=fail,
env=CLEAN_ENV)
def fetch(git_dir, fail=True):
'''
Fetch all refs from all the remotes
@param git_dir: path of the git repository
@type git_dir: str
@param fail: raise an error if the command failed
@type fail: bool
'''
return shell.call('%s fetch --all' % GIT, git_dir, fail=fail, env=CLEAN_ENV)
def submodules_update(git_dir, src_dir=None, fail=True):
'''
Update submodules from a local directory
@param git_dir: path of the git repository
@type git_dir: str
@param src_dir: path or base URI of the source directory
@type src_dir: str
@param fail: raise an error if the command failed
@type fail: bool
'''
if src_dir:
config = shell.check_call('%s config --file=.gitmodules --list' % GIT,
git_dir)
config_array = [s.split('=', 1) for s in config.split('\n')]
for c in config_array:
if c[0].startswith('submodule.') and c[0].endswith('.path'):
submodule = c[0][len('submodule.'):-len('.path')]
shell.call("%s config --file=.gitmodules submodule.%s.url %s" %
(GIT, submodule, os.path.join(src_dir, c[1])),
git_dir)
shell.call("%s submodule init" % GIT, git_dir)
shell.call("%s submodule sync" % GIT, git_dir)
shell.call("%s submodule update" % GIT, git_dir, fail=fail)
if src_dir:
for c in config_array:
if c[0].startswith('submodule.') and c[0].endswith('.url'):
shell.call("%s config --file=.gitmodules %s %s" %
(GIT, c[0], c[1]), git_dir)
shell.call("%s submodule sync" % GIT, git_dir)
def checkout(git_dir, commit):
'''
Reset a git repository to a given commit
@param git_dir: path of the git repository
@type git_dir: str
@param commit: the commit to checkout
@type commit: str
'''
return shell.call('%s reset --hard %s' % (GIT, commit), git_dir,
env=CLEAN_ENV)
def get_hash(git_dir, commit):
'''
Get a commit hash from a valid commit.
Can be used to check if a commit exists
@param git_dir: path of the git repository
@type git_dir: str
@param commit: the commit to log
@type commit: str
'''
return shell.check_call('%s show -s --pretty=%%H %s' %
(GIT, commit), git_dir, env=CLEAN_ENV)
def local_checkout(git_dir, local_git_dir, commit):
'''
Clone a repository for a given commit in a different location
@param git_dir: destination path of the git repository
@type git_dir: str
@param local_git_dir: path of the source git repository
@type local_git_dir: str
@param commit: the commit to checkout
@type commit: str
'''
# reset to a commit in case it's the first checkout and the master branch is
# missing
branch_name = 'cerbero_build'
shell.call('%s reset --hard %s' % (GIT, commit), local_git_dir,
env=CLEAN_ENV)
shell.call('%s branch %s' % (GIT, branch_name), local_git_dir, fail=False,
env=CLEAN_ENV)
shell.call('%s checkout %s' % (GIT, branch_name), local_git_dir,
env=CLEAN_ENV)
shell.call('%s reset --hard %s' % (GIT, commit), local_git_dir,
env=CLEAN_ENV)
shell.call('%s clone %s -s -b %s .' % (GIT, local_git_dir,
branch_name),
git_dir, env=CLEAN_ENV)
submodules_update(git_dir, local_git_dir)
def add_remote(git_dir, name, url):
'''
Add a remote to a git repository
@param git_dir: destination path of the git repository
@type git_dir: str
@param name: name of the remote
@type name: str
@param url: url of the remote
@type url: str
'''
try:
shell.call('%s remote add %s %s' % (GIT, name, url), git_dir,
env=CLEAN_ENV)
except:
shell.call('%s remote set-url %s %s' % (GIT, name, url), git_dir,
env=CLEAN_ENV)
def check_line_endings(platform):
'''
Checks that on Windows we don't use the automatic line-ending conversion,
as it breaks everything
@param platform: the host platform
@type platform: L{cerbero.config.Platform}
@return: true if git config is core.autocrlf=false
@rtype: bool
'''
if platform != Platform.WINDOWS:
return True
val = shell.check_call('%s config --get core.autocrlf' % GIT, env=CLEAN_ENV)
if ('false' in val.lower()):
return True
return False
def init_directory(git_dir):
'''
Initialize a git repository with the contents
of a directory
@param git_dir: path of the git repository
@type git_dir: str
'''
init(git_dir)
try:
shell.call('%s add --force -A .' % GIT, git_dir, env=CLEAN_ENV)
shell.call('%s commit -m "Initial commit" > /dev/null 2>&1' % GIT,
git_dir, env=CLEAN_ENV)
except:
pass
def | apply_patch | identifier_name |
|
DialogTypes.ts | import { Optional } from '@ephox/katamari';
import { Dialog } from 'tinymce/core/api/ui/Ui';
export type ListValue = Dialog.ListBoxSingleItemSpec;
export type ListGroup = Dialog.ListBoxNestedItemSpec;
export type ListItem = Dialog.ListBoxItemSpec;
export interface UserListItem {
text?: string;
title?: string;
value?: string;
url?: string;
menu?: UserListItem[];
}
export interface LinkDialogCatalog {
link: Optional<ListItem[]>;
targets: Optional<ListItem[]>;
rels: Optional<ListItem[]>;
classes: Optional<ListItem[]>;
anchor: Optional<ListItem[]>;
}
export interface LinkDialogInfo {
readonly anchor: {
readonly url: Optional<string>;
readonly text: Optional<string>;
readonly target: Optional<string>;
readonly rel: Optional<string>;
readonly linkClass: Optional<string>;
readonly title: Optional<string>;
};
readonly catalogs: LinkDialogCatalog;
readonly flags: {
readonly titleEnabled: boolean;
};
readonly optNode: Optional<HTMLAnchorElement>;
readonly onSubmit?: (api: Dialog.DialogInstanceApi<LinkDialogData>) => void;
}
export interface LinkDialogUrlData {
readonly value: string;
readonly meta?: LinkUrlMeta;
}
export type LinkDialogKey = 'text' | 'target' | 'rel' | 'linkClass' | 'title';
export interface LinkDialogData {
readonly url: LinkDialogUrlData;
readonly text: string;
readonly title: string;
readonly anchor: string;
readonly link: string;
readonly rel: string;
readonly target: string;
readonly linkClass: string;
}
export interface LinkDialogOutput {
readonly href: string;
readonly text: Optional<string>;
readonly target: Optional<string>;
readonly rel: Optional<string>;
readonly class: Optional<string>;
readonly title: Optional<string>;
}
interface LinkUrlMeta {
readonly text?: string;
readonly title?: string;
readonly attach?: () => void;
readonly original?: {
readonly value: string;
};
}
export interface AttachState {
readonly href?: string;
readonly attach?: () => void; | } | random_line_split |
|
MainController.js | /*
* The MIT License (MIT)
*
* Copyright (c) 2014 Marcel Mika, marcelmika.com
*
* Permission is hereby granted, free of charge, to any person obtaining a copy
* of this software and associated documentation files (the "Software"), to deal
* in the Software without restriction, including without limitation the rights
* to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
* copies of the Software, and to permit persons to whom the Software is
* furnished to do so, subject to the following conditions:
*
* The above copyright notice and this permission notice shall be included in all
* copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
* AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
* OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
* SOFTWARE.
*/
/**
* Main Controller
*
* This controller creates instances of all controllers in the app and injects objects that are necessary for them.
* It also holds instances of objects that are needed across the app.
*/
Y.namespace('LIMS.Controller');
Y.LIMS.Controller.MainController = Y.Base.create('mainController', Y.Base, [Y.LIMS.Controller.ControllerExtension], {
/**
* The initializer runs when a MainController instance is created, and gives
* us an opportunity to set up all sub-controllers
*/
initializer: function () {
var buddyDetails = this.get('buddyDetails'),
settingsModel = this.get('settingsModel'),
notification = this.get('notification'),
properties = this.get('properties'),
serverTime = this.get('serverTimeModel'),
poller = this.get('poller'),
rootNode = this.getRootNode();
// Attach events
this._attachEvents();
// Load the freshest server time to compute the server time offset
serverTime.load(function (err) {
// Update to the optimal offset that we get from the server.
// If there is an error properties contain offset read from the
// html as a fallback.
if (!err) |
// Group
new Y.LIMS.Controller.GroupViewController({
container: rootNode.one('.buddy-list'),
properties: properties,
poller: poller
});
// Presence
new Y.LIMS.Controller.PresenceViewController({
container: rootNode.one('.status-panel'),
buddyDetails: buddyDetails
});
// Settings
new Y.LIMS.Controller.SettingsViewController({
container: rootNode.one('.chat-settings'),
model: settingsModel
});
// Conversation
new Y.LIMS.Controller.ConversationsController({
container: rootNode.one('.lims-tabs'),
buddyDetails: buddyDetails,
settings: settingsModel,
notification: notification,
properties: properties,
poller: poller
});
});
},
/**
* This is called whenever the user session expires
*/
sessionExpired: function () {
// Fire an event so the other controllers know about the expiration
Y.fire('userSessionExpired');
},
/**
* Attach local functions to events
*
* @private
*/
_attachEvents: function () {
// Global events
Y.on('initializationFinished', this._onInitializationFinished, this);
// Panel events
Y.on('panelShown', this._onPanelShown, this);
Y.on('panelHidden', this._onPanelHidden, this);
Y.on('userSessionExpired', this._onSessionExpired, this);
},
/**
* Called when the initialization is finished
*
* @private
*/
_onInitializationFinished: function () {
// We can now show the portlet
this.showPortlet();
},
/**
* Called when any panel is shown
*
* @param panel
* @private
*/
_onPanelShown: function (panel) {
var panelId = panel.get('panelId');
// Store current active panel id
this.set('activePanelId', panelId);
// Update settings
this.get('settingsModel').updateActivePanel(panelId);
},
/**
* Called when any panel is hidden
*
* @param panel
* @private
*/
_onPanelHidden: function (panel) {
// If the hidden panel is currently active panel it means that no panel is currently active
if (this.get('activePanelId') === panel.get('panelId')) {
// Update settings
this.get('settingsModel').updateActivePanel(null);
}
},
/**
* Called when the user session expires
*
* @private
*/
_onSessionExpired: function () {
// Hide the whole portlet
Y.LIMS.Core.Util.hide(this.getRootNode());
}
}, {
// Add custom model attributes here. These attributes will contain your
// model's data. See the docs for Y.Attribute to learn more about defining
// attributes.
ATTRS: {
/**
* Buddy details related of the currently logged user
*
* {Y.LIMS.Model.BuddyModelItem}
*/
buddyDetails: {
valueFn: function () {
// We need settings to determine user
var properties = new Y.LIMS.Core.Properties();
// Get logged user
return new Y.LIMS.Model.BuddyModelItem({
buddyId: properties.getCurrentUserId(),
male: properties.getCurrentUserMale(),
portraitId: properties.getCurrentUserPortraitId(),
portraitImageToken: properties.getCurrentUserPortraitImageToken(),
portraitToken: properties.getCurrentUserPortraitToken(),
screenName: properties.getCurrentUserScreenName(),
fullName: properties.getCurrentUserFullName()
});
}
},
/**
* Settings of the currently logged user
*
* {Y.LIMS.Model.SettingsModel}
*/
settingsModel: {
valueFn: function () {
return new Y.LIMS.Model.SettingsModel({
buddy: this.get('buddyDetails')
});
}
},
/**
* Current server time
*
* {Y.LIMS.Model.ServerTimeModel}
*/
serverTimeModel: {
valueFn: function () {
return new Y.LIMS.Model.ServerTimeModel();
}
},
/**
* Notification object responsible for the incoming message notification
*
* {Y.LIMS.Core.Notification}
*/
notification: {
valueFn: function () {
return new Y.LIMS.Core.Notification({
settings: this.get('settingsModel'),
container: this.getRootNode().one('.lims-sound'),
properties: this.get('properties')
});
}
},
/**
* An instance of poller that periodically refreshes models that are subscribed
*
* {Y.LIMS.Core.Poller}
*/
poller: {
valueFn: function () {
return new Y.LIMS.Core.Poller();
}
},
/**
* Properties object that holds the global portlet properties
*
* {Y.LIMS.Core.Properties}
*/
properties: {
valueFn: function () {
return new Y.LIMS.Core.Properties();
}
},
/**
* ID of the current active panel
*
* {string}
*/
activePanelId: {
value: null // default value
}
}
});
| {
properties.set('offset', new Date().getTime() - serverTime.get('time'));
} | conditional_block |
MainController.js | /*
* The MIT License (MIT)
*
* Copyright (c) 2014 Marcel Mika, marcelmika.com
*
* Permission is hereby granted, free of charge, to any person obtaining a copy
* of this software and associated documentation files (the "Software"), to deal
* in the Software without restriction, including without limitation the rights
* to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
* copies of the Software, and to permit persons to whom the Software is
* furnished to do so, subject to the following conditions:
*
* The above copyright notice and this permission notice shall be included in all
* copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
* AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
* OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
* SOFTWARE.
*/
/**
* Main Controller
*
* This controller creates instances of all controllers in the app and injects objects that are necessary for them.
* It also holds instances of objects that are needed across the app.
*/
Y.namespace('LIMS.Controller');
Y.LIMS.Controller.MainController = Y.Base.create('mainController', Y.Base, [Y.LIMS.Controller.ControllerExtension], {
/**
* The initializer runs when a MainController instance is created, and gives
* us an opportunity to set up all sub-controllers
*/
initializer: function () {
var buddyDetails = this.get('buddyDetails'),
settingsModel = this.get('settingsModel'),
notification = this.get('notification'),
properties = this.get('properties'),
serverTime = this.get('serverTimeModel'),
poller = this.get('poller'),
rootNode = this.getRootNode();
// Attach events
this._attachEvents();
// Load the freshest server time to compute the server time offset
serverTime.load(function (err) {
// Update to the optimal offset that we get from the server.
// If there is an error properties contain offset read from the
// html as a fallback.
if (!err) {
properties.set('offset', new Date().getTime() - serverTime.get('time'));
}
// Group
new Y.LIMS.Controller.GroupViewController({
container: rootNode.one('.buddy-list'),
properties: properties,
poller: poller
});
// Presence
new Y.LIMS.Controller.PresenceViewController({
container: rootNode.one('.status-panel'),
buddyDetails: buddyDetails
});
// Settings
new Y.LIMS.Controller.SettingsViewController({
container: rootNode.one('.chat-settings'),
model: settingsModel
});
// Conversation
new Y.LIMS.Controller.ConversationsController({
container: rootNode.one('.lims-tabs'),
buddyDetails: buddyDetails,
settings: settingsModel,
notification: notification,
properties: properties,
poller: poller
});
});
},
/**
* This is called whenever the user session expires
*/
sessionExpired: function () {
// Fire an event so the other controllers know about the expiration
Y.fire('userSessionExpired');
},
/**
* Attach local functions to events
*
* @private
*/
_attachEvents: function () {
// Global events
Y.on('initializationFinished', this._onInitializationFinished, this);
// Panel events
Y.on('panelShown', this._onPanelShown, this);
Y.on('panelHidden', this._onPanelHidden, this);
Y.on('userSessionExpired', this._onSessionExpired, this);
},
/**
* Called when the initialization is finished
*
* @private
*/
_onInitializationFinished: function () {
// We can now show the portlet
this.showPortlet();
},
/**
* Called when any panel is shown
*
* @param panel
* @private
*/
_onPanelShown: function (panel) {
var panelId = panel.get('panelId'); | },
/**
* Called when any panel is hidden
*
* @param panel
* @private
*/
_onPanelHidden: function (panel) {
// If the hidden panel is currently active panel it means that no panel is currently active
if (this.get('activePanelId') === panel.get('panelId')) {
// Update settings
this.get('settingsModel').updateActivePanel(null);
}
},
/**
* Called when the user session expires
*
* @private
*/
_onSessionExpired: function () {
// Hide the whole portlet
Y.LIMS.Core.Util.hide(this.getRootNode());
}
}, {
// Add custom model attributes here. These attributes will contain your
// model's data. See the docs for Y.Attribute to learn more about defining
// attributes.
ATTRS: {
/**
* Buddy details related of the currently logged user
*
* {Y.LIMS.Model.BuddyModelItem}
*/
buddyDetails: {
valueFn: function () {
// We need settings to determine user
var properties = new Y.LIMS.Core.Properties();
// Get logged user
return new Y.LIMS.Model.BuddyModelItem({
buddyId: properties.getCurrentUserId(),
male: properties.getCurrentUserMale(),
portraitId: properties.getCurrentUserPortraitId(),
portraitImageToken: properties.getCurrentUserPortraitImageToken(),
portraitToken: properties.getCurrentUserPortraitToken(),
screenName: properties.getCurrentUserScreenName(),
fullName: properties.getCurrentUserFullName()
});
}
},
/**
* Settings of the currently logged user
*
* {Y.LIMS.Model.SettingsModel}
*/
settingsModel: {
valueFn: function () {
return new Y.LIMS.Model.SettingsModel({
buddy: this.get('buddyDetails')
});
}
},
/**
* Current server time
*
* {Y.LIMS.Model.ServerTimeModel}
*/
serverTimeModel: {
valueFn: function () {
return new Y.LIMS.Model.ServerTimeModel();
}
},
/**
* Notification object responsible for the incoming message notification
*
* {Y.LIMS.Core.Notification}
*/
notification: {
valueFn: function () {
return new Y.LIMS.Core.Notification({
settings: this.get('settingsModel'),
container: this.getRootNode().one('.lims-sound'),
properties: this.get('properties')
});
}
},
/**
* An instance of poller that periodically refreshes models that are subscribed
*
* {Y.LIMS.Core.Poller}
*/
poller: {
valueFn: function () {
return new Y.LIMS.Core.Poller();
}
},
/**
* Properties object that holds the global portlet properties
*
* {Y.LIMS.Core.Properties}
*/
properties: {
valueFn: function () {
return new Y.LIMS.Core.Properties();
}
},
/**
* ID of the current active panel
*
* {string}
*/
activePanelId: {
value: null // default value
}
}
}); | // Store current active panel id
this.set('activePanelId', panelId);
// Update settings
this.get('settingsModel').updateActivePanel(panelId); | random_line_split |
utils.py | def load_config(default_values, user_values):
if user_values is None:
return default_values
config = {}
for k, v in user_values.items():
if k in default_values:
if isinstance(v, dict):
cloned = user_values[k].copy()
for key, value in default_values[k].items():
if key is not None and key not in user_values[k] \
or user_values[k][key] == '':
cloned[key] = value
config[k] = cloned
else:
config[k] = v
else:
config[k] = v
for k, v in default_values.items():
if k not in config:
config[k] = v
return config
def import_class(full_path):
path_split = full_path.split('.')
path = ".".join(path_split[:-1]) | klass = path_split[-1:]
mod = __import__(path, fromlist=[klass])
return getattr(mod, klass[0]) | random_line_split |
|
utils.py |
def load_config(default_values, user_values):
if user_values is None:
return default_values
config = {}
for k, v in user_values.items():
if k in default_values:
if isinstance(v, dict):
cloned = user_values[k].copy()
for key, value in default_values[k].items():
if key is not None and key not in user_values[k] \
or user_values[k][key] == '':
cloned[key] = value
config[k] = cloned
else:
config[k] = v
else:
config[k] = v
for k, v in default_values.items():
if k not in config:
config[k] = v
return config
def | (full_path):
path_split = full_path.split('.')
path = ".".join(path_split[:-1])
klass = path_split[-1:]
mod = __import__(path, fromlist=[klass])
return getattr(mod, klass[0])
| import_class | identifier_name |
utils.py |
def load_config(default_values, user_values):
if user_values is None:
return default_values
config = {}
for k, v in user_values.items():
if k in default_values:
if isinstance(v, dict):
cloned = user_values[k].copy()
for key, value in default_values[k].items():
if key is not None and key not in user_values[k] \
or user_values[k][key] == '':
cloned[key] = value
config[k] = cloned
else:
config[k] = v
else:
config[k] = v
for k, v in default_values.items():
if k not in config:
|
return config
def import_class(full_path):
path_split = full_path.split('.')
path = ".".join(path_split[:-1])
klass = path_split[-1:]
mod = __import__(path, fromlist=[klass])
return getattr(mod, klass[0])
| config[k] = v | conditional_block |
utils.py |
def load_config(default_values, user_values):
|
def import_class(full_path):
path_split = full_path.split('.')
path = ".".join(path_split[:-1])
klass = path_split[-1:]
mod = __import__(path, fromlist=[klass])
return getattr(mod, klass[0])
| if user_values is None:
return default_values
config = {}
for k, v in user_values.items():
if k in default_values:
if isinstance(v, dict):
cloned = user_values[k].copy()
for key, value in default_values[k].items():
if key is not None and key not in user_values[k] \
or user_values[k][key] == '':
cloned[key] = value
config[k] = cloned
else:
config[k] = v
else:
config[k] = v
for k, v in default_values.items():
if k not in config:
config[k] = v
return config | identifier_body |
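A short usage sketch for the two helpers defined in the utils.py rows above; the config values are illustrative, and the import_class call is kept commented out because the helper relies on Python 2-era __import__ fromlist behaviour:

# Illustrative only: exercises load_config/import_class as defined above.
from utils import load_config, import_class  # assumes this utils.py is importable

defaults = {'db': {'host': 'localhost', 'port': 5432}, 'debug': False}
user = {'db': {'host': 'db.example.com', 'port': ''}, 'debug': True}

config = load_config(defaults, user)
# One level of nested dicts is merged key by key; empty strings and
# missing keys fall back to the defaults.
assert config['db'] == {'host': 'db.example.com', 'port': 5432}
assert config['debug'] is True

# import_class resolves a dotted path to an attribute, e.g.
#   OrderedDict = import_class('collections.OrderedDict')
# (kept commented: klass is a one-element list, which Python 3's
# __import__ rejects in fromlist)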
tangy-forms-player.component.ts | /environment';
import { FormInfo, FormTemplate } from 'src/app/tangy-forms/classes/form-info.class';
import { TangyFormResponseModel } from 'tangy-form/tangy-form-response-model.js';
import { Subject } from 'rxjs';
import { TangyFormsInfoService } from 'src/app/tangy-forms/tangy-forms-info-service';
import { Component, ViewChild, ElementRef, Input } from '@angular/core';
import { _TRANSLATE } from '../../shared/translation-marker';
import { TangyFormService } from '../tangy-form.service';
const sleep = (milliseconds) => new Promise((res) => setTimeout(() => res(true), milliseconds))
@Component({
selector: 'app-tangy-forms-player',
templateUrl: './tangy-forms-player.component.html',
styleUrls: ['./tangy-forms-player.component.css']
})
export class TangyFormsPlayerComponent {
// Use one of three to do different things.
// 1. Use this to have the component load the response for you.
@Input('formResponseId') formResponseId:string
// 2. Use this if you want to attach the form response yourself.
@Input('response') response:TangyFormResponseModel
// 3. Use this if you want a new form response.
@Input('formId') formId:string
@Input('templateId') templateId:string
@Input('location') location:any
@Input('skipSaving') skipSaving = false
@Input('preventSubmit') preventSubmit = false
@Input('metadata') metadata:any
$rendered = new Subject()
$beforeSubmit = new Subject()
$submit = new Subject()
$afterSubmit = new Subject()
$resubmit = new Subject()
$afterResubmit = new Subject()
$saved = new Subject()
rendered = false
_inject = {}
formInfo:FormInfo
formTemplatesInContext:Array<FormTemplate>
formEl:any
throttledSaveLoaded;
throttledSaveFiring;
window:any;
@ViewChild('container', {static: true}) container: ElementRef;
constructor(
private tangyFormsInfoService:TangyFormsInfoService,
private tangyFormService: TangyFormService,
) {
this.window = window
}
inject(name, value) {
if (this.formEl) {
this.formEl.inject(name, value)
} else {
this._inject[name] = value
}
}
isDirty() {
if (this.formEl) {
const state = this.formEl.store.getState()
const isDirty = state.items.some(item => item.isDirty) // some() passes (element, index); the old (acc, item) callback read isDirty off the index
return isDirty
} else {
return true
}
}
isComplete() {
if (this.formEl) {
return this.formEl.store.getState().form.complete
} else {
return true
}
}
unlock() {
this.formEl.unlock()
}
async render() {
// Get form ingredients.
const formResponse = this.response
? new TangyFormResponseModel(this.response)
: this.formResponseId
? new TangyFormResponseModel(await this.tangyFormService.getResponse(this.formResponseId))
: ''
// happens during testing
if (!this.response && this.formResponseId) {
this.response = await this.tangyFormService.getResponse(this.formResponseId)
}
this.formId = this.formId
? this.formId
: formResponse['form']['id']
this.formInfo = await this.tangyFormsInfoService.getFormInfo(this.formId)
this.formTemplatesInContext = this.formInfo.templates ? this.formInfo.templates.filter(template => template.appContext === environment.appContext) : []
if (this.templateId) {
let templateMarkup = await this.tangyFormsInfoService.getFormTemplateMarkup(this.formId, this.templateId)
eval(`this.container.nativeElement.innerHTML = \`${templateMarkup}\``)
} else {
let formVersionId
if (window.location.hostname === 'localhost') {
// We are in preview mode, use FormInfo.src for markup.
formVersionId = ''
} else if (!this.formInfo.formVersions) {
// No form versions defined, use FormInfo.src for markup.
formVersionId = ''
} else if (this.formInfo.formVersions && !formResponse) {
// We have form versions defined and we are creating a new form response. Let's use the version set for use in FormInfo.formVersionId.
formVersionId = this.formInfo.formVersionId
} else if (formResponse["formVersionId"]) {
// We are resuming a Form Response with the version set. Use that.
formVersionId = formResponse["formVersionId"]
} else if (!formResponse["formVersionId"]) {
// We are resuming a Form Response that has never heard of form versions. Use the FIRST form version listed.
// This is useful for projects that did not start with using Form Versions. To get started, create two Form Versions
// where the first form version is for Form Responses before Form Versions, and the second version is the new version
// for all new form responses.
formVersionId = this.formInfo.formVersions[0].id
}
let formHtml = await this.tangyFormService.getFormMarkup(this.formId, formVersionId)
// Put the form on the screen.
const container = this.container.nativeElement
container.innerHTML = formHtml
let formEl = container.querySelector('tangy-form')
this.formEl = formEl;
for (let name of Object.keys(this._inject)) {
this.formEl.inject(name, this._inject[name])
}
// Put a response in the store by issuing the FORM_OPEN action.
if (formResponse) {
formEl.response = formResponse
} else {
formEl.newResponse()
this.formResponseId = formEl.response._id
formEl.response.formVersionId = this.formInfo.formVersionId
this.throttledSaveResponse(formEl.response)
}
this.response = formEl.response
// Listen up, save in the db.
if (!this.skipSaving && !this.response.complete) {
formEl.addEventListener('TANGY_FORM_UPDATE', _ => {
let response = _.target.store.getState()
this.throttledSaveResponse(response)
})
}
formEl.addEventListener('before-submit', (event) => {
this.$beforeSubmit.next(true)
})
formEl.addEventListener('submit', (event) => {
if (this.preventSubmit) event.preventDefault()
this.$submit.next(true)
})
formEl.addEventListener('after-submit', async (event) => {
if (this.preventSubmit) event.preventDefault()
while (this.throttledSaveFiring === true) {
await sleep(1000)
}
this.$afterSubmit.next(true)
})
formEl.addEventListener('resubmit', async (event) => {
if (this.preventSubmit) event.preventDefault()
while (this.throttledSaveFiring === true) {
await sleep(1000)
}
this.$resubmit.next(true)
})
formEl.addEventListener('after-resubmit', async (event) => {
if (this.preventSubmit) event.preventDefault()
while (this.throttledSaveFiring === true) {
await sleep(1000)
}
this.$afterResubmit.next(true)
})
}
this.$rendered.next(true)
this.rendered = true
}
setTemplate(templateId) {
this.templateId = templateId
this.render()
}
// Prevent parallel saves, which lead to race conditions. Only save the first and then the last state of the store.
// Everything else in between we can ignore.
async throttledSaveResponse(response) {
// If already loaded, return.
if (this.throttledSaveLoaded) return
// Throttle this fire by waiting until last fire is done.
if (this.throttledSaveFiring) {
this.throttledSaveLoaded = true
while (this.throttledSaveFiring) await sleep(200)
this.throttledSaveLoaded = false
}
// Fire it.
this.throttledSaveFiring = true
await this.saveResponse(response)
this.throttledSaveFiring = false
}
async saveResponse(state) {
let stateDoc = {}
stateDoc = await this.tangyFormService.getResponse(state._id)
if (stateDoc && stateDoc['complete'] && state.complete && stateDoc['form'] && !stateDoc['form'].hasSummary) | else {
if (!stateDoc) {
let r = await this.tangyFormService.saveResponse(state)
stateDoc = await this.tangyFormService.getResponse(state._id)
}
await this.tangyFormService.saveResponse({
...state,
_rev: stateDoc['_rev'],
location: this.location || state.location,
...this.metadata
})
}
this.response = state
| {
// Since what is in the database is complete, and it's still complete, and it doesn't have
// a summary where they might add some input, don't save! They are probably reviewing data.
} | conditional_block |
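throttledSaveResponse in the component above collapses bursts of TANGY_FORM_UPDATE events into the first save plus a single trailing save, dropping everything in between. A rough asyncio rendering of the same throttle, with hypothetical names:

# Minimal sketch of the first-plus-trailing save throttle used above.
import asyncio

class ThrottledSaver:
    def __init__(self, save):
        self._save = save      # coroutine performing the actual write
        self._firing = False   # a save is currently in flight
        self._loaded = False   # a trailing save is already queued

    async def push(self, state):
        if self._loaded:       # trailing slot taken: drop this state
            return
        if self._firing:       # wait for the in-flight save to finish
            self._loaded = True
            while self._firing:
                await asyncio.sleep(0.2)
            self._loaded = False
        self._firing = True
        try:
            await self._save(state)
        finally:
            self._firing = False

Intermediate states are deliberately discarded; only the first and the most recent queued state reach the database.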
tangy-forms-player.component.ts | /environment';
import { FormInfo, FormTemplate } from 'src/app/tangy-forms/classes/form-info.class';
import { TangyFormResponseModel } from 'tangy-form/tangy-form-response-model.js';
import { Subject } from 'rxjs';
import { TangyFormsInfoService } from 'src/app/tangy-forms/tangy-forms-info-service';
import { Component, ViewChild, ElementRef, Input } from '@angular/core';
import { _TRANSLATE } from '../../shared/translation-marker';
import { TangyFormService } from '../tangy-form.service';
const sleep = (milliseconds) => new Promise((res) => setTimeout(() => res(true), milliseconds))
@Component({
selector: 'app-tangy-forms-player',
templateUrl: './tangy-forms-player.component.html',
styleUrls: ['./tangy-forms-player.component.css']
})
export class TangyFormsPlayerComponent {
// Use one of three to do different things.
// 1. Use this to have the component load the response for you.
@Input('formResponseId') formResponseId:string
// 2. Use this if you want to attach the form response yourself.
@Input('response') response:TangyFormResponseModel
// 3. Use this if you want a new form response.
@Input('formId') formId:string
@Input('templateId') templateId:string
@Input('location') location:any
@Input('skipSaving') skipSaving = false
@Input('preventSubmit') preventSubmit = false
@Input('metadata') metadata:any
$rendered = new Subject()
$beforeSubmit = new Subject()
$submit = new Subject()
$afterSubmit = new Subject()
$resubmit = new Subject()
$afterResubmit = new Subject()
$saved = new Subject()
rendered = false
_inject = {}
formInfo:FormInfo
formTemplatesInContext:Array<FormTemplate>
formEl:any
throttledSaveLoaded;
throttledSaveFiring;
window:any;
@ViewChild('container', {static: true}) container: ElementRef;
constructor(
private tangyFormsInfoService:TangyFormsInfoService,
private tangyFormService: TangyFormService,
) {
this.window = window
}
inject(name, value) |
isDirty() {
if (this.formEl) {
const state = this.formEl.store.getState()
const isDirty = state.items.some(item => item.isDirty) // some() passes (element, index); the old (acc, item) callback read isDirty off the index
return isDirty
} else {
return true
}
}
isComplete() {
if (this.formEl) {
return this.formEl.store.getState().form.complete
} else {
return true
}
}
unlock() {
this.formEl.unlock()
}
async render() {
// Get form ingredients.
const formResponse = this.response
? new TangyFormResponseModel(this.response)
: this.formResponseId
? new TangyFormResponseModel(await this.tangyFormService.getResponse(this.formResponseId))
: ''
// happens during testing
if (!this.response && this.formResponseId) {
this.response = await this.tangyFormService.getResponse(this.formResponseId)
}
this.formId = this.formId
? this.formId
: formResponse['form']['id']
this.formInfo = await this.tangyFormsInfoService.getFormInfo(this.formId)
this.formTemplatesInContext = this.formInfo.templates ? this.formInfo.templates.filter(template => template.appContext === environment.appContext) : []
if (this.templateId) {
let templateMarkup = await this.tangyFormsInfoService.getFormTemplateMarkup(this.formId, this.templateId)
eval(`this.container.nativeElement.innerHTML = \`${templateMarkup}\``)
} else {
let formVersionId
if (window.location.hostname === 'localhost') {
// We are in preview mode, use FormInfo.src for markup.
formVersionId = ''
} else if (!this.formInfo.formVersions) {
// No form versions defined, use FormInfo.src for markup.
formVersionId = ''
} else if (this.formInfo.formVersions && !formResponse) {
// We have form versions defined and we are creating a new form response. Let's use the version set for use in FormInfo.formVersionId.
formVersionId = this.formInfo.formVersionId
} else if (formResponse["formVersionId"]) {
// We are resuming a Form Response with the version set. Use that.
formVersionId = formResponse["formVersionId"]
} else if (!formResponse["formVersionId"]) {
// We are resuming a Form Response that has never heard of form versions. Use the FIRST form version listed.
// This is useful for projects that did not start with using Form Versions. To get started, create two Form Versions
// where the first form version is for Form Responses before Form Versions, and the second version is the new version
// for all new form responses.
formVersionId = this.formInfo.formVersions[0].id
}
let formHtml = await this.tangyFormService.getFormMarkup(this.formId, formVersionId)
// Put the form on the screen.
const container = this.container.nativeElement
container.innerHTML = formHtml
let formEl = container.querySelector('tangy-form')
this.formEl = formEl;
for (let name of Object.keys(this._inject)) {
this.formEl.inject(name, this._inject[name])
}
// Put a response in the store by issuing the FORM_OPEN action.
if (formResponse) {
formEl.response = formResponse
} else {
formEl.newResponse()
this.formResponseId = formEl.response._id
formEl.response.formVersionId = this.formInfo.formVersionId
this.throttledSaveResponse(formEl.response)
}
this.response = formEl.response
// Listen up, save in the db.
if (!this.skipSaving && !this.response.complete) {
formEl.addEventListener('TANGY_FORM_UPDATE', _ => {
let response = _.target.store.getState()
this.throttledSaveResponse(response)
})
}
formEl.addEventListener('before-submit', (event) => {
this.$beforeSubmit.next(true)
})
formEl.addEventListener('submit', (event) => {
if (this.preventSubmit) event.preventDefault()
this.$submit.next(true)
})
formEl.addEventListener('after-submit', async (event) => {
if (this.preventSubmit) event.preventDefault()
while (this.throttledSaveFiring === true) {
await sleep(1000)
}
this.$afterSubmit.next(true)
})
formEl.addEventListener('resubmit', async (event) => {
if (this.preventSubmit) event.preventDefault()
while (this.throttledSaveFiring === true) {
await sleep(1000)
}
this.$resubmit.next(true)
})
formEl.addEventListener('after-resubmit', async (event) => {
if (this.preventSubmit) event.preventDefault()
while (this.throttledSaveFiring === true) {
await sleep(1000)
}
this.$afterResubmit.next(true)
})
}
this.$rendered.next(true)
this.rendered = true
}
setTemplate(templateId) {
this.templateId = templateId
this.render()
}
// Prevent parallel saves, which lead to race conditions. Only save the first and then the last state of the store.
// Everything else in between we can ignore.
async throttledSaveResponse(response) {
// If already loaded, return.
if (this.throttledSaveLoaded) return
// Throttle this fire by waiting until last fire is done.
if (this.throttledSaveFiring) {
this.throttledSaveLoaded = true
while (this.throttledSaveFiring) await sleep(200)
this.throttledSaveLoaded = false
}
// Fire it.
this.throttledSaveFiring = true
await this.saveResponse(response)
this.throttledSaveFiring = false
}
async saveResponse(state) {
let stateDoc = {}
stateDoc = await this.tangyFormService.getResponse(state._id)
if (stateDoc && stateDoc['complete'] && state.complete && stateDoc['form'] && !stateDoc['form'].hasSummary) {
// Since what is in the database is complete, and it's still complete, and it doesn't have
// a summary where they might add some input, don't save! They are probably reviewing data.
} else {
if (!stateDoc) {
let r = await this.tangyFormService.saveResponse(state)
stateDoc = await this.tangyFormService.getResponse(state._id)
}
await this.tangyFormService.saveResponse({
...state,
_rev: stateDoc['_rev'],
location: this.location || state.location,
...this.metadata
})
}
this.response = state
| {
if (this.formEl) {
this.formEl.inject(name, value)
} else {
this._inject[name] = value
}
} | identifier_body |
tangy-forms-player.component.ts | } from 'src/app/tangy-forms/classes/form-info.class';
import { TangyFormResponseModel } from 'tangy-form/tangy-form-response-model.js';
import { Subject } from 'rxjs';
import { TangyFormsInfoService } from 'src/app/tangy-forms/tangy-forms-info-service';
import { Component, ViewChild, ElementRef, Input } from '@angular/core';
import { _TRANSLATE } from '../../shared/translation-marker';
import { TangyFormService } from '../tangy-form.service';
const sleep = (milliseconds) => new Promise((res) => setTimeout(() => res(true), milliseconds))
@Component({
selector: 'app-tangy-forms-player',
templateUrl: './tangy-forms-player.component.html',
styleUrls: ['./tangy-forms-player.component.css']
})
export class TangyFormsPlayerComponent {
// Use one of three to do different things.
// 1. Use this to have the component load the response for you.
@Input('formResponseId') formResponseId:string
// 2. Use this if you want to attach the form response yourself.
@Input('response') response:TangyFormResponseModel
// 3. Use this if you want a new form response.
@Input('formId') formId:string
@Input('templateId') templateId:string
@Input('location') location:any
@Input('skipSaving') skipSaving = false
@Input('preventSubmit') preventSubmit = false
@Input('metadata') metadata:any
$rendered = new Subject()
$beforeSubmit = new Subject()
$submit = new Subject()
$afterSubmit = new Subject()
$resubmit = new Subject()
$afterResubmit = new Subject()
$saved = new Subject()
rendered = false
_inject = {}
formInfo:FormInfo
formTemplatesInContext:Array<FormTemplate>
formEl:any
throttledSaveLoaded;
throttledSaveFiring;
window:any;
@ViewChild('container', {static: true}) container: ElementRef;
constructor(
private tangyFormsInfoService:TangyFormsInfoService,
private tangyFormService: TangyFormService,
) {
this.window = window
}
inject(name, value) {
if (this.formEl) {
this.formEl.inject(name, value)
} else {
this._inject[name] = value
}
}
isDirty() {
if (this.formEl) {
const state = this.formEl.store.getState()
const isDirty = state.items.some(item => item.isDirty) // some() passes (element, index); the old (acc, item) callback read isDirty off the index
return isDirty
} else {
return true
}
}
isComplete() {
if (this.formEl) {
return this.formEl.store.getState().form.complete
} else {
return true
}
}
unlock() {
this.formEl.unlock()
}
async render() {
// Get form ingredients.
const formResponse = this.response
? new TangyFormResponseModel(this.response)
: this.formResponseId
? new TangyFormResponseModel(await this.tangyFormService.getResponse(this.formResponseId))
: ''
// happens during testing
if (!this.response && this.formResponseId) {
this.response = await this.tangyFormService.getResponse(this.formResponseId)
}
this.formId = this.formId
? this.formId
: formResponse['form']['id']
this.formInfo = await this.tangyFormsInfoService.getFormInfo(this.formId)
this.formTemplatesInContext = this.formInfo.templates ? this.formInfo.templates.filter(template => template.appContext === environment.appContext) : []
if (this.templateId) {
let templateMarkup = await this.tangyFormsInfoService.getFormTemplateMarkup(this.formId, this.templateId)
eval(`this.container.nativeElement.innerHTML = \`${templateMarkup}\``)
} else {
let formVersionId
if (window.location.hostname === 'localhost') {
// We are in preview mode, use FormInfo.src for markup.
formVersionId = ''
} else if (!this.formInfo.formVersions) {
// No form versions defined, use FormInfo.src for markup.
formVersionId = ''
} else if (this.formInfo.formVersions && !formResponse) {
// We have form versions defined and we are creating a new form response. Let's use the version set for use in FormInfo.formVersionId.
formVersionId = this.formInfo.formVersionId
} else if (formResponse["formVersionId"]) {
// We are resuming a Form Response with the version set. Use that.
formVersionId = formResponse["formVersionId"]
} else if (!formResponse["formVersionId"]) {
// We are resuming a Form Response that has never heard of form versions. Use the FIRST form version listed.
// This is useful for projects that did not start with using Form Versions. To get started, create two Form Versions
// where the first form version is for Form Responses before Form Versions, and the second version is the new version
// for all new form responses.
formVersionId = this.formInfo.formVersions[0].id
}
let formHtml = await this.tangyFormService.getFormMarkup(this.formId, formVersionId)
// Put the form on the screen.
const container = this.container.nativeElement
container.innerHTML = formHtml
let formEl = container.querySelector('tangy-form')
this.formEl = formEl;
for (let name of Object.keys(this._inject)) {
this.formEl.inject(name, this._inject[name])
}
// Put a response in the store by issuing the FORM_OPEN action.
if (formResponse) {
formEl.response = formResponse
} else {
formEl.newResponse()
this.formResponseId = formEl.response._id
formEl.response.formVersionId = this.formInfo.formVersionId
this.throttledSaveResponse(formEl.response)
}
this.response = formEl.response
// Listen up, save in the db.
if (!this.skipSaving && !this.response.complete) {
formEl.addEventListener('TANGY_FORM_UPDATE', _ => {
let response = _.target.store.getState()
this.throttledSaveResponse(response)
})
}
formEl.addEventListener('before-submit', (event) => {
this.$beforeSubmit.next(true)
})
formEl.addEventListener('submit', (event) => {
if (this.preventSubmit) event.preventDefault()
this.$submit.next(true)
})
formEl.addEventListener('after-submit', async (event) => {
if (this.preventSubmit) event.preventDefault()
while (this.throttledSaveFiring === true) {
await sleep(1000)
}
this.$afterSubmit.next(true)
})
formEl.addEventListener('resubmit', async (event) => {
if (this.preventSubmit) event.preventDefault()
while (this.throttledSaveFiring === true) {
await sleep(1000)
}
this.$resubmit.next(true)
})
formEl.addEventListener('after-resubmit', async (event) => {
if (this.preventSubmit) event.preventDefault()
while (this.throttledSaveFiring === true) {
await sleep(1000)
}
this.$afterResubmit.next(true)
})
}
this.$rendered.next(true)
this.rendered = true
}
setTemplate(templateId) {
this.templateId = templateId
this.render()
}
// Prevent parallel saves, which lead to race conditions. Only save the first and then the last state of the store.
// Everything else in between we can ignore.
async throttledSaveResponse(response) {
// If already loaded, return.
if (this.throttledSaveLoaded) return
// Throttle this fire by waiting until last fire is done.
if (this.throttledSaveFiring) {
this.throttledSaveLoaded = true
while (this.throttledSaveFiring) await sleep(200)
this.throttledSaveLoaded = false
}
// Fire it.
this.throttledSaveFiring = true
await this.saveResponse(response)
this.throttledSaveFiring = false
}
async saveResponse(state) {
let stateDoc = {}
stateDoc = await this.tangyFormService.getResponse(state._id)
if (stateDoc && stateDoc['complete'] && state.complete && stateDoc['form'] && !stateDoc['form'].hasSummary) {
// Since what is in the database is complete, and it's still complete, and it doesn't have
// a summary where they might add some input, don't save! They are probably reviewing data.
} else {
if (!stateDoc) {
let r = await this.tangyFormService.saveResponse(state)
stateDoc = await this.tangyFormService.getResponse(state._id)
}
await this.tangyFormService.saveResponse({
...state,
_rev: stateDoc['_rev'],
location: this.location || state.location,
...this.metadata
})
}
this.response = state
this.$saved.next(state)
}
| print | identifier_name |
|
tangy-forms-player.component.ts | /environment';
import { FormInfo, FormTemplate } from 'src/app/tangy-forms/classes/form-info.class';
import { TangyFormResponseModel } from 'tangy-form/tangy-form-response-model.js';
import { Subject } from 'rxjs';
import { TangyFormsInfoService } from 'src/app/tangy-forms/tangy-forms-info-service';
import { Component, ViewChild, ElementRef, Input } from '@angular/core';
import { _TRANSLATE } from '../../shared/translation-marker';
import { TangyFormService } from '../tangy-form.service';
const sleep = (milliseconds) => new Promise((res) => setTimeout(() => res(true), milliseconds)) | @Component({
selector: 'app-tangy-forms-player',
templateUrl: './tangy-forms-player.component.html',
styleUrls: ['./tangy-forms-player.component.css']
})
export class TangyFormsPlayerComponent {
// Use one of three to do different things.
// 1. Use this to have the component load the response for you.
@Input('formResponseId') formResponseId:string
// 2. Use this if you want to attach the form response yourself.
@Input('response') response:TangyFormResponseModel
// 3. Use this if you want a new form response.
@Input('formId') formId:string
@Input('templateId') templateId:string
@Input('location') location:any
@Input('skipSaving') skipSaving = false
@Input('preventSubmit') preventSubmit = false
@Input('metadata') metadata:any
$rendered = new Subject()
$beforeSubmit = new Subject()
$submit = new Subject()
$afterSubmit = new Subject()
$resubmit = new Subject()
$afterResubmit = new Subject()
$saved = new Subject()
rendered = false
_inject = {}
formInfo:FormInfo
formTemplatesInContext:Array<FormTemplate>
formEl:any
throttledSaveLoaded;
throttledSaveFiring;
window:any;
@ViewChild('container', {static: true}) container: ElementRef;
constructor(
private tangyFormsInfoService:TangyFormsInfoService,
private tangyFormService: TangyFormService,
) {
this.window = window
}
inject(name, value) {
if (this.formEl) {
this.formEl.inject(name, value)
} else {
this._inject[name] = value
}
}
isDirty() {
if (this.formEl) {
const state = this.formEl.store.getState()
const isDirty = state.items.some(item => item.isDirty) // some() passes (element, index); the old (acc, item) callback read isDirty off the index
return isDirty
} else {
return true
}
}
isComplete() {
if (this.formEl) {
return this.formEl.store.getState().form.complete
} else {
return true
}
}
unlock() {
this.formEl.unlock()
}
async render() {
// Get form ingredients.
const formResponse = this.response
? new TangyFormResponseModel(this.response)
: this.formResponseId
? new TangyFormResponseModel(await this.tangyFormService.getResponse(this.formResponseId))
: ''
// happens during testing
if (!this.response && this.formResponseId) {
this.response = await this.tangyFormService.getResponse(this.formResponseId)
}
this.formId = this.formId
? this.formId
: formResponse['form']['id']
this.formInfo = await this.tangyFormsInfoService.getFormInfo(this.formId)
this.formTemplatesInContext = this.formInfo.templates ? this.formInfo.templates.filter(template => template.appContext === environment.appContext) : []
if (this.templateId) {
let templateMarkup = await this.tangyFormsInfoService.getFormTemplateMarkup(this.formId, this.templateId)
eval(`this.container.nativeElement.innerHTML = \`${templateMarkup}\``)
} else {
let formVersionId
if (window.location.hostname === 'localhost') {
// We are in preview mode, use FormInfo.src for markup.
formVersionId = ''
} else if (!this.formInfo.formVersions) {
// No form versions defined, use FormInfo.src for markup.
formVersionId = ''
} else if (this.formInfo.formVersions && !formResponse) {
// We have form versions defined and we are creating a new form response. Let's use the version set for use in FormInfo.formVersionId.
formVersionId = this.formInfo.formVersionId
} else if (formResponse["formVersionId"]) {
// We are resuming a Form Response with the version set. Use that.
formVersionId = formResponse["formVersionId"]
} else if (!formResponse["formVersionId"]) {
// We are resuming a Form Response that has never heard of form versions. Use the FIRST form version listed.
// This is useful for projects that did not start with using Form Versions. To get started, create two Form Versions
// where the first form version is for Form Responses before Form Versions, and the second version is the new version
// for all new form responses.
formVersionId = this.formInfo.formVersions[0].id
}
let formHtml = await this.tangyFormService.getFormMarkup(this.formId, formVersionId)
// Put the form on the screen.
const container = this.container.nativeElement
container.innerHTML = formHtml
let formEl = container.querySelector('tangy-form')
this.formEl = formEl;
for (let name of Object.keys(this._inject)) {
this.formEl.inject(name, this._inject[name])
}
// Put a response in the store by issuing the FORM_OPEN action.
if (formResponse) {
formEl.response = formResponse
} else {
formEl.newResponse()
this.formResponseId = formEl.response._id
formEl.response.formVersionId = this.formInfo.formVersionId
this.throttledSaveResponse(formEl.response)
}
this.response = formEl.response
// Listen up, save in the db.
if (!this.skipSaving && !this.response.complete) {
formEl.addEventListener('TANGY_FORM_UPDATE', _ => {
let response = _.target.store.getState()
this.throttledSaveResponse(response)
})
}
formEl.addEventListener('before-submit', (event) => {
this.$beforeSubmit.next(true)
})
formEl.addEventListener('submit', (event) => {
if (this.preventSubmit) event.preventDefault()
this.$submit.next(true)
})
formEl.addEventListener('after-submit', async (event) => {
if (this.preventSubmit) event.preventDefault()
while (this.throttledSaveFiring === true) {
await sleep(1000)
}
this.$afterSubmit.next(true)
})
formEl.addEventListener('resubmit', async (event) => {
if (this.preventSubmit) event.preventDefault()
while (this.throttledSaveFiring === true) {
await sleep(1000)
}
this.$resubmit.next(true)
})
formEl.addEventListener('after-resubmit', async (event) => {
if (this.preventSubmit) event.preventDefault()
while (this.throttledSaveFiring === true) {
await sleep(1000)
}
this.$afterResubmit.next(true)
})
}
this.$rendered.next(true)
this.rendered = true
}
setTemplate(templateId) {
this.templateId = templateId
this.render()
}
// Prevent parallel saves, which lead to race conditions. Only save the first and then the last state of the store.
// Everything else in between we can ignore.
async throttledSaveResponse(response) {
// If already loaded, return.
if (this.throttledSaveLoaded) return
// Throttle this fire by waiting until last fire is done.
if (this.throttledSaveFiring) {
this.throttledSaveLoaded = true
while (this.throttledSaveFiring) await sleep(200)
this.throttledSaveLoaded = false
}
// Fire it.
this.throttledSaveFiring = true
await this.saveResponse(response)
this.throttledSaveFiring = false
}
async saveResponse(state) {
let stateDoc = {}
stateDoc = await this.tangyFormService.getResponse(state._id)
if (stateDoc && stateDoc['complete'] && state.complete && stateDoc['form'] && !stateDoc['form'].hasSummary) {
// Since what is in the database is complete, and it's still complete, and it doesn't have
// a summary where they might add some input, don't save! They are probably reviewing data.
} else {
if (!stateDoc) {
let r = await this.tangyFormService.saveResponse(state)
stateDoc = await this.tangyFormService.getResponse(state._id)
}
await this.tangyFormService.saveResponse({
...state,
_rev: stateDoc['_rev'],
location: this.location || state.location,
...this.metadata
})
}
this.response = state
| random_line_split |
|
index.tsx | import * as React from 'react';
import * as MomentTS from 'moment'; import MomentBabel from 'moment'; const moment = typeof MomentTS === 'function' ? MomentTS : MomentBabel; // moment import uses export = {} syntax - which works differently in typescript and babel, so we load both and pick the one that worked :'(
import { find } from 'lodash';
import H from '../../../../../components/h/';
import Indent from '../../../../../components/indent/';
import Markdown from '../../../../../components/markdown/';
import * as SVGIcons from '../../../../../icons/';
import { sortExperience } from '../../';
const SVGLinkIcon = require('!svg-react-loader!svg-icon/dist/svg/awesome/chain.svg');
import { Container, Header, Date, SVGLink, LinksAndIcons, Icon, Icons, Projects } from './styles';
import * as styles from './styles';
import { Experience as ExperienceType, Project as ProjectType } from '../../../../../../data/experiences/index.types';
export const formatDate = (date: string) => {
return date ? moment(date).format('YYYY-MM') : 'present';
};
export class Experience extends React.Component<{ experience: ExperienceType | ProjectType, level: number }, any> {
render() {
const { experience: { title, start, end, summaryMarkdown, projects, portfolio, icons }, level } = this.props;
const renderDate = start || end;
const renderProjects = projects && projects.length > 0;
const renderIcons = icons && icons.length > 0;
const headerLevel = level ? level : 3;
const headerInline = headerLevel > 3;
return (
<Container renderProjects={renderProjects} inline={headerInline} >
<Header>
{(() => {
const TitleH = styles[`Title${headerLevel}`];
return <TitleH level={headerLevel} >{title}</TitleH>;
})()}
{!renderDate ? null : <Date>{`${formatDate(start)} to ${formatDate(end)}`}</Date>}
<LinksAndIcons>
{portfolio ? <SVGLink target={`_blank`} href={portfolio.link} rel={`nofollow`}><SVGLinkIcon title={portfolio.hoverTitle}/></SVGLink> : null}
{!renderIcons ? null :
<Icons>
{icons.map((icon, i) => {
const SVGIcon = SVGIcons[icon];
return (
<Icon key={i}>
<SVGIcon title={icon}/>
</Icon>
); })}
</Icons>}
</LinksAndIcons>
</Header>
<Indent>
<Markdown source={summaryMarkdown}/>
{!renderProjects ? null :
<Projects>
<H level={headerLevel} inline >PROJECTS</H>
{projects.map((project: ProjectType, i: number) => (
<Experience experience={project} level={headerLevel + 1} key={i} />
))}
</Projects>
}
</Indent>
</Container>
);
}
} |
export default Experience; | random_line_split |
|
index.tsx | import * as React from 'react';
import * as MomentTS from 'moment'; import MomentBabel from 'moment'; const moment = typeof MomentTS === 'function' ? MomentTS : MomentBabel; // moment import uses export = {} syntax - which works differently in typescript and babel, so we load both and pick the one that worked :'(
import { find } from 'lodash';
import H from '../../../../../components/h/';
import Indent from '../../../../../components/indent/';
import Markdown from '../../../../../components/markdown/';
import * as SVGIcons from '../../../../../icons/';
import { sortExperience } from '../../';
const SVGLinkIcon = require('!svg-react-loader!svg-icon/dist/svg/awesome/chain.svg');
import { Container, Header, Date, SVGLink, LinksAndIcons, Icon, Icons, Projects } from './styles';
import * as styles from './styles';
import { Experience as ExperienceType, Project as ProjectType } from '../../../../../../data/experiences/index.types';
export const formatDate = (date: string) => {
return date ? moment(date).format('YYYY-MM') : 'present';
};
export class | extends React.Component<{ experience: ExperienceType | ProjectType, level: number }, any> {
render() {
const { experience: { title, start, end, summaryMarkdown, projects, portfolio, icons }, level } = this.props;
const renderDate = start || end;
const renderProjects = projects && projects.length > 0;
const renderIcons = icons && icons.length > 0;
const headerLevel = level ? level : 3;
const headerInline = headerLevel > 3;
return (
<Container renderProjects={renderProjects} inline={headerInline} >
<Header>
{(() => {
const TitleH = styles[`Title${headerLevel}`];
return <TitleH level={headerLevel} >{title}</TitleH>;
})()}
{!renderDate ? null : <Date>{`${formatDate(start)} to ${formatDate(end)}`}</Date>}
<LinksAndIcons>
{portfolio ? <SVGLink target={`_blank`} href={portfolio.link} rel={`nofollow`}><SVGLinkIcon title={portfolio.hoverTitle}/></SVGLink> : null}
{!renderIcons ? null :
<Icons>
{icons.map((icon, i) => {
const SVGIcon = SVGIcons[icon];
return (
<Icon key={i}>
<SVGIcon title={icon}/>
</Icon>
); })}
</Icons>}
</LinksAndIcons>
</Header>
<Indent>
<Markdown source={summaryMarkdown}/>
{!renderProjects ? null :
<Projects>
<H level={headerLevel} inline >PROJECTS</H>
{projects.map((project: ProjectType, i: number) => (
<Experience experience={project} level={headerLevel + 1} key={i} />
))}
</Projects>
}
</Indent>
</Container>
);
}
}
export default Experience;
| Experience | identifier_name |
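formatDate above maps an ISO date string to a 'YYYY-MM' stamp and a missing date to the literal 'present'. The Python equivalent, as a sketch for ISO-formatted inputs only (moment accepts more formats):

# Rough Python equivalent of the formatDate helper above (ISO inputs only).
from datetime import datetime
from typing import Optional

def format_date(date: Optional[str]) -> str:
    return datetime.fromisoformat(date).strftime('%Y-%m') if date else 'present'

assert format_date('2016-03-15') == '2016-03'
assert format_date(None) == 'present'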
sequenceOperator.tests.ts | import * as chai from 'chai';
import * as slimdom from 'slimdom';
import { evaluateXPathToBoolean, evaluateXPathToNumbers } from 'fontoxpath';
import evaluateXPathToAsyncSingleton from 'test-helpers/evaluateXPathToAsyncSingleton'; | it('creates an empty sequence', () => chai.assert.deepEqual(evaluateXPathToNumbers('()'), []));
it('normalizes sequences', () =>
chai.assert.deepEqual(evaluateXPathToNumbers('(1,2,(3,4))'), [1, 2, 3, 4]));
});
describe('range', () => {
it('creates a sequence', () =>
chai.assert.deepEqual(evaluateXPathToNumbers('1 to 10'), [1, 2, 3, 4, 5, 6, 7, 8, 9, 10]));
it('creates an empty sequence when passed a > b', () =>
chai.assert.deepEqual(evaluateXPathToNumbers('10 to 1'), []));
it('creates an empty sequence when passed () to 10', () =>
chai.assert.deepEqual(evaluateXPathToNumbers('() to 10'), []));
it('creates a sequence of correct length', () =>
chai.assert.isTrue(evaluateXPathToBoolean('(1 to 10) => count() = 10')));
it('creates an empty sequence when passed 1 to ()', () =>
chai.assert.deepEqual(evaluateXPathToNumbers('1 to ()'), []));
}); |
describe('sequence', () => {
it('creates a sequence', () =>
chai.assert.deepEqual(evaluateXPathToNumbers('(1,2,3)'), [1, 2, 3]));
| random_line_split |
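The range tests above pin down the XPath to operator: inclusive on both ends, empty when either operand is the empty sequence, and empty when the start exceeds the end. A small Python model of those semantics, for illustration only:

# Python model of the XPath "a to b" semantics exercised above.
def xpath_to(a, b):
    if a is None or b is None or a > b:  # () operand or reversed bounds
        return []
    return list(range(a, b + 1))         # inclusive on both ends

assert xpath_to(1, 10) == [1, 2, 3, 4, 5, 6, 7, 8, 9, 10]
assert xpath_to(10, 1) == []
assert xpath_to(None, 10) == []
assert xpath_to(1, None) == []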
users.rs | #![crate_name = "uu_users"]
/*
* This file is part of the uutils coreutils package.
*
* (c) KokaKiwi <[email protected]>
*
* For the full copyright and license information, please view the LICENSE
* file that was distributed with this source code.
*/
/* last synced with: whoami (GNU coreutils) 8.22 */
// Allow dead code in order to keep all fields and constants here, for consistency.
#![allow(dead_code)]
extern crate getopts;
extern crate libc;
#[macro_use]
extern crate uucore;
use getopts::Options;
use std::ffi::{CStr, CString};
use std::mem;
use std::ptr;
use uucore::utmpx::*;
extern {
fn getutxent() -> *const c_utmp;
fn getutxid(ut: *const c_utmp) -> *const c_utmp;
fn getutxline(ut: *const c_utmp) -> *const c_utmp;
fn pututxline(ut: *const c_utmp) -> *const c_utmp;
fn setutxent();
fn endutxent();
#[cfg(any(target_os = "macos", target_os = "linux"))]
fn utmpxname(file: *const libc::c_char) -> libc::c_int;
}
#[cfg(target_os = "freebsd")]
unsafe extern fn utmpxname(_file: *const libc::c_char) -> libc::c_int {
0
}
static NAME: &'static str = "users";
static VERSION: &'static str = env!("CARGO_PKG_VERSION");
pub fn uumain(args: Vec<String>) -> i32 {
let mut opts = Options::new();
opts.optflag("h", "help", "display this help and exit");
opts.optflag("V", "version", "output version information and exit");
let matches = match opts.parse(&args[1..]) {
Ok(m) => m,
Err(f) => panic!("{}", f),
};
if matches.opt_present("help") {
println!("{} {}", NAME, VERSION);
println!("");
println!("Usage:");
println!(" {} [OPTION]... [FILE]", NAME);
println!("");
println!("{}", opts.usage("Output who is currently logged in according to FILE."));
return 0;
}
if matches.opt_present("version") {
println!("{} {}", NAME, VERSION);
return 0;
}
let filename = if !matches.free.is_empty() {
matches.free[0].as_ref()
} else {
DEFAULT_FILE
};
exec(filename);
0
}
fn exec(filename: &str) | }
}
endutxent();
}
if !users.is_empty() {
users.sort();
println!("{}", users.join(" "));
}
}
| {
unsafe {
utmpxname(CString::new(filename).unwrap().as_ptr());
}
let mut users = vec!();
unsafe {
setutxent();
loop {
let line = getutxent();
if line == ptr::null() {
break;
}
if (*line).ut_type == USER_PROCESS {
let user = String::from_utf8_lossy(CStr::from_ptr(mem::transmute(&(*line).ut_user)).to_bytes()).to_string();
users.push(user); | identifier_body |
users.rs | #![crate_name = "uu_users"]
/*
* This file is part of the uutils coreutils package.
*
* (c) KokaKiwi <[email protected]>
*
* For the full copyright and license information, please view the LICENSE
* file that was distributed with this source code.
*/
/* last synced with: whoami (GNU coreutils) 8.22 */
// Allow dead code in order to keep all fields and constants here, for consistency.
#![allow(dead_code)]
extern crate getopts;
extern crate libc;
#[macro_use]
extern crate uucore;
use getopts::Options;
use std::ffi::{CStr, CString};
use std::mem;
use std::ptr;
use uucore::utmpx::*;
extern {
fn getutxent() -> *const c_utmp;
fn getutxid(ut: *const c_utmp) -> *const c_utmp;
fn getutxline(ut: *const c_utmp) -> *const c_utmp;
fn pututxline(ut: *const c_utmp) -> *const c_utmp;
fn setutxent();
fn endutxent();
#[cfg(any(target_os = "macos", target_os = "linux"))]
fn utmpxname(file: *const libc::c_char) -> libc::c_int;
}
#[cfg(target_os = "freebsd")]
unsafe extern fn utmpxname(_file: *const libc::c_char) -> libc::c_int {
0
}
static NAME: &'static str = "users";
static VERSION: &'static str = env!("CARGO_PKG_VERSION");
pub fn uumain(args: Vec<String>) -> i32 {
let mut opts = Options::new();
opts.optflag("h", "help", "display this help and exit");
opts.optflag("V", "version", "output version information and exit");
let matches = match opts.parse(&args[1..]) {
Ok(m) => m,
Err(f) => panic!("{}", f),
};
if matches.opt_present("help") {
println!("{} {}", NAME, VERSION);
println!("");
println!("Usage:");
println!(" {} [OPTION]... [FILE]", NAME);
println!("");
println!("{}", opts.usage("Output who is currently logged in according to FILE."));
return 0;
}
if matches.opt_present("version") {
println!("{} {}", NAME, VERSION);
return 0;
}
let filename = if !matches.free.is_empty() {
matches.free[0].as_ref()
} else {
DEFAULT_FILE
};
exec(filename);
0
}
fn exec(filename: &str) {
unsafe {
utmpxname(CString::new(filename).unwrap().as_ptr());
}
let mut users = vec!();
unsafe {
setutxent();
loop {
let line = getutxent();
if line == ptr::null() {
break;
}
if (*line).ut_type == USER_PROCESS {
let user = String::from_utf8_lossy(CStr::from_ptr(mem::transmute(&(*line).ut_user)).to_bytes()).to_string();
users.push(user);
}
}
endutxent();
}
if !users.is_empty() { | users.sort();
println!("{}", users.join(" "));
}
} | random_line_split |
|
users.rs | #![crate_name = "uu_users"]
/*
* This file is part of the uutils coreutils package.
*
* (c) KokaKiwi <[email protected]>
*
* For the full copyright and license information, please view the LICENSE
* file that was distributed with this source code.
*/
/* last synced with: whoami (GNU coreutils) 8.22 */
// Allow dead code in order to keep all fields and constants here, for consistency.
#![allow(dead_code)]
extern crate getopts;
extern crate libc;
#[macro_use]
extern crate uucore;
use getopts::Options;
use std::ffi::{CStr, CString};
use std::mem;
use std::ptr;
use uucore::utmpx::*;
extern {
fn getutxent() -> *const c_utmp;
fn getutxid(ut: *const c_utmp) -> *const c_utmp;
fn getutxline(ut: *const c_utmp) -> *const c_utmp;
fn pututxline(ut: *const c_utmp) -> *const c_utmp;
fn setutxent();
fn endutxent();
#[cfg(any(target_os = "macos", target_os = "linux"))]
fn utmpxname(file: *const libc::c_char) -> libc::c_int;
}
#[cfg(target_os = "freebsd")]
unsafe extern fn | (_file: *const libc::c_char) -> libc::c_int {
0
}
static NAME: &'static str = "users";
static VERSION: &'static str = env!("CARGO_PKG_VERSION");
pub fn uumain(args: Vec<String>) -> i32 {
let mut opts = Options::new();
opts.optflag("h", "help", "display this help and exit");
opts.optflag("V", "version", "output version information and exit");
let matches = match opts.parse(&args[1..]) {
Ok(m) => m,
Err(f) => panic!("{}", f),
};
if matches.opt_present("help") {
println!("{} {}", NAME, VERSION);
println!("");
println!("Usage:");
println!(" {} [OPTION]... [FILE]", NAME);
println!("");
println!("{}", opts.usage("Output who is currently logged in according to FILE."));
return 0;
}
if matches.opt_present("version") {
println!("{} {}", NAME, VERSION);
return 0;
}
let filename = if !matches.free.is_empty() {
matches.free[0].as_ref()
} else {
DEFAULT_FILE
};
exec(filename);
0
}
fn exec(filename: &str) {
unsafe {
utmpxname(CString::new(filename).unwrap().as_ptr());
}
let mut users = vec!();
unsafe {
setutxent();
loop {
let line = getutxent();
if line == ptr::null() {
break;
}
if (*line).ut_type == USER_PROCESS {
let user = String::from_utf8_lossy(CStr::from_ptr(mem::transmute(&(*line).ut_user)).to_bytes()).to_string();
users.push(user);
}
}
endutxent();
}
if !users.is_empty() {
users.sort();
println!("{}", users.join(" "));
}
}
| utmpxname | identifier_name |
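For comparison, the whole flow of the users utility above — read utmpx entries, keep USER_PROCESS records, sort, print space-separated — fits in a few lines of Python when a helper library is available; psutil here is an assumption, not something the Rust code uses:

# Rough Python analogue of the users utility above (requires psutil).
import psutil

names = sorted(u.name for u in psutil.users())  # one entry per login session
print(" ".join(names))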
mod.rs | pub mod message;
use std::net::TcpStream;
use std::io;
use std::io::{BufReader, Write, BufRead, Error};
use irc::message::{Message, MessageError};
/// Contains methods that handle common IRC functionality.
pub trait Irc {
/// Sends login credentials to the server.
fn login(&mut self, username: &str, oauth: &str) -> Result<(), io::Error>;
}
impl Irc for TcpStream {
fn login(&mut self, username: &str, oauth: &str) -> Result<(), io::Error> {
writeln!(self, "USER {} 0 * :{}", username, username)?;
writeln!(self, "PASS {}", oauth)?;
writeln!(self, "NICK {}", username)?;
writeln!(self, "CAP REQ :twitch.tv/membership")?;
Ok(())
}
}
pub trait StreamUtil {
/// Sends a `&str` to the server immediately.
fn send_line(&mut self, string: &str) -> Result<(), io::Error>;
}
impl StreamUtil for TcpStream {
fn send_line(&mut self, string: &str) -> Result<(), io::Error> |
}
/// Represents an error caused by reading a [`Message`] from the server.
/// [`Message`]: message/enum.Message.html
#[derive(Debug)]
pub enum ReadLineError {
/// The error is related to the connection.
Connection(Error),
/// The error is related to the attempt to parse the message received from the server.
Message(MessageError),
}
pub trait ReaderUtil {
/// Reads a line directly from the connected server.
fn read_line_raw(&mut self) -> Result<String, Error>;
/// Reads a line from the server and parses a [`Message`] from it.
/// [`Message`]: message/enum.Message.html
fn read_message(&mut self) -> Result<Message, ReadLineError>;
}
impl ReaderUtil for BufReader<TcpStream> {
fn read_line_raw(&mut self) -> Result<String, Error> {
let mut line = String::new();
self.read_line(&mut line)?;
Ok(line)
}
fn read_message(&mut self) -> Result<Message, ReadLineError> {
self.read_line_raw()
.map_err(ReadLineError::Connection)
.and_then(|line| Message::parse(&line).map_err(ReadLineError::Message))
}
} | {
writeln!(self, "{}", string)?;
self.flush()
} | identifier_body |
mod.rs | pub mod message;
use std::net::TcpStream;
use std::io;
use std::io::{BufReader, Write, BufRead, Error};
use irc::message::{Message, MessageError};
/// Contains methods that handle common IRC functionality.
pub trait Irc {
/// Sends login credentials to the server.
fn login(&mut self, username: &str, oauth: &str) -> Result<(), io::Error>;
}
impl Irc for TcpStream {
fn login(&mut self, username: &str, oauth: &str) -> Result<(), io::Error> {
writeln!(self, "USER {} 0 * :{}", username, username)?;
writeln!(self, "PASS {}", oauth)?;
writeln!(self, "NICK {}", username)?;
writeln!(self, "CAP REQ :twitch.tv/membership")?;
Ok(())
}
}
pub trait StreamUtil {
/// Sends a `&str` to the server immediately.
fn send_line(&mut self, string: &str) -> Result<(), io::Error>;
}
impl StreamUtil for TcpStream {
fn send_line(&mut self, string: &str) -> Result<(), io::Error> {
writeln!(self, "{}", string)?;
self.flush()
}
}
/// Represents an error caused by reading a [`Message`] from the server.
/// [`Message`]: message/enum.Message.html
#[derive(Debug)]
pub enum | {
/// The error is related to the connection.
Connection(Error),
/// The error is related to the attempt to parse the message received from the server.
Message(MessageError),
}
pub trait ReaderUtil {
/// Reads a line directly from the connected server.
fn read_line_raw(&mut self) -> Result<String, Error>;
/// Reads a line from the server and parses a [`Message`] from it.
/// [`Message`]: message/enum.Message.html
fn read_message(&mut self) -> Result<Message, ReadLineError>;
}
impl ReaderUtil for BufReader<TcpStream> {
fn read_line_raw(&mut self) -> Result<String, Error> {
let mut line = String::new();
self.read_line(&mut line)?;
Ok(line)
}
fn read_message(&mut self) -> Result<Message, ReadLineError> {
self.read_line_raw()
.map_err(ReadLineError::Connection)
.and_then(|line| Message::parse(&line).map_err(ReadLineError::Message))
}
} | ReadLineError | identifier_name |
mod.rs | pub mod message;
use std::net::TcpStream;
use std::io;
use std::io::{BufReader, Write, BufRead, Error};
use irc::message::{Message, MessageError};
/// Contains methods that handle common IRC functionality.
pub trait Irc {
/// Sends login credentials to the server.
fn login(&mut self, username: &str, oauth: &str) -> Result<(), io::Error>;
}
impl Irc for TcpStream {
fn login(&mut self, username: &str, oauth: &str) -> Result<(), io::Error> {
writeln!(self, "USER {} 0 * :{}", username, username)?;
writeln!(self, "PASS {}", oauth)?;
writeln!(self, "NICK {}", username)?;
writeln!(self, "CAP REQ :twitch.tv/membership")?;
Ok(())
}
}
pub trait StreamUtil {
/// Sends a `&str` to the server immediately.
fn send_line(&mut self, string: &str) -> Result<(), io::Error>;
}
impl StreamUtil for TcpStream { |
/// Represents an error caused by reading a [`Message`] from the server.
/// [`Message`]: message/enum.Message.html
#[derive(Debug)]
pub enum ReadLineError {
/// The error is related to the connection.
Connection(Error),
/// The error is related to the attempt to parse the message received from the server.
Message(MessageError),
}
pub trait ReaderUtil {
/// Reads a line directly from the connected server.
fn read_line_raw(&mut self) -> Result<String, Error>;
/// Reads a line from the server and parses a [`Message`] from it.
/// [`Message`]: message/enum.Message.html
fn read_message(&mut self) -> Result<Message, ReadLineError>;
}
impl ReaderUtil for BufReader<TcpStream> {
fn read_line_raw(&mut self) -> Result<String, Error> {
let mut line = String::new();
self.read_line(&mut line)?;
Ok(line)
}
fn read_message(&mut self) -> Result<Message, ReadLineError> {
self.read_line_raw()
.map_err(ReadLineError::Connection)
.and_then(|line| Message::parse(&line).map_err(ReadLineError::Message))
}
} | fn send_line(&mut self, string: &str) -> Result<(), io::Error> {
writeln!(self, "{}", string)?;
self.flush()
}
} | random_line_split |
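The login handshake in the mod.rs rows is plain line-oriented IRC plus Twitch's capability request. A minimal Python socket version of the same four lines, with a hypothetical host and placeholder credentials (IRC proper terminates lines with CRLF):

# Sketch of the IRC login sequence from the Rust Irc trait above.
import socket

def login(sock: socket.socket, username: str, oauth: str) -> None:
    for line in (
        f"USER {username} 0 * :{username}",
        f"PASS {oauth}",
        f"NICK {username}",
        "CAP REQ :twitch.tv/membership",
    ):
        sock.sendall(line.encode() + b"\r\n")

# sock = socket.create_connection(("irc.chat.twitch.tv", 6667))  # hypothetical endpoint
# login(sock, "mybot", "oauth:xxxxxxxx")                         # placeholder credentials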
plugin.py | # Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from django.utils.translation import ugettext_lazy as _
from keystoneclient.auth.identity import v2 as v2_auth
from keystoneclient.auth.identity import v3 as v3_auth
from keystoneclient.v3 import client as v3_client
from openstack_auth.plugin import base
from openstack_auth import exceptions
from openstack_auth import utils
__all__ = ['FederatedTokenPlugin']
| def get_plugin(self, auth_url=None, token=None, project_id=None,
**kwargs):
if not all((auth_url, token)):
return None
if utils.get_keystone_version() >= 3:
return v3_auth.Token(auth_url=auth_url,
token=token,
project_id=project_id,
reauthenticate=False)
else:
return v2_auth.Token(auth_url=auth_url,
token=token,
tenant_id=project_id,
reauthenticate=False)
def list_projects(self, session, auth_plugin, auth_ref=None):
if utils.get_keystone_version() < 3:
msg = _('Cannot list federated tokens from v2 API')
raise exceptions.KeystoneAuthException(msg)
client = v3_client.Client(session=session, auth=auth_plugin)
return client.federation.projects.list() |
class FederatedTokenPlugin(base.BasePlugin):
"""Authenticate against keystone with an existing token."""
| random_line_split |
plugin.py | # Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from django.utils.translation import ugettext_lazy as _
from keystoneclient.auth.identity import v2 as v2_auth
from keystoneclient.auth.identity import v3 as v3_auth
from keystoneclient.v3 import client as v3_client
from openstack_auth.plugin import base
from openstack_auth import exceptions
from openstack_auth import utils
__all__ = ['FederatedTokenPlugin']
class FederatedTokenPlugin(base.BasePlugin):
"""Authenticate against keystone with an existing token."""
def get_plugin(self, auth_url=None, token=None, project_id=None,
**kwargs):
|
def list_projects(self, session, auth_plugin, auth_ref=None):
if utils.get_keystone_version() < 3:
msg = _('Cannot list federated tokens from v2 API')
raise exceptions.KeystoneAuthException(msg)
client = v3_client.Client(session=session, auth=auth_plugin)
return client.federation.projects.list()
| if not all((auth_url, token)):
return None
if utils.get_keystone_version() >= 3:
return v3_auth.Token(auth_url=auth_url,
token=token,
project_id=project_id,
reauthenticate=False)
else:
return v2_auth.Token(auth_url=auth_url,
token=token,
tenant_id=project_id,
reauthenticate=False) | identifier_body |
plugin.py | # Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from django.utils.translation import ugettext_lazy as _
from keystoneclient.auth.identity import v2 as v2_auth
from keystoneclient.auth.identity import v3 as v3_auth
from keystoneclient.v3 import client as v3_client
from openstack_auth.plugin import base
from openstack_auth import exceptions
from openstack_auth import utils
__all__ = ['FederatedTokenPlugin']
class FederatedTokenPlugin(base.BasePlugin):
"""Authenticate against keystone with an existing token."""
def | (self, auth_url=None, token=None, project_id=None,
**kwargs):
if not all((auth_url, token)):
return None
if utils.get_keystone_version() >= 3:
return v3_auth.Token(auth_url=auth_url,
token=token,
project_id=project_id,
reauthenticate=False)
else:
return v2_auth.Token(auth_url=auth_url,
token=token,
tenant_id=project_id,
reauthenticate=False)
def list_projects(self, session, auth_plugin, auth_ref=None):
if utils.get_keystone_version() < 3:
msg = _('Cannot list federated tokens from v2 API')
raise exceptions.KeystoneAuthException(msg)
client = v3_client.Client(session=session, auth=auth_plugin)
return client.federation.projects.list()
| get_plugin | identifier_name |
plugin.py | # Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from django.utils.translation import ugettext_lazy as _
from keystoneclient.auth.identity import v2 as v2_auth
from keystoneclient.auth.identity import v3 as v3_auth
from keystoneclient.v3 import client as v3_client
from openstack_auth.plugin import base
from openstack_auth import exceptions
from openstack_auth import utils
__all__ = ['FederatedTokenPlugin']
class FederatedTokenPlugin(base.BasePlugin):
"""Authenticate against keystone with an existing token."""
def get_plugin(self, auth_url=None, token=None, project_id=None,
**kwargs):
if not all((auth_url, token)):
|
if utils.get_keystone_version() >= 3:
return v3_auth.Token(auth_url=auth_url,
token=token,
project_id=project_id,
reauthenticate=False)
else:
return v2_auth.Token(auth_url=auth_url,
token=token,
tenant_id=project_id,
reauthenticate=False)
def list_projects(self, session, auth_plugin, auth_ref=None):
if utils.get_keystone_version() < 3:
msg = _('Cannot list federated tokens from v2 API')
raise exceptions.KeystoneAuthException(msg)
client = v3_client.Client(session=session, auth=auth_plugin)
return client.federation.projects.list()
| return None | conditional_block |
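Taken together, the plugin.py rows describe one small auth plugin. A hypothetical driver for it is sketched below; the import path, endpoint, and token are placeholders, and the `Session` wiring assumes the standard keystoneclient session API rather than anything stated in the rows:

```python
# Hypothetical usage of the FederatedTokenPlugin from the rows above.
# The import path, endpoint, and token are placeholders, not real values.
from keystoneclient import session as ks_session

from openstack_auth.plugin.plugin import FederatedTokenPlugin  # path assumed

plugin = FederatedTokenPlugin()
auth = plugin.get_plugin(
    auth_url="https://keystone.example.com:5000/v3",  # placeholder endpoint
    token="<federated-token>",                        # placeholder token
)
if auth is not None:  # get_plugin returns None when auth_url/token is missing
    sess = ks_session.Session(auth=auth)
    # list_projects raises KeystoneAuthException on v2-only deployments
    for project in plugin.list_projects(sess, auth):
        print(project.id, project.name)
```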
get_key_digest.py | # -*- coding: utf-8 -*-
################################################################################
# Copyright 2013-2015 Aerospike, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
################################################################################
from __future__ import print_function
import aerospike
import sys
from optparse import OptionParser
################################################################################
# Options Parsing
################################################################################
usage = "usage: %prog [options] key"
optparser = OptionParser(usage=usage, add_help_option=False)
optparser.add_option(
"--help", dest="help", action="store_true",
help="Displays this message.")
optparser.add_option(
"-U", "--username", dest="username", type="string", metavar="<USERNAME>",
help="Username to connect to database.")
optparser.add_option(
"-P", "--password", dest="password", type="string", metavar="<PASSWORD>",
help="Password to connect to database.")
optparser.add_option(
"-h", "--host", dest="host", type="string", default="127.0.0.1", metavar="<ADDRESS>",
help="Address of Aerospike server.")
optparser.add_option(
"-p", "--port", dest="port", type="int", default=3000, metavar="<PORT>",
help="Port of the Aerospike server.")
optparser.add_option(
"--timeout", dest="timeout", type="int", default=1000, metavar="<MS>",
help="Client timeout")
optparser.add_option(
"-n", "--namespace", dest="namespace", type="string", default="test", metavar="<NS>",
help="Port of the Aerospike server.")
optparser.add_option(
"-s", "--set", dest="set", type="string", default="demo", metavar="<SET>",
help="Port of the Aerospike server.")
(options, args) = optparser.parse_args()
if options.help:
optparser.print_help()
print()
sys.exit(1)
if len(args) != 1:
|
################################################################################
# Client Configuration
################################################################################
config = {
'hosts': [ (options.host, options.port) ],
'policies': {
'timeout': options.timeout
}
}
################################################################################
# Application
################################################################################
exitCode = 0
try:
# ----------------------------------------------------------------------------
# Connect to Cluster
# ----------------------------------------------------------------------------
client = aerospike.client(config).connect(options.username, options.password)
# ----------------------------------------------------------------------------
# Perform Operation
# ----------------------------------------------------------------------------
try:
namespace = options.namespace if options.namespace and options.namespace != 'None' else None
set = options.set if options.set and options.set != 'None' else None
key = args.pop(0)
digest = client.get_key_digest(namespace, set, key)
print("---")
print("Digest is: ", digest)
except Exception as e:
print("error: {0}".format(e), file=sys.stderr)
exitCode = 2
# ----------------------------------------------------------------------------
# Close Connection to Cluster
# ----------------------------------------------------------------------------
client.close()
except Exception as e:
print("error: {0}".format(e), file=sys.stderr)
exitCode = 3
################################################################################
# Exit
################################################################################
sys.exit(exitCode)
| optparser.print_help()
print()
sys.exit(1) | conditional_block |
get_key_digest.py | # -*- coding: utf-8 -*-
################################################################################
# Copyright 2013-2015 Aerospike, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
################################################################################
from __future__ import print_function
import aerospike
import sys
from optparse import OptionParser
|
optparser = OptionParser(usage=usage, add_help_option=False)
optparser.add_option(
"--help", dest="help", action="store_true",
help="Displays this message.")
optparser.add_option(
"-U", "--username", dest="username", type="string", metavar="<USERNAME>",
help="Username to connect to database.")
optparser.add_option(
"-P", "--password", dest="password", type="string", metavar="<PASSWORD>",
help="Password to connect to database.")
optparser.add_option(
"-h", "--host", dest="host", type="string", default="127.0.0.1", metavar="<ADDRESS>",
help="Address of Aerospike server.")
optparser.add_option(
"-p", "--port", dest="port", type="int", default=3000, metavar="<PORT>",
help="Port of the Aerospike server.")
optparser.add_option(
"--timeout", dest="timeout", type="int", default=1000, metavar="<MS>",
help="Client timeout")
optparser.add_option(
"-n", "--namespace", dest="namespace", type="string", default="test", metavar="<NS>",
help="Port of the Aerospike server.")
optparser.add_option(
"-s", "--set", dest="set", type="string", default="demo", metavar="<SET>",
help="Port of the Aerospike server.")
(options, args) = optparser.parse_args()
if options.help:
optparser.print_help()
print()
sys.exit(1)
if len(args) != 1:
optparser.print_help()
print()
sys.exit(1)
################################################################################
# Client Configuration
################################################################################
config = {
'hosts': [ (options.host, options.port) ],
'policies': {
'timeout': options.timeout
}
}
################################################################################
# Application
################################################################################
exitCode = 0
try:
# ----------------------------------------------------------------------------
# Connect to Cluster
# ----------------------------------------------------------------------------
client = aerospike.client(config).connect(options.username, options.password)
# ----------------------------------------------------------------------------
# Perform Operation
# ----------------------------------------------------------------------------
try:
namespace = options.namespace if options.namespace and options.namespace != 'None' else None
set = options.set if options.set and options.set != 'None' else None
key = args.pop(0)
digest = client.get_key_digest(namespace, set, key)
print("---")
print("Digest is: ", digest)
except Exception as e:
print("error: {0}".format(e), file=sys.stderr)
exitCode = 2
# ----------------------------------------------------------------------------
# Close Connection to Cluster
# ----------------------------------------------------------------------------
client.close()
except Exception as e:
print("error: {0}".format(e), file=sys.stderr)
exitCode = 3
################################################################################
# Exit
################################################################################
sys.exit(exitCode) | ################################################################################
# Options Parsing
################################################################################
usage = "usage: %prog [options] key" | random_line_split |
robotiq_gripper_sensor_test.py | # Copyright 2020 DeepMind Technologies Limited.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Tests for robotiq_gripper_sensor."""
from absl.testing import absltest
from dm_robotics.moma.models.end_effectors.robot_hands import robotiq_2f85
from dm_robotics.moma.sensors import robotiq_gripper_observations
from dm_robotics.moma.sensors import robotiq_gripper_sensor
# Absolute tolerance parameter.
_A_TOL = 5e-03
# Relative tolerance parameter.
_R_TOL = 0.01
class RobotiqGripperSensorTest(absltest.TestCase):
|
if __name__ == '__main__':
absltest.main()
| def test_sensor_has_all_observables(self):
name = 'gripper'
gripper = robotiq_2f85.Robotiq2F85(name=name)
sensor = robotiq_gripper_sensor.RobotiqGripperSensor(
gripper=gripper, name=name)
sensor.initialize_for_task(0.1, 0.001, 100)
expected_observable_names = set(
sensor.get_obs_key(obs)
for obs in robotiq_gripper_observations.Observations)
actual_observable_names = set(sensor.observables.keys())
self.assertSameElements(expected_observable_names, actual_observable_names) | identifier_body |
robotiq_gripper_sensor_test.py | # Copyright 2020 DeepMind Technologies Limited.
#
# Licensed under the Apache License, Version 2.0 (the "License"); | # http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Tests for robotiq_gripper_sensor."""
from absl.testing import absltest
from dm_robotics.moma.models.end_effectors.robot_hands import robotiq_2f85
from dm_robotics.moma.sensors import robotiq_gripper_observations
from dm_robotics.moma.sensors import robotiq_gripper_sensor
# Absolute tolerance parameter.
_A_TOL = 5e-03
# Relative tolerance parameter.
_R_TOL = 0.01
class RobotiqGripperSensorTest(absltest.TestCase):
def test_sensor_has_all_observables(self):
name = 'gripper'
gripper = robotiq_2f85.Robotiq2F85(name=name)
sensor = robotiq_gripper_sensor.RobotiqGripperSensor(
gripper=gripper, name=name)
sensor.initialize_for_task(0.1, 0.001, 100)
expected_observable_names = set(
sensor.get_obs_key(obs)
for obs in robotiq_gripper_observations.Observations)
actual_observable_names = set(sensor.observables.keys())
self.assertSameElements(expected_observable_names, actual_observable_names)
if __name__ == '__main__':
absltest.main() | # you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
# | random_line_split |
robotiq_gripper_sensor_test.py | # Copyright 2020 DeepMind Technologies Limited.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Tests for robotiq_gripper_sensor."""
from absl.testing import absltest
from dm_robotics.moma.models.end_effectors.robot_hands import robotiq_2f85
from dm_robotics.moma.sensors import robotiq_gripper_observations
from dm_robotics.moma.sensors import robotiq_gripper_sensor
# Absolute tolerance parameter.
_A_TOL = 5e-03
# Relative tolerance parameter.
_R_TOL = 0.01
class RobotiqGripperSensorTest(absltest.TestCase):
def | (self):
name = 'gripper'
gripper = robotiq_2f85.Robotiq2F85(name=name)
sensor = robotiq_gripper_sensor.RobotiqGripperSensor(
gripper=gripper, name=name)
sensor.initialize_for_task(0.1, 0.001, 100)
expected_observable_names = set(
sensor.get_obs_key(obs)
for obs in robotiq_gripper_observations.Observations)
actual_observable_names = set(sensor.observables.keys())
self.assertSameElements(expected_observable_names, actual_observable_names)
if __name__ == '__main__':
absltest.main()
| test_sensor_has_all_observables | identifier_name |
robotiq_gripper_sensor_test.py | # Copyright 2020 DeepMind Technologies Limited.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Tests for robotiq_gripper_sensor."""
from absl.testing import absltest
from dm_robotics.moma.models.end_effectors.robot_hands import robotiq_2f85
from dm_robotics.moma.sensors import robotiq_gripper_observations
from dm_robotics.moma.sensors import robotiq_gripper_sensor
# Absolute tolerance parameter.
_A_TOL = 5e-03
# Relative tolerance parameter.
_R_TOL = 0.01
class RobotiqGripperSensorTest(absltest.TestCase):
def test_sensor_has_all_observables(self):
name = 'gripper'
gripper = robotiq_2f85.Robotiq2F85(name=name)
sensor = robotiq_gripper_sensor.RobotiqGripperSensor(
gripper=gripper, name=name)
sensor.initialize_for_task(0.1, 0.001, 100)
expected_observable_names = set(
sensor.get_obs_key(obs)
for obs in robotiq_gripper_observations.Observations)
actual_observable_names = set(sensor.observables.keys())
self.assertSameElements(expected_observable_names, actual_observable_names)
if __name__ == '__main__':
| absltest.main() | conditional_block |
|
TestWinapp.py | common
if 'nt' == os.name:
import _winreg
else:
def fake_detect_registry_key(f):
return True
import bleachbit.Windows
bleachbit.Windows.detect_registry_key = fake_detect_registry_key
def get_winapp2():
"""Download and cache winapp2.ini. Return local filename."""
url = "http://www.winapp2.com/Winapp2.ini"
tmpdir = None
if 'posix' == os.name:
tmpdir = '/tmp'
if 'nt' == os.name:
tmpdir = os.getenv('TMP')
fn = os.path.join(tmpdir, 'bleachbit_test_winapp2.ini')
if os.path.exists(fn):
import time
import stat
age_seconds = time.time() - os.stat(fn)[stat.ST_MTIME]
if age_seconds > (24 * 36 * 36):
print 'note: deleting stale file %s ' % fn
os.remove(fn)
if not os.path.exists(fn):
f = file(fn, 'w')
import urllib2
txt = urllib2.urlopen(url).read()
f.write(txt)
return fn
class WinappTestCase(unittest.TestCase):
"""Test cases for Winapp"""
def run_all(self, cleaner, really_delete):
"""Test all the cleaner options"""
for (option_id, __name) in cleaner.get_options():
for cmd in cleaner.get_commands(option_id):
for result in cmd.execute(really_delete):
common.validate_result(self, result, really_delete)
def test_remote(self):
"""Test with downloaded file"""
winapps = Winapp(get_winapp2())
for cleaner in winapps.get_cleaners():
self.run_all(cleaner, False)
def test_detectos(self):
"""Test detectos function"""
# Tests are in the format (required_ver, mock, expected_return)
tests = (('5.1', '5.1', True),
('5.1', '6.0', False),
('6.0', '5.1', False),
('|5.1', '5.1', True),
('|5.1', '6.0', False),
('6.1|', '5.1', False),
('6.1|', '6.0', False),
('6.1|', '6.1', True),
('6.1|', '6.2', True),
('6.2|', '5.1', False),
('6.2|', '6.0', False),
('6.2|', '6.1', False),
('6.2|', '6.2', True))
for (s, mock, expected_return) in tests:
|
def test_detect_file(self):
"""Test detect_file function"""
tests = [('%windir%\\system32\\kernel32.dll', True),
('%windir%\\system32', True),
('%ProgramFiles%\\Internet Explorer', True),
('%ProgramFiles%\\Internet Explorer\\', True),
('%windir%\\doesnotexist', False),
('%windir%\\system*', True),
('%windir%\\*ystem32', True),
('%windir%\\*ystem3*', True)]
# On 64-bit Windows, Winapp2.ini expands the %ProgramFiles% environment
# variable to also %ProgramW6432%, so test unique entries in
# %ProgramW6432%.
import struct
if not 32 == 8 * struct.calcsize('P'):
raise NotImplementedError('expecting 32-bit Python')
if os.getenv('ProgramW6432'):
dir_64 = os.listdir(os.getenv('ProgramFiles'))
dir_32 = os.listdir(os.getenv('ProgramW6432'))
dir_32_unique = set(dir_32) - set(dir_64)
if dir_32 and not dir_32_unique:
raise RuntimeError(
'Test expects objects in %ProgramW6432% not in %ProgramFiles%')
for pathname in dir_32_unique:
tests.append(('%%ProgramFiles%%\\%s' % pathname, True))
else:
print 'NOTE: skipping %ProgramW6432% tests because WoW64 not detected'
for (pathname, expected_return) in tests:
actual_return = detect_file(pathname)
msg = 'detect_file(%s) returned %s' % (pathname, actual_return)
self.assertEqual(expected_return, actual_return, msg)
def test_fake(self):
"""Test with fake file"""
ini_fn = None
keyfull = 'HKCU\\Software\\BleachBit\\DeleteThisKey'
subkey = 'Software\\BleachBit\\DeleteThisKey\\AndThisKey'
def setup_fake(f1_filename=None):
"""Setup the test environment"""
dirname = tempfile.mkdtemp(prefix='bleachbit-test-winapp')
f1 = os.path.join(dirname, f1_filename or 'deleteme.log')
file(f1, 'w').write('')
dirname2 = os.path.join(dirname, 'sub')
os.mkdir(dirname2)
f2 = os.path.join(dirname2, 'deleteme.log')
file(f2, 'w').write('')
fbak = os.path.join(dirname, 'deleteme.bak')
file(fbak, 'w').write('')
self.assertTrue(os.path.exists(f1))
self.assertTrue(os.path.exists(f2))
self.assertTrue(os.path.exists(fbak))
hkey = _winreg.CreateKey(_winreg.HKEY_CURRENT_USER, subkey)
hkey.Close()
self.assertTrue(detect_registry_key(keyfull))
self.assertTrue(detect_registry_key('HKCU\\%s' % subkey))
return (dirname, f1, f2, fbak)
def ini2cleaner(filekey, do_next=True):
ini = file(ini_fn, 'w')
ini.write('[someapp]\n')
ini.write('LangSecRef=3021\n')
ini.write(filekey)
ini.write('\n')
ini.close()
self.assertTrue(os.path.exists(ini_fn))
if do_next:
return Winapp(ini_fn).get_cleaners().next()
else:
return Winapp(ini_fn).get_cleaners()
# reuse this path to store a winapp2.ini file in
import tempfile
(ini_h, ini_fn) = tempfile.mkstemp(suffix='.ini', prefix='winapp2')
os.close(ini_h)
# a set of tests
tests = [
# single file
('FileKey1=%s|deleteme.log', None,
False, True, False, True, True, True),
# special characters for XML
('FileKey1=%s|special_chars_&-\'.txt', 'special_chars_&-\'.txt',
False, True, False, True, True, True),
# *.log
('FileKey1=%s|*.LOG', None, False, True, False, True, True, True),
# semicolon separates different file types
('FileKey1=%s|*.log;*.bak', None,
False, True, False, True, False, True),
# *.*
('FileKey1=%s|*.*', None, False, True, False, True, False, True),
# recurse *.*
('FileKey1=%s|*.*|RECURSE', None, False,
True, False, False, False, True),
# remove self *.*, this removes the directory
('FileKey1=%s|*.*|REMOVESELF', None,
False, False, False, False, False, True),
]
# Add positive detection, where the detection believes the application is present,
# to all the tests, which are also positive.
new_tests = []
for test in tests:
for detect in (
"\nDetectFile=%%APPDATA%%\\Microsoft",
"\nDetectFile1=%%APPDATA%%\\Microsoft\nDetectFile2=%%APPDATA%%\\does_not_exist",
"\nDetectFile1=%%APPDATA%%\\does_not_exist\nDetectFile2=%%APPDATA%%\\Microsoft",
"\nDetect=HKCU\\Software\\Microsoft",
"\nDetect1=HKCU\\Software\\Microsoft\nDetect2=HKCU\\Software\\does_not_exist",
"\nDetect1=HKCU\\Software\\does_not_exist\nDetect2=HKCU\\Software\\Microsoft"):
new_ini = test[0] + detect
new_test = [new_ini, ] + [x for x in test[1:]]
new_tests.append(new_test)
positive_tests = tests + new_tests
# execute positive tests
for test in positive_tests:
print 'positive test: ', test
(dirname, f1, f2, fbak) = setup_fake(test[1])
cleaner = ini2cleaner(test[0] % dirname)
| actual_return = detectos(s, mock)
self.assertEqual(expected_return, actual_return,
'detectos(%s, %s)==%s instead of %s' % (s, mock,
actual_return, expected_return)) | conditional_block |
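The tuples in test_detectos pin down the version-spec grammar: a bare version requires an exact match, a leading `|` an upper bound, and a trailing `|` a lower bound. An illustrative reimplementation consistent with that table — the real detectos lives in BleachBit's winapp module and may differ in detail:

```python
# Illustrative detectos inferred from the test table above; not the real
# BleachBit implementation.
def detectos(required_ver, this_ver):
    """'X' -> exact match; '|X' -> this_ver <= X; 'X|' -> this_ver >= X."""
    def as_tuple(version):
        return tuple(int(part) for part in version.split('.'))

    if required_ver.startswith('|'):
        return as_tuple(this_ver) <= as_tuple(required_ver[1:])
    if required_ver.endswith('|'):
        return as_tuple(this_ver) >= as_tuple(required_ver[:-1])
    return as_tuple(this_ver) == as_tuple(required_ver)

assert detectos('|5.1', '5.1') and not detectos('|5.1', '6.0')
assert detectos('6.1|', '6.2') and not detectos('6.2|', '6.1')
```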
TestWinapp.py | common
if 'nt' == os.name:
import _winreg
else:
def fake_detect_registry_key(f):
return True
import bleachbit.Windows
bleachbit.Windows.detect_registry_key = fake_detect_registry_key
def get_winapp2():
"""Download and cache winapp2.ini. Return local filename."""
url = "http://www.winapp2.com/Winapp2.ini"
tmpdir = None
if 'posix' == os.name:
tmpdir = '/tmp'
if 'nt' == os.name:
tmpdir = os.getenv('TMP')
fn = os.path.join(tmpdir, 'bleachbit_test_winapp2.ini')
if os.path.exists(fn):
import time
import stat
age_seconds = time.time() - os.stat(fn)[stat.ST_MTIME]
if age_seconds > (24 * 36 * 36):
print 'note: deleting stale file %s ' % fn
os.remove(fn)
if not os.path.exists(fn):
f = file(fn, 'w')
import urllib2
txt = urllib2.urlopen(url).read()
f.write(txt)
return fn
class WinappTestCase(unittest.TestCase):
"""Test cases for Winapp"""
def run_all(self, cleaner, really_delete):
"""Test all the cleaner options"""
for (option_id, __name) in cleaner.get_options():
for cmd in cleaner.get_commands(option_id):
for result in cmd.execute(really_delete):
common.validate_result(self, result, really_delete)
def test_remote(self):
"""Test with downloaded file"""
winapps = Winapp(get_winapp2())
for cleaner in winapps.get_cleaners():
self.run_all(cleaner, False)
def test_detectos(self):
"""Test detectos function"""
# Tests are in the format (required_ver, mock, expected_return)
tests = (('5.1', '5.1', True),
('5.1', '6.0', False),
('6.0', '5.1', False),
('|5.1', '5.1', True),
('|5.1', '6.0', False),
('6.1|', '5.1', False),
('6.1|', '6.0', False),
('6.1|', '6.1', True),
('6.1|', '6.2', True),
('6.2|', '5.1', False),
('6.2|', '6.0', False),
('6.2|', '6.1', False),
('6.2|', '6.2', True))
for (s, mock, expected_return) in tests:
actual_return = detectos(s, mock)
self.assertEqual(expected_return, actual_return,
'detectos(%s, %s)==%s instead of %s' % (s, mock,
actual_return, expected_return))
def test_detect_file(self):
"""Test detect_file function"""
tests = [('%windir%\\system32\\kernel32.dll', True),
('%windir%\\system32', True),
('%ProgramFiles%\\Internet Explorer', True),
('%ProgramFiles%\\Internet Explorer\\', True),
('%windir%\\doesnotexist', False),
('%windir%\\system*', True),
('%windir%\\*ystem32', True),
('%windir%\\*ystem3*', True)]
# On 64-bit Windows, Winapp2.ini expands the %ProgramFiles% environment
# variable to also %ProgramW6432%, so test unique entries in
# %ProgramW6432%.
import struct
if not 32 == 8 * struct.calcsize('P'):
raise NotImplementedError('expecting 32-bit Python')
if os.getenv('ProgramW6432'):
dir_64 = os.listdir(os.getenv('ProgramFiles'))
dir_32 = os.listdir(os.getenv('ProgramW6432'))
dir_32_unique = set(dir_32) - set(dir_64)
if dir_32 and not dir_32_unique:
raise RuntimeError(
'Test expects objects in %ProgramW6432% not in %ProgramFiles%')
for pathname in dir_32_unique:
tests.append(('%%ProgramFiles%%\\%s' % pathname, True))
else:
print 'NOTE: skipping %ProgramW6432% tests because WoW64 not detected'
for (pathname, expected_return) in tests:
actual_return = detect_file(pathname)
msg = 'detect_file(%s) returned %s' % (pathname, actual_return)
self.assertEqual(expected_return, actual_return, msg)
def test_fake(self):
"""Test with fake file"""
ini_fn = None
keyfull = 'HKCU\\Software\\BleachBit\\DeleteThisKey'
subkey = 'Software\\BleachBit\\DeleteThisKey\\AndThisKey'
def setup_fake(f1_filename=None):
| self.assertTrue(detect_registry_key(keyfull))
self.assertTrue(detect_registry_key('HKCU\\%s' % subkey))
return (dirname, f1, f2, fbak)
def ini2cleaner(filekey, do_next=True):
ini = file(ini_fn, 'w')
ini.write('[someapp]\n')
ini.write('LangSecRef=3021\n')
ini.write(filekey)
ini.write('\n')
ini.close()
self.assertTrue(os.path.exists(ini_fn))
if do_next:
return Winapp(ini_fn).get_cleaners().next()
else:
return Winapp(ini_fn).get_cleaners()
# reuse this path to store a winapp2.ini file in
import tempfile
(ini_h, ini_fn) = tempfile.mkstemp(suffix='.ini', prefix='winapp2')
os.close(ini_h)
# a set of tests
tests = [
# single file
('FileKey1=%s|deleteme.log', None,
False, True, False, True, True, True),
# special characters for XML
('FileKey1=%s|special_chars_&-\'.txt', 'special_chars_&-\'.txt',
False, True, False, True, True, True),
# *.log
('FileKey1=%s|*.LOG', None, False, True, False, True, True, True),
# semicolon separates different file types
('FileKey1=%s|*.log;*.bak', None,
False, True, False, True, False, True),
# *.*
('FileKey1=%s|*.*', None, False, True, False, True, False, True),
# recurse *.*
('FileKey1=%s|*.*|RECURSE', None, False,
True, False, False, False, True),
# remove self *.*, this removes the directory
('FileKey1=%s|*.*|REMOVESELF', None,
False, False, False, False, False, True),
]
# Add positive detection, where the detection believes the application is present,
# to all the tests, which are also positive.
new_tests = []
for test in tests:
for detect in (
"\nDetectFile=%%APPDATA%%\\Microsoft",
"\nDetectFile1=%%APPDATA%%\\Microsoft\nDetectFile2=%%APPDATA%%\\does_not_exist",
"\nDetectFile1=%%APPDATA%%\\does_not_exist\nDetectFile2=%%APPDATA%%\\Microsoft",
"\nDetect=HKCU\\Software\\Microsoft",
"\nDetect1=HKCU\\Software\\Microsoft\nDetect2=HKCU\\Software\\does_not_exist",
"\nDetect1=HKCU\\Software\\does_not_exist\nDetect2=HKCU\\Software\\Microsoft"):
new_ini = test[0] + detect
new_test = [new_ini, ] + [x for x in test[1:]]
new_tests.append(new_test)
positive_tests = tests + new_tests
# execute positive tests
for test in positive_tests:
print 'positive test: ', test
(dirname, f1, f2, fbak) = setup_fake(test[1])
cleaner = ini2cleaner(test[0] % dirname)
self | """Setup the test environment"""
dirname = tempfile.mkdtemp(prefix='bleachbit-test-winapp')
f1 = os.path.join(dirname, f1_filename or 'deleteme.log')
file(f1, 'w').write('')
dirname2 = os.path.join(dirname, 'sub')
os.mkdir(dirname2)
f2 = os.path.join(dirname2, 'deleteme.log')
file(f2, 'w').write('')
fbak = os.path.join(dirname, 'deleteme.bak')
file(fbak, 'w').write('')
self.assertTrue(os.path.exists(f1))
self.assertTrue(os.path.exists(f2))
self.assertTrue(os.path.exists(fbak))
hkey = _winreg.CreateKey(_winreg.HKEY_CURRENT_USER, subkey)
hkey.Close()
| identifier_body |
TestWinapp.py | common
if 'nt' == os.name:
import _winreg
else:
def fake_detect_registry_key(f):
return True
import bleachbit.Windows
bleachbit.Windows.detect_registry_key = fake_detect_registry_key
def get_winapp2():
"""Download and cache winapp2.ini. Return local filename."""
url = "http://www.winapp2.com/Winapp2.ini"
tmpdir = None
if 'posix' == os.name:
tmpdir = '/tmp'
if 'nt' == os.name:
tmpdir = os.getenv('TMP')
fn = os.path.join(tmpdir, 'bleachbit_test_winapp2.ini')
if os.path.exists(fn):
import time
import stat
age_seconds = time.time() - os.stat(fn)[stat.ST_MTIME]
if age_seconds > (24 * 36 * 36):
print 'note: deleting stale file %s ' % fn
os.remove(fn)
if not os.path.exists(fn):
f = file(fn, 'w')
import urllib2
txt = urllib2.urlopen(url).read()
f.write(txt)
return fn
class WinappTestCase(unittest.TestCase):
"""Test cases for Winapp"""
def | (self, cleaner, really_delete):
"""Test all the cleaner options"""
for (option_id, __name) in cleaner.get_options():
for cmd in cleaner.get_commands(option_id):
for result in cmd.execute(really_delete):
common.validate_result(self, result, really_delete)
def test_remote(self):
"""Test with downloaded file"""
winapps = Winapp(get_winapp2())
for cleaner in winapps.get_cleaners():
self.run_all(cleaner, False)
def test_detectos(self):
"""Test detectos function"""
# Tests are in the format (required_ver, mock, expected_return)
tests = (('5.1', '5.1', True),
('5.1', '6.0', False),
('6.0', '5.1', False),
('|5.1', '5.1', True),
('|5.1', '6.0', False),
('6.1|', '5.1', False),
('6.1|', '6.0', False),
('6.1|', '6.1', True),
('6.1|', '6.2', True),
('6.2|', '5.1', False),
('6.2|', '6.0', False),
('6.2|', '6.1', False),
('6.2|', '6.2', True))
for (s, mock, expected_return) in tests:
actual_return = detectos(s, mock)
self.assertEqual(expected_return, actual_return,
'detectos(%s, %s)==%s instead of %s' % (s, mock,
actual_return, expected_return))
def test_detect_file(self):
"""Test detect_file function"""
tests = [('%windir%\\system32\\kernel32.dll', True),
('%windir%\\system32', True),
('%ProgramFiles%\\Internet Explorer', True),
('%ProgramFiles%\\Internet Explorer\\', True),
('%windir%\\doesnotexist', False),
('%windir%\\system*', True),
('%windir%\\*ystem32', True),
('%windir%\\*ystem3*', True)]
# On 64-bit Windows, Winapp2.ini expands the %ProgramFiles% environment
# variable to also %ProgramW6432%, so test unique entries in
# %ProgramW6432%.
import struct
if not 32 == 8 * struct.calcsize('P'):
raise NotImplementedError('expecting 32-bit Python')
if os.getenv('ProgramW6432'):
dir_64 = os.listdir(os.getenv('ProgramFiles'))
dir_32 = os.listdir(os.getenv('ProgramW6432'))
dir_32_unique = set(dir_32) - set(dir_64)
if dir_32 and not dir_32_unique:
raise RuntimeError(
'Test expects objects in %ProgramW6432% not in %ProgramFiles%')
for pathname in dir_32_unique:
tests.append(('%%ProgramFiles%%\\%s' % pathname, True))
else:
print 'NOTE: skipping %ProgramW6432% tests because WoW64 not detected'
for (pathname, expected_return) in tests:
actual_return = detect_file(pathname)
msg = 'detect_file(%s) returned %s' % (pathname, actual_return)
self.assertEqual(expected_return, actual_return, msg)
def test_fake(self):
"""Test with fake file"""
ini_fn = None
keyfull = 'HKCU\\Software\\BleachBit\\DeleteThisKey'
subkey = 'Software\\BleachBit\\DeleteThisKey\\AndThisKey'
def setup_fake(f1_filename=None):
"""Setup the test environment"""
dirname = tempfile.mkdtemp(prefix='bleachbit-test-winapp')
f1 = os.path.join(dirname, f1_filename or 'deleteme.log')
file(f1, 'w').write('')
dirname2 = os.path.join(dirname, 'sub')
os.mkdir(dirname2)
f2 = os.path.join(dirname2, 'deleteme.log')
file(f2, 'w').write('')
fbak = os.path.join(dirname, 'deleteme.bak')
file(fbak, 'w').write('')
self.assertTrue(os.path.exists(f1))
self.assertTrue(os.path.exists(f2))
self.assertTrue(os.path.exists(fbak))
hkey = _winreg.CreateKey(_winreg.HKEY_CURRENT_USER, subkey)
hkey.Close()
self.assertTrue(detect_registry_key(keyfull))
self.assertTrue(detect_registry_key('HKCU\\%s' % subkey))
return (dirname, f1, f2, fbak)
def ini2cleaner(filekey, do_next=True):
ini = file(ini_fn, 'w')
ini.write('[someapp]\n')
ini.write('LangSecRef=3021\n')
ini.write(filekey)
ini.write('\n')
ini.close()
self.assertTrue(os.path.exists(ini_fn))
if do_next:
return Winapp(ini_fn).get_cleaners().next()
else:
return Winapp(ini_fn).get_cleaners()
# reuse this path to store a winapp2.ini file in
import tempfile
(ini_h, ini_fn) = tempfile.mkstemp(suffix='.ini', prefix='winapp2')
os.close(ini_h)
# a set of tests
tests = [
# single file
('FileKey1=%s|deleteme.log', None,
False, True, False, True, True, True),
# special characters for XML
('FileKey1=%s|special_chars_&-\'.txt', 'special_chars_&-\'.txt',
False, True, False, True, True, True),
# *.log
('FileKey1=%s|*.LOG', None, False, True, False, True, True, True),
# semicolon separates different file types
('FileKey1=%s|*.log;*.bak', None,
False, True, False, True, False, True),
# *.*
('FileKey1=%s|*.*', None, False, True, False, True, False, True),
# recurse *.*
('FileKey1=%s|*.*|RECURSE', None, False,
True, False, False, False, True),
# remove self *.*, this removes the directory
('FileKey1=%s|*.*|REMOVESELF', None,
False, False, False, False, False, True),
]
# Add positive detection, where the detection believes the application is present,
# to all the tests, which are also positive.
new_tests = []
for test in tests:
for detect in (
"\nDetectFile=%%APPDATA%%\\Microsoft",
"\nDetectFile1=%%APPDATA%%\\Microsoft\nDetectFile2=%%APPDATA%%\\does_not_exist",
"\nDetectFile1=%%APPDATA%%\\does_not_exist\nDetectFile2=%%APPDATA%%\\Microsoft",
"\nDetect=HKCU\\Software\\Microsoft",
"\nDetect1=HKCU\\Software\\Microsoft\nDetect2=HKCU\\Software\\does_not_exist",
"\nDetect1=HKCU\\Software\\does_not_exist\nDetect2=HKCU\\Software\\Microsoft"):
new_ini = test[0] + detect
new_test = [new_ini, ] + [x for x in test[1:]]
new_tests.append(new_test)
positive_tests = tests + new_tests
# execute positive tests
for test in positive_tests:
print 'positive test: ', test
(dirname, f1, f2, fbak) = setup_fake(test[1])
cleaner = ini2cleaner(test[0] % dirname)
| run_all | identifier_name |
TestWinapp.py | common
if 'nt' == os.name:
import _winreg
else:
def fake_detect_registry_key(f):
return True
import bleachbit.Windows
bleachbit.Windows.detect_registry_key = fake_detect_registry_key
def get_winapp2():
"""Download and cache winapp2.ini. Return local filename."""
url = "http://www.winapp2.com/Winapp2.ini"
tmpdir = None
if 'posix' == os.name:
tmpdir = '/tmp'
if 'nt' == os.name:
tmpdir = os.getenv('TMP')
fn = os.path.join(tmpdir, 'bleachbit_test_winapp2.ini')
if os.path.exists(fn):
import time
import stat
age_seconds = time.time() - os.stat(fn)[stat.ST_MTIME]
if age_seconds > (24 * 36 * 36):
print 'note: deleting stale file %s ' % fn
os.remove(fn)
if not os.path.exists(fn):
f = file(fn, 'w')
import urllib2
txt = urllib2.urlopen(url).read()
f.write(txt)
return fn
class WinappTestCase(unittest.TestCase):
"""Test cases for Winapp"""
def run_all(self, cleaner, really_delete):
"""Test all the cleaner options"""
for (option_id, __name) in cleaner.get_options():
for cmd in cleaner.get_commands(option_id):
for result in cmd.execute(really_delete):
common.validate_result(self, result, really_delete)
def test_remote(self):
"""Test with downloaded file"""
winapps = Winapp(get_winapp2())
for cleaner in winapps.get_cleaners():
self.run_all(cleaner, False)
def test_detectos(self):
"""Test detectos function"""
# Tests are in the format (required_ver, mock, expected_return)
tests = (('5.1', '5.1', True),
('5.1', '6.0', False),
('6.0', '5.1', False),
('|5.1', '5.1', True),
('|5.1', '6.0', False),
('6.1|', '5.1', False),
('6.1|', '6.0', False),
('6.1|', '6.1', True),
('6.1|', '6.2', True),
('6.2|', '5.1', False),
('6.2|', '6.0', False),
('6.2|', '6.1', False),
('6.2|', '6.2', True))
for (s, mock, expected_return) in tests:
actual_return = detectos(s, mock)
self.assertEqual(expected_return, actual_return,
'detectos(%s, %s)==%s instead of %s' % (s, mock,
actual_return, expected_return))
def test_detect_file(self):
"""Test detect_file function"""
tests = [('%windir%\\system32\\kernel32.dll', True),
('%windir%\\system32', True),
('%ProgramFiles%\\Internet Explorer', True),
('%ProgramFiles%\\Internet Explorer\\', True),
('%windir%\\doesnotexist', False),
('%windir%\\system*', True),
('%windir%\\*ystem32', True),
('%windir%\\*ystem3*', True)]
# On 64-bit Windows, Winapp2.ini expands the %ProgramFiles% environment
# variable to also %ProgramW6432%, so test unique entries in
# %ProgramW6432%.
import struct
if not 32 == 8 * struct.calcsize('P'):
raise NotImplementedError('expecting 32-bit Python')
if os.getenv('ProgramW6432'):
dir_64 = os.listdir(os.getenv('ProgramFiles'))
dir_32 = os.listdir(os.getenv('ProgramW6432'))
dir_32_unique = set(dir_32) - set(dir_64)
if dir_32 and not dir_32_unique:
raise RuntimeError(
'Test expects objects in %ProgramW6432% not in %ProgramFiles%')
for pathname in dir_32_unique:
tests.append(('%%ProgramFiles%%\\%s' % pathname, True))
else:
print 'NOTE: skipping %ProgramW6432% tests because WoW64 not detected'
for (pathname, expected_return) in tests:
actual_return = detect_file(pathname)
msg = 'detect_file(%s) returned %s' % (pathname, actual_return)
self.assertEqual(expected_return, actual_return, msg)
def test_fake(self):
"""Test with fake file"""
ini_fn = None
keyfull = 'HKCU\\Software\\BleachBit\\DeleteThisKey'
subkey = 'Software\\BleachBit\\DeleteThisKey\\AndThisKey'
def setup_fake(f1_filename=None):
"""Setup the test environment"""
dirname = tempfile.mkdtemp(prefix='bleachbit-test-winapp')
f1 = os.path.join(dirname, f1_filename or 'deleteme.log')
file(f1, 'w').write('')
dirname2 = os.path.join(dirname, 'sub')
os.mkdir(dirname2)
f2 = os.path.join(dirname2, 'deleteme.log')
file(f2, 'w').write('')
fbak = os.path.join(dirname, 'deleteme.bak')
file(fbak, 'w').write('')
self.assertTrue(os.path.exists(f1))
self.assertTrue(os.path.exists(f2))
self.assertTrue(os.path.exists(fbak))
hkey = _winreg.CreateKey(_winreg.HKEY_CURRENT_USER, subkey)
hkey.Close()
self.assertTrue(detect_registry_key(keyfull))
self.assertTrue(detect_registry_key('HKCU\\%s' % subkey))
return (dirname, f1, f2, fbak)
def ini2cleaner(filekey, do_next=True):
ini = file(ini_fn, 'w')
ini.write('[someapp]\n')
ini.write('LangSecRef=3021\n')
ini.write(filekey)
ini.write('\n')
ini.close()
self.assertTrue(os.path.exists(ini_fn))
if do_next:
return Winapp(ini_fn).get_cleaners().next()
else:
return Winapp(ini_fn).get_cleaners()
# reuse this path to store a winapp2.ini file in
import tempfile
(ini_h, ini_fn) = tempfile.mkstemp(suffix='.ini', prefix='winapp2')
os.close(ini_h)
# a set of tests
tests = [
# single file
('FileKey1=%s|deleteme.log', None,
False, True, False, True, True, True),
# special characters for XML
('FileKey1=%s|special_chars_&-\'.txt', 'special_chars_&-\'.txt',
False, True, False, True, True, True),
# *.log
('FileKey1=%s|*.LOG', None, False, True, False, True, True, True),
# semicolon separates different file types
('FileKey1=%s|*.log;*.bak', None,
False, True, False, True, False, True),
# *.*
('FileKey1=%s|*.*', None, False, True, False, True, False, True),
# recurse *.*
('FileKey1=%s|*.*|RECURSE', None, False,
True, False, False, False, True),
# remove self *.*, this removes the directory
('FileKey1=%s|*.*|REMOVESELF', None,
False, False, False, False, False, True),
]
# Add positive detection, where the detection believes the application is present,
# to all the tests, which are also positive.
new_tests = []
for test in tests:
for detect in (
"\nDetectFile=%%APPDATA%%\\Microsoft",
"\nDetectFile1=%%APPDATA%%\\Microsoft\nDetectFile2=%%APPDATA%%\\does_not_exist",
"\nDetectFile1=%%APPDATA%%\\does_not_exist\nDetectFile2=%%APPDATA%%\\Microsoft", | "\nDetect1=HKCU\\Software\\does_not_exist\nDetect2=HKCU\\Software\\Microsoft"):
new_ini = test[0] + detect
new_test = [new_ini, ] + [x for x in test[1:]]
new_tests.append(new_test)
positive_tests = tests + new_tests
# execute positive tests
for test in positive_tests:
print 'positive test: ', test
(dirname, f1, f2, fbak) = setup_fake(test[1])
cleaner = ini2cleaner(test[0] % dirname)
self | "\nDetect=HKCU\\Software\\Microsoft",
"\nDetect1=HKCU\\Software\\Microsoft\nDetect2=HKCU\\Software\\does_not_exist", | random_line_split |
builder.py | import glob
import fnmatch
import itertools
import logging
import os
import re
import six
import sys
import yaml
from .dockerfile import Dockerfile
from .image import ImageBuilder
from .config import Config
class Builder(object) :
def __init__(self, config=None, **kwds) :
self.logger = logging.getLogger(type(self).__name__)
self.kwds = kwds
self.images = {}
if config is None:
config = Config()
config.update(dict(
images= [
{
'path': 'docker/*',
}
],
))
self.patterns = []
for image in config['images']:
# When path is provided and globbed, Dockerfile refers to its location
# When path is provided but not globbed, Dockerfile refers to the current path
# When Dockerfile is provided and globbed, path must not be globbed, both
            # refer to the current directory
path = image.get('path', None)
dockerfile = image.get('Dockerfile', 'Dockerfile')
name = image.get('name', None)
if path is None:
path = '.'
if '*' in path:
if '*' in dockerfile:
                    raise ValueError('Ambiguity in your configuration for %r, globbing can '
                                     'be done either in "Dockerfile" or "path" key but not both at the '
                                     'same time' % image)
dockerfile = os.path.join(path, dockerfile)
path = re.compile(re.sub('^.*/([^*]*)$', r'(?P<path>.*)/\1', dockerfile))
if name is None:
name = dockerfile
if '*' in name:
start = re.sub('^([^*]*/|).*', r'^\1(?P<name>.*)', dockerfile)
                end = re.sub(r'^.*\*(?:|[^/]*)(/.*)$', r'\1$', dockerfile)
name = re.compile(start + end)
pattern = {
'name': name,
'path': path,
'Dockerfile': dockerfile,
}
self.patterns.append(pattern)
self.config = config
def get_matching_pattern(self, pattern, name, path):
pattern = pattern[name]
if isinstance(pattern, six.string_types):
return pattern
else:
match = pattern.match(path)
if match:
return match.group(name)
return None
def getImage(self, image_name):
try:
return self.images[image_name]
except KeyError:
self.logger.debug('image builder cache miss, try to find it')
for img_cfg in self.patterns:
for path in glob.glob(img_cfg['Dockerfile']):
found_image_name = self.get_matching_pattern(img_cfg, 'name', path)
context_path = self.get_matching_pattern(img_cfg, 'path', path)
if found_image_name == image_name:
image = ImageBuilder(image_name,
contextPath=context_path,
dockerfile=path,
tagResolver=self,
**self.kwds
)
self.images[image_name] = image
return image
raise KeyError("Cannot find image %s" % image_name)
def imageTag(self, imgName) :
imgBuilder = self.images.get(imgName, None)
if imgBuilder :
return imgBuilder.buildTag()
return None
def build(self, client, names=None, child_images=[]) :
if isinstance(names, six.string_types):
names = [names]
def iter_buildable_deps(name):
"""
            instantiates a builder for each image dependency
            does nothing when the image cannot be built
"""
for dep_name, _ in self.getImage(name).imageDeps():
try:
self.getImage(dep_name)
yield dep_name
except KeyError:
continue
for name in names:
if name in child_images:
raise RuntimeError("dependency loop detected, %s some how depends on itself %s" %
(name, ' -> '.join(child_images + [name]))
)
for dep_name in iter_buildable_deps(name):
self.build(client, dep_name, child_images=child_images+[name])
for name in names:
self.getImage(name).build(client)
def tag(self, client, tags, images, **kwds):
if tags is None:
tags = ['latest']
for image in images:
self.getImage(image).tag(client, tags, **kwds)
COMMAND_NAME='build'
def add_options(parser):
from . import addCommonOptions, commonSetUp
from .dockerfile import addDockerfileOptions
from .image import addImageOptions
try:
add = parser.add_argument
except AttributeError:
add = parser.add_option
add("image", nargs="*",
help="images to build")
add("-t", "--tag", dest="tag", default=None, action='append',
help="tag(s) to be applied to the resulting image in case of success")
add("--registry", dest="registry", default=[], action='append',
help="Registry on which the image should tagged (<registry>/<name>:<tag>)")
addCommonOptions(parser)
addDockerfileOptions(parser)
addImageOptions(parser)
def main(argv=sys.argv, args=None) :
|
if __name__ == "__main__" :
main()
| """
Builds a list of images
"""
from . import commonSetUp
if not args:
import argparse
parser = argparse.ArgumentParser()
add_options(parser)
args = parser.parse_args(argv[1:])
import sys, os
import yaml
from docker import Client
from . import commonSetUp
commonSetUp(args)
builder = Builder()
builder.build(Client.from_env(), args.image)
builder.tag(Client.from_env(), args.tag, args.image, registries=args.registry) | identifier_body |
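The comment block in Builder.__init__ distinguishes three globbing arrangements for path, Dockerfile, and name. The sketch below is a hypothetical configuration exercising all three; the module paths and every pattern string are invented for illustration:

```python
# Hypothetical Builder configuration; import paths and patterns invented.
from builder import Builder  # import path assumed
from config import Config    # import path assumed

config = Config()
config.update({
    "images": [
        {"path": "docker/*"},                    # globbed path, default Dockerfile
        {"Dockerfile": "services/*/Dockerfile",  # globbed Dockerfile
         "name": "acme/*"},                      # name pattern derived per image
        {"path": ".",                            # nothing globbed:
         "Dockerfile": "Dockerfile",             # a single image rooted at
         "name": "acme/base"},                   # the current directory
    ],
})

builder = Builder(config=config)
```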
builder.py | import glob
import fnmatch
import itertools
import logging
import os
import re
import six
import sys
import yaml
from .dockerfile import Dockerfile
from .image import ImageBuilder
from .config import Config
class Builder(object) :
def __init__(self, config=None, **kwds) :
self.logger = logging.getLogger(type(self).__name__)
self.kwds = kwds
self.images = {}
if config is None:
config = Config()
config.update(dict(
images= [
{
'path': 'docker/*',
}
],
))
self.patterns = []
for image in config['images']:
# When path is provided and globbed, Dockerfile refers to its location
# When path is provided but not globbed, Dockerfile refers to the current path
# When Dockerfile is provided and globbed, path must not be globbed, both
            # refer to the current directory
path = image.get('path', None)
dockerfile = image.get('Dockerfile', 'Dockerfile')
name = image.get('name', None)
if path is None:
path = '.'
if '*' in path:
if '*' in dockerfile:
                    raise ValueError('Ambiguity in your configuration for %r, globbing can '
                                     'be done either in "Dockerfile" or "path" key but not both at the '
                                     'same time' % image)
dockerfile = os.path.join(path, dockerfile)
path = re.compile(re.sub('^.*/([^*]*)$', r'(?P<path>.*)/\1', dockerfile))
if name is None:
name = dockerfile |
pattern = {
'name': name,
'path': path,
'Dockerfile': dockerfile,
}
self.patterns.append(pattern)
self.config = config
def get_matching_pattern(self, pattern, name, path):
pattern = pattern[name]
if isinstance(pattern, six.string_types):
return pattern
else:
match = pattern.match(path)
if match:
return match.group(name)
return None
def getImage(self, image_name):
try:
return self.images[image_name]
except KeyError:
self.logger.debug('image builder cache miss, try to find it')
for img_cfg in self.patterns:
for path in glob.glob(img_cfg['Dockerfile']):
found_image_name = self.get_matching_pattern(img_cfg, 'name', path)
context_path = self.get_matching_pattern(img_cfg, 'path', path)
if found_image_name == image_name:
image = ImageBuilder(image_name,
contextPath=context_path,
dockerfile=path,
tagResolver=self,
**self.kwds
)
self.images[image_name] = image
return image
raise KeyError("Cannot find image %s" % image_name)
def imageTag(self, imgName) :
imgBuilder = self.images.get(imgName, None)
if imgBuilder :
return imgBuilder.buildTag()
return None
def build(self, client, names=None, child_images=[]) :
if isinstance(names, six.string_types):
names = [names]
def iter_buildable_deps(name):
"""
            instantiates a builder for each image dependency
            does nothing when the image cannot be built
"""
for dep_name, _ in self.getImage(name).imageDeps():
try:
self.getImage(dep_name)
yield dep_name
except KeyError:
continue
for name in names:
if name in child_images:
raise RuntimeError("dependency loop detected, %s some how depends on itself %s" %
(name, ' -> '.join(child_images + [name]))
)
for dep_name in iter_buildable_deps(name):
self.build(client, dep_name, child_images=child_images+[name])
for name in names:
self.getImage(name).build(client)
def tag(self, client, tags, images, **kwds):
if tags is None:
tags = ['latest']
for image in images:
self.getImage(image).tag(client, tags, **kwds)
COMMAND_NAME='build'
def add_options(parser):
from . import addCommonOptions, commonSetUp
from .dockerfile import addDockerfileOptions
from .image import addImageOptions
try:
add = parser.add_argument
except AttributeError:
add = parser.add_option
add("image", nargs="*",
help="images to build")
add("-t", "--tag", dest="tag", default=None, action='append',
help="tag(s) to be applied to the resulting image in case of success")
add("--registry", dest="registry", default=[], action='append',
help="Registry on which the image should tagged (<registry>/<name>:<tag>)")
addCommonOptions(parser)
addDockerfileOptions(parser)
addImageOptions(parser)
def main(argv=sys.argv, args=None) :
"""
Builds a list of images
"""
from . import commonSetUp
if not args:
import argparse
parser = argparse.ArgumentParser()
add_options(parser)
args = parser.parse_args(argv[1:])
import sys, os
import yaml
from docker import Client
from . import commonSetUp
commonSetUp(args)
builder = Builder()
builder.build(Client.from_env(), args.image)
builder.tag(Client.from_env(), args.tag, args.image, registries=args.registry)
if __name__ == "__main__" :
main() | if '*' in name:
start = re.sub('^([^*]*/|).*', r'^\1(?P<name>.*)', dockerfile)
                end = re.sub(r'^.*\*(?:|[^/]*)(/.*)$', r'\1$', dockerfile)
name = re.compile(start + end) | random_line_split |
builder.py | import glob
import fnmatch
import itertools
import logging
import os
import re
import six
import sys
import yaml
from .dockerfile import Dockerfile
from .image import ImageBuilder
from .config import Config
class Builder(object) :
def __init__(self, config=None, **kwds) :
self.logger = logging.getLogger(type(self).__name__)
self.kwds = kwds
self.images = {}
if config is None:
config = Config()
config.update(dict(
images= [
{
'path': 'docker/*',
}
],
))
self.patterns = []
for image in config['images']:
# When path is provided and globbed, Dockerfile refers to its location
# When path is provided but not globbed, Dockerfile refers to the current path
# When Dockerfile is provided and globbed, path must not be globbed, both
            # refer to the current directory
path = image.get('path', None)
dockerfile = image.get('Dockerfile', 'Dockerfile')
name = image.get('name', None)
if path is None:
path = '.'
if '*' in path:
if '*' in dockerfile:
                    raise ValueError('Ambiguity in your configuration for %r, globbing can '
                                     'be done either in "Dockerfile" or "path" key but not both at the '
                                     'same time' % image)
dockerfile = os.path.join(path, dockerfile)
path = re.compile(re.sub('^.*/([^*]*)$', r'(?P<path>.*)/\1', dockerfile))
if name is None:
name = dockerfile
if '*' in name:
start = re.sub('^([^*]*/|).*', r'^\1(?P<name>.*)', dockerfile)
                end = re.sub(r'^.*\*(?:|[^/]*)(/.*)$', r'\1$', dockerfile)
name = re.compile(start + end)
pattern = {
'name': name,
'path': path,
'Dockerfile': dockerfile,
}
self.patterns.append(pattern)
self.config = config
def get_matching_pattern(self, pattern, name, path):
pattern = pattern[name]
if isinstance(pattern, six.string_types):
return pattern
else:
match = pattern.match(path)
if match:
return match.group(name)
return None
def getImage(self, image_name):
try:
return self.images[image_name]
except KeyError:
self.logger.debug('image builder cache miss, try to find it')
for img_cfg in self.patterns:
for path in glob.glob(img_cfg['Dockerfile']):
found_image_name = self.get_matching_pattern(img_cfg, 'name', path)
context_path = self.get_matching_pattern(img_cfg, 'path', path)
if found_image_name == image_name:
image = ImageBuilder(image_name,
contextPath=context_path,
dockerfile=path,
tagResolver=self,
**self.kwds
)
self.images[image_name] = image
return image
raise KeyError("Cannot find image %s" % image_name)
def imageTag(self, imgName) :
imgBuilder = self.images.get(imgName, None)
if imgBuilder :
return imgBuilder.buildTag()
return None
def build(self, client, names=None, child_images=[]) :
if isinstance(names, six.string_types):
names = [names]
def iter_buildable_deps(name):
"""
            instantiates a builder for each image dependency
            does nothing when the image cannot be built
"""
for dep_name, _ in self.getImage(name).imageDeps():
try:
self.getImage(dep_name)
yield dep_name
except KeyError:
continue
for name in names:
if name in child_images:
raise RuntimeError("dependency loop detected, %s some how depends on itself %s" %
(name, ' -> '.join(child_images + [name]))
)
for dep_name in iter_buildable_deps(name):
self.build(client, dep_name, child_images=child_images+[name])
for name in names:
self.getImage(name).build(client)
def tag(self, client, tags, images, **kwds):
if tags is None:
tags = ['latest']
for image in images:
self.getImage(image).tag(client, tags, **kwds)
COMMAND_NAME='build'
def | (parser):
from . import addCommonOptions, commonSetUp
from .dockerfile import addDockerfileOptions
from .image import addImageOptions
try:
add = parser.add_argument
except AttributeError:
add = parser.add_option
add("image", nargs="*",
help="images to build")
add("-t", "--tag", dest="tag", default=None, action='append',
help="tag(s) to be applied to the resulting image in case of success")
add("--registry", dest="registry", default=[], action='append',
help="Registry on which the image should tagged (<registry>/<name>:<tag>)")
addCommonOptions(parser)
addDockerfileOptions(parser)
addImageOptions(parser)
def main(argv=sys.argv, args=None) :
"""
Builds a list of images
"""
from . import commonSetUp
if not args:
import argparse
parser = argparse.ArgumentParser()
add_options(parser)
args = parser.parse_args(argv[1:])
import sys, os
import yaml
from docker import Client
from . import commonSetUp
commonSetUp(args)
builder = Builder()
builder.build(Client.from_env(), args.image)
builder.tag(Client.from_env(), args.tag, args.image, registries=args.registry)
if __name__ == "__main__" :
main()
| add_options | identifier_name |
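Because main() parses argv[1:] itself, the build entry point can also be driven programmatically. A sketch with invented image and tag names; a reachable Docker daemon is assumed:

```python
# Sketch: driving the build command without a shell. Image and tag names
# are invented; Client.from_env() needs a reachable Docker daemon.
from builder import main  # import path assumed

main(argv=["dockerbuilder", "myimage", "-t", "v1", "-t", "latest"])
```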
builder.py | import glob
import fnmatch
import itertools
import logging
import os
import re
import six
import sys
import yaml
from .dockerfile import Dockerfile
from .image import ImageBuilder
from .config import Config
class Builder(object) :
def __init__(self, config=None, **kwds) :
self.logger = logging.getLogger(type(self).__name__)
self.kwds = kwds
self.images = {}
if config is None:
config = Config()
config.update(dict(
images= [
{
'path': 'docker/*',
}
],
))
self.patterns = []
for image in config['images']:
# When path is provided and globbed, Dockerfile refers to its location
# When path is provided but not globbed, Dockerfile refers to the current path
# When Dockerfile is provided and globbed, path must not be globbed, both
            # refer to the current directory
path = image.get('path', None)
dockerfile = image.get('Dockerfile', 'Dockerfile')
name = image.get('name', None)
if path is None:
path = '.'
if '*' in path:
|
if name is None:
name = dockerfile
if '*' in name:
start = re.sub('^([^*]*/|).*', r'^\1(?P<name>.*)', dockerfile)
                end = re.sub(r'^.*\*(?:|[^/]*)(/.*)$', r'\1$', dockerfile)
name = re.compile(start + end)
pattern = {
'name': name,
'path': path,
'Dockerfile': dockerfile,
}
self.patterns.append(pattern)
self.config = config
def get_matching_pattern(self, pattern, name, path):
pattern = pattern[name]
if isinstance(pattern, six.string_types):
return pattern
else:
match = pattern.match(path)
if match:
return match.group(name)
return None
def getImage(self, image_name):
try:
return self.images[image_name]
except KeyError:
self.logger.debug('image builder cache miss, try to find it')
for img_cfg in self.patterns:
for path in glob.glob(img_cfg['Dockerfile']):
found_image_name = self.get_matching_pattern(img_cfg, 'name', path)
context_path = self.get_matching_pattern(img_cfg, 'path', path)
if found_image_name == image_name:
image = ImageBuilder(image_name,
contextPath=context_path,
dockerfile=path,
tagResolver=self,
**self.kwds
)
self.images[image_name] = image
return image
raise KeyError("Cannot find image %s" % image_name)
def imageTag(self, imgName) :
imgBuilder = self.images.get(imgName, None)
if imgBuilder :
return imgBuilder.buildTag()
return None
def build(self, client, names=None, child_images=[]) :
if isinstance(names, six.string_types):
names = [names]
def iter_buildable_deps(name):
"""
            instantiates a builder for each image dependency
            does nothing when the image cannot be built
"""
for dep_name, _ in self.getImage(name).imageDeps():
try:
self.getImage(dep_name)
yield dep_name
except KeyError:
continue
for name in names:
if name in child_images:
raise RuntimeError("dependency loop detected, %s some how depends on itself %s" %
(name, ' -> '.join(child_images + [name]))
)
for dep_name in iter_buildable_deps(name):
self.build(client, dep_name, child_images=child_images+[name])
for name in names:
self.getImage(name).build(client)
def tag(self, client, tags, images, **kwds):
if tags is None:
tags = ['latest']
for image in images:
self.getImage(image).tag(client, tags, **kwds)
COMMAND_NAME='build'
def add_options(parser):
from . import addCommonOptions, commonSetUp
from .dockerfile import addDockerfileOptions
from .image import addImageOptions
try:
add = parser.add_argument
except AttributeError:
add = parser.add_option
add("image", nargs="*",
help="images to build")
add("-t", "--tag", dest="tag", default=None, action='append',
help="tag(s) to be applied to the resulting image in case of success")
add("--registry", dest="registry", default=[], action='append',
help="Registry on which the image should tagged (<registry>/<name>:<tag>)")
addCommonOptions(parser)
addDockerfileOptions(parser)
addImageOptions(parser)
def main(argv=sys.argv, args=None) :
"""
Builds a list of images
"""
from . import commonSetUp
if not args:
import argparse
parser = argparse.ArgumentParser()
add_options(parser)
args = parser.parse_args(argv[1:])
import sys, os
import yaml
from docker import Client
from . import commonSetUp
commonSetUp(args)
builder = Builder()
builder.build(Client.from_env(), args.image)
builder.tag(Client.from_env(), args.tag, args.image, registries=args.registry)
if __name__ == "__main__" :
main()
| if '*' in dockerfile:
raise ValueError('Ambiguity in your configuration for %r: globbing can '
'be done either in the "Dockerfile" or the "path" key, but not both at the '
'same time' % image)
dockerfile = os.path.join(path, dockerfile)
path = re.compile(re.sub('^.*/([^*]*)$', r'(?P<path>.*)/\1', dockerfile)) | conditional_block |
cli.py | import datetime
import logging
import textwrap
import time
import click
import hatarake
import hatarake.net as requests
from hatarake.config import Config
logger = logging.getLogger(__name__)
@click.group()
@click.option('-v', '--verbosity', count=True)
def main(verbosity):
|
@main.command()
@click.option('--start', help='start time')
@click.argument('duration', type=int)
@click.argument('title')
def submit(start, duration, title):
'''Submit a pomodoro to the server'''
config = Config(hatarake.CONFIG_PATH)
api = config.get('server', 'api')
token = config.get('server', 'token')
response = requests.post(
api,
headers={
'Authorization': 'Token %s' % token,
},
data={
'created': start,
'duration': duration,
'title': title,
}
)
response.raise_for_status()
click.echo(response.text)
@main.command()
@click.option('--duration', type=int, default=2)
@click.option('--api_server', envvar='HATARAKE_API_SERVER')
@click.option('--api_token', envvar='HATARAKE_API_TOKEN')
@click.argument('title')
def append(duration, title, api_server=None, api_token=None):
'''Append time to a pomodoro'''
config = Config(hatarake.CONFIG_PATH)
api = api_server if api_server else config.get('server', 'api')
token = api_token if api_token else config.get('server', 'token')
end = datetime.datetime.utcnow().replace(microsecond=0)
start = end - datetime.timedelta(minutes=duration)
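# Backdate the start so the entry spans the last `duration` minutes (UTC).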
# Split the tags out of the title
# For now, we remove the tags from the final title to make things neater
# but in the future, may want to leave the hash tag in the full title
tags = {tag.strip("#") for tag in title.split() if tag.startswith("#")}
title = ' '.join(tag for tag in title.split() if not tag.startswith('#'))
response = requests.post(
api + '/append',
headers={
'Authorization': 'Token %s' % token,
},
data={
'start': start.isoformat(),
'end': end.isoformat(),
'category': tags,
'title': title,
}
)
response.raise_for_status()
click.echo(response.text)
@main.command()
@click.option('--api_server', envvar='HATARAKE_API_SERVER')
@click.option('--api_token', envvar='HATARAKE_API_TOKEN')
@click.argument('label')
@click.argument('duration', type=int)
def countdown(api_server, api_token, label, duration):
'''Submit a new countdown'''
config = Config(hatarake.CONFIG_PATH)
api = api_server if api_server else config.get('countdown', 'api')
token = api_token if api_token else config.get('countdown', 'token')
created = datetime.datetime.now() + datetime.timedelta(minutes=duration)
response = requests.put(
api,
headers={
'Authorization': 'Token %s' % token,
},
data={
'created': created.replace(microsecond=0).isoformat(),
'label': label,
}
)
response.raise_for_status()
click.echo(response.text)
@main.command()
@click.argument('key')
@click.argument('value')
def stat(key, value):
'''Submit stat data to server'''
config = Config(hatarake.CONFIG_PATH)
response = requests.post(
config.get('stat', 'api'),
headers={
'Authorization': 'Token %s' % config.get('stat', 'token'),
},
data={
'key': key,
'value': value,
}
)
logger.info('POSTing to %s %s', response.request.url, response.request.body)
response.raise_for_status()
click.echo(response.text)
@main.command()
@click.argument('name', default='heartbeat')
def heartbeat(name):
config = Config(hatarake.CONFIG_PATH)
url = config.get('prometheus', 'pushgateway')
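# The pushgateway expects the Prometheus text exposition format:
# optional '# TYPE' / '# HELP' lines followed by '<metric> <value>'.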
payload = textwrap.dedent('''
# TYPE {name} gauge
# HELP {name} Last heartbeat based on unixtimestamp
{name} {time}
''').format(name=name, time=int(time.time())).lstrip()
response = requests.post(url, data=payload)
response.raise_for_status()
click.echo(response.text)
| logging.basicConfig(level=logging.WARNING - verbosity * 10)
logging.getLogger('gntp').setLevel(logging.ERROR - verbosity * 10) | identifier_body |
cli.py | import datetime
import logging
import textwrap
import time
import click
import hatarake
import hatarake.net as requests
from hatarake.config import Config
logger = logging.getLogger(__name__)
@click.group()
@click.option('-v', '--verbosity', count=True)
def main(verbosity):
logging.basicConfig(level=logging.WARNING - verbosity * 10)
logging.getLogger('gntp').setLevel(logging.ERROR - verbosity * 10)
@main.command()
@click.option('--start', help='start time')
@click.argument('duration', type=int)
@click.argument('title')
def submit(start, duration, title):
'''Submit a pomodoro to the server'''
config = Config(hatarake.CONFIG_PATH)
api = config.get('server', 'api')
token = config.get('server', 'token')
response = requests.post(
api,
headers={
'Authorization': 'Token %s' % token,
},
data={
'created': start,
'duration': duration,
'title': title,
}
)
response.raise_for_status()
click.echo(response.text)
@main.command()
@click.option('--duration', type=int, default=2)
@click.option('--api_server', envvar='HATARAKE_API_SERVER')
@click.option('--api_token', envvar='HATARAKE_API_TOKEN')
@click.argument('title')
def append(duration, title, api_server=None, api_token=None):
'''Append time to a pomodoro'''
config = Config(hatarake.CONFIG_PATH)
api = api_server if api_server else config.get('server', 'api')
token = api_token if api_token else config.get('server', 'token')
end = datetime.datetime.utcnow().replace(microsecond=0)
start = end - datetime.timedelta(minutes=duration)
# Split the tags out of the title
# For now, we remove the tags from the final title to make things neater
# but in the future, may want to leave the hash tag in the full title
tags = {tag.strip("#") for tag in title.split() if tag.startswith("#")}
title = ' '.join(tag for tag in title.split() if not tag.startswith('#'))
response = requests.post(
api + '/append',
headers={
'Authorization': 'Token %s' % token,
},
data={
'start': start.isoformat(),
'end': end.isoformat(),
'category': tags,
'title': title,
}
)
response.raise_for_status()
click.echo(response.text)
@main.command()
@click.option('--api_server', envvar='HATARAKE_API_SERVER')
@click.option('--api_token', envvar='HATARAKE_API_TOKEN')
@click.argument('label')
@click.argument('duration', type=int)
def countdown(api_server, api_token, label, duration):
'''Submit a new countdown'''
config = Config(hatarake.CONFIG_PATH)
api = api_server if api_server else config.get('countdown', 'api')
token = api_token if api_token else config.get('countdown', 'token')
created = datetime.datetime.now() + datetime.timedelta(minutes=duration)
response = requests.put(
api,
headers={
'Authorization': 'Token %s' % token,
},
data={
'created': created.replace(microsecond=0).isoformat(),
'label': label,
}
)
response.raise_for_status()
click.echo(response.text)
@main.command()
@click.argument('key')
@click.argument('value')
def stat(key, value):
'''Submit stat data to server'''
config = Config(hatarake.CONFIG_PATH)
response = requests.post(
config.get('stat', 'api'),
headers={
'Authorization': 'Token %s' % config.get('stat', 'token'),
},
data={
'key': key,
'value': value,
}
)
logger.info('POSTing to %s %s', response.request.url, response.request.body)
response.raise_for_status() |
@main.command()
@click.argument('name', default='heartbeat')
def heartbeat(name):
config = Config(hatarake.CONFIG_PATH)
url = config.get('prometheus', 'pushgateway')
payload = textwrap.dedent('''
# TYPE {name} gauge
# HELP {name} Last heartbeat based on unixtimestamp
{name} {time}
''').format(name=name, time=int(time.time())).lstrip()
response = requests.post(url, data=payload)
response.raise_for_status()
click.echo(response.text) | click.echo(response.text)
| random_line_split |
cli.py | import datetime
import logging
import textwrap
import time
import click
import hatarake
import hatarake.net as requests
from hatarake.config import Config
logger = logging.getLogger(__name__)
@click.group()
@click.option('-v', '--verbosity', count=True)
def main(verbosity):
logging.basicConfig(level=logging.WARNING - verbosity * 10)
logging.getLogger('gntp').setLevel(logging.ERROR - verbosity * 10)
@main.command()
@click.option('--start', help='start time')
@click.argument('duration', type=int)
@click.argument('title')
def | (start, duration, title):
'''Submit a pomodoro to the server'''
config = Config(hatarake.CONFIG_PATH)
api = config.get('server', 'api')
token = config.get('server', 'token')
response = requests.post(
api,
headers={
'Authorization': 'Token %s' % token,
},
data={
'created': start,
'duration': duration,
'title': title,
}
)
response.raise_for_status()
click.echo(response.text)
@main.command()
@click.option('--duration', type=int, default=2)
@click.option('--api_server', envvar='HATARAKE_API_SERVER')
@click.option('--api_token', envvar='HATARAKE_API_TOKEN')
@click.argument('title')
def append(duration, title, api_server=None, api_token=None):
'''Append time to a pomodoro'''
config = Config(hatarake.CONFIG_PATH)
api = api_server if api_server else config.get('server', 'api')
token = api_token if api_token else config.get('server', 'token')
end = datetime.datetime.utcnow().replace(microsecond=0)
start = end - datetime.timedelta(minutes=duration)
# Split the tags out of the title
# For now, we remove the tags from the final title to make things neater
# but in the future, may want to leave the hash tag in the full title
tags = {tag.strip("#") for tag in title.split() if tag.startswith("#")}
title = ' '.join(tag for tag in title.split() if not tag.startswith('#'))
response = requests.post(
api + '/append',
headers={
'Authorization': 'Token %s' % token,
},
data={
'start': start.isoformat(),
'end': end.isoformat(),
'category': tags,
'title': title,
}
)
response.raise_for_status()
click.echo(response.text)
@main.command()
@click.option('--api_server', envvar='HATARAKE_API_SERVER')
@click.option('--api_token', envvar='HATARAKE_API_TOKEN')
@click.argument('label')
@click.argument('duration', type=int)
def countdown(api_server, api_token, label, duration):
'''Submit a new countdown'''
config = Config(hatarake.CONFIG_PATH)
api = api_server if api_server else config.get('countdown', 'api')
token = api_token if api_token else config.get('countdown', 'token')
created = datetime.datetime.now() + datetime.timedelta(minutes=duration)
response = requests.put(
api,
headers={
'Authorization': 'Token %s' % token,
},
data={
'created': created.replace(microsecond=0).isoformat(),
'label': label,
}
)
response.raise_for_status()
click.echo(response.text)
@main.command()
@click.argument('key')
@click.argument('value')
def stat(key, value):
'''Submit stat data to server'''
config = Config(hatarake.CONFIG_PATH)
response = requests.post(
config.get('stat', 'api'),
headers={
'Authorization': 'Token %s' % config.get('stat', 'token'),
},
data={
'key': key,
'value': value,
}
)
logger.info('POSTing to %s %s', response.request.url, response.request.body)
response.raise_for_status()
click.echo(response.text)
@main.command()
@click.argument('name', default='heartbeat')
def heartbeat(name):
config = Config(hatarake.CONFIG_PATH)
url = config.get('prometheus', 'pushgateway')
payload = textwrap.dedent('''
# TYPE {name} gauge
# HELP {name} Last heartbeat based on unixtimestamp
{name} {time}
''').format(name=name, time=int(time.time())).lstrip()
response = requests.post(url, data=payload)
response.raise_for_status()
click.echo(response.text)
| submit | identifier_name |
checkNameAvailabilityInput.js | /*
* Copyright (c) Microsoft Corporation. All rights reserved.
* Licensed under the MIT License. See License.txt in the project root for
* license information.
*
* Code generated by Microsoft (R) AutoRest Code Generator.
* Changes may cause incorrect behavior and will be lost if the code is
* regenerated.
*/
'use strict';
/**
* Input of CheckNameAvailability API.
*
*/
class CheckNameAvailabilityInput {
/**
* Create a CheckNameAvailabilityInput.
* @member {string} name The resource name to validate.
* @member {string} type The type of the resource whose name is to be
* validated. Possible values include: 'Microsoft.Network/frontDoors',
* 'Microsoft.Network/frontDoors/frontendEndpoints'
*/
| () {
}
/**
* Defines the metadata of CheckNameAvailabilityInput
*
* @returns {object} metadata of CheckNameAvailabilityInput
*
*/
mapper() {
return {
required: false,
serializedName: 'CheckNameAvailabilityInput',
type: {
name: 'Composite',
className: 'CheckNameAvailabilityInput',
modelProperties: {
name: {
required: true,
serializedName: 'name',
type: {
name: 'String'
}
},
type: {
required: true,
serializedName: 'type',
type: {
name: 'Enum',
allowedValues: [ 'Microsoft.Network/frontDoors', 'Microsoft.Network/frontDoors/frontendEndpoints' ]
}
}
}
}
};
}
}
module.exports = CheckNameAvailabilityInput;
| constructor | identifier_name |
fa.js | OC.L10N.register(
"templateeditor",
{
"Could not load template" : "امکان بارگذاری قالب وجود ندارد",
"Saved" : "ذخیره شد", | "Sharing email - public link shares (plain text fallback)" : "ایمیل اشتراک گذاری-لینک عمومی اشتراک گذاری(plain text fallback)",
"Sharing email (HTML)" : "اشتراکگذاری ایمیل (HTML)",
"Sharing email (plain text fallback)" : "ایمیل اشتراک گذاری (plain text fallback)",
"Lost password mail" : "ایمیل فراموش کردن رمز عبور",
"New user email (HTML)" : "ایمیل کاربری جدید (HTML)",
"New user email (plain text fallback)" : "ایمیل کاربر جدید (plain text fallback)",
"Activity notification mail" : "ایمیل هشدار فعالیت",
"Mail Templates" : "قالبهای ایمیل",
"Theme" : "تم",
"Template" : "قالب",
"Please choose a template" : "لطفا یک قالب انتخاب کنید",
"Save" : "ذخیره"
},
"nplurals=1; plural=0;"); | "Reset" : "تنظیم مجدد",
"An error occurred" : "یک خطا رخ داده است",
"Sharing email - public link shares (HTML)" : "ایمیل اشتراک گذاری-لینک عمومی اشتراک گذاری(HTML)", | random_line_split |
import_panel_example.py | # This file is part of the pyqualtrics package.
# For copyright and licensing information about this package, see the
# NOTICE.txt and LICENSE.txt files in its top-level directory; they are
# available at https://github.com/Baguage/pyqualtrics
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from pyqualtrics import Qualtrics
import os
user = None # os.environ["QUALTRICS_USER"]
token = None # os.environ["QUALTRICS_TOKEN"]
if __name__ == "__main__":
print "This is an example of panel import"
print "Make sure you have set QUALTRICS_USER, QUALTRICS_TOKEN and QUALTRICS_LIBRARY_ID enviroment variable"
# Note is user and token are None, QUALTRICS_USER and QUALTRICS_TOKEN environment variables will be used instead
qualtrics = Qualtrics(user, token)
library_id = os.environ["QUALTRICS_LIBRARY_ID"]
panel_id = qualtrics.importJsonPanel(
library_id,
Name="New Panel Created by PyQualtrics library (DELETE ME)",
panel=[
{"Email": "[email protected]", "FirstName": "PyQualtrics", "LastName": "Library", "SubjectID": "123"},
{"Email": "[email protected]", "FirstName": "PyQualtrics2", "LastName": "Library2"}
],
headers=["Email", "FirstName", "LastName", "ExternalRef", "SubjectID"],
AllED=1)
if qualtrics.last_error_message:
print "Error creating panel: " + qualtrics.last_error_message
else:
| print "Panel created successfully, PanelID: " + panel_id | conditional_block |
|
import_panel_example.py | # This file is part of the pyqualtrics package.
# For copyright and licensing information about this package, see the
# NOTICE.txt and LICENSE.txt files in its top-level directory; they are
# available at https://github.com/Baguage/pyqualtrics
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from pyqualtrics import Qualtrics
import os
user = None # os.environ["QUALTRICS_USER"]
token = None # os.environ["QUALTRICS_TOKEN"]
if __name__ == "__main__":
print "This is an example of panel import"
print "Make sure you have set QUALTRICS_USER, QUALTRICS_TOKEN and QUALTRICS_LIBRARY_ID enviroment variable"
# Note is user and token are None, QUALTRICS_USER and QUALTRICS_TOKEN environment variables will be used instead
qualtrics = Qualtrics(user, token)
library_id = os.environ["QUALTRICS_LIBRARY_ID"]
panel_id = qualtrics.importJsonPanel(
library_id,
Name="New Panel Created by PyQualtrics library (DELETE ME)",
panel=[
{"Email": "[email protected]", "FirstName": "PyQualtrics", "LastName": "Library", "SubjectID": "123"},
{"Email": "[email protected]", "FirstName": "PyQualtrics2", "LastName": "Library2"} | AllED=1)
if qualtrics.last_error_message:
print "Error creating panel: " + qualtrics.last_error_message
else:
print "Panel created successfully, PanelID: " + panel_id | ],
headers=["Email", "FirstName", "LastName", "ExternalRef", "SubjectID"], | random_line_split |
anyid.rs | /*
* Copyright (c) Meta Platforms, Inc. and affiliates.
*
* This software may be used and distributed according to the terms of the
* GNU General Public License version 2.
*/
use std::num::NonZeroU64;
#[cfg(any(test, feature = "for-tests"))]
use quickcheck_arbitrary_derive::Arbitrary;
use serde_derive::Deserialize;
use serde_derive::Serialize;
use type_macros::auto_wire;
use types::HgId;
use crate::AnyFileContentId;
use crate::IndexableId;
use crate::UploadToken;
blake2_hash!(BonsaiChangesetId);
#[auto_wire]
#[derive(Clone, Debug, PartialEq, Eq, Serialize, Deserialize, Hash)]
#[cfg_attr(any(test, feature = "for-tests"), derive(Arbitrary))]
pub enum AnyId {
#[id(1)]
AnyFileContentId(AnyFileContentId),
#[id(2)]
HgFilenodeId(HgId),
#[id(3)]
HgTreeId(HgId),
#[id(4)]
HgChangesetId(HgId),
#[id(5)]
BonsaiChangesetId(BonsaiChangesetId),
}
impl Default for AnyId {
fn | () -> Self {
Self::AnyFileContentId(AnyFileContentId::default())
}
}
#[auto_wire]
#[derive(Clone, Default, Debug, Serialize, Deserialize, Eq, PartialEq)]
#[cfg_attr(any(test, feature = "for-tests"), derive(Arbitrary))]
pub struct LookupRequest {
#[id(1)]
pub id: AnyId,
#[id(2)]
pub bubble_id: Option<NonZeroU64>,
/// If present and the original id is not, lookup will also look into this
/// bubble, and if the id is present, copy it to the requested bubble.
#[id(3)]
pub copy_from_bubble_id: Option<NonZeroU64>,
}
#[auto_wire]
#[derive(Clone, Serialize, Deserialize, Debug, Eq, PartialEq)]
#[cfg_attr(any(test, feature = "for-tests"), derive(Arbitrary))]
pub enum LookupResult {
/// Id was present, upload token for it is returned
#[id(1)]
Present(UploadToken),
/// Id was not present, only its id is returned
#[id(2)]
NotPresent(IndexableId),
// Possible to add an Error variant in the future if we don't want to
// swallow the errors
}
impl Default for LookupResult {
fn default() -> Self {
Self::NotPresent(Default::default())
}
}
#[auto_wire]
#[derive(Clone, Serialize, Deserialize, Debug, Eq, PartialEq)]
#[cfg_attr(any(test, feature = "for-tests"), derive(Arbitrary))]
pub struct LookupResponse {
#[id(3)]
pub result: LookupResult,
}
| default | identifier_name |
anyid.rs | /*
* Copyright (c) Meta Platforms, Inc. and affiliates.
*
* This software may be used and distributed according to the terms of the
* GNU General Public License version 2.
*/
use std::num::NonZeroU64;
#[cfg(any(test, feature = "for-tests"))]
use quickcheck_arbitrary_derive::Arbitrary;
use serde_derive::Deserialize;
use serde_derive::Serialize;
use type_macros::auto_wire;
use types::HgId;
use crate::AnyFileContentId;
use crate::IndexableId;
use crate::UploadToken; | blake2_hash!(BonsaiChangesetId);
#[auto_wire]
#[derive(Clone, Debug, PartialEq, Eq, Serialize, Deserialize, Hash)]
#[cfg_attr(any(test, feature = "for-tests"), derive(Arbitrary))]
pub enum AnyId {
#[id(1)]
AnyFileContentId(AnyFileContentId),
#[id(2)]
HgFilenodeId(HgId),
#[id(3)]
HgTreeId(HgId),
#[id(4)]
HgChangesetId(HgId),
#[id(5)]
BonsaiChangesetId(BonsaiChangesetId),
}
impl Default for AnyId {
fn default() -> Self {
Self::AnyFileContentId(AnyFileContentId::default())
}
}
#[auto_wire]
#[derive(Clone, Default, Debug, Serialize, Deserialize, Eq, PartialEq)]
#[cfg_attr(any(test, feature = "for-tests"), derive(Arbitrary))]
pub struct LookupRequest {
#[id(1)]
pub id: AnyId,
#[id(2)]
pub bubble_id: Option<NonZeroU64>,
/// If present and the original id is not, lookup will also look into this
/// bubble, and if the id is present, copy it to the requested bubble.
#[id(3)]
pub copy_from_bubble_id: Option<NonZeroU64>,
}
#[auto_wire]
#[derive(Clone, Serialize, Deserialize, Debug, Eq, PartialEq)]
#[cfg_attr(any(test, feature = "for-tests"), derive(Arbitrary))]
pub enum LookupResult {
/// Id was present, upload token for it is returned
#[id(1)]
Present(UploadToken),
/// Id was not present, only its id is returned
#[id(2)]
NotPresent(IndexableId),
// Possible to add an Error variant in the future if we don't want to
// swallow the errors
}
impl Default for LookupResult {
fn default() -> Self {
Self::NotPresent(Default::default())
}
}
#[auto_wire]
#[derive(Clone, Serialize, Deserialize, Debug, Eq, PartialEq)]
#[cfg_attr(any(test, feature = "for-tests"), derive(Arbitrary))]
pub struct LookupResponse {
#[id(3)]
pub result: LookupResult,
} | random_line_split |
|
anyid.rs | /*
* Copyright (c) Meta Platforms, Inc. and affiliates.
*
* This software may be used and distributed according to the terms of the
* GNU General Public License version 2.
*/
use std::num::NonZeroU64;
#[cfg(any(test, feature = "for-tests"))]
use quickcheck_arbitrary_derive::Arbitrary;
use serde_derive::Deserialize;
use serde_derive::Serialize;
use type_macros::auto_wire;
use types::HgId;
use crate::AnyFileContentId;
use crate::IndexableId;
use crate::UploadToken;
blake2_hash!(BonsaiChangesetId);
#[auto_wire]
#[derive(Clone, Debug, PartialEq, Eq, Serialize, Deserialize, Hash)]
#[cfg_attr(any(test, feature = "for-tests"), derive(Arbitrary))]
pub enum AnyId {
#[id(1)]
AnyFileContentId(AnyFileContentId),
#[id(2)]
HgFilenodeId(HgId),
#[id(3)]
HgTreeId(HgId),
#[id(4)]
HgChangesetId(HgId),
#[id(5)]
BonsaiChangesetId(BonsaiChangesetId),
}
impl Default for AnyId {
fn default() -> Self |
}
#[auto_wire]
#[derive(Clone, Default, Debug, Serialize, Deserialize, Eq, PartialEq)]
#[cfg_attr(any(test, feature = "for-tests"), derive(Arbitrary))]
pub struct LookupRequest {
#[id(1)]
pub id: AnyId,
#[id(2)]
pub bubble_id: Option<NonZeroU64>,
/// If present and the original id is not, lookup will also look into this
/// bubble, and if the id is present, copy it to the requested bubble.
#[id(3)]
pub copy_from_bubble_id: Option<NonZeroU64>,
}
#[auto_wire]
#[derive(Clone, Serialize, Deserialize, Debug, Eq, PartialEq)]
#[cfg_attr(any(test, feature = "for-tests"), derive(Arbitrary))]
pub enum LookupResult {
/// Id was present, upload token for it is returned
#[id(1)]
Present(UploadToken),
/// Id was not present, only its id is returned
#[id(2)]
NotPresent(IndexableId),
// Possible to add an Error variant in the future if we don't want to
// swallow the errors
}
impl Default for LookupResult {
fn default() -> Self {
Self::NotPresent(Default::default())
}
}
#[auto_wire]
#[derive(Clone, Serialize, Deserialize, Debug, Eq, PartialEq)]
#[cfg_attr(any(test, feature = "for-tests"), derive(Arbitrary))]
pub struct LookupResponse {
#[id(3)]
pub result: LookupResult,
}
| {
Self::AnyFileContentId(AnyFileContentId::default())
} | identifier_body |
cargo_pkgid.rs | use ops;
use core::{MultiShell, Source, PackageIdSpec};
use sources::{PathSource};
use util::{CargoResult, human};
pub fn pkgid(manifest_path: &Path,
spec: Option<&str>,
_shell: &mut MultiShell) -> CargoResult<PackageIdSpec> {
let mut source = try!(PathSource::for_path(&manifest_path.dir_path()));
try!(source.update());
let package = try!(source.get_root_package());
let lockfile = package.get_root().join("Cargo.lock");
let source_id = package.get_package_id().get_source_id();
let resolve = match try!(ops::load_lockfile(&lockfile, source_id)) {
Some(resolve) => resolve,
None => return Err(human("A Cargo.lock must exist for this command"))
};
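// Resolve an explicit spec against the lockfile, otherwise default to
// the root package's id.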
let pkgid = match spec {
Some(spec) => try!(resolve.query(spec)),
None => package.get_package_id(),
}; | Ok(PackageIdSpec::from_package_id(pkgid))
} | random_line_split |
|
cargo_pkgid.rs | use ops;
use core::{MultiShell, Source, PackageIdSpec};
use sources::{PathSource};
use util::{CargoResult, human};
pub fn pkgid(manifest_path: &Path,
spec: Option<&str>,
_shell: &mut MultiShell) -> CargoResult<PackageIdSpec> | {
let mut source = try!(PathSource::for_path(&manifest_path.dir_path()));
try!(source.update());
let package = try!(source.get_root_package());
let lockfile = package.get_root().join("Cargo.lock");
let source_id = package.get_package_id().get_source_id();
let resolve = match try!(ops::load_lockfile(&lockfile, source_id)) {
Some(resolve) => resolve,
None => return Err(human("A Cargo.lock must exist for this command"))
};
let pkgid = match spec {
Some(spec) => try!(resolve.query(spec)),
None => package.get_package_id(),
};
Ok(PackageIdSpec::from_package_id(pkgid))
} | identifier_body |
|
cargo_pkgid.rs | use ops;
use core::{MultiShell, Source, PackageIdSpec};
use sources::{PathSource};
use util::{CargoResult, human};
pub fn | (manifest_path: &Path,
spec: Option<&str>,
_shell: &mut MultiShell) -> CargoResult<PackageIdSpec> {
let mut source = try!(PathSource::for_path(&manifest_path.dir_path()));
try!(source.update());
let package = try!(source.get_root_package());
let lockfile = package.get_root().join("Cargo.lock");
let source_id = package.get_package_id().get_source_id();
let resolve = match try!(ops::load_lockfile(&lockfile, source_id)) {
Some(resolve) => resolve,
None => return Err(human("A Cargo.lock must exist for this command"))
};
let pkgid = match spec {
Some(spec) => try!(resolve.query(spec)),
None => package.get_package_id(),
};
Ok(PackageIdSpec::from_package_id(pkgid))
}
| pkgid | identifier_name |
webglbuffer.rs | /* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at https://mozilla.org/MPL/2.0/. */
// https://www.khronos.org/registry/webgl/specs/latest/1.0/webgl.idl
use crate::dom::bindings::codegen::Bindings::WebGLBufferBinding;
use crate::dom::bindings::codegen::Bindings::WebGLRenderingContextBinding::WebGLRenderingContextConstants;
use crate::dom::bindings::inheritance::Castable;
use crate::dom::bindings::reflector::{reflect_dom_object, DomObject};
use crate::dom::bindings::root::DomRoot;
use crate::dom::webglobject::WebGLObject;
use crate::dom::webglrenderingcontext::WebGLRenderingContext;
use canvas_traits::webgl::webgl_channel;
use canvas_traits::webgl::{WebGLBufferId, WebGLCommand, WebGLError, WebGLResult};
use dom_struct::dom_struct;
use ipc_channel::ipc;
use std::cell::Cell;
#[dom_struct]
pub struct WebGLBuffer {
webgl_object: WebGLObject,
id: WebGLBufferId,
/// The target to which this buffer was bound the first time
target: Cell<Option<u32>>,
capacity: Cell<usize>,
marked_for_deletion: Cell<bool>,
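/// Number of live attachments (e.g. bindings) still referencing this
/// buffer; actual deletion is deferred until it drops back to zero.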
attached_counter: Cell<u32>,
/// https://www.khronos.org/registry/OpenGL-Refpages/es2.0/xhtml/glGetBufferParameteriv.xml
usage: Cell<u32>,
}
impl WebGLBuffer {
fn new_inherited(context: &WebGLRenderingContext, id: WebGLBufferId) -> Self {
Self {
webgl_object: WebGLObject::new_inherited(context),
id,
target: Default::default(),
capacity: Default::default(),
marked_for_deletion: Default::default(),
attached_counter: Default::default(),
usage: Cell::new(WebGLRenderingContextConstants::STATIC_DRAW),
}
}
pub fn maybe_new(context: &WebGLRenderingContext) -> Option<DomRoot<Self>> {
let (sender, receiver) = webgl_channel().unwrap();
context.send_command(WebGLCommand::CreateBuffer(sender));
receiver
.recv()
.unwrap()
.map(|id| WebGLBuffer::new(context, id))
}
pub fn new(context: &WebGLRenderingContext, id: WebGLBufferId) -> DomRoot<Self> {
reflect_dom_object(
Box::new(WebGLBuffer::new_inherited(context, id)),
&*context.global(),
WebGLBufferBinding::Wrap,
)
}
}
impl WebGLBuffer {
pub fn id(&self) -> WebGLBufferId {
self.id
}
pub fn buffer_data(&self, data: &[u8], usage: u32) -> WebGLResult<()> {
match usage {
WebGLRenderingContextConstants::STREAM_DRAW |
WebGLRenderingContextConstants::STATIC_DRAW |
WebGLRenderingContextConstants::DYNAMIC_DRAW => (),
_ => return Err(WebGLError::InvalidEnum),
}
self.capacity.set(data.len());
self.usage.set(usage);
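// Stream the payload over a dedicated IPC bytes channel: the command
// carries the receiving end, and the raw bytes follow on the sender.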
let (sender, receiver) = ipc::bytes_channel().unwrap();
self.upcast::<WebGLObject>()
.context()
.send_command(WebGLCommand::BufferData(
self.target.get().unwrap(),
receiver,
usage,
));
sender.send(data).unwrap();
Ok(())
}
pub fn capacity(&self) -> usize {
self.capacity.get()
}
pub fn mark_for_deletion(&self, fallible: bool) {
if self.marked_for_deletion.get() {
return;
}
self.marked_for_deletion.set(true);
if self.is_deleted() {
self.delete(fallible);
}
}
fn delete(&self, fallible: bool) {
assert!(self.is_deleted());
let context = self.upcast::<WebGLObject>().context();
let cmd = WebGLCommand::DeleteBuffer(self.id);
if fallible {
context.send_command_ignored(cmd);
} else {
context.send_command(cmd);
}
}
pub fn is_marked_for_deletion(&self) -> bool {
self.marked_for_deletion.get()
}
pub fn is_deleted(&self) -> bool {
self.marked_for_deletion.get() && !self.is_attached()
}
pub fn target(&self) -> Option<u32> {
self.target.get()
}
pub fn set_target(&self, target: u32) -> WebGLResult<()> {
if self.target.get().map_or(false, |t| t != target) {
return Err(WebGLError::InvalidOperation);
}
self.target.set(Some(target));
Ok(())
}
pub fn is_attached(&self) -> bool {
self.attached_counter.get() != 0
}
pub fn increment_attached_counter(&self) {
self.attached_counter.set(
self.attached_counter
.get()
.checked_add(1)
.expect("refcount overflowed"),
);
}
pub fn decrement_attached_counter(&self) {
self.attached_counter.set(
self.attached_counter
.get()
.checked_sub(1)
.expect("refcount underflowed"),
);
if self.is_deleted() |
}
pub fn usage(&self) -> u32 {
self.usage.get()
}
}
impl Drop for WebGLBuffer {
fn drop(&mut self) {
self.mark_for_deletion(true);
}
}
| {
self.delete(false);
} | conditional_block |
webglbuffer.rs | /* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at https://mozilla.org/MPL/2.0/. */
// https://www.khronos.org/registry/webgl/specs/latest/1.0/webgl.idl
use crate::dom::bindings::codegen::Bindings::WebGLBufferBinding;
use crate::dom::bindings::codegen::Bindings::WebGLRenderingContextBinding::WebGLRenderingContextConstants;
use crate::dom::bindings::inheritance::Castable;
use crate::dom::bindings::reflector::{reflect_dom_object, DomObject};
use crate::dom::bindings::root::DomRoot;
use crate::dom::webglobject::WebGLObject;
use crate::dom::webglrenderingcontext::WebGLRenderingContext;
use canvas_traits::webgl::webgl_channel;
use canvas_traits::webgl::{WebGLBufferId, WebGLCommand, WebGLError, WebGLResult};
use dom_struct::dom_struct;
use ipc_channel::ipc;
use std::cell::Cell;
#[dom_struct]
pub struct WebGLBuffer {
webgl_object: WebGLObject,
id: WebGLBufferId,
/// The target to which this buffer was bound the first time
target: Cell<Option<u32>>,
capacity: Cell<usize>,
marked_for_deletion: Cell<bool>,
attached_counter: Cell<u32>,
/// https://www.khronos.org/registry/OpenGL-Refpages/es2.0/xhtml/glGetBufferParameteriv.xml
usage: Cell<u32>,
}
impl WebGLBuffer {
fn new_inherited(context: &WebGLRenderingContext, id: WebGLBufferId) -> Self {
Self {
webgl_object: WebGLObject::new_inherited(context),
id,
target: Default::default(),
capacity: Default::default(),
marked_for_deletion: Default::default(),
attached_counter: Default::default(),
usage: Cell::new(WebGLRenderingContextConstants::STATIC_DRAW),
}
}
pub fn maybe_new(context: &WebGLRenderingContext) -> Option<DomRoot<Self>> {
let (sender, receiver) = webgl_channel().unwrap();
context.send_command(WebGLCommand::CreateBuffer(sender));
receiver
.recv()
.unwrap()
.map(|id| WebGLBuffer::new(context, id))
}
pub fn new(context: &WebGLRenderingContext, id: WebGLBufferId) -> DomRoot<Self> {
reflect_dom_object(
Box::new(WebGLBuffer::new_inherited(context, id)),
&*context.global(),
WebGLBufferBinding::Wrap,
)
}
}
impl WebGLBuffer {
pub fn id(&self) -> WebGLBufferId {
self.id
}
pub fn buffer_data(&self, data: &[u8], usage: u32) -> WebGLResult<()> {
match usage {
WebGLRenderingContextConstants::STREAM_DRAW |
WebGLRenderingContextConstants::STATIC_DRAW |
WebGLRenderingContextConstants::DYNAMIC_DRAW => (),
_ => return Err(WebGLError::InvalidEnum),
}
self.capacity.set(data.len());
self.usage.set(usage);
let (sender, receiver) = ipc::bytes_channel().unwrap();
self.upcast::<WebGLObject>()
.context()
.send_command(WebGLCommand::BufferData(
self.target.get().unwrap(),
receiver,
usage,
));
sender.send(data).unwrap();
Ok(())
}
pub fn capacity(&self) -> usize {
self.capacity.get()
}
pub fn mark_for_deletion(&self, fallible: bool) {
if self.marked_for_deletion.get() {
return;
}
self.marked_for_deletion.set(true);
if self.is_deleted() {
self.delete(fallible);
}
}
| assert!(self.is_deleted());
let context = self.upcast::<WebGLObject>().context();
let cmd = WebGLCommand::DeleteBuffer(self.id);
if fallible {
context.send_command_ignored(cmd);
} else {
context.send_command(cmd);
}
}
pub fn is_marked_for_deletion(&self) -> bool {
self.marked_for_deletion.get()
}
pub fn is_deleted(&self) -> bool {
self.marked_for_deletion.get() && !self.is_attached()
}
pub fn target(&self) -> Option<u32> {
self.target.get()
}
pub fn set_target(&self, target: u32) -> WebGLResult<()> {
if self.target.get().map_or(false, |t| t != target) {
return Err(WebGLError::InvalidOperation);
}
self.target.set(Some(target));
Ok(())
}
pub fn is_attached(&self) -> bool {
self.attached_counter.get() != 0
}
pub fn increment_attached_counter(&self) {
self.attached_counter.set(
self.attached_counter
.get()
.checked_add(1)
.expect("refcount overflowed"),
);
}
pub fn decrement_attached_counter(&self) {
self.attached_counter.set(
self.attached_counter
.get()
.checked_sub(1)
.expect("refcount underflowed"),
);
if self.is_deleted() {
self.delete(false);
}
}
pub fn usage(&self) -> u32 {
self.usage.get()
}
}
impl Drop for WebGLBuffer {
fn drop(&mut self) {
self.mark_for_deletion(true);
}
} | fn delete(&self, fallible: bool) { | random_line_split |
webglbuffer.rs | /* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at https://mozilla.org/MPL/2.0/. */
// https://www.khronos.org/registry/webgl/specs/latest/1.0/webgl.idl
use crate::dom::bindings::codegen::Bindings::WebGLBufferBinding;
use crate::dom::bindings::codegen::Bindings::WebGLRenderingContextBinding::WebGLRenderingContextConstants;
use crate::dom::bindings::inheritance::Castable;
use crate::dom::bindings::reflector::{reflect_dom_object, DomObject};
use crate::dom::bindings::root::DomRoot;
use crate::dom::webglobject::WebGLObject;
use crate::dom::webglrenderingcontext::WebGLRenderingContext;
use canvas_traits::webgl::webgl_channel;
use canvas_traits::webgl::{WebGLBufferId, WebGLCommand, WebGLError, WebGLResult};
use dom_struct::dom_struct;
use ipc_channel::ipc;
use std::cell::Cell;
#[dom_struct]
pub struct WebGLBuffer {
webgl_object: WebGLObject,
id: WebGLBufferId,
/// The target to which this buffer was bound the first time
target: Cell<Option<u32>>,
capacity: Cell<usize>,
marked_for_deletion: Cell<bool>,
attached_counter: Cell<u32>,
/// https://www.khronos.org/registry/OpenGL-Refpages/es2.0/xhtml/glGetBufferParameteriv.xml
usage: Cell<u32>,
}
impl WebGLBuffer {
fn new_inherited(context: &WebGLRenderingContext, id: WebGLBufferId) -> Self {
Self {
webgl_object: WebGLObject::new_inherited(context),
id,
target: Default::default(),
capacity: Default::default(),
marked_for_deletion: Default::default(),
attached_counter: Default::default(),
usage: Cell::new(WebGLRenderingContextConstants::STATIC_DRAW),
}
}
pub fn maybe_new(context: &WebGLRenderingContext) -> Option<DomRoot<Self>> {
let (sender, receiver) = webgl_channel().unwrap();
context.send_command(WebGLCommand::CreateBuffer(sender));
receiver
.recv()
.unwrap()
.map(|id| WebGLBuffer::new(context, id))
}
pub fn new(context: &WebGLRenderingContext, id: WebGLBufferId) -> DomRoot<Self> {
reflect_dom_object(
Box::new(WebGLBuffer::new_inherited(context, id)),
&*context.global(),
WebGLBufferBinding::Wrap,
)
}
}
impl WebGLBuffer {
pub fn id(&self) -> WebGLBufferId {
self.id
}
pub fn buffer_data(&self, data: &[u8], usage: u32) -> WebGLResult<()> {
match usage {
WebGLRenderingContextConstants::STREAM_DRAW |
WebGLRenderingContextConstants::STATIC_DRAW |
WebGLRenderingContextConstants::DYNAMIC_DRAW => (),
_ => return Err(WebGLError::InvalidEnum),
}
self.capacity.set(data.len());
self.usage.set(usage);
let (sender, receiver) = ipc::bytes_channel().unwrap();
self.upcast::<WebGLObject>()
.context()
.send_command(WebGLCommand::BufferData(
self.target.get().unwrap(),
receiver,
usage,
));
sender.send(data).unwrap();
Ok(())
}
pub fn capacity(&self) -> usize {
self.capacity.get()
}
pub fn mark_for_deletion(&self, fallible: bool) {
if self.marked_for_deletion.get() {
return;
}
self.marked_for_deletion.set(true);
if self.is_deleted() {
self.delete(fallible);
}
}
fn delete(&self, fallible: bool) {
assert!(self.is_deleted());
let context = self.upcast::<WebGLObject>().context();
let cmd = WebGLCommand::DeleteBuffer(self.id);
if fallible {
context.send_command_ignored(cmd);
} else {
context.send_command(cmd);
}
}
pub fn is_marked_for_deletion(&self) -> bool {
self.marked_for_deletion.get()
}
pub fn is_deleted(&self) -> bool {
self.marked_for_deletion.get() && !self.is_attached()
}
pub fn target(&self) -> Option<u32> {
self.target.get()
}
pub fn set_target(&self, target: u32) -> WebGLResult<()> {
if self.target.get().map_or(false, |t| t != target) {
return Err(WebGLError::InvalidOperation);
}
self.target.set(Some(target));
Ok(())
}
pub fn is_attached(&self) -> bool {
self.attached_counter.get() != 0
}
pub fn increment_attached_counter(&self) {
self.attached_counter.set(
self.attached_counter
.get()
.checked_add(1)
.expect("refcount overflowed"),
);
}
pub fn decrement_attached_counter(&self) {
self.attached_counter.set(
self.attached_counter
.get()
.checked_sub(1)
.expect("refcount underflowed"),
);
if self.is_deleted() {
self.delete(false);
}
}
pub fn usage(&self) -> u32 {
self.usage.get()
}
}
impl Drop for WebGLBuffer {
fn drop(&mut self) |
}
| {
self.mark_for_deletion(true);
} | identifier_body |
webglbuffer.rs | /* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at https://mozilla.org/MPL/2.0/. */
// https://www.khronos.org/registry/webgl/specs/latest/1.0/webgl.idl
use crate::dom::bindings::codegen::Bindings::WebGLBufferBinding;
use crate::dom::bindings::codegen::Bindings::WebGLRenderingContextBinding::WebGLRenderingContextConstants;
use crate::dom::bindings::inheritance::Castable;
use crate::dom::bindings::reflector::{reflect_dom_object, DomObject};
use crate::dom::bindings::root::DomRoot;
use crate::dom::webglobject::WebGLObject;
use crate::dom::webglrenderingcontext::WebGLRenderingContext;
use canvas_traits::webgl::webgl_channel;
use canvas_traits::webgl::{WebGLBufferId, WebGLCommand, WebGLError, WebGLResult};
use dom_struct::dom_struct;
use ipc_channel::ipc;
use std::cell::Cell;
#[dom_struct]
pub struct WebGLBuffer {
webgl_object: WebGLObject,
id: WebGLBufferId,
/// The target to which this buffer was bound the first time
target: Cell<Option<u32>>,
capacity: Cell<usize>,
marked_for_deletion: Cell<bool>,
attached_counter: Cell<u32>,
/// https://www.khronos.org/registry/OpenGL-Refpages/es2.0/xhtml/glGetBufferParameteriv.xml
usage: Cell<u32>,
}
impl WebGLBuffer {
fn new_inherited(context: &WebGLRenderingContext, id: WebGLBufferId) -> Self {
Self {
webgl_object: WebGLObject::new_inherited(context),
id,
target: Default::default(),
capacity: Default::default(),
marked_for_deletion: Default::default(),
attached_counter: Default::default(),
usage: Cell::new(WebGLRenderingContextConstants::STATIC_DRAW),
}
}
pub fn | (context: &WebGLRenderingContext) -> Option<DomRoot<Self>> {
let (sender, receiver) = webgl_channel().unwrap();
context.send_command(WebGLCommand::CreateBuffer(sender));
receiver
.recv()
.unwrap()
.map(|id| WebGLBuffer::new(context, id))
}
pub fn new(context: &WebGLRenderingContext, id: WebGLBufferId) -> DomRoot<Self> {
reflect_dom_object(
Box::new(WebGLBuffer::new_inherited(context, id)),
&*context.global(),
WebGLBufferBinding::Wrap,
)
}
}
impl WebGLBuffer {
pub fn id(&self) -> WebGLBufferId {
self.id
}
pub fn buffer_data(&self, data: &[u8], usage: u32) -> WebGLResult<()> {
match usage {
WebGLRenderingContextConstants::STREAM_DRAW |
WebGLRenderingContextConstants::STATIC_DRAW |
WebGLRenderingContextConstants::DYNAMIC_DRAW => (),
_ => return Err(WebGLError::InvalidEnum),
}
self.capacity.set(data.len());
self.usage.set(usage);
let (sender, receiver) = ipc::bytes_channel().unwrap();
self.upcast::<WebGLObject>()
.context()
.send_command(WebGLCommand::BufferData(
self.target.get().unwrap(),
receiver,
usage,
));
sender.send(data).unwrap();
Ok(())
}
pub fn capacity(&self) -> usize {
self.capacity.get()
}
pub fn mark_for_deletion(&self, fallible: bool) {
if self.marked_for_deletion.get() {
return;
}
self.marked_for_deletion.set(true);
if self.is_deleted() {
self.delete(fallible);
}
}
fn delete(&self, fallible: bool) {
assert!(self.is_deleted());
let context = self.upcast::<WebGLObject>().context();
let cmd = WebGLCommand::DeleteBuffer(self.id);
if fallible {
context.send_command_ignored(cmd);
} else {
context.send_command(cmd);
}
}
pub fn is_marked_for_deletion(&self) -> bool {
self.marked_for_deletion.get()
}
pub fn is_deleted(&self) -> bool {
self.marked_for_deletion.get() && !self.is_attached()
}
pub fn target(&self) -> Option<u32> {
self.target.get()
}
pub fn set_target(&self, target: u32) -> WebGLResult<()> {
if self.target.get().map_or(false, |t| t != target) {
return Err(WebGLError::InvalidOperation);
}
self.target.set(Some(target));
Ok(())
}
pub fn is_attached(&self) -> bool {
self.attached_counter.get() != 0
}
pub fn increment_attached_counter(&self) {
self.attached_counter.set(
self.attached_counter
.get()
.checked_add(1)
.expect("refcount overflowed"),
);
}
pub fn decrement_attached_counter(&self) {
self.attached_counter.set(
self.attached_counter
.get()
.checked_sub(1)
.expect("refcount underflowed"),
);
if self.is_deleted() {
self.delete(false);
}
}
pub fn usage(&self) -> u32 {
self.usage.get()
}
}
impl Drop for WebGLBuffer {
fn drop(&mut self) {
self.mark_for_deletion(true);
}
}
| maybe_new | identifier_name |
service.js | const fs = require('fs');
const path = require('path');
class Service {
constructor() {
this.getFileRecursevly = this.getFileRecursevly.bind(this);
this.getFiles = this.getFiles.bind(this);
}
getFileRecursevly(folderPath, shortPath = '') |
getFiles(path) {
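// Wraps the synchronous recursive walk in a Promise so callers get an
// async API; reject is currently unused.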
return new Promise((resolve, reject) => {
var files = this.getFileRecursevly(path)
resolve(files)
})
}
}
module.exports = Service; | {
var files = [];
var folder = fs.readdirSync(path.resolve(__dirname, folderPath));
folder.forEach(file => {
var filePath = path.resolve(folderPath, file);
if (fs.lstatSync(filePath).isDirectory()) {
files.push({
folder: file,
files: this.getFileRecursevly(filePath, file)
})
} else {
files.push({ file: file, folder: shortPath });
}
})
return files;
} | identifier_body |
service.js | const fs = require('fs');
const path = require('path');
class Service {
constructor() {
this.getFileRecursevly = this.getFileRecursevly.bind(this);
this.getFiles = this.getFiles.bind(this);
}
getFileRecursevly(folderPath, shortPath = '') {
var files = [];
var folder = fs.readdirSync(path.resolve(__dirname, folderPath));
folder.forEach(file => {
var filePath = path.resolve(folderPath, file);
if (fs.lstatSync(filePath).isDirectory()) {
files.push({
folder: file,
files: this.getFileRecursevly(filePath, file)
})
} else |
})
return files;
}
getFiles(path) {
return new Promise((resolve, reject) => {
var files = this.getFileRecursevly(path)
resolve(files)
})
}
}
module.exports = Service; | {
files.push({ file: file, folder: shortPath });
} | conditional_block |
service.js | const fs = require('fs');
const path = require('path');
class Service {
| () {
this.getFileRecursevly = this.getFileRecursevly.bind(this);
this.getFiles = this.getFiles.bind(this);
}
getFileRecursevly(folderPath, shortPath = '') {
var files = [];
var folder = fs.readdirSync(path.resolve(__dirname, folderPath));
folder.forEach(file => {
var filePath = path.resolve(folderPath, file);
if (fs.lstatSync(filePath).isDirectory()) {
files.push({
folder: file,
files: this.getFileRecursevly(filePath, file)
})
} else {
files.push({ file: file, folder: shortPath });
}
})
return files;
}
getFiles(path) {
return new Promise((resolve, reject) => {
var files = this.getFileRecursevly(path)
resolve(files)
})
}
}
module.exports = Service; | constructor | identifier_name |