partition
stringclasses 3
values | func_name
stringlengths 1
134
| docstring
stringlengths 1
46.9k
| path
stringlengths 4
223
| original_string
stringlengths 75
104k
| code
stringlengths 75
104k
| docstring_tokens
listlengths 1
1.97k
| repo
stringlengths 7
55
| language
stringclasses 1
value | url
stringlengths 87
315
| code_tokens
listlengths 19
28.4k
| sha
stringlengths 40
40
|
|---|---|---|---|---|---|---|---|---|---|---|---|
test
|
DSMRProtocol.connection_lost
|
Stop when connection is lost.
|
dsmr_parser/clients/protocol.py
|
def connection_lost(self, exc):
"""Stop when connection is lost."""
if exc:
self.log.exception('disconnected due to exception')
else:
self.log.info('disconnected because of close/abort.')
self._closed.set()
|
def connection_lost(self, exc):
"""Stop when connection is lost."""
if exc:
self.log.exception('disconnected due to exception')
else:
self.log.info('disconnected because of close/abort.')
self._closed.set()
|
[
"Stop",
"when",
"connection",
"is",
"lost",
"."
] |
ndokter/dsmr_parser
|
python
|
https://github.com/ndokter/dsmr_parser/blob/c04b0a5add58ce70153eede1a87ca171876b61c7/dsmr_parser/clients/protocol.py#L90-L96
|
[
"def",
"connection_lost",
"(",
"self",
",",
"exc",
")",
":",
"if",
"exc",
":",
"self",
".",
"log",
".",
"exception",
"(",
"'disconnected due to exception'",
")",
"else",
":",
"self",
".",
"log",
".",
"info",
"(",
"'disconnected because of close/abort.'",
")",
"self",
".",
"_closed",
".",
"set",
"(",
")"
] |
c04b0a5add58ce70153eede1a87ca171876b61c7
|
test
|
DSMRProtocol.handle_telegram
|
Send off parsed telegram to handling callback.
|
dsmr_parser/clients/protocol.py
|
def handle_telegram(self, telegram):
"""Send off parsed telegram to handling callback."""
self.log.debug('got telegram: %s', telegram)
try:
parsed_telegram = self.telegram_parser.parse(telegram)
except InvalidChecksumError as e:
self.log.warning(str(e))
except ParseError:
self.log.exception("failed to parse telegram")
else:
self.telegram_callback(parsed_telegram)
|
def handle_telegram(self, telegram):
"""Send off parsed telegram to handling callback."""
self.log.debug('got telegram: %s', telegram)
try:
parsed_telegram = self.telegram_parser.parse(telegram)
except InvalidChecksumError as e:
self.log.warning(str(e))
except ParseError:
self.log.exception("failed to parse telegram")
else:
self.telegram_callback(parsed_telegram)
|
[
"Send",
"off",
"parsed",
"telegram",
"to",
"handling",
"callback",
"."
] |
ndokter/dsmr_parser
|
python
|
https://github.com/ndokter/dsmr_parser/blob/c04b0a5add58ce70153eede1a87ca171876b61c7/dsmr_parser/clients/protocol.py#L98-L109
|
[
"def",
"handle_telegram",
"(",
"self",
",",
"telegram",
")",
":",
"self",
".",
"log",
".",
"debug",
"(",
"'got telegram: %s'",
",",
"telegram",
")",
"try",
":",
"parsed_telegram",
"=",
"self",
".",
"telegram_parser",
".",
"parse",
"(",
"telegram",
")",
"except",
"InvalidChecksumError",
"as",
"e",
":",
"self",
".",
"log",
".",
"warning",
"(",
"str",
"(",
"e",
")",
")",
"except",
"ParseError",
":",
"self",
".",
"log",
".",
"exception",
"(",
"\"failed to parse telegram\"",
")",
"else",
":",
"self",
".",
"telegram_callback",
"(",
"parsed_telegram",
")"
] |
c04b0a5add58ce70153eede1a87ca171876b61c7
|
test
|
TelegramParser.parse
|
Parse telegram from string to dict.
The telegram str type makes python 2.x integration easier.
:param str telegram_data: full telegram from start ('/') to checksum
('!ABCD') including line endings in between the telegram's lines
:rtype: dict
:returns: Shortened example:
{
..
r'\d-\d:96\.1\.1.+?\r\n': <CosemObject>, # EQUIPMENT_IDENTIFIER
r'\d-\d:1\.8\.1.+?\r\n': <CosemObject>, # ELECTRICITY_USED_TARIFF_1
r'\d-\d:24\.3\.0.+?\r\n.+?\r\n': <MBusObject>, # GAS_METER_READING
..
}
:raises ParseError:
:raises InvalidChecksumError:
|
dsmr_parser/parsers.py
|
def parse(self, telegram_data):
"""
Parse telegram from string to dict.
The telegram str type makes python 2.x integration easier.
:param str telegram_data: full telegram from start ('/') to checksum
('!ABCD') including line endings in between the telegram's lines
:rtype: dict
:returns: Shortened example:
{
..
r'\d-\d:96\.1\.1.+?\r\n': <CosemObject>, # EQUIPMENT_IDENTIFIER
r'\d-\d:1\.8\.1.+?\r\n': <CosemObject>, # ELECTRICITY_USED_TARIFF_1
r'\d-\d:24\.3\.0.+?\r\n.+?\r\n': <MBusObject>, # GAS_METER_READING
..
}
:raises ParseError:
:raises InvalidChecksumError:
"""
if self.apply_checksum_validation \
and self.telegram_specification['checksum_support']:
self.validate_checksum(telegram_data)
telegram = {}
for signature, parser in self.telegram_specification['objects'].items():
match = re.search(signature, telegram_data, re.DOTALL)
# Some signatures are optional and may not be present,
# so only parse lines that match
if match:
telegram[signature] = parser.parse(match.group(0))
return telegram
|
def parse(self, telegram_data):
"""
Parse telegram from string to dict.
The telegram str type makes python 2.x integration easier.
:param str telegram_data: full telegram from start ('/') to checksum
('!ABCD') including line endings in between the telegram's lines
:rtype: dict
:returns: Shortened example:
{
..
r'\d-\d:96\.1\.1.+?\r\n': <CosemObject>, # EQUIPMENT_IDENTIFIER
r'\d-\d:1\.8\.1.+?\r\n': <CosemObject>, # ELECTRICITY_USED_TARIFF_1
r'\d-\d:24\.3\.0.+?\r\n.+?\r\n': <MBusObject>, # GAS_METER_READING
..
}
:raises ParseError:
:raises InvalidChecksumError:
"""
if self.apply_checksum_validation \
and self.telegram_specification['checksum_support']:
self.validate_checksum(telegram_data)
telegram = {}
for signature, parser in self.telegram_specification['objects'].items():
match = re.search(signature, telegram_data, re.DOTALL)
# Some signatures are optional and may not be present,
# so only parse lines that match
if match:
telegram[signature] = parser.parse(match.group(0))
return telegram
|
[
"Parse",
"telegram",
"from",
"string",
"to",
"dict",
"."
] |
ndokter/dsmr_parser
|
python
|
https://github.com/ndokter/dsmr_parser/blob/c04b0a5add58ce70153eede1a87ca171876b61c7/dsmr_parser/parsers.py#L24-L59
|
[
"def",
"parse",
"(",
"self",
",",
"telegram_data",
")",
":",
"if",
"self",
".",
"apply_checksum_validation",
"and",
"self",
".",
"telegram_specification",
"[",
"'checksum_support'",
"]",
":",
"self",
".",
"validate_checksum",
"(",
"telegram_data",
")",
"telegram",
"=",
"{",
"}",
"for",
"signature",
",",
"parser",
"in",
"self",
".",
"telegram_specification",
"[",
"'objects'",
"]",
".",
"items",
"(",
")",
":",
"match",
"=",
"re",
".",
"search",
"(",
"signature",
",",
"telegram_data",
",",
"re",
".",
"DOTALL",
")",
"# Some signatures are optional and may not be present,",
"# so only parse lines that match",
"if",
"match",
":",
"telegram",
"[",
"signature",
"]",
"=",
"parser",
".",
"parse",
"(",
"match",
".",
"group",
"(",
"0",
")",
")",
"return",
"telegram"
] |
c04b0a5add58ce70153eede1a87ca171876b61c7
|
test
|
TelegramParser.validate_checksum
|
:param str telegram:
:raises ParseError:
:raises InvalidChecksumError:
|
dsmr_parser/parsers.py
|
def validate_checksum(telegram):
"""
:param str telegram:
:raises ParseError:
:raises InvalidChecksumError:
"""
# Extract the part for which the checksum applies.
checksum_contents = re.search(r'\/.+\!', telegram, re.DOTALL)
# Extract the hexadecimal checksum value itself.
# The line ending '\r\n' for the checksum line can be ignored.
checksum_hex = re.search(r'((?<=\!)[0-9A-Z]{4})+', telegram)
if not checksum_contents or not checksum_hex:
raise ParseError(
'Failed to perform CRC validation because the telegram is '
'incomplete. The checksum and/or content values are missing.'
)
calculated_crc = CRC16().calculate(checksum_contents.group(0))
expected_crc = int(checksum_hex.group(0), base=16)
if calculated_crc != expected_crc:
raise InvalidChecksumError(
"Invalid telegram. The CRC checksum '{}' does not match the "
"expected '{}'".format(
calculated_crc,
expected_crc
)
)
|
def validate_checksum(telegram):
"""
:param str telegram:
:raises ParseError:
:raises InvalidChecksumError:
"""
# Extract the part for which the checksum applies.
checksum_contents = re.search(r'\/.+\!', telegram, re.DOTALL)
# Extract the hexadecimal checksum value itself.
# The line ending '\r\n' for the checksum line can be ignored.
checksum_hex = re.search(r'((?<=\!)[0-9A-Z]{4})+', telegram)
if not checksum_contents or not checksum_hex:
raise ParseError(
'Failed to perform CRC validation because the telegram is '
'incomplete. The checksum and/or content values are missing.'
)
calculated_crc = CRC16().calculate(checksum_contents.group(0))
expected_crc = int(checksum_hex.group(0), base=16)
if calculated_crc != expected_crc:
raise InvalidChecksumError(
"Invalid telegram. The CRC checksum '{}' does not match the "
"expected '{}'".format(
calculated_crc,
expected_crc
)
)
|
[
":",
"param",
"str",
"telegram",
":",
":",
"raises",
"ParseError",
":",
":",
"raises",
"InvalidChecksumError",
":"
] |
ndokter/dsmr_parser
|
python
|
https://github.com/ndokter/dsmr_parser/blob/c04b0a5add58ce70153eede1a87ca171876b61c7/dsmr_parser/parsers.py#L62-L92
|
[
"def",
"validate_checksum",
"(",
"telegram",
")",
":",
"# Extract the part for which the checksum applies.",
"checksum_contents",
"=",
"re",
".",
"search",
"(",
"r'\\/.+\\!'",
",",
"telegram",
",",
"re",
".",
"DOTALL",
")",
"# Extract the hexadecimal checksum value itself.",
"# The line ending '\\r\\n' for the checksum line can be ignored.",
"checksum_hex",
"=",
"re",
".",
"search",
"(",
"r'((?<=\\!)[0-9A-Z]{4})+'",
",",
"telegram",
")",
"if",
"not",
"checksum_contents",
"or",
"not",
"checksum_hex",
":",
"raise",
"ParseError",
"(",
"'Failed to perform CRC validation because the telegram is '",
"'incomplete. The checksum and/or content values are missing.'",
")",
"calculated_crc",
"=",
"CRC16",
"(",
")",
".",
"calculate",
"(",
"checksum_contents",
".",
"group",
"(",
"0",
")",
")",
"expected_crc",
"=",
"int",
"(",
"checksum_hex",
".",
"group",
"(",
"0",
")",
",",
"base",
"=",
"16",
")",
"if",
"calculated_crc",
"!=",
"expected_crc",
":",
"raise",
"InvalidChecksumError",
"(",
"\"Invalid telegram. The CRC checksum '{}' does not match the \"",
"\"expected '{}'\"",
".",
"format",
"(",
"calculated_crc",
",",
"expected_crc",
")",
")"
] |
c04b0a5add58ce70153eede1a87ca171876b61c7
|
test
|
TelegramBuffer._remove
|
Remove telegram from buffer and incomplete data preceding it. This
is easier than validating the data before adding it to the buffer.
:param str telegram:
:return:
|
dsmr_parser/clients/telegram_buffer.py
|
def _remove(self, telegram):
"""
Remove telegram from buffer and incomplete data preceding it. This
is easier than validating the data before adding it to the buffer.
:param str telegram:
:return:
"""
# Remove data leading up to the telegram and the telegram itself.
index = self._buffer.index(telegram) + len(telegram)
self._buffer = self._buffer[index:]
|
def _remove(self, telegram):
"""
Remove telegram from buffer and incomplete data preceding it. This
is easier than validating the data before adding it to the buffer.
:param str telegram:
:return:
"""
# Remove data leading up to the telegram and the telegram itself.
index = self._buffer.index(telegram) + len(telegram)
self._buffer = self._buffer[index:]
|
[
"Remove",
"telegram",
"from",
"buffer",
"and",
"incomplete",
"data",
"preceding",
"it",
".",
"This",
"is",
"easier",
"than",
"validating",
"the",
"data",
"before",
"adding",
"it",
"to",
"the",
"buffer",
".",
":",
"param",
"str",
"telegram",
":",
":",
"return",
":"
] |
ndokter/dsmr_parser
|
python
|
https://github.com/ndokter/dsmr_parser/blob/c04b0a5add58ce70153eede1a87ca171876b61c7/dsmr_parser/clients/telegram_buffer.py#L29-L39
|
[
"def",
"_remove",
"(",
"self",
",",
"telegram",
")",
":",
"# Remove data leading up to the telegram and the telegram itself.",
"index",
"=",
"self",
".",
"_buffer",
".",
"index",
"(",
"telegram",
")",
"+",
"len",
"(",
"telegram",
")",
"self",
".",
"_buffer",
"=",
"self",
".",
"_buffer",
"[",
"index",
":",
"]"
] |
c04b0a5add58ce70153eede1a87ca171876b61c7
|
test
|
get_version
|
Get the version of the package from the given file by
executing it and extracting the given `name`.
|
setupbase.py
|
def get_version(file, name='__version__'):
"""Get the version of the package from the given file by
executing it and extracting the given `name`.
"""
path = os.path.realpath(file)
version_ns = {}
with io.open(path, encoding="utf8") as f:
exec(f.read(), {}, version_ns)
return version_ns[name]
|
def get_version(file, name='__version__'):
"""Get the version of the package from the given file by
executing it and extracting the given `name`.
"""
path = os.path.realpath(file)
version_ns = {}
with io.open(path, encoding="utf8") as f:
exec(f.read(), {}, version_ns)
return version_ns[name]
|
[
"Get",
"the",
"version",
"of",
"the",
"package",
"from",
"the",
"given",
"file",
"by",
"executing",
"it",
"and",
"extracting",
"the",
"given",
"name",
"."
] |
jupyter-widgets/jupyterlab-sidecar
|
python
|
https://github.com/jupyter-widgets/jupyterlab-sidecar/blob/8889d09f1a0933e2cbee06d4874f720b075b29e8/setupbase.py#L78-L86
|
[
"def",
"get_version",
"(",
"file",
",",
"name",
"=",
"'__version__'",
")",
":",
"path",
"=",
"os",
".",
"path",
".",
"realpath",
"(",
"file",
")",
"version_ns",
"=",
"{",
"}",
"with",
"io",
".",
"open",
"(",
"path",
",",
"encoding",
"=",
"\"utf8\"",
")",
"as",
"f",
":",
"exec",
"(",
"f",
".",
"read",
"(",
")",
",",
"{",
"}",
",",
"version_ns",
")",
"return",
"version_ns",
"[",
"name",
"]"
] |
8889d09f1a0933e2cbee06d4874f720b075b29e8
|
test
|
ensure_python
|
Given a list of range specifiers for python, ensure compatibility.
|
setupbase.py
|
def ensure_python(specs):
"""Given a list of range specifiers for python, ensure compatibility.
"""
if not isinstance(specs, (list, tuple)):
specs = [specs]
v = sys.version_info
part = '%s.%s' % (v.major, v.minor)
for spec in specs:
if part == spec:
return
try:
if eval(part + spec):
return
except SyntaxError:
pass
raise ValueError('Python version %s unsupported' % part)
|
def ensure_python(specs):
"""Given a list of range specifiers for python, ensure compatibility.
"""
if not isinstance(specs, (list, tuple)):
specs = [specs]
v = sys.version_info
part = '%s.%s' % (v.major, v.minor)
for spec in specs:
if part == spec:
return
try:
if eval(part + spec):
return
except SyntaxError:
pass
raise ValueError('Python version %s unsupported' % part)
|
[
"Given",
"a",
"list",
"of",
"range",
"specifiers",
"for",
"python",
"ensure",
"compatibility",
"."
] |
jupyter-widgets/jupyterlab-sidecar
|
python
|
https://github.com/jupyter-widgets/jupyterlab-sidecar/blob/8889d09f1a0933e2cbee06d4874f720b075b29e8/setupbase.py#L89-L104
|
[
"def",
"ensure_python",
"(",
"specs",
")",
":",
"if",
"not",
"isinstance",
"(",
"specs",
",",
"(",
"list",
",",
"tuple",
")",
")",
":",
"specs",
"=",
"[",
"specs",
"]",
"v",
"=",
"sys",
".",
"version_info",
"part",
"=",
"'%s.%s'",
"%",
"(",
"v",
".",
"major",
",",
"v",
".",
"minor",
")",
"for",
"spec",
"in",
"specs",
":",
"if",
"part",
"==",
"spec",
":",
"return",
"try",
":",
"if",
"eval",
"(",
"part",
"+",
"spec",
")",
":",
"return",
"except",
"SyntaxError",
":",
"pass",
"raise",
"ValueError",
"(",
"'Python version %s unsupported'",
"%",
"part",
")"
] |
8889d09f1a0933e2cbee06d4874f720b075b29e8
|
test
|
find_packages
|
Find all of the packages.
|
setupbase.py
|
def find_packages(top=HERE):
"""
Find all of the packages.
"""
packages = []
for d, dirs, _ in os.walk(top, followlinks=True):
if os.path.exists(pjoin(d, '__init__.py')):
packages.append(os.path.relpath(d, top).replace(os.path.sep, '.'))
elif d != top:
# Do not look for packages in subfolders if current is not a package
dirs[:] = []
return packages
|
def find_packages(top=HERE):
"""
Find all of the packages.
"""
packages = []
for d, dirs, _ in os.walk(top, followlinks=True):
if os.path.exists(pjoin(d, '__init__.py')):
packages.append(os.path.relpath(d, top).replace(os.path.sep, '.'))
elif d != top:
# Do not look for packages in subfolders if current is not a package
dirs[:] = []
return packages
|
[
"Find",
"all",
"of",
"the",
"packages",
"."
] |
jupyter-widgets/jupyterlab-sidecar
|
python
|
https://github.com/jupyter-widgets/jupyterlab-sidecar/blob/8889d09f1a0933e2cbee06d4874f720b075b29e8/setupbase.py#L107-L118
|
[
"def",
"find_packages",
"(",
"top",
"=",
"HERE",
")",
":",
"packages",
"=",
"[",
"]",
"for",
"d",
",",
"dirs",
",",
"_",
"in",
"os",
".",
"walk",
"(",
"top",
",",
"followlinks",
"=",
"True",
")",
":",
"if",
"os",
".",
"path",
".",
"exists",
"(",
"pjoin",
"(",
"d",
",",
"'__init__.py'",
")",
")",
":",
"packages",
".",
"append",
"(",
"os",
".",
"path",
".",
"relpath",
"(",
"d",
",",
"top",
")",
".",
"replace",
"(",
"os",
".",
"path",
".",
"sep",
",",
"'.'",
")",
")",
"elif",
"d",
"!=",
"top",
":",
"# Do not look for packages in subfolders if current is not a package",
"dirs",
"[",
":",
"]",
"=",
"[",
"]",
"return",
"packages"
] |
8889d09f1a0933e2cbee06d4874f720b075b29e8
|
test
|
create_cmdclass
|
Create a command class with the given optional prerelease class.
Parameters
----------
prerelease_cmd: (name, Command) tuple, optional
The command to run before releasing.
package_data_spec: dict, optional
A dictionary whose keys are the dotted package names and
whose values are a list of glob patterns.
data_files_spec: list, optional
A list of (path, dname, pattern) tuples where the path is the
`data_files` install path, dname is the source directory, and the
pattern is a glob pattern.
Notes
-----
We use specs so that we can find the files *after* the build
command has run.
The package data glob patterns should be relative paths from the package
folder containing the __init__.py file, which is given as the package
name.
e.g. `dict(foo=['./bar/*', './baz/**'])`
The data files directories should be absolute paths or relative paths
from the root directory of the repository. Data files are specified
differently from `package_data` because we need a separate path entry
for each nested folder in `data_files`, and this makes it easier to
parse.
e.g. `('share/foo/bar', 'pkgname/bizz, '*')`
|
setupbase.py
|
def create_cmdclass(prerelease_cmd=None, package_data_spec=None,
data_files_spec=None):
"""Create a command class with the given optional prerelease class.
Parameters
----------
prerelease_cmd: (name, Command) tuple, optional
The command to run before releasing.
package_data_spec: dict, optional
A dictionary whose keys are the dotted package names and
whose values are a list of glob patterns.
data_files_spec: list, optional
A list of (path, dname, pattern) tuples where the path is the
`data_files` install path, dname is the source directory, and the
pattern is a glob pattern.
Notes
-----
We use specs so that we can find the files *after* the build
command has run.
The package data glob patterns should be relative paths from the package
folder containing the __init__.py file, which is given as the package
name.
e.g. `dict(foo=['./bar/*', './baz/**'])`
The data files directories should be absolute paths or relative paths
from the root directory of the repository. Data files are specified
differently from `package_data` because we need a separate path entry
for each nested folder in `data_files`, and this makes it easier to
parse.
e.g. `('share/foo/bar', 'pkgname/bizz, '*')`
"""
wrapped = [prerelease_cmd] if prerelease_cmd else []
if package_data_spec or data_files_spec:
wrapped.append('handle_files')
wrapper = functools.partial(_wrap_command, wrapped)
handle_files = _get_file_handler(package_data_spec, data_files_spec)
if 'bdist_egg' in sys.argv:
egg = wrapper(bdist_egg, strict=True)
else:
egg = bdist_egg_disabled
cmdclass = dict(
build_py=wrapper(build_py, strict=is_repo),
bdist_egg=egg,
sdist=wrapper(sdist, strict=True),
handle_files=handle_files,
)
if bdist_wheel:
cmdclass['bdist_wheel'] = wrapper(bdist_wheel, strict=True)
cmdclass['develop'] = wrapper(develop, strict=True)
return cmdclass
|
def create_cmdclass(prerelease_cmd=None, package_data_spec=None,
data_files_spec=None):
"""Create a command class with the given optional prerelease class.
Parameters
----------
prerelease_cmd: (name, Command) tuple, optional
The command to run before releasing.
package_data_spec: dict, optional
A dictionary whose keys are the dotted package names and
whose values are a list of glob patterns.
data_files_spec: list, optional
A list of (path, dname, pattern) tuples where the path is the
`data_files` install path, dname is the source directory, and the
pattern is a glob pattern.
Notes
-----
We use specs so that we can find the files *after* the build
command has run.
The package data glob patterns should be relative paths from the package
folder containing the __init__.py file, which is given as the package
name.
e.g. `dict(foo=['./bar/*', './baz/**'])`
The data files directories should be absolute paths or relative paths
from the root directory of the repository. Data files are specified
differently from `package_data` because we need a separate path entry
for each nested folder in `data_files`, and this makes it easier to
parse.
e.g. `('share/foo/bar', 'pkgname/bizz, '*')`
"""
wrapped = [prerelease_cmd] if prerelease_cmd else []
if package_data_spec or data_files_spec:
wrapped.append('handle_files')
wrapper = functools.partial(_wrap_command, wrapped)
handle_files = _get_file_handler(package_data_spec, data_files_spec)
if 'bdist_egg' in sys.argv:
egg = wrapper(bdist_egg, strict=True)
else:
egg = bdist_egg_disabled
cmdclass = dict(
build_py=wrapper(build_py, strict=is_repo),
bdist_egg=egg,
sdist=wrapper(sdist, strict=True),
handle_files=handle_files,
)
if bdist_wheel:
cmdclass['bdist_wheel'] = wrapper(bdist_wheel, strict=True)
cmdclass['develop'] = wrapper(develop, strict=True)
return cmdclass
|
[
"Create",
"a",
"command",
"class",
"with",
"the",
"given",
"optional",
"prerelease",
"class",
"."
] |
jupyter-widgets/jupyterlab-sidecar
|
python
|
https://github.com/jupyter-widgets/jupyterlab-sidecar/blob/8889d09f1a0933e2cbee06d4874f720b075b29e8/setupbase.py#L138-L193
|
[
"def",
"create_cmdclass",
"(",
"prerelease_cmd",
"=",
"None",
",",
"package_data_spec",
"=",
"None",
",",
"data_files_spec",
"=",
"None",
")",
":",
"wrapped",
"=",
"[",
"prerelease_cmd",
"]",
"if",
"prerelease_cmd",
"else",
"[",
"]",
"if",
"package_data_spec",
"or",
"data_files_spec",
":",
"wrapped",
".",
"append",
"(",
"'handle_files'",
")",
"wrapper",
"=",
"functools",
".",
"partial",
"(",
"_wrap_command",
",",
"wrapped",
")",
"handle_files",
"=",
"_get_file_handler",
"(",
"package_data_spec",
",",
"data_files_spec",
")",
"if",
"'bdist_egg'",
"in",
"sys",
".",
"argv",
":",
"egg",
"=",
"wrapper",
"(",
"bdist_egg",
",",
"strict",
"=",
"True",
")",
"else",
":",
"egg",
"=",
"bdist_egg_disabled",
"cmdclass",
"=",
"dict",
"(",
"build_py",
"=",
"wrapper",
"(",
"build_py",
",",
"strict",
"=",
"is_repo",
")",
",",
"bdist_egg",
"=",
"egg",
",",
"sdist",
"=",
"wrapper",
"(",
"sdist",
",",
"strict",
"=",
"True",
")",
",",
"handle_files",
"=",
"handle_files",
",",
")",
"if",
"bdist_wheel",
":",
"cmdclass",
"[",
"'bdist_wheel'",
"]",
"=",
"wrapper",
"(",
"bdist_wheel",
",",
"strict",
"=",
"True",
")",
"cmdclass",
"[",
"'develop'",
"]",
"=",
"wrapper",
"(",
"develop",
",",
"strict",
"=",
"True",
")",
"return",
"cmdclass"
] |
8889d09f1a0933e2cbee06d4874f720b075b29e8
|
test
|
command_for_func
|
Create a command that calls the given function.
|
setupbase.py
|
def command_for_func(func):
"""Create a command that calls the given function."""
class FuncCommand(BaseCommand):
def run(self):
func()
update_package_data(self.distribution)
return FuncCommand
|
def command_for_func(func):
"""Create a command that calls the given function."""
class FuncCommand(BaseCommand):
def run(self):
func()
update_package_data(self.distribution)
return FuncCommand
|
[
"Create",
"a",
"command",
"that",
"calls",
"the",
"given",
"function",
"."
] |
jupyter-widgets/jupyterlab-sidecar
|
python
|
https://github.com/jupyter-widgets/jupyterlab-sidecar/blob/8889d09f1a0933e2cbee06d4874f720b075b29e8/setupbase.py#L196-L205
|
[
"def",
"command_for_func",
"(",
"func",
")",
":",
"class",
"FuncCommand",
"(",
"BaseCommand",
")",
":",
"def",
"run",
"(",
"self",
")",
":",
"func",
"(",
")",
"update_package_data",
"(",
"self",
".",
"distribution",
")",
"return",
"FuncCommand"
] |
8889d09f1a0933e2cbee06d4874f720b075b29e8
|
test
|
run
|
Echo a command before running it. Defaults to repo as cwd
|
setupbase.py
|
def run(cmd, **kwargs):
"""Echo a command before running it. Defaults to repo as cwd"""
log.info('> ' + list2cmdline(cmd))
kwargs.setdefault('cwd', HERE)
kwargs.setdefault('shell', os.name == 'nt')
if not isinstance(cmd, (list, tuple)) and os.name != 'nt':
cmd = shlex.split(cmd)
cmd[0] = which(cmd[0])
return subprocess.check_call(cmd, **kwargs)
|
def run(cmd, **kwargs):
"""Echo a command before running it. Defaults to repo as cwd"""
log.info('> ' + list2cmdline(cmd))
kwargs.setdefault('cwd', HERE)
kwargs.setdefault('shell', os.name == 'nt')
if not isinstance(cmd, (list, tuple)) and os.name != 'nt':
cmd = shlex.split(cmd)
cmd[0] = which(cmd[0])
return subprocess.check_call(cmd, **kwargs)
|
[
"Echo",
"a",
"command",
"before",
"running",
"it",
".",
"Defaults",
"to",
"repo",
"as",
"cwd"
] |
jupyter-widgets/jupyterlab-sidecar
|
python
|
https://github.com/jupyter-widgets/jupyterlab-sidecar/blob/8889d09f1a0933e2cbee06d4874f720b075b29e8/setupbase.py#L208-L216
|
[
"def",
"run",
"(",
"cmd",
",",
"*",
"*",
"kwargs",
")",
":",
"log",
".",
"info",
"(",
"'> '",
"+",
"list2cmdline",
"(",
"cmd",
")",
")",
"kwargs",
".",
"setdefault",
"(",
"'cwd'",
",",
"HERE",
")",
"kwargs",
".",
"setdefault",
"(",
"'shell'",
",",
"os",
".",
"name",
"==",
"'nt'",
")",
"if",
"not",
"isinstance",
"(",
"cmd",
",",
"(",
"list",
",",
"tuple",
")",
")",
"and",
"os",
".",
"name",
"!=",
"'nt'",
":",
"cmd",
"=",
"shlex",
".",
"split",
"(",
"cmd",
")",
"cmd",
"[",
"0",
"]",
"=",
"which",
"(",
"cmd",
"[",
"0",
"]",
")",
"return",
"subprocess",
".",
"check_call",
"(",
"cmd",
",",
"*",
"*",
"kwargs",
")"
] |
8889d09f1a0933e2cbee06d4874f720b075b29e8
|
test
|
recursive_mtime
|
Gets the newest/oldest mtime for all files in a directory.
|
setupbase.py
|
def recursive_mtime(path, newest=True):
"""Gets the newest/oldest mtime for all files in a directory."""
if os.path.isfile(path):
return mtime(path)
current_extreme = None
for dirname, dirnames, filenames in os.walk(path, topdown=False):
for filename in filenames:
mt = mtime(pjoin(dirname, filename))
if newest: # Put outside of loop?
if mt >= (current_extreme or mt):
current_extreme = mt
elif mt <= (current_extreme or mt):
current_extreme = mt
return current_extreme
|
def recursive_mtime(path, newest=True):
"""Gets the newest/oldest mtime for all files in a directory."""
if os.path.isfile(path):
return mtime(path)
current_extreme = None
for dirname, dirnames, filenames in os.walk(path, topdown=False):
for filename in filenames:
mt = mtime(pjoin(dirname, filename))
if newest: # Put outside of loop?
if mt >= (current_extreme or mt):
current_extreme = mt
elif mt <= (current_extreme or mt):
current_extreme = mt
return current_extreme
|
[
"Gets",
"the",
"newest",
"/",
"oldest",
"mtime",
"for",
"all",
"files",
"in",
"a",
"directory",
"."
] |
jupyter-widgets/jupyterlab-sidecar
|
python
|
https://github.com/jupyter-widgets/jupyterlab-sidecar/blob/8889d09f1a0933e2cbee06d4874f720b075b29e8/setupbase.py#L295-L308
|
[
"def",
"recursive_mtime",
"(",
"path",
",",
"newest",
"=",
"True",
")",
":",
"if",
"os",
".",
"path",
".",
"isfile",
"(",
"path",
")",
":",
"return",
"mtime",
"(",
"path",
")",
"current_extreme",
"=",
"None",
"for",
"dirname",
",",
"dirnames",
",",
"filenames",
"in",
"os",
".",
"walk",
"(",
"path",
",",
"topdown",
"=",
"False",
")",
":",
"for",
"filename",
"in",
"filenames",
":",
"mt",
"=",
"mtime",
"(",
"pjoin",
"(",
"dirname",
",",
"filename",
")",
")",
"if",
"newest",
":",
"# Put outside of loop?",
"if",
"mt",
">=",
"(",
"current_extreme",
"or",
"mt",
")",
":",
"current_extreme",
"=",
"mt",
"elif",
"mt",
"<=",
"(",
"current_extreme",
"or",
"mt",
")",
":",
"current_extreme",
"=",
"mt",
"return",
"current_extreme"
] |
8889d09f1a0933e2cbee06d4874f720b075b29e8
|
test
|
ensure_targets
|
Return a Command that checks that certain files exist.
Raises a ValueError if any of the files are missing.
Note: The check is skipped if the `--skip-npm` flag is used.
|
setupbase.py
|
def ensure_targets(targets):
"""Return a Command that checks that certain files exist.
Raises a ValueError if any of the files are missing.
Note: The check is skipped if the `--skip-npm` flag is used.
"""
class TargetsCheck(BaseCommand):
def run(self):
if skip_npm:
log.info('Skipping target checks')
return
missing = [t for t in targets if not os.path.exists(t)]
if missing:
raise ValueError(('missing files: %s' % missing))
return TargetsCheck
|
def ensure_targets(targets):
"""Return a Command that checks that certain files exist.
Raises a ValueError if any of the files are missing.
Note: The check is skipped if the `--skip-npm` flag is used.
"""
class TargetsCheck(BaseCommand):
def run(self):
if skip_npm:
log.info('Skipping target checks')
return
missing = [t for t in targets if not os.path.exists(t)]
if missing:
raise ValueError(('missing files: %s' % missing))
return TargetsCheck
|
[
"Return",
"a",
"Command",
"that",
"checks",
"that",
"certain",
"files",
"exist",
"."
] |
jupyter-widgets/jupyterlab-sidecar
|
python
|
https://github.com/jupyter-widgets/jupyterlab-sidecar/blob/8889d09f1a0933e2cbee06d4874f720b075b29e8/setupbase.py#L375-L392
|
[
"def",
"ensure_targets",
"(",
"targets",
")",
":",
"class",
"TargetsCheck",
"(",
"BaseCommand",
")",
":",
"def",
"run",
"(",
"self",
")",
":",
"if",
"skip_npm",
":",
"log",
".",
"info",
"(",
"'Skipping target checks'",
")",
"return",
"missing",
"=",
"[",
"t",
"for",
"t",
"in",
"targets",
"if",
"not",
"os",
".",
"path",
".",
"exists",
"(",
"t",
")",
"]",
"if",
"missing",
":",
"raise",
"ValueError",
"(",
"(",
"'missing files: %s'",
"%",
"missing",
")",
")",
"return",
"TargetsCheck"
] |
8889d09f1a0933e2cbee06d4874f720b075b29e8
|
test
|
_wrap_command
|
Wrap a setup command
Parameters
----------
cmds: list(str)
The names of the other commands to run prior to the command.
strict: boolean, optional
Whether to raise errors when a pre-command fails.
|
setupbase.py
|
def _wrap_command(cmds, cls, strict=True):
"""Wrap a setup command
Parameters
----------
cmds: list(str)
The names of the other commands to run prior to the command.
strict: boolean, optional
Whether to raise errors when a pre-command fails.
"""
class WrappedCommand(cls):
def run(self):
if not getattr(self, 'uninstall', None):
try:
[self.run_command(cmd) for cmd in cmds]
except Exception:
if strict:
raise
else:
pass
# update package data
update_package_data(self.distribution)
result = cls.run(self)
return result
return WrappedCommand
|
def _wrap_command(cmds, cls, strict=True):
"""Wrap a setup command
Parameters
----------
cmds: list(str)
The names of the other commands to run prior to the command.
strict: boolean, optional
Whether to raise errors when a pre-command fails.
"""
class WrappedCommand(cls):
def run(self):
if not getattr(self, 'uninstall', None):
try:
[self.run_command(cmd) for cmd in cmds]
except Exception:
if strict:
raise
else:
pass
# update package data
update_package_data(self.distribution)
result = cls.run(self)
return result
return WrappedCommand
|
[
"Wrap",
"a",
"setup",
"command"
] |
jupyter-widgets/jupyterlab-sidecar
|
python
|
https://github.com/jupyter-widgets/jupyterlab-sidecar/blob/8889d09f1a0933e2cbee06d4874f720b075b29e8/setupbase.py#L454-L480
|
[
"def",
"_wrap_command",
"(",
"cmds",
",",
"cls",
",",
"strict",
"=",
"True",
")",
":",
"class",
"WrappedCommand",
"(",
"cls",
")",
":",
"def",
"run",
"(",
"self",
")",
":",
"if",
"not",
"getattr",
"(",
"self",
",",
"'uninstall'",
",",
"None",
")",
":",
"try",
":",
"[",
"self",
".",
"run_command",
"(",
"cmd",
")",
"for",
"cmd",
"in",
"cmds",
"]",
"except",
"Exception",
":",
"if",
"strict",
":",
"raise",
"else",
":",
"pass",
"# update package data",
"update_package_data",
"(",
"self",
".",
"distribution",
")",
"result",
"=",
"cls",
".",
"run",
"(",
"self",
")",
"return",
"result",
"return",
"WrappedCommand"
] |
8889d09f1a0933e2cbee06d4874f720b075b29e8
|
test
|
_get_file_handler
|
Get a package_data and data_files handler command.
|
setupbase.py
|
def _get_file_handler(package_data_spec, data_files_spec):
"""Get a package_data and data_files handler command.
"""
class FileHandler(BaseCommand):
def run(self):
package_data = self.distribution.package_data
package_spec = package_data_spec or dict()
for (key, patterns) in package_spec.items():
package_data[key] = _get_package_data(key, patterns)
self.distribution.data_files = _get_data_files(
data_files_spec, self.distribution.data_files
)
return FileHandler
|
def _get_file_handler(package_data_spec, data_files_spec):
"""Get a package_data and data_files handler command.
"""
class FileHandler(BaseCommand):
def run(self):
package_data = self.distribution.package_data
package_spec = package_data_spec or dict()
for (key, patterns) in package_spec.items():
package_data[key] = _get_package_data(key, patterns)
self.distribution.data_files = _get_data_files(
data_files_spec, self.distribution.data_files
)
return FileHandler
|
[
"Get",
"a",
"package_data",
"and",
"data_files",
"handler",
"command",
"."
] |
jupyter-widgets/jupyterlab-sidecar
|
python
|
https://github.com/jupyter-widgets/jupyterlab-sidecar/blob/8889d09f1a0933e2cbee06d4874f720b075b29e8/setupbase.py#L483-L499
|
[
"def",
"_get_file_handler",
"(",
"package_data_spec",
",",
"data_files_spec",
")",
":",
"class",
"FileHandler",
"(",
"BaseCommand",
")",
":",
"def",
"run",
"(",
"self",
")",
":",
"package_data",
"=",
"self",
".",
"distribution",
".",
"package_data",
"package_spec",
"=",
"package_data_spec",
"or",
"dict",
"(",
")",
"for",
"(",
"key",
",",
"patterns",
")",
"in",
"package_spec",
".",
"items",
"(",
")",
":",
"package_data",
"[",
"key",
"]",
"=",
"_get_package_data",
"(",
"key",
",",
"patterns",
")",
"self",
".",
"distribution",
".",
"data_files",
"=",
"_get_data_files",
"(",
"data_files_spec",
",",
"self",
".",
"distribution",
".",
"data_files",
")",
"return",
"FileHandler"
] |
8889d09f1a0933e2cbee06d4874f720b075b29e8
|
test
|
_get_data_files
|
Expand data file specs into valid data files metadata.
Parameters
----------
data_specs: list of tuples
See [createcmdclass] for description.
existing: list of tuples
The existing distribution data_files metadata.
Returns
-------
A valid list of data_files items.
|
setupbase.py
|
def _get_data_files(data_specs, existing):
"""Expand data file specs into valid data files metadata.
Parameters
----------
data_specs: list of tuples
See [createcmdclass] for description.
existing: list of tuples
The existing distribution data_files metadata.
Returns
-------
A valid list of data_files items.
"""
# Extract the existing data files into a staging object.
file_data = defaultdict(list)
for (path, files) in existing or []:
file_data[path] = files
# Extract the files and assign them to the proper data
# files path.
for (path, dname, pattern) in data_specs or []:
dname = dname.replace(os.sep, '/')
offset = len(dname) + 1
files = _get_files(pjoin(dname, pattern))
for fname in files:
# Normalize the path.
root = os.path.dirname(fname)
full_path = '/'.join([path, root[offset:]])
if full_path.endswith('/'):
full_path = full_path[:-1]
file_data[full_path].append(fname)
# Construct the data files spec.
data_files = []
for (path, files) in file_data.items():
data_files.append((path, files))
return data_files
|
def _get_data_files(data_specs, existing):
"""Expand data file specs into valid data files metadata.
Parameters
----------
data_specs: list of tuples
See [createcmdclass] for description.
existing: list of tuples
The existing distribution data_files metadata.
Returns
-------
A valid list of data_files items.
"""
# Extract the existing data files into a staging object.
file_data = defaultdict(list)
for (path, files) in existing or []:
file_data[path] = files
# Extract the files and assign them to the proper data
# files path.
for (path, dname, pattern) in data_specs or []:
dname = dname.replace(os.sep, '/')
offset = len(dname) + 1
files = _get_files(pjoin(dname, pattern))
for fname in files:
# Normalize the path.
root = os.path.dirname(fname)
full_path = '/'.join([path, root[offset:]])
if full_path.endswith('/'):
full_path = full_path[:-1]
file_data[full_path].append(fname)
# Construct the data files spec.
data_files = []
for (path, files) in file_data.items():
data_files.append((path, files))
return data_files
|
[
"Expand",
"data",
"file",
"specs",
"into",
"valid",
"data",
"files",
"metadata",
"."
] |
jupyter-widgets/jupyterlab-sidecar
|
python
|
https://github.com/jupyter-widgets/jupyterlab-sidecar/blob/8889d09f1a0933e2cbee06d4874f720b075b29e8/setupbase.py#L502-L540
|
[
"def",
"_get_data_files",
"(",
"data_specs",
",",
"existing",
")",
":",
"# Extract the existing data files into a staging object.",
"file_data",
"=",
"defaultdict",
"(",
"list",
")",
"for",
"(",
"path",
",",
"files",
")",
"in",
"existing",
"or",
"[",
"]",
":",
"file_data",
"[",
"path",
"]",
"=",
"files",
"# Extract the files and assign them to the proper data",
"# files path.",
"for",
"(",
"path",
",",
"dname",
",",
"pattern",
")",
"in",
"data_specs",
"or",
"[",
"]",
":",
"dname",
"=",
"dname",
".",
"replace",
"(",
"os",
".",
"sep",
",",
"'/'",
")",
"offset",
"=",
"len",
"(",
"dname",
")",
"+",
"1",
"files",
"=",
"_get_files",
"(",
"pjoin",
"(",
"dname",
",",
"pattern",
")",
")",
"for",
"fname",
"in",
"files",
":",
"# Normalize the path.",
"root",
"=",
"os",
".",
"path",
".",
"dirname",
"(",
"fname",
")",
"full_path",
"=",
"'/'",
".",
"join",
"(",
"[",
"path",
",",
"root",
"[",
"offset",
":",
"]",
"]",
")",
"if",
"full_path",
".",
"endswith",
"(",
"'/'",
")",
":",
"full_path",
"=",
"full_path",
"[",
":",
"-",
"1",
"]",
"file_data",
"[",
"full_path",
"]",
".",
"append",
"(",
"fname",
")",
"# Construct the data files spec.",
"data_files",
"=",
"[",
"]",
"for",
"(",
"path",
",",
"files",
")",
"in",
"file_data",
".",
"items",
"(",
")",
":",
"data_files",
".",
"append",
"(",
"(",
"path",
",",
"files",
")",
")",
"return",
"data_files"
] |
8889d09f1a0933e2cbee06d4874f720b075b29e8
|
test
|
_get_package_data
|
Expand file patterns to a list of `package_data` paths.
Parameters
-----------
root: str
The relative path to the package root from `HERE`.
file_patterns: list or str, optional
A list of glob patterns for the data file locations.
The globs can be recursive if they include a `**`.
They should be relative paths from the root or
absolute paths. If not given, all files will be used.
Note:
Files in `node_modules` are ignored.
|
setupbase.py
|
def _get_package_data(root, file_patterns=None):
"""Expand file patterns to a list of `package_data` paths.
Parameters
-----------
root: str
The relative path to the package root from `HERE`.
file_patterns: list or str, optional
A list of glob patterns for the data file locations.
The globs can be recursive if they include a `**`.
They should be relative paths from the root or
absolute paths. If not given, all files will be used.
Note:
Files in `node_modules` are ignored.
"""
if file_patterns is None:
file_patterns = ['*']
return _get_files(file_patterns, pjoin(HERE, root))
|
def _get_package_data(root, file_patterns=None):
"""Expand file patterns to a list of `package_data` paths.
Parameters
-----------
root: str
The relative path to the package root from `HERE`.
file_patterns: list or str, optional
A list of glob patterns for the data file locations.
The globs can be recursive if they include a `**`.
They should be relative paths from the root or
absolute paths. If not given, all files will be used.
Note:
Files in `node_modules` are ignored.
"""
if file_patterns is None:
file_patterns = ['*']
return _get_files(file_patterns, pjoin(HERE, root))
|
[
"Expand",
"file",
"patterns",
"to",
"a",
"list",
"of",
"package_data",
"paths",
"."
] |
jupyter-widgets/jupyterlab-sidecar
|
python
|
https://github.com/jupyter-widgets/jupyterlab-sidecar/blob/8889d09f1a0933e2cbee06d4874f720b075b29e8/setupbase.py#L583-L601
|
[
"def",
"_get_package_data",
"(",
"root",
",",
"file_patterns",
"=",
"None",
")",
":",
"if",
"file_patterns",
"is",
"None",
":",
"file_patterns",
"=",
"[",
"'*'",
"]",
"return",
"_get_files",
"(",
"file_patterns",
",",
"pjoin",
"(",
"HERE",
",",
"root",
")",
")"
] |
8889d09f1a0933e2cbee06d4874f720b075b29e8
|
test
|
_compile_pattern
|
Translate and compile a glob pattern to a regular expression matcher.
|
setupbase.py
|
def _compile_pattern(pat, ignore_case=True):
"""Translate and compile a glob pattern to a regular expression matcher."""
if isinstance(pat, bytes):
pat_str = pat.decode('ISO-8859-1')
res_str = _translate_glob(pat_str)
res = res_str.encode('ISO-8859-1')
else:
res = _translate_glob(pat)
flags = re.IGNORECASE if ignore_case else 0
return re.compile(res, flags=flags).match
|
def _compile_pattern(pat, ignore_case=True):
"""Translate and compile a glob pattern to a regular expression matcher."""
if isinstance(pat, bytes):
pat_str = pat.decode('ISO-8859-1')
res_str = _translate_glob(pat_str)
res = res_str.encode('ISO-8859-1')
else:
res = _translate_glob(pat)
flags = re.IGNORECASE if ignore_case else 0
return re.compile(res, flags=flags).match
|
[
"Translate",
"and",
"compile",
"a",
"glob",
"pattern",
"to",
"a",
"regular",
"expression",
"matcher",
"."
] |
jupyter-widgets/jupyterlab-sidecar
|
python
|
https://github.com/jupyter-widgets/jupyterlab-sidecar/blob/8889d09f1a0933e2cbee06d4874f720b075b29e8/setupbase.py#L604-L613
|
[
"def",
"_compile_pattern",
"(",
"pat",
",",
"ignore_case",
"=",
"True",
")",
":",
"if",
"isinstance",
"(",
"pat",
",",
"bytes",
")",
":",
"pat_str",
"=",
"pat",
".",
"decode",
"(",
"'ISO-8859-1'",
")",
"res_str",
"=",
"_translate_glob",
"(",
"pat_str",
")",
"res",
"=",
"res_str",
".",
"encode",
"(",
"'ISO-8859-1'",
")",
"else",
":",
"res",
"=",
"_translate_glob",
"(",
"pat",
")",
"flags",
"=",
"re",
".",
"IGNORECASE",
"if",
"ignore_case",
"else",
"0",
"return",
"re",
".",
"compile",
"(",
"res",
",",
"flags",
"=",
"flags",
")",
".",
"match"
] |
8889d09f1a0933e2cbee06d4874f720b075b29e8
|
test
|
_iexplode_path
|
Iterate over all the parts of a path.
Splits path recursively with os.path.split().
|
setupbase.py
|
def _iexplode_path(path):
"""Iterate over all the parts of a path.
Splits path recursively with os.path.split().
"""
(head, tail) = os.path.split(path)
if not head or (not tail and head == path):
if head:
yield head
if tail or not head:
yield tail
return
for p in _iexplode_path(head):
yield p
yield tail
|
def _iexplode_path(path):
"""Iterate over all the parts of a path.
Splits path recursively with os.path.split().
"""
(head, tail) = os.path.split(path)
if not head or (not tail and head == path):
if head:
yield head
if tail or not head:
yield tail
return
for p in _iexplode_path(head):
yield p
yield tail
|
[
"Iterate",
"over",
"all",
"the",
"parts",
"of",
"a",
"path",
"."
] |
jupyter-widgets/jupyterlab-sidecar
|
python
|
https://github.com/jupyter-widgets/jupyterlab-sidecar/blob/8889d09f1a0933e2cbee06d4874f720b075b29e8/setupbase.py#L616-L630
|
[
"def",
"_iexplode_path",
"(",
"path",
")",
":",
"(",
"head",
",",
"tail",
")",
"=",
"os",
".",
"path",
".",
"split",
"(",
"path",
")",
"if",
"not",
"head",
"or",
"(",
"not",
"tail",
"and",
"head",
"==",
"path",
")",
":",
"if",
"head",
":",
"yield",
"head",
"if",
"tail",
"or",
"not",
"head",
":",
"yield",
"tail",
"return",
"for",
"p",
"in",
"_iexplode_path",
"(",
"head",
")",
":",
"yield",
"p",
"yield",
"tail"
] |
8889d09f1a0933e2cbee06d4874f720b075b29e8
|
test
|
_translate_glob
|
Translate a glob PATTERN to a regular expression.
|
setupbase.py
|
def _translate_glob(pat):
"""Translate a glob PATTERN to a regular expression."""
translated_parts = []
for part in _iexplode_path(pat):
translated_parts.append(_translate_glob_part(part))
os_sep_class = '[%s]' % re.escape(SEPARATORS)
res = _join_translated(translated_parts, os_sep_class)
return '{res}\\Z(?ms)'.format(res=res)
|
def _translate_glob(pat):
"""Translate a glob PATTERN to a regular expression."""
translated_parts = []
for part in _iexplode_path(pat):
translated_parts.append(_translate_glob_part(part))
os_sep_class = '[%s]' % re.escape(SEPARATORS)
res = _join_translated(translated_parts, os_sep_class)
return '{res}\\Z(?ms)'.format(res=res)
|
[
"Translate",
"a",
"glob",
"PATTERN",
"to",
"a",
"regular",
"expression",
"."
] |
jupyter-widgets/jupyterlab-sidecar
|
python
|
https://github.com/jupyter-widgets/jupyterlab-sidecar/blob/8889d09f1a0933e2cbee06d4874f720b075b29e8/setupbase.py#L633-L640
|
[
"def",
"_translate_glob",
"(",
"pat",
")",
":",
"translated_parts",
"=",
"[",
"]",
"for",
"part",
"in",
"_iexplode_path",
"(",
"pat",
")",
":",
"translated_parts",
".",
"append",
"(",
"_translate_glob_part",
"(",
"part",
")",
")",
"os_sep_class",
"=",
"'[%s]'",
"%",
"re",
".",
"escape",
"(",
"SEPARATORS",
")",
"res",
"=",
"_join_translated",
"(",
"translated_parts",
",",
"os_sep_class",
")",
"return",
"'{res}\\\\Z(?ms)'",
".",
"format",
"(",
"res",
"=",
"res",
")"
] |
8889d09f1a0933e2cbee06d4874f720b075b29e8
|
test
|
_join_translated
|
Join translated glob pattern parts.
This is different from a simple join, as care need to be taken
to allow ** to match ZERO or more directories.
|
setupbase.py
|
def _join_translated(translated_parts, os_sep_class):
"""Join translated glob pattern parts.
This is different from a simple join, as care need to be taken
to allow ** to match ZERO or more directories.
"""
res = ''
for part in translated_parts[:-1]:
if part == '.*':
# drop separator, since it is optional
# (** matches ZERO or more dirs)
res += part
else:
res += part + os_sep_class
if translated_parts[-1] == '.*':
# Final part is **
res += '.+'
# Follow stdlib/git convention of matching all sub files/directories:
res += '({os_sep_class}?.*)?'.format(os_sep_class=os_sep_class)
else:
res += translated_parts[-1]
return res
|
def _join_translated(translated_parts, os_sep_class):
"""Join translated glob pattern parts.
This is different from a simple join, as care need to be taken
to allow ** to match ZERO or more directories.
"""
res = ''
for part in translated_parts[:-1]:
if part == '.*':
# drop separator, since it is optional
# (** matches ZERO or more dirs)
res += part
else:
res += part + os_sep_class
if translated_parts[-1] == '.*':
# Final part is **
res += '.+'
# Follow stdlib/git convention of matching all sub files/directories:
res += '({os_sep_class}?.*)?'.format(os_sep_class=os_sep_class)
else:
res += translated_parts[-1]
return res
|
[
"Join",
"translated",
"glob",
"pattern",
"parts",
"."
] |
jupyter-widgets/jupyterlab-sidecar
|
python
|
https://github.com/jupyter-widgets/jupyterlab-sidecar/blob/8889d09f1a0933e2cbee06d4874f720b075b29e8/setupbase.py#L643-L665
|
[
"def",
"_join_translated",
"(",
"translated_parts",
",",
"os_sep_class",
")",
":",
"res",
"=",
"''",
"for",
"part",
"in",
"translated_parts",
"[",
":",
"-",
"1",
"]",
":",
"if",
"part",
"==",
"'.*'",
":",
"# drop separator, since it is optional",
"# (** matches ZERO or more dirs)",
"res",
"+=",
"part",
"else",
":",
"res",
"+=",
"part",
"+",
"os_sep_class",
"if",
"translated_parts",
"[",
"-",
"1",
"]",
"==",
"'.*'",
":",
"# Final part is **",
"res",
"+=",
"'.+'",
"# Follow stdlib/git convention of matching all sub files/directories:",
"res",
"+=",
"'({os_sep_class}?.*)?'",
".",
"format",
"(",
"os_sep_class",
"=",
"os_sep_class",
")",
"else",
":",
"res",
"+=",
"translated_parts",
"[",
"-",
"1",
"]",
"return",
"res"
] |
8889d09f1a0933e2cbee06d4874f720b075b29e8
|
test
|
_translate_glob_part
|
Translate a glob PATTERN PART to a regular expression.
|
setupbase.py
|
def _translate_glob_part(pat):
"""Translate a glob PATTERN PART to a regular expression."""
# Code modified from Python 3 standard lib fnmatch:
if pat == '**':
return '.*'
i, n = 0, len(pat)
res = []
while i < n:
c = pat[i]
i = i + 1
if c == '*':
# Match anything but path separators:
res.append('[^%s]*' % SEPARATORS)
elif c == '?':
res.append('[^%s]?' % SEPARATORS)
elif c == '[':
j = i
if j < n and pat[j] == '!':
j = j + 1
if j < n and pat[j] == ']':
j = j + 1
while j < n and pat[j] != ']':
j = j + 1
if j >= n:
res.append('\\[')
else:
stuff = pat[i:j].replace('\\', '\\\\')
i = j + 1
if stuff[0] == '!':
stuff = '^' + stuff[1:]
elif stuff[0] == '^':
stuff = '\\' + stuff
res.append('[%s]' % stuff)
else:
res.append(re.escape(c))
return ''.join(res)
|
def _translate_glob_part(pat):
"""Translate a glob PATTERN PART to a regular expression."""
# Code modified from Python 3 standard lib fnmatch:
if pat == '**':
return '.*'
i, n = 0, len(pat)
res = []
while i < n:
c = pat[i]
i = i + 1
if c == '*':
# Match anything but path separators:
res.append('[^%s]*' % SEPARATORS)
elif c == '?':
res.append('[^%s]?' % SEPARATORS)
elif c == '[':
j = i
if j < n and pat[j] == '!':
j = j + 1
if j < n and pat[j] == ']':
j = j + 1
while j < n and pat[j] != ']':
j = j + 1
if j >= n:
res.append('\\[')
else:
stuff = pat[i:j].replace('\\', '\\\\')
i = j + 1
if stuff[0] == '!':
stuff = '^' + stuff[1:]
elif stuff[0] == '^':
stuff = '\\' + stuff
res.append('[%s]' % stuff)
else:
res.append(re.escape(c))
return ''.join(res)
|
[
"Translate",
"a",
"glob",
"PATTERN",
"PART",
"to",
"a",
"regular",
"expression",
"."
] |
jupyter-widgets/jupyterlab-sidecar
|
python
|
https://github.com/jupyter-widgets/jupyterlab-sidecar/blob/8889d09f1a0933e2cbee06d4874f720b075b29e8/setupbase.py#L668-L703
|
[
"def",
"_translate_glob_part",
"(",
"pat",
")",
":",
"# Code modified from Python 3 standard lib fnmatch:",
"if",
"pat",
"==",
"'**'",
":",
"return",
"'.*'",
"i",
",",
"n",
"=",
"0",
",",
"len",
"(",
"pat",
")",
"res",
"=",
"[",
"]",
"while",
"i",
"<",
"n",
":",
"c",
"=",
"pat",
"[",
"i",
"]",
"i",
"=",
"i",
"+",
"1",
"if",
"c",
"==",
"'*'",
":",
"# Match anything but path separators:",
"res",
".",
"append",
"(",
"'[^%s]*'",
"%",
"SEPARATORS",
")",
"elif",
"c",
"==",
"'?'",
":",
"res",
".",
"append",
"(",
"'[^%s]?'",
"%",
"SEPARATORS",
")",
"elif",
"c",
"==",
"'['",
":",
"j",
"=",
"i",
"if",
"j",
"<",
"n",
"and",
"pat",
"[",
"j",
"]",
"==",
"'!'",
":",
"j",
"=",
"j",
"+",
"1",
"if",
"j",
"<",
"n",
"and",
"pat",
"[",
"j",
"]",
"==",
"']'",
":",
"j",
"=",
"j",
"+",
"1",
"while",
"j",
"<",
"n",
"and",
"pat",
"[",
"j",
"]",
"!=",
"']'",
":",
"j",
"=",
"j",
"+",
"1",
"if",
"j",
">=",
"n",
":",
"res",
".",
"append",
"(",
"'\\\\['",
")",
"else",
":",
"stuff",
"=",
"pat",
"[",
"i",
":",
"j",
"]",
".",
"replace",
"(",
"'\\\\'",
",",
"'\\\\\\\\'",
")",
"i",
"=",
"j",
"+",
"1",
"if",
"stuff",
"[",
"0",
"]",
"==",
"'!'",
":",
"stuff",
"=",
"'^'",
"+",
"stuff",
"[",
"1",
":",
"]",
"elif",
"stuff",
"[",
"0",
"]",
"==",
"'^'",
":",
"stuff",
"=",
"'\\\\'",
"+",
"stuff",
"res",
".",
"append",
"(",
"'[%s]'",
"%",
"stuff",
")",
"else",
":",
"res",
".",
"append",
"(",
"re",
".",
"escape",
"(",
"c",
")",
")",
"return",
"''",
".",
"join",
"(",
"res",
")"
] |
8889d09f1a0933e2cbee06d4874f720b075b29e8
|
test
|
PostgresDbWriter.truncate
|
Send DDL to truncate the specified `table`
:Parameters:
- `table`: an instance of a :py:class:`mysql2pgsql.lib.mysql_reader.MysqlReader.Table` object that represents the table to read/write.
Returns None
|
mysql2pgsql/lib/postgres_db_writer.py
|
def truncate(self, table):
"""Send DDL to truncate the specified `table`
:Parameters:
- `table`: an instance of a :py:class:`mysql2pgsql.lib.mysql_reader.MysqlReader.Table` object that represents the table to read/write.
Returns None
"""
truncate_sql, serial_key_sql = super(PostgresDbWriter, self).truncate(table)
self.execute(truncate_sql)
if serial_key_sql:
self.execute(serial_key_sql)
|
def truncate(self, table):
"""Send DDL to truncate the specified `table`
:Parameters:
- `table`: an instance of a :py:class:`mysql2pgsql.lib.mysql_reader.MysqlReader.Table` object that represents the table to read/write.
Returns None
"""
truncate_sql, serial_key_sql = super(PostgresDbWriter, self).truncate(table)
self.execute(truncate_sql)
if serial_key_sql:
self.execute(serial_key_sql)
|
[
"Send",
"DDL",
"to",
"truncate",
"the",
"specified",
"table"
] |
philipsoutham/py-mysql2pgsql
|
python
|
https://github.com/philipsoutham/py-mysql2pgsql/blob/66dc2a3a3119263b3fe77300fb636346509787ef/mysql2pgsql/lib/postgres_db_writer.py#L130-L141
|
[
"def",
"truncate",
"(",
"self",
",",
"table",
")",
":",
"truncate_sql",
",",
"serial_key_sql",
"=",
"super",
"(",
"PostgresDbWriter",
",",
"self",
")",
".",
"truncate",
"(",
"table",
")",
"self",
".",
"execute",
"(",
"truncate_sql",
")",
"if",
"serial_key_sql",
":",
"self",
".",
"execute",
"(",
"serial_key_sql",
")"
] |
66dc2a3a3119263b3fe77300fb636346509787ef
|
test
|
PostgresDbWriter.write_table
|
Send DDL to create the specified `table`
:Parameters:
- `table`: an instance of a :py:class:`mysql2pgsql.lib.mysql_reader.MysqlReader.Table` object that represents the table to read/write.
Returns None
|
mysql2pgsql/lib/postgres_db_writer.py
|
def write_table(self, table):
"""Send DDL to create the specified `table`
:Parameters:
- `table`: an instance of a :py:class:`mysql2pgsql.lib.mysql_reader.MysqlReader.Table` object that represents the table to read/write.
Returns None
"""
table_sql, serial_key_sql = super(PostgresDbWriter, self).write_table(table)
for sql in serial_key_sql + table_sql:
self.execute(sql)
|
def write_table(self, table):
"""Send DDL to create the specified `table`
:Parameters:
- `table`: an instance of a :py:class:`mysql2pgsql.lib.mysql_reader.MysqlReader.Table` object that represents the table to read/write.
Returns None
"""
table_sql, serial_key_sql = super(PostgresDbWriter, self).write_table(table)
for sql in serial_key_sql + table_sql:
self.execute(sql)
|
[
"Send",
"DDL",
"to",
"create",
"the",
"specified",
"table"
] |
philipsoutham/py-mysql2pgsql
|
python
|
https://github.com/philipsoutham/py-mysql2pgsql/blob/66dc2a3a3119263b3fe77300fb636346509787ef/mysql2pgsql/lib/postgres_db_writer.py#L144-L154
|
[
"def",
"write_table",
"(",
"self",
",",
"table",
")",
":",
"table_sql",
",",
"serial_key_sql",
"=",
"super",
"(",
"PostgresDbWriter",
",",
"self",
")",
".",
"write_table",
"(",
"table",
")",
"for",
"sql",
"in",
"serial_key_sql",
"+",
"table_sql",
":",
"self",
".",
"execute",
"(",
"sql",
")"
] |
66dc2a3a3119263b3fe77300fb636346509787ef
|
test
|
PostgresDbWriter.write_indexes
|
Send DDL to create the specified `table` indexes
:Parameters:
- `table`: an instance of a :py:class:`mysql2pgsql.lib.mysql_reader.MysqlReader.Table` object that represents the table to read/write.
Returns None
|
mysql2pgsql/lib/postgres_db_writer.py
|
def write_indexes(self, table):
"""Send DDL to create the specified `table` indexes
:Parameters:
- `table`: an instance of a :py:class:`mysql2pgsql.lib.mysql_reader.MysqlReader.Table` object that represents the table to read/write.
Returns None
"""
index_sql = super(PostgresDbWriter, self).write_indexes(table)
for sql in index_sql:
self.execute(sql)
|
def write_indexes(self, table):
"""Send DDL to create the specified `table` indexes
:Parameters:
- `table`: an instance of a :py:class:`mysql2pgsql.lib.mysql_reader.MysqlReader.Table` object that represents the table to read/write.
Returns None
"""
index_sql = super(PostgresDbWriter, self).write_indexes(table)
for sql in index_sql:
self.execute(sql)
|
[
"Send",
"DDL",
"to",
"create",
"the",
"specified",
"table",
"indexes"
] |
philipsoutham/py-mysql2pgsql
|
python
|
https://github.com/philipsoutham/py-mysql2pgsql/blob/66dc2a3a3119263b3fe77300fb636346509787ef/mysql2pgsql/lib/postgres_db_writer.py#L157-L167
|
[
"def",
"write_indexes",
"(",
"self",
",",
"table",
")",
":",
"index_sql",
"=",
"super",
"(",
"PostgresDbWriter",
",",
"self",
")",
".",
"write_indexes",
"(",
"table",
")",
"for",
"sql",
"in",
"index_sql",
":",
"self",
".",
"execute",
"(",
"sql",
")"
] |
66dc2a3a3119263b3fe77300fb636346509787ef
|
test
|
PostgresDbWriter.write_triggers
|
Send DDL to create the specified `table` triggers
:Parameters:
- `table`: an instance of a :py:class:`mysql2pgsql.lib.mysql_reader.MysqlReader.Table` object that represents the table to read/write.
Returns None
|
mysql2pgsql/lib/postgres_db_writer.py
|
def write_triggers(self, table):
"""Send DDL to create the specified `table` triggers
:Parameters:
- `table`: an instance of a :py:class:`mysql2pgsql.lib.mysql_reader.MysqlReader.Table` object that represents the table to read/write.
Returns None
"""
index_sql = super(PostgresDbWriter, self).write_triggers(table)
for sql in index_sql:
self.execute(sql)
|
def write_triggers(self, table):
"""Send DDL to create the specified `table` triggers
:Parameters:
- `table`: an instance of a :py:class:`mysql2pgsql.lib.mysql_reader.MysqlReader.Table` object that represents the table to read/write.
Returns None
"""
index_sql = super(PostgresDbWriter, self).write_triggers(table)
for sql in index_sql:
self.execute(sql)
|
[
"Send",
"DDL",
"to",
"create",
"the",
"specified",
"table",
"triggers"
] |
philipsoutham/py-mysql2pgsql
|
python
|
https://github.com/philipsoutham/py-mysql2pgsql/blob/66dc2a3a3119263b3fe77300fb636346509787ef/mysql2pgsql/lib/postgres_db_writer.py#L170-L180
|
[
"def",
"write_triggers",
"(",
"self",
",",
"table",
")",
":",
"index_sql",
"=",
"super",
"(",
"PostgresDbWriter",
",",
"self",
")",
".",
"write_triggers",
"(",
"table",
")",
"for",
"sql",
"in",
"index_sql",
":",
"self",
".",
"execute",
"(",
"sql",
")"
] |
66dc2a3a3119263b3fe77300fb636346509787ef
|
test
|
PostgresDbWriter.write_constraints
|
Send DDL to create the specified `table` constraints
:Parameters:
- `table`: an instance of a :py:class:`mysql2pgsql.lib.mysql_reader.MysqlReader.Table` object that represents the table to read/write.
Returns None
|
mysql2pgsql/lib/postgres_db_writer.py
|
def write_constraints(self, table):
"""Send DDL to create the specified `table` constraints
:Parameters:
- `table`: an instance of a :py:class:`mysql2pgsql.lib.mysql_reader.MysqlReader.Table` object that represents the table to read/write.
Returns None
"""
constraint_sql = super(PostgresDbWriter, self).write_constraints(table)
for sql in constraint_sql:
self.execute(sql)
|
def write_constraints(self, table):
"""Send DDL to create the specified `table` constraints
:Parameters:
- `table`: an instance of a :py:class:`mysql2pgsql.lib.mysql_reader.MysqlReader.Table` object that represents the table to read/write.
Returns None
"""
constraint_sql = super(PostgresDbWriter, self).write_constraints(table)
for sql in constraint_sql:
self.execute(sql)
|
[
"Send",
"DDL",
"to",
"create",
"the",
"specified",
"table",
"constraints"
] |
philipsoutham/py-mysql2pgsql
|
python
|
https://github.com/philipsoutham/py-mysql2pgsql/blob/66dc2a3a3119263b3fe77300fb636346509787ef/mysql2pgsql/lib/postgres_db_writer.py#L183-L193
|
[
"def",
"write_constraints",
"(",
"self",
",",
"table",
")",
":",
"constraint_sql",
"=",
"super",
"(",
"PostgresDbWriter",
",",
"self",
")",
".",
"write_constraints",
"(",
"table",
")",
"for",
"sql",
"in",
"constraint_sql",
":",
"self",
".",
"execute",
"(",
"sql",
")"
] |
66dc2a3a3119263b3fe77300fb636346509787ef
|
test
|
PostgresDbWriter.write_contents
|
Write the contents of `table`
:Parameters:
- `table`: an instance of a :py:class:`mysql2pgsql.lib.mysql_reader.MysqlReader.Table` object that represents the table to read/write.
- `reader`: an instance of a :py:class:`mysql2pgsql.lib.mysql_reader.MysqlReader` object that allows reading from the data source.
Returns None
|
mysql2pgsql/lib/postgres_db_writer.py
|
def write_contents(self, table, reader):
"""Write the contents of `table`
:Parameters:
- `table`: an instance of a :py:class:`mysql2pgsql.lib.mysql_reader.MysqlReader.Table` object that represents the table to read/write.
- `reader`: an instance of a :py:class:`mysql2pgsql.lib.mysql_reader.MysqlReader` object that allows reading from the data source.
Returns None
"""
f = self.FileObjFaker(table, reader.read(table), self.process_row, self.verbose)
self.copy_from(f, '"%s"' % table.name, ['"%s"' % c['name'] for c in table.columns])
|
def write_contents(self, table, reader):
"""Write the contents of `table`
:Parameters:
- `table`: an instance of a :py:class:`mysql2pgsql.lib.mysql_reader.MysqlReader.Table` object that represents the table to read/write.
- `reader`: an instance of a :py:class:`mysql2pgsql.lib.mysql_reader.MysqlReader` object that allows reading from the data source.
Returns None
"""
f = self.FileObjFaker(table, reader.read(table), self.process_row, self.verbose)
self.copy_from(f, '"%s"' % table.name, ['"%s"' % c['name'] for c in table.columns])
|
[
"Write",
"the",
"contents",
"of",
"table"
] |
philipsoutham/py-mysql2pgsql
|
python
|
https://github.com/philipsoutham/py-mysql2pgsql/blob/66dc2a3a3119263b3fe77300fb636346509787ef/mysql2pgsql/lib/postgres_db_writer.py#L196-L206
|
[
"def",
"write_contents",
"(",
"self",
",",
"table",
",",
"reader",
")",
":",
"f",
"=",
"self",
".",
"FileObjFaker",
"(",
"table",
",",
"reader",
".",
"read",
"(",
"table",
")",
",",
"self",
".",
"process_row",
",",
"self",
".",
"verbose",
")",
"self",
".",
"copy_from",
"(",
"f",
",",
"'\"%s\"'",
"%",
"table",
".",
"name",
",",
"[",
"'\"%s\"'",
"%",
"c",
"[",
"'name'",
"]",
"for",
"c",
"in",
"table",
".",
"columns",
"]",
")"
] |
66dc2a3a3119263b3fe77300fb636346509787ef
|
test
|
PostgresWriter.process_row
|
Examines row data from MySQL and alters
the values when necessary to be compatible with
sending to PostgreSQL via the copy command
|
mysql2pgsql/lib/postgres_writer.py
|
def process_row(self, table, row):
"""Examines row data from MySQL and alters
the values when necessary to be compatible with
sending to PostgreSQL via the copy command
"""
for index, column in enumerate(table.columns):
hash_key = hash(frozenset(column.items()))
column_type = self.column_types[hash_key] if hash_key in self.column_types else self.column_type(column)
if row[index] == None and ('timestamp' not in column_type or not column['default']):
row[index] = '\N'
elif row[index] == None and column['default']:
if self.tz:
row[index] = '1970-01-01T00:00:00.000000' + self.tz_offset
else:
row[index] = '1970-01-01 00:00:00'
elif 'bit' in column_type:
row[index] = bin(ord(row[index]))[2:]
elif isinstance(row[index], (str, unicode, basestring)):
if column_type == 'bytea':
row[index] = Binary(row[index]).getquoted()[1:-8] if row[index] else row[index]
elif 'text[' in column_type:
row[index] = '{%s}' % ','.join('"%s"' % v.replace('"', r'\"') for v in row[index].split(','))
else:
row[index] = row[index].replace('\\', r'\\').replace('\n', r'\n').replace(
'\t', r'\t').replace('\r', r'\r').replace('\0', '')
elif column_type == 'boolean':
# We got here because you used a tinyint(1), if you didn't want a bool, don't use that type
row[index] = 't' if row[index] not in (None, 0) else 'f' if row[index] == 0 else row[index]
elif isinstance(row[index], (date, datetime)):
if isinstance(row[index], datetime) and self.tz:
try:
if row[index].tzinfo:
row[index] = row[index].astimezone(self.tz).isoformat()
else:
row[index] = datetime(*row[index].timetuple()[:6], tzinfo=self.tz).isoformat()
except Exception as e:
print e.message
else:
row[index] = row[index].isoformat()
elif isinstance(row[index], timedelta):
row[index] = datetime.utcfromtimestamp(_get_total_seconds(row[index])).time().isoformat()
else:
row[index] = AsIs(row[index]).getquoted()
|
def process_row(self, table, row):
    """Examines row data from MySQL and alters
    the values when necessary to be compatible with
    sending to PostgreSQL via the copy command

    Mutates `row` in place, one cell per column of `table`, rewriting each
    value into the textual form expected by PostgreSQL's COPY ... FROM stdin
    protocol.  (Python 2 code: note `unicode`/`basestring` and the `print`
    statement below.)
    """
    for index, column in enumerate(table.columns):
        # Per-column type lookup is cached in self.column_types, keyed by
        # the (hashed) column definition itself.
        hash_key = hash(frozenset(column.items()))
        column_type = self.column_types[hash_key] if hash_key in self.column_types else self.column_type(column)
        if row[index] == None and ('timestamp' not in column_type or not column['default']):
            # NULL cell: emit COPY's null marker (the two characters \N).
            row[index] = '\N'
        elif row[index] == None and column['default']:
            # NULL timestamp with a column default: substitute the epoch,
            # with or without a zone offset depending on configuration.
            if self.tz:
                row[index] = '1970-01-01T00:00:00.000000' + self.tz_offset
            else:
                row[index] = '1970-01-01 00:00:00'
        elif 'bit' in column_type:
            # BIT value arrives as a single byte; render it as a bare
            # binary digit string (strip Python's '0b' prefix).
            row[index] = bin(ord(row[index]))[2:]
        elif isinstance(row[index], (str, unicode, basestring)):
            if column_type == 'bytea':
                # Quote via Binary (psycopg2-style adapter, presumably),
                # trimmed of its surrounding quote/cast decoration so only
                # the escaped payload remains.
                row[index] = Binary(row[index]).getquoted()[1:-8] if row[index] else row[index]
            elif 'text[' in column_type:
                # Comma-separated value -> PostgreSQL text[] array literal,
                # escaping embedded double quotes.
                row[index] = '{%s}' % ','.join('"%s"' % v.replace('"', r'\"') for v in row[index].split(','))
            else:
                # Escape backslash/newline/tab/CR for the COPY text format
                # and drop NUL bytes entirely.
                row[index] = row[index].replace('\\', r'\\').replace('\n', r'\n').replace(
                    '\t', r'\t').replace('\r', r'\r').replace('\0', '')
        elif column_type == 'boolean':
            # We got here because you used a tinyint(1), if you didn't want a bool, don't use that type
            row[index] = 't' if row[index] not in (None, 0) else 'f' if row[index] == 0 else row[index]
        elif isinstance(row[index], (date, datetime)):
            if isinstance(row[index], datetime) and self.tz:
                try:
                    if row[index].tzinfo:
                        # Aware datetime: convert into the configured zone.
                        row[index] = row[index].astimezone(self.tz).isoformat()
                    else:
                        # Naive datetime: stamp it with the configured zone
                        # (sub-second precision is discarded by timetuple).
                        row[index] = datetime(*row[index].timetuple()[:6], tzinfo=self.tz).isoformat()
                except Exception as e:
                    print e.message
            else:
                row[index] = row[index].isoformat()
        elif isinstance(row[index], timedelta):
            # TIME values come back as timedelta; render as an ISO time string.
            row[index] = datetime.utcfromtimestamp(_get_total_seconds(row[index])).time().isoformat()
        else:
            # Anything else (numbers, Decimal, ...): let the adapter quote it.
            row[index] = AsIs(row[index]).getquoted()
|
[
"Examines",
"row",
"data",
"from",
"MySQL",
"and",
"alters",
"the",
"values",
"when",
"necessary",
"to",
"be",
"compatible",
"with",
"sending",
"to",
"PostgreSQL",
"via",
"the",
"copy",
"command"
] |
philipsoutham/py-mysql2pgsql
|
python
|
https://github.com/philipsoutham/py-mysql2pgsql/blob/66dc2a3a3119263b3fe77300fb636346509787ef/mysql2pgsql/lib/postgres_writer.py#L149-L191
|
[
"def",
"process_row",
"(",
"self",
",",
"table",
",",
"row",
")",
":",
"for",
"index",
",",
"column",
"in",
"enumerate",
"(",
"table",
".",
"columns",
")",
":",
"hash_key",
"=",
"hash",
"(",
"frozenset",
"(",
"column",
".",
"items",
"(",
")",
")",
")",
"column_type",
"=",
"self",
".",
"column_types",
"[",
"hash_key",
"]",
"if",
"hash_key",
"in",
"self",
".",
"column_types",
"else",
"self",
".",
"column_type",
"(",
"column",
")",
"if",
"row",
"[",
"index",
"]",
"==",
"None",
"and",
"(",
"'timestamp'",
"not",
"in",
"column_type",
"or",
"not",
"column",
"[",
"'default'",
"]",
")",
":",
"row",
"[",
"index",
"]",
"=",
"'\\N'",
"elif",
"row",
"[",
"index",
"]",
"==",
"None",
"and",
"column",
"[",
"'default'",
"]",
":",
"if",
"self",
".",
"tz",
":",
"row",
"[",
"index",
"]",
"=",
"'1970-01-01T00:00:00.000000'",
"+",
"self",
".",
"tz_offset",
"else",
":",
"row",
"[",
"index",
"]",
"=",
"'1970-01-01 00:00:00'",
"elif",
"'bit'",
"in",
"column_type",
":",
"row",
"[",
"index",
"]",
"=",
"bin",
"(",
"ord",
"(",
"row",
"[",
"index",
"]",
")",
")",
"[",
"2",
":",
"]",
"elif",
"isinstance",
"(",
"row",
"[",
"index",
"]",
",",
"(",
"str",
",",
"unicode",
",",
"basestring",
")",
")",
":",
"if",
"column_type",
"==",
"'bytea'",
":",
"row",
"[",
"index",
"]",
"=",
"Binary",
"(",
"row",
"[",
"index",
"]",
")",
".",
"getquoted",
"(",
")",
"[",
"1",
":",
"-",
"8",
"]",
"if",
"row",
"[",
"index",
"]",
"else",
"row",
"[",
"index",
"]",
"elif",
"'text['",
"in",
"column_type",
":",
"row",
"[",
"index",
"]",
"=",
"'{%s}'",
"%",
"','",
".",
"join",
"(",
"'\"%s\"'",
"%",
"v",
".",
"replace",
"(",
"'\"'",
",",
"r'\\\"'",
")",
"for",
"v",
"in",
"row",
"[",
"index",
"]",
".",
"split",
"(",
"','",
")",
")",
"else",
":",
"row",
"[",
"index",
"]",
"=",
"row",
"[",
"index",
"]",
".",
"replace",
"(",
"'\\\\'",
",",
"r'\\\\'",
")",
".",
"replace",
"(",
"'\\n'",
",",
"r'\\n'",
")",
".",
"replace",
"(",
"'\\t'",
",",
"r'\\t'",
")",
".",
"replace",
"(",
"'\\r'",
",",
"r'\\r'",
")",
".",
"replace",
"(",
"'\\0'",
",",
"''",
")",
"elif",
"column_type",
"==",
"'boolean'",
":",
"# We got here because you used a tinyint(1), if you didn't want a bool, don't use that type",
"row",
"[",
"index",
"]",
"=",
"'t'",
"if",
"row",
"[",
"index",
"]",
"not",
"in",
"(",
"None",
",",
"0",
")",
"else",
"'f'",
"if",
"row",
"[",
"index",
"]",
"==",
"0",
"else",
"row",
"[",
"index",
"]",
"elif",
"isinstance",
"(",
"row",
"[",
"index",
"]",
",",
"(",
"date",
",",
"datetime",
")",
")",
":",
"if",
"isinstance",
"(",
"row",
"[",
"index",
"]",
",",
"datetime",
")",
"and",
"self",
".",
"tz",
":",
"try",
":",
"if",
"row",
"[",
"index",
"]",
".",
"tzinfo",
":",
"row",
"[",
"index",
"]",
"=",
"row",
"[",
"index",
"]",
".",
"astimezone",
"(",
"self",
".",
"tz",
")",
".",
"isoformat",
"(",
")",
"else",
":",
"row",
"[",
"index",
"]",
"=",
"datetime",
"(",
"*",
"row",
"[",
"index",
"]",
".",
"timetuple",
"(",
")",
"[",
":",
"6",
"]",
",",
"tzinfo",
"=",
"self",
".",
"tz",
")",
".",
"isoformat",
"(",
")",
"except",
"Exception",
"as",
"e",
":",
"print",
"e",
".",
"message",
"else",
":",
"row",
"[",
"index",
"]",
"=",
"row",
"[",
"index",
"]",
".",
"isoformat",
"(",
")",
"elif",
"isinstance",
"(",
"row",
"[",
"index",
"]",
",",
"timedelta",
")",
":",
"row",
"[",
"index",
"]",
"=",
"datetime",
".",
"utcfromtimestamp",
"(",
"_get_total_seconds",
"(",
"row",
"[",
"index",
"]",
")",
")",
".",
"time",
"(",
")",
".",
"isoformat",
"(",
")",
"else",
":",
"row",
"[",
"index",
"]",
"=",
"AsIs",
"(",
"row",
"[",
"index",
"]",
")",
".",
"getquoted",
"(",
")"
] |
66dc2a3a3119263b3fe77300fb636346509787ef
|
test
|
PostgresFileWriter.truncate
|
Write DDL to truncate the specified `table`
:Parameters:
- `table`: an instance of a :py:class:`mysql2pgsql.lib.mysql_reader.MysqlReader.Table` object that represents the table to read/write.
Returns None
|
mysql2pgsql/lib/postgres_file_writer.py
|
def truncate(self, table):
    """Serialize the TRUNCATE statements for `table` into the output file.

    :Parameters:
        - `table`: an instance of a :py:class:`mysql2pgsql.lib.mysql_reader.MysqlReader.Table` object that represents the table to read/write.

    Returns None
    """
    # The base class builds the SQL; this writer only appends it to the file.
    truncate_ddl, serial_key_ddl = super(PostgresFileWriter, self).truncate(table)
    self.f.write("""
-- TRUNCATE %(table_name)s;
%(truncate_sql)s
""" % {'truncate_sql': truncate_ddl, 'table_name': table.name})
    if serial_key_ddl:
        self.f.write("""
%(serial_key_sql)s
""" % {'serial_key_sql': serial_key_ddl})
|
def truncate(self, table):
    """Serialize the TRUNCATE statements for `table` into the output file.

    :Parameters:
        - `table`: an instance of a :py:class:`mysql2pgsql.lib.mysql_reader.MysqlReader.Table` object that represents the table to read/write.

    Returns None
    """
    # The base class builds the SQL; this writer only appends it to the file.
    truncate_ddl, serial_key_ddl = super(PostgresFileWriter, self).truncate(table)
    self.f.write("""
-- TRUNCATE %(table_name)s;
%(truncate_sql)s
""" % {'truncate_sql': truncate_ddl, 'table_name': table.name})
    if serial_key_ddl:
        self.f.write("""
%(serial_key_sql)s
""" % {'serial_key_sql': serial_key_ddl})
|
[
"Write",
"DDL",
"to",
"truncate",
"the",
"specified",
"table"
] |
philipsoutham/py-mysql2pgsql
|
python
|
https://github.com/philipsoutham/py-mysql2pgsql/blob/66dc2a3a3119263b3fe77300fb636346509787ef/mysql2pgsql/lib/postgres_file_writer.py#L36-L54
|
[
"def",
"truncate",
"(",
"self",
",",
"table",
")",
":",
"truncate_sql",
",",
"serial_key_sql",
"=",
"super",
"(",
"PostgresFileWriter",
",",
"self",
")",
".",
"truncate",
"(",
"table",
")",
"self",
".",
"f",
".",
"write",
"(",
"\"\"\"\n-- TRUNCATE %(table_name)s;\n%(truncate_sql)s\n\"\"\"",
"%",
"{",
"'table_name'",
":",
"table",
".",
"name",
",",
"'truncate_sql'",
":",
"truncate_sql",
"}",
")",
"if",
"serial_key_sql",
":",
"self",
".",
"f",
".",
"write",
"(",
"\"\"\"\n%(serial_key_sql)s\n\"\"\"",
"%",
"{",
"'serial_key_sql'",
":",
"serial_key_sql",
"}",
")"
] |
66dc2a3a3119263b3fe77300fb636346509787ef
|
test
|
PostgresFileWriter.write_table
|
Write DDL to create the specified `table`.
:Parameters:
- `table`: an instance of a :py:class:`mysql2pgsql.lib.mysql_reader.MysqlReader.Table` object that represents the table to read/write.
Returns None
|
mysql2pgsql/lib/postgres_file_writer.py
|
def write_table(self, table):
    """Serialize the CREATE TABLE DDL for `table` into the output file.

    :Parameters:
        - `table`: an instance of a :py:class:`mysql2pgsql.lib.mysql_reader.MysqlReader.Table` object that represents the table to read/write.

    Returns None
    """
    table_ddl, serial_key_ddl = super(PostgresFileWriter, self).write_table(table)
    # Any serial-key setup statements are written ahead of the table itself.
    if serial_key_ddl:
        self.f.write("""
%(serial_key_sql)s
""" % {'serial_key_sql': '\n'.join(serial_key_ddl)})
    self.f.write("""
-- Table: %(table_name)s
%(table_sql)s
""" % {'table_sql': '\n'.join(table_ddl), 'table_name': table.name})
|
def write_table(self, table):
    """Serialize the CREATE TABLE DDL for `table` into the output file.

    :Parameters:
        - `table`: an instance of a :py:class:`mysql2pgsql.lib.mysql_reader.MysqlReader.Table` object that represents the table to read/write.

    Returns None
    """
    table_ddl, serial_key_ddl = super(PostgresFileWriter, self).write_table(table)
    # Any serial-key setup statements are written ahead of the table itself.
    if serial_key_ddl:
        self.f.write("""
%(serial_key_sql)s
""" % {'serial_key_sql': '\n'.join(serial_key_ddl)})
    self.f.write("""
-- Table: %(table_name)s
%(table_sql)s
""" % {'table_sql': '\n'.join(table_ddl), 'table_name': table.name})
|
[
"Write",
"DDL",
"to",
"create",
"the",
"specified",
"table",
"."
] |
philipsoutham/py-mysql2pgsql
|
python
|
https://github.com/philipsoutham/py-mysql2pgsql/blob/66dc2a3a3119263b3fe77300fb636346509787ef/mysql2pgsql/lib/postgres_file_writer.py#L57-L79
|
[
"def",
"write_table",
"(",
"self",
",",
"table",
")",
":",
"table_sql",
",",
"serial_key_sql",
"=",
"super",
"(",
"PostgresFileWriter",
",",
"self",
")",
".",
"write_table",
"(",
"table",
")",
"if",
"serial_key_sql",
":",
"self",
".",
"f",
".",
"write",
"(",
"\"\"\"\n%(serial_key_sql)s\n\"\"\"",
"%",
"{",
"'serial_key_sql'",
":",
"'\\n'",
".",
"join",
"(",
"serial_key_sql",
")",
"}",
")",
"self",
".",
"f",
".",
"write",
"(",
"\"\"\"\n-- Table: %(table_name)s\n%(table_sql)s\n\"\"\"",
"%",
"{",
"'table_name'",
":",
"table",
".",
"name",
",",
"'table_sql'",
":",
"'\\n'",
".",
"join",
"(",
"table_sql",
")",
",",
"}",
")"
] |
66dc2a3a3119263b3fe77300fb636346509787ef
|
test
|
PostgresFileWriter.write_indexes
|
Write DDL of `table` indexes to the output file
:Parameters:
- `table`: an instance of a :py:class:`mysql2pgsql.lib.mysql_reader.MysqlReader.Table` object that represents the table to read/write.
Returns None
|
mysql2pgsql/lib/postgres_file_writer.py
|
def write_indexes(self, table):
    """Serialize the index DDL for `table` into the output file.

    :Parameters:
        - `table`: an instance of a :py:class:`mysql2pgsql.lib.mysql_reader.MysqlReader.Table` object that represents the table to read/write.

    Returns None
    """
    index_ddl = super(PostgresFileWriter, self).write_indexes(table)
    self.f.write('\n'.join(index_ddl))
|
def write_indexes(self, table):
    """Serialize the index DDL for `table` into the output file.

    :Parameters:
        - `table`: an instance of a :py:class:`mysql2pgsql.lib.mysql_reader.MysqlReader.Table` object that represents the table to read/write.

    Returns None
    """
    index_ddl = super(PostgresFileWriter, self).write_indexes(table)
    self.f.write('\n'.join(index_ddl))
|
[
"Write",
"DDL",
"of",
"table",
"indexes",
"to",
"the",
"output",
"file"
] |
philipsoutham/py-mysql2pgsql
|
python
|
https://github.com/philipsoutham/py-mysql2pgsql/blob/66dc2a3a3119263b3fe77300fb636346509787ef/mysql2pgsql/lib/postgres_file_writer.py#L82-L90
|
[
"def",
"write_indexes",
"(",
"self",
",",
"table",
")",
":",
"self",
".",
"f",
".",
"write",
"(",
"'\\n'",
".",
"join",
"(",
"super",
"(",
"PostgresFileWriter",
",",
"self",
")",
".",
"write_indexes",
"(",
"table",
")",
")",
")"
] |
66dc2a3a3119263b3fe77300fb636346509787ef
|
test
|
PostgresFileWriter.write_constraints
|
Write DDL of `table` constraints to the output file
:Parameters:
- `table`: an instance of a :py:class:`mysql2pgsql.lib.mysql_reader.MysqlReader.Table` object that represents the table to read/write.
Returns None
|
mysql2pgsql/lib/postgres_file_writer.py
|
def write_constraints(self, table):
    """Serialize the constraint DDL for `table` into the output file.

    :Parameters:
        - `table`: an instance of a :py:class:`mysql2pgsql.lib.mysql_reader.MysqlReader.Table` object that represents the table to read/write.

    Returns None
    """
    constraint_ddl = super(PostgresFileWriter, self).write_constraints(table)
    self.f.write('\n'.join(constraint_ddl))
|
def write_constraints(self, table):
    """Serialize the constraint DDL for `table` into the output file.

    :Parameters:
        - `table`: an instance of a :py:class:`mysql2pgsql.lib.mysql_reader.MysqlReader.Table` object that represents the table to read/write.

    Returns None
    """
    constraint_ddl = super(PostgresFileWriter, self).write_constraints(table)
    self.f.write('\n'.join(constraint_ddl))
|
[
"Write",
"DDL",
"of",
"table",
"constraints",
"to",
"the",
"output",
"file"
] |
philipsoutham/py-mysql2pgsql
|
python
|
https://github.com/philipsoutham/py-mysql2pgsql/blob/66dc2a3a3119263b3fe77300fb636346509787ef/mysql2pgsql/lib/postgres_file_writer.py#L93-L101
|
[
"def",
"write_constraints",
"(",
"self",
",",
"table",
")",
":",
"self",
".",
"f",
".",
"write",
"(",
"'\\n'",
".",
"join",
"(",
"super",
"(",
"PostgresFileWriter",
",",
"self",
")",
".",
"write_constraints",
"(",
"table",
")",
")",
")"
] |
66dc2a3a3119263b3fe77300fb636346509787ef
|
test
|
PostgresFileWriter.write_triggers
|
Write TRIGGERs existing on `table` to the output file
:Parameters:
- `table`: an instance of a :py:class:`mysql2pgsql.lib.mysql_reader.MysqlReader.Table` object that represents the table to read/write.
Returns None
|
mysql2pgsql/lib/postgres_file_writer.py
|
def write_triggers(self, table):
    """Serialize the trigger DDL existing on `table` into the output file.

    :Parameters:
        - `table`: an instance of a :py:class:`mysql2pgsql.lib.mysql_reader.MysqlReader.Table` object that represents the table to read/write.

    Returns None
    """
    trigger_ddl = super(PostgresFileWriter, self).write_triggers(table)
    self.f.write('\n'.join(trigger_ddl))
|
def write_triggers(self, table):
    """Serialize the trigger DDL existing on `table` into the output file.

    :Parameters:
        - `table`: an instance of a :py:class:`mysql2pgsql.lib.mysql_reader.MysqlReader.Table` object that represents the table to read/write.

    Returns None
    """
    trigger_ddl = super(PostgresFileWriter, self).write_triggers(table)
    self.f.write('\n'.join(trigger_ddl))
|
[
"Write",
"TRIGGERs",
"existing",
"on",
"table",
"to",
"the",
"output",
"file"
] |
philipsoutham/py-mysql2pgsql
|
python
|
https://github.com/philipsoutham/py-mysql2pgsql/blob/66dc2a3a3119263b3fe77300fb636346509787ef/mysql2pgsql/lib/postgres_file_writer.py#L104-L112
|
[
"def",
"write_triggers",
"(",
"self",
",",
"table",
")",
":",
"self",
".",
"f",
".",
"write",
"(",
"'\\n'",
".",
"join",
"(",
"super",
"(",
"PostgresFileWriter",
",",
"self",
")",
".",
"write_triggers",
"(",
"table",
")",
")",
")"
] |
66dc2a3a3119263b3fe77300fb636346509787ef
|
test
|
PostgresFileWriter.write_contents
|
Write the data contents of `table` to the output file.
:Parameters:
- `table`: an instance of a :py:class:`mysql2pgsql.lib.mysql_reader.MysqlReader.Table` object that represents the table to read/write.
- `reader`: an instance of a :py:class:`mysql2pgsql.lib.mysql_reader.MysqlReader` object that allows reading from the data source.
Returns None
|
mysql2pgsql/lib/postgres_file_writer.py
|
def write_contents(self, table, reader):
    """Write the data contents of `table` to the output file.
    :Parameters:
        - `table`: an instance of a :py:class:`mysql2pgsql.lib.mysql_reader.MysqlReader.Table` object that represents the table to read/write.
        - `reader`: an instance of a :py:class:`mysql2pgsql.lib.mysql_reader.MysqlReader` object that allows reading from the data source.

    Emits a COPY ... FROM stdin block: one tab-separated line per row,
    closed by the COPY end-of-data marker.  When self.verbose is set, a
    rows/sec progress figure is printed every 20000 rows.

    Returns None
    """
    # start variable optimizations (bind hot lookups to locals for the loop)
    pr = self.process_row
    f_write = self.f.write
    verbose = self.verbose
    # end variable optimizations
    f_write("""
--
-- Data for Name: %(table_name)s; Type: TABLE DATA;
--
COPY "%(table_name)s" (%(column_names)s) FROM stdin;
""" % {
        'table_name': table.name,
        'column_names': ', '.join(('"%s"' % col['name']) for col in table.columns)})
    if verbose:
        tt = time.time
        start_time = tt()
        prev_val_len = 0
        prev_row_count = 0
    for i, row in enumerate(reader.read(table), 1):
        row = list(row)
        # Coerce each cell into COPY-compatible text in place.
        pr(table, row)
        try:
            f_write(u'%s\n' % (u'\t'.join(row)))
        except UnicodeDecodeError:
            # Mixed str/unicode cells (Python 2): retry with explicit decode.
            f_write(u'%s\n' % (u'\t'.join(r.decode('utf-8') for r in row)))
        if verbose:
            if (i % 20000) == 0:
                # Throughput since the previous report, overwriting the old
                # status line with backspaces.
                now = tt()
                elapsed = now - start_time
                val = '%.2f rows/sec [%s] ' % ((i - prev_row_count) / elapsed, i)
                print_row_progress('%s%s' % (("\b" * prev_val_len), val))
                prev_val_len = len(val) + 3
                start_time = now
                prev_row_count = i
    f_write("\\.\n\n")
    if verbose:
        print('')
|
def write_contents(self, table, reader):
    """Write the data contents of `table` to the output file.
    :Parameters:
        - `table`: an instance of a :py:class:`mysql2pgsql.lib.mysql_reader.MysqlReader.Table` object that represents the table to read/write.
        - `reader`: an instance of a :py:class:`mysql2pgsql.lib.mysql_reader.MysqlReader` object that allows reading from the data source.

    Emits a COPY ... FROM stdin block: one tab-separated line per row,
    closed by the COPY end-of-data marker.  When self.verbose is set, a
    rows/sec progress figure is printed every 20000 rows.

    Returns None
    """
    # start variable optimizations (bind hot lookups to locals for the loop)
    pr = self.process_row
    f_write = self.f.write
    verbose = self.verbose
    # end variable optimizations
    f_write("""
--
-- Data for Name: %(table_name)s; Type: TABLE DATA;
--
COPY "%(table_name)s" (%(column_names)s) FROM stdin;
""" % {
        'table_name': table.name,
        'column_names': ', '.join(('"%s"' % col['name']) for col in table.columns)})
    if verbose:
        tt = time.time
        start_time = tt()
        prev_val_len = 0
        prev_row_count = 0
    for i, row in enumerate(reader.read(table), 1):
        row = list(row)
        # Coerce each cell into COPY-compatible text in place.
        pr(table, row)
        try:
            f_write(u'%s\n' % (u'\t'.join(row)))
        except UnicodeDecodeError:
            # Mixed str/unicode cells (Python 2): retry with explicit decode.
            f_write(u'%s\n' % (u'\t'.join(r.decode('utf-8') for r in row)))
        if verbose:
            if (i % 20000) == 0:
                # Throughput since the previous report, overwriting the old
                # status line with backspaces.
                now = tt()
                elapsed = now - start_time
                val = '%.2f rows/sec [%s] ' % ((i - prev_row_count) / elapsed, i)
                print_row_progress('%s%s' % (("\b" * prev_val_len), val))
                prev_val_len = len(val) + 3
                start_time = now
                prev_row_count = i
    f_write("\\.\n\n")
    if verbose:
        print('')
|
[
"Write",
"the",
"data",
"contents",
"of",
"table",
"to",
"the",
"output",
"file",
"."
] |
philipsoutham/py-mysql2pgsql
|
python
|
https://github.com/philipsoutham/py-mysql2pgsql/blob/66dc2a3a3119263b3fe77300fb636346509787ef/mysql2pgsql/lib/postgres_file_writer.py#L115-L163
|
[
"def",
"write_contents",
"(",
"self",
",",
"table",
",",
"reader",
")",
":",
"# start variable optimiztions",
"pr",
"=",
"self",
".",
"process_row",
"f_write",
"=",
"self",
".",
"f",
".",
"write",
"verbose",
"=",
"self",
".",
"verbose",
"# end variable optimiztions",
"f_write",
"(",
"\"\"\"\n--\n-- Data for Name: %(table_name)s; Type: TABLE DATA;\n--\n\nCOPY \"%(table_name)s\" (%(column_names)s) FROM stdin;\n\"\"\"",
"%",
"{",
"'table_name'",
":",
"table",
".",
"name",
",",
"'column_names'",
":",
"', '",
".",
"join",
"(",
"(",
"'\"%s\"'",
"%",
"col",
"[",
"'name'",
"]",
")",
"for",
"col",
"in",
"table",
".",
"columns",
")",
"}",
")",
"if",
"verbose",
":",
"tt",
"=",
"time",
".",
"time",
"start_time",
"=",
"tt",
"(",
")",
"prev_val_len",
"=",
"0",
"prev_row_count",
"=",
"0",
"for",
"i",
",",
"row",
"in",
"enumerate",
"(",
"reader",
".",
"read",
"(",
"table",
")",
",",
"1",
")",
":",
"row",
"=",
"list",
"(",
"row",
")",
"pr",
"(",
"table",
",",
"row",
")",
"try",
":",
"f_write",
"(",
"u'%s\\n'",
"%",
"(",
"u'\\t'",
".",
"join",
"(",
"row",
")",
")",
")",
"except",
"UnicodeDecodeError",
":",
"f_write",
"(",
"u'%s\\n'",
"%",
"(",
"u'\\t'",
".",
"join",
"(",
"r",
".",
"decode",
"(",
"'utf-8'",
")",
"for",
"r",
"in",
"row",
")",
")",
")",
"if",
"verbose",
":",
"if",
"(",
"i",
"%",
"20000",
")",
"==",
"0",
":",
"now",
"=",
"tt",
"(",
")",
"elapsed",
"=",
"now",
"-",
"start_time",
"val",
"=",
"'%.2f rows/sec [%s] '",
"%",
"(",
"(",
"i",
"-",
"prev_row_count",
")",
"/",
"elapsed",
",",
"i",
")",
"print_row_progress",
"(",
"'%s%s'",
"%",
"(",
"(",
"\"\\b\"",
"*",
"prev_val_len",
")",
",",
"val",
")",
")",
"prev_val_len",
"=",
"len",
"(",
"val",
")",
"+",
"3",
"start_time",
"=",
"now",
"prev_row_count",
"=",
"i",
"f_write",
"(",
"\"\\\\.\\n\\n\"",
")",
"if",
"verbose",
":",
"print",
"(",
"''",
")"
] |
66dc2a3a3119263b3fe77300fb636346509787ef
|
test
|
parse_fntdata
|
info face="Haettenschweiler" size=60 bold=0 italic=0 charset="" unicode=0 stretchH=100 smooth=1 aa=1 padding=0,0,0,0 spacing=2,2
common lineHeight=64 base=53 scaleW=256 scaleH=128 pages=1 packed=0
page id=0 file="attack_num.png"
chars count=12
char id=52 x=2 y=2 width=33 height=51 xoffset=0 yoffset=5 xadvance=32 page=0 chnl=0 letter="4"
char id=48 x=37 y=2 width=29 height=50 xoffset=1 yoffset=6 xadvance=29 page=0 chnl=0 letter="0"
char id=53 x=68 y=2 width=29 height=50 xoffset=1 yoffset=6 xadvance=28 page=0 chnl=0 letter="5"
char id=57 x=99 y=2 width=28 height=50 xoffset=1 yoffset=6 xadvance=28 page=0 chnl=0 letter="9"
char id=54 x=129 y=2 width=28 height=50 xoffset=1 yoffset=6 xadvance=28 page=0 chnl=0 letter="6"
char id=56 x=159 y=2 width=28 height=50 xoffset=1 yoffset=6 xadvance=28 page=0 chnl=0 letter="8"
char id=51 x=189 y=2 width=28 height=50 xoffset=1 yoffset=6 xadvance=28 page=0 chnl=0 letter="3"
char id=50 x=219 y=2 width=28 height=49 xoffset=1 yoffset=7 xadvance=28 page=0 chnl=0 letter="2"
char id=55 x=2 y=55 width=30 height=48 xoffset=1 yoffset=8 xadvance=28 page=0 chnl=0 letter="7"
char id=49 x=34 y=55 width=20 height=48 xoffset=1 yoffset=8 xadvance=20 page=0 chnl=0 letter="1"
char id=45 x=56 y=55 width=18 height=12 xoffset=1 yoffset=36 xadvance=19 page=0 chnl=0 letter="-"
char id=32 x=76 y=55 width=0 height=0 xoffset=11 yoffset=73 xadvance=16 page=0 chnl=0 letter="space"
|
src/untp/dataparse.py
|
def parse_fntdata(_data, _config, _extra_data_receiver=None):
    """
    info face="Haettenschweiler" size=60 bold=0 italic=0 charset="" unicode=0 stretchH=100 smooth=1 aa=1 padding=0,0,0,0 spacing=2,2
    common lineHeight=64 base=53 scaleW=256 scaleH=128 pages=1 packed=0
    page id=0 file="attack_num.png"
    chars count=12
    char id=52 x=2 y=2 width=33 height=51 xoffset=0 yoffset=5 xadvance=32 page=0 chnl=0 letter="4"
    char id=48 x=37 y=2 width=29 height=50 xoffset=1 yoffset=6 xadvance=29 page=0 chnl=0 letter="0"
    char id=53 x=68 y=2 width=29 height=50 xoffset=1 yoffset=6 xadvance=28 page=0 chnl=0 letter="5"
    char id=57 x=99 y=2 width=28 height=50 xoffset=1 yoffset=6 xadvance=28 page=0 chnl=0 letter="9"
    char id=54 x=129 y=2 width=28 height=50 xoffset=1 yoffset=6 xadvance=28 page=0 chnl=0 letter="6"
    char id=56 x=159 y=2 width=28 height=50 xoffset=1 yoffset=6 xadvance=28 page=0 chnl=0 letter="8"
    char id=51 x=189 y=2 width=28 height=50 xoffset=1 yoffset=6 xadvance=28 page=0 chnl=0 letter="3"
    char id=50 x=219 y=2 width=28 height=49 xoffset=1 yoffset=7 xadvance=28 page=0 chnl=0 letter="2"
    char id=55 x=2 y=55 width=30 height=48 xoffset=1 yoffset=8 xadvance=28 page=0 chnl=0 letter="7"
    char id=49 x=34 y=55 width=20 height=48 xoffset=1 yoffset=8 xadvance=20 page=0 chnl=0 letter="1"
    char id=45 x=56 y=55 width=18 height=12 xoffset=1 yoffset=36 xadvance=19 page=0 chnl=0 letter="-"
    char id=32 x=76 y=55 width=0 height=0 xoffset=11 yoffset=73 xadvance=16 page=0 chnl=0 letter="space"
    """
    data = {}
    frame_data_list = []
    # _data is the .fnt file split into lines (example layout in the docstring
    # above): line 0 ("info ...") is ignored; lines 1-3 carry common metrics,
    # the texture page and the glyph count; glyph records follow from line 4.
    parse_common_info = parse("common lineHeight={line_height:d} base={base:d} scaleW={scale_w:d} scaleH={scale_h:d} pages={pages:d} packed={packed:d}", _data[1])
    parse_page_info = parse("page id={id:d} file=\"{file}\"", _data[2])
    parse_char_count = parse("chars count={count:d}", _data[3])
    raw_frames_data = {}
    for index in xrange(0, parse_char_count["count"]):
        parse_frame = parse("char id={id:d} x={x:d} y={y:d} width={width:d} height={height:d} xoffset={xoffset:d} yoffset={yoffset:d} xadvance={xadvance:d} page={page:d} chnl={chnl:d} letter=\"{letter}\"", _data[index + 4])
        frame_data = {}
        # NOTE(review): the template only uses {prefix} and {id}; the `letter`
        # keyword argument below is accepted but never substituted.
        frame_data["name"] = "{prefix}_{id}.png".format(prefix= _config["prefix"], id=parse_frame["id"], letter=parse_frame["letter"])
        frame_data["source_size"] = (parse_frame["width"], parse_frame["height"])
        frame_data["rotated"] = False
        # Sub-rectangle of the glyph inside the texture page, as (x1, y1, x2, y2).
        frame_data["src_rect"] = (parse_frame["x"], parse_frame["y"], parse_frame["x"] + parse_frame["width"], parse_frame["y"] + parse_frame["height"])
        frame_data["offset"] = (0, 0)
        # Zero-area glyphs (e.g. the space character) are skipped entirely:
        # they appear neither in the frame list nor in the raw frame map.
        if parse_frame["width"] <= 0 or parse_frame["height"] <= 0:
            continue
        frame_data_list.append(frame_data)
        parse_frame_named_data = parse_frame.named.copy()
        parse_frame_named_data["texture"] = frame_data["name"]
        raw_frames_data[parse_frame["id"]] = parse_frame_named_data
    data["texture"] = parse_page_info["file"]
    data["frames"] = frame_data_list
    # Optionally hand the raw parsed metrics back to the caller.
    if _extra_data_receiver != None:
        _extra_data_receiver["common"] = parse_common_info.named
        _extra_data_receiver["frames"] = raw_frames_data
    return data
|
def parse_fntdata(_data, _config, _extra_data_receiver=None):
    """
    info face="Haettenschweiler" size=60 bold=0 italic=0 charset="" unicode=0 stretchH=100 smooth=1 aa=1 padding=0,0,0,0 spacing=2,2
    common lineHeight=64 base=53 scaleW=256 scaleH=128 pages=1 packed=0
    page id=0 file="attack_num.png"
    chars count=12
    char id=52 x=2 y=2 width=33 height=51 xoffset=0 yoffset=5 xadvance=32 page=0 chnl=0 letter="4"
    char id=48 x=37 y=2 width=29 height=50 xoffset=1 yoffset=6 xadvance=29 page=0 chnl=0 letter="0"
    char id=53 x=68 y=2 width=29 height=50 xoffset=1 yoffset=6 xadvance=28 page=0 chnl=0 letter="5"
    char id=57 x=99 y=2 width=28 height=50 xoffset=1 yoffset=6 xadvance=28 page=0 chnl=0 letter="9"
    char id=54 x=129 y=2 width=28 height=50 xoffset=1 yoffset=6 xadvance=28 page=0 chnl=0 letter="6"
    char id=56 x=159 y=2 width=28 height=50 xoffset=1 yoffset=6 xadvance=28 page=0 chnl=0 letter="8"
    char id=51 x=189 y=2 width=28 height=50 xoffset=1 yoffset=6 xadvance=28 page=0 chnl=0 letter="3"
    char id=50 x=219 y=2 width=28 height=49 xoffset=1 yoffset=7 xadvance=28 page=0 chnl=0 letter="2"
    char id=55 x=2 y=55 width=30 height=48 xoffset=1 yoffset=8 xadvance=28 page=0 chnl=0 letter="7"
    char id=49 x=34 y=55 width=20 height=48 xoffset=1 yoffset=8 xadvance=20 page=0 chnl=0 letter="1"
    char id=45 x=56 y=55 width=18 height=12 xoffset=1 yoffset=36 xadvance=19 page=0 chnl=0 letter="-"
    char id=32 x=76 y=55 width=0 height=0 xoffset=11 yoffset=73 xadvance=16 page=0 chnl=0 letter="space"
    """
    data = {}
    frame_data_list = []
    # _data is the .fnt file split into lines (example layout in the docstring
    # above): line 0 ("info ...") is ignored; lines 1-3 carry common metrics,
    # the texture page and the glyph count; glyph records follow from line 4.
    parse_common_info = parse("common lineHeight={line_height:d} base={base:d} scaleW={scale_w:d} scaleH={scale_h:d} pages={pages:d} packed={packed:d}", _data[1])
    parse_page_info = parse("page id={id:d} file=\"{file}\"", _data[2])
    parse_char_count = parse("chars count={count:d}", _data[3])
    raw_frames_data = {}
    for index in xrange(0, parse_char_count["count"]):
        parse_frame = parse("char id={id:d} x={x:d} y={y:d} width={width:d} height={height:d} xoffset={xoffset:d} yoffset={yoffset:d} xadvance={xadvance:d} page={page:d} chnl={chnl:d} letter=\"{letter}\"", _data[index + 4])
        frame_data = {}
        # NOTE(review): the template only uses {prefix} and {id}; the `letter`
        # keyword argument below is accepted but never substituted.
        frame_data["name"] = "{prefix}_{id}.png".format(prefix= _config["prefix"], id=parse_frame["id"], letter=parse_frame["letter"])
        frame_data["source_size"] = (parse_frame["width"], parse_frame["height"])
        frame_data["rotated"] = False
        # Sub-rectangle of the glyph inside the texture page, as (x1, y1, x2, y2).
        frame_data["src_rect"] = (parse_frame["x"], parse_frame["y"], parse_frame["x"] + parse_frame["width"], parse_frame["y"] + parse_frame["height"])
        frame_data["offset"] = (0, 0)
        # Zero-area glyphs (e.g. the space character) are skipped entirely:
        # they appear neither in the frame list nor in the raw frame map.
        if parse_frame["width"] <= 0 or parse_frame["height"] <= 0:
            continue
        frame_data_list.append(frame_data)
        parse_frame_named_data = parse_frame.named.copy()
        parse_frame_named_data["texture"] = frame_data["name"]
        raw_frames_data[parse_frame["id"]] = parse_frame_named_data
    data["texture"] = parse_page_info["file"]
    data["frames"] = frame_data_list
    # Optionally hand the raw parsed metrics back to the caller.
    if _extra_data_receiver != None:
        _extra_data_receiver["common"] = parse_common_info.named
        _extra_data_receiver["frames"] = raw_frames_data
    return data
|
[
"info",
"face",
"=",
"Haettenschweiler",
"size",
"=",
"60",
"bold",
"=",
"0",
"italic",
"=",
"0",
"charset",
"=",
"unicode",
"=",
"0",
"stretchH",
"=",
"100",
"smooth",
"=",
"1",
"aa",
"=",
"1",
"padding",
"=",
"0",
"0",
"0",
"0",
"spacing",
"=",
"2",
"2",
"common",
"lineHeight",
"=",
"64",
"base",
"=",
"53",
"scaleW",
"=",
"256",
"scaleH",
"=",
"128",
"pages",
"=",
"1",
"packed",
"=",
"0",
"page",
"id",
"=",
"0",
"file",
"=",
"attack_num",
".",
"png",
"chars",
"count",
"=",
"12",
"char",
"id",
"=",
"52",
"x",
"=",
"2",
"y",
"=",
"2",
"width",
"=",
"33",
"height",
"=",
"51",
"xoffset",
"=",
"0",
"yoffset",
"=",
"5",
"xadvance",
"=",
"32",
"page",
"=",
"0",
"chnl",
"=",
"0",
"letter",
"=",
"4",
"char",
"id",
"=",
"48",
"x",
"=",
"37",
"y",
"=",
"2",
"width",
"=",
"29",
"height",
"=",
"50",
"xoffset",
"=",
"1",
"yoffset",
"=",
"6",
"xadvance",
"=",
"29",
"page",
"=",
"0",
"chnl",
"=",
"0",
"letter",
"=",
"0",
"char",
"id",
"=",
"53",
"x",
"=",
"68",
"y",
"=",
"2",
"width",
"=",
"29",
"height",
"=",
"50",
"xoffset",
"=",
"1",
"yoffset",
"=",
"6",
"xadvance",
"=",
"28",
"page",
"=",
"0",
"chnl",
"=",
"0",
"letter",
"=",
"5",
"char",
"id",
"=",
"57",
"x",
"=",
"99",
"y",
"=",
"2",
"width",
"=",
"28",
"height",
"=",
"50",
"xoffset",
"=",
"1",
"yoffset",
"=",
"6",
"xadvance",
"=",
"28",
"page",
"=",
"0",
"chnl",
"=",
"0",
"letter",
"=",
"9",
"char",
"id",
"=",
"54",
"x",
"=",
"129",
"y",
"=",
"2",
"width",
"=",
"28",
"height",
"=",
"50",
"xoffset",
"=",
"1",
"yoffset",
"=",
"6",
"xadvance",
"=",
"28",
"page",
"=",
"0",
"chnl",
"=",
"0",
"letter",
"=",
"6",
"char",
"id",
"=",
"56",
"x",
"=",
"159",
"y",
"=",
"2",
"width",
"=",
"28",
"height",
"=",
"50",
"xoffset",
"=",
"1",
"yoffset",
"=",
"6",
"xadvance",
"=",
"28",
"page",
"=",
"0",
"chnl",
"=",
"0",
"letter",
"=",
"8",
"char",
"id",
"=",
"51",
"x",
"=",
"189",
"y",
"=",
"2",
"width",
"=",
"28",
"height",
"=",
"50",
"xoffset",
"=",
"1",
"yoffset",
"=",
"6",
"xadvance",
"=",
"28",
"page",
"=",
"0",
"chnl",
"=",
"0",
"letter",
"=",
"3",
"char",
"id",
"=",
"50",
"x",
"=",
"219",
"y",
"=",
"2",
"width",
"=",
"28",
"height",
"=",
"49",
"xoffset",
"=",
"1",
"yoffset",
"=",
"7",
"xadvance",
"=",
"28",
"page",
"=",
"0",
"chnl",
"=",
"0",
"letter",
"=",
"2",
"char",
"id",
"=",
"55",
"x",
"=",
"2",
"y",
"=",
"55",
"width",
"=",
"30",
"height",
"=",
"48",
"xoffset",
"=",
"1",
"yoffset",
"=",
"8",
"xadvance",
"=",
"28",
"page",
"=",
"0",
"chnl",
"=",
"0",
"letter",
"=",
"7",
"char",
"id",
"=",
"49",
"x",
"=",
"34",
"y",
"=",
"55",
"width",
"=",
"20",
"height",
"=",
"48",
"xoffset",
"=",
"1",
"yoffset",
"=",
"8",
"xadvance",
"=",
"20",
"page",
"=",
"0",
"chnl",
"=",
"0",
"letter",
"=",
"1",
"char",
"id",
"=",
"45",
"x",
"=",
"56",
"y",
"=",
"55",
"width",
"=",
"18",
"height",
"=",
"12",
"xoffset",
"=",
"1",
"yoffset",
"=",
"36",
"xadvance",
"=",
"19",
"page",
"=",
"0",
"chnl",
"=",
"0",
"letter",
"=",
"-",
"char",
"id",
"=",
"32",
"x",
"=",
"76",
"y",
"=",
"55",
"width",
"=",
"0",
"height",
"=",
"0",
"xoffset",
"=",
"11",
"yoffset",
"=",
"73",
"xadvance",
"=",
"16",
"page",
"=",
"0",
"chnl",
"=",
"0",
"letter",
"=",
"space"
] |
justbilt/untp
|
python
|
https://github.com/justbilt/untp/blob/98ec6199e62e7a97ad74b5cf5a1ea7be9b4c9d01/src/untp/dataparse.py#L32-L85
|
[
"def",
"parse_fntdata",
"(",
"_data",
",",
"_config",
",",
"_extra_data_receiver",
"=",
"None",
")",
":",
"data",
"=",
"{",
"}",
"frame_data_list",
"=",
"[",
"]",
"parse_common_info",
"=",
"parse",
"(",
"\"common lineHeight={line_height:d} base={base:d} scaleW={scale_w:d} scaleH={scale_h:d} pages={pages:d} packed={packed:d}\"",
",",
"_data",
"[",
"1",
"]",
")",
"parse_page_info",
"=",
"parse",
"(",
"\"page id={id:d} file=\\\"{file}\\\"\"",
",",
"_data",
"[",
"2",
"]",
")",
"parse_char_count",
"=",
"parse",
"(",
"\"chars count={count:d}\"",
",",
"_data",
"[",
"3",
"]",
")",
"raw_frames_data",
"=",
"{",
"}",
"for",
"index",
"in",
"xrange",
"(",
"0",
",",
"parse_char_count",
"[",
"\"count\"",
"]",
")",
":",
"parse_frame",
"=",
"parse",
"(",
"\"char id={id:d} x={x:d} y={y:d} width={width:d} height={height:d} xoffset={xoffset:d} yoffset={yoffset:d} xadvance={xadvance:d} page={page:d} chnl={chnl:d} letter=\\\"{letter}\\\"\"",
",",
"_data",
"[",
"index",
"+",
"4",
"]",
")",
"frame_data",
"=",
"{",
"}",
"frame_data",
"[",
"\"name\"",
"]",
"=",
"\"{prefix}_{id}.png\"",
".",
"format",
"(",
"prefix",
"=",
"_config",
"[",
"\"prefix\"",
"]",
",",
"id",
"=",
"parse_frame",
"[",
"\"id\"",
"]",
",",
"letter",
"=",
"parse_frame",
"[",
"\"letter\"",
"]",
")",
"frame_data",
"[",
"\"source_size\"",
"]",
"=",
"(",
"parse_frame",
"[",
"\"width\"",
"]",
",",
"parse_frame",
"[",
"\"height\"",
"]",
")",
"frame_data",
"[",
"\"rotated\"",
"]",
"=",
"False",
"frame_data",
"[",
"\"src_rect\"",
"]",
"=",
"(",
"parse_frame",
"[",
"\"x\"",
"]",
",",
"parse_frame",
"[",
"\"y\"",
"]",
",",
"parse_frame",
"[",
"\"x\"",
"]",
"+",
"parse_frame",
"[",
"\"width\"",
"]",
",",
"parse_frame",
"[",
"\"y\"",
"]",
"+",
"parse_frame",
"[",
"\"height\"",
"]",
")",
"frame_data",
"[",
"\"offset\"",
"]",
"=",
"(",
"0",
",",
"0",
")",
"if",
"parse_frame",
"[",
"\"width\"",
"]",
"<=",
"0",
"or",
"parse_frame",
"[",
"\"height\"",
"]",
"<=",
"0",
":",
"continue",
"frame_data_list",
".",
"append",
"(",
"frame_data",
")",
"parse_frame_named_data",
"=",
"parse_frame",
".",
"named",
".",
"copy",
"(",
")",
"parse_frame_named_data",
"[",
"\"texture\"",
"]",
"=",
"frame_data",
"[",
"\"name\"",
"]",
"raw_frames_data",
"[",
"parse_frame",
"[",
"\"id\"",
"]",
"]",
"=",
"parse_frame_named_data",
"data",
"[",
"\"texture\"",
"]",
"=",
"parse_page_info",
"[",
"\"file\"",
"]",
"data",
"[",
"\"frames\"",
"]",
"=",
"frame_data_list",
"if",
"_extra_data_receiver",
"!=",
"None",
":",
"_extra_data_receiver",
"[",
"\"common\"",
"]",
"=",
"parse_common_info",
".",
"named",
"_extra_data_receiver",
"[",
"\"frames\"",
"]",
"=",
"raw_frames_data",
"return",
"data"
] |
98ec6199e62e7a97ad74b5cf5a1ea7be9b4c9d01
|
test
|
_pvr_head
|
struct CCZHeader {
unsigned char sig[4]; // signature. Should be 'CCZ!' 4 bytes
unsigned short compression_type; // should 0
unsigned short version; // should be 2 (although version type==1 is also supported)
unsigned int reserved; // Reserved for users.
unsigned int len; // size of the uncompressed file
};
|
src/untp/pvr.py
|
def _pvr_head(_data):
"""
struct CCZHeader {
unsigned char sig[4]; // signature. Should be 'CCZ!' 4 bytes
unsigned short compression_type; // should 0
unsigned short version; // should be 2 (although version type==1 is also supported)
unsigned int reserved; // Reserved for users.
unsigned int len; // size of the uncompressed file
};
"""
return {
"sig": _data[:4],
"compression_type": struct.unpack("H", _data[4:6])[0],
"version": struct.unpack("H", _data[6:8])[0],
"reserved": struct.unpack("I", _data[8:12])[0],
"len": struct.unpack("I", _data[12:16])[0],
}
|
def _pvr_head(_data):
"""
struct CCZHeader {
unsigned char sig[4]; // signature. Should be 'CCZ!' 4 bytes
unsigned short compression_type; // should 0
unsigned short version; // should be 2 (although version type==1 is also supported)
unsigned int reserved; // Reserved for users.
unsigned int len; // size of the uncompressed file
};
"""
return {
"sig": _data[:4],
"compression_type": struct.unpack("H", _data[4:6])[0],
"version": struct.unpack("H", _data[6:8])[0],
"reserved": struct.unpack("I", _data[8:12])[0],
"len": struct.unpack("I", _data[12:16])[0],
}
|
[
"struct",
"CCZHeader",
"{",
"unsigned",
"char",
"sig",
"[",
"4",
"]",
";",
"//",
"signature",
".",
"Should",
"be",
"CCZ!",
"4",
"bytes",
"unsigned",
"short",
"compression_type",
";",
"//",
"should",
"0",
"unsigned",
"short",
"version",
";",
"//",
"should",
"be",
"2",
"(",
"although",
"version",
"type",
"==",
"1",
"is",
"also",
"supported",
")",
"unsigned",
"int",
"reserved",
";",
"//",
"Reserved",
"for",
"users",
".",
"unsigned",
"int",
"len",
";",
"//",
"size",
"of",
"the",
"uncompressed",
"file",
"}",
";"
] |
justbilt/untp
|
python
|
https://github.com/justbilt/untp/blob/98ec6199e62e7a97ad74b5cf5a1ea7be9b4c9d01/src/untp/pvr.py#L60-L76
|
[
"def",
"_pvr_head",
"(",
"_data",
")",
":",
"return",
"{",
"\"sig\"",
":",
"_data",
"[",
":",
"4",
"]",
",",
"\"compression_type\"",
":",
"struct",
".",
"unpack",
"(",
"\"H\"",
",",
"_data",
"[",
"4",
":",
"6",
"]",
")",
"[",
"0",
"]",
",",
"\"version\"",
":",
"struct",
".",
"unpack",
"(",
"\"H\"",
",",
"_data",
"[",
"6",
":",
"8",
"]",
")",
"[",
"0",
"]",
",",
"\"reserved\"",
":",
"struct",
".",
"unpack",
"(",
"\"I\"",
",",
"_data",
"[",
"8",
":",
"12",
"]",
")",
"[",
"0",
"]",
",",
"\"len\"",
":",
"struct",
".",
"unpack",
"(",
"\"I\"",
",",
"_data",
"[",
"12",
":",
"16",
"]",
")",
"[",
"0",
"]",
",",
"}"
] |
98ec6199e62e7a97ad74b5cf5a1ea7be9b4c9d01
|
test
|
SQLStepQueue.qsize
|
Return an approximate number of queued tasks in the queue.
|
memsql/common/sql_step_queue/queue.py
|
def qsize(self, extra_predicate=None):
""" Return an approximate number of queued tasks in the queue. """
count = self._query_queued('COUNT(*) AS count', extra_predicate=extra_predicate)
return count[0].count
|
def qsize(self, extra_predicate=None):
""" Return an approximate number of queued tasks in the queue. """
count = self._query_queued('COUNT(*) AS count', extra_predicate=extra_predicate)
return count[0].count
|
[
"Return",
"an",
"approximate",
"number",
"of",
"queued",
"tasks",
"in",
"the",
"queue",
"."
] |
memsql/memsql-python
|
python
|
https://github.com/memsql/memsql-python/blob/aac223a1b937d5b348b42af3c601a6c685ca633a/memsql/common/sql_step_queue/queue.py#L50-L53
|
[
"def",
"qsize",
"(",
"self",
",",
"extra_predicate",
"=",
"None",
")",
":",
"count",
"=",
"self",
".",
"_query_queued",
"(",
"'COUNT(*) AS count'",
",",
"extra_predicate",
"=",
"extra_predicate",
")",
"return",
"count",
"[",
"0",
"]",
".",
"count"
] |
aac223a1b937d5b348b42af3c601a6c685ca633a
|
test
|
SQLStepQueue.enqueue
|
Enqueue task with specified data.
|
memsql/common/sql_step_queue/queue.py
|
def enqueue(self, data):
""" Enqueue task with specified data. """
jsonified_data = json.dumps(data)
with self._db_conn() as conn:
return conn.execute(
'INSERT INTO %s (created, data) VALUES (%%(created)s, %%(data)s)' % self.table_name,
created=datetime.utcnow(),
data=jsonified_data
)
|
def enqueue(self, data):
""" Enqueue task with specified data. """
jsonified_data = json.dumps(data)
with self._db_conn() as conn:
return conn.execute(
'INSERT INTO %s (created, data) VALUES (%%(created)s, %%(data)s)' % self.table_name,
created=datetime.utcnow(),
data=jsonified_data
)
|
[
"Enqueue",
"task",
"with",
"specified",
"data",
"."
] |
memsql/memsql-python
|
python
|
https://github.com/memsql/memsql-python/blob/aac223a1b937d5b348b42af3c601a6c685ca633a/memsql/common/sql_step_queue/queue.py#L55-L63
|
[
"def",
"enqueue",
"(",
"self",
",",
"data",
")",
":",
"jsonified_data",
"=",
"json",
".",
"dumps",
"(",
"data",
")",
"with",
"self",
".",
"_db_conn",
"(",
")",
"as",
"conn",
":",
"return",
"conn",
".",
"execute",
"(",
"'INSERT INTO %s (created, data) VALUES (%%(created)s, %%(data)s)'",
"%",
"self",
".",
"table_name",
",",
"created",
"=",
"datetime",
".",
"utcnow",
"(",
")",
",",
"data",
"=",
"jsonified_data",
")"
] |
aac223a1b937d5b348b42af3c601a6c685ca633a
|
test
|
SQLStepQueue.start
|
Retrieve a task handler from the queue.
If block is True, this function will block until it is able to retrieve a task.
If block is True and timeout is a number it will block for at most <timeout> seconds.
retry_interval is the maximum time in seconds between successive retries.
extra_predicate
If extra_predicate is defined, it should be a tuple of (raw_predicate, predicate_args)
raw_predicate will be prefixed by AND, and inserted into the WHERE condition in the queries.
predicate_args will be sql escaped and formatted into raw_predicate.
|
memsql/common/sql_step_queue/queue.py
|
def start(self, block=False, timeout=None, retry_interval=0.5, extra_predicate=None):
"""
Retrieve a task handler from the queue.
If block is True, this function will block until it is able to retrieve a task.
If block is True and timeout is a number it will block for at most <timeout> seconds.
retry_interval is the maximum time in seconds between successive retries.
extra_predicate
If extra_predicate is defined, it should be a tuple of (raw_predicate, predicate_args)
raw_predicate will be prefixed by AND, and inserted into the WHERE condition in the queries.
predicate_args will be sql escaped and formatted into raw_predicate.
"""
start = time.time()
while 1:
task_handler = self._dequeue_task(extra_predicate)
if task_handler is None and block:
if timeout is not None and (time.time() - start) > timeout:
break
time.sleep(retry_interval * (random.random() + 0.1))
else:
break
return task_handler
|
def start(self, block=False, timeout=None, retry_interval=0.5, extra_predicate=None):
"""
Retrieve a task handler from the queue.
If block is True, this function will block until it is able to retrieve a task.
If block is True and timeout is a number it will block for at most <timeout> seconds.
retry_interval is the maximum time in seconds between successive retries.
extra_predicate
If extra_predicate is defined, it should be a tuple of (raw_predicate, predicate_args)
raw_predicate will be prefixed by AND, and inserted into the WHERE condition in the queries.
predicate_args will be sql escaped and formatted into raw_predicate.
"""
start = time.time()
while 1:
task_handler = self._dequeue_task(extra_predicate)
if task_handler is None and block:
if timeout is not None and (time.time() - start) > timeout:
break
time.sleep(retry_interval * (random.random() + 0.1))
else:
break
return task_handler
|
[
"Retrieve",
"a",
"task",
"handler",
"from",
"the",
"queue",
"."
] |
memsql/memsql-python
|
python
|
https://github.com/memsql/memsql-python/blob/aac223a1b937d5b348b42af3c601a6c685ca633a/memsql/common/sql_step_queue/queue.py#L65-L87
|
[
"def",
"start",
"(",
"self",
",",
"block",
"=",
"False",
",",
"timeout",
"=",
"None",
",",
"retry_interval",
"=",
"0.5",
",",
"extra_predicate",
"=",
"None",
")",
":",
"start",
"=",
"time",
".",
"time",
"(",
")",
"while",
"1",
":",
"task_handler",
"=",
"self",
".",
"_dequeue_task",
"(",
"extra_predicate",
")",
"if",
"task_handler",
"is",
"None",
"and",
"block",
":",
"if",
"timeout",
"is",
"not",
"None",
"and",
"(",
"time",
".",
"time",
"(",
")",
"-",
"start",
")",
">",
"timeout",
":",
"break",
"time",
".",
"sleep",
"(",
"retry_interval",
"*",
"(",
"random",
".",
"random",
"(",
")",
"+",
"0.1",
")",
")",
"else",
":",
"break",
"return",
"task_handler"
] |
aac223a1b937d5b348b42af3c601a6c685ca633a
|
test
|
SQLStepQueue._build_extra_predicate
|
This method is a good one to extend if you want to create a queue which always applies an extra predicate.
|
memsql/common/sql_step_queue/queue.py
|
def _build_extra_predicate(self, extra_predicate):
""" This method is a good one to extend if you want to create a queue which always applies an extra predicate. """
if extra_predicate is None:
return ''
# if they don't have a supported format seq, wrap it for them
if not isinstance(extra_predicate[1], (list, dict, tuple)):
extra_predicate = [extra_predicate[0], (extra_predicate[1], )]
extra_predicate = database.escape_query(*extra_predicate)
return 'AND (' + extra_predicate + ')'
|
def _build_extra_predicate(self, extra_predicate):
""" This method is a good one to extend if you want to create a queue which always applies an extra predicate. """
if extra_predicate is None:
return ''
# if they don't have a supported format seq, wrap it for them
if not isinstance(extra_predicate[1], (list, dict, tuple)):
extra_predicate = [extra_predicate[0], (extra_predicate[1], )]
extra_predicate = database.escape_query(*extra_predicate)
return 'AND (' + extra_predicate + ')'
|
[
"This",
"method",
"is",
"a",
"good",
"one",
"to",
"extend",
"if",
"you",
"want",
"to",
"create",
"a",
"queue",
"which",
"always",
"applies",
"an",
"extra",
"predicate",
"."
] |
memsql/memsql-python
|
python
|
https://github.com/memsql/memsql-python/blob/aac223a1b937d5b348b42af3c601a6c685ca633a/memsql/common/sql_step_queue/queue.py#L186-L197
|
[
"def",
"_build_extra_predicate",
"(",
"self",
",",
"extra_predicate",
")",
":",
"if",
"extra_predicate",
"is",
"None",
":",
"return",
"''",
"# if they don't have a supported format seq, wrap it for them",
"if",
"not",
"isinstance",
"(",
"extra_predicate",
"[",
"1",
"]",
",",
"(",
"list",
",",
"dict",
",",
"tuple",
")",
")",
":",
"extra_predicate",
"=",
"[",
"extra_predicate",
"[",
"0",
"]",
",",
"(",
"extra_predicate",
"[",
"1",
"]",
",",
")",
"]",
"extra_predicate",
"=",
"database",
".",
"escape_query",
"(",
"*",
"extra_predicate",
")",
"return",
"'AND ('",
"+",
"extra_predicate",
"+",
"')'"
] |
aac223a1b937d5b348b42af3c601a6c685ca633a
|
test
|
simplejson_datetime_serializer
|
Designed to be passed as the default kwarg in simplejson.dumps. Serializes dates and datetimes to ISO strings.
|
memsql/common/json.py
|
def simplejson_datetime_serializer(obj):
"""
Designed to be passed as the default kwarg in simplejson.dumps. Serializes dates and datetimes to ISO strings.
"""
if hasattr(obj, 'isoformat'):
return obj.isoformat()
else:
raise TypeError('Object of type %s with value of %s is not JSON serializable' % (type(obj), repr(obj)))
|
def simplejson_datetime_serializer(obj):
"""
Designed to be passed as the default kwarg in simplejson.dumps. Serializes dates and datetimes to ISO strings.
"""
if hasattr(obj, 'isoformat'):
return obj.isoformat()
else:
raise TypeError('Object of type %s with value of %s is not JSON serializable' % (type(obj), repr(obj)))
|
[
"Designed",
"to",
"be",
"passed",
"as",
"the",
"default",
"kwarg",
"in",
"simplejson",
".",
"dumps",
".",
"Serializes",
"dates",
"and",
"datetimes",
"to",
"ISO",
"strings",
"."
] |
memsql/memsql-python
|
python
|
https://github.com/memsql/memsql-python/blob/aac223a1b937d5b348b42af3c601a6c685ca633a/memsql/common/json.py#L3-L10
|
[
"def",
"simplejson_datetime_serializer",
"(",
"obj",
")",
":",
"if",
"hasattr",
"(",
"obj",
",",
"'isoformat'",
")",
":",
"return",
"obj",
".",
"isoformat",
"(",
")",
"else",
":",
"raise",
"TypeError",
"(",
"'Object of type %s with value of %s is not JSON serializable'",
"%",
"(",
"type",
"(",
"obj",
")",
",",
"repr",
"(",
"obj",
")",
")",
")"
] |
aac223a1b937d5b348b42af3c601a6c685ca633a
|
test
|
Connection.reconnect
|
Closes the existing database connection and re-opens it.
|
memsql/common/database.py
|
def reconnect(self):
"""Closes the existing database connection and re-opens it."""
conn = _mysql.connect(**self._db_args)
if conn is not None:
self.close()
self._db = conn
|
def reconnect(self):
"""Closes the existing database connection and re-opens it."""
conn = _mysql.connect(**self._db_args)
if conn is not None:
self.close()
self._db = conn
|
[
"Closes",
"the",
"existing",
"database",
"connection",
"and",
"re",
"-",
"opens",
"it",
"."
] |
memsql/memsql-python
|
python
|
https://github.com/memsql/memsql-python/blob/aac223a1b937d5b348b42af3c601a6c685ca633a/memsql/common/database.py#L95-L100
|
[
"def",
"reconnect",
"(",
"self",
")",
":",
"conn",
"=",
"_mysql",
".",
"connect",
"(",
"*",
"*",
"self",
".",
"_db_args",
")",
"if",
"conn",
"is",
"not",
"None",
":",
"self",
".",
"close",
"(",
")",
"self",
".",
"_db",
"=",
"conn"
] |
aac223a1b937d5b348b42af3c601a6c685ca633a
|
test
|
Connection.query
|
Query the connection and return the rows (or affected rows if not a
select query). Mysql errors will be propogated as exceptions.
|
memsql/common/database.py
|
def query(self, query, *parameters, **kwparameters):
"""
Query the connection and return the rows (or affected rows if not a
select query). Mysql errors will be propogated as exceptions.
"""
return self._query(query, parameters, kwparameters)
|
def query(self, query, *parameters, **kwparameters):
"""
Query the connection and return the rows (or affected rows if not a
select query). Mysql errors will be propogated as exceptions.
"""
return self._query(query, parameters, kwparameters)
|
[
"Query",
"the",
"connection",
"and",
"return",
"the",
"rows",
"(",
"or",
"affected",
"rows",
"if",
"not",
"a",
"select",
"query",
")",
".",
"Mysql",
"errors",
"will",
"be",
"propogated",
"as",
"exceptions",
"."
] |
memsql/memsql-python
|
python
|
https://github.com/memsql/memsql-python/blob/aac223a1b937d5b348b42af3c601a6c685ca633a/memsql/common/database.py#L117-L122
|
[
"def",
"query",
"(",
"self",
",",
"query",
",",
"*",
"parameters",
",",
"*",
"*",
"kwparameters",
")",
":",
"return",
"self",
".",
"_query",
"(",
"query",
",",
"parameters",
",",
"kwparameters",
")"
] |
aac223a1b937d5b348b42af3c601a6c685ca633a
|
test
|
Connection.get
|
Returns the first row returned for the given query.
|
memsql/common/database.py
|
def get(self, query, *parameters, **kwparameters):
"""Returns the first row returned for the given query."""
rows = self._query(query, parameters, kwparameters)
if not rows:
return None
elif not isinstance(rows, list):
raise MySQLError("Query is not a select query")
elif len(rows) > 1:
raise MySQLError("Multiple rows returned for Database.get() query")
else:
return rows[0]
|
def get(self, query, *parameters, **kwparameters):
"""Returns the first row returned for the given query."""
rows = self._query(query, parameters, kwparameters)
if not rows:
return None
elif not isinstance(rows, list):
raise MySQLError("Query is not a select query")
elif len(rows) > 1:
raise MySQLError("Multiple rows returned for Database.get() query")
else:
return rows[0]
|
[
"Returns",
"the",
"first",
"row",
"returned",
"for",
"the",
"given",
"query",
"."
] |
memsql/memsql-python
|
python
|
https://github.com/memsql/memsql-python/blob/aac223a1b937d5b348b42af3c601a6c685ca633a/memsql/common/database.py#L124-L134
|
[
"def",
"get",
"(",
"self",
",",
"query",
",",
"*",
"parameters",
",",
"*",
"*",
"kwparameters",
")",
":",
"rows",
"=",
"self",
".",
"_query",
"(",
"query",
",",
"parameters",
",",
"kwparameters",
")",
"if",
"not",
"rows",
":",
"return",
"None",
"elif",
"not",
"isinstance",
"(",
"rows",
",",
"list",
")",
":",
"raise",
"MySQLError",
"(",
"\"Query is not a select query\"",
")",
"elif",
"len",
"(",
"rows",
")",
">",
"1",
":",
"raise",
"MySQLError",
"(",
"\"Multiple rows returned for Database.get() query\"",
")",
"else",
":",
"return",
"rows",
"[",
"0",
"]"
] |
aac223a1b937d5b348b42af3c601a6c685ca633a
|
test
|
Connection.execute
|
Executes the given query, returning the lastrowid from the query.
|
memsql/common/database.py
|
def execute(self, query, *parameters, **kwparameters):
"""Executes the given query, returning the lastrowid from the query."""
return self.execute_lastrowid(query, *parameters, **kwparameters)
|
def execute(self, query, *parameters, **kwparameters):
"""Executes the given query, returning the lastrowid from the query."""
return self.execute_lastrowid(query, *parameters, **kwparameters)
|
[
"Executes",
"the",
"given",
"query",
"returning",
"the",
"lastrowid",
"from",
"the",
"query",
"."
] |
memsql/memsql-python
|
python
|
https://github.com/memsql/memsql-python/blob/aac223a1b937d5b348b42af3c601a6c685ca633a/memsql/common/database.py#L138-L140
|
[
"def",
"execute",
"(",
"self",
",",
"query",
",",
"*",
"parameters",
",",
"*",
"*",
"kwparameters",
")",
":",
"return",
"self",
".",
"execute_lastrowid",
"(",
"query",
",",
"*",
"parameters",
",",
"*",
"*",
"kwparameters",
")"
] |
aac223a1b937d5b348b42af3c601a6c685ca633a
|
test
|
Connection.execute_lastrowid
|
Executes the given query, returning the lastrowid from the query.
|
memsql/common/database.py
|
def execute_lastrowid(self, query, *parameters, **kwparameters):
"""Executes the given query, returning the lastrowid from the query."""
self._execute(query, parameters, kwparameters)
self._result = self._db.store_result()
return self._db.insert_id()
|
def execute_lastrowid(self, query, *parameters, **kwparameters):
"""Executes the given query, returning the lastrowid from the query."""
self._execute(query, parameters, kwparameters)
self._result = self._db.store_result()
return self._db.insert_id()
|
[
"Executes",
"the",
"given",
"query",
"returning",
"the",
"lastrowid",
"from",
"the",
"query",
"."
] |
memsql/memsql-python
|
python
|
https://github.com/memsql/memsql-python/blob/aac223a1b937d5b348b42af3c601a6c685ca633a/memsql/common/database.py#L142-L146
|
[
"def",
"execute_lastrowid",
"(",
"self",
",",
"query",
",",
"*",
"parameters",
",",
"*",
"*",
"kwparameters",
")",
":",
"self",
".",
"_execute",
"(",
"query",
",",
"parameters",
",",
"kwparameters",
")",
"self",
".",
"_result",
"=",
"self",
".",
"_db",
".",
"store_result",
"(",
")",
"return",
"self",
".",
"_db",
".",
"insert_id",
"(",
")"
] |
aac223a1b937d5b348b42af3c601a6c685ca633a
|
test
|
get_connection
|
Returns a new connection to the database.
|
examples/multi_threaded_inserts.py
|
def get_connection(db=DATABASE):
""" Returns a new connection to the database. """
return database.connect(host=HOST, port=PORT, user=USER, password=PASSWORD, database=db)
|
def get_connection(db=DATABASE):
""" Returns a new connection to the database. """
return database.connect(host=HOST, port=PORT, user=USER, password=PASSWORD, database=db)
|
[
"Returns",
"a",
"new",
"connection",
"to",
"the",
"database",
"."
] |
memsql/memsql-python
|
python
|
https://github.com/memsql/memsql-python/blob/aac223a1b937d5b348b42af3c601a6c685ca633a/examples/multi_threaded_inserts.py#L30-L32
|
[
"def",
"get_connection",
"(",
"db",
"=",
"DATABASE",
")",
":",
"return",
"database",
".",
"connect",
"(",
"host",
"=",
"HOST",
",",
"port",
"=",
"PORT",
",",
"user",
"=",
"USER",
",",
"password",
"=",
"PASSWORD",
",",
"database",
"=",
"db",
")"
] |
aac223a1b937d5b348b42af3c601a6c685ca633a
|
test
|
run_benchmark
|
Run a set of InsertWorkers and record their performance.
|
examples/multi_threaded_inserts.py
|
def run_benchmark():
""" Run a set of InsertWorkers and record their performance. """
stopping = threading.Event()
workers = [ InsertWorker(stopping) for _ in range(NUM_WORKERS) ]
print('Launching %d workers' % NUM_WORKERS)
[ worker.start() for worker in workers ]
time.sleep(WORKLOAD_TIME)
print('Stopping workload')
stopping.set()
[ worker.join() for worker in workers ]
with get_connection() as conn:
count = conn.get("SELECT COUNT(*) AS count FROM %s" % TABLE).count
print("%d rows inserted using %d workers" % (count, NUM_WORKERS))
print("%.1f rows per second" % (count / float(WORKLOAD_TIME)))
|
def run_benchmark():
""" Run a set of InsertWorkers and record their performance. """
stopping = threading.Event()
workers = [ InsertWorker(stopping) for _ in range(NUM_WORKERS) ]
print('Launching %d workers' % NUM_WORKERS)
[ worker.start() for worker in workers ]
time.sleep(WORKLOAD_TIME)
print('Stopping workload')
stopping.set()
[ worker.join() for worker in workers ]
with get_connection() as conn:
count = conn.get("SELECT COUNT(*) AS count FROM %s" % TABLE).count
print("%d rows inserted using %d workers" % (count, NUM_WORKERS))
print("%.1f rows per second" % (count / float(WORKLOAD_TIME)))
|
[
"Run",
"a",
"set",
"of",
"InsertWorkers",
"and",
"record",
"their",
"performance",
"."
] |
memsql/memsql-python
|
python
|
https://github.com/memsql/memsql-python/blob/aac223a1b937d5b348b42af3c601a6c685ca633a/examples/multi_threaded_inserts.py#L64-L84
|
[
"def",
"run_benchmark",
"(",
")",
":",
"stopping",
"=",
"threading",
".",
"Event",
"(",
")",
"workers",
"=",
"[",
"InsertWorker",
"(",
"stopping",
")",
"for",
"_",
"in",
"range",
"(",
"NUM_WORKERS",
")",
"]",
"print",
"(",
"'Launching %d workers'",
"%",
"NUM_WORKERS",
")",
"[",
"worker",
".",
"start",
"(",
")",
"for",
"worker",
"in",
"workers",
"]",
"time",
".",
"sleep",
"(",
"WORKLOAD_TIME",
")",
"print",
"(",
"'Stopping workload'",
")",
"stopping",
".",
"set",
"(",
")",
"[",
"worker",
".",
"join",
"(",
")",
"for",
"worker",
"in",
"workers",
"]",
"with",
"get_connection",
"(",
")",
"as",
"conn",
":",
"count",
"=",
"conn",
".",
"get",
"(",
"\"SELECT COUNT(*) AS count FROM %s\"",
"%",
"TABLE",
")",
".",
"count",
"print",
"(",
"\"%d rows inserted using %d workers\"",
"%",
"(",
"count",
",",
"NUM_WORKERS",
")",
")",
"print",
"(",
"\"%.1f rows per second\"",
"%",
"(",
"count",
"/",
"float",
"(",
"WORKLOAD_TIME",
")",
")",
")"
] |
aac223a1b937d5b348b42af3c601a6c685ca633a
|
test
|
RandomAggregatorPool._pool_connect
|
`agg` should be (host, port)
Returns a live connection from the connection pool
|
memsql/common/random_aggregator_pool.py
|
def _pool_connect(self, agg):
""" `agg` should be (host, port)
Returns a live connection from the connection pool
"""
return self._pool.connect(agg[0], agg[1], self._user, self._password, self._database)
|
def _pool_connect(self, agg):
""" `agg` should be (host, port)
Returns a live connection from the connection pool
"""
return self._pool.connect(agg[0], agg[1], self._user, self._password, self._database)
|
[
"agg",
"should",
"be",
"(",
"host",
"port",
")",
"Returns",
"a",
"live",
"connection",
"from",
"the",
"connection",
"pool"
] |
memsql/memsql-python
|
python
|
https://github.com/memsql/memsql-python/blob/aac223a1b937d5b348b42af3c601a6c685ca633a/memsql/common/random_aggregator_pool.py#L60-L64
|
[
"def",
"_pool_connect",
"(",
"self",
",",
"agg",
")",
":",
"return",
"self",
".",
"_pool",
".",
"connect",
"(",
"agg",
"[",
"0",
"]",
",",
"agg",
"[",
"1",
"]",
",",
"self",
".",
"_user",
",",
"self",
".",
"_password",
",",
"self",
".",
"_database",
")"
] |
aac223a1b937d5b348b42af3c601a6c685ca633a
|
test
|
RandomAggregatorPool._connect
|
Returns an aggregator connection.
|
memsql/common/random_aggregator_pool.py
|
def _connect(self):
""" Returns an aggregator connection. """
with self._lock:
if self._aggregator:
try:
return self._pool_connect(self._aggregator)
except PoolConnectionException:
self._aggregator = None
if not len(self._aggregators):
with self._pool_connect(self._primary_aggregator) as conn:
self._update_aggregator_list(conn)
conn.expire()
random.shuffle(self._aggregators)
last_exception = None
for aggregator in self._aggregators:
self.logger.debug('Attempting connection with %s:%s' % (aggregator[0], aggregator[1]))
try:
conn = self._pool_connect(aggregator)
# connection successful!
self._aggregator = aggregator
return conn
except PoolConnectionException as e:
# connection error
last_exception = e
else:
# bad news bears... try again later
self._aggregator = None
self._aggregators = []
raise last_exception
|
def _connect(self):
""" Returns an aggregator connection. """
with self._lock:
if self._aggregator:
try:
return self._pool_connect(self._aggregator)
except PoolConnectionException:
self._aggregator = None
if not len(self._aggregators):
with self._pool_connect(self._primary_aggregator) as conn:
self._update_aggregator_list(conn)
conn.expire()
random.shuffle(self._aggregators)
last_exception = None
for aggregator in self._aggregators:
self.logger.debug('Attempting connection with %s:%s' % (aggregator[0], aggregator[1]))
try:
conn = self._pool_connect(aggregator)
# connection successful!
self._aggregator = aggregator
return conn
except PoolConnectionException as e:
# connection error
last_exception = e
else:
# bad news bears... try again later
self._aggregator = None
self._aggregators = []
raise last_exception
|
[
"Returns",
"an",
"aggregator",
"connection",
"."
] |
memsql/memsql-python
|
python
|
https://github.com/memsql/memsql-python/blob/aac223a1b937d5b348b42af3c601a6c685ca633a/memsql/common/random_aggregator_pool.py#L66-L99
|
[
"def",
"_connect",
"(",
"self",
")",
":",
"with",
"self",
".",
"_lock",
":",
"if",
"self",
".",
"_aggregator",
":",
"try",
":",
"return",
"self",
".",
"_pool_connect",
"(",
"self",
".",
"_aggregator",
")",
"except",
"PoolConnectionException",
":",
"self",
".",
"_aggregator",
"=",
"None",
"if",
"not",
"len",
"(",
"self",
".",
"_aggregators",
")",
":",
"with",
"self",
".",
"_pool_connect",
"(",
"self",
".",
"_primary_aggregator",
")",
"as",
"conn",
":",
"self",
".",
"_update_aggregator_list",
"(",
"conn",
")",
"conn",
".",
"expire",
"(",
")",
"random",
".",
"shuffle",
"(",
"self",
".",
"_aggregators",
")",
"last_exception",
"=",
"None",
"for",
"aggregator",
"in",
"self",
".",
"_aggregators",
":",
"self",
".",
"logger",
".",
"debug",
"(",
"'Attempting connection with %s:%s'",
"%",
"(",
"aggregator",
"[",
"0",
"]",
",",
"aggregator",
"[",
"1",
"]",
")",
")",
"try",
":",
"conn",
"=",
"self",
".",
"_pool_connect",
"(",
"aggregator",
")",
"# connection successful!",
"self",
".",
"_aggregator",
"=",
"aggregator",
"return",
"conn",
"except",
"PoolConnectionException",
"as",
"e",
":",
"# connection error",
"last_exception",
"=",
"e",
"else",
":",
"# bad news bears... try again later",
"self",
".",
"_aggregator",
"=",
"None",
"self",
".",
"_aggregators",
"=",
"[",
"]",
"raise",
"last_exception"
] |
aac223a1b937d5b348b42af3c601a6c685ca633a
|
test
|
lookup_by_number
|
Used for development only
|
memsql/common/errorcodes.py
|
def lookup_by_number(errno):
""" Used for development only """
for key, val in globals().items():
if errno == val:
print(key)
|
def lookup_by_number(errno):
""" Used for development only """
for key, val in globals().items():
if errno == val:
print(key)
|
[
"Used",
"for",
"development",
"only"
] |
memsql/memsql-python
|
python
|
https://github.com/memsql/memsql-python/blob/aac223a1b937d5b348b42af3c601a6c685ca633a/memsql/common/errorcodes.py#L838-L842
|
[
"def",
"lookup_by_number",
"(",
"errno",
")",
":",
"for",
"key",
",",
"val",
"in",
"globals",
"(",
")",
".",
"items",
"(",
")",
":",
"if",
"errno",
"==",
"val",
":",
"print",
"(",
"key",
")"
] |
aac223a1b937d5b348b42af3c601a6c685ca633a
|
test
|
ConnectionPool.size
|
Returns the number of connections cached by the pool.
|
memsql/common/connection_pool.py
|
def size(self):
""" Returns the number of connections cached by the pool. """
return sum(q.qsize() for q in self._connections.values()) + len(self._fairies)
|
def size(self):
""" Returns the number of connections cached by the pool. """
return sum(q.qsize() for q in self._connections.values()) + len(self._fairies)
|
[
"Returns",
"the",
"number",
"of",
"connections",
"cached",
"by",
"the",
"pool",
"."
] |
memsql/memsql-python
|
python
|
https://github.com/memsql/memsql-python/blob/aac223a1b937d5b348b42af3c601a6c685ca633a/memsql/common/connection_pool.py#L90-L92
|
[
"def",
"size",
"(",
"self",
")",
":",
"return",
"sum",
"(",
"q",
".",
"qsize",
"(",
")",
"for",
"q",
"in",
"self",
".",
"_connections",
".",
"values",
"(",
")",
")",
"+",
"len",
"(",
"self",
".",
"_fairies",
")"
] |
aac223a1b937d5b348b42af3c601a6c685ca633a
|
test
|
_PoolConnectionFairy.__potential_connection_failure
|
OperationalError's are emitted by the _mysql library for
almost every error code emitted by MySQL. Because of this we
verify that the error is actually a connection error before
terminating the connection and firing off a PoolConnectionException
|
memsql/common/connection_pool.py
|
def __potential_connection_failure(self, e):
""" OperationalError's are emitted by the _mysql library for
almost every error code emitted by MySQL. Because of this we
verify that the error is actually a connection error before
terminating the connection and firing off a PoolConnectionException
"""
try:
self._conn.query('SELECT 1')
except (IOError, _mysql.OperationalError):
# ok, it's actually an issue.
self.__handle_connection_failure(e)
else:
# seems ok, probably programmer error
raise _mysql.DatabaseError(*e.args)
|
def __potential_connection_failure(self, e):
""" OperationalError's are emitted by the _mysql library for
almost every error code emitted by MySQL. Because of this we
verify that the error is actually a connection error before
terminating the connection and firing off a PoolConnectionException
"""
try:
self._conn.query('SELECT 1')
except (IOError, _mysql.OperationalError):
# ok, it's actually an issue.
self.__handle_connection_failure(e)
else:
# seems ok, probably programmer error
raise _mysql.DatabaseError(*e.args)
|
[
"OperationalError",
"s",
"are",
"emitted",
"by",
"the",
"_mysql",
"library",
"for",
"almost",
"every",
"error",
"code",
"emitted",
"by",
"MySQL",
".",
"Because",
"of",
"this",
"we",
"verify",
"that",
"the",
"error",
"is",
"actually",
"a",
"connection",
"error",
"before",
"terminating",
"the",
"connection",
"and",
"firing",
"off",
"a",
"PoolConnectionException"
] |
memsql/memsql-python
|
python
|
https://github.com/memsql/memsql-python/blob/aac223a1b937d5b348b42af3c601a6c685ca633a/memsql/common/connection_pool.py#L134-L147
|
[
"def",
"__potential_connection_failure",
"(",
"self",
",",
"e",
")",
":",
"try",
":",
"self",
".",
"_conn",
".",
"query",
"(",
"'SELECT 1'",
")",
"except",
"(",
"IOError",
",",
"_mysql",
".",
"OperationalError",
")",
":",
"# ok, it's actually an issue.",
"self",
".",
"__handle_connection_failure",
"(",
"e",
")",
"else",
":",
"# seems ok, probably programmer error",
"raise",
"_mysql",
".",
"DatabaseError",
"(",
"*",
"e",
".",
"args",
")"
] |
aac223a1b937d5b348b42af3c601a6c685ca633a
|
test
|
simple_expression
|
Build a simple expression ready to be added onto another query.
>>> simple_expression(joiner=' AND ', name='bob', role='admin')
"`name`=%(_QB_name)s AND `name`=%(_QB_role)s", { '_QB_name': 'bob', '_QB_role': 'admin' }
|
memsql/common/query_builder.py
|
def simple_expression(joiner=', ', **fields):
""" Build a simple expression ready to be added onto another query.
>>> simple_expression(joiner=' AND ', name='bob', role='admin')
"`name`=%(_QB_name)s AND `name`=%(_QB_role)s", { '_QB_name': 'bob', '_QB_role': 'admin' }
"""
expression, params = [], {}
for field_name, value in sorted(fields.items(), key=lambda kv: kv[0]):
key = '_QB_%s' % field_name
expression.append('`%s`=%%(%s)s' % (field_name, key))
params[key] = value
return joiner.join(expression), params
|
def simple_expression(joiner=', ', **fields):
""" Build a simple expression ready to be added onto another query.
>>> simple_expression(joiner=' AND ', name='bob', role='admin')
"`name`=%(_QB_name)s AND `name`=%(_QB_role)s", { '_QB_name': 'bob', '_QB_role': 'admin' }
"""
expression, params = [], {}
for field_name, value in sorted(fields.items(), key=lambda kv: kv[0]):
key = '_QB_%s' % field_name
expression.append('`%s`=%%(%s)s' % (field_name, key))
params[key] = value
return joiner.join(expression), params
|
[
"Build",
"a",
"simple",
"expression",
"ready",
"to",
"be",
"added",
"onto",
"another",
"query",
"."
] |
memsql/memsql-python
|
python
|
https://github.com/memsql/memsql-python/blob/aac223a1b937d5b348b42af3c601a6c685ca633a/memsql/common/query_builder.py#L1-L14
|
[
"def",
"simple_expression",
"(",
"joiner",
"=",
"', '",
",",
"*",
"*",
"fields",
")",
":",
"expression",
",",
"params",
"=",
"[",
"]",
",",
"{",
"}",
"for",
"field_name",
",",
"value",
"in",
"sorted",
"(",
"fields",
".",
"items",
"(",
")",
",",
"key",
"=",
"lambda",
"kv",
":",
"kv",
"[",
"0",
"]",
")",
":",
"key",
"=",
"'_QB_%s'",
"%",
"field_name",
"expression",
".",
"append",
"(",
"'`%s`=%%(%s)s'",
"%",
"(",
"field_name",
",",
"key",
")",
")",
"params",
"[",
"key",
"]",
"=",
"value",
"return",
"joiner",
".",
"join",
"(",
"expression",
")",
",",
"params"
] |
aac223a1b937d5b348b42af3c601a6c685ca633a
|
test
|
update
|
Build a update query.
>>> update('foo_table', a=5, b=2)
"UPDATE `foo_table` SET `a`=%(_QB_a)s, `b`=%(_QB_b)s", { '_QB_a': 5, '_QB_b': 2 }
|
memsql/common/query_builder.py
|
def update(table_name, **fields):
""" Build a update query.
>>> update('foo_table', a=5, b=2)
"UPDATE `foo_table` SET `a`=%(_QB_a)s, `b`=%(_QB_b)s", { '_QB_a': 5, '_QB_b': 2 }
"""
prefix = "UPDATE `%s` SET " % table_name
sets, params = simple_expression(', ', **fields)
return prefix + sets, params
|
def update(table_name, **fields):
""" Build a update query.
>>> update('foo_table', a=5, b=2)
"UPDATE `foo_table` SET `a`=%(_QB_a)s, `b`=%(_QB_b)s", { '_QB_a': 5, '_QB_b': 2 }
"""
prefix = "UPDATE `%s` SET " % table_name
sets, params = simple_expression(', ', **fields)
return prefix + sets, params
|
[
"Build",
"a",
"update",
"query",
"."
] |
memsql/memsql-python
|
python
|
https://github.com/memsql/memsql-python/blob/aac223a1b937d5b348b42af3c601a6c685ca633a/memsql/common/query_builder.py#L16-L24
|
[
"def",
"update",
"(",
"table_name",
",",
"*",
"*",
"fields",
")",
":",
"prefix",
"=",
"\"UPDATE `%s` SET \"",
"%",
"table_name",
"sets",
",",
"params",
"=",
"simple_expression",
"(",
"', '",
",",
"*",
"*",
"fields",
")",
"return",
"prefix",
"+",
"sets",
",",
"params"
] |
aac223a1b937d5b348b42af3c601a6c685ca633a
|
test
|
SQLLock.ping
|
Notify the manager that this lock is still active.
|
memsql/common/sql_lock.py
|
def ping(self):
""" Notify the manager that this lock is still active. """
with self._db_conn() as conn:
affected_rows = conn.query('''
UPDATE %s
SET last_contact=%%s
WHERE id = %%s AND lock_hash = %%s
''' % self._manager.table_name, datetime.utcnow(), self._lock_id, self._lock_hash)
return bool(affected_rows == 1)
|
def ping(self):
""" Notify the manager that this lock is still active. """
with self._db_conn() as conn:
affected_rows = conn.query('''
UPDATE %s
SET last_contact=%%s
WHERE id = %%s AND lock_hash = %%s
''' % self._manager.table_name, datetime.utcnow(), self._lock_id, self._lock_hash)
return bool(affected_rows == 1)
|
[
"Notify",
"the",
"manager",
"that",
"this",
"lock",
"is",
"still",
"active",
"."
] |
memsql/memsql-python
|
python
|
https://github.com/memsql/memsql-python/blob/aac223a1b937d5b348b42af3c601a6c685ca633a/memsql/common/sql_lock.py#L87-L96
|
[
"def",
"ping",
"(",
"self",
")",
":",
"with",
"self",
".",
"_db_conn",
"(",
")",
"as",
"conn",
":",
"affected_rows",
"=",
"conn",
".",
"query",
"(",
"'''\n UPDATE %s\n SET last_contact=%%s\n WHERE id = %%s AND lock_hash = %%s\n '''",
"%",
"self",
".",
"_manager",
".",
"table_name",
",",
"datetime",
".",
"utcnow",
"(",
")",
",",
"self",
".",
"_lock_id",
",",
"self",
".",
"_lock_hash",
")",
"return",
"bool",
"(",
"affected_rows",
"==",
"1",
")"
] |
aac223a1b937d5b348b42af3c601a6c685ca633a
|
test
|
SQLLock.release
|
Release the lock.
|
memsql/common/sql_lock.py
|
def release(self):
""" Release the lock. """
if self.valid():
with self._db_conn() as conn:
affected_rows = conn.query('''
DELETE FROM %s
WHERE id = %%s AND lock_hash = %%s
''' % self._manager.table_name, self._lock_id, self._lock_hash)
return bool(affected_rows == 1)
else:
return False
|
def release(self):
""" Release the lock. """
if self.valid():
with self._db_conn() as conn:
affected_rows = conn.query('''
DELETE FROM %s
WHERE id = %%s AND lock_hash = %%s
''' % self._manager.table_name, self._lock_id, self._lock_hash)
return bool(affected_rows == 1)
else:
return False
|
[
"Release",
"the",
"lock",
"."
] |
memsql/memsql-python
|
python
|
https://github.com/memsql/memsql-python/blob/aac223a1b937d5b348b42af3c601a6c685ca633a/memsql/common/sql_lock.py#L98-L108
|
[
"def",
"release",
"(",
"self",
")",
":",
"if",
"self",
".",
"valid",
"(",
")",
":",
"with",
"self",
".",
"_db_conn",
"(",
")",
"as",
"conn",
":",
"affected_rows",
"=",
"conn",
".",
"query",
"(",
"'''\n DELETE FROM %s\n WHERE id = %%s AND lock_hash = %%s\n '''",
"%",
"self",
".",
"_manager",
".",
"table_name",
",",
"self",
".",
"_lock_id",
",",
"self",
".",
"_lock_hash",
")",
"return",
"bool",
"(",
"affected_rows",
"==",
"1",
")",
"else",
":",
"return",
"False"
] |
aac223a1b937d5b348b42af3c601a6c685ca633a
|
test
|
SQLUtility.connect
|
Connect to the database specified
|
memsql/common/sql_utility.py
|
def connect(self, host='127.0.0.1', port=3306, user='root', password='', database=None):
""" Connect to the database specified """
if database is None:
raise exceptions.RequiresDatabase()
self._db_args = { 'host': host, 'port': port, 'user': user, 'password': password, 'database': database }
with self._db_conn() as conn:
conn.query('SELECT 1')
return self
|
def connect(self, host='127.0.0.1', port=3306, user='root', password='', database=None):
""" Connect to the database specified """
if database is None:
raise exceptions.RequiresDatabase()
self._db_args = { 'host': host, 'port': port, 'user': user, 'password': password, 'database': database }
with self._db_conn() as conn:
conn.query('SELECT 1')
return self
|
[
"Connect",
"to",
"the",
"database",
"specified"
] |
memsql/memsql-python
|
python
|
https://github.com/memsql/memsql-python/blob/aac223a1b937d5b348b42af3c601a6c685ca633a/memsql/common/sql_utility.py#L13-L22
|
[
"def",
"connect",
"(",
"self",
",",
"host",
"=",
"'127.0.0.1'",
",",
"port",
"=",
"3306",
",",
"user",
"=",
"'root'",
",",
"password",
"=",
"''",
",",
"database",
"=",
"None",
")",
":",
"if",
"database",
"is",
"None",
":",
"raise",
"exceptions",
".",
"RequiresDatabase",
"(",
")",
"self",
".",
"_db_args",
"=",
"{",
"'host'",
":",
"host",
",",
"'port'",
":",
"port",
",",
"'user'",
":",
"user",
",",
"'password'",
":",
"password",
",",
"'database'",
":",
"database",
"}",
"with",
"self",
".",
"_db_conn",
"(",
")",
"as",
"conn",
":",
"conn",
".",
"query",
"(",
"'SELECT 1'",
")",
"return",
"self"
] |
aac223a1b937d5b348b42af3c601a6c685ca633a
|
test
|
SQLUtility.setup
|
Initialize the required tables in the database
|
memsql/common/sql_utility.py
|
def setup(self):
""" Initialize the required tables in the database """
with self._db_conn() as conn:
for table_defn in self._tables.values():
conn.execute(table_defn)
return self
|
def setup(self):
""" Initialize the required tables in the database """
with self._db_conn() as conn:
for table_defn in self._tables.values():
conn.execute(table_defn)
return self
|
[
"Initialize",
"the",
"required",
"tables",
"in",
"the",
"database"
] |
memsql/memsql-python
|
python
|
https://github.com/memsql/memsql-python/blob/aac223a1b937d5b348b42af3c601a6c685ca633a/memsql/common/sql_utility.py#L27-L32
|
[
"def",
"setup",
"(",
"self",
")",
":",
"with",
"self",
".",
"_db_conn",
"(",
")",
"as",
"conn",
":",
"for",
"table_defn",
"in",
"self",
".",
"_tables",
".",
"values",
"(",
")",
":",
"conn",
".",
"execute",
"(",
"table_defn",
")",
"return",
"self"
] |
aac223a1b937d5b348b42af3c601a6c685ca633a
|
test
|
SQLUtility.destroy
|
Destroy the SQLStepQueue tables in the database
|
memsql/common/sql_utility.py
|
def destroy(self):
""" Destroy the SQLStepQueue tables in the database """
with self._db_conn() as conn:
for table_name in self._tables:
conn.execute('DROP TABLE IF EXISTS %s' % table_name)
return self
|
def destroy(self):
""" Destroy the SQLStepQueue tables in the database """
with self._db_conn() as conn:
for table_name in self._tables:
conn.execute('DROP TABLE IF EXISTS %s' % table_name)
return self
|
[
"Destroy",
"the",
"SQLStepQueue",
"tables",
"in",
"the",
"database"
] |
memsql/memsql-python
|
python
|
https://github.com/memsql/memsql-python/blob/aac223a1b937d5b348b42af3c601a6c685ca633a/memsql/common/sql_utility.py#L34-L39
|
[
"def",
"destroy",
"(",
"self",
")",
":",
"with",
"self",
".",
"_db_conn",
"(",
")",
"as",
"conn",
":",
"for",
"table_name",
"in",
"self",
".",
"_tables",
":",
"conn",
".",
"execute",
"(",
"'DROP TABLE IF EXISTS %s'",
"%",
"table_name",
")",
"return",
"self"
] |
aac223a1b937d5b348b42af3c601a6c685ca633a
|
test
|
SQLUtility.ready
|
Returns True if the tables have been setup, False otherwise
|
memsql/common/sql_utility.py
|
def ready(self):
""" Returns True if the tables have been setup, False otherwise """
with self._db_conn() as conn:
tables = [row.t for row in conn.query('''
SELECT table_name AS t FROM information_schema.tables
WHERE table_schema=%s
''', self._db_args['database'])]
return all([table_name in tables for table_name in self._tables])
|
def ready(self):
""" Returns True if the tables have been setup, False otherwise """
with self._db_conn() as conn:
tables = [row.t for row in conn.query('''
SELECT table_name AS t FROM information_schema.tables
WHERE table_schema=%s
''', self._db_args['database'])]
return all([table_name in tables for table_name in self._tables])
|
[
"Returns",
"True",
"if",
"the",
"tables",
"have",
"been",
"setup",
"False",
"otherwise"
] |
memsql/memsql-python
|
python
|
https://github.com/memsql/memsql-python/blob/aac223a1b937d5b348b42af3c601a6c685ca633a/memsql/common/sql_utility.py#L41-L48
|
[
"def",
"ready",
"(",
"self",
")",
":",
"with",
"self",
".",
"_db_conn",
"(",
")",
"as",
"conn",
":",
"tables",
"=",
"[",
"row",
".",
"t",
"for",
"row",
"in",
"conn",
".",
"query",
"(",
"'''\n SELECT table_name AS t FROM information_schema.tables\n WHERE table_schema=%s\n '''",
",",
"self",
".",
"_db_args",
"[",
"'database'",
"]",
")",
"]",
"return",
"all",
"(",
"[",
"table_name",
"in",
"tables",
"for",
"table_name",
"in",
"self",
".",
"_tables",
"]",
")"
] |
aac223a1b937d5b348b42af3c601a6c685ca633a
|
test
|
TaskHandler.valid
|
Check to see if we are still active.
|
memsql/common/sql_step_queue/task_handler.py
|
def valid(self):
""" Check to see if we are still active. """
if self.finished is not None:
return False
with self._db_conn() as conn:
row = conn.get('''
SELECT (last_contact > %%(now)s - INTERVAL %%(ttl)s SECOND) AS valid
FROM %s
WHERE
id = %%(task_id)s
AND execution_id = %%(execution_id)s
''' % self._queue.table_name,
now=datetime.utcnow(),
ttl=self._queue.execution_ttl,
task_id=self.task_id,
execution_id=self.execution_id)
return bool(row is not None and row.valid)
|
def valid(self):
""" Check to see if we are still active. """
if self.finished is not None:
return False
with self._db_conn() as conn:
row = conn.get('''
SELECT (last_contact > %%(now)s - INTERVAL %%(ttl)s SECOND) AS valid
FROM %s
WHERE
id = %%(task_id)s
AND execution_id = %%(execution_id)s
''' % self._queue.table_name,
now=datetime.utcnow(),
ttl=self._queue.execution_ttl,
task_id=self.task_id,
execution_id=self.execution_id)
return bool(row is not None and row.valid)
|
[
"Check",
"to",
"see",
"if",
"we",
"are",
"still",
"active",
"."
] |
memsql/memsql-python
|
python
|
https://github.com/memsql/memsql-python/blob/aac223a1b937d5b348b42af3c601a6c685ca633a/memsql/common/sql_step_queue/task_handler.py#L26-L44
|
[
"def",
"valid",
"(",
"self",
")",
":",
"if",
"self",
".",
"finished",
"is",
"not",
"None",
":",
"return",
"False",
"with",
"self",
".",
"_db_conn",
"(",
")",
"as",
"conn",
":",
"row",
"=",
"conn",
".",
"get",
"(",
"'''\n SELECT (last_contact > %%(now)s - INTERVAL %%(ttl)s SECOND) AS valid\n FROM %s\n WHERE\n id = %%(task_id)s\n AND execution_id = %%(execution_id)s\n '''",
"%",
"self",
".",
"_queue",
".",
"table_name",
",",
"now",
"=",
"datetime",
".",
"utcnow",
"(",
")",
",",
"ttl",
"=",
"self",
".",
"_queue",
".",
"execution_ttl",
",",
"task_id",
"=",
"self",
".",
"task_id",
",",
"execution_id",
"=",
"self",
".",
"execution_id",
")",
"return",
"bool",
"(",
"row",
"is",
"not",
"None",
"and",
"row",
".",
"valid",
")"
] |
aac223a1b937d5b348b42af3c601a6c685ca633a
|
test
|
TaskHandler.ping
|
Notify the queue that this task is still active.
|
memsql/common/sql_step_queue/task_handler.py
|
def ping(self):
""" Notify the queue that this task is still active. """
if self.finished is not None:
raise AlreadyFinished()
with self._db_conn() as conn:
success = conn.query('''
UPDATE %s
SET
last_contact=%%(now)s,
update_count=update_count + 1
WHERE
id = %%(task_id)s
AND execution_id = %%(execution_id)s
AND last_contact > %%(now)s - INTERVAL %%(ttl)s SECOND
''' % self._queue.table_name,
now=datetime.utcnow(),
task_id=self.task_id,
execution_id=self.execution_id,
ttl=self._queue.execution_ttl)
if success != 1:
raise TaskDoesNotExist()
|
def ping(self):
""" Notify the queue that this task is still active. """
if self.finished is not None:
raise AlreadyFinished()
with self._db_conn() as conn:
success = conn.query('''
UPDATE %s
SET
last_contact=%%(now)s,
update_count=update_count + 1
WHERE
id = %%(task_id)s
AND execution_id = %%(execution_id)s
AND last_contact > %%(now)s - INTERVAL %%(ttl)s SECOND
''' % self._queue.table_name,
now=datetime.utcnow(),
task_id=self.task_id,
execution_id=self.execution_id,
ttl=self._queue.execution_ttl)
if success != 1:
raise TaskDoesNotExist()
|
[
"Notify",
"the",
"queue",
"that",
"this",
"task",
"is",
"still",
"active",
"."
] |
memsql/memsql-python
|
python
|
https://github.com/memsql/memsql-python/blob/aac223a1b937d5b348b42af3c601a6c685ca633a/memsql/common/sql_step_queue/task_handler.py#L46-L68
|
[
"def",
"ping",
"(",
"self",
")",
":",
"if",
"self",
".",
"finished",
"is",
"not",
"None",
":",
"raise",
"AlreadyFinished",
"(",
")",
"with",
"self",
".",
"_db_conn",
"(",
")",
"as",
"conn",
":",
"success",
"=",
"conn",
".",
"query",
"(",
"'''\n UPDATE %s\n SET\n last_contact=%%(now)s,\n update_count=update_count + 1\n WHERE\n id = %%(task_id)s\n AND execution_id = %%(execution_id)s\n AND last_contact > %%(now)s - INTERVAL %%(ttl)s SECOND\n '''",
"%",
"self",
".",
"_queue",
".",
"table_name",
",",
"now",
"=",
"datetime",
".",
"utcnow",
"(",
")",
",",
"task_id",
"=",
"self",
".",
"task_id",
",",
"execution_id",
"=",
"self",
".",
"execution_id",
",",
"ttl",
"=",
"self",
".",
"_queue",
".",
"execution_ttl",
")",
"if",
"success",
"!=",
"1",
":",
"raise",
"TaskDoesNotExist",
"(",
")"
] |
aac223a1b937d5b348b42af3c601a6c685ca633a
|
test
|
TaskHandler.start_step
|
Start a step.
|
memsql/common/sql_step_queue/task_handler.py
|
def start_step(self, step_name):
""" Start a step. """
if self.finished is not None:
raise AlreadyFinished()
step_data = self._get_step(step_name)
if step_data is not None:
if 'stop' in step_data:
raise StepAlreadyFinished()
else:
raise StepAlreadyStarted()
steps = copy.deepcopy(self.steps)
steps.append({
"start": datetime.utcnow(),
"name": step_name
})
self._save(steps=steps)
|
def start_step(self, step_name):
""" Start a step. """
if self.finished is not None:
raise AlreadyFinished()
step_data = self._get_step(step_name)
if step_data is not None:
if 'stop' in step_data:
raise StepAlreadyFinished()
else:
raise StepAlreadyStarted()
steps = copy.deepcopy(self.steps)
steps.append({
"start": datetime.utcnow(),
"name": step_name
})
self._save(steps=steps)
|
[
"Start",
"a",
"step",
"."
] |
memsql/memsql-python
|
python
|
https://github.com/memsql/memsql-python/blob/aac223a1b937d5b348b42af3c601a6c685ca633a/memsql/common/sql_step_queue/task_handler.py#L110-L127
|
[
"def",
"start_step",
"(",
"self",
",",
"step_name",
")",
":",
"if",
"self",
".",
"finished",
"is",
"not",
"None",
":",
"raise",
"AlreadyFinished",
"(",
")",
"step_data",
"=",
"self",
".",
"_get_step",
"(",
"step_name",
")",
"if",
"step_data",
"is",
"not",
"None",
":",
"if",
"'stop'",
"in",
"step_data",
":",
"raise",
"StepAlreadyFinished",
"(",
")",
"else",
":",
"raise",
"StepAlreadyStarted",
"(",
")",
"steps",
"=",
"copy",
".",
"deepcopy",
"(",
"self",
".",
"steps",
")",
"steps",
".",
"append",
"(",
"{",
"\"start\"",
":",
"datetime",
".",
"utcnow",
"(",
")",
",",
"\"name\"",
":",
"step_name",
"}",
")",
"self",
".",
"_save",
"(",
"steps",
"=",
"steps",
")"
] |
aac223a1b937d5b348b42af3c601a6c685ca633a
|
test
|
TaskHandler.stop_step
|
Stop a step.
|
memsql/common/sql_step_queue/task_handler.py
|
def stop_step(self, step_name):
""" Stop a step. """
if self.finished is not None:
raise AlreadyFinished()
steps = copy.deepcopy(self.steps)
step_data = self._get_step(step_name, steps=steps)
if step_data is None:
raise StepNotStarted()
elif 'stop' in step_data:
raise StepAlreadyFinished()
step_data['stop'] = datetime.utcnow()
step_data['duration'] = util.timedelta_total_seconds(step_data['stop'] - step_data['start'])
self._save(steps=steps)
|
def stop_step(self, step_name):
""" Stop a step. """
if self.finished is not None:
raise AlreadyFinished()
steps = copy.deepcopy(self.steps)
step_data = self._get_step(step_name, steps=steps)
if step_data is None:
raise StepNotStarted()
elif 'stop' in step_data:
raise StepAlreadyFinished()
step_data['stop'] = datetime.utcnow()
step_data['duration'] = util.timedelta_total_seconds(step_data['stop'] - step_data['start'])
self._save(steps=steps)
|
[
"Stop",
"a",
"step",
"."
] |
memsql/memsql-python
|
python
|
https://github.com/memsql/memsql-python/blob/aac223a1b937d5b348b42af3c601a6c685ca633a/memsql/common/sql_step_queue/task_handler.py#L129-L145
|
[
"def",
"stop_step",
"(",
"self",
",",
"step_name",
")",
":",
"if",
"self",
".",
"finished",
"is",
"not",
"None",
":",
"raise",
"AlreadyFinished",
"(",
")",
"steps",
"=",
"copy",
".",
"deepcopy",
"(",
"self",
".",
"steps",
")",
"step_data",
"=",
"self",
".",
"_get_step",
"(",
"step_name",
",",
"steps",
"=",
"steps",
")",
"if",
"step_data",
"is",
"None",
":",
"raise",
"StepNotStarted",
"(",
")",
"elif",
"'stop'",
"in",
"step_data",
":",
"raise",
"StepAlreadyFinished",
"(",
")",
"step_data",
"[",
"'stop'",
"]",
"=",
"datetime",
".",
"utcnow",
"(",
")",
"step_data",
"[",
"'duration'",
"]",
"=",
"util",
".",
"timedelta_total_seconds",
"(",
"step_data",
"[",
"'stop'",
"]",
"-",
"step_data",
"[",
"'start'",
"]",
")",
"self",
".",
"_save",
"(",
"steps",
"=",
"steps",
")"
] |
aac223a1b937d5b348b42af3c601a6c685ca633a
|
test
|
TaskHandler._load_steps
|
load steps -> basically load all the datetime isoformats into datetimes
|
memsql/common/sql_step_queue/task_handler.py
|
def _load_steps(self, raw_steps):
""" load steps -> basically load all the datetime isoformats into datetimes """
for step in raw_steps:
if 'start' in step:
step['start'] = parser.parse(step['start'])
if 'stop' in step:
step['stop'] = parser.parse(step['stop'])
return raw_steps
|
def _load_steps(self, raw_steps):
""" load steps -> basically load all the datetime isoformats into datetimes """
for step in raw_steps:
if 'start' in step:
step['start'] = parser.parse(step['start'])
if 'stop' in step:
step['stop'] = parser.parse(step['stop'])
return raw_steps
|
[
"load",
"steps",
"-",
">",
"basically",
"load",
"all",
"the",
"datetime",
"isoformats",
"into",
"datetimes"
] |
memsql/memsql-python
|
python
|
https://github.com/memsql/memsql-python/blob/aac223a1b937d5b348b42af3c601a6c685ca633a/memsql/common/sql_step_queue/task_handler.py#L197-L204
|
[
"def",
"_load_steps",
"(",
"self",
",",
"raw_steps",
")",
":",
"for",
"step",
"in",
"raw_steps",
":",
"if",
"'start'",
"in",
"step",
":",
"step",
"[",
"'start'",
"]",
"=",
"parser",
".",
"parse",
"(",
"step",
"[",
"'start'",
"]",
")",
"if",
"'stop'",
"in",
"step",
":",
"step",
"[",
"'stop'",
"]",
"=",
"parser",
".",
"parse",
"(",
"step",
"[",
"'stop'",
"]",
")",
"return",
"raw_steps"
] |
aac223a1b937d5b348b42af3c601a6c685ca633a
|
test
|
WebSocketConnection.disconnect
|
Disconnects from the websocket connection and joins the Thread.
:return:
|
btfxwss/connection.py
|
def disconnect(self):
"""Disconnects from the websocket connection and joins the Thread.
:return:
"""
self.log.debug("disconnect(): Disconnecting from API..")
self.reconnect_required.clear()
self.disconnect_called.set()
if self.socket:
self.socket.close()
self.join(timeout=1)
|
def disconnect(self):
"""Disconnects from the websocket connection and joins the Thread.
:return:
"""
self.log.debug("disconnect(): Disconnecting from API..")
self.reconnect_required.clear()
self.disconnect_called.set()
if self.socket:
self.socket.close()
self.join(timeout=1)
|
[
"Disconnects",
"from",
"the",
"websocket",
"connection",
"and",
"joins",
"the",
"Thread",
"."
] |
Crypto-toolbox/btfxwss
|
python
|
https://github.com/Crypto-toolbox/btfxwss/blob/16827fa6aacb2c0e289aa852bf61a18df6905835/btfxwss/connection.py#L99-L109
|
[
"def",
"disconnect",
"(",
"self",
")",
":",
"self",
".",
"log",
".",
"debug",
"(",
"\"disconnect(): Disconnecting from API..\"",
")",
"self",
".",
"reconnect_required",
".",
"clear",
"(",
")",
"self",
".",
"disconnect_called",
".",
"set",
"(",
")",
"if",
"self",
".",
"socket",
":",
"self",
".",
"socket",
".",
"close",
"(",
")",
"self",
".",
"join",
"(",
"timeout",
"=",
"1",
")"
] |
16827fa6aacb2c0e289aa852bf61a18df6905835
|
test
|
WebSocketConnection.reconnect
|
Issues a reconnection by setting the reconnect_required event.
:return:
|
btfxwss/connection.py
|
def reconnect(self):
"""Issues a reconnection by setting the reconnect_required event.
:return:
"""
# Reconnect attempt at self.reconnect_interval
self.log.debug("reconnect(): Initialzion reconnect sequence..")
self.connected.clear()
self.reconnect_required.set()
if self.socket:
self.socket.close()
|
def reconnect(self):
"""Issues a reconnection by setting the reconnect_required event.
:return:
"""
# Reconnect attempt at self.reconnect_interval
self.log.debug("reconnect(): Initialzion reconnect sequence..")
self.connected.clear()
self.reconnect_required.set()
if self.socket:
self.socket.close()
|
[
"Issues",
"a",
"reconnection",
"by",
"setting",
"the",
"reconnect_required",
"event",
"."
] |
Crypto-toolbox/btfxwss
|
python
|
https://github.com/Crypto-toolbox/btfxwss/blob/16827fa6aacb2c0e289aa852bf61a18df6905835/btfxwss/connection.py#L111-L121
|
[
"def",
"reconnect",
"(",
"self",
")",
":",
"# Reconnect attempt at self.reconnect_interval",
"self",
".",
"log",
".",
"debug",
"(",
"\"reconnect(): Initialzion reconnect sequence..\"",
")",
"self",
".",
"connected",
".",
"clear",
"(",
")",
"self",
".",
"reconnect_required",
".",
"set",
"(",
")",
"if",
"self",
".",
"socket",
":",
"self",
".",
"socket",
".",
"close",
"(",
")"
] |
16827fa6aacb2c0e289aa852bf61a18df6905835
|
test
|
WebSocketConnection._connect
|
Creates a websocket connection.
:return:
|
btfxwss/connection.py
|
def _connect(self):
"""Creates a websocket connection.
:return:
"""
self.log.debug("_connect(): Initializing Connection..")
self.socket = websocket.WebSocketApp(
self.url,
on_open=self._on_open,
on_message=self._on_message,
on_error=self._on_error,
on_close=self._on_close
)
if 'ca_certs' not in self.sslopt.keys():
ssl_defaults = ssl.get_default_verify_paths()
self.sslopt['ca_certs'] = ssl_defaults.cafile
self.log.debug("_connect(): Starting Connection..")
self.socket.run_forever(sslopt=self.sslopt,
http_proxy_host=self.http_proxy_host,
http_proxy_port=self.http_proxy_port,
http_proxy_auth=self.http_proxy_auth,
http_no_proxy=self.http_no_proxy)
# stop outstanding ping/pong timers
self._stop_timers()
while self.reconnect_required.is_set():
if not self.disconnect_called.is_set():
self.log.info("Attempting to connect again in %s seconds."
% self.reconnect_interval)
self.state = "unavailable"
time.sleep(self.reconnect_interval)
# We need to set this flag since closing the socket will
# set it to False
self.socket.keep_running = True
self.socket.sock = None
self.socket.run_forever(sslopt=self.sslopt,
http_proxy_host=self.http_proxy_host,
http_proxy_port=self.http_proxy_port,
http_proxy_auth=self.http_proxy_auth,
http_no_proxy=self.http_no_proxy)
else:
break
|
def _connect(self):
"""Creates a websocket connection.
:return:
"""
self.log.debug("_connect(): Initializing Connection..")
self.socket = websocket.WebSocketApp(
self.url,
on_open=self._on_open,
on_message=self._on_message,
on_error=self._on_error,
on_close=self._on_close
)
if 'ca_certs' not in self.sslopt.keys():
ssl_defaults = ssl.get_default_verify_paths()
self.sslopt['ca_certs'] = ssl_defaults.cafile
self.log.debug("_connect(): Starting Connection..")
self.socket.run_forever(sslopt=self.sslopt,
http_proxy_host=self.http_proxy_host,
http_proxy_port=self.http_proxy_port,
http_proxy_auth=self.http_proxy_auth,
http_no_proxy=self.http_no_proxy)
# stop outstanding ping/pong timers
self._stop_timers()
while self.reconnect_required.is_set():
if not self.disconnect_called.is_set():
self.log.info("Attempting to connect again in %s seconds."
% self.reconnect_interval)
self.state = "unavailable"
time.sleep(self.reconnect_interval)
# We need to set this flag since closing the socket will
# set it to False
self.socket.keep_running = True
self.socket.sock = None
self.socket.run_forever(sslopt=self.sslopt,
http_proxy_host=self.http_proxy_host,
http_proxy_port=self.http_proxy_port,
http_proxy_auth=self.http_proxy_auth,
http_no_proxy=self.http_no_proxy)
else:
break
|
[
"Creates",
"a",
"websocket",
"connection",
"."
] |
Crypto-toolbox/btfxwss
|
python
|
https://github.com/Crypto-toolbox/btfxwss/blob/16827fa6aacb2c0e289aa852bf61a18df6905835/btfxwss/connection.py#L123-L167
|
[
"def",
"_connect",
"(",
"self",
")",
":",
"self",
".",
"log",
".",
"debug",
"(",
"\"_connect(): Initializing Connection..\"",
")",
"self",
".",
"socket",
"=",
"websocket",
".",
"WebSocketApp",
"(",
"self",
".",
"url",
",",
"on_open",
"=",
"self",
".",
"_on_open",
",",
"on_message",
"=",
"self",
".",
"_on_message",
",",
"on_error",
"=",
"self",
".",
"_on_error",
",",
"on_close",
"=",
"self",
".",
"_on_close",
")",
"if",
"'ca_certs'",
"not",
"in",
"self",
".",
"sslopt",
".",
"keys",
"(",
")",
":",
"ssl_defaults",
"=",
"ssl",
".",
"get_default_verify_paths",
"(",
")",
"self",
".",
"sslopt",
"[",
"'ca_certs'",
"]",
"=",
"ssl_defaults",
".",
"cafile",
"self",
".",
"log",
".",
"debug",
"(",
"\"_connect(): Starting Connection..\"",
")",
"self",
".",
"socket",
".",
"run_forever",
"(",
"sslopt",
"=",
"self",
".",
"sslopt",
",",
"http_proxy_host",
"=",
"self",
".",
"http_proxy_host",
",",
"http_proxy_port",
"=",
"self",
".",
"http_proxy_port",
",",
"http_proxy_auth",
"=",
"self",
".",
"http_proxy_auth",
",",
"http_no_proxy",
"=",
"self",
".",
"http_no_proxy",
")",
"# stop outstanding ping/pong timers",
"self",
".",
"_stop_timers",
"(",
")",
"while",
"self",
".",
"reconnect_required",
".",
"is_set",
"(",
")",
":",
"if",
"not",
"self",
".",
"disconnect_called",
".",
"is_set",
"(",
")",
":",
"self",
".",
"log",
".",
"info",
"(",
"\"Attempting to connect again in %s seconds.\"",
"%",
"self",
".",
"reconnect_interval",
")",
"self",
".",
"state",
"=",
"\"unavailable\"",
"time",
".",
"sleep",
"(",
"self",
".",
"reconnect_interval",
")",
"# We need to set this flag since closing the socket will",
"# set it to False",
"self",
".",
"socket",
".",
"keep_running",
"=",
"True",
"self",
".",
"socket",
".",
"sock",
"=",
"None",
"self",
".",
"socket",
".",
"run_forever",
"(",
"sslopt",
"=",
"self",
".",
"sslopt",
",",
"http_proxy_host",
"=",
"self",
".",
"http_proxy_host",
",",
"http_proxy_port",
"=",
"self",
".",
"http_proxy_port",
",",
"http_proxy_auth",
"=",
"self",
".",
"http_proxy_auth",
",",
"http_no_proxy",
"=",
"self",
".",
"http_no_proxy",
")",
"else",
":",
"break"
] |
16827fa6aacb2c0e289aa852bf61a18df6905835
|
test
|
WebSocketConnection._on_message
|
Handles and passes received data to the appropriate handlers.
:return:
|
btfxwss/connection.py
|
def _on_message(self, ws, message):
"""Handles and passes received data to the appropriate handlers.
:return:
"""
self._stop_timers()
raw, received_at = message, time.time()
self.log.debug("_on_message(): Received new message %s at %s",
raw, received_at)
try:
data = json.loads(raw)
except json.JSONDecodeError:
# Something wrong with this data, log and discard
return
# Handle data
if isinstance(data, dict):
# This is a system message
self._system_handler(data, received_at)
else:
# This is a list of data
if data[1] == 'hb':
self._heartbeat_handler()
else:
self._data_handler(data, received_at)
# We've received data, reset timers
self._start_timers()
|
def _on_message(self, ws, message):
"""Handles and passes received data to the appropriate handlers.
:return:
"""
self._stop_timers()
raw, received_at = message, time.time()
self.log.debug("_on_message(): Received new message %s at %s",
raw, received_at)
try:
data = json.loads(raw)
except json.JSONDecodeError:
# Something wrong with this data, log and discard
return
# Handle data
if isinstance(data, dict):
# This is a system message
self._system_handler(data, received_at)
else:
# This is a list of data
if data[1] == 'hb':
self._heartbeat_handler()
else:
self._data_handler(data, received_at)
# We've received data, reset timers
self._start_timers()
|
[
"Handles",
"and",
"passes",
"received",
"data",
"to",
"the",
"appropriate",
"handlers",
"."
] |
Crypto-toolbox/btfxwss
|
python
|
https://github.com/Crypto-toolbox/btfxwss/blob/16827fa6aacb2c0e289aa852bf61a18df6905835/btfxwss/connection.py#L177-L205
|
[
"def",
"_on_message",
"(",
"self",
",",
"ws",
",",
"message",
")",
":",
"self",
".",
"_stop_timers",
"(",
")",
"raw",
",",
"received_at",
"=",
"message",
",",
"time",
".",
"time",
"(",
")",
"self",
".",
"log",
".",
"debug",
"(",
"\"_on_message(): Received new message %s at %s\"",
",",
"raw",
",",
"received_at",
")",
"try",
":",
"data",
"=",
"json",
".",
"loads",
"(",
"raw",
")",
"except",
"json",
".",
"JSONDecodeError",
":",
"# Something wrong with this data, log and discard",
"return",
"# Handle data",
"if",
"isinstance",
"(",
"data",
",",
"dict",
")",
":",
"# This is a system message",
"self",
".",
"_system_handler",
"(",
"data",
",",
"received_at",
")",
"else",
":",
"# This is a list of data",
"if",
"data",
"[",
"1",
"]",
"==",
"'hb'",
":",
"self",
".",
"_heartbeat_handler",
"(",
")",
"else",
":",
"self",
".",
"_data_handler",
"(",
"data",
",",
"received_at",
")",
"# We've received data, reset timers",
"self",
".",
"_start_timers",
"(",
")"
] |
16827fa6aacb2c0e289aa852bf61a18df6905835
|
test
|
WebSocketConnection._stop_timers
|
Stops ping, pong and connection timers.
:return:
|
btfxwss/connection.py
|
def _stop_timers(self):
"""Stops ping, pong and connection timers.
:return:
"""
if self.ping_timer:
self.ping_timer.cancel()
if self.connection_timer:
self.connection_timer.cancel()
if self.pong_timer:
self.pong_timer.cancel()
self.log.debug("_stop_timers(): Timers stopped.")
|
def _stop_timers(self):
"""Stops ping, pong and connection timers.
:return:
"""
if self.ping_timer:
self.ping_timer.cancel()
if self.connection_timer:
self.connection_timer.cancel()
if self.pong_timer:
self.pong_timer.cancel()
self.log.debug("_stop_timers(): Timers stopped.")
|
[
"Stops",
"ping",
"pong",
"and",
"connection",
"timers",
"."
] |
Crypto-toolbox/btfxwss
|
python
|
https://github.com/Crypto-toolbox/btfxwss/blob/16827fa6aacb2c0e289aa852bf61a18df6905835/btfxwss/connection.py#L234-L247
|
[
"def",
"_stop_timers",
"(",
"self",
")",
":",
"if",
"self",
".",
"ping_timer",
":",
"self",
".",
"ping_timer",
".",
"cancel",
"(",
")",
"if",
"self",
".",
"connection_timer",
":",
"self",
".",
"connection_timer",
".",
"cancel",
"(",
")",
"if",
"self",
".",
"pong_timer",
":",
"self",
".",
"pong_timer",
".",
"cancel",
"(",
")",
"self",
".",
"log",
".",
"debug",
"(",
"\"_stop_timers(): Timers stopped.\"",
")"
] |
16827fa6aacb2c0e289aa852bf61a18df6905835
|
test
|
WebSocketConnection.send_ping
|
Sends a ping message to the API and starts pong timers.
:return:
|
btfxwss/connection.py
|
def send_ping(self):
"""Sends a ping message to the API and starts pong timers.
:return:
"""
self.log.debug("send_ping(): Sending ping to API..")
self.socket.send(json.dumps({'event': 'ping'}))
self.pong_timer = Timer(self.pong_timeout, self._check_pong)
self.pong_timer.start()
|
def send_ping(self):
"""Sends a ping message to the API and starts pong timers.
:return:
"""
self.log.debug("send_ping(): Sending ping to API..")
self.socket.send(json.dumps({'event': 'ping'}))
self.pong_timer = Timer(self.pong_timeout, self._check_pong)
self.pong_timer.start()
|
[
"Sends",
"a",
"ping",
"message",
"to",
"the",
"API",
"and",
"starts",
"pong",
"timers",
"."
] |
Crypto-toolbox/btfxwss
|
python
|
https://github.com/Crypto-toolbox/btfxwss/blob/16827fa6aacb2c0e289aa852bf61a18df6905835/btfxwss/connection.py#L266-L274
|
[
"def",
"send_ping",
"(",
"self",
")",
":",
"self",
".",
"log",
".",
"debug",
"(",
"\"send_ping(): Sending ping to API..\"",
")",
"self",
".",
"socket",
".",
"send",
"(",
"json",
".",
"dumps",
"(",
"{",
"'event'",
":",
"'ping'",
"}",
")",
")",
"self",
".",
"pong_timer",
"=",
"Timer",
"(",
"self",
".",
"pong_timeout",
",",
"self",
".",
"_check_pong",
")",
"self",
".",
"pong_timer",
".",
"start",
"(",
")"
] |
16827fa6aacb2c0e289aa852bf61a18df6905835
|
test
|
WebSocketConnection._check_pong
|
Checks if a Pong message was received.
:return:
|
btfxwss/connection.py
|
def _check_pong(self):
"""Checks if a Pong message was received.
:return:
"""
self.pong_timer.cancel()
if self.pong_received:
self.log.debug("_check_pong(): Pong received in time.")
self.pong_received = False
else:
# reconnect
self.log.debug("_check_pong(): Pong not received in time."
"Issuing reconnect..")
self.reconnect()
|
def _check_pong(self):
"""Checks if a Pong message was received.
:return:
"""
self.pong_timer.cancel()
if self.pong_received:
self.log.debug("_check_pong(): Pong received in time.")
self.pong_received = False
else:
# reconnect
self.log.debug("_check_pong(): Pong not received in time."
"Issuing reconnect..")
self.reconnect()
|
[
"Checks",
"if",
"a",
"Pong",
"message",
"was",
"received",
"."
] |
Crypto-toolbox/btfxwss
|
python
|
https://github.com/Crypto-toolbox/btfxwss/blob/16827fa6aacb2c0e289aa852bf61a18df6905835/btfxwss/connection.py#L276-L289
|
[
"def",
"_check_pong",
"(",
"self",
")",
":",
"self",
".",
"pong_timer",
".",
"cancel",
"(",
")",
"if",
"self",
".",
"pong_received",
":",
"self",
".",
"log",
".",
"debug",
"(",
"\"_check_pong(): Pong received in time.\"",
")",
"self",
".",
"pong_received",
"=",
"False",
"else",
":",
"# reconnect",
"self",
".",
"log",
".",
"debug",
"(",
"\"_check_pong(): Pong not received in time.\"",
"\"Issuing reconnect..\"",
")",
"self",
".",
"reconnect",
"(",
")"
] |
16827fa6aacb2c0e289aa852bf61a18df6905835
|
test
|
WebSocketConnection.send
|
Sends the given Payload to the API via the websocket connection.
:param kwargs: payload paarameters as key=value pairs
:return:
|
btfxwss/connection.py
|
def send(self, api_key=None, secret=None, list_data=None, auth=False, **kwargs):
"""Sends the given Payload to the API via the websocket connection.
:param kwargs: payload paarameters as key=value pairs
:return:
"""
if auth:
nonce = str(int(time.time() * 10000000))
auth_string = 'AUTH' + nonce
auth_sig = hmac.new(secret.encode(), auth_string.encode(),
hashlib.sha384).hexdigest()
payload = {'event': 'auth', 'apiKey': api_key, 'authSig': auth_sig,
'authPayload': auth_string, 'authNonce': nonce}
payload = json.dumps(payload)
elif list_data:
payload = json.dumps(list_data)
else:
payload = json.dumps(kwargs)
self.log.debug("send(): Sending payload to API: %s", payload)
try:
self.socket.send(payload)
except websocket.WebSocketConnectionClosedException:
self.log.error("send(): Did not send out payload %s - client not connected. ", kwargs)
|
def send(self, api_key=None, secret=None, list_data=None, auth=False, **kwargs):
"""Sends the given Payload to the API via the websocket connection.
:param kwargs: payload paarameters as key=value pairs
:return:
"""
if auth:
nonce = str(int(time.time() * 10000000))
auth_string = 'AUTH' + nonce
auth_sig = hmac.new(secret.encode(), auth_string.encode(),
hashlib.sha384).hexdigest()
payload = {'event': 'auth', 'apiKey': api_key, 'authSig': auth_sig,
'authPayload': auth_string, 'authNonce': nonce}
payload = json.dumps(payload)
elif list_data:
payload = json.dumps(list_data)
else:
payload = json.dumps(kwargs)
self.log.debug("send(): Sending payload to API: %s", payload)
try:
self.socket.send(payload)
except websocket.WebSocketConnectionClosedException:
self.log.error("send(): Did not send out payload %s - client not connected. ", kwargs)
|
[
"Sends",
"the",
"given",
"Payload",
"to",
"the",
"API",
"via",
"the",
"websocket",
"connection",
"."
] |
Crypto-toolbox/btfxwss
|
python
|
https://github.com/Crypto-toolbox/btfxwss/blob/16827fa6aacb2c0e289aa852bf61a18df6905835/btfxwss/connection.py#L291-L314
|
[
"def",
"send",
"(",
"self",
",",
"api_key",
"=",
"None",
",",
"secret",
"=",
"None",
",",
"list_data",
"=",
"None",
",",
"auth",
"=",
"False",
",",
"*",
"*",
"kwargs",
")",
":",
"if",
"auth",
":",
"nonce",
"=",
"str",
"(",
"int",
"(",
"time",
".",
"time",
"(",
")",
"*",
"10000000",
")",
")",
"auth_string",
"=",
"'AUTH'",
"+",
"nonce",
"auth_sig",
"=",
"hmac",
".",
"new",
"(",
"secret",
".",
"encode",
"(",
")",
",",
"auth_string",
".",
"encode",
"(",
")",
",",
"hashlib",
".",
"sha384",
")",
".",
"hexdigest",
"(",
")",
"payload",
"=",
"{",
"'event'",
":",
"'auth'",
",",
"'apiKey'",
":",
"api_key",
",",
"'authSig'",
":",
"auth_sig",
",",
"'authPayload'",
":",
"auth_string",
",",
"'authNonce'",
":",
"nonce",
"}",
"payload",
"=",
"json",
".",
"dumps",
"(",
"payload",
")",
"elif",
"list_data",
":",
"payload",
"=",
"json",
".",
"dumps",
"(",
"list_data",
")",
"else",
":",
"payload",
"=",
"json",
".",
"dumps",
"(",
"kwargs",
")",
"self",
".",
"log",
".",
"debug",
"(",
"\"send(): Sending payload to API: %s\"",
",",
"payload",
")",
"try",
":",
"self",
".",
"socket",
".",
"send",
"(",
"payload",
")",
"except",
"websocket",
".",
"WebSocketConnectionClosedException",
":",
"self",
".",
"log",
".",
"error",
"(",
"\"send(): Did not send out payload %s - client not connected. \"",
",",
"kwargs",
")"
] |
16827fa6aacb2c0e289aa852bf61a18df6905835
|
test
|
WebSocketConnection.pass_to_client
|
Passes data up to the client via a Queue().
:param event:
:param data:
:param args:
:return:
|
btfxwss/connection.py
|
def pass_to_client(self, event, data, *args):
"""Passes data up to the client via a Queue().
:param event:
:param data:
:param args:
:return:
"""
self.q.put((event, data, *args))
|
def pass_to_client(self, event, data, *args):
"""Passes data up to the client via a Queue().
:param event:
:param data:
:param args:
:return:
"""
self.q.put((event, data, *args))
|
[
"Passes",
"data",
"up",
"to",
"the",
"client",
"via",
"a",
"Queue",
"()",
"."
] |
Crypto-toolbox/btfxwss
|
python
|
https://github.com/Crypto-toolbox/btfxwss/blob/16827fa6aacb2c0e289aa852bf61a18df6905835/btfxwss/connection.py#L316-L324
|
[
"def",
"pass_to_client",
"(",
"self",
",",
"event",
",",
"data",
",",
"*",
"args",
")",
":",
"self",
".",
"q",
".",
"put",
"(",
"(",
"event",
",",
"data",
",",
"*",
"args",
")",
")"
] |
16827fa6aacb2c0e289aa852bf61a18df6905835
|
test
|
WebSocketConnection._unpause
|
Unpauses the connection.
Send a message up to client that he should re-subscribe to all
channels.
:return:
|
btfxwss/connection.py
|
def _unpause(self):
"""Unpauses the connection.
Send a message up to client that he should re-subscribe to all
channels.
:return:
"""
self.log.debug("_unpause(): Clearing paused() Flag!")
self.paused.clear()
self.log.debug("_unpause(): Re-subscribing softly..")
self._resubscribe(soft=True)
|
def _unpause(self):
"""Unpauses the connection.
Send a message up to client that he should re-subscribe to all
channels.
:return:
"""
self.log.debug("_unpause(): Clearing paused() Flag!")
self.paused.clear()
self.log.debug("_unpause(): Re-subscribing softly..")
self._resubscribe(soft=True)
|
[
"Unpauses",
"the",
"connection",
"."
] |
Crypto-toolbox/btfxwss
|
python
|
https://github.com/Crypto-toolbox/btfxwss/blob/16827fa6aacb2c0e289aa852bf61a18df6905835/btfxwss/connection.py#L342-L353
|
[
"def",
"_unpause",
"(",
"self",
")",
":",
"self",
".",
"log",
".",
"debug",
"(",
"\"_unpause(): Clearing paused() Flag!\"",
")",
"self",
".",
"paused",
".",
"clear",
"(",
")",
"self",
".",
"log",
".",
"debug",
"(",
"\"_unpause(): Re-subscribing softly..\"",
")",
"self",
".",
"_resubscribe",
"(",
"soft",
"=",
"True",
")"
] |
16827fa6aacb2c0e289aa852bf61a18df6905835
|
test
|
WebSocketConnection._system_handler
|
Distributes system messages to the appropriate handler.
System messages include everything that arrives as a dict,
or a list containing a heartbeat.
:param data:
:param ts:
:return:
|
btfxwss/connection.py
|
def _system_handler(self, data, ts):
"""Distributes system messages to the appropriate handler.
System messages include everything that arrives as a dict,
or a list containing a heartbeat.
:param data:
:param ts:
:return:
"""
self.log.debug("_system_handler(): Received a system message: %s", data)
# Unpack the data
event = data.pop('event')
if event == 'pong':
self.log.debug("_system_handler(): Distributing %s to _pong_handler..",
data)
self._pong_handler()
elif event == 'info':
self.log.debug("_system_handler(): Distributing %s to _info_handler..",
data)
self._info_handler(data)
elif event == 'error':
self.log.debug("_system_handler(): Distributing %s to _error_handler..",
data)
self._error_handler(data)
elif event in ('subscribed', 'unsubscribed', 'conf', 'auth', 'unauth'):
self.log.debug("_system_handler(): Distributing %s to "
"_response_handler..", data)
self._response_handler(event, data, ts)
else:
self.log.error("Unhandled event: %s, data: %s", event, data)
|
def _system_handler(self, data, ts):
"""Distributes system messages to the appropriate handler.
System messages include everything that arrives as a dict,
or a list containing a heartbeat.
:param data:
:param ts:
:return:
"""
self.log.debug("_system_handler(): Received a system message: %s", data)
# Unpack the data
event = data.pop('event')
if event == 'pong':
self.log.debug("_system_handler(): Distributing %s to _pong_handler..",
data)
self._pong_handler()
elif event == 'info':
self.log.debug("_system_handler(): Distributing %s to _info_handler..",
data)
self._info_handler(data)
elif event == 'error':
self.log.debug("_system_handler(): Distributing %s to _error_handler..",
data)
self._error_handler(data)
elif event in ('subscribed', 'unsubscribed', 'conf', 'auth', 'unauth'):
self.log.debug("_system_handler(): Distributing %s to "
"_response_handler..", data)
self._response_handler(event, data, ts)
else:
self.log.error("Unhandled event: %s, data: %s", event, data)
|
[
"Distributes",
"system",
"messages",
"to",
"the",
"appropriate",
"handler",
"."
] |
Crypto-toolbox/btfxwss
|
python
|
https://github.com/Crypto-toolbox/btfxwss/blob/16827fa6aacb2c0e289aa852bf61a18df6905835/btfxwss/connection.py#L374-L404
|
[
"def",
"_system_handler",
"(",
"self",
",",
"data",
",",
"ts",
")",
":",
"self",
".",
"log",
".",
"debug",
"(",
"\"_system_handler(): Received a system message: %s\"",
",",
"data",
")",
"# Unpack the data",
"event",
"=",
"data",
".",
"pop",
"(",
"'event'",
")",
"if",
"event",
"==",
"'pong'",
":",
"self",
".",
"log",
".",
"debug",
"(",
"\"_system_handler(): Distributing %s to _pong_handler..\"",
",",
"data",
")",
"self",
".",
"_pong_handler",
"(",
")",
"elif",
"event",
"==",
"'info'",
":",
"self",
".",
"log",
".",
"debug",
"(",
"\"_system_handler(): Distributing %s to _info_handler..\"",
",",
"data",
")",
"self",
".",
"_info_handler",
"(",
"data",
")",
"elif",
"event",
"==",
"'error'",
":",
"self",
".",
"log",
".",
"debug",
"(",
"\"_system_handler(): Distributing %s to _error_handler..\"",
",",
"data",
")",
"self",
".",
"_error_handler",
"(",
"data",
")",
"elif",
"event",
"in",
"(",
"'subscribed'",
",",
"'unsubscribed'",
",",
"'conf'",
",",
"'auth'",
",",
"'unauth'",
")",
":",
"self",
".",
"log",
".",
"debug",
"(",
"\"_system_handler(): Distributing %s to \"",
"\"_response_handler..\"",
",",
"data",
")",
"self",
".",
"_response_handler",
"(",
"event",
",",
"data",
",",
"ts",
")",
"else",
":",
"self",
".",
"log",
".",
"error",
"(",
"\"Unhandled event: %s, data: %s\"",
",",
"event",
",",
"data",
")"
] |
16827fa6aacb2c0e289aa852bf61a18df6905835
|
test
|
WebSocketConnection._response_handler
|
Handles responses to (un)subscribe and conf commands.
Passes data up to client.
:param data:
:param ts:
:return:
|
btfxwss/connection.py
|
def _response_handler(self, event, data, ts):
"""Handles responses to (un)subscribe and conf commands.
Passes data up to client.
:param data:
:param ts:
:return:
"""
self.log.debug("_response_handler(): Passing %s to client..", data)
self.pass_to_client(event, data, ts)
|
def _response_handler(self, event, data, ts):
"""Handles responses to (un)subscribe and conf commands.
Passes data up to client.
:param data:
:param ts:
:return:
"""
self.log.debug("_response_handler(): Passing %s to client..", data)
self.pass_to_client(event, data, ts)
|
[
"Handles",
"responses",
"to",
"(",
"un",
")",
"subscribe",
"and",
"conf",
"commands",
"."
] |
Crypto-toolbox/btfxwss
|
python
|
https://github.com/Crypto-toolbox/btfxwss/blob/16827fa6aacb2c0e289aa852bf61a18df6905835/btfxwss/connection.py#L406-L416
|
[
"def",
"_response_handler",
"(",
"self",
",",
"event",
",",
"data",
",",
"ts",
")",
":",
"self",
".",
"log",
".",
"debug",
"(",
"\"_response_handler(): Passing %s to client..\"",
",",
"data",
")",
"self",
".",
"pass_to_client",
"(",
"event",
",",
"data",
",",
"ts",
")"
] |
16827fa6aacb2c0e289aa852bf61a18df6905835
|
test
|
WebSocketConnection._info_handler
|
Handle INFO messages from the API and issues relevant actions.
:param data:
:param ts:
|
btfxwss/connection.py
|
def _info_handler(self, data):
"""
Handle INFO messages from the API and issues relevant actions.
:param data:
:param ts:
"""
def raise_exception():
"""Log info code as error and raise a ValueError."""
self.log.error("%s: %s", data['code'], info_message[data['code']])
raise ValueError("%s: %s" % (data['code'], info_message[data['code']]))
if 'code' not in data and 'version' in data:
self.log.info('Initialized Client on API Version %s', data['version'])
return
info_message = {20000: 'Invalid User given! Please make sure the given ID is correct!',
20051: 'Stop/Restart websocket server '
'(please try to reconnect)',
20060: 'Refreshing data from the trading engine; '
'please pause any acivity.',
20061: 'Done refreshing data from the trading engine.'
' Re-subscription advised.'}
codes = {20051: self.reconnect, 20060: self._pause,
20061: self._unpause}
if 'version' in data:
self.log.info("API version: %i", data['version'])
return
try:
self.log.info(info_message[data['code']])
codes[data['code']]()
except KeyError as e:
self.log.exception(e)
self.log.error("Unknown Info code %s!", data['code'])
raise
|
def _info_handler(self, data):
"""
Handle INFO messages from the API and issues relevant actions.
:param data:
:param ts:
"""
def raise_exception():
"""Log info code as error and raise a ValueError."""
self.log.error("%s: %s", data['code'], info_message[data['code']])
raise ValueError("%s: %s" % (data['code'], info_message[data['code']]))
if 'code' not in data and 'version' in data:
self.log.info('Initialized Client on API Version %s', data['version'])
return
info_message = {20000: 'Invalid User given! Please make sure the given ID is correct!',
20051: 'Stop/Restart websocket server '
'(please try to reconnect)',
20060: 'Refreshing data from the trading engine; '
'please pause any acivity.',
20061: 'Done refreshing data from the trading engine.'
' Re-subscription advised.'}
codes = {20051: self.reconnect, 20060: self._pause,
20061: self._unpause}
if 'version' in data:
self.log.info("API version: %i", data['version'])
return
try:
self.log.info(info_message[data['code']])
codes[data['code']]()
except KeyError as e:
self.log.exception(e)
self.log.error("Unknown Info code %s!", data['code'])
raise
|
[
"Handle",
"INFO",
"messages",
"from",
"the",
"API",
"and",
"issues",
"relevant",
"actions",
"."
] |
Crypto-toolbox/btfxwss
|
python
|
https://github.com/Crypto-toolbox/btfxwss/blob/16827fa6aacb2c0e289aa852bf61a18df6905835/btfxwss/connection.py#L418-L456
|
[
"def",
"_info_handler",
"(",
"self",
",",
"data",
")",
":",
"def",
"raise_exception",
"(",
")",
":",
"\"\"\"Log info code as error and raise a ValueError.\"\"\"",
"self",
".",
"log",
".",
"error",
"(",
"\"%s: %s\"",
",",
"data",
"[",
"'code'",
"]",
",",
"info_message",
"[",
"data",
"[",
"'code'",
"]",
"]",
")",
"raise",
"ValueError",
"(",
"\"%s: %s\"",
"%",
"(",
"data",
"[",
"'code'",
"]",
",",
"info_message",
"[",
"data",
"[",
"'code'",
"]",
"]",
")",
")",
"if",
"'code'",
"not",
"in",
"data",
"and",
"'version'",
"in",
"data",
":",
"self",
".",
"log",
".",
"info",
"(",
"'Initialized Client on API Version %s'",
",",
"data",
"[",
"'version'",
"]",
")",
"return",
"info_message",
"=",
"{",
"20000",
":",
"'Invalid User given! Please make sure the given ID is correct!'",
",",
"20051",
":",
"'Stop/Restart websocket server '",
"'(please try to reconnect)'",
",",
"20060",
":",
"'Refreshing data from the trading engine; '",
"'please pause any acivity.'",
",",
"20061",
":",
"'Done refreshing data from the trading engine.'",
"' Re-subscription advised.'",
"}",
"codes",
"=",
"{",
"20051",
":",
"self",
".",
"reconnect",
",",
"20060",
":",
"self",
".",
"_pause",
",",
"20061",
":",
"self",
".",
"_unpause",
"}",
"if",
"'version'",
"in",
"data",
":",
"self",
".",
"log",
".",
"info",
"(",
"\"API version: %i\"",
",",
"data",
"[",
"'version'",
"]",
")",
"return",
"try",
":",
"self",
".",
"log",
".",
"info",
"(",
"info_message",
"[",
"data",
"[",
"'code'",
"]",
"]",
")",
"codes",
"[",
"data",
"[",
"'code'",
"]",
"]",
"(",
")",
"except",
"KeyError",
"as",
"e",
":",
"self",
".",
"log",
".",
"exception",
"(",
"e",
")",
"self",
".",
"log",
".",
"error",
"(",
"\"Unknown Info code %s!\"",
",",
"data",
"[",
"'code'",
"]",
")",
"raise"
] |
16827fa6aacb2c0e289aa852bf61a18df6905835
|
test
|
WebSocketConnection._error_handler
|
Handle Error messages and log them accordingly.
:param data:
:param ts:
|
btfxwss/connection.py
|
def _error_handler(self, data):
"""
Handle Error messages and log them accordingly.
:param data:
:param ts:
"""
errors = {10000: 'Unknown event',
10001: 'Generic error',
10008: 'Concurrency error',
10020: 'Request parameters error',
10050: 'Configuration setup failed',
10100: 'Failed authentication',
10111: 'Error in authentication request payload',
10112: 'Error in authentication request signature',
10113: 'Error in authentication request encryption',
10114: 'Error in authentication request nonce',
10200: 'Error in un-authentication request',
10300: 'Subscription Failed (generic)',
10301: 'Already Subscribed',
10302: 'Unknown channel',
10400: 'Subscription Failed (generic)',
10401: 'Not subscribed',
11000: 'Not ready, try again later',
20000: 'User is invalid!',
20051: 'Websocket server stopping',
20060: 'Websocket server resyncing',
20061: 'Websocket server resync complete'
}
try:
self.log.error(errors[data['code']])
except KeyError:
self.log.error("Received unknown error Code in message %s! "
"Reconnecting..", data)
|
def _error_handler(self, data):
"""
Handle Error messages and log them accordingly.
:param data:
:param ts:
"""
errors = {10000: 'Unknown event',
10001: 'Generic error',
10008: 'Concurrency error',
10020: 'Request parameters error',
10050: 'Configuration setup failed',
10100: 'Failed authentication',
10111: 'Error in authentication request payload',
10112: 'Error in authentication request signature',
10113: 'Error in authentication request encryption',
10114: 'Error in authentication request nonce',
10200: 'Error in un-authentication request',
10300: 'Subscription Failed (generic)',
10301: 'Already Subscribed',
10302: 'Unknown channel',
10400: 'Subscription Failed (generic)',
10401: 'Not subscribed',
11000: 'Not ready, try again later',
20000: 'User is invalid!',
20051: 'Websocket server stopping',
20060: 'Websocket server resyncing',
20061: 'Websocket server resync complete'
}
try:
self.log.error(errors[data['code']])
except KeyError:
self.log.error("Received unknown error Code in message %s! "
"Reconnecting..", data)
|
[
"Handle",
"Error",
"messages",
"and",
"log",
"them",
"accordingly",
"."
] |
Crypto-toolbox/btfxwss
|
python
|
https://github.com/Crypto-toolbox/btfxwss/blob/16827fa6aacb2c0e289aa852bf61a18df6905835/btfxwss/connection.py#L458-L491
|
[
"def",
"_error_handler",
"(",
"self",
",",
"data",
")",
":",
"errors",
"=",
"{",
"10000",
":",
"'Unknown event'",
",",
"10001",
":",
"'Generic error'",
",",
"10008",
":",
"'Concurrency error'",
",",
"10020",
":",
"'Request parameters error'",
",",
"10050",
":",
"'Configuration setup failed'",
",",
"10100",
":",
"'Failed authentication'",
",",
"10111",
":",
"'Error in authentication request payload'",
",",
"10112",
":",
"'Error in authentication request signature'",
",",
"10113",
":",
"'Error in authentication request encryption'",
",",
"10114",
":",
"'Error in authentication request nonce'",
",",
"10200",
":",
"'Error in un-authentication request'",
",",
"10300",
":",
"'Subscription Failed (generic)'",
",",
"10301",
":",
"'Already Subscribed'",
",",
"10302",
":",
"'Unknown channel'",
",",
"10400",
":",
"'Subscription Failed (generic)'",
",",
"10401",
":",
"'Not subscribed'",
",",
"11000",
":",
"'Not ready, try again later'",
",",
"20000",
":",
"'User is invalid!'",
",",
"20051",
":",
"'Websocket server stopping'",
",",
"20060",
":",
"'Websocket server resyncing'",
",",
"20061",
":",
"'Websocket server resync complete'",
"}",
"try",
":",
"self",
".",
"log",
".",
"error",
"(",
"errors",
"[",
"data",
"[",
"'code'",
"]",
"]",
")",
"except",
"KeyError",
":",
"self",
".",
"log",
".",
"error",
"(",
"\"Received unknown error Code in message %s! \"",
"\"Reconnecting..\"",
",",
"data",
")"
] |
16827fa6aacb2c0e289aa852bf61a18df6905835
|
test
|
WebSocketConnection._data_handler
|
Handles data messages by passing them up to the client.
:param data:
:param ts:
:return:
|
btfxwss/connection.py
|
def _data_handler(self, data, ts):
"""Handles data messages by passing them up to the client.
:param data:
:param ts:
:return:
"""
# Pass the data up to the Client
self.log.debug("_data_handler(): Passing %s to client..",
data)
self.pass_to_client('data', data, ts)
|
def _data_handler(self, data, ts):
"""Handles data messages by passing them up to the client.
:param data:
:param ts:
:return:
"""
# Pass the data up to the Client
self.log.debug("_data_handler(): Passing %s to client..",
data)
self.pass_to_client('data', data, ts)
|
[
"Handles",
"data",
"messages",
"by",
"passing",
"them",
"up",
"to",
"the",
"client",
"."
] |
Crypto-toolbox/btfxwss
|
python
|
https://github.com/Crypto-toolbox/btfxwss/blob/16827fa6aacb2c0e289aa852bf61a18df6905835/btfxwss/connection.py#L493-L503
|
[
"def",
"_data_handler",
"(",
"self",
",",
"data",
",",
"ts",
")",
":",
"# Pass the data up to the Client",
"self",
".",
"log",
".",
"debug",
"(",
"\"_data_handler(): Passing %s to client..\"",
",",
"data",
")",
"self",
".",
"pass_to_client",
"(",
"'data'",
",",
"data",
",",
"ts",
")"
] |
16827fa6aacb2c0e289aa852bf61a18df6905835
|
test
|
WebSocketConnection._resubscribe
|
Resubscribes to all channels found in self.channel_configs.
:param soft: if True, unsubscribes first.
:return: None
|
btfxwss/connection.py
|
def _resubscribe(self, soft=False):
"""Resubscribes to all channels found in self.channel_configs.
:param soft: if True, unsubscribes first.
:return: None
"""
# Restore non-default Bitfinex websocket configuration
if self.bitfinex_config:
self.send(**self.bitfinex_config)
q_list = []
while True:
try:
identifier, q = self.channel_configs.popitem(last=True if soft else False)
except KeyError:
break
q_list.append((identifier, q.copy()))
if identifier == 'auth':
self.send(**q, auth=True)
continue
if soft:
q['event'] = 'unsubscribe'
self.send(**q)
# Resubscribe for soft start.
if soft:
for identifier, q in reversed(q_list):
self.channel_configs[identifier] = q
self.send(**q)
else:
for identifier, q in q_list:
self.channel_configs[identifier] = q
|
def _resubscribe(self, soft=False):
"""Resubscribes to all channels found in self.channel_configs.
:param soft: if True, unsubscribes first.
:return: None
"""
# Restore non-default Bitfinex websocket configuration
if self.bitfinex_config:
self.send(**self.bitfinex_config)
q_list = []
while True:
try:
identifier, q = self.channel_configs.popitem(last=True if soft else False)
except KeyError:
break
q_list.append((identifier, q.copy()))
if identifier == 'auth':
self.send(**q, auth=True)
continue
if soft:
q['event'] = 'unsubscribe'
self.send(**q)
# Resubscribe for soft start.
if soft:
for identifier, q in reversed(q_list):
self.channel_configs[identifier] = q
self.send(**q)
else:
for identifier, q in q_list:
self.channel_configs[identifier] = q
|
[
"Resubscribes",
"to",
"all",
"channels",
"found",
"in",
"self",
".",
"channel_configs",
"."
] |
Crypto-toolbox/btfxwss
|
python
|
https://github.com/Crypto-toolbox/btfxwss/blob/16827fa6aacb2c0e289aa852bf61a18df6905835/btfxwss/connection.py#L505-L535
|
[
"def",
"_resubscribe",
"(",
"self",
",",
"soft",
"=",
"False",
")",
":",
"# Restore non-default Bitfinex websocket configuration",
"if",
"self",
".",
"bitfinex_config",
":",
"self",
".",
"send",
"(",
"*",
"*",
"self",
".",
"bitfinex_config",
")",
"q_list",
"=",
"[",
"]",
"while",
"True",
":",
"try",
":",
"identifier",
",",
"q",
"=",
"self",
".",
"channel_configs",
".",
"popitem",
"(",
"last",
"=",
"True",
"if",
"soft",
"else",
"False",
")",
"except",
"KeyError",
":",
"break",
"q_list",
".",
"append",
"(",
"(",
"identifier",
",",
"q",
".",
"copy",
"(",
")",
")",
")",
"if",
"identifier",
"==",
"'auth'",
":",
"self",
".",
"send",
"(",
"*",
"*",
"q",
",",
"auth",
"=",
"True",
")",
"continue",
"if",
"soft",
":",
"q",
"[",
"'event'",
"]",
"=",
"'unsubscribe'",
"self",
".",
"send",
"(",
"*",
"*",
"q",
")",
"# Resubscribe for soft start.",
"if",
"soft",
":",
"for",
"identifier",
",",
"q",
"in",
"reversed",
"(",
"q_list",
")",
":",
"self",
".",
"channel_configs",
"[",
"identifier",
"]",
"=",
"q",
"self",
".",
"send",
"(",
"*",
"*",
"q",
")",
"else",
":",
"for",
"identifier",
",",
"q",
"in",
"q_list",
":",
"self",
".",
"channel_configs",
"[",
"identifier",
"]",
"=",
"q"
] |
16827fa6aacb2c0e289aa852bf61a18df6905835
|
test
|
QueueProcessor.join
|
Set sentinel for run() method and join thread.
:param timeout:
:return:
|
btfxwss/queue_processor.py
|
def join(self, timeout=None):
"""Set sentinel for run() method and join thread.
:param timeout:
:return:
"""
self._stopped.set()
super(QueueProcessor, self).join(timeout=timeout)
|
def join(self, timeout=None):
"""Set sentinel for run() method and join thread.
:param timeout:
:return:
"""
self._stopped.set()
super(QueueProcessor, self).join(timeout=timeout)
|
[
"Set",
"sentinel",
"for",
"run",
"()",
"method",
"and",
"join",
"thread",
"."
] |
Crypto-toolbox/btfxwss
|
python
|
https://github.com/Crypto-toolbox/btfxwss/blob/16827fa6aacb2c0e289aa852bf61a18df6905835/btfxwss/queue_processor.py#L73-L80
|
[
"def",
"join",
"(",
"self",
",",
"timeout",
"=",
"None",
")",
":",
"self",
".",
"_stopped",
".",
"set",
"(",
")",
"super",
"(",
"QueueProcessor",
",",
"self",
")",
".",
"join",
"(",
"timeout",
"=",
"timeout",
")"
] |
16827fa6aacb2c0e289aa852bf61a18df6905835
|
test
|
QueueProcessor.run
|
Main routine.
:return:
|
btfxwss/queue_processor.py
|
def run(self):
"""Main routine.
:return:
"""
while not self._stopped.is_set():
try:
message = self.q.get(timeout=0.1)
except Empty:
continue
dtype, data, ts = message
if dtype in ('subscribed', 'unsubscribed', 'conf', 'auth', 'unauth'):
try:
self._response_handlers[dtype](dtype, data, ts)
except KeyError:
self.log.error("Dtype '%s' does not have a response "
"handler! (%s)", dtype, message)
elif dtype == 'data':
try:
channel_id = data[0]
if channel_id != 0:
# Get channel type associated with this data to the
# associated data type (from 'data' to
# 'book', 'ticker' or similar
channel_type, *_ = self.channel_directory[channel_id]
# Run the associated data handler for this channel type.
self._data_handlers[channel_type](channel_type, data, ts)
# Update time stamps.
self.update_timestamps(channel_id, ts)
else:
# This is data from auth channel, call handler
self._handle_account(data=data, ts=ts)
except KeyError:
self.log.error("Channel ID does not have a data handler! %s",
message)
else:
self.log.error("Unknown dtype on queue! %s", message)
continue
|
def run(self):
"""Main routine.
:return:
"""
while not self._stopped.is_set():
try:
message = self.q.get(timeout=0.1)
except Empty:
continue
dtype, data, ts = message
if dtype in ('subscribed', 'unsubscribed', 'conf', 'auth', 'unauth'):
try:
self._response_handlers[dtype](dtype, data, ts)
except KeyError:
self.log.error("Dtype '%s' does not have a response "
"handler! (%s)", dtype, message)
elif dtype == 'data':
try:
channel_id = data[0]
if channel_id != 0:
# Get channel type associated with this data to the
# associated data type (from 'data' to
# 'book', 'ticker' or similar
channel_type, *_ = self.channel_directory[channel_id]
# Run the associated data handler for this channel type.
self._data_handlers[channel_type](channel_type, data, ts)
# Update time stamps.
self.update_timestamps(channel_id, ts)
else:
# This is data from auth channel, call handler
self._handle_account(data=data, ts=ts)
except KeyError:
self.log.error("Channel ID does not have a data handler! %s",
message)
else:
self.log.error("Unknown dtype on queue! %s", message)
continue
|
[
"Main",
"routine",
"."
] |
Crypto-toolbox/btfxwss
|
python
|
https://github.com/Crypto-toolbox/btfxwss/blob/16827fa6aacb2c0e289aa852bf61a18df6905835/btfxwss/queue_processor.py#L82-L121
|
[
"def",
"run",
"(",
"self",
")",
":",
"while",
"not",
"self",
".",
"_stopped",
".",
"is_set",
"(",
")",
":",
"try",
":",
"message",
"=",
"self",
".",
"q",
".",
"get",
"(",
"timeout",
"=",
"0.1",
")",
"except",
"Empty",
":",
"continue",
"dtype",
",",
"data",
",",
"ts",
"=",
"message",
"if",
"dtype",
"in",
"(",
"'subscribed'",
",",
"'unsubscribed'",
",",
"'conf'",
",",
"'auth'",
",",
"'unauth'",
")",
":",
"try",
":",
"self",
".",
"_response_handlers",
"[",
"dtype",
"]",
"(",
"dtype",
",",
"data",
",",
"ts",
")",
"except",
"KeyError",
":",
"self",
".",
"log",
".",
"error",
"(",
"\"Dtype '%s' does not have a response \"",
"\"handler! (%s)\"",
",",
"dtype",
",",
"message",
")",
"elif",
"dtype",
"==",
"'data'",
":",
"try",
":",
"channel_id",
"=",
"data",
"[",
"0",
"]",
"if",
"channel_id",
"!=",
"0",
":",
"# Get channel type associated with this data to the",
"# associated data type (from 'data' to",
"# 'book', 'ticker' or similar",
"channel_type",
",",
"",
"*",
"_",
"=",
"self",
".",
"channel_directory",
"[",
"channel_id",
"]",
"# Run the associated data handler for this channel type.",
"self",
".",
"_data_handlers",
"[",
"channel_type",
"]",
"(",
"channel_type",
",",
"data",
",",
"ts",
")",
"# Update time stamps.",
"self",
".",
"update_timestamps",
"(",
"channel_id",
",",
"ts",
")",
"else",
":",
"# This is data from auth channel, call handler",
"self",
".",
"_handle_account",
"(",
"data",
"=",
"data",
",",
"ts",
"=",
"ts",
")",
"except",
"KeyError",
":",
"self",
".",
"log",
".",
"error",
"(",
"\"Channel ID does not have a data handler! %s\"",
",",
"message",
")",
"else",
":",
"self",
".",
"log",
".",
"error",
"(",
"\"Unknown dtype on queue! %s\"",
",",
"message",
")",
"continue"
] |
16827fa6aacb2c0e289aa852bf61a18df6905835
|
test
|
QueueProcessor._handle_subscribed
|
Handles responses to subscribe() commands.
Registers a channel id with the client and assigns a data handler to it.
:param dtype:
:param data:
:param ts:
:return:
|
btfxwss/queue_processor.py
|
def _handle_subscribed(self, dtype, data, ts,):
"""Handles responses to subscribe() commands.
Registers a channel id with the client and assigns a data handler to it.
:param dtype:
:param data:
:param ts:
:return:
"""
self.log.debug("_handle_subscribed: %s - %s - %s", dtype, data, ts)
channel_name = data.pop('channel')
channel_id = data.pop('chanId')
config = data
if 'pair' in config:
symbol = config['pair']
if symbol.startswith('t'):
symbol = symbol[1:]
elif 'symbol' in config:
symbol = config['symbol']
if symbol.startswith('t'):
symbol = symbol[1:]
elif 'key' in config:
symbol = config['key'].split(':')[2][1:] #layout type:interval:tPair
else:
symbol = None
if 'prec' in config and config['prec'].startswith('R'):
channel_name = 'raw_' + channel_name
self.channel_handlers[channel_id] = self._data_handlers[channel_name]
# Create a channel_name, symbol tuple to identify channels of same type
if 'key' in config:
identifier = (channel_name, symbol, config['key'].split(':')[1])
else:
identifier = (channel_name, symbol)
self.channel_handlers[channel_id] = identifier
self.channel_directory[identifier] = channel_id
self.channel_directory[channel_id] = identifier
self.log.info("Subscription succesful for channel %s", identifier)
|
def _handle_subscribed(self, dtype, data, ts,):
"""Handles responses to subscribe() commands.
Registers a channel id with the client and assigns a data handler to it.
:param dtype:
:param data:
:param ts:
:return:
"""
self.log.debug("_handle_subscribed: %s - %s - %s", dtype, data, ts)
channel_name = data.pop('channel')
channel_id = data.pop('chanId')
config = data
if 'pair' in config:
symbol = config['pair']
if symbol.startswith('t'):
symbol = symbol[1:]
elif 'symbol' in config:
symbol = config['symbol']
if symbol.startswith('t'):
symbol = symbol[1:]
elif 'key' in config:
symbol = config['key'].split(':')[2][1:] #layout type:interval:tPair
else:
symbol = None
if 'prec' in config and config['prec'].startswith('R'):
channel_name = 'raw_' + channel_name
self.channel_handlers[channel_id] = self._data_handlers[channel_name]
# Create a channel_name, symbol tuple to identify channels of same type
if 'key' in config:
identifier = (channel_name, symbol, config['key'].split(':')[1])
else:
identifier = (channel_name, symbol)
self.channel_handlers[channel_id] = identifier
self.channel_directory[identifier] = channel_id
self.channel_directory[channel_id] = identifier
self.log.info("Subscription succesful for channel %s", identifier)
|
[
"Handles",
"responses",
"to",
"subscribe",
"()",
"commands",
"."
] |
Crypto-toolbox/btfxwss
|
python
|
https://github.com/Crypto-toolbox/btfxwss/blob/16827fa6aacb2c0e289aa852bf61a18df6905835/btfxwss/queue_processor.py#L123-L164
|
[
"def",
"_handle_subscribed",
"(",
"self",
",",
"dtype",
",",
"data",
",",
"ts",
",",
")",
":",
"self",
".",
"log",
".",
"debug",
"(",
"\"_handle_subscribed: %s - %s - %s\"",
",",
"dtype",
",",
"data",
",",
"ts",
")",
"channel_name",
"=",
"data",
".",
"pop",
"(",
"'channel'",
")",
"channel_id",
"=",
"data",
".",
"pop",
"(",
"'chanId'",
")",
"config",
"=",
"data",
"if",
"'pair'",
"in",
"config",
":",
"symbol",
"=",
"config",
"[",
"'pair'",
"]",
"if",
"symbol",
".",
"startswith",
"(",
"'t'",
")",
":",
"symbol",
"=",
"symbol",
"[",
"1",
":",
"]",
"elif",
"'symbol'",
"in",
"config",
":",
"symbol",
"=",
"config",
"[",
"'symbol'",
"]",
"if",
"symbol",
".",
"startswith",
"(",
"'t'",
")",
":",
"symbol",
"=",
"symbol",
"[",
"1",
":",
"]",
"elif",
"'key'",
"in",
"config",
":",
"symbol",
"=",
"config",
"[",
"'key'",
"]",
".",
"split",
"(",
"':'",
")",
"[",
"2",
"]",
"[",
"1",
":",
"]",
"#layout type:interval:tPair",
"else",
":",
"symbol",
"=",
"None",
"if",
"'prec'",
"in",
"config",
"and",
"config",
"[",
"'prec'",
"]",
".",
"startswith",
"(",
"'R'",
")",
":",
"channel_name",
"=",
"'raw_'",
"+",
"channel_name",
"self",
".",
"channel_handlers",
"[",
"channel_id",
"]",
"=",
"self",
".",
"_data_handlers",
"[",
"channel_name",
"]",
"# Create a channel_name, symbol tuple to identify channels of same type",
"if",
"'key'",
"in",
"config",
":",
"identifier",
"=",
"(",
"channel_name",
",",
"symbol",
",",
"config",
"[",
"'key'",
"]",
".",
"split",
"(",
"':'",
")",
"[",
"1",
"]",
")",
"else",
":",
"identifier",
"=",
"(",
"channel_name",
",",
"symbol",
")",
"self",
".",
"channel_handlers",
"[",
"channel_id",
"]",
"=",
"identifier",
"self",
".",
"channel_directory",
"[",
"identifier",
"]",
"=",
"channel_id",
"self",
".",
"channel_directory",
"[",
"channel_id",
"]",
"=",
"identifier",
"self",
".",
"log",
".",
"info",
"(",
"\"Subscription succesful for channel %s\"",
",",
"identifier",
")"
] |
16827fa6aacb2c0e289aa852bf61a18df6905835
|
test
|
QueueProcessor._handle_unsubscribed
|
Handles responses to unsubscribe() commands.
Removes a channel id from the client.
:param dtype:
:param data:
:param ts:
:return:
|
btfxwss/queue_processor.py
|
def _handle_unsubscribed(self, dtype, data, ts):
"""Handles responses to unsubscribe() commands.
Removes a channel id from the client.
:param dtype:
:param data:
:param ts:
:return:
"""
self.log.debug("_handle_unsubscribed: %s - %s - %s", dtype, data, ts)
channel_id = data.pop('chanId')
# Unregister the channel from all internal attributes
chan_identifier = self.channel_directory.pop(channel_id)
self.channel_directory.pop(chan_identifier)
self.channel_handlers.pop(channel_id)
self.last_update.pop(channel_id)
self.log.info("Successfully unsubscribed from %s", chan_identifier)
|
def _handle_unsubscribed(self, dtype, data, ts):
"""Handles responses to unsubscribe() commands.
Removes a channel id from the client.
:param dtype:
:param data:
:param ts:
:return:
"""
self.log.debug("_handle_unsubscribed: %s - %s - %s", dtype, data, ts)
channel_id = data.pop('chanId')
# Unregister the channel from all internal attributes
chan_identifier = self.channel_directory.pop(channel_id)
self.channel_directory.pop(chan_identifier)
self.channel_handlers.pop(channel_id)
self.last_update.pop(channel_id)
self.log.info("Successfully unsubscribed from %s", chan_identifier)
|
[
"Handles",
"responses",
"to",
"unsubscribe",
"()",
"commands",
"."
] |
Crypto-toolbox/btfxwss
|
python
|
https://github.com/Crypto-toolbox/btfxwss/blob/16827fa6aacb2c0e289aa852bf61a18df6905835/btfxwss/queue_processor.py#L166-L184
|
[
"def",
"_handle_unsubscribed",
"(",
"self",
",",
"dtype",
",",
"data",
",",
"ts",
")",
":",
"self",
".",
"log",
".",
"debug",
"(",
"\"_handle_unsubscribed: %s - %s - %s\"",
",",
"dtype",
",",
"data",
",",
"ts",
")",
"channel_id",
"=",
"data",
".",
"pop",
"(",
"'chanId'",
")",
"# Unregister the channel from all internal attributes",
"chan_identifier",
"=",
"self",
".",
"channel_directory",
".",
"pop",
"(",
"channel_id",
")",
"self",
".",
"channel_directory",
".",
"pop",
"(",
"chan_identifier",
")",
"self",
".",
"channel_handlers",
".",
"pop",
"(",
"channel_id",
")",
"self",
".",
"last_update",
".",
"pop",
"(",
"channel_id",
")",
"self",
".",
"log",
".",
"info",
"(",
"\"Successfully unsubscribed from %s\"",
",",
"chan_identifier",
")"
] |
16827fa6aacb2c0e289aa852bf61a18df6905835
|
test
|
QueueProcessor._handle_auth
|
Handles authentication responses.
:param dtype:
:param data:
:param ts:
:return:
|
btfxwss/queue_processor.py
|
def _handle_auth(self, dtype, data, ts):
"""Handles authentication responses.
:param dtype:
:param data:
:param ts:
:return:
"""
# Contains keys status, chanId, userId, caps
if dtype == 'unauth':
raise NotImplementedError
channel_id = data.pop('chanId')
user_id = data.pop('userId')
identifier = ('auth', user_id)
self.channel_handlers[identifier] = channel_id
self.channel_directory[identifier] = channel_id
self.channel_directory[channel_id] = identifier
|
def _handle_auth(self, dtype, data, ts):
"""Handles authentication responses.
:param dtype:
:param data:
:param ts:
:return:
"""
# Contains keys status, chanId, userId, caps
if dtype == 'unauth':
raise NotImplementedError
channel_id = data.pop('chanId')
user_id = data.pop('userId')
identifier = ('auth', user_id)
self.channel_handlers[identifier] = channel_id
self.channel_directory[identifier] = channel_id
self.channel_directory[channel_id] = identifier
|
[
"Handles",
"authentication",
"responses",
"."
] |
Crypto-toolbox/btfxwss
|
python
|
https://github.com/Crypto-toolbox/btfxwss/blob/16827fa6aacb2c0e289aa852bf61a18df6905835/btfxwss/queue_processor.py#L186-L203
|
[
"def",
"_handle_auth",
"(",
"self",
",",
"dtype",
",",
"data",
",",
"ts",
")",
":",
"# Contains keys status, chanId, userId, caps",
"if",
"dtype",
"==",
"'unauth'",
":",
"raise",
"NotImplementedError",
"channel_id",
"=",
"data",
".",
"pop",
"(",
"'chanId'",
")",
"user_id",
"=",
"data",
".",
"pop",
"(",
"'userId'",
")",
"identifier",
"=",
"(",
"'auth'",
",",
"user_id",
")",
"self",
".",
"channel_handlers",
"[",
"identifier",
"]",
"=",
"channel_id",
"self",
".",
"channel_directory",
"[",
"identifier",
"]",
"=",
"channel_id",
"self",
".",
"channel_directory",
"[",
"channel_id",
"]",
"=",
"identifier"
] |
16827fa6aacb2c0e289aa852bf61a18df6905835
|
test
|
QueueProcessor._handle_conf
|
Handles configuration messages.
:param dtype:
:param data:
:param ts:
:return:
|
btfxwss/queue_processor.py
|
def _handle_conf(self, dtype, data, ts):
"""Handles configuration messages.
:param dtype:
:param data:
:param ts:
:return:
"""
self.log.debug("_handle_conf: %s - %s - %s", dtype, data, ts)
self.log.info("Configuration accepted: %s", dtype)
return
|
def _handle_conf(self, dtype, data, ts):
"""Handles configuration messages.
:param dtype:
:param data:
:param ts:
:return:
"""
self.log.debug("_handle_conf: %s - %s - %s", dtype, data, ts)
self.log.info("Configuration accepted: %s", dtype)
return
|
[
"Handles",
"configuration",
"messages",
"."
] |
Crypto-toolbox/btfxwss
|
python
|
https://github.com/Crypto-toolbox/btfxwss/blob/16827fa6aacb2c0e289aa852bf61a18df6905835/btfxwss/queue_processor.py#L205-L215
|
[
"def",
"_handle_conf",
"(",
"self",
",",
"dtype",
",",
"data",
",",
"ts",
")",
":",
"self",
".",
"log",
".",
"debug",
"(",
"\"_handle_conf: %s - %s - %s\"",
",",
"dtype",
",",
"data",
",",
"ts",
")",
"self",
".",
"log",
".",
"info",
"(",
"\"Configuration accepted: %s\"",
",",
"dtype",
")",
"return"
] |
16827fa6aacb2c0e289aa852bf61a18df6905835
|
test
|
QueueProcessor.update_timestamps
|
Updates the timestamp for the given channel id.
:param chan_id:
:param ts:
:return:
|
btfxwss/queue_processor.py
|
def update_timestamps(self, chan_id, ts):
"""Updates the timestamp for the given channel id.
:param chan_id:
:param ts:
:return:
"""
try:
self.last_update[chan_id] = ts
except KeyError:
self.log.warning("Attempted ts update of channel %s, but channel "
"not present anymore.",
self.channel_directory[chan_id])
|
def update_timestamps(self, chan_id, ts):
"""Updates the timestamp for the given channel id.
:param chan_id:
:param ts:
:return:
"""
try:
self.last_update[chan_id] = ts
except KeyError:
self.log.warning("Attempted ts update of channel %s, but channel "
"not present anymore.",
self.channel_directory[chan_id])
|
[
"Updates",
"the",
"timestamp",
"for",
"the",
"given",
"channel",
"id",
"."
] |
Crypto-toolbox/btfxwss
|
python
|
https://github.com/Crypto-toolbox/btfxwss/blob/16827fa6aacb2c0e289aa852bf61a18df6905835/btfxwss/queue_processor.py#L217-L229
|
[
"def",
"update_timestamps",
"(",
"self",
",",
"chan_id",
",",
"ts",
")",
":",
"try",
":",
"self",
".",
"last_update",
"[",
"chan_id",
"]",
"=",
"ts",
"except",
"KeyError",
":",
"self",
".",
"log",
".",
"warning",
"(",
"\"Attempted ts update of channel %s, but channel \"",
"\"not present anymore.\"",
",",
"self",
".",
"channel_directory",
"[",
"chan_id",
"]",
")"
] |
16827fa6aacb2c0e289aa852bf61a18df6905835
|
test
|
QueueProcessor._handle_account
|
Handles Account related data.
translation table for channel names:
Data Channels
os - Orders
hos - Historical Orders
ps - Positions
hts - Trades (snapshot)
te - Trade Event
tu - Trade Update
ws - Wallets
bu - Balance Info
miu - Margin Info
fiu - Funding Info
fos - Offers
hfos - Historical Offers
fcs - Credits
hfcs - Historical Credits
fls - Loans
hfls - Historical Loans
htfs - Funding Trades
n - Notifications (WIP)
:param dtype:
:param data:
:param ts:
:return:
|
btfxwss/queue_processor.py
|
def _handle_account(self, data, ts):
""" Handles Account related data.
translation table for channel names:
Data Channels
os - Orders
hos - Historical Orders
ps - Positions
hts - Trades (snapshot)
te - Trade Event
tu - Trade Update
ws - Wallets
bu - Balance Info
miu - Margin Info
fiu - Funding Info
fos - Offers
hfos - Historical Offers
fcs - Credits
hfcs - Historical Credits
fls - Loans
hfls - Historical Loans
htfs - Funding Trades
n - Notifications (WIP)
:param dtype:
:param data:
:param ts:
:return:
"""
# channel_short, data
chan_id, channel_short_name, *data = data
entry = (channel_short_name, data, ts)
self.account.put(entry)
|
def _handle_account(self, data, ts):
""" Handles Account related data.
translation table for channel names:
Data Channels
os - Orders
hos - Historical Orders
ps - Positions
hts - Trades (snapshot)
te - Trade Event
tu - Trade Update
ws - Wallets
bu - Balance Info
miu - Margin Info
fiu - Funding Info
fos - Offers
hfos - Historical Offers
fcs - Credits
hfcs - Historical Credits
fls - Loans
hfls - Historical Loans
htfs - Funding Trades
n - Notifications (WIP)
:param dtype:
:param data:
:param ts:
:return:
"""
# channel_short, data
chan_id, channel_short_name, *data = data
entry = (channel_short_name, data, ts)
self.account.put(entry)
|
[
"Handles",
"Account",
"related",
"data",
"."
] |
Crypto-toolbox/btfxwss
|
python
|
https://github.com/Crypto-toolbox/btfxwss/blob/16827fa6aacb2c0e289aa852bf61a18df6905835/btfxwss/queue_processor.py#L231-L263
|
[
"def",
"_handle_account",
"(",
"self",
",",
"data",
",",
"ts",
")",
":",
"# channel_short, data",
"chan_id",
",",
"channel_short_name",
",",
"",
"*",
"data",
"=",
"data",
"entry",
"=",
"(",
"channel_short_name",
",",
"data",
",",
"ts",
")",
"self",
".",
"account",
".",
"put",
"(",
"entry",
")"
] |
16827fa6aacb2c0e289aa852bf61a18df6905835
|
test
|
QueueProcessor._handle_ticker
|
Adds received ticker data to self.tickers dict, filed under its channel
id.
:param dtype:
:param data:
:param ts:
:return:
|
btfxwss/queue_processor.py
|
def _handle_ticker(self, dtype, data, ts):
"""Adds received ticker data to self.tickers dict, filed under its channel
id.
:param dtype:
:param data:
:param ts:
:return:
"""
self.log.debug("_handle_ticker: %s - %s - %s", dtype, data, ts)
channel_id, *data = data
channel_identifier = self.channel_directory[channel_id]
entry = (data, ts)
self.tickers[channel_identifier].put(entry)
|
def _handle_ticker(self, dtype, data, ts):
"""Adds received ticker data to self.tickers dict, filed under its channel
id.
:param dtype:
:param data:
:param ts:
:return:
"""
self.log.debug("_handle_ticker: %s - %s - %s", dtype, data, ts)
channel_id, *data = data
channel_identifier = self.channel_directory[channel_id]
entry = (data, ts)
self.tickers[channel_identifier].put(entry)
|
[
"Adds",
"received",
"ticker",
"data",
"to",
"self",
".",
"tickers",
"dict",
"filed",
"under",
"its",
"channel",
"id",
"."
] |
Crypto-toolbox/btfxwss
|
python
|
https://github.com/Crypto-toolbox/btfxwss/blob/16827fa6aacb2c0e289aa852bf61a18df6905835/btfxwss/queue_processor.py#L265-L279
|
[
"def",
"_handle_ticker",
"(",
"self",
",",
"dtype",
",",
"data",
",",
"ts",
")",
":",
"self",
".",
"log",
".",
"debug",
"(",
"\"_handle_ticker: %s - %s - %s\"",
",",
"dtype",
",",
"data",
",",
"ts",
")",
"channel_id",
",",
"",
"*",
"data",
"=",
"data",
"channel_identifier",
"=",
"self",
".",
"channel_directory",
"[",
"channel_id",
"]",
"entry",
"=",
"(",
"data",
",",
"ts",
")",
"self",
".",
"tickers",
"[",
"channel_identifier",
"]",
".",
"put",
"(",
"entry",
")"
] |
16827fa6aacb2c0e289aa852bf61a18df6905835
|
test
|
QueueProcessor._handle_book
|
Updates the order book stored in self.books[chan_id].
:param dtype:
:param data:
:param ts:
:return:
|
btfxwss/queue_processor.py
|
def _handle_book(self, dtype, data, ts):
"""Updates the order book stored in self.books[chan_id].
:param dtype:
:param data:
:param ts:
:return:
"""
self.log.debug("_handle_book: %s - %s - %s", dtype, data, ts)
channel_id, *data = data
log.debug("ts: %s\tchan_id: %s\tdata: %s", ts, channel_id, data)
channel_identifier = self.channel_directory[channel_id]
entry = (data, ts)
self.books[channel_identifier].put(entry)
|
def _handle_book(self, dtype, data, ts):
"""Updates the order book stored in self.books[chan_id].
:param dtype:
:param data:
:param ts:
:return:
"""
self.log.debug("_handle_book: %s - %s - %s", dtype, data, ts)
channel_id, *data = data
log.debug("ts: %s\tchan_id: %s\tdata: %s", ts, channel_id, data)
channel_identifier = self.channel_directory[channel_id]
entry = (data, ts)
self.books[channel_identifier].put(entry)
|
[
"Updates",
"the",
"order",
"book",
"stored",
"in",
"self",
".",
"books",
"[",
"chan_id",
"]",
"."
] |
Crypto-toolbox/btfxwss
|
python
|
https://github.com/Crypto-toolbox/btfxwss/blob/16827fa6aacb2c0e289aa852bf61a18df6905835/btfxwss/queue_processor.py#L281-L294
|
[
"def",
"_handle_book",
"(",
"self",
",",
"dtype",
",",
"data",
",",
"ts",
")",
":",
"self",
".",
"log",
".",
"debug",
"(",
"\"_handle_book: %s - %s - %s\"",
",",
"dtype",
",",
"data",
",",
"ts",
")",
"channel_id",
",",
"",
"*",
"data",
"=",
"data",
"log",
".",
"debug",
"(",
"\"ts: %s\\tchan_id: %s\\tdata: %s\"",
",",
"ts",
",",
"channel_id",
",",
"data",
")",
"channel_identifier",
"=",
"self",
".",
"channel_directory",
"[",
"channel_id",
"]",
"entry",
"=",
"(",
"data",
",",
"ts",
")",
"self",
".",
"books",
"[",
"channel_identifier",
"]",
".",
"put",
"(",
"entry",
")"
] |
16827fa6aacb2c0e289aa852bf61a18df6905835
|
test
|
QueueProcessor._handle_raw_book
|
Updates the raw order books stored in self.raw_books[chan_id].
:param dtype:
:param data:
:param ts:
:return:
|
btfxwss/queue_processor.py
|
def _handle_raw_book(self, dtype, data, ts):
"""Updates the raw order books stored in self.raw_books[chan_id].
:param dtype:
:param data:
:param ts:
:return:
"""
self.log.debug("_handle_raw_book: %s - %s - %s", dtype, data, ts)
channel_id, *data = data
channel_identifier = self.channel_directory[channel_id]
entry = (data, ts)
self.raw_books[channel_identifier].put(entry)
|
def _handle_raw_book(self, dtype, data, ts):
"""Updates the raw order books stored in self.raw_books[chan_id].
:param dtype:
:param data:
:param ts:
:return:
"""
self.log.debug("_handle_raw_book: %s - %s - %s", dtype, data, ts)
channel_id, *data = data
channel_identifier = self.channel_directory[channel_id]
entry = (data, ts)
self.raw_books[channel_identifier].put(entry)
|
[
"Updates",
"the",
"raw",
"order",
"books",
"stored",
"in",
"self",
".",
"raw_books",
"[",
"chan_id",
"]",
"."
] |
Crypto-toolbox/btfxwss
|
python
|
https://github.com/Crypto-toolbox/btfxwss/blob/16827fa6aacb2c0e289aa852bf61a18df6905835/btfxwss/queue_processor.py#L296-L308
|
[
"def",
"_handle_raw_book",
"(",
"self",
",",
"dtype",
",",
"data",
",",
"ts",
")",
":",
"self",
".",
"log",
".",
"debug",
"(",
"\"_handle_raw_book: %s - %s - %s\"",
",",
"dtype",
",",
"data",
",",
"ts",
")",
"channel_id",
",",
"",
"*",
"data",
"=",
"data",
"channel_identifier",
"=",
"self",
".",
"channel_directory",
"[",
"channel_id",
"]",
"entry",
"=",
"(",
"data",
",",
"ts",
")",
"self",
".",
"raw_books",
"[",
"channel_identifier",
"]",
".",
"put",
"(",
"entry",
")"
] |
16827fa6aacb2c0e289aa852bf61a18df6905835
|
test
|
QueueProcessor._handle_trades
|
Files trades in self._trades[chan_id].
:param dtype:
:param data:
:param ts:
:return:
|
btfxwss/queue_processor.py
|
def _handle_trades(self, dtype, data, ts):
"""Files trades in self._trades[chan_id].
:param dtype:
:param data:
:param ts:
:return:
"""
self.log.debug("_handle_trades: %s - %s - %s", dtype, data, ts)
channel_id, *data = data
channel_identifier = self.channel_directory[channel_id]
entry = (data, ts)
self.trades[channel_identifier].put(entry)
|
def _handle_trades(self, dtype, data, ts):
"""Files trades in self._trades[chan_id].
:param dtype:
:param data:
:param ts:
:return:
"""
self.log.debug("_handle_trades: %s - %s - %s", dtype, data, ts)
channel_id, *data = data
channel_identifier = self.channel_directory[channel_id]
entry = (data, ts)
self.trades[channel_identifier].put(entry)
|
[
"Files",
"trades",
"in",
"self",
".",
"_trades",
"[",
"chan_id",
"]",
"."
] |
Crypto-toolbox/btfxwss
|
python
|
https://github.com/Crypto-toolbox/btfxwss/blob/16827fa6aacb2c0e289aa852bf61a18df6905835/btfxwss/queue_processor.py#L310-L322
|
[
"def",
"_handle_trades",
"(",
"self",
",",
"dtype",
",",
"data",
",",
"ts",
")",
":",
"self",
".",
"log",
".",
"debug",
"(",
"\"_handle_trades: %s - %s - %s\"",
",",
"dtype",
",",
"data",
",",
"ts",
")",
"channel_id",
",",
"",
"*",
"data",
"=",
"data",
"channel_identifier",
"=",
"self",
".",
"channel_directory",
"[",
"channel_id",
"]",
"entry",
"=",
"(",
"data",
",",
"ts",
")",
"self",
".",
"trades",
"[",
"channel_identifier",
"]",
".",
"put",
"(",
"entry",
")"
] |
16827fa6aacb2c0e289aa852bf61a18df6905835
|
test
|
QueueProcessor._handle_candles
|
Stores OHLC data received via wss in self.candles[chan_id].
:param dtype:
:param data:
:param ts:
:return:
|
btfxwss/queue_processor.py
|
def _handle_candles(self, dtype, data, ts):
"""Stores OHLC data received via wss in self.candles[chan_id].
:param dtype:
:param data:
:param ts:
:return:
"""
self.log.debug("_handle_candles: %s - %s - %s", dtype, data, ts)
channel_id, *data = data
channel_identifier = self.channel_directory[channel_id]
entry = (data, ts)
self.candles[channel_identifier].put(entry)
|
def _handle_candles(self, dtype, data, ts):
"""Stores OHLC data received via wss in self.candles[chan_id].
:param dtype:
:param data:
:param ts:
:return:
"""
self.log.debug("_handle_candles: %s - %s - %s", dtype, data, ts)
channel_id, *data = data
channel_identifier = self.channel_directory[channel_id]
entry = (data, ts)
self.candles[channel_identifier].put(entry)
|
[
"Stores",
"OHLC",
"data",
"received",
"via",
"wss",
"in",
"self",
".",
"candles",
"[",
"chan_id",
"]",
"."
] |
Crypto-toolbox/btfxwss
|
python
|
https://github.com/Crypto-toolbox/btfxwss/blob/16827fa6aacb2c0e289aa852bf61a18df6905835/btfxwss/queue_processor.py#L324-L336
|
[
"def",
"_handle_candles",
"(",
"self",
",",
"dtype",
",",
"data",
",",
"ts",
")",
":",
"self",
".",
"log",
".",
"debug",
"(",
"\"_handle_candles: %s - %s - %s\"",
",",
"dtype",
",",
"data",
",",
"ts",
")",
"channel_id",
",",
"",
"*",
"data",
"=",
"data",
"channel_identifier",
"=",
"self",
".",
"channel_directory",
"[",
"channel_id",
"]",
"entry",
"=",
"(",
"data",
",",
"ts",
")",
"self",
".",
"candles",
"[",
"channel_identifier",
"]",
".",
"put",
"(",
"entry",
")"
] |
16827fa6aacb2c0e289aa852bf61a18df6905835
|
test
|
BtfxWss.reset
|
Reset the client.
:return:
|
btfxwss/client.py
|
def reset(self):
"""Reset the client.
:return:
"""
self.conn.reconnect()
while not self.conn.connected.is_set():
log.info("reset(): Waiting for connection to be set up..")
time.sleep(1)
for key in self.channel_configs:
self.conn.send(**self.channel_configs[key])
|
def reset(self):
"""Reset the client.
:return:
"""
self.conn.reconnect()
while not self.conn.connected.is_set():
log.info("reset(): Waiting for connection to be set up..")
time.sleep(1)
for key in self.channel_configs:
self.conn.send(**self.channel_configs[key])
|
[
"Reset",
"the",
"client",
"."
] |
Crypto-toolbox/btfxwss
|
python
|
https://github.com/Crypto-toolbox/btfxwss/blob/16827fa6aacb2c0e289aa852bf61a18df6905835/btfxwss/client.py#L78-L89
|
[
"def",
"reset",
"(",
"self",
")",
":",
"self",
".",
"conn",
".",
"reconnect",
"(",
")",
"while",
"not",
"self",
".",
"conn",
".",
"connected",
".",
"is_set",
"(",
")",
":",
"log",
".",
"info",
"(",
"\"reset(): Waiting for connection to be set up..\"",
")",
"time",
".",
"sleep",
"(",
"1",
")",
"for",
"key",
"in",
"self",
".",
"channel_configs",
":",
"self",
".",
"conn",
".",
"send",
"(",
"*",
"*",
"self",
".",
"channel_configs",
"[",
"key",
"]",
")"
] |
16827fa6aacb2c0e289aa852bf61a18df6905835
|
test
|
BtfxWss.candles
|
Return a queue containing all received candles data.
:param pair: str, Symbol pair to request data for
:param timeframe: str
:return: Queue()
|
btfxwss/client.py
|
def candles(self, pair, timeframe=None):
"""Return a queue containing all received candles data.
:param pair: str, Symbol pair to request data for
:param timeframe: str
:return: Queue()
"""
timeframe = '1m' if not timeframe else timeframe
key = ('candles', pair, timeframe)
return self.queue_processor.candles[key]
|
def candles(self, pair, timeframe=None):
"""Return a queue containing all received candles data.
:param pair: str, Symbol pair to request data for
:param timeframe: str
:return: Queue()
"""
timeframe = '1m' if not timeframe else timeframe
key = ('candles', pair, timeframe)
return self.queue_processor.candles[key]
|
[
"Return",
"a",
"queue",
"containing",
"all",
"received",
"candles",
"data",
"."
] |
Crypto-toolbox/btfxwss
|
python
|
https://github.com/Crypto-toolbox/btfxwss/blob/16827fa6aacb2c0e289aa852bf61a18df6905835/btfxwss/client.py#L131-L140
|
[
"def",
"candles",
"(",
"self",
",",
"pair",
",",
"timeframe",
"=",
"None",
")",
":",
"timeframe",
"=",
"'1m'",
"if",
"not",
"timeframe",
"else",
"timeframe",
"key",
"=",
"(",
"'candles'",
",",
"pair",
",",
"timeframe",
")",
"return",
"self",
".",
"queue_processor",
".",
"candles",
"[",
"key",
"]"
] |
16827fa6aacb2c0e289aa852bf61a18df6905835
|
test
|
BtfxWss.config
|
Send configuration to websocket server
:param decimals_as_strings: bool, turn on/off decimals as strings
:param ts_as_dates: bool, decide to request timestamps as dates instead
:param sequencing: bool, turn on sequencing
:param ts: bool, request the timestamp to be appended to every array
sent by the server
:param kwargs:
:return:
|
btfxwss/client.py
|
def config(self, decimals_as_strings=True, ts_as_dates=False,
sequencing=False, ts=False, **kwargs):
"""Send configuration to websocket server
:param decimals_as_strings: bool, turn on/off decimals as strings
:param ts_as_dates: bool, decide to request timestamps as dates instead
:param sequencing: bool, turn on sequencing
:param ts: bool, request the timestamp to be appended to every array
sent by the server
:param kwargs:
:return:
"""
flags = 0
if decimals_as_strings:
flags += 8
if ts_as_dates:
flags += 32
if ts:
flags += 32768
if sequencing:
flags += 65536
q = {'event': 'conf', 'flags': flags}
q.update(kwargs)
self.conn.bitfinex_config = q
self.conn.send(**q)
|
def config(self, decimals_as_strings=True, ts_as_dates=False,
sequencing=False, ts=False, **kwargs):
"""Send configuration to websocket server
:param decimals_as_strings: bool, turn on/off decimals as strings
:param ts_as_dates: bool, decide to request timestamps as dates instead
:param sequencing: bool, turn on sequencing
:param ts: bool, request the timestamp to be appended to every array
sent by the server
:param kwargs:
:return:
"""
flags = 0
if decimals_as_strings:
flags += 8
if ts_as_dates:
flags += 32
if ts:
flags += 32768
if sequencing:
flags += 65536
q = {'event': 'conf', 'flags': flags}
q.update(kwargs)
self.conn.bitfinex_config = q
self.conn.send(**q)
|
[
"Send",
"configuration",
"to",
"websocket",
"server"
] |
Crypto-toolbox/btfxwss
|
python
|
https://github.com/Crypto-toolbox/btfxwss/blob/16827fa6aacb2c0e289aa852bf61a18df6905835/btfxwss/client.py#L161-L185
|
[
"def",
"config",
"(",
"self",
",",
"decimals_as_strings",
"=",
"True",
",",
"ts_as_dates",
"=",
"False",
",",
"sequencing",
"=",
"False",
",",
"ts",
"=",
"False",
",",
"*",
"*",
"kwargs",
")",
":",
"flags",
"=",
"0",
"if",
"decimals_as_strings",
":",
"flags",
"+=",
"8",
"if",
"ts_as_dates",
":",
"flags",
"+=",
"32",
"if",
"ts",
":",
"flags",
"+=",
"32768",
"if",
"sequencing",
":",
"flags",
"+=",
"65536",
"q",
"=",
"{",
"'event'",
":",
"'conf'",
",",
"'flags'",
":",
"flags",
"}",
"q",
".",
"update",
"(",
"kwargs",
")",
"self",
".",
"conn",
".",
"bitfinex_config",
"=",
"q",
"self",
".",
"conn",
".",
"send",
"(",
"*",
"*",
"q",
")"
] |
16827fa6aacb2c0e289aa852bf61a18df6905835
|
test
|
BtfxWss.subscribe_to_ticker
|
Subscribe to the passed pair's ticker channel.
:param pair: str, Symbol pair to request data for
:param kwargs:
:return:
|
btfxwss/client.py
|
def subscribe_to_ticker(self, pair, **kwargs):
"""Subscribe to the passed pair's ticker channel.
:param pair: str, Symbol pair to request data for
:param kwargs:
:return:
"""
identifier = ('ticker', pair)
self._subscribe('ticker', identifier, symbol=pair, **kwargs)
|
def subscribe_to_ticker(self, pair, **kwargs):
"""Subscribe to the passed pair's ticker channel.
:param pair: str, Symbol pair to request data for
:param kwargs:
:return:
"""
identifier = ('ticker', pair)
self._subscribe('ticker', identifier, symbol=pair, **kwargs)
|
[
"Subscribe",
"to",
"the",
"passed",
"pair",
"s",
"ticker",
"channel",
"."
] |
Crypto-toolbox/btfxwss
|
python
|
https://github.com/Crypto-toolbox/btfxwss/blob/16827fa6aacb2c0e289aa852bf61a18df6905835/btfxwss/client.py#L188-L196
|
[
"def",
"subscribe_to_ticker",
"(",
"self",
",",
"pair",
",",
"*",
"*",
"kwargs",
")",
":",
"identifier",
"=",
"(",
"'ticker'",
",",
"pair",
")",
"self",
".",
"_subscribe",
"(",
"'ticker'",
",",
"identifier",
",",
"symbol",
"=",
"pair",
",",
"*",
"*",
"kwargs",
")"
] |
16827fa6aacb2c0e289aa852bf61a18df6905835
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.