Stop when connection is lost.
|
def connection_lost(self, exc):
"""Stop when connection is lost."""
if exc:
self.log.exception('disconnected due to exception')
else:
self.log.info('disconnected because of close/abort.')
self._closed.set()
|
Send off parsed telegram to handling callback.
|
def handle_telegram(self, telegram):
"""Send off parsed telegram to handling callback."""
self.log.debug('got telegram: %s', telegram)
try:
parsed_telegram = self.telegram_parser.parse(telegram)
except InvalidChecksumError as e:
self.log.warning(str(e))
except ParseError:
self.log.exception("failed to parse telegram")
else:
self.telegram_callback(parsed_telegram)
|
Parse telegram from string to dict.
|
def parse(self, telegram_data):
"""
Parse telegram from string to dict.
The telegram str type makes python 2.x integration easier.
:param str telegram_data: full telegram from start ('/') to checksum
('!ABCD') including line endings in between the telegram's lines
:rtype: dict
:returns: Shortened example:
{
..
r'\d-\d:96\.1\.1.+?\r\n': <CosemObject>, # EQUIPMENT_IDENTIFIER
r'\d-\d:1\.8\.1.+?\r\n': <CosemObject>, # ELECTRICITY_USED_TARIFF_1
r'\d-\d:24\.3\.0.+?\r\n.+?\r\n': <MBusObject>, # GAS_METER_READING
..
}
:raises ParseError:
:raises InvalidChecksumError:
"""
if self.apply_checksum_validation \
and self.telegram_specification['checksum_support']:
self.validate_checksum(telegram_data)
telegram = {}
for signature, parser in self.telegram_specification['objects'].items():
match = re.search(signature, telegram_data, re.DOTALL)
# Some signatures are optional and may not be present,
# so only parse lines that match
if match:
telegram[signature] = parser.parse(match.group(0))
return telegram
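A minimal sketch of the signature matching above; the telegram line is synthetic, and the signature is the ELECTRICITY_USED_TARIFF_1 example from the docstring:

import re

# Synthetic single telegram line; parse() hands the matched span to
# the object's parser via parser.parse(match.group(0)).
telegram_data = '1-0:1.8.1(000581.161*kWh)\r\n'
signature = r'\d-\d:1\.8\.1.+?\r\n'

match = re.search(signature, telegram_data, re.DOTALL)
if match:
    print(repr(match.group(0)))  # the slice handed to the object parser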
|
Validate the checksum of a telegram; raises ParseError if the telegram is incomplete and InvalidChecksumError on a CRC mismatch.
|
def validate_checksum(telegram):
"""
:param str telegram:
:raises ParseError:
:raises InvalidChecksumError:
"""
# Extract the part for which the checksum applies.
checksum_contents = re.search(r'\/.+\!', telegram, re.DOTALL)
# Extract the hexadecimal checksum value itself.
# The line ending '\r\n' for the checksum line can be ignored.
checksum_hex = re.search(r'((?<=\!)[0-9A-Z]{4})+', telegram)
if not checksum_contents or not checksum_hex:
raise ParseError(
'Failed to perform CRC validation because the telegram is '
'incomplete. The checksum and/or content values are missing.'
)
calculated_crc = CRC16().calculate(checksum_contents.group(0))
expected_crc = int(checksum_hex.group(0), base=16)
if calculated_crc != expected_crc:
raise InvalidChecksumError(
"Invalid telegram. The CRC checksum '{}' does not match the "
"expected '{}'".format(
calculated_crc,
expected_crc
)
)
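A standalone sketch of the two extractions above, run on a synthetic telegram string:

import re

# Minimal synthetic telegram: contents run from '/' to '!', followed
# by a four-character hex checksum.
telegram = '/ISK5\r\n1-0:1.8.1(000581.161*kWh)\r\n!1A2B\r\n'
contents = re.search(r'\/.+\!', telegram, re.DOTALL)
checksum_hex = re.search(r'((?<=\!)[0-9A-Z]{4})+', telegram)
print(repr(contents.group(0)))  # the span the CRC16 is computed over
print(checksum_hex.group(0))    # '1A2B'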
|
Remove telegram from buffer and incomplete data preceding it. This is easier than validating the data before adding it to the buffer.
|
def _remove(self, telegram):
"""
Remove telegram from buffer and incomplete data preceding it. This
is easier than validating the data before adding it to the buffer.
:param str telegram:
:return:
"""
# Remove data leading up to the telegram and the telegram itself.
index = self._buffer.index(telegram) + len(telegram)
self._buffer = self._buffer[index:]
|
Get the version of the package from the given file by executing it and extracting the given name.
|
def get_version(file, name='__version__'):
"""Get the version of the package from the given file by
executing it and extracting the given `name`.
"""
path = os.path.realpath(file)
version_ns = {}
with io.open(path, encoding="utf8") as f:
exec(f.read(), {}, version_ns)
return version_ns[name]
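A usage sketch; the path and its contents are hypothetical:

# Suppose 'mypackage/_version.py' (hypothetical) contains the line
#     __version__ = '1.2.3'
# get_version execs that file in an empty namespace and extracts it:
version = get_version('mypackage/_version.py')
print(version)  # '1.2.3'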
|
Given a list of range specifiers for python, ensure compatibility.
|
def ensure_python(specs):
"""Given a list of range specifiers for python, ensure compatibility.
"""
if not isinstance(specs, (list, tuple)):
specs = [specs]
v = sys.version_info
part = '%s.%s' % (v.major, v.minor)
for spec in specs:
if part == spec:
return
try:
if eval(part + spec):
return
except SyntaxError:
pass
raise ValueError('Python version %s unsupported' % part)
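Because the comparison is assembled textually and eval'd, a spec such as '>=3.6' becomes a float comparison; a small illustration, including one caveat of this approach:

# What ensure_python evaluates for part='3.9' and spec='>=3.6':
print(eval('3.9' + '>=3.6'))   # True, since 3.9 >= 3.6
# Caveat: as floats, two-digit minors misorder - '3.10' parses as 3.1,
# so eval('3.10>=3.6') is False even though 3.10 is the newer version.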
|
Find all of the packages.
|
def find_packages(top=HERE):
"""
Find all of the packages.
"""
packages = []
for d, dirs, _ in os.walk(top, followlinks=True):
if os.path.exists(pjoin(d, '__init__.py')):
packages.append(os.path.relpath(d, top).replace(os.path.sep, '.'))
elif d != top:
# Do not look for packages in subfolders if current is not a package
dirs[:] = []
return packages
|
Create a command class with the given optional prerelease class.
|
def create_cmdclass(prerelease_cmd=None, package_data_spec=None,
data_files_spec=None):
"""Create a command class with the given optional prerelease class.
Parameters
----------
prerelease_cmd: (name, Command) tuple, optional
The command to run before releasing.
package_data_spec: dict, optional
A dictionary whose keys are the dotted package names and
whose values are a list of glob patterns.
data_files_spec: list, optional
A list of (path, dname, pattern) tuples where the path is the
`data_files` install path, dname is the source directory, and the
pattern is a glob pattern.
Notes
-----
We use specs so that we can find the files *after* the build
command has run.
The package data glob patterns should be relative paths from the package
folder containing the __init__.py file, which is given as the package
name.
e.g. `dict(foo=['./bar/*', './baz/**'])`
The data files directories should be absolute paths or relative paths
from the root directory of the repository. Data files are specified
differently from `package_data` because we need a separate path entry
for each nested folder in `data_files`, and this makes it easier to
parse.
    e.g. `('share/foo/bar', 'pkgname/bizz', '*')`
"""
wrapped = [prerelease_cmd] if prerelease_cmd else []
if package_data_spec or data_files_spec:
wrapped.append('handle_files')
wrapper = functools.partial(_wrap_command, wrapped)
handle_files = _get_file_handler(package_data_spec, data_files_spec)
if 'bdist_egg' in sys.argv:
egg = wrapper(bdist_egg, strict=True)
else:
egg = bdist_egg_disabled
cmdclass = dict(
build_py=wrapper(build_py, strict=is_repo),
bdist_egg=egg,
sdist=wrapper(sdist, strict=True),
handle_files=handle_files,
)
if bdist_wheel:
cmdclass['bdist_wheel'] = wrapper(bdist_wheel, strict=True)
cmdclass['develop'] = wrapper(develop, strict=True)
return cmdclass
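A hedged call sketch, reusing the spec shapes from the docstring above:

# Wires 'handle_files' into the build so package_data and data_files
# are resolved after the build commands have produced their outputs.
cmdclass = create_cmdclass(
    package_data_spec=dict(foo=['./bar/*', './baz/**']),
    data_files_spec=[('share/foo/bar', 'pkgname/bizz', '*')],
)
# Then pass it to setuptools: setup(..., cmdclass=cmdclass)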
|
Create a command that calls the given function.
|
def command_for_func(func):
"""Create a command that calls the given function."""
class FuncCommand(BaseCommand):
def run(self):
func()
update_package_data(self.distribution)
return FuncCommand
|
Echo a command before running it. Defaults to repo as cwd
|
def run(cmd, **kwargs):
"""Echo a command before running it. Defaults to repo as cwd"""
log.info('> ' + list2cmdline(cmd))
kwargs.setdefault('cwd', HERE)
kwargs.setdefault('shell', os.name == 'nt')
if not isinstance(cmd, (list, tuple)) and os.name != 'nt':
cmd = shlex.split(cmd)
cmd[0] = which(cmd[0])
return subprocess.check_call(cmd, **kwargs)
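Usage sketch (assumes git is on PATH):

run(['git', 'status'])  # echoed as '> git status', cwd defaults to HERE
run('git status')       # string form is shlex-split on non-Windows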
|
Gets the newest/oldest mtime for all files in a directory.
|
def recursive_mtime(path, newest=True):
"""Gets the newest/oldest mtime for all files in a directory."""
if os.path.isfile(path):
return mtime(path)
current_extreme = None
for dirname, dirnames, filenames in os.walk(path, topdown=False):
for filename in filenames:
mt = mtime(pjoin(dirname, filename))
if newest: # Put outside of loop?
if mt >= (current_extreme or mt):
current_extreme = mt
elif mt <= (current_extreme or mt):
current_extreme = mt
return current_extreme
|
Return a Command that checks that certain files exist.
|
def ensure_targets(targets):
"""Return a Command that checks that certain files exist.
Raises a ValueError if any of the files are missing.
Note: The check is skipped if the `--skip-npm` flag is used.
"""
class TargetsCheck(BaseCommand):
def run(self):
if skip_npm:
log.info('Skipping target checks')
return
missing = [t for t in targets if not os.path.exists(t)]
if missing:
raise ValueError(('missing files: %s' % missing))
return TargetsCheck
|
Wrap a setup command
|
def _wrap_command(cmds, cls, strict=True):
"""Wrap a setup command
Parameters
----------
cmds: list(str)
The names of the other commands to run prior to the command.
strict: boolean, optional
Whether to raise errors when a pre-command fails.
"""
class WrappedCommand(cls):
def run(self):
if not getattr(self, 'uninstall', None):
try:
[self.run_command(cmd) for cmd in cmds]
except Exception:
if strict:
raise
else:
pass
# update package data
update_package_data(self.distribution)
result = cls.run(self)
return result
return WrappedCommand
|
Get a package_data and data_files handler command.
|
def _get_file_handler(package_data_spec, data_files_spec):
"""Get a package_data and data_files handler command.
"""
class FileHandler(BaseCommand):
def run(self):
package_data = self.distribution.package_data
package_spec = package_data_spec or dict()
for (key, patterns) in package_spec.items():
package_data[key] = _get_package_data(key, patterns)
self.distribution.data_files = _get_data_files(
data_files_spec, self.distribution.data_files
)
return FileHandler
|
Expand data file specs into valid data files metadata.
|
def _get_data_files(data_specs, existing):
"""Expand data file specs into valid data files metadata.
Parameters
----------
data_specs: list of tuples
See [createcmdclass] for description.
existing: list of tuples
The existing distribution data_files metadata.
Returns
-------
A valid list of data_files items.
"""
# Extract the existing data files into a staging object.
file_data = defaultdict(list)
for (path, files) in existing or []:
file_data[path] = files
# Extract the files and assign them to the proper data
# files path.
for (path, dname, pattern) in data_specs or []:
dname = dname.replace(os.sep, '/')
offset = len(dname) + 1
files = _get_files(pjoin(dname, pattern))
for fname in files:
# Normalize the path.
root = os.path.dirname(fname)
full_path = '/'.join([path, root[offset:]])
if full_path.endswith('/'):
full_path = full_path[:-1]
file_data[full_path].append(fname)
# Construct the data files spec.
data_files = []
for (path, files) in file_data.items():
data_files.append((path, files))
return data_files
|
Expand file patterns to a list of package_data paths.
|
def _get_package_data(root, file_patterns=None):
"""Expand file patterns to a list of `package_data` paths.
Parameters
-----------
root: str
The relative path to the package root from `HERE`.
file_patterns: list or str, optional
A list of glob patterns for the data file locations.
The globs can be recursive if they include a `**`.
They should be relative paths from the root or
absolute paths. If not given, all files will be used.
Note:
Files in `node_modules` are ignored.
"""
if file_patterns is None:
file_patterns = ['*']
return _get_files(file_patterns, pjoin(HERE, root))
|
Translate and compile a glob pattern to a regular expression matcher.
|
def _compile_pattern(pat, ignore_case=True):
"""Translate and compile a glob pattern to a regular expression matcher."""
if isinstance(pat, bytes):
pat_str = pat.decode('ISO-8859-1')
res_str = _translate_glob(pat_str)
res = res_str.encode('ISO-8859-1')
else:
res = _translate_glob(pat)
flags = re.IGNORECASE if ignore_case else 0
return re.compile(res, flags=flags).match
|
Iterate over all the parts of a path.
|
def _iexplode_path(path):
"""Iterate over all the parts of a path.
Splits path recursively with os.path.split().
"""
(head, tail) = os.path.split(path)
if not head or (not tail and head == path):
if head:
yield head
if tail or not head:
yield tail
return
for p in _iexplode_path(head):
yield p
yield tail
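For example (POSIX paths):

print(list(_iexplode_path('foo/bar/baz.py')))  # ['foo', 'bar', 'baz.py']
print(list(_iexplode_path('/abs/path')))       # ['/', 'abs', 'path']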
|
Translate a glob PATTERN to a regular expression.
|
def _translate_glob(pat):
"""Translate a glob PATTERN to a regular expression."""
translated_parts = []
for part in _iexplode_path(pat):
translated_parts.append(_translate_glob_part(part))
os_sep_class = '[%s]' % re.escape(SEPARATORS)
res = _join_translated(translated_parts, os_sep_class)
    return '(?ms){res}\\Z'.format(res=res)  # inline flags must lead the pattern on Python 3.11+
|
Join translated glob pattern parts.
|
def _join_translated(translated_parts, os_sep_class):
"""Join translated glob pattern parts.
This is different from a simple join, as care need to be taken
to allow ** to match ZERO or more directories.
"""
res = ''
for part in translated_parts[:-1]:
if part == '.*':
# drop separator, since it is optional
# (** matches ZERO or more dirs)
res += part
else:
res += part + os_sep_class
if translated_parts[-1] == '.*':
# Final part is **
res += '.+'
# Follow stdlib/git convention of matching all sub files/directories:
res += '({os_sep_class}?.*)?'.format(os_sep_class=os_sep_class)
else:
res += translated_parts[-1]
return res
|
Translate a glob PATTERN PART to a regular expression.
|
def _translate_glob_part(pat):
"""Translate a glob PATTERN PART to a regular expression."""
# Code modified from Python 3 standard lib fnmatch:
if pat == '**':
return '.*'
i, n = 0, len(pat)
res = []
while i < n:
c = pat[i]
i = i + 1
if c == '*':
# Match anything but path separators:
res.append('[^%s]*' % SEPARATORS)
elif c == '?':
res.append('[^%s]?' % SEPARATORS)
elif c == '[':
j = i
if j < n and pat[j] == '!':
j = j + 1
if j < n and pat[j] == ']':
j = j + 1
while j < n and pat[j] != ']':
j = j + 1
if j >= n:
res.append('\\[')
else:
stuff = pat[i:j].replace('\\', '\\\\')
i = j + 1
if stuff[0] == '!':
stuff = '^' + stuff[1:]
elif stuff[0] == '^':
stuff = '\\' + stuff
res.append('[%s]' % stuff)
else:
res.append(re.escape(c))
return ''.join(res)
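A couple of translations, assuming the module's SEPARATORS constant is '/':

print(_translate_glob_part('**'))    # '.*' - a whole-part '**'
print(_translate_glob_part('*.py'))  # '[^/]*\.py' - '*' excludes separators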
|
Send DDL to truncate the specified table
|
def truncate(self, table):
"""Send DDL to truncate the specified `table`
:Parameters:
- `table`: an instance of a :py:class:`mysql2pgsql.lib.mysql_reader.MysqlReader.Table` object that represents the table to read/write.
Returns None
"""
truncate_sql, serial_key_sql = super(PostgresDbWriter, self).truncate(table)
self.execute(truncate_sql)
if serial_key_sql:
self.execute(serial_key_sql)
|
Send DDL to create the specified table
|
def write_table(self, table):
"""Send DDL to create the specified `table`
:Parameters:
- `table`: an instance of a :py:class:`mysql2pgsql.lib.mysql_reader.MysqlReader.Table` object that represents the table to read/write.
Returns None
"""
table_sql, serial_key_sql = super(PostgresDbWriter, self).write_table(table)
for sql in serial_key_sql + table_sql:
self.execute(sql)
|
Send DDL to create the specified table indexes
|
def write_indexes(self, table):
"""Send DDL to create the specified `table` indexes
:Parameters:
- `table`: an instance of a :py:class:`mysql2pgsql.lib.mysql_reader.MysqlReader.Table` object that represents the table to read/write.
Returns None
"""
index_sql = super(PostgresDbWriter, self).write_indexes(table)
for sql in index_sql:
self.execute(sql)
|
Send DDL to create the specified table triggers
|
def write_triggers(self, table):
"""Send DDL to create the specified `table` triggers
:Parameters:
- `table`: an instance of a :py:class:`mysql2pgsql.lib.mysql_reader.MysqlReader.Table` object that represents the table to read/write.
Returns None
"""
index_sql = super(PostgresDbWriter, self).write_triggers(table)
for sql in index_sql:
self.execute(sql)
|
Send DDL to create the specified table constraints
|
def write_constraints(self, table):
"""Send DDL to create the specified `table` constraints
:Parameters:
- `table`: an instance of a :py:class:`mysql2pgsql.lib.mysql_reader.MysqlReader.Table` object that represents the table to read/write.
Returns None
"""
constraint_sql = super(PostgresDbWriter, self).write_constraints(table)
for sql in constraint_sql:
self.execute(sql)
|
Write the contents of table
|
def write_contents(self, table, reader):
"""Write the contents of `table`
:Parameters:
- `table`: an instance of a :py:class:`mysql2pgsql.lib.mysql_reader.MysqlReader.Table` object that represents the table to read/write.
- `reader`: an instance of a :py:class:`mysql2pgsql.lib.mysql_reader.MysqlReader` object that allows reading from the data source.
Returns None
"""
f = self.FileObjFaker(table, reader.read(table), self.process_row, self.verbose)
self.copy_from(f, '"%s"' % table.name, ['"%s"' % c['name'] for c in table.columns])
|
Examines row data from MySQL and alters the values when necessary to be compatible with sending to PostgreSQL via the copy command
|
def process_row(self, table, row):
"""Examines row data from MySQL and alters
the values when necessary to be compatible with
sending to PostgreSQL via the copy command
"""
for index, column in enumerate(table.columns):
hash_key = hash(frozenset(column.items()))
column_type = self.column_types[hash_key] if hash_key in self.column_types else self.column_type(column)
        if row[index] is None and ('timestamp' not in column_type or not column['default']):
            row[index] = '\N'
        elif row[index] is None and column['default']:
if self.tz:
row[index] = '1970-01-01T00:00:00.000000' + self.tz_offset
else:
row[index] = '1970-01-01 00:00:00'
elif 'bit' in column_type:
row[index] = bin(ord(row[index]))[2:]
elif isinstance(row[index], (str, unicode, basestring)):
if column_type == 'bytea':
row[index] = Binary(row[index]).getquoted()[1:-8] if row[index] else row[index]
elif 'text[' in column_type:
row[index] = '{%s}' % ','.join('"%s"' % v.replace('"', r'\"') for v in row[index].split(','))
else:
row[index] = row[index].replace('\\', r'\\').replace('\n', r'\n').replace(
'\t', r'\t').replace('\r', r'\r').replace('\0', '')
elif column_type == 'boolean':
# We got here because you used a tinyint(1), if you didn't want a bool, don't use that type
row[index] = 't' if row[index] not in (None, 0) else 'f' if row[index] == 0 else row[index]
elif isinstance(row[index], (date, datetime)):
if isinstance(row[index], datetime) and self.tz:
try:
if row[index].tzinfo:
row[index] = row[index].astimezone(self.tz).isoformat()
else:
row[index] = datetime(*row[index].timetuple()[:6], tzinfo=self.tz).isoformat()
except Exception as e:
print e.message
else:
row[index] = row[index].isoformat()
elif isinstance(row[index], timedelta):
row[index] = datetime.utcfromtimestamp(_get_total_seconds(row[index])).time().isoformat()
else:
row[index] = AsIs(row[index]).getquoted()
|
Write DDL to truncate the specified table
|
def truncate(self, table):
"""Write DDL to truncate the specified `table`
:Parameters:
- `table`: an instance of a :py:class:`mysql2pgsql.lib.mysql_reader.MysqlReader.Table` object that represents the table to read/write.
Returns None
"""
truncate_sql, serial_key_sql = super(PostgresFileWriter, self).truncate(table)
self.f.write("""
-- TRUNCATE %(table_name)s;
%(truncate_sql)s
""" % {'table_name': table.name, 'truncate_sql': truncate_sql})
if serial_key_sql:
self.f.write("""
%(serial_key_sql)s
""" % {
'serial_key_sql': serial_key_sql})
|
Write DDL to create the specified table.
|
def write_table(self, table):
"""Write DDL to create the specified `table`.
:Parameters:
- `table`: an instance of a :py:class:`mysql2pgsql.lib.mysql_reader.MysqlReader.Table` object that represents the table to read/write.
Returns None
"""
table_sql, serial_key_sql = super(PostgresFileWriter, self).write_table(table)
if serial_key_sql:
self.f.write("""
%(serial_key_sql)s
""" % {
'serial_key_sql': '\n'.join(serial_key_sql)
})
self.f.write("""
-- Table: %(table_name)s
%(table_sql)s
""" % {
'table_name': table.name,
'table_sql': '\n'.join(table_sql),
})
|
Write DDL of table indexes to the output file
|
def write_indexes(self, table):
"""Write DDL of `table` indexes to the output file
:Parameters:
- `table`: an instance of a :py:class:`mysql2pgsql.lib.mysql_reader.MysqlReader.Table` object that represents the table to read/write.
Returns None
"""
self.f.write('\n'.join(super(PostgresFileWriter, self).write_indexes(table)))
|
Write DDL of table constraints to the output file
|
def write_constraints(self, table):
"""Write DDL of `table` constraints to the output file
:Parameters:
- `table`: an instance of a :py:class:`mysql2pgsql.lib.mysql_reader.MysqlReader.Table` object that represents the table to read/write.
Returns None
"""
self.f.write('\n'.join(super(PostgresFileWriter, self).write_constraints(table)))
|
Write TRIGGERs existing on table to the output file
|
def write_triggers(self, table):
"""Write TRIGGERs existing on `table` to the output file
:Parameters:
- `table`: an instance of a :py:class:`mysql2pgsql.lib.mysql_reader.MysqlReader.Table` object that represents the table to read/write.
Returns None
"""
self.f.write('\n'.join(super(PostgresFileWriter, self).write_triggers(table)))
|
Write the data contents of table to the output file.
|
def write_contents(self, table, reader):
"""Write the data contents of `table` to the output file.
:Parameters:
- `table`: an instance of a :py:class:`mysql2pgsql.lib.mysql_reader.MysqlReader.Table` object that represents the table to read/write.
- `reader`: an instance of a :py:class:`mysql2pgsql.lib.mysql_reader.MysqlReader` object that allows reading from the data source.
Returns None
"""
    # start variable optimizations
pr = self.process_row
f_write = self.f.write
verbose = self.verbose
    # end variable optimizations
f_write("""
--
-- Data for Name: %(table_name)s; Type: TABLE DATA;
--
COPY "%(table_name)s" (%(column_names)s) FROM stdin;
""" % {
'table_name': table.name,
'column_names': ', '.join(('"%s"' % col['name']) for col in table.columns)})
if verbose:
tt = time.time
start_time = tt()
prev_val_len = 0
prev_row_count = 0
for i, row in enumerate(reader.read(table), 1):
row = list(row)
pr(table, row)
try:
f_write(u'%s\n' % (u'\t'.join(row)))
except UnicodeDecodeError:
f_write(u'%s\n' % (u'\t'.join(r.decode('utf-8') for r in row)))
if verbose:
if (i % 20000) == 0:
now = tt()
elapsed = now - start_time
val = '%.2f rows/sec [%s] ' % ((i - prev_row_count) / elapsed, i)
print_row_progress('%s%s' % (("\b" * prev_val_len), val))
prev_val_len = len(val) + 3
start_time = now
prev_row_count = i
f_write("\\.\n\n")
if verbose:
print('')
|
Parse bitmap-font (.fnt) data: info/common/page header lines followed by per-char lines (id, position, size, offsets, xadvance, letter).
|
def parse_fntdata(_data, _config, _extra_data_receiver=None):
"""
info face="Haettenschweiler" size=60 bold=0 italic=0 charset="" unicode=0 stretchH=100 smooth=1 aa=1 padding=0,0,0,0 spacing=2,2
common lineHeight=64 base=53 scaleW=256 scaleH=128 pages=1 packed=0
page id=0 file="attack_num.png"
chars count=12
char id=52 x=2 y=2 width=33 height=51 xoffset=0 yoffset=5 xadvance=32 page=0 chnl=0 letter="4"
char id=48 x=37 y=2 width=29 height=50 xoffset=1 yoffset=6 xadvance=29 page=0 chnl=0 letter="0"
char id=53 x=68 y=2 width=29 height=50 xoffset=1 yoffset=6 xadvance=28 page=0 chnl=0 letter="5"
char id=57 x=99 y=2 width=28 height=50 xoffset=1 yoffset=6 xadvance=28 page=0 chnl=0 letter="9"
char id=54 x=129 y=2 width=28 height=50 xoffset=1 yoffset=6 xadvance=28 page=0 chnl=0 letter="6"
char id=56 x=159 y=2 width=28 height=50 xoffset=1 yoffset=6 xadvance=28 page=0 chnl=0 letter="8"
char id=51 x=189 y=2 width=28 height=50 xoffset=1 yoffset=6 xadvance=28 page=0 chnl=0 letter="3"
char id=50 x=219 y=2 width=28 height=49 xoffset=1 yoffset=7 xadvance=28 page=0 chnl=0 letter="2"
char id=55 x=2 y=55 width=30 height=48 xoffset=1 yoffset=8 xadvance=28 page=0 chnl=0 letter="7"
char id=49 x=34 y=55 width=20 height=48 xoffset=1 yoffset=8 xadvance=20 page=0 chnl=0 letter="1"
char id=45 x=56 y=55 width=18 height=12 xoffset=1 yoffset=36 xadvance=19 page=0 chnl=0 letter="-"
char id=32 x=76 y=55 width=0 height=0 xoffset=11 yoffset=73 xadvance=16 page=0 chnl=0 letter="space"
"""
data = {}
frame_data_list = []
parse_common_info = parse("common lineHeight={line_height:d} base={base:d} scaleW={scale_w:d} scaleH={scale_h:d} pages={pages:d} packed={packed:d}", _data[1])
parse_page_info = parse("page id={id:d} file=\"{file}\"", _data[2])
parse_char_count = parse("chars count={count:d}", _data[3])
raw_frames_data = {}
for index in xrange(0, parse_char_count["count"]):
parse_frame = parse("char id={id:d} x={x:d} y={y:d} width={width:d} height={height:d} xoffset={xoffset:d} yoffset={yoffset:d} xadvance={xadvance:d} page={page:d} chnl={chnl:d} letter=\"{letter}\"", _data[index + 4])
frame_data = {}
frame_data["name"] = "{prefix}_{id}.png".format(prefix= _config["prefix"], id=parse_frame["id"], letter=parse_frame["letter"])
frame_data["source_size"] = (parse_frame["width"], parse_frame["height"])
frame_data["rotated"] = False
frame_data["src_rect"] = (parse_frame["x"], parse_frame["y"], parse_frame["x"] + parse_frame["width"], parse_frame["y"] + parse_frame["height"])
frame_data["offset"] = (0, 0)
if parse_frame["width"] <= 0 or parse_frame["height"] <= 0:
continue
frame_data_list.append(frame_data)
parse_frame_named_data = parse_frame.named.copy()
parse_frame_named_data["texture"] = frame_data["name"]
raw_frames_data[parse_frame["id"]] = parse_frame_named_data
data["texture"] = parse_page_info["file"]
data["frames"] = frame_data_list
if _extra_data_receiver != None:
_extra_data_receiver["common"] = parse_common_info.named
_extra_data_receiver["frames"] = raw_frames_data
return data
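The function leans on the third-party parse library; a minimal standalone sketch of the per-line extraction it performs:

from parse import parse  # third-party 'parse' package

result = parse("chars count={count:d}", "chars count=12")
print(result["count"])  # 12, converted to int by the :d spec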
|
struct CCZHeader: a 4-byte 'CCZ!' signature, unsigned short compression_type (should be 0), unsigned short version (should be 2, although version 1 is also supported), unsigned int reserved, and unsigned int len (size of the uncompressed file).
|
def _pvr_head(_data):
"""
struct CCZHeader {
unsigned char sig[4]; // signature. Should be 'CCZ!' 4 bytes
unsigned short compression_type; // should 0
unsigned short version; // should be 2 (although version type==1 is also supported)
unsigned int reserved; // Reserved for users.
unsigned int len; // size of the uncompressed file
};
"""
return {
"sig": _data[:4],
"compression_type": struct.unpack("H", _data[4:6])[0],
"version": struct.unpack("H", _data[6:8])[0],
"reserved": struct.unpack("I", _data[8:12])[0],
"len": struct.unpack("I", _data[12:16])[0],
}
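A synthetic round-trip sketch in native byte order, matching the struct.unpack calls above:

import struct

# Fake 16-byte CCZ header: 'CCZ!' signature, compression type 0,
# version 2, reserved 0, uncompressed length 1024.
header = b'CCZ!' + struct.pack("HHII", 0, 2, 0, 1024)
print(_pvr_head(header))
# {'sig': b'CCZ!', 'compression_type': 0, 'version': 2,
#  'reserved': 0, 'len': 1024}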
|
Return an approximate number of queued tasks in the queue.
|
def qsize(self, extra_predicate=None):
""" Return an approximate number of queued tasks in the queue. """
count = self._query_queued('COUNT(*) AS count', extra_predicate=extra_predicate)
return count[0].count
|
Enqueue task with specified data.
|
def enqueue(self, data):
""" Enqueue task with specified data. """
jsonified_data = json.dumps(data)
with self._db_conn() as conn:
return conn.execute(
'INSERT INTO %s (created, data) VALUES (%%(created)s, %%(data)s)' % self.table_name,
created=datetime.utcnow(),
data=jsonified_data
)
|
Retrieve a task handler from the queue.
|
def start(self, block=False, timeout=None, retry_interval=0.5, extra_predicate=None):
"""
Retrieve a task handler from the queue.
If block is True, this function will block until it is able to retrieve a task.
If block is True and timeout is a number it will block for at most <timeout> seconds.
retry_interval is the maximum time in seconds between successive retries.
extra_predicate
If extra_predicate is defined, it should be a tuple of (raw_predicate, predicate_args)
raw_predicate will be prefixed by AND, and inserted into the WHERE condition in the queries.
predicate_args will be sql escaped and formatted into raw_predicate.
"""
start = time.time()
while 1:
task_handler = self._dequeue_task(extra_predicate)
if task_handler is None and block:
if timeout is not None and (time.time() - start) > timeout:
break
time.sleep(retry_interval * (random.random() + 0.1))
else:
break
return task_handler
|
This method is a good one to extend if you want to create a queue which always applies an extra predicate.
|
def _build_extra_predicate(self, extra_predicate):
""" This method is a good one to extend if you want to create a queue which always applies an extra predicate. """
if extra_predicate is None:
return ''
# if they don't have a supported format seq, wrap it for them
if not isinstance(extra_predicate[1], (list, dict, tuple)):
extra_predicate = [extra_predicate[0], (extra_predicate[1], )]
extra_predicate = database.escape_query(*extra_predicate)
return 'AND (' + extra_predicate + ')'
|
Designed to be passed as the default kwarg in simplejson.dumps. Serializes dates and datetimes to ISO strings.
|
def simplejson_datetime_serializer(obj):
"""
Designed to be passed as the default kwarg in simplejson.dumps. Serializes dates and datetimes to ISO strings.
"""
if hasattr(obj, 'isoformat'):
return obj.isoformat()
else:
raise TypeError('Object of type %s with value of %s is not JSON serializable' % (type(obj), repr(obj)))
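Usage sketch; the stdlib json module shares the same default= hook as simplejson:

import json
from datetime import datetime

payload = {'created': datetime(2020, 1, 1, 12, 30)}
print(json.dumps(payload, default=simplejson_datetime_serializer))
# {"created": "2020-01-01T12:30:00"}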
|
Closes the existing database connection and re-opens it.
|
def reconnect(self):
"""Closes the existing database connection and re-opens it."""
conn = _mysql.connect(**self._db_args)
if conn is not None:
self.close()
self._db = conn
|
Query the connection and return the rows (or affected rows if not a select query). MySQL errors will be propagated as exceptions.
|
def query(self, query, *parameters, **kwparameters):
"""
Query the connection and return the rows (or affected rows if not a
    select query). MySQL errors will be propagated as exceptions.
"""
return self._query(query, parameters, kwparameters)
|
Returns the first row returned for the given query.
|
def get(self, query, *parameters, **kwparameters):
"""Returns the first row returned for the given query."""
rows = self._query(query, parameters, kwparameters)
if not rows:
return None
elif not isinstance(rows, list):
raise MySQLError("Query is not a select query")
elif len(rows) > 1:
raise MySQLError("Multiple rows returned for Database.get() query")
else:
return rows[0]
|
Executes the given query returning the lastrowid from the query.
|
def execute(self, query, *parameters, **kwparameters):
"""Executes the given query, returning the lastrowid from the query."""
return self.execute_lastrowid(query, *parameters, **kwparameters)
|
Executes the given query returning the lastrowid from the query.
|
def execute_lastrowid(self, query, *parameters, **kwparameters):
"""Executes the given query, returning the lastrowid from the query."""
self._execute(query, parameters, kwparameters)
self._result = self._db.store_result()
return self._db.insert_id()
|
Returns a new connection to the database.
|
def get_connection(db=DATABASE):
""" Returns a new connection to the database. """
return database.connect(host=HOST, port=PORT, user=USER, password=PASSWORD, database=db)
|
Run a set of InsertWorkers and record their performance.
|
def run_benchmark():
""" Run a set of InsertWorkers and record their performance. """
stopping = threading.Event()
workers = [ InsertWorker(stopping) for _ in range(NUM_WORKERS) ]
print('Launching %d workers' % NUM_WORKERS)
[ worker.start() for worker in workers ]
time.sleep(WORKLOAD_TIME)
print('Stopping workload')
stopping.set()
[ worker.join() for worker in workers ]
with get_connection() as conn:
count = conn.get("SELECT COUNT(*) AS count FROM %s" % TABLE).count
print("%d rows inserted using %d workers" % (count, NUM_WORKERS))
print("%.1f rows per second" % (count / float(WORKLOAD_TIME)))
|
`agg` should be (host, port). Returns a live connection from the connection pool.
|
def _pool_connect(self, agg):
""" `agg` should be (host, port)
Returns a live connection from the connection pool
"""
return self._pool.connect(agg[0], agg[1], self._user, self._password, self._database)
|
Returns an aggregator connection.
|
def _connect(self):
""" Returns an aggregator connection. """
with self._lock:
if self._aggregator:
try:
return self._pool_connect(self._aggregator)
except PoolConnectionException:
self._aggregator = None
if not len(self._aggregators):
with self._pool_connect(self._primary_aggregator) as conn:
self._update_aggregator_list(conn)
conn.expire()
random.shuffle(self._aggregators)
last_exception = None
for aggregator in self._aggregators:
self.logger.debug('Attempting connection with %s:%s' % (aggregator[0], aggregator[1]))
try:
conn = self._pool_connect(aggregator)
# connection successful!
self._aggregator = aggregator
return conn
except PoolConnectionException as e:
# connection error
last_exception = e
else:
# bad news bears... try again later
self._aggregator = None
self._aggregators = []
raise last_exception
|
Used for development only
|
def lookup_by_number(errno):
""" Used for development only """
for key, val in globals().items():
if errno == val:
print(key)
|
Returns the number of connections cached by the pool.
|
def size(self):
""" Returns the number of connections cached by the pool. """
return sum(q.qsize() for q in self._connections.values()) + len(self._fairies)
|
OperationalErrors are emitted by the _mysql library for almost every error code emitted by MySQL. Because of this we verify that the error is actually a connection error before terminating the connection and firing off a PoolConnectionException.
|
def __potential_connection_failure(self, e):
""" OperationalError's are emitted by the _mysql library for
almost every error code emitted by MySQL. Because of this we
verify that the error is actually a connection error before
terminating the connection and firing off a PoolConnectionException
"""
try:
self._conn.query('SELECT 1')
except (IOError, _mysql.OperationalError):
# ok, it's actually an issue.
self.__handle_connection_failure(e)
else:
# seems ok, probably programmer error
raise _mysql.DatabaseError(*e.args)
|
Build a simple expression ready to be added onto another query.
|
def simple_expression(joiner=', ', **fields):
""" Build a simple expression ready to be added onto another query.
>>> simple_expression(joiner=' AND ', name='bob', role='admin')
"`name`=%(_QB_name)s AND `name`=%(_QB_role)s", { '_QB_name': 'bob', '_QB_role': 'admin' }
"""
expression, params = [], {}
for field_name, value in sorted(fields.items(), key=lambda kv: kv[0]):
key = '_QB_%s' % field_name
expression.append('`%s`=%%(%s)s' % (field_name, key))
params[key] = value
return joiner.join(expression), params
|
Build an update query.
|
def update(table_name, **fields):
""" Build a update query.
>>> update('foo_table', a=5, b=2)
"UPDATE `foo_table` SET `a`=%(_QB_a)s, `b`=%(_QB_b)s", { '_QB_a': 5, '_QB_b': 2 }
"""
prefix = "UPDATE `%s` SET " % table_name
sets, params = simple_expression(', ', **fields)
return prefix + sets, params
|
Notify the manager that this lock is still active.
|
def ping(self):
""" Notify the manager that this lock is still active. """
with self._db_conn() as conn:
affected_rows = conn.query('''
UPDATE %s
SET last_contact=%%s
WHERE id = %%s AND lock_hash = %%s
''' % self._manager.table_name, datetime.utcnow(), self._lock_id, self._lock_hash)
return bool(affected_rows == 1)
|
Release the lock.
|
def release(self):
""" Release the lock. """
if self.valid():
with self._db_conn() as conn:
affected_rows = conn.query('''
DELETE FROM %s
WHERE id = %%s AND lock_hash = %%s
''' % self._manager.table_name, self._lock_id, self._lock_hash)
return bool(affected_rows == 1)
else:
return False
|
Connect to the database specified
|
def connect(self, host='127.0.0.1', port=3306, user='root', password='', database=None):
""" Connect to the database specified """
if database is None:
raise exceptions.RequiresDatabase()
self._db_args = { 'host': host, 'port': port, 'user': user, 'password': password, 'database': database }
with self._db_conn() as conn:
conn.query('SELECT 1')
return self
|
Initialize the required tables in the database
|
def setup(self):
""" Initialize the required tables in the database """
with self._db_conn() as conn:
for table_defn in self._tables.values():
conn.execute(table_defn)
return self
|
Destroy the SQLStepQueue tables in the database
|
def destroy(self):
""" Destroy the SQLStepQueue tables in the database """
with self._db_conn() as conn:
for table_name in self._tables:
conn.execute('DROP TABLE IF EXISTS %s' % table_name)
return self
|
Returns True if the tables have been set up, False otherwise
|
def ready(self):
""" Returns True if the tables have been setup, False otherwise """
with self._db_conn() as conn:
tables = [row.t for row in conn.query('''
SELECT table_name AS t FROM information_schema.tables
WHERE table_schema=%s
''', self._db_args['database'])]
return all([table_name in tables for table_name in self._tables])
|
Check to see if we are still active.
|
def valid(self):
""" Check to see if we are still active. """
if self.finished is not None:
return False
with self._db_conn() as conn:
row = conn.get('''
SELECT (last_contact > %%(now)s - INTERVAL %%(ttl)s SECOND) AS valid
FROM %s
WHERE
id = %%(task_id)s
AND execution_id = %%(execution_id)s
''' % self._queue.table_name,
now=datetime.utcnow(),
ttl=self._queue.execution_ttl,
task_id=self.task_id,
execution_id=self.execution_id)
return bool(row is not None and row.valid)
|
Notify the queue that this task is still active.
|
def ping(self):
""" Notify the queue that this task is still active. """
if self.finished is not None:
raise AlreadyFinished()
with self._db_conn() as conn:
success = conn.query('''
UPDATE %s
SET
last_contact=%%(now)s,
update_count=update_count + 1
WHERE
id = %%(task_id)s
AND execution_id = %%(execution_id)s
AND last_contact > %%(now)s - INTERVAL %%(ttl)s SECOND
''' % self._queue.table_name,
now=datetime.utcnow(),
task_id=self.task_id,
execution_id=self.execution_id,
ttl=self._queue.execution_ttl)
if success != 1:
raise TaskDoesNotExist()
|
Start a step.
|
def start_step(self, step_name):
""" Start a step. """
if self.finished is not None:
raise AlreadyFinished()
step_data = self._get_step(step_name)
if step_data is not None:
if 'stop' in step_data:
raise StepAlreadyFinished()
else:
raise StepAlreadyStarted()
steps = copy.deepcopy(self.steps)
steps.append({
"start": datetime.utcnow(),
"name": step_name
})
self._save(steps=steps)
|
Stop a step.
|
def stop_step(self, step_name):
""" Stop a step. """
if self.finished is not None:
raise AlreadyFinished()
steps = copy.deepcopy(self.steps)
step_data = self._get_step(step_name, steps=steps)
if step_data is None:
raise StepNotStarted()
elif 'stop' in step_data:
raise StepAlreadyFinished()
step_data['stop'] = datetime.utcnow()
step_data['duration'] = util.timedelta_total_seconds(step_data['stop'] - step_data['start'])
self._save(steps=steps)
|
Load steps: parse all the datetime isoformat strings back into datetimes.
|
def _load_steps(self, raw_steps):
""" load steps -> basically load all the datetime isoformats into datetimes """
for step in raw_steps:
if 'start' in step:
step['start'] = parser.parse(step['start'])
if 'stop' in step:
step['stop'] = parser.parse(step['stop'])
return raw_steps
|
Disconnects from the websocket connection and joins the Thread.
|
def disconnect(self):
"""Disconnects from the websocket connection and joins the Thread.
:return:
"""
self.log.debug("disconnect(): Disconnecting from API..")
self.reconnect_required.clear()
self.disconnect_called.set()
if self.socket:
self.socket.close()
self.join(timeout=1)
|
Issues a reconnection by setting the reconnect_required event.
|
def reconnect(self):
"""Issues a reconnection by setting the reconnect_required event.
:return:
"""
# Reconnect attempt at self.reconnect_interval
self.log.debug("reconnect(): Initialzion reconnect sequence..")
self.connected.clear()
self.reconnect_required.set()
if self.socket:
self.socket.close()
|
Creates a websocket connection.
|
def _connect(self):
"""Creates a websocket connection.
:return:
"""
self.log.debug("_connect(): Initializing Connection..")
self.socket = websocket.WebSocketApp(
self.url,
on_open=self._on_open,
on_message=self._on_message,
on_error=self._on_error,
on_close=self._on_close
)
if 'ca_certs' not in self.sslopt.keys():
ssl_defaults = ssl.get_default_verify_paths()
self.sslopt['ca_certs'] = ssl_defaults.cafile
self.log.debug("_connect(): Starting Connection..")
self.socket.run_forever(sslopt=self.sslopt,
http_proxy_host=self.http_proxy_host,
http_proxy_port=self.http_proxy_port,
http_proxy_auth=self.http_proxy_auth,
http_no_proxy=self.http_no_proxy)
# stop outstanding ping/pong timers
self._stop_timers()
while self.reconnect_required.is_set():
if not self.disconnect_called.is_set():
self.log.info("Attempting to connect again in %s seconds."
% self.reconnect_interval)
self.state = "unavailable"
time.sleep(self.reconnect_interval)
# We need to set this flag since closing the socket will
# set it to False
self.socket.keep_running = True
self.socket.sock = None
self.socket.run_forever(sslopt=self.sslopt,
http_proxy_host=self.http_proxy_host,
http_proxy_port=self.http_proxy_port,
http_proxy_auth=self.http_proxy_auth,
http_no_proxy=self.http_no_proxy)
else:
break
|
Handles and passes received data to the appropriate handlers.
|
def _on_message(self, ws, message):
"""Handles and passes received data to the appropriate handlers.
:return:
"""
self._stop_timers()
raw, received_at = message, time.time()
self.log.debug("_on_message(): Received new message %s at %s",
raw, received_at)
try:
data = json.loads(raw)
except json.JSONDecodeError:
# Something wrong with this data, log and discard
return
# Handle data
if isinstance(data, dict):
# This is a system message
self._system_handler(data, received_at)
else:
# This is a list of data
if data[1] == 'hb':
self._heartbeat_handler()
else:
self._data_handler(data, received_at)
# We've received data, reset timers
self._start_timers()
|
Stops ping, pong and connection timers.
|
def _stop_timers(self):
"""Stops ping, pong and connection timers.
:return:
"""
if self.ping_timer:
self.ping_timer.cancel()
if self.connection_timer:
self.connection_timer.cancel()
if self.pong_timer:
self.pong_timer.cancel()
self.log.debug("_stop_timers(): Timers stopped.")
|
Sends a ping message to the API and starts pong timers.
|
def send_ping(self):
"""Sends a ping message to the API and starts pong timers.
:return:
"""
self.log.debug("send_ping(): Sending ping to API..")
self.socket.send(json.dumps({'event': 'ping'}))
self.pong_timer = Timer(self.pong_timeout, self._check_pong)
self.pong_timer.start()
|
Checks if a Pong message was received.
|
def _check_pong(self):
"""Checks if a Pong message was received.
:return:
"""
self.pong_timer.cancel()
if self.pong_received:
self.log.debug("_check_pong(): Pong received in time.")
self.pong_received = False
else:
# reconnect
self.log.debug("_check_pong(): Pong not received in time."
"Issuing reconnect..")
self.reconnect()
|
Sends the given Payload to the API via the websocket connection.
|
def send(self, api_key=None, secret=None, list_data=None, auth=False, **kwargs):
"""Sends the given Payload to the API via the websocket connection.
    :param kwargs: payload parameters as key=value pairs
:return:
"""
if auth:
nonce = str(int(time.time() * 10000000))
auth_string = 'AUTH' + nonce
auth_sig = hmac.new(secret.encode(), auth_string.encode(),
hashlib.sha384).hexdigest()
payload = {'event': 'auth', 'apiKey': api_key, 'authSig': auth_sig,
'authPayload': auth_string, 'authNonce': nonce}
payload = json.dumps(payload)
elif list_data:
payload = json.dumps(list_data)
else:
payload = json.dumps(kwargs)
self.log.debug("send(): Sending payload to API: %s", payload)
try:
self.socket.send(payload)
except websocket.WebSocketConnectionClosedException:
self.log.error("send(): Did not send out payload %s - client not connected. ", kwargs)
|
Passes data up to the client via a Queue().
|
def pass_to_client(self, event, data, *args):
"""Passes data up to the client via a Queue().
:param event:
:param data:
:param args:
:return:
"""
self.q.put((event, data, *args))
|
Unpauses the connection.
|
def _unpause(self):
"""Unpauses the connection.
Send a message up to client that he should re-subscribe to all
channels.
:return:
"""
self.log.debug("_unpause(): Clearing paused() Flag!")
self.paused.clear()
self.log.debug("_unpause(): Re-subscribing softly..")
self._resubscribe(soft=True)
|
Distributes system messages to the appropriate handler.
|
def _system_handler(self, data, ts):
"""Distributes system messages to the appropriate handler.
System messages include everything that arrives as a dict,
or a list containing a heartbeat.
:param data:
:param ts:
:return:
"""
self.log.debug("_system_handler(): Received a system message: %s", data)
# Unpack the data
event = data.pop('event')
if event == 'pong':
self.log.debug("_system_handler(): Distributing %s to _pong_handler..",
data)
self._pong_handler()
elif event == 'info':
self.log.debug("_system_handler(): Distributing %s to _info_handler..",
data)
self._info_handler(data)
elif event == 'error':
self.log.debug("_system_handler(): Distributing %s to _error_handler..",
data)
self._error_handler(data)
elif event in ('subscribed', 'unsubscribed', 'conf', 'auth', 'unauth'):
self.log.debug("_system_handler(): Distributing %s to "
"_response_handler..", data)
self._response_handler(event, data, ts)
else:
self.log.error("Unhandled event: %s, data: %s", event, data)
|
Handles responses to (un)subscribe and conf commands.
|
def _response_handler(self, event, data, ts):
"""Handles responses to (un)subscribe and conf commands.
Passes data up to client.
:param data:
:param ts:
:return:
"""
self.log.debug("_response_handler(): Passing %s to client..", data)
self.pass_to_client(event, data, ts)
|
Handles INFO messages from the API and issues relevant actions.
|
def _info_handler(self, data):
"""
    Handles INFO messages from the API and issues relevant actions.
:param data:
:param ts:
"""
def raise_exception():
"""Log info code as error and raise a ValueError."""
self.log.error("%s: %s", data['code'], info_message[data['code']])
raise ValueError("%s: %s" % (data['code'], info_message[data['code']]))
if 'code' not in data and 'version' in data:
self.log.info('Initialized Client on API Version %s', data['version'])
return
info_message = {20000: 'Invalid User given! Please make sure the given ID is correct!',
20051: 'Stop/Restart websocket server '
'(please try to reconnect)',
20060: 'Refreshing data from the trading engine; '
                           'please pause any activity.',
20061: 'Done refreshing data from the trading engine.'
' Re-subscription advised.'}
codes = {20051: self.reconnect, 20060: self._pause,
20061: self._unpause}
if 'version' in data:
self.log.info("API version: %i", data['version'])
return
try:
self.log.info(info_message[data['code']])
codes[data['code']]()
except KeyError as e:
self.log.exception(e)
self.log.error("Unknown Info code %s!", data['code'])
raise
|
Handle Error messages and log them accordingly.
|
def _error_handler(self, data):
"""
Handle Error messages and log them accordingly.
:param data:
:param ts:
"""
errors = {10000: 'Unknown event',
10001: 'Generic error',
10008: 'Concurrency error',
10020: 'Request parameters error',
10050: 'Configuration setup failed',
10100: 'Failed authentication',
10111: 'Error in authentication request payload',
10112: 'Error in authentication request signature',
10113: 'Error in authentication request encryption',
10114: 'Error in authentication request nonce',
10200: 'Error in un-authentication request',
10300: 'Subscription Failed (generic)',
10301: 'Already Subscribed',
10302: 'Unknown channel',
10400: 'Subscription Failed (generic)',
10401: 'Not subscribed',
11000: 'Not ready, try again later',
20000: 'User is invalid!',
20051: 'Websocket server stopping',
20060: 'Websocket server resyncing',
20061: 'Websocket server resync complete'
}
try:
self.log.error(errors[data['code']])
except KeyError:
self.log.error("Received unknown error Code in message %s! "
"Reconnecting..", data)
|
Handles data messages by passing them up to the client.
|
def _data_handler(self, data, ts):
"""Handles data messages by passing them up to the client.
:param data:
:param ts:
:return:
"""
# Pass the data up to the Client
self.log.debug("_data_handler(): Passing %s to client..",
data)
self.pass_to_client('data', data, ts)
|
Resubscribes to all channels found in self.channel_configs.
|
def _resubscribe(self, soft=False):
"""Resubscribes to all channels found in self.channel_configs.
:param soft: if True, unsubscribes first.
:return: None
"""
# Restore non-default Bitfinex websocket configuration
if self.bitfinex_config:
self.send(**self.bitfinex_config)
q_list = []
while True:
try:
identifier, q = self.channel_configs.popitem(last=True if soft else False)
except KeyError:
break
q_list.append((identifier, q.copy()))
if identifier == 'auth':
self.send(**q, auth=True)
continue
if soft:
q['event'] = 'unsubscribe'
self.send(**q)
# Resubscribe for soft start.
if soft:
for identifier, q in reversed(q_list):
self.channel_configs[identifier] = q
self.send(**q)
else:
for identifier, q in q_list:
self.channel_configs[identifier] = q
|
Set sentinel for run() method and join thread.
|
def join(self, timeout=None):
"""Set sentinel for run() method and join thread.
:param timeout:
:return:
"""
self._stopped.set()
super(QueueProcessor, self).join(timeout=timeout)
|
Main routine.
|
def run(self):
"""Main routine.
:return:
"""
while not self._stopped.is_set():
try:
message = self.q.get(timeout=0.1)
except Empty:
continue
dtype, data, ts = message
if dtype in ('subscribed', 'unsubscribed', 'conf', 'auth', 'unauth'):
try:
self._response_handlers[dtype](dtype, data, ts)
except KeyError:
self.log.error("Dtype '%s' does not have a response "
"handler! (%s)", dtype, message)
elif dtype == 'data':
try:
channel_id = data[0]
if channel_id != 0:
# Get channel type associated with this data to the
# associated data type (from 'data' to
# 'book', 'ticker' or similar
channel_type, *_ = self.channel_directory[channel_id]
# Run the associated data handler for this channel type.
self._data_handlers[channel_type](channel_type, data, ts)
# Update time stamps.
self.update_timestamps(channel_id, ts)
else:
# This is data from auth channel, call handler
self._handle_account(data=data, ts=ts)
except KeyError:
self.log.error("Channel ID does not have a data handler! %s",
message)
else:
self.log.error("Unknown dtype on queue! %s", message)
continue
|
Handles responses to subscribe() commands.
|
def _handle_subscribed(self, dtype, data, ts,):
"""Handles responses to subscribe() commands.
Registers a channel id with the client and assigns a data handler to it.
:param dtype:
:param data:
:param ts:
:return:
"""
self.log.debug("_handle_subscribed: %s - %s - %s", dtype, data, ts)
channel_name = data.pop('channel')
channel_id = data.pop('chanId')
config = data
if 'pair' in config:
symbol = config['pair']
if symbol.startswith('t'):
symbol = symbol[1:]
elif 'symbol' in config:
symbol = config['symbol']
if symbol.startswith('t'):
symbol = symbol[1:]
elif 'key' in config:
symbol = config['key'].split(':')[2][1:] #layout type:interval:tPair
else:
symbol = None
if 'prec' in config and config['prec'].startswith('R'):
channel_name = 'raw_' + channel_name
self.channel_handlers[channel_id] = self._data_handlers[channel_name]
# Create a channel_name, symbol tuple to identify channels of same type
if 'key' in config:
identifier = (channel_name, symbol, config['key'].split(':')[1])
else:
identifier = (channel_name, symbol)
self.channel_handlers[channel_id] = identifier
self.channel_directory[identifier] = channel_id
self.channel_directory[channel_id] = identifier
self.log.info("Subscription succesful for channel %s", identifier)
|
Handles responses to unsubscribe() commands.
|
def _handle_unsubscribed(self, dtype, data, ts):
"""Handles responses to unsubscribe() commands.
Removes a channel id from the client.
:param dtype:
:param data:
:param ts:
:return:
"""
self.log.debug("_handle_unsubscribed: %s - %s - %s", dtype, data, ts)
channel_id = data.pop('chanId')
# Unregister the channel from all internal attributes
chan_identifier = self.channel_directory.pop(channel_id)
self.channel_directory.pop(chan_identifier)
self.channel_handlers.pop(channel_id)
self.last_update.pop(channel_id)
self.log.info("Successfully unsubscribed from %s", chan_identifier)
|
Handles authentication responses.
|
def _handle_auth(self, dtype, data, ts):
"""Handles authentication responses.
:param dtype:
:param data:
:param ts:
:return:
"""
# Contains keys status, chanId, userId, caps
if dtype == 'unauth':
raise NotImplementedError
channel_id = data.pop('chanId')
user_id = data.pop('userId')
identifier = ('auth', user_id)
self.channel_handlers[identifier] = channel_id
self.channel_directory[identifier] = channel_id
self.channel_directory[channel_id] = identifier
|
Handles configuration messages.
|
def _handle_conf(self, dtype, data, ts):
"""Handles configuration messages.
:param dtype:
:param data:
:param ts:
:return:
"""
self.log.debug("_handle_conf: %s - %s - %s", dtype, data, ts)
self.log.info("Configuration accepted: %s", dtype)
return
|
Updates the timestamp for the given channel id.
|
def update_timestamps(self, chan_id, ts):
"""Updates the timestamp for the given channel id.
:param chan_id:
:param ts:
:return:
"""
try:
self.last_update[chan_id] = ts
except KeyError:
self.log.warning("Attempted ts update of channel %s, but channel "
"not present anymore.",
self.channel_directory[chan_id])
|
Handles Account related data.
|
def _handle_account(self, data, ts):
""" Handles Account related data.
translation table for channel names:
Data Channels
os - Orders
hos - Historical Orders
ps - Positions
hts - Trades (snapshot)
te - Trade Event
tu - Trade Update
ws - Wallets
bu - Balance Info
miu - Margin Info
fiu - Funding Info
fos - Offers
hfos - Historical Offers
fcs - Credits
hfcs - Historical Credits
fls - Loans
hfls - Historical Loans
htfs - Funding Trades
n - Notifications (WIP)
:param dtype:
:param data:
:param ts:
:return:
"""
# channel_short, data
chan_id, channel_short_name, *data = data
entry = (channel_short_name, data, ts)
self.account.put(entry)
|
Adds received ticker data to self.tickers dict, filed under its channel id.
|
def _handle_ticker(self, dtype, data, ts):
"""Adds received ticker data to self.tickers dict, filed under its channel
id.
:param dtype:
:param data:
:param ts:
:return:
"""
self.log.debug("_handle_ticker: %s - %s - %s", dtype, data, ts)
channel_id, *data = data
channel_identifier = self.channel_directory[channel_id]
entry = (data, ts)
self.tickers[channel_identifier].put(entry)
|
Updates the order book stored in self.books[chan_id].
|
def _handle_book(self, dtype, data, ts):
"""Updates the order book stored in self.books[chan_id].
:param dtype:
:param data:
:param ts:
:return:
"""
self.log.debug("_handle_book: %s - %s - %s", dtype, data, ts)
channel_id, *data = data
log.debug("ts: %s\tchan_id: %s\tdata: %s", ts, channel_id, data)
channel_identifier = self.channel_directory[channel_id]
entry = (data, ts)
self.books[channel_identifier].put(entry)
|
Updates the raw order books stored in self.raw_books[chan_id].
|
def _handle_raw_book(self, dtype, data, ts):
"""Updates the raw order books stored in self.raw_books[chan_id].
:param dtype:
:param data:
:param ts:
:return:
"""
self.log.debug("_handle_raw_book: %s - %s - %s", dtype, data, ts)
channel_id, *data = data
channel_identifier = self.channel_directory[channel_id]
entry = (data, ts)
self.raw_books[channel_identifier].put(entry)
|
Files trades in self._trades[chan_id].
|
def _handle_trades(self, dtype, data, ts):
"""Files trades in self._trades[chan_id].
:param dtype:
:param data:
:param ts:
:return:
"""
self.log.debug("_handle_trades: %s - %s - %s", dtype, data, ts)
channel_id, *data = data
channel_identifier = self.channel_directory[channel_id]
entry = (data, ts)
self.trades[channel_identifier].put(entry)
|
Stores OHLC data received via wss in self.candles[chan_id].
|
def _handle_candles(self, dtype, data, ts):
"""Stores OHLC data received via wss in self.candles[chan_id].
:param dtype:
:param data:
:param ts:
:return:
"""
self.log.debug("_handle_candles: %s - %s - %s", dtype, data, ts)
channel_id, *data = data
channel_identifier = self.channel_directory[channel_id]
entry = (data, ts)
self.candles[channel_identifier].put(entry)
|
Reset the client.
|
def reset(self):
"""Reset the client.
:return:
"""
self.conn.reconnect()
while not self.conn.connected.is_set():
log.info("reset(): Waiting for connection to be set up..")
time.sleep(1)
for key in self.channel_configs:
self.conn.send(**self.channel_configs[key])
|
Return a queue containing all received candles data.
|
def candles(self, pair, timeframe=None):
"""Return a queue containing all received candles data.
:param pair: str, Symbol pair to request data for
:param timeframe: str
:return: Queue()
"""
timeframe = '1m' if not timeframe else timeframe
key = ('candles', pair, timeframe)
return self.queue_processor.candles[key]
|
Send configuration to websocket server
|
def config(self, decimals_as_strings=True, ts_as_dates=False,
sequencing=False, ts=False, **kwargs):
"""Send configuration to websocket server
:param decimals_as_strings: bool, turn on/off decimals as strings
:param ts_as_dates: bool, decide to request timestamps as dates instead
:param sequencing: bool, turn on sequencing
:param ts: bool, request the timestamp to be appended to every array
sent by the server
:param kwargs:
:return:
"""
flags = 0
if decimals_as_strings:
flags += 8
if ts_as_dates:
flags += 32
if ts:
flags += 32768
if sequencing:
flags += 65536
q = {'event': 'conf', 'flags': flags}
q.update(kwargs)
self.conn.bitfinex_config = q
self.conn.send(**q)
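The flag values correspond to the Bitfinex v2 conf bitmask (commonly documented as DEC_S=8, TIME_S=32, TIMESTAMP=32768, SEQ_ALL=65536); a quick worked sum:

# Each option is an independent bit, so adding them equals OR-ing them:
DEC_S, TIME_S, TIMESTAMP, SEQ_ALL = 8, 32, 32768, 65536
flags = DEC_S + TIMESTAMP  # decimals as strings + appended timestamps
print(flags, flags == (DEC_S | TIMESTAMP))  # 32776 True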
|
Subscribe to the passed pair's ticker channel.
|
def subscribe_to_ticker(self, pair, **kwargs):
"""Subscribe to the passed pair's ticker channel.
:param pair: str, Symbol pair to request data for
:param kwargs:
:return:
"""
identifier = ('ticker', pair)
self._subscribe('ticker', identifier, symbol=pair, **kwargs)
|