| INSTRUCTION | RESPONSE |
|---|---|
Convert input into a QuantumChannel subclass object or Operator object
|
def _init_transformer(cls, data):
"""Convert input into a QuantumChannel subclass object or Operator object"""
# This handles common conversion for all QuantumChannel subclasses.
# If the input is already a QuantumChannel subclass it will return
# the original object
if isinstance(data, QuantumChannel):
return data
if hasattr(data, 'to_quantumchannel'):
# If the data object is not a QuantumChannel it will give
# preference to a 'to_quantumchannel' attribute that allows
# an arbitrary object to define its own conversion to any
# quantum channel subclass.
        return data.to_quantumchannel()
if hasattr(data, 'to_channel'):
        # TODO: this 'to_channel' method is the same case as the above
        # but is used by the current version of Aer. It should be removed
        # once Aer is updated to use `to_quantumchannel`
        # instead of `to_channel`.
return data.to_channel()
# Finally if the input is not a QuantumChannel and doesn't have a
# 'to_quantumchannel' conversion method we try and initialize it as a
# regular matrix Operator which can be converted into a QuantumChannel.
return Operator(data)
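
A minimal sketch of the duck-typed conversion path above, assuming a user-defined class (the name MyNoiseModel below is hypothetical) that exposes to_quantumchannel; the helper then returns whatever that method produces instead of falling back to Operator(data).

import numpy as np
from qiskit.quantum_info import Kraus  # any QuantumChannel subclass works here

class MyNoiseModel:
    """Hypothetical user object that defines its own channel conversion."""
    def to_quantumchannel(self):
        # trivial identity channel expressed as a single Kraus operator
        return Kraus([np.eye(2)])

channel = _init_transformer(Kraus, MyNoiseModel())
print(type(channel).__name__)  # -> 'Kraus'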
|
If dag is mapped and the direction is correct the property is_direction_mapped is set to True (or to False otherwise).
|
def run(self, dag):
"""
If `dag` is mapped and the direction is correct the property
`is_direction_mapped` is set to True (or to False otherwise).
Args:
dag (DAGCircuit): DAG to check.
"""
if self.layout is None:
if self.property_set["layout"]:
self.layout = self.property_set["layout"]
else:
self.layout = Layout.generate_trivial_layout(*dag.qregs.values())
self.property_set['is_direction_mapped'] = True
edges = self.coupling_map.get_edges()
for gate in dag.twoQ_gates():
physical_q0 = self.layout[gate.qargs[0]]
physical_q1 = self.layout[gate.qargs[1]]
if isinstance(gate.op, (CXBase, CnotGate)) and (
physical_q0, physical_q1) not in edges:
self.property_set['is_direction_mapped'] = False
return
|
Create Graphene Enum for sorting a SQLAlchemy class query
|
def sort_enum_for_model(cls, name=None, symbol_name=_symbol_name):
"""Create Graphene Enum for sorting a SQLAlchemy class query
Parameters
- cls : Sqlalchemy model class
Model used to create the sort enumerator
- name : str, optional, default None
Name to use for the enumerator. If not provided it will be set to `cls.__name__ + 'SortEnum'`
- symbol_name : function, optional, default `_symbol_name`
Function which takes the column name and a boolean indicating if the sort direction is ascending,
and returns the symbol name for the current column and sort direction.
The default function will create, for a column named 'foo', the symbols 'foo_asc' and 'foo_desc'
Returns
- Enum
The Graphene enumerator
"""
enum, _ = _sort_enum_for_model(cls, name, symbol_name)
return enum
|
Returns a Graphene argument for the sort field that accepts a list of sorting directions for a model. If has_default is True (the default) it will sort the result by the primary key(s).
|
def sort_argument_for_model(cls, has_default=True):
"""Returns a Graphene argument for the sort field that accepts a list of sorting directions for a model.
If `has_default` is True (the default) it will sort the result by the primary key(s)
"""
enum, default = _sort_enum_for_model(cls)
if not has_default:
default = None
return Argument(List(enum), default_value=default)
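
A hedged usage sketch: the Argument returned above is attached to a Graphene list field, and the resolver receives the selected enum values through the sort keyword. UserModel and UserType are hypothetical names, and applying the values via order_by assumes the enum values carry SQLAlchemy ordering expressions.

import graphene

class Query(graphene.ObjectType):
    # UserType / UserModel are placeholders for a real Graphene type and SQLAlchemy model
    users = graphene.List(UserType, sort=sort_argument_for_model(UserModel))

    def resolve_users(self, info, sort=None):
        query = UserModel.query
        if sort is not None:
            # assumption: each enum value is usable directly as an order_by() clause
            query = query.order_by(*sort)
        return query.all()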
|
Find all substrings of the given string which represent date and/or time and parse them.
|
def search_dates(text, languages=None, settings=None, add_detected_language=False):
"""Find all substrings of the given string which represent date and/or time and parse them.
:param text:
A string in a natural language which may contain date and/or time expressions.
:type text: str|unicode
:param languages:
        A list of two-letter language codes, e.g. ['en', 'es']. If languages are given, it will
not attempt to detect the language.
:type languages: list
:param settings:
Configure customized behavior using settings defined in :mod:`dateparser.conf.Settings`.
:type settings: dict
:param add_detected_language:
Indicates if we want the detected language returned in the tuple.
:type add_detected_language: bool
:return: Returns list of tuples containing:
substrings representing date and/or time, corresponding :mod:`datetime.datetime`
object and detected language if *add_detected_language* is True.
Returns None if no dates that can be parsed are found.
:rtype: list
:raises: ValueError - Unknown Language
>>> from dateparser.search import search_dates
>>> search_dates('The first artificial Earth satellite was launched on 4 October 1957.')
[('on 4 October 1957', datetime.datetime(1957, 10, 4, 0, 0))]
>>> search_dates('The first artificial Earth satellite was launched on 4 October 1957.', add_detected_language=True)
[('on 4 October 1957', datetime.datetime(1957, 10, 4, 0, 0), 'en')]
>>> search_dates("The client arrived to the office for the first time in March 3rd, 2004 and got serviced, after a couple of months, on May 6th 2004, the customer returned indicating a defect on the part")
[('in March 3rd, 2004 and', datetime.datetime(2004, 3, 3, 0, 0)),
('on May 6th 2004', datetime.datetime(2004, 5, 6, 0, 0))]
"""
result = _search_with_detection.search_dates(
text=text, languages=languages, settings=settings
)
language, dates = result.get('Language'), result.get('Dates')
if dates:
if add_detected_language:
dates = [date + (language, ) for date in dates]
return dates
|
Monkey patching _strptime to avoid problems related to non-English locale changes on the system.
|
def patch_strptime():
"""Monkey patching _strptime to avoid problems related with non-english
locale changes on the system.
For example, if system's locale is set to fr_FR. Parser won't recognize
any date since all languages are translated to english dates.
"""
_strptime = imp.load_module(
'strptime_patched', *imp.find_module('_strptime')
)
_calendar = imp.load_module(
        'calendar_patched', *imp.find_module('calendar')
)
_strptime._getlang = lambda: ('en_US', 'UTF-8')
_strptime.calendar = _calendar
_strptime.calendar.day_abbr = [
'mon', 'tue', 'wed', 'thu', 'fri', 'sat', 'sun'
]
_strptime.calendar.day_name = [
'monday', 'tuesday', 'wednesday', 'thursday',
'friday', 'saturday', 'sunday'
]
_strptime.calendar.month_abbr = [
'', 'jan', 'feb', 'mar', 'apr', 'may', 'jun',
'jul', 'aug', 'sep', 'oct', 'nov', 'dec'
]
_strptime.calendar.month_name = [
'', 'january', 'february', 'march', 'april',
'may', 'june', 'july', 'august', 'september',
'october', 'november', 'december'
]
return _strptime._strptime_time
|
Get an ordered mapping with locale codes as keys and corresponding locale instances as values.
|
def get_locale_map(self, languages=None, locales=None, region=None,
use_given_order=False, allow_conflicting_locales=False):
"""
Get an ordered mapping with locale codes as keys
and corresponding locale instances as values.
:param languages:
A list of language codes, e.g. ['en', 'es', 'zh-Hant'].
If locales are not given, languages and region are
used to construct locales to load.
:type languages: list
:param locales:
A list of codes of locales which are to be loaded,
e.g. ['fr-PF', 'qu-EC', 'af-NA']
:type locales: list
:param region:
A region code, e.g. 'IN', '001', 'NE'.
If locales are not given, languages and region are
used to construct locales to load.
:type region: str|unicode
:param use_given_order:
If True, the returned mapping is ordered in the order locales are given.
        :type use_given_order: bool
:param allow_conflicting_locales:
if True, locales with same language and different region can be loaded.
:type allow_conflicting_locales: bool
:return: ordered locale code to locale instance mapping
"""
return OrderedDict(self._load_data(
languages=languages, locales=locales, region=region, use_given_order=use_given_order,
allow_conflicting_locales=allow_conflicting_locales))
|
Yield locale instances.
|
def get_locales(self, languages=None, locales=None, region=None,
use_given_order=False, allow_conflicting_locales=False):
"""
Yield locale instances.
:param languages:
A list of language codes, e.g. ['en', 'es', 'zh-Hant'].
If locales are not given, languages and region are
used to construct locales to load.
:type languages: list
:param locales:
A list of codes of locales which are to be loaded,
e.g. ['fr-PF', 'qu-EC', 'af-NA']
:type locales: list
:param region:
A region code, e.g. 'IN', '001', 'NE'.
If locales are not given, languages and region are
used to construct locales to load.
:type region: str|unicode
:param use_given_order:
If True, the returned mapping is ordered in the order locales are given.
        :type use_given_order: bool
:param allow_conflicting_locales:
if True, locales with same language and different region can be loaded.
:type allow_conflicting_locales: bool
:yield: locale instances
"""
for _, locale in self._load_data(
languages=languages, locales=locales, region=region,
use_given_order=use_given_order,
allow_conflicting_locales=allow_conflicting_locales):
yield locale
|
Check if tokens are valid tokens for the locale.
|
def are_tokens_valid(self, tokens):
"""
Check if tokens are valid tokens for the locale.
:param tokens:
a list of string or unicode tokens.
:type tokens: list
:return: True if tokens are valid, False otherwise.
"""
match_relative_regex = self._get_match_relative_regex_cache()
for token in tokens:
if any([match_relative_regex.match(token),
token in self, token.isdigit()]):
continue
else:
return False
else:
return True
|
Split the date string using translations in locale info.
|
def split(self, string, keep_formatting=False):
"""
Split the date string using translations in locale info.
:param string:
            Date string to be split.
:type string:
str|unicode
:param keep_formatting:
If True, retain formatting of the date string.
:type keep_formatting: bool
:return: A list of string tokens formed after splitting the date string.
"""
if not string:
return string
split_relative_regex = self._get_split_relative_regex_cache()
match_relative_regex = self._get_match_relative_regex_cache()
tokens = split_relative_regex.split(string)
for i, token in enumerate(tokens):
if match_relative_regex.match(token):
tokens[i] = [token]
continue
tokens[i] = self._split_by_known_words(token, keep_formatting)
return list(filter(bool, chain(*tokens)))
|
Find all substrings of the given string which represent date and/or time and parse them.
|
def search_dates(self, text, languages=None, settings=None):
"""
Find all substrings of the given string which represent date and/or time and parse them.
:param text:
A string in a natural language which may contain date and/or time expressions.
:type text: str|unicode
:param languages:
            A list of two-letter language codes, e.g. ['en', 'es']. If languages are given, it will not attempt
to detect the language.
:type languages: list
:param settings:
Configure customized behavior using settings defined in :mod:`dateparser.conf.Settings`.
:type settings: dict
:return: a dict mapping keys to two letter language code and a list of tuples of pairs:
substring representing date expressions and corresponding :mod:`datetime.datetime` object.
For example:
{'Language': 'en', 'Dates': [('on 4 October 1957', datetime.datetime(1957, 10, 4, 0, 0))]}
If language of the string isn't recognised returns:
{'Language': None, 'Dates': None}
:raises: ValueError - Unknown Language
"""
language_shortname = self.detect_language(text=text, languages=languages)
if not language_shortname:
return {'Language': None, 'Dates': None}
return {'Language': language_shortname, 'Dates': self.search.search_parse(language_shortname, text,
settings=settings)}
|
Parse date and time from given date string.
|
def parse(date_string, date_formats=None, languages=None, locales=None, region=None, settings=None):
"""Parse date and time from given date string.
:param date_string:
A string representing date and/or time in a recognizably valid format.
:type date_string: str|unicode
:param date_formats:
A list of format strings using directives as given
`here <https://docs.python.org/2/library/datetime.html#strftime-and-strptime-behavior>`_.
The parser applies formats one by one, taking into account the detected languages/locales.
:type date_formats: list
:param languages:
A list of language codes, e.g. ['en', 'es', 'zh-Hant'].
If locales are not given, languages and region are used to construct locales for translation.
:type languages: list
:param locales:
A list of locale codes, e.g. ['fr-PF', 'qu-EC', 'af-NA'].
The parser uses locales to translate date string.
:type locales: list
:param region:
A region code, e.g. 'IN', '001', 'NE'.
If locales are not given, languages and region are used to construct locales for translation.
:type region: str|unicode
:param settings:
Configure customized behavior using settings defined in :mod:`dateparser.conf.Settings`.
:type settings: dict
:return: Returns :class:`datetime <datetime.datetime>` representing parsed date if successful, else returns None
:rtype: :class:`datetime <datetime.datetime>`.
:raises: ValueError - Unknown Language
"""
parser = _default_parser
if any([languages, locales, region, not settings._default]):
parser = DateDataParser(languages=languages, locales=locales,
region=region, settings=settings)
data = parser.get_date_data(date_string, date_formats)
if data:
return data['date_obj']
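
For orientation, the public entry point above is typically called directly; the examples below reflect documented behavior (relative expressions resolve against the current time, so their exact output varies):

import dateparser

dateparser.parse('12/12/12')                      # datetime.datetime(2012, 12, 12, 0, 0)
dateparser.parse('1 hour ago')                    # relative to now
dateparser.parse('2015-06-01', languages=['en'])  # skip language detection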
|
Attempts to parse the time part of date strings like '1 day ago, 2 PM'.
|
def _parse_time(self, date_string, settings):
"""Attemps to parse time part of date strings like '1 day ago, 2 PM' """
date_string = PATTERN.sub('', date_string)
date_string = re.sub(r'\b(?:ago|in)\b', '', date_string)
try:
return time_parser(date_string)
        except Exception:
            # swallow parse errors; falling through returns None (no time found)
            pass
|
Check if the locale is applicable to translate date string.
|
def is_applicable(self, date_string, strip_timezone=False, settings=None):
"""
Check if the locale is applicable to translate date string.
:param date_string:
A string representing date and/or time in a recognizably valid format.
:type date_string: str|unicode
:param strip_timezone:
If True, timezone is stripped from date string.
:type strip_timezone: bool
:return: boolean value representing if the locale is applicable for the date string or not.
"""
if strip_timezone:
date_string, _ = pop_tz_offset_from_string(date_string, as_offset=False)
date_string = self._translate_numerals(date_string)
if settings.NORMALIZE:
date_string = normalize_unicode(date_string)
date_string = self._simplify(date_string, settings=settings)
dictionary = self._get_dictionary(settings)
date_tokens = dictionary.split(date_string)
return dictionary.are_tokens_valid(date_tokens)
|
Translate the date string to its English equivalent.
|
def translate(self, date_string, keep_formatting=False, settings=None):
"""
Translate the date string to its English equivalent.
:param date_string:
A string representing date and/or time in a recognizably valid format.
:type date_string: str|unicode
:param keep_formatting:
If True, retain formatting of the date string after translation.
:type keep_formatting: bool
:return: translated date string.
"""
date_string = self._translate_numerals(date_string)
if settings.NORMALIZE:
date_string = normalize_unicode(date_string)
date_string = self._simplify(date_string, settings=settings)
dictionary = self._get_dictionary(settings)
date_string_tokens = dictionary.split(date_string, keep_formatting)
relative_translations = self._get_relative_translations(settings=settings)
for i, word in enumerate(date_string_tokens):
word = word.lower()
for pattern, replacement in relative_translations.items():
if pattern.match(word):
date_string_tokens[i] = pattern.sub(replacement, word)
else:
if word in dictionary:
date_string_tokens[i] = dictionary[word] or ''
if "in" in date_string_tokens:
date_string_tokens = self._clear_future_words(date_string_tokens)
return self._join(list(filter(bool, date_string_tokens)),
separator="" if keep_formatting else " ", settings=settings)
|
Parse with formats and return a dictionary with 'period' and 'date_obj'.
|
def parse_with_formats(date_string, date_formats, settings):
""" Parse with formats and return a dictionary with 'period' and 'obj_date'.
:returns: :class:`datetime.datetime`, dict or None
"""
period = 'day'
for date_format in date_formats:
try:
date_obj = datetime.strptime(date_string, date_format)
except ValueError:
continue
else:
# If format does not include the day, use last day of the month
# instead of first, because the first is usually out of range.
if '%d' not in date_format:
period = 'month'
date_obj = date_obj.replace(
day=get_last_day_of_month(date_obj.year, date_obj.month))
if not ('%y' in date_format or '%Y' in date_format):
today = datetime.today()
date_obj = date_obj.replace(year=today.year)
date_obj = apply_timezone_from_settings(date_obj, settings)
return {'date_obj': date_obj, 'period': period}
else:
return {'date_obj': None, 'period': period}
|
Parse string representing date and/or time in recognizable localized formats. Supports parsing multiple languages and timezones.
|
def get_date_data(self, date_string, date_formats=None):
"""
Parse string representing date and/or time in recognizable localized formats.
Supports parsing multiple languages and timezones.
:param date_string:
A string representing date and/or time in a recognizably valid format.
:type date_string: str|unicode
:param date_formats:
A list of format strings using directives as given
`here <https://docs.python.org/2/library/datetime.html#strftime-and-strptime-behavior>`_.
The parser applies formats one by one, taking into account the detected languages.
:type date_formats: list
:return: a dict mapping keys to :mod:`datetime.datetime` object and *period*. For example:
{'date_obj': datetime.datetime(2015, 6, 1, 0, 0), 'period': u'day'}
:raises: ValueError - Unknown Language
.. note:: *Period* values can be a 'day' (default), 'week', 'month', 'year'.
*Period* represents the granularity of date parsed from the given string.
In the example below, since no day information is present, the day is assumed to be current
day ``16`` from *current date* (which is June 16, 2015, at the moment of writing this).
Hence, the level of precision is ``month``:
>>> DateDataParser().get_date_data(u'March 2015')
{'date_obj': datetime.datetime(2015, 3, 16, 0, 0), 'period': u'month'}
Similarly, for date strings with no day and month information present, level of precision
is ``year`` and day ``16`` and month ``6`` are from *current_date*.
>>> DateDataParser().get_date_data(u'2014')
{'date_obj': datetime.datetime(2014, 6, 16, 0, 0), 'period': u'year'}
Dates with time zone indications or UTC offsets are returned in UTC time unless
specified using `Settings`_.
>>> DateDataParser().get_date_data(u'23 March 2000, 1:21 PM CET')
{'date_obj': datetime.datetime(2000, 3, 23, 14, 21), 'period': 'day'}
"""
if not(isinstance(date_string, six.text_type) or isinstance(date_string, six.string_types)):
raise TypeError('Input type must be str or unicode')
if isinstance(date_string, bytes):
date_string = date_string.decode('utf-8')
res = parse_with_formats(date_string, date_formats or [], self._settings)
if res['date_obj']:
return res
date_string = sanitize_date(date_string)
for locale in self._get_applicable_locales(date_string):
parsed_date = _DateLocaleParser.parse(
locale, date_string, date_formats, settings=self._settings)
if parsed_date:
parsed_date['locale'] = locale.shortname
if self.try_previous_locales:
self.previous_locales.insert(0, locale)
return parsed_date
else:
return {'date_obj': None, 'period': 'day', 'locale': None}
|
return load plan (timestamps generator)
|
def get_load_plan(self):
"""
return load plan (timestamps generator)
"""
if self.rps_schedule and self.instances_schedule:
raise StepperConfigurationError(
'Both rps and instances schedules specified. You must specify only one of them'
)
elif self.rps_schedule:
info.status.publish('loadscheme', self.rps_schedule)
return lp.create(self.rps_schedule)
elif self.instances_schedule:
info.status.publish('loadscheme', self.instances_schedule)
return ip.create(self.instances_schedule)
else:
self.instances_schedule = []
info.status.publish('loadscheme', self.instances_schedule)
return ip.create(self.instances_schedule)
|
return ammo generator
|
def get_ammo_generator(self):
"""
return ammo generator
"""
af_readers = {
'phantom': missile.AmmoFileReader,
'slowlog': missile.SlowLogReader,
'line': missile.LineReader,
'uri': missile.UriReader,
'uripost': missile.UriPostReader,
'access': missile.AccessLogReader,
'caseline': missile.CaseLineReader,
}
if self.uris and self.ammo_file:
raise StepperConfigurationError(
'Both uris and ammo file specified. You must specify only one of them'
)
elif self.uris:
ammo_gen = missile.UriStyleGenerator(
self.uris, self.headers, http_ver=self.http_ver)
elif self.ammo_file:
if self.ammo_type in af_readers:
if self.ammo_type == 'phantom':
opener = resource.get_opener(self.ammo_file)
with opener(self.use_cache) as ammo:
try:
if not ammo.next()[0].isdigit():
self.ammo_type = 'uri'
self.log.info(
"Setting ammo_type 'uri' because ammo is not started with digit and you did not specify ammo format"
)
else:
self.log.info(
"Default ammo type ('phantom') used, use 'phantom.ammo_type' option to override it"
)
except StopIteration:
self.log.exception(
"Couldn't read first line of ammo file")
raise AmmoFileError(
"Couldn't read first line of ammo file")
else:
raise NotImplementedError(
'No such ammo type implemented: "%s"' % self.ammo_type)
ammo_gen = af_readers[self.ammo_type](
self.ammo_file, headers=self.headers, http_ver=self.http_ver, use_cache=self.use_cache)
else:
raise StepperConfigurationError(
'Ammo not found. Specify uris or ammo file')
self.log.info("Using %s ammo reader" % type(ammo_gen).__name__)
return ammo_gen
|
translate http code to net code; if assertion failed, set net code to 314
|
def _exc_to_net(param1, success):
""" translate http code to net code. if accertion failed, set net code to 314 """
if len(param1) <= 3:
# FIXME: we're unable to use better logic here, because we should support non-http codes
# but, we should look for core.util.HTTP or some other common logic
# here
if success:
return 0
else:
return 314
exc = param1.split(' ')[-1]
if exc in KNOWN_EXC.keys():
return KNOWN_EXC[exc]
else:
logger.warning(
"Unknown Java exception, consider adding it to dictionary: %s",
param1)
return 41
|
translate exception str to http code
|
def _exc_to_http(param1):
""" translate exception str to http code"""
if len(param1) <= 3:
try:
int(param1)
except BaseException:
logger.error(
"JMeter wrote some strange data into codes column: %s", param1)
else:
return int(param1)
exc = param1.split(' ')[-1]
if exc in KNOWN_EXC.keys():
return 0
else:
logger.warning("Unknown Java exception. %s", param1)
return 0
|
Read phantom tool specific options
|
def read_config(self):
""" Read phantom tool specific options """
self.threads = self.cfg["threads"] or str(int(multiprocessing.cpu_count() / 2) + 1)
self.phantom_modules_path = self.cfg["phantom_modules_path"]
self.additional_libs = ' '.join(self.cfg["additional_libs"])
self.answ_log_level = self.cfg["writelog"]
if self.answ_log_level.lower() in ['0', 'false']:
self.answ_log_level = 'none'
elif self.answ_log_level.lower() in ['1', 'true']:
self.answ_log_level = 'all'
self.timeout = parse_duration(self.cfg["timeout"])
if self.timeout > 120000:
logger.warning(
"You've set timeout over 2 minutes."
" Are you a functional tester?")
self.answ_log = self.core.mkstemp(".log", "answ_")
self.core.add_artifact_file(self.answ_log)
self.core.add_artifact_file(self.phout_file)
self.core.add_artifact_file(self.stat_log)
self.phantom_log = self.core.mkstemp(".log", "phantom_")
self.core.add_artifact_file(self.phantom_log)
main_stream = StreamConfig(
self.core,
len(self.streams), self.phout_file, self.answ_log,
self.answ_log_level, self.timeout, self.cfg, True)
self.streams.append(main_stream)
for section in self.multi():
self.streams.append(
StreamConfig(
self.core,
len(self.streams), self.phout_file, self.answ_log,
self.answ_log_level, self.timeout, section))
for stream in self.streams:
stream.read_config()
if any(stream.ssl for stream in self.streams):
self.additional_libs += ' ssl io_benchmark_method_stream_transport_ssl'
|
Generate phantom tool run config
|
def compose_config(self):
""" Generate phantom tool run config """
streams_config = ''
stat_benchmarks = ''
for stream in self.streams:
streams_config += stream.compose_config()
if not stream.is_main:
stat_benchmarks += " " + "benchmark_io%s" % stream.sequence_no
kwargs = {}
kwargs['threads'] = self.threads
kwargs['phantom_log'] = self.phantom_log
kwargs['stat_log'] = self.stat_log
kwargs['benchmarks_block'] = streams_config
kwargs['stat_benchmarks'] = stat_benchmarks
kwargs['additional_libs'] = self.additional_libs
kwargs['phantom_modules_path'] = self.phantom_modules_path
filename = self.core.mkstemp(".conf", "phantom_")
self.core.add_artifact_file(filename)
logger.debug("Generating phantom config: %s", filename)
template_str = resource_string(__name__, "config/phantom.conf.tpl")
tpl = string.Template(template_str)
config = tpl.substitute(kwargs)
with open(filename, 'w') as conffile:
conffile.write(config)
return filename
|
get merged info about phantom conf
|
def get_info(self):
""" get merged info about phantom conf """
result = copy.copy(self.streams[0])
result.stat_log = self.stat_log
result.steps = []
result.ammo_file = ''
result.rps_schedule = None
result.ammo_count = 0
result.duration = 0
result.instances = 0
result.loadscheme = []
result.loop_count = 0
for stream in self.streams:
sec_no = 0
logger.debug("Steps: %s", stream.stepper_wrapper.steps)
for item in stream.stepper_wrapper.steps:
for x in range(0, item[1]):
if len(result.steps) > sec_no:
result.steps[sec_no][0] += item[0]
else:
result.steps.append([item[0], 1])
sec_no += 1
if result.rps_schedule:
result.rps_schedule = []
else:
result.rps_schedule = stream.stepper_wrapper.loadscheme
if result.loadscheme:
result.loadscheme = ''
else:
# FIXME: add formatted load scheme for server:
# <step_size,step_type,first_rps,last_rps,original_step_params>
# as a string
result.loadscheme = ''
if result.loop_count:
result.loop_count = u'0'
else:
result.loop_count = stream.stepper_wrapper.loop_count
result.ammo_file += '{} '.format(stream.stepper_wrapper.ammo_file)
result.ammo_count += stream.stepper_wrapper.ammo_count
result.duration = max(
result.duration, stream.stepper_wrapper.duration)
result.instances += stream.instances
if not result.ammo_count:
raise ValueError("Total ammo count cannot be zero")
return result
|
reads config
|
def read_config(self):
""" reads config """
# multi-options
self.ssl = self.get_option("ssl")
self.tank_type = self.get_option("tank_type")
# TODO: refactor. Maybe we should decide how to interact with
# StepperWrapper here.
# self.instances = self.get_option('instances')
self.gatling = ' '.join(self.get_option('gatling_ip').split("\n"))
self.method_prefix = self.get_option("method_prefix")
self.method_options = self.get_option("method_options")
self.source_log_prefix = self.get_option("source_log_prefix")
self.phantom_http_line = self.get_option("phantom_http_line")
self.phantom_http_field_num = self.get_option("phantom_http_field_num")
self.phantom_http_field = self.get_option("phantom_http_field")
self.phantom_http_entity = self.get_option("phantom_http_entity")
self.address = self.get_option('address')
do_test_connect = self.get_option("connection_test")
explicit_port = self.get_option('port', '')
self.ipv6, self.resolved_ip, self.port, self.address = self.address_wizard.resolve(
self.address, do_test_connect, explicit_port)
logger.info(
"Resolved %s into %s:%s", self.address, self.resolved_ip, self.port)
self.client_cipher_suites = self.get_option("client_cipher_suites", "")
self.client_certificate = self.get_option("client_certificate", "")
self.client_key = self.get_option("client_key", "")
self.stepper_wrapper.read_config()
|
compose benchmark block
|
def compose_config(self):
""" compose benchmark block """
# step file
self.stepper_wrapper.prepare_stepper()
self.stpd = self.stepper_wrapper.stpd
if self.stepper_wrapper.instances:
self.instances = self.stepper_wrapper.instances
if not self.stpd:
raise RuntimeError("Cannot proceed with no STPD file")
kwargs = {}
kwargs['sequence_no'] = self.sequence_no
if self.ssl:
_auth_section = ''
_ciphers = ''
ssl_template = "transport_t ssl_transport = transport_ssl_t {\n" \
" timeout = 1s\n" \
" %s\n" \
" %s}\n" \
" transport = ssl_transport"
if self.client_certificate or self.client_key:
_auth_section = 'auth_t def_auth = auth_t { key = "%s" cert = "%s"} auth = def_auth' \
% (self.client_key, self.client_certificate)
if self.client_cipher_suites:
_ciphers = 'ciphers = "%s"' % self.client_cipher_suites
kwargs['ssl_transport'] = ssl_template % (_auth_section, _ciphers)
else:
kwargs['ssl_transport'] = ""
kwargs['method_stream'] = self.method_prefix + \
"_ipv6_t" if self.ipv6 else self.method_prefix + "_ipv4_t"
kwargs['phout'] = self.phout_file
kwargs['answ_log'] = self.answ_log
kwargs['answ_log_level'] = self.answ_log_level
kwargs['comment_answ'] = "# " if self.answ_log_level == 'none' else ''
kwargs['stpd'] = self.stpd
kwargs['source_log_prefix'] = self.source_log_prefix
kwargs['method_options'] = self.method_options
if self.tank_type:
            kwargs['proto'] = ("proto=http_proto%s" % self.sequence_no
                               if self.tank_type == 'http' else "proto=none_proto")
kwargs['comment_proto'] = ""
else:
kwargs['proto'] = ""
kwargs['comment_proto'] = "#"
if self.gatling:
kwargs['bind'] = 'bind={ ' + self.gatling + ' }'
else:
kwargs['bind'] = ''
kwargs['ip'] = self.resolved_ip
kwargs['port'] = self.port
kwargs['timeout'] = self.timeout
kwargs['instances'] = self.instances
tune = ''
if self.phantom_http_entity:
tune += "entity = " + self.phantom_http_entity + "\n"
if self.phantom_http_field:
tune += "field = " + self.phantom_http_field + "\n"
if self.phantom_http_field_num:
tune += "field_num = {}\n".format(self.phantom_http_field_num)
if self.phantom_http_line:
tune += "line = " + self.phantom_http_line + "\n"
if tune:
kwargs['reply_limits'] = 'reply_limits = {\n' + tune + "}"
else:
kwargs['reply_limits'] = ''
if self.is_main:
fname = 'phantom_benchmark_main.tpl'
else:
fname = 'phantom_benchmark_additional.tpl'
template_str = resource_string(
__name__, "config/" + fname)
tpl = string.Template(template_str)
config = tpl.substitute(kwargs)
return config
|
download remote resources, replace links with local filenames, and add a result file section (config: pandora config dict)
|
def patch_config(self, config):
"""
download remote resources, replace links with local filenames
add result file section
:param dict config: pandora config
"""
# get expvar parameters
if config.get("monitoring"):
if config["monitoring"].get("expvar"):
self.expvar = config["monitoring"]["expvar"].get("enabled")
if config["monitoring"]["expvar"].get("port"):
self.expvar_port = config["monitoring"]["expvar"].get("port")
else:
self.expvar_port = self.DEFAULT_EXPVAR_PORT
# or set if expvar not exists
else:
config["monitoring"] = {
"expvar": {
"enabled": True,
}
}
self.expvar = True
self.expvar_port = self.DEFAULT_EXPVAR_PORT
# FIXME this is broken for custom ammo providers due to interface incompatibility
# FIXME refactor pandora plx
for pool in config['pools']:
if pool.get('ammo', {}).get('file', ''):
self.ammofile = pool['ammo']['file']
pool['ammo']['file'] = resource_manager.resource_filename(
self.ammofile
)
if not pool.get('result') or 'phout' not in pool.get('result', {}).get('type', ''):
logger.warning('Seems like pandora result file not specified... adding defaults')
pool['result'] = dict(
destination=self.DEFAULT_REPORT_FILE,
type='phout',
)
return config
|
Validate a load duration string, e.g. 2h, 2h5m, 5m, 180, 1h4m3.
|
def validate_duration(self, field, duration):
'''
2h
2h5m
5m
180
1h4m3
:param duration:
:return:
'''
DURATION_RE = r'^(\d+d)?(\d+h)?(\d+m)?(\d+s?)?$'
if not re.match(DURATION_RE, duration):
self._error(field, 'Load duration examples: 2h30m; 5m15; 180')
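
A standalone check of what the duration regex accepts (the pattern is copied from the validator above):

import re

DURATION_RE = r'^(\d+d)?(\d+h)?(\d+m)?(\d+s?)?$'
for sample in ('2h', '2h5m', '5m', '180', '1h4m3', 'five minutes'):
    print(sample, bool(re.match(DURATION_RE, sample)))
# 2h True / 2h5m True / 5m True / 180 True / 1h4m3 True / five minutes False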
|
Validate a load scheme string, e.g. step(10,200,5,180), step(5,50,2.5,5m), line(22,154,2h5m), const(10,1h4m3s).
|
def _validator_load_scheme(self, field, value):
'''
step(10,200,5,180)
step(5,50,2.5,5m)
line(22,154,2h5m)
step(5,50,2.5,5m) line(22,154,2h5m)
const(10,1h4m3s)
:param field:
:param value:
:return:
'''
# stpd file can be any value
        if self.document['load_type'] == 'stpd_file':
return
PRIMARY_RE = r'(step|line|const)\((.+?)\)'
N_OF_ARGS = {
'step': 4,
'line': 3,
'const': 2,
}
matches = re.findall(PRIMARY_RE, value)
if len(matches) == 0:
self._error(field, 'Should match one of the following patterns: step(...) / line(...) / const(...)')
else:
for match in matches:
curve, params_str = match
params = [v.strip() for v in params_str.split(',')]
# check number of arguments
if not len(params) == N_OF_ARGS[curve]:
self._error(field, '{} load scheme: expected {} arguments, found {}'.format(curve,
N_OF_ARGS[curve],
len(params)))
# check arguments' types
for param in params[:-1]:
if not self.is_number(param):
self._error(field, 'Argument {} in load scheme should be a number'.format(param))
self.validate_duration(field, params[-1])
|
Returns [(plugin_name, plugin_package, plugin_config), ...] as a list of tuples.
|
def __parse_enabled_plugins(self):
"""
:returns: [(plugin_name, plugin_package, plugin_config), ...]
:rtype: list of tuple
"""
        return [
            (plugin_name, plugin['package'], plugin)
            for plugin_name, plugin in self.raw_config_dict.items()
            if plugin_name not in self.BASE_SCHEMA.keys()
            and isinstance(plugin, dict)
            and plugin.get('enabled')
        ]
|
Returns [(plugin_name, plugin_package, plugin_config), ...] as a list of tuples.
|
def plugins(self):
"""
:returns: [(plugin_name, plugin_package, plugin_config), ...]
:rtype: list of tuple
"""
if not self._plugins:
            self._plugins = [
                (plugin_name, plugin_cfg['package'], plugin_cfg)
                for plugin_name, plugin_cfg in self.validated.items()
                if plugin_name not in self.base_schema.keys() and plugin_cfg['enabled']
            ]
return self._plugins
|
This function polls stdout and stderr streams and writes their contents to log
|
def log_stdout_stderr(log, stdout, stderr, comment=""):
"""
This function polls stdout and stderr streams and writes their contents
to log
"""
readable = select.select([stdout], [], [], 0)[0]
if stderr:
exceptional = select.select([stderr], [], [], 0)[0]
else:
exceptional = []
log.debug("Selected: %s, %s", readable, exceptional)
for handle in readable:
line = handle.read()
readable.remove(handle)
if line:
log.debug("%s stdout: %s", comment, line.strip())
for handle in exceptional:
line = handle.read()
exceptional.remove(handle)
if line:
log.warn("%s stderr: %s", comment, line.strip())
|
helper for the functions above: expand a time string like '1h30m' into seconds (scaled by multiplier)
|
def expand_time(str_time, default_unit='s', multiplier=1):
"""
    helper for the functions above: expand a time string like '1h30m' into seconds (scaled by multiplier)
"""
parser = re.compile(r'(\d+)([a-zA-Z]*)')
parts = parser.findall(str_time)
result = 0.0
for value, unit in parts:
value = int(value)
unit = unit.lower()
if unit == '':
unit = default_unit
if unit == 'ms':
result += value * 0.001
continue
elif unit == 's':
result += value
continue
elif unit == 'm':
result += value * 60
continue
elif unit == 'h':
result += value * 60 * 60
continue
elif unit == 'd':
result += value * 60 * 60 * 24
continue
elif unit == 'w':
result += value * 60 * 60 * 24 * 7
continue
else:
raise ValueError(
"String contains unsupported unit %s: %s" % (unit, str_time))
return int(result * multiplier)
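
Worked examples for the unit handling above; the return value is an integer number of seconds multiplied by multiplier:

expand_time('1h30m')                  # 5400
expand_time('200ms')                  # 0 (0.2 s truncated by int())
expand_time('90', default_unit='m')   # 5400: bare numbers use default_unit
expand_time('2s', multiplier=1000)    # 2000, e.g. to get milliseconds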
|
Check whether pid exists in the current process table.
|
def pid_exists(pid):
"""Check whether pid exists in the current process table."""
if pid < 0:
return False
try:
os.kill(pid, 0)
except OSError as exc:
logging.debug("No process[%s]: %s", exc.errno, exc)
return exc.errno == errno.EPERM
else:
p = psutil.Process(pid)
return p.status != psutil.STATUS_ZOMBIE
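
A quick sanity check of the branches above (existing PID, negative PID, unused PID):

import os

pid_exists(os.getpid())   # True: the current process exists and is not a zombie
pid_exists(-1)            # False: negative PIDs are rejected up front
# For a PID that was never assigned, os.kill raises OSError with errno ESRCH,
# so the errno == EPERM check evaluates to False.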
|
Split a string on whitespace, keeping double-quoted phrases together: splitstring('apple orange "banana tree" green') returns ['apple', 'orange', 'green', '"banana tree"'].
|
def splitstring(string):
"""
>>> string = 'apple orange "banana tree" green'
>>> splitstring(string)
['apple', 'orange', 'green', '"banana tree"']
"""
patt = re.compile(r'"[\w ]+"')
if patt.search(string):
quoted_item = patt.search(string).group()
newstring = patt.sub('', string)
return newstring.split() + [quoted_item]
else:
return string.split()
|
Reads _len characters if _len is not None, else reads a line; returns (string, stop position).
|
def read_with_lock(self, pos, _len=None):
"""
Reads {_len} characters if _len is not None else reads line
:param pos: start reading position
:param _len: number of characters to read
:rtype: (string, int)
"""
self.wait_lock()
try:
self._opened_file.seek(pos)
result = self._opened_file.read(_len) if _len is not None else self._opened_file.readline()
stop_pos = self._opened_file.tell()
finally:
self.unlock()
if not result and self.stop.is_set():
result = None
return result, stop_pos
|
get_option wrapper
|
def get_option(self, option, param2=None):
''' get_option wrapper'''
result = self.cfg[option]
self.log.debug(
"Option %s = %s", option, result)
return result
|
stepper part of reading options
|
def read_config(self):
''' stepper part of reading options '''
self.log.info("Configuring StepperWrapper...")
self.ammo_file = self.get_option(self.OPTION_AMMOFILE)
self.ammo_type = self.get_option('ammo_type')
if self.ammo_file:
self.ammo_file = os.path.expanduser(self.ammo_file)
self.loop_limit = self.get_option(self.OPTION_LOOP)
self.ammo_limit = self.get_option("ammo_limit")
self.load_profile = LoadProfile(**self.get_option('load_profile'))
self.instances = int(
self.get_option(self.OPTION_INSTANCES_LIMIT, '1000'))
self.uris = self.get_option("uris", [])
while '' in self.uris:
self.uris.remove('')
self.headers = self.get_option("headers")
self.http_ver = self.get_option("header_http")
self.autocases = self.get_option("autocases")
self.enum_ammo = self.get_option("enum_ammo")
self.use_caching = self.get_option("use_caching")
self.file_cache = self.get_option('file_cache')
cache_dir = self.get_option("cache_dir") or self.core.artifacts_base_dir
self.cache_dir = os.path.expanduser(cache_dir)
self.force_stepping = self.get_option("force_stepping")
if self.get_option(self.OPTION_LOAD)[self.OPTION_LOAD_TYPE] == 'stpd_file':
self.stpd = self.get_option(self.OPTION_LOAD)[self.OPTION_SCHEDULE]
self.chosen_cases = self.get_option("chosen_cases").split()
if self.chosen_cases:
self.log.info("chosen_cases LIMITS: %s", self.chosen_cases)
|
Generate test data if necessary
|
def prepare_stepper(self):
''' Generate test data if necessary '''
def publish_info(stepper_info):
info.status.publish('loadscheme', stepper_info.loadscheme)
info.status.publish('loop_count', stepper_info.loop_count)
info.status.publish('steps', stepper_info.steps)
info.status.publish('duration', stepper_info.duration)
info.status.ammo_count = stepper_info.ammo_count
info.status.publish('instances', stepper_info.instances)
self.core.publish('stepper', 'loadscheme', stepper_info.loadscheme)
self.core.publish('stepper', 'loop_count', stepper_info.loop_count)
self.core.publish('stepper', 'steps', stepper_info.steps)
self.core.publish('stepper', 'duration', stepper_info.duration)
self.core.publish('stepper', 'ammo_count', stepper_info.ammo_count)
self.core.publish('stepper', 'instances', stepper_info.instances)
return stepper_info
if not self.stpd:
self.stpd = self.__get_stpd_filename()
if self.use_caching and not self.force_stepping and os.path.exists(
self.stpd) and os.path.exists(self.__si_filename()):
self.log.info("Using cached stpd-file: %s", self.stpd)
stepper_info = self.__read_cached_options()
if self.instances and self.load_profile.is_rps():
self.log.info(
"rps_schedule is set. Overriding cached instances param from config: %s",
self.instances)
stepper_info = stepper_info._replace(
instances=self.instances)
publish_info(stepper_info)
else:
if (
self.force_stepping and os.path.exists(self.__si_filename())):
os.remove(self.__si_filename())
self.__make_stpd_file()
stepper_info = info.status.get_info()
self.__write_cached_options(stepper_info)
else:
self.log.info("Using specified stpd-file: %s", self.stpd)
stepper_info = publish_info(self.__read_cached_options())
self.ammo_count = stepper_info.ammo_count
self.duration = stepper_info.duration
self.loop_count = stepper_info.loop_count
self.loadscheme = stepper_info.loadscheme
self.steps = stepper_info.steps
if stepper_info.instances:
self.instances = stepper_info.instances
|
Choose the name for stepped data file
|
def __get_stpd_filename(self):
''' Choose the name for stepped data file '''
if self.use_caching:
sep = "|"
hasher = hashlib.md5()
hashed_str = "cache version 6" + sep + \
';'.join(self.load_profile.schedule) + sep + str(self.loop_limit)
hashed_str += sep + str(self.ammo_limit) + sep + ';'.join(
self.load_profile.schedule) + sep + str(self.autocases)
hashed_str += sep + ";".join(self.uris) + sep + ";".join(
self.headers) + sep + self.http_ver + sep + ";".join(
self.chosen_cases)
hashed_str += sep + str(self.enum_ammo) + sep + str(self.ammo_type)
if self.load_profile.is_instances():
hashed_str += sep + str(self.instances)
if self.ammo_file:
opener = resource.get_opener(self.ammo_file)
hashed_str += sep + opener.hash
else:
if not self.uris:
raise RuntimeError("Neither ammofile nor uris specified")
hashed_str += sep + \
';'.join(self.uris) + sep + ';'.join(self.headers)
self.log.debug("stpd-hash source: %s", hashed_str)
hasher.update(hashed_str.encode('utf8'))
if not os.path.exists(self.cache_dir):
os.makedirs(self.cache_dir)
stpd = self.cache_dir + '/' + \
os.path.basename(self.ammo_file) + \
"_" + hasher.hexdigest() + ".stpd"
else:
stpd = os.path.realpath("ammo.stpd")
self.log.debug("Generated cache file name: %s", stpd)
return stpd
|
Read stepper info from json
|
def __read_cached_options(self):
'''
Read stepper info from json
'''
self.log.debug("Reading cached stepper info: %s", self.__si_filename())
with open(self.__si_filename(), 'r') as si_file:
si = info.StepperInfo(**json.load(si_file))
return si
|
Write stepper info to json
|
def __write_cached_options(self, si):
'''
Write stepper info to json
'''
self.log.debug("Saving stepper info: %s", self.__si_filename())
with open(self.__si_filename(), 'w') as si_file:
json.dump(si._asdict(), si_file, indent=4)
|
stpd generation using Stepper class
|
def __make_stpd_file(self):
''' stpd generation using Stepper class '''
self.log.info("Making stpd-file: %s", self.stpd)
stepper = Stepper(
self.core,
rps_schedule=self.load_profile.schedule if self.load_profile.is_rps() else None,
http_ver=self.http_ver,
ammo_file=self.ammo_file,
instances_schedule=self.load_profile.schedule if self.load_profile.is_instances() else None,
instances=self.instances,
loop_limit=self.loop_limit,
ammo_limit=self.ammo_limit,
uris=self.uris,
headers=[header.strip('[]') for header in self.headers],
autocases=self.autocases,
enum_ammo=self.enum_ammo,
ammo_type=self.ammo_type,
chosen_cases=self.chosen_cases,
use_cache=self.use_caching)
with open(self.stpd, 'w', self.file_cache) as os:
stepper.write(os)
|
Create Load Plan as defined in schedule. Publish info about its duration.
|
def create(rps_schedule):
"""
Create Load Plan as defined in schedule. Publish info about its duration.
"""
if len(rps_schedule) > 1:
lp = Composite(
[StepFactory.produce(step_config) for step_config in rps_schedule])
else:
lp = StepFactory.produce(rps_schedule[0])
info.status.publish('duration', lp.get_duration() / 1000)
info.status.publish('steps', lp.get_rps_list())
info.status.lp_len = len(lp)
return lp
|
Return when to shoot the nth charge, in milliseconds.
|
def ts(self, n):
"""
:param n: number of charge
:return: when to shoot nth charge, milliseconds
"""
try:
root1, root2 = solve_quadratic(self.slope / 2.0, self.minrps, -n)
except ZeroDivisionError:
root2 = float(n) / self.minrps
return int(root2 * 1000)
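
The quadratic comes from integrating a linear ramp: with cumulative shot count n(t) = minrps*t + slope*t**2/2, the nth shot fires at the positive root of (slope/2)*t**2 + minrps*t - n = 0 (the ZeroDivisionError branch covers a flat schedule, where t = n/minrps). A standalone hand-check under assumed parameters (minrps = 1 rps, slope = 1 rps/s, i.e. a ramp such as 1 to 5 rps over 4 s):

import math

def ts_standalone(n, minrps=1.0, slope=1.0):
    # positive root of (slope/2)*t**2 + minrps*t - n = 0, in milliseconds
    if slope == 0:
        return int(n / minrps * 1000)
    t = (-minrps + math.sqrt(minrps ** 2 + 2 * slope * n)) / slope
    return int(t * 1000)

print([ts_standalone(n) for n in range(5)])  # [0, 732, 1236, 1645, 2000]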
|
Return rps for second t
|
def rps_at(self, t):
'''Return rps for second t'''
if 0 <= t <= self.duration:
return self.minrps + \
float(self.maxrps - self.minrps) * t / self.duration
else:
return 0
|
get list of constant load parts (we have no constant load at all, but tank will think so), with part durations (float)
|
def get_float_rps_list(self):
'''
get list of constant load parts (we have no constant load at all, but tank will think so),
with parts durations (float)
'''
int_rps = range(int(self.minrps), int(self.maxrps) + 1)
step_duration = float(self.duration) / len(int_rps)
rps_list = [(rps, int(step_duration)) for rps in int_rps]
return rps_list
|
get list of each second's rps; returns a list of tuples (rps, duration of corresponding rps in seconds)
|
def get_rps_list(self):
"""
get list of each second's rps
:returns: list of tuples (rps, duration of corresponding rps in seconds)
:rtype: list
"""
seconds = range(0, int(self.duration) + 1)
rps_groups = groupby([proper_round(self.rps_at(t)) for t in seconds],
lambda x: x)
rps_list = [(rps, len(list(rpl))) for rps, rpl in rps_groups]
return rps_list
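
A quick standalone trace of the grouping above, under an assumed ramp minrps=1, maxrps=3, duration=6 s (so rps_at(t) = 1 + t/3):

from itertools import groupby

per_second = [round(1 + t / 3) for t in range(7)]           # [1, 1, 2, 2, 2, 3, 3]
rps_list = [(rps, len(list(grp))) for rps, grp in groupby(per_second)]
print(rps_list)                                             # [(1, 2), (2, 3), (3, 2)]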
|
Execute and check exit code
|
def execute(self, cmd):
"""
Execute and check exit code
"""
self.log.info("Executing: %s", cmd)
retcode = execute(
cmd, shell=True, poll_period=0.1, catch_out=self.catch_out)[0]
if retcode:
raise RuntimeError("Subprocess returned %s" % retcode)
return retcode
|
The reason why we have two separate methods for monitoring and aggregates is a strong difference in incoming data.
|
def decode_monitoring(self, data):
"""
The reason why we have two separate methods for monitoring
and aggregates is a strong difference in incoming data.
"""
points = list()
for second_data in data:
for host, host_data in second_data["data"].iteritems():
points.append(
self.__make_points(
"monitoring",
{"host": host, "comment": host_data.get("comment")},
second_data["timestamp"],
{
metric: value
for metric, value in host_data["metrics"].iteritems()
}
)
)
return points
|
Make a set of points for this label.
|
def __make_points_for_label(self, ts, data, label, prefix, gun_stats):
"""x
Make a set of points for `this` label
overall_quantiles, overall_meta, net_codes, proto_codes, histograms
"""
label_points = list()
label_points.extend(
(
# overall quantiles for label
self.__make_points(
prefix + "overall_quantiles",
{"label": label},
ts,
self.__make_quantile_fields(data)
),
# overall meta (gun status) for label
self.__make_points(
prefix + "overall_meta",
{"label": label},
ts,
self.__make_overall_meta_fields(data, gun_stats)
),
# net codes for label
self.__make_points(
prefix + "net_codes",
{"label": label},
ts,
self.__make_netcodes_fields(data)
),
# proto codes for label
self.__make_points(
prefix + "proto_codes",
{"label": label},
ts,
self.__make_protocodes_fields(data)
)
)
)
# histograms, one row for each bin
if self.histograms:
for bin_, count in zip(data["interval_real"]["hist"]["bins"],
data["interval_real"]["hist"]["data"]):
label_points.append(
self.__make_points(
prefix + "histograms",
{"label": label},
ts,
{"bin": bin_, "count": count}
)
)
return label_points
|
Build an InfluxDB point dict from a measurement type (e.g. monitoring, overall_meta, net_codes, proto_codes, overall_quantiles), additional tags, a timestamp, and fields (InfluxDB columns).
|
def __make_points(self, measurement, additional_tags, ts, fields):
"""
Parameters
----------
measurement : string
measurement type (e.g. monitoring, overall_meta, net_codes, proto_codes, overall_quantiles)
additional_tags : dict
            custom additional tags for these points
ts : integer
timestamp
fields : dict
influxdb columns
Returns
-------
dict
points for InfluxDB client
"""
tags = self.tags.copy()
tags.update(additional_tags)
return {
"measurement": measurement,
"tags": tags,
"time": int(ts),
"fields": fields,
}
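
For reference, the shape this helper hands to the InfluxDB client looks roughly like the following (tag contents are illustrative; self.tags is whatever the plugin was configured with):

# self.__make_points("net_codes", {"label": "/search"}, 1508160000, {"0": 1450, "110": 3})
# {
#     "measurement": "net_codes",
#     "tags": {..., "label": "/search"},   # self.tags merged with additional_tags
#     "time": 1508160000,
#     "fields": {"0": 1450, "110": 3},
# }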
|
publish value to status
|
def publish(self, key, value):
"""publish value to status"""
self.log.debug(
"Publishing status: %s/%s: %s", self.__class__.__name__, key, value)
self.core.publish(self.__class__.__name__, key, value)
|
helper to aggregate codes by mask
|
def count_matched_codes(codes_regex, codes_dict):
""" helper to aggregate codes by mask """
total = 0
for code, count in codes_dict.items():
if codes_regex.match(str(code)):
total += count
return total
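
Worked example of the mask aggregation above, e.g. counting all 2xx or all 5xx responses:

import re

codes = {'200': 1450, '404': 12, '503': 3, '0': 7}
count_matched_codes(re.compile(r'2..'), codes)     # 1450
count_matched_codes(re.compile(r'5..'), codes)     # 3
count_matched_codes(re.compile(r'[45]..'), codes)  # 15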
|
Tell the workers to finish their jobs and quit.
|
def stop(self):
"""
        Tell the workers to finish their jobs and quit.
"""
self.quit.set()
        while any(worker.is_alive() for worker in self.pool):
            time.sleep(1)
try:
while not self.task_queue.empty():
self.task_queue.get(timeout=0.1)
self.task_queue.close()
self.feeder.join()
except Exception as ex:
logger.info(ex)
|
A feeder that runs in distinct thread in main process.
|
def _feed(self):
"""
A feeder that runs in distinct thread in main process.
"""
self.plan = StpdReader(self.stpd_filename)
if self.cached_stpd:
self.plan = list(self.plan)
for task in self.plan:
if self.quit.is_set():
logger.info("Stop feeding: gonna quit")
return
# try putting a task to a queue unless there is a quit flag
# or all workers have exited
while True:
try:
self.task_queue.put(task, timeout=1)
break
except Full:
if self.quit.is_set() or self.workers_finished:
return
else:
continue
workers_count = self.instances
logger.info(
"Feeded all data. Publishing %d killer tasks" % (workers_count))
retry_delay = 1
for _ in range(5):
try:
[
self.task_queue.put(None, timeout=1)
for _ in xrange(0, workers_count)
]
break
except Full:
logger.debug(
"Couldn't post killer tasks"
" because queue is full. Retrying in %ss", retry_delay)
time.sleep(retry_delay)
retry_delay *= 2
try:
logger.info("Waiting for workers")
map(lambda x: x.join(), self.pool)
logger.info("All workers exited.")
self.workers_finished = True
except (KeyboardInterrupt, SystemExit):
self.task_queue.close()
self.results.close()
self.quit.set()
logger.info("Going to quit. Waiting for workers")
map(lambda x: x.join(), self.pool)
self.workers_finished = True
|
A worker that does actual jobs
|
def _worker(self):
"""
A worker that does actual jobs
"""
logger.debug("Init shooter process")
try:
self.gun.setup()
except Exception:
logger.exception("Couldn't initialize gun. Exit shooter process")
return
while not self.quit.is_set():
try:
task = self.task_queue.get(timeout=1)
if not task:
logger.debug("Got killer task.")
break
timestamp, missile, marker = task
planned_time = self.start_time + (timestamp / 1000.0)
delay = planned_time - time.time()
if delay > 0:
time.sleep(delay)
try:
with self.instance_counter.get_lock():
self.instance_counter.value += 1
self.gun.shoot(missile, marker)
finally:
with self.instance_counter.get_lock():
self.instance_counter.value -= 1
except (KeyboardInterrupt, SystemExit):
break
except Empty:
if self.quit.is_set():
logger.debug("Empty queue. Exiting process")
return
except Full:
logger.warning("Couldn't put to result queue because it's full")
except Exception:
logger.exception("Bfg shoot exception")
try:
self.gun.teardown()
except Exception:
logger.exception("Couldn't finalize gun. Exit shooter process")
return
logger.debug("Exit shooter process")
|
A worker that does actual jobs
|
def _green_worker(self):
"""
A worker that does actual jobs
"""
while not self.quit.is_set():
try:
task = self.green_queue.get(timeout=1)
timestamp, missile, marker = task
planned_time = self.start_time + (timestamp / 1000.0)
delay = planned_time - time.time()
if delay > 0:
time.sleep(delay)
try:
with self.instance_counter.get_lock():
self.instance_counter.value += 1
self.gun.shoot(missile, marker)
finally:
with self.instance_counter.get_lock():
self.instance_counter.value -= 1
self._free_threads_count += 1
except (KeyboardInterrupt, SystemExit):
break
except Empty:
continue
except Full:
logger.warning("Couldn't put to result queue because it's full")
except Exception:
logger.exception("Bfg shoot exception")
|
Set up logging
|
def init_logging(self, log_filename="tank.log"):
""" Set up logging """
logger = logging.getLogger('')
self.log_filename = log_filename
self.core.add_artifact_file(self.log_filename)
file_handler = logging.FileHandler(self.log_filename)
file_handler.setLevel(logging.DEBUG)
file_handler.setFormatter(
logging.Formatter(
"%(asctime)s [%(levelname)s] %(name)s %(message)s"))
logger.addHandler(file_handler)
console_handler = logging.StreamHandler(sys.stdout)
stderr_hdl = logging.StreamHandler(sys.stderr)
# fmt_verbose = logging.Formatter(
# "%(asctime)s [%(levelname)s] %(name)s %(message)s")
fmt_regular = logging.Formatter(
"%(asctime)s %(levelname)s: %(message)s", "%H:%M:%S")
console_handler.setLevel(logging.INFO)
console_handler.setFormatter(fmt_regular)
stderr_hdl.setFormatter(fmt_regular)
f_err = SingleLevelFilter(logging.ERROR, True)
f_warn = SingleLevelFilter(logging.WARNING, True)
f_crit = SingleLevelFilter(logging.CRITICAL, True)
console_handler.addFilter(f_err)
console_handler.addFilter(f_warn)
console_handler.addFilter(f_crit)
logger.addHandler(console_handler)
f_info = SingleLevelFilter(logging.INFO, True)
f_debug = SingleLevelFilter(logging.DEBUG, True)
stderr_hdl.addFilter(f_info)
stderr_hdl.addFilter(f_debug)
logger.addHandler(stderr_hdl)
|
override config options with user specified options
|
def __add_user_options(self):
""" override config options with user specified options"""
if self.options.get('user_options', None):
self.core.apply_shorthand_options(self.options['user_options'])
|
Make preparations before running Tank
|
def configure(self, options):
""" Make preparations before running Tank """
self.options = options
if self.options.get('lock_dir', None):
self.core.set_option(self.core.SECTION, "lock_dir", self.options['lock_dir'])
if self.options.get('ignore_lock', None):
self.core.set_option(self.core.SECTION, 'ignore_lock', self.options['ignore_lock'])
while True:
try:
self.core.get_lock()
break
except Exception as exc:
if self.options.get('lock_fail', None):
raise RuntimeError("Lock file present, cannot continue")
self.log.info(
"Couldn't get lock. Will retry in 5 seconds... (%s)",
str(exc))
time.sleep(5)
configs = self.get_default_configs()
if self.options.get('config', None):
configs.append(self.options['config'])
self.core.load_configs(configs)
self.__add_user_options()
self.core.load_plugins()
if self.options.get('ignore_lock', None):
self.core.set_option(self.core.SECTION, self.IGNORE_LOCKS, "1")
|
returns default configs list, from /etc, home dir and package_data
|
def get_default_configs(self):
""" returns default configs list, from /etc, home dir and package_data"""
# initialize basic defaults
configs = [resource_filename(__name__, 'config/00-base.ini')]
try:
conf_files = sorted(os.listdir(self.baseconfigs_location))
for filename in conf_files:
if fnmatch.fnmatch(filename, '*.ini'):
configs += [
os.path.realpath(
self.baseconfigs_location + os.sep + filename)
]
except OSError:
self.log.warn(
self.baseconfigs_location + ' is not accessible to get configs list')
configs += [os.path.expanduser('~/.yandex-tank')]
return configs
|
call shutdown routines
|
def __graceful_shutdown(self):
""" call shutdown routines """
retcode = 1
self.log.info("Trying to shutdown gracefully...")
retcode = self.core.plugins_end_test(retcode)
retcode = self.core.plugins_post_process(retcode)
self.log.info("Done graceful shutdown")
return retcode
|
Collect data, cache it and send to listeners
|
def _collect_data(self, end=False):
"""
Collect data, cache it and send to listeners
"""
data = get_nowait_from_queue(self.results)
stats = get_nowait_from_queue(self.stats_results)
logger.debug("Data timestamps: %s" % [d.get('ts') for d in data])
logger.debug("Stats timestamps: %s" % [d.get('ts') for d in stats])
for item in data:
ts = item['ts']
if ts in self.stat_cache:
# send items
data_item = item
stat_item = self.stat_cache.pop(ts)
self.__notify_listeners(data_item, stat_item)
else:
self.data_cache[ts] = item
for item in stats:
ts = item['ts']
if ts in self.data_cache:
# send items
data_item = self.data_cache.pop(ts)
stat_item = item
self.__notify_listeners(data_item, stat_item)
else:
self.stat_cache[ts] = item
if end and len(self.data_cache) > 0:
logger.info('Timestamps without stats:')
for ts, data_item in sorted(self.data_cache.items(), key=lambda i: i[0]):
logger.info(ts)
self.__notify_listeners(data_item, StatsReader.stats_item(ts, 0, 0))
|
notify all listeners about aggregate data and stats
|
def __notify_listeners(self, data, stats):
""" notify all listeners about aggregate data and stats """
for listener in self.listeners:
listener.on_aggregated_data(data, stats)
|
Returns a marker function of the requested marker_type
|
def get_marker(marker_type, enum_ammo=False):
'''
Returns a marker function of the requested marker_type
>>> marker = get_marker('uniq')(__test_missile)
>>> type(marker)
<type 'str'>
>>> len(marker)
32
>>> get_marker('uri')(__test_missile)
'_example_search_hello_help_us'
>>> marker = get_marker('non-existent')(__test_missile)
Traceback (most recent call last):
...
NotImplementedError: No such marker: "non-existent"
>>> get_marker('3')(__test_missile)
'_example_search_hello'
>>> marker = get_marker('3', True)
>>> marker(__test_missile)
'_example_search_hello#0'
>>> marker(__test_missile)
'_example_search_hello#1'
'''
try:
limit = int(marker_type)
if limit:
marker = __UriMarker(limit)
else:
def marker(m):
return ''
except ValueError:
if marker_type in __markers:
marker = __markers[marker_type]
else:
raise NotImplementedError('No such marker: "%s"' % marker_type)
# todo: fix u'False'
if enum_ammo:
marker = __Enumerator(marker)
return marker
|
:type column_mapping: dict :type data_session: DataSession
|
def get_uploader(data_session, column_mapping, overall_only=False):
"""
:type column_mapping: dict
:type data_session: DataSession
"""
overall = {col_name: data_session.new_aggregated_metric(name + ' overall')
for col_name, name in column_mapping.items()}
def upload_df(df):
for col_name, metric in overall.items():
df['value'] = df[col_name]
metric.put(df)
return upload_df
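A hypothetical wiring sketch (the DataSession object, the 'interval_real' column and the 'Response time' label are illustrative, not part of the original module):
# hypothetical usage sketch
upload = get_uploader(data_session, {'interval_real': 'Response time'})
upload(df)  # df: a pandas DataFrame that contains an 'interval_real' column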
|
:type path: str
|
def cfg_folder_loader(path):
"""
:type path: str
"""
CFG_WILDCARD = '*.yaml'
return [load_cfg(filename) for filename in sorted(glob.glob(os.path.join(path, CFG_WILDCARD)))]
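A brief usage sketch (the folder path is an assumption; load_cfg is the project's YAML loader referenced above):
# hypothetical usage; files are read in sorted filename order
configs = cfg_folder_loader('/etc/yandex-tank')  # loads every *.yaml found in the folder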
|
:type options: list of str :rtype: list of dict
|
def parse_options(options):
"""
:type options: list of str
:rtype: list of dict
"""
if options is None:
return []
else:
return [
convert_single_option(key.strip(), value.strip())
for key, value
in [option.split('=', 1) for option in options]
]
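A minimal sketch of the split/strip step performed before conversion (the option strings are made up; the final dict shape depends on convert_single_option):
# hypothetical input; only the first '=' splits, so values may contain '=' themselves
raw = ['phantom.address=localhost:80', 'console.enabled = true']
pairs = [option.split('=', 1) for option in raw]
stripped = [(key.strip(), value.strip()) for key, value in pairs]
# -> [('phantom.address', 'localhost:80'), ('console.enabled', 'true')]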
|
returns default configs list, from /etc and home dir
|
def get_default_configs():
""" returns default configs list, from /etc and home dir """
# initialize basic defaults
configs = [resource_filename(__name__, 'config/00-base.ini')]
baseconfigs_location = '/etc/yandex-tank'
try:
conf_files = sorted(os.listdir(baseconfigs_location))
for filename in conf_files:
if fnmatch.fnmatch(filename, '*.ini'):
configs += [
os.path.realpath(
baseconfigs_location + os.sep + filename)
]
except OSError:
logger.info(
baseconfigs_location + ' is not accessible to get configs list')
configs += [os.path.expanduser('~/.yandex-tank')]
return configs
|
clean markup from string
|
def clean_markup(self, orig_str):
''' clean markup from string '''
for val in [
self.YELLOW, self.RED, self.RESET, self.CYAN, self.BG_MAGENTA,
self.WHITE, self.BG_GREEN, self.GREEN, self.BG_BROWN,
self.RED_DARK, self.MAGENTA, self.BG_CYAN
]:
orig_str = orig_str.replace(val, '')
return orig_str
|
Parse duration string, such as '3h2m3s', into milliseconds
|
def parse_duration(duration):
'''
Parse duration string, such as '3h2m3s' into milliseconds
>>> parse_duration('3h2m3s')
10923000
>>> parse_duration('0.3s')
300
>>> parse_duration('5')
5000
'''
_re_token = re.compile("([0-9.]+)([dhms]?)")
def parse_token(time, multiplier):
multipliers = {
'd': 86400,
'h': 3600,
'm': 60,
's': 1,
}
if multiplier:
if multiplier in multipliers:
return int(float(time) * multipliers[multiplier] * 1000)
else:
raise StepperConfigurationError(
'Failed to parse duration: %s' % duration)
else:
return int(float(time) * 1000)
return sum(parse_token(*token) for token in _re_token.findall(duration))
|
>>> solve_quadratic(1.0, 2.0, 1.0) -> (-1.0, -1.0)
|
def solve_quadratic(a, b, c):
'''
>>> solve_quadratic(1.0, 2.0, 1.0)
(-1.0, -1.0)
'''
discRoot = math.sqrt((b * b) - 4 * a * c)
root1 = (-b - discRoot) / (2 * a)
root2 = (-b + discRoot) / (2 * a)
return (root1, root2)
|
rounds float to closest int. :rtype: int :param n: float
|
def proper_round(n):
"""
rounds float to closest int
:rtype: int
:param n: float
"""
return int(int(n) + (n / abs(n)) * int(abs(n - int(n)) >= 0.5)) if n != 0 else 0
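A few illustrative checks of the rounding behaviour (plain asserts, not part of the original module):
# halves are rounded away from zero
assert proper_round(2.5) == 3
assert proper_round(-2.5) == -3
assert proper_round(2.4) == 2
assert proper_round(0) == 0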
|
Start local agent
|
def start(self):
"""Start local agent"""
logger.info('Starting agent on localhost')
args = self.python.split() + [
os.path.join(
self.workdir,
self.AGENT_FILENAME),
'--telegraf',
self.path['TELEGRAF_LOCAL_PATH'],
'--host',
self.host]
if self.kill_old:
args.append(self.kill_old)
self.session = self.popen(args)
self.reader_thread = threading.Thread(target=self.read_buffer)
self.reader_thread.setDaemon(True)
return self.session
|
Remove agent's files from remote host
|
def uninstall(self):
"""
Remove agent's files from remote host
"""
if self.session:
logger.info('Waiting monitoring data...')
self.session.terminate()
self.session.wait()
self.session = None
log_filename = "agent_{host}.log".format(host="localhost")
data_filename = "agent_{host}.rawdata".format(host="localhost")
try:
logger.info('Saving monitoring artefacts from localhost')
copyfile(self.workdir + "/_agent.log", log_filename)
copyfile(self.workdir + "/monitoring.rawdata", data_filename)
logger.info('Deleting temp directory: %s', self.workdir)
rmtree(self.workdir)
except Exception:
logger.error("Exception while uninstalling agent", exc_info=True)
logger.info("Removing agent from: localhost")
return log_filename, data_filename
|
Create folder and copy agent and metrics scripts to remote host
|
def install(self):
"""Create folder and copy agent and metrics scripts to remote host"""
logger.info(
"Installing monitoring agent at %s@%s...",
self.username,
self.host)
# create remote temp dir
cmd = self.python + ' -c "import tempfile; print tempfile.mkdtemp();"'
logger.info("Creating temp dir on %s", self.host)
try:
out, errors, err_code = self.ssh.execute(cmd)
except Exception:
logger.error(
"Failed to install monitoring agent to %s",
self.host,
exc_info=True)
return None, None, None
if errors:
logger.error("[%s] error: '%s'", self.host, errors)
logger.error("Cancelling agent installation on %s", self.host)
return None, None, None
if err_code:
logger.error(
"Failed to create remote dir via SSH at %s@%s, code %s: %s" %
(self.username, self.host, err_code, out.strip()))
return None, None, None
remote_dir = out.strip()
if remote_dir:
self.path['AGENT_REMOTE_FOLDER'] = remote_dir
self.agent_remote_folder = remote_dir
logger.debug(
"Remote dir at %s:%s", self.host, self.path['AGENT_REMOTE_FOLDER'])
# create collector config
agent_config = self.config.create_collector_config(
self.path['AGENT_REMOTE_FOLDER'])
startup_config = self.config.create_startup_config()
customs_script = self.config.create_custom_exec_script()
# trying to detect os version/architecture and get information about telegraf client
# DO NOT DELETE indices in string format below. Python 2.6 does not
# support string formatting without indices
remote_cmd = 'import os; print os.path.isfile("' + self.path[
'TELEGRAF_REMOTE_PATH'] + '")'
cmd = self.python + ' -c \'{cmd}\''.format(cmd=remote_cmd)
remote_telegraf_exists = "False"
try:
out, err, err_code = self.ssh.execute(cmd)
except Exception:
logger.error(
"SSH execute error trying to check telegraf availability on host %s",
self.host,
exc_info=True)
else:
if err:
logger.error("[%s] error: '%s'", self.host, errors)
if out.strip():
remote_telegraf_exists = out.strip()
try:
if remote_telegraf_exists in "True":
logger.debug('Found telegraf client on %s..', self.host)
else:
logger.debug(
'Not found telegraf client on %s, trying to install from tank. Copying..',
self.host)
if os.path.isfile(self.path['TELEGRAF_LOCAL_PATH']):
self.ssh.send_file(
self.path['TELEGRAF_LOCAL_PATH'],
self.path['TELEGRAF_REMOTE_PATH'])
elif os.path.isfile("/usr/bin/telegraf"):
self.ssh.send_file(
'/usr/bin/telegraf', self.path['TELEGRAF_REMOTE_PATH'])
else:
logger.error(
'Telegraf binary not found neither on %s nor on localhost at specified path: %s\n'
'You can download telegraf binaries here: https://github.com/influxdata/telegraf\n'
'or install debian package: `telegraf`', self.host, self.path['TELEGRAF_LOCAL_PATH'])
return None, None, None
self.ssh.send_file(
os.path.join(
self.path['AGENT_LOCAL_FOLDER'],
self.AGENT_FILENAME),
os.path.join(
self.path['AGENT_REMOTE_FOLDER'],
self.AGENT_FILENAME))
self.ssh.send_file(
agent_config,
os.path.join(
self.path['AGENT_REMOTE_FOLDER'],
'agent.cfg'))
self.ssh.send_file(
startup_config,
os.path.join(
self.path['AGENT_REMOTE_FOLDER'],
'agent_startup.cfg'))
self.ssh.send_file(
customs_script,
os.path.join(
self.path['AGENT_REMOTE_FOLDER'],
'agent_customs.sh'))
except Exception:
logger.error(
"Failed to install agent on %s", self.host, exc_info=True)
return None, None, None
return agent_config, startup_config, customs_script
|
Start remote agent
|
def start(self):
"""Start remote agent"""
logger.info('Starting agent: %s', self.host)
command = "{python} {agent_path} --telegraf {telegraf_path} --host {host} {kill_old}".format(
python=self.python,
agent_path=os.path.join(
self.path['AGENT_REMOTE_FOLDER'],
self.AGENT_FILENAME),
telegraf_path=self.path['TELEGRAF_REMOTE_PATH'],
host=self.host,
kill_old=self.kill_old)
logger.debug('Command to start agent: %s', command)
self.session = self.ssh.async_session(command)
self.reader_thread = threading.Thread(target=self.read_buffer)
self.reader_thread.setDaemon(True)
return self.session
|
Remove agent's files from remote host
|
def uninstall(self):
"""
Remove agent's files from remote host
"""
log_filename = "agent_{host}.log".format(host=self.host)
data_filename = "agent_{host}.rawdata".format(host=self.host)
try:
if self.session:
self.session.send("stop\n")
self.session.close()
self.session = None
except BaseException:
logger.warning(
'Unable to correctly stop monitoring agent - session is broken. Pay attention to agent log (%s).',
log_filename,
exc_info=True)
else:
try:
self.ssh.get_file(
os.path.join(
self.path['AGENT_REMOTE_FOLDER'],
"_agent.log"),
log_filename)
self.ssh.get_file(
os.path.join(
self.path['AGENT_REMOTE_FOLDER'],
"monitoring.rawdata"),
data_filename)
self.ssh.rm_r(self.path['AGENT_REMOTE_FOLDER'])
except Exception:
logger.error("Unable to get agent artefacts", exc_info=True)
self._kill_agent()
return log_filename, data_filename
|
:type cfg_ini: ConfigParser
|
def parse_sections(cfg_ini):
"""
:type cfg_ini: ConfigParser
"""
return [Section(section.lower(),
guess_plugin(section.lower()),
without_defaults(cfg_ini, section))
for section in cfg_ini.sections()
if not re.match(CORE_SECTION_PATTERN, section.lower()) and section.lower() not in DEPRECATED_SECTIONS]
|
:type sections: list of Section :rtype: list of Section
|
def combine_sections(sections):
"""
:type sections: list of Section
:rtype: list of Section
"""
PLUGINS_TO_COMBINE = {
'Phantom': ('phantom', 'multi', True),
'Bfg': ('bfg', 'gun_config', False)
}
plugins = {}
ready_sections = []
for section in sections:
if section.plugin in PLUGINS_TO_COMBINE.keys():
try:
plugins[section.plugin].append(section)
except KeyError:
plugins[section.plugin] = [section]
else:
ready_sections.append(section)
for plugin_name, _sections in plugins.items():
if isinstance(_sections, list):
parent_name, child_name, is_list = PLUGINS_TO_COMBINE[plugin_name]
ready_sections.append(Section.from_multiple(_sections, parent_name, child_name, is_list))
return ready_sections
|
:rtype: {str: object}
|
def converted(self):
"""
:rtype: {str: object}
"""
if self._converted is None:
self._converted = self.converter(self.name, self.value)
return self._converted
|
:rtype: (str, object)
|
def as_tuple(self):
"""
:rtype: (str, object)
"""
if self._as_tuple is None:
self._as_tuple = self.converted.items()[0]
return self._as_tuple
|
:rtype: callable
|
def converter(self):
"""
:rtype: callable
"""
if self._converter is None:
try:
self._converter = self.SPECIAL_CONVERTERS[self.plugin][self.name]
except KeyError:
try:
self._converter = self._get_scheme_converter()
except UnknownOption:
self._converter = self.CONVERTERS_FOR_UNKNOWN.get(self.plugin, self.dummy_converter)
return self._converter
|
:type parent_name: str :type sections: list of Section
|
def from_multiple(cls, sections, parent_name=None, child_name=None, is_list=True):
"""
:type parent_name: str
:type sections: list of Section
"""
if len(sections) == 1:
return sections[0]
if parent_name:
master_section = filter(lambda section: section.name == parent_name, sections)[0]
rest = filter(lambda section: section.name != parent_name, sections)
else:
master_section = sections[0]
parent_name = master_section.name
rest = sections[1:]
child = {'multi': [section.get_cfg_dict(with_meta=False) for section in rest]} if is_list \
else {child_name: cls._select_one(master_section, rest).get_cfg_dict(with_meta=False)}
master_section.merged_options.update(child)
return master_section
|
Underlines content with '='. New lines and tabs will be replaced. :param str content: :param str new_line_replacement: :param str tab_replacement: :return: unicode
|
def title(content, new_line_replacement=' ', tab_replacement=' '):
"""
Underlines content with '='. New lines and tabs will be replaced
:param str content:
:param str new_line_replacement:
:param str tab_replacement:
:return: unicode
"""
prepared_content = content.strip().replace('\n', new_line_replacement).replace('\t', tab_replacement)
return u'{}\n{}'.format(prepared_content, '=' * len(prepared_content))
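An illustrative call (the heading text is made up):
# hypothetical usage
print(title('Monitoring\treport'))
# Monitoring report
# =================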
|
Searching for line in jmeter.log such as 'Waiting for possible shutdown message on port 4445'
|
def __discover_jmeter_udp_port(self):
"""Searching for line in jmeter.log such as
Waiting for possible shutdown message on port 4445
"""
r = re.compile(self.DISCOVER_PORT_PATTERN)
with open(self.process_stderr.name, 'r') as f:
cnt = 0
while self.process.pid and cnt < 10:
line = f.readline()
m = r.match(line)
if m is None:
cnt += 1
time.sleep(1)
else:
port = int(m.group('port'))
return port
else:
logger.warning('JMeter UDP port wasn\'t discovered')
return None
|
Genius idea by Alexey Lavrenyuk
|
def __add_jmeter_components(self, jmx, jtl, variables):
""" Genius idea by Alexey Lavrenyuk """
logger.debug("Original JMX: %s", os.path.realpath(jmx))
with open(jmx, 'r') as src_jmx:
source_lines = src_jmx.readlines()
try:
# In new Jmeter version (3.2 as example) WorkBench's plugin checkbox enabled by default
# It totally crashes Yandex tank injection and raises XML Parse Exception
closing = source_lines.pop(-1)
if "WorkBenchGui" in source_lines[-5]:
logger.info("WorkBench checkbox enabled...bypassing")
last_string_count = 6
else:
last_string_count = 2
while last_string_count > 0:
closing = source_lines.pop(-1) + closing
last_string_count -= 1
logger.debug("Closing statement: %s", closing)
except Exception as exc:
raise RuntimeError("Failed to find the end of JMX XML: %s" % exc)
udv_tpl = resource_string(__name__, 'config/jmeter_var_template.xml')
udv_set = []
for var_name, var_value in variables.iteritems():
udv_set.append(udv_tpl % (var_name, var_name, var_value))
udv = "\n".join(udv_set)
if self.jmeter_ver >= 2.13:
save_connect = '<connectTime>true</connectTime>'
else:
save_connect = ''
if self.ext_log in ['errors', 'all']:
level_map = {'errors': 'true', 'all': 'false'}
tpl_resource = 'jmeter_writer_ext.xml'
tpl_args = {
'jtl': self.jtl_file,
'udv': udv,
'ext_log': self.ext_log_file,
'ext_level': level_map[self.ext_log],
'save_connect': save_connect
}
else:
tpl_resource = 'jmeter_writer.xml'
tpl_args = {
'jtl': self.jtl_file,
'udv': udv,
'save_connect': save_connect
}
tpl = resource_string(__name__, 'config/' + tpl_resource)
try:
new_jmx = self.core.mkstemp(
'.jmx', 'modified_', os.path.dirname(os.path.realpath(jmx)))
except OSError as exc:
logger.debug("Can't create modified jmx near original: %s", exc)
new_jmx = self.core.mkstemp('.jmx', 'modified_')
logger.debug("Modified JMX: %s", new_jmx)
with open(new_jmx, "wb") as fh:
fh.write(''.join(source_lines))
fh.write(tpl % tpl_args)
fh.write(closing)
return new_jmx
|
Graceful termination of running process
|
def __terminate(self):
"""Gracefull termination of running process"""
if self.__stderr_file:
self.__stderr_file.close()
if not self.__process:
return
waitfor = time.time() + _PROCESS_KILL_TIMEOUT
while time.time() < waitfor:
try:
self.__process.terminate()
except EnvironmentError as e:
if e.errno != errno.ESRCH:
_LOGGER.warning("Failed to terminate process '{}': {}".format(self.__cmd, e))
return
time.sleep(0.1)
try:
self.__process.kill()
except EnvironmentError as e:
if e.errno != errno.ESRCH:
_LOGGER.warning("Failed to kill process '{}': {}".format(self.__cmd, e))
return
|
Parse lines and return stats
|
def _read_data(self, lines):
"""
Parse lines and return stats
"""
results = []
for line in lines:
timestamp, rps, instances = line.split("\t")
curr_ts = int(float(timestamp)) # We allow floats here, but tank expects only seconds
if self.__last_ts < curr_ts:
self.__last_ts = curr_ts
results.append(self.stats_item(self.__last_ts, float(rps), float(instances)))
return results
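An illustrative view of the expected input (the sample lines are made up):
# hypothetical stats lines: "<unix_ts>\t<rps>\t<instances>"; timestamps are truncated to
# whole seconds, so only the first line seen for a given second yields a stats item
sample = ["1500000000.0\t10\t1", "1500000000.5\t12\t1", "1500000001\t20\t2"]
ts, rps, instances = sample[0].split("\t")  # -> ('1500000000.0', '10', '1')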
|
instantiate criterion from config string
|
def __create_criterion(self, criterion_str):
""" instantiate criterion from config string """
parsed = criterion_str.split("(")
type_str = parsed[0].strip().lower()
parsed[1] = parsed[1].split(")")[0].strip()
for criterion_class in self.custom_criterions:
if criterion_class.get_type_string() == type_str:
return criterion_class(self, parsed[1])
raise ValueError(
"Unsupported autostop criterion type: %s" % criterion_str)
|
Prepare config data.
|
def getconfig(self, filename, target_hint):
"""Prepare config data."""
try:
tree = self.parse_xml(filename)
except IOError as exc:
logger.error("Error loading config: %s", exc)
raise RuntimeError("Can't read monitoring config %s" % filename)
hosts = tree.findall('Host')
config = []
for host in hosts:
host_config = self.get_host_config(host, target_hint)
config.append(host_config)
return config
|
Startup and shutdown commands config. Used by agent.py on the target
|
def create_startup_config(self):
""" Startup and shutdown commands config
Used by agent.py on the target
"""
cfg_path = "agent_startup_{}.cfg".format(self.host)
if os.path.isfile(cfg_path):
logger.info(
'Found agent startup config file in working directory with the same name as created for host %s.\n'
'Creating new one via tempfile. This will affect predictable filenames for agent artefacts',
self.host)
handle, cfg_path = tempfile.mkstemp('.cfg', 'agent_')
os.close(handle)
try:
config = ConfigParser.RawConfigParser()
# FIXME incinerate such a string formatting inside a method call
# T_T
config.add_section('startup')
[
config.set('startup', "cmd%s" % idx, cmd)
for idx, cmd in enumerate(self.startups)
]
config.add_section('shutdown')
[
config.set('shutdown', "cmd%s" % idx, cmd)
for idx, cmd in enumerate(self.shutdowns)
]
config.add_section('source')
[
config.set('source', "file%s" % idx, path)
for idx, path in enumerate(self.sources)
]
with open(cfg_path, 'w') as fds:
config.write(fds)
except Exception as exc:
logger.error(
'Error trying to create monitoring startups config. Malformed? %s',
exc,
exc_info=True)
return cfg_path
|
bash script w/ custom commands inside, inspired by half a night trying to avoid escaping bash special characters
|
def create_custom_exec_script(self):
""" bash script w/ custom commands inside
inspired by half a night trying to avoid escaping bash special characters
"""
cfg_path = "agent_customs_{}.cfg".format(self.host)
if os.path.isfile(cfg_path):
logger.info(
'Found agent custom execs config file in working directory with the same name as created for host %s.\n'
'Creating new one via tempfile. This will affect predictable filenames for agent artefacts',
self.host)
handle, cfg_path = tempfile.mkstemp('.sh', 'agent_customs_')
os.close(handle)
cmds = ""
for idx, cmd in enumerate(self.custom):
cmds += "-{idx}) {cmd};;\n".format(idx=idx, cmd=cmd['cmd'])
customs_script = """
#!/bin/sh
while :
do
case "$1" in
{cmds}
*) break;;
esac
shift
done
""".format(cmds=cmds)
with open(cfg_path, 'w') as fds:
fds.write(customs_script)
return cfg_path
|
Telegraf collector config, toml format
|
def create_collector_config(self, workdir):
""" Telegraf collector config,
toml format
"""
cfg_path = "agent_collector_{}.cfg".format(self.host)
if os.path.isfile(cfg_path):
logger.info(
'Found agent config file in working directory with the same name as created for host %s.\n'
'Creating new one via tempfile. This will affect predictable filenames for agent artefacts',
self.host)
handle, cfg_path = tempfile.mkstemp('.cfg', 'agent_collector_')
os.close(handle)
self.monitoring_data_output = "{remote_folder}/monitoring.rawdata".format(
remote_folder=workdir)
defaults_old_enabled = ['CPU', 'Memory', 'Disk', 'Net', 'System']
try:
config = ConfigParser.RawConfigParser()
config.add_section("global_tags")
config.add_section("agent")
config.set(
"agent",
"interval",
"'{interval}s'".format(interval=self.interval))
config.set("agent", "round_interval", "true")
config.set("agent", "flush_interval", "'1s'")
config.set("agent", "collection_jitter", "'0s'")
config.set("agent", "flush_jitter", "'1s'")
for section in self.host_config.keys():
# telegraf-style config
if not self.old_style_configs:
config.add_section(
"{section_name}".format(
section_name=self.host_config[section]['name']))
for key, value in iteritems(self.host_config[section]):
if key != 'name':
config.set(
"{section_name}".format(
section_name=self.host_config[section][
'name']),
"{key}".format(key=key),
"{value}".format(value=value))
# monitoring-style config
else:
if section in defaults_old_enabled:
config.add_section(
"{section_name}".format(
section_name=self.host_config[section]['name']))
for key, value in iteritems(self.host_config[section]):
if key in [
'fielddrop', 'fieldpass', 'percpu',
'devices', 'interfaces'
]:
config.set(
"{section_name}".format(
section_name=self.host_config[section][
'name']),
"{key}".format(key=key),
"{value}".format(value=value))
# outputs
config.add_section("[outputs.file]")
config.set(
"[outputs.file]",
"files",
"['{config}']".format(config=self.monitoring_data_output))
config.set("[outputs.file]", "data_format", "'json'")
with open(cfg_path, 'w') as fds:
config.write(fds)
# dirty hack, this allow to avoid bash escape quoting, we're pushing shell script w/ arguments
# index of argument is index of custom metric in our config
inputs = ""
for idx, cmd in enumerate(self.custom):
inputs += "[[inputs.exec]]\n"
inputs += "commands = ['/bin/sh {workdir}/agent_customs.sh -{idx}']\n".format(
workdir=workdir, idx=idx)
inputs += "data_format = 'value'\n"
inputs += "data_type = 'float'\n"
inputs += "name_prefix = '{}_'\n\n".format(cmd.get('label'))
if cmd['diff']:
decoder.diff_metrics['custom'].append(
decoder.find_common_names(cmd.get('label')))
with open(cfg_path, 'a') as fds:
fds.write(inputs)
# telegraf raw configuration into xml
telegraf_raw = ""
for element in self.telegrafraw:
telegraf_raw += element
with open(cfg_path, 'a') as fds:
fds.write(telegraf_raw)
except Exception as exc:
logger.error(
'Error trying to create monitoring config. Malformed? %s',
exc,
exc_info=True)
return cfg_path
|
raise exception on disk space exceeded
|
def __check_disk(self):
''' raise exception on disk space exceeded '''
cmd = "sh -c \"df --no-sync -m -P -l -x fuse -x tmpfs -x devtmpfs -x davfs -x nfs "
cmd += self.core.artifacts_base_dir
cmd += " | tail -n 1 | awk '{print \$4}' \""
res = execute(cmd, True, 0.1, True)
logging.debug("Result: %s", res)
if not len(res[1]):
self.log.debug("No disk usage info: %s", res[2])
return
disk_free = res[1]
self.log.debug(
"Disk free space: %s/%s", disk_free.strip(), self.disk_limit)
if int(disk_free.strip()) < self.disk_limit:
raise RuntimeError(
"Not enough local resources: disk space less than %sMB in %s: %sMB"
% (
self.disk_limit, self.core.artifacts_base_dir,
int(disk_free.strip())))
|
raise exception on RAM exceeded
|
def __check_mem(self):
''' raise exception on RAM exceeded '''
mem_free = psutil.virtual_memory().available / 2**20
self.log.debug("Memory free: %s/%s", mem_free, self.mem_limit)
if mem_free < self.mem_limit:
raise RuntimeError(
"Not enough resources: free memory less "
"than %sMB: %sMB" % (self.mem_limit, mem_free))
|
Gets width and height of terminal viewport
|
def get_terminal_size():
'''
Gets width and height of terminal viewport
'''
default_size = (30, 120)
env = os.environ
def ioctl_gwinsz(file_d):
'''
Helper to get console size
'''
try:
sizes = struct.unpack(
'hh', fcntl.ioctl(file_d, termios.TIOCGWINSZ, '1234'))
except Exception:
sizes = default_size
return sizes
sizes = ioctl_gwinsz(0) or ioctl_gwinsz(1) or ioctl_gwinsz(2)
if not sizes:
try:
file_d = os.open(os.ctermid(), os.O_RDONLY)
sizes = ioctl_gwinsz(file_d)
os.close(file_d)
except Exception:
pass
if not sizes:
try:
sizes = (env['LINES'], env['COLUMNS'])
except Exception:
sizes = default_size
return int(sizes[1]), int(sizes[0])
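A simple usage sketch:
# hypothetical usage; returns (width, height) and falls back to 120x30 without a tty
width, height = get_terminal_size()
print('console viewport: %sx%s' % (width, height))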
|
Gets next line for right panel
|
def __get_right_line(self, widget_output):
''' Gets next line for right panel '''
right_line = ''
if widget_output:
right_line = widget_output.pop(0)
if len(right_line) > self.right_panel_width:
right_line_plain = self.markup.clean_markup(right_line)
if len(right_line_plain) > self.right_panel_width:
right_line = right_line[:self.right_panel_width] + self.markup.RESET
return right_line
|
Cut tuple of line chunks according to its visible length
|
def __truncate(self, line_arr, max_width):
''' Cut tuple of line chunks according to its visible length '''
def is_space(chunk):
return all(i == ' ' for i in chunk)
def is_empty(chunks, markups):
result = []
for chunk in chunks:
if chunk in markups:
result.append(True)
elif is_space(chunk):
result.append(True)
else:
result.append(False)
return all(result)
left = max_width
result = ''
markups = self.markup.get_markup_vars()
for num, chunk in enumerate(line_arr):
if chunk in markups:
result += chunk
else:
if left > 0:
if len(chunk) <= left:
result += chunk
left -= len(chunk)
else:
leftover = (chunk[left:],) + line_arr[num + 1:]
was_cut = not is_empty(leftover, markups)
if was_cut:
result += chunk[:left - 1] + self.markup.RESET + u'\u2026'
else:
result += chunk[:left]
left = 0
return result
|