partition
stringclasses 3
values | func_name
stringlengths 1
134
| docstring
stringlengths 1
46.9k
| path
stringlengths 4
223
| original_string
stringlengths 75
104k
| code
stringlengths 75
104k
| docstring_tokens
listlengths 1
1.97k
| repo
stringlengths 7
55
| language
stringclasses 1
value | url
stringlengths 87
315
| code_tokens
listlengths 19
28.4k
| sha
stringlengths 40
40
|
|---|---|---|---|---|---|---|---|---|---|---|---|
test
|
QuantumChannel._init_transformer
|
Convert input into a QuantumChannel subclass object or Operator object
|
qiskit/quantum_info/operators/channel/quantum_channel.py
|
def _init_transformer(cls, data):
"""Convert input into a QuantumChannel subclass object or Operator object"""
# This handles common conversion for all QuantumChannel subclasses.
# If the input is already a QuantumChannel subclass it will return
# the original object
if isinstance(data, QuantumChannel):
return data
if hasattr(data, 'to_quantumchannel'):
# If the data object is not a QuantumChannel it will give
# preference to a 'to_quantumchannel' attribute that allows
# an arbitrary object to define its own conversion to any
# quantum channel subclass.
return data.to_channel()
if hasattr(data, 'to_channel'):
# TODO: this 'to_channel' method is the same case as the above
# but is used by current version of Aer. It should be removed
# once Aer is nupdated to use `to_quantumchannel`
# instead of `to_channel`,
return data.to_channel()
# Finally if the input is not a QuantumChannel and doesn't have a
# 'to_quantumchannel' conversion method we try and initialize it as a
# regular matrix Operator which can be converted into a QuantumChannel.
return Operator(data)
|
def _init_transformer(cls, data):
"""Convert input into a QuantumChannel subclass object or Operator object"""
# This handles common conversion for all QuantumChannel subclasses.
# If the input is already a QuantumChannel subclass it will return
# the original object
if isinstance(data, QuantumChannel):
return data
if hasattr(data, 'to_quantumchannel'):
# If the data object is not a QuantumChannel it will give
# preference to a 'to_quantumchannel' attribute that allows
# an arbitrary object to define its own conversion to any
# quantum channel subclass.
return data.to_channel()
if hasattr(data, 'to_channel'):
# TODO: this 'to_channel' method is the same case as the above
# but is used by current version of Aer. It should be removed
# once Aer is nupdated to use `to_quantumchannel`
# instead of `to_channel`,
return data.to_channel()
# Finally if the input is not a QuantumChannel and doesn't have a
# 'to_quantumchannel' conversion method we try and initialize it as a
# regular matrix Operator which can be converted into a QuantumChannel.
return Operator(data)
|
[
"Convert",
"input",
"into",
"a",
"QuantumChannel",
"subclass",
"object",
"or",
"Operator",
"object"
] |
Qiskit/qiskit-terra
|
python
|
https://github.com/Qiskit/qiskit-terra/blob/d4f58d903bc96341b816f7c35df936d6421267d1/qiskit/quantum_info/operators/channel/quantum_channel.py#L130-L152
|
[
"def",
"_init_transformer",
"(",
"cls",
",",
"data",
")",
":",
"# This handles common conversion for all QuantumChannel subclasses.",
"# If the input is already a QuantumChannel subclass it will return",
"# the original object",
"if",
"isinstance",
"(",
"data",
",",
"QuantumChannel",
")",
":",
"return",
"data",
"if",
"hasattr",
"(",
"data",
",",
"'to_quantumchannel'",
")",
":",
"# If the data object is not a QuantumChannel it will give",
"# preference to a 'to_quantumchannel' attribute that allows",
"# an arbitrary object to define its own conversion to any",
"# quantum channel subclass.",
"return",
"data",
".",
"to_channel",
"(",
")",
"if",
"hasattr",
"(",
"data",
",",
"'to_channel'",
")",
":",
"# TODO: this 'to_channel' method is the same case as the above",
"# but is used by current version of Aer. It should be removed",
"# once Aer is nupdated to use `to_quantumchannel`",
"# instead of `to_channel`,",
"return",
"data",
".",
"to_channel",
"(",
")",
"# Finally if the input is not a QuantumChannel and doesn't have a",
"# 'to_quantumchannel' conversion method we try and initialize it as a",
"# regular matrix Operator which can be converted into a QuantumChannel.",
"return",
"Operator",
"(",
"data",
")"
] |
d4f58d903bc96341b816f7c35df936d6421267d1
|
test
|
CheckCnotDirection.run
|
If `dag` is mapped and the direction is correct the property
`is_direction_mapped` is set to True (or to False otherwise).
Args:
dag (DAGCircuit): DAG to check.
|
qiskit/transpiler/passes/mapping/check_cnot_direction.py
|
def run(self, dag):
"""
If `dag` is mapped and the direction is correct the property
`is_direction_mapped` is set to True (or to False otherwise).
Args:
dag (DAGCircuit): DAG to check.
"""
if self.layout is None:
if self.property_set["layout"]:
self.layout = self.property_set["layout"]
else:
self.layout = Layout.generate_trivial_layout(*dag.qregs.values())
self.property_set['is_direction_mapped'] = True
edges = self.coupling_map.get_edges()
for gate in dag.twoQ_gates():
physical_q0 = self.layout[gate.qargs[0]]
physical_q1 = self.layout[gate.qargs[1]]
if isinstance(gate.op, (CXBase, CnotGate)) and (
physical_q0, physical_q1) not in edges:
self.property_set['is_direction_mapped'] = False
return
|
def run(self, dag):
"""
If `dag` is mapped and the direction is correct the property
`is_direction_mapped` is set to True (or to False otherwise).
Args:
dag (DAGCircuit): DAG to check.
"""
if self.layout is None:
if self.property_set["layout"]:
self.layout = self.property_set["layout"]
else:
self.layout = Layout.generate_trivial_layout(*dag.qregs.values())
self.property_set['is_direction_mapped'] = True
edges = self.coupling_map.get_edges()
for gate in dag.twoQ_gates():
physical_q0 = self.layout[gate.qargs[0]]
physical_q1 = self.layout[gate.qargs[1]]
if isinstance(gate.op, (CXBase, CnotGate)) and (
physical_q0, physical_q1) not in edges:
self.property_set['is_direction_mapped'] = False
return
|
[
"If",
"dag",
"is",
"mapped",
"and",
"the",
"direction",
"is",
"correct",
"the",
"property",
"is_direction_mapped",
"is",
"set",
"to",
"True",
"(",
"or",
"to",
"False",
"otherwise",
")",
"."
] |
Qiskit/qiskit-terra
|
python
|
https://github.com/Qiskit/qiskit-terra/blob/d4f58d903bc96341b816f7c35df936d6421267d1/qiskit/transpiler/passes/mapping/check_cnot_direction.py#L37-L61
|
[
"def",
"run",
"(",
"self",
",",
"dag",
")",
":",
"if",
"self",
".",
"layout",
"is",
"None",
":",
"if",
"self",
".",
"property_set",
"[",
"\"layout\"",
"]",
":",
"self",
".",
"layout",
"=",
"self",
".",
"property_set",
"[",
"\"layout\"",
"]",
"else",
":",
"self",
".",
"layout",
"=",
"Layout",
".",
"generate_trivial_layout",
"(",
"*",
"dag",
".",
"qregs",
".",
"values",
"(",
")",
")",
"self",
".",
"property_set",
"[",
"'is_direction_mapped'",
"]",
"=",
"True",
"edges",
"=",
"self",
".",
"coupling_map",
".",
"get_edges",
"(",
")",
"for",
"gate",
"in",
"dag",
".",
"twoQ_gates",
"(",
")",
":",
"physical_q0",
"=",
"self",
".",
"layout",
"[",
"gate",
".",
"qargs",
"[",
"0",
"]",
"]",
"physical_q1",
"=",
"self",
".",
"layout",
"[",
"gate",
".",
"qargs",
"[",
"1",
"]",
"]",
"if",
"isinstance",
"(",
"gate",
".",
"op",
",",
"(",
"CXBase",
",",
"CnotGate",
")",
")",
"and",
"(",
"physical_q0",
",",
"physical_q1",
")",
"not",
"in",
"edges",
":",
"self",
".",
"property_set",
"[",
"'is_direction_mapped'",
"]",
"=",
"False",
"return"
] |
d4f58d903bc96341b816f7c35df936d6421267d1
|
test
|
sort_enum_for_model
|
Create Graphene Enum for sorting a SQLAlchemy class query
Parameters
- cls : Sqlalchemy model class
Model used to create the sort enumerator
- name : str, optional, default None
Name to use for the enumerator. If not provided it will be set to `cls.__name__ + 'SortEnum'`
- symbol_name : function, optional, default `_symbol_name`
Function which takes the column name and a boolean indicating if the sort direction is ascending,
and returns the symbol name for the current column and sort direction.
The default function will create, for a column named 'foo', the symbols 'foo_asc' and 'foo_desc'
Returns
- Enum
The Graphene enumerator
|
graphene_sqlalchemy/utils.py
|
def sort_enum_for_model(cls, name=None, symbol_name=_symbol_name):
"""Create Graphene Enum for sorting a SQLAlchemy class query
Parameters
- cls : Sqlalchemy model class
Model used to create the sort enumerator
- name : str, optional, default None
Name to use for the enumerator. If not provided it will be set to `cls.__name__ + 'SortEnum'`
- symbol_name : function, optional, default `_symbol_name`
Function which takes the column name and a boolean indicating if the sort direction is ascending,
and returns the symbol name for the current column and sort direction.
The default function will create, for a column named 'foo', the symbols 'foo_asc' and 'foo_desc'
Returns
- Enum
The Graphene enumerator
"""
enum, _ = _sort_enum_for_model(cls, name, symbol_name)
return enum
|
def sort_enum_for_model(cls, name=None, symbol_name=_symbol_name):
"""Create Graphene Enum for sorting a SQLAlchemy class query
Parameters
- cls : Sqlalchemy model class
Model used to create the sort enumerator
- name : str, optional, default None
Name to use for the enumerator. If not provided it will be set to `cls.__name__ + 'SortEnum'`
- symbol_name : function, optional, default `_symbol_name`
Function which takes the column name and a boolean indicating if the sort direction is ascending,
and returns the symbol name for the current column and sort direction.
The default function will create, for a column named 'foo', the symbols 'foo_asc' and 'foo_desc'
Returns
- Enum
The Graphene enumerator
"""
enum, _ = _sort_enum_for_model(cls, name, symbol_name)
return enum
|
[
"Create",
"Graphene",
"Enum",
"for",
"sorting",
"a",
"SQLAlchemy",
"class",
"query"
] |
graphql-python/graphene-sqlalchemy
|
python
|
https://github.com/graphql-python/graphene-sqlalchemy/blob/e362e3fc4993d7e95873044bb4d00185b1d3dd8b/graphene_sqlalchemy/utils.py#L82-L100
|
[
"def",
"sort_enum_for_model",
"(",
"cls",
",",
"name",
"=",
"None",
",",
"symbol_name",
"=",
"_symbol_name",
")",
":",
"enum",
",",
"_",
"=",
"_sort_enum_for_model",
"(",
"cls",
",",
"name",
",",
"symbol_name",
")",
"return",
"enum"
] |
e362e3fc4993d7e95873044bb4d00185b1d3dd8b
|
test
|
sort_argument_for_model
|
Returns a Graphene argument for the sort field that accepts a list of sorting directions for a model.
If `has_default` is True (the default) it will sort the result by the primary key(s)
|
graphene_sqlalchemy/utils.py
|
def sort_argument_for_model(cls, has_default=True):
"""Returns a Graphene argument for the sort field that accepts a list of sorting directions for a model.
If `has_default` is True (the default) it will sort the result by the primary key(s)
"""
enum, default = _sort_enum_for_model(cls)
if not has_default:
default = None
return Argument(List(enum), default_value=default)
|
def sort_argument_for_model(cls, has_default=True):
"""Returns a Graphene argument for the sort field that accepts a list of sorting directions for a model.
If `has_default` is True (the default) it will sort the result by the primary key(s)
"""
enum, default = _sort_enum_for_model(cls)
if not has_default:
default = None
return Argument(List(enum), default_value=default)
|
[
"Returns",
"a",
"Graphene",
"argument",
"for",
"the",
"sort",
"field",
"that",
"accepts",
"a",
"list",
"of",
"sorting",
"directions",
"for",
"a",
"model",
".",
"If",
"has_default",
"is",
"True",
"(",
"the",
"default",
")",
"it",
"will",
"sort",
"the",
"result",
"by",
"the",
"primary",
"key",
"(",
"s",
")"
] |
graphql-python/graphene-sqlalchemy
|
python
|
https://github.com/graphql-python/graphene-sqlalchemy/blob/e362e3fc4993d7e95873044bb4d00185b1d3dd8b/graphene_sqlalchemy/utils.py#L103-L110
|
[
"def",
"sort_argument_for_model",
"(",
"cls",
",",
"has_default",
"=",
"True",
")",
":",
"enum",
",",
"default",
"=",
"_sort_enum_for_model",
"(",
"cls",
")",
"if",
"not",
"has_default",
":",
"default",
"=",
"None",
"return",
"Argument",
"(",
"List",
"(",
"enum",
")",
",",
"default_value",
"=",
"default",
")"
] |
e362e3fc4993d7e95873044bb4d00185b1d3dd8b
|
test
|
search_dates
|
Find all substrings of the given string which represent date and/or time and parse them.
:param text:
A string in a natural language which may contain date and/or time expressions.
:type text: str|unicode
:param languages:
A list of two letters language codes.e.g. ['en', 'es']. If languages are given, it will
not attempt to detect the language.
:type languages: list
:param settings:
Configure customized behavior using settings defined in :mod:`dateparser.conf.Settings`.
:type settings: dict
:param add_detected_language:
Indicates if we want the detected language returned in the tuple.
:type add_detected_language: bool
:return: Returns list of tuples containing:
substrings representing date and/or time, corresponding :mod:`datetime.datetime`
object and detected language if *add_detected_language* is True.
Returns None if no dates that can be parsed are found.
:rtype: list
:raises: ValueError - Unknown Language
>>> from dateparser.search import search_dates
>>> search_dates('The first artificial Earth satellite was launched on 4 October 1957.')
[('on 4 October 1957', datetime.datetime(1957, 10, 4, 0, 0))]
>>> search_dates('The first artificial Earth satellite was launched on 4 October 1957.', add_detected_language=True)
[('on 4 October 1957', datetime.datetime(1957, 10, 4, 0, 0), 'en')]
>>> search_dates("The client arrived to the office for the first time in March 3rd, 2004 and got serviced, after a couple of months, on May 6th 2004, the customer returned indicating a defect on the part")
[('in March 3rd, 2004 and', datetime.datetime(2004, 3, 3, 0, 0)),
('on May 6th 2004', datetime.datetime(2004, 5, 6, 0, 0))]
|
dateparser/search/__init__.py
|
def search_dates(text, languages=None, settings=None, add_detected_language=False):
"""Find all substrings of the given string which represent date and/or time and parse them.
:param text:
A string in a natural language which may contain date and/or time expressions.
:type text: str|unicode
:param languages:
A list of two letters language codes.e.g. ['en', 'es']. If languages are given, it will
not attempt to detect the language.
:type languages: list
:param settings:
Configure customized behavior using settings defined in :mod:`dateparser.conf.Settings`.
:type settings: dict
:param add_detected_language:
Indicates if we want the detected language returned in the tuple.
:type add_detected_language: bool
:return: Returns list of tuples containing:
substrings representing date and/or time, corresponding :mod:`datetime.datetime`
object and detected language if *add_detected_language* is True.
Returns None if no dates that can be parsed are found.
:rtype: list
:raises: ValueError - Unknown Language
>>> from dateparser.search import search_dates
>>> search_dates('The first artificial Earth satellite was launched on 4 October 1957.')
[('on 4 October 1957', datetime.datetime(1957, 10, 4, 0, 0))]
>>> search_dates('The first artificial Earth satellite was launched on 4 October 1957.', add_detected_language=True)
[('on 4 October 1957', datetime.datetime(1957, 10, 4, 0, 0), 'en')]
>>> search_dates("The client arrived to the office for the first time in March 3rd, 2004 and got serviced, after a couple of months, on May 6th 2004, the customer returned indicating a defect on the part")
[('in March 3rd, 2004 and', datetime.datetime(2004, 3, 3, 0, 0)),
('on May 6th 2004', datetime.datetime(2004, 5, 6, 0, 0))]
"""
result = _search_with_detection.search_dates(
text=text, languages=languages, settings=settings
)
language, dates = result.get('Language'), result.get('Dates')
if dates:
if add_detected_language:
dates = [date + (language, ) for date in dates]
return dates
|
def search_dates(text, languages=None, settings=None, add_detected_language=False):
"""Find all substrings of the given string which represent date and/or time and parse them.
:param text:
A string in a natural language which may contain date and/or time expressions.
:type text: str|unicode
:param languages:
A list of two letters language codes.e.g. ['en', 'es']. If languages are given, it will
not attempt to detect the language.
:type languages: list
:param settings:
Configure customized behavior using settings defined in :mod:`dateparser.conf.Settings`.
:type settings: dict
:param add_detected_language:
Indicates if we want the detected language returned in the tuple.
:type add_detected_language: bool
:return: Returns list of tuples containing:
substrings representing date and/or time, corresponding :mod:`datetime.datetime`
object and detected language if *add_detected_language* is True.
Returns None if no dates that can be parsed are found.
:rtype: list
:raises: ValueError - Unknown Language
>>> from dateparser.search import search_dates
>>> search_dates('The first artificial Earth satellite was launched on 4 October 1957.')
[('on 4 October 1957', datetime.datetime(1957, 10, 4, 0, 0))]
>>> search_dates('The first artificial Earth satellite was launched on 4 October 1957.', add_detected_language=True)
[('on 4 October 1957', datetime.datetime(1957, 10, 4, 0, 0), 'en')]
>>> search_dates("The client arrived to the office for the first time in March 3rd, 2004 and got serviced, after a couple of months, on May 6th 2004, the customer returned indicating a defect on the part")
[('in March 3rd, 2004 and', datetime.datetime(2004, 3, 3, 0, 0)),
('on May 6th 2004', datetime.datetime(2004, 5, 6, 0, 0))]
"""
result = _search_with_detection.search_dates(
text=text, languages=languages, settings=settings
)
language, dates = result.get('Language'), result.get('Dates')
if dates:
if add_detected_language:
dates = [date + (language, ) for date in dates]
return dates
|
[
"Find",
"all",
"substrings",
"of",
"the",
"given",
"string",
"which",
"represent",
"date",
"and",
"/",
"or",
"time",
"and",
"parse",
"them",
"."
] |
scrapinghub/dateparser
|
python
|
https://github.com/scrapinghub/dateparser/blob/11a761c99d3ee522a3c63756b70c106a579e8b5c/dateparser/search/__init__.py#L9-L56
|
[
"def",
"search_dates",
"(",
"text",
",",
"languages",
"=",
"None",
",",
"settings",
"=",
"None",
",",
"add_detected_language",
"=",
"False",
")",
":",
"result",
"=",
"_search_with_detection",
".",
"search_dates",
"(",
"text",
"=",
"text",
",",
"languages",
"=",
"languages",
",",
"settings",
"=",
"settings",
")",
"language",
",",
"dates",
"=",
"result",
".",
"get",
"(",
"'Language'",
")",
",",
"result",
".",
"get",
"(",
"'Dates'",
")",
"if",
"dates",
":",
"if",
"add_detected_language",
":",
"dates",
"=",
"[",
"date",
"+",
"(",
"language",
",",
")",
"for",
"date",
"in",
"dates",
"]",
"return",
"dates"
] |
11a761c99d3ee522a3c63756b70c106a579e8b5c
|
test
|
patch_strptime
|
Monkey patching _strptime to avoid problems related with non-english
locale changes on the system.
For example, if system's locale is set to fr_FR. Parser won't recognize
any date since all languages are translated to english dates.
|
dateparser/utils/strptime.py
|
def patch_strptime():
"""Monkey patching _strptime to avoid problems related with non-english
locale changes on the system.
For example, if system's locale is set to fr_FR. Parser won't recognize
any date since all languages are translated to english dates.
"""
_strptime = imp.load_module(
'strptime_patched', *imp.find_module('_strptime')
)
_calendar = imp.load_module(
'calendar_patched', *imp.find_module('_strptime')
)
_strptime._getlang = lambda: ('en_US', 'UTF-8')
_strptime.calendar = _calendar
_strptime.calendar.day_abbr = [
'mon', 'tue', 'wed', 'thu', 'fri', 'sat', 'sun'
]
_strptime.calendar.day_name = [
'monday', 'tuesday', 'wednesday', 'thursday',
'friday', 'saturday', 'sunday'
]
_strptime.calendar.month_abbr = [
'', 'jan', 'feb', 'mar', 'apr', 'may', 'jun',
'jul', 'aug', 'sep', 'oct', 'nov', 'dec'
]
_strptime.calendar.month_name = [
'', 'january', 'february', 'march', 'april',
'may', 'june', 'july', 'august', 'september',
'october', 'november', 'december'
]
return _strptime._strptime_time
|
def patch_strptime():
"""Monkey patching _strptime to avoid problems related with non-english
locale changes on the system.
For example, if system's locale is set to fr_FR. Parser won't recognize
any date since all languages are translated to english dates.
"""
_strptime = imp.load_module(
'strptime_patched', *imp.find_module('_strptime')
)
_calendar = imp.load_module(
'calendar_patched', *imp.find_module('_strptime')
)
_strptime._getlang = lambda: ('en_US', 'UTF-8')
_strptime.calendar = _calendar
_strptime.calendar.day_abbr = [
'mon', 'tue', 'wed', 'thu', 'fri', 'sat', 'sun'
]
_strptime.calendar.day_name = [
'monday', 'tuesday', 'wednesday', 'thursday',
'friday', 'saturday', 'sunday'
]
_strptime.calendar.month_abbr = [
'', 'jan', 'feb', 'mar', 'apr', 'may', 'jun',
'jul', 'aug', 'sep', 'oct', 'nov', 'dec'
]
_strptime.calendar.month_name = [
'', 'january', 'february', 'march', 'april',
'may', 'june', 'july', 'august', 'september',
'october', 'november', 'december'
]
return _strptime._strptime_time
|
[
"Monkey",
"patching",
"_strptime",
"to",
"avoid",
"problems",
"related",
"with",
"non",
"-",
"english",
"locale",
"changes",
"on",
"the",
"system",
"."
] |
scrapinghub/dateparser
|
python
|
https://github.com/scrapinghub/dateparser/blob/11a761c99d3ee522a3c63756b70c106a579e8b5c/dateparser/utils/strptime.py#L17-L52
|
[
"def",
"patch_strptime",
"(",
")",
":",
"_strptime",
"=",
"imp",
".",
"load_module",
"(",
"'strptime_patched'",
",",
"*",
"imp",
".",
"find_module",
"(",
"'_strptime'",
")",
")",
"_calendar",
"=",
"imp",
".",
"load_module",
"(",
"'calendar_patched'",
",",
"*",
"imp",
".",
"find_module",
"(",
"'_strptime'",
")",
")",
"_strptime",
".",
"_getlang",
"=",
"lambda",
":",
"(",
"'en_US'",
",",
"'UTF-8'",
")",
"_strptime",
".",
"calendar",
"=",
"_calendar",
"_strptime",
".",
"calendar",
".",
"day_abbr",
"=",
"[",
"'mon'",
",",
"'tue'",
",",
"'wed'",
",",
"'thu'",
",",
"'fri'",
",",
"'sat'",
",",
"'sun'",
"]",
"_strptime",
".",
"calendar",
".",
"day_name",
"=",
"[",
"'monday'",
",",
"'tuesday'",
",",
"'wednesday'",
",",
"'thursday'",
",",
"'friday'",
",",
"'saturday'",
",",
"'sunday'",
"]",
"_strptime",
".",
"calendar",
".",
"month_abbr",
"=",
"[",
"''",
",",
"'jan'",
",",
"'feb'",
",",
"'mar'",
",",
"'apr'",
",",
"'may'",
",",
"'jun'",
",",
"'jul'",
",",
"'aug'",
",",
"'sep'",
",",
"'oct'",
",",
"'nov'",
",",
"'dec'",
"]",
"_strptime",
".",
"calendar",
".",
"month_name",
"=",
"[",
"''",
",",
"'january'",
",",
"'february'",
",",
"'march'",
",",
"'april'",
",",
"'may'",
",",
"'june'",
",",
"'july'",
",",
"'august'",
",",
"'september'",
",",
"'october'",
",",
"'november'",
",",
"'december'",
"]",
"return",
"_strptime",
".",
"_strptime_time"
] |
11a761c99d3ee522a3c63756b70c106a579e8b5c
|
test
|
LocaleDataLoader.get_locale_map
|
Get an ordered mapping with locale codes as keys
and corresponding locale instances as values.
:param languages:
A list of language codes, e.g. ['en', 'es', 'zh-Hant'].
If locales are not given, languages and region are
used to construct locales to load.
:type languages: list
:param locales:
A list of codes of locales which are to be loaded,
e.g. ['fr-PF', 'qu-EC', 'af-NA']
:type locales: list
:param region:
A region code, e.g. 'IN', '001', 'NE'.
If locales are not given, languages and region are
used to construct locales to load.
:type region: str|unicode
:param use_given_order:
If True, the returned mapping is ordered in the order locales are given.
:type allow_redetect_language: bool
:param allow_conflicting_locales:
if True, locales with same language and different region can be loaded.
:type allow_conflicting_locales: bool
:return: ordered locale code to locale instance mapping
|
dateparser/languages/loader.py
|
def get_locale_map(self, languages=None, locales=None, region=None,
use_given_order=False, allow_conflicting_locales=False):
"""
Get an ordered mapping with locale codes as keys
and corresponding locale instances as values.
:param languages:
A list of language codes, e.g. ['en', 'es', 'zh-Hant'].
If locales are not given, languages and region are
used to construct locales to load.
:type languages: list
:param locales:
A list of codes of locales which are to be loaded,
e.g. ['fr-PF', 'qu-EC', 'af-NA']
:type locales: list
:param region:
A region code, e.g. 'IN', '001', 'NE'.
If locales are not given, languages and region are
used to construct locales to load.
:type region: str|unicode
:param use_given_order:
If True, the returned mapping is ordered in the order locales are given.
:type allow_redetect_language: bool
:param allow_conflicting_locales:
if True, locales with same language and different region can be loaded.
:type allow_conflicting_locales: bool
:return: ordered locale code to locale instance mapping
"""
return OrderedDict(self._load_data(
languages=languages, locales=locales, region=region, use_given_order=use_given_order,
allow_conflicting_locales=allow_conflicting_locales))
|
def get_locale_map(self, languages=None, locales=None, region=None,
use_given_order=False, allow_conflicting_locales=False):
"""
Get an ordered mapping with locale codes as keys
and corresponding locale instances as values.
:param languages:
A list of language codes, e.g. ['en', 'es', 'zh-Hant'].
If locales are not given, languages and region are
used to construct locales to load.
:type languages: list
:param locales:
A list of codes of locales which are to be loaded,
e.g. ['fr-PF', 'qu-EC', 'af-NA']
:type locales: list
:param region:
A region code, e.g. 'IN', '001', 'NE'.
If locales are not given, languages and region are
used to construct locales to load.
:type region: str|unicode
:param use_given_order:
If True, the returned mapping is ordered in the order locales are given.
:type allow_redetect_language: bool
:param allow_conflicting_locales:
if True, locales with same language and different region can be loaded.
:type allow_conflicting_locales: bool
:return: ordered locale code to locale instance mapping
"""
return OrderedDict(self._load_data(
languages=languages, locales=locales, region=region, use_given_order=use_given_order,
allow_conflicting_locales=allow_conflicting_locales))
|
[
"Get",
"an",
"ordered",
"mapping",
"with",
"locale",
"codes",
"as",
"keys",
"and",
"corresponding",
"locale",
"instances",
"as",
"values",
"."
] |
scrapinghub/dateparser
|
python
|
https://github.com/scrapinghub/dateparser/blob/11a761c99d3ee522a3c63756b70c106a579e8b5c/dateparser/languages/loader.py#L48-L83
|
[
"def",
"get_locale_map",
"(",
"self",
",",
"languages",
"=",
"None",
",",
"locales",
"=",
"None",
",",
"region",
"=",
"None",
",",
"use_given_order",
"=",
"False",
",",
"allow_conflicting_locales",
"=",
"False",
")",
":",
"return",
"OrderedDict",
"(",
"self",
".",
"_load_data",
"(",
"languages",
"=",
"languages",
",",
"locales",
"=",
"locales",
",",
"region",
"=",
"region",
",",
"use_given_order",
"=",
"use_given_order",
",",
"allow_conflicting_locales",
"=",
"allow_conflicting_locales",
")",
")"
] |
11a761c99d3ee522a3c63756b70c106a579e8b5c
|
test
|
LocaleDataLoader.get_locales
|
Yield locale instances.
:param languages:
A list of language codes, e.g. ['en', 'es', 'zh-Hant'].
If locales are not given, languages and region are
used to construct locales to load.
:type languages: list
:param locales:
A list of codes of locales which are to be loaded,
e.g. ['fr-PF', 'qu-EC', 'af-NA']
:type locales: list
:param region:
A region code, e.g. 'IN', '001', 'NE'.
If locales are not given, languages and region are
used to construct locales to load.
:type region: str|unicode
:param use_given_order:
If True, the returned mapping is ordered in the order locales are given.
:type allow_redetect_language: bool
:param allow_conflicting_locales:
if True, locales with same language and different region can be loaded.
:type allow_conflicting_locales: bool
:yield: locale instances
|
dateparser/languages/loader.py
|
def get_locales(self, languages=None, locales=None, region=None,
use_given_order=False, allow_conflicting_locales=False):
"""
Yield locale instances.
:param languages:
A list of language codes, e.g. ['en', 'es', 'zh-Hant'].
If locales are not given, languages and region are
used to construct locales to load.
:type languages: list
:param locales:
A list of codes of locales which are to be loaded,
e.g. ['fr-PF', 'qu-EC', 'af-NA']
:type locales: list
:param region:
A region code, e.g. 'IN', '001', 'NE'.
If locales are not given, languages and region are
used to construct locales to load.
:type region: str|unicode
:param use_given_order:
If True, the returned mapping is ordered in the order locales are given.
:type allow_redetect_language: bool
:param allow_conflicting_locales:
if True, locales with same language and different region can be loaded.
:type allow_conflicting_locales: bool
:yield: locale instances
"""
for _, locale in self._load_data(
languages=languages, locales=locales, region=region,
use_given_order=use_given_order,
allow_conflicting_locales=allow_conflicting_locales):
yield locale
|
def get_locales(self, languages=None, locales=None, region=None,
use_given_order=False, allow_conflicting_locales=False):
"""
Yield locale instances.
:param languages:
A list of language codes, e.g. ['en', 'es', 'zh-Hant'].
If locales are not given, languages and region are
used to construct locales to load.
:type languages: list
:param locales:
A list of codes of locales which are to be loaded,
e.g. ['fr-PF', 'qu-EC', 'af-NA']
:type locales: list
:param region:
A region code, e.g. 'IN', '001', 'NE'.
If locales are not given, languages and region are
used to construct locales to load.
:type region: str|unicode
:param use_given_order:
If True, the returned mapping is ordered in the order locales are given.
:type allow_redetect_language: bool
:param allow_conflicting_locales:
if True, locales with same language and different region can be loaded.
:type allow_conflicting_locales: bool
:yield: locale instances
"""
for _, locale in self._load_data(
languages=languages, locales=locales, region=region,
use_given_order=use_given_order,
allow_conflicting_locales=allow_conflicting_locales):
yield locale
|
[
"Yield",
"locale",
"instances",
"."
] |
scrapinghub/dateparser
|
python
|
https://github.com/scrapinghub/dateparser/blob/11a761c99d3ee522a3c63756b70c106a579e8b5c/dateparser/languages/loader.py#L85-L121
|
[
"def",
"get_locales",
"(",
"self",
",",
"languages",
"=",
"None",
",",
"locales",
"=",
"None",
",",
"region",
"=",
"None",
",",
"use_given_order",
"=",
"False",
",",
"allow_conflicting_locales",
"=",
"False",
")",
":",
"for",
"_",
",",
"locale",
"in",
"self",
".",
"_load_data",
"(",
"languages",
"=",
"languages",
",",
"locales",
"=",
"locales",
",",
"region",
"=",
"region",
",",
"use_given_order",
"=",
"use_given_order",
",",
"allow_conflicting_locales",
"=",
"allow_conflicting_locales",
")",
":",
"yield",
"locale"
] |
11a761c99d3ee522a3c63756b70c106a579e8b5c
|
test
|
Dictionary.are_tokens_valid
|
Check if tokens are valid tokens for the locale.
:param tokens:
a list of string or unicode tokens.
:type tokens: list
:return: True if tokens are valid, False otherwise.
|
dateparser/languages/dictionary.py
|
def are_tokens_valid(self, tokens):
"""
Check if tokens are valid tokens for the locale.
:param tokens:
a list of string or unicode tokens.
:type tokens: list
:return: True if tokens are valid, False otherwise.
"""
match_relative_regex = self._get_match_relative_regex_cache()
for token in tokens:
if any([match_relative_regex.match(token),
token in self, token.isdigit()]):
continue
else:
return False
else:
return True
|
def are_tokens_valid(self, tokens):
"""
Check if tokens are valid tokens for the locale.
:param tokens:
a list of string or unicode tokens.
:type tokens: list
:return: True if tokens are valid, False otherwise.
"""
match_relative_regex = self._get_match_relative_regex_cache()
for token in tokens:
if any([match_relative_regex.match(token),
token in self, token.isdigit()]):
continue
else:
return False
else:
return True
|
[
"Check",
"if",
"tokens",
"are",
"valid",
"tokens",
"for",
"the",
"locale",
"."
] |
scrapinghub/dateparser
|
python
|
https://github.com/scrapinghub/dateparser/blob/11a761c99d3ee522a3c63756b70c106a579e8b5c/dateparser/languages/dictionary.py#L96-L114
|
[
"def",
"are_tokens_valid",
"(",
"self",
",",
"tokens",
")",
":",
"match_relative_regex",
"=",
"self",
".",
"_get_match_relative_regex_cache",
"(",
")",
"for",
"token",
"in",
"tokens",
":",
"if",
"any",
"(",
"[",
"match_relative_regex",
".",
"match",
"(",
"token",
")",
",",
"token",
"in",
"self",
",",
"token",
".",
"isdigit",
"(",
")",
"]",
")",
":",
"continue",
"else",
":",
"return",
"False",
"else",
":",
"return",
"True"
] |
11a761c99d3ee522a3c63756b70c106a579e8b5c
|
test
|
Dictionary.split
|
Split the date string using translations in locale info.
:param string:
Date string to be splitted.
:type string:
str|unicode
:param keep_formatting:
If True, retain formatting of the date string.
:type keep_formatting: bool
:return: A list of string tokens formed after splitting the date string.
|
dateparser/languages/dictionary.py
|
def split(self, string, keep_formatting=False):
"""
Split the date string using translations in locale info.
:param string:
Date string to be splitted.
:type string:
str|unicode
:param keep_formatting:
If True, retain formatting of the date string.
:type keep_formatting: bool
:return: A list of string tokens formed after splitting the date string.
"""
if not string:
return string
split_relative_regex = self._get_split_relative_regex_cache()
match_relative_regex = self._get_match_relative_regex_cache()
tokens = split_relative_regex.split(string)
for i, token in enumerate(tokens):
if match_relative_regex.match(token):
tokens[i] = [token]
continue
tokens[i] = self._split_by_known_words(token, keep_formatting)
return list(filter(bool, chain(*tokens)))
|
def split(self, string, keep_formatting=False):
"""
Split the date string using translations in locale info.
:param string:
Date string to be splitted.
:type string:
str|unicode
:param keep_formatting:
If True, retain formatting of the date string.
:type keep_formatting: bool
:return: A list of string tokens formed after splitting the date string.
"""
if not string:
return string
split_relative_regex = self._get_split_relative_regex_cache()
match_relative_regex = self._get_match_relative_regex_cache()
tokens = split_relative_regex.split(string)
for i, token in enumerate(tokens):
if match_relative_regex.match(token):
tokens[i] = [token]
continue
tokens[i] = self._split_by_known_words(token, keep_formatting)
return list(filter(bool, chain(*tokens)))
|
[
"Split",
"the",
"date",
"string",
"using",
"translations",
"in",
"locale",
"info",
"."
] |
scrapinghub/dateparser
|
python
|
https://github.com/scrapinghub/dateparser/blob/11a761c99d3ee522a3c63756b70c106a579e8b5c/dateparser/languages/dictionary.py#L116-L145
|
[
"def",
"split",
"(",
"self",
",",
"string",
",",
"keep_formatting",
"=",
"False",
")",
":",
"if",
"not",
"string",
":",
"return",
"string",
"split_relative_regex",
"=",
"self",
".",
"_get_split_relative_regex_cache",
"(",
")",
"match_relative_regex",
"=",
"self",
".",
"_get_match_relative_regex_cache",
"(",
")",
"tokens",
"=",
"split_relative_regex",
".",
"split",
"(",
"string",
")",
"for",
"i",
",",
"token",
"in",
"enumerate",
"(",
"tokens",
")",
":",
"if",
"match_relative_regex",
".",
"match",
"(",
"token",
")",
":",
"tokens",
"[",
"i",
"]",
"=",
"[",
"token",
"]",
"continue",
"tokens",
"[",
"i",
"]",
"=",
"self",
".",
"_split_by_known_words",
"(",
"token",
",",
"keep_formatting",
")",
"return",
"list",
"(",
"filter",
"(",
"bool",
",",
"chain",
"(",
"*",
"tokens",
")",
")",
")"
] |
11a761c99d3ee522a3c63756b70c106a579e8b5c
|
test
|
DateSearchWithDetection.search_dates
|
Find all substrings of the given string which represent date and/or time and parse them.
:param text:
A string in a natural language which may contain date and/or time expressions.
:type text: str|unicode
:param languages:
A list of two letters language codes.e.g. ['en', 'es']. If languages are given, it will not attempt
to detect the language.
:type languages: list
:param settings:
Configure customized behavior using settings defined in :mod:`dateparser.conf.Settings`.
:type settings: dict
:return: a dict mapping keys to two letter language code and a list of tuples of pairs:
substring representing date expressions and corresponding :mod:`datetime.datetime` object.
For example:
{'Language': 'en', 'Dates': [('on 4 October 1957', datetime.datetime(1957, 10, 4, 0, 0))]}
If language of the string isn't recognised returns:
{'Language': None, 'Dates': None}
:raises: ValueError - Unknown Language
|
dateparser/search/search.py
|
def search_dates(self, text, languages=None, settings=None):
"""
Find all substrings of the given string which represent date and/or time and parse them.
:param text:
A string in a natural language which may contain date and/or time expressions.
:type text: str|unicode
:param languages:
A list of two letters language codes.e.g. ['en', 'es']. If languages are given, it will not attempt
to detect the language.
:type languages: list
:param settings:
Configure customized behavior using settings defined in :mod:`dateparser.conf.Settings`.
:type settings: dict
:return: a dict mapping keys to two letter language code and a list of tuples of pairs:
substring representing date expressions and corresponding :mod:`datetime.datetime` object.
For example:
{'Language': 'en', 'Dates': [('on 4 October 1957', datetime.datetime(1957, 10, 4, 0, 0))]}
If language of the string isn't recognised returns:
{'Language': None, 'Dates': None}
:raises: ValueError - Unknown Language
"""
language_shortname = self.detect_language(text=text, languages=languages)
if not language_shortname:
return {'Language': None, 'Dates': None}
return {'Language': language_shortname, 'Dates': self.search.search_parse(language_shortname, text,
settings=settings)}
|
def search_dates(self, text, languages=None, settings=None):
"""
Find all substrings of the given string which represent date and/or time and parse them.
:param text:
A string in a natural language which may contain date and/or time expressions.
:type text: str|unicode
:param languages:
A list of two letters language codes.e.g. ['en', 'es']. If languages are given, it will not attempt
to detect the language.
:type languages: list
:param settings:
Configure customized behavior using settings defined in :mod:`dateparser.conf.Settings`.
:type settings: dict
:return: a dict mapping keys to two letter language code and a list of tuples of pairs:
substring representing date expressions and corresponding :mod:`datetime.datetime` object.
For example:
{'Language': 'en', 'Dates': [('on 4 October 1957', datetime.datetime(1957, 10, 4, 0, 0))]}
If language of the string isn't recognised returns:
{'Language': None, 'Dates': None}
:raises: ValueError - Unknown Language
"""
language_shortname = self.detect_language(text=text, languages=languages)
if not language_shortname:
return {'Language': None, 'Dates': None}
return {'Language': language_shortname, 'Dates': self.search.search_parse(language_shortname, text,
settings=settings)}
|
[
"Find",
"all",
"substrings",
"of",
"the",
"given",
"string",
"which",
"represent",
"date",
"and",
"/",
"or",
"time",
"and",
"parse",
"them",
"."
] |
scrapinghub/dateparser
|
python
|
https://github.com/scrapinghub/dateparser/blob/11a761c99d3ee522a3c63756b70c106a579e8b5c/dateparser/search/search.py#L202-L230
|
[
"def",
"search_dates",
"(",
"self",
",",
"text",
",",
"languages",
"=",
"None",
",",
"settings",
"=",
"None",
")",
":",
"language_shortname",
"=",
"self",
".",
"detect_language",
"(",
"text",
"=",
"text",
",",
"languages",
"=",
"languages",
")",
"if",
"not",
"language_shortname",
":",
"return",
"{",
"'Language'",
":",
"None",
",",
"'Dates'",
":",
"None",
"}",
"return",
"{",
"'Language'",
":",
"language_shortname",
",",
"'Dates'",
":",
"self",
".",
"search",
".",
"search_parse",
"(",
"language_shortname",
",",
"text",
",",
"settings",
"=",
"settings",
")",
"}"
] |
11a761c99d3ee522a3c63756b70c106a579e8b5c
|
test
|
parse
|
Parse date and time from given date string.
:param date_string:
A string representing date and/or time in a recognizably valid format.
:type date_string: str|unicode
:param date_formats:
A list of format strings using directives as given
`here <https://docs.python.org/2/library/datetime.html#strftime-and-strptime-behavior>`_.
The parser applies formats one by one, taking into account the detected languages/locales.
:type date_formats: list
:param languages:
A list of language codes, e.g. ['en', 'es', 'zh-Hant'].
If locales are not given, languages and region are used to construct locales for translation.
:type languages: list
:param locales:
A list of locale codes, e.g. ['fr-PF', 'qu-EC', 'af-NA'].
The parser uses locales to translate date string.
:type locales: list
:param region:
A region code, e.g. 'IN', '001', 'NE'.
If locales are not given, languages and region are used to construct locales for translation.
:type region: str|unicode
:param settings:
Configure customized behavior using settings defined in :mod:`dateparser.conf.Settings`.
:type settings: dict
:return: Returns :class:`datetime <datetime.datetime>` representing parsed date if successful, else returns None
:rtype: :class:`datetime <datetime.datetime>`.
:raises: ValueError - Unknown Language
|
dateparser/__init__.py
|
def parse(date_string, date_formats=None, languages=None, locales=None, region=None, settings=None):
"""Parse date and time from given date string.
:param date_string:
A string representing date and/or time in a recognizably valid format.
:type date_string: str|unicode
:param date_formats:
A list of format strings using directives as given
`here <https://docs.python.org/2/library/datetime.html#strftime-and-strptime-behavior>`_.
The parser applies formats one by one, taking into account the detected languages/locales.
:type date_formats: list
:param languages:
A list of language codes, e.g. ['en', 'es', 'zh-Hant'].
If locales are not given, languages and region are used to construct locales for translation.
:type languages: list
:param locales:
A list of locale codes, e.g. ['fr-PF', 'qu-EC', 'af-NA'].
The parser uses locales to translate date string.
:type locales: list
:param region:
A region code, e.g. 'IN', '001', 'NE'.
If locales are not given, languages and region are used to construct locales for translation.
:type region: str|unicode
:param settings:
Configure customized behavior using settings defined in :mod:`dateparser.conf.Settings`.
:type settings: dict
:return: Returns :class:`datetime <datetime.datetime>` representing parsed date if successful, else returns None
:rtype: :class:`datetime <datetime.datetime>`.
:raises: ValueError - Unknown Language
"""
parser = _default_parser
if any([languages, locales, region, not settings._default]):
parser = DateDataParser(languages=languages, locales=locales,
region=region, settings=settings)
data = parser.get_date_data(date_string, date_formats)
if data:
return data['date_obj']
|
def parse(date_string, date_formats=None, languages=None, locales=None, region=None, settings=None):
"""Parse date and time from given date string.
:param date_string:
A string representing date and/or time in a recognizably valid format.
:type date_string: str|unicode
:param date_formats:
A list of format strings using directives as given
`here <https://docs.python.org/2/library/datetime.html#strftime-and-strptime-behavior>`_.
The parser applies formats one by one, taking into account the detected languages/locales.
:type date_formats: list
:param languages:
A list of language codes, e.g. ['en', 'es', 'zh-Hant'].
If locales are not given, languages and region are used to construct locales for translation.
:type languages: list
:param locales:
A list of locale codes, e.g. ['fr-PF', 'qu-EC', 'af-NA'].
The parser uses locales to translate date string.
:type locales: list
:param region:
A region code, e.g. 'IN', '001', 'NE'.
If locales are not given, languages and region are used to construct locales for translation.
:type region: str|unicode
:param settings:
Configure customized behavior using settings defined in :mod:`dateparser.conf.Settings`.
:type settings: dict
:return: Returns :class:`datetime <datetime.datetime>` representing parsed date if successful, else returns None
:rtype: :class:`datetime <datetime.datetime>`.
:raises: ValueError - Unknown Language
"""
parser = _default_parser
if any([languages, locales, region, not settings._default]):
parser = DateDataParser(languages=languages, locales=locales,
region=region, settings=settings)
data = parser.get_date_data(date_string, date_formats)
if data:
return data['date_obj']
|
[
"Parse",
"date",
"and",
"time",
"from",
"given",
"date",
"string",
"."
] |
scrapinghub/dateparser
|
python
|
https://github.com/scrapinghub/dateparser/blob/11a761c99d3ee522a3c63756b70c106a579e8b5c/dateparser/__init__.py#L11-L56
|
[
"def",
"parse",
"(",
"date_string",
",",
"date_formats",
"=",
"None",
",",
"languages",
"=",
"None",
",",
"locales",
"=",
"None",
",",
"region",
"=",
"None",
",",
"settings",
"=",
"None",
")",
":",
"parser",
"=",
"_default_parser",
"if",
"any",
"(",
"[",
"languages",
",",
"locales",
",",
"region",
",",
"not",
"settings",
".",
"_default",
"]",
")",
":",
"parser",
"=",
"DateDataParser",
"(",
"languages",
"=",
"languages",
",",
"locales",
"=",
"locales",
",",
"region",
"=",
"region",
",",
"settings",
"=",
"settings",
")",
"data",
"=",
"parser",
".",
"get_date_data",
"(",
"date_string",
",",
"date_formats",
")",
"if",
"data",
":",
"return",
"data",
"[",
"'date_obj'",
"]"
] |
11a761c99d3ee522a3c63756b70c106a579e8b5c
|
test
|
FreshnessDateDataParser._parse_time
|
Attemps to parse time part of date strings like '1 day ago, 2 PM'
|
dateparser/freshness_date_parser.py
|
def _parse_time(self, date_string, settings):
"""Attemps to parse time part of date strings like '1 day ago, 2 PM' """
date_string = PATTERN.sub('', date_string)
date_string = re.sub(r'\b(?:ago|in)\b', '', date_string)
try:
return time_parser(date_string)
except:
pass
|
def _parse_time(self, date_string, settings):
"""Attemps to parse time part of date strings like '1 day ago, 2 PM' """
date_string = PATTERN.sub('', date_string)
date_string = re.sub(r'\b(?:ago|in)\b', '', date_string)
try:
return time_parser(date_string)
except:
pass
|
[
"Attemps",
"to",
"parse",
"time",
"part",
"of",
"date",
"strings",
"like",
"1",
"day",
"ago",
"2",
"PM"
] |
scrapinghub/dateparser
|
python
|
https://github.com/scrapinghub/dateparser/blob/11a761c99d3ee522a3c63756b70c106a579e8b5c/dateparser/freshness_date_parser.py#L36-L43
|
[
"def",
"_parse_time",
"(",
"self",
",",
"date_string",
",",
"settings",
")",
":",
"date_string",
"=",
"PATTERN",
".",
"sub",
"(",
"''",
",",
"date_string",
")",
"date_string",
"=",
"re",
".",
"sub",
"(",
"r'\\b(?:ago|in)\\b'",
",",
"''",
",",
"date_string",
")",
"try",
":",
"return",
"time_parser",
"(",
"date_string",
")",
"except",
":",
"pass"
] |
11a761c99d3ee522a3c63756b70c106a579e8b5c
|
test
|
Locale.is_applicable
|
Check if the locale is applicable to translate date string.
:param date_string:
A string representing date and/or time in a recognizably valid format.
:type date_string: str|unicode
:param strip_timezone:
If True, timezone is stripped from date string.
:type strip_timezone: bool
:return: boolean value representing if the locale is applicable for the date string or not.
|
dateparser/languages/locale.py
|
def is_applicable(self, date_string, strip_timezone=False, settings=None):
"""
Check if the locale is applicable to translate date string.
:param date_string:
A string representing date and/or time in a recognizably valid format.
:type date_string: str|unicode
:param strip_timezone:
If True, timezone is stripped from date string.
:type strip_timezone: bool
:return: boolean value representing if the locale is applicable for the date string or not.
"""
if strip_timezone:
date_string, _ = pop_tz_offset_from_string(date_string, as_offset=False)
date_string = self._translate_numerals(date_string)
if settings.NORMALIZE:
date_string = normalize_unicode(date_string)
date_string = self._simplify(date_string, settings=settings)
dictionary = self._get_dictionary(settings)
date_tokens = dictionary.split(date_string)
return dictionary.are_tokens_valid(date_tokens)
|
def is_applicable(self, date_string, strip_timezone=False, settings=None):
"""
Check if the locale is applicable to translate date string.
:param date_string:
A string representing date and/or time in a recognizably valid format.
:type date_string: str|unicode
:param strip_timezone:
If True, timezone is stripped from date string.
:type strip_timezone: bool
:return: boolean value representing if the locale is applicable for the date string or not.
"""
if strip_timezone:
date_string, _ = pop_tz_offset_from_string(date_string, as_offset=False)
date_string = self._translate_numerals(date_string)
if settings.NORMALIZE:
date_string = normalize_unicode(date_string)
date_string = self._simplify(date_string, settings=settings)
dictionary = self._get_dictionary(settings)
date_tokens = dictionary.split(date_string)
return dictionary.are_tokens_valid(date_tokens)
|
[
"Check",
"if",
"the",
"locale",
"is",
"applicable",
"to",
"translate",
"date",
"string",
"."
] |
scrapinghub/dateparser
|
python
|
https://github.com/scrapinghub/dateparser/blob/11a761c99d3ee522a3c63756b70c106a579e8b5c/dateparser/languages/locale.py#L53-L77
|
[
"def",
"is_applicable",
"(",
"self",
",",
"date_string",
",",
"strip_timezone",
"=",
"False",
",",
"settings",
"=",
"None",
")",
":",
"if",
"strip_timezone",
":",
"date_string",
",",
"_",
"=",
"pop_tz_offset_from_string",
"(",
"date_string",
",",
"as_offset",
"=",
"False",
")",
"date_string",
"=",
"self",
".",
"_translate_numerals",
"(",
"date_string",
")",
"if",
"settings",
".",
"NORMALIZE",
":",
"date_string",
"=",
"normalize_unicode",
"(",
"date_string",
")",
"date_string",
"=",
"self",
".",
"_simplify",
"(",
"date_string",
",",
"settings",
"=",
"settings",
")",
"dictionary",
"=",
"self",
".",
"_get_dictionary",
"(",
"settings",
")",
"date_tokens",
"=",
"dictionary",
".",
"split",
"(",
"date_string",
")",
"return",
"dictionary",
".",
"are_tokens_valid",
"(",
"date_tokens",
")"
] |
11a761c99d3ee522a3c63756b70c106a579e8b5c
|
test
|
Locale.translate
|
Translate the date string to its English equivalent.
:param date_string:
A string representing date and/or time in a recognizably valid format.
:type date_string: str|unicode
:param keep_formatting:
If True, retain formatting of the date string after translation.
:type keep_formatting: bool
:return: translated date string.
|
dateparser/languages/locale.py
|
def translate(self, date_string, keep_formatting=False, settings=None):
"""
Translate the date string to its English equivalent.
:param date_string:
A string representing date and/or time in a recognizably valid format.
:type date_string: str|unicode
:param keep_formatting:
If True, retain formatting of the date string after translation.
:type keep_formatting: bool
:return: translated date string.
"""
date_string = self._translate_numerals(date_string)
if settings.NORMALIZE:
date_string = normalize_unicode(date_string)
date_string = self._simplify(date_string, settings=settings)
dictionary = self._get_dictionary(settings)
date_string_tokens = dictionary.split(date_string, keep_formatting)
relative_translations = self._get_relative_translations(settings=settings)
for i, word in enumerate(date_string_tokens):
word = word.lower()
for pattern, replacement in relative_translations.items():
if pattern.match(word):
date_string_tokens[i] = pattern.sub(replacement, word)
else:
if word in dictionary:
date_string_tokens[i] = dictionary[word] or ''
if "in" in date_string_tokens:
date_string_tokens = self._clear_future_words(date_string_tokens)
return self._join(list(filter(bool, date_string_tokens)),
separator="" if keep_formatting else " ", settings=settings)
|
def translate(self, date_string, keep_formatting=False, settings=None):
"""
Translate the date string to its English equivalent.
:param date_string:
A string representing date and/or time in a recognizably valid format.
:type date_string: str|unicode
:param keep_formatting:
If True, retain formatting of the date string after translation.
:type keep_formatting: bool
:return: translated date string.
"""
date_string = self._translate_numerals(date_string)
if settings.NORMALIZE:
date_string = normalize_unicode(date_string)
date_string = self._simplify(date_string, settings=settings)
dictionary = self._get_dictionary(settings)
date_string_tokens = dictionary.split(date_string, keep_formatting)
relative_translations = self._get_relative_translations(settings=settings)
for i, word in enumerate(date_string_tokens):
word = word.lower()
for pattern, replacement in relative_translations.items():
if pattern.match(word):
date_string_tokens[i] = pattern.sub(replacement, word)
else:
if word in dictionary:
date_string_tokens[i] = dictionary[word] or ''
if "in" in date_string_tokens:
date_string_tokens = self._clear_future_words(date_string_tokens)
return self._join(list(filter(bool, date_string_tokens)),
separator="" if keep_formatting else " ", settings=settings)
|
[
"Translate",
"the",
"date",
"string",
"to",
"its",
"English",
"equivalent",
"."
] |
scrapinghub/dateparser
|
python
|
https://github.com/scrapinghub/dateparser/blob/11a761c99d3ee522a3c63756b70c106a579e8b5c/dateparser/languages/locale.py#L114-L149
|
[
"def",
"translate",
"(",
"self",
",",
"date_string",
",",
"keep_formatting",
"=",
"False",
",",
"settings",
"=",
"None",
")",
":",
"date_string",
"=",
"self",
".",
"_translate_numerals",
"(",
"date_string",
")",
"if",
"settings",
".",
"NORMALIZE",
":",
"date_string",
"=",
"normalize_unicode",
"(",
"date_string",
")",
"date_string",
"=",
"self",
".",
"_simplify",
"(",
"date_string",
",",
"settings",
"=",
"settings",
")",
"dictionary",
"=",
"self",
".",
"_get_dictionary",
"(",
"settings",
")",
"date_string_tokens",
"=",
"dictionary",
".",
"split",
"(",
"date_string",
",",
"keep_formatting",
")",
"relative_translations",
"=",
"self",
".",
"_get_relative_translations",
"(",
"settings",
"=",
"settings",
")",
"for",
"i",
",",
"word",
"in",
"enumerate",
"(",
"date_string_tokens",
")",
":",
"word",
"=",
"word",
".",
"lower",
"(",
")",
"for",
"pattern",
",",
"replacement",
"in",
"relative_translations",
".",
"items",
"(",
")",
":",
"if",
"pattern",
".",
"match",
"(",
"word",
")",
":",
"date_string_tokens",
"[",
"i",
"]",
"=",
"pattern",
".",
"sub",
"(",
"replacement",
",",
"word",
")",
"else",
":",
"if",
"word",
"in",
"dictionary",
":",
"date_string_tokens",
"[",
"i",
"]",
"=",
"dictionary",
"[",
"word",
"]",
"or",
"''",
"if",
"\"in\"",
"in",
"date_string_tokens",
":",
"date_string_tokens",
"=",
"self",
".",
"_clear_future_words",
"(",
"date_string_tokens",
")",
"return",
"self",
".",
"_join",
"(",
"list",
"(",
"filter",
"(",
"bool",
",",
"date_string_tokens",
")",
")",
",",
"separator",
"=",
"\"\"",
"if",
"keep_formatting",
"else",
"\" \"",
",",
"settings",
"=",
"settings",
")"
] |
11a761c99d3ee522a3c63756b70c106a579e8b5c
|
test
|
parse_with_formats
|
Parse with formats and return a dictionary with 'period' and 'obj_date'.
:returns: :class:`datetime.datetime`, dict or None
|
dateparser/date.py
|
def parse_with_formats(date_string, date_formats, settings):
""" Parse with formats and return a dictionary with 'period' and 'obj_date'.
:returns: :class:`datetime.datetime`, dict or None
"""
period = 'day'
for date_format in date_formats:
try:
date_obj = datetime.strptime(date_string, date_format)
except ValueError:
continue
else:
# If format does not include the day, use last day of the month
# instead of first, because the first is usually out of range.
if '%d' not in date_format:
period = 'month'
date_obj = date_obj.replace(
day=get_last_day_of_month(date_obj.year, date_obj.month))
if not ('%y' in date_format or '%Y' in date_format):
today = datetime.today()
date_obj = date_obj.replace(year=today.year)
date_obj = apply_timezone_from_settings(date_obj, settings)
return {'date_obj': date_obj, 'period': period}
else:
return {'date_obj': None, 'period': period}
|
def parse_with_formats(date_string, date_formats, settings):
""" Parse with formats and return a dictionary with 'period' and 'obj_date'.
:returns: :class:`datetime.datetime`, dict or None
"""
period = 'day'
for date_format in date_formats:
try:
date_obj = datetime.strptime(date_string, date_format)
except ValueError:
continue
else:
# If format does not include the day, use last day of the month
# instead of first, because the first is usually out of range.
if '%d' not in date_format:
period = 'month'
date_obj = date_obj.replace(
day=get_last_day_of_month(date_obj.year, date_obj.month))
if not ('%y' in date_format or '%Y' in date_format):
today = datetime.today()
date_obj = date_obj.replace(year=today.year)
date_obj = apply_timezone_from_settings(date_obj, settings)
return {'date_obj': date_obj, 'period': period}
else:
return {'date_obj': None, 'period': period}
|
[
"Parse",
"with",
"formats",
"and",
"return",
"a",
"dictionary",
"with",
"period",
"and",
"obj_date",
"."
] |
scrapinghub/dateparser
|
python
|
https://github.com/scrapinghub/dateparser/blob/11a761c99d3ee522a3c63756b70c106a579e8b5c/dateparser/date.py#L133-L161
|
[
"def",
"parse_with_formats",
"(",
"date_string",
",",
"date_formats",
",",
"settings",
")",
":",
"period",
"=",
"'day'",
"for",
"date_format",
"in",
"date_formats",
":",
"try",
":",
"date_obj",
"=",
"datetime",
".",
"strptime",
"(",
"date_string",
",",
"date_format",
")",
"except",
"ValueError",
":",
"continue",
"else",
":",
"# If format does not include the day, use last day of the month",
"# instead of first, because the first is usually out of range.",
"if",
"'%d'",
"not",
"in",
"date_format",
":",
"period",
"=",
"'month'",
"date_obj",
"=",
"date_obj",
".",
"replace",
"(",
"day",
"=",
"get_last_day_of_month",
"(",
"date_obj",
".",
"year",
",",
"date_obj",
".",
"month",
")",
")",
"if",
"not",
"(",
"'%y'",
"in",
"date_format",
"or",
"'%Y'",
"in",
"date_format",
")",
":",
"today",
"=",
"datetime",
".",
"today",
"(",
")",
"date_obj",
"=",
"date_obj",
".",
"replace",
"(",
"year",
"=",
"today",
".",
"year",
")",
"date_obj",
"=",
"apply_timezone_from_settings",
"(",
"date_obj",
",",
"settings",
")",
"return",
"{",
"'date_obj'",
":",
"date_obj",
",",
"'period'",
":",
"period",
"}",
"else",
":",
"return",
"{",
"'date_obj'",
":",
"None",
",",
"'period'",
":",
"period",
"}"
] |
11a761c99d3ee522a3c63756b70c106a579e8b5c
|
test
|
DateDataParser.get_date_data
|
Parse string representing date and/or time in recognizable localized formats.
Supports parsing multiple languages and timezones.
:param date_string:
A string representing date and/or time in a recognizably valid format.
:type date_string: str|unicode
:param date_formats:
A list of format strings using directives as given
`here <https://docs.python.org/2/library/datetime.html#strftime-and-strptime-behavior>`_.
The parser applies formats one by one, taking into account the detected languages.
:type date_formats: list
:return: a dict mapping keys to :mod:`datetime.datetime` object and *period*. For example:
{'date_obj': datetime.datetime(2015, 6, 1, 0, 0), 'period': u'day'}
:raises: ValueError - Unknown Language
.. note:: *Period* values can be a 'day' (default), 'week', 'month', 'year'.
*Period* represents the granularity of date parsed from the given string.
In the example below, since no day information is present, the day is assumed to be current
day ``16`` from *current date* (which is June 16, 2015, at the moment of writing this).
Hence, the level of precision is ``month``:
>>> DateDataParser().get_date_data(u'March 2015')
{'date_obj': datetime.datetime(2015, 3, 16, 0, 0), 'period': u'month'}
Similarly, for date strings with no day and month information present, level of precision
is ``year`` and day ``16`` and month ``6`` are from *current_date*.
>>> DateDataParser().get_date_data(u'2014')
{'date_obj': datetime.datetime(2014, 6, 16, 0, 0), 'period': u'year'}
Dates with time zone indications or UTC offsets are returned in UTC time unless
specified using `Settings`_.
>>> DateDataParser().get_date_data(u'23 March 2000, 1:21 PM CET')
{'date_obj': datetime.datetime(2000, 3, 23, 14, 21), 'period': 'day'}
|
dateparser/date.py
|
def get_date_data(self, date_string, date_formats=None):
"""
Parse string representing date and/or time in recognizable localized formats.
Supports parsing multiple languages and timezones.
:param date_string:
A string representing date and/or time in a recognizably valid format.
:type date_string: str|unicode
:param date_formats:
A list of format strings using directives as given
`here <https://docs.python.org/2/library/datetime.html#strftime-and-strptime-behavior>`_.
The parser applies formats one by one, taking into account the detected languages.
:type date_formats: list
:return: a dict mapping keys to :mod:`datetime.datetime` object and *period*. For example:
{'date_obj': datetime.datetime(2015, 6, 1, 0, 0), 'period': u'day'}
:raises: ValueError - Unknown Language
.. note:: *Period* values can be a 'day' (default), 'week', 'month', 'year'.
*Period* represents the granularity of date parsed from the given string.
In the example below, since no day information is present, the day is assumed to be current
day ``16`` from *current date* (which is June 16, 2015, at the moment of writing this).
Hence, the level of precision is ``month``:
>>> DateDataParser().get_date_data(u'March 2015')
{'date_obj': datetime.datetime(2015, 3, 16, 0, 0), 'period': u'month'}
Similarly, for date strings with no day and month information present, level of precision
is ``year`` and day ``16`` and month ``6`` are from *current_date*.
>>> DateDataParser().get_date_data(u'2014')
{'date_obj': datetime.datetime(2014, 6, 16, 0, 0), 'period': u'year'}
Dates with time zone indications or UTC offsets are returned in UTC time unless
specified using `Settings`_.
>>> DateDataParser().get_date_data(u'23 March 2000, 1:21 PM CET')
{'date_obj': datetime.datetime(2000, 3, 23, 14, 21), 'period': 'day'}
"""
if not(isinstance(date_string, six.text_type) or isinstance(date_string, six.string_types)):
raise TypeError('Input type must be str or unicode')
if isinstance(date_string, bytes):
date_string = date_string.decode('utf-8')
res = parse_with_formats(date_string, date_formats or [], self._settings)
if res['date_obj']:
return res
date_string = sanitize_date(date_string)
for locale in self._get_applicable_locales(date_string):
parsed_date = _DateLocaleParser.parse(
locale, date_string, date_formats, settings=self._settings)
if parsed_date:
parsed_date['locale'] = locale.shortname
if self.try_previous_locales:
self.previous_locales.insert(0, locale)
return parsed_date
else:
return {'date_obj': None, 'period': 'day', 'locale': None}
|
def get_date_data(self, date_string, date_formats=None):
"""
Parse string representing date and/or time in recognizable localized formats.
Supports parsing multiple languages and timezones.
:param date_string:
A string representing date and/or time in a recognizably valid format.
:type date_string: str|unicode
:param date_formats:
A list of format strings using directives as given
`here <https://docs.python.org/2/library/datetime.html#strftime-and-strptime-behavior>`_.
The parser applies formats one by one, taking into account the detected languages.
:type date_formats: list
:return: a dict mapping keys to :mod:`datetime.datetime` object and *period*. For example:
{'date_obj': datetime.datetime(2015, 6, 1, 0, 0), 'period': u'day'}
:raises: ValueError - Unknown Language
.. note:: *Period* values can be a 'day' (default), 'week', 'month', 'year'.
*Period* represents the granularity of date parsed from the given string.
In the example below, since no day information is present, the day is assumed to be current
day ``16`` from *current date* (which is June 16, 2015, at the moment of writing this).
Hence, the level of precision is ``month``:
>>> DateDataParser().get_date_data(u'March 2015')
{'date_obj': datetime.datetime(2015, 3, 16, 0, 0), 'period': u'month'}
Similarly, for date strings with no day and month information present, level of precision
is ``year`` and day ``16`` and month ``6`` are from *current_date*.
>>> DateDataParser().get_date_data(u'2014')
{'date_obj': datetime.datetime(2014, 6, 16, 0, 0), 'period': u'year'}
Dates with time zone indications or UTC offsets are returned in UTC time unless
specified using `Settings`_.
>>> DateDataParser().get_date_data(u'23 March 2000, 1:21 PM CET')
{'date_obj': datetime.datetime(2000, 3, 23, 14, 21), 'period': 'day'}
"""
if not(isinstance(date_string, six.text_type) or isinstance(date_string, six.string_types)):
raise TypeError('Input type must be str or unicode')
if isinstance(date_string, bytes):
date_string = date_string.decode('utf-8')
res = parse_with_formats(date_string, date_formats or [], self._settings)
if res['date_obj']:
return res
date_string = sanitize_date(date_string)
for locale in self._get_applicable_locales(date_string):
parsed_date = _DateLocaleParser.parse(
locale, date_string, date_formats, settings=self._settings)
if parsed_date:
parsed_date['locale'] = locale.shortname
if self.try_previous_locales:
self.previous_locales.insert(0, locale)
return parsed_date
else:
return {'date_obj': None, 'period': 'day', 'locale': None}
|
[
"Parse",
"string",
"representing",
"date",
"and",
"/",
"or",
"time",
"in",
"recognizable",
"localized",
"formats",
".",
"Supports",
"parsing",
"multiple",
"languages",
"and",
"timezones",
"."
] |
scrapinghub/dateparser
|
python
|
https://github.com/scrapinghub/dateparser/blob/11a761c99d3ee522a3c63756b70c106a579e8b5c/dateparser/date.py#L354-L418
|
[
"def",
"get_date_data",
"(",
"self",
",",
"date_string",
",",
"date_formats",
"=",
"None",
")",
":",
"if",
"not",
"(",
"isinstance",
"(",
"date_string",
",",
"six",
".",
"text_type",
")",
"or",
"isinstance",
"(",
"date_string",
",",
"six",
".",
"string_types",
")",
")",
":",
"raise",
"TypeError",
"(",
"'Input type must be str or unicode'",
")",
"if",
"isinstance",
"(",
"date_string",
",",
"bytes",
")",
":",
"date_string",
"=",
"date_string",
".",
"decode",
"(",
"'utf-8'",
")",
"res",
"=",
"parse_with_formats",
"(",
"date_string",
",",
"date_formats",
"or",
"[",
"]",
",",
"self",
".",
"_settings",
")",
"if",
"res",
"[",
"'date_obj'",
"]",
":",
"return",
"res",
"date_string",
"=",
"sanitize_date",
"(",
"date_string",
")",
"for",
"locale",
"in",
"self",
".",
"_get_applicable_locales",
"(",
"date_string",
")",
":",
"parsed_date",
"=",
"_DateLocaleParser",
".",
"parse",
"(",
"locale",
",",
"date_string",
",",
"date_formats",
",",
"settings",
"=",
"self",
".",
"_settings",
")",
"if",
"parsed_date",
":",
"parsed_date",
"[",
"'locale'",
"]",
"=",
"locale",
".",
"shortname",
"if",
"self",
".",
"try_previous_locales",
":",
"self",
".",
"previous_locales",
".",
"insert",
"(",
"0",
",",
"locale",
")",
"return",
"parsed_date",
"else",
":",
"return",
"{",
"'date_obj'",
":",
"None",
",",
"'period'",
":",
"'day'",
",",
"'locale'",
":",
"None",
"}"
] |
11a761c99d3ee522a3c63756b70c106a579e8b5c
|
test
|
ComponentFactory.get_load_plan
|
return load plan (timestamps generator)
|
yandextank/stepper/config.py
|
def get_load_plan(self):
"""
return load plan (timestamps generator)
"""
if self.rps_schedule and self.instances_schedule:
raise StepperConfigurationError(
'Both rps and instances schedules specified. You must specify only one of them'
)
elif self.rps_schedule:
info.status.publish('loadscheme', self.rps_schedule)
return lp.create(self.rps_schedule)
elif self.instances_schedule:
info.status.publish('loadscheme', self.instances_schedule)
return ip.create(self.instances_schedule)
else:
self.instances_schedule = []
info.status.publish('loadscheme', self.instances_schedule)
return ip.create(self.instances_schedule)
|
def get_load_plan(self):
"""
return load plan (timestamps generator)
"""
if self.rps_schedule and self.instances_schedule:
raise StepperConfigurationError(
'Both rps and instances schedules specified. You must specify only one of them'
)
elif self.rps_schedule:
info.status.publish('loadscheme', self.rps_schedule)
return lp.create(self.rps_schedule)
elif self.instances_schedule:
info.status.publish('loadscheme', self.instances_schedule)
return ip.create(self.instances_schedule)
else:
self.instances_schedule = []
info.status.publish('loadscheme', self.instances_schedule)
return ip.create(self.instances_schedule)
|
[
"return",
"load",
"plan",
"(",
"timestamps",
"generator",
")"
] |
yandex/yandex-tank
|
python
|
https://github.com/yandex/yandex-tank/blob/d71d63b6ab5de8b8a5ea2b728b6ab9ac0b1ba71b/yandextank/stepper/config.py#L56-L73
|
[
"def",
"get_load_plan",
"(",
"self",
")",
":",
"if",
"self",
".",
"rps_schedule",
"and",
"self",
".",
"instances_schedule",
":",
"raise",
"StepperConfigurationError",
"(",
"'Both rps and instances schedules specified. You must specify only one of them'",
")",
"elif",
"self",
".",
"rps_schedule",
":",
"info",
".",
"status",
".",
"publish",
"(",
"'loadscheme'",
",",
"self",
".",
"rps_schedule",
")",
"return",
"lp",
".",
"create",
"(",
"self",
".",
"rps_schedule",
")",
"elif",
"self",
".",
"instances_schedule",
":",
"info",
".",
"status",
".",
"publish",
"(",
"'loadscheme'",
",",
"self",
".",
"instances_schedule",
")",
"return",
"ip",
".",
"create",
"(",
"self",
".",
"instances_schedule",
")",
"else",
":",
"self",
".",
"instances_schedule",
"=",
"[",
"]",
"info",
".",
"status",
".",
"publish",
"(",
"'loadscheme'",
",",
"self",
".",
"instances_schedule",
")",
"return",
"ip",
".",
"create",
"(",
"self",
".",
"instances_schedule",
")"
] |
d71d63b6ab5de8b8a5ea2b728b6ab9ac0b1ba71b
|
test
|
ComponentFactory.get_ammo_generator
|
return ammo generator
|
yandextank/stepper/config.py
|
def get_ammo_generator(self):
"""
return ammo generator
"""
af_readers = {
'phantom': missile.AmmoFileReader,
'slowlog': missile.SlowLogReader,
'line': missile.LineReader,
'uri': missile.UriReader,
'uripost': missile.UriPostReader,
'access': missile.AccessLogReader,
'caseline': missile.CaseLineReader,
}
if self.uris and self.ammo_file:
raise StepperConfigurationError(
'Both uris and ammo file specified. You must specify only one of them'
)
elif self.uris:
ammo_gen = missile.UriStyleGenerator(
self.uris, self.headers, http_ver=self.http_ver)
elif self.ammo_file:
if self.ammo_type in af_readers:
if self.ammo_type == 'phantom':
opener = resource.get_opener(self.ammo_file)
with opener(self.use_cache) as ammo:
try:
if not ammo.next()[0].isdigit():
self.ammo_type = 'uri'
self.log.info(
"Setting ammo_type 'uri' because ammo is not started with digit and you did not specify ammo format"
)
else:
self.log.info(
"Default ammo type ('phantom') used, use 'phantom.ammo_type' option to override it"
)
except StopIteration:
self.log.exception(
"Couldn't read first line of ammo file")
raise AmmoFileError(
"Couldn't read first line of ammo file")
else:
raise NotImplementedError(
'No such ammo type implemented: "%s"' % self.ammo_type)
ammo_gen = af_readers[self.ammo_type](
self.ammo_file, headers=self.headers, http_ver=self.http_ver, use_cache=self.use_cache)
else:
raise StepperConfigurationError(
'Ammo not found. Specify uris or ammo file')
self.log.info("Using %s ammo reader" % type(ammo_gen).__name__)
return ammo_gen
|
def get_ammo_generator(self):
"""
return ammo generator
"""
af_readers = {
'phantom': missile.AmmoFileReader,
'slowlog': missile.SlowLogReader,
'line': missile.LineReader,
'uri': missile.UriReader,
'uripost': missile.UriPostReader,
'access': missile.AccessLogReader,
'caseline': missile.CaseLineReader,
}
if self.uris and self.ammo_file:
raise StepperConfigurationError(
'Both uris and ammo file specified. You must specify only one of them'
)
elif self.uris:
ammo_gen = missile.UriStyleGenerator(
self.uris, self.headers, http_ver=self.http_ver)
elif self.ammo_file:
if self.ammo_type in af_readers:
if self.ammo_type == 'phantom':
opener = resource.get_opener(self.ammo_file)
with opener(self.use_cache) as ammo:
try:
if not ammo.next()[0].isdigit():
self.ammo_type = 'uri'
self.log.info(
"Setting ammo_type 'uri' because ammo is not started with digit and you did not specify ammo format"
)
else:
self.log.info(
"Default ammo type ('phantom') used, use 'phantom.ammo_type' option to override it"
)
except StopIteration:
self.log.exception(
"Couldn't read first line of ammo file")
raise AmmoFileError(
"Couldn't read first line of ammo file")
else:
raise NotImplementedError(
'No such ammo type implemented: "%s"' % self.ammo_type)
ammo_gen = af_readers[self.ammo_type](
self.ammo_file, headers=self.headers, http_ver=self.http_ver, use_cache=self.use_cache)
else:
raise StepperConfigurationError(
'Ammo not found. Specify uris or ammo file')
self.log.info("Using %s ammo reader" % type(ammo_gen).__name__)
return ammo_gen
|
[
"return",
"ammo",
"generator"
] |
yandex/yandex-tank
|
python
|
https://github.com/yandex/yandex-tank/blob/d71d63b6ab5de8b8a5ea2b728b6ab9ac0b1ba71b/yandextank/stepper/config.py#L75-L124
|
[
"def",
"get_ammo_generator",
"(",
"self",
")",
":",
"af_readers",
"=",
"{",
"'phantom'",
":",
"missile",
".",
"AmmoFileReader",
",",
"'slowlog'",
":",
"missile",
".",
"SlowLogReader",
",",
"'line'",
":",
"missile",
".",
"LineReader",
",",
"'uri'",
":",
"missile",
".",
"UriReader",
",",
"'uripost'",
":",
"missile",
".",
"UriPostReader",
",",
"'access'",
":",
"missile",
".",
"AccessLogReader",
",",
"'caseline'",
":",
"missile",
".",
"CaseLineReader",
",",
"}",
"if",
"self",
".",
"uris",
"and",
"self",
".",
"ammo_file",
":",
"raise",
"StepperConfigurationError",
"(",
"'Both uris and ammo file specified. You must specify only one of them'",
")",
"elif",
"self",
".",
"uris",
":",
"ammo_gen",
"=",
"missile",
".",
"UriStyleGenerator",
"(",
"self",
".",
"uris",
",",
"self",
".",
"headers",
",",
"http_ver",
"=",
"self",
".",
"http_ver",
")",
"elif",
"self",
".",
"ammo_file",
":",
"if",
"self",
".",
"ammo_type",
"in",
"af_readers",
":",
"if",
"self",
".",
"ammo_type",
"==",
"'phantom'",
":",
"opener",
"=",
"resource",
".",
"get_opener",
"(",
"self",
".",
"ammo_file",
")",
"with",
"opener",
"(",
"self",
".",
"use_cache",
")",
"as",
"ammo",
":",
"try",
":",
"if",
"not",
"ammo",
".",
"next",
"(",
")",
"[",
"0",
"]",
".",
"isdigit",
"(",
")",
":",
"self",
".",
"ammo_type",
"=",
"'uri'",
"self",
".",
"log",
".",
"info",
"(",
"\"Setting ammo_type 'uri' because ammo is not started with digit and you did not specify ammo format\"",
")",
"else",
":",
"self",
".",
"log",
".",
"info",
"(",
"\"Default ammo type ('phantom') used, use 'phantom.ammo_type' option to override it\"",
")",
"except",
"StopIteration",
":",
"self",
".",
"log",
".",
"exception",
"(",
"\"Couldn't read first line of ammo file\"",
")",
"raise",
"AmmoFileError",
"(",
"\"Couldn't read first line of ammo file\"",
")",
"else",
":",
"raise",
"NotImplementedError",
"(",
"'No such ammo type implemented: \"%s\"'",
"%",
"self",
".",
"ammo_type",
")",
"ammo_gen",
"=",
"af_readers",
"[",
"self",
".",
"ammo_type",
"]",
"(",
"self",
".",
"ammo_file",
",",
"headers",
"=",
"self",
".",
"headers",
",",
"http_ver",
"=",
"self",
".",
"http_ver",
",",
"use_cache",
"=",
"self",
".",
"use_cache",
")",
"else",
":",
"raise",
"StepperConfigurationError",
"(",
"'Ammo not found. Specify uris or ammo file'",
")",
"self",
".",
"log",
".",
"info",
"(",
"\"Using %s ammo reader\"",
"%",
"type",
"(",
"ammo_gen",
")",
".",
"__name__",
")",
"return",
"ammo_gen"
] |
d71d63b6ab5de8b8a5ea2b728b6ab9ac0b1ba71b
|
test
|
_exc_to_net
|
translate http code to net code. if accertion failed, set net code to 314
|
yandextank/plugins/JMeter/reader.py
|
def _exc_to_net(param1, success):
""" translate http code to net code. if accertion failed, set net code to 314 """
if len(param1) <= 3:
# FIXME: we're unable to use better logic here, because we should support non-http codes
# but, we should look for core.util.HTTP or some other common logic
# here
if success:
return 0
else:
return 314
exc = param1.split(' ')[-1]
if exc in KNOWN_EXC.keys():
return KNOWN_EXC[exc]
else:
logger.warning(
"Unknown Java exception, consider adding it to dictionary: %s",
param1)
return 41
|
def _exc_to_net(param1, success):
""" translate http code to net code. if accertion failed, set net code to 314 """
if len(param1) <= 3:
# FIXME: we're unable to use better logic here, because we should support non-http codes
# but, we should look for core.util.HTTP or some other common logic
# here
if success:
return 0
else:
return 314
exc = param1.split(' ')[-1]
if exc in KNOWN_EXC.keys():
return KNOWN_EXC[exc]
else:
logger.warning(
"Unknown Java exception, consider adding it to dictionary: %s",
param1)
return 41
|
[
"translate",
"http",
"code",
"to",
"net",
"code",
".",
"if",
"accertion",
"failed",
"set",
"net",
"code",
"to",
"314"
] |
yandex/yandex-tank
|
python
|
https://github.com/yandex/yandex-tank/blob/d71d63b6ab5de8b8a5ea2b728b6ab9ac0b1ba71b/yandextank/plugins/JMeter/reader.py#L35-L53
|
[
"def",
"_exc_to_net",
"(",
"param1",
",",
"success",
")",
":",
"if",
"len",
"(",
"param1",
")",
"<=",
"3",
":",
"# FIXME: we're unable to use better logic here, because we should support non-http codes",
"# but, we should look for core.util.HTTP or some other common logic",
"# here",
"if",
"success",
":",
"return",
"0",
"else",
":",
"return",
"314",
"exc",
"=",
"param1",
".",
"split",
"(",
"' '",
")",
"[",
"-",
"1",
"]",
"if",
"exc",
"in",
"KNOWN_EXC",
".",
"keys",
"(",
")",
":",
"return",
"KNOWN_EXC",
"[",
"exc",
"]",
"else",
":",
"logger",
".",
"warning",
"(",
"\"Unknown Java exception, consider adding it to dictionary: %s\"",
",",
"param1",
")",
"return",
"41"
] |
d71d63b6ab5de8b8a5ea2b728b6ab9ac0b1ba71b
|
test
|
_exc_to_http
|
translate exception str to http code
|
yandextank/plugins/JMeter/reader.py
|
def _exc_to_http(param1):
""" translate exception str to http code"""
if len(param1) <= 3:
try:
int(param1)
except BaseException:
logger.error(
"JMeter wrote some strange data into codes column: %s", param1)
else:
return int(param1)
exc = param1.split(' ')[-1]
if exc in KNOWN_EXC.keys():
return 0
else:
logger.warning("Unknown Java exception. %s", param1)
return 0
|
def _exc_to_http(param1):
""" translate exception str to http code"""
if len(param1) <= 3:
try:
int(param1)
except BaseException:
logger.error(
"JMeter wrote some strange data into codes column: %s", param1)
else:
return int(param1)
exc = param1.split(' ')[-1]
if exc in KNOWN_EXC.keys():
return 0
else:
logger.warning("Unknown Java exception. %s", param1)
return 0
|
[
"translate",
"exception",
"str",
"to",
"http",
"code"
] |
yandex/yandex-tank
|
python
|
https://github.com/yandex/yandex-tank/blob/d71d63b6ab5de8b8a5ea2b728b6ab9ac0b1ba71b/yandextank/plugins/JMeter/reader.py#L56-L72
|
[
"def",
"_exc_to_http",
"(",
"param1",
")",
":",
"if",
"len",
"(",
"param1",
")",
"<=",
"3",
":",
"try",
":",
"int",
"(",
"param1",
")",
"except",
"BaseException",
":",
"logger",
".",
"error",
"(",
"\"JMeter wrote some strange data into codes column: %s\"",
",",
"param1",
")",
"else",
":",
"return",
"int",
"(",
"param1",
")",
"exc",
"=",
"param1",
".",
"split",
"(",
"' '",
")",
"[",
"-",
"1",
"]",
"if",
"exc",
"in",
"KNOWN_EXC",
".",
"keys",
"(",
")",
":",
"return",
"0",
"else",
":",
"logger",
".",
"warning",
"(",
"\"Unknown Java exception. %s\"",
",",
"param1",
")",
"return",
"0"
] |
d71d63b6ab5de8b8a5ea2b728b6ab9ac0b1ba71b
|
test
|
PhantomConfig.read_config
|
Read phantom tool specific options
|
yandextank/plugins/Phantom/utils.py
|
def read_config(self):
""" Read phantom tool specific options """
self.threads = self.cfg["threads"] or str(int(multiprocessing.cpu_count() / 2) + 1)
self.phantom_modules_path = self.cfg["phantom_modules_path"]
self.additional_libs = ' '.join(self.cfg["additional_libs"])
self.answ_log_level = self.cfg["writelog"]
if self.answ_log_level.lower() in ['0', 'false']:
self.answ_log_level = 'none'
elif self.answ_log_level.lower() in ['1', 'true']:
self.answ_log_level = 'all'
self.timeout = parse_duration(self.cfg["timeout"])
if self.timeout > 120000:
logger.warning(
"You've set timeout over 2 minutes."
" Are you a functional tester?")
self.answ_log = self.core.mkstemp(".log", "answ_")
self.core.add_artifact_file(self.answ_log)
self.core.add_artifact_file(self.phout_file)
self.core.add_artifact_file(self.stat_log)
self.phantom_log = self.core.mkstemp(".log", "phantom_")
self.core.add_artifact_file(self.phantom_log)
main_stream = StreamConfig(
self.core,
len(self.streams), self.phout_file, self.answ_log,
self.answ_log_level, self.timeout, self.cfg, True)
self.streams.append(main_stream)
for section in self.multi():
self.streams.append(
StreamConfig(
self.core,
len(self.streams), self.phout_file, self.answ_log,
self.answ_log_level, self.timeout, section))
for stream in self.streams:
stream.read_config()
if any(stream.ssl for stream in self.streams):
self.additional_libs += ' ssl io_benchmark_method_stream_transport_ssl'
|
def read_config(self):
""" Read phantom tool specific options """
self.threads = self.cfg["threads"] or str(int(multiprocessing.cpu_count() / 2) + 1)
self.phantom_modules_path = self.cfg["phantom_modules_path"]
self.additional_libs = ' '.join(self.cfg["additional_libs"])
self.answ_log_level = self.cfg["writelog"]
if self.answ_log_level.lower() in ['0', 'false']:
self.answ_log_level = 'none'
elif self.answ_log_level.lower() in ['1', 'true']:
self.answ_log_level = 'all'
self.timeout = parse_duration(self.cfg["timeout"])
if self.timeout > 120000:
logger.warning(
"You've set timeout over 2 minutes."
" Are you a functional tester?")
self.answ_log = self.core.mkstemp(".log", "answ_")
self.core.add_artifact_file(self.answ_log)
self.core.add_artifact_file(self.phout_file)
self.core.add_artifact_file(self.stat_log)
self.phantom_log = self.core.mkstemp(".log", "phantom_")
self.core.add_artifact_file(self.phantom_log)
main_stream = StreamConfig(
self.core,
len(self.streams), self.phout_file, self.answ_log,
self.answ_log_level, self.timeout, self.cfg, True)
self.streams.append(main_stream)
for section in self.multi():
self.streams.append(
StreamConfig(
self.core,
len(self.streams), self.phout_file, self.answ_log,
self.answ_log_level, self.timeout, section))
for stream in self.streams:
stream.read_config()
if any(stream.ssl for stream in self.streams):
self.additional_libs += ' ssl io_benchmark_method_stream_transport_ssl'
|
[
"Read",
"phantom",
"tool",
"specific",
"options"
] |
yandex/yandex-tank
|
python
|
https://github.com/yandex/yandex-tank/blob/d71d63b6ab5de8b8a5ea2b728b6ab9ac0b1ba71b/yandextank/plugins/Phantom/utils.py#L59-L98
|
[
"def",
"read_config",
"(",
"self",
")",
":",
"self",
".",
"threads",
"=",
"self",
".",
"cfg",
"[",
"\"threads\"",
"]",
"or",
"str",
"(",
"int",
"(",
"multiprocessing",
".",
"cpu_count",
"(",
")",
"/",
"2",
")",
"+",
"1",
")",
"self",
".",
"phantom_modules_path",
"=",
"self",
".",
"cfg",
"[",
"\"phantom_modules_path\"",
"]",
"self",
".",
"additional_libs",
"=",
"' '",
".",
"join",
"(",
"self",
".",
"cfg",
"[",
"\"additional_libs\"",
"]",
")",
"self",
".",
"answ_log_level",
"=",
"self",
".",
"cfg",
"[",
"\"writelog\"",
"]",
"if",
"self",
".",
"answ_log_level",
".",
"lower",
"(",
")",
"in",
"[",
"'0'",
",",
"'false'",
"]",
":",
"self",
".",
"answ_log_level",
"=",
"'none'",
"elif",
"self",
".",
"answ_log_level",
".",
"lower",
"(",
")",
"in",
"[",
"'1'",
",",
"'true'",
"]",
":",
"self",
".",
"answ_log_level",
"=",
"'all'",
"self",
".",
"timeout",
"=",
"parse_duration",
"(",
"self",
".",
"cfg",
"[",
"\"timeout\"",
"]",
")",
"if",
"self",
".",
"timeout",
">",
"120000",
":",
"logger",
".",
"warning",
"(",
"\"You've set timeout over 2 minutes.\"",
"\" Are you a functional tester?\"",
")",
"self",
".",
"answ_log",
"=",
"self",
".",
"core",
".",
"mkstemp",
"(",
"\".log\"",
",",
"\"answ_\"",
")",
"self",
".",
"core",
".",
"add_artifact_file",
"(",
"self",
".",
"answ_log",
")",
"self",
".",
"core",
".",
"add_artifact_file",
"(",
"self",
".",
"phout_file",
")",
"self",
".",
"core",
".",
"add_artifact_file",
"(",
"self",
".",
"stat_log",
")",
"self",
".",
"phantom_log",
"=",
"self",
".",
"core",
".",
"mkstemp",
"(",
"\".log\"",
",",
"\"phantom_\"",
")",
"self",
".",
"core",
".",
"add_artifact_file",
"(",
"self",
".",
"phantom_log",
")",
"main_stream",
"=",
"StreamConfig",
"(",
"self",
".",
"core",
",",
"len",
"(",
"self",
".",
"streams",
")",
",",
"self",
".",
"phout_file",
",",
"self",
".",
"answ_log",
",",
"self",
".",
"answ_log_level",
",",
"self",
".",
"timeout",
",",
"self",
".",
"cfg",
",",
"True",
")",
"self",
".",
"streams",
".",
"append",
"(",
"main_stream",
")",
"for",
"section",
"in",
"self",
".",
"multi",
"(",
")",
":",
"self",
".",
"streams",
".",
"append",
"(",
"StreamConfig",
"(",
"self",
".",
"core",
",",
"len",
"(",
"self",
".",
"streams",
")",
",",
"self",
".",
"phout_file",
",",
"self",
".",
"answ_log",
",",
"self",
".",
"answ_log_level",
",",
"self",
".",
"timeout",
",",
"section",
")",
")",
"for",
"stream",
"in",
"self",
".",
"streams",
":",
"stream",
".",
"read_config",
"(",
")",
"if",
"any",
"(",
"stream",
".",
"ssl",
"for",
"stream",
"in",
"self",
".",
"streams",
")",
":",
"self",
".",
"additional_libs",
"+=",
"' ssl io_benchmark_method_stream_transport_ssl'"
] |
d71d63b6ab5de8b8a5ea2b728b6ab9ac0b1ba71b
|
test
|
PhantomConfig.compose_config
|
Generate phantom tool run config
|
yandextank/plugins/Phantom/utils.py
|
def compose_config(self):
""" Generate phantom tool run config """
streams_config = ''
stat_benchmarks = ''
for stream in self.streams:
streams_config += stream.compose_config()
if not stream.is_main:
stat_benchmarks += " " + "benchmark_io%s" % stream.sequence_no
kwargs = {}
kwargs['threads'] = self.threads
kwargs['phantom_log'] = self.phantom_log
kwargs['stat_log'] = self.stat_log
kwargs['benchmarks_block'] = streams_config
kwargs['stat_benchmarks'] = stat_benchmarks
kwargs['additional_libs'] = self.additional_libs
kwargs['phantom_modules_path'] = self.phantom_modules_path
filename = self.core.mkstemp(".conf", "phantom_")
self.core.add_artifact_file(filename)
logger.debug("Generating phantom config: %s", filename)
template_str = resource_string(__name__, "config/phantom.conf.tpl")
tpl = string.Template(template_str)
config = tpl.substitute(kwargs)
with open(filename, 'w') as conffile:
conffile.write(config)
return filename
|
def compose_config(self):
""" Generate phantom tool run config """
streams_config = ''
stat_benchmarks = ''
for stream in self.streams:
streams_config += stream.compose_config()
if not stream.is_main:
stat_benchmarks += " " + "benchmark_io%s" % stream.sequence_no
kwargs = {}
kwargs['threads'] = self.threads
kwargs['phantom_log'] = self.phantom_log
kwargs['stat_log'] = self.stat_log
kwargs['benchmarks_block'] = streams_config
kwargs['stat_benchmarks'] = stat_benchmarks
kwargs['additional_libs'] = self.additional_libs
kwargs['phantom_modules_path'] = self.phantom_modules_path
filename = self.core.mkstemp(".conf", "phantom_")
self.core.add_artifact_file(filename)
logger.debug("Generating phantom config: %s", filename)
template_str = resource_string(__name__, "config/phantom.conf.tpl")
tpl = string.Template(template_str)
config = tpl.substitute(kwargs)
with open(filename, 'w') as conffile:
conffile.write(config)
return filename
|
[
"Generate",
"phantom",
"tool",
"run",
"config"
] |
yandex/yandex-tank
|
python
|
https://github.com/yandex/yandex-tank/blob/d71d63b6ab5de8b8a5ea2b728b6ab9ac0b1ba71b/yandextank/plugins/Phantom/utils.py#L109-L135
|
[
"def",
"compose_config",
"(",
"self",
")",
":",
"streams_config",
"=",
"''",
"stat_benchmarks",
"=",
"''",
"for",
"stream",
"in",
"self",
".",
"streams",
":",
"streams_config",
"+=",
"stream",
".",
"compose_config",
"(",
")",
"if",
"not",
"stream",
".",
"is_main",
":",
"stat_benchmarks",
"+=",
"\" \"",
"+",
"\"benchmark_io%s\"",
"%",
"stream",
".",
"sequence_no",
"kwargs",
"=",
"{",
"}",
"kwargs",
"[",
"'threads'",
"]",
"=",
"self",
".",
"threads",
"kwargs",
"[",
"'phantom_log'",
"]",
"=",
"self",
".",
"phantom_log",
"kwargs",
"[",
"'stat_log'",
"]",
"=",
"self",
".",
"stat_log",
"kwargs",
"[",
"'benchmarks_block'",
"]",
"=",
"streams_config",
"kwargs",
"[",
"'stat_benchmarks'",
"]",
"=",
"stat_benchmarks",
"kwargs",
"[",
"'additional_libs'",
"]",
"=",
"self",
".",
"additional_libs",
"kwargs",
"[",
"'phantom_modules_path'",
"]",
"=",
"self",
".",
"phantom_modules_path",
"filename",
"=",
"self",
".",
"core",
".",
"mkstemp",
"(",
"\".conf\"",
",",
"\"phantom_\"",
")",
"self",
".",
"core",
".",
"add_artifact_file",
"(",
"filename",
")",
"logger",
".",
"debug",
"(",
"\"Generating phantom config: %s\"",
",",
"filename",
")",
"template_str",
"=",
"resource_string",
"(",
"__name__",
",",
"\"config/phantom.conf.tpl\"",
")",
"tpl",
"=",
"string",
".",
"Template",
"(",
"template_str",
")",
"config",
"=",
"tpl",
".",
"substitute",
"(",
"kwargs",
")",
"with",
"open",
"(",
"filename",
",",
"'w'",
")",
"as",
"conffile",
":",
"conffile",
".",
"write",
"(",
"config",
")",
"return",
"filename"
] |
d71d63b6ab5de8b8a5ea2b728b6ab9ac0b1ba71b
|
test
|
PhantomConfig.get_info
|
get merged info about phantom conf
|
yandextank/plugins/Phantom/utils.py
|
def get_info(self):
""" get merged info about phantom conf """
result = copy.copy(self.streams[0])
result.stat_log = self.stat_log
result.steps = []
result.ammo_file = ''
result.rps_schedule = None
result.ammo_count = 0
result.duration = 0
result.instances = 0
result.loadscheme = []
result.loop_count = 0
for stream in self.streams:
sec_no = 0
logger.debug("Steps: %s", stream.stepper_wrapper.steps)
for item in stream.stepper_wrapper.steps:
for x in range(0, item[1]):
if len(result.steps) > sec_no:
result.steps[sec_no][0] += item[0]
else:
result.steps.append([item[0], 1])
sec_no += 1
if result.rps_schedule:
result.rps_schedule = []
else:
result.rps_schedule = stream.stepper_wrapper.loadscheme
if result.loadscheme:
result.loadscheme = ''
else:
# FIXME: add formatted load scheme for server:
# <step_size,step_type,first_rps,last_rps,original_step_params>
# as a string
result.loadscheme = ''
if result.loop_count:
result.loop_count = u'0'
else:
result.loop_count = stream.stepper_wrapper.loop_count
result.ammo_file += '{} '.format(stream.stepper_wrapper.ammo_file)
result.ammo_count += stream.stepper_wrapper.ammo_count
result.duration = max(
result.duration, stream.stepper_wrapper.duration)
result.instances += stream.instances
if not result.ammo_count:
raise ValueError("Total ammo count cannot be zero")
return result
|
def get_info(self):
""" get merged info about phantom conf """
result = copy.copy(self.streams[0])
result.stat_log = self.stat_log
result.steps = []
result.ammo_file = ''
result.rps_schedule = None
result.ammo_count = 0
result.duration = 0
result.instances = 0
result.loadscheme = []
result.loop_count = 0
for stream in self.streams:
sec_no = 0
logger.debug("Steps: %s", stream.stepper_wrapper.steps)
for item in stream.stepper_wrapper.steps:
for x in range(0, item[1]):
if len(result.steps) > sec_no:
result.steps[sec_no][0] += item[0]
else:
result.steps.append([item[0], 1])
sec_no += 1
if result.rps_schedule:
result.rps_schedule = []
else:
result.rps_schedule = stream.stepper_wrapper.loadscheme
if result.loadscheme:
result.loadscheme = ''
else:
# FIXME: add formatted load scheme for server:
# <step_size,step_type,first_rps,last_rps,original_step_params>
# as a string
result.loadscheme = ''
if result.loop_count:
result.loop_count = u'0'
else:
result.loop_count = stream.stepper_wrapper.loop_count
result.ammo_file += '{} '.format(stream.stepper_wrapper.ammo_file)
result.ammo_count += stream.stepper_wrapper.ammo_count
result.duration = max(
result.duration, stream.stepper_wrapper.duration)
result.instances += stream.instances
if not result.ammo_count:
raise ValueError("Total ammo count cannot be zero")
return result
|
[
"get",
"merged",
"info",
"about",
"phantom",
"conf"
] |
yandex/yandex-tank
|
python
|
https://github.com/yandex/yandex-tank/blob/d71d63b6ab5de8b8a5ea2b728b6ab9ac0b1ba71b/yandextank/plugins/Phantom/utils.py#L142-L192
|
[
"def",
"get_info",
"(",
"self",
")",
":",
"result",
"=",
"copy",
".",
"copy",
"(",
"self",
".",
"streams",
"[",
"0",
"]",
")",
"result",
".",
"stat_log",
"=",
"self",
".",
"stat_log",
"result",
".",
"steps",
"=",
"[",
"]",
"result",
".",
"ammo_file",
"=",
"''",
"result",
".",
"rps_schedule",
"=",
"None",
"result",
".",
"ammo_count",
"=",
"0",
"result",
".",
"duration",
"=",
"0",
"result",
".",
"instances",
"=",
"0",
"result",
".",
"loadscheme",
"=",
"[",
"]",
"result",
".",
"loop_count",
"=",
"0",
"for",
"stream",
"in",
"self",
".",
"streams",
":",
"sec_no",
"=",
"0",
"logger",
".",
"debug",
"(",
"\"Steps: %s\"",
",",
"stream",
".",
"stepper_wrapper",
".",
"steps",
")",
"for",
"item",
"in",
"stream",
".",
"stepper_wrapper",
".",
"steps",
":",
"for",
"x",
"in",
"range",
"(",
"0",
",",
"item",
"[",
"1",
"]",
")",
":",
"if",
"len",
"(",
"result",
".",
"steps",
")",
">",
"sec_no",
":",
"result",
".",
"steps",
"[",
"sec_no",
"]",
"[",
"0",
"]",
"+=",
"item",
"[",
"0",
"]",
"else",
":",
"result",
".",
"steps",
".",
"append",
"(",
"[",
"item",
"[",
"0",
"]",
",",
"1",
"]",
")",
"sec_no",
"+=",
"1",
"if",
"result",
".",
"rps_schedule",
":",
"result",
".",
"rps_schedule",
"=",
"[",
"]",
"else",
":",
"result",
".",
"rps_schedule",
"=",
"stream",
".",
"stepper_wrapper",
".",
"loadscheme",
"if",
"result",
".",
"loadscheme",
":",
"result",
".",
"loadscheme",
"=",
"''",
"else",
":",
"# FIXME: add formatted load scheme for server:",
"# <step_size,step_type,first_rps,last_rps,original_step_params>",
"# as a string",
"result",
".",
"loadscheme",
"=",
"''",
"if",
"result",
".",
"loop_count",
":",
"result",
".",
"loop_count",
"=",
"u'0'",
"else",
":",
"result",
".",
"loop_count",
"=",
"stream",
".",
"stepper_wrapper",
".",
"loop_count",
"result",
".",
"ammo_file",
"+=",
"'{} '",
".",
"format",
"(",
"stream",
".",
"stepper_wrapper",
".",
"ammo_file",
")",
"result",
".",
"ammo_count",
"+=",
"stream",
".",
"stepper_wrapper",
".",
"ammo_count",
"result",
".",
"duration",
"=",
"max",
"(",
"result",
".",
"duration",
",",
"stream",
".",
"stepper_wrapper",
".",
"duration",
")",
"result",
".",
"instances",
"+=",
"stream",
".",
"instances",
"if",
"not",
"result",
".",
"ammo_count",
":",
"raise",
"ValueError",
"(",
"\"Total ammo count cannot be zero\"",
")",
"return",
"result"
] |
d71d63b6ab5de8b8a5ea2b728b6ab9ac0b1ba71b
|
test
|
StreamConfig.read_config
|
reads config
|
yandextank/plugins/Phantom/utils.py
|
def read_config(self):
    """Read phantom stream options from the tank config and resolve the target."""
    # plain single-valued options, read verbatim
    for opt in ("ssl", "tank_type", "method_prefix", "method_options",
                "source_log_prefix", "phantom_http_line",
                "phantom_http_field_num", "phantom_http_field",
                "phantom_http_entity"):
        setattr(self, opt, self.get_option(opt))
    # TODO: refactor. Maybe we should decide how to interact with
    # StepperWrapper here.
    # self.instances = self.get_option('instances')
    # multi-line option: collapse newline-separated IPs to one space-joined string
    self.gatling = ' '.join(self.get_option('gatling_ip').split("\n"))
    self.address = self.get_option('address')
    do_test_connect = self.get_option("connection_test")
    explicit_port = self.get_option('port', '')
    self.ipv6, self.resolved_ip, self.port, self.address = \
        self.address_wizard.resolve(self.address, do_test_connect, explicit_port)
    logger.info(
        "Resolved %s into %s:%s", self.address, self.resolved_ip, self.port)
    # optional client-side TLS settings (empty string when not configured)
    for opt in ("client_cipher_suites", "client_certificate", "client_key"):
        setattr(self, opt, self.get_option(opt, ""))
    self.stepper_wrapper.read_config()
|
def read_config(self):
""" reads config """
# multi-options
self.ssl = self.get_option("ssl")
self.tank_type = self.get_option("tank_type")
# TODO: refactor. Maybe we should decide how to interact with
# StepperWrapper here.
# self.instances = self.get_option('instances')
self.gatling = ' '.join(self.get_option('gatling_ip').split("\n"))
self.method_prefix = self.get_option("method_prefix")
self.method_options = self.get_option("method_options")
self.source_log_prefix = self.get_option("source_log_prefix")
self.phantom_http_line = self.get_option("phantom_http_line")
self.phantom_http_field_num = self.get_option("phantom_http_field_num")
self.phantom_http_field = self.get_option("phantom_http_field")
self.phantom_http_entity = self.get_option("phantom_http_entity")
self.address = self.get_option('address')
do_test_connect = self.get_option("connection_test")
explicit_port = self.get_option('port', '')
self.ipv6, self.resolved_ip, self.port, self.address = self.address_wizard.resolve(
self.address, do_test_connect, explicit_port)
logger.info(
"Resolved %s into %s:%s", self.address, self.resolved_ip, self.port)
self.client_cipher_suites = self.get_option("client_cipher_suites", "")
self.client_certificate = self.get_option("client_certificate", "")
self.client_key = self.get_option("client_key", "")
self.stepper_wrapper.read_config()
|
[
"reads",
"config"
] |
yandex/yandex-tank
|
python
|
https://github.com/yandex/yandex-tank/blob/d71d63b6ab5de8b8a5ea2b728b6ab9ac0b1ba71b/yandextank/plugins/Phantom/utils.py#L253-L283
|
[
"def",
"read_config",
"(",
"self",
")",
":",
"# multi-options",
"self",
".",
"ssl",
"=",
"self",
".",
"get_option",
"(",
"\"ssl\"",
")",
"self",
".",
"tank_type",
"=",
"self",
".",
"get_option",
"(",
"\"tank_type\"",
")",
"# TODO: refactor. Maybe we should decide how to interact with",
"# StepperWrapper here.",
"# self.instances = self.get_option('instances')",
"self",
".",
"gatling",
"=",
"' '",
".",
"join",
"(",
"self",
".",
"get_option",
"(",
"'gatling_ip'",
")",
".",
"split",
"(",
"\"\\n\"",
")",
")",
"self",
".",
"method_prefix",
"=",
"self",
".",
"get_option",
"(",
"\"method_prefix\"",
")",
"self",
".",
"method_options",
"=",
"self",
".",
"get_option",
"(",
"\"method_options\"",
")",
"self",
".",
"source_log_prefix",
"=",
"self",
".",
"get_option",
"(",
"\"source_log_prefix\"",
")",
"self",
".",
"phantom_http_line",
"=",
"self",
".",
"get_option",
"(",
"\"phantom_http_line\"",
")",
"self",
".",
"phantom_http_field_num",
"=",
"self",
".",
"get_option",
"(",
"\"phantom_http_field_num\"",
")",
"self",
".",
"phantom_http_field",
"=",
"self",
".",
"get_option",
"(",
"\"phantom_http_field\"",
")",
"self",
".",
"phantom_http_entity",
"=",
"self",
".",
"get_option",
"(",
"\"phantom_http_entity\"",
")",
"self",
".",
"address",
"=",
"self",
".",
"get_option",
"(",
"'address'",
")",
"do_test_connect",
"=",
"self",
".",
"get_option",
"(",
"\"connection_test\"",
")",
"explicit_port",
"=",
"self",
".",
"get_option",
"(",
"'port'",
",",
"''",
")",
"self",
".",
"ipv6",
",",
"self",
".",
"resolved_ip",
",",
"self",
".",
"port",
",",
"self",
".",
"address",
"=",
"self",
".",
"address_wizard",
".",
"resolve",
"(",
"self",
".",
"address",
",",
"do_test_connect",
",",
"explicit_port",
")",
"logger",
".",
"info",
"(",
"\"Resolved %s into %s:%s\"",
",",
"self",
".",
"address",
",",
"self",
".",
"resolved_ip",
",",
"self",
".",
"port",
")",
"self",
".",
"client_cipher_suites",
"=",
"self",
".",
"get_option",
"(",
"\"client_cipher_suites\"",
",",
"\"\"",
")",
"self",
".",
"client_certificate",
"=",
"self",
".",
"get_option",
"(",
"\"client_certificate\"",
",",
"\"\"",
")",
"self",
".",
"client_key",
"=",
"self",
".",
"get_option",
"(",
"\"client_key\"",
",",
"\"\"",
")",
"self",
".",
"stepper_wrapper",
".",
"read_config",
"(",
")"
] |
d71d63b6ab5de8b8a5ea2b728b6ab9ac0b1ba71b
|
test
|
StreamConfig.compose_config
|
compose benchmark block
|
yandextank/plugins/Phantom/utils.py
|
def compose_config(self):
    """ compose benchmark block

    Renders one phantom "benchmark" config section from a template
    (phantom_benchmark_main.tpl when this is the main stream, otherwise
    phantom_benchmark_additional.tpl), filling in SSL transport, protocol,
    bind, timing and reply-limit parameters.

    :returns: rendered config section as a string
    :raises RuntimeError: if the stepper produced no STPD file
    """
    # step file
    self.stepper_wrapper.prepare_stepper()
    self.stpd = self.stepper_wrapper.stpd
    # the stepper may decide its own instance count; prefer it when set
    if self.stepper_wrapper.instances:
        self.instances = self.stepper_wrapper.instances
    if not self.stpd:
        raise RuntimeError("Cannot proceed with no STPD file")
    kwargs = {}
    kwargs['sequence_no'] = self.sequence_no
    if self.ssl:
        _auth_section = ''
        _ciphers = ''
        ssl_template = "transport_t ssl_transport = transport_ssl_t {\n" \
                       "        timeout = 1s\n" \
                       "        %s\n" \
                       "        %s}\n" \
                       "        transport = ssl_transport"
        # client auth is only emitted when a cert or key is configured
        if self.client_certificate or self.client_key:
            _auth_section = 'auth_t def_auth = auth_t { key = "%s" cert = "%s"} auth = def_auth' \
                            % (self.client_key, self.client_certificate)
        if self.client_cipher_suites:
            _ciphers = 'ciphers = "%s"' % self.client_cipher_suites
        kwargs['ssl_transport'] = ssl_template % (_auth_section, _ciphers)
    else:
        kwargs['ssl_transport'] = ""
    # NOTE: the conditional spans the whole expression, i.e.
    # (prefix + "_ipv6_t") if ipv6 else (prefix + "_ipv4_t") — the ternary
    # has lower precedence than '+', so this is correct as written.
    kwargs['method_stream'] = self.method_prefix + \
        "_ipv6_t" if self.ipv6 else self.method_prefix + "_ipv4_t"
    kwargs['phout'] = self.phout_file
    kwargs['answ_log'] = self.answ_log
    kwargs['answ_log_level'] = self.answ_log_level
    # answ_log_level 'none' comments the answ line out of the template
    kwargs['comment_answ'] = "# " if self.answ_log_level == 'none' else ''
    kwargs['stpd'] = self.stpd
    kwargs['source_log_prefix'] = self.source_log_prefix
    kwargs['method_options'] = self.method_options
    if self.tank_type:
        kwargs[
            'proto'] = "proto=http_proto%s" % self.sequence_no if self.tank_type == 'http' else "proto=none_proto"
        kwargs['comment_proto'] = ""
    else:
        # no tank type: comment the proto line out of the template
        kwargs['proto'] = ""
        kwargs['comment_proto'] = "#"
    if self.gatling:
        kwargs['bind'] = 'bind={ ' + self.gatling + ' }'
    else:
        kwargs['bind'] = ''
    kwargs['ip'] = self.resolved_ip
    kwargs['port'] = self.port
    kwargs['timeout'] = self.timeout
    kwargs['instances'] = self.instances
    # optional HTTP reply parsing limits, assembled only from set options
    tune = ''
    if self.phantom_http_entity:
        tune += "entity = " + self.phantom_http_entity + "\n"
    if self.phantom_http_field:
        tune += "field = " + self.phantom_http_field + "\n"
    if self.phantom_http_field_num:
        tune += "field_num = {}\n".format(self.phantom_http_field_num)
    if self.phantom_http_line:
        tune += "line = " + self.phantom_http_line + "\n"
    if tune:
        kwargs['reply_limits'] = 'reply_limits = {\n' + tune + "}"
    else:
        kwargs['reply_limits'] = ''
    if self.is_main:
        fname = 'phantom_benchmark_main.tpl'
    else:
        fname = 'phantom_benchmark_additional.tpl'
    # NOTE(review): resource_string returns bytes on Python 3, while
    # string.Template expects str there — presumably this runs where that
    # combination works; verify against the project's Python version.
    template_str = resource_string(
        __name__, "config/" + fname)
    tpl = string.Template(template_str)
    config = tpl.substitute(kwargs)
    return config
|
def compose_config(self):
""" compose benchmark block """
# step file
self.stepper_wrapper.prepare_stepper()
self.stpd = self.stepper_wrapper.stpd
if self.stepper_wrapper.instances:
self.instances = self.stepper_wrapper.instances
if not self.stpd:
raise RuntimeError("Cannot proceed with no STPD file")
kwargs = {}
kwargs['sequence_no'] = self.sequence_no
if self.ssl:
_auth_section = ''
_ciphers = ''
ssl_template = "transport_t ssl_transport = transport_ssl_t {\n" \
" timeout = 1s\n" \
" %s\n" \
" %s}\n" \
" transport = ssl_transport"
if self.client_certificate or self.client_key:
_auth_section = 'auth_t def_auth = auth_t { key = "%s" cert = "%s"} auth = def_auth' \
% (self.client_key, self.client_certificate)
if self.client_cipher_suites:
_ciphers = 'ciphers = "%s"' % self.client_cipher_suites
kwargs['ssl_transport'] = ssl_template % (_auth_section, _ciphers)
else:
kwargs['ssl_transport'] = ""
kwargs['method_stream'] = self.method_prefix + \
"_ipv6_t" if self.ipv6 else self.method_prefix + "_ipv4_t"
kwargs['phout'] = self.phout_file
kwargs['answ_log'] = self.answ_log
kwargs['answ_log_level'] = self.answ_log_level
kwargs['comment_answ'] = "# " if self.answ_log_level == 'none' else ''
kwargs['stpd'] = self.stpd
kwargs['source_log_prefix'] = self.source_log_prefix
kwargs['method_options'] = self.method_options
if self.tank_type:
kwargs[
'proto'] = "proto=http_proto%s" % self.sequence_no if self.tank_type == 'http' else "proto=none_proto"
kwargs['comment_proto'] = ""
else:
kwargs['proto'] = ""
kwargs['comment_proto'] = "#"
if self.gatling:
kwargs['bind'] = 'bind={ ' + self.gatling + ' }'
else:
kwargs['bind'] = ''
kwargs['ip'] = self.resolved_ip
kwargs['port'] = self.port
kwargs['timeout'] = self.timeout
kwargs['instances'] = self.instances
tune = ''
if self.phantom_http_entity:
tune += "entity = " + self.phantom_http_entity + "\n"
if self.phantom_http_field:
tune += "field = " + self.phantom_http_field + "\n"
if self.phantom_http_field_num:
tune += "field_num = {}\n".format(self.phantom_http_field_num)
if self.phantom_http_line:
tune += "line = " + self.phantom_http_line + "\n"
if tune:
kwargs['reply_limits'] = 'reply_limits = {\n' + tune + "}"
else:
kwargs['reply_limits'] = ''
if self.is_main:
fname = 'phantom_benchmark_main.tpl'
else:
fname = 'phantom_benchmark_additional.tpl'
template_str = resource_string(
__name__, "config/" + fname)
tpl = string.Template(template_str)
config = tpl.substitute(kwargs)
return config
|
[
"compose",
"benchmark",
"block"
] |
yandex/yandex-tank
|
python
|
https://github.com/yandex/yandex-tank/blob/d71d63b6ab5de8b8a5ea2b728b6ab9ac0b1ba71b/yandextank/plugins/Phantom/utils.py#L285-L363
|
[
"def",
"compose_config",
"(",
"self",
")",
":",
"# step file",
"self",
".",
"stepper_wrapper",
".",
"prepare_stepper",
"(",
")",
"self",
".",
"stpd",
"=",
"self",
".",
"stepper_wrapper",
".",
"stpd",
"if",
"self",
".",
"stepper_wrapper",
".",
"instances",
":",
"self",
".",
"instances",
"=",
"self",
".",
"stepper_wrapper",
".",
"instances",
"if",
"not",
"self",
".",
"stpd",
":",
"raise",
"RuntimeError",
"(",
"\"Cannot proceed with no STPD file\"",
")",
"kwargs",
"=",
"{",
"}",
"kwargs",
"[",
"'sequence_no'",
"]",
"=",
"self",
".",
"sequence_no",
"if",
"self",
".",
"ssl",
":",
"_auth_section",
"=",
"''",
"_ciphers",
"=",
"''",
"ssl_template",
"=",
"\"transport_t ssl_transport = transport_ssl_t {\\n\"",
"\" timeout = 1s\\n\"",
"\" %s\\n\"",
"\" %s}\\n\"",
"\" transport = ssl_transport\"",
"if",
"self",
".",
"client_certificate",
"or",
"self",
".",
"client_key",
":",
"_auth_section",
"=",
"'auth_t def_auth = auth_t { key = \"%s\" cert = \"%s\"} auth = def_auth'",
"%",
"(",
"self",
".",
"client_key",
",",
"self",
".",
"client_certificate",
")",
"if",
"self",
".",
"client_cipher_suites",
":",
"_ciphers",
"=",
"'ciphers = \"%s\"'",
"%",
"self",
".",
"client_cipher_suites",
"kwargs",
"[",
"'ssl_transport'",
"]",
"=",
"ssl_template",
"%",
"(",
"_auth_section",
",",
"_ciphers",
")",
"else",
":",
"kwargs",
"[",
"'ssl_transport'",
"]",
"=",
"\"\"",
"kwargs",
"[",
"'method_stream'",
"]",
"=",
"self",
".",
"method_prefix",
"+",
"\"_ipv6_t\"",
"if",
"self",
".",
"ipv6",
"else",
"self",
".",
"method_prefix",
"+",
"\"_ipv4_t\"",
"kwargs",
"[",
"'phout'",
"]",
"=",
"self",
".",
"phout_file",
"kwargs",
"[",
"'answ_log'",
"]",
"=",
"self",
".",
"answ_log",
"kwargs",
"[",
"'answ_log_level'",
"]",
"=",
"self",
".",
"answ_log_level",
"kwargs",
"[",
"'comment_answ'",
"]",
"=",
"\"# \"",
"if",
"self",
".",
"answ_log_level",
"==",
"'none'",
"else",
"''",
"kwargs",
"[",
"'stpd'",
"]",
"=",
"self",
".",
"stpd",
"kwargs",
"[",
"'source_log_prefix'",
"]",
"=",
"self",
".",
"source_log_prefix",
"kwargs",
"[",
"'method_options'",
"]",
"=",
"self",
".",
"method_options",
"if",
"self",
".",
"tank_type",
":",
"kwargs",
"[",
"'proto'",
"]",
"=",
"\"proto=http_proto%s\"",
"%",
"self",
".",
"sequence_no",
"if",
"self",
".",
"tank_type",
"==",
"'http'",
"else",
"\"proto=none_proto\"",
"kwargs",
"[",
"'comment_proto'",
"]",
"=",
"\"\"",
"else",
":",
"kwargs",
"[",
"'proto'",
"]",
"=",
"\"\"",
"kwargs",
"[",
"'comment_proto'",
"]",
"=",
"\"#\"",
"if",
"self",
".",
"gatling",
":",
"kwargs",
"[",
"'bind'",
"]",
"=",
"'bind={ '",
"+",
"self",
".",
"gatling",
"+",
"' }'",
"else",
":",
"kwargs",
"[",
"'bind'",
"]",
"=",
"''",
"kwargs",
"[",
"'ip'",
"]",
"=",
"self",
".",
"resolved_ip",
"kwargs",
"[",
"'port'",
"]",
"=",
"self",
".",
"port",
"kwargs",
"[",
"'timeout'",
"]",
"=",
"self",
".",
"timeout",
"kwargs",
"[",
"'instances'",
"]",
"=",
"self",
".",
"instances",
"tune",
"=",
"''",
"if",
"self",
".",
"phantom_http_entity",
":",
"tune",
"+=",
"\"entity = \"",
"+",
"self",
".",
"phantom_http_entity",
"+",
"\"\\n\"",
"if",
"self",
".",
"phantom_http_field",
":",
"tune",
"+=",
"\"field = \"",
"+",
"self",
".",
"phantom_http_field",
"+",
"\"\\n\"",
"if",
"self",
".",
"phantom_http_field_num",
":",
"tune",
"+=",
"\"field_num = {}\\n\"",
".",
"format",
"(",
"self",
".",
"phantom_http_field_num",
")",
"if",
"self",
".",
"phantom_http_line",
":",
"tune",
"+=",
"\"line = \"",
"+",
"self",
".",
"phantom_http_line",
"+",
"\"\\n\"",
"if",
"tune",
":",
"kwargs",
"[",
"'reply_limits'",
"]",
"=",
"'reply_limits = {\\n'",
"+",
"tune",
"+",
"\"}\"",
"else",
":",
"kwargs",
"[",
"'reply_limits'",
"]",
"=",
"''",
"if",
"self",
".",
"is_main",
":",
"fname",
"=",
"'phantom_benchmark_main.tpl'",
"else",
":",
"fname",
"=",
"'phantom_benchmark_additional.tpl'",
"template_str",
"=",
"resource_string",
"(",
"__name__",
",",
"\"config/\"",
"+",
"fname",
")",
"tpl",
"=",
"string",
".",
"Template",
"(",
"template_str",
")",
"config",
"=",
"tpl",
".",
"substitute",
"(",
"kwargs",
")",
"return",
"config"
] |
d71d63b6ab5de8b8a5ea2b728b6ab9ac0b1ba71b
|
test
|
Plugin.patch_config
|
download remote resources, replace links with local filenames
add result file section
:param dict config: pandora config
|
yandextank/plugins/Pandora/plugin.py
|
def patch_config(self, config):
    """
    download remote resources, replace links with local filenames
    add result file section
    :param dict config: pandora config
    """
    # pick up expvar settings when the config carries a monitoring section
    monitoring = config.get("monitoring")
    if monitoring:
        expvar_cfg = monitoring.get("expvar")
        if expvar_cfg:
            self.expvar = expvar_cfg.get("enabled")
            # falsy/missing port falls back to the default, as before
            self.expvar_port = expvar_cfg.get("port") or self.DEFAULT_EXPVAR_PORT
    # or set if expvar not exists
    else:
        config["monitoring"] = {
            "expvar": {
                "enabled": True,
            }
        }
        self.expvar = True
        self.expvar_port = self.DEFAULT_EXPVAR_PORT
    # FIXME this is broken for custom ammo providers due to interface incompatibility
    # FIXME refactor pandora plx
    for pool in config['pools']:
        ammo_file = pool.get('ammo', {}).get('file', '')
        if ammo_file:
            self.ammofile = ammo_file
            pool['ammo']['file'] = resource_manager.resource_filename(
                self.ammofile
            )
        result = pool.get('result')
        if not result or 'phout' not in result.get('type', ''):
            logger.warning('Seems like pandora result file not specified... adding defaults')
            pool['result'] = {
                'destination': self.DEFAULT_REPORT_FILE,
                'type': 'phout',
            }
    return config
|
def patch_config(self, config):
"""
download remote resources, replace links with local filenames
add result file section
:param dict config: pandora config
"""
# get expvar parameters
if config.get("monitoring"):
if config["monitoring"].get("expvar"):
self.expvar = config["monitoring"]["expvar"].get("enabled")
if config["monitoring"]["expvar"].get("port"):
self.expvar_port = config["monitoring"]["expvar"].get("port")
else:
self.expvar_port = self.DEFAULT_EXPVAR_PORT
# or set if expvar not exists
else:
config["monitoring"] = {
"expvar": {
"enabled": True,
}
}
self.expvar = True
self.expvar_port = self.DEFAULT_EXPVAR_PORT
# FIXME this is broken for custom ammo providers due to interface incompatibility
# FIXME refactor pandora plx
for pool in config['pools']:
if pool.get('ammo', {}).get('file', ''):
self.ammofile = pool['ammo']['file']
pool['ammo']['file'] = resource_manager.resource_filename(
self.ammofile
)
if not pool.get('result') or 'phout' not in pool.get('result', {}).get('type', ''):
logger.warning('Seems like pandora result file not specified... adding defaults')
pool['result'] = dict(
destination=self.DEFAULT_REPORT_FILE,
type='phout',
)
return config
|
[
"download",
"remote",
"resources",
"replace",
"links",
"with",
"local",
"filenames",
"add",
"result",
"file",
"section",
":",
"param",
"dict",
"config",
":",
"pandora",
"config"
] |
yandex/yandex-tank
|
python
|
https://github.com/yandex/yandex-tank/blob/d71d63b6ab5de8b8a5ea2b728b6ab9ac0b1ba71b/yandextank/plugins/Pandora/plugin.py#L94-L132
|
[
"def",
"patch_config",
"(",
"self",
",",
"config",
")",
":",
"# get expvar parameters",
"if",
"config",
".",
"get",
"(",
"\"monitoring\"",
")",
":",
"if",
"config",
"[",
"\"monitoring\"",
"]",
".",
"get",
"(",
"\"expvar\"",
")",
":",
"self",
".",
"expvar",
"=",
"config",
"[",
"\"monitoring\"",
"]",
"[",
"\"expvar\"",
"]",
".",
"get",
"(",
"\"enabled\"",
")",
"if",
"config",
"[",
"\"monitoring\"",
"]",
"[",
"\"expvar\"",
"]",
".",
"get",
"(",
"\"port\"",
")",
":",
"self",
".",
"expvar_port",
"=",
"config",
"[",
"\"monitoring\"",
"]",
"[",
"\"expvar\"",
"]",
".",
"get",
"(",
"\"port\"",
")",
"else",
":",
"self",
".",
"expvar_port",
"=",
"self",
".",
"DEFAULT_EXPVAR_PORT",
"# or set if expvar not exists",
"else",
":",
"config",
"[",
"\"monitoring\"",
"]",
"=",
"{",
"\"expvar\"",
":",
"{",
"\"enabled\"",
":",
"True",
",",
"}",
"}",
"self",
".",
"expvar",
"=",
"True",
"self",
".",
"expvar_port",
"=",
"self",
".",
"DEFAULT_EXPVAR_PORT",
"# FIXME this is broken for custom ammo providers due to interface incompatibility",
"# FIXME refactor pandora plx",
"for",
"pool",
"in",
"config",
"[",
"'pools'",
"]",
":",
"if",
"pool",
".",
"get",
"(",
"'ammo'",
",",
"{",
"}",
")",
".",
"get",
"(",
"'file'",
",",
"''",
")",
":",
"self",
".",
"ammofile",
"=",
"pool",
"[",
"'ammo'",
"]",
"[",
"'file'",
"]",
"pool",
"[",
"'ammo'",
"]",
"[",
"'file'",
"]",
"=",
"resource_manager",
".",
"resource_filename",
"(",
"self",
".",
"ammofile",
")",
"if",
"not",
"pool",
".",
"get",
"(",
"'result'",
")",
"or",
"'phout'",
"not",
"in",
"pool",
".",
"get",
"(",
"'result'",
",",
"{",
"}",
")",
".",
"get",
"(",
"'type'",
",",
"''",
")",
":",
"logger",
".",
"warning",
"(",
"'Seems like pandora result file not specified... adding defaults'",
")",
"pool",
"[",
"'result'",
"]",
"=",
"dict",
"(",
"destination",
"=",
"self",
".",
"DEFAULT_REPORT_FILE",
",",
"type",
"=",
"'phout'",
",",
")",
"return",
"config"
] |
d71d63b6ab5de8b8a5ea2b728b6ab9ac0b1ba71b
|
test
|
PatchedValidator.validate_duration
|
2h
2h5m
5m
180
1h4m3
:param duration:
:return:
|
yandextank/validator/validator.py
|
def validate_duration(self, field, duration):
    '''
    Check that *duration* looks like a load duration, e.g.:
    2h / 2h5m / 5m / 180 / 1h4m3
    :param field: schema field name used for error reporting
    :param duration: duration string to validate
    :return: None; problems are reported through self._error
    '''
    pattern = r'^(\d+d)?(\d+h)?(\d+m)?(\d+s?)?$'
    if re.match(pattern, duration) is None:
        self._error(field, 'Load duration examples: 2h30m; 5m15; 180')
|
def validate_duration(self, field, duration):
'''
2h
2h5m
5m
180
1h4m3
:param duration:
:return:
'''
DURATION_RE = r'^(\d+d)?(\d+h)?(\d+m)?(\d+s?)?$'
if not re.match(DURATION_RE, duration):
self._error(field, 'Load duration examples: 2h30m; 5m15; 180')
|
[
"2h",
"2h5m",
"5m",
"180",
"1h4m3",
":",
"param",
"duration",
":",
":",
"return",
":"
] |
yandex/yandex-tank
|
python
|
https://github.com/yandex/yandex-tank/blob/d71d63b6ab5de8b8a5ea2b728b6ab9ac0b1ba71b/yandextank/validator/validator.py#L97-L109
|
[
"def",
"validate_duration",
"(",
"self",
",",
"field",
",",
"duration",
")",
":",
"DURATION_RE",
"=",
"r'^(\\d+d)?(\\d+h)?(\\d+m)?(\\d+s?)?$'",
"if",
"not",
"re",
".",
"match",
"(",
"DURATION_RE",
",",
"duration",
")",
":",
"self",
".",
"_error",
"(",
"field",
",",
"'Load duration examples: 2h30m; 5m15; 180'",
")"
] |
d71d63b6ab5de8b8a5ea2b728b6ab9ac0b1ba71b
|
test
|
PatchedValidator._validator_load_scheme
|
step(10,200,5,180)
step(5,50,2.5,5m)
line(22,154,2h5m)
step(5,50,2.5,5m) line(22,154,2h5m)
const(10,1h4m3s)
:param field:
:param value:
:return:
|
yandextank/validator/validator.py
|
def _validator_load_scheme(self, field, value):
    '''
    Validate a load scheme expression, e.g.:
    step(10,200,5,180) / line(22,154,2h5m) / const(10,1h4m3s)
    or several of them separated by spaces.
    :param field: schema field name used for error reporting
    :param value: load scheme string to validate
    :return: None; problems are reported through self._error
    '''
    # stpd file can be any value
    # Fix: the original used `in 'stpd_file'` — a substring test — so any
    # load_type that happens to be a substring (e.g. 'file') silently
    # skipped validation; equality is what was intended.
    if self.document['load_type'] == 'stpd_file':
        return
    PRIMARY_RE = r'(step|line|const)\((.+?)\)'
    N_OF_ARGS = {
        'step': 4,
        'line': 3,
        'const': 2,
    }
    matches = re.findall(PRIMARY_RE, value)
    if not matches:
        self._error(field, 'Should match one of the following patterns: step(...) / line(...) / const(...)')
        return
    for curve, params_str in matches:
        params = [v.strip() for v in params_str.split(',')]
        # check number of arguments
        if len(params) != N_OF_ARGS[curve]:
            self._error(field, '{} load scheme: expected {} arguments, found {}'.format(curve,
                                                                                        N_OF_ARGS[curve],
                                                                                        len(params)))
        # all arguments but the last must be numbers
        for param in params[:-1]:
            if not self.is_number(param):
                self._error(field, 'Argument {} in load scheme should be a number'.format(param))
        # the last argument is a duration string
        self.validate_duration(field, params[-1])
|
def _validator_load_scheme(self, field, value):
'''
step(10,200,5,180)
step(5,50,2.5,5m)
line(22,154,2h5m)
step(5,50,2.5,5m) line(22,154,2h5m)
const(10,1h4m3s)
:param field:
:param value:
:return:
'''
# stpd file can be any value
if self.document['load_type'] in 'stpd_file':
return
PRIMARY_RE = r'(step|line|const)\((.+?)\)'
N_OF_ARGS = {
'step': 4,
'line': 3,
'const': 2,
}
matches = re.findall(PRIMARY_RE, value)
if len(matches) == 0:
self._error(field, 'Should match one of the following patterns: step(...) / line(...) / const(...)')
else:
for match in matches:
curve, params_str = match
params = [v.strip() for v in params_str.split(',')]
# check number of arguments
if not len(params) == N_OF_ARGS[curve]:
self._error(field, '{} load scheme: expected {} arguments, found {}'.format(curve,
N_OF_ARGS[curve],
len(params)))
# check arguments' types
for param in params[:-1]:
if not self.is_number(param):
self._error(field, 'Argument {} in load scheme should be a number'.format(param))
self.validate_duration(field, params[-1])
|
[
"step",
"(",
"10",
"200",
"5",
"180",
")",
"step",
"(",
"5",
"50",
"2",
".",
"5",
"5m",
")",
"line",
"(",
"22",
"154",
"2h5m",
")",
"step",
"(",
"5",
"50",
"2",
".",
"5",
"5m",
")",
"line",
"(",
"22",
"154",
"2h5m",
")",
"const",
"(",
"10",
"1h4m3s",
")",
":",
"param",
"field",
":",
":",
"param",
"value",
":",
":",
"return",
":"
] |
yandex/yandex-tank
|
python
|
https://github.com/yandex/yandex-tank/blob/d71d63b6ab5de8b8a5ea2b728b6ab9ac0b1ba71b/yandextank/validator/validator.py#L111-L148
|
[
"def",
"_validator_load_scheme",
"(",
"self",
",",
"field",
",",
"value",
")",
":",
"# stpd file can be any value",
"if",
"self",
".",
"document",
"[",
"'load_type'",
"]",
"in",
"'stpd_file'",
":",
"return",
"PRIMARY_RE",
"=",
"r'(step|line|const)\\((.+?)\\)'",
"N_OF_ARGS",
"=",
"{",
"'step'",
":",
"4",
",",
"'line'",
":",
"3",
",",
"'const'",
":",
"2",
",",
"}",
"matches",
"=",
"re",
".",
"findall",
"(",
"PRIMARY_RE",
",",
"value",
")",
"if",
"len",
"(",
"matches",
")",
"==",
"0",
":",
"self",
".",
"_error",
"(",
"field",
",",
"'Should match one of the following patterns: step(...) / line(...) / const(...)'",
")",
"else",
":",
"for",
"match",
"in",
"matches",
":",
"curve",
",",
"params_str",
"=",
"match",
"params",
"=",
"[",
"v",
".",
"strip",
"(",
")",
"for",
"v",
"in",
"params_str",
".",
"split",
"(",
"','",
")",
"]",
"# check number of arguments",
"if",
"not",
"len",
"(",
"params",
")",
"==",
"N_OF_ARGS",
"[",
"curve",
"]",
":",
"self",
".",
"_error",
"(",
"field",
",",
"'{} load scheme: expected {} arguments, found {}'",
".",
"format",
"(",
"curve",
",",
"N_OF_ARGS",
"[",
"curve",
"]",
",",
"len",
"(",
"params",
")",
")",
")",
"# check arguments' types",
"for",
"param",
"in",
"params",
"[",
":",
"-",
"1",
"]",
":",
"if",
"not",
"self",
".",
"is_number",
"(",
"param",
")",
":",
"self",
".",
"_error",
"(",
"field",
",",
"'Argument {} in load scheme should be a number'",
".",
"format",
"(",
"param",
")",
")",
"self",
".",
"validate_duration",
"(",
"field",
",",
"params",
"[",
"-",
"1",
"]",
")"
] |
d71d63b6ab5de8b8a5ea2b728b6ab9ac0b1ba71b
|
test
|
TankConfig.__parse_enabled_plugins
|
:returns: [(plugin_name, plugin_package, plugin_config), ...]
:rtype: list of tuple
|
yandextank/validator/validator.py
|
def __parse_enabled_plugins(self):
    """
    Collect the plugin sections of the raw config that are switched on.

    :returns: [(plugin_name, plugin_package, plugin_config), ...]
    :rtype: list of tuple
    """
    enabled = []
    for name, section in self.raw_config_dict.items():
        if name in self.BASE_SCHEMA.keys():
            continue
        if not isinstance(section, dict):
            continue
        if not section.get('enabled'):
            continue
        enabled.append((name, section['package'], section))
    return enabled
|
def __parse_enabled_plugins(self):
"""
:returns: [(plugin_name, plugin_package, plugin_config), ...]
:rtype: list of tuple
"""
return [
(
plugin_name,
plugin['package'],
plugin) for plugin_name,
plugin in self.raw_config_dict.items() if (
plugin_name not in self.BASE_SCHEMA.keys()) and isinstance(
plugin,
dict) and plugin.get('enabled')]
|
[
":",
"returns",
":",
"[",
"(",
"plugin_name",
"plugin_package",
"plugin_config",
")",
"...",
"]",
":",
"rtype",
":",
"list",
"of",
"tuple"
] |
yandex/yandex-tank
|
python
|
https://github.com/yandex/yandex-tank/blob/d71d63b6ab5de8b8a5ea2b728b6ab9ac0b1ba71b/yandextank/validator/validator.py#L228-L241
|
[
"def",
"__parse_enabled_plugins",
"(",
"self",
")",
":",
"return",
"[",
"(",
"plugin_name",
",",
"plugin",
"[",
"'package'",
"]",
",",
"plugin",
")",
"for",
"plugin_name",
",",
"plugin",
"in",
"self",
".",
"raw_config_dict",
".",
"items",
"(",
")",
"if",
"(",
"plugin_name",
"not",
"in",
"self",
".",
"BASE_SCHEMA",
".",
"keys",
"(",
")",
")",
"and",
"isinstance",
"(",
"plugin",
",",
"dict",
")",
"and",
"plugin",
".",
"get",
"(",
"'enabled'",
")",
"]"
] |
d71d63b6ab5de8b8a5ea2b728b6ab9ac0b1ba71b
|
test
|
ValidatedConfig.plugins
|
:returns: [(plugin_name, plugin_package, plugin_config), ...]
:rtype: list of tuple
|
yandextank/validator/validator.py
|
def plugins(self):
    """
    Lazily build and cache the list of enabled plugins.

    :returns: [(plugin_name, plugin_package, plugin_config), ...]
    :rtype: list of tuple
    """
    if not self._plugins:
        enabled = []
        for name, cfg in self.validated.items():
            if name not in self.base_schema.keys() and cfg['enabled']:
                enabled.append((name, cfg['package'], cfg))
        self._plugins = enabled
    return self._plugins
|
def plugins(self):
"""
:returns: [(plugin_name, plugin_package, plugin_config), ...]
:rtype: list of tuple
"""
if not self._plugins:
self._plugins = [
(plugin_name,
plugin_cfg['package'],
plugin_cfg) for plugin_name, plugin_cfg in self.validated.items() if (
plugin_name not in self.base_schema.keys()) and plugin_cfg['enabled']]
return self._plugins
|
[
":",
"returns",
":",
"[",
"(",
"plugin_name",
"plugin_package",
"plugin_config",
")",
"...",
"]",
":",
"rtype",
":",
"list",
"of",
"tuple"
] |
yandex/yandex-tank
|
python
|
https://github.com/yandex/yandex-tank/blob/d71d63b6ab5de8b8a5ea2b728b6ab9ac0b1ba71b/yandextank/validator/validator.py#L307-L318
|
[
"def",
"plugins",
"(",
"self",
")",
":",
"if",
"not",
"self",
".",
"_plugins",
":",
"self",
".",
"_plugins",
"=",
"[",
"(",
"plugin_name",
",",
"plugin_cfg",
"[",
"'package'",
"]",
",",
"plugin_cfg",
")",
"for",
"plugin_name",
",",
"plugin_cfg",
"in",
"self",
".",
"validated",
".",
"items",
"(",
")",
"if",
"(",
"plugin_name",
"not",
"in",
"self",
".",
"base_schema",
".",
"keys",
"(",
")",
")",
"and",
"plugin_cfg",
"[",
"'enabled'",
"]",
"]",
"return",
"self",
".",
"_plugins"
] |
d71d63b6ab5de8b8a5ea2b728b6ab9ac0b1ba71b
|
test
|
log_stdout_stderr
|
This function polls stdout and stderr streams and writes their contents
to log
|
yandextank/common/util.py
|
def log_stdout_stderr(log, stdout, stderr, comment=""):
    """
    This function polls stdout and stderr streams and writes their contents
    to log.

    :param log: logger to write to
    :param stdout: file-like object polled for regular output (logged at DEBUG)
    :param stderr: file-like object polled for error output (logged at WARNING);
        may be None/falsy to skip the stderr poll
    :param comment: prefix prepended to every logged line
    """
    # zero timeout: non-blocking poll, grab only what is ready right now
    readable = select.select([stdout], [], [], 0)[0]
    if stderr:
        exceptional = select.select([stderr], [], [], 0)[0]
    else:
        exceptional = []
    log.debug("Selected: %s, %s", readable, exceptional)
    # Fix: the original removed each handle from the list while iterating
    # over it, which skips every other ready handle; the lists are local,
    # so removing entries was unnecessary in the first place.
    for handle in readable:
        line = handle.read()
        if line:
            log.debug("%s stdout: %s", comment, line.strip())
    for handle in exceptional:
        line = handle.read()
        if line:
            # Logger.warn() is a deprecated alias of warning()
            log.warning("%s stderr: %s", comment, line.strip())
|
def log_stdout_stderr(log, stdout, stderr, comment=""):
"""
This function polls stdout and stderr streams and writes their contents
to log
"""
readable = select.select([stdout], [], [], 0)[0]
if stderr:
exceptional = select.select([stderr], [], [], 0)[0]
else:
exceptional = []
log.debug("Selected: %s, %s", readable, exceptional)
for handle in readable:
line = handle.read()
readable.remove(handle)
if line:
log.debug("%s stdout: %s", comment, line.strip())
for handle in exceptional:
line = handle.read()
exceptional.remove(handle)
if line:
log.warn("%s stderr: %s", comment, line.strip())
|
[
"This",
"function",
"polls",
"stdout",
"and",
"stderr",
"streams",
"and",
"writes",
"their",
"contents",
"to",
"log"
] |
yandex/yandex-tank
|
python
|
https://github.com/yandex/yandex-tank/blob/d71d63b6ab5de8b8a5ea2b728b6ab9ac0b1ba71b/yandextank/common/util.py#L344-L367
|
[
"def",
"log_stdout_stderr",
"(",
"log",
",",
"stdout",
",",
"stderr",
",",
"comment",
"=",
"\"\"",
")",
":",
"readable",
"=",
"select",
".",
"select",
"(",
"[",
"stdout",
"]",
",",
"[",
"]",
",",
"[",
"]",
",",
"0",
")",
"[",
"0",
"]",
"if",
"stderr",
":",
"exceptional",
"=",
"select",
".",
"select",
"(",
"[",
"stderr",
"]",
",",
"[",
"]",
",",
"[",
"]",
",",
"0",
")",
"[",
"0",
"]",
"else",
":",
"exceptional",
"=",
"[",
"]",
"log",
".",
"debug",
"(",
"\"Selected: %s, %s\"",
",",
"readable",
",",
"exceptional",
")",
"for",
"handle",
"in",
"readable",
":",
"line",
"=",
"handle",
".",
"read",
"(",
")",
"readable",
".",
"remove",
"(",
"handle",
")",
"if",
"line",
":",
"log",
".",
"debug",
"(",
"\"%s stdout: %s\"",
",",
"comment",
",",
"line",
".",
"strip",
"(",
")",
")",
"for",
"handle",
"in",
"exceptional",
":",
"line",
"=",
"handle",
".",
"read",
"(",
")",
"exceptional",
".",
"remove",
"(",
"handle",
")",
"if",
"line",
":",
"log",
".",
"warn",
"(",
"\"%s stderr: %s\"",
",",
"comment",
",",
"line",
".",
"strip",
"(",
")",
")"
] |
d71d63b6ab5de8b8a5ea2b728b6ab9ac0b1ba71b
|
test
|
expand_time
|
helper for above functions
|
yandextank/common/util.py
|
def expand_time(str_time, default_unit='s', multiplier=1):
    """
    Parse a duration string such as "1d2h30m" into whole seconds.

    Every "<number><unit>" part is converted and summed; a part without a
    unit uses *default_unit*.  The total is scaled by *multiplier* and
    truncated to an int.

    :raises ValueError: if the string contains an unknown unit
    """
    # seconds per supported unit
    factors = {
        'ms': 0.001,
        's': 1,
        'm': 60,
        'h': 60 * 60,
        'd': 60 * 60 * 24,
        'w': 60 * 60 * 24 * 7,
    }
    total = 0.0
    for amount, unit in re.findall(r'(\d+)([a-zA-Z]*)', str_time):
        unit = unit.lower() or default_unit
        if unit not in factors:
            raise ValueError(
                "String contains unsupported unit %s: %s" % (unit, str_time))
        total += int(amount) * factors[unit]
    return int(total * multiplier)
|
def expand_time(str_time, default_unit='s', multiplier=1):
"""
helper for above functions
"""
parser = re.compile(r'(\d+)([a-zA-Z]*)')
parts = parser.findall(str_time)
result = 0.0
for value, unit in parts:
value = int(value)
unit = unit.lower()
if unit == '':
unit = default_unit
if unit == 'ms':
result += value * 0.001
continue
elif unit == 's':
result += value
continue
elif unit == 'm':
result += value * 60
continue
elif unit == 'h':
result += value * 60 * 60
continue
elif unit == 'd':
result += value * 60 * 60 * 24
continue
elif unit == 'w':
result += value * 60 * 60 * 24 * 7
continue
else:
raise ValueError(
"String contains unsupported unit %s: %s" % (unit, str_time))
return int(result * multiplier)
|
[
"helper",
"for",
"above",
"functions"
] |
yandex/yandex-tank
|
python
|
https://github.com/yandex/yandex-tank/blob/d71d63b6ab5de8b8a5ea2b728b6ab9ac0b1ba71b/yandextank/common/util.py#L384-L418
|
[
"def",
"expand_time",
"(",
"str_time",
",",
"default_unit",
"=",
"'s'",
",",
"multiplier",
"=",
"1",
")",
":",
"parser",
"=",
"re",
".",
"compile",
"(",
"r'(\\d+)([a-zA-Z]*)'",
")",
"parts",
"=",
"parser",
".",
"findall",
"(",
"str_time",
")",
"result",
"=",
"0.0",
"for",
"value",
",",
"unit",
"in",
"parts",
":",
"value",
"=",
"int",
"(",
"value",
")",
"unit",
"=",
"unit",
".",
"lower",
"(",
")",
"if",
"unit",
"==",
"''",
":",
"unit",
"=",
"default_unit",
"if",
"unit",
"==",
"'ms'",
":",
"result",
"+=",
"value",
"*",
"0.001",
"continue",
"elif",
"unit",
"==",
"'s'",
":",
"result",
"+=",
"value",
"continue",
"elif",
"unit",
"==",
"'m'",
":",
"result",
"+=",
"value",
"*",
"60",
"continue",
"elif",
"unit",
"==",
"'h'",
":",
"result",
"+=",
"value",
"*",
"60",
"*",
"60",
"continue",
"elif",
"unit",
"==",
"'d'",
":",
"result",
"+=",
"value",
"*",
"60",
"*",
"60",
"*",
"24",
"continue",
"elif",
"unit",
"==",
"'w'",
":",
"result",
"+=",
"value",
"*",
"60",
"*",
"60",
"*",
"24",
"*",
"7",
"continue",
"else",
":",
"raise",
"ValueError",
"(",
"\"String contains unsupported unit %s: %s\"",
"%",
"(",
"unit",
",",
"str_time",
")",
")",
"return",
"int",
"(",
"result",
"*",
"multiplier",
")"
] |
d71d63b6ab5de8b8a5ea2b728b6ab9ac0b1ba71b
|
test
|
pid_exists
|
Check whether pid exists in the current process table.
|
yandextank/common/util.py
|
def pid_exists(pid):
"""Check whether pid exists in the current process table."""
if pid < 0:
return False
try:
os.kill(pid, 0)
except OSError as exc:
logging.debug("No process[%s]: %s", exc.errno, exc)
return exc.errno == errno.EPERM
else:
p = psutil.Process(pid)
return p.status != psutil.STATUS_ZOMBIE
|
def pid_exists(pid):
"""Check whether pid exists in the current process table."""
if pid < 0:
return False
try:
os.kill(pid, 0)
except OSError as exc:
logging.debug("No process[%s]: %s", exc.errno, exc)
return exc.errno == errno.EPERM
else:
p = psutil.Process(pid)
return p.status != psutil.STATUS_ZOMBIE
|
[
"Check",
"whether",
"pid",
"exists",
"in",
"the",
"current",
"process",
"table",
"."
] |
yandex/yandex-tank
|
python
|
https://github.com/yandex/yandex-tank/blob/d71d63b6ab5de8b8a5ea2b728b6ab9ac0b1ba71b/yandextank/common/util.py#L421-L432
|
[
"def",
"pid_exists",
"(",
"pid",
")",
":",
"if",
"pid",
"<",
"0",
":",
"return",
"False",
"try",
":",
"os",
".",
"kill",
"(",
"pid",
",",
"0",
")",
"except",
"OSError",
"as",
"exc",
":",
"logging",
".",
"debug",
"(",
"\"No process[%s]: %s\"",
",",
"exc",
".",
"errno",
",",
"exc",
")",
"return",
"exc",
".",
"errno",
"==",
"errno",
".",
"EPERM",
"else",
":",
"p",
"=",
"psutil",
".",
"Process",
"(",
"pid",
")",
"return",
"p",
".",
"status",
"!=",
"psutil",
".",
"STATUS_ZOMBIE"
] |
d71d63b6ab5de8b8a5ea2b728b6ab9ac0b1ba71b
|
test
|
splitstring
|
>>> string = 'apple orange "banana tree" green'
>>> splitstring(string)
['apple', 'orange', 'green', '"banana tree"']
|
yandextank/common/util.py
|
def splitstring(string):
"""
>>> string = 'apple orange "banana tree" green'
>>> splitstring(string)
['apple', 'orange', 'green', '"banana tree"']
"""
patt = re.compile(r'"[\w ]+"')
if patt.search(string):
quoted_item = patt.search(string).group()
newstring = patt.sub('', string)
return newstring.split() + [quoted_item]
else:
return string.split()
|
def splitstring(string):
"""
>>> string = 'apple orange "banana tree" green'
>>> splitstring(string)
['apple', 'orange', 'green', '"banana tree"']
"""
patt = re.compile(r'"[\w ]+"')
if patt.search(string):
quoted_item = patt.search(string).group()
newstring = patt.sub('', string)
return newstring.split() + [quoted_item]
else:
return string.split()
|
[
">>>",
"string",
"=",
"apple",
"orange",
"banana",
"tree",
"green",
">>>",
"splitstring",
"(",
"string",
")",
"[",
"apple",
"orange",
"green",
"banana",
"tree",
"]"
] |
yandex/yandex-tank
|
python
|
https://github.com/yandex/yandex-tank/blob/d71d63b6ab5de8b8a5ea2b728b6ab9ac0b1ba71b/yandextank/common/util.py#L435-L447
|
[
"def",
"splitstring",
"(",
"string",
")",
":",
"patt",
"=",
"re",
".",
"compile",
"(",
"r'\"[\\w ]+\"'",
")",
"if",
"patt",
".",
"search",
"(",
"string",
")",
":",
"quoted_item",
"=",
"patt",
".",
"search",
"(",
"string",
")",
".",
"group",
"(",
")",
"newstring",
"=",
"patt",
".",
"sub",
"(",
"''",
",",
"string",
")",
"return",
"newstring",
".",
"split",
"(",
")",
"+",
"[",
"quoted_item",
"]",
"else",
":",
"return",
"string",
".",
"split",
"(",
")"
] |
d71d63b6ab5de8b8a5ea2b728b6ab9ac0b1ba71b
|
test
|
FileMultiReader.read_with_lock
|
Reads {_len} characters if _len is not None else reads line
:param pos: start reading position
:param _len: number of characters to read
:rtype: (string, int)
|
yandextank/common/util.py
|
def read_with_lock(self, pos, _len=None):
"""
Reads {_len} characters if _len is not None else reads line
:param pos: start reading position
:param _len: number of characters to read
:rtype: (string, int)
"""
self.wait_lock()
try:
self._opened_file.seek(pos)
result = self._opened_file.read(_len) if _len is not None else self._opened_file.readline()
stop_pos = self._opened_file.tell()
finally:
self.unlock()
if not result and self.stop.is_set():
result = None
return result, stop_pos
|
def read_with_lock(self, pos, _len=None):
"""
Reads {_len} characters if _len is not None else reads line
:param pos: start reading position
:param _len: number of characters to read
:rtype: (string, int)
"""
self.wait_lock()
try:
self._opened_file.seek(pos)
result = self._opened_file.read(_len) if _len is not None else self._opened_file.readline()
stop_pos = self._opened_file.tell()
finally:
self.unlock()
if not result and self.stop.is_set():
result = None
return result, stop_pos
|
[
"Reads",
"{",
"_len",
"}",
"characters",
"if",
"_len",
"is",
"not",
"None",
"else",
"reads",
"line",
":",
"param",
"pos",
":",
"start",
"reading",
"position",
":",
"param",
"_len",
":",
"number",
"of",
"characters",
"to",
"read",
":",
"rtype",
":",
"(",
"string",
"int",
")"
] |
yandex/yandex-tank
|
python
|
https://github.com/yandex/yandex-tank/blob/d71d63b6ab5de8b8a5ea2b728b6ab9ac0b1ba71b/yandextank/common/util.py#L664-L680
|
[
"def",
"read_with_lock",
"(",
"self",
",",
"pos",
",",
"_len",
"=",
"None",
")",
":",
"self",
".",
"wait_lock",
"(",
")",
"try",
":",
"self",
".",
"_opened_file",
".",
"seek",
"(",
"pos",
")",
"result",
"=",
"self",
".",
"_opened_file",
".",
"read",
"(",
"_len",
")",
"if",
"_len",
"is",
"not",
"None",
"else",
"self",
".",
"_opened_file",
".",
"readline",
"(",
")",
"stop_pos",
"=",
"self",
".",
"_opened_file",
".",
"tell",
"(",
")",
"finally",
":",
"self",
".",
"unlock",
"(",
")",
"if",
"not",
"result",
"and",
"self",
".",
"stop",
".",
"is_set",
"(",
")",
":",
"result",
"=",
"None",
"return",
"result",
",",
"stop_pos"
] |
d71d63b6ab5de8b8a5ea2b728b6ab9ac0b1ba71b
|
test
|
StepperWrapper.get_option
|
get_option wrapper
|
yandextank/stepper/main.py
|
def get_option(self, option, param2=None):
''' get_option wrapper'''
result = self.cfg[option]
self.log.debug(
"Option %s = %s", option, result)
return result
|
def get_option(self, option, param2=None):
''' get_option wrapper'''
result = self.cfg[option]
self.log.debug(
"Option %s = %s", option, result)
return result
|
[
"get_option",
"wrapper"
] |
yandex/yandex-tank
|
python
|
https://github.com/yandex/yandex-tank/blob/d71d63b6ab5de8b8a5ea2b728b6ab9ac0b1ba71b/yandextank/stepper/main.py#L136-L141
|
[
"def",
"get_option",
"(",
"self",
",",
"option",
",",
"param2",
"=",
"None",
")",
":",
"result",
"=",
"self",
".",
"cfg",
"[",
"option",
"]",
"self",
".",
"log",
".",
"debug",
"(",
"\"Option %s = %s\"",
",",
"option",
",",
"result",
")",
"return",
"result"
] |
d71d63b6ab5de8b8a5ea2b728b6ab9ac0b1ba71b
|
test
|
StepperWrapper.read_config
|
stepper part of reading options
|
yandextank/stepper/main.py
|
def read_config(self):
''' stepper part of reading options '''
self.log.info("Configuring StepperWrapper...")
self.ammo_file = self.get_option(self.OPTION_AMMOFILE)
self.ammo_type = self.get_option('ammo_type')
if self.ammo_file:
self.ammo_file = os.path.expanduser(self.ammo_file)
self.loop_limit = self.get_option(self.OPTION_LOOP)
self.ammo_limit = self.get_option("ammo_limit")
self.load_profile = LoadProfile(**self.get_option('load_profile'))
self.instances = int(
self.get_option(self.OPTION_INSTANCES_LIMIT, '1000'))
self.uris = self.get_option("uris", [])
while '' in self.uris:
self.uris.remove('')
self.headers = self.get_option("headers")
self.http_ver = self.get_option("header_http")
self.autocases = self.get_option("autocases")
self.enum_ammo = self.get_option("enum_ammo")
self.use_caching = self.get_option("use_caching")
self.file_cache = self.get_option('file_cache')
cache_dir = self.get_option("cache_dir") or self.core.artifacts_base_dir
self.cache_dir = os.path.expanduser(cache_dir)
self.force_stepping = self.get_option("force_stepping")
if self.get_option(self.OPTION_LOAD)[self.OPTION_LOAD_TYPE] == 'stpd_file':
self.stpd = self.get_option(self.OPTION_LOAD)[self.OPTION_SCHEDULE]
self.chosen_cases = self.get_option("chosen_cases").split()
if self.chosen_cases:
self.log.info("chosen_cases LIMITS: %s", self.chosen_cases)
|
def read_config(self):
''' stepper part of reading options '''
self.log.info("Configuring StepperWrapper...")
self.ammo_file = self.get_option(self.OPTION_AMMOFILE)
self.ammo_type = self.get_option('ammo_type')
if self.ammo_file:
self.ammo_file = os.path.expanduser(self.ammo_file)
self.loop_limit = self.get_option(self.OPTION_LOOP)
self.ammo_limit = self.get_option("ammo_limit")
self.load_profile = LoadProfile(**self.get_option('load_profile'))
self.instances = int(
self.get_option(self.OPTION_INSTANCES_LIMIT, '1000'))
self.uris = self.get_option("uris", [])
while '' in self.uris:
self.uris.remove('')
self.headers = self.get_option("headers")
self.http_ver = self.get_option("header_http")
self.autocases = self.get_option("autocases")
self.enum_ammo = self.get_option("enum_ammo")
self.use_caching = self.get_option("use_caching")
self.file_cache = self.get_option('file_cache')
cache_dir = self.get_option("cache_dir") or self.core.artifacts_base_dir
self.cache_dir = os.path.expanduser(cache_dir)
self.force_stepping = self.get_option("force_stepping")
if self.get_option(self.OPTION_LOAD)[self.OPTION_LOAD_TYPE] == 'stpd_file':
self.stpd = self.get_option(self.OPTION_LOAD)[self.OPTION_SCHEDULE]
self.chosen_cases = self.get_option("chosen_cases").split()
if self.chosen_cases:
self.log.info("chosen_cases LIMITS: %s", self.chosen_cases)
|
[
"stepper",
"part",
"of",
"reading",
"options"
] |
yandex/yandex-tank
|
python
|
https://github.com/yandex/yandex-tank/blob/d71d63b6ab5de8b8a5ea2b728b6ab9ac0b1ba71b/yandextank/stepper/main.py#L159-L191
|
[
"def",
"read_config",
"(",
"self",
")",
":",
"self",
".",
"log",
".",
"info",
"(",
"\"Configuring StepperWrapper...\"",
")",
"self",
".",
"ammo_file",
"=",
"self",
".",
"get_option",
"(",
"self",
".",
"OPTION_AMMOFILE",
")",
"self",
".",
"ammo_type",
"=",
"self",
".",
"get_option",
"(",
"'ammo_type'",
")",
"if",
"self",
".",
"ammo_file",
":",
"self",
".",
"ammo_file",
"=",
"os",
".",
"path",
".",
"expanduser",
"(",
"self",
".",
"ammo_file",
")",
"self",
".",
"loop_limit",
"=",
"self",
".",
"get_option",
"(",
"self",
".",
"OPTION_LOOP",
")",
"self",
".",
"ammo_limit",
"=",
"self",
".",
"get_option",
"(",
"\"ammo_limit\"",
")",
"self",
".",
"load_profile",
"=",
"LoadProfile",
"(",
"*",
"*",
"self",
".",
"get_option",
"(",
"'load_profile'",
")",
")",
"self",
".",
"instances",
"=",
"int",
"(",
"self",
".",
"get_option",
"(",
"self",
".",
"OPTION_INSTANCES_LIMIT",
",",
"'1000'",
")",
")",
"self",
".",
"uris",
"=",
"self",
".",
"get_option",
"(",
"\"uris\"",
",",
"[",
"]",
")",
"while",
"''",
"in",
"self",
".",
"uris",
":",
"self",
".",
"uris",
".",
"remove",
"(",
"''",
")",
"self",
".",
"headers",
"=",
"self",
".",
"get_option",
"(",
"\"headers\"",
")",
"self",
".",
"http_ver",
"=",
"self",
".",
"get_option",
"(",
"\"header_http\"",
")",
"self",
".",
"autocases",
"=",
"self",
".",
"get_option",
"(",
"\"autocases\"",
")",
"self",
".",
"enum_ammo",
"=",
"self",
".",
"get_option",
"(",
"\"enum_ammo\"",
")",
"self",
".",
"use_caching",
"=",
"self",
".",
"get_option",
"(",
"\"use_caching\"",
")",
"self",
".",
"file_cache",
"=",
"self",
".",
"get_option",
"(",
"'file_cache'",
")",
"cache_dir",
"=",
"self",
".",
"get_option",
"(",
"\"cache_dir\"",
")",
"or",
"self",
".",
"core",
".",
"artifacts_base_dir",
"self",
".",
"cache_dir",
"=",
"os",
".",
"path",
".",
"expanduser",
"(",
"cache_dir",
")",
"self",
".",
"force_stepping",
"=",
"self",
".",
"get_option",
"(",
"\"force_stepping\"",
")",
"if",
"self",
".",
"get_option",
"(",
"self",
".",
"OPTION_LOAD",
")",
"[",
"self",
".",
"OPTION_LOAD_TYPE",
"]",
"==",
"'stpd_file'",
":",
"self",
".",
"stpd",
"=",
"self",
".",
"get_option",
"(",
"self",
".",
"OPTION_LOAD",
")",
"[",
"self",
".",
"OPTION_SCHEDULE",
"]",
"self",
".",
"chosen_cases",
"=",
"self",
".",
"get_option",
"(",
"\"chosen_cases\"",
")",
".",
"split",
"(",
")",
"if",
"self",
".",
"chosen_cases",
":",
"self",
".",
"log",
".",
"info",
"(",
"\"chosen_cases LIMITS: %s\"",
",",
"self",
".",
"chosen_cases",
")"
] |
d71d63b6ab5de8b8a5ea2b728b6ab9ac0b1ba71b
|
test
|
StepperWrapper.prepare_stepper
|
Generate test data if necessary
|
yandextank/stepper/main.py
|
def prepare_stepper(self):
''' Generate test data if necessary '''
def publish_info(stepper_info):
info.status.publish('loadscheme', stepper_info.loadscheme)
info.status.publish('loop_count', stepper_info.loop_count)
info.status.publish('steps', stepper_info.steps)
info.status.publish('duration', stepper_info.duration)
info.status.ammo_count = stepper_info.ammo_count
info.status.publish('instances', stepper_info.instances)
self.core.publish('stepper', 'loadscheme', stepper_info.loadscheme)
self.core.publish('stepper', 'loop_count', stepper_info.loop_count)
self.core.publish('stepper', 'steps', stepper_info.steps)
self.core.publish('stepper', 'duration', stepper_info.duration)
self.core.publish('stepper', 'ammo_count', stepper_info.ammo_count)
self.core.publish('stepper', 'instances', stepper_info.instances)
return stepper_info
if not self.stpd:
self.stpd = self.__get_stpd_filename()
if self.use_caching and not self.force_stepping and os.path.exists(
self.stpd) and os.path.exists(self.__si_filename()):
self.log.info("Using cached stpd-file: %s", self.stpd)
stepper_info = self.__read_cached_options()
if self.instances and self.load_profile.is_rps():
self.log.info(
"rps_schedule is set. Overriding cached instances param from config: %s",
self.instances)
stepper_info = stepper_info._replace(
instances=self.instances)
publish_info(stepper_info)
else:
if (
self.force_stepping and os.path.exists(self.__si_filename())):
os.remove(self.__si_filename())
self.__make_stpd_file()
stepper_info = info.status.get_info()
self.__write_cached_options(stepper_info)
else:
self.log.info("Using specified stpd-file: %s", self.stpd)
stepper_info = publish_info(self.__read_cached_options())
self.ammo_count = stepper_info.ammo_count
self.duration = stepper_info.duration
self.loop_count = stepper_info.loop_count
self.loadscheme = stepper_info.loadscheme
self.steps = stepper_info.steps
if stepper_info.instances:
self.instances = stepper_info.instances
|
def prepare_stepper(self):
''' Generate test data if necessary '''
def publish_info(stepper_info):
info.status.publish('loadscheme', stepper_info.loadscheme)
info.status.publish('loop_count', stepper_info.loop_count)
info.status.publish('steps', stepper_info.steps)
info.status.publish('duration', stepper_info.duration)
info.status.ammo_count = stepper_info.ammo_count
info.status.publish('instances', stepper_info.instances)
self.core.publish('stepper', 'loadscheme', stepper_info.loadscheme)
self.core.publish('stepper', 'loop_count', stepper_info.loop_count)
self.core.publish('stepper', 'steps', stepper_info.steps)
self.core.publish('stepper', 'duration', stepper_info.duration)
self.core.publish('stepper', 'ammo_count', stepper_info.ammo_count)
self.core.publish('stepper', 'instances', stepper_info.instances)
return stepper_info
if not self.stpd:
self.stpd = self.__get_stpd_filename()
if self.use_caching and not self.force_stepping and os.path.exists(
self.stpd) and os.path.exists(self.__si_filename()):
self.log.info("Using cached stpd-file: %s", self.stpd)
stepper_info = self.__read_cached_options()
if self.instances and self.load_profile.is_rps():
self.log.info(
"rps_schedule is set. Overriding cached instances param from config: %s",
self.instances)
stepper_info = stepper_info._replace(
instances=self.instances)
publish_info(stepper_info)
else:
if (
self.force_stepping and os.path.exists(self.__si_filename())):
os.remove(self.__si_filename())
self.__make_stpd_file()
stepper_info = info.status.get_info()
self.__write_cached_options(stepper_info)
else:
self.log.info("Using specified stpd-file: %s", self.stpd)
stepper_info = publish_info(self.__read_cached_options())
self.ammo_count = stepper_info.ammo_count
self.duration = stepper_info.duration
self.loop_count = stepper_info.loop_count
self.loadscheme = stepper_info.loadscheme
self.steps = stepper_info.steps
if stepper_info.instances:
self.instances = stepper_info.instances
|
[
"Generate",
"test",
"data",
"if",
"necessary"
] |
yandex/yandex-tank
|
python
|
https://github.com/yandex/yandex-tank/blob/d71d63b6ab5de8b8a5ea2b728b6ab9ac0b1ba71b/yandextank/stepper/main.py#L193-L240
|
[
"def",
"prepare_stepper",
"(",
"self",
")",
":",
"def",
"publish_info",
"(",
"stepper_info",
")",
":",
"info",
".",
"status",
".",
"publish",
"(",
"'loadscheme'",
",",
"stepper_info",
".",
"loadscheme",
")",
"info",
".",
"status",
".",
"publish",
"(",
"'loop_count'",
",",
"stepper_info",
".",
"loop_count",
")",
"info",
".",
"status",
".",
"publish",
"(",
"'steps'",
",",
"stepper_info",
".",
"steps",
")",
"info",
".",
"status",
".",
"publish",
"(",
"'duration'",
",",
"stepper_info",
".",
"duration",
")",
"info",
".",
"status",
".",
"ammo_count",
"=",
"stepper_info",
".",
"ammo_count",
"info",
".",
"status",
".",
"publish",
"(",
"'instances'",
",",
"stepper_info",
".",
"instances",
")",
"self",
".",
"core",
".",
"publish",
"(",
"'stepper'",
",",
"'loadscheme'",
",",
"stepper_info",
".",
"loadscheme",
")",
"self",
".",
"core",
".",
"publish",
"(",
"'stepper'",
",",
"'loop_count'",
",",
"stepper_info",
".",
"loop_count",
")",
"self",
".",
"core",
".",
"publish",
"(",
"'stepper'",
",",
"'steps'",
",",
"stepper_info",
".",
"steps",
")",
"self",
".",
"core",
".",
"publish",
"(",
"'stepper'",
",",
"'duration'",
",",
"stepper_info",
".",
"duration",
")",
"self",
".",
"core",
".",
"publish",
"(",
"'stepper'",
",",
"'ammo_count'",
",",
"stepper_info",
".",
"ammo_count",
")",
"self",
".",
"core",
".",
"publish",
"(",
"'stepper'",
",",
"'instances'",
",",
"stepper_info",
".",
"instances",
")",
"return",
"stepper_info",
"if",
"not",
"self",
".",
"stpd",
":",
"self",
".",
"stpd",
"=",
"self",
".",
"__get_stpd_filename",
"(",
")",
"if",
"self",
".",
"use_caching",
"and",
"not",
"self",
".",
"force_stepping",
"and",
"os",
".",
"path",
".",
"exists",
"(",
"self",
".",
"stpd",
")",
"and",
"os",
".",
"path",
".",
"exists",
"(",
"self",
".",
"__si_filename",
"(",
")",
")",
":",
"self",
".",
"log",
".",
"info",
"(",
"\"Using cached stpd-file: %s\"",
",",
"self",
".",
"stpd",
")",
"stepper_info",
"=",
"self",
".",
"__read_cached_options",
"(",
")",
"if",
"self",
".",
"instances",
"and",
"self",
".",
"load_profile",
".",
"is_rps",
"(",
")",
":",
"self",
".",
"log",
".",
"info",
"(",
"\"rps_schedule is set. Overriding cached instances param from config: %s\"",
",",
"self",
".",
"instances",
")",
"stepper_info",
"=",
"stepper_info",
".",
"_replace",
"(",
"instances",
"=",
"self",
".",
"instances",
")",
"publish_info",
"(",
"stepper_info",
")",
"else",
":",
"if",
"(",
"self",
".",
"force_stepping",
"and",
"os",
".",
"path",
".",
"exists",
"(",
"self",
".",
"__si_filename",
"(",
")",
")",
")",
":",
"os",
".",
"remove",
"(",
"self",
".",
"__si_filename",
"(",
")",
")",
"self",
".",
"__make_stpd_file",
"(",
")",
"stepper_info",
"=",
"info",
".",
"status",
".",
"get_info",
"(",
")",
"self",
".",
"__write_cached_options",
"(",
"stepper_info",
")",
"else",
":",
"self",
".",
"log",
".",
"info",
"(",
"\"Using specified stpd-file: %s\"",
",",
"self",
".",
"stpd",
")",
"stepper_info",
"=",
"publish_info",
"(",
"self",
".",
"__read_cached_options",
"(",
")",
")",
"self",
".",
"ammo_count",
"=",
"stepper_info",
".",
"ammo_count",
"self",
".",
"duration",
"=",
"stepper_info",
".",
"duration",
"self",
".",
"loop_count",
"=",
"stepper_info",
".",
"loop_count",
"self",
".",
"loadscheme",
"=",
"stepper_info",
".",
"loadscheme",
"self",
".",
"steps",
"=",
"stepper_info",
".",
"steps",
"if",
"stepper_info",
".",
"instances",
":",
"self",
".",
"instances",
"=",
"stepper_info",
".",
"instances"
] |
d71d63b6ab5de8b8a5ea2b728b6ab9ac0b1ba71b
|
test
|
StepperWrapper.__get_stpd_filename
|
Choose the name for stepped data file
|
yandextank/stepper/main.py
|
def __get_stpd_filename(self):
''' Choose the name for stepped data file '''
if self.use_caching:
sep = "|"
hasher = hashlib.md5()
hashed_str = "cache version 6" + sep + \
';'.join(self.load_profile.schedule) + sep + str(self.loop_limit)
hashed_str += sep + str(self.ammo_limit) + sep + ';'.join(
self.load_profile.schedule) + sep + str(self.autocases)
hashed_str += sep + ";".join(self.uris) + sep + ";".join(
self.headers) + sep + self.http_ver + sep + ";".join(
self.chosen_cases)
hashed_str += sep + str(self.enum_ammo) + sep + str(self.ammo_type)
if self.load_profile.is_instances():
hashed_str += sep + str(self.instances)
if self.ammo_file:
opener = resource.get_opener(self.ammo_file)
hashed_str += sep + opener.hash
else:
if not self.uris:
raise RuntimeError("Neither ammofile nor uris specified")
hashed_str += sep + \
';'.join(self.uris) + sep + ';'.join(self.headers)
self.log.debug("stpd-hash source: %s", hashed_str)
hasher.update(hashed_str.encode('utf8'))
if not os.path.exists(self.cache_dir):
os.makedirs(self.cache_dir)
stpd = self.cache_dir + '/' + \
os.path.basename(self.ammo_file) + \
"_" + hasher.hexdigest() + ".stpd"
else:
stpd = os.path.realpath("ammo.stpd")
self.log.debug("Generated cache file name: %s", stpd)
return stpd
|
def __get_stpd_filename(self):
''' Choose the name for stepped data file '''
if self.use_caching:
sep = "|"
hasher = hashlib.md5()
hashed_str = "cache version 6" + sep + \
';'.join(self.load_profile.schedule) + sep + str(self.loop_limit)
hashed_str += sep + str(self.ammo_limit) + sep + ';'.join(
self.load_profile.schedule) + sep + str(self.autocases)
hashed_str += sep + ";".join(self.uris) + sep + ";".join(
self.headers) + sep + self.http_ver + sep + ";".join(
self.chosen_cases)
hashed_str += sep + str(self.enum_ammo) + sep + str(self.ammo_type)
if self.load_profile.is_instances():
hashed_str += sep + str(self.instances)
if self.ammo_file:
opener = resource.get_opener(self.ammo_file)
hashed_str += sep + opener.hash
else:
if not self.uris:
raise RuntimeError("Neither ammofile nor uris specified")
hashed_str += sep + \
';'.join(self.uris) + sep + ';'.join(self.headers)
self.log.debug("stpd-hash source: %s", hashed_str)
hasher.update(hashed_str.encode('utf8'))
if not os.path.exists(self.cache_dir):
os.makedirs(self.cache_dir)
stpd = self.cache_dir + '/' + \
os.path.basename(self.ammo_file) + \
"_" + hasher.hexdigest() + ".stpd"
else:
stpd = os.path.realpath("ammo.stpd")
self.log.debug("Generated cache file name: %s", stpd)
return stpd
|
[
"Choose",
"the",
"name",
"for",
"stepped",
"data",
"file"
] |
yandex/yandex-tank
|
python
|
https://github.com/yandex/yandex-tank/blob/d71d63b6ab5de8b8a5ea2b728b6ab9ac0b1ba71b/yandextank/stepper/main.py#L246-L279
|
[
"def",
"__get_stpd_filename",
"(",
"self",
")",
":",
"if",
"self",
".",
"use_caching",
":",
"sep",
"=",
"\"|\"",
"hasher",
"=",
"hashlib",
".",
"md5",
"(",
")",
"hashed_str",
"=",
"\"cache version 6\"",
"+",
"sep",
"+",
"';'",
".",
"join",
"(",
"self",
".",
"load_profile",
".",
"schedule",
")",
"+",
"sep",
"+",
"str",
"(",
"self",
".",
"loop_limit",
")",
"hashed_str",
"+=",
"sep",
"+",
"str",
"(",
"self",
".",
"ammo_limit",
")",
"+",
"sep",
"+",
"';'",
".",
"join",
"(",
"self",
".",
"load_profile",
".",
"schedule",
")",
"+",
"sep",
"+",
"str",
"(",
"self",
".",
"autocases",
")",
"hashed_str",
"+=",
"sep",
"+",
"\";\"",
".",
"join",
"(",
"self",
".",
"uris",
")",
"+",
"sep",
"+",
"\";\"",
".",
"join",
"(",
"self",
".",
"headers",
")",
"+",
"sep",
"+",
"self",
".",
"http_ver",
"+",
"sep",
"+",
"\";\"",
".",
"join",
"(",
"self",
".",
"chosen_cases",
")",
"hashed_str",
"+=",
"sep",
"+",
"str",
"(",
"self",
".",
"enum_ammo",
")",
"+",
"sep",
"+",
"str",
"(",
"self",
".",
"ammo_type",
")",
"if",
"self",
".",
"load_profile",
".",
"is_instances",
"(",
")",
":",
"hashed_str",
"+=",
"sep",
"+",
"str",
"(",
"self",
".",
"instances",
")",
"if",
"self",
".",
"ammo_file",
":",
"opener",
"=",
"resource",
".",
"get_opener",
"(",
"self",
".",
"ammo_file",
")",
"hashed_str",
"+=",
"sep",
"+",
"opener",
".",
"hash",
"else",
":",
"if",
"not",
"self",
".",
"uris",
":",
"raise",
"RuntimeError",
"(",
"\"Neither ammofile nor uris specified\"",
")",
"hashed_str",
"+=",
"sep",
"+",
"';'",
".",
"join",
"(",
"self",
".",
"uris",
")",
"+",
"sep",
"+",
"';'",
".",
"join",
"(",
"self",
".",
"headers",
")",
"self",
".",
"log",
".",
"debug",
"(",
"\"stpd-hash source: %s\"",
",",
"hashed_str",
")",
"hasher",
".",
"update",
"(",
"hashed_str",
".",
"encode",
"(",
"'utf8'",
")",
")",
"if",
"not",
"os",
".",
"path",
".",
"exists",
"(",
"self",
".",
"cache_dir",
")",
":",
"os",
".",
"makedirs",
"(",
"self",
".",
"cache_dir",
")",
"stpd",
"=",
"self",
".",
"cache_dir",
"+",
"'/'",
"+",
"os",
".",
"path",
".",
"basename",
"(",
"self",
".",
"ammo_file",
")",
"+",
"\"_\"",
"+",
"hasher",
".",
"hexdigest",
"(",
")",
"+",
"\".stpd\"",
"else",
":",
"stpd",
"=",
"os",
".",
"path",
".",
"realpath",
"(",
"\"ammo.stpd\"",
")",
"self",
".",
"log",
".",
"debug",
"(",
"\"Generated cache file name: %s\"",
",",
"stpd",
")",
"return",
"stpd"
] |
d71d63b6ab5de8b8a5ea2b728b6ab9ac0b1ba71b
|
test
|
StepperWrapper.__read_cached_options
|
Read stepper info from json
|
yandextank/stepper/main.py
|
def __read_cached_options(self):
'''
Read stepper info from json
'''
self.log.debug("Reading cached stepper info: %s", self.__si_filename())
with open(self.__si_filename(), 'r') as si_file:
si = info.StepperInfo(**json.load(si_file))
return si
|
def __read_cached_options(self):
'''
Read stepper info from json
'''
self.log.debug("Reading cached stepper info: %s", self.__si_filename())
with open(self.__si_filename(), 'r') as si_file:
si = info.StepperInfo(**json.load(si_file))
return si
|
[
"Read",
"stepper",
"info",
"from",
"json"
] |
yandex/yandex-tank
|
python
|
https://github.com/yandex/yandex-tank/blob/d71d63b6ab5de8b8a5ea2b728b6ab9ac0b1ba71b/yandextank/stepper/main.py#L281-L288
|
[
"def",
"__read_cached_options",
"(",
"self",
")",
":",
"self",
".",
"log",
".",
"debug",
"(",
"\"Reading cached stepper info: %s\"",
",",
"self",
".",
"__si_filename",
"(",
")",
")",
"with",
"open",
"(",
"self",
".",
"__si_filename",
"(",
")",
",",
"'r'",
")",
"as",
"si_file",
":",
"si",
"=",
"info",
".",
"StepperInfo",
"(",
"*",
"*",
"json",
".",
"load",
"(",
"si_file",
")",
")",
"return",
"si"
] |
d71d63b6ab5de8b8a5ea2b728b6ab9ac0b1ba71b
|
test
|
StepperWrapper.__write_cached_options
|
Write stepper info to json
|
yandextank/stepper/main.py
|
def __write_cached_options(self, si):
'''
Write stepper info to json
'''
self.log.debug("Saving stepper info: %s", self.__si_filename())
with open(self.__si_filename(), 'w') as si_file:
json.dump(si._asdict(), si_file, indent=4)
|
def __write_cached_options(self, si):
'''
Write stepper info to json
'''
self.log.debug("Saving stepper info: %s", self.__si_filename())
with open(self.__si_filename(), 'w') as si_file:
json.dump(si._asdict(), si_file, indent=4)
|
[
"Write",
"stepper",
"info",
"to",
"json"
] |
yandex/yandex-tank
|
python
|
https://github.com/yandex/yandex-tank/blob/d71d63b6ab5de8b8a5ea2b728b6ab9ac0b1ba71b/yandextank/stepper/main.py#L290-L296
|
[
"def",
"__write_cached_options",
"(",
"self",
",",
"si",
")",
":",
"self",
".",
"log",
".",
"debug",
"(",
"\"Saving stepper info: %s\"",
",",
"self",
".",
"__si_filename",
"(",
")",
")",
"with",
"open",
"(",
"self",
".",
"__si_filename",
"(",
")",
",",
"'w'",
")",
"as",
"si_file",
":",
"json",
".",
"dump",
"(",
"si",
".",
"_asdict",
"(",
")",
",",
"si_file",
",",
"indent",
"=",
"4",
")"
] |
d71d63b6ab5de8b8a5ea2b728b6ab9ac0b1ba71b
|
test
|
StepperWrapper.__make_stpd_file
|
stpd generation using Stepper class
|
yandextank/stepper/main.py
|
def __make_stpd_file(self):
''' stpd generation using Stepper class '''
self.log.info("Making stpd-file: %s", self.stpd)
stepper = Stepper(
self.core,
rps_schedule=self.load_profile.schedule if self.load_profile.is_rps() else None,
http_ver=self.http_ver,
ammo_file=self.ammo_file,
instances_schedule=self.load_profile.schedule if self.load_profile.is_instances() else None,
instances=self.instances,
loop_limit=self.loop_limit,
ammo_limit=self.ammo_limit,
uris=self.uris,
headers=[header.strip('[]') for header in self.headers],
autocases=self.autocases,
enum_ammo=self.enum_ammo,
ammo_type=self.ammo_type,
chosen_cases=self.chosen_cases,
use_cache=self.use_caching)
with open(self.stpd, 'w', self.file_cache) as os:
stepper.write(os)
|
def __make_stpd_file(self):
''' stpd generation using Stepper class '''
self.log.info("Making stpd-file: %s", self.stpd)
stepper = Stepper(
self.core,
rps_schedule=self.load_profile.schedule if self.load_profile.is_rps() else None,
http_ver=self.http_ver,
ammo_file=self.ammo_file,
instances_schedule=self.load_profile.schedule if self.load_profile.is_instances() else None,
instances=self.instances,
loop_limit=self.loop_limit,
ammo_limit=self.ammo_limit,
uris=self.uris,
headers=[header.strip('[]') for header in self.headers],
autocases=self.autocases,
enum_ammo=self.enum_ammo,
ammo_type=self.ammo_type,
chosen_cases=self.chosen_cases,
use_cache=self.use_caching)
with open(self.stpd, 'w', self.file_cache) as os:
stepper.write(os)
|
[
"stpd",
"generation",
"using",
"Stepper",
"class"
] |
yandex/yandex-tank
|
python
|
https://github.com/yandex/yandex-tank/blob/d71d63b6ab5de8b8a5ea2b728b6ab9ac0b1ba71b/yandextank/stepper/main.py#L298-L318
|
[
"def",
"__make_stpd_file",
"(",
"self",
")",
":",
"self",
".",
"log",
".",
"info",
"(",
"\"Making stpd-file: %s\"",
",",
"self",
".",
"stpd",
")",
"stepper",
"=",
"Stepper",
"(",
"self",
".",
"core",
",",
"rps_schedule",
"=",
"self",
".",
"load_profile",
".",
"schedule",
"if",
"self",
".",
"load_profile",
".",
"is_rps",
"(",
")",
"else",
"None",
",",
"http_ver",
"=",
"self",
".",
"http_ver",
",",
"ammo_file",
"=",
"self",
".",
"ammo_file",
",",
"instances_schedule",
"=",
"self",
".",
"load_profile",
".",
"schedule",
"if",
"self",
".",
"load_profile",
".",
"is_instances",
"(",
")",
"else",
"None",
",",
"instances",
"=",
"self",
".",
"instances",
",",
"loop_limit",
"=",
"self",
".",
"loop_limit",
",",
"ammo_limit",
"=",
"self",
".",
"ammo_limit",
",",
"uris",
"=",
"self",
".",
"uris",
",",
"headers",
"=",
"[",
"header",
".",
"strip",
"(",
"'[]'",
")",
"for",
"header",
"in",
"self",
".",
"headers",
"]",
",",
"autocases",
"=",
"self",
".",
"autocases",
",",
"enum_ammo",
"=",
"self",
".",
"enum_ammo",
",",
"ammo_type",
"=",
"self",
".",
"ammo_type",
",",
"chosen_cases",
"=",
"self",
".",
"chosen_cases",
",",
"use_cache",
"=",
"self",
".",
"use_caching",
")",
"with",
"open",
"(",
"self",
".",
"stpd",
",",
"'w'",
",",
"self",
".",
"file_cache",
")",
"as",
"os",
":",
"stepper",
".",
"write",
"(",
"os",
")"
] |
d71d63b6ab5de8b8a5ea2b728b6ab9ac0b1ba71b
|
test
|
create
|
Create Load Plan as defined in schedule. Publish info about its duration.
|
yandextank/stepper/load_plan.py
|
def create(rps_schedule):
"""
Create Load Plan as defined in schedule. Publish info about its duration.
"""
if len(rps_schedule) > 1:
lp = Composite(
[StepFactory.produce(step_config) for step_config in rps_schedule])
else:
lp = StepFactory.produce(rps_schedule[0])
info.status.publish('duration', lp.get_duration() / 1000)
info.status.publish('steps', lp.get_rps_list())
info.status.lp_len = len(lp)
return lp
|
def create(rps_schedule):
"""
Create Load Plan as defined in schedule. Publish info about its duration.
"""
if len(rps_schedule) > 1:
lp = Composite(
[StepFactory.produce(step_config) for step_config in rps_schedule])
else:
lp = StepFactory.produce(rps_schedule[0])
info.status.publish('duration', lp.get_duration() / 1000)
info.status.publish('steps', lp.get_rps_list())
info.status.lp_len = len(lp)
return lp
|
[
"Create",
"Load",
"Plan",
"as",
"defined",
"in",
"schedule",
".",
"Publish",
"info",
"about",
"its",
"duration",
"."
] |
yandex/yandex-tank
|
python
|
https://github.com/yandex/yandex-tank/blob/d71d63b6ab5de8b8a5ea2b728b6ab9ac0b1ba71b/yandextank/stepper/load_plan.py#L207-L219
|
[
"def",
"create",
"(",
"rps_schedule",
")",
":",
"if",
"len",
"(",
"rps_schedule",
")",
">",
"1",
":",
"lp",
"=",
"Composite",
"(",
"[",
"StepFactory",
".",
"produce",
"(",
"step_config",
")",
"for",
"step_config",
"in",
"rps_schedule",
"]",
")",
"else",
":",
"lp",
"=",
"StepFactory",
".",
"produce",
"(",
"rps_schedule",
"[",
"0",
"]",
")",
"info",
".",
"status",
".",
"publish",
"(",
"'duration'",
",",
"lp",
".",
"get_duration",
"(",
")",
"/",
"1000",
")",
"info",
".",
"status",
".",
"publish",
"(",
"'steps'",
",",
"lp",
".",
"get_rps_list",
"(",
")",
")",
"info",
".",
"status",
".",
"lp_len",
"=",
"len",
"(",
"lp",
")",
"return",
"lp"
] |
d71d63b6ab5de8b8a5ea2b728b6ab9ac0b1ba71b
|
test
|
Line.ts
|
:param n: number of charge
:return: when to shoot nth charge, milliseconds
|
yandextank/stepper/load_plan.py
|
def ts(self, n):
"""
:param n: number of charge
:return: when to shoot nth charge, milliseconds
"""
try:
root1, root2 = solve_quadratic(self.slope / 2.0, self.minrps, -n)
except ZeroDivisionError:
root2 = float(n) / self.minrps
return int(root2 * 1000)
|
def ts(self, n):
"""
:param n: number of charge
:return: when to shoot nth charge, milliseconds
"""
try:
root1, root2 = solve_quadratic(self.slope / 2.0, self.minrps, -n)
except ZeroDivisionError:
root2 = float(n) / self.minrps
return int(root2 * 1000)
|
[
":",
"param",
"n",
":",
"number",
"of",
"charge",
":",
"return",
":",
"when",
"to",
"shoot",
"nth",
"charge",
"milliseconds"
] |
yandex/yandex-tank
|
python
|
https://github.com/yandex/yandex-tank/blob/d71d63b6ab5de8b8a5ea2b728b6ab9ac0b1ba71b/yandextank/stepper/load_plan.py#L65-L74
|
[
"def",
"ts",
"(",
"self",
",",
"n",
")",
":",
"try",
":",
"root1",
",",
"root2",
"=",
"solve_quadratic",
"(",
"self",
".",
"slope",
"/",
"2.0",
",",
"self",
".",
"minrps",
",",
"-",
"n",
")",
"except",
"ZeroDivisionError",
":",
"root2",
"=",
"float",
"(",
"n",
")",
"/",
"self",
".",
"minrps",
"return",
"int",
"(",
"root2",
"*",
"1000",
")"
] |
d71d63b6ab5de8b8a5ea2b728b6ab9ac0b1ba71b
|
test
|
Line.rps_at
|
Return rps for second t
|
yandextank/stepper/load_plan.py
|
def rps_at(self, t):
'''Return rps for second t'''
if 0 <= t <= self.duration:
return self.minrps + \
float(self.maxrps - self.minrps) * t / self.duration
else:
return 0
|
def rps_at(self, t):
'''Return rps for second t'''
if 0 <= t <= self.duration:
return self.minrps + \
float(self.maxrps - self.minrps) * t / self.duration
else:
return 0
|
[
"Return",
"rps",
"for",
"second",
"t"
] |
yandex/yandex-tank
|
python
|
https://github.com/yandex/yandex-tank/blob/d71d63b6ab5de8b8a5ea2b728b6ab9ac0b1ba71b/yandextank/stepper/load_plan.py#L83-L89
|
[
"def",
"rps_at",
"(",
"self",
",",
"t",
")",
":",
"if",
"0",
"<=",
"t",
"<=",
"self",
".",
"duration",
":",
"return",
"self",
".",
"minrps",
"+",
"float",
"(",
"self",
".",
"maxrps",
"-",
"self",
".",
"minrps",
")",
"*",
"t",
"/",
"self",
".",
"duration",
"else",
":",
"return",
"0"
] |
d71d63b6ab5de8b8a5ea2b728b6ab9ac0b1ba71b
|
test
|
Line.get_float_rps_list
|
get list of constant load parts (we have no constant load at all, but tank will think so),
with parts durations (float)
|
yandextank/stepper/load_plan.py
|
def get_float_rps_list(self):
'''
get list of constant load parts (we have no constant load at all, but tank will think so),
with parts durations (float)
'''
int_rps = range(int(self.minrps), int(self.maxrps) + 1)
step_duration = float(self.duration) / len(int_rps)
rps_list = [(rps, int(step_duration)) for rps in int_rps]
return rps_list
|
def get_float_rps_list(self):
'''
get list of constant load parts (we have no constant load at all, but tank will think so),
with parts durations (float)
'''
int_rps = range(int(self.minrps), int(self.maxrps) + 1)
step_duration = float(self.duration) / len(int_rps)
rps_list = [(rps, int(step_duration)) for rps in int_rps]
return rps_list
|
[
"get",
"list",
"of",
"constant",
"load",
"parts",
"(",
"we",
"have",
"no",
"constant",
"load",
"at",
"all",
"but",
"tank",
"will",
"think",
"so",
")",
"with",
"parts",
"durations",
"(",
"float",
")"
] |
yandex/yandex-tank
|
python
|
https://github.com/yandex/yandex-tank/blob/d71d63b6ab5de8b8a5ea2b728b6ab9ac0b1ba71b/yandextank/stepper/load_plan.py#L99-L107
|
[
"def",
"get_float_rps_list",
"(",
"self",
")",
":",
"int_rps",
"=",
"range",
"(",
"int",
"(",
"self",
".",
"minrps",
")",
",",
"int",
"(",
"self",
".",
"maxrps",
")",
"+",
"1",
")",
"step_duration",
"=",
"float",
"(",
"self",
".",
"duration",
")",
"/",
"len",
"(",
"int_rps",
")",
"rps_list",
"=",
"[",
"(",
"rps",
",",
"int",
"(",
"step_duration",
")",
")",
"for",
"rps",
"in",
"int_rps",
"]",
"return",
"rps_list"
] |
d71d63b6ab5de8b8a5ea2b728b6ab9ac0b1ba71b
|
test
|
Line.get_rps_list
|
get list of each second's rps
:returns: list of tuples (rps, duration of corresponding rps in seconds)
:rtype: list
|
yandextank/stepper/load_plan.py
|
def get_rps_list(self):
"""
get list of each second's rps
:returns: list of tuples (rps, duration of corresponding rps in seconds)
:rtype: list
"""
seconds = range(0, int(self.duration) + 1)
rps_groups = groupby([proper_round(self.rps_at(t)) for t in seconds],
lambda x: x)
rps_list = [(rps, len(list(rpl))) for rps, rpl in rps_groups]
return rps_list
|
def get_rps_list(self):
"""
get list of each second's rps
:returns: list of tuples (rps, duration of corresponding rps in seconds)
:rtype: list
"""
seconds = range(0, int(self.duration) + 1)
rps_groups = groupby([proper_round(self.rps_at(t)) for t in seconds],
lambda x: x)
rps_list = [(rps, len(list(rpl))) for rps, rpl in rps_groups]
return rps_list
|
[
"get",
"list",
"of",
"each",
"second",
"s",
"rps",
":",
"returns",
":",
"list",
"of",
"tuples",
"(",
"rps",
"duration",
"of",
"corresponding",
"rps",
"in",
"seconds",
")",
":",
"rtype",
":",
"list"
] |
yandex/yandex-tank
|
python
|
https://github.com/yandex/yandex-tank/blob/d71d63b6ab5de8b8a5ea2b728b6ab9ac0b1ba71b/yandextank/stepper/load_plan.py#L109-L119
|
[
"def",
"get_rps_list",
"(",
"self",
")",
":",
"seconds",
"=",
"range",
"(",
"0",
",",
"int",
"(",
"self",
".",
"duration",
")",
"+",
"1",
")",
"rps_groups",
"=",
"groupby",
"(",
"[",
"proper_round",
"(",
"self",
".",
"rps_at",
"(",
"t",
")",
")",
"for",
"t",
"in",
"seconds",
"]",
",",
"lambda",
"x",
":",
"x",
")",
"rps_list",
"=",
"[",
"(",
"rps",
",",
"len",
"(",
"list",
"(",
"rpl",
")",
")",
")",
"for",
"rps",
",",
"rpl",
"in",
"rps_groups",
"]",
"return",
"rps_list"
] |
d71d63b6ab5de8b8a5ea2b728b6ab9ac0b1ba71b
|
test
|
Plugin.execute
|
Execute and check exit code
|
yandextank/plugins/ShellExec/plugin.py
|
def execute(self, cmd):
"""
Execute and check exit code
"""
self.log.info("Executing: %s", cmd)
retcode = execute(
cmd, shell=True, poll_period=0.1, catch_out=self.catch_out)[0]
if retcode:
raise RuntimeError("Subprocess returned %s" % retcode)
return retcode
|
def execute(self, cmd):
"""
Execute and check exit code
"""
self.log.info("Executing: %s", cmd)
retcode = execute(
cmd, shell=True, poll_period=0.1, catch_out=self.catch_out)[0]
if retcode:
raise RuntimeError("Subprocess returned %s" % retcode)
return retcode
|
[
"Execute",
"and",
"check",
"exit",
"code"
] |
yandex/yandex-tank
|
python
|
https://github.com/yandex/yandex-tank/blob/d71d63b6ab5de8b8a5ea2b728b6ab9ac0b1ba71b/yandextank/plugins/ShellExec/plugin.py#L71-L80
|
[
"def",
"execute",
"(",
"self",
",",
"cmd",
")",
":",
"self",
".",
"log",
".",
"info",
"(",
"\"Executing: %s\"",
",",
"cmd",
")",
"retcode",
"=",
"execute",
"(",
"cmd",
",",
"shell",
"=",
"True",
",",
"poll_period",
"=",
"0.1",
",",
"catch_out",
"=",
"self",
".",
"catch_out",
")",
"[",
"0",
"]",
"if",
"retcode",
":",
"raise",
"RuntimeError",
"(",
"\"Subprocess returned %s\"",
"%",
"retcode",
")",
"return",
"retcode"
] |
d71d63b6ab5de8b8a5ea2b728b6ab9ac0b1ba71b
|
test
|
Decoder.decode_monitoring
|
The reason why we have two separate methods for monitoring
and aggregates is a strong difference in incoming data.
|
yandextank/plugins/InfluxUploader/decoder.py
|
def decode_monitoring(self, data):
"""
The reason why we have two separate methods for monitoring
and aggregates is a strong difference in incoming data.
"""
points = list()
for second_data in data:
for host, host_data in second_data["data"].iteritems():
points.append(
self.__make_points(
"monitoring",
{"host": host, "comment": host_data.get("comment")},
second_data["timestamp"],
{
metric: value
for metric, value in host_data["metrics"].iteritems()
}
)
)
return points
|
def decode_monitoring(self, data):
"""
The reason why we have two separate methods for monitoring
and aggregates is a strong difference in incoming data.
"""
points = list()
for second_data in data:
for host, host_data in second_data["data"].iteritems():
points.append(
self.__make_points(
"monitoring",
{"host": host, "comment": host_data.get("comment")},
second_data["timestamp"],
{
metric: value
for metric, value in host_data["metrics"].iteritems()
}
)
)
return points
|
[
"The",
"reason",
"why",
"we",
"have",
"two",
"separate",
"methods",
"for",
"monitoring",
"and",
"aggregates",
"is",
"a",
"strong",
"difference",
"in",
"incoming",
"data",
"."
] |
yandex/yandex-tank
|
python
|
https://github.com/yandex/yandex-tank/blob/d71d63b6ab5de8b8a5ea2b728b6ab9ac0b1ba71b/yandextank/plugins/InfluxUploader/decoder.py#L38-L57
|
[
"def",
"decode_monitoring",
"(",
"self",
",",
"data",
")",
":",
"points",
"=",
"list",
"(",
")",
"for",
"second_data",
"in",
"data",
":",
"for",
"host",
",",
"host_data",
"in",
"second_data",
"[",
"\"data\"",
"]",
".",
"iteritems",
"(",
")",
":",
"points",
".",
"append",
"(",
"self",
".",
"__make_points",
"(",
"\"monitoring\"",
",",
"{",
"\"host\"",
":",
"host",
",",
"\"comment\"",
":",
"host_data",
".",
"get",
"(",
"\"comment\"",
")",
"}",
",",
"second_data",
"[",
"\"timestamp\"",
"]",
",",
"{",
"metric",
":",
"value",
"for",
"metric",
",",
"value",
"in",
"host_data",
"[",
"\"metrics\"",
"]",
".",
"iteritems",
"(",
")",
"}",
")",
")",
"return",
"points"
] |
d71d63b6ab5de8b8a5ea2b728b6ab9ac0b1ba71b
|
test
|
Decoder.__make_points_for_label
|
x
Make a set of points for `this` label
overall_quantiles, overall_meta, net_codes, proto_codes, histograms
|
yandextank/plugins/InfluxUploader/decoder.py
|
def __make_points_for_label(self, ts, data, label, prefix, gun_stats):
"""x
Make a set of points for `this` label
overall_quantiles, overall_meta, net_codes, proto_codes, histograms
"""
label_points = list()
label_points.extend(
(
# overall quantiles for label
self.__make_points(
prefix + "overall_quantiles",
{"label": label},
ts,
self.__make_quantile_fields(data)
),
# overall meta (gun status) for label
self.__make_points(
prefix + "overall_meta",
{"label": label},
ts,
self.__make_overall_meta_fields(data, gun_stats)
),
# net codes for label
self.__make_points(
prefix + "net_codes",
{"label": label},
ts,
self.__make_netcodes_fields(data)
),
# proto codes for label
self.__make_points(
prefix + "proto_codes",
{"label": label},
ts,
self.__make_protocodes_fields(data)
)
)
)
# histograms, one row for each bin
if self.histograms:
for bin_, count in zip(data["interval_real"]["hist"]["bins"],
data["interval_real"]["hist"]["data"]):
label_points.append(
self.__make_points(
prefix + "histograms",
{"label": label},
ts,
{"bin": bin_, "count": count}
)
)
return label_points
|
def __make_points_for_label(self, ts, data, label, prefix, gun_stats):
"""x
Make a set of points for `this` label
overall_quantiles, overall_meta, net_codes, proto_codes, histograms
"""
label_points = list()
label_points.extend(
(
# overall quantiles for label
self.__make_points(
prefix + "overall_quantiles",
{"label": label},
ts,
self.__make_quantile_fields(data)
),
# overall meta (gun status) for label
self.__make_points(
prefix + "overall_meta",
{"label": label},
ts,
self.__make_overall_meta_fields(data, gun_stats)
),
# net codes for label
self.__make_points(
prefix + "net_codes",
{"label": label},
ts,
self.__make_netcodes_fields(data)
),
# proto codes for label
self.__make_points(
prefix + "proto_codes",
{"label": label},
ts,
self.__make_protocodes_fields(data)
)
)
)
# histograms, one row for each bin
if self.histograms:
for bin_, count in zip(data["interval_real"]["hist"]["bins"],
data["interval_real"]["hist"]["data"]):
label_points.append(
self.__make_points(
prefix + "histograms",
{"label": label},
ts,
{"bin": bin_, "count": count}
)
)
return label_points
|
[
"x",
"Make",
"a",
"set",
"of",
"points",
"for",
"this",
"label"
] |
yandex/yandex-tank
|
python
|
https://github.com/yandex/yandex-tank/blob/d71d63b6ab5de8b8a5ea2b728b6ab9ac0b1ba71b/yandextank/plugins/InfluxUploader/decoder.py#L82-L134
|
[
"def",
"__make_points_for_label",
"(",
"self",
",",
"ts",
",",
"data",
",",
"label",
",",
"prefix",
",",
"gun_stats",
")",
":",
"label_points",
"=",
"list",
"(",
")",
"label_points",
".",
"extend",
"(",
"(",
"# overall quantiles for label",
"self",
".",
"__make_points",
"(",
"prefix",
"+",
"\"overall_quantiles\"",
",",
"{",
"\"label\"",
":",
"label",
"}",
",",
"ts",
",",
"self",
".",
"__make_quantile_fields",
"(",
"data",
")",
")",
",",
"# overall meta (gun status) for label",
"self",
".",
"__make_points",
"(",
"prefix",
"+",
"\"overall_meta\"",
",",
"{",
"\"label\"",
":",
"label",
"}",
",",
"ts",
",",
"self",
".",
"__make_overall_meta_fields",
"(",
"data",
",",
"gun_stats",
")",
")",
",",
"# net codes for label",
"self",
".",
"__make_points",
"(",
"prefix",
"+",
"\"net_codes\"",
",",
"{",
"\"label\"",
":",
"label",
"}",
",",
"ts",
",",
"self",
".",
"__make_netcodes_fields",
"(",
"data",
")",
")",
",",
"# proto codes for label",
"self",
".",
"__make_points",
"(",
"prefix",
"+",
"\"proto_codes\"",
",",
"{",
"\"label\"",
":",
"label",
"}",
",",
"ts",
",",
"self",
".",
"__make_protocodes_fields",
"(",
"data",
")",
")",
")",
")",
"# histograms, one row for each bin",
"if",
"self",
".",
"histograms",
":",
"for",
"bin_",
",",
"count",
"in",
"zip",
"(",
"data",
"[",
"\"interval_real\"",
"]",
"[",
"\"hist\"",
"]",
"[",
"\"bins\"",
"]",
",",
"data",
"[",
"\"interval_real\"",
"]",
"[",
"\"hist\"",
"]",
"[",
"\"data\"",
"]",
")",
":",
"label_points",
".",
"append",
"(",
"self",
".",
"__make_points",
"(",
"prefix",
"+",
"\"histograms\"",
",",
"{",
"\"label\"",
":",
"label",
"}",
",",
"ts",
",",
"{",
"\"bin\"",
":",
"bin_",
",",
"\"count\"",
":",
"count",
"}",
")",
")",
"return",
"label_points"
] |
d71d63b6ab5de8b8a5ea2b728b6ab9ac0b1ba71b
|
test
|
Decoder.__make_points
|
Parameters
----------
measurement : string
measurement type (e.g. monitoring, overall_meta, net_codes, proto_codes, overall_quantiles)
additional_tags : dict
custom additional tags for this points
ts : integer
timestamp
fields : dict
influxdb columns
Returns
-------
dict
points for InfluxDB client
|
yandextank/plugins/InfluxUploader/decoder.py
|
def __make_points(self, measurement, additional_tags, ts, fields):
"""
Parameters
----------
measurement : string
measurement type (e.g. monitoring, overall_meta, net_codes, proto_codes, overall_quantiles)
additional_tags : dict
custom additional tags for this points
ts : integer
timestamp
fields : dict
influxdb columns
Returns
-------
dict
points for InfluxDB client
"""
tags = self.tags.copy()
tags.update(additional_tags)
return {
"measurement": measurement,
"tags": tags,
"time": int(ts),
"fields": fields,
}
|
def __make_points(self, measurement, additional_tags, ts, fields):
"""
Parameters
----------
measurement : string
measurement type (e.g. monitoring, overall_meta, net_codes, proto_codes, overall_quantiles)
additional_tags : dict
custom additional tags for this points
ts : integer
timestamp
fields : dict
influxdb columns
Returns
-------
dict
points for InfluxDB client
"""
tags = self.tags.copy()
tags.update(additional_tags)
return {
"measurement": measurement,
"tags": tags,
"time": int(ts),
"fields": fields,
}
|
[
"Parameters",
"----------",
"measurement",
":",
"string",
"measurement",
"type",
"(",
"e",
".",
"g",
".",
"monitoring",
"overall_meta",
"net_codes",
"proto_codes",
"overall_quantiles",
")",
"additional_tags",
":",
"dict",
"custom",
"additional",
"tags",
"for",
"this",
"points",
"ts",
":",
"integer",
"timestamp",
"fields",
":",
"dict",
"influxdb",
"columns"
] |
yandex/yandex-tank
|
python
|
https://github.com/yandex/yandex-tank/blob/d71d63b6ab5de8b8a5ea2b728b6ab9ac0b1ba71b/yandextank/plugins/InfluxUploader/decoder.py#L166-L191
|
[
"def",
"__make_points",
"(",
"self",
",",
"measurement",
",",
"additional_tags",
",",
"ts",
",",
"fields",
")",
":",
"tags",
"=",
"self",
".",
"tags",
".",
"copy",
"(",
")",
"tags",
".",
"update",
"(",
"additional_tags",
")",
"return",
"{",
"\"measurement\"",
":",
"measurement",
",",
"\"tags\"",
":",
"tags",
",",
"\"time\"",
":",
"int",
"(",
"ts",
")",
",",
"\"fields\"",
":",
"fields",
",",
"}"
] |
d71d63b6ab5de8b8a5ea2b728b6ab9ac0b1ba71b
|
test
|
AbstractPlugin.publish
|
publish value to status
|
yandextank/common/interfaces.py
|
def publish(self, key, value):
"""publish value to status"""
self.log.debug(
"Publishing status: %s/%s: %s", self.__class__.__name__, key, value)
self.core.publish(self.__class__.__name__, key, value)
|
def publish(self, key, value):
"""publish value to status"""
self.log.debug(
"Publishing status: %s/%s: %s", self.__class__.__name__, key, value)
self.core.publish(self.__class__.__name__, key, value)
|
[
"publish",
"value",
"to",
"status"
] |
yandex/yandex-tank
|
python
|
https://github.com/yandex/yandex-tank/blob/d71d63b6ab5de8b8a5ea2b728b6ab9ac0b1ba71b/yandextank/common/interfaces.py#L98-L102
|
[
"def",
"publish",
"(",
"self",
",",
"key",
",",
"value",
")",
":",
"self",
".",
"log",
".",
"debug",
"(",
"\"Publishing status: %s/%s: %s\"",
",",
"self",
".",
"__class__",
".",
"__name__",
",",
"key",
",",
"value",
")",
"self",
".",
"core",
".",
"publish",
"(",
"self",
".",
"__class__",
".",
"__name__",
",",
"key",
",",
"value",
")"
] |
d71d63b6ab5de8b8a5ea2b728b6ab9ac0b1ba71b
|
test
|
AbstractCriterion.count_matched_codes
|
helper to aggregate codes by mask
|
yandextank/common/interfaces.py
|
def count_matched_codes(codes_regex, codes_dict):
""" helper to aggregate codes by mask """
total = 0
for code, count in codes_dict.items():
if codes_regex.match(str(code)):
total += count
return total
|
def count_matched_codes(codes_regex, codes_dict):
""" helper to aggregate codes by mask """
total = 0
for code, count in codes_dict.items():
if codes_regex.match(str(code)):
total += count
return total
|
[
"helper",
"to",
"aggregate",
"codes",
"by",
"mask"
] |
yandex/yandex-tank
|
python
|
https://github.com/yandex/yandex-tank/blob/d71d63b6ab5de8b8a5ea2b728b6ab9ac0b1ba71b/yandextank/common/interfaces.py#L171-L177
|
[
"def",
"count_matched_codes",
"(",
"codes_regex",
",",
"codes_dict",
")",
":",
"total",
"=",
"0",
"for",
"code",
",",
"count",
"in",
"codes_dict",
".",
"items",
"(",
")",
":",
"if",
"codes_regex",
".",
"match",
"(",
"str",
"(",
"code",
")",
")",
":",
"total",
"+=",
"count",
"return",
"total"
] |
d71d63b6ab5de8b8a5ea2b728b6ab9ac0b1ba71b
|
test
|
BFGBase.stop
|
Say the workers to finish their jobs and quit.
|
yandextank/plugins/Bfg/worker.py
|
def stop(self):
"""
Say the workers to finish their jobs and quit.
"""
self.quit.set()
# yapf:disable
while sorted([
self.pool[i].is_alive()
for i in xrange(len(self.pool))])[-1]:
time.sleep(1)
# yapf:enable
try:
while not self.task_queue.empty():
self.task_queue.get(timeout=0.1)
self.task_queue.close()
self.feeder.join()
except Exception as ex:
logger.info(ex)
|
def stop(self):
"""
Say the workers to finish their jobs and quit.
"""
self.quit.set()
# yapf:disable
while sorted([
self.pool[i].is_alive()
for i in xrange(len(self.pool))])[-1]:
time.sleep(1)
# yapf:enable
try:
while not self.task_queue.empty():
self.task_queue.get(timeout=0.1)
self.task_queue.close()
self.feeder.join()
except Exception as ex:
logger.info(ex)
|
[
"Say",
"the",
"workers",
"to",
"finish",
"their",
"jobs",
"and",
"quit",
"."
] |
yandex/yandex-tank
|
python
|
https://github.com/yandex/yandex-tank/blob/d71d63b6ab5de8b8a5ea2b728b6ab9ac0b1ba71b/yandextank/plugins/Bfg/worker.py#L62-L79
|
[
"def",
"stop",
"(",
"self",
")",
":",
"self",
".",
"quit",
".",
"set",
"(",
")",
"# yapf:disable",
"while",
"sorted",
"(",
"[",
"self",
".",
"pool",
"[",
"i",
"]",
".",
"is_alive",
"(",
")",
"for",
"i",
"in",
"xrange",
"(",
"len",
"(",
"self",
".",
"pool",
")",
")",
"]",
")",
"[",
"-",
"1",
"]",
":",
"time",
".",
"sleep",
"(",
"1",
")",
"# yapf:enable",
"try",
":",
"while",
"not",
"self",
".",
"task_queue",
".",
"empty",
"(",
")",
":",
"self",
".",
"task_queue",
".",
"get",
"(",
"timeout",
"=",
"0.1",
")",
"self",
".",
"task_queue",
".",
"close",
"(",
")",
"self",
".",
"feeder",
".",
"join",
"(",
")",
"except",
"Exception",
"as",
"ex",
":",
"logger",
".",
"info",
"(",
"ex",
")"
] |
d71d63b6ab5de8b8a5ea2b728b6ab9ac0b1ba71b
|
test
|
BFGBase._feed
|
A feeder that runs in distinct thread in main process.
|
yandextank/plugins/Bfg/worker.py
|
def _feed(self):
"""
A feeder that runs in distinct thread in main process.
"""
self.plan = StpdReader(self.stpd_filename)
if self.cached_stpd:
self.plan = list(self.plan)
for task in self.plan:
if self.quit.is_set():
logger.info("Stop feeding: gonna quit")
return
# try putting a task to a queue unless there is a quit flag
# or all workers have exited
while True:
try:
self.task_queue.put(task, timeout=1)
break
except Full:
if self.quit.is_set() or self.workers_finished:
return
else:
continue
workers_count = self.instances
logger.info(
"Feeded all data. Publishing %d killer tasks" % (workers_count))
retry_delay = 1
for _ in range(5):
try:
[
self.task_queue.put(None, timeout=1)
for _ in xrange(0, workers_count)
]
break
except Full:
logger.debug(
"Couldn't post killer tasks"
" because queue is full. Retrying in %ss", retry_delay)
time.sleep(retry_delay)
retry_delay *= 2
try:
logger.info("Waiting for workers")
map(lambda x: x.join(), self.pool)
logger.info("All workers exited.")
self.workers_finished = True
except (KeyboardInterrupt, SystemExit):
self.task_queue.close()
self.results.close()
self.quit.set()
logger.info("Going to quit. Waiting for workers")
map(lambda x: x.join(), self.pool)
self.workers_finished = True
|
def _feed(self):
"""
A feeder that runs in distinct thread in main process.
"""
self.plan = StpdReader(self.stpd_filename)
if self.cached_stpd:
self.plan = list(self.plan)
for task in self.plan:
if self.quit.is_set():
logger.info("Stop feeding: gonna quit")
return
# try putting a task to a queue unless there is a quit flag
# or all workers have exited
while True:
try:
self.task_queue.put(task, timeout=1)
break
except Full:
if self.quit.is_set() or self.workers_finished:
return
else:
continue
workers_count = self.instances
logger.info(
"Feeded all data. Publishing %d killer tasks" % (workers_count))
retry_delay = 1
for _ in range(5):
try:
[
self.task_queue.put(None, timeout=1)
for _ in xrange(0, workers_count)
]
break
except Full:
logger.debug(
"Couldn't post killer tasks"
" because queue is full. Retrying in %ss", retry_delay)
time.sleep(retry_delay)
retry_delay *= 2
try:
logger.info("Waiting for workers")
map(lambda x: x.join(), self.pool)
logger.info("All workers exited.")
self.workers_finished = True
except (KeyboardInterrupt, SystemExit):
self.task_queue.close()
self.results.close()
self.quit.set()
logger.info("Going to quit. Waiting for workers")
map(lambda x: x.join(), self.pool)
self.workers_finished = True
|
[
"A",
"feeder",
"that",
"runs",
"in",
"distinct",
"thread",
"in",
"main",
"process",
"."
] |
yandex/yandex-tank
|
python
|
https://github.com/yandex/yandex-tank/blob/d71d63b6ab5de8b8a5ea2b728b6ab9ac0b1ba71b/yandextank/plugins/Bfg/worker.py#L81-L132
|
[
"def",
"_feed",
"(",
"self",
")",
":",
"self",
".",
"plan",
"=",
"StpdReader",
"(",
"self",
".",
"stpd_filename",
")",
"if",
"self",
".",
"cached_stpd",
":",
"self",
".",
"plan",
"=",
"list",
"(",
"self",
".",
"plan",
")",
"for",
"task",
"in",
"self",
".",
"plan",
":",
"if",
"self",
".",
"quit",
".",
"is_set",
"(",
")",
":",
"logger",
".",
"info",
"(",
"\"Stop feeding: gonna quit\"",
")",
"return",
"# try putting a task to a queue unless there is a quit flag",
"# or all workers have exited",
"while",
"True",
":",
"try",
":",
"self",
".",
"task_queue",
".",
"put",
"(",
"task",
",",
"timeout",
"=",
"1",
")",
"break",
"except",
"Full",
":",
"if",
"self",
".",
"quit",
".",
"is_set",
"(",
")",
"or",
"self",
".",
"workers_finished",
":",
"return",
"else",
":",
"continue",
"workers_count",
"=",
"self",
".",
"instances",
"logger",
".",
"info",
"(",
"\"Feeded all data. Publishing %d killer tasks\"",
"%",
"(",
"workers_count",
")",
")",
"retry_delay",
"=",
"1",
"for",
"_",
"in",
"range",
"(",
"5",
")",
":",
"try",
":",
"[",
"self",
".",
"task_queue",
".",
"put",
"(",
"None",
",",
"timeout",
"=",
"1",
")",
"for",
"_",
"in",
"xrange",
"(",
"0",
",",
"workers_count",
")",
"]",
"break",
"except",
"Full",
":",
"logger",
".",
"debug",
"(",
"\"Couldn't post killer tasks\"",
"\" because queue is full. Retrying in %ss\"",
",",
"retry_delay",
")",
"time",
".",
"sleep",
"(",
"retry_delay",
")",
"retry_delay",
"*=",
"2",
"try",
":",
"logger",
".",
"info",
"(",
"\"Waiting for workers\"",
")",
"map",
"(",
"lambda",
"x",
":",
"x",
".",
"join",
"(",
")",
",",
"self",
".",
"pool",
")",
"logger",
".",
"info",
"(",
"\"All workers exited.\"",
")",
"self",
".",
"workers_finished",
"=",
"True",
"except",
"(",
"KeyboardInterrupt",
",",
"SystemExit",
")",
":",
"self",
".",
"task_queue",
".",
"close",
"(",
")",
"self",
".",
"results",
".",
"close",
"(",
")",
"self",
".",
"quit",
".",
"set",
"(",
")",
"logger",
".",
"info",
"(",
"\"Going to quit. Waiting for workers\"",
")",
"map",
"(",
"lambda",
"x",
":",
"x",
".",
"join",
"(",
")",
",",
"self",
".",
"pool",
")",
"self",
".",
"workers_finished",
"=",
"True"
] |
d71d63b6ab5de8b8a5ea2b728b6ab9ac0b1ba71b
|
test
|
BFGMultiprocessing._worker
|
A worker that does actual jobs
|
yandextank/plugins/Bfg/worker.py
|
def _worker(self):
"""
A worker that does actual jobs
"""
logger.debug("Init shooter process")
try:
self.gun.setup()
except Exception:
logger.exception("Couldn't initialize gun. Exit shooter process")
return
while not self.quit.is_set():
try:
task = self.task_queue.get(timeout=1)
if not task:
logger.debug("Got killer task.")
break
timestamp, missile, marker = task
planned_time = self.start_time + (timestamp / 1000.0)
delay = planned_time - time.time()
if delay > 0:
time.sleep(delay)
try:
with self.instance_counter.get_lock():
self.instance_counter.value += 1
self.gun.shoot(missile, marker)
finally:
with self.instance_counter.get_lock():
self.instance_counter.value -= 1
except (KeyboardInterrupt, SystemExit):
break
except Empty:
if self.quit.is_set():
logger.debug("Empty queue. Exiting process")
return
except Full:
logger.warning("Couldn't put to result queue because it's full")
except Exception:
logger.exception("Bfg shoot exception")
try:
self.gun.teardown()
except Exception:
logger.exception("Couldn't finalize gun. Exit shooter process")
return
logger.debug("Exit shooter process")
|
def _worker(self):
"""
A worker that does actual jobs
"""
logger.debug("Init shooter process")
try:
self.gun.setup()
except Exception:
logger.exception("Couldn't initialize gun. Exit shooter process")
return
while not self.quit.is_set():
try:
task = self.task_queue.get(timeout=1)
if not task:
logger.debug("Got killer task.")
break
timestamp, missile, marker = task
planned_time = self.start_time + (timestamp / 1000.0)
delay = planned_time - time.time()
if delay > 0:
time.sleep(delay)
try:
with self.instance_counter.get_lock():
self.instance_counter.value += 1
self.gun.shoot(missile, marker)
finally:
with self.instance_counter.get_lock():
self.instance_counter.value -= 1
except (KeyboardInterrupt, SystemExit):
break
except Empty:
if self.quit.is_set():
logger.debug("Empty queue. Exiting process")
return
except Full:
logger.warning("Couldn't put to result queue because it's full")
except Exception:
logger.exception("Bfg shoot exception")
try:
self.gun.teardown()
except Exception:
logger.exception("Couldn't finalize gun. Exit shooter process")
return
logger.debug("Exit shooter process")
|
[
"A",
"worker",
"that",
"does",
"actual",
"jobs"
] |
yandex/yandex-tank
|
python
|
https://github.com/yandex/yandex-tank/blob/d71d63b6ab5de8b8a5ea2b728b6ab9ac0b1ba71b/yandextank/plugins/Bfg/worker.py#L140-L186
|
[
"def",
"_worker",
"(",
"self",
")",
":",
"logger",
".",
"debug",
"(",
"\"Init shooter process\"",
")",
"try",
":",
"self",
".",
"gun",
".",
"setup",
"(",
")",
"except",
"Exception",
":",
"logger",
".",
"exception",
"(",
"\"Couldn't initialize gun. Exit shooter process\"",
")",
"return",
"while",
"not",
"self",
".",
"quit",
".",
"is_set",
"(",
")",
":",
"try",
":",
"task",
"=",
"self",
".",
"task_queue",
".",
"get",
"(",
"timeout",
"=",
"1",
")",
"if",
"not",
"task",
":",
"logger",
".",
"debug",
"(",
"\"Got killer task.\"",
")",
"break",
"timestamp",
",",
"missile",
",",
"marker",
"=",
"task",
"planned_time",
"=",
"self",
".",
"start_time",
"+",
"(",
"timestamp",
"/",
"1000.0",
")",
"delay",
"=",
"planned_time",
"-",
"time",
".",
"time",
"(",
")",
"if",
"delay",
">",
"0",
":",
"time",
".",
"sleep",
"(",
"delay",
")",
"try",
":",
"with",
"self",
".",
"instance_counter",
".",
"get_lock",
"(",
")",
":",
"self",
".",
"instance_counter",
".",
"value",
"+=",
"1",
"self",
".",
"gun",
".",
"shoot",
"(",
"missile",
",",
"marker",
")",
"finally",
":",
"with",
"self",
".",
"instance_counter",
".",
"get_lock",
"(",
")",
":",
"self",
".",
"instance_counter",
".",
"value",
"-=",
"1",
"except",
"(",
"KeyboardInterrupt",
",",
"SystemExit",
")",
":",
"break",
"except",
"Empty",
":",
"if",
"self",
".",
"quit",
".",
"is_set",
"(",
")",
":",
"logger",
".",
"debug",
"(",
"\"Empty queue. Exiting process\"",
")",
"return",
"except",
"Full",
":",
"logger",
".",
"warning",
"(",
"\"Couldn't put to result queue because it's full\"",
")",
"except",
"Exception",
":",
"logger",
".",
"exception",
"(",
"\"Bfg shoot exception\"",
")",
"try",
":",
"self",
".",
"gun",
".",
"teardown",
"(",
")",
"except",
"Exception",
":",
"logger",
".",
"exception",
"(",
"\"Couldn't finalize gun. Exit shooter process\"",
")",
"return",
"logger",
".",
"debug",
"(",
"\"Exit shooter process\"",
")"
] |
d71d63b6ab5de8b8a5ea2b728b6ab9ac0b1ba71b
|
test
|
BFGGreen._green_worker
|
A worker that does actual jobs
|
yandextank/plugins/Bfg/worker.py
|
def _green_worker(self):
"""
A worker that does actual jobs
"""
while not self.quit.is_set():
try:
task = self.green_queue.get(timeout=1)
timestamp, missile, marker = task
planned_time = self.start_time + (timestamp / 1000.0)
delay = planned_time - time.time()
if delay > 0:
time.sleep(delay)
try:
with self.instance_counter.get_lock():
self.instance_counter.value += 1
self.gun.shoot(missile, marker)
finally:
with self.instance_counter.get_lock():
self.instance_counter.value -= 1
self._free_threads_count += 1
except (KeyboardInterrupt, SystemExit):
break
except Empty:
continue
except Full:
logger.warning("Couldn't put to result queue because it's full")
except Exception:
logger.exception("Bfg shoot exception")
|
def _green_worker(self):
"""
A worker that does actual jobs
"""
while not self.quit.is_set():
try:
task = self.green_queue.get(timeout=1)
timestamp, missile, marker = task
planned_time = self.start_time + (timestamp / 1000.0)
delay = planned_time - time.time()
if delay > 0:
time.sleep(delay)
try:
with self.instance_counter.get_lock():
self.instance_counter.value += 1
self.gun.shoot(missile, marker)
finally:
with self.instance_counter.get_lock():
self.instance_counter.value -= 1
self._free_threads_count += 1
except (KeyboardInterrupt, SystemExit):
break
except Empty:
continue
except Full:
logger.warning("Couldn't put to result queue because it's full")
except Exception:
logger.exception("Bfg shoot exception")
|
[
"A",
"worker",
"that",
"does",
"actual",
"jobs"
] |
yandex/yandex-tank
|
python
|
https://github.com/yandex/yandex-tank/blob/d71d63b6ab5de8b8a5ea2b728b6ab9ac0b1ba71b/yandextank/plugins/Bfg/worker.py#L241-L274
|
[
"def",
"_green_worker",
"(",
"self",
")",
":",
"while",
"not",
"self",
".",
"quit",
".",
"is_set",
"(",
")",
":",
"try",
":",
"task",
"=",
"self",
".",
"green_queue",
".",
"get",
"(",
"timeout",
"=",
"1",
")",
"timestamp",
",",
"missile",
",",
"marker",
"=",
"task",
"planned_time",
"=",
"self",
".",
"start_time",
"+",
"(",
"timestamp",
"/",
"1000.0",
")",
"delay",
"=",
"planned_time",
"-",
"time",
".",
"time",
"(",
")",
"if",
"delay",
">",
"0",
":",
"time",
".",
"sleep",
"(",
"delay",
")",
"try",
":",
"with",
"self",
".",
"instance_counter",
".",
"get_lock",
"(",
")",
":",
"self",
".",
"instance_counter",
".",
"value",
"+=",
"1",
"self",
".",
"gun",
".",
"shoot",
"(",
"missile",
",",
"marker",
")",
"finally",
":",
"with",
"self",
".",
"instance_counter",
".",
"get_lock",
"(",
")",
":",
"self",
".",
"instance_counter",
".",
"value",
"-=",
"1",
"self",
".",
"_free_threads_count",
"+=",
"1",
"except",
"(",
"KeyboardInterrupt",
",",
"SystemExit",
")",
":",
"break",
"except",
"Empty",
":",
"continue",
"except",
"Full",
":",
"logger",
".",
"warning",
"(",
"\"Couldn't put to result queue because it's full\"",
")",
"except",
"Exception",
":",
"logger",
".",
"exception",
"(",
"\"Bfg shoot exception\"",
")"
] |
d71d63b6ab5de8b8a5ea2b728b6ab9ac0b1ba71b
|
test
|
ApiWorker.init_logging
|
Set up logging
|
yandextank/api/apiworker.py
|
def init_logging(self, log_filename="tank.log"):
""" Set up logging """
logger = logging.getLogger('')
self.log_filename = log_filename
self.core.add_artifact_file(self.log_filename)
file_handler = logging.FileHandler(self.log_filename)
file_handler.setLevel(logging.DEBUG)
file_handler.setFormatter(
logging.Formatter(
"%(asctime)s [%(levelname)s] %(name)s %(message)s"))
logger.addHandler(file_handler)
console_handler = logging.StreamHandler(sys.stdout)
stderr_hdl = logging.StreamHandler(sys.stderr)
# fmt_verbose = logging.Formatter(
# "%(asctime)s [%(levelname)s] %(name)s %(message)s")
fmt_regular = logging.Formatter(
"%(asctime)s %(levelname)s: %(message)s", "%H:%M:%S")
console_handler.setLevel(logging.INFO)
console_handler.setFormatter(fmt_regular)
stderr_hdl.setFormatter(fmt_regular)
f_err = SingleLevelFilter(logging.ERROR, True)
f_warn = SingleLevelFilter(logging.WARNING, True)
f_crit = SingleLevelFilter(logging.CRITICAL, True)
console_handler.addFilter(f_err)
console_handler.addFilter(f_warn)
console_handler.addFilter(f_crit)
logger.addHandler(console_handler)
f_info = SingleLevelFilter(logging.INFO, True)
f_debug = SingleLevelFilter(logging.DEBUG, True)
stderr_hdl.addFilter(f_info)
stderr_hdl.addFilter(f_debug)
logger.addHandler(stderr_hdl)
|
def init_logging(self, log_filename="tank.log"):
""" Set up logging """
logger = logging.getLogger('')
self.log_filename = log_filename
self.core.add_artifact_file(self.log_filename)
file_handler = logging.FileHandler(self.log_filename)
file_handler.setLevel(logging.DEBUG)
file_handler.setFormatter(
logging.Formatter(
"%(asctime)s [%(levelname)s] %(name)s %(message)s"))
logger.addHandler(file_handler)
console_handler = logging.StreamHandler(sys.stdout)
stderr_hdl = logging.StreamHandler(sys.stderr)
# fmt_verbose = logging.Formatter(
# "%(asctime)s [%(levelname)s] %(name)s %(message)s")
fmt_regular = logging.Formatter(
"%(asctime)s %(levelname)s: %(message)s", "%H:%M:%S")
console_handler.setLevel(logging.INFO)
console_handler.setFormatter(fmt_regular)
stderr_hdl.setFormatter(fmt_regular)
f_err = SingleLevelFilter(logging.ERROR, True)
f_warn = SingleLevelFilter(logging.WARNING, True)
f_crit = SingleLevelFilter(logging.CRITICAL, True)
console_handler.addFilter(f_err)
console_handler.addFilter(f_warn)
console_handler.addFilter(f_crit)
logger.addHandler(console_handler)
f_info = SingleLevelFilter(logging.INFO, True)
f_debug = SingleLevelFilter(logging.DEBUG, True)
stderr_hdl.addFilter(f_info)
stderr_hdl.addFilter(f_debug)
logger.addHandler(stderr_hdl)
|
[
"Set",
"up",
"logging"
] |
yandex/yandex-tank
|
python
|
https://github.com/yandex/yandex-tank/blob/d71d63b6ab5de8b8a5ea2b728b6ab9ac0b1ba71b/yandextank/api/apiworker.py#L22-L58
|
[
"def",
"init_logging",
"(",
"self",
",",
"log_filename",
"=",
"\"tank.log\"",
")",
":",
"logger",
"=",
"logging",
".",
"getLogger",
"(",
"''",
")",
"self",
".",
"log_filename",
"=",
"log_filename",
"self",
".",
"core",
".",
"add_artifact_file",
"(",
"self",
".",
"log_filename",
")",
"file_handler",
"=",
"logging",
".",
"FileHandler",
"(",
"self",
".",
"log_filename",
")",
"file_handler",
".",
"setLevel",
"(",
"logging",
".",
"DEBUG",
")",
"file_handler",
".",
"setFormatter",
"(",
"logging",
".",
"Formatter",
"(",
"\"%(asctime)s [%(levelname)s] %(name)s %(message)s\"",
")",
")",
"logger",
".",
"addHandler",
"(",
"file_handler",
")",
"console_handler",
"=",
"logging",
".",
"StreamHandler",
"(",
"sys",
".",
"stdout",
")",
"stderr_hdl",
"=",
"logging",
".",
"StreamHandler",
"(",
"sys",
".",
"stderr",
")",
"# fmt_verbose = logging.Formatter(",
"# \"%(asctime)s [%(levelname)s] %(name)s %(message)s\")",
"fmt_regular",
"=",
"logging",
".",
"Formatter",
"(",
"\"%(asctime)s %(levelname)s: %(message)s\"",
",",
"\"%H:%M:%S\"",
")",
"console_handler",
".",
"setLevel",
"(",
"logging",
".",
"INFO",
")",
"console_handler",
".",
"setFormatter",
"(",
"fmt_regular",
")",
"stderr_hdl",
".",
"setFormatter",
"(",
"fmt_regular",
")",
"f_err",
"=",
"SingleLevelFilter",
"(",
"logging",
".",
"ERROR",
",",
"True",
")",
"f_warn",
"=",
"SingleLevelFilter",
"(",
"logging",
".",
"WARNING",
",",
"True",
")",
"f_crit",
"=",
"SingleLevelFilter",
"(",
"logging",
".",
"CRITICAL",
",",
"True",
")",
"console_handler",
".",
"addFilter",
"(",
"f_err",
")",
"console_handler",
".",
"addFilter",
"(",
"f_warn",
")",
"console_handler",
".",
"addFilter",
"(",
"f_crit",
")",
"logger",
".",
"addHandler",
"(",
"console_handler",
")",
"f_info",
"=",
"SingleLevelFilter",
"(",
"logging",
".",
"INFO",
",",
"True",
")",
"f_debug",
"=",
"SingleLevelFilter",
"(",
"logging",
".",
"DEBUG",
",",
"True",
")",
"stderr_hdl",
".",
"addFilter",
"(",
"f_info",
")",
"stderr_hdl",
".",
"addFilter",
"(",
"f_debug",
")",
"logger",
".",
"addHandler",
"(",
"stderr_hdl",
")"
] |
d71d63b6ab5de8b8a5ea2b728b6ab9ac0b1ba71b
|
test
|
ApiWorker.__add_user_options
|
override config options with user specified options
|
yandextank/api/apiworker.py
|
def __add_user_options(self):
""" override config options with user specified options"""
if self.options.get('user_options', None):
self.core.apply_shorthand_options(self.options['user_options'])
|
def __add_user_options(self):
""" override config options with user specified options"""
if self.options.get('user_options', None):
self.core.apply_shorthand_options(self.options['user_options'])
|
[
"override",
"config",
"options",
"with",
"user",
"specified",
"options"
] |
yandex/yandex-tank
|
python
|
https://github.com/yandex/yandex-tank/blob/d71d63b6ab5de8b8a5ea2b728b6ab9ac0b1ba71b/yandextank/api/apiworker.py#L60-L63
|
[
"def",
"__add_user_options",
"(",
"self",
")",
":",
"if",
"self",
".",
"options",
".",
"get",
"(",
"'user_options'",
",",
"None",
")",
":",
"self",
".",
"core",
".",
"apply_shorthand_options",
"(",
"self",
".",
"options",
"[",
"'user_options'",
"]",
")"
] |
d71d63b6ab5de8b8a5ea2b728b6ab9ac0b1ba71b
|
test
|
ApiWorker.configure
|
Make preparations before running Tank
|
yandextank/api/apiworker.py
|
def configure(self, options):
""" Make preparations before running Tank """
self.options = options
if self.options.get('lock_dir', None):
self.core.set_option(self.core.SECTION, "lock_dir", self.options['lock_dir'])
if self.options.get('ignore_lock', None):
self.core.set_option(self.core.SECTION, 'ignore_lock', self.options['ignore_lock'])
while True:
try:
self.core.get_lock()
break
except Exception as exc:
if self.options.get('lock_fail', None):
raise RuntimeError("Lock file present, cannot continue")
self.log.info(
"Couldn't get lock. Will retry in 5 seconds... (%s)",
str(exc))
time.sleep(5)
configs = self.get_default_configs()
if self.options.get('config', None):
configs.append(self.options['config'])
self.core.load_configs(configs)
self.__add_user_options()
self.core.load_plugins()
if self.options.get('ignore_lock', None):
self.core.set_option(self.core.SECTION, self.IGNORE_LOCKS, "1")
|
def configure(self, options):
""" Make preparations before running Tank """
self.options = options
if self.options.get('lock_dir', None):
self.core.set_option(self.core.SECTION, "lock_dir", self.options['lock_dir'])
if self.options.get('ignore_lock', None):
self.core.set_option(self.core.SECTION, 'ignore_lock', self.options['ignore_lock'])
while True:
try:
self.core.get_lock()
break
except Exception as exc:
if self.options.get('lock_fail', None):
raise RuntimeError("Lock file present, cannot continue")
self.log.info(
"Couldn't get lock. Will retry in 5 seconds... (%s)",
str(exc))
time.sleep(5)
configs = self.get_default_configs()
if self.options.get('config', None):
configs.append(self.options['config'])
self.core.load_configs(configs)
self.__add_user_options()
self.core.load_plugins()
if self.options.get('ignore_lock', None):
self.core.set_option(self.core.SECTION, self.IGNORE_LOCKS, "1")
|
[
"Make",
"preparations",
"before",
"running",
"Tank"
] |
yandex/yandex-tank
|
python
|
https://github.com/yandex/yandex-tank/blob/d71d63b6ab5de8b8a5ea2b728b6ab9ac0b1ba71b/yandextank/api/apiworker.py#L65-L93
|
[
"def",
"configure",
"(",
"self",
",",
"options",
")",
":",
"self",
".",
"options",
"=",
"options",
"if",
"self",
".",
"options",
".",
"get",
"(",
"'lock_dir'",
",",
"None",
")",
":",
"self",
".",
"core",
".",
"set_option",
"(",
"self",
".",
"core",
".",
"SECTION",
",",
"\"lock_dir\"",
",",
"self",
".",
"options",
"[",
"'lock_dir'",
"]",
")",
"if",
"self",
".",
"options",
".",
"get",
"(",
"'ignore_lock'",
",",
"None",
")",
":",
"self",
".",
"core",
".",
"set_option",
"(",
"self",
".",
"core",
".",
"SECTION",
",",
"'ignore_lock'",
",",
"self",
".",
"options",
"[",
"'ignore_lock'",
"]",
")",
"while",
"True",
":",
"try",
":",
"self",
".",
"core",
".",
"get_lock",
"(",
")",
"break",
"except",
"Exception",
"as",
"exc",
":",
"if",
"self",
".",
"options",
".",
"get",
"(",
"'lock_fail'",
",",
"None",
")",
":",
"raise",
"RuntimeError",
"(",
"\"Lock file present, cannot continue\"",
")",
"self",
".",
"log",
".",
"info",
"(",
"\"Couldn't get lock. Will retry in 5 seconds... (%s)\"",
",",
"str",
"(",
"exc",
")",
")",
"time",
".",
"sleep",
"(",
"5",
")",
"configs",
"=",
"self",
".",
"get_default_configs",
"(",
")",
"if",
"self",
".",
"options",
".",
"get",
"(",
"'config'",
",",
"None",
")",
":",
"configs",
".",
"append",
"(",
"self",
".",
"options",
"[",
"'config'",
"]",
")",
"self",
".",
"core",
".",
"load_configs",
"(",
"configs",
")",
"self",
".",
"__add_user_options",
"(",
")",
"self",
".",
"core",
".",
"load_plugins",
"(",
")",
"if",
"self",
".",
"options",
".",
"get",
"(",
"'ignore_lock'",
",",
"None",
")",
":",
"self",
".",
"core",
".",
"set_option",
"(",
"self",
".",
"core",
".",
"SECTION",
",",
"self",
".",
"IGNORE_LOCKS",
",",
"\"1\"",
")"
] |
d71d63b6ab5de8b8a5ea2b728b6ab9ac0b1ba71b
|
test
|
ApiWorker.get_default_configs
|
returns default configs list, from /etc, home dir and package_data
|
yandextank/api/apiworker.py
|
def get_default_configs(self):
""" returns default configs list, from /etc, home dir and package_data"""
# initialize basic defaults
configs = [resource_filename(__name__, 'config/00-base.ini')]
try:
conf_files = sorted(os.listdir(self.baseconfigs_location))
for filename in conf_files:
if fnmatch.fnmatch(filename, '*.ini'):
configs += [
os.path.realpath(
self.baseconfigs_location + os.sep + filename)
]
except OSError:
self.log.warn(
self.baseconfigs_location + ' is not accessible to get configs list')
configs += [os.path.expanduser('~/.yandex-tank')]
return configs
|
def get_default_configs(self):
""" returns default configs list, from /etc, home dir and package_data"""
# initialize basic defaults
configs = [resource_filename(__name__, 'config/00-base.ini')]
try:
conf_files = sorted(os.listdir(self.baseconfigs_location))
for filename in conf_files:
if fnmatch.fnmatch(filename, '*.ini'):
configs += [
os.path.realpath(
self.baseconfigs_location + os.sep + filename)
]
except OSError:
self.log.warn(
self.baseconfigs_location + ' is not accessible to get configs list')
configs += [os.path.expanduser('~/.yandex-tank')]
return configs
|
[
"returns",
"default",
"configs",
"list",
"from",
"/",
"etc",
"home",
"dir",
"and",
"package_data"
] |
yandex/yandex-tank
|
python
|
https://github.com/yandex/yandex-tank/blob/d71d63b6ab5de8b8a5ea2b728b6ab9ac0b1ba71b/yandextank/api/apiworker.py#L135-L152
|
[
"def",
"get_default_configs",
"(",
"self",
")",
":",
"# initialize basic defaults",
"configs",
"=",
"[",
"resource_filename",
"(",
"__name__",
",",
"'config/00-base.ini'",
")",
"]",
"try",
":",
"conf_files",
"=",
"sorted",
"(",
"os",
".",
"listdir",
"(",
"self",
".",
"baseconfigs_location",
")",
")",
"for",
"filename",
"in",
"conf_files",
":",
"if",
"fnmatch",
".",
"fnmatch",
"(",
"filename",
",",
"'*.ini'",
")",
":",
"configs",
"+=",
"[",
"os",
".",
"path",
".",
"realpath",
"(",
"self",
".",
"baseconfigs_location",
"+",
"os",
".",
"sep",
"+",
"filename",
")",
"]",
"except",
"OSError",
":",
"self",
".",
"log",
".",
"warn",
"(",
"self",
".",
"baseconfigs_location",
"+",
"' is not accessible to get configs list'",
")",
"configs",
"+=",
"[",
"os",
".",
"path",
".",
"expanduser",
"(",
"'~/.yandex-tank'",
")",
"]",
"return",
"configs"
] |
d71d63b6ab5de8b8a5ea2b728b6ab9ac0b1ba71b
|
test
|
ApiWorker.__graceful_shutdown
|
call shutdown routines
|
yandextank/api/apiworker.py
|
def __graceful_shutdown(self):
""" call shutdown routines """
retcode = 1
self.log.info("Trying to shutdown gracefully...")
retcode = self.core.plugins_end_test(retcode)
retcode = self.core.plugins_post_process(retcode)
self.log.info("Done graceful shutdown")
return retcode
|
def __graceful_shutdown(self):
""" call shutdown routines """
retcode = 1
self.log.info("Trying to shutdown gracefully...")
retcode = self.core.plugins_end_test(retcode)
retcode = self.core.plugins_post_process(retcode)
self.log.info("Done graceful shutdown")
return retcode
|
[
"call",
"shutdown",
"routines"
] |
yandex/yandex-tank
|
python
|
https://github.com/yandex/yandex-tank/blob/d71d63b6ab5de8b8a5ea2b728b6ab9ac0b1ba71b/yandextank/api/apiworker.py#L154-L161
|
[
"def",
"__graceful_shutdown",
"(",
"self",
")",
":",
"retcode",
"=",
"1",
"self",
".",
"log",
".",
"info",
"(",
"\"Trying to shutdown gracefully...\"",
")",
"retcode",
"=",
"self",
".",
"core",
".",
"plugins_end_test",
"(",
"retcode",
")",
"retcode",
"=",
"self",
".",
"core",
".",
"plugins_post_process",
"(",
"retcode",
")",
"self",
".",
"log",
".",
"info",
"(",
"\"Done graceful shutdown\"",
")",
"return",
"retcode"
] |
d71d63b6ab5de8b8a5ea2b728b6ab9ac0b1ba71b
|
test
|
TankAggregator._collect_data
|
Collect data, cache it and send to listeners
|
yandextank/aggregator/tank_aggregator.py
|
def _collect_data(self, end=False):
"""
Collect data, cache it and send to listeners
"""
data = get_nowait_from_queue(self.results)
stats = get_nowait_from_queue(self.stats_results)
logger.debug("Data timestamps: %s" % [d.get('ts') for d in data])
logger.debug("Stats timestamps: %s" % [d.get('ts') for d in stats])
for item in data:
ts = item['ts']
if ts in self.stat_cache:
# send items
data_item = item
stat_item = self.stat_cache.pop(ts)
self.__notify_listeners(data_item, stat_item)
else:
self.data_cache[ts] = item
for item in stats:
ts = item['ts']
if ts in self.data_cache:
# send items
data_item = self.data_cache.pop(ts)
stat_item = item
self.__notify_listeners(data_item, stat_item)
else:
self.stat_cache[ts] = item
if end and len(self.data_cache) > 0:
logger.info('Timestamps without stats:')
for ts, data_item in sorted(self.data_cache.items(), key=lambda i: i[0]):
logger.info(ts)
self.__notify_listeners(data_item, StatsReader.stats_item(ts, 0, 0))
|
def _collect_data(self, end=False):
"""
Collect data, cache it and send to listeners
"""
data = get_nowait_from_queue(self.results)
stats = get_nowait_from_queue(self.stats_results)
logger.debug("Data timestamps: %s" % [d.get('ts') for d in data])
logger.debug("Stats timestamps: %s" % [d.get('ts') for d in stats])
for item in data:
ts = item['ts']
if ts in self.stat_cache:
# send items
data_item = item
stat_item = self.stat_cache.pop(ts)
self.__notify_listeners(data_item, stat_item)
else:
self.data_cache[ts] = item
for item in stats:
ts = item['ts']
if ts in self.data_cache:
# send items
data_item = self.data_cache.pop(ts)
stat_item = item
self.__notify_listeners(data_item, stat_item)
else:
self.stat_cache[ts] = item
if end and len(self.data_cache) > 0:
logger.info('Timestamps without stats:')
for ts, data_item in sorted(self.data_cache.items(), key=lambda i: i[0]):
logger.info(ts)
self.__notify_listeners(data_item, StatsReader.stats_item(ts, 0, 0))
|
[
"Collect",
"data",
"cache",
"it",
"and",
"send",
"to",
"listeners"
] |
yandex/yandex-tank
|
python
|
https://github.com/yandex/yandex-tank/blob/d71d63b6ab5de8b8a5ea2b728b6ab9ac0b1ba71b/yandextank/aggregator/tank_aggregator.py#L81-L111
|
[
"def",
"_collect_data",
"(",
"self",
",",
"end",
"=",
"False",
")",
":",
"data",
"=",
"get_nowait_from_queue",
"(",
"self",
".",
"results",
")",
"stats",
"=",
"get_nowait_from_queue",
"(",
"self",
".",
"stats_results",
")",
"logger",
".",
"debug",
"(",
"\"Data timestamps: %s\"",
"%",
"[",
"d",
".",
"get",
"(",
"'ts'",
")",
"for",
"d",
"in",
"data",
"]",
")",
"logger",
".",
"debug",
"(",
"\"Stats timestamps: %s\"",
"%",
"[",
"d",
".",
"get",
"(",
"'ts'",
")",
"for",
"d",
"in",
"stats",
"]",
")",
"for",
"item",
"in",
"data",
":",
"ts",
"=",
"item",
"[",
"'ts'",
"]",
"if",
"ts",
"in",
"self",
".",
"stat_cache",
":",
"# send items",
"data_item",
"=",
"item",
"stat_item",
"=",
"self",
".",
"stat_cache",
".",
"pop",
"(",
"ts",
")",
"self",
".",
"__notify_listeners",
"(",
"data_item",
",",
"stat_item",
")",
"else",
":",
"self",
".",
"data_cache",
"[",
"ts",
"]",
"=",
"item",
"for",
"item",
"in",
"stats",
":",
"ts",
"=",
"item",
"[",
"'ts'",
"]",
"if",
"ts",
"in",
"self",
".",
"data_cache",
":",
"# send items",
"data_item",
"=",
"self",
".",
"data_cache",
".",
"pop",
"(",
"ts",
")",
"stat_item",
"=",
"item",
"self",
".",
"__notify_listeners",
"(",
"data_item",
",",
"stat_item",
")",
"else",
":",
"self",
".",
"stat_cache",
"[",
"ts",
"]",
"=",
"item",
"if",
"end",
"and",
"len",
"(",
"self",
".",
"data_cache",
")",
">",
"0",
":",
"logger",
".",
"info",
"(",
"'Timestamps without stats:'",
")",
"for",
"ts",
",",
"data_item",
"in",
"sorted",
"(",
"self",
".",
"data_cache",
".",
"items",
"(",
")",
",",
"key",
"=",
"lambda",
"i",
":",
"i",
"[",
"0",
"]",
")",
":",
"logger",
".",
"info",
"(",
"ts",
")",
"self",
".",
"__notify_listeners",
"(",
"data_item",
",",
"StatsReader",
".",
"stats_item",
"(",
"ts",
",",
"0",
",",
"0",
")",
")"
] |
d71d63b6ab5de8b8a5ea2b728b6ab9ac0b1ba71b
|
test
|
TankAggregator.__notify_listeners
|
notify all listeners about aggregate data and stats
|
yandextank/aggregator/tank_aggregator.py
|
def __notify_listeners(self, data, stats):
""" notify all listeners about aggregate data and stats """
for listener in self.listeners:
listener.on_aggregated_data(data, stats)
|
def __notify_listeners(self, data, stats):
""" notify all listeners about aggregate data and stats """
for listener in self.listeners:
listener.on_aggregated_data(data, stats)
|
[
"notify",
"all",
"listeners",
"about",
"aggregate",
"data",
"and",
"stats"
] |
yandex/yandex-tank
|
python
|
https://github.com/yandex/yandex-tank/blob/d71d63b6ab5de8b8a5ea2b728b6ab9ac0b1ba71b/yandextank/aggregator/tank_aggregator.py#L137-L140
|
[
"def",
"__notify_listeners",
"(",
"self",
",",
"data",
",",
"stats",
")",
":",
"for",
"listener",
"in",
"self",
".",
"listeners",
":",
"listener",
".",
"on_aggregated_data",
"(",
"data",
",",
"stats",
")"
] |
d71d63b6ab5de8b8a5ea2b728b6ab9ac0b1ba71b
|
test
|
get_marker
|
Returns a marker function of the requested marker_type
>>> marker = get_marker('uniq')(__test_missile)
>>> type(marker)
<type 'str'>
>>> len(marker)
32
>>> get_marker('uri')(__test_missile)
'_example_search_hello_help_us'
>>> marker = get_marker('non-existent')(__test_missile)
Traceback (most recent call last):
...
NotImplementedError: No such marker: "non-existent"
>>> get_marker('3')(__test_missile)
'_example_search_hello'
>>> marker = get_marker('3', True)
>>> marker(__test_missile)
'_example_search_hello#0'
>>> marker(__test_missile)
'_example_search_hello#1'
|
yandextank/stepper/mark.py
|
def get_marker(marker_type, enum_ammo=False):
'''
Returns a marker function of the requested marker_type
>>> marker = get_marker('uniq')(__test_missile)
>>> type(marker)
<type 'str'>
>>> len(marker)
32
>>> get_marker('uri')(__test_missile)
'_example_search_hello_help_us'
>>> marker = get_marker('non-existent')(__test_missile)
Traceback (most recent call last):
...
NotImplementedError: No such marker: "non-existent"
>>> get_marker('3')(__test_missile)
'_example_search_hello'
>>> marker = get_marker('3', True)
>>> marker(__test_missile)
'_example_search_hello#0'
>>> marker(__test_missile)
'_example_search_hello#1'
'''
try:
limit = int(marker_type)
if limit:
marker = __UriMarker(limit)
else:
def marker(m):
return ''
except ValueError:
if marker_type in __markers:
marker = __markers[marker_type]
else:
raise NotImplementedError('No such marker: "%s"' % marker_type)
# todo: fix u'False'
if enum_ammo:
marker = __Enumerator(marker)
return marker
|
def get_marker(marker_type, enum_ammo=False):
'''
Returns a marker function of the requested marker_type
>>> marker = get_marker('uniq')(__test_missile)
>>> type(marker)
<type 'str'>
>>> len(marker)
32
>>> get_marker('uri')(__test_missile)
'_example_search_hello_help_us'
>>> marker = get_marker('non-existent')(__test_missile)
Traceback (most recent call last):
...
NotImplementedError: No such marker: "non-existent"
>>> get_marker('3')(__test_missile)
'_example_search_hello'
>>> marker = get_marker('3', True)
>>> marker(__test_missile)
'_example_search_hello#0'
>>> marker(__test_missile)
'_example_search_hello#1'
'''
try:
limit = int(marker_type)
if limit:
marker = __UriMarker(limit)
else:
def marker(m):
return ''
except ValueError:
if marker_type in __markers:
marker = __markers[marker_type]
else:
raise NotImplementedError('No such marker: "%s"' % marker_type)
# todo: fix u'False'
if enum_ammo:
marker = __Enumerator(marker)
return marker
|
[
"Returns",
"a",
"marker",
"function",
"of",
"the",
"requested",
"marker_type"
] |
yandex/yandex-tank
|
python
|
https://github.com/yandex/yandex-tank/blob/d71d63b6ab5de8b8a5ea2b728b6ab9ac0b1ba71b/yandextank/stepper/mark.py#L54-L98
|
[
"def",
"get_marker",
"(",
"marker_type",
",",
"enum_ammo",
"=",
"False",
")",
":",
"try",
":",
"limit",
"=",
"int",
"(",
"marker_type",
")",
"if",
"limit",
":",
"marker",
"=",
"__UriMarker",
"(",
"limit",
")",
"else",
":",
"def",
"marker",
"(",
"m",
")",
":",
"return",
"''",
"except",
"ValueError",
":",
"if",
"marker_type",
"in",
"__markers",
":",
"marker",
"=",
"__markers",
"[",
"marker_type",
"]",
"else",
":",
"raise",
"NotImplementedError",
"(",
"'No such marker: \"%s\"'",
"%",
"marker_type",
")",
"# todo: fix u'False'",
"if",
"enum_ammo",
":",
"marker",
"=",
"__Enumerator",
"(",
"marker",
")",
"return",
"marker"
] |
d71d63b6ab5de8b8a5ea2b728b6ab9ac0b1ba71b
|
test
|
get_uploader
|
:type column_mapping: dict
:type data_session: DataSession
|
yandextank/plugins/NeUploader/plugin.py
|
def get_uploader(data_session, column_mapping, overall_only=False):
"""
:type column_mapping: dict
:type data_session: DataSession
"""
overall = {col_name: data_session.new_aggregated_metric(name + ' overall')
for col_name, name in column_mapping.items()}
def upload_df(df):
for col_name, metric in overall.items():
df['value'] = df[col_name]
metric.put(df)
return upload_df
|
def get_uploader(data_session, column_mapping, overall_only=False):
"""
:type column_mapping: dict
:type data_session: DataSession
"""
overall = {col_name: data_session.new_aggregated_metric(name + ' overall')
for col_name, name in column_mapping.items()}
def upload_df(df):
for col_name, metric in overall.items():
df['value'] = df[col_name]
metric.put(df)
return upload_df
|
[
":",
"type",
"column_mapping",
":",
"dict",
":",
"type",
"data_session",
":",
"DataSession"
] |
yandex/yandex-tank
|
python
|
https://github.com/yandex/yandex-tank/blob/d71d63b6ab5de8b8a5ea2b728b6ab9ac0b1ba71b/yandextank/plugins/NeUploader/plugin.py#L81-L93
|
[
"def",
"get_uploader",
"(",
"data_session",
",",
"column_mapping",
",",
"overall_only",
"=",
"False",
")",
":",
"overall",
"=",
"{",
"col_name",
":",
"data_session",
".",
"new_aggregated_metric",
"(",
"name",
"+",
"' overall'",
")",
"for",
"col_name",
",",
"name",
"in",
"column_mapping",
".",
"items",
"(",
")",
"}",
"def",
"upload_df",
"(",
"df",
")",
":",
"for",
"col_name",
",",
"metric",
"in",
"overall",
".",
"items",
"(",
")",
":",
"df",
"[",
"'value'",
"]",
"=",
"df",
"[",
"col_name",
"]",
"metric",
".",
"put",
"(",
"df",
")",
"return",
"upload_df"
] |
d71d63b6ab5de8b8a5ea2b728b6ab9ac0b1ba71b
|
test
|
cfg_folder_loader
|
:type path: str
|
yandextank/core/consoleworker.py
|
def cfg_folder_loader(path):
"""
:type path: str
"""
CFG_WILDCARD = '*.yaml'
return [load_cfg(filename) for filename in sorted(glob.glob(os.path.join(path, CFG_WILDCARD)))]
|
def cfg_folder_loader(path):
"""
:type path: str
"""
CFG_WILDCARD = '*.yaml'
return [load_cfg(filename) for filename in sorted(glob.glob(os.path.join(path, CFG_WILDCARD)))]
|
[
":",
"type",
"path",
":",
"str"
] |
yandex/yandex-tank
|
python
|
https://github.com/yandex/yandex-tank/blob/d71d63b6ab5de8b8a5ea2b728b6ab9ac0b1ba71b/yandextank/core/consoleworker.py#L81-L86
|
[
"def",
"cfg_folder_loader",
"(",
"path",
")",
":",
"CFG_WILDCARD",
"=",
"'*.yaml'",
"return",
"[",
"load_cfg",
"(",
"filename",
")",
"for",
"filename",
"in",
"sorted",
"(",
"glob",
".",
"glob",
"(",
"os",
".",
"path",
".",
"join",
"(",
"path",
",",
"CFG_WILDCARD",
")",
")",
")",
"]"
] |
d71d63b6ab5de8b8a5ea2b728b6ab9ac0b1ba71b
|
test
|
parse_options
|
:type options: list of str
:rtype: list of dict
|
yandextank/core/consoleworker.py
|
def parse_options(options):
"""
:type options: list of str
:rtype: list of dict
"""
if options is None:
return []
else:
return [
convert_single_option(key.strip(), value.strip())
for key, value
in [option.split('=', 1) for option in options]
]
|
def parse_options(options):
"""
:type options: list of str
:rtype: list of dict
"""
if options is None:
return []
else:
return [
convert_single_option(key.strip(), value.strip())
for key, value
in [option.split('=', 1) for option in options]
]
|
[
":",
"type",
"options",
":",
"list",
"of",
"str",
":",
"rtype",
":",
"list",
"of",
"dict"
] |
yandex/yandex-tank
|
python
|
https://github.com/yandex/yandex-tank/blob/d71d63b6ab5de8b8a5ea2b728b6ab9ac0b1ba71b/yandextank/core/consoleworker.py#L97-L109
|
[
"def",
"parse_options",
"(",
"options",
")",
":",
"if",
"options",
"is",
"None",
":",
"return",
"[",
"]",
"else",
":",
"return",
"[",
"convert_single_option",
"(",
"key",
".",
"strip",
"(",
")",
",",
"value",
".",
"strip",
"(",
")",
")",
"for",
"key",
",",
"value",
"in",
"[",
"option",
".",
"split",
"(",
"'='",
",",
"1",
")",
"for",
"option",
"in",
"options",
"]",
"]"
] |
d71d63b6ab5de8b8a5ea2b728b6ab9ac0b1ba71b
|
test
|
get_default_configs
|
returns default configs list, from /etc and home dir
|
yandextank/core/consoleworker.py
|
def get_default_configs():
""" returns default configs list, from /etc and home dir """
# initialize basic defaults
configs = [resource_filename(__name__, 'config/00-base.ini')]
baseconfigs_location = '/etc/yandex-tank'
try:
conf_files = sorted(os.listdir(baseconfigs_location))
for filename in conf_files:
if fnmatch.fnmatch(filename, '*.ini'):
configs += [
os.path.realpath(
baseconfigs_location + os.sep + filename)
]
except OSError:
logger.info(
baseconfigs_location + ' is not accessible to get configs list')
configs += [os.path.expanduser('~/.yandex-tank')]
return configs
|
def get_default_configs():
""" returns default configs list, from /etc and home dir """
# initialize basic defaults
configs = [resource_filename(__name__, 'config/00-base.ini')]
baseconfigs_location = '/etc/yandex-tank'
try:
conf_files = sorted(os.listdir(baseconfigs_location))
for filename in conf_files:
if fnmatch.fnmatch(filename, '*.ini'):
configs += [
os.path.realpath(
baseconfigs_location + os.sep + filename)
]
except OSError:
logger.info(
baseconfigs_location + ' is not accessible to get configs list')
configs += [os.path.expanduser('~/.yandex-tank')]
return configs
|
[
"returns",
"default",
"configs",
"list",
"from",
"/",
"etc",
"and",
"home",
"dir"
] |
yandex/yandex-tank
|
python
|
https://github.com/yandex/yandex-tank/blob/d71d63b6ab5de8b8a5ea2b728b6ab9ac0b1ba71b/yandextank/core/consoleworker.py#L149-L167
|
[
"def",
"get_default_configs",
"(",
")",
":",
"# initialize basic defaults",
"configs",
"=",
"[",
"resource_filename",
"(",
"__name__",
",",
"'config/00-base.ini'",
")",
"]",
"baseconfigs_location",
"=",
"'/etc/yandex-tank'",
"try",
":",
"conf_files",
"=",
"sorted",
"(",
"os",
".",
"listdir",
"(",
"baseconfigs_location",
")",
")",
"for",
"filename",
"in",
"conf_files",
":",
"if",
"fnmatch",
".",
"fnmatch",
"(",
"filename",
",",
"'*.ini'",
")",
":",
"configs",
"+=",
"[",
"os",
".",
"path",
".",
"realpath",
"(",
"baseconfigs_location",
"+",
"os",
".",
"sep",
"+",
"filename",
")",
"]",
"except",
"OSError",
":",
"logger",
".",
"info",
"(",
"baseconfigs_location",
"+",
"' is not accessible to get configs list'",
")",
"configs",
"+=",
"[",
"os",
".",
"path",
".",
"expanduser",
"(",
"'~/.yandex-tank'",
")",
"]",
"return",
"configs"
] |
d71d63b6ab5de8b8a5ea2b728b6ab9ac0b1ba71b
|
test
|
RealConsoleMarkup.clean_markup
|
clean markup from string
|
yandextank/core/consoleworker.py
|
def clean_markup(self, orig_str):
''' clean markup from string '''
for val in [
self.YELLOW, self.RED, self.RESET, self.CYAN, self.BG_MAGENTA,
self.WHITE, self.BG_GREEN, self.GREEN, self.BG_BROWN,
self.RED_DARK, self.MAGENTA, self.BG_CYAN
]:
orig_str = orig_str.replace(val, '')
return orig_str
|
def clean_markup(self, orig_str):
''' clean markup from string '''
for val in [
self.YELLOW, self.RED, self.RESET, self.CYAN, self.BG_MAGENTA,
self.WHITE, self.BG_GREEN, self.GREEN, self.BG_BROWN,
self.RED_DARK, self.MAGENTA, self.BG_CYAN
]:
orig_str = orig_str.replace(val, '')
return orig_str
|
[
"clean",
"markup",
"from",
"string"
] |
yandex/yandex-tank
|
python
|
https://github.com/yandex/yandex-tank/blob/d71d63b6ab5de8b8a5ea2b728b6ab9ac0b1ba71b/yandextank/core/consoleworker.py#L49-L57
|
[
"def",
"clean_markup",
"(",
"self",
",",
"orig_str",
")",
":",
"for",
"val",
"in",
"[",
"self",
".",
"YELLOW",
",",
"self",
".",
"RED",
",",
"self",
".",
"RESET",
",",
"self",
".",
"CYAN",
",",
"self",
".",
"BG_MAGENTA",
",",
"self",
".",
"WHITE",
",",
"self",
".",
"BG_GREEN",
",",
"self",
".",
"GREEN",
",",
"self",
".",
"BG_BROWN",
",",
"self",
".",
"RED_DARK",
",",
"self",
".",
"MAGENTA",
",",
"self",
".",
"BG_CYAN",
"]",
":",
"orig_str",
"=",
"orig_str",
".",
"replace",
"(",
"val",
",",
"''",
")",
"return",
"orig_str"
] |
d71d63b6ab5de8b8a5ea2b728b6ab9ac0b1ba71b
|
test
|
parse_duration
|
Parse duration string, such as '3h2m3s' into milliseconds
>>> parse_duration('3h2m3s')
10923000
>>> parse_duration('0.3s')
300
>>> parse_duration('5')
5000
|
yandextank/stepper/util.py
|
def parse_duration(duration):
'''
Parse duration string, such as '3h2m3s' into milliseconds
>>> parse_duration('3h2m3s')
10923000
>>> parse_duration('0.3s')
300
>>> parse_duration('5')
5000
'''
_re_token = re.compile("([0-9.]+)([dhms]?)")
def parse_token(time, multiplier):
multipliers = {
'd': 86400,
'h': 3600,
'm': 60,
's': 1,
}
if multiplier:
if multiplier in multipliers:
return int(float(time) * multipliers[multiplier] * 1000)
else:
raise StepperConfigurationError(
'Failed to parse duration: %s' % duration)
else:
return int(float(time) * 1000)
return sum(parse_token(*token) for token in _re_token.findall(duration))
|
def parse_duration(duration):
'''
Parse duration string, such as '3h2m3s' into milliseconds
>>> parse_duration('3h2m3s')
10923000
>>> parse_duration('0.3s')
300
>>> parse_duration('5')
5000
'''
_re_token = re.compile("([0-9.]+)([dhms]?)")
def parse_token(time, multiplier):
multipliers = {
'd': 86400,
'h': 3600,
'm': 60,
's': 1,
}
if multiplier:
if multiplier in multipliers:
return int(float(time) * multipliers[multiplier] * 1000)
else:
raise StepperConfigurationError(
'Failed to parse duration: %s' % duration)
else:
return int(float(time) * 1000)
return sum(parse_token(*token) for token in _re_token.findall(duration))
|
[
"Parse",
"duration",
"string",
"such",
"as",
"3h2m3s",
"into",
"milliseconds"
] |
yandex/yandex-tank
|
python
|
https://github.com/yandex/yandex-tank/blob/d71d63b6ab5de8b8a5ea2b728b6ab9ac0b1ba71b/yandextank/stepper/util.py#L18-L49
|
[
"def",
"parse_duration",
"(",
"duration",
")",
":",
"_re_token",
"=",
"re",
".",
"compile",
"(",
"\"([0-9.]+)([dhms]?)\"",
")",
"def",
"parse_token",
"(",
"time",
",",
"multiplier",
")",
":",
"multipliers",
"=",
"{",
"'d'",
":",
"86400",
",",
"'h'",
":",
"3600",
",",
"'m'",
":",
"60",
",",
"'s'",
":",
"1",
",",
"}",
"if",
"multiplier",
":",
"if",
"multiplier",
"in",
"multipliers",
":",
"return",
"int",
"(",
"float",
"(",
"time",
")",
"*",
"multipliers",
"[",
"multiplier",
"]",
"*",
"1000",
")",
"else",
":",
"raise",
"StepperConfigurationError",
"(",
"'Failed to parse duration: %s'",
"%",
"duration",
")",
"else",
":",
"return",
"int",
"(",
"float",
"(",
"time",
")",
"*",
"1000",
")",
"return",
"sum",
"(",
"parse_token",
"(",
"*",
"token",
")",
"for",
"token",
"in",
"_re_token",
".",
"findall",
"(",
"duration",
")",
")"
] |
d71d63b6ab5de8b8a5ea2b728b6ab9ac0b1ba71b
|
test
|
solve_quadratic
|
>>> solve_quadratic(1.0, 2.0, 1.0)
(-1.0, -1.0)
|
yandextank/stepper/util.py
|
def solve_quadratic(a, b, c):
'''
>>> solve_quadratic(1.0, 2.0, 1.0)
(-1.0, -1.0)
'''
discRoot = math.sqrt((b * b) - 4 * a * c)
root1 = (-b - discRoot) / (2 * a)
root2 = (-b + discRoot) / (2 * a)
return (root1, root2)
|
def solve_quadratic(a, b, c):
'''
>>> solve_quadratic(1.0, 2.0, 1.0)
(-1.0, -1.0)
'''
discRoot = math.sqrt((b * b) - 4 * a * c)
root1 = (-b - discRoot) / (2 * a)
root2 = (-b + discRoot) / (2 * a)
return (root1, root2)
|
[
">>>",
"solve_quadratic",
"(",
"1",
".",
"0",
"2",
".",
"0",
"1",
".",
"0",
")",
"(",
"-",
"1",
".",
"0",
"-",
"1",
".",
"0",
")"
] |
yandex/yandex-tank
|
python
|
https://github.com/yandex/yandex-tank/blob/d71d63b6ab5de8b8a5ea2b728b6ab9ac0b1ba71b/yandextank/stepper/util.py#L52-L60
|
[
"def",
"solve_quadratic",
"(",
"a",
",",
"b",
",",
"c",
")",
":",
"discRoot",
"=",
"math",
".",
"sqrt",
"(",
"(",
"b",
"*",
"b",
")",
"-",
"4",
"*",
"a",
"*",
"c",
")",
"root1",
"=",
"(",
"-",
"b",
"-",
"discRoot",
")",
"/",
"(",
"2",
"*",
"a",
")",
"root2",
"=",
"(",
"-",
"b",
"+",
"discRoot",
")",
"/",
"(",
"2",
"*",
"a",
")",
"return",
"(",
"root1",
",",
"root2",
")"
] |
d71d63b6ab5de8b8a5ea2b728b6ab9ac0b1ba71b
|
test
|
proper_round
|
rounds float to closest int
:rtype: int
:param n: float
|
yandextank/stepper/util.py
|
def proper_round(n):
"""
rounds float to closest int
:rtype: int
:param n: float
"""
return int(n) + (n / abs(n)) * int(abs(n - int(n)) >= 0.5) if n != 0 else 0
|
def proper_round(n):
"""
rounds float to closest int
:rtype: int
:param n: float
"""
return int(n) + (n / abs(n)) * int(abs(n - int(n)) >= 0.5) if n != 0 else 0
|
[
"rounds",
"float",
"to",
"closest",
"int",
":",
"rtype",
":",
"int",
":",
"param",
"n",
":",
"float"
] |
yandex/yandex-tank
|
python
|
https://github.com/yandex/yandex-tank/blob/d71d63b6ab5de8b8a5ea2b728b6ab9ac0b1ba71b/yandextank/stepper/util.py#L67-L73
|
[
"def",
"proper_round",
"(",
"n",
")",
":",
"return",
"int",
"(",
"n",
")",
"+",
"(",
"n",
"/",
"abs",
"(",
"n",
")",
")",
"*",
"int",
"(",
"abs",
"(",
"n",
"-",
"int",
"(",
"n",
")",
")",
">=",
"0.5",
")",
"if",
"n",
"!=",
"0",
"else",
"0"
] |
d71d63b6ab5de8b8a5ea2b728b6ab9ac0b1ba71b
|
test
|
LocalhostClient.start
|
Start local agent
|
yandextank/plugins/Telegraf/client.py
|
def start(self):
"""Start local agent"""
logger.info('Starting agent on localhost')
args = self.python.split() + [
os.path.join(
self.workdir,
self.AGENT_FILENAME),
'--telegraf',
self.path['TELEGRAF_LOCAL_PATH'],
'--host',
self.host]
if self.kill_old:
args.append(self.kill_old)
self.session = self.popen(args)
self.reader_thread = threading.Thread(target=self.read_buffer)
self.reader_thread.setDaemon(True)
return self.session
|
def start(self):
"""Start local agent"""
logger.info('Starting agent on localhost')
args = self.python.split() + [
os.path.join(
self.workdir,
self.AGENT_FILENAME),
'--telegraf',
self.path['TELEGRAF_LOCAL_PATH'],
'--host',
self.host]
if self.kill_old:
args.append(self.kill_old)
self.session = self.popen(args)
self.reader_thread = threading.Thread(target=self.read_buffer)
self.reader_thread.setDaemon(True)
return self.session
|
[
"Start",
"local",
"agent"
] |
yandex/yandex-tank
|
python
|
https://github.com/yandex/yandex-tank/blob/d71d63b6ab5de8b8a5ea2b728b6ab9ac0b1ba71b/yandextank/plugins/Telegraf/client.py#L105-L121
|
[
"def",
"start",
"(",
"self",
")",
":",
"logger",
".",
"info",
"(",
"'Starting agent on localhost'",
")",
"args",
"=",
"self",
".",
"python",
".",
"split",
"(",
")",
"+",
"[",
"os",
".",
"path",
".",
"join",
"(",
"self",
".",
"workdir",
",",
"self",
".",
"AGENT_FILENAME",
")",
",",
"'--telegraf'",
",",
"self",
".",
"path",
"[",
"'TELEGRAF_LOCAL_PATH'",
"]",
",",
"'--host'",
",",
"self",
".",
"host",
"]",
"if",
"self",
".",
"kill_old",
":",
"args",
".",
"append",
"(",
"self",
".",
"kill_old",
")",
"self",
".",
"session",
"=",
"self",
".",
"popen",
"(",
"args",
")",
"self",
".",
"reader_thread",
"=",
"threading",
".",
"Thread",
"(",
"target",
"=",
"self",
".",
"read_buffer",
")",
"self",
".",
"reader_thread",
".",
"setDaemon",
"(",
"True",
")",
"return",
"self",
".",
"session"
] |
d71d63b6ab5de8b8a5ea2b728b6ab9ac0b1ba71b
|
test
|
LocalhostClient.uninstall
|
Remove agent's files from remote host
|
yandextank/plugins/Telegraf/client.py
|
def uninstall(self):
"""
Remove agent's files from remote host
"""
if self.session:
logger.info('Waiting monitoring data...')
self.session.terminate()
self.session.wait()
self.session = None
log_filename = "agent_{host}.log".format(host="localhost")
data_filename = "agent_{host}.rawdata".format(host="localhost")
try:
logger.info('Saving monitoring artefacts from localhost')
copyfile(self.workdir + "/_agent.log", log_filename)
copyfile(self.workdir + "/monitoring.rawdata", data_filename)
logger.info('Deleting temp directory: %s', self.workdir)
rmtree(self.workdir)
except Exception:
logger.error("Exception while uninstalling agent", exc_info=True)
logger.info("Removing agent from: localhost")
return log_filename, data_filename
|
def uninstall(self):
"""
Remove agent's files from remote host
"""
if self.session:
logger.info('Waiting monitoring data...')
self.session.terminate()
self.session.wait()
self.session = None
log_filename = "agent_{host}.log".format(host="localhost")
data_filename = "agent_{host}.rawdata".format(host="localhost")
try:
logger.info('Saving monitoring artefacts from localhost')
copyfile(self.workdir + "/_agent.log", log_filename)
copyfile(self.workdir + "/monitoring.rawdata", data_filename)
logger.info('Deleting temp directory: %s', self.workdir)
rmtree(self.workdir)
except Exception:
logger.error("Exception while uninstalling agent", exc_info=True)
logger.info("Removing agent from: localhost")
return log_filename, data_filename
|
[
"Remove",
"agent",
"s",
"files",
"from",
"remote",
"host"
] |
yandex/yandex-tank
|
python
|
https://github.com/yandex/yandex-tank/blob/d71d63b6ab5de8b8a5ea2b728b6ab9ac0b1ba71b/yandextank/plugins/Telegraf/client.py#L141-L162
|
[
"def",
"uninstall",
"(",
"self",
")",
":",
"if",
"self",
".",
"session",
":",
"logger",
".",
"info",
"(",
"'Waiting monitoring data...'",
")",
"self",
".",
"session",
".",
"terminate",
"(",
")",
"self",
".",
"session",
".",
"wait",
"(",
")",
"self",
".",
"session",
"=",
"None",
"log_filename",
"=",
"\"agent_{host}.log\"",
".",
"format",
"(",
"host",
"=",
"\"localhost\"",
")",
"data_filename",
"=",
"\"agent_{host}.rawdata\"",
".",
"format",
"(",
"host",
"=",
"\"localhost\"",
")",
"try",
":",
"logger",
".",
"info",
"(",
"'Saving monitoring artefacts from localhost'",
")",
"copyfile",
"(",
"self",
".",
"workdir",
"+",
"\"/_agent.log\"",
",",
"log_filename",
")",
"copyfile",
"(",
"self",
".",
"workdir",
"+",
"\"/monitoring.rawdata\"",
",",
"data_filename",
")",
"logger",
".",
"info",
"(",
"'Deleting temp directory: %s'",
",",
"self",
".",
"workdir",
")",
"rmtree",
"(",
"self",
".",
"workdir",
")",
"except",
"Exception",
":",
"logger",
".",
"error",
"(",
"\"Exception while uninstalling agent\"",
",",
"exc_info",
"=",
"True",
")",
"logger",
".",
"info",
"(",
"\"Removing agent from: localhost\"",
")",
"return",
"log_filename",
",",
"data_filename"
] |
d71d63b6ab5de8b8a5ea2b728b6ab9ac0b1ba71b
|
test
|
SSHClient.install
|
Create folder and copy agent and metrics scripts to remote host
|
yandextank/plugins/Telegraf/client.py
|
def install(self):
"""Create folder and copy agent and metrics scripts to remote host"""
logger.info(
"Installing monitoring agent at %s@%s...",
self.username,
self.host)
# create remote temp dir
cmd = self.python + ' -c "import tempfile; print tempfile.mkdtemp();"'
logger.info("Creating temp dir on %s", self.host)
try:
out, errors, err_code = self.ssh.execute(cmd)
except Exception:
logger.error(
"Failed to install monitoring agent to %s",
self.host,
exc_info=True)
return None, None, None
if errors:
logger.error("[%s] error: '%s'", self.host, errors)
logger.error("Cancelling agent installation on %s", self.host)
return None, None, None
if err_code:
logger.error(
"Failed to create remote dir via SSH at %s@%s, code %s: %s" %
(self.username, self.host, err_code, out.strip()))
return None, None, None
remote_dir = out.strip()
if remote_dir:
self.path['AGENT_REMOTE_FOLDER'] = remote_dir
self.agent_remote_folder = remote_dir
logger.debug(
"Remote dir at %s:%s", self.host, self.path['AGENT_REMOTE_FOLDER'])
# create collector config
agent_config = self.config.create_collector_config(
self.path['AGENT_REMOTE_FOLDER'])
startup_config = self.config.create_startup_config()
customs_script = self.config.create_custom_exec_script()
# trying to detect os version/architecture and get information about telegraf client
# DO NOT DELETE indices in string format below. Python 2.6 does not
# support string formatting without indices
remote_cmd = 'import os; print os.path.isfile("' + self.path[
'TELEGRAF_REMOTE_PATH'] + '")'
cmd = self.python + ' -c \'{cmd}\''.format(cmd=remote_cmd)
remote_telegraf_exists = "False"
try:
out, err, err_code = self.ssh.execute(cmd)
except Exception:
logger.error(
"SSH execute error trying to check telegraf availability on host %s",
self.host,
exc_info=True)
else:
if err:
logger.error("[%s] error: '%s'", self.host, errors)
if out.strip():
remote_telegraf_exists = out.strip()
try:
if remote_telegraf_exists in "True":
logger.debug('Found telegraf client on %s..', self.host)
else:
logger.debug(
'Not found telegraf client on %s, trying to install from tank. Copying..',
self.host)
if os.path.isfile(self.path['TELEGRAF_LOCAL_PATH']):
self.ssh.send_file(
self.path['TELEGRAF_LOCAL_PATH'],
self.path['TELEGRAF_REMOTE_PATH'])
elif os.path.isfile("/usr/bin/telegraf"):
self.ssh.send_file(
'/usr/bin/telegraf', self.path['TELEGRAF_REMOTE_PATH'])
else:
logger.error(
'Telegraf binary not found neither on %s nor on localhost at specified path: %s\n'
'You can download telegraf binaries here: https://github.com/influxdata/telegraf\n'
'or install debian package: `telegraf`', self.host, self.path['TELEGRAF_LOCAL_PATH'])
return None, None, None
self.ssh.send_file(
os.path.join(
self.path['AGENT_LOCAL_FOLDER'],
self.AGENT_FILENAME),
os.path.join(
self.path['AGENT_REMOTE_FOLDER'],
self.AGENT_FILENAME))
self.ssh.send_file(
agent_config,
os.path.join(
self.path['AGENT_REMOTE_FOLDER'],
'agent.cfg'))
self.ssh.send_file(
startup_config,
os.path.join(
self.path['AGENT_REMOTE_FOLDER'],
'agent_startup.cfg'))
self.ssh.send_file(
customs_script,
os.path.join(
self.path['AGENT_REMOTE_FOLDER'],
'agent_customs.sh'))
except Exception:
logger.error(
"Failed to install agent on %s", self.host, exc_info=True)
return None, None, None
return agent_config, startup_config, customs_script
|
def install(self):
"""Create folder and copy agent and metrics scripts to remote host"""
logger.info(
"Installing monitoring agent at %s@%s...",
self.username,
self.host)
# create remote temp dir
cmd = self.python + ' -c "import tempfile; print tempfile.mkdtemp();"'
logger.info("Creating temp dir on %s", self.host)
try:
out, errors, err_code = self.ssh.execute(cmd)
except Exception:
logger.error(
"Failed to install monitoring agent to %s",
self.host,
exc_info=True)
return None, None, None
if errors:
logger.error("[%s] error: '%s'", self.host, errors)
logger.error("Cancelling agent installation on %s", self.host)
return None, None, None
if err_code:
logger.error(
"Failed to create remote dir via SSH at %s@%s, code %s: %s" %
(self.username, self.host, err_code, out.strip()))
return None, None, None
remote_dir = out.strip()
if remote_dir:
self.path['AGENT_REMOTE_FOLDER'] = remote_dir
self.agent_remote_folder = remote_dir
logger.debug(
"Remote dir at %s:%s", self.host, self.path['AGENT_REMOTE_FOLDER'])
# create collector config
agent_config = self.config.create_collector_config(
self.path['AGENT_REMOTE_FOLDER'])
startup_config = self.config.create_startup_config()
customs_script = self.config.create_custom_exec_script()
# trying to detect os version/architecture and get information about telegraf client
# DO NOT DELETE indices in string format below. Python 2.6 does not
# support string formatting without indices
remote_cmd = 'import os; print os.path.isfile("' + self.path[
'TELEGRAF_REMOTE_PATH'] + '")'
cmd = self.python + ' -c \'{cmd}\''.format(cmd=remote_cmd)
remote_telegraf_exists = "False"
try:
out, err, err_code = self.ssh.execute(cmd)
except Exception:
logger.error(
"SSH execute error trying to check telegraf availability on host %s",
self.host,
exc_info=True)
else:
if err:
logger.error("[%s] error: '%s'", self.host, errors)
if out.strip():
remote_telegraf_exists = out.strip()
try:
if remote_telegraf_exists in "True":
logger.debug('Found telegraf client on %s..', self.host)
else:
logger.debug(
'Not found telegraf client on %s, trying to install from tank. Copying..',
self.host)
if os.path.isfile(self.path['TELEGRAF_LOCAL_PATH']):
self.ssh.send_file(
self.path['TELEGRAF_LOCAL_PATH'],
self.path['TELEGRAF_REMOTE_PATH'])
elif os.path.isfile("/usr/bin/telegraf"):
self.ssh.send_file(
'/usr/bin/telegraf', self.path['TELEGRAF_REMOTE_PATH'])
else:
logger.error(
'Telegraf binary not found neither on %s nor on localhost at specified path: %s\n'
'You can download telegraf binaries here: https://github.com/influxdata/telegraf\n'
'or install debian package: `telegraf`', self.host, self.path['TELEGRAF_LOCAL_PATH'])
return None, None, None
self.ssh.send_file(
os.path.join(
self.path['AGENT_LOCAL_FOLDER'],
self.AGENT_FILENAME),
os.path.join(
self.path['AGENT_REMOTE_FOLDER'],
self.AGENT_FILENAME))
self.ssh.send_file(
agent_config,
os.path.join(
self.path['AGENT_REMOTE_FOLDER'],
'agent.cfg'))
self.ssh.send_file(
startup_config,
os.path.join(
self.path['AGENT_REMOTE_FOLDER'],
'agent_startup.cfg'))
self.ssh.send_file(
customs_script,
os.path.join(
self.path['AGENT_REMOTE_FOLDER'],
'agent_customs.sh'))
except Exception:
logger.error(
"Failed to install agent on %s", self.host, exc_info=True)
return None, None, None
return agent_config, startup_config, customs_script
|
[
"Create",
"folder",
"and",
"copy",
"agent",
"and",
"metrics",
"scripts",
"to",
"remote",
"host"
] |
yandex/yandex-tank
|
python
|
https://github.com/yandex/yandex-tank/blob/d71d63b6ab5de8b8a5ea2b728b6ab9ac0b1ba71b/yandextank/plugins/Telegraf/client.py#L198-L309
|
[
"def",
"install",
"(",
"self",
")",
":",
"logger",
".",
"info",
"(",
"\"Installing monitoring agent at %s@%s...\"",
",",
"self",
".",
"username",
",",
"self",
".",
"host",
")",
"# create remote temp dir",
"cmd",
"=",
"self",
".",
"python",
"+",
"' -c \"import tempfile; print tempfile.mkdtemp();\"'",
"logger",
".",
"info",
"(",
"\"Creating temp dir on %s\"",
",",
"self",
".",
"host",
")",
"try",
":",
"out",
",",
"errors",
",",
"err_code",
"=",
"self",
".",
"ssh",
".",
"execute",
"(",
"cmd",
")",
"except",
"Exception",
":",
"logger",
".",
"error",
"(",
"\"Failed to install monitoring agent to %s\"",
",",
"self",
".",
"host",
",",
"exc_info",
"=",
"True",
")",
"return",
"None",
",",
"None",
",",
"None",
"if",
"errors",
":",
"logger",
".",
"error",
"(",
"\"[%s] error: '%s'\"",
",",
"self",
".",
"host",
",",
"errors",
")",
"logger",
".",
"error",
"(",
"\"Cancelling agent installation on %s\"",
",",
"self",
".",
"host",
")",
"return",
"None",
",",
"None",
",",
"None",
"if",
"err_code",
":",
"logger",
".",
"error",
"(",
"\"Failed to create remote dir via SSH at %s@%s, code %s: %s\"",
"%",
"(",
"self",
".",
"username",
",",
"self",
".",
"host",
",",
"err_code",
",",
"out",
".",
"strip",
"(",
")",
")",
")",
"return",
"None",
",",
"None",
",",
"None",
"remote_dir",
"=",
"out",
".",
"strip",
"(",
")",
"if",
"remote_dir",
":",
"self",
".",
"path",
"[",
"'AGENT_REMOTE_FOLDER'",
"]",
"=",
"remote_dir",
"self",
".",
"agent_remote_folder",
"=",
"remote_dir",
"logger",
".",
"debug",
"(",
"\"Remote dir at %s:%s\"",
",",
"self",
".",
"host",
",",
"self",
".",
"path",
"[",
"'AGENT_REMOTE_FOLDER'",
"]",
")",
"# create collector config",
"agent_config",
"=",
"self",
".",
"config",
".",
"create_collector_config",
"(",
"self",
".",
"path",
"[",
"'AGENT_REMOTE_FOLDER'",
"]",
")",
"startup_config",
"=",
"self",
".",
"config",
".",
"create_startup_config",
"(",
")",
"customs_script",
"=",
"self",
".",
"config",
".",
"create_custom_exec_script",
"(",
")",
"# trying to detect os version/architecture and get information about telegraf client",
"# DO NOT DELETE indices in string format below. Python 2.6 does not",
"# support string formatting without indices",
"remote_cmd",
"=",
"'import os; print os.path.isfile(\"'",
"+",
"self",
".",
"path",
"[",
"'TELEGRAF_REMOTE_PATH'",
"]",
"+",
"'\")'",
"cmd",
"=",
"self",
".",
"python",
"+",
"' -c \\'{cmd}\\''",
".",
"format",
"(",
"cmd",
"=",
"remote_cmd",
")",
"remote_telegraf_exists",
"=",
"\"False\"",
"try",
":",
"out",
",",
"err",
",",
"err_code",
"=",
"self",
".",
"ssh",
".",
"execute",
"(",
"cmd",
")",
"except",
"Exception",
":",
"logger",
".",
"error",
"(",
"\"SSH execute error trying to check telegraf availability on host %s\"",
",",
"self",
".",
"host",
",",
"exc_info",
"=",
"True",
")",
"else",
":",
"if",
"err",
":",
"logger",
".",
"error",
"(",
"\"[%s] error: '%s'\"",
",",
"self",
".",
"host",
",",
"errors",
")",
"if",
"out",
".",
"strip",
"(",
")",
":",
"remote_telegraf_exists",
"=",
"out",
".",
"strip",
"(",
")",
"try",
":",
"if",
"remote_telegraf_exists",
"in",
"\"True\"",
":",
"logger",
".",
"debug",
"(",
"'Found telegraf client on %s..'",
",",
"self",
".",
"host",
")",
"else",
":",
"logger",
".",
"debug",
"(",
"'Not found telegraf client on %s, trying to install from tank. Copying..'",
",",
"self",
".",
"host",
")",
"if",
"os",
".",
"path",
".",
"isfile",
"(",
"self",
".",
"path",
"[",
"'TELEGRAF_LOCAL_PATH'",
"]",
")",
":",
"self",
".",
"ssh",
".",
"send_file",
"(",
"self",
".",
"path",
"[",
"'TELEGRAF_LOCAL_PATH'",
"]",
",",
"self",
".",
"path",
"[",
"'TELEGRAF_REMOTE_PATH'",
"]",
")",
"elif",
"os",
".",
"path",
".",
"isfile",
"(",
"\"/usr/bin/telegraf\"",
")",
":",
"self",
".",
"ssh",
".",
"send_file",
"(",
"'/usr/bin/telegraf'",
",",
"self",
".",
"path",
"[",
"'TELEGRAF_REMOTE_PATH'",
"]",
")",
"else",
":",
"logger",
".",
"error",
"(",
"'Telegraf binary not found neither on %s nor on localhost at specified path: %s\\n'",
"'You can download telegraf binaries here: https://github.com/influxdata/telegraf\\n'",
"'or install debian package: `telegraf`'",
",",
"self",
".",
"host",
",",
"self",
".",
"path",
"[",
"'TELEGRAF_LOCAL_PATH'",
"]",
")",
"return",
"None",
",",
"None",
",",
"None",
"self",
".",
"ssh",
".",
"send_file",
"(",
"os",
".",
"path",
".",
"join",
"(",
"self",
".",
"path",
"[",
"'AGENT_LOCAL_FOLDER'",
"]",
",",
"self",
".",
"AGENT_FILENAME",
")",
",",
"os",
".",
"path",
".",
"join",
"(",
"self",
".",
"path",
"[",
"'AGENT_REMOTE_FOLDER'",
"]",
",",
"self",
".",
"AGENT_FILENAME",
")",
")",
"self",
".",
"ssh",
".",
"send_file",
"(",
"agent_config",
",",
"os",
".",
"path",
".",
"join",
"(",
"self",
".",
"path",
"[",
"'AGENT_REMOTE_FOLDER'",
"]",
",",
"'agent.cfg'",
")",
")",
"self",
".",
"ssh",
".",
"send_file",
"(",
"startup_config",
",",
"os",
".",
"path",
".",
"join",
"(",
"self",
".",
"path",
"[",
"'AGENT_REMOTE_FOLDER'",
"]",
",",
"'agent_startup.cfg'",
")",
")",
"self",
".",
"ssh",
".",
"send_file",
"(",
"customs_script",
",",
"os",
".",
"path",
".",
"join",
"(",
"self",
".",
"path",
"[",
"'AGENT_REMOTE_FOLDER'",
"]",
",",
"'agent_customs.sh'",
")",
")",
"except",
"Exception",
":",
"logger",
".",
"error",
"(",
"\"Failed to install agent on %s\"",
",",
"self",
".",
"host",
",",
"exc_info",
"=",
"True",
")",
"return",
"None",
",",
"None",
",",
"None",
"return",
"agent_config",
",",
"startup_config",
",",
"customs_script"
] |
d71d63b6ab5de8b8a5ea2b728b6ab9ac0b1ba71b
|
test
|
SSHClient.start
|
Start remote agent
|
yandextank/plugins/Telegraf/client.py
|
def start(self):
"""Start remote agent"""
logger.info('Starting agent: %s', self.host)
command = "{python} {agent_path} --telegraf {telegraf_path} --host {host} {kill_old}".format(
python=self.python,
agent_path=os.path.join(
self.path['AGENT_REMOTE_FOLDER'],
self.AGENT_FILENAME),
telegraf_path=self.path['TELEGRAF_REMOTE_PATH'],
host=self.host,
kill_old=self.kill_old)
logger.debug('Command to start agent: %s', command)
self.session = self.ssh.async_session(command)
self.reader_thread = threading.Thread(target=self.read_buffer)
self.reader_thread.setDaemon(True)
return self.session
|
def start(self):
"""Start remote agent"""
logger.info('Starting agent: %s', self.host)
command = "{python} {agent_path} --telegraf {telegraf_path} --host {host} {kill_old}".format(
python=self.python,
agent_path=os.path.join(
self.path['AGENT_REMOTE_FOLDER'],
self.AGENT_FILENAME),
telegraf_path=self.path['TELEGRAF_REMOTE_PATH'],
host=self.host,
kill_old=self.kill_old)
logger.debug('Command to start agent: %s', command)
self.session = self.ssh.async_session(command)
self.reader_thread = threading.Thread(target=self.read_buffer)
self.reader_thread.setDaemon(True)
return self.session
|
[
"Start",
"remote",
"agent"
] |
yandex/yandex-tank
|
python
|
https://github.com/yandex/yandex-tank/blob/d71d63b6ab5de8b8a5ea2b728b6ab9ac0b1ba71b/yandextank/plugins/Telegraf/client.py#L311-L326
|
[
"def",
"start",
"(",
"self",
")",
":",
"logger",
".",
"info",
"(",
"'Starting agent: %s'",
",",
"self",
".",
"host",
")",
"command",
"=",
"\"{python} {agent_path} --telegraf {telegraf_path} --host {host} {kill_old}\"",
".",
"format",
"(",
"python",
"=",
"self",
".",
"python",
",",
"agent_path",
"=",
"os",
".",
"path",
".",
"join",
"(",
"self",
".",
"path",
"[",
"'AGENT_REMOTE_FOLDER'",
"]",
",",
"self",
".",
"AGENT_FILENAME",
")",
",",
"telegraf_path",
"=",
"self",
".",
"path",
"[",
"'TELEGRAF_REMOTE_PATH'",
"]",
",",
"host",
"=",
"self",
".",
"host",
",",
"kill_old",
"=",
"self",
".",
"kill_old",
")",
"logger",
".",
"debug",
"(",
"'Command to start agent: %s'",
",",
"command",
")",
"self",
".",
"session",
"=",
"self",
".",
"ssh",
".",
"async_session",
"(",
"command",
")",
"self",
".",
"reader_thread",
"=",
"threading",
".",
"Thread",
"(",
"target",
"=",
"self",
".",
"read_buffer",
")",
"self",
".",
"reader_thread",
".",
"setDaemon",
"(",
"True",
")",
"return",
"self",
".",
"session"
] |
d71d63b6ab5de8b8a5ea2b728b6ab9ac0b1ba71b
|
test
|
SSHClient.uninstall
|
Remove agent's files from remote host
|
yandextank/plugins/Telegraf/client.py
|
def uninstall(self):
"""
Remove agent's files from remote host
"""
log_filename = "agent_{host}.log".format(host=self.host)
data_filename = "agent_{host}.rawdata".format(host=self.host)
try:
if self.session:
self.session.send("stop\n")
self.session.close()
self.session = None
except BaseException:
logger.warning(
'Unable to correctly stop monitoring agent - session is broken. Pay attention to agent log (%s).',
log_filename,
exc_info=True)
else:
try:
self.ssh.get_file(
os.path.join(
self.path['AGENT_REMOTE_FOLDER'],
"_agent.log"),
log_filename)
self.ssh.get_file(
os.path.join(
self.path['AGENT_REMOTE_FOLDER'],
"monitoring.rawdata"),
data_filename)
self.ssh.rm_r(self.path['AGENT_REMOTE_FOLDER'])
except Exception:
logger.error("Unable to get agent artefacts", exc_info=True)
self._kill_agent()
return log_filename, data_filename
|
def uninstall(self):
"""
Remove agent's files from remote host
"""
log_filename = "agent_{host}.log".format(host=self.host)
data_filename = "agent_{host}.rawdata".format(host=self.host)
try:
if self.session:
self.session.send("stop\n")
self.session.close()
self.session = None
except BaseException:
logger.warning(
'Unable to correctly stop monitoring agent - session is broken. Pay attention to agent log (%s).',
log_filename,
exc_info=True)
else:
try:
self.ssh.get_file(
os.path.join(
self.path['AGENT_REMOTE_FOLDER'],
"_agent.log"),
log_filename)
self.ssh.get_file(
os.path.join(
self.path['AGENT_REMOTE_FOLDER'],
"monitoring.rawdata"),
data_filename)
self.ssh.rm_r(self.path['AGENT_REMOTE_FOLDER'])
except Exception:
logger.error("Unable to get agent artefacts", exc_info=True)
self._kill_agent()
return log_filename, data_filename
|
[
"Remove",
"agent",
"s",
"files",
"from",
"remote",
"host"
] |
yandex/yandex-tank
|
python
|
https://github.com/yandex/yandex-tank/blob/d71d63b6ab5de8b8a5ea2b728b6ab9ac0b1ba71b/yandextank/plugins/Telegraf/client.py#L342-L375
|
[
"def",
"uninstall",
"(",
"self",
")",
":",
"log_filename",
"=",
"\"agent_{host}.log\"",
".",
"format",
"(",
"host",
"=",
"self",
".",
"host",
")",
"data_filename",
"=",
"\"agent_{host}.rawdata\"",
".",
"format",
"(",
"host",
"=",
"self",
".",
"host",
")",
"try",
":",
"if",
"self",
".",
"session",
":",
"self",
".",
"session",
".",
"send",
"(",
"\"stop\\n\"",
")",
"self",
".",
"session",
".",
"close",
"(",
")",
"self",
".",
"session",
"=",
"None",
"except",
"BaseException",
":",
"logger",
".",
"warning",
"(",
"'Unable to correctly stop monitoring agent - session is broken. Pay attention to agent log (%s).'",
",",
"log_filename",
",",
"exc_info",
"=",
"True",
")",
"else",
":",
"try",
":",
"self",
".",
"ssh",
".",
"get_file",
"(",
"os",
".",
"path",
".",
"join",
"(",
"self",
".",
"path",
"[",
"'AGENT_REMOTE_FOLDER'",
"]",
",",
"\"_agent.log\"",
")",
",",
"log_filename",
")",
"self",
".",
"ssh",
".",
"get_file",
"(",
"os",
".",
"path",
".",
"join",
"(",
"self",
".",
"path",
"[",
"'AGENT_REMOTE_FOLDER'",
"]",
",",
"\"monitoring.rawdata\"",
")",
",",
"data_filename",
")",
"self",
".",
"ssh",
".",
"rm_r",
"(",
"self",
".",
"path",
"[",
"'AGENT_REMOTE_FOLDER'",
"]",
")",
"except",
"Exception",
":",
"logger",
".",
"error",
"(",
"\"Unable to get agent artefacts\"",
",",
"exc_info",
"=",
"True",
")",
"self",
".",
"_kill_agent",
"(",
")",
"return",
"log_filename",
",",
"data_filename"
] |
d71d63b6ab5de8b8a5ea2b728b6ab9ac0b1ba71b
|
test
|
parse_sections
|
:type cfg_ini: ConfigParser
|
yandextank/config_converter/converter.py
|
def parse_sections(cfg_ini):
"""
:type cfg_ini: ConfigParser
"""
return [Section(section.lower(),
guess_plugin(section.lower()),
without_defaults(cfg_ini, section))
for section in cfg_ini.sections()
if not re.match(CORE_SECTION_PATTERN, section.lower()) and section.lower() not in DEPRECATED_SECTIONS]
|
def parse_sections(cfg_ini):
"""
:type cfg_ini: ConfigParser
"""
return [Section(section.lower(),
guess_plugin(section.lower()),
without_defaults(cfg_ini, section))
for section in cfg_ini.sections()
if not re.match(CORE_SECTION_PATTERN, section.lower()) and section.lower() not in DEPRECATED_SECTIONS]
|
[
":",
"type",
"cfg_ini",
":",
"ConfigParser"
] |
yandex/yandex-tank
|
python
|
https://github.com/yandex/yandex-tank/blob/d71d63b6ab5de8b8a5ea2b728b6ab9ac0b1ba71b/yandextank/config_converter/converter.py#L381-L389
|
[
"def",
"parse_sections",
"(",
"cfg_ini",
")",
":",
"return",
"[",
"Section",
"(",
"section",
".",
"lower",
"(",
")",
",",
"guess_plugin",
"(",
"section",
".",
"lower",
"(",
")",
")",
",",
"without_defaults",
"(",
"cfg_ini",
",",
"section",
")",
")",
"for",
"section",
"in",
"cfg_ini",
".",
"sections",
"(",
")",
"if",
"not",
"re",
".",
"match",
"(",
"CORE_SECTION_PATTERN",
",",
"section",
".",
"lower",
"(",
")",
")",
"and",
"section",
".",
"lower",
"(",
")",
"not",
"in",
"DEPRECATED_SECTIONS",
"]"
] |
d71d63b6ab5de8b8a5ea2b728b6ab9ac0b1ba71b
|
test
|
combine_sections
|
:type sections: list of Section
:rtype: list of Section
|
yandextank/config_converter/converter.py
|
def combine_sections(sections):
"""
:type sections: list of Section
:rtype: list of Section
"""
PLUGINS_TO_COMBINE = {
'Phantom': ('phantom', 'multi', True),
'Bfg': ('bfg', 'gun_config', False)
}
plugins = {}
ready_sections = []
for section in sections:
if section.plugin in PLUGINS_TO_COMBINE.keys():
try:
plugins[section.plugin].append(section)
except KeyError:
plugins[section.plugin] = [section]
else:
ready_sections.append(section)
for plugin_name, _sections in plugins.items():
if isinstance(_sections, list):
parent_name, child_name, is_list = PLUGINS_TO_COMBINE[plugin_name]
ready_sections.append(Section.from_multiple(_sections, parent_name, child_name, is_list))
return ready_sections
|
def combine_sections(sections):
"""
:type sections: list of Section
:rtype: list of Section
"""
PLUGINS_TO_COMBINE = {
'Phantom': ('phantom', 'multi', True),
'Bfg': ('bfg', 'gun_config', False)
}
plugins = {}
ready_sections = []
for section in sections:
if section.plugin in PLUGINS_TO_COMBINE.keys():
try:
plugins[section.plugin].append(section)
except KeyError:
plugins[section.plugin] = [section]
else:
ready_sections.append(section)
for plugin_name, _sections in plugins.items():
if isinstance(_sections, list):
parent_name, child_name, is_list = PLUGINS_TO_COMBINE[plugin_name]
ready_sections.append(Section.from_multiple(_sections, parent_name, child_name, is_list))
return ready_sections
|
[
":",
"type",
"sections",
":",
"list",
"of",
"Section",
":",
"rtype",
":",
"list",
"of",
"Section"
] |
yandex/yandex-tank
|
python
|
https://github.com/yandex/yandex-tank/blob/d71d63b6ab5de8b8a5ea2b728b6ab9ac0b1ba71b/yandextank/config_converter/converter.py#L467-L491
|
[
"def",
"combine_sections",
"(",
"sections",
")",
":",
"PLUGINS_TO_COMBINE",
"=",
"{",
"'Phantom'",
":",
"(",
"'phantom'",
",",
"'multi'",
",",
"True",
")",
",",
"'Bfg'",
":",
"(",
"'bfg'",
",",
"'gun_config'",
",",
"False",
")",
"}",
"plugins",
"=",
"{",
"}",
"ready_sections",
"=",
"[",
"]",
"for",
"section",
"in",
"sections",
":",
"if",
"section",
".",
"plugin",
"in",
"PLUGINS_TO_COMBINE",
".",
"keys",
"(",
")",
":",
"try",
":",
"plugins",
"[",
"section",
".",
"plugin",
"]",
".",
"append",
"(",
"section",
")",
"except",
"KeyError",
":",
"plugins",
"[",
"section",
".",
"plugin",
"]",
"=",
"[",
"section",
"]",
"else",
":",
"ready_sections",
".",
"append",
"(",
"section",
")",
"for",
"plugin_name",
",",
"_sections",
"in",
"plugins",
".",
"items",
"(",
")",
":",
"if",
"isinstance",
"(",
"_sections",
",",
"list",
")",
":",
"parent_name",
",",
"child_name",
",",
"is_list",
"=",
"PLUGINS_TO_COMBINE",
"[",
"plugin_name",
"]",
"ready_sections",
".",
"append",
"(",
"Section",
".",
"from_multiple",
"(",
"_sections",
",",
"parent_name",
",",
"child_name",
",",
"is_list",
")",
")",
"return",
"ready_sections"
] |
d71d63b6ab5de8b8a5ea2b728b6ab9ac0b1ba71b
|
test
|
Option.converted
|
:rtype: {str: object}
|
yandextank/config_converter/converter.py
|
def converted(self):
"""
:rtype: {str: object}
"""
if self._converted is None:
self._converted = self.converter(self.name, self.value)
return self._converted
|
def converted(self):
"""
:rtype: {str: object}
"""
if self._converted is None:
self._converted = self.converter(self.name, self.value)
return self._converted
|
[
":",
"rtype",
":",
"{",
"str",
":",
"object",
"}"
] |
yandex/yandex-tank
|
python
|
https://github.com/yandex/yandex-tank/blob/d71d63b6ab5de8b8a5ea2b728b6ab9ac0b1ba71b/yandextank/config_converter/converter.py#L252-L258
|
[
"def",
"converted",
"(",
"self",
")",
":",
"if",
"self",
".",
"_converted",
"is",
"None",
":",
"self",
".",
"_converted",
"=",
"self",
".",
"converter",
"(",
"self",
".",
"name",
",",
"self",
".",
"value",
")",
"return",
"self",
".",
"_converted"
] |
d71d63b6ab5de8b8a5ea2b728b6ab9ac0b1ba71b
|
test
|
Option.as_tuple
|
:rtype: (str, object)
|
yandextank/config_converter/converter.py
|
def as_tuple(self):
"""
:rtype: (str, object)
"""
if self._as_tuple is None:
self._as_tuple = self.converted.items()[0]
return self._as_tuple
|
def as_tuple(self):
"""
:rtype: (str, object)
"""
if self._as_tuple is None:
self._as_tuple = self.converted.items()[0]
return self._as_tuple
|
[
":",
"rtype",
":",
"(",
"str",
"object",
")"
] |
yandex/yandex-tank
|
python
|
https://github.com/yandex/yandex-tank/blob/d71d63b6ab5de8b8a5ea2b728b6ab9ac0b1ba71b/yandextank/config_converter/converter.py#L261-L267
|
[
"def",
"as_tuple",
"(",
"self",
")",
":",
"if",
"self",
".",
"_as_tuple",
"is",
"None",
":",
"self",
".",
"_as_tuple",
"=",
"self",
".",
"converted",
".",
"items",
"(",
")",
"[",
"0",
"]",
"return",
"self",
".",
"_as_tuple"
] |
d71d63b6ab5de8b8a5ea2b728b6ab9ac0b1ba71b
|
test
|
Option.converter
|
:rtype: callable
|
yandextank/config_converter/converter.py
|
def converter(self):
"""
:rtype: callable
"""
if self._converter is None:
try:
self._converter = self.SPECIAL_CONVERTERS[self.plugin][self.name]
except KeyError:
try:
self._converter = self._get_scheme_converter()
except UnknownOption:
self._converter = self.CONVERTERS_FOR_UNKNOWN.get(self.plugin, self.dummy_converter)
return self._converter
|
def converter(self):
"""
:rtype: callable
"""
if self._converter is None:
try:
self._converter = self.SPECIAL_CONVERTERS[self.plugin][self.name]
except KeyError:
try:
self._converter = self._get_scheme_converter()
except UnknownOption:
self._converter = self.CONVERTERS_FOR_UNKNOWN.get(self.plugin, self.dummy_converter)
return self._converter
|
[
":",
"rtype",
":",
"callable"
] |
yandex/yandex-tank
|
python
|
https://github.com/yandex/yandex-tank/blob/d71d63b6ab5de8b8a5ea2b728b6ab9ac0b1ba71b/yandextank/config_converter/converter.py#L270-L282
|
[
"def",
"converter",
"(",
"self",
")",
":",
"if",
"self",
".",
"_converter",
"is",
"None",
":",
"try",
":",
"self",
".",
"_converter",
"=",
"self",
".",
"SPECIAL_CONVERTERS",
"[",
"self",
".",
"plugin",
"]",
"[",
"self",
".",
"name",
"]",
"except",
"KeyError",
":",
"try",
":",
"self",
".",
"_converter",
"=",
"self",
".",
"_get_scheme_converter",
"(",
")",
"except",
"UnknownOption",
":",
"self",
".",
"_converter",
"=",
"self",
".",
"CONVERTERS_FOR_UNKNOWN",
".",
"get",
"(",
"self",
".",
"plugin",
",",
"self",
".",
"dummy_converter",
")",
"return",
"self",
".",
"_converter"
] |
d71d63b6ab5de8b8a5ea2b728b6ab9ac0b1ba71b
|
test
|
Section.from_multiple
|
:type parent_name: str
:type sections: list of Section
|
yandextank/config_converter/converter.py
|
def from_multiple(cls, sections, parent_name=None, child_name=None, is_list=True):
"""
:type parent_name: str
:type sections: list of Section
"""
if len(sections) == 1:
return sections[0]
if parent_name:
master_section = filter(lambda section: section.name == parent_name, sections)[0]
rest = filter(lambda section: section.name != parent_name, sections)
else:
master_section = sections[0]
parent_name = master_section.name
rest = sections[1:]
child = {'multi': [section.get_cfg_dict(with_meta=False) for section in rest]} if is_list \
else {child_name: cls._select_one(master_section, rest).get_cfg_dict(with_meta=False)}
master_section.merged_options.update(child)
return master_section
|
def from_multiple(cls, sections, parent_name=None, child_name=None, is_list=True):
"""
:type parent_name: str
:type sections: list of Section
"""
if len(sections) == 1:
return sections[0]
if parent_name:
master_section = filter(lambda section: section.name == parent_name, sections)[0]
rest = filter(lambda section: section.name != parent_name, sections)
else:
master_section = sections[0]
parent_name = master_section.name
rest = sections[1:]
child = {'multi': [section.get_cfg_dict(with_meta=False) for section in rest]} if is_list \
else {child_name: cls._select_one(master_section, rest).get_cfg_dict(with_meta=False)}
master_section.merged_options.update(child)
return master_section
|
[
":",
"type",
"parent_name",
":",
"str",
":",
"type",
"sections",
":",
"list",
"of",
"Section"
] |
yandex/yandex-tank
|
python
|
https://github.com/yandex/yandex-tank/blob/d71d63b6ab5de8b8a5ea2b728b6ab9ac0b1ba71b/yandextank/config_converter/converter.py#L333-L350
|
[
"def",
"from_multiple",
"(",
"cls",
",",
"sections",
",",
"parent_name",
"=",
"None",
",",
"child_name",
"=",
"None",
",",
"is_list",
"=",
"True",
")",
":",
"if",
"len",
"(",
"sections",
")",
"==",
"1",
":",
"return",
"sections",
"[",
"0",
"]",
"if",
"parent_name",
":",
"master_section",
"=",
"filter",
"(",
"lambda",
"section",
":",
"section",
".",
"name",
"==",
"parent_name",
",",
"sections",
")",
"[",
"0",
"]",
"rest",
"=",
"filter",
"(",
"lambda",
"section",
":",
"section",
".",
"name",
"!=",
"parent_name",
",",
"sections",
")",
"else",
":",
"master_section",
"=",
"sections",
"[",
"0",
"]",
"parent_name",
"=",
"master_section",
".",
"name",
"rest",
"=",
"sections",
"[",
"1",
":",
"]",
"child",
"=",
"{",
"'multi'",
":",
"[",
"section",
".",
"get_cfg_dict",
"(",
"with_meta",
"=",
"False",
")",
"for",
"section",
"in",
"rest",
"]",
"}",
"if",
"is_list",
"else",
"{",
"child_name",
":",
"cls",
".",
"_select_one",
"(",
"master_section",
",",
"rest",
")",
".",
"get_cfg_dict",
"(",
"with_meta",
"=",
"False",
")",
"}",
"master_section",
".",
"merged_options",
".",
"update",
"(",
"child",
")",
"return",
"master_section"
] |
d71d63b6ab5de8b8a5ea2b728b6ab9ac0b1ba71b
|
test
|
RSTRenderer.title
|
Underlines content with '='. New lines and tabs will be replaced
:param str content:
:param str new_line_replacement:
:param str tab_replacement:
:return: unicode
|
yandextank/validator/docs_gen.py
|
def title(content, new_line_replacement=' ', tab_replacement=' '):
"""
Underlines content with '='. New lines and tabs will be replaced
:param str content:
:param str new_line_replacement:
:param str tab_replacement:
:return: unicode
"""
prepared_content = content.strip().replace('\n', new_line_replacement).replace('\t', tab_replacement)
return u'{}\n{}'.format(prepared_content, '=' * len(prepared_content))
|
def title(content, new_line_replacement=' ', tab_replacement=' '):
"""
Underlines content with '='. New lines and tabs will be replaced
:param str content:
:param str new_line_replacement:
:param str tab_replacement:
:return: unicode
"""
prepared_content = content.strip().replace('\n', new_line_replacement).replace('\t', tab_replacement)
return u'{}\n{}'.format(prepared_content, '=' * len(prepared_content))
|
[
"Underlines",
"content",
"with",
"=",
".",
"New",
"lines",
"and",
"tabs",
"will",
"be",
"replaced",
":",
"param",
"str",
"content",
":",
":",
"param",
"str",
"new_line_replacement",
":",
":",
"param",
"str",
"tab_replacement",
":",
":",
"return",
":",
"unicode"
] |
yandex/yandex-tank
|
python
|
https://github.com/yandex/yandex-tank/blob/d71d63b6ab5de8b8a5ea2b728b6ab9ac0b1ba71b/yandextank/validator/docs_gen.py#L113-L122
|
[
"def",
"title",
"(",
"content",
",",
"new_line_replacement",
"=",
"' '",
",",
"tab_replacement",
"=",
"' '",
")",
":",
"prepared_content",
"=",
"content",
".",
"strip",
"(",
")",
".",
"replace",
"(",
"'\\n'",
",",
"new_line_replacement",
")",
".",
"replace",
"(",
"'\\t'",
",",
"tab_replacement",
")",
"return",
"u'{}\\n{}'",
".",
"format",
"(",
"prepared_content",
",",
"'='",
"*",
"len",
"(",
"prepared_content",
")",
")"
] |
d71d63b6ab5de8b8a5ea2b728b6ab9ac0b1ba71b
|
test
|
Plugin.__discover_jmeter_udp_port
|
Searching for line in jmeter.log such as
Waiting for possible shutdown message on port 4445
|
yandextank/plugins/JMeter/plugin.py
|
def __discover_jmeter_udp_port(self):
"""Searching for line in jmeter.log such as
Waiting for possible shutdown message on port 4445
"""
r = re.compile(self.DISCOVER_PORT_PATTERN)
with open(self.process_stderr.name, 'r') as f:
cnt = 0
while self.process.pid and cnt < 10:
line = f.readline()
m = r.match(line)
if m is None:
cnt += 1
time.sleep(1)
else:
port = int(m.group('port'))
return port
else:
logger.warning('JMeter UDP port wasn\'t discovered')
return None
|
def __discover_jmeter_udp_port(self):
"""Searching for line in jmeter.log such as
Waiting for possible shutdown message on port 4445
"""
r = re.compile(self.DISCOVER_PORT_PATTERN)
with open(self.process_stderr.name, 'r') as f:
cnt = 0
while self.process.pid and cnt < 10:
line = f.readline()
m = r.match(line)
if m is None:
cnt += 1
time.sleep(1)
else:
port = int(m.group('port'))
return port
else:
logger.warning('JMeter UDP port wasn\'t discovered')
return None
|
[
"Searching",
"for",
"line",
"in",
"jmeter",
".",
"log",
"such",
"as",
"Waiting",
"for",
"possible",
"shutdown",
"message",
"on",
"port",
"4445"
] |
yandex/yandex-tank
|
python
|
https://github.com/yandex/yandex-tank/blob/d71d63b6ab5de8b8a5ea2b728b6ab9ac0b1ba71b/yandextank/plugins/JMeter/plugin.py#L169-L187
|
[
"def",
"__discover_jmeter_udp_port",
"(",
"self",
")",
":",
"r",
"=",
"re",
".",
"compile",
"(",
"self",
".",
"DISCOVER_PORT_PATTERN",
")",
"with",
"open",
"(",
"self",
".",
"process_stderr",
".",
"name",
",",
"'r'",
")",
"as",
"f",
":",
"cnt",
"=",
"0",
"while",
"self",
".",
"process",
".",
"pid",
"and",
"cnt",
"<",
"10",
":",
"line",
"=",
"f",
".",
"readline",
"(",
")",
"m",
"=",
"r",
".",
"match",
"(",
"line",
")",
"if",
"m",
"is",
"None",
":",
"cnt",
"+=",
"1",
"time",
".",
"sleep",
"(",
"1",
")",
"else",
":",
"port",
"=",
"int",
"(",
"m",
".",
"group",
"(",
"'port'",
")",
")",
"return",
"port",
"else",
":",
"logger",
".",
"warning",
"(",
"'JMeter UDP port wasn\\'t discovered'",
")",
"return",
"None"
] |
d71d63b6ab5de8b8a5ea2b728b6ab9ac0b1ba71b
|
test
|
Plugin.__add_jmeter_components
|
Genius idea by Alexey Lavrenyuk
|
yandextank/plugins/JMeter/plugin.py
|
def __add_jmeter_components(self, jmx, jtl, variables):
""" Genius idea by Alexey Lavrenyuk """
logger.debug("Original JMX: %s", os.path.realpath(jmx))
with open(jmx, 'r') as src_jmx:
source_lines = src_jmx.readlines()
try:
# In new Jmeter version (3.2 as example) WorkBench's plugin checkbox enabled by default
# It totally crashes Yandex tank injection and raises XML Parse Exception
closing = source_lines.pop(-1)
if "WorkBenchGui" in source_lines[-5]:
logger.info("WorkBench checkbox enabled...bypassing")
last_string_count = 6
else:
last_string_count = 2
while last_string_count > 0:
closing = source_lines.pop(-1) + closing
last_string_count -= 1
logger.debug("Closing statement: %s", closing)
except Exception as exc:
raise RuntimeError("Failed to find the end of JMX XML: %s" % exc)
udv_tpl = resource_string(__name__, 'config/jmeter_var_template.xml')
udv_set = []
for var_name, var_value in variables.iteritems():
udv_set.append(udv_tpl % (var_name, var_name, var_value))
udv = "\n".join(udv_set)
if self.jmeter_ver >= 2.13:
save_connect = '<connectTime>true</connectTime>'
else:
save_connect = ''
if self.ext_log in ['errors', 'all']:
level_map = {'errors': 'true', 'all': 'false'}
tpl_resource = 'jmeter_writer_ext.xml'
tpl_args = {
'jtl': self.jtl_file,
'udv': udv,
'ext_log': self.ext_log_file,
'ext_level': level_map[self.ext_log],
'save_connect': save_connect
}
else:
tpl_resource = 'jmeter_writer.xml'
tpl_args = {
'jtl': self.jtl_file,
'udv': udv,
'save_connect': save_connect
}
tpl = resource_string(__name__, 'config/' + tpl_resource)
try:
new_jmx = self.core.mkstemp(
'.jmx', 'modified_', os.path.dirname(os.path.realpath(jmx)))
except OSError as exc:
logger.debug("Can't create modified jmx near original: %s", exc)
new_jmx = self.core.mkstemp('.jmx', 'modified_')
logger.debug("Modified JMX: %s", new_jmx)
with open(new_jmx, "wb") as fh:
fh.write(''.join(source_lines))
fh.write(tpl % tpl_args)
fh.write(closing)
return new_jmx
|
def __add_jmeter_components(self, jmx, jtl, variables):
""" Genius idea by Alexey Lavrenyuk """
logger.debug("Original JMX: %s", os.path.realpath(jmx))
with open(jmx, 'r') as src_jmx:
source_lines = src_jmx.readlines()
try:
# In new Jmeter version (3.2 as example) WorkBench's plugin checkbox enabled by default
# It totally crashes Yandex tank injection and raises XML Parse Exception
closing = source_lines.pop(-1)
if "WorkBenchGui" in source_lines[-5]:
logger.info("WorkBench checkbox enabled...bypassing")
last_string_count = 6
else:
last_string_count = 2
while last_string_count > 0:
closing = source_lines.pop(-1) + closing
last_string_count -= 1
logger.debug("Closing statement: %s", closing)
except Exception as exc:
raise RuntimeError("Failed to find the end of JMX XML: %s" % exc)
udv_tpl = resource_string(__name__, 'config/jmeter_var_template.xml')
udv_set = []
for var_name, var_value in variables.iteritems():
udv_set.append(udv_tpl % (var_name, var_name, var_value))
udv = "\n".join(udv_set)
if self.jmeter_ver >= 2.13:
save_connect = '<connectTime>true</connectTime>'
else:
save_connect = ''
if self.ext_log in ['errors', 'all']:
level_map = {'errors': 'true', 'all': 'false'}
tpl_resource = 'jmeter_writer_ext.xml'
tpl_args = {
'jtl': self.jtl_file,
'udv': udv,
'ext_log': self.ext_log_file,
'ext_level': level_map[self.ext_log],
'save_connect': save_connect
}
else:
tpl_resource = 'jmeter_writer.xml'
tpl_args = {
'jtl': self.jtl_file,
'udv': udv,
'save_connect': save_connect
}
tpl = resource_string(__name__, 'config/' + tpl_resource)
try:
new_jmx = self.core.mkstemp(
'.jmx', 'modified_', os.path.dirname(os.path.realpath(jmx)))
except OSError as exc:
logger.debug("Can't create modified jmx near original: %s", exc)
new_jmx = self.core.mkstemp('.jmx', 'modified_')
logger.debug("Modified JMX: %s", new_jmx)
with open(new_jmx, "wb") as fh:
fh.write(''.join(source_lines))
fh.write(tpl % tpl_args)
fh.write(closing)
return new_jmx
|
[
"Genius",
"idea",
"by",
"Alexey",
"Lavrenyuk"
] |
yandex/yandex-tank
|
python
|
https://github.com/yandex/yandex-tank/blob/d71d63b6ab5de8b8a5ea2b728b6ab9ac0b1ba71b/yandextank/plugins/JMeter/plugin.py#L199-L263
|
[
"def",
"__add_jmeter_components",
"(",
"self",
",",
"jmx",
",",
"jtl",
",",
"variables",
")",
":",
"logger",
".",
"debug",
"(",
"\"Original JMX: %s\"",
",",
"os",
".",
"path",
".",
"realpath",
"(",
"jmx",
")",
")",
"with",
"open",
"(",
"jmx",
",",
"'r'",
")",
"as",
"src_jmx",
":",
"source_lines",
"=",
"src_jmx",
".",
"readlines",
"(",
")",
"try",
":",
"# In new Jmeter version (3.2 as example) WorkBench's plugin checkbox enabled by default",
"# It totally crashes Yandex tank injection and raises XML Parse Exception",
"closing",
"=",
"source_lines",
".",
"pop",
"(",
"-",
"1",
")",
"if",
"\"WorkBenchGui\"",
"in",
"source_lines",
"[",
"-",
"5",
"]",
":",
"logger",
".",
"info",
"(",
"\"WorkBench checkbox enabled...bypassing\"",
")",
"last_string_count",
"=",
"6",
"else",
":",
"last_string_count",
"=",
"2",
"while",
"last_string_count",
">",
"0",
":",
"closing",
"=",
"source_lines",
".",
"pop",
"(",
"-",
"1",
")",
"+",
"closing",
"last_string_count",
"-=",
"1",
"logger",
".",
"debug",
"(",
"\"Closing statement: %s\"",
",",
"closing",
")",
"except",
"Exception",
"as",
"exc",
":",
"raise",
"RuntimeError",
"(",
"\"Failed to find the end of JMX XML: %s\"",
"%",
"exc",
")",
"udv_tpl",
"=",
"resource_string",
"(",
"__name__",
",",
"'config/jmeter_var_template.xml'",
")",
"udv_set",
"=",
"[",
"]",
"for",
"var_name",
",",
"var_value",
"in",
"variables",
".",
"iteritems",
"(",
")",
":",
"udv_set",
".",
"append",
"(",
"udv_tpl",
"%",
"(",
"var_name",
",",
"var_name",
",",
"var_value",
")",
")",
"udv",
"=",
"\"\\n\"",
".",
"join",
"(",
"udv_set",
")",
"if",
"self",
".",
"jmeter_ver",
">=",
"2.13",
":",
"save_connect",
"=",
"'<connectTime>true</connectTime>'",
"else",
":",
"save_connect",
"=",
"''",
"if",
"self",
".",
"ext_log",
"in",
"[",
"'errors'",
",",
"'all'",
"]",
":",
"level_map",
"=",
"{",
"'errors'",
":",
"'true'",
",",
"'all'",
":",
"'false'",
"}",
"tpl_resource",
"=",
"'jmeter_writer_ext.xml'",
"tpl_args",
"=",
"{",
"'jtl'",
":",
"self",
".",
"jtl_file",
",",
"'udv'",
":",
"udv",
",",
"'ext_log'",
":",
"self",
".",
"ext_log_file",
",",
"'ext_level'",
":",
"level_map",
"[",
"self",
".",
"ext_log",
"]",
",",
"'save_connect'",
":",
"save_connect",
"}",
"else",
":",
"tpl_resource",
"=",
"'jmeter_writer.xml'",
"tpl_args",
"=",
"{",
"'jtl'",
":",
"self",
".",
"jtl_file",
",",
"'udv'",
":",
"udv",
",",
"'save_connect'",
":",
"save_connect",
"}",
"tpl",
"=",
"resource_string",
"(",
"__name__",
",",
"'config/'",
"+",
"tpl_resource",
")",
"try",
":",
"new_jmx",
"=",
"self",
".",
"core",
".",
"mkstemp",
"(",
"'.jmx'",
",",
"'modified_'",
",",
"os",
".",
"path",
".",
"dirname",
"(",
"os",
".",
"path",
".",
"realpath",
"(",
"jmx",
")",
")",
")",
"except",
"OSError",
"as",
"exc",
":",
"logger",
".",
"debug",
"(",
"\"Can't create modified jmx near original: %s\"",
",",
"exc",
")",
"new_jmx",
"=",
"self",
".",
"core",
".",
"mkstemp",
"(",
"'.jmx'",
",",
"'modified_'",
")",
"logger",
".",
"debug",
"(",
"\"Modified JMX: %s\"",
",",
"new_jmx",
")",
"with",
"open",
"(",
"new_jmx",
",",
"\"wb\"",
")",
"as",
"fh",
":",
"fh",
".",
"write",
"(",
"''",
".",
"join",
"(",
"source_lines",
")",
")",
"fh",
".",
"write",
"(",
"tpl",
"%",
"tpl_args",
")",
"fh",
".",
"write",
"(",
"closing",
")",
"return",
"new_jmx"
] |
d71d63b6ab5de8b8a5ea2b728b6ab9ac0b1ba71b
|
test
|
Plugin.__terminate
|
Gracefull termination of running process
|
yandextank/plugins/ShootExec/plugin.py
|
def __terminate(self):
"""Gracefull termination of running process"""
if self.__stderr_file:
self.__stderr_file.close()
if not self.__process:
return
waitfor = time.time() + _PROCESS_KILL_TIMEOUT
while time.time() < waitfor:
try:
self.__process.terminate()
except EnvironmentError as e:
if e.errno != errno.ESRCH:
_LOGGER.warning("Failed to terminate process '{}': {}".format(self.__cmd, e))
return
time.sleep(0.1)
try:
self.__process.kill()
except EnvironmentError as e:
if e.errno != errno.ESRCH:
_LOGGER.warning("Failed to kill process '{}': {}".format(self.__cmd, e))
return
|
def __terminate(self):
"""Gracefull termination of running process"""
if self.__stderr_file:
self.__stderr_file.close()
if not self.__process:
return
waitfor = time.time() + _PROCESS_KILL_TIMEOUT
while time.time() < waitfor:
try:
self.__process.terminate()
except EnvironmentError as e:
if e.errno != errno.ESRCH:
_LOGGER.warning("Failed to terminate process '{}': {}".format(self.__cmd, e))
return
time.sleep(0.1)
try:
self.__process.kill()
except EnvironmentError as e:
if e.errno != errno.ESRCH:
_LOGGER.warning("Failed to kill process '{}': {}".format(self.__cmd, e))
return
|
[
"Gracefull",
"termination",
"of",
"running",
"process"
] |
yandex/yandex-tank
|
python
|
https://github.com/yandex/yandex-tank/blob/d71d63b6ab5de8b8a5ea2b728b6ab9ac0b1ba71b/yandextank/plugins/ShootExec/plugin.py#L153-L177
|
[
"def",
"__terminate",
"(",
"self",
")",
":",
"if",
"self",
".",
"__stderr_file",
":",
"self",
".",
"__stderr_file",
".",
"close",
"(",
")",
"if",
"not",
"self",
".",
"__process",
":",
"return",
"waitfor",
"=",
"time",
".",
"time",
"(",
")",
"+",
"_PROCESS_KILL_TIMEOUT",
"while",
"time",
".",
"time",
"(",
")",
"<",
"waitfor",
":",
"try",
":",
"self",
".",
"__process",
".",
"terminate",
"(",
")",
"except",
"EnvironmentError",
"as",
"e",
":",
"if",
"e",
".",
"errno",
"!=",
"errno",
".",
"ESRCH",
":",
"_LOGGER",
".",
"warning",
"(",
"\"Failed to terminate process '{}': {}\"",
".",
"format",
"(",
"self",
".",
"__cmd",
",",
"e",
")",
")",
"return",
"time",
".",
"sleep",
"(",
"0.1",
")",
"try",
":",
"self",
".",
"__process",
".",
"kill",
"(",
")",
"except",
"EnvironmentError",
"as",
"e",
":",
"if",
"e",
".",
"errno",
"!=",
"errno",
".",
"ESRCH",
":",
"_LOGGER",
".",
"warning",
"(",
"\"Failed to kill process '{}': {}\"",
".",
"format",
"(",
"self",
".",
"__cmd",
",",
"e",
")",
")",
"return"
] |
d71d63b6ab5de8b8a5ea2b728b6ab9ac0b1ba71b
|
test
|
_FileStatsReader._read_data
|
Parse lines and return stats
|
yandextank/plugins/ShootExec/plugin.py
|
def _read_data(self, lines):
"""
Parse lines and return stats
"""
results = []
for line in lines:
timestamp, rps, instances = line.split("\t")
curr_ts = int(float(timestamp)) # We allow floats here, but tank expects only seconds
if self.__last_ts < curr_ts:
self.__last_ts = curr_ts
results.append(self.stats_item(self.__last_ts, float(rps), float(instances)))
return results
|
def _read_data(self, lines):
"""
Parse lines and return stats
"""
results = []
for line in lines:
timestamp, rps, instances = line.split("\t")
curr_ts = int(float(timestamp)) # We allow floats here, but tank expects only seconds
if self.__last_ts < curr_ts:
self.__last_ts = curr_ts
results.append(self.stats_item(self.__last_ts, float(rps), float(instances)))
return results
|
[
"Parse",
"lines",
"and",
"return",
"stats"
] |
yandex/yandex-tank
|
python
|
https://github.com/yandex/yandex-tank/blob/d71d63b6ab5de8b8a5ea2b728b6ab9ac0b1ba71b/yandextank/plugins/ShootExec/plugin.py#L210-L222
|
[
"def",
"_read_data",
"(",
"self",
",",
"lines",
")",
":",
"results",
"=",
"[",
"]",
"for",
"line",
"in",
"lines",
":",
"timestamp",
",",
"rps",
",",
"instances",
"=",
"line",
".",
"split",
"(",
"\"\\t\"",
")",
"curr_ts",
"=",
"int",
"(",
"float",
"(",
"timestamp",
")",
")",
"# We allow floats here, but tank expects only seconds",
"if",
"self",
".",
"__last_ts",
"<",
"curr_ts",
":",
"self",
".",
"__last_ts",
"=",
"curr_ts",
"results",
".",
"append",
"(",
"self",
".",
"stats_item",
"(",
"self",
".",
"__last_ts",
",",
"float",
"(",
"rps",
")",
",",
"float",
"(",
"instances",
")",
")",
")",
"return",
"results"
] |
d71d63b6ab5de8b8a5ea2b728b6ab9ac0b1ba71b
|
test
|
Plugin.__create_criterion
|
instantiate criterion from config string
|
yandextank/plugins/Autostop/plugin.py
|
def __create_criterion(self, criterion_str):
""" instantiate criterion from config string """
parsed = criterion_str.split("(")
type_str = parsed[0].strip().lower()
parsed[1] = parsed[1].split(")")[0].strip()
for criterion_class in self.custom_criterions:
if criterion_class.get_type_string() == type_str:
return criterion_class(self, parsed[1])
raise ValueError(
"Unsupported autostop criterion type: %s" % criterion_str)
|
def __create_criterion(self, criterion_str):
""" instantiate criterion from config string """
parsed = criterion_str.split("(")
type_str = parsed[0].strip().lower()
parsed[1] = parsed[1].split(")")[0].strip()
for criterion_class in self.custom_criterions:
if criterion_class.get_type_string() == type_str:
return criterion_class(self, parsed[1])
raise ValueError(
"Unsupported autostop criterion type: %s" % criterion_str)
|
[
"instantiate",
"criterion",
"from",
"config",
"string"
] |
yandex/yandex-tank
|
python
|
https://github.com/yandex/yandex-tank/blob/d71d63b6ab5de8b8a5ea2b728b6ab9ac0b1ba71b/yandextank/plugins/Autostop/plugin.py#L98-L108
|
[
"def",
"__create_criterion",
"(",
"self",
",",
"criterion_str",
")",
":",
"parsed",
"=",
"criterion_str",
".",
"split",
"(",
"\"(\"",
")",
"type_str",
"=",
"parsed",
"[",
"0",
"]",
".",
"strip",
"(",
")",
".",
"lower",
"(",
")",
"parsed",
"[",
"1",
"]",
"=",
"parsed",
"[",
"1",
"]",
".",
"split",
"(",
"\")\"",
")",
"[",
"0",
"]",
".",
"strip",
"(",
")",
"for",
"criterion_class",
"in",
"self",
".",
"custom_criterions",
":",
"if",
"criterion_class",
".",
"get_type_string",
"(",
")",
"==",
"type_str",
":",
"return",
"criterion_class",
"(",
"self",
",",
"parsed",
"[",
"1",
"]",
")",
"raise",
"ValueError",
"(",
"\"Unsupported autostop criterion type: %s\"",
"%",
"criterion_str",
")"
] |
d71d63b6ab5de8b8a5ea2b728b6ab9ac0b1ba71b
|
test
|
ConfigManager.getconfig
|
Prepare config data.
|
yandextank/plugins/Telegraf/config.py
|
def getconfig(self, filename, target_hint):
"""Prepare config data."""
try:
tree = self.parse_xml(filename)
except IOError as exc:
logger.error("Error loading config: %s", exc)
raise RuntimeError("Can't read monitoring config %s" % filename)
hosts = tree.findall('Host')
config = []
for host in hosts:
host_config = self.get_host_config(host, target_hint)
config.append(host_config)
return config
|
def getconfig(self, filename, target_hint):
"""Prepare config data."""
try:
tree = self.parse_xml(filename)
except IOError as exc:
logger.error("Error loading config: %s", exc)
raise RuntimeError("Can't read monitoring config %s" % filename)
hosts = tree.findall('Host')
config = []
for host in hosts:
host_config = self.get_host_config(host, target_hint)
config.append(host_config)
return config
|
[
"Prepare",
"config",
"data",
"."
] |
yandex/yandex-tank
|
python
|
https://github.com/yandex/yandex-tank/blob/d71d63b6ab5de8b8a5ea2b728b6ab9ac0b1ba71b/yandextank/plugins/Telegraf/config.py#L30-L42
|
[
"def",
"getconfig",
"(",
"self",
",",
"filename",
",",
"target_hint",
")",
":",
"try",
":",
"tree",
"=",
"self",
".",
"parse_xml",
"(",
"filename",
")",
"except",
"IOError",
"as",
"exc",
":",
"logger",
".",
"error",
"(",
"\"Error loading config: %s\"",
",",
"exc",
")",
"raise",
"RuntimeError",
"(",
"\"Can't read monitoring config %s\"",
"%",
"filename",
")",
"hosts",
"=",
"tree",
".",
"findall",
"(",
"'Host'",
")",
"config",
"=",
"[",
"]",
"for",
"host",
"in",
"hosts",
":",
"host_config",
"=",
"self",
".",
"get_host_config",
"(",
"host",
",",
"target_hint",
")",
"config",
".",
"append",
"(",
"host_config",
")",
"return",
"config"
] |
d71d63b6ab5de8b8a5ea2b728b6ab9ac0b1ba71b
|
test
|
AgentConfig.create_startup_config
|
Startup and shutdown commands config
Used by agent.py on the target
|
yandextank/plugins/Telegraf/config.py
|
def create_startup_config(self):
""" Startup and shutdown commands config
Used by agent.py on the target
"""
cfg_path = "agent_startup_{}.cfg".format(self.host)
if os.path.isfile(cfg_path):
logger.info(
'Found agent startup config file in working directory with the same name as created for host %s.\n'
'Creating new one via tempfile. This will affect predictable filenames for agent artefacts',
self.host)
handle, cfg_path = tempfile.mkstemp('.cfg', 'agent_')
os.close(handle)
try:
config = ConfigParser.RawConfigParser()
# FIXME incinerate such a string formatting inside a method call
# T_T
config.add_section('startup')
[
config.set('startup', "cmd%s" % idx, cmd)
for idx, cmd in enumerate(self.startups)
]
config.add_section('shutdown')
[
config.set('shutdown', "cmd%s" % idx, cmd)
for idx, cmd in enumerate(self.shutdowns)
]
config.add_section('source')
[
config.set('source', "file%s" % idx, path)
for idx, path in enumerate(self.sources)
]
with open(cfg_path, 'w') as fds:
config.write(fds)
except Exception as exc:
logger.error(
'Error trying to create monitoring startups config. Malformed? %s',
exc,
exc_info=True)
return cfg_path
|
def create_startup_config(self):
""" Startup and shutdown commands config
Used by agent.py on the target
"""
cfg_path = "agent_startup_{}.cfg".format(self.host)
if os.path.isfile(cfg_path):
logger.info(
'Found agent startup config file in working directory with the same name as created for host %s.\n'
'Creating new one via tempfile. This will affect predictable filenames for agent artefacts',
self.host)
handle, cfg_path = tempfile.mkstemp('.cfg', 'agent_')
os.close(handle)
try:
config = ConfigParser.RawConfigParser()
# FIXME incinerate such a string formatting inside a method call
# T_T
config.add_section('startup')
[
config.set('startup', "cmd%s" % idx, cmd)
for idx, cmd in enumerate(self.startups)
]
config.add_section('shutdown')
[
config.set('shutdown', "cmd%s" % idx, cmd)
for idx, cmd in enumerate(self.shutdowns)
]
config.add_section('source')
[
config.set('source', "file%s" % idx, path)
for idx, path in enumerate(self.sources)
]
with open(cfg_path, 'w') as fds:
config.write(fds)
except Exception as exc:
logger.error(
'Error trying to create monitoring startups config. Malformed? %s',
exc,
exc_info=True)
return cfg_path
|
[
"Startup",
"and",
"shutdown",
"commands",
"config",
"Used",
"by",
"agent",
".",
"py",
"on",
"the",
"target"
] |
yandex/yandex-tank
|
python
|
https://github.com/yandex/yandex-tank/blob/d71d63b6ab5de8b8a5ea2b728b6ab9ac0b1ba71b/yandextank/plugins/Telegraf/config.py#L185-L225
|
[
"def",
"create_startup_config",
"(",
"self",
")",
":",
"cfg_path",
"=",
"\"agent_startup_{}.cfg\"",
".",
"format",
"(",
"self",
".",
"host",
")",
"if",
"os",
".",
"path",
".",
"isfile",
"(",
"cfg_path",
")",
":",
"logger",
".",
"info",
"(",
"'Found agent startup config file in working directory with the same name as created for host %s.\\n'",
"'Creating new one via tempfile. This will affect predictable filenames for agent artefacts'",
",",
"self",
".",
"host",
")",
"handle",
",",
"cfg_path",
"=",
"tempfile",
".",
"mkstemp",
"(",
"'.cfg'",
",",
"'agent_'",
")",
"os",
".",
"close",
"(",
"handle",
")",
"try",
":",
"config",
"=",
"ConfigParser",
".",
"RawConfigParser",
"(",
")",
"# FIXME incinerate such a string formatting inside a method call",
"# T_T",
"config",
".",
"add_section",
"(",
"'startup'",
")",
"[",
"config",
".",
"set",
"(",
"'startup'",
",",
"\"cmd%s\"",
"%",
"idx",
",",
"cmd",
")",
"for",
"idx",
",",
"cmd",
"in",
"enumerate",
"(",
"self",
".",
"startups",
")",
"]",
"config",
".",
"add_section",
"(",
"'shutdown'",
")",
"[",
"config",
".",
"set",
"(",
"'shutdown'",
",",
"\"cmd%s\"",
"%",
"idx",
",",
"cmd",
")",
"for",
"idx",
",",
"cmd",
"in",
"enumerate",
"(",
"self",
".",
"shutdowns",
")",
"]",
"config",
".",
"add_section",
"(",
"'source'",
")",
"[",
"config",
".",
"set",
"(",
"'source'",
",",
"\"file%s\"",
"%",
"idx",
",",
"path",
")",
"for",
"idx",
",",
"path",
"in",
"enumerate",
"(",
"self",
".",
"sources",
")",
"]",
"with",
"open",
"(",
"cfg_path",
",",
"'w'",
")",
"as",
"fds",
":",
"config",
".",
"write",
"(",
"fds",
")",
"except",
"Exception",
"as",
"exc",
":",
"logger",
".",
"error",
"(",
"'Error trying to create monitoring startups config. Malformed? %s'",
",",
"exc",
",",
"exc_info",
"=",
"True",
")",
"return",
"cfg_path"
] |
d71d63b6ab5de8b8a5ea2b728b6ab9ac0b1ba71b
|
test
|
AgentConfig.create_custom_exec_script
|
bash script w/ custom commands inside
inspired by half a night trying to avoid escaping bash special characters
|
yandextank/plugins/Telegraf/config.py
|
def create_custom_exec_script(self):
""" bash script w/ custom commands inside
inspired by half a night trying to avoid escaping bash special characters
"""
cfg_path = "agent_customs_{}.cfg".format(self.host)
if os.path.isfile(cfg_path):
logger.info(
'Found agent custom execs config file in working directory with the same name as created for host %s.\n'
'Creating new one via tempfile. This will affect predictable filenames for agent artefacts',
self.host)
handle, cfg_path = tempfile.mkstemp('.sh', 'agent_customs_')
os.close(handle)
cmds = ""
for idx, cmd in enumerate(self.custom):
cmds += "-{idx}) {cmd};;\n".format(idx=idx, cmd=cmd['cmd'])
customs_script = """
#!/bin/sh
while :
do
case "$1" in
{cmds}
*) break;;
esac
shift
done
""".format(cmds=cmds)
with open(cfg_path, 'w') as fds:
fds.write(customs_script)
return cfg_path
|
def create_custom_exec_script(self):
""" bash script w/ custom commands inside
inspired by half a night trying to avoid escaping bash special characters
"""
cfg_path = "agent_customs_{}.cfg".format(self.host)
if os.path.isfile(cfg_path):
logger.info(
'Found agent custom execs config file in working directory with the same name as created for host %s.\n'
'Creating new one via tempfile. This will affect predictable filenames for agent artefacts',
self.host)
handle, cfg_path = tempfile.mkstemp('.sh', 'agent_customs_')
os.close(handle)
cmds = ""
for idx, cmd in enumerate(self.custom):
cmds += "-{idx}) {cmd};;\n".format(idx=idx, cmd=cmd['cmd'])
customs_script = """
#!/bin/sh
while :
do
case "$1" in
{cmds}
*) break;;
esac
shift
done
""".format(cmds=cmds)
with open(cfg_path, 'w') as fds:
fds.write(customs_script)
return cfg_path
|
[
"bash",
"script",
"w",
"/",
"custom",
"commands",
"inside",
"inspired",
"by",
"half",
"a",
"night",
"trying",
"to",
"avoid",
"escaping",
"bash",
"special",
"characters"
] |
yandex/yandex-tank
|
python
|
https://github.com/yandex/yandex-tank/blob/d71d63b6ab5de8b8a5ea2b728b6ab9ac0b1ba71b/yandextank/plugins/Telegraf/config.py#L227-L258
|
[
"def",
"create_custom_exec_script",
"(",
"self",
")",
":",
"cfg_path",
"=",
"\"agent_customs_{}.cfg\"",
".",
"format",
"(",
"self",
".",
"host",
")",
"if",
"os",
".",
"path",
".",
"isfile",
"(",
"cfg_path",
")",
":",
"logger",
".",
"info",
"(",
"'Found agent custom execs config file in working directory with the same name as created for host %s.\\n'",
"'Creating new one via tempfile. This will affect predictable filenames for agent artefacts'",
",",
"self",
".",
"host",
")",
"handle",
",",
"cfg_path",
"=",
"tempfile",
".",
"mkstemp",
"(",
"'.sh'",
",",
"'agent_customs_'",
")",
"os",
".",
"close",
"(",
"handle",
")",
"cmds",
"=",
"\"\"",
"for",
"idx",
",",
"cmd",
"in",
"enumerate",
"(",
"self",
".",
"custom",
")",
":",
"cmds",
"+=",
"\"-{idx}) {cmd};;\\n\"",
".",
"format",
"(",
"idx",
"=",
"idx",
",",
"cmd",
"=",
"cmd",
"[",
"'cmd'",
"]",
")",
"customs_script",
"=",
"\"\"\"\n #!/bin/sh\n while :\n do\n case \"$1\" in\n {cmds}\n *) break;;\n esac\n shift\n done\n \"\"\"",
".",
"format",
"(",
"cmds",
"=",
"cmds",
")",
"with",
"open",
"(",
"cfg_path",
",",
"'w'",
")",
"as",
"fds",
":",
"fds",
".",
"write",
"(",
"customs_script",
")",
"return",
"cfg_path"
] |
d71d63b6ab5de8b8a5ea2b728b6ab9ac0b1ba71b
|
test
|
AgentConfig.create_collector_config
|
Telegraf collector config,
toml format
|
yandextank/plugins/Telegraf/config.py
|
def create_collector_config(self, workdir):
""" Telegraf collector config,
toml format
"""
cfg_path = "agent_collector_{}.cfg".format(self.host)
if os.path.isfile(cfg_path):
logger.info(
'Found agent config file in working directory with the same name as created for host %s.\n'
'Creating new one via tempfile. This will affect predictable filenames for agent artefacts',
self.host)
handle, cfg_path = tempfile.mkstemp('.cfg', 'agent_collector_')
os.close(handle)
self.monitoring_data_output = "{remote_folder}/monitoring.rawdata".format(
remote_folder=workdir)
defaults_old_enabled = ['CPU', 'Memory', 'Disk', 'Net', 'System']
try:
config = ConfigParser.RawConfigParser()
config.add_section("global_tags")
config.add_section("agent")
config.set(
"agent",
"interval",
"'{interval}s'".format(interval=self.interval))
config.set("agent", "round_interval", "true")
config.set("agent", "flush_interval", "'1s'")
config.set("agent", "collection_jitter", "'0s'")
config.set("agent", "flush_jitter", "'1s'")
for section in self.host_config.keys():
# telegraf-style config
if not self.old_style_configs:
config.add_section(
"{section_name}".format(
section_name=self.host_config[section]['name']))
for key, value in iteritems(self.host_config[section]):
if key != 'name':
config.set(
"{section_name}".format(
section_name=self.host_config[section][
'name']),
"{key}".format(key=key),
"{value}".format(value=value))
# monitoring-style config
else:
if section in defaults_old_enabled:
config.add_section(
"{section_name}".format(
section_name=self.host_config[section]['name']))
for key, value in iteritems(self.host_config[section]):
if key in [
'fielddrop', 'fieldpass', 'percpu',
'devices', 'interfaces'
]:
config.set(
"{section_name}".format(
section_name=self.host_config[section][
'name']),
"{key}".format(key=key),
"{value}".format(value=value))
# outputs
config.add_section("[outputs.file]")
config.set(
"[outputs.file]",
"files",
"['{config}']".format(config=self.monitoring_data_output))
config.set("[outputs.file]", "data_format", "'json'")
with open(cfg_path, 'w') as fds:
config.write(fds)
# dirty hack, this allow to avoid bash escape quoting, we're pushing shell script w/ arguments
# index of argument is index of custom metric in our config
inputs = ""
for idx, cmd in enumerate(self.custom):
inputs += "[[inputs.exec]]\n"
inputs += "commands = ['/bin/sh {workdir}/agent_customs.sh -{idx}']\n".format(
workdir=workdir, idx=idx)
inputs += "data_format = 'value'\n"
inputs += "data_type = 'float'\n"
inputs += "name_prefix = '{}_'\n\n".format(cmd.get('label'))
if cmd['diff']:
decoder.diff_metrics['custom'].append(
decoder.find_common_names(cmd.get('label')))
with open(cfg_path, 'a') as fds:
fds.write(inputs)
# telegraf raw configuration into xml
telegraf_raw = ""
for element in self.telegrafraw:
telegraf_raw += element
with open(cfg_path, 'a') as fds:
fds.write(telegraf_raw)
except Exception as exc:
logger.error(
'Error trying to create monitoring config. Malformed? %s',
exc,
exc_info=True)
return cfg_path
|
def create_collector_config(self, workdir):
""" Telegraf collector config,
toml format
"""
cfg_path = "agent_collector_{}.cfg".format(self.host)
if os.path.isfile(cfg_path):
logger.info(
'Found agent config file in working directory with the same name as created for host %s.\n'
'Creating new one via tempfile. This will affect predictable filenames for agent artefacts',
self.host)
handle, cfg_path = tempfile.mkstemp('.cfg', 'agent_collector_')
os.close(handle)
self.monitoring_data_output = "{remote_folder}/monitoring.rawdata".format(
remote_folder=workdir)
defaults_old_enabled = ['CPU', 'Memory', 'Disk', 'Net', 'System']
try:
config = ConfigParser.RawConfigParser()
config.add_section("global_tags")
config.add_section("agent")
config.set(
"agent",
"interval",
"'{interval}s'".format(interval=self.interval))
config.set("agent", "round_interval", "true")
config.set("agent", "flush_interval", "'1s'")
config.set("agent", "collection_jitter", "'0s'")
config.set("agent", "flush_jitter", "'1s'")
for section in self.host_config.keys():
# telegraf-style config
if not self.old_style_configs:
config.add_section(
"{section_name}".format(
section_name=self.host_config[section]['name']))
for key, value in iteritems(self.host_config[section]):
if key != 'name':
config.set(
"{section_name}".format(
section_name=self.host_config[section][
'name']),
"{key}".format(key=key),
"{value}".format(value=value))
# monitoring-style config
else:
if section in defaults_old_enabled:
config.add_section(
"{section_name}".format(
section_name=self.host_config[section]['name']))
for key, value in iteritems(self.host_config[section]):
if key in [
'fielddrop', 'fieldpass', 'percpu',
'devices', 'interfaces'
]:
config.set(
"{section_name}".format(
section_name=self.host_config[section][
'name']),
"{key}".format(key=key),
"{value}".format(value=value))
# outputs
config.add_section("[outputs.file]")
config.set(
"[outputs.file]",
"files",
"['{config}']".format(config=self.monitoring_data_output))
config.set("[outputs.file]", "data_format", "'json'")
with open(cfg_path, 'w') as fds:
config.write(fds)
# dirty hack, this allow to avoid bash escape quoting, we're pushing shell script w/ arguments
# index of argument is index of custom metric in our config
inputs = ""
for idx, cmd in enumerate(self.custom):
inputs += "[[inputs.exec]]\n"
inputs += "commands = ['/bin/sh {workdir}/agent_customs.sh -{idx}']\n".format(
workdir=workdir, idx=idx)
inputs += "data_format = 'value'\n"
inputs += "data_type = 'float'\n"
inputs += "name_prefix = '{}_'\n\n".format(cmd.get('label'))
if cmd['diff']:
decoder.diff_metrics['custom'].append(
decoder.find_common_names(cmd.get('label')))
with open(cfg_path, 'a') as fds:
fds.write(inputs)
# telegraf raw configuration into xml
telegraf_raw = ""
for element in self.telegrafraw:
telegraf_raw += element
with open(cfg_path, 'a') as fds:
fds.write(telegraf_raw)
except Exception as exc:
logger.error(
'Error trying to create monitoring config. Malformed? %s',
exc,
exc_info=True)
return cfg_path
|
[
"Telegraf",
"collector",
"config",
"toml",
"format"
] |
yandex/yandex-tank
|
python
|
https://github.com/yandex/yandex-tank/blob/d71d63b6ab5de8b8a5ea2b728b6ab9ac0b1ba71b/yandextank/plugins/Telegraf/config.py#L260-L366
|
[
"def",
"create_collector_config",
"(",
"self",
",",
"workdir",
")",
":",
"cfg_path",
"=",
"\"agent_collector_{}.cfg\"",
".",
"format",
"(",
"self",
".",
"host",
")",
"if",
"os",
".",
"path",
".",
"isfile",
"(",
"cfg_path",
")",
":",
"logger",
".",
"info",
"(",
"'Found agent config file in working directory with the same name as created for host %s.\\n'",
"'Creating new one via tempfile. This will affect predictable filenames for agent artefacts'",
",",
"self",
".",
"host",
")",
"handle",
",",
"cfg_path",
"=",
"tempfile",
".",
"mkstemp",
"(",
"'.cfg'",
",",
"'agent_collector_'",
")",
"os",
".",
"close",
"(",
"handle",
")",
"self",
".",
"monitoring_data_output",
"=",
"\"{remote_folder}/monitoring.rawdata\"",
".",
"format",
"(",
"remote_folder",
"=",
"workdir",
")",
"defaults_old_enabled",
"=",
"[",
"'CPU'",
",",
"'Memory'",
",",
"'Disk'",
",",
"'Net'",
",",
"'System'",
"]",
"try",
":",
"config",
"=",
"ConfigParser",
".",
"RawConfigParser",
"(",
")",
"config",
".",
"add_section",
"(",
"\"global_tags\"",
")",
"config",
".",
"add_section",
"(",
"\"agent\"",
")",
"config",
".",
"set",
"(",
"\"agent\"",
",",
"\"interval\"",
",",
"\"'{interval}s'\"",
".",
"format",
"(",
"interval",
"=",
"self",
".",
"interval",
")",
")",
"config",
".",
"set",
"(",
"\"agent\"",
",",
"\"round_interval\"",
",",
"\"true\"",
")",
"config",
".",
"set",
"(",
"\"agent\"",
",",
"\"flush_interval\"",
",",
"\"'1s'\"",
")",
"config",
".",
"set",
"(",
"\"agent\"",
",",
"\"collection_jitter\"",
",",
"\"'0s'\"",
")",
"config",
".",
"set",
"(",
"\"agent\"",
",",
"\"flush_jitter\"",
",",
"\"'1s'\"",
")",
"for",
"section",
"in",
"self",
".",
"host_config",
".",
"keys",
"(",
")",
":",
"# telegraf-style config",
"if",
"not",
"self",
".",
"old_style_configs",
":",
"config",
".",
"add_section",
"(",
"\"{section_name}\"",
".",
"format",
"(",
"section_name",
"=",
"self",
".",
"host_config",
"[",
"section",
"]",
"[",
"'name'",
"]",
")",
")",
"for",
"key",
",",
"value",
"in",
"iteritems",
"(",
"self",
".",
"host_config",
"[",
"section",
"]",
")",
":",
"if",
"key",
"!=",
"'name'",
":",
"config",
".",
"set",
"(",
"\"{section_name}\"",
".",
"format",
"(",
"section_name",
"=",
"self",
".",
"host_config",
"[",
"section",
"]",
"[",
"'name'",
"]",
")",
",",
"\"{key}\"",
".",
"format",
"(",
"key",
"=",
"key",
")",
",",
"\"{value}\"",
".",
"format",
"(",
"value",
"=",
"value",
")",
")",
"# monitoring-style config",
"else",
":",
"if",
"section",
"in",
"defaults_old_enabled",
":",
"config",
".",
"add_section",
"(",
"\"{section_name}\"",
".",
"format",
"(",
"section_name",
"=",
"self",
".",
"host_config",
"[",
"section",
"]",
"[",
"'name'",
"]",
")",
")",
"for",
"key",
",",
"value",
"in",
"iteritems",
"(",
"self",
".",
"host_config",
"[",
"section",
"]",
")",
":",
"if",
"key",
"in",
"[",
"'fielddrop'",
",",
"'fieldpass'",
",",
"'percpu'",
",",
"'devices'",
",",
"'interfaces'",
"]",
":",
"config",
".",
"set",
"(",
"\"{section_name}\"",
".",
"format",
"(",
"section_name",
"=",
"self",
".",
"host_config",
"[",
"section",
"]",
"[",
"'name'",
"]",
")",
",",
"\"{key}\"",
".",
"format",
"(",
"key",
"=",
"key",
")",
",",
"\"{value}\"",
".",
"format",
"(",
"value",
"=",
"value",
")",
")",
"# outputs",
"config",
".",
"add_section",
"(",
"\"[outputs.file]\"",
")",
"config",
".",
"set",
"(",
"\"[outputs.file]\"",
",",
"\"files\"",
",",
"\"['{config}']\"",
".",
"format",
"(",
"config",
"=",
"self",
".",
"monitoring_data_output",
")",
")",
"config",
".",
"set",
"(",
"\"[outputs.file]\"",
",",
"\"data_format\"",
",",
"\"'json'\"",
")",
"with",
"open",
"(",
"cfg_path",
",",
"'w'",
")",
"as",
"fds",
":",
"config",
".",
"write",
"(",
"fds",
")",
"# dirty hack, this allow to avoid bash escape quoting, we're pushing shell script w/ arguments",
"# index of argument is index of custom metric in our config",
"inputs",
"=",
"\"\"",
"for",
"idx",
",",
"cmd",
"in",
"enumerate",
"(",
"self",
".",
"custom",
")",
":",
"inputs",
"+=",
"\"[[inputs.exec]]\\n\"",
"inputs",
"+=",
"\"commands = ['/bin/sh {workdir}/agent_customs.sh -{idx}']\\n\"",
".",
"format",
"(",
"workdir",
"=",
"workdir",
",",
"idx",
"=",
"idx",
")",
"inputs",
"+=",
"\"data_format = 'value'\\n\"",
"inputs",
"+=",
"\"data_type = 'float'\\n\"",
"inputs",
"+=",
"\"name_prefix = '{}_'\\n\\n\"",
".",
"format",
"(",
"cmd",
".",
"get",
"(",
"'label'",
")",
")",
"if",
"cmd",
"[",
"'diff'",
"]",
":",
"decoder",
".",
"diff_metrics",
"[",
"'custom'",
"]",
".",
"append",
"(",
"decoder",
".",
"find_common_names",
"(",
"cmd",
".",
"get",
"(",
"'label'",
")",
")",
")",
"with",
"open",
"(",
"cfg_path",
",",
"'a'",
")",
"as",
"fds",
":",
"fds",
".",
"write",
"(",
"inputs",
")",
"# telegraf raw configuration into xml",
"telegraf_raw",
"=",
"\"\"",
"for",
"element",
"in",
"self",
".",
"telegrafraw",
":",
"telegraf_raw",
"+=",
"element",
"with",
"open",
"(",
"cfg_path",
",",
"'a'",
")",
"as",
"fds",
":",
"fds",
".",
"write",
"(",
"telegraf_raw",
")",
"except",
"Exception",
"as",
"exc",
":",
"logger",
".",
"error",
"(",
"'Error trying to create monitoring config. Malformed? %s'",
",",
"exc",
",",
"exc_info",
"=",
"True",
")",
"return",
"cfg_path"
] |
d71d63b6ab5de8b8a5ea2b728b6ab9ac0b1ba71b
|
test
|
Plugin.__check_disk
|
raise exception on disk space exceeded
|
yandextank/plugins/ResourceCheck/plugin.py
|
def __check_disk(self):
''' raise exception on disk space exceeded '''
cmd = "sh -c \"df --no-sync -m -P -l -x fuse -x tmpfs -x devtmpfs -x davfs -x nfs "
cmd += self.core.artifacts_base_dir
cmd += " | tail -n 1 | awk '{print \$4}' \""
res = execute(cmd, True, 0.1, True)
logging.debug("Result: %s", res)
if not len(res[1]):
self.log.debug("No disk usage info: %s", res[2])
return
disk_free = res[1]
self.log.debug(
"Disk free space: %s/%s", disk_free.strip(), self.disk_limit)
if int(disk_free.strip()) < self.disk_limit:
raise RuntimeError(
"Not enough local resources: disk space less than %sMB in %s: %sMB"
% (
self.disk_limit, self.core.artifacts_base_dir,
int(disk_free.strip())))
|
def __check_disk(self):
''' raise exception on disk space exceeded '''
cmd = "sh -c \"df --no-sync -m -P -l -x fuse -x tmpfs -x devtmpfs -x davfs -x nfs "
cmd += self.core.artifacts_base_dir
cmd += " | tail -n 1 | awk '{print \$4}' \""
res = execute(cmd, True, 0.1, True)
logging.debug("Result: %s", res)
if not len(res[1]):
self.log.debug("No disk usage info: %s", res[2])
return
disk_free = res[1]
self.log.debug(
"Disk free space: %s/%s", disk_free.strip(), self.disk_limit)
if int(disk_free.strip()) < self.disk_limit:
raise RuntimeError(
"Not enough local resources: disk space less than %sMB in %s: %sMB"
% (
self.disk_limit, self.core.artifacts_base_dir,
int(disk_free.strip())))
|
[
"raise",
"exception",
"on",
"disk",
"space",
"exceeded"
] |
yandex/yandex-tank
|
python
|
https://github.com/yandex/yandex-tank/blob/d71d63b6ab5de8b8a5ea2b728b6ab9ac0b1ba71b/yandextank/plugins/ResourceCheck/plugin.py#L52-L70
|
[
"def",
"__check_disk",
"(",
"self",
")",
":",
"cmd",
"=",
"\"sh -c \\\"df --no-sync -m -P -l -x fuse -x tmpfs -x devtmpfs -x davfs -x nfs \"",
"cmd",
"+=",
"self",
".",
"core",
".",
"artifacts_base_dir",
"cmd",
"+=",
"\" | tail -n 1 | awk '{print \\$4}' \\\"\"",
"res",
"=",
"execute",
"(",
"cmd",
",",
"True",
",",
"0.1",
",",
"True",
")",
"logging",
".",
"debug",
"(",
"\"Result: %s\"",
",",
"res",
")",
"if",
"not",
"len",
"(",
"res",
"[",
"1",
"]",
")",
":",
"self",
".",
"log",
".",
"debug",
"(",
"\"No disk usage info: %s\"",
",",
"res",
"[",
"2",
"]",
")",
"return",
"disk_free",
"=",
"res",
"[",
"1",
"]",
"self",
".",
"log",
".",
"debug",
"(",
"\"Disk free space: %s/%s\"",
",",
"disk_free",
".",
"strip",
"(",
")",
",",
"self",
".",
"disk_limit",
")",
"if",
"int",
"(",
"disk_free",
".",
"strip",
"(",
")",
")",
"<",
"self",
".",
"disk_limit",
":",
"raise",
"RuntimeError",
"(",
"\"Not enough local resources: disk space less than %sMB in %s: %sMB\"",
"%",
"(",
"self",
".",
"disk_limit",
",",
"self",
".",
"core",
".",
"artifacts_base_dir",
",",
"int",
"(",
"disk_free",
".",
"strip",
"(",
")",
")",
")",
")"
] |
d71d63b6ab5de8b8a5ea2b728b6ab9ac0b1ba71b
|
test
|
Plugin.__check_mem
|
raise exception on RAM exceeded
|
yandextank/plugins/ResourceCheck/plugin.py
|
def __check_mem(self):
''' raise exception on RAM exceeded '''
mem_free = psutil.virtual_memory().available / 2**20
self.log.debug("Memory free: %s/%s", mem_free, self.mem_limit)
if mem_free < self.mem_limit:
raise RuntimeError(
"Not enough resources: free memory less "
"than %sMB: %sMB" % (self.mem_limit, mem_free))
|
def __check_mem(self):
''' raise exception on RAM exceeded '''
mem_free = psutil.virtual_memory().available / 2**20
self.log.debug("Memory free: %s/%s", mem_free, self.mem_limit)
if mem_free < self.mem_limit:
raise RuntimeError(
"Not enough resources: free memory less "
"than %sMB: %sMB" % (self.mem_limit, mem_free))
|
[
"raise",
"exception",
"on",
"RAM",
"exceeded"
] |
yandex/yandex-tank
|
python
|
https://github.com/yandex/yandex-tank/blob/d71d63b6ab5de8b8a5ea2b728b6ab9ac0b1ba71b/yandextank/plugins/ResourceCheck/plugin.py#L72-L79
|
[
"def",
"__check_mem",
"(",
"self",
")",
":",
"mem_free",
"=",
"psutil",
".",
"virtual_memory",
"(",
")",
".",
"available",
"/",
"2",
"**",
"20",
"self",
".",
"log",
".",
"debug",
"(",
"\"Memory free: %s/%s\"",
",",
"mem_free",
",",
"self",
".",
"mem_limit",
")",
"if",
"mem_free",
"<",
"self",
".",
"mem_limit",
":",
"raise",
"RuntimeError",
"(",
"\"Not enough resources: free memory less \"",
"\"than %sMB: %sMB\"",
"%",
"(",
"self",
".",
"mem_limit",
",",
"mem_free",
")",
")"
] |
d71d63b6ab5de8b8a5ea2b728b6ab9ac0b1ba71b
|
test
|
get_terminal_size
|
Gets width and height of terminal viewport
|
yandextank/plugins/Console/screen.py
|
def get_terminal_size():
'''
Gets width and height of terminal viewport
'''
default_size = (30, 120)
env = os.environ
def ioctl_gwinsz(file_d):
'''
Helper to get console size
'''
try:
sizes = struct.unpack(
'hh', fcntl.ioctl(file_d, termios.TIOCGWINSZ, '1234'))
except Exception:
sizes = default_size
return sizes
sizes = ioctl_gwinsz(0) or ioctl_gwinsz(1) or ioctl_gwinsz(2)
if not sizes:
try:
file_d = os.open(os.ctermid(), os.O_RDONLY)
sizes = ioctl_gwinsz(file_d)
os.close(file_d.fileno())
except Exception:
pass
if not sizes:
try:
sizes = (env['LINES'], env['COLUMNS'])
except Exception:
sizes = default_size
return int(sizes[1]), int(sizes[0])
|
def get_terminal_size():
'''
Gets width and height of terminal viewport
'''
default_size = (30, 120)
env = os.environ
def ioctl_gwinsz(file_d):
'''
Helper to get console size
'''
try:
sizes = struct.unpack(
'hh', fcntl.ioctl(file_d, termios.TIOCGWINSZ, '1234'))
except Exception:
sizes = default_size
return sizes
sizes = ioctl_gwinsz(0) or ioctl_gwinsz(1) or ioctl_gwinsz(2)
if not sizes:
try:
file_d = os.open(os.ctermid(), os.O_RDONLY)
sizes = ioctl_gwinsz(file_d)
os.close(file_d.fileno())
except Exception:
pass
if not sizes:
try:
sizes = (env['LINES'], env['COLUMNS'])
except Exception:
sizes = default_size
return int(sizes[1]), int(sizes[0])
|
[
"Gets",
"width",
"and",
"height",
"of",
"terminal",
"viewport"
] |
yandex/yandex-tank
|
python
|
https://github.com/yandex/yandex-tank/blob/d71d63b6ab5de8b8a5ea2b728b6ab9ac0b1ba71b/yandextank/plugins/Console/screen.py#L16-L47
|
[
"def",
"get_terminal_size",
"(",
")",
":",
"default_size",
"=",
"(",
"30",
",",
"120",
")",
"env",
"=",
"os",
".",
"environ",
"def",
"ioctl_gwinsz",
"(",
"file_d",
")",
":",
"'''\n Helper to get console size\n '''",
"try",
":",
"sizes",
"=",
"struct",
".",
"unpack",
"(",
"'hh'",
",",
"fcntl",
".",
"ioctl",
"(",
"file_d",
",",
"termios",
".",
"TIOCGWINSZ",
",",
"'1234'",
")",
")",
"except",
"Exception",
":",
"sizes",
"=",
"default_size",
"return",
"sizes",
"sizes",
"=",
"ioctl_gwinsz",
"(",
"0",
")",
"or",
"ioctl_gwinsz",
"(",
"1",
")",
"or",
"ioctl_gwinsz",
"(",
"2",
")",
"if",
"not",
"sizes",
":",
"try",
":",
"file_d",
"=",
"os",
".",
"open",
"(",
"os",
".",
"ctermid",
"(",
")",
",",
"os",
".",
"O_RDONLY",
")",
"sizes",
"=",
"ioctl_gwinsz",
"(",
"file_d",
")",
"os",
".",
"close",
"(",
"file_d",
".",
"fileno",
"(",
")",
")",
"except",
"Exception",
":",
"pass",
"if",
"not",
"sizes",
":",
"try",
":",
"sizes",
"=",
"(",
"env",
"[",
"'LINES'",
"]",
",",
"env",
"[",
"'COLUMNS'",
"]",
")",
"except",
"Exception",
":",
"sizes",
"=",
"default_size",
"return",
"int",
"(",
"sizes",
"[",
"1",
"]",
")",
",",
"int",
"(",
"sizes",
"[",
"0",
"]",
")"
] |
d71d63b6ab5de8b8a5ea2b728b6ab9ac0b1ba71b
|
test
|
Screen.__get_right_line
|
Gets next line for right panel
|
yandextank/plugins/Console/screen.py
|
def __get_right_line(self, widget_output):
''' Gets next line for right panel '''
right_line = ''
if widget_output:
right_line = widget_output.pop(0)
if len(right_line) > self.right_panel_width:
right_line_plain = self.markup.clean_markup(right_line)
if len(right_line_plain) > self.right_panel_width:
right_line = right_line[:self.right_panel_width] + self.markup.RESET
return right_line
|
def __get_right_line(self, widget_output):
''' Gets next line for right panel '''
right_line = ''
if widget_output:
right_line = widget_output.pop(0)
if len(right_line) > self.right_panel_width:
right_line_plain = self.markup.clean_markup(right_line)
if len(right_line_plain) > self.right_panel_width:
right_line = right_line[:self.right_panel_width] + self.markup.RESET
return right_line
|
[
"Gets",
"next",
"line",
"for",
"right",
"panel"
] |
yandex/yandex-tank
|
python
|
https://github.com/yandex/yandex-tank/blob/d71d63b6ab5de8b8a5ea2b728b6ab9ac0b1ba71b/yandextank/plugins/Console/screen.py#L318-L327
|
[
"def",
"__get_right_line",
"(",
"self",
",",
"widget_output",
")",
":",
"right_line",
"=",
"''",
"if",
"widget_output",
":",
"right_line",
"=",
"widget_output",
".",
"pop",
"(",
"0",
")",
"if",
"len",
"(",
"right_line",
")",
">",
"self",
".",
"right_panel_width",
":",
"right_line_plain",
"=",
"self",
".",
"markup",
".",
"clean_markup",
"(",
"right_line",
")",
"if",
"len",
"(",
"right_line_plain",
")",
">",
"self",
".",
"right_panel_width",
":",
"right_line",
"=",
"right_line",
"[",
":",
"self",
".",
"right_panel_width",
"]",
"+",
"self",
".",
"markup",
".",
"RESET",
"return",
"right_line"
] |
d71d63b6ab5de8b8a5ea2b728b6ab9ac0b1ba71b
|
test
|
Screen.__truncate
|
Cut tuple of line chunks according to it's wisible lenght
|
yandextank/plugins/Console/screen.py
|
def __truncate(self, line_arr, max_width):
''' Cut tuple of line chunks according to it's wisible lenght '''
def is_space(chunk):
return all([True if i == ' ' else False for i in chunk])
def is_empty(chunks, markups):
result = []
for chunk in chunks:
if chunk in markups:
result.append(True)
elif is_space(chunk):
result.append(True)
else:
result.append(False)
return all(result)
left = max_width
result = ''
markups = self.markup.get_markup_vars()
for num, chunk in enumerate(line_arr):
if chunk in markups:
result += chunk
else:
if left > 0:
if len(chunk) <= left:
result += chunk
left -= len(chunk)
else:
leftover = (chunk[left:],) + line_arr[num + 1:]
was_cut = not is_empty(leftover, markups)
if was_cut:
result += chunk[:left - 1] + self.markup.RESET + u'\u2026'
else:
result += chunk[:left]
left = 0
return result
|
def __truncate(self, line_arr, max_width):
''' Cut tuple of line chunks according to it's wisible lenght '''
def is_space(chunk):
return all([True if i == ' ' else False for i in chunk])
def is_empty(chunks, markups):
result = []
for chunk in chunks:
if chunk in markups:
result.append(True)
elif is_space(chunk):
result.append(True)
else:
result.append(False)
return all(result)
left = max_width
result = ''
markups = self.markup.get_markup_vars()
for num, chunk in enumerate(line_arr):
if chunk in markups:
result += chunk
else:
if left > 0:
if len(chunk) <= left:
result += chunk
left -= len(chunk)
else:
leftover = (chunk[left:],) + line_arr[num + 1:]
was_cut = not is_empty(leftover, markups)
if was_cut:
result += chunk[:left - 1] + self.markup.RESET + u'\u2026'
else:
result += chunk[:left]
left = 0
return result
|
[
"Cut",
"tuple",
"of",
"line",
"chunks",
"according",
"to",
"it",
"s",
"wisible",
"lenght"
] |
yandex/yandex-tank
|
python
|
https://github.com/yandex/yandex-tank/blob/d71d63b6ab5de8b8a5ea2b728b6ab9ac0b1ba71b/yandextank/plugins/Console/screen.py#L329-L363
|
[
"def",
"__truncate",
"(",
"self",
",",
"line_arr",
",",
"max_width",
")",
":",
"def",
"is_space",
"(",
"chunk",
")",
":",
"return",
"all",
"(",
"[",
"True",
"if",
"i",
"==",
"' '",
"else",
"False",
"for",
"i",
"in",
"chunk",
"]",
")",
"def",
"is_empty",
"(",
"chunks",
",",
"markups",
")",
":",
"result",
"=",
"[",
"]",
"for",
"chunk",
"in",
"chunks",
":",
"if",
"chunk",
"in",
"markups",
":",
"result",
".",
"append",
"(",
"True",
")",
"elif",
"is_space",
"(",
"chunk",
")",
":",
"result",
".",
"append",
"(",
"True",
")",
"else",
":",
"result",
".",
"append",
"(",
"False",
")",
"return",
"all",
"(",
"result",
")",
"left",
"=",
"max_width",
"result",
"=",
"''",
"markups",
"=",
"self",
".",
"markup",
".",
"get_markup_vars",
"(",
")",
"for",
"num",
",",
"chunk",
"in",
"enumerate",
"(",
"line_arr",
")",
":",
"if",
"chunk",
"in",
"markups",
":",
"result",
"+=",
"chunk",
"else",
":",
"if",
"left",
">",
"0",
":",
"if",
"len",
"(",
"chunk",
")",
"<=",
"left",
":",
"result",
"+=",
"chunk",
"left",
"-=",
"len",
"(",
"chunk",
")",
"else",
":",
"leftover",
"=",
"(",
"chunk",
"[",
"left",
":",
"]",
",",
")",
"+",
"line_arr",
"[",
"num",
"+",
"1",
":",
"]",
"was_cut",
"=",
"not",
"is_empty",
"(",
"leftover",
",",
"markups",
")",
"if",
"was_cut",
":",
"result",
"+=",
"chunk",
"[",
":",
"left",
"-",
"1",
"]",
"+",
"self",
".",
"markup",
".",
"RESET",
"+",
"u'\\u2026'",
"else",
":",
"result",
"+=",
"chunk",
"[",
":",
"left",
"]",
"left",
"=",
"0",
"return",
"result"
] |
d71d63b6ab5de8b8a5ea2b728b6ab9ac0b1ba71b
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.