Dataset schema (12 columns):

  partition        : stringclasses (3 values)
  func_name        : stringlengths (1 to 134)
  docstring        : stringlengths (1 to 46.9k)
  path             : stringlengths (4 to 223)
  original_string  : stringlengths (75 to 104k)
  code             : stringlengths (75 to 104k)
  docstring_tokens : listlengths (1 to 1.97k)
  repo             : stringlengths (7 to 55)
  language         : stringclasses (1 value)
  url              : stringlengths (87 to 315)
  code_tokens      : listlengths (19 to 28.4k)
  sha              : stringlengths (40 to 40)
valid
one_to_many
Use an event to build a one-to-many relationship on a class. This makes use of the :meth:`.References._reference_table` method to generate a full foreign key relationship from the remote table.
baka_model/model/meta/orm.py
def one_to_many(clsname, **kw):
    """Use an event to build a one-to-many relationship on a class.

    This makes use of the :meth:`.References._reference_table` method
    to generate a full foreign key relationship from the remote table.
    """
    @declared_attr
    def o2m(cls):
        cls._references((clsname, cls.__name__))
        return relationship(clsname, **kw)
    return o2m
[ "Use", "an", "event", "to", "build", "a", "one", "-", "to", "-", "many", "relationship", "on", "a", "class", "." ]
suryakencana007/baka_model
python
https://github.com/suryakencana007/baka_model/blob/915c2da9920e973302f5764ae63799acd5ecf0b7/baka_model/model/meta/orm.py#L37-L48
[ "def", "one_to_many", "(", "clsname", ",", "*", "*", "kw", ")", ":", "@", "declared_attr", "def", "o2m", "(", "cls", ")", ":", "cls", ".", "_references", "(", "(", "clsname", ",", "cls", ".", "__name__", ")", ")", "return", "relationship", "(", "clsname", ",", "*", "*", "kw", ")", "return", "o2m" ]
915c2da9920e973302f5764ae63799acd5ecf0b7
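For context, a minimal self-contained sketch of the same declared_attr pattern, with the repository's References bookkeeping omitted; the Parent/Child models here are hypothetical, not taken from the repo:

import sqlalchemy as sa
from sqlalchemy.orm import declarative_base, declared_attr, relationship

Base = declarative_base()

def simple_one_to_many(clsname, **kw):
    # Same shape as one_to_many above, minus the cls._references() call.
    @declared_attr
    def o2m(cls):
        return relationship(clsname, **kw)
    return o2m

class Parent(Base):
    __tablename__ = 'parent'
    id = sa.Column(sa.Integer, primary_key=True)
    children = simple_one_to_many('Child')

class Child(Base):
    __tablename__ = 'child'
    id = sa.Column(sa.Integer, primary_key=True)
    parent_id = sa.Column(sa.Integer, sa.ForeignKey('parent.id'))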
valid
djeffify_string
Djeffifies string_to_djeff
djeff/djeff.py
def djeffify_string(string_to_djeff):
    """
    Djeffifies string_to_djeff
    """
    string_to_djeff = re.sub(r'^(?=[jg])', 'd', string_to_djeff, flags=re.IGNORECASE)      # first
    string_to_djeff = re.sub(r'[ ](?=[jg])', ' d', string_to_djeff, flags=re.IGNORECASE)   # spaces
    string_to_djeff = re.sub(r'[\n](?=[jg])', '\nd', string_to_djeff, flags=re.IGNORECASE) # \n
    return string_to_djeff
[ "Djeffifies", "string_to_djeff" ]
lightstrike/djeff
python
https://github.com/lightstrike/djeff/blob/806a7fe1c9ebbe144bc8afcff55deb5616e372b4/djeff/djeff.py#L22-L29
[ "def", "djeffify_string", "(", "string_to_djeff", ")", ":", "string_to_djeff", "=", "re", ".", "sub", "(", "r'^(?=[jg])'", ",", "'d'", ",", "string_to_djeff", ",", "flags", "=", "re", ".", "IGNORECASE", ")", "# first", "string_to_djeff", "=", "re", ".", "sub", "(", "r'[ ](?=[jg])'", ",", "' d'", ",", "string_to_djeff", ",", "flags", "=", "re", ".", "IGNORECASE", ")", "# spaces", "string_to_djeff", "=", "re", ".", "sub", "(", "r'[\\n](?=[jg])'", ",", "'\\nd'", ",", "string_to_djeff", ",", "flags", "=", "re", ".", "IGNORECASE", ")", "# \\n", "return", "string_to_djeff" ]
806a7fe1c9ebbe144bc8afcff55deb5616e372b4
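Re-running the record's function on a sample input shows the three lookahead substitutions in action (the sample string is illustrative):

import re

def djeffify_string(string_to_djeff):
    string_to_djeff = re.sub(r'^(?=[jg])', 'd', string_to_djeff, flags=re.IGNORECASE)
    string_to_djeff = re.sub(r'[ ](?=[jg])', ' d', string_to_djeff, flags=re.IGNORECASE)
    string_to_djeff = re.sub(r'[\n](?=[jg])', '\nd', string_to_djeff, flags=re.IGNORECASE)
    return string_to_djeff

print(djeffify_string('jeff goes jogging'))  # -> djeff dgoes djogging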
valid
DjeffParser.handle_data
Djeffify data between tags
djeff/djeff.py
def handle_data(self, data):
    """
    Djeffify data between tags
    """
    if data.strip():
        data = djeffify_string(data)
    self.djhtml += data
[ "Djeffify", "data", "between", "tags" ]
lightstrike/djeff
python
https://github.com/lightstrike/djeff/blob/806a7fe1c9ebbe144bc8afcff55deb5616e372b4/djeff/djeff.py#L71-L77
[ "def", "handle_data", "(", "self", ",", "data", ")", ":", "if", "data", ".", "strip", "(", ")", ":", "data", "=", "djeffify_string", "(", "data", ")", "self", ".", "djhtml", "+=", "data" ]
806a7fe1c9ebbe144bc8afcff55deb5616e372b4
valid
References._reference_table
Create a foreign key reference from the local class to the given remote table. Adds column references to the declarative class and adds a ForeignKeyConstraint.
baka_model/model/meta/schema.py
def _reference_table(cls, ref_table):
    """Create a foreign key reference from the local class to the given remote
    table.

    Adds column references to the declarative class and adds a
    ForeignKeyConstraint.
    """
    # create pairs of (Foreign key column, primary key column)
    cols = [(sa.Column(), refcol) for refcol in ref_table.primary_key]

    # set "tablename_colname = Foreign key Column" on the local class
    for col, refcol in cols:
        setattr(cls, "%s_%s" % (ref_table.name, refcol.name), col)

    # add a ForeignKeyConstraint([local columns], [remote columns])
    cls.__table__.append_constraint(sa.ForeignKeyConstraint(*zip(*cols)))
[ "Create", "a", "foreign", "key", "reference", "from", "the", "local", "class", "to", "the", "given", "remote", "table", "." ]
suryakencana007/baka_model
python
https://github.com/suryakencana007/baka_model/blob/915c2da9920e973302f5764ae63799acd5ecf0b7/baka_model/model/meta/schema.py#L42-L58
[ "def", "_reference_table", "(", "cls", ",", "ref_table", ")", ":", "# create pairs of (Foreign key column, primary key column)", "cols", "=", "[", "(", "sa", ".", "Column", "(", ")", ",", "refcol", ")", "for", "refcol", "in", "ref_table", ".", "primary_key", "]", "# set \"tablename_colname = Foreign key Column\" on the local class", "for", "col", ",", "refcol", "in", "cols", ":", "setattr", "(", "cls", ",", "\"%s_%s\"", "%", "(", "ref_table", ".", "name", ",", "refcol", ".", "name", ")", ",", "col", ")", "# add a ForeignKeyConstraint([local columns], [remote columns])", "cls", ".", "__table__", ".", "append_constraint", "(", "sa", ".", "ForeignKeyConstraint", "(", "*", "zip", "(", "*", "cols", ")", ")", ")" ]
915c2da9920e973302f5764ae63799acd5ecf0b7
valid
JsonSerializableMixin.__try_to_json
Try to run __json__ on the given object. Raise TypeError if __json__ is missing :param request: Pyramid Request object :type request: <Request> :param attr: Object to JSONify :type attr: any object that has __json__ method :exception: TypeError
baka_model/model/meta/schema.py
def __try_to_json(self, request, attr):
    """
    Try to run __json__ on the given object.
    Raise TypeError if __json__ is missing

    :param request: Pyramid Request object
    :type request: <Request>
    :param attr: Object to JSONify
    :type attr: any object that has __json__ method
    :exception: TypeError
    """
    # check for __json__ method and try to JSONify
    if hasattr(attr, '__json__'):
        return attr.__json__(request)

    # raise error otherwise
    raise TypeError('__json__ method missing on %s' % str(attr))
[ "Try", "to", "run", "__json__", "on", "the", "given", "object", ".", "Raise", "TypeError", "is", "__json__", "is", "missing" ]
suryakencana007/baka_model
python
https://github.com/suryakencana007/baka_model/blob/915c2da9920e973302f5764ae63799acd5ecf0b7/baka_model/model/meta/schema.py#L147-L164
[ "def", "__try_to_json", "(", "self", ",", "request", ",", "attr", ")", ":", "# check for __json__ method and try to JSONify", "if", "hasattr", "(", "attr", ",", "'__json__'", ")", ":", "return", "attr", ".", "__json__", "(", "request", ")", "# raise error otherwise", "raise", "TypeError", "(", "'__json__ method missing on %s'", "%", "str", "(", "attr", ")", ")" ]
915c2da9920e973302f5764ae63799acd5ecf0b7
valid
prepare_path
Path join helper method. Joins paths if a list is passed. :type path: str|unicode|list :rtype: str|unicode
static_bundle/utils.py
def prepare_path(path):
    """
    Path join helper method.
    Joins paths if a list is passed.

    :type path: str|unicode|list
    :rtype: str|unicode
    """
    if type(path) == list:
        return os.path.join(*path)
    return path
[ "Path", "join", "helper", "method", "Join", "paths", "if", "list", "passed" ]
Rikanishu/static-bundle
python
https://github.com/Rikanishu/static-bundle/blob/2f6458cb9d9d9049b4fd829f7d6951a45d547c68/static_bundle/utils.py#L8-L18
[ "def", "prepare_path", "(", "path", ")", ":", "if", "type", "(", "path", ")", "==", "list", ":", "return", "os", ".", "path", ".", "join", "(", "*", "path", ")", "return", "path" ]
2f6458cb9d9d9049b4fd829f7d6951a45d547c68
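A quick demonstration of the two branches; note the record literally uses an exact type() check, so tuples fall through unchanged rather than being joined:

import os

def prepare_path(path):
    if type(path) == list:          # exact check: tuples are returned as-is
        return os.path.join(*path)
    return path

print(prepare_path(['static', 'css', 'main.css']))  # static/css/main.css on POSIX
print(prepare_path('static/js/app.js'))             # returned unchanged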
valid
read_from_file
Read helper method :type file_path: str|unicode :type encoding: str|unicode :rtype: str|unicode
static_bundle/utils.py
def read_from_file(file_path, encoding="utf-8"):
    """
    Read helper method

    :type file_path: str|unicode
    :type encoding: str|unicode
    :rtype: str|unicode
    """
    with codecs.open(file_path, "r", encoding) as f:
        return f.read()
[ "Read", "helper", "method" ]
Rikanishu/static-bundle
python
https://github.com/Rikanishu/static-bundle/blob/2f6458cb9d9d9049b4fd829f7d6951a45d547c68/static_bundle/utils.py#L21-L30
[ "def", "read_from_file", "(", "file_path", ",", "encoding", "=", "\"utf-8\"", ")", ":", "with", "codecs", ".", "open", "(", "file_path", ",", "\"r\"", ",", "encoding", ")", "as", "f", ":", "return", "f", ".", "read", "(", ")" ]
2f6458cb9d9d9049b4fd829f7d6951a45d547c68
valid
write_to_file
Write helper method :type file_path: str|unicode :type contents: str|unicode :type encoding: str|unicode
static_bundle/utils.py
def write_to_file(file_path, contents, encoding="utf-8"):
    """
    Write helper method

    :type file_path: str|unicode
    :type contents: str|unicode
    :type encoding: str|unicode
    """
    with codecs.open(file_path, "w", encoding) as f:
        f.write(contents)
[ "Write", "helper", "method" ]
Rikanishu/static-bundle
python
https://github.com/Rikanishu/static-bundle/blob/2f6458cb9d9d9049b4fd829f7d6951a45d547c68/static_bundle/utils.py#L33-L42
[ "def", "write_to_file", "(", "file_path", ",", "contents", ",", "encoding", "=", "\"utf-8\"", ")", ":", "with", "codecs", ".", "open", "(", "file_path", ",", "\"w\"", ",", "encoding", ")", "as", "f", ":", "f", ".", "write", "(", "contents", ")" ]
2f6458cb9d9d9049b4fd829f7d6951a45d547c68
valid
copy_file
Copy file helper method :type src: str|unicode :type dest: str|unicode
static_bundle/utils.py
def copy_file(src, dest):
    """
    Copy file helper method

    :type src: str|unicode
    :type dest: str|unicode
    """
    dir_path = os.path.dirname(dest)
    if not os.path.exists(dir_path):
        os.makedirs(dir_path)
    shutil.copy2(src, dest)
[ "Copy", "file", "helper", "method" ]
Rikanishu/static-bundle
python
https://github.com/Rikanishu/static-bundle/blob/2f6458cb9d9d9049b4fd829f7d6951a45d547c68/static_bundle/utils.py#L45-L55
[ "def", "copy_file", "(", "src", ",", "dest", ")", ":", "dir_path", "=", "os", ".", "path", ".", "dirname", "(", "dest", ")", "if", "not", "os", ".", "path", ".", "exists", "(", "dir_path", ")", ":", "os", ".", "makedirs", "(", "dir_path", ")", "shutil", ".", "copy2", "(", "src", ",", "dest", ")" ]
2f6458cb9d9d9049b4fd829f7d6951a45d547c68
valid
get_path_extension
Split file name and extension :type path: str|unicode :rtype: str|unicode
static_bundle/utils.py
def get_path_extension(path):
    """
    Split file name and extension

    :type path: str|unicode
    :rtype: str|unicode
    """
    file_path, file_ext = os.path.splitext(path)
    return file_ext.lstrip('.')
[ "Split", "file", "name", "and", "extension" ]
Rikanishu/static-bundle
python
https://github.com/Rikanishu/static-bundle/blob/2f6458cb9d9d9049b4fd829f7d6951a45d547c68/static_bundle/utils.py#L58-L66
[ "def", "get_path_extension", "(", "path", ")", ":", "file_path", ",", "file_ext", "=", "os", ".", "path", ".", "splitext", "(", "path", ")", "return", "file_ext", ".", "lstrip", "(", "'.'", ")" ]
2f6458cb9d9d9049b4fd829f7d6951a45d547c68
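Since os.path.splitext splits on the last dot only, multi-suffix names yield just the final extension; a short demonstration:

import os

def get_path_extension(path):
    file_path, file_ext = os.path.splitext(path)
    return file_ext.lstrip('.')

print(get_path_extension('bundle.min.js'))   # js
print(get_path_extension('archive.tar.gz'))  # gz  (last suffix only)
print(get_path_extension('Makefile'))        # empty string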
valid
split_path
Helper method for absolute and relative path resolution. Splits the passed path and returns its directory parts. example: "/usr/share/dir" return: ["usr", "share", "dir"] @type path: one of (unicode, str) @rtype: list
static_bundle/utils.py
def split_path(path):
    """
    Helper method for absolute and relative path resolution.
    Splits the passed path and returns its directory parts.

    example: "/usr/share/dir"
    return: ["usr", "share", "dir"]

    @type path: one of (unicode, str)
    @rtype: list
    """
    result_parts = []
    # todo: check loops
    while path != "/":
        parts = os.path.split(path)
        if parts[1] == path:
            result_parts.insert(0, parts[1])
            break
        elif parts[0] == path:
            result_parts.insert(0, parts[0])
            break
        else:
            path = parts[0]
            result_parts.insert(0, parts[1])
    return result_parts
[ "Helper", "method", "for", "absolute", "and", "relative", "paths", "resolution", "Split", "passed", "path", "and", "return", "each", "directory", "parts" ]
Rikanishu/static-bundle
python
https://github.com/Rikanishu/static-bundle/blob/2f6458cb9d9d9049b4fd829f7d6951a45d547c68/static_bundle/utils.py#L69-L93
[ "def", "split_path", "(", "path", ")", ":", "result_parts", "=", "[", "]", "#todo: check loops", "while", "path", "!=", "\"/\"", ":", "parts", "=", "os", ".", "path", ".", "split", "(", "path", ")", "if", "parts", "[", "1", "]", "==", "path", ":", "result_parts", ".", "insert", "(", "0", ",", "parts", "[", "1", "]", ")", "break", "elif", "parts", "[", "0", "]", "==", "path", ":", "result_parts", ".", "insert", "(", "0", ",", "parts", "[", "0", "]", ")", "break", "else", ":", "path", "=", "parts", "[", "0", "]", "result_parts", ".", "insert", "(", "0", ",", "parts", "[", "1", "]", ")", "return", "result_parts" ]
2f6458cb9d9d9049b4fd829f7d6951a45d547c68
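Tracing the loop on absolute and relative inputs; the leading "/" of an absolute path is consumed by the while guard, so it never appears in the result:

import os

def split_path(path):
    result_parts = []
    while path != "/":
        parts = os.path.split(path)
        if parts[1] == path:       # bare name left: done
            result_parts.insert(0, parts[1])
            break
        elif parts[0] == path:     # unsplittable head: done
            result_parts.insert(0, parts[0])
            break
        else:
            path = parts[0]
            result_parts.insert(0, parts[1])
    return result_parts

print(split_path('/usr/share/dir'))  # ['usr', 'share', 'dir']
print(split_path('usr/share/dir'))   # ['usr', 'share', 'dir']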
valid
RESTClient._create_api_uri
Creates fully qualified endpoint URIs. :param parts: the string parts that form the request URI
cbexchange/client.py
def _create_api_uri(self, *parts):
    """Creates fully qualified endpoint URIs.

    :param parts: the string parts that form the request URI
    """
    return urljoin(self.API_URI, '/'.join(map(quote, parts)))
[ "Creates", "fully", "qualified", "endpoint", "URIs", "." ]
agsimeonov/cbexchange
python
https://github.com/agsimeonov/cbexchange/blob/e3762f77583f89cf7b4f501ab3c7675fc7d30ab3/cbexchange/client.py#L31-L37
[ "def", "_create_api_uri", "(", "self", ",", "*", "parts", ")", ":", "return", "urljoin", "(", "self", ".", "API_URI", ",", "'/'", ".", "join", "(", "map", "(", "quote", ",", "parts", ")", ")", ")" ]
e3762f77583f89cf7b4f501ab3c7675fc7d30ab3
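A standalone sketch of the same construction; the API_URI value here is hypothetical (in the library it lives on the client class), and the Python 3 import path is used:

from urllib.parse import urljoin, quote  # the record itself predates this import path

API_URI = 'https://api.example.com'

def create_api_uri(*parts):
    # quote() percent-encodes each part before they are joined with '/'
    return urljoin(API_URI, '/'.join(map(quote, parts)))

print(create_api_uri('products', 'BTC-USD', 'book'))
# https://api.example.com/products/BTC-USD/book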
valid
RESTClient._format_iso_time
Makes sure we have proper ISO 8601 time. :param time: either an ISO 8601 string or datetime.datetime :returns: ISO 8601 time :rtype: str
cbexchange/client.py
def _format_iso_time(self, time):
    """Makes sure we have proper ISO 8601 time.

    :param time: either an ISO 8601 string or datetime.datetime
    :returns: ISO 8601 time
    :rtype: str
    """
    if isinstance(time, str):
        return time
    elif isinstance(time, datetime):
        return time.strftime('%Y-%m-%dT%H:%M:%S.%fZ')
    else:
        return None
[ "Makes", "sure", "we", "have", "proper", "ISO", "8601", "time", "." ]
agsimeonov/cbexchange
python
https://github.com/agsimeonov/cbexchange/blob/e3762f77583f89cf7b4f501ab3c7675fc7d30ab3/cbexchange/client.py#L39-L52
[ "def", "_format_iso_time", "(", "self", ",", "time", ")", ":", "if", "isinstance", "(", "time", ",", "str", ")", ":", "return", "time", "elif", "isinstance", "(", "time", ",", "datetime", ")", ":", "return", "time", ".", "strftime", "(", "'%Y-%m-%dT%H:%M:%S.%fZ'", ")", "else", ":", "return", "None" ]
e3762f77583f89cf7b4f501ab3c7675fc7d30ab3
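Exercised standalone, the helper passes strings through, formats datetimes, and returns None for anything else; note the hard-coded 'Z' assumes the datetime is already UTC:

from datetime import datetime

def format_iso_time(time):
    if isinstance(time, str):
        return time
    elif isinstance(time, datetime):
        return time.strftime('%Y-%m-%dT%H:%M:%S.%fZ')
    else:
        return None

print(format_iso_time(datetime(2015, 6, 1, 12, 30, 15, 250000)))
# 2015-06-01T12:30:15.250000Z
print(format_iso_time('2015-06-01T12:30:15.250000Z'))  # passed through unchanged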
valid
RESTClient._handle_response
Returns the given response or raises an APIError for non-2xx responses. :param requests.Response response: HTTP response :returns: requested data :rtype: requests.Response :raises APIError: for non-2xx responses
cbexchange/client.py
def _handle_response(self, response):
    """Returns the given response or raises an APIError for non-2xx responses.

    :param requests.Response response: HTTP response
    :returns: requested data
    :rtype: requests.Response
    :raises APIError: for non-2xx responses
    """
    if not str(response.status_code).startswith('2'):
        raise get_api_error(response)
    return response
[ "Returns", "the", "given", "response", "or", "raises", "an", "APIError", "for", "non", "-", "2xx", "responses", "." ]
agsimeonov/cbexchange
python
https://github.com/agsimeonov/cbexchange/blob/e3762f77583f89cf7b4f501ab3c7675fc7d30ab3/cbexchange/client.py#L54-L65
[ "def", "_handle_response", "(", "self", ",", "response", ")", ":", "if", "not", "str", "(", "response", ".", "status_code", ")", ".", "startswith", "(", "'2'", ")", ":", "raise", "get_api_error", "(", "response", ")", "return", "response" ]
e3762f77583f89cf7b4f501ab3c7675fc7d30ab3
valid
PaginationClient._check_next
Checks if a next message is possible. :returns: True if a next message is possible, otherwise False :rtype: bool
cbexchange/client.py
def _check_next(self):
    """Checks if a next message is possible.

    :returns: True if a next message is possible, otherwise False
    :rtype: bool
    """
    if self.is_initial:
        return True
    if self.before:
        if self.before_cursor:
            return True
        else:
            return False
    else:
        if self.after_cursor:
            return True
        else:
            return False
[ "Checks", "if", "a", "next", "message", "is", "possible", "." ]
agsimeonov/cbexchange
python
https://github.com/agsimeonov/cbexchange/blob/e3762f77583f89cf7b4f501ab3c7675fc7d30ab3/cbexchange/client.py#L158-L176
[ "def", "_check_next", "(", "self", ")", ":", "if", "self", ".", "is_initial", ":", "return", "True", "if", "self", ".", "before", ":", "if", "self", ".", "before_cursor", ":", "return", "True", "else", ":", "return", "False", "else", ":", "if", "self", ".", "after_cursor", ":", "return", "True", "else", ":", "return", "False" ]
e3762f77583f89cf7b4f501ab3c7675fc7d30ab3
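The nested branches above reduce to a single boolean expression; a behavior-equivalent sketch (parameter names mirror the record's attributes):

def check_next(is_initial, before, before_cursor, after_cursor):
    # True on the first page, then True only while a cursor remains
    # in the direction of pagination.
    if is_initial:
        return True
    return bool(before_cursor) if before else bool(after_cursor)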
valid
Colors._wrap_color
Colors text with code and given format
colors.py
def _wrap_color(self, code, text, format=None, style=None):
    """ Colors text with code and given format """
    color = None
    if code[:3] == self.bg.PREFIX:
        color = self.bg.COLORS.get(code, None)
    if not color:
        color = self.fg.COLORS.get(code, None)
    if not color:
        raise Exception('Color code not found')

    if format and format not in self.formats:
        raise Exception('Color format not found')

    fmt = "0;"
    if format == 'bold':
        fmt = "1;"
    elif format == 'underline':
        fmt = "4;"

    # Manage the format
    parts = color.split('[')
    color = '{0}[{1}{2}'.format(parts[0], fmt, parts[1])

    if self.has_colors and self.colors_enabled:
        # Set brightness
        st = ''
        if style:
            st = self.st.COLORS.get(style, '')
        return "{0}{1}{2}{3}".format(st, color, text, self.st.COLORS['reset_all'])
    else:
        return text
[ "Colors", "text", "with", "code", "and", "given", "format" ]
chrisgilmerproj/pycolors2
python
https://github.com/chrisgilmerproj/pycolors2/blob/20e447005b70d29fc9f3852bcd526fc6fb337ea3/colors.py#L122-L153
[ "def", "_wrap_color", "(", "self", ",", "code", ",", "text", ",", "format", "=", "None", ",", "style", "=", "None", ")", ":", "color", "=", "None", "if", "code", "[", ":", "3", "]", "==", "self", ".", "bg", ".", "PREFIX", ":", "color", "=", "self", ".", "bg", ".", "COLORS", ".", "get", "(", "code", ",", "None", ")", "if", "not", "color", ":", "color", "=", "self", ".", "fg", ".", "COLORS", ".", "get", "(", "code", ",", "None", ")", "if", "not", "color", ":", "raise", "Exception", "(", "'Color code not found'", ")", "if", "format", "and", "format", "not", "in", "self", ".", "formats", ":", "raise", "Exception", "(", "'Color format not found'", ")", "fmt", "=", "\"0;\"", "if", "format", "==", "'bold'", ":", "fmt", "=", "\"1;\"", "elif", "format", "==", "'underline'", ":", "fmt", "=", "\"4;\"", "# Manage the format", "parts", "=", "color", ".", "split", "(", "'['", ")", "color", "=", "'{0}[{1}{2}'", ".", "format", "(", "parts", "[", "0", "]", ",", "fmt", ",", "parts", "[", "1", "]", ")", "if", "self", ".", "has_colors", "and", "self", ".", "colors_enabled", ":", "# Set brightness", "st", "=", "''", "if", "style", ":", "st", "=", "self", ".", "st", ".", "COLORS", ".", "get", "(", "style", ",", "''", ")", "return", "\"{0}{1}{2}{3}\"", ".", "format", "(", "st", ",", "color", ",", "text", ",", "self", ".", "st", ".", "COLORS", "[", "'reset_all'", "]", ")", "else", ":", "return", "text" ]
20e447005b70d29fc9f3852bcd526fc6fb337ea3
valid
SymbolDatabase.RegisterMessage
Registers the given message type in the local database. Args: message: a message.Message, to be registered. Returns: The provided message.
typy/google/protobuf/symbol_database.py
def RegisterMessage(self, message):
    """Registers the given message type in the local database.

    Args:
      message: a message.Message, to be registered.

    Returns:
      The provided message.
    """
    desc = message.DESCRIPTOR
    self._symbols[desc.full_name] = message
    if desc.file.name not in self._symbols_by_file:
        self._symbols_by_file[desc.file.name] = {}
    self._symbols_by_file[desc.file.name][desc.full_name] = message
    self.pool.AddDescriptor(desc)
    return message
[ "Registers", "the", "given", "message", "type", "in", "the", "local", "database", "." ]
ibelie/typy
python
https://github.com/ibelie/typy/blob/3616845fb91459aacd8df6bf82c5d91f4542bee7/typy/google/protobuf/symbol_database.py#L82-L98
[ "def", "RegisterMessage", "(", "self", ",", "message", ")", ":", "desc", "=", "message", ".", "DESCRIPTOR", "self", ".", "_symbols", "[", "desc", ".", "full_name", "]", "=", "message", "if", "desc", ".", "file", ".", "name", "not", "in", "self", ".", "_symbols_by_file", ":", "self", ".", "_symbols_by_file", "[", "desc", ".", "file", ".", "name", "]", "=", "{", "}", "self", ".", "_symbols_by_file", "[", "desc", ".", "file", ".", "name", "]", "[", "desc", ".", "full_name", "]", "=", "message", "self", ".", "pool", ".", "AddDescriptor", "(", "desc", ")", "return", "message" ]
3616845fb91459aacd8df6bf82c5d91f4542bee7
valid
SymbolDatabase.GetMessages
Gets all the messages from a specified file. This will find and resolve dependencies, failing if they are not registered in the symbol database. Args: files: The file names to extract messages from. Returns: A dictionary mapping proto names to the message classes. This will include any dependent messages as well as any messages defined in the same file as a specified message. Raises: KeyError: if a file could not be found.
typy/google/protobuf/symbol_database.py
def GetMessages(self, files):
    """Gets all the messages from a specified file.

    This will find and resolve dependencies, failing if they are not
    registered in the symbol database.

    Args:
      files: The file names to extract messages from.

    Returns:
      A dictionary mapping proto names to the message classes. This will
      include any dependent messages as well as any messages defined in the
      same file as a specified message.

    Raises:
      KeyError: if a file could not be found.
    """
    result = {}
    for f in files:
        result.update(self._symbols_by_file[f])
    return result
[ "Gets", "all", "the", "messages", "from", "a", "specified", "file", "." ]
ibelie/typy
python
https://github.com/ibelie/typy/blob/3616845fb91459aacd8df6bf82c5d91f4542bee7/typy/google/protobuf/symbol_database.py#L156-L178
[ "def", "GetMessages", "(", "self", ",", "files", ")", ":", "result", "=", "{", "}", "for", "f", "in", "files", ":", "result", ".", "update", "(", "self", ".", "_symbols_by_file", "[", "f", "]", ")", "return", "result" ]
3616845fb91459aacd8df6bf82c5d91f4542bee7
valid
RuntimePath.insert
Insert object before index. :param int index: index to insert in :param string value: path to insert
headlessvim/runtimepath.py
def insert(self, index, value):
    """
    Insert object before index.

    :param int index: index to insert in
    :param string value: path to insert
    """
    self._list.insert(index, value)
    self._sync()
[ "Insert", "object", "before", "index", "." ]
manicmaniac/headlessvim
python
https://github.com/manicmaniac/headlessvim/blob/3e4657f95d981ddf21fd285b7e1b9da2154f9cb9/headlessvim/runtimepath.py#L41-L49
[ "def", "insert", "(", "self", ",", "index", ",", "value", ")", ":", "self", ".", "_list", ".", "insert", "(", "index", ",", "value", ")", "self", ".", "_sync", "(", ")" ]
3e4657f95d981ddf21fd285b7e1b9da2154f9cb9
valid
RuntimePath.parse
Parse runtime path representation to list. :param string string: runtime path string :return: list of runtime paths :rtype: list of string
headlessvim/runtimepath.py
def parse(self, string):
    """
    Parse runtime path representation to list.

    :param string string: runtime path string
    :return: list of runtime paths
    :rtype: list of string
    """
    var, eq, values = string.strip().partition('=')
    assert var == 'runtimepath'
    assert eq == '='
    return values.split(',')
[ "Parse", "runtime", "path", "representation", "to", "list", "." ]
manicmaniac/headlessvim
python
https://github.com/manicmaniac/headlessvim/blob/3e4657f95d981ddf21fd285b7e1b9da2154f9cb9/headlessvim/runtimepath.py#L63-L74
[ "def", "parse", "(", "self", ",", "string", ")", ":", "var", ",", "eq", ",", "values", "=", "string", ".", "strip", "(", ")", ".", "partition", "(", "'='", ")", "assert", "var", "==", "'runtimepath'", "assert", "eq", "==", "'='", "return", "values", ".", "split", "(", "','", ")" ]
3e4657f95d981ddf21fd285b7e1b9da2154f9cb9
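The parse step in isolation, fed a hypothetical runtimepath value of the kind Vim reports:

def parse(string):
    # str.partition splits on the first '=' only, so commas in the value survive
    var, eq, values = string.strip().partition('=')
    assert var == 'runtimepath'
    assert eq == '='
    return values.split(',')

print(parse('runtimepath=~/.vim,/usr/share/vim/vimfiles,/usr/share/vim/vim82'))
# ['~/.vim', '/usr/share/vim/vimfiles', '/usr/share/vim/vim82']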
valid
Asset.add_bundle
Add some bundle to build group :type bundle: static_bundle.bundles.AbstractBundle @rtype: BuildGroup
static_bundle/builders.py
def add_bundle(self, *args):
    """
    Add some bundle to build group

    :type bundle: static_bundle.bundles.AbstractBundle
    @rtype: BuildGroup
    """
    for bundle in args:
        if not self.multitype and self.has_bundles():
            first_bundle = self.get_first_bundle()
            if first_bundle.get_type() != bundle.get_type():
                raise Exception(
                    'Different bundle types for one Asset: %s[%s -> %s]; '
                    'check types or set multitype parameter to True'
                    % (self.name, first_bundle.get_type(), bundle.get_type())
                )
        self.bundles.append(bundle)
    return self
[ "Add", "some", "bundle", "to", "build", "group" ]
Rikanishu/static-bundle
python
https://github.com/Rikanishu/static-bundle/blob/2f6458cb9d9d9049b4fd829f7d6951a45d547c68/static_bundle/builders.py#L44-L61
[ "def", "add_bundle", "(", "self", ",", "*", "args", ")", ":", "for", "bundle", "in", "args", ":", "if", "not", "self", ".", "multitype", "and", "self", ".", "has_bundles", "(", ")", ":", "first_bundle", "=", "self", ".", "get_first_bundle", "(", ")", "if", "first_bundle", ".", "get_type", "(", ")", "!=", "bundle", ".", "get_type", "(", ")", ":", "raise", "Exception", "(", "'Different bundle types for one Asset: %s[%s -> %s]'", "'check types or set multitype parameter to True'", "%", "(", "self", ".", "name", ",", "first_bundle", ".", "get_type", "(", ")", ",", "bundle", ".", "get_type", "(", ")", ")", ")", "self", ".", "bundles", ".", "append", "(", "bundle", ")", "return", "self" ]
2f6458cb9d9d9049b4fd829f7d6951a45d547c68
valid
Asset.collect_files
Return collected files links :rtype: list[static_bundle.files.StaticFileResult]
static_bundle/builders.py
def collect_files(self):
    """
    Return collected files links

    :rtype: list[static_bundle.files.StaticFileResult]
    """
    self.files = []
    for bundle in self.bundles:
        bundle.init_build(self, self.builder)
        bundle_files = bundle.prepare()
        self.files.extend(bundle_files)
    return self
[ "Return", "collected", "files", "links" ]
Rikanishu/static-bundle
python
https://github.com/Rikanishu/static-bundle/blob/2f6458cb9d9d9049b4fd829f7d6951a45d547c68/static_bundle/builders.py#L63-L74
[ "def", "collect_files", "(", "self", ")", ":", "self", ".", "files", "=", "[", "]", "for", "bundle", "in", "self", ".", "bundles", ":", "bundle", ".", "init_build", "(", "self", ",", "self", ".", "builder", ")", "bundle_files", "=", "bundle", ".", "prepare", "(", ")", "self", ".", "files", ".", "extend", "(", "bundle_files", ")", "return", "self" ]
2f6458cb9d9d9049b4fd829f7d6951a45d547c68
valid
Asset.get_minifier
Asset minifier Uses default minifier in bundle if it's not defined :rtype: static_bundle.minifiers.DefaultMinifier|None
static_bundle/builders.py
def get_minifier(self):
    """
    Asset minifier
    Uses default minifier in bundle if it's not defined

    :rtype: static_bundle.minifiers.DefaultMinifier|None
    """
    if self.minifier is None:
        if not self.has_bundles():
            raise Exception("Unable to get default minifier, no bundles in build group")
        minifier = self.get_first_bundle().get_default_minifier()
    else:
        minifier = self.minifier
    if minifier:
        minifier.init_asset(self)
    return minifier
[ "Asset", "minifier", "Uses", "default", "minifier", "in", "bundle", "if", "it", "s", "not", "defined" ]
Rikanishu/static-bundle
python
https://github.com/Rikanishu/static-bundle/blob/2f6458cb9d9d9049b4fd829f7d6951a45d547c68/static_bundle/builders.py#L76-L91
[ "def", "get_minifier", "(", "self", ")", ":", "if", "self", ".", "minifier", "is", "None", ":", "if", "not", "self", ".", "has_bundles", "(", ")", ":", "raise", "Exception", "(", "\"Unable to get default minifier, no bundles in build group\"", ")", "minifier", "=", "self", ".", "get_first_bundle", "(", ")", ".", "get_default_minifier", "(", ")", "else", ":", "minifier", "=", "self", ".", "minifier", "if", "minifier", ":", "minifier", ".", "init_asset", "(", "self", ")", "return", "minifier" ]
2f6458cb9d9d9049b4fd829f7d6951a45d547c68
valid
StandardBuilder.create_asset
Create asset :type name: unicode|str :rtype: Asset
static_bundle/builders.py
def create_asset(self, name, **kwargs):
    """
    Create asset

    :type name: unicode|str
    :rtype: Asset
    """
    asset = Asset(self, name, **kwargs)
    self.assets[name] = asset
    return asset
[ "Create", "asset" ]
Rikanishu/static-bundle
python
https://github.com/Rikanishu/static-bundle/blob/2f6458cb9d9d9049b4fd829f7d6951a45d547c68/static_bundle/builders.py#L120-L129
[ "def", "create_asset", "(", "self", ",", "name", ",", "*", "*", "kwargs", ")", ":", "asset", "=", "Asset", "(", "self", ",", "name", ",", "*", "*", "kwargs", ")", "self", ".", "assets", "[", "name", "]", "=", "asset", "return", "asset" ]
2f6458cb9d9d9049b4fd829f7d6951a45d547c68
valid
StandardBuilder.render_asset
Render all includes in asset by names :type name: str|unicode :rtype: str|unicode
static_bundle/builders.py
def render_asset(self, name):
    """
    Render all includes in asset by names

    :type name: str|unicode
    :rtype: str|unicode
    """
    result = ""
    if self.has_asset(name):
        asset = self.get_asset(name)
        if asset.files:
            for f in asset.files:
                result += f.render_include() + "\r\n"
    return result
[ "Render", "all", "includes", "in", "asset", "by", "names" ]
Rikanishu/static-bundle
python
https://github.com/Rikanishu/static-bundle/blob/2f6458cb9d9d9049b4fd829f7d6951a45d547c68/static_bundle/builders.py#L157-L170
[ "def", "render_asset", "(", "self", ",", "name", ")", ":", "result", "=", "\"\"", "if", "self", ".", "has_asset", "(", "name", ")", ":", "asset", "=", "self", ".", "get_asset", "(", "name", ")", "if", "asset", ".", "files", ":", "for", "f", "in", "asset", ".", "files", ":", "result", "+=", "f", ".", "render_include", "(", ")", "+", "\"\\r\\n\"", "return", "result" ]
2f6458cb9d9d9049b4fd829f7d6951a45d547c68
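Pieced together from the builder records above, a hypothetical end-to-end flow; the Config and JsBundle names are assumptions, not verified against the library, and the sketch is not runnable without static-bundle itself:

import static_bundle

builder = StandardBuilder(Config(input_dir='src', output_dir='public'))

asset = builder.create_asset('scripts', minify=True)
asset.add_bundle(JsBundle('js/lib'), JsBundle('js/app'))  # same type, so no multitype error

builder.collect_links(env=static_bundle.ENV_PRODUCTION)  # resolve file links only
print(builder.render_asset('scripts'))                   # one include tag per line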
valid
StandardBuilder.collect_links
Return links without build files
static_bundle/builders.py
def collect_links(self, env=None):
    """
    Return links without build files
    """
    for asset in self.assets.values():
        if asset.has_bundles():
            asset.collect_files()
    if env is None:
        env = self.config.env
    if env == static_bundle.ENV_PRODUCTION:
        self._minify(emulate=True)
    self._add_url_prefix()
[ "Return", "links", "without", "build", "files" ]
Rikanishu/static-bundle
python
https://github.com/Rikanishu/static-bundle/blob/2f6458cb9d9d9049b4fd829f7d6951a45d547c68/static_bundle/builders.py#L178-L189
[ "def", "collect_links", "(", "self", ",", "env", "=", "None", ")", ":", "for", "asset", "in", "self", ".", "assets", ".", "values", "(", ")", ":", "if", "asset", ".", "has_bundles", "(", ")", ":", "asset", ".", "collect_files", "(", ")", "if", "env", "is", "None", ":", "env", "=", "self", ".", "config", ".", "env", "if", "env", "==", "static_bundle", ".", "ENV_PRODUCTION", ":", "self", ".", "_minify", "(", "emulate", "=", "True", ")", "self", ".", "_add_url_prefix", "(", ")" ]
2f6458cb9d9d9049b4fd829f7d6951a45d547c68
valid
StandardBuilder.make_build
Move files / make static build
static_bundle/builders.py
def make_build(self):
    """
    Move files / make static build
    """
    for asset in self.assets.values():
        if asset.has_bundles():
            asset.collect_files()
    if not os.path.exists(self.config.output_dir):
        os.makedirs(self.config.output_dir)
    if self.config.copy_only_bundles:
        for asset in self.assets.values():
            if not asset.minify and asset.files:
                for f in asset.files:
                    copy_file(f.abs_path, self._get_output_path(f.abs_path))
    else:
        copy_excludes = {}
        for asset in self.assets.values():
            if asset.minify and asset.files:
                for f in asset.files:
                    copy_excludes[f.abs_path] = f
        for root, dirs, files in os.walk(self.config.input_dir):
            for fpath in files:
                current_file_path = os.path.join(root, fpath)
                if current_file_path not in copy_excludes:
                    copy_file(current_file_path, self._get_output_path(current_file_path))
    self._minify()
[ "Move", "files", "/", "make", "static", "build" ]
Rikanishu/static-bundle
python
https://github.com/Rikanishu/static-bundle/blob/2f6458cb9d9d9049b4fd829f7d6951a45d547c68/static_bundle/builders.py#L191-L216
[ "def", "make_build", "(", "self", ")", ":", "for", "asset", "in", "self", ".", "assets", ".", "values", "(", ")", ":", "if", "asset", ".", "has_bundles", "(", ")", ":", "asset", ".", "collect_files", "(", ")", "if", "not", "os", ".", "path", ".", "exists", "(", "self", ".", "config", ".", "output_dir", ")", ":", "os", ".", "makedirs", "(", "self", ".", "config", ".", "output_dir", ")", "if", "self", ".", "config", ".", "copy_only_bundles", ":", "for", "asset", "in", "self", ".", "assets", ".", "values", "(", ")", ":", "if", "not", "asset", ".", "minify", "and", "asset", ".", "files", ":", "for", "f", "in", "asset", ".", "files", ":", "copy_file", "(", "f", ".", "abs_path", ",", "self", ".", "_get_output_path", "(", "f", ".", "abs_path", ")", ")", "else", ":", "copy_excludes", "=", "{", "}", "for", "asset", "in", "self", ".", "assets", ".", "values", "(", ")", ":", "if", "asset", ".", "minify", "and", "asset", ".", "files", ":", "for", "f", "in", "asset", ".", "files", ":", "copy_excludes", "[", "f", ".", "abs_path", "]", "=", "f", "for", "root", ",", "dirs", ",", "files", "in", "os", ".", "walk", "(", "self", ".", "config", ".", "input_dir", ")", ":", "for", "fpath", "in", "files", ":", "current_file_path", "=", "os", ".", "path", ".", "join", "(", "root", ",", "fpath", ")", "if", "current_file_path", "not", "in", "copy_excludes", ":", "copy_file", "(", "current_file_path", ",", "self", ".", "_get_output_path", "(", "current_file_path", ")", ")", "self", ".", "_minify", "(", ")" ]
2f6458cb9d9d9049b4fd829f7d6951a45d547c68
valid
StandardBuilder.clear
Clear build output dir :type exclude: list|None
static_bundle/builders.py
def clear(self, exclude=None):
    """
    Clear build output dir

    :type exclude: list|None
    """
    exclude = exclude or []
    for root, dirs, files in os.walk(self.config.output_dir):
        for f in files:
            if f not in exclude:
                os.unlink(os.path.join(root, f))
        for d in dirs:
            if d not in exclude:
                shutil.rmtree(os.path.join(root, d))
[ "Clear", "build", "output", "dir", ":", "type", "exclude", ":", "list|None" ]
Rikanishu/static-bundle
python
https://github.com/Rikanishu/static-bundle/blob/2f6458cb9d9d9049b4fd829f7d6951a45d547c68/static_bundle/builders.py#L218-L230
[ "def", "clear", "(", "self", ",", "exclude", "=", "None", ")", ":", "exclude", "=", "exclude", "or", "[", "]", "for", "root", ",", "dirs", ",", "files", "in", "os", ".", "walk", "(", "self", ".", "config", ".", "output_dir", ")", ":", "for", "f", "in", "files", ":", "if", "f", "not", "in", "exclude", ":", "os", ".", "unlink", "(", "os", ".", "path", ".", "join", "(", "root", ",", "f", ")", ")", "for", "d", "in", "dirs", ":", "if", "d", "not", "in", "exclude", ":", "shutil", ".", "rmtree", "(", "os", ".", "path", ".", "join", "(", "root", ",", "d", ")", ")" ]
2f6458cb9d9d9049b4fd829f7d6951a45d547c68
valid
_default_json_default
Coerce everything to strings. All objects representing time get output according to default_date_fmt.
pyzlog/__init__.py
def _default_json_default(obj):
    """ Coerce everything to strings.
    All objects representing time get output according to default_date_fmt.
    """
    if isinstance(obj, (datetime.datetime, datetime.date, datetime.time)):
        return obj.strftime(default_date_fmt)
    else:
        return str(obj)
[ "Coerce", "everything", "to", "strings", ".", "All", "objects", "representing", "time", "get", "output", "according", "to", "default_date_fmt", "." ]
zeeto/pyzlog
python
https://github.com/zeeto/pyzlog/blob/c26d680bec04f9edd57ed5be733cae43ec828107/pyzlog/__init__.py#L48-L55
[ "def", "_default_json_default", "(", "obj", ")", ":", "if", "isinstance", "(", "obj", ",", "(", "datetime", ".", "datetime", ",", "datetime", ".", "date", ",", "datetime", ".", "time", ")", ")", ":", "return", "obj", ".", "strftime", "(", "default_date_fmt", ")", "else", ":", "return", "str", "(", "obj", ")" ]
c26d680bec04f9edd57ed5be733cae43ec828107
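Wired into json.dumps via the default= hook; the default_date_fmt value below is an assumption (in pyzlog it is a module-level setting):

import datetime
import json

default_date_fmt = '%Y-%m-%dT%H:%M:%SZ'  # assumed format string

def _default_json_default(obj):
    # json.dumps calls this only for objects it cannot serialize itself
    if isinstance(obj, (datetime.datetime, datetime.date, datetime.time)):
        return obj.strftime(default_date_fmt)
    return str(obj)

print(json.dumps({'when': datetime.datetime(2014, 5, 1, 8, 0, 0)},
                 default=_default_json_default))
# {"when": "2014-05-01T08:00:00Z"}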
valid
init_logs
Initialize the zlogger. Sets up a rotating file handler to the specified path and file with the given size and backup count limits, sets the default application_name, server_hostname, and default/whitelist fields. :param path: path to write the log file :param target: name of the log file :param logger_name: name of the logger (defaults to root) :param level: log level for this logger (defaults to logging.DEBUG) :param maxBytes: size of the file before rotation (default 1MB) :param application_name: app name to add to each log entry :param server_hostname: hostname to add to each log entry :param fields: default/whitelist fields. :type path: string :type target: string :type logger_name: string :type level: int :type maxBytes: int :type backupCount: int :type application_name: string :type server_hostname: string :type fields: dict
pyzlog/__init__.py
def init_logs(path=None,
              target=None,
              logger_name='root',
              level=logging.DEBUG,
              maxBytes=1*1024*1024,
              backupCount=5,
              application_name='default',
              server_hostname=None,
              fields=None):
    """Initialize the zlogger.

    Sets up a rotating file handler to the specified path and file with
    the given size and backup count limits, sets the default
    application_name, server_hostname, and default/whitelist fields.

    :param path: path to write the log file
    :param target: name of the log file
    :param logger_name: name of the logger (defaults to root)
    :param level: log level for this logger (defaults to logging.DEBUG)
    :param maxBytes: size of the file before rotation (default 1MB)
    :param application_name: app name to add to each log entry
    :param server_hostname: hostname to add to each log entry
    :param fields: default/whitelist fields.
    :type path: string
    :type target: string
    :type logger_name: string
    :type level: int
    :type maxBytes: int
    :type backupCount: int
    :type application_name: string
    :type server_hostname: string
    :type fields: dict
    """
    log_file = os.path.abspath(os.path.join(path, target))

    logger = logging.getLogger(logger_name)
    logger.setLevel(level)

    handler = logging.handlers.RotatingFileHandler(
        log_file, maxBytes=maxBytes, backupCount=backupCount)
    handler.setLevel(level)
    handler.setFormatter(
        JsonFormatter(
            application_name=application_name,
            server_hostname=server_hostname,
            fields=fields))

    logger.addHandler(handler)
[ "Initialize", "the", "zlogger", "." ]
zeeto/pyzlog
python
https://github.com/zeeto/pyzlog/blob/c26d680bec04f9edd57ed5be733cae43ec828107/pyzlog/__init__.py#L225-L273
[ "def", "init_logs", "(", "path", "=", "None", ",", "target", "=", "None", ",", "logger_name", "=", "'root'", ",", "level", "=", "logging", ".", "DEBUG", ",", "maxBytes", "=", "1", "*", "1024", "*", "1024", ",", "backupCount", "=", "5", ",", "application_name", "=", "'default'", ",", "server_hostname", "=", "None", ",", "fields", "=", "None", ")", ":", "log_file", "=", "os", ".", "path", ".", "abspath", "(", "os", ".", "path", ".", "join", "(", "path", ",", "target", ")", ")", "logger", "=", "logging", ".", "getLogger", "(", "logger_name", ")", "logger", ".", "setLevel", "(", "level", ")", "handler", "=", "logging", ".", "handlers", ".", "RotatingFileHandler", "(", "log_file", ",", "maxBytes", "=", "maxBytes", ",", "backupCount", "=", "backupCount", ")", "handler", ".", "setLevel", "(", "level", ")", "handler", ".", "setFormatter", "(", "JsonFormatter", "(", "application_name", "=", "application_name", ",", "server_hostname", "=", "server_hostname", ",", "fields", "=", "fields", ")", ")", "logger", ".", "addHandler", "(", "handler", ")" ]
c26d680bec04f9edd57ed5be733cae43ec828107
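A hypothetical initialization call based on the signature above; the paths and field names are illustrative:

import logging
import pyzlog

pyzlog.init_logs(path='/var/log/myapp', target='myapp.json',
                 application_name='myapp', server_hostname='web-1',
                 fields={'request_id': None, 'user_id': None})

# JsonFormatter pulls event_name (and whitelisted fields) out of `extra`,
# since extra kwargs land on the LogRecord's __dict__.
logging.getLogger('root').info('started', extra={'event_name': 'boot'})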
valid
JsonFormatter.format
formats a logging.Record into a standard json log entry :param record: record to be formatted :type record: logging.Record :return: the formatted json string :rtype: string
pyzlog/__init__.py
def format(self, record):
    """formats a logging.Record into a standard json log entry

    :param record: record to be formatted
    :type record: logging.Record
    :return: the formatted json string
    :rtype: string
    """
    record_fields = record.__dict__.copy()
    self._set_exc_info(record_fields)

    event_name = 'default'
    if record_fields.get('event_name'):
        event_name = record_fields.pop('event_name')

    log_level = 'INFO'
    if record_fields.get('log_level'):
        log_level = record_fields.pop('log_level')

    [record_fields.pop(k) for k in record_fields.keys()
     if k not in self.fields]

    defaults = self.defaults.copy()
    fields = self.fields.copy()
    fields.update(record_fields)
    filtered_fields = {}
    for k, v in fields.iteritems():
        if v is not None:
            filtered_fields[k] = v

    defaults.update({
        'event_timestamp': self._get_now(),
        'event_name': event_name,
        'log_level': log_level,
        'fields': filtered_fields})

    return json.dumps(defaults, default=self.json_default)
[ "formats", "a", "logging", ".", "Record", "into", "a", "standard", "json", "log", "entry" ]
zeeto/pyzlog
python
https://github.com/zeeto/pyzlog/blob/c26d680bec04f9edd57ed5be733cae43ec828107/pyzlog/__init__.py#L173-L210
[ "def", "format", "(", "self", ",", "record", ")", ":", "record_fields", "=", "record", ".", "__dict__", ".", "copy", "(", ")", "self", ".", "_set_exc_info", "(", "record_fields", ")", "event_name", "=", "'default'", "if", "record_fields", ".", "get", "(", "'event_name'", ")", ":", "event_name", "=", "record_fields", ".", "pop", "(", "'event_name'", ")", "log_level", "=", "'INFO'", "if", "record_fields", ".", "get", "(", "'log_level'", ")", ":", "log_level", "=", "record_fields", ".", "pop", "(", "'log_level'", ")", "[", "record_fields", ".", "pop", "(", "k", ")", "for", "k", "in", "record_fields", ".", "keys", "(", ")", "if", "k", "not", "in", "self", ".", "fields", "]", "defaults", "=", "self", ".", "defaults", ".", "copy", "(", ")", "fields", "=", "self", ".", "fields", ".", "copy", "(", ")", "fields", ".", "update", "(", "record_fields", ")", "filtered_fields", "=", "{", "}", "for", "k", ",", "v", "in", "fields", ".", "iteritems", "(", ")", ":", "if", "v", "is", "not", "None", ":", "filtered_fields", "[", "k", "]", "=", "v", "defaults", ".", "update", "(", "{", "'event_timestamp'", ":", "self", ".", "_get_now", "(", ")", ",", "'event_name'", ":", "event_name", ",", "'log_level'", ":", "log_level", ",", "'fields'", ":", "filtered_fields", "}", ")", "return", "json", ".", "dumps", "(", "defaults", ",", "default", "=", "self", ".", "json_default", ")" ]
c26d680bec04f9edd57ed5be733cae43ec828107
valid
includeme
Initialize the model for a Pyramid app. Activate this setup using ``config.include('baka_model')``.
baka_model/__init__.py
def includeme(config):
    """
    Initialize the model for a Pyramid app.

    Activate this setup using ``config.include('baka_model')``.
    """
    settings = config.get_settings()
    should_create = asbool(settings.get('baka_model.should_create_all', False))
    should_drop = asbool(settings.get('baka_model.should_drop_all', False))

    # Configure the transaction manager to support retrying retryable
    # exceptions. We also register the session factory with the thread-local
    # transaction manager, so that all sessions it creates are registered.
    # "tm.attempts": 3,
    config.add_settings({
        "retry.attempts": 3,
        "tm.activate_hook": tm_activate_hook,
        "tm.annotate_user": False,
    })

    # use pyramid_retry because pyramid_tm disabled it
    config.include('pyramid_retry')

    # use pyramid_tm to hook the transaction lifecycle to the request
    config.include('pyramid_tm')

    engine = get_engine(settings)
    session_factory = get_session_factory(engine)
    config.registry['db_session_factory'] = session_factory

    # make request.db available for use in Pyramid
    config.add_request_method(
        # r.tm is the transaction manager used by pyramid_tm
        lambda r: get_tm_session(session_factory, r.tm),
        'db',
        reify=True
    )

    # service model factory
    config.include('.service')

    # Register a deferred action to bind the engine when the configuration is
    # committed. Deferring the action means that this module can be included
    # before model modules without ill effect.
    config.action(None, bind_engine, (engine,), {
        'should_create': should_create,
        'should_drop': should_drop
    }, order=10)
[ "Initialize", "the", "model", "for", "a", "Pyramid", "app", "." ]
suryakencana007/baka_model
python
https://github.com/suryakencana007/baka_model/blob/915c2da9920e973302f5764ae63799acd5ecf0b7/baka_model/__init__.py#L38-L86
[ "def", "includeme", "(", "config", ")", ":", "settings", "=", "config", ".", "get_settings", "(", ")", "should_create", "=", "asbool", "(", "settings", ".", "get", "(", "'baka_model.should_create_all'", ",", "False", ")", ")", "should_drop", "=", "asbool", "(", "settings", ".", "get", "(", "'baka_model.should_drop_all'", ",", "False", ")", ")", "# Configure the transaction manager to support retrying retryable", "# exceptions. We also register the session factory with the thread-local", "# transaction manager, so that all sessions it creates are registered.", "# \"tm.attempts\": 3,", "config", ".", "add_settings", "(", "{", "\"retry.attempts\"", ":", "3", ",", "\"tm.activate_hook\"", ":", "tm_activate_hook", ",", "\"tm.annotate_user\"", ":", "False", ",", "}", ")", "# use pyramid_retry couse pyramid_tm disabled it", "config", ".", "include", "(", "'pyramid_retry'", ")", "# use pyramid_tm to hook the transaction lifecycle to the request", "config", ".", "include", "(", "'pyramid_tm'", ")", "engine", "=", "get_engine", "(", "settings", ")", "session_factory", "=", "get_session_factory", "(", "engine", ")", "config", ".", "registry", "[", "'db_session_factory'", "]", "=", "session_factory", "# make request.db available for use in Pyramid", "config", ".", "add_request_method", "(", "# r.tm is the transaction manager used by pyramid_tm", "lambda", "r", ":", "get_tm_session", "(", "session_factory", ",", "r", ".", "tm", ")", ",", "'db'", ",", "reify", "=", "True", ")", "# service model factory", "config", ".", "include", "(", "'.service'", ")", "# Register a deferred action to bind the engine when the configuration is", "# committed. Deferring the action means that this module can be included", "# before model modules without ill effect.", "config", ".", "action", "(", "None", ",", "bind_engine", ",", "(", "engine", ",", ")", ",", "{", "'should_create'", ":", "should_create", ",", "'should_drop'", ":", "should_drop", "}", ",", "order", "=", "10", ")" ]
915c2da9920e973302f5764ae63799acd5ecf0b7
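A minimal sketch of activating this setup from application startup. The 'sqlalchemy.url' key is an assumption: get_engine(settings) is not shown in this record, but Pyramid/SQLAlchemy apps conventionally configure the engine that way.

from pyramid.config import Configurator

settings = {
    'sqlalchemy.url': 'sqlite:///demo.sqlite',   # assumed engine setting
    'baka_model.should_create_all': 'true',      # parsed by asbool() above
}
config = Configurator(settings=settings)
config.include('baka_model')                     # runs includeme(config)
app = config.make_wsgi_app()
# inside a view, request.db is the transaction-managed session registered above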
valid
DataStore.store
validate the passed values in kwargs based on the collection, store them in the mongodb collection
proauth2/data_stores/mongo_ds.py
def store( self, collection, **kwargs ): ''' validate the passed values in kwargs based on the collection, store them in the mongodb collection ''' key = validate( collection, **kwargs ) if self.fetch( collection, **{ key : kwargs[key] } ): raise Proauth2Error( 'duplicate_key' ) self.db[collection].insert( kwargs )
def store( self, collection, **kwargs ): ''' validate the passed values in kwargs based on the collection, store them in the mongodb collection ''' key = validate( collection, **kwargs ) if self.fetch( collection, **{ key : kwargs[key] } ): raise Proauth2Error( 'duplicate_key' ) self.db[collection].insert( kwargs )
[ "validate", "the", "passed", "values", "in", "kwargs", "based", "on", "the", "collection", "store", "them", "in", "the", "mongodb", "collection" ]
charlesthomas/proauth2
python
https://github.com/charlesthomas/proauth2/blob/f88c8df966a1802414047ed304d02df1dd520097/proauth2/data_stores/mongo_ds.py#L44-L52
[ "def", "store", "(", "self", ",", "collection", ",", "*", "*", "kwargs", ")", ":", "key", "=", "validate", "(", "collection", ",", "*", "*", "kwargs", ")", "if", "self", ".", "fetch", "(", "collection", ",", "*", "*", "{", "key", ":", "kwargs", "[", "key", "]", "}", ")", ":", "raise", "Proauth2Error", "(", "'duplicate_key'", ")", "self", ".", "db", "[", "collection", "]", ".", "insert", "(", "kwargs", ")" ]
f88c8df966a1802414047ed304d02df1dd520097
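A hedged usage sketch of store(); the collection and field names are illustrative rather than taken from proauth2's actual validation schema, and ds stands for an already-constructed DataStore.

try:
    ds.store('applications', client_id='abc123', name='demo app')  # hypothetical fields
except Proauth2Error:
    pass  # 'duplicate_key': fetch() already found a document with this key value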
valid
AbstractPath.get_abs_and_rel_paths
Return absolute and relative path for file :type root_path: str|unicode :type file_name: str|unicode :type input_dir: str|unicode :rtype: tuple
static_bundle/paths.py
def get_abs_and_rel_paths(self, root_path, file_name, input_dir): """ Return absolute and relative path for file :type root_path: str|unicode :type file_name: str|unicode :type input_dir: str|unicode :rtype: tuple """ # todo: change relative path resolving [bug on duplicate dir names in path] relative_dir = root_path.replace(input_dir, '') return os.path.join(root_path, file_name), relative_dir + '/' + file_name
def get_abs_and_rel_paths(self, root_path, file_name, input_dir): """ Return absolute and relative path for file :type root_path: str|unicode :type file_name: str|unicode :type input_dir: str|unicode :rtype: tuple """ # todo: change relative path resolving [bug on duplicate dir names in path] relative_dir = root_path.replace(input_dir, '') return os.path.join(root_path, file_name), relative_dir + '/' + file_name
[ "Return", "absolute", "and", "relative", "path", "for", "file" ]
Rikanishu/static-bundle
python
https://github.com/Rikanishu/static-bundle/blob/2f6458cb9d9d9049b4fd829f7d6951a45d547c68/static_bundle/paths.py#L23-L35
[ "def", "get_abs_and_rel_paths", "(", "self", ",", "root_path", ",", "file_name", ",", "input_dir", ")", ":", "# todo: change relative path resolving [bug on duplicate dir names in path]", "relative_dir", "=", "root_path", ".", "replace", "(", "input_dir", ",", "''", ")", "return", "os", ".", "path", ".", "join", "(", "root_path", ",", "file_name", ")", ",", "relative_dir", "+", "'/'", "+", "file_name" ]
2f6458cb9d9d9049b4fd829f7d6951a45d547c68
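A worked example of the tuple this method returns, plus the failure mode the todo comment warns about (paths are illustrative):

# get_abs_and_rel_paths('/project/static/js', 'app.js', '/project/static')
# -> ('/project/static/js/app.js', '/js/app.js')
#
# The noted bug: str.replace() strips every occurrence of input_dir, so a
# duplicated directory name corrupts the relative path, e.g.
# get_abs_and_rel_paths('/static/vendor/static/js', 'a.js', '/static')
# -> ('/static/vendor/static/js/a.js', '/vendor/js/a.js')  # '/static' removed twice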
valid
FilePath.get_files
:inheritdoc
static_bundle/paths.py
def get_files(self): """ :inheritdoc """ assert self.bundle, 'Cannot fetch file name with empty bundle' abs_path, rel_path = self.get_abs_and_rel_paths(self.bundle.path, self.file_path, self.bundle.input_dir) file_cls = self.bundle.get_file_cls() return [file_cls(rel_path, abs_path)]
def get_files(self): """ :inheritdoc """ assert self.bundle, 'Cannot fetch file name with empty bundle' abs_path, rel_path = self.get_abs_and_rel_paths(self.bundle.path, self.file_path, self.bundle.input_dir) file_cls = self.bundle.get_file_cls() return [file_cls(rel_path, abs_path)]
[ ":", "inheritdoc" ]
Rikanishu/static-bundle
python
https://github.com/Rikanishu/static-bundle/blob/2f6458cb9d9d9049b4fd829f7d6951a45d547c68/static_bundle/paths.py#L51-L58
[ "def", "get_files", "(", "self", ")", ":", "assert", "self", ".", "bundle", ",", "'Cannot fetch file name with empty bundle'", "abs_path", ",", "rel_path", "=", "self", ".", "get_abs_and_rel_paths", "(", "self", ".", "bundle", ".", "path", ",", "self", ".", "file_path", ",", "self", ".", "bundle", ".", "input_dir", ")", "file_cls", "=", "self", ".", "bundle", ".", "get_file_cls", "(", ")", "return", "[", "file_cls", "(", "rel_path", ",", "abs_path", ")", "]" ]
2f6458cb9d9d9049b4fd829f7d6951a45d547c68
valid
DirectoryPath.get_files
:inheritdoc
static_bundle/paths.py
def get_files(self): """ :inheritdoc """ assert self.bundle, 'Cannot fetch directory name with empty bundle' result_files = [] bundle_ext = self.bundle.get_extension() ext = "." + bundle_ext if bundle_ext else None if self.directory_path == "": root_path = self.bundle.path else: root_path = os.path.join(self.bundle.path, self.directory_path) for root, dirs, files in os.walk(root_path): for fpath in files: if (not ext or fpath.endswith(ext)) and (not self.exclusions or all(fpath != n for n in self.exclusions)): abs_path, rel_path = self.get_abs_and_rel_paths(root, fpath, self.bundle.input_dir) file_cls = self.bundle.get_file_cls() result_files.append(file_cls(rel_path, abs_path)) return result_files
def get_files(self): """ :inheritdoc """ assert self.bundle, 'Cannot fetch directory name with empty bundle' result_files = [] bundle_ext = self.bundle.get_extension() ext = "." + bundle_ext if bundle_ext else None if self.directory_path == "": root_path = self.bundle.path else: root_path = os.path.join(self.bundle.path, self.directory_path) for root, dirs, files in os.walk(root_path): for fpath in files: if (not ext or fpath.endswith(ext)) and (not self.exclusions or all(fpath != n for n in self.exclusions)): abs_path, rel_path = self.get_abs_and_rel_paths(root, fpath, self.bundle.input_dir) file_cls = self.bundle.get_file_cls() result_files.append(file_cls(rel_path, abs_path)) return result_files
[ ":", "inheritdoc" ]
Rikanishu/static-bundle
python
https://github.com/Rikanishu/static-bundle/blob/2f6458cb9d9d9049b4fd829f7d6951a45d547c68/static_bundle/paths.py#L73-L91
[ "def", "get_files", "(", "self", ")", ":", "assert", "self", ".", "bundle", ",", "'Cannot fetch directory name with empty bundle'", "result_files", "=", "[", "]", "bundle_ext", "=", "self", ".", "bundle", ".", "get_extension", "(", ")", "ext", "=", "\".\"", "+", "bundle_ext", "if", "bundle_ext", "else", "None", "if", "self", ".", "directory_path", "==", "\"\"", ":", "root_path", "=", "self", ".", "bundle", ".", "path", "else", ":", "root_path", "=", "os", ".", "path", ".", "join", "(", "self", ".", "bundle", ".", "path", ",", "self", ".", "directory_path", ")", "for", "root", ",", "dirs", ",", "files", "in", "os", ".", "walk", "(", "root_path", ")", ":", "for", "fpath", "in", "files", ":", "if", "(", "not", "ext", "or", "fpath", ".", "endswith", "(", "ext", ")", ")", "and", "(", "not", "self", ".", "exclusions", "or", "all", "(", "fpath", "!=", "n", "for", "n", "in", "self", ".", "exclusions", ")", ")", ":", "abs_path", ",", "rel_path", "=", "self", ".", "get_abs_and_rel_paths", "(", "root", ",", "fpath", ",", "self", ".", "bundle", ".", "input_dir", ")", "file_cls", "=", "self", ".", "bundle", ".", "get_file_cls", "(", ")", "result_files", ".", "append", "(", "file_cls", "(", "rel_path", ",", "abs_path", ")", ")", "return", "result_files" ]
2f6458cb9d9d9049b4fd829f7d6951a45d547c68
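An illustrative walk for the method above (bundle layout invented): with a bundle extension of 'js' and exclusions=['vendor.js'], the filter keeps matching extensions and drops exact file-name matches anywhere in the tree.

# /static/app.js          -> collected
# /static/lib/widgets.js  -> collected (os.walk recurses into subdirectories)
# /static/lib/vendor.js   -> skipped: bare file name matches an exclusion
# /static/css/style.css   -> skipped: extension filter is '.js'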
valid
replicate_existing
Replicate an existing database to another existing database.
relax/couchdb/replicate.py
def replicate_existing(source_db, target_db): """Replicate an existing database to another existing database.""" # Get the server from which to manage the replication. server = shortcuts.get_server() logger = logging.getLogger('relax.couchdb.replicate') logger.debug('POST ' + urlparse.urljoin(server.resource.uri, '/_replicate')) source, target = specifier_to_db(source_db), specifier_to_db(target_db) logger.debug('Source DB: %s' % (source,)) logger.debug('Target DB: %s' % (target,)) try: resp_headers, resp_body = server.resource.post(path='/_replicate', content=json.dumps({'source': source, 'target': target})) except couchdb.client.ServerError, exc: logger.error('Replication failed.') raise ReplicationError(exc.args) result = resp_body['history'][0] if resp_body['ok']: logger.info('Replication %s... successful!' % ( resp_body['session_id'][:6],)) logger.info('Replication started: ' + result['start_time']) logger.info('Replication finished: ' + result['end_time']) result['start_time'] = datetime.datetime.strptime(result['start_time'], '%a, %d %b %Y %H:%M:%S GMT') result['end_time'] = datetime.datetime.strptime(result['end_time'], '%a, %d %b %Y %H:%M:%S GMT') timedelta = result['end_time'] - result['start_time'] if timedelta.days: logger.info('Replication took %d days and %.2f seconds.' % ( timedelta.days, timedelta.seconds + (timedelta.microseconds * (1e-6)))) else: logger.info('Replication took %.2f seconds.' % ( timedelta.seconds + (timedelta.microseconds * (1e-6)))) # Prepare the 'result' dictionary. result['ok'] = resp_body['ok'] result['session_id'] = resp_body['session_id'] result['source_last_seq'] = resp_body['source_last_seq'] # Info-log the number of docs read/written and checked/found. if result['docs_read'] == 1: docs_read = '1 document read' else: docs_read = '%d documents read' % (result['docs_read'],) if result['docs_written'] == 1: docs_written = '1 document written' else: docs_written = '%d documents written' % (result['docs_written'],) if result['missing_checked'] == 1: missing_checked = 'Checked for 1 missing document, found %d.' % ( result['missing_found'],) else: missing_checked = 'Checked for %d missing documents, found %d.' % ( result['missing_checked'], result['missing_found'],) logging.info('%s, %s' % (docs_read, docs_written)) logging.info(missing_checked) return result else: logger.error('Replication %s... failed.' % ( resp_body['session_id'][:6],)) result['ok'] = resp_body['ok'] result['session_id'] = resp_body['session_id'] result['source_last_seq'] = resp_body['source_last_seq'] raise ReplicationFailure(resp_headers, result)
def replicate_existing(source_db, target_db): """Replicate an existing database to another existing database.""" # Get the server from which to manage the replication. server = shortcuts.get_server() logger = logging.getLogger('relax.couchdb.replicate') logger.debug('POST ' + urlparse.urljoin(server.resource.uri, '/_replicate')) source, target = specifier_to_db(source_db), specifier_to_db(target_db) logger.debug('Source DB: %s' % (source,)) logger.debug('Target DB: %s' % (target,)) try: resp_headers, resp_body = server.resource.post(path='/_replicate', content=json.dumps({'source': source, 'target': target})) except couchdb.client.ServerError, exc: logger.error('Replication failed.') raise ReplicationError(exc.args) result = resp_body['history'][0] if resp_body['ok']: logger.info('Replication %s... successful!' % ( resp_body['session_id'][:6],)) logger.info('Replication started: ' + result['start_time']) logger.info('Replication finished: ' + result['end_time']) result['start_time'] = datetime.datetime.strptime(result['start_time'], '%a, %d %b %Y %H:%M:%S GMT') result['end_time'] = datetime.datetime.strptime(result['end_time'], '%a, %d %b %Y %H:%M:%S GMT') timedelta = result['end_time'] - result['start_time'] if timedelta.days: logger.info('Replication took %d days and %.2f seconds.' % ( timedelta.days, timedelta.seconds + (timedelta.microseconds * (1e-6)))) else: logger.info('Replication took %.2f seconds.' % ( timedelta.seconds + (timedelta.microseconds * (1e-6)))) # Prepare the 'result' dictionary. result['ok'] = resp_body['ok'] result['session_id'] = resp_body['session_id'] result['source_last_seq'] = resp_body['source_last_seq'] # Info-log the number of docs read/written and checked/found. if result['docs_read'] == 1: docs_read = '1 document read' else: docs_read = '%d documents read' % (result['docs_read'],) if result['docs_written'] == 1: docs_written = '1 document written' else: docs_written = '%d documents written' % (result['docs_written'],) if result['missing_checked'] == 1: missing_checked = 'Checked for 1 missing document, found %d.' % ( result['missing_found'],) else: missing_checked = 'Checked for %d missing documents, found %d.' % ( result['missing_checked'], result['missing_found'],) logging.info('%s, %s' % (docs_read, docs_written)) logging.info(missing_checked) return result else: logger.error('Replication %s... failed.' % ( resp_body['session_id'][:6],)) result['ok'] = resp_body['ok'] result['session_id'] = resp_body['session_id'] result['source_last_seq'] = resp_body['source_last_seq'] raise ReplicationFailure(resp_headers, result)
[ "Replicate", "an", "existing", "database", "to", "another", "existing", "database", "." ]
zvoase/django-relax
python
https://github.com/zvoase/django-relax/blob/10bb37bf3a512b290816856a6877c17fa37e930f/relax/couchdb/replicate.py#L31-L92
[ "def", "replicate_existing", "(", "source_db", ",", "target_db", ")", ":", "# Get the server from which to manage the replication.", "server", "=", "shortcuts", ".", "get_server", "(", ")", "logger", "=", "logging", ".", "getLogger", "(", "'relax.couchdb.replicate'", ")", "logger", ".", "debug", "(", "'POST '", "+", "urlparse", ".", "urljoin", "(", "server", ".", "resource", ".", "uri", ",", "'/_replicate'", ")", ")", "source", ",", "target", "=", "specifier_to_db", "(", "source_db", ")", ",", "specifier_to_db", "(", "target_db", ")", "logger", ".", "debug", "(", "'Source DB: %s'", "%", "(", "source", ",", ")", ")", "logger", ".", "debug", "(", "'Target DB: %s'", "%", "(", "target", ",", ")", ")", "try", ":", "resp_headers", ",", "resp_body", "=", "server", ".", "resource", ".", "post", "(", "path", "=", "'/_replicate'", ",", "content", "=", "json", ".", "dumps", "(", "{", "'source'", ":", "source", ",", "'target'", ":", "target", "}", ")", ")", "except", "couchdb", ".", "client", ".", "ServerError", ",", "exc", ":", "logger", ".", "error", "(", "'Replication failed.'", ")", "raise", "ReplicationError", "(", "exc", ".", "args", ")", "result", "=", "resp_body", "[", "'history'", "]", "[", "0", "]", "if", "resp_body", "[", "'ok'", "]", ":", "logger", ".", "info", "(", "'Replication %s... successful!'", "%", "(", "resp_body", "[", "'session_id'", "]", "[", ":", "6", "]", ",", ")", ")", "logger", ".", "info", "(", "'Replication started: '", "+", "result", "[", "'start_time'", "]", ")", "logger", ".", "info", "(", "'Replication finished: '", "+", "result", "[", "'end_time'", "]", ")", "result", "[", "'start_time'", "]", "=", "datetime", ".", "datetime", ".", "strptime", "(", "result", "[", "'start_time'", "]", ",", "'%a, %d %b %Y %H:%M:%S GMT'", ")", "result", "[", "'end_time'", "]", "=", "datetime", ".", "datetime", ".", "strptime", "(", "result", "[", "'end_time'", "]", ",", "'%a, %d %b %Y %H:%M:%S GMT'", ")", "timedelta", "=", "result", "[", "'end_time'", "]", "-", "result", "[", "'start_time'", "]", "if", "timedelta", ".", "days", ":", "logger", ".", "info", "(", "'Replication took %d days and %.2f seconds.'", "%", "(", "timedelta", ".", "days", ",", "timedelta", ".", "seconds", "+", "(", "timedelta", ".", "microseconds", "*", "(", "1e-6", ")", ")", ")", ")", "else", ":", "logger", ".", "info", "(", "'Replication took %.2f seconds.'", "%", "(", "timedelta", ".", "seconds", "+", "(", "timedelta", ".", "microseconds", "*", "(", "1e-6", ")", ")", ")", ")", "# Prepare the 'result' dictionary.", "result", "[", "'ok'", "]", "=", "resp_body", "[", "'ok'", "]", "result", "[", "'session_id'", "]", "=", "resp_body", "[", "'session_id'", "]", "result", "[", "'source_last_seq'", "]", "=", "resp_body", "[", "'source_last_seq'", "]", "# Info-log the number of docs read/written and checked/found.", "if", "result", "[", "'docs_read'", "]", "==", "1", ":", "docs_read", "=", "'1 document read'", "else", ":", "docs_read", "=", "'%d documents read'", "%", "(", "result", "[", "'docs_read'", "]", ",", ")", "if", "result", "[", "'docs_written'", "]", "==", "1", ":", "docs_written", "=", "'1 document written'", "else", ":", "docs_written", "=", "'%d documents written'", "%", "(", "result", "[", "'docs_written'", "]", ",", ")", "if", "result", "[", "'missing_checked'", "]", "==", "1", ":", "missing_checked", "=", "'Checked for 1 missing document, found %d.'", "%", "(", "result", "[", "'missing_found'", "]", ",", ")", "else", ":", "missing_checked", "=", "'Checked for %d missing documents, found %d.'", "%", "(", 
"result", "[", "'missing_checked'", "]", ",", "result", "[", "'missing_found'", "]", ",", ")", "logging", ".", "info", "(", "'%s, %s'", "%", "(", "docs_read", ",", "docs_written", ")", ")", "logging", ".", "info", "(", "missing_checked", ")", "return", "result", "else", ":", "logger", ".", "error", "(", "'Replication %s... failed.'", "%", "(", "resp_body", "[", "'session_id'", "]", "[", ":", "6", "]", ",", ")", ")", "result", "[", "'ok'", "]", "=", "resp_body", "[", "'ok'", "]", "result", "[", "'session_id'", "]", "=", "resp_body", "[", "'session_id'", "]", "result", "[", "'source_last_seq'", "]", "=", "resp_body", "[", "'source_last_seq'", "]", "raise", "ReplicationFailure", "(", "resp_headers", ",", "result", ")" ]
10bb37bf3a512b290816856a6877c17fa37e930f
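A hedged call sketch; the database specifiers are placeholders whose exact syntax is resolved by specifier_to_db(), which this record does not show. Note the function body is Python 2 (the `except ..., exc:` form).

try:
    result = replicate_existing('source_db', 'target_db')  # specifier syntax assumed
    # result carries 'docs_read', 'docs_written', parsed start/end datetimes, ...
except ReplicationFailure:
    pass  # the server answered ok=False; headers and history are attached
except ReplicationError:
    pass  # the POST to /_replicate itself failed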
valid
mcmc_advance
Generic Metropolis MCMC. Advances the chain by nsteps. Called by :func:`mcmc` :param adapt: enables adaptive stepwidth alteration (converges).
jbopt/mcmc.py
def mcmc_advance(start, stdevs, logp, nsteps = 1e300, adapt=True, callback=None): """ Generic Metropolis MCMC. Advances the chain by nsteps. Called by :func:`mcmc` :param adapt: enables adaptive stepwidth alteration (converges). """ import scipy from numpy import log import progressbar prob = logp(start) chain = [start] accepts = [True] probs = [prob] assert not numpy.isnan(start).any() assert not numpy.isnan(stdevs).any() i = 0 widgets=['AR', progressbar.Percentage(), progressbar.Counter('%5d'), progressbar.Bar(), progressbar.ETA()] pbar = progressbar.ProgressBar(widgets=widgets, maxval=nsteps).start() prev = start prev_prob = prob print 'MCMC: start at prob', prob stepchange = 0.1 while len(chain) < nsteps: i = i + 1 next = scipy.random.normal(prev, stdevs) next[next > 1] = 1 next[next < 0] = 0 next_prob = logp(next) assert not numpy.isnan(next).any() assert not numpy.isnan(next_prob).any() delta = next_prob - prev_prob dice = log(scipy.random.uniform(0, 1)) accept = delta > dice if accept: prev = next prev_prob = next_prob if adapt: stdevs *= (1 + stepchange) else: if adapt: stdevs *= (1 + stepchange)**(-0.4) # aiming for 40% acceptance if callback: callback(prev_prob, prev, accept) chain.append(prev) accepts.append(accept) probs.append(prev_prob) if adapt: stepchange = min(0.1, 10. / i) #print 'STDEV', stdevs[:5], stepchange # compute stats widgets[0] = 'AR: %.03f' % numpy.mean(numpy.array(accepts[len(accepts)/3:])+0) pbar.update(pbar.currval + 1) pbar.finish() return chain, probs, accepts, stdevs
def mcmc_advance(start, stdevs, logp, nsteps = 1e300, adapt=True, callback=None): """ Generic Metropolis MCMC. Advances the chain by nsteps. Called by :func:`mcmc` :param adapt: enables adaptive stepwidth alteration (converges). """ import scipy from numpy import log import progressbar prob = logp(start) chain = [start] accepts = [True] probs = [prob] assert not numpy.isnan(start).any() assert not numpy.isnan(stdevs).any() i = 0 widgets=['AR', progressbar.Percentage(), progressbar.Counter('%5d'), progressbar.Bar(), progressbar.ETA()] pbar = progressbar.ProgressBar(widgets=widgets, maxval=nsteps).start() prev = start prev_prob = prob print 'MCMC: start at prob', prob stepchange = 0.1 while len(chain) < nsteps: i = i + 1 next = scipy.random.normal(prev, stdevs) next[next > 1] = 1 next[next < 0] = 0 next_prob = logp(next) assert not numpy.isnan(next).any() assert not numpy.isnan(next_prob).any() delta = next_prob - prev_prob dice = log(scipy.random.uniform(0, 1)) accept = delta > dice if accept: prev = next prev_prob = next_prob if adapt: stdevs *= (1 + stepchange) else: if adapt: stdevs *= (1 + stepchange)**(-0.4) # aiming for 40% acceptance if callback: callback(prev_prob, prev, accept) chain.append(prev) accepts.append(accept) probs.append(prev_prob) if adapt: stepchange = min(0.1, 10. / i) #print 'STDEV', stdevs[:5], stepchange # compute stats widgets[0] = 'AR: %.03f' % numpy.mean(numpy.array(accepts[len(accepts)/3:])+0) pbar.update(pbar.currval + 1) pbar.finish() return chain, probs, accepts, stdevs
[ "Generic", "Metropolis", "MCMC", ".", "Advances", "the", "chain", "by", "nsteps", ".", "Called", "by", ":", "func", ":", "mcmc", ":", "param", "adapt", ":", "enables", "adaptive", "stepwidth", "alteration", "(", "converges", ")", "." ]
JohannesBuchner/jbopt
python
https://github.com/JohannesBuchner/jbopt/blob/11b721ea001625ad7820f71ff684723c71216646/jbopt/mcmc.py#L6-L63
[ "def", "mcmc_advance", "(", "start", ",", "stdevs", ",", "logp", ",", "nsteps", "=", "1e300", ",", "adapt", "=", "True", ",", "callback", "=", "None", ")", ":", "import", "scipy", "from", "numpy", "import", "log", "import", "progressbar", "prob", "=", "logp", "(", "start", ")", "chain", "=", "[", "start", "]", "accepts", "=", "[", "True", "]", "probs", "=", "[", "prob", "]", "assert", "not", "numpy", ".", "isnan", "(", "start", ")", ".", "any", "(", ")", "assert", "not", "numpy", ".", "isnan", "(", "stdevs", ")", ".", "any", "(", ")", "i", "=", "0", "widgets", "=", "[", "'AR'", ",", "progressbar", ".", "Percentage", "(", ")", ",", "progressbar", ".", "Counter", "(", "'%5d'", ")", ",", "progressbar", ".", "Bar", "(", ")", ",", "progressbar", ".", "ETA", "(", ")", "]", "pbar", "=", "progressbar", ".", "ProgressBar", "(", "widgets", "=", "widgets", ",", "maxval", "=", "nsteps", ")", ".", "start", "(", ")", "prev", "=", "start", "prev_prob", "=", "prob", "print", "'MCMC: start at prob'", ",", "prob", "stepchange", "=", "0.1", "while", "len", "(", "chain", ")", "<", "nsteps", ":", "i", "=", "i", "+", "1", "next", "=", "scipy", ".", "random", ".", "normal", "(", "prev", ",", "stdevs", ")", "next", "[", "next", ">", "1", "]", "=", "1", "next", "[", "next", "<", "0", "]", "=", "0", "next_prob", "=", "logp", "(", "next", ")", "assert", "not", "numpy", ".", "isnan", "(", "next", ")", ".", "any", "(", ")", "assert", "not", "numpy", ".", "isnan", "(", "next_prob", ")", ".", "any", "(", ")", "delta", "=", "next_prob", "-", "prev_prob", "dice", "=", "log", "(", "scipy", ".", "random", ".", "uniform", "(", "0", ",", "1", ")", ")", "accept", "=", "delta", ">", "dice", "if", "accept", ":", "prev", "=", "next", "prev_prob", "=", "next_prob", "if", "adapt", ":", "stdevs", "*=", "(", "1", "+", "stepchange", ")", "else", ":", "if", "adapt", ":", "stdevs", "*=", "(", "1", "+", "stepchange", ")", "**", "(", "-", "0.4", ")", "# aiming for 40% acceptance", "if", "callback", ":", "callback", "(", "prev_prob", ",", "prev", ",", "accept", ")", "chain", ".", "append", "(", "prev", ")", "accepts", ".", "append", "(", "accept", ")", "probs", ".", "append", "(", "prev_prob", ")", "if", "adapt", ":", "stepchange", "=", "min", "(", "0.1", ",", "10.", "/", "i", ")", "#print 'STDEV', stdevs[:5], stepchange", "# compute stats", "widgets", "[", "0", "]", "=", "'AR: %.03f'", "%", "numpy", ".", "mean", "(", "numpy", ".", "array", "(", "accepts", "[", "len", "(", "accepts", ")", "/", "3", ":", "]", ")", "+", "0", ")", "pbar", ".", "update", "(", "pbar", ".", "currval", "+", "1", ")", "pbar", ".", "finish", "(", ")", "return", "chain", ",", "probs", ",", "accepts", ",", "stdevs" ]
11b721ea001625ad7820f71ff684723c71216646
valid
mcmc
**Metropolis Hastings MCMC** with automatic step width adaption. Burnin period is also used to guess steps. :param nburn: number of burnin steps :param stdevs: step widths to start with
jbopt/mcmc.py
def mcmc(transform, loglikelihood, parameter_names, nsteps=40000, nburn=400, stdevs=0.1, start = 0.5, **problem): """ **Metropolis Hastings MCMC** with automatic step width adaption. Burnin period is also used to guess steps. :param nburn: number of burnin steps :param stdevs: step widths to start with """ if 'seed' in problem: numpy.random.seed(problem['seed']) n_params = len(parameter_names) def like(cube): cube = numpy.array(cube) if (cube <= 1e-10).any() or (cube >= 1-1e-10).any(): return -1e100 params = transform(cube) return loglikelihood(params) start = start + numpy.zeros(n_params) stdevs = stdevs + numpy.zeros(n_params) def compute_stepwidths(chain): return numpy.std(chain, axis=0) / 3 import matplotlib.pyplot as plt plt.figure(figsize=(7, 7)) steps = numpy.array([0.1]*(n_params)) print 'burn-in (1/2)...' chain, prob, _, steps_ = mcmc_advance(start, steps, like, nsteps=nburn / 2, adapt=True) steps = compute_stepwidths(chain) print 'burn-in (2/2)...' chain, prob, _, steps_ = mcmc_advance(chain[-1], steps, like, nsteps=nburn / 2, adapt=True) steps = compute_stepwidths(chain) print 'recording chain ...' chain, prob, _, steps_ = mcmc_advance(chain[-1], steps, like, nsteps=nsteps) chain = numpy.array(chain) i = numpy.argmax(prob) final = chain[-1] print 'postprocessing...' chain = numpy.array([transform(params) for params in chain]) return dict(start=chain[-1], maximum=chain[i], seeds=[final, chain[i]], chain=chain, method='Metropolis MCMC')
def mcmc(transform, loglikelihood, parameter_names, nsteps=40000, nburn=400, stdevs=0.1, start = 0.5, **problem): """ **Metropolis Hastings MCMC** with automatic step width adaption. Burnin period is also used to guess steps. :param nburn: number of burnin steps :param stdevs: step widths to start with """ if 'seed' in problem: numpy.random.seed(problem['seed']) n_params = len(parameter_names) def like(cube): cube = numpy.array(cube) if (cube <= 1e-10).any() or (cube >= 1-1e-10).any(): return -1e100 params = transform(cube) return loglikelihood(params) start = start + numpy.zeros(n_params) stdevs = stdevs + numpy.zeros(n_params) def compute_stepwidths(chain): return numpy.std(chain, axis=0) / 3 import matplotlib.pyplot as plt plt.figure(figsize=(7, 7)) steps = numpy.array([0.1]*(n_params)) print 'burn-in (1/2)...' chain, prob, _, steps_ = mcmc_advance(start, steps, like, nsteps=nburn / 2, adapt=True) steps = compute_stepwidths(chain) print 'burn-in (2/2)...' chain, prob, _, steps_ = mcmc_advance(chain[-1], steps, like, nsteps=nburn / 2, adapt=True) steps = compute_stepwidths(chain) print 'recording chain ...' chain, prob, _, steps_ = mcmc_advance(chain[-1], steps, like, nsteps=nsteps) chain = numpy.array(chain) i = numpy.argmax(prob) final = chain[-1] print 'postprocessing...' chain = numpy.array([transform(params) for params in chain]) return dict(start=chain[-1], maximum=chain[i], seeds=[final, chain[i]], chain=chain, method='Metropolis MCMC')
[ "**", "Metropolis", "Hastings", "MCMC", "**" ]
JohannesBuchner/jbopt
python
https://github.com/JohannesBuchner/jbopt/blob/11b721ea001625ad7820f71ff684723c71216646/jbopt/mcmc.py#L65-L112
[ "def", "mcmc", "(", "transform", ",", "loglikelihood", ",", "parameter_names", ",", "nsteps", "=", "40000", ",", "nburn", "=", "400", ",", "stdevs", "=", "0.1", ",", "start", "=", "0.5", ",", "*", "*", "problem", ")", ":", "if", "'seed'", "in", "problem", ":", "numpy", ".", "random", ".", "seed", "(", "problem", "[", "'seed'", "]", ")", "n_params", "=", "len", "(", "parameter_names", ")", "def", "like", "(", "cube", ")", ":", "cube", "=", "numpy", ".", "array", "(", "cube", ")", "if", "(", "cube", "<=", "1e-10", ")", ".", "any", "(", ")", "or", "(", "cube", ">=", "1", "-", "1e-10", ")", ".", "any", "(", ")", ":", "return", "-", "1e100", "params", "=", "transform", "(", "cube", ")", "return", "loglikelihood", "(", "params", ")", "start", "=", "start", "+", "numpy", ".", "zeros", "(", "n_params", ")", "stdevs", "=", "stdevs", "+", "numpy", ".", "zeros", "(", "n_params", ")", "def", "compute_stepwidths", "(", "chain", ")", ":", "return", "numpy", ".", "std", "(", "chain", ",", "axis", "=", "0", ")", "/", "3", "import", "matplotlib", ".", "pyplot", "as", "plt", "plt", ".", "figure", "(", "figsize", "=", "(", "7", ",", "7", ")", ")", "steps", "=", "numpy", ".", "array", "(", "[", "0.1", "]", "*", "(", "n_params", ")", ")", "print", "'burn-in (1/2)...'", "chain", ",", "prob", ",", "_", ",", "steps_", "=", "mcmc_advance", "(", "start", ",", "steps", ",", "like", ",", "nsteps", "=", "nburn", "/", "2", ",", "adapt", "=", "True", ")", "steps", "=", "compute_stepwidths", "(", "chain", ")", "print", "'burn-in (2/2)...'", "chain", ",", "prob", ",", "_", ",", "steps_", "=", "mcmc_advance", "(", "chain", "[", "-", "1", "]", ",", "steps", ",", "like", ",", "nsteps", "=", "nburn", "/", "2", ",", "adapt", "=", "True", ")", "steps", "=", "compute_stepwidths", "(", "chain", ")", "print", "'recording chain ...'", "chain", ",", "prob", ",", "_", ",", "steps_", "=", "mcmc_advance", "(", "chain", "[", "-", "1", "]", ",", "steps", ",", "like", ",", "nsteps", "=", "nsteps", ")", "chain", "=", "numpy", ".", "array", "(", "chain", ")", "i", "=", "numpy", ".", "argmax", "(", "prob", ")", "final", "=", "chain", "[", "-", "1", "]", "print", "'postprocessing...'", "chain", "=", "numpy", ".", "array", "(", "[", "transform", "(", "params", ")", "for", "params", "in", "chain", "]", ")", "return", "dict", "(", "start", "=", "chain", "[", "-", "1", "]", ",", "maximum", "=", "chain", "[", "i", "]", ",", "seeds", "=", "[", "final", ",", "chain", "[", "i", "]", "]", ",", "chain", "=", "chain", ",", "method", "=", "'Metropolis MCMC'", ")" ]
11b721ea001625ad7820f71ff684723c71216646
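A minimal driver for mcmc() above; the unit-cube transform and Gaussian log-likelihood are invented for illustration, and since the module uses print statements this sketch assumes a Python 2 interpreter.

import numpy

def transform(cube):
    # map the unit hypercube onto the physical parameter range [-10, 10]
    return cube * 20 - 10

def loglikelihood(params):
    # standard-normal log-density up to an additive constant
    return -0.5 * numpy.sum(params ** 2)

result = mcmc(transform, loglikelihood, parameter_names=['x', 'y'],
              nsteps=4000, nburn=400)
# result['chain'] holds the transformed samples; result['maximum'] the best point seen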
valid
ensemble
**Ensemble MCMC** via `emcee <http://dan.iel.fm/emcee/>`_
jbopt/mcmc.py
def ensemble(transform, loglikelihood, parameter_names, nsteps=40000, nburn=400, start=0.5, **problem): """ **Ensemble MCMC** via `emcee <http://dan.iel.fm/emcee/>`_ """ import emcee import progressbar if 'seed' in problem: numpy.random.seed(problem['seed']) n_params = len(parameter_names) nwalkers = 50 + n_params * 2 if nwalkers > 200: nwalkers = 200 p0 = [numpy.random.rand(n_params) for i in xrange(nwalkers)] start = start + numpy.zeros(n_params) p0[0] = start def like(cube): cube = numpy.array(cube) if (cube <= 1e-10).any() or (cube >= 1-1e-10).any(): return -1e100 params = transform(cube) return loglikelihood(params) sampler = emcee.EnsembleSampler(nwalkers, n_params, like, live_dangerously=True) print 'burn-in...' pos, prob, state = sampler.run_mcmc(p0, nburn / nwalkers) # Reset the chain to remove the burn-in samples. sampler.reset() print 'running ...' # Starting from the final position in the burn-in chain, sample pbar = progressbar.ProgressBar( widgets=[progressbar.Percentage(), progressbar.Counter('%5d'), progressbar.Bar(), progressbar.ETA()], maxval=nsteps).start() for results in sampler.sample(pos, iterations=nsteps / nwalkers, rstate0=state): pbar.update(pbar.currval + 1) pbar.finish() print "Mean acceptance fraction:", numpy.mean(sampler.acceptance_fraction) chain = sampler.flatchain final = chain[-1] print 'postprocessing...' chain_post = numpy.array([transform(params) for params in chain]) chain_prob = sampler.flatlnprobability return dict(start=final, chain=chain_post, chain_prior=chain, chain_prob=chain_prob, method='Ensemble MCMC')
def ensemble(transform, loglikelihood, parameter_names, nsteps=40000, nburn=400, start=0.5, **problem): """ **Ensemble MCMC** via `emcee <http://dan.iel.fm/emcee/>`_ """ import emcee import progressbar if 'seed' in problem: numpy.random.seed(problem['seed']) n_params = len(parameter_names) nwalkers = 50 + n_params * 2 if nwalkers > 200: nwalkers = 200 p0 = [numpy.random.rand(n_params) for i in xrange(nwalkers)] start = start + numpy.zeros(n_params) p0[0] = start def like(cube): cube = numpy.array(cube) if (cube <= 1e-10).any() or (cube >= 1-1e-10).any(): return -1e100 params = transform(cube) return loglikelihood(params) sampler = emcee.EnsembleSampler(nwalkers, n_params, like, live_dangerously=True) print 'burn-in...' pos, prob, state = sampler.run_mcmc(p0, nburn / nwalkers) # Reset the chain to remove the burn-in samples. sampler.reset() print 'running ...' # Starting from the final position in the burn-in chain, sample pbar = progressbar.ProgressBar( widgets=[progressbar.Percentage(), progressbar.Counter('%5d'), progressbar.Bar(), progressbar.ETA()], maxval=nsteps).start() for results in sampler.sample(pos, iterations=nsteps / nwalkers, rstate0=state): pbar.update(pbar.currval + 1) pbar.finish() print "Mean acceptance fraction:", numpy.mean(sampler.acceptance_fraction) chain = sampler.flatchain final = chain[-1] print 'postprocessing...' chain_post = numpy.array([transform(params) for params in chain]) chain_prob = sampler.flatlnprobability return dict(start=final, chain=chain_post, chain_prior=chain, chain_prob=chain_prob, method='Ensemble MCMC')
[ "**", "Ensemble", "MCMC", "**", "via", "emcee", "<http", ":", "//", "dan", ".", "iel", ".", "fm", "/", "emcee", "/", ">", "_" ]
JohannesBuchner/jbopt
python
https://github.com/JohannesBuchner/jbopt/blob/11b721ea001625ad7820f71ff684723c71216646/jbopt/mcmc.py#L114-L170
[ "def", "ensemble", "(", "transform", ",", "loglikelihood", ",", "parameter_names", ",", "nsteps", "=", "40000", ",", "nburn", "=", "400", ",", "start", "=", "0.5", ",", "*", "*", "problem", ")", ":", "import", "emcee", "import", "progressbar", "if", "'seed'", "in", "problem", ":", "numpy", ".", "random", ".", "seed", "(", "problem", "[", "'seed'", "]", ")", "n_params", "=", "len", "(", "parameter_names", ")", "nwalkers", "=", "50", "+", "n_params", "*", "2", "if", "nwalkers", ">", "200", ":", "nwalkers", "=", "200", "p0", "=", "[", "numpy", ".", "random", ".", "rand", "(", "n_params", ")", "for", "i", "in", "xrange", "(", "nwalkers", ")", "]", "start", "=", "start", "+", "numpy", ".", "zeros", "(", "n_params", ")", "p0", "[", "0", "]", "=", "start", "def", "like", "(", "cube", ")", ":", "cube", "=", "numpy", ".", "array", "(", "cube", ")", "if", "(", "cube", "<=", "1e-10", ")", ".", "any", "(", ")", "or", "(", "cube", ">=", "1", "-", "1e-10", ")", ".", "any", "(", ")", ":", "return", "-", "1e100", "params", "=", "transform", "(", "cube", ")", "return", "loglikelihood", "(", "params", ")", "sampler", "=", "emcee", ".", "EnsembleSampler", "(", "nwalkers", ",", "n_params", ",", "like", ",", "live_dangerously", "=", "True", ")", "print", "'burn-in...'", "pos", ",", "prob", ",", "state", "=", "sampler", ".", "run_mcmc", "(", "p0", ",", "nburn", "/", "nwalkers", ")", "# Reset the chain to remove the burn-in samples.", "sampler", ".", "reset", "(", ")", "print", "'running ...'", "# Starting from the final position in the burn-in chain, sample", "pbar", "=", "progressbar", ".", "ProgressBar", "(", "widgets", "=", "[", "progressbar", ".", "Percentage", "(", ")", ",", "progressbar", ".", "Counter", "(", "'%5d'", ")", ",", "progressbar", ".", "Bar", "(", ")", ",", "progressbar", ".", "ETA", "(", ")", "]", ",", "maxval", "=", "nsteps", ")", ".", "start", "(", ")", "for", "results", "in", "sampler", ".", "sample", "(", "pos", ",", "iterations", "=", "nsteps", "/", "nwalkers", ",", "rstate0", "=", "state", ")", ":", "pbar", ".", "update", "(", "pbar", ".", "currval", "+", "1", ")", "pbar", ".", "finish", "(", ")", "print", "\"Mean acceptance fraction:\"", ",", "numpy", ".", "mean", "(", "sampler", ".", "acceptance_fraction", ")", "chain", "=", "sampler", ".", "flatchain", "final", "=", "chain", "[", "-", "1", "]", "print", "'postprocessing...'", "chain_post", "=", "numpy", ".", "array", "(", "[", "transform", "(", "params", ")", "for", "params", "in", "chain", "]", ")", "chain_prob", "=", "sampler", ".", "flatlnprobability", "return", "dict", "(", "start", "=", "final", ",", "chain", "=", "chain_post", ",", "chain_prior", "=", "chain", ",", "chain_prob", "=", "chain_prob", ",", "method", "=", "'Ensemble MCMC'", ")" ]
11b721ea001625ad7820f71ff684723c71216646
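ensemble() takes the same transform/log-likelihood contract, so the sketch from the mcmc record carries over unchanged:

result = ensemble(transform, loglikelihood, parameter_names=['x', 'y'],
                  nsteps=4000, nburn=400)
# result['chain'] are transformed samples; result['chain_prob'] the matching log-probabilities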
valid
TaskProcessor._get_classes
search monits or works classes. Class must have 'name' attribute :param package_name: 'monits' or 'works' :param base_class: Monit or Work :return: tuple of tuples monit/work-name and class
parkworker/task_processor.py
def _get_classes(package_name, base_class): """ search monits or works classes. Class must have 'name' attribute :param package_name: 'monits' or 'works' :param base_class: Monit or Work :return: tuple of tuples monit/work-name and class """ classes = {} base_dir = os.getcwd() root_module_name = base_dir.split('/')[-1] package_dir = base_dir + '/%s' % package_name if os.path.isdir(package_dir): for module_path in os.listdir(package_dir): if not module_path.endswith('.py'): continue module_name = os.path.splitext(module_path)[0] module_full_name = '%s.%s.%s' % (root_module_name, package_name, module_name) __import__(module_full_name) work_module = sys.modules[module_full_name] for module_item in work_module.__dict__.values(): if type(module_item) is type \ and issubclass(module_item, base_class) \ and module_item is not base_class\ and hasattr(module_item, 'name') and module_item.name: classes.setdefault(module_item.name, []).append(module_item) # check no duplicated names for work_name, work_modules in classes.items(): if len(work_modules) > 1: raise DuplicatedNameException('Modules %s have same name "%s"' % ( ' and '.join(map(str, work_modules)), work_name )) # create immutable list of modules return tuple([(work_name, work_modules[0]) for work_name, work_modules in classes.items()])
def _get_classes(package_name, base_class): """ search monits or works classes. Class must have 'name' attribute :param package_name: 'monits' or 'works' :param base_class: Monit or Work :return: tuple of tuples monit/work-name and class """ classes = {} base_dir = os.getcwd() root_module_name = base_dir.split('/')[-1] package_dir = base_dir + '/%s' % package_name if os.path.isdir(package_dir): for module_path in os.listdir(package_dir): if not module_path.endswith('.py'): continue module_name = os.path.splitext(module_path)[0] module_full_name = '%s.%s.%s' % (root_module_name, package_name, module_name) __import__(module_full_name) work_module = sys.modules[module_full_name] for module_item in work_module.__dict__.values(): if type(module_item) is type \ and issubclass(module_item, base_class) \ and module_item is not base_class\ and hasattr(module_item, 'name') and module_item.name: classes.setdefault(module_item.name, []).append(module_item) # check no duplicated names for work_name, work_modules in classes.items(): if len(work_modules) > 1: raise DuplicatedNameException('Modules %s have same name "%s"' % ( ' and '.join(map(str, work_modules)), work_name )) # create immutable list of modules return tuple([(work_name, work_modules[0]) for work_name, work_modules in classes.items()])
[ "search", "monits", "or", "works", "classes", ".", "Class", "must", "have", "name", "attribute", ":", "param", "package_name", ":", "monits", "or", "works", ":", "param", "base_class", ":", "Monit", "or", "Work", ":", "return", ":", "tuple", "of", "tuples", "monit", "/", "work", "-", "name", "and", "class" ]
telminov/park-worker-base
python
https://github.com/telminov/park-worker-base/blob/35fa90939b68bd47f1c82ba49fc80d419a079964/parkworker/task_processor.py#L90-L127
[ "def", "_get_classes", "(", "package_name", ",", "base_class", ")", ":", "classes", "=", "{", "}", "base_dir", "=", "os", ".", "getcwd", "(", ")", "root_module_name", "=", "base_dir", ".", "split", "(", "'/'", ")", "[", "-", "1", "]", "package_dir", "=", "base_dir", "+", "'/%s'", "%", "package_name", "if", "os", ".", "path", ".", "isdir", "(", "package_dir", ")", ":", "for", "module_path", "in", "os", ".", "listdir", "(", "package_dir", ")", ":", "if", "not", "module_path", ".", "endswith", "(", "'.py'", ")", ":", "continue", "module_name", "=", "os", ".", "path", ".", "splitext", "(", "module_path", ")", "[", "0", "]", "module_full_name", "=", "'%s.%s.%s'", "%", "(", "root_module_name", ",", "package_name", ",", "module_name", ")", "__import__", "(", "module_full_name", ")", "work_module", "=", "sys", ".", "modules", "[", "module_full_name", "]", "for", "module_item", "in", "work_module", ".", "__dict__", ".", "values", "(", ")", ":", "if", "type", "(", "module_item", ")", "is", "type", "and", "issubclass", "(", "module_item", ",", "base_class", ")", "and", "module_item", "is", "not", "base_class", "and", "hasattr", "(", "module_item", ",", "'name'", ")", "and", "module_item", ".", "name", ":", "classes", ".", "setdefault", "(", "module_item", ".", "name", ",", "[", "]", ")", ".", "append", "(", "module_item", ")", "# check no duplicated names", "for", "work_name", ",", "work_modules", "in", "classes", ".", "items", "(", ")", ":", "if", "len", "(", "work_modules", ")", ">", "1", ":", "raise", "DuplicatedNameException", "(", "'Modules %s have same name \"%s\"'", "%", "(", "' and '", ".", "join", "(", "map", "(", "str", ",", "work_modules", ")", ")", ",", "work_name", ")", ")", "# create immutable list of modules", "return", "tuple", "(", "[", "(", "work_name", ",", "work_modules", "[", "0", "]", ")", "for", "work_name", ",", "work_modules", "in", "classes", ".", "items", "(", ")", "]", ")" ]
35fa90939b68bd47f1c82ba49fc80d419a079964
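A sketch of a module this discovery would find when called as _get_classes('works', Work); the Work import path is an assumption and the class is invented.

# file: works/ping.py (relative to the current working directory)
from parkworker.work import Work      # assumed import location of the base class

class PingWork(Work):
    name = 'ping'                     # required: discovery keys classes by this attribute

# Defining a second Work subclass with name = 'ping' anywhere under works/
# would raise DuplicatedNameException.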
valid
DescriptorPool.AddEnumDescriptor
Adds an EnumDescriptor to the pool. This method also registers the FileDescriptor associated with the message. Args: enum_desc: An EnumDescriptor.
typy/google/protobuf/descriptor_pool.py
def AddEnumDescriptor(self, enum_desc): """Adds an EnumDescriptor to the pool. This method also registers the FileDescriptor associated with the message. Args: enum_desc: An EnumDescriptor. """ if not isinstance(enum_desc, descriptor.EnumDescriptor): raise TypeError('Expected instance of descriptor.EnumDescriptor.') self._enum_descriptors[enum_desc.full_name] = enum_desc self.AddFileDescriptor(enum_desc.file)
def AddEnumDescriptor(self, enum_desc): """Adds an EnumDescriptor to the pool. This method also registers the FileDescriptor associated with the message. Args: enum_desc: An EnumDescriptor. """ if not isinstance(enum_desc, descriptor.EnumDescriptor): raise TypeError('Expected instance of descriptor.EnumDescriptor.') self._enum_descriptors[enum_desc.full_name] = enum_desc self.AddFileDescriptor(enum_desc.file)
[ "Adds", "an", "EnumDescriptor", "to", "the", "pool", "." ]
ibelie/typy
python
https://github.com/ibelie/typy/blob/3616845fb91459aacd8df6bf82c5d91f4542bee7/typy/google/protobuf/descriptor_pool.py#L150-L163
[ "def", "AddEnumDescriptor", "(", "self", ",", "enum_desc", ")", ":", "if", "not", "isinstance", "(", "enum_desc", ",", "descriptor", ".", "EnumDescriptor", ")", ":", "raise", "TypeError", "(", "'Expected instance of descriptor.EnumDescriptor.'", ")", "self", ".", "_enum_descriptors", "[", "enum_desc", ".", "full_name", "]", "=", "enum_desc", "self", ".", "AddFileDescriptor", "(", "enum_desc", ".", "file", ")" ]
3616845fb91459aacd8df6bf82c5d91f4542bee7
valid
DescriptorPool.FindFileContainingSymbol
Gets the FileDescriptor for the file containing the specified symbol. Args: symbol: The name of the symbol to search for. Returns: A FileDescriptor that contains the specified symbol. Raises: KeyError: if the file can not be found in the pool.
typy/google/protobuf/descriptor_pool.py
def FindFileContainingSymbol(self, symbol): """Gets the FileDescriptor for the file containing the specified symbol. Args: symbol: The name of the symbol to search for. Returns: A FileDescriptor that contains the specified symbol. Raises: KeyError: if the file can not be found in the pool. """ symbol = _NormalizeFullyQualifiedName(symbol) try: return self._descriptors[symbol].file except KeyError: pass try: return self._enum_descriptors[symbol].file except KeyError: pass try: file_proto = self._internal_db.FindFileContainingSymbol(symbol) except KeyError as error: if self._descriptor_db: file_proto = self._descriptor_db.FindFileContainingSymbol(symbol) else: raise error if not file_proto: raise KeyError('Cannot find a file containing %s' % symbol) return self._ConvertFileProtoToFileDescriptor(file_proto)
def FindFileContainingSymbol(self, symbol): """Gets the FileDescriptor for the file containing the specified symbol. Args: symbol: The name of the symbol to search for. Returns: A FileDescriptor that contains the specified symbol. Raises: KeyError: if the file can not be found in the pool. """ symbol = _NormalizeFullyQualifiedName(symbol) try: return self._descriptors[symbol].file except KeyError: pass try: return self._enum_descriptors[symbol].file except KeyError: pass try: file_proto = self._internal_db.FindFileContainingSymbol(symbol) except KeyError as error: if self._descriptor_db: file_proto = self._descriptor_db.FindFileContainingSymbol(symbol) else: raise error if not file_proto: raise KeyError('Cannot find a file containing %s' % symbol) return self._ConvertFileProtoToFileDescriptor(file_proto)
[ "Gets", "the", "FileDescriptor", "for", "the", "file", "containing", "the", "specified", "symbol", "." ]
ibelie/typy
python
https://github.com/ibelie/typy/blob/3616845fb91459aacd8df6bf82c5d91f4542bee7/typy/google/protobuf/descriptor_pool.py#L208-L241
[ "def", "FindFileContainingSymbol", "(", "self", ",", "symbol", ")", ":", "symbol", "=", "_NormalizeFullyQualifiedName", "(", "symbol", ")", "try", ":", "return", "self", ".", "_descriptors", "[", "symbol", "]", ".", "file", "except", "KeyError", ":", "pass", "try", ":", "return", "self", ".", "_enum_descriptors", "[", "symbol", "]", ".", "file", "except", "KeyError", ":", "pass", "try", ":", "file_proto", "=", "self", ".", "_internal_db", ".", "FindFileContainingSymbol", "(", "symbol", ")", "except", "KeyError", "as", "error", ":", "if", "self", ".", "_descriptor_db", ":", "file_proto", "=", "self", ".", "_descriptor_db", ".", "FindFileContainingSymbol", "(", "symbol", ")", "else", ":", "raise", "error", "if", "not", "file_proto", ":", "raise", "KeyError", "(", "'Cannot find a file containing %s'", "%", "symbol", ")", "return", "self", ".", "_ConvertFileProtoToFileDescriptor", "(", "file_proto", ")" ]
3616845fb91459aacd8df6bf82c5d91f4542bee7
valid
DescriptorPool.FindMessageTypeByName
Loads the named descriptor from the pool. Args: full_name: The full name of the descriptor to load. Returns: The descriptor for the named type.
typy/google/protobuf/descriptor_pool.py
def FindMessageTypeByName(self, full_name): """Loads the named descriptor from the pool. Args: full_name: The full name of the descriptor to load. Returns: The descriptor for the named type. """ full_name = _NormalizeFullyQualifiedName(full_name) if full_name not in self._descriptors: self.FindFileContainingSymbol(full_name) return self._descriptors[full_name]
def FindMessageTypeByName(self, full_name): """Loads the named descriptor from the pool. Args: full_name: The full name of the descriptor to load. Returns: The descriptor for the named type. """ full_name = _NormalizeFullyQualifiedName(full_name) if full_name not in self._descriptors: self.FindFileContainingSymbol(full_name) return self._descriptors[full_name]
[ "Loads", "the", "named", "descriptor", "from", "the", "pool", "." ]
ibelie/typy
python
https://github.com/ibelie/typy/blob/3616845fb91459aacd8df6bf82c5d91f4542bee7/typy/google/protobuf/descriptor_pool.py#L243-L256
[ "def", "FindMessageTypeByName", "(", "self", ",", "full_name", ")", ":", "full_name", "=", "_NormalizeFullyQualifiedName", "(", "full_name", ")", "if", "full_name", "not", "in", "self", ".", "_descriptors", ":", "self", ".", "FindFileContainingSymbol", "(", "full_name", ")", "return", "self", ".", "_descriptors", "[", "full_name", "]" ]
3616845fb91459aacd8df6bf82c5d91f4542bee7
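A hedged lookup sketch covering the pool methods in the surrounding records; 'my.package.Thing' is a placeholder for a fully qualified name whose FileDescriptorProto has already been added to the pool.

from typy.google.protobuf import descriptor_pool

pool = descriptor_pool.DescriptorPool()
# ... pool.Add(file_descriptor_proto) calls happen elsewhere ...
msg_desc = pool.FindMessageTypeByName('my.package.Thing')      # lazily loads the file
file_desc = pool.FindFileContainingSymbol('my.package.Thing')  # same file, resolved by symbol
enum_desc = pool.FindEnumTypeByName('my.package.Thing.Kind')   # a nested enum, if defined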
valid
DescriptorPool.FindEnumTypeByName
Loads the named enum descriptor from the pool. Args: full_name: The full name of the enum descriptor to load. Returns: The enum descriptor for the named type.
typy/google/protobuf/descriptor_pool.py
def FindEnumTypeByName(self, full_name): """Loads the named enum descriptor from the pool. Args: full_name: The full name of the enum descriptor to load. Returns: The enum descriptor for the named type. """ full_name = _NormalizeFullyQualifiedName(full_name) if full_name not in self._enum_descriptors: self.FindFileContainingSymbol(full_name) return self._enum_descriptors[full_name]
def FindEnumTypeByName(self, full_name): """Loads the named enum descriptor from the pool. Args: full_name: The full name of the enum descriptor to load. Returns: The enum descriptor for the named type. """ full_name = _NormalizeFullyQualifiedName(full_name) if full_name not in self._enum_descriptors: self.FindFileContainingSymbol(full_name) return self._enum_descriptors[full_name]
[ "Loads", "the", "named", "enum", "descriptor", "from", "the", "pool", "." ]
ibelie/typy
python
https://github.com/ibelie/typy/blob/3616845fb91459aacd8df6bf82c5d91f4542bee7/typy/google/protobuf/descriptor_pool.py#L258-L271
[ "def", "FindEnumTypeByName", "(", "self", ",", "full_name", ")", ":", "full_name", "=", "_NormalizeFullyQualifiedName", "(", "full_name", ")", "if", "full_name", "not", "in", "self", ".", "_enum_descriptors", ":", "self", ".", "FindFileContainingSymbol", "(", "full_name", ")", "return", "self", ".", "_enum_descriptors", "[", "full_name", "]" ]
3616845fb91459aacd8df6bf82c5d91f4542bee7
valid
DescriptorPool.FindExtensionByName
Loads the named extension descriptor from the pool. Args: full_name: The full name of the extension descriptor to load. Returns: A FieldDescriptor, describing the named extension.
typy/google/protobuf/descriptor_pool.py
def FindExtensionByName(self, full_name): """Loads the named extension descriptor from the pool. Args: full_name: The full name of the extension descriptor to load. Returns: A FieldDescriptor, describing the named extension. """ full_name = _NormalizeFullyQualifiedName(full_name) message_name, _, extension_name = full_name.rpartition('.') try: # Most extensions are nested inside a message. scope = self.FindMessageTypeByName(message_name) except KeyError: # Some extensions are defined at file scope. scope = self.FindFileContainingSymbol(full_name) return scope.extensions_by_name[extension_name]
def FindExtensionByName(self, full_name): """Loads the named extension descriptor from the pool. Args: full_name: The full name of the extension descriptor to load. Returns: A FieldDescriptor, describing the named extension. """ full_name = _NormalizeFullyQualifiedName(full_name) message_name, _, extension_name = full_name.rpartition('.') try: # Most extensions are nested inside a message. scope = self.FindMessageTypeByName(message_name) except KeyError: # Some extensions are defined at file scope. scope = self.FindFileContainingSymbol(full_name) return scope.extensions_by_name[extension_name]
[ "Loads", "the", "named", "extension", "descriptor", "from", "the", "pool", "." ]
ibelie/typy
python
https://github.com/ibelie/typy/blob/3616845fb91459aacd8df6bf82c5d91f4542bee7/typy/google/protobuf/descriptor_pool.py#L287-L304
[ "def", "FindExtensionByName", "(", "self", ",", "full_name", ")", ":", "full_name", "=", "_NormalizeFullyQualifiedName", "(", "full_name", ")", "message_name", ",", "_", ",", "extension_name", "=", "full_name", ".", "rpartition", "(", "'.'", ")", "try", ":", "# Most extensions are nested inside a message.", "scope", "=", "self", ".", "FindMessageTypeByName", "(", "message_name", ")", "except", "KeyError", ":", "# Some extensions are defined at file scope.", "scope", "=", "self", ".", "FindFileContainingSymbol", "(", "full_name", ")", "return", "scope", ".", "extensions_by_name", "[", "extension_name", "]" ]
3616845fb91459aacd8df6bf82c5d91f4542bee7
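Extension lookup follows the same pattern, reusing the pool from the earlier sketch; the extension name is a placeholder.

# Works for both file-scope and message-nested extensions, per the
# rpartition()/fallback logic above.
ext_desc = pool.FindExtensionByName('my.package.my_extension')  # placeholder name
print(ext_desc.number)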
valid
DescriptorPool._ConvertEnumDescriptor
Make a protobuf EnumDescriptor given an EnumDescriptorProto protobuf. Args: enum_proto: The descriptor_pb2.EnumDescriptorProto protobuf message. package: Optional package name for the new message EnumDescriptor. file_desc: The file containing the enum descriptor. containing_type: The type containing this enum. scope: Scope containing available types. Returns: The added descriptor
typy/google/protobuf/descriptor_pool.py
def _ConvertEnumDescriptor(self, enum_proto, package=None, file_desc=None, containing_type=None, scope=None): """Make a protobuf EnumDescriptor given an EnumDescriptorProto protobuf. Args: enum_proto: The descriptor_pb2.EnumDescriptorProto protobuf message. package: Optional package name for the new message EnumDescriptor. file_desc: The file containing the enum descriptor. containing_type: The type containing this enum. scope: Scope containing available types. Returns: The added descriptor """ if package: enum_name = '.'.join((package, enum_proto.name)) else: enum_name = enum_proto.name if file_desc is None: file_name = None else: file_name = file_desc.name values = [self._MakeEnumValueDescriptor(value, index) for index, value in enumerate(enum_proto.value)] desc = descriptor.EnumDescriptor(name=enum_proto.name, full_name=enum_name, filename=file_name, file=file_desc, values=values, containing_type=containing_type, options=enum_proto.options) scope['.%s' % enum_name] = desc self._enum_descriptors[enum_name] = desc return desc
def _ConvertEnumDescriptor(self, enum_proto, package=None, file_desc=None, containing_type=None, scope=None): """Make a protobuf EnumDescriptor given an EnumDescriptorProto protobuf. Args: enum_proto: The descriptor_pb2.EnumDescriptorProto protobuf message. package: Optional package name for the new message EnumDescriptor. file_desc: The file containing the enum descriptor. containing_type: The type containing this enum. scope: Scope containing available types. Returns: The added descriptor """ if package: enum_name = '.'.join((package, enum_proto.name)) else: enum_name = enum_proto.name if file_desc is None: file_name = None else: file_name = file_desc.name values = [self._MakeEnumValueDescriptor(value, index) for index, value in enumerate(enum_proto.value)] desc = descriptor.EnumDescriptor(name=enum_proto.name, full_name=enum_name, filename=file_name, file=file_desc, values=values, containing_type=containing_type, options=enum_proto.options) scope['.%s' % enum_name] = desc self._enum_descriptors[enum_name] = desc return desc
[ "Make", "a", "protobuf", "EnumDescriptor", "given", "an", "EnumDescriptorProto", "protobuf", "." ]
ibelie/typy
python
https://github.com/ibelie/typy/blob/3616845fb91459aacd8df6bf82c5d91f4542bee7/typy/google/protobuf/descriptor_pool.py#L482-L518
[ "def", "_ConvertEnumDescriptor", "(", "self", ",", "enum_proto", ",", "package", "=", "None", ",", "file_desc", "=", "None", ",", "containing_type", "=", "None", ",", "scope", "=", "None", ")", ":", "if", "package", ":", "enum_name", "=", "'.'", ".", "join", "(", "(", "package", ",", "enum_proto", ".", "name", ")", ")", "else", ":", "enum_name", "=", "enum_proto", ".", "name", "if", "file_desc", "is", "None", ":", "file_name", "=", "None", "else", ":", "file_name", "=", "file_desc", ".", "name", "values", "=", "[", "self", ".", "_MakeEnumValueDescriptor", "(", "value", ",", "index", ")", "for", "index", ",", "value", "in", "enumerate", "(", "enum_proto", ".", "value", ")", "]", "desc", "=", "descriptor", ".", "EnumDescriptor", "(", "name", "=", "enum_proto", ".", "name", ",", "full_name", "=", "enum_name", ",", "filename", "=", "file_name", ",", "file", "=", "file_desc", ",", "values", "=", "values", ",", "containing_type", "=", "containing_type", ",", "options", "=", "enum_proto", ".", "options", ")", "scope", "[", "'.%s'", "%", "enum_name", "]", "=", "desc", "self", ".", "_enum_descriptors", "[", "enum_name", "]", "=", "desc", "return", "desc" ]
3616845fb91459aacd8df6bf82c5d91f4542bee7
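For orientation, a hedged sketch of the public DescriptorPool API that reaches this conversion path; the file, package, and enum names are invented, and the vendored typy.google.protobuf is assumed to mirror the upstream google.protobuf modules:

from typy.google.protobuf import descriptor_pb2, descriptor_pool  # assumed to mirror upstream protobuf

file_proto = descriptor_pb2.FileDescriptorProto()
file_proto.name = 'color.proto'      # invented example file
file_proto.package = 'demo'
enum_proto = file_proto.enum_type.add()
enum_proto.name = 'Color'
enum_proto.value.add(name='RED', number=0)
enum_proto.value.add(name='GREEN', number=1)

pool = descriptor_pool.DescriptorPool()
pool.Add(file_proto)  # internally converts each enum via _ConvertEnumDescriptor
enum_desc = pool.FindEnumTypeByName('demo.Color')
print(enum_desc.full_name, [v.name for v in enum_desc.values])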
valid
DescriptorPool._MakeFieldDescriptor
Creates a field descriptor from a FieldDescriptorProto. For message and enum type fields, this method will do a look up in the pool for the appropriate descriptor for that type. If it is unavailable, it will fall back to the _source function to create it. If this type is still unavailable, construction will fail. Args: field_proto: The proto describing the field. message_name: The name of the containing message. index: Index of the field is_extension: Indication that this field is for an extension. Returns: An initialized FieldDescriptor object
typy/google/protobuf/descriptor_pool.py
def _MakeFieldDescriptor(self, field_proto, message_name, index, is_extension=False): """Creates a field descriptor from a FieldDescriptorProto. For message and enum type fields, this method will do a look up in the pool for the appropriate descriptor for that type. If it is unavailable, it will fall back to the _source function to create it. If this type is still unavailable, construction will fail. Args: field_proto: The proto describing the field. message_name: The name of the containing message. index: Index of the field is_extension: Indication that this field is for an extension. Returns: An initialized FieldDescriptor object """ if message_name: full_name = '.'.join((message_name, field_proto.name)) else: full_name = field_proto.name return descriptor.FieldDescriptor( name=field_proto.name, full_name=full_name, index=index, number=field_proto.number, type=field_proto.type, cpp_type=None, message_type=None, enum_type=None, containing_type=None, label=field_proto.label, has_default_value=False, default_value=None, is_extension=is_extension, extension_scope=None, options=field_proto.options)
def _MakeFieldDescriptor(self, field_proto, message_name, index, is_extension=False): """Creates a field descriptor from a FieldDescriptorProto. For message and enum type fields, this method will do a look up in the pool for the appropriate descriptor for that type. If it is unavailable, it will fall back to the _source function to create it. If this type is still unavailable, construction will fail. Args: field_proto: The proto describing the field. message_name: The name of the containing message. index: Index of the field is_extension: Indication that this field is for an extension. Returns: An initialized FieldDescriptor object """ if message_name: full_name = '.'.join((message_name, field_proto.name)) else: full_name = field_proto.name return descriptor.FieldDescriptor( name=field_proto.name, full_name=full_name, index=index, number=field_proto.number, type=field_proto.type, cpp_type=None, message_type=None, enum_type=None, containing_type=None, label=field_proto.label, has_default_value=False, default_value=None, is_extension=is_extension, extension_scope=None, options=field_proto.options)
[ "Creates", "a", "field", "descriptor", "from", "a", "FieldDescriptorProto", "." ]
ibelie/typy
python
https://github.com/ibelie/typy/blob/3616845fb91459aacd8df6bf82c5d91f4542bee7/typy/google/protobuf/descriptor_pool.py#L520-L560
[ "def", "_MakeFieldDescriptor", "(", "self", ",", "field_proto", ",", "message_name", ",", "index", ",", "is_extension", "=", "False", ")", ":", "if", "message_name", ":", "full_name", "=", "'.'", ".", "join", "(", "(", "message_name", ",", "field_proto", ".", "name", ")", ")", "else", ":", "full_name", "=", "field_proto", ".", "name", "return", "descriptor", ".", "FieldDescriptor", "(", "name", "=", "field_proto", ".", "name", ",", "full_name", "=", "full_name", ",", "index", "=", "index", ",", "number", "=", "field_proto", ".", "number", ",", "type", "=", "field_proto", ".", "type", ",", "cpp_type", "=", "None", ",", "message_type", "=", "None", ",", "enum_type", "=", "None", ",", "containing_type", "=", "None", ",", "label", "=", "field_proto", ".", "label", ",", "has_default_value", "=", "False", ",", "default_value", "=", "None", ",", "is_extension", "=", "is_extension", ",", "extension_scope", "=", "None", ",", "options", "=", "field_proto", ".", "options", ")" ]
3616845fb91459aacd8df6bf82c5d91f4542bee7
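A small sketch of the full_name computation and the proto this method consumes; the message and field names are invented:

from typy.google.protobuf import descriptor_pb2  # assumed to mirror upstream protobuf

field_proto = descriptor_pb2.FieldDescriptorProto(
    name='id', number=1,
    type=descriptor_pb2.FieldDescriptorProto.TYPE_INT32,
    label=descriptor_pb2.FieldDescriptorProto.LABEL_OPTIONAL)

# full_name is just the containing message's name joined to the field name:
message_name = 'demo.Person'  # invented containing message
full_name = '.'.join((message_name, field_proto.name)) if message_name else field_proto.name
assert full_name == 'demo.Person.id'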
valid
validate
theoretically, any data store can be implemented to work with this package, which means basic data validation must be done in-package, so that weird stuff can't be stored in the data store. this function raises an exception if an invalid table name is passed, not all of the required fields are in the data kwargs, or if a field that was passed is not expected. it also returns the key field name, for ensuring uniqueness (again, that may not be built into whatever data store is implemented.)
proauth2/data_stores/validate.py
def validate( table, **data ):
    '''
    theoretically, any data store can be implemented to work with this package,
    which means basic data validation must be done in-package, so that weird
    stuff can't be stored in the data store.
    this function raises an exception if an invalid table name is passed, not
    all of the required fields are in the data kwargs, or if a field that was
    passed is not expected.
    it also returns the key field name, for ensuring uniqueness (again, that
    may not be built into whatever data store is implemented.)
    '''
    if table not in good.keys():
        raise Proauth2Error( 'invalid_request', 'invalid name: %s' % table )
    for req in good[table]['required']:
        if not data.get( req, None ):
            raise Proauth2Error( 'invalid_request',
                                 'missing required field: %s' % req )
    for key in data.keys():
        if key not in good[table]['required'] and \
           key not in good[table]['optional']:
            raise Proauth2Error( 'invalid_request', 'invalid field: %s' % key )
    return good[table]['key']
def validate( table, **data ):
    '''
    theoretically, any data store can be implemented to work with this package,
    which means basic data validation must be done in-package, so that weird
    stuff can't be stored in the data store.
    this function raises an exception if an invalid table name is passed, not
    all of the required fields are in the data kwargs, or if a field that was
    passed is not expected.
    it also returns the key field name, for ensuring uniqueness (again, that
    may not be built into whatever data store is implemented.)
    '''
    if table not in good.keys():
        raise Proauth2Error( 'invalid_request', 'invalid name: %s' % table )
    for req in good[table]['required']:
        if not data.get( req, None ):
            raise Proauth2Error( 'invalid_request',
                                 'missing required field: %s' % req )
    for key in data.keys():
        if key not in good[table]['required'] and \
           key not in good[table]['optional']:
            raise Proauth2Error( 'invalid_request', 'invalid field: %s' % key )
    return good[table]['key']
[ "theoretically", "any", "data", "store", "can", "be", "implemented", "to", "work", "with", "this", "package", "which", "means", "basic", "data", "validation", "must", "be", "done", "in", "-", "package", "so", "that", "weird", "stuff", "can", "t", "be", "stored", "in", "the", "data", "store", ".", "this", "function", "raises", "an", "exception", "if", "an", "invalid", "table", "name", "is", "passed", "not", "all", "of", "the", "required", "fields", "are", "in", "the", "data", "kwargs", "or", "if", "a", "field", "that", "was", "passed", "is", "not", "expected", ".", "it", "also", "returns", "the", "key", "field", "name", "for", "ensuring", "uniqueness", "(", "again", "that", "may", "not", "be", "built", "into", "whatever", "data", "store", "is", "impelemented", ".", ")" ]
charlesthomas/proauth2
python
https://github.com/charlesthomas/proauth2/blob/f88c8df966a1802414047ed304d02df1dd520097/proauth2/data_stores/validate.py#L22-L43
[ "def", "validate", "(", "table", ",", "*", "*", "data", ")", ":", "if", "table", "not", "in", "good", ".", "keys", "(", ")", ":", "raise", "Proauth2Error", "(", "'invalid_request'", ",", "'invalid name: %s'", "%", "table", ")", "for", "req", "in", "good", "[", "table", "]", "[", "'required'", "]", ":", "if", "not", "data", ".", "get", "(", "req", ",", "None", ")", ":", "raise", "Proauth2Error", "(", "'invalid_request'", ",", "'missing required field: %s'", "%", "req", ")", "for", "key", "in", "data", ".", "keys", "(", ")", ":", "if", "key", "not", "in", "good", "[", "table", "]", "[", "'required'", "]", "and", "key", "not", "in", "good", "[", "table", "]", "[", "'optional'", "]", ":", "raise", "Proauth2Error", "(", "'invalid_request'", ",", "'invalid field: %s'", "%", "key", ")", "return", "good", "[", "table", "]", "[", "'key'", "]" ]
f88c8df966a1802414047ed304d02df1dd520097
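A hedged usage sketch: the module-level `good` registry is not shown in this entry, so the 'accounts' schema below is invented purely to illustrate the contract.

# Invented stand-in for the module-level `good` registry.
good = {
    'accounts': {
        'key': 'email',
        'required': ['email', 'password'],
        'optional': ['display_name'],
    },
}

key_field = validate('accounts', email='a@example.com', password='hunter2')
assert key_field == 'email'
# An unexpected keyword would raise Proauth2Error('invalid_request', ...):
# validate('accounts', email='a@example.com', password='x', colour='red')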
valid
ExperimentLogger.record_results
Record the results of this experiment, by updating the tag. :param results: A dictionary containing the results of the experiment. :type results: dict
experimenter/experimentlogger.py
def record_results(self, results): """ Record the results of this experiment, by updating the tag. :param results: A dictionary containing the results of the experiment. :type results: dict """ repository = Repo(self.__repository_directory, search_parent_directories=True) for tag in repository.tags: if tag.name == self.__tag_name: tag_object = tag break else: raise Exception("Experiment tag has been deleted since experiment started") data = json.loads(tag_object.tag.message) data["results"] = results TagReference.create(repository, self.__tag_name, message=json.dumps(data), ref=tag_object.tag.object, force=True) self.__results_recorded = True
def record_results(self, results): """ Record the results of this experiment, by updating the tag. :param results: A dictionary containing the results of the experiment. :type results: dict """ repository = Repo(self.__repository_directory, search_parent_directories=True) for tag in repository.tags: if tag.name == self.__tag_name: tag_object = tag break else: raise Exception("Experiment tag has been deleted since experiment started") data = json.loads(tag_object.tag.message) data["results"] = results TagReference.create(repository, self.__tag_name, message=json.dumps(data), ref=tag_object.tag.object, force=True) self.__results_recorded = True
[ "Record", "the", "results", "of", "this", "experiment", "by", "updating", "the", "tag", ".", ":", "param", "results", ":", "A", "dictionary", "containing", "the", "results", "of", "the", "experiment", ".", ":", "type", "results", ":", "dict" ]
mallamanis/experimenter
python
https://github.com/mallamanis/experimenter/blob/2ed5ce85084cc47251ccba3aae0cb3431fbe4259/experimenter/experimentlogger.py#L46-L63
[ "def", "record_results", "(", "self", ",", "results", ")", ":", "repository", "=", "Repo", "(", "self", ".", "__repository_directory", ",", "search_parent_directories", "=", "True", ")", "for", "tag", "in", "repository", ".", "tags", ":", "if", "tag", ".", "name", "==", "self", ".", "__tag_name", ":", "tag_object", "=", "tag", "break", "else", ":", "raise", "Exception", "(", "\"Experiment tag has been deleted since experiment started\"", ")", "data", "=", "json", ".", "loads", "(", "tag_object", ".", "tag", ".", "message", ")", "data", "[", "\"results\"", "]", "=", "results", "TagReference", ".", "create", "(", "repository", ",", "self", ".", "__tag_name", ",", "message", "=", "json", ".", "dumps", "(", "data", ")", ",", "ref", "=", "tag_object", ".", "tag", ".", "object", ",", "force", "=", "True", ")", "self", ".", "__results_recorded", "=", "True" ]
2ed5ce85084cc47251ccba3aae0cb3431fbe4259
valid
ExperimentLogger.__tag_repo
Tag the current repository. :param data: a dictionary containing the data about the experiment :type data: dict
experimenter/experimentlogger.py
def __tag_repo(self, data, repository): """ Tag the current repository. :param data: a dictionary containing the data about the experiment :type data: dict """ assert self.__tag_name not in [t.name for t in repository.tags] return TagReference.create(repository, self.__tag_name, message=json.dumps(data))
def __tag_repo(self, data, repository): """ Tag the current repository. :param data: a dictionary containing the data about the experiment :type data: dict """ assert self.__tag_name not in [t.name for t in repository.tags] return TagReference.create(repository, self.__tag_name, message=json.dumps(data))
[ "Tag", "the", "current", "repository", ".", ":", "param", "data", ":", "a", "dictionary", "containing", "the", "data", "about", "the", "experiment", ":", "type", "data", ":", "dict" ]
mallamanis/experimenter
python
https://github.com/mallamanis/experimenter/blob/2ed5ce85084cc47251ccba3aae0cb3431fbe4259/experimenter/experimentlogger.py#L80-L87
[ "def", "__tag_repo", "(", "self", ",", "data", ",", "repository", ")", ":", "assert", "self", ".", "__tag_name", "not", "in", "[", "t", ".", "name", "for", "t", "in", "repository", ".", "tags", "]", "return", "TagReference", ".", "create", "(", "repository", ",", "self", ".", "__tag_name", ",", "message", "=", "json", ".", "dumps", "(", "data", ")", ")" ]
2ed5ce85084cc47251ccba3aae0cb3431fbe4259
valid
ExperimentLogger.__get_files_to_be_added
:return: the files that have been modified and can be added
experimenter/experimentlogger.py
def __get_files_to_be_added(self, repository): """ :return: the files that have been modified and can be added """ for root, dirs, files in os.walk(repository.working_dir): for f in files: relative_path = os.path.join(root, f)[len(repository.working_dir) + 1:] try: repository.head.commit.tree[relative_path] # will fail if not tracked yield relative_path except: pass
def __get_files_to_be_added(self, repository): """ :return: the files that have been modified and can be added """ for root, dirs, files in os.walk(repository.working_dir): for f in files: relative_path = os.path.join(root, f)[len(repository.working_dir) + 1:] try: repository.head.commit.tree[relative_path] # will fail if not tracked yield relative_path except: pass
[ ":", "return", ":", "the", "files", "that", "have", "been", "modified", "and", "can", "be", "added" ]
mallamanis/experimenter
python
https://github.com/mallamanis/experimenter/blob/2ed5ce85084cc47251ccba3aae0cb3431fbe4259/experimenter/experimentlogger.py#L89-L100
[ "def", "__get_files_to_be_added", "(", "self", ",", "repository", ")", ":", "for", "root", ",", "dirs", ",", "files", "in", "os", ".", "walk", "(", "repository", ".", "working_dir", ")", ":", "for", "f", "in", "files", ":", "relative_path", "=", "os", ".", "path", ".", "join", "(", "root", ",", "f", ")", "[", "len", "(", "repository", ".", "working_dir", ")", "+", "1", ":", "]", "try", ":", "repository", ".", "head", ".", "commit", ".", "tree", "[", "relative_path", "]", "# will fail if not tracked", "yield", "relative_path", "except", ":", "pass" ]
2ed5ce85084cc47251ccba3aae0cb3431fbe4259
valid
ExperimentLogger.__start_experiment
Start an experiment by capturing the state of the code :param parameters: a dictionary containing the parameters of the experiment :type parameters: dict :return: the tag representing this experiment :rtype: TagReference
experimenter/experimentlogger.py
def __start_experiment(self, parameters): """ Start an experiment by capturing the state of the code :param parameters: a dictionary containing the parameters of the experiment :type parameters: dict :return: the tag representing this experiment :rtype: TagReference """ repository = Repo(self.__repository_directory, search_parent_directories=True) if len(repository.untracked_files) > 0: logging.warning("Untracked files will not be recorded: %s", repository.untracked_files) current_commit = repository.head.commit started_state_is_dirty = repository.is_dirty() if started_state_is_dirty: repository.index.add([p for p in self.__get_files_to_be_added(repository)]) commit_obj = repository.index.commit("Temporary commit for experiment " + self.__experiment_name) sha = commit_obj.hexsha else: sha = repository.head.object.hexsha data = {"parameters": parameters, "started": time.time(), "description": self.__description, "commit_sha": sha} tag_object = self.__tag_repo(data, repository) if started_state_is_dirty: repository.head.reset(current_commit, working_tree=False, index=True) return tag_object
def __start_experiment(self, parameters): """ Start an experiment by capturing the state of the code :param parameters: a dictionary containing the parameters of the experiment :type parameters: dict :return: the tag representing this experiment :rtype: TagReference """ repository = Repo(self.__repository_directory, search_parent_directories=True) if len(repository.untracked_files) > 0: logging.warning("Untracked files will not be recorded: %s", repository.untracked_files) current_commit = repository.head.commit started_state_is_dirty = repository.is_dirty() if started_state_is_dirty: repository.index.add([p for p in self.__get_files_to_be_added(repository)]) commit_obj = repository.index.commit("Temporary commit for experiment " + self.__experiment_name) sha = commit_obj.hexsha else: sha = repository.head.object.hexsha data = {"parameters": parameters, "started": time.time(), "description": self.__description, "commit_sha": sha} tag_object = self.__tag_repo(data, repository) if started_state_is_dirty: repository.head.reset(current_commit, working_tree=False, index=True) return tag_object
[ "Start", "an", "experiment", "by", "capturing", "the", "state", "of", "the", "code", ":", "param", "parameters", ":", "a", "dictionary", "containing", "the", "parameters", "of", "the", "experiment", ":", "type", "parameters", ":", "dict", ":", "return", ":", "the", "tag", "representing", "this", "experiment", ":", "rtype", ":", "TagReference" ]
mallamanis/experimenter
python
https://github.com/mallamanis/experimenter/blob/2ed5ce85084cc47251ccba3aae0cb3431fbe4259/experimenter/experimentlogger.py#L102-L130
[ "def", "__start_experiment", "(", "self", ",", "parameters", ")", ":", "repository", "=", "Repo", "(", "self", ".", "__repository_directory", ",", "search_parent_directories", "=", "True", ")", "if", "len", "(", "repository", ".", "untracked_files", ")", ">", "0", ":", "logging", ".", "warning", "(", "\"Untracked files will not be recorded: %s\"", ",", "repository", ".", "untracked_files", ")", "current_commit", "=", "repository", ".", "head", ".", "commit", "started_state_is_dirty", "=", "repository", ".", "is_dirty", "(", ")", "if", "started_state_is_dirty", ":", "repository", ".", "index", ".", "add", "(", "[", "p", "for", "p", "in", "self", ".", "__get_files_to_be_added", "(", "repository", ")", "]", ")", "commit_obj", "=", "repository", ".", "index", ".", "commit", "(", "\"Temporary commit for experiment \"", "+", "self", ".", "__experiment_name", ")", "sha", "=", "commit_obj", ".", "hexsha", "else", ":", "sha", "=", "repository", ".", "head", ".", "object", ".", "hexsha", "data", "=", "{", "\"parameters\"", ":", "parameters", ",", "\"started\"", ":", "time", ".", "time", "(", ")", ",", "\"description\"", ":", "self", ".", "__description", ",", "\"commit_sha\"", ":", "sha", "}", "tag_object", "=", "self", ".", "__tag_repo", "(", "data", ",", "repository", ")", "if", "started_state_is_dirty", ":", "repository", ".", "head", ".", "reset", "(", "current_commit", ",", "working_tree", "=", "False", ",", "index", "=", "True", ")", "return", "tag_object" ]
2ed5ce85084cc47251ccba3aae0cb3431fbe4259
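Taken together, the four ExperimentLogger entries above suggest a lifecycle like the sketch below. The constructor arguments are an assumption inferred from the private attributes used in the methods (__experiment_name, __description, parameters) and are not verified against the package:

from experimenter import ExperimentLogger  # assumed top-level export

# Constructor signature below is a guess from the attributes used above.
logger = ExperimentLogger('lr-sweep', parameters={'lr': 0.01, 'epochs': 10},
                          description='baseline run')
accuracy = 0.91  # stand-in for a real training loop
logger.record_results({'accuracy': accuracy})  # updates the experiment tag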
valid
get_tm_session
Get a ``sqlalchemy.orm.Session`` instance backed by a transaction. This function will hook the session to the transaction manager which will take care of committing any changes. - When using pyramid_tm it will automatically be committed or aborted depending on whether an exception is raised. - When using scripts you should wrap the session in a manager yourself. For example:: import transaction engine = get_engine(settings) session_factory = get_session_factory(engine) with transaction.manager: dbsession = get_tm_session(session_factory, transaction.manager)
baka_model/model/meta/base.py
def get_tm_session(session_factory, transaction_manager): """ Get a ``sqlalchemy.orm.Session`` instance backed by a transaction. This function will hook the session to the transaction manager which will take care of committing any changes. - When using pyramid_tm it will automatically be committed or aborted depending on whether an exception is raised. - When using scripts you should wrap the session in a manager yourself. For example:: import transaction engine = get_engine(settings) session_factory = get_session_factory(engine) with transaction.manager: dbsession = get_tm_session(session_factory, transaction.manager) """ dbsession = session_factory() zope.sqlalchemy.register( dbsession, transaction_manager=transaction_manager) return dbsession
def get_tm_session(session_factory, transaction_manager): """ Get a ``sqlalchemy.orm.Session`` instance backed by a transaction. This function will hook the session to the transaction manager which will take care of committing any changes. - When using pyramid_tm it will automatically be committed or aborted depending on whether an exception is raised. - When using scripts you should wrap the session in a manager yourself. For example:: import transaction engine = get_engine(settings) session_factory = get_session_factory(engine) with transaction.manager: dbsession = get_tm_session(session_factory, transaction.manager) """ dbsession = session_factory() zope.sqlalchemy.register( dbsession, transaction_manager=transaction_manager) return dbsession
[ "Get", "a", "sqlalchemy", ".", "orm", ".", "Session", "instance", "backed", "by", "a", "transaction", "." ]
suryakencana007/baka_model
python
https://github.com/suryakencana007/baka_model/blob/915c2da9920e973302f5764ae63799acd5ecf0b7/baka_model/model/meta/base.py#L71-L95
[ "def", "get_tm_session", "(", "session_factory", ",", "transaction_manager", ")", ":", "dbsession", "=", "session_factory", "(", ")", "zope", ".", "sqlalchemy", ".", "register", "(", "dbsession", ",", "transaction_manager", "=", "transaction_manager", ")", "return", "dbsession" ]
915c2da9920e973302f5764ae63799acd5ecf0b7
valid
MainThread.run
run your main spider here, and get a list/tuple of url as result then make the instance of branch thread :return: None
threads_creator/threads/main_thread.py
def run(self): """run your main spider here, and get a list/tuple of url as result then make the instance of branch thread :return: None """ global existed_urls_list config = config_creator() debug = config.debug main_thread_sleep = config.main_thread_sleep branch_thread_num = config.branch_thread_num while 1: url = self.main_queue.get() if debug: print('main thread-{} start'.format(url)) main_spider = self.main_spider(url) sleep(random.randrange(*main_thread_sleep)) links = main_spider.request_urls() try: assert type(links) in VALIDATE_URLS except AssertionError: error_message('except to return a list or tuple which contains url') links = list() branch_queue = queue.Queue(branch_thread_num) for i in range(branch_thread_num): branch_thread = BranchThread(branch_queue=branch_queue, branch_spider=self.branch_spider) branch_thread.daemon = True branch_thread.start() for link in links: if link not in existed_urls_list: existed_urls_list.append(link) branch_queue.put(link) branch_queue.join() if debug: print('main thread-{}\'s child threads is all finish'.format(url)) self.main_queue.task_done()
def run(self): """run your main spider here, and get a list/tuple of url as result then make the instance of branch thread :return: None """ global existed_urls_list config = config_creator() debug = config.debug main_thread_sleep = config.main_thread_sleep branch_thread_num = config.branch_thread_num while 1: url = self.main_queue.get() if debug: print('main thread-{} start'.format(url)) main_spider = self.main_spider(url) sleep(random.randrange(*main_thread_sleep)) links = main_spider.request_urls() try: assert type(links) in VALIDATE_URLS except AssertionError: error_message('except to return a list or tuple which contains url') links = list() branch_queue = queue.Queue(branch_thread_num) for i in range(branch_thread_num): branch_thread = BranchThread(branch_queue=branch_queue, branch_spider=self.branch_spider) branch_thread.daemon = True branch_thread.start() for link in links: if link not in existed_urls_list: existed_urls_list.append(link) branch_queue.put(link) branch_queue.join() if debug: print('main thread-{}\'s child threads is all finish'.format(url)) self.main_queue.task_done()
[ "run", "your", "main", "spider", "here", "and", "get", "a", "list", "/", "tuple", "of", "url", "as", "result", "then", "make", "the", "instance", "of", "branch", "thread" ]
ecmadao/threads-creator
python
https://github.com/ecmadao/threads-creator/blob/f081091425d4382e5e9776c395c20e1af2332657/threads_creator/threads/main_thread.py#L27-L68
[ "def", "run", "(", "self", ")", ":", "global", "existed_urls_list", "config", "=", "config_creator", "(", ")", "debug", "=", "config", ".", "debug", "main_thread_sleep", "=", "config", ".", "main_thread_sleep", "branch_thread_num", "=", "config", ".", "branch_thread_num", "while", "1", ":", "url", "=", "self", ".", "main_queue", ".", "get", "(", ")", "if", "debug", ":", "print", "(", "'main thread-{} start'", ".", "format", "(", "url", ")", ")", "main_spider", "=", "self", ".", "main_spider", "(", "url", ")", "sleep", "(", "random", ".", "randrange", "(", "*", "main_thread_sleep", ")", ")", "links", "=", "main_spider", ".", "request_urls", "(", ")", "try", ":", "assert", "type", "(", "links", ")", "in", "VALIDATE_URLS", "except", "AssertionError", ":", "error_message", "(", "'except to return a list or tuple which contains url'", ")", "links", "=", "list", "(", ")", "branch_queue", "=", "queue", ".", "Queue", "(", "branch_thread_num", ")", "for", "i", "in", "range", "(", "branch_thread_num", ")", ":", "branch_thread", "=", "BranchThread", "(", "branch_queue", "=", "branch_queue", ",", "branch_spider", "=", "self", ".", "branch_spider", ")", "branch_thread", ".", "daemon", "=", "True", "branch_thread", ".", "start", "(", ")", "for", "link", "in", "links", ":", "if", "link", "not", "in", "existed_urls_list", ":", "existed_urls_list", ".", "append", "(", "link", ")", "branch_queue", ".", "put", "(", "link", ")", "branch_queue", ".", "join", "(", ")", "if", "debug", ":", "print", "(", "'main thread-{}\\'s child threads is all finish'", ".", "format", "(", "url", ")", ")", "self", ".", "main_queue", ".", "task_done", "(", ")" ]
f081091425d4382e5e9776c395c20e1af2332657
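The spiders here are duck-typed: the main spider is constructed with a URL and must expose request_urls() returning a list or tuple of URLs. A hypothetical pair (class names invented; the package's actual wiring of these classes is not shown in this entry):

class ListingSpider(object):          # hypothetical main spider
    def __init__(self, url):
        self.url = url

    def request_urls(self):
        # pretend the page at self.url was fetched and its links extracted
        return ['https://example.com/a', 'https://example.com/b']


class PageSpider(object):             # hypothetical branch spider
    def __init__(self, url):
        self.url = url                # consumed by BranchThread (not shown)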
valid
pypi_render
Copied (and slightly adapted) from pypi.description_tools
tools/check_render.py
def pypi_render(source): """ Copied (and slightly adapted) from pypi.description_tools """ ALLOWED_SCHEMES = '''file ftp gopher hdl http https imap mailto mms news nntp prospero rsync rtsp rtspu sftp shttp sip sips snews svn svn+ssh telnet wais irc'''.split() settings_overrides = { "raw_enabled": 0, # no raw HTML code "file_insertion_enabled": 0, # no file/URL access "halt_level": 2, # at warnings or errors, raise an exception "report_level": 5, # never report problems with the reST code } # capture publishing errors, they go to stderr old_stderr = sys.stderr sys.stderr = s = StringIO.StringIO() parts = None try: # Convert reStructuredText to HTML using Docutils. document = publish_doctree(source=source, settings_overrides=settings_overrides) for node in document.traverse(): if node.tagname == '#text': continue if node.hasattr('refuri'): uri = node['refuri'] elif node.hasattr('uri'): uri = node['uri'] else: continue o = urlparse.urlparse(uri) if o.scheme not in ALLOWED_SCHEMES: raise TransformError('link scheme not allowed') # now turn the transformed document into HTML reader = readers.doctree.Reader(parser_name='null') pub = Publisher(reader, source=io.DocTreeInput(document), destination_class=io.StringOutput) pub.set_writer('html') pub.process_programmatic_settings(None, settings_overrides, None) pub.set_destination(None, None) pub.publish() parts = pub.writer.parts except: pass sys.stderr = old_stderr # original text if publishing errors occur if parts is None or len(s.getvalue()) > 0: return None else: return parts['body']
def pypi_render(source): """ Copied (and slightly adapted) from pypi.description_tools """ ALLOWED_SCHEMES = '''file ftp gopher hdl http https imap mailto mms news nntp prospero rsync rtsp rtspu sftp shttp sip sips snews svn svn+ssh telnet wais irc'''.split() settings_overrides = { "raw_enabled": 0, # no raw HTML code "file_insertion_enabled": 0, # no file/URL access "halt_level": 2, # at warnings or errors, raise an exception "report_level": 5, # never report problems with the reST code } # capture publishing errors, they go to stderr old_stderr = sys.stderr sys.stderr = s = StringIO.StringIO() parts = None try: # Convert reStructuredText to HTML using Docutils. document = publish_doctree(source=source, settings_overrides=settings_overrides) for node in document.traverse(): if node.tagname == '#text': continue if node.hasattr('refuri'): uri = node['refuri'] elif node.hasattr('uri'): uri = node['uri'] else: continue o = urlparse.urlparse(uri) if o.scheme not in ALLOWED_SCHEMES: raise TransformError('link scheme not allowed') # now turn the transformed document into HTML reader = readers.doctree.Reader(parser_name='null') pub = Publisher(reader, source=io.DocTreeInput(document), destination_class=io.StringOutput) pub.set_writer('html') pub.process_programmatic_settings(None, settings_overrides, None) pub.set_destination(None, None) pub.publish() parts = pub.writer.parts except: pass sys.stderr = old_stderr # original text if publishing errors occur if parts is None or len(s.getvalue()) > 0: return None else: return parts['body']
[ "Copied", "(", "and", "slightly", "adapted", ")", "from", "pypi", ".", "description_tools" ]
dstufft/recliner
python
https://github.com/dstufft/recliner/blob/64248e46805cbe75491933e9f75e5eb9961cf04e/tools/check_render.py#L19-L76
[ "def", "pypi_render", "(", "source", ")", ":", "ALLOWED_SCHEMES", "=", "'''file ftp gopher hdl http https imap mailto mms news\n nntp prospero rsync rtsp rtspu sftp shttp sip sips snews svn svn+ssh\n telnet wais irc'''", ".", "split", "(", ")", "settings_overrides", "=", "{", "\"raw_enabled\"", ":", "0", ",", "# no raw HTML code", "\"file_insertion_enabled\"", ":", "0", ",", "# no file/URL access", "\"halt_level\"", ":", "2", ",", "# at warnings or errors, raise an exception", "\"report_level\"", ":", "5", ",", "# never report problems with the reST code", "}", "# capture publishing errors, they go to stderr", "old_stderr", "=", "sys", ".", "stderr", "sys", ".", "stderr", "=", "s", "=", "StringIO", ".", "StringIO", "(", ")", "parts", "=", "None", "try", ":", "# Convert reStructuredText to HTML using Docutils.", "document", "=", "publish_doctree", "(", "source", "=", "source", ",", "settings_overrides", "=", "settings_overrides", ")", "for", "node", "in", "document", ".", "traverse", "(", ")", ":", "if", "node", ".", "tagname", "==", "'#text'", ":", "continue", "if", "node", ".", "hasattr", "(", "'refuri'", ")", ":", "uri", "=", "node", "[", "'refuri'", "]", "elif", "node", ".", "hasattr", "(", "'uri'", ")", ":", "uri", "=", "node", "[", "'uri'", "]", "else", ":", "continue", "o", "=", "urlparse", ".", "urlparse", "(", "uri", ")", "if", "o", ".", "scheme", "not", "in", "ALLOWED_SCHEMES", ":", "raise", "TransformError", "(", "'link scheme not allowed'", ")", "# now turn the transformed document into HTML", "reader", "=", "readers", ".", "doctree", ".", "Reader", "(", "parser_name", "=", "'null'", ")", "pub", "=", "Publisher", "(", "reader", ",", "source", "=", "io", ".", "DocTreeInput", "(", "document", ")", ",", "destination_class", "=", "io", ".", "StringOutput", ")", "pub", ".", "set_writer", "(", "'html'", ")", "pub", ".", "process_programmatic_settings", "(", "None", ",", "settings_overrides", ",", "None", ")", "pub", ".", "set_destination", "(", "None", ",", "None", ")", "pub", ".", "publish", "(", ")", "parts", "=", "pub", ".", "writer", ".", "parts", "except", ":", "pass", "sys", ".", "stderr", "=", "old_stderr", "# original text if publishing errors occur", "if", "parts", "is", "None", "or", "len", "(", "s", ".", "getvalue", "(", ")", ")", ">", "0", ":", "return", "None", "else", ":", "return", "parts", "[", "'body'", "]" ]
64248e46805cbe75491933e9f75e5eb9961cf04e
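A usage sketch, assuming a Python 2 interpreter (the module uses StringIO and a bare except) with pypi_render in scope; it returns rendered HTML or None on any rendering problem:

rst = """\
Hello
=====

A `safe link <https://example.com>`_.
"""
html = pypi_render(rst)
print(html if html is not None else 'description failed to render')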
valid
generate
Generate a random string of the specified length. The returned string is composed of an alphabet that shouldn't include any characters that are easily mistakeable for one another (I, 1, O, 0), and hopefully won't accidentally contain any English-language curse words.
baka_model/model/pubid.py
def generate(length=DEFAULT_LENGTH): """ Generate a random string of the specified length. The returned string is composed of an alphabet that shouldn't include any characters that are easily mistakeable for one another (I, 1, O, 0), and hopefully won't accidentally contain any English-language curse words. """ return ''.join(random.SystemRandom().choice(ALPHABET) for _ in range(length))
def generate(length=DEFAULT_LENGTH): """ Generate a random string of the specified length. The returned string is composed of an alphabet that shouldn't include any characters that are easily mistakeable for one another (I, 1, O, 0), and hopefully won't accidentally contain any English-language curse words. """ return ''.join(random.SystemRandom().choice(ALPHABET) for _ in range(length))
[ "Generate", "a", "random", "string", "of", "the", "specified", "length", "." ]
suryakencana007/baka_model
python
https://github.com/suryakencana007/baka_model/blob/915c2da9920e973302f5764ae63799acd5ecf0b7/baka_model/model/pubid.py#L36-L45
[ "def", "generate", "(", "length", "=", "DEFAULT_LENGTH", ")", ":", "return", "''", ".", "join", "(", "random", ".", "SystemRandom", "(", ")", ".", "choice", "(", "ALPHABET", ")", "for", "_", "in", "range", "(", "length", ")", ")" ]
915c2da9920e973302f5764ae63799acd5ecf0b7
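Usage is a one-liner; ALPHABET and DEFAULT_LENGTH are module constants not shown in this entry:

token = generate()           # DEFAULT_LENGTH characters drawn from ALPHABET
short = generate(length=8)   # explicit length
print(token, short)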
valid
require
Require that the named `field` has the right `data_type`
librato_bg/client.py
def require(name, field, data_type): """Require that the named `field` has the right `data_type`""" if not isinstance(field, data_type): msg = '{0} must have {1}, got: {2}'.format(name, data_type, field) raise AssertionError(msg)
def require(name, field, data_type): """Require that the named `field` has the right `data_type`""" if not isinstance(field, data_type): msg = '{0} must have {1}, got: {2}'.format(name, data_type, field) raise AssertionError(msg)
[ "Require", "that", "the", "named", "field", "has", "the", "right", "data_type" ]
nyaruka/python-librato-bg
python
https://github.com/nyaruka/python-librato-bg/blob/e541092838694de31d256becea8391a9cfe086c7/librato_bg/client.py#L63-L67
[ "def", "require", "(", "name", ",", "field", ",", "data_type", ")", ":", "if", "not", "isinstance", "(", "field", ",", "data_type", ")", ":", "msg", "=", "'{0} must have {1}, got: {2}'", ".", "format", "(", "name", ",", "data_type", ",", "field", ")", "raise", "AssertionError", "(", "msg", ")" ]
e541092838694de31d256becea8391a9cfe086c7
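A short sketch of both outcomes, assuming `require` from the entry above is in scope:

events = ['deploy', 'rollback']
require('events', events, list)        # correct type: passes silently

try:
    require('events', 'deploy', list)  # wrong type: raises
except AssertionError as exc:
    print(exc)  # "events must have <... 'list'>, got: deploy" (repr varies by Python version)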
valid
Client._enqueue
Push a new `msg` onto the queue, return `(success, msg)`
librato_bg/client.py
def _enqueue(self, msg): """Push a new `msg` onto the queue, return `(success, msg)`""" self.log.debug('queueing: %s', msg) if self.queue.full(): self.log.warn('librato_bg queue is full') return False, msg self.queue.put(msg) self.log.debug('enqueued %s.', msg) return True, msg
def _enqueue(self, msg): """Push a new `msg` onto the queue, return `(success, msg)`""" self.log.debug('queueing: %s', msg) if self.queue.full(): self.log.warn('librato_bg queue is full') return False, msg self.queue.put(msg) self.log.debug('enqueued %s.', msg) return True, msg
[ "Push", "a", "new", "msg", "onto", "the", "queue", "return", "(", "success", "msg", ")" ]
nyaruka/python-librato-bg
python
https://github.com/nyaruka/python-librato-bg/blob/e541092838694de31d256becea8391a9cfe086c7/librato_bg/client.py#L38-L48
[ "def", "_enqueue", "(", "self", ",", "msg", ")", ":", "self", ".", "log", ".", "debug", "(", "'queueing: %s'", ",", "msg", ")", "if", "self", ".", "queue", ".", "full", "(", ")", ":", "self", ".", "log", ".", "warn", "(", "'librato_bg queue is full'", ")", "return", "False", ",", "msg", "self", ".", "queue", ".", "put", "(", "msg", ")", "self", ".", "log", ".", "debug", "(", "'enqueued %s.'", ",", "msg", ")", "return", "True", ",", "msg" ]
e541092838694de31d256becea8391a9cfe086c7
valid
Client.flush
Forces a flush from the internal queue to the server
librato_bg/client.py
def flush(self): """Forces a flush from the internal queue to the server""" queue = self.queue size = queue.qsize() queue.join() self.log.debug('successfully flushed %s items.', size)
def flush(self): """Forces a flush from the internal queue to the server""" queue = self.queue size = queue.qsize() queue.join() self.log.debug('successfully flushed %s items.', size)
[ "Forces", "a", "flush", "from", "the", "internal", "queue", "to", "the", "server" ]
nyaruka/python-librato-bg
python
https://github.com/nyaruka/python-librato-bg/blob/e541092838694de31d256becea8391a9cfe086c7/librato_bg/client.py#L50-L55
[ "def", "flush", "(", "self", ")", ":", "queue", "=", "self", ".", "queue", "size", "=", "queue", ".", "qsize", "(", ")", "queue", ".", "join", "(", ")", "self", ".", "log", ".", "debug", "(", "'successfully flushed %s items.'", ",", "size", ")" ]
e541092838694de31d256becea8391a9cfe086c7
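flush works because queue.Queue.join() blocks until every enqueued item has been matched by a task_done() call from a consumer (the client's worker thread). A stdlib-only sketch of that contract, using Python 3 module names:

import queue
import threading

q = queue.Queue(maxsize=2)

def worker():
    while True:
        item = q.get()
        # ... upload `item` to the server here ...
        q.task_done()

threading.Thread(target=worker, daemon=True).start()
q.put({'gauge': 'cpu', 'value': 0.42})
q.join()  # returns only once the worker has called task_done() for the item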
valid
open
Use all decompressors possible to make the stream
destream/guesser.py
def open(name=None, fileobj=None, closefd=True):
    """
    Use all decompressors possible to make the stream
    """
    return Guesser().open(name=name, fileobj=fileobj, closefd=closefd)
def open(name=None, fileobj=None, closefd=True):
    """
    Use all decompressors possible to make the stream
    """
    return Guesser().open(name=name, fileobj=fileobj, closefd=closefd)
[ "Use", "all", "decompressor", "possible", "to", "make", "the", "stream" ]
cecton/destream
python
https://github.com/cecton/destream/blob/a9e12b4ac7d41bcd9af54a820c235d77a68a9b8c/destream/guesser.py#L46-L50
[ "def", "open", "(", "name", "=", "None", ",", "fileobj", "=", "None", ",", "closefd", "=", "True", ")", ":", "return", "Guesser", "(", ")", ".", "open", "(", "name", "=", "name", ",", "fileobj", "=", "fileobj", ",", "closefd", "=", "closefd", ")" ]
a9e12b4ac7d41bcd9af54a820c235d77a68a9b8c
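A hedged usage sketch; the path is invented, and the return value is assumed to be a readable, file-like decompressed stream:

import destream

stream = destream.open('error.log.gz')  # hypothetical gzip-compressed file
print(stream.read()[:80])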
valid
marv
Manage a Marv site
marv_cli/__init__.py
def marv(ctx, config, loglevel, logfilter, verbosity): """Manage a Marv site""" if config is None: cwd = os.path.abspath(os.path.curdir) while cwd != os.path.sep: config = os.path.join(cwd, 'marv.conf') if os.path.exists(config): break cwd = os.path.dirname(cwd) else: config = '/etc/marv/marv.conf' if not os.path.exists(config): config = None ctx.obj = config setup_logging(loglevel, verbosity, logfilter)
def marv(ctx, config, loglevel, logfilter, verbosity): """Manage a Marv site""" if config is None: cwd = os.path.abspath(os.path.curdir) while cwd != os.path.sep: config = os.path.join(cwd, 'marv.conf') if os.path.exists(config): break cwd = os.path.dirname(cwd) else: config = '/etc/marv/marv.conf' if not os.path.exists(config): config = None ctx.obj = config setup_logging(loglevel, verbosity, logfilter)
[ "Manage", "a", "Marv", "site" ]
ternaris/marv-cli
python
https://github.com/ternaris/marv-cli/blob/c06abf4f527c22035dd3b602849f6906877c6e68/marv_cli/__init__.py#L110-L124
[ "def", "marv", "(", "ctx", ",", "config", ",", "loglevel", ",", "logfilter", ",", "verbosity", ")", ":", "if", "config", "is", "None", ":", "cwd", "=", "os", ".", "path", ".", "abspath", "(", "os", ".", "path", ".", "curdir", ")", "while", "cwd", "!=", "os", ".", "path", ".", "sep", ":", "config", "=", "os", ".", "path", ".", "join", "(", "cwd", ",", "'marv.conf'", ")", "if", "os", ".", "path", ".", "exists", "(", "config", ")", ":", "break", "cwd", "=", "os", ".", "path", ".", "dirname", "(", "cwd", ")", "else", ":", "config", "=", "'/etc/marv/marv.conf'", "if", "not", "os", ".", "path", ".", "exists", "(", "config", ")", ":", "config", "=", "None", "ctx", ".", "obj", "=", "config", "setup_logging", "(", "loglevel", ",", "verbosity", ",", "logfilter", ")" ]
c06abf4f527c22035dd3b602849f6906877c6e68
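Because marv is a Click group, it can be exercised from Python with Click's test runner (a real click API); this sketch only prints the help text:

from click.testing import CliRunner
from marv_cli import marv

result = CliRunner().invoke(marv, ['--help'])
print(result.output)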
valid
main
This function returns a Pyramid WSGI application.
cms/__init__.py
def main(global_config, **settings): """ This function returns a Pyramid WSGI application. """ set_cache_regions_from_settings(settings) config = Configurator(settings=settings) config.include('cms') config.configure_celery(global_config['__file__']) return config.make_wsgi_app()
def main(global_config, **settings): """ This function returns a Pyramid WSGI application. """ set_cache_regions_from_settings(settings) config = Configurator(settings=settings) config.include('cms') config.configure_celery(global_config['__file__']) return config.make_wsgi_app()
[ "This", "function", "returns", "a", "Pyramid", "WSGI", "application", "." ]
universalcore/unicore-cms
python
https://github.com/universalcore/unicore-cms/blob/f68385fe742eb7efcce0d8f04f42f26ccf05d624/cms/__init__.py#L30-L37
[ "def", "main", "(", "global_config", ",", "*", "*", "settings", ")", ":", "set_cache_regions_from_settings", "(", "settings", ")", "config", "=", "Configurator", "(", "settings", "=", "settings", ")", "config", ".", "include", "(", "'cms'", ")", "config", ".", "configure_celery", "(", "global_config", "[", "'__file__'", "]", ")", "return", "config", ".", "make_wsgi_app", "(", ")" ]
f68385fe742eb7efcce0d8f04f42f26ccf05d624
valid
_SignedVarintDecoder
Like _VarintDecoder() but decodes signed values.
typy/google/protobuf/internal/decoder.py
def _SignedVarintDecoder(mask, result_type): """Like _VarintDecoder() but decodes signed values.""" def DecodeVarint(buffer, pos): result = 0 shift = 0 while 1: b = six.indexbytes(buffer, pos) result |= ((b & 0x7f) << shift) pos += 1 if not (b & 0x80): if result > 0x7fffffffffffffff: result -= (1 << 64) result |= ~mask else: result &= mask result = result_type(result) return (result, pos) shift += 7 if shift >= 64: raise _DecodeError('Too many bytes when decoding varint.') return DecodeVarint
def _SignedVarintDecoder(mask, result_type): """Like _VarintDecoder() but decodes signed values.""" def DecodeVarint(buffer, pos): result = 0 shift = 0 while 1: b = six.indexbytes(buffer, pos) result |= ((b & 0x7f) << shift) pos += 1 if not (b & 0x80): if result > 0x7fffffffffffffff: result -= (1 << 64) result |= ~mask else: result &= mask result = result_type(result) return (result, pos) shift += 7 if shift >= 64: raise _DecodeError('Too many bytes when decoding varint.') return DecodeVarint
[ "Like", "_VarintDecoder", "()", "but", "decodes", "signed", "values", "." ]
ibelie/typy
python
https://github.com/ibelie/typy/blob/3616845fb91459aacd8df6bf82c5d91f4542bee7/typy/google/protobuf/internal/decoder.py#L134-L155
[ "def", "_SignedVarintDecoder", "(", "mask", ",", "result_type", ")", ":", "def", "DecodeVarint", "(", "buffer", ",", "pos", ")", ":", "result", "=", "0", "shift", "=", "0", "while", "1", ":", "b", "=", "six", ".", "indexbytes", "(", "buffer", ",", "pos", ")", "result", "|=", "(", "(", "b", "&", "0x7f", ")", "<<", "shift", ")", "pos", "+=", "1", "if", "not", "(", "b", "&", "0x80", ")", ":", "if", "result", ">", "0x7fffffffffffffff", ":", "result", "-=", "(", "1", "<<", "64", ")", "result", "|=", "~", "mask", "else", ":", "result", "&=", "mask", "result", "=", "result_type", "(", "result", ")", "return", "(", "result", ",", "pos", ")", "shift", "+=", "7", "if", "shift", ">=", "64", ":", "raise", "_DecodeError", "(", "'Too many bytes when decoding varint.'", ")", "return", "DecodeVarint" ]
3616845fb91459aacd8df6bf82c5d91f4542bee7
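A worked example: -1 encoded as a ten-byte unsigned 64-bit varint round-trips through a decoder built with the 64-bit mask (that mask/result_type pairing matches how the module builds its signed 64-bit decoder, and is assumed here):

decode = _SignedVarintDecoder((1 << 64) - 1, int)

buf = b'\xff\xff\xff\xff\xff\xff\xff\xff\xff\x01'  # -1 as a 64-bit varint
value, pos = decode(buf, 0)
assert value == -1 and pos == 10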
valid
MessageSetItemDecoder
Returns a decoder for a MessageSet item. The parameter is the _extensions_by_number map for the message class. The message set message looks like this: message MessageSet { repeated group Item = 1 { required int32 type_id = 2; required string message = 3; } }
typy/google/protobuf/internal/decoder.py
def MessageSetItemDecoder(extensions_by_number): """Returns a decoder for a MessageSet item. The parameter is the _extensions_by_number map for the message class. The message set message looks like this: message MessageSet { repeated group Item = 1 { required int32 type_id = 2; required string message = 3; } } """ type_id_tag_bytes = encoder.TagBytes(2, wire_format.WIRETYPE_VARINT) message_tag_bytes = encoder.TagBytes(3, wire_format.WIRETYPE_LENGTH_DELIMITED) item_end_tag_bytes = encoder.TagBytes(1, wire_format.WIRETYPE_END_GROUP) local_ReadTag = ReadTag local_DecodeVarint = _DecodeVarint local_SkipField = SkipField def DecodeItem(buffer, pos, end, message, field_dict): message_set_item_start = pos type_id = -1 message_start = -1 message_end = -1 # Technically, type_id and message can appear in any order, so we need # a little loop here. while 1: (tag_bytes, pos) = local_ReadTag(buffer, pos) if tag_bytes == type_id_tag_bytes: (type_id, pos) = local_DecodeVarint(buffer, pos) elif tag_bytes == message_tag_bytes: (size, message_start) = local_DecodeVarint(buffer, pos) pos = message_end = message_start + size elif tag_bytes == item_end_tag_bytes: break else: pos = SkipField(buffer, pos, end, tag_bytes) if pos == -1: raise _DecodeError('Missing group end tag.') if pos > end: raise _DecodeError('Truncated message.') if type_id == -1: raise _DecodeError('MessageSet item missing type_id.') if message_start == -1: raise _DecodeError('MessageSet item missing message.') extension = extensions_by_number.get(type_id) if extension is not None: value = field_dict.get(extension) if value is None: value = field_dict.setdefault( extension, extension.message_type._concrete_class()) if value._InternalParse(buffer, message_start,message_end) != message_end: # The only reason _InternalParse would return early is if it encountered # an end-group tag. raise _DecodeError('Unexpected end-group tag.') else: if not message._unknown_fields: message._unknown_fields = [] message._unknown_fields.append((MESSAGE_SET_ITEM_TAG, buffer[message_set_item_start:pos])) return pos return DecodeItem
def MessageSetItemDecoder(extensions_by_number): """Returns a decoder for a MessageSet item. The parameter is the _extensions_by_number map for the message class. The message set message looks like this: message MessageSet { repeated group Item = 1 { required int32 type_id = 2; required string message = 3; } } """ type_id_tag_bytes = encoder.TagBytes(2, wire_format.WIRETYPE_VARINT) message_tag_bytes = encoder.TagBytes(3, wire_format.WIRETYPE_LENGTH_DELIMITED) item_end_tag_bytes = encoder.TagBytes(1, wire_format.WIRETYPE_END_GROUP) local_ReadTag = ReadTag local_DecodeVarint = _DecodeVarint local_SkipField = SkipField def DecodeItem(buffer, pos, end, message, field_dict): message_set_item_start = pos type_id = -1 message_start = -1 message_end = -1 # Technically, type_id and message can appear in any order, so we need # a little loop here. while 1: (tag_bytes, pos) = local_ReadTag(buffer, pos) if tag_bytes == type_id_tag_bytes: (type_id, pos) = local_DecodeVarint(buffer, pos) elif tag_bytes == message_tag_bytes: (size, message_start) = local_DecodeVarint(buffer, pos) pos = message_end = message_start + size elif tag_bytes == item_end_tag_bytes: break else: pos = SkipField(buffer, pos, end, tag_bytes) if pos == -1: raise _DecodeError('Missing group end tag.') if pos > end: raise _DecodeError('Truncated message.') if type_id == -1: raise _DecodeError('MessageSet item missing type_id.') if message_start == -1: raise _DecodeError('MessageSet item missing message.') extension = extensions_by_number.get(type_id) if extension is not None: value = field_dict.get(extension) if value is None: value = field_dict.setdefault( extension, extension.message_type._concrete_class()) if value._InternalParse(buffer, message_start,message_end) != message_end: # The only reason _InternalParse would return early is if it encountered # an end-group tag. raise _DecodeError('Unexpected end-group tag.') else: if not message._unknown_fields: message._unknown_fields = [] message._unknown_fields.append((MESSAGE_SET_ITEM_TAG, buffer[message_set_item_start:pos])) return pos return DecodeItem
[ "Returns", "a", "decoder", "for", "a", "MessageSet", "item", "." ]
ibelie/typy
python
https://github.com/ibelie/typy/blob/3616845fb91459aacd8df6bf82c5d91f4542bee7/typy/google/protobuf/internal/decoder.py#L645-L715
[ "def", "MessageSetItemDecoder", "(", "extensions_by_number", ")", ":", "type_id_tag_bytes", "=", "encoder", ".", "TagBytes", "(", "2", ",", "wire_format", ".", "WIRETYPE_VARINT", ")", "message_tag_bytes", "=", "encoder", ".", "TagBytes", "(", "3", ",", "wire_format", ".", "WIRETYPE_LENGTH_DELIMITED", ")", "item_end_tag_bytes", "=", "encoder", ".", "TagBytes", "(", "1", ",", "wire_format", ".", "WIRETYPE_END_GROUP", ")", "local_ReadTag", "=", "ReadTag", "local_DecodeVarint", "=", "_DecodeVarint", "local_SkipField", "=", "SkipField", "def", "DecodeItem", "(", "buffer", ",", "pos", ",", "end", ",", "message", ",", "field_dict", ")", ":", "message_set_item_start", "=", "pos", "type_id", "=", "-", "1", "message_start", "=", "-", "1", "message_end", "=", "-", "1", "# Technically, type_id and message can appear in any order, so we need", "# a little loop here.", "while", "1", ":", "(", "tag_bytes", ",", "pos", ")", "=", "local_ReadTag", "(", "buffer", ",", "pos", ")", "if", "tag_bytes", "==", "type_id_tag_bytes", ":", "(", "type_id", ",", "pos", ")", "=", "local_DecodeVarint", "(", "buffer", ",", "pos", ")", "elif", "tag_bytes", "==", "message_tag_bytes", ":", "(", "size", ",", "message_start", ")", "=", "local_DecodeVarint", "(", "buffer", ",", "pos", ")", "pos", "=", "message_end", "=", "message_start", "+", "size", "elif", "tag_bytes", "==", "item_end_tag_bytes", ":", "break", "else", ":", "pos", "=", "SkipField", "(", "buffer", ",", "pos", ",", "end", ",", "tag_bytes", ")", "if", "pos", "==", "-", "1", ":", "raise", "_DecodeError", "(", "'Missing group end tag.'", ")", "if", "pos", ">", "end", ":", "raise", "_DecodeError", "(", "'Truncated message.'", ")", "if", "type_id", "==", "-", "1", ":", "raise", "_DecodeError", "(", "'MessageSet item missing type_id.'", ")", "if", "message_start", "==", "-", "1", ":", "raise", "_DecodeError", "(", "'MessageSet item missing message.'", ")", "extension", "=", "extensions_by_number", ".", "get", "(", "type_id", ")", "if", "extension", "is", "not", "None", ":", "value", "=", "field_dict", ".", "get", "(", "extension", ")", "if", "value", "is", "None", ":", "value", "=", "field_dict", ".", "setdefault", "(", "extension", ",", "extension", ".", "message_type", ".", "_concrete_class", "(", ")", ")", "if", "value", ".", "_InternalParse", "(", "buffer", ",", "message_start", ",", "message_end", ")", "!=", "message_end", ":", "# The only reason _InternalParse would return early is if it encountered", "# an end-group tag.", "raise", "_DecodeError", "(", "'Unexpected end-group tag.'", ")", "else", ":", "if", "not", "message", ".", "_unknown_fields", ":", "message", ".", "_unknown_fields", "=", "[", "]", "message", ".", "_unknown_fields", ".", "append", "(", "(", "MESSAGE_SET_ITEM_TAG", ",", "buffer", "[", "message_set_item_start", ":", "pos", "]", ")", ")", "return", "pos", "return", "DecodeItem" ]
3616845fb91459aacd8df6bf82c5d91f4542bee7
valid
get_app_name
Flask like implementation of getting the application name via the filename of the including file
happymongo/__init__.py
def get_app_name():
    """Flask like implementation of getting the application name via
    the filename of the including file
    """
    fn = getattr(sys.modules['__main__'], '__file__', None)
    if fn is None:
        return '__main__'
    return os.path.splitext(os.path.basename(fn))[0]
def get_app_name():
    """Flask like implementation of getting the application name via
    the filename of the including file
    """
    fn = getattr(sys.modules['__main__'], '__file__', None)
    if fn is None:
        return '__main__'
    return os.path.splitext(os.path.basename(fn))[0]
[ "Flask", "like", "implementation", "of", "getting", "the", "applicaiton", "name", "via", "the", "filename", "of", "the", "including", "file" ]
sivel/happymongo
python
https://github.com/sivel/happymongo/blob/05831465ef9b88210a67d00c35b37d7f114c6a63/happymongo/__init__.py#L33-L41
[ "def", "get_app_name", "(", ")", ":", "fn", "=", "getattr", "(", "sys", ".", "modules", "[", "'__main__'", "]", ",", "'__file__'", ",", "None", ")", "if", "fn", "is", "None", ":", "return", "'__main__'", "return", "os", ".", "path", ".", "splitext", "(", "os", ".", "path", ".", "basename", "(", "fn", ")", ")", "[", "0", "]" ]
05831465ef9b88210a67d00c35b37d7f114c6a63
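A quick illustration of both branches:

# Saved as train.py and run with `python train.py`, this prints 'train';
# in an interactive session __main__ has no __file__, so it prints '__main__'.
print(get_app_name())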
valid
get_function
Given a Python function name, return the function it refers to.
relax/viewserver.py
def get_function(function_name): """ Given a Python function name, return the function it refers to. """ module, basename = str(function_name).rsplit('.', 1) try: return getattr(__import__(module, fromlist=[basename]), basename) except (ImportError, AttributeError): raise FunctionNotFound(function_name)
def get_function(function_name): """ Given a Python function name, return the function it refers to. """ module, basename = str(function_name).rsplit('.', 1) try: return getattr(__import__(module, fromlist=[basename]), basename) except (ImportError, AttributeError): raise FunctionNotFound(function_name)
[ "Given", "a", "Python", "function", "name", "return", "the", "function", "it", "refers", "to", "." ]
zvoase/django-relax
python
https://github.com/zvoase/django-relax/blob/10bb37bf3a512b290816856a6877c17fa37e930f/relax/viewserver.py#L16-L24
[ "def", "get_function", "(", "function_name", ")", ":", "module", ",", "basename", "=", "str", "(", "function_name", ")", ".", "rsplit", "(", "'.'", ",", "1", ")", "try", ":", "return", "getattr", "(", "__import__", "(", "module", ",", "fromlist", "=", "[", "basename", "]", ")", ",", "basename", ")", "except", "(", "ImportError", ",", "AttributeError", ")", ":", "raise", "FunctionNotFound", "(", "function_name", ")" ]
10bb37bf3a512b290816856a6877c17fa37e930f
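A short sketch, resolving a stdlib function by its dotted name:

import os.path

join = get_function('os.path.join')
assert join('a', 'b') == os.path.join('a', 'b')
# A bad dotted path raises the module's FunctionNotFound:
# get_function('os.path.no_such_name')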
valid
ViewServerRequestHandler.handle_add_fun
Add a function to the function list, in order.
relax/viewserver.py
def handle_add_fun(self, function_name): """Add a function to the function list, in order.""" function_name = function_name.strip() try: function = get_function(function_name) except Exception, exc: self.wfile.write(js_error(exc) + NEWLINE) return # This tests to see if the function has been decorated with the view # server synchronisation decorator (``decorate_view``). if not getattr(function, 'view_decorated', None): self.functions[function_name] = (self.function_counter, function) # The decorator gets called with the logger function. else: self.functions[function_name] = (self.function_counter, function(self.log)) self.function_counter += 1 return True
def handle_add_fun(self, function_name): """Add a function to the function list, in order.""" function_name = function_name.strip() try: function = get_function(function_name) except Exception, exc: self.wfile.write(js_error(exc) + NEWLINE) return # This tests to see if the function has been decorated with the view # server synchronisation decorator (``decorate_view``). if not getattr(function, 'view_decorated', None): self.functions[function_name] = (self.function_counter, function) # The decorator gets called with the logger function. else: self.functions[function_name] = (self.function_counter, function(self.log)) self.function_counter += 1 return True
[ "Add", "a", "function", "to", "the", "function", "list", "in", "order", "." ]
zvoase/django-relax
python
https://github.com/zvoase/django-relax/blob/10bb37bf3a512b290816856a6877c17fa37e930f/relax/viewserver.py#L63-L80
[ "def", "handle_add_fun", "(", "self", ",", "function_name", ")", ":", "function_name", "=", "function_name", ".", "strip", "(", ")", "try", ":", "function", "=", "get_function", "(", "function_name", ")", "except", "Exception", ",", "exc", ":", "self", ".", "wfile", ".", "write", "(", "js_error", "(", "exc", ")", "+", "NEWLINE", ")", "return", "# This tests to see if the function has been decorated with the view", "# server synchronisation decorator (``decorate_view``).", "if", "not", "getattr", "(", "function", ",", "'view_decorated'", ",", "None", ")", ":", "self", ".", "functions", "[", "function_name", "]", "=", "(", "self", ".", "function_counter", ",", "function", ")", "# The decorator gets called with the logger function.", "else", ":", "self", ".", "functions", "[", "function_name", "]", "=", "(", "self", ".", "function_counter", ",", "function", "(", "self", ".", "log", ")", ")", "self", ".", "function_counter", "+=", "1", "return", "True" ]
10bb37bf3a512b290816856a6877c17fa37e930f
valid
ViewServerRequestHandler.handle_map_doc
Return the mapping of a document according to the function list.
relax/viewserver.py
def handle_map_doc(self, document): """Return the mapping of a document according to the function list.""" # This uses the stored set of functions, sorted by order of addition. for function in sorted(self.functions.values(), key=lambda x: x[0]): try: # It has to be run through ``list``, because it may be a # generator function. yield [list(function(document))] except Exception, exc: # Otherwise, return an empty list and log the event. yield [] self.log(repr(exc))
def handle_map_doc(self, document): """Return the mapping of a document according to the function list.""" # This uses the stored set of functions, sorted by order of addition. for function in sorted(self.functions.values(), key=lambda x: x[0]): try: # It has to be run through ``list``, because it may be a # generator function. yield [list(function(document))] except Exception, exc: # Otherwise, return an empty list and log the event. yield [] self.log(repr(exc))
[ "Return", "the", "mapping", "of", "a", "document", "according", "to", "the", "function", "list", "." ]
zvoase/django-relax
python
https://github.com/zvoase/django-relax/blob/10bb37bf3a512b290816856a6877c17fa37e930f/relax/viewserver.py#L83-L94
[ "def", "handle_map_doc", "(", "self", ",", "document", ")", ":", "# This uses the stored set of functions, sorted by order of addition.", "for", "function", "in", "sorted", "(", "self", ".", "functions", ".", "values", "(", ")", ",", "key", "=", "lambda", "x", ":", "x", "[", "0", "]", ")", ":", "try", ":", "# It has to be run through ``list``, because it may be a", "# generator function.", "yield", "[", "list", "(", "function", "(", "document", ")", ")", "]", "except", "Exception", ",", "exc", ":", "# Otherwise, return an empty list and log the event.", "yield", "[", "]", "self", ".", "log", "(", "repr", "(", "exc", ")", ")" ]
10bb37bf3a512b290816856a6877c17fa37e930f
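handle_map_doc applies every registered function to the document in insertion order, materialising each result with list() because map functions may be generators, and yielding [] when one raises. A hedged Python 3 sketch with invented sample functions:

def map_doc(functions, document):
    # Apply functions in registration order; list() materialises generators;
    # a failing function contributes [] instead of aborting the batch.
    for _, function in sorted(functions.values(), key=lambda pair: pair[0]):
        try:
            yield [list(function(document))]
        except Exception:
            yield []

funcs = {'emit_keys': (0, lambda doc: ((k, 1) for k in doc)),
         'broken': (1, lambda doc: doc['missing'])}  # raises KeyError
print(list(map_doc(funcs, {'x': 1})))  # [[[('x', 1)]], []]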
valid
ViewServerRequestHandler.handle_reduce
Reduce several mapped documents by several reduction functions.
relax/viewserver.py
def handle_reduce(self, reduce_function_names, mapped_docs): """Reduce several mapped documents by several reduction functions.""" reduce_functions = [] # This gets a large list of reduction functions, given their names. for reduce_function_name in reduce_function_names: try: reduce_function = get_function(reduce_function_name) if getattr(reduce_function, 'view_decorated', None): reduce_function = reduce_function(self.log) reduce_functions.append(reduce_function) except Exception, exc: self.log(repr(exc)) reduce_functions.append(lambda *args, **kwargs: None) # Transform lots of (key, value) pairs into one (keys, values) pair. keys, values = zip( (key, value) for ((key, doc_id), value) in mapped_docs) # This gets the list of results from the reduction functions. results = [] for reduce_function in reduce_functions: try: results.append(reduce_function(keys, values, rereduce=False)) except Exception, exc: self.log(repr(exc)) results.append(None) return [True, results]
def handle_reduce(self, reduce_function_names, mapped_docs): """Reduce several mapped documents by several reduction functions.""" reduce_functions = [] # This gets a large list of reduction functions, given their names. for reduce_function_name in reduce_function_names: try: reduce_function = get_function(reduce_function_name) if getattr(reduce_function, 'view_decorated', None): reduce_function = reduce_function(self.log) reduce_functions.append(reduce_function) except Exception, exc: self.log(repr(exc)) reduce_functions.append(lambda *args, **kwargs: None) # Transform lots of (key, value) pairs into one (keys, values) pair. keys, values = zip( (key, value) for ((key, doc_id), value) in mapped_docs) # This gets the list of results from the reduction functions. results = [] for reduce_function in reduce_functions: try: results.append(reduce_function(keys, values, rereduce=False)) except Exception, exc: self.log(repr(exc)) results.append(None) return [True, results]
[ "Reduce", "several", "mapped", "documents", "by", "several", "reduction", "functions", "." ]
zvoase/django-relax
python
https://github.com/zvoase/django-relax/blob/10bb37bf3a512b290816856a6877c17fa37e930f/relax/viewserver.py#L96-L120
[ "def", "handle_reduce", "(", "self", ",", "reduce_function_names", ",", "mapped_docs", ")", ":", "reduce_functions", "=", "[", "]", "# This gets a large list of reduction functions, given their names.", "for", "reduce_function_name", "in", "reduce_function_names", ":", "try", ":", "reduce_function", "=", "get_function", "(", "reduce_function_name", ")", "if", "getattr", "(", "reduce_function", ",", "'view_decorated'", ",", "None", ")", ":", "reduce_function", "=", "reduce_function", "(", "self", ".", "log", ")", "reduce_functions", ".", "append", "(", "reduce_function", ")", "except", "Exception", ",", "exc", ":", "self", ".", "log", "(", "repr", "(", "exc", ")", ")", "reduce_functions", ".", "append", "(", "lambda", "*", "args", ",", "*", "*", "kwargs", ":", "None", ")", "# Transform lots of (key, value) pairs into one (keys, values) pair.", "keys", ",", "values", "=", "zip", "(", "(", "key", ",", "value", ")", "for", "(", "(", "key", ",", "doc_id", ")", ",", "value", ")", "in", "mapped_docs", ")", "# This gets the list of results from the reduction functions.", "results", "=", "[", "]", "for", "reduce_function", "in", "reduce_functions", ":", "try", ":", "results", ".", "append", "(", "reduce_function", "(", "keys", ",", "values", ",", "rereduce", "=", "False", ")", ")", "except", "Exception", ",", "exc", ":", "self", ".", "log", "(", "repr", "(", "exc", ")", ")", "results", ".", "append", "(", "None", ")", "return", "[", "True", ",", "results", "]" ]
10bb37bf3a512b290816856a6877c17fa37e930f
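The comment in handle_reduce describes transposing many (key, value) rows into one keys sequence and one values sequence before calling reduce_function(keys, values, rereduce=False). Note the repo code calls zip() on a single generator, which pairs nothing up; the transpose the comment describes is normally spelled zip(*pairs), which this hedged sketch assumes:

# Transpose mapped [[key, doc_id], value] rows into keys and values tuples.
mapped_docs = [(('colour', 'doc1'), 'red'), (('colour', 'doc2'), 'blue')]
keys, values = zip(*((key, value) for (key, doc_id), value in mapped_docs))
count = lambda keys, values, rereduce=False: len(values)
print(keys, values, count(keys, values))
# ('colour', 'colour') ('red', 'blue') 2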
valid
ViewServerRequestHandler.handle_rereduce
Re-reduce a set of values, with a list of rereduction functions.
relax/viewserver.py
def handle_rereduce(self, reduce_function_names, values): """Re-reduce a set of values, with a list of rereduction functions.""" # This gets a large list of reduction functions, given their names. reduce_functions = [] for reduce_function_name in reduce_function_names: try: reduce_function = get_function(reduce_function_name) if getattr(reduce_function, 'view_decorated', None): reduce_function = reduce_function(self.log) reduce_functions.append(reduce_function) except Exception, exc: self.log(repr(exc)) reduce_functions.append(lambda *args, **kwargs: None) # This gets the list of results from those functions. results = [] for reduce_function in reduce_functions: try: results.append(reduce_function(None, values, rereduce=True)) except Exception, exc: self.log(repr(exc)) results.append(None) return [True, results]
def handle_rereduce(self, reduce_function_names, values): """Re-reduce a set of values, with a list of rereduction functions.""" # This gets a large list of reduction functions, given their names. reduce_functions = [] for reduce_function_name in reduce_function_names: try: reduce_function = get_function(reduce_function_name) if getattr(reduce_function, 'view_decorated', None): reduce_function = reduce_function(self.log) reduce_functions.append(reduce_function) except Exception, exc: self.log(repr(exc)) reduce_functions.append(lambda *args, **kwargs: None) # This gets the list of results from those functions. results = [] for reduce_function in reduce_functions: try: results.append(reduce_function(None, values, rereduce=True)) except Exception, exc: self.log(repr(exc)) results.append(None) return [True, results]
[ "Re", "-", "reduce", "a", "set", "of", "values", "with", "a", "list", "of", "rereduction", "functions", "." ]
zvoase/django-relax
python
https://github.com/zvoase/django-relax/blob/10bb37bf3a512b290816856a6877c17fa37e930f/relax/viewserver.py#L122-L143
[ "def", "handle_rereduce", "(", "self", ",", "reduce_function_names", ",", "values", ")", ":", "# This gets a large list of reduction functions, given their names.", "reduce_functions", "=", "[", "]", "for", "reduce_function_name", "in", "reduce_function_names", ":", "try", ":", "reduce_function", "=", "get_function", "(", "reduce_function_name", ")", "if", "getattr", "(", "reduce_function", ",", "'view_decorated'", ",", "None", ")", ":", "reduce_function", "=", "reduce_function", "(", "self", ".", "log", ")", "reduce_functions", ".", "append", "(", "reduce_function", ")", "except", "Exception", ",", "exc", ":", "self", ".", "log", "(", "repr", "(", "exc", ")", ")", "reduce_functions", ".", "append", "(", "lambda", "*", "args", ",", "*", "*", "kwargs", ":", "None", ")", "# This gets the list of results from those functions.", "results", "=", "[", "]", "for", "reduce_function", "in", "reduce_functions", ":", "try", ":", "results", ".", "append", "(", "reduce_function", "(", "None", ",", "values", ",", "rereduce", "=", "True", ")", ")", "except", "Exception", ",", "exc", ":", "self", ".", "log", "(", "repr", "(", "exc", ")", ")", "results", ".", "append", "(", "None", ")", "return", "[", "True", ",", "results", "]" ]
10bb37bf3a512b290816856a6877c17fa37e930f
valid
ViewServerRequestHandler.handle_validate
Validate...this function is undocumented, but still in CouchDB.
relax/viewserver.py
def handle_validate(self, function_name, new_doc, old_doc, user_ctx): """Validate...this function is undocumented, but still in CouchDB.""" try: function = get_function(function_name) except Exception, exc: self.log(repr(exc)) return False try: return function(new_doc, old_doc, user_ctx) except Exception, exc: self.log(repr(exc)) return repr(exc)
def handle_validate(self, function_name, new_doc, old_doc, user_ctx): """Validate...this function is undocumented, but still in CouchDB.""" try: function = get_function(function_name) except Exception, exc: self.log(repr(exc)) return False try: return function(new_doc, old_doc, user_ctx) except Exception, exc: self.log(repr(exc)) return repr(exc)
[ "Validate", "...", "this", "function", "is", "undocumented", "but", "still", "in", "CouchDB", "." ]
zvoase/django-relax
python
https://github.com/zvoase/django-relax/blob/10bb37bf3a512b290816856a6877c17fa37e930f/relax/viewserver.py#L145-L156
[ "def", "handle_validate", "(", "self", ",", "function_name", ",", "new_doc", ",", "old_doc", ",", "user_ctx", ")", ":", "try", ":", "function", "=", "get_function", "(", "function_name", ")", "except", "Exception", ",", "exc", ":", "self", ".", "log", "(", "repr", "(", "exc", ")", ")", "return", "False", "try", ":", "return", "function", "(", "new_doc", ",", "old_doc", ",", "user_ctx", ")", "except", "Exception", ",", "exc", ":", "self", ".", "log", "(", "repr", "(", "exc", ")", ")", "return", "repr", "(", "exc", ")" ]
10bb37bf3a512b290816856a6877c17fa37e930f
valid
ViewServerRequestHandler.handle
The main function called to handle a request.
relax/viewserver.py
def handle(self): """The main function called to handle a request.""" while True: try: line = self.rfile.readline() try: # All input data are lines of JSON like the following: # ["<cmd_name>" "<cmd_arg1>" "<cmd_arg2>" ...] # So I handle this by dispatching to various methods. cmd = json.loads(line) except Exception, exc: # Sometimes errors come up. Once again, I can't predict # anything, but can at least tell CouchDB about the error. self.wfile.write(repr(exc) + NEWLINE) continue else: # Automagically get the command handler. handler = getattr(self, 'handle_' + cmd[0], None) if not handler: # We are ready to not find commands. It probably won't # happen, but fortune favours the prepared. self.wfile.write( repr(CommandNotFound(cmd[0])) + NEWLINE) continue return_value = handler(*cmd[1:]) if not return_value: continue # We write the output back to CouchDB. self.wfile.write( one_lineify(json.dumps(return_value)) + NEWLINE) except Exception, exc: self.wfile.write(repr(exc) + NEWLINE) continue
def handle(self): """The main function called to handle a request.""" while True: try: line = self.rfile.readline() try: # All input data are lines of JSON like the following: # ["<cmd_name>" "<cmd_arg1>" "<cmd_arg2>" ...] # So I handle this by dispatching to various methods. cmd = json.loads(line) except Exception, exc: # Sometimes errors come up. Once again, I can't predict # anything, but can at least tell CouchDB about the error. self.wfile.write(repr(exc) + NEWLINE) continue else: # Automagically get the command handler. handler = getattr(self, 'handle_' + cmd[0], None) if not handler: # We are ready to not find commands. It probably won't # happen, but fortune favours the prepared. self.wfile.write( repr(CommandNotFound(cmd[0])) + NEWLINE) continue return_value = handler(*cmd[1:]) if not return_value: continue # We write the output back to CouchDB. self.wfile.write( one_lineify(json.dumps(return_value)) + NEWLINE) except Exception, exc: self.wfile.write(repr(exc) + NEWLINE) continue
[ "The", "main", "function", "called", "to", "handle", "a", "request", "." ]
zvoase/django-relax
python
https://github.com/zvoase/django-relax/blob/10bb37bf3a512b290816856a6877c17fa37e930f/relax/viewserver.py#L158-L190
[ "def", "handle", "(", "self", ")", ":", "while", "True", ":", "try", ":", "line", "=", "self", ".", "rfile", ".", "readline", "(", ")", "try", ":", "# All input data are lines of JSON like the following:", "# [\"<cmd_name>\" \"<cmd_arg1>\" \"<cmd_arg2>\" ...]", "# So I handle this by dispatching to various methods.", "cmd", "=", "json", ".", "loads", "(", "line", ")", "except", "Exception", ",", "exc", ":", "# Sometimes errors come up. Once again, I can't predict", "# anything, but can at least tell CouchDB about the error.", "self", ".", "wfile", ".", "write", "(", "repr", "(", "exc", ")", "+", "NEWLINE", ")", "continue", "else", ":", "# Automagically get the command handler.", "handler", "=", "getattr", "(", "self", ",", "'handle_'", "+", "cmd", "[", "0", "]", ",", "None", ")", "if", "not", "handler", ":", "# We are ready to not find commands. It probably won't", "# happen, but fortune favours the prepared.", "self", ".", "wfile", ".", "write", "(", "repr", "(", "CommandNotFound", "(", "cmd", "[", "0", "]", ")", ")", "+", "NEWLINE", ")", "continue", "return_value", "=", "handler", "(", "*", "cmd", "[", "1", ":", "]", ")", "if", "not", "return_value", ":", "continue", "# We write the output back to CouchDB.", "self", ".", "wfile", ".", "write", "(", "one_lineify", "(", "json", ".", "dumps", "(", "return_value", ")", ")", "+", "NEWLINE", ")", "except", "Exception", ",", "exc", ":", "self", ".", "wfile", ".", "write", "(", "repr", "(", "exc", ")", "+", "NEWLINE", ")", "continue" ]
10bb37bf3a512b290816856a6877c17fa37e930f
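The protocol handled here is line-delimited JSON: each request is an array whose first element names a handle_<cmd> method and whose remaining elements are its arguments. A minimal Python 3 sketch of that dispatch; EchoServer is a hypothetical stand-in for the request handler:

import json

class EchoServer:
    """Hypothetical stand-in for the request handler's dispatch loop."""
    def handle_log(self, message):
        return {'logged': message}

    def dispatch(self, line):
        cmd = json.loads(line)
        # Look up handle_<cmd[0]> and call it with the remaining arguments.
        handler = getattr(self, 'handle_' + cmd[0], None)
        if handler is None:
            return {'error': 'command_not_found', 'command': cmd[0]}
        return handler(*cmd[1:])

server = EchoServer()
print(server.dispatch('["log", "hello"]'))  # {'logged': 'hello'}
print(server.dispatch('["nope"]'))          # command_not_found error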
valid
ViewServerRequestHandler.log
Log an event on the CouchDB server.
relax/viewserver.py
def log(self, string): """Log an event on the CouchDB server.""" self.wfile.write(json.dumps({'log': string}) + NEWLINE)
def log(self, string): """Log an event on the CouchDB server.""" self.wfile.write(json.dumps({'log': string}) + NEWLINE)
[ "Log", "an", "event", "on", "the", "CouchDB", "server", "." ]
zvoase/django-relax
python
https://github.com/zvoase/django-relax/blob/10bb37bf3a512b290816856a6877c17fa37e930f/relax/viewserver.py#L192-L194
[ "def", "log", "(", "self", ",", "string", ")", ":", "self", ".", "wfile", ".", "write", "(", "json", ".", "dumps", "(", "{", "'log'", ":", "string", "}", ")", "+", "NEWLINE", ")" ]
10bb37bf3a512b290816856a6877c17fa37e930f
valid
guid
Generates a universally unique ID. Any arguments only create more randomness.
baka_model/model/helper.py
def guid(*args): """ Generates a universally unique ID. Any arguments only create more randomness. """ t = float(time.time() * 1000) r = float(random.random()*10000000000000) a = random.random() * 10000000000000 data = str(t) + ' ' + str(r) + ' ' + str(a) + ' ' + str(args) data = hashlib.md5(data.encode()).hexdigest()[:10] return data
def guid(*args): """ Generates a universally unique ID. Any arguments only create more randomness. """ t = float(time.time() * 1000) r = float(random.random()*10000000000000) a = random.random() * 10000000000000 data = str(t) + ' ' + str(r) + ' ' + str(a) + ' ' + str(args) data = hashlib.md5(data.encode()).hexdigest()[:10] return data
[ "Generates", "a", "universally", "unique", "ID", ".", "Any", "arguments", "only", "create", "more", "randomness", "." ]
suryakencana007/baka_model
python
https://github.com/suryakencana007/baka_model/blob/915c2da9920e973302f5764ae63799acd5ecf0b7/baka_model/model/helper.py#L42-L54
[ "def", "guid", "(", "*", "args", ")", ":", "t", "=", "float", "(", "time", ".", "time", "(", ")", "*", "1000", ")", "r", "=", "float", "(", "random", ".", "random", "(", ")", "*", "10000000000000", ")", "a", "=", "random", ".", "random", "(", ")", "*", "10000000000000", "data", "=", "str", "(", "t", ")", "+", "' '", "+", "str", "(", "r", ")", "+", "' '", "+", "str", "(", "a", ")", "+", "' '", "+", "str", "(", "args", ")", "data", "=", "hashlib", ".", "md5", "(", "data", ".", "encode", "(", ")", ")", ".", "hexdigest", "(", ")", "[", ":", "10", "]", "return", "data" ]
915c2da9920e973302f5764ae63799acd5ecf0b7
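guid() hashes a millisecond timestamp, two large random numbers, and the stringified arguments, then keeps the first 10 hex characters of the MD5 digest. A small usage sketch of the same recipe; output necessarily differs on every run:

import hashlib
import random
import time

def guid(*args):
    # Time + two random floats + args, hashed and truncated to 10 hex chars.
    t = float(time.time() * 1000)
    r = float(random.random() * 10000000000000)
    a = random.random() * 10000000000000
    data = str(t) + ' ' + str(r) + ' ' + str(a) + ' ' + str(args)
    return hashlib.md5(data.encode()).hexdigest()[:10]

print(guid('order', 42))  # e.g. '3f1c2b9a0d' -- a fresh value each call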
valid
CmsViews.get_pages
Return pages the GitModel knows about. :param int limit: The number of pages to return, defaults to 5. :param tuple order_by: The attributes to order on, defaults to ('position', '-modified_at')
cms/views/cms_views.py
def get_pages(self, limit=5, order_by=('position', '-modified_at')): """ Return pages the GitModel knows about. :param int limit: The number of pages to return, defaults to 5. :param tuple order_by: The attributes to order on, defaults to ('position', '-modified_at') """ return to_eg_objects(self.workspace.S(Page).filter( language=self.locale).order_by(*order_by)[:limit])
def get_pages(self, limit=5, order_by=('position', '-modified_at')): """ Return pages the GitModel knows about. :param int limit: The number of pages to return, defaults to 5. :param tuple order_by: The attributes to order on, defaults to ('position', '-modified_at') """ return to_eg_objects(self.workspace.S(Page).filter( language=self.locale).order_by(*order_by)[:limit])
[ "Return", "pages", "the", "GitModel", "knows", "about", ".", ":", "param", "int", "limit", ":", "The", "number", "of", "pages", "to", "return", "defaults", "to", "5", ".", ":", "param", "tuple", "order_by", ":", "The", "attributes", "to", "order", "on", "defaults", "to", "(", "position", "-", "modified_at", ")" ]
universalcore/unicore-cms
python
https://github.com/universalcore/unicore-cms/blob/f68385fe742eb7efcce0d8f04f42f26ccf05d624/cms/views/cms_views.py#L159-L169
[ "def", "get_pages", "(", "self", ",", "limit", "=", "5", ",", "order_by", "=", "(", "'position'", ",", "'-modified_at'", ")", ")", ":", "return", "to_eg_objects", "(", "self", ".", "workspace", ".", "S", "(", "Page", ")", ".", "filter", "(", "language", "=", "self", ".", "locale", ")", ".", "order_by", "(", "*", "order_by", ")", "[", ":", "limit", "]", ")" ]
f68385fe742eb7efcce0d8f04f42f26ccf05d624
valid
CmsViews.get_featured_pages
Return featured pages the GitModel knows about. :param str locale: The locale string, like `eng_UK`. :param int limit: The number of pages to return, defaults to 5. :param tuple order_by: The attributes to order on, defaults to ('position', '-modified_at').
cms/views/cms_views.py
def get_featured_pages( self, limit=5, order_by=('position', '-modified_at')): """ Return featured pages the GitModel knows about. :param str locale: The locale string, like `eng_UK`. :param int limit: The number of pages to return, defaults to 5. :param tuple order_by: The attributes to order on, defaults to ('position', '-modified_at'). """ return self._get_featured_pages(self.locale, limit, order_by)
def get_featured_pages( self, limit=5, order_by=('position', '-modified_at')): """ Return featured pages the GitModel knows about. :param str locale: The locale string, like `eng_UK`. :param int limit: The number of pages to return, defaults to 5. :param tuple order_by: The attributes to order on, defaults to ('position', '-modified_at'). """ return self._get_featured_pages(self.locale, limit, order_by)
[ "Return", "featured", "pages", "the", "GitModel", "knows", "about", ".", ":", "param", "str", "locale", ":", "The", "locale", "string", "like", "eng_UK", ".", ":", "param", "int", "limit", ":", "The", "number", "of", "pages", "to", "return", "defaults", "to", "5", ".", ":", "param", "tuple", "order_by", ":", "The", "attributes", "to", "order", "on", "defaults", "to", "(", "position", "-", "modified_at", ")", "." ]
universalcore/unicore-cms
python
https://github.com/universalcore/unicore-cms/blob/f68385fe742eb7efcce0d8f04f42f26ccf05d624/cms/views/cms_views.py#L176-L188
[ "def", "get_featured_pages", "(", "self", ",", "limit", "=", "5", ",", "order_by", "=", "(", "'position'", ",", "'-modified_at'", ")", ")", ":", "return", "self", ".", "_get_featured_pages", "(", "self", ".", "locale", ",", "limit", ",", "order_by", ")" ]
f68385fe742eb7efcce0d8f04f42f26ccf05d624
valid
AsyncProauth2.register_app
register_app takes an application name and redirect_uri It generates client_id (client_key) and client_secret, then stores all of the above in the data_store, and returns a dictionary containing the client_id and client_secret.
proauth2/async_proauth2.py
def register_app(self, name, redirect_uri, callback): ''' register_app takes an application name and redirect_uri It generates client_id (client_key) and client_secret, then stores all of the above in the data_store, and returns a dictionary containing the client_id and client_secret. ''' client_id = self._generate_token() client_secret = self._generate_token(64) yield Task(self.data_store.store, 'applications', client_id=client_id, client_secret=client_secret, name=name, redirect_uri=redirect_uri) callback({'client_id':client_id, 'client_secret':client_secret})
def register_app(self, name, redirect_uri, callback): ''' register_app takes an application name and redirect_uri It generates client_id (client_key) and client_secret, then stores all of the above in the data_store, and returns a dictionary containing the client_id and client_secret. ''' client_id = self._generate_token() client_secret = self._generate_token(64) yield Task(self.data_store.store, 'applications', client_id=client_id, client_secret=client_secret, name=name, redirect_uri=redirect_uri) callback({'client_id':client_id, 'client_secret':client_secret})
[ "register_app", "takes", "an", "application", "name", "and", "redirect_uri", "It", "generates", "client_id", "(", "client_key", ")", "and", "client_secret", "then", "stores", "all", "of", "the", "above", "in", "the", "data_store", "and", "returns", "a", "dictionary", "containing", "the", "client_id", "and", "client_secret", "." ]
charlesthomas/proauth2
python
https://github.com/charlesthomas/proauth2/blob/f88c8df966a1802414047ed304d02df1dd520097/proauth2/async_proauth2.py#L22-L34
[ "def", "register_app", "(", "self", ",", "name", ",", "redirect_uri", ",", "callback", ")", ":", "client_id", "=", "self", ".", "_generate_token", "(", ")", "client_secret", "=", "self", ".", "_generate_token", "(", "64", ")", "yield", "Task", "(", "self", ".", "data_store", ".", "store", ",", "'applications'", ",", "client_id", "=", "client_id", ",", "client_secret", "=", "client_secret", ",", "name", "=", "name", ",", "redirect_uri", "=", "redirect_uri", ")", "callback", "(", "{", "'client_id'", ":", "client_id", ",", "'client_secret'", ":", "client_secret", "}", ")" ]
f88c8df966a1802414047ed304d02df1dd520097
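register_app boils down to generating two random tokens and persisting them with the app metadata under an 'applications' collection. A hedged in-memory sketch; the dict store and the demo-app values are stand-ins, not the library's real data_store API:

from random import choice
from string import ascii_letters, digits

def generate_token(length=32):
    return ''.join(choice(ascii_letters + digits) for _ in range(length))

applications = {}  # hypothetical stand-in for the 'applications' collection
client_id, client_secret = generate_token(), generate_token(64)
applications[client_id] = {'client_secret': client_secret,
                           'name': 'demo-app',
                           'redirect_uri': 'https://example.com/callback'}
print(len(client_id), len(client_secret))  # 32 64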
valid
AsyncProauth2.request_authorization
request_authorization generates a nonce, and stores it in the data_store along with the client_id, user_id, and expiration timestamp. It then returns a dictionary containing the nonce as "code," and the passed state. --- response_type MUST be "code." this is directly from the OAuth2 spec. this probably doesn't need to be checked here, but if it's in the spec I guess it should be verified somewhere. scope has not been implemented here. it will be stored, but there is no scope-checking built in here at this time. if a redirect_uri is passed, it must match the registered redirect_uri. again, this is per spec.
proauth2/async_proauth2.py
def request_authorization(self, client_id, user_id, response_type, redirect_uri=None, scope=None, state=None, expires=600, callback=None): ''' request_authorization generates a nonce, and stores it in the data_store along with the client_id, user_id, and expiration timestamp. It then returns a dictionary containing the nonce as "code," and the passed state. --- response_type MUST be "code." this is directly from the OAuth2 spec. this probably doesn't need to be checked here, but if it's in the spec I guess it should be verified somewhere. scope has not been implemented here. it will be stored, but there is no scope-checking built in here at this time. if a redirect_uri is passed, it must match the registered redirect_uri. again, this is per spec. ''' if response_type != 'code': raise Proauth2Error('invalid_request', 'response_type must be "code"', state=state) client = yield Task(self.data_store.fetch, 'applications', client_id=client_id) if not client: raise Proauth2Error('access_denied') if redirect_uri and client['redirect_uri'] != redirect_uri: raise Proauth2Error('invalid_request', "redirect_uris don't match") nonce_code = self._generate_token() expires = time() + expires try: yield Task(self.data_store.store, 'nonce_codes', code=nonce_code, client_id=client_id, expires=expires, user_id=user_id, scope=scope) except Proauth2Error as e: e.state = state raise e callback({'code':nonce_code, 'state':state})
def request_authorization(self, client_id, user_id, response_type, redirect_uri=None, scope=None, state=None, expires=600, callback=None): ''' request_authorization generates a nonce, and stores it in the data_store along with the client_id, user_id, and expiration timestamp. It then returns a dictionary containing the nonce as "code," and the passed state. --- response_type MUST be "code." this is directly from the OAuth2 spec. this probably doesn't need to be checked here, but if it's in the spec I guess it should be verified somewhere. scope has not been implemented here. it will be stored, but there is no scope-checking built in here at this time. if a redirect_uri is passed, it must match the registered redirect_uri. again, this is per spec. ''' if response_type != 'code': raise Proauth2Error('invalid_request', 'response_type must be "code"', state=state) client = yield Task(self.data_store.fetch, 'applications', client_id=client_id) if not client: raise Proauth2Error('access_denied') if redirect_uri and client['redirect_uri'] != redirect_uri: raise Proauth2Error('invalid_request', "redirect_uris don't match") nonce_code = self._generate_token() expires = time() + expires try: yield Task(self.data_store.store, 'nonce_codes', code=nonce_code, client_id=client_id, expires=expires, user_id=user_id, scope=scope) except Proauth2Error as e: e.state = state raise e callback({'code':nonce_code, 'state':state})
[ "request_authorization", "generates", "a", "nonce", "and", "stores", "it", "in", "the", "data_store", "along", "with", "the", "client_id", "user_id", "and", "expiration", "timestamp", ".", "It", "then", "returns", "a", "dictionary", "containing", "the", "nonce", "as", "code", "and", "the", "passed", "state", ".", "---", "response_type", "MUST", "be", "code", ".", "this", "is", "directly", "from", "the", "OAuth2", "spec", ".", "this", "probably", "doesn", "t", "need", "to", "be", "checked", "here", "but", "if", "it", "s", "in", "the", "spec", "I", "guess", "it", "should", "be", "verified", "somewhere", ".", "scope", "has", "not", "been", "implemented", "here", ".", "it", "will", "be", "stored", "but", "there", "is", "no", "scope", "-", "checking", "built", "in", "here", "at", "this", "time", ".", "if", "a", "redirect_uri", "is", "passed", "it", "must", "match", "the", "registered", "redirect_uri", ".", "again", "this", "is", "per", "spec", "." ]
charlesthomas/proauth2
python
https://github.com/charlesthomas/proauth2/blob/f88c8df966a1802414047ed304d02df1dd520097/proauth2/async_proauth2.py#L37-L74
[ "def", "request_authorization", "(", "self", ",", "client_id", ",", "user_id", ",", "response_type", ",", "redirect_uri", "=", "None", ",", "scope", "=", "None", ",", "state", "=", "None", ",", "expires", "=", "600", ",", "callback", "=", "None", ")", ":", "if", "response_type", "!=", "'code'", ":", "raise", "Proauth2Error", "(", "'invalid_request'", ",", "'response_type must be \"code\"'", ",", "state", "=", "state", ")", "client", "=", "yield", "Task", "(", "self", ".", "data_store", ".", "fetch", ",", "'applications'", ",", "client_id", "=", "client_id", ")", "if", "not", "client", ":", "raise", "Proauth2Error", "(", "'access_denied'", ")", "if", "redirect_uri", "and", "client", "[", "'redirect_uri'", "]", "!=", "redirect_uri", ":", "raise", "Proauth2Error", "(", "'invalid_request'", ",", "\"redirect_uris don't match\"", ")", "nonce_code", "=", "self", ".", "_generate_token", "(", ")", "expires", "=", "time", "(", ")", "+", "expires", "try", ":", "yield", "Task", "(", "self", ".", "data_store", ".", "store", ",", "'nonce_codes'", ",", "code", "=", "nonce_code", ",", "client_id", "=", "client_id", ",", "expires", "=", "expires", ",", "user_id", "=", "user_id", ",", "scope", "=", "scope", ")", "except", "Proauth2Error", "as", "e", ":", "e", ".", "state", "=", "state", "raise", "e", "callback", "(", "{", "'code'", ":", "nonce_code", ",", "'state'", ":", "state", "}", ")" ]
f88c8df966a1802414047ed304d02df1dd520097
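On success the method hands back only the nonce (as "code") and the caller's opaque state; the expiry and user binding stay server-side. A tiny sketch of that response shape, with illustrative values and a dict standing in for the 'nonce_codes' collection:

from time import time

nonce_codes = {}  # hypothetical stand-in for the 'nonce_codes' collection
nonce_code, state = 'Xy9Qw2Lk', 'client-csrf-token'  # illustrative values
nonce_codes[nonce_code] = {'client_id': 'abc123', 'user_id': 'user-1',
                           'expires': time() + 600, 'scope': None}
print({'code': nonce_code, 'state': state})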
valid
AsyncProauth2.request_access_token
request_access_token validates the client_id and client_secret, using the provided method, then generates an access_token, stores it with the user_id from the nonce, and returns a dictionary containing an access_token and bearer token. --- from the spec, it looks like there are different types of tokens, but i don't understand the distinctions, so someone else can fix this if need be. regarding the method: it appears that it is intended for there to be multiple ways to verify the client_id. my assumption is that you use the secret as the salt and pass the hash of the client_id or something, and then compare hashes on the server end. currently the only implemented method is direct comparison of the client_ids and client_secrets. additional methods can be added to proauth2.auth_methods
proauth2/async_proauth2.py
def request_access_token(self, client_id, key, code, grant_type, redirect_uri=None, method='direct_auth', callback=None): ''' request_access_token validates the client_id and client_secret, using the provided method, then generates an access_token, stores it with the user_id from the nonce, and returns a dictionary containing an access_token and bearer token. --- from the spec, it looks like there are different types of tokens, but i don't understand the distinctions, so someone else can fix this if need be. regarding the method: it appears that it is intended for there to be multiple ways to verify the client_id. my assumption is that you use the secret as the salt and pass the hash of the client_id or something, and then compare hashes on the server end. currently the only implemented method is direct comparison of the client_ids and client_secrets. additional methods can be added to proauth2.auth_methods ''' if grant_type != 'authorization_code': raise Proauth2Error('invalid_request', 'grant_type must be "authorization_code"') yield Task(self._auth, client_id, key, method) user_id = yield Task(self._validate_request_code, code, client_id) access_token = self._generate_token(64) yield Task(self.data_store.store, 'tokens', token=access_token, user_id=user_id, client_id=client_id) callback({'access_token':access_token, 'token_type':'bearer'})
def request_access_token(self, client_id, key, code, grant_type, redirect_uri=None, method='direct_auth', callback=None): ''' request_access_token validates the client_id and client_secret, using the provided method, then generates an access_token, stores it with the user_id from the nonce, and returns a dictionary containing an access_token and bearer token. --- from the spec, it looks like there are different types of tokens, but i don't understand the distinctions, so someone else can fix this if need be. regarding the method: it appears that it is intended for there to be multiple ways to verify the client_id. my assumption is that you use the secret as the salt and pass the hash of the client_id or something, and then compare hashes on the server end. currently the only implemented method is direct comparison of the client_ids and client_secrets. additional methods can be added to proauth2.auth_methods ''' if grant_type != 'authorization_code': raise Proauth2Error('invalid_request', 'grant_type must be "authorization_code"') yield Task(self._auth, client_id, key, method) user_id = yield Task(self._validate_request_code, code, client_id) access_token = self._generate_token(64) yield Task(self.data_store.store, 'tokens', token=access_token, user_id=user_id, client_id=client_id) callback({'access_token':access_token, 'token_type':'bearer'})
[ "request_access_token", "validates", "the", "client_id", "and", "client_secret", "using", "the", "provided", "method", "then", "generates", "an", "access_token", "stores", "it", "with", "the", "user_id", "from", "the", "nonce", "and", "returns", "a", "dictionary", "containing", "an", "access_token", "and", "bearer", "token", ".", "---", "from", "the", "spec", "it", "looks", "like", "there", "are", "different", "types", "of", "tokens", "but", "i", "don", "t", "understand", "the", "disctintions", "so", "someone", "else", "can", "fix", "this", "if", "need", "be", ".", "regarding", "the", "method", ":", "it", "appears", "that", "it", "is", "intended", "for", "there", "to", "be", "multiple", "ways", "to", "verify", "the", "client_id", ".", "my", "assumption", "is", "that", "you", "use", "the", "secret", "as", "the", "salt", "and", "pass", "the", "hashed", "of", "the", "client_id", "or", "something", "and", "then", "compare", "hashes", "on", "the", "server", "end", ".", "currently", "the", "only", "implemented", "method", "is", "direct", "comparison", "of", "the", "client_ids", "and", "client_secrets", ".", "additional", "methods", "can", "be", "added", "to", "proauth2", ".", "auth_methods" ]
charlesthomas/proauth2
python
https://github.com/charlesthomas/proauth2/blob/f88c8df966a1802414047ed304d02df1dd520097/proauth2/async_proauth2.py#L77-L106
[ "def", "request_access_token", "(", "self", ",", "client_id", ",", "key", ",", "code", ",", "grant_type", ",", "redirect_uri", "=", "None", ",", "method", "=", "'direct_auth'", ",", "callback", "=", "None", ")", ":", "if", "grant_type", "!=", "'authorization_code'", ":", "raise", "Proauth2Error", "(", "'invalid_request'", ",", "'grant_type must be \"authorization_code\"'", ")", "yield", "Task", "(", "self", ".", "_auth", ",", "client_id", ",", "key", ",", "method", ")", "user_id", "=", "yield", "Task", "(", "self", ".", "_validate_request_code", ",", "code", ",", "client_id", ")", "access_token", "=", "self", ".", "_generate_token", "(", "64", ")", "yield", "Task", "(", "self", ".", "data_store", ".", "store", ",", "'tokens'", ",", "token", "=", "access_token", ",", "user_id", "=", "user_id", ",", "client_id", "=", "client_id", ")", "callback", "(", "{", "'access_token'", ":", "access_token", ",", "'token_type'", ":", "'bearer'", "}", ")" ]
f88c8df966a1802414047ed304d02df1dd520097
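Putting the pieces together, the code exchange checks the client secret, consumes the one-time nonce, verifies its expiry, and mints a bearer token. A synchronous, dict-backed sketch of that flow; it illustrates the logic only and is not the library's callback-based API:

from time import time

applications = {'client-1': {'client_secret': 's3cret'}}
nonce_codes = {'nonce-1': {'client_id': 'client-1', 'user_id': 'user-9',
                           'expires': time() + 600}}
tokens = {}

def exchange_code(client_id, key, code):
    # Direct secret comparison stands in for the 'direct_auth' method.
    if applications.get(client_id, {}).get('client_secret') != key:
        raise PermissionError('access_denied')
    nonce = nonce_codes.pop(code, None)  # nonces are one-time use
    if nonce is None or nonce['client_id'] != client_id:
        raise PermissionError('access_denied')
    if time() > nonce['expires']:
        raise PermissionError('request code expired')
    token = 'tok-' + nonce['user_id']  # stand-in for _generate_token(64)
    tokens[token] = {'user_id': nonce['user_id'], 'client_id': client_id}
    return {'access_token': token, 'token_type': 'bearer'}

print(exchange_code('client-1', 's3cret', 'nonce-1'))
# {'access_token': 'tok-user-9', 'token_type': 'bearer'}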
valid
AsyncProauth2.authenticate_token
authenticate_token checks the passed token and returns the user_id it is associated with. it is assumed that this method won't be directly exposed to the oauth client, but some kind of framework or wrapper. this allows the framework to have the user_id without doing additional DB calls.
proauth2/async_proauth2.py
def authenticate_token(self, token, callback): ''' authenticate_token checks the passed token and returns the user_id it is associated with. it is assumed that this method won't be directly exposed to the oauth client, but some kind of framework or wrapper. this allows the framework to have the user_id without doing additional DB calls. ''' token_data = yield Task(self.data_store.fetch, 'tokens', token=token) if not token_data: raise Proauth2Error('access_denied', 'token does not exist or has been revoked') callback(token_data['user_id'])
def authenticate_token(self, token, callback): ''' authenticate_token checks the passed token and returns the user_id it is associated with. it is assumed that this method won't be directly exposed to the oauth client, but some kind of framework or wrapper. this allows the framework to have the user_id without doing additional DB calls. ''' token_data = yield Task(self.data_store.fetch, 'tokens', token=token) if not token_data: raise Proauth2Error('access_denied', 'token does not exist or has been revoked') callback(token_data['user_id'])
[ "authenticate_token", "checks", "the", "passed", "token", "and", "returns", "the", "user_id", "it", "is", "associated", "with", ".", "it", "is", "assumed", "that", "this", "method", "won", "t", "be", "directly", "exposed", "to", "the", "oauth", "client", "but", "some", "kind", "of", "framework", "or", "wrapper", ".", "this", "allows", "the", "framework", "to", "have", "the", "user_id", "without", "doing", "additional", "DB", "calls", "." ]
charlesthomas/proauth2
python
https://github.com/charlesthomas/proauth2/blob/f88c8df966a1802414047ed304d02df1dd520097/proauth2/async_proauth2.py#L109-L120
[ "def", "authenticate_token", "(", "self", ",", "token", ",", "callback", ")", ":", "token_data", "=", "yield", "Task", "(", "self", ".", "data_store", ".", "fetch", ",", "'tokens'", ",", "token", "=", "token", ")", "if", "not", "token_data", ":", "raise", "Proauth2Error", "(", "'access_denied'", ",", "'token does not exist or has been revoked'", ")", "callback", "(", "token_data", "[", "'user_id'", "]", ")" ]
f88c8df966a1802414047ed304d02df1dd520097
valid
AsyncProauth2.revoke_token
revoke_token removes the access token from the data_store
proauth2/async_proauth2.py
def revoke_token(self, token, callback): ''' revoke_token removes the access token from the data_store ''' yield Task(self.data_store.remove, 'tokens', token=token) callback()
def revoke_token(self, token, callback): ''' revoke_token removes the access token from the data_store ''' yield Task(self.data_store.remove, 'tokens', token=token) callback()
[ "revoke_token", "removes", "the", "access", "token", "from", "the", "data_store" ]
charlesthomas/proauth2
python
https://github.com/charlesthomas/proauth2/blob/f88c8df966a1802414047ed304d02df1dd520097/proauth2/async_proauth2.py#L123-L128
[ "def", "revoke_token", "(", "self", ",", "token", ",", "callback", ")", ":", "yield", "Task", "(", "self", ".", "data_store", ".", "remove", ",", "'tokens'", ",", "token", "=", "token", ")", "callback", "(", ")" ]
f88c8df966a1802414047ed304d02df1dd520097
valid
AsyncProauth2._auth
_auth - internal method to ensure the client_id and client_secret passed with the nonce match
proauth2/async_proauth2.py
def _auth(self, client_id, key, method, callback): ''' _auth - internal method to ensure the client_id and client_secret passed with the nonce match ''' available = auth_methods.keys() if method not in available: raise Proauth2Error('invalid_request', 'unsupported authentication method: %s' 'available methods: %s' % \ (method, '\n'.join(available))) client = yield Task(self.data_store.fetch, 'applications', client_id=client_id) if not client: raise Proauth2Error('access_denied') if not auth_methods[method](key, client['client_secret']): raise Proauth2Error('access_denied') callback()
def _auth(self, client_id, key, method, callback): ''' _auth - internal method to ensure the client_id and client_secret passed with the nonce match ''' available = auth_methods.keys() if method not in available: raise Proauth2Error('invalid_request', 'unsupported authentication method: %s' 'available methods: %s' % \ (method, '\n'.join(available))) client = yield Task(self.data_store.fetch, 'applications', client_id=client_id) if not client: raise Proauth2Error('access_denied') if not auth_methods[method](key, client['client_secret']): raise Proauth2Error('access_denied') callback()
[ "_auth", "-", "internal", "method", "to", "ensure", "the", "client_id", "and", "client_secret", "passed", "with", "the", "nonce", "match" ]
charlesthomas/proauth2
python
https://github.com/charlesthomas/proauth2/blob/f88c8df966a1802414047ed304d02df1dd520097/proauth2/async_proauth2.py#L131-L147
[ "def", "_auth", "(", "self", ",", "client_id", ",", "key", ",", "method", ",", "callback", ")", ":", "available", "=", "auth_methods", ".", "keys", "(", ")", "if", "method", "not", "in", "available", ":", "raise", "Proauth2Error", "(", "'invalid_request'", ",", "'unsupported authentication method: %s'", "'available methods: %s'", "%", "(", "method", ",", "'\\n'", ".", "join", "(", "available", ")", ")", ")", "client", "=", "yield", "Task", "(", "self", ".", "data_store", ".", "fetch", ",", "'applications'", ",", "client_id", "=", "client_id", ")", "if", "not", "client", ":", "raise", "Proauth2Error", "(", "'access_denied'", ")", "if", "not", "auth_methods", "[", "method", "]", "(", "key", ",", "client", "[", "'client_secret'", "]", ")", ":", "raise", "Proauth2Error", "(", "'access_denied'", ")", "callback", "(", ")" ]
f88c8df966a1802414047ed304d02df1dd520097
valid
AsyncProauth2._validate_request_code
_validate_request_code - internal method for verifying the given nonce. also removes the nonce from the data_store, as they are intended for one-time use.
proauth2/async_proauth2.py
def _validate_request_code(self, code, client_id, callback): ''' _validate_request_code - internal method for verifying the given nonce. also removes the nonce from the data_store, as they are intended for one-time use. ''' nonce = yield Task(self.data_store.fetch, 'nonce_codes', code=code) if not nonce: raise Proauth2Error('access_denied', 'invalid request code: %s' % code) if client_id != nonce['client_id']: raise Proauth2Error('access_denied', 'invalid request code: %s' % code) user_id = nonce['user_id'] expires = nonce['expires'] yield Task(self.data_store.remove, 'nonce_codes', code=code, client_id=client_id, user_id=user_id) if time() > expires: raise Proauth2Error('access_denied', 'request code %s expired' % code) callback(user_id)
def _validate_request_code(self, code, client_id, callback): ''' _validate_request_code - internal method for verifying the given nonce. also removes the nonce from the data_store, as they are intended for one-time use. ''' nonce = yield Task(self.data_store.fetch, 'nonce_codes', code=code) if not nonce: raise Proauth2Error('access_denied', 'invalid request code: %s' % code) if client_id != nonce['client_id']: raise Proauth2Error('access_denied', 'invalid request code: %s' % code) user_id = nonce['user_id'] expires = nonce['expires'] yield Task(self.data_store.remove, 'nonce_codes', code=code, client_id=client_id, user_id=user_id) if time() > expires: raise Proauth2Error('access_denied', 'request code %s expired' % code) callback(user_id)
[ "_validate_request_code", "-", "internal", "method", "for", "verifying", "the", "the", "given", "nonce", ".", "also", "removes", "the", "nonce", "from", "the", "data_store", "as", "they", "are", "intended", "for", "one", "-", "time", "use", "." ]
charlesthomas/proauth2
python
https://github.com/charlesthomas/proauth2/blob/f88c8df966a1802414047ed304d02df1dd520097/proauth2/async_proauth2.py#L150-L169
[ "def", "_validate_request_code", "(", "self", ",", "code", ",", "client_id", ",", "callback", ")", ":", "nonce", "=", "yield", "Task", "(", "self", ".", "data_store", ".", "fetch", ",", "'nonce_codes'", ",", "code", "=", "code", ")", "if", "not", "nonce", ":", "raise", "Proauth2Error", "(", "'access_denied'", ",", "'invalid request code: %s'", "%", "code", ")", "if", "client_id", "!=", "nonce", "[", "'client_id'", "]", ":", "raise", "Proauth2Error", "(", "'access_denied'", ",", "'invalid request code: %s'", "%", "code", ")", "user_id", "=", "nonce", "[", "'user_id'", "]", "expires", "=", "nonce", "[", "'expires'", "]", "yield", "Task", "(", "self", ".", "data_store", ".", "remove", ",", "'nonce_codes'", ",", "code", "=", "code", ",", "client_id", "=", "client_id", ",", "user_id", "=", "user_id", ")", "if", "time", "(", ")", ">", "expires", ":", "raise", "Proauth2Error", "(", "'access_denied'", ",", "'request code %s expired'", "%", "code", ")", "callback", "(", "user_id", ")" ]
f88c8df966a1802414047ed304d02df1dd520097
valid
AsyncProauth2._generate_token
_generate_token - internal function for generating randomized alphanumeric strings of a given length
proauth2/async_proauth2.py
def _generate_token(self, length=32): ''' _generate_token - internal function for generating randomized alphanumeric strings of a given length ''' return ''.join(choice(ascii_letters + digits) for x in range(length))
def _generate_token(self, length=32): ''' _generate_token - internal function for generating randomized alphanumeric strings of a given length ''' return ''.join(choice(ascii_letters + digits) for x in range(length))
[ "_generate_token", "-", "internal", "function", "for", "generating", "randomized", "alphanumberic", "strings", "of", "a", "given", "length" ]
charlesthomas/proauth2
python
https://github.com/charlesthomas/proauth2/blob/f88c8df966a1802414047ed304d02df1dd520097/proauth2/async_proauth2.py#L171-L176
[ "def", "_generate_token", "(", "self", ",", "length", "=", "32", ")", ":", "return", "''", ".", "join", "(", "choice", "(", "ascii_letters", "+", "digits", ")", "for", "x", "in", "range", "(", "length", ")", ")" ]
f88c8df966a1802414047ed304d02df1dd520097
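The token generator is a single expression over the 62-character alphanumeric alphabet; each character contributes about 5.95 bits, so the default 32-character token carries roughly 190 bits of randomness. Note that random.choice draws from a non-cryptographic PRNG. A standalone sketch:

from random import choice
from string import ascii_letters, digits

def generate_token(length=32):
    # 62 symbols -> ~5.95 bits per character, ~190 bits at the default length.
    return ''.join(choice(ascii_letters + digits) for _ in range(length))

print(generate_token(8))  # e.g. 'aZ3kQ9Lm'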
valid
merge_ordered
Merge multiple ordered iterables so that within-iterable order is preserved
chatora/util/functional.py
def merge_ordered(ordereds: typing.Iterable[typing.Any]) -> typing.Iterable[typing.Any]: """Merge multiple ordered iterables so that within-iterable order is preserved """ seen_set = set() add_seen = seen_set.add return reversed(tuple(map( lambda obj: add_seen(obj) or obj, filterfalse( seen_set.__contains__, chain.from_iterable(map(reversed, reversed(ordereds))), ), )))
def merge_ordered(ordereds: typing.Iterable[typing.Any]) -> typing.Iterable[typing.Any]: """Merge multiple ordered iterables so that within-iterable order is preserved """ seen_set = set() add_seen = seen_set.add return reversed(tuple(map( lambda obj: add_seen(obj) or obj, filterfalse( seen_set.__contains__, chain.from_iterable(map(reversed, reversed(ordereds))), ), )))
[ "Merge", "multiple", "ordered", "so", "that", "within", "-", "ordered", "order", "is", "preserved" ]
takaomag/chatora.util
python
https://github.com/takaomag/chatora.util/blob/0fb36aca5da93bdd8e23a0c783095d621b582d89/chatora/util/functional.py#L86-L97
[ "def", "merge_ordered", "(", "ordereds", ":", "typing", ".", "Iterable", "[", "typing", ".", "Any", "]", ")", "->", "typing", ".", "Iterable", "[", "typing", ".", "Any", "]", ":", "seen_set", "=", "set", "(", ")", "add_seen", "=", "seen_set", ".", "add", "return", "reversed", "(", "tuple", "(", "map", "(", "lambda", "obj", ":", "add_seen", "(", "obj", ")", "or", "obj", ",", "filterfalse", "(", "seen_set", ".", "__contains__", ",", "chain", ".", "from_iterable", "(", "map", "(", "reversed", ",", "reversed", "(", "ordereds", ")", ")", ")", ",", ")", ",", ")", ")", ")" ]
0fb36aca5da93bdd8e23a0c783095d621b582d89
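A quick check of merge_ordered's behaviour on two overlapping lists: duplicates are dropped while the relative order within each input survives in the merged result. The function walks the reversed inputs right-to-left, keeps the first (i.e. rightmost) occurrence of each item, then reverses back:

from itertools import chain, filterfalse

def merge_ordered(ordereds):
    seen_set = set()
    add_seen = seen_set.add
    return reversed(tuple(map(
        lambda obj: add_seen(obj) or obj,
        filterfalse(seen_set.__contains__,
                    chain.from_iterable(map(reversed, reversed(ordereds)))))))

print(list(merge_ordered([['a', 'b', 'd'], ['b', 'c', 'd']])))
# ['a', 'b', 'c', 'd'] -- 'a' < 'b' < 'd' and 'b' < 'c' < 'd' both hold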
valid
validate_params
Helps us validate the parameters for the request :param required: a list of strings of required parameter names for the api request :param optional: a list of strings of optional parameter names for the api request :param params: a dict, the key-value store whose keys tell us what the user is passing to the API request :returns: None or throws an exception if the validation fails
ShirtsIO/helpers.py
def validate_params(required, optional, params): """ Helps us validate the parameters for the request :param required: a list of strings of required parameter names for the api request :param optional: a list of strings of optional parameter names for the api request :param params: a dict, the key-value store whose keys tell us what the user is passing to the API request :returns: None or throws an exception if the validation fails """ missing_fields = [x for x in required if x not in params] if missing_fields: field_strings = ", ".join(missing_fields) raise Exception("Missing fields: %s" % field_strings) disallowed_fields = [x for x in params if x not in optional and x not in required] if disallowed_fields: field_strings = ", ".join(disallowed_fields) raise Exception("Disallowed fields: %s" % field_strings)
def validate_params(required, optional, params): """ Helps us validate the parameters for the request :param required: a list of strings of required parameter names for the api request :param optional: a list of strings of optional parameter names for the api request :param params: a dict, the key-value store whose keys tell us what the user is passing to the API request :returns: None or throws an exception if the validation fails """ missing_fields = [x for x in required if x not in params] if missing_fields: field_strings = ", ".join(missing_fields) raise Exception("Missing fields: %s" % field_strings) disallowed_fields = [x for x in params if x not in optional and x not in required] if disallowed_fields: field_strings = ", ".join(disallowed_fields) raise Exception("Disallowed fields: %s" % field_strings)
[ "Helps", "us", "validate", "the", "parameters", "for", "the", "request" ]
tklovett/PyShirtsIO
python
https://github.com/tklovett/PyShirtsIO/blob/ff2f2d3b5e4ab2813abbce8545b27319c6af0def/ShirtsIO/helpers.py#L1-L22
[ "def", "validate_params", "(", "required", ",", "optional", ",", "params", ")", ":", "missing_fields", "=", "[", "x", "for", "x", "in", "required", "if", "x", "not", "in", "params", "]", "if", "missing_fields", ":", "field_strings", "=", "\", \"", ".", "join", "(", "missing_fields", ")", "raise", "Exception", "(", "\"Missing fields: %s\"", "%", "field_strings", ")", "disallowed_fields", "=", "[", "x", "for", "x", "in", "params", "if", "x", "not", "in", "optional", "and", "x", "not", "in", "required", "]", "if", "disallowed_fields", ":", "field_strings", "=", "\", \"", ".", "join", "(", "disallowed_fields", ")", "raise", "Exception", "(", "\"Disallowed fields: %s\"", "%", "field_strings", ")" ]
ff2f2d3b5e4ab2813abbce8545b27319c6af0def
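A usage sketch for validate_params: every required field must be present, and any extra field must be listed as optional. The sample field names are invented for illustration:

def validate_params(required, optional, params):
    missing = [x for x in required if x not in params]
    if missing:
        raise Exception("Missing fields: %s" % ", ".join(missing))
    disallowed = [x for x in params
                  if x not in optional and x not in required]
    if disallowed:
        raise Exception("Disallowed fields: %s" % ", ".join(disallowed))

validate_params(['quantity'], ['color'], {'quantity': 2, 'color': 'navy'})
try:
    validate_params(['quantity'], ['color'], {'color': 'navy', 'size': 'M'})
except Exception as exc:
    print(exc)  # Missing fields: quantity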
valid
Proauth2.authenticate_token
authenticate_token checks the passed token and returns the user_id it is associated with. it is assumed that this method won't be directly exposed to the oauth client, but some kind of framework or wrapper. this allows the framework to have the user_id without doing additional DB calls.
proauth2/proauth2.py
def authenticate_token( self, token ): ''' authenticate_token checks the passed token and returns the user_id it is associated with. it is assumed that this method won't be directly exposed to the oauth client, but some kind of framework or wrapper. this allows the framework to have the user_id without doing additional DB calls. ''' token_data = self.data_store.fetch( 'tokens', token=token ) if not token_data: raise Proauth2Error( 'access_denied', 'token does not exist or has been revoked' ) return token_data['user_id']
def authenticate_token( self, token ): ''' authenticate_token checks the passed token and returns the user_id it is associated with. it is assumed that this method won't be directly exposed to the oauth client, but some kind of framework or wrapper. this allows the framework to have the user_id without doing additional DB calls. ''' token_data = self.data_store.fetch( 'tokens', token=token ) if not token_data: raise Proauth2Error( 'access_denied', 'token does not exist or has been revoked' ) return token_data['user_id']
[ "authenticate_token", "checks", "the", "passed", "token", "and", "returns", "the", "user_id", "it", "is", "associated", "with", ".", "it", "is", "assumed", "that", "this", "method", "won", "t", "be", "directly", "exposed", "to", "the", "oauth", "client", "but", "some", "kind", "of", "framework", "or", "wrapper", ".", "this", "allows", "the", "framework", "to", "have", "the", "user_id", "without", "doing", "additional", "DB", "calls", "." ]
charlesthomas/proauth2
python
https://github.com/charlesthomas/proauth2/blob/f88c8df966a1802414047ed304d02df1dd520097/proauth2/proauth2.py#L107-L118
[ "def", "authenticate_token", "(", "self", ",", "token", ")", ":", "token_data", "=", "self", ".", "data_store", ".", "fetch", "(", "'tokens'", ",", "token", "=", "token", ")", "if", "not", "token_data", ":", "raise", "Proauth2Error", "(", "'access_denied'", ",", "'token does not exist or has been revoked'", ")", "return", "token_data", "[", "'user_id'", "]" ]
f88c8df966a1802414047ed304d02df1dd520097
valid
main
Register your own mode and handle method here.
scripts/check_wmi_sh.py
def main(): """Register your own mode and handle method here.""" plugin = Register() if plugin.args.option == 'filenumber': plugin.filenumber_handle() elif plugin.args.option == 'fileage': plugin.fileage_handle() elif plugin.args.option == 'sqlserverlocks': plugin.sqlserverlocks_handle() else: plugin.unknown("Unknown actions.")
def main(): """Register your own mode and handle method here.""" plugin = Register() if plugin.args.option == 'filenumber': plugin.filenumber_handle() elif plugin.args.option == 'fileage': plugin.fileage_handle() elif plugin.args.option == 'sqlserverlocks': plugin.sqlserverlocks_handle() else: plugin.unknown("Unknown actions.")
[ "Register", "your", "own", "mode", "and", "handle", "method", "here", "." ]
crazy-canux/arguspy
python
https://github.com/crazy-canux/arguspy/blob/e9486b5df61978a990d56bf43de35f3a4cdefcc3/scripts/check_wmi_sh.py#L436-L446
[ "def", "main", "(", ")", ":", "plugin", "=", "Register", "(", ")", "if", "plugin", ".", "args", ".", "option", "==", "'filenumber'", ":", "plugin", ".", "filenumber_handle", "(", ")", "elif", "plugin", ".", "args", ".", "option", "==", "'fileage'", ":", "plugin", ".", "fileage_handle", "(", ")", "elif", "plugin", ".", "args", ".", "option", "==", "'sqlserverlocks'", ":", "plugin", ".", "sqlserverlocks_handle", "(", ")", "else", ":", "plugin", ".", "unknown", "(", "\"Unknown actions.\"", ")" ]
e9486b5df61978a990d56bf43de35f3a4cdefcc3
valid
FileNumber.filenumber_handle
Get the number of files in the folder.
scripts/check_wmi_sh.py
def filenumber_handle(self): """Get the number of files in the folder.""" self.file_list = [] self.count = 0 status = self.ok if self.args.recursion: self.__result, self.__file_list = self.__get_folder(self.args.path) else: self.__result, self.__file_list = self.__get_file(self.args.path) # Compare the value. if self.__result > self.args.critical: status = self.critical elif self.__result > self.args.warning: status = self.warning else: status = self.ok # Output self.shortoutput = "Found {0} files in {1}.".format(self.__result, self.args.path) self.logger.debug("file_list: {}".format(self.__file_list)) [self.longoutput.append(file_data.get('Name')) for file_data in self.__file_list] self.perfdata.append("{path}={result};{warn};{crit};0;".format( crit=self.args.critical, warn=self.args.warning, result=self.__result, path=self.args.path)) # Return status with message to Nagios. status(self.output(long_output_limit=None)) self.logger.debug("Return status and exit to Nagios.")
def filenumber_handle(self): """Get the number of files in the folder.""" self.file_list = [] self.count = 0 status = self.ok if self.args.recursion: self.__result, self.__file_list = self.__get_folder(self.args.path) else: self.__result, self.__file_list = self.__get_file(self.args.path) # Compare the value. if self.__result > self.args.critical: status = self.critical elif self.__result > self.args.warning: status = self.warning else: status = self.ok # Output self.shortoutput = "Found {0} files in {1}.".format(self.__result, self.args.path) self.logger.debug("file_list: {}".format(self.__file_list)) [self.longoutput.append(file_data.get('Name')) for file_data in self.__file_list] self.perfdata.append("{path}={result};{warn};{crit};0;".format( crit=self.args.critical, warn=self.args.warning, result=self.__result, path=self.args.path)) # Return status with message to Nagios. status(self.output(long_output_limit=None)) self.logger.debug("Return status and exit to Nagios.")
[ "Get", "the", "number", "of", "file", "in", "the", "folder", "." ]
crazy-canux/arguspy
python
https://github.com/crazy-canux/arguspy/blob/e9486b5df61978a990d56bf43de35f3a4cdefcc3/scripts/check_wmi_sh.py#L106-L139
[ "def", "filenumber_handle", "(", "self", ")", ":", "self", ".", "file_list", "=", "[", "]", "self", ".", "count", "=", "0", "status", "=", "self", ".", "ok", "if", "self", ".", "args", ".", "recursion", ":", "self", ".", "__result", ",", "self", ".", "__file_list", "=", "self", ".", "__get_folder", "(", "self", ".", "args", ".", "path", ")", "else", ":", "self", ".", "__result", ",", "self", ".", "__file_list", "=", "self", ".", "__get_file", "(", "self", ".", "args", ".", "path", ")", "# Compare the vlaue.", "if", "self", ".", "__result", ">", "self", ".", "args", ".", "critical", ":", "status", "=", "self", ".", "critical", "elif", "self", ".", "__result", ">", "self", ".", "args", ".", "warning", ":", "status", "=", "self", ".", "warning", "else", ":", "status", "=", "self", ".", "ok", "# Output", "self", ".", "shortoutput", "=", "\"Found {0} files in {1}.\"", ".", "format", "(", "self", ".", "__result", ",", "self", ".", "args", ".", "path", ")", "self", ".", "logger", ".", "debug", "(", "\"file_list: {}\"", ".", "format", "(", "self", ".", "__file_list", ")", ")", "[", "self", ".", "longoutput", ".", "append", "(", "file_data", ".", "get", "(", "'Name'", ")", ")", "for", "file_data", "in", "self", ".", "__file_list", "]", "self", ".", "perfdata", ".", "append", "(", "\"{path}={result};{warn};{crit};0;\"", ".", "format", "(", "crit", "=", "self", ".", "args", ".", "critical", ",", "warn", "=", "self", ".", "args", ".", "warning", ",", "result", "=", "self", ".", "__result", ",", "path", "=", "self", ".", "args", ".", "path", ")", ")", "# Return status with message to Nagios.", "status", "(", "self", ".", "output", "(", "long_output_limit", "=", "None", ")", ")", "self", ".", "logger", ".", "debug", "(", "\"Return status and exit to Nagios.\"", ")" ]
e9486b5df61978a990d56bf43de35f3a4cdefcc3
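The filenumber_handle record above follows the standard Nagios threshold pattern: compare a measured value against warning and critical levels, with the highest severity winning. A minimal standalone sketch of that pattern, using hypothetical status strings in place of the plugin's ok/warning/critical callables:

def classify(result, warning, critical):
    """Map a measured value onto a Nagios-style status name."""
    if result > critical:       # critical outranks warning
        return 'CRITICAL'
    elif result > warning:
        return 'WARNING'
    return 'OK'

print(classify(12, warning=5, critical=10))  # -> CRITICAL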
valid
FileAge.__get_current_datetime
Get current datetime for every file.
scripts/check_wmi_sh.py
def __get_current_datetime(self):
    """Get current datetime for every file."""
    self.wql_time = "SELECT LocalDateTime FROM Win32_OperatingSystem"
    self.current_time = self.query(self.wql_time)
    # [{'LocalDateTime': '20160824161431.977000+480'}]
    self.current_time_string = str(
        self.current_time[0].get('LocalDateTime').split('.')[0])
    # '20160824161431'
    self.current_time_format = datetime.datetime.strptime(
        self.current_time_string, '%Y%m%d%H%M%S')
    # param: datetime.datetime(2016, 8, 24, 16, 14, 31) -> type:
    # datetime.datetime
    return self.current_time_format
def __get_current_datetime(self):
    """Get current datetime for every file."""
    self.wql_time = "SELECT LocalDateTime FROM Win32_OperatingSystem"
    self.current_time = self.query(self.wql_time)
    # [{'LocalDateTime': '20160824161431.977000+480'}]
    self.current_time_string = str(
        self.current_time[0].get('LocalDateTime').split('.')[0])
    # '20160824161431'
    self.current_time_format = datetime.datetime.strptime(
        self.current_time_string, '%Y%m%d%H%M%S')
    # param: datetime.datetime(2016, 8, 24, 16, 14, 31) -> type:
    # datetime.datetime
    return self.current_time_format
[ "Get", "current", "datetime", "for", "every", "file", "." ]
crazy-canux/arguspy
python
https://github.com/crazy-canux/arguspy/blob/e9486b5df61978a990d56bf43de35f3a4cdefcc3/scripts/check_wmi_sh.py#L226-L238
[ "def", "__get_current_datetime", "(", "self", ")", ":", "self", ".", "wql_time", "=", "\"SELECT LocalDateTime FROM Win32_OperatingSystem\"", "self", ".", "current_time", "=", "self", ".", "query", "(", "self", ".", "wql_time", ")", "# [{'LocalDateTime': '20160824161431.977000+480'}]'", "self", ".", "current_time_string", "=", "str", "(", "self", ".", "current_time", "[", "0", "]", ".", "get", "(", "'LocalDateTime'", ")", ".", "split", "(", "'.'", ")", "[", "0", "]", ")", "# '20160824161431'", "self", ".", "current_time_format", "=", "datetime", ".", "datetime", ".", "strptime", "(", "self", ".", "current_time_string", ",", "'%Y%m%d%H%M%S'", ")", "# param: datetime.datetime(2016, 8, 24, 16, 14, 31) -> type:", "# datetime.datetime", "return", "self", ".", "current_time_format" ]
e9486b5df61978a990d56bf43de35f3a4cdefcc3
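The __get_current_datetime record parses a WMI CIM timestamp by discarding everything after the dot and feeding the rest to strptime. A self-contained sketch of the same parsing, using the sample value from the record's own comments:

import datetime

raw = '20160824161431.977000+480'   # Win32_OperatingSystem.LocalDateTime sample
stamp = raw.split('.')[0]           # strip microseconds; the +480 minute UTC offset is dropped
parsed = datetime.datetime.strptime(stamp, '%Y%m%d%H%M%S')
print(parsed)                       # 2016-08-24 16:14:31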
valid
FileAge.fileage_handle
Check the age of files in the folder.
scripts/check_wmi_sh.py
def fileage_handle(self):
    """Check the age of files in the folder."""
    self.file_list = []
    self.ok_file = []
    self.warn_file = []
    self.crit_file = []
    status = self.ok

    if self.args.recursion:
        self.__file_list = self.__get_folder(self.args.path)
    else:
        self.__file_list = self.__get_file(self.args.path)
    self.logger.debug("file_list: {}".format(self.__file_list))
    # [{'LastModified': '20160824142017.737101+480', 'Name': 'd:\\test\\1.txt'},
    # {'LastModified': '20160824142021.392101+480', 'Name': 'd:\\test\\2.txt'},
    # {'LastModified': '20160824142106.460101+480', 'Name': 'd:\\test\\test1\\21.txt'}]

    for file_dict in self.__file_list:
        self.filename = file_dict.get('Name')
        if self.filename and self.filename != 'Name':
            self.logger.debug(
                "===== start to compare {} =====".format(
                    self.filename))

            self.file_datetime_string = file_dict.get(
                'LastModified').split('.')[0]
            self.file_datetime = datetime.datetime.strptime(
                self.file_datetime_string, '%Y%m%d%H%M%S')
            self.logger.debug(
                "file_datetime: {}".format(
                    self.file_datetime))

            self.current_datetime = self.__get_current_datetime()
            self.logger.debug(
                "current_datetime: {}".format(
                    self.current_datetime))

            self.__delta_datetime = self.current_datetime - self.file_datetime
            self.logger.debug(
                "delta_datetime: {}".format(
                    self.__delta_datetime))
            self.logger.debug(
                "warn_datetime: {}".format(
                    datetime.timedelta(
                        minutes=self.args.warning)))
            self.logger.debug(
                "crit_datetime: {}".format(
                    datetime.timedelta(
                        minutes=self.args.critical)))

            if self.__delta_datetime > datetime.timedelta(
                    minutes=self.args.critical):
                self.crit_file.append(self.filename)
            elif self.__delta_datetime > datetime.timedelta(minutes=self.args.warning):
                self.warn_file.append(self.filename)
            else:
                self.ok_file.append(self.filename)

    # Compare the value.
    if self.crit_file:
        status = self.critical
    elif self.warn_file:
        status = self.warning
    else:
        status = self.ok

    # Output
    self.shortoutput = "Found {0} files out of date.".format(
        len(self.crit_file))
    if self.crit_file:
        self.longoutput.append("===== Critical File out of date ====")
        [self.longoutput.append(filename)
            for filename in self.crit_file if self.crit_file]
    if self.warn_file:
        self.longoutput.append("===== Warning File out of date ====")
        [self.longoutput.append(filename)
            for filename in self.warn_file if self.warn_file]
    if self.ok_file:
        self.longoutput.append("===== OK File out of date ====")
        [self.longoutput.append(filename)
            for filename in self.ok_file if self.ok_file]
    self.perfdata.append("{path}={result};{warn};{crit};0;".format(
        crit=self.args.critical,
        warn=self.args.warning,
        result=len(self.crit_file),
        path=self.args.drive + self.args.path))

    # Return status with message to Nagios.
    status(self.output(long_output_limit=None))
    self.logger.debug("Return status and exit to Nagios.")
def fileage_handle(self):
    """Check the age of files in the folder."""
    self.file_list = []
    self.ok_file = []
    self.warn_file = []
    self.crit_file = []
    status = self.ok

    if self.args.recursion:
        self.__file_list = self.__get_folder(self.args.path)
    else:
        self.__file_list = self.__get_file(self.args.path)
    self.logger.debug("file_list: {}".format(self.__file_list))
    # [{'LastModified': '20160824142017.737101+480', 'Name': 'd:\\test\\1.txt'},
    # {'LastModified': '20160824142021.392101+480', 'Name': 'd:\\test\\2.txt'},
    # {'LastModified': '20160824142106.460101+480', 'Name': 'd:\\test\\test1\\21.txt'}]

    for file_dict in self.__file_list:
        self.filename = file_dict.get('Name')
        if self.filename and self.filename != 'Name':
            self.logger.debug(
                "===== start to compare {} =====".format(
                    self.filename))

            self.file_datetime_string = file_dict.get(
                'LastModified').split('.')[0]
            self.file_datetime = datetime.datetime.strptime(
                self.file_datetime_string, '%Y%m%d%H%M%S')
            self.logger.debug(
                "file_datetime: {}".format(
                    self.file_datetime))

            self.current_datetime = self.__get_current_datetime()
            self.logger.debug(
                "current_datetime: {}".format(
                    self.current_datetime))

            self.__delta_datetime = self.current_datetime - self.file_datetime
            self.logger.debug(
                "delta_datetime: {}".format(
                    self.__delta_datetime))
            self.logger.debug(
                "warn_datetime: {}".format(
                    datetime.timedelta(
                        minutes=self.args.warning)))
            self.logger.debug(
                "crit_datetime: {}".format(
                    datetime.timedelta(
                        minutes=self.args.critical)))

            if self.__delta_datetime > datetime.timedelta(
                    minutes=self.args.critical):
                self.crit_file.append(self.filename)
            elif self.__delta_datetime > datetime.timedelta(minutes=self.args.warning):
                self.warn_file.append(self.filename)
            else:
                self.ok_file.append(self.filename)

    # Compare the value.
    if self.crit_file:
        status = self.critical
    elif self.warn_file:
        status = self.warning
    else:
        status = self.ok

    # Output
    self.shortoutput = "Found {0} files out of date.".format(
        len(self.crit_file))
    if self.crit_file:
        self.longoutput.append("===== Critical File out of date ====")
        [self.longoutput.append(filename)
            for filename in self.crit_file if self.crit_file]
    if self.warn_file:
        self.longoutput.append("===== Warning File out of date ====")
        [self.longoutput.append(filename)
            for filename in self.warn_file if self.warn_file]
    if self.ok_file:
        self.longoutput.append("===== OK File out of date ====")
        [self.longoutput.append(filename)
            for filename in self.ok_file if self.ok_file]
    self.perfdata.append("{path}={result};{warn};{crit};0;".format(
        crit=self.args.critical,
        warn=self.args.warning,
        result=len(self.crit_file),
        path=self.args.drive + self.args.path))

    # Return status with message to Nagios.
    status(self.output(long_output_limit=None))
    self.logger.debug("Return status and exit to Nagios.")
[ "Get", "the", "number", "of", "file", "in", "the", "folder", "." ]
crazy-canux/arguspy
python
https://github.com/crazy-canux/arguspy/blob/e9486b5df61978a990d56bf43de35f3a4cdefcc3/scripts/check_wmi_sh.py#L240-L328
[ "def", "fileage_handle", "(", "self", ")", ":", "self", ".", "file_list", "=", "[", "]", "self", ".", "ok_file", "=", "[", "]", "self", ".", "warn_file", "=", "[", "]", "self", ".", "crit_file", "=", "[", "]", "status", "=", "self", ".", "ok", "if", "self", ".", "args", ".", "recursion", ":", "self", ".", "__file_list", "=", "self", ".", "__get_folder", "(", "self", ".", "args", ".", "path", ")", "else", ":", "self", ".", "__file_list", "=", "self", ".", "__get_file", "(", "self", ".", "args", ".", "path", ")", "self", ".", "logger", ".", "debug", "(", "\"file_list: {}\"", ".", "format", "(", "self", ".", "__file_list", ")", ")", "# [{'LastModified': '20160824142017.737101+480', 'Name': 'd:\\\\test\\\\1.txt'},", "# {'LastModified': '20160824142021.392101+480', 'Name': 'd:\\\\test\\\\2.txt'},", "# {'LastModified': '20160824142106.460101+480', 'Name': 'd:\\\\test\\\\test1\\\\21.txt'}]", "for", "file_dict", "in", "self", ".", "__file_list", ":", "self", ".", "filename", "=", "file_dict", ".", "get", "(", "'Name'", ")", "if", "self", ".", "filename", "and", "self", ".", "filename", "!=", "'Name'", ":", "self", ".", "logger", ".", "debug", "(", "\"===== start to compare {} =====\"", ".", "format", "(", "self", ".", "filename", ")", ")", "self", ".", "file_datetime_string", "=", "file_dict", ".", "get", "(", "'LastModified'", ")", ".", "split", "(", "'.'", ")", "[", "0", "]", "self", ".", "file_datetime", "=", "datetime", ".", "datetime", ".", "strptime", "(", "self", ".", "file_datetime_string", ",", "'%Y%m%d%H%M%S'", ")", "self", ".", "logger", ".", "debug", "(", "\"file_datetime: {}\"", ".", "format", "(", "self", ".", "file_datetime", ")", ")", "self", ".", "current_datetime", "=", "self", ".", "__get_current_datetime", "(", ")", "self", ".", "logger", ".", "debug", "(", "\"current_datetime: {}\"", ".", "format", "(", "self", ".", "current_datetime", ")", ")", "self", ".", "__delta_datetime", "=", "self", ".", "current_datetime", "-", "self", ".", "file_datetime", "self", ".", "logger", ".", "debug", "(", "\"delta_datetime: {}\"", ".", "format", "(", "self", ".", "__delta_datetime", ")", ")", "self", ".", "logger", ".", "debug", "(", "\"warn_datetime: {}\"", ".", "format", "(", "datetime", ".", "timedelta", "(", "minutes", "=", "self", ".", "args", ".", "warning", ")", ")", ")", "self", ".", "logger", ".", "debug", "(", "\"crit_datetime: {}\"", ".", "format", "(", "datetime", ".", "timedelta", "(", "minutes", "=", "self", ".", "args", ".", "critical", ")", ")", ")", "if", "self", ".", "__delta_datetime", ">", "datetime", ".", "timedelta", "(", "minutes", "=", "self", ".", "args", ".", "critical", ")", ":", "self", ".", "crit_file", ".", "append", "(", "self", ".", "filename", ")", "elif", "self", ".", "__delta_datetime", ">", "datetime", ".", "timedelta", "(", "minutes", "=", "self", ".", "args", ".", "warning", ")", ":", "self", ".", "warn_file", ".", "append", "(", "self", ".", "filename", ")", "else", ":", "self", ".", "ok_file", ".", "append", "(", "self", ".", "filename", ")", "# Compare the vlaue.", "if", "self", ".", "crit_file", ":", "status", "=", "self", ".", "critical", "elif", "self", ".", "warn_file", ":", "status", "=", "self", ".", "warning", "else", ":", "status", "=", "self", ".", "ok", "# Output", "self", ".", "shortoutput", "=", "\"Found {0} files out of date.\"", ".", "format", "(", "len", "(", "self", ".", "crit_file", ")", ")", "if", "self", ".", "crit_file", ":", "self", ".", "longoutput", ".", "append", "(", "\"===== Critical File out of date ====\"", ")", "[", 
"self", ".", "longoutput", ".", "append", "(", "filename", ")", "for", "filename", "in", "self", ".", "crit_file", "if", "self", ".", "crit_file", "]", "if", "self", ".", "warn_file", ":", "self", ".", "longoutput", ".", "append", "(", "\"===== Warning File out of date ====\"", ")", "[", "self", ".", "longoutput", ".", "append", "(", "filename", ")", "for", "filename", "in", "self", ".", "warn_file", "if", "self", ".", "warn_file", "]", "if", "self", ".", "ok_file", ":", "self", ".", "longoutput", ".", "append", "(", "\"===== OK File out of date ====\"", ")", "[", "self", ".", "longoutput", ".", "append", "(", "filename", ")", "for", "filename", "in", "self", ".", "ok_file", "if", "self", ".", "ok_file", "]", "self", ".", "perfdata", ".", "append", "(", "\"{path}={result};{warn};{crit};0;\"", ".", "format", "(", "crit", "=", "self", ".", "args", ".", "critical", ",", "warn", "=", "self", ".", "args", ".", "warning", ",", "result", "=", "len", "(", "self", ".", "crit_file", ")", ",", "path", "=", "self", ".", "args", ".", "drive", "+", "self", ".", "args", ".", "path", ")", ")", "# Return status with message to Nagios.", "status", "(", "self", ".", "output", "(", "long_output_limit", "=", "None", ")", ")", "self", ".", "logger", ".", "debug", "(", "\"Return status and exit to Nagios.\"", ")" ]
e9486b5df61978a990d56bf43de35f3a4cdefcc3
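fileage_handle buckets each file by comparing its age against warning and critical thresholds expressed in minutes. A minimal sketch of that timedelta comparison, independent of the plugin, fed with the sample timestamps from the records above:

import datetime

def bucket(age, warning_min, critical_min):
    """Return which threshold bucket a file age falls into."""
    if age > datetime.timedelta(minutes=critical_min):
        return 'crit'
    elif age > datetime.timedelta(minutes=warning_min):
        return 'warn'
    return 'ok'

# ~114 minutes between the sample LocalDateTime and LastModified values
age = (datetime.datetime(2016, 8, 24, 16, 14, 31)
       - datetime.datetime(2016, 8, 24, 14, 20, 17))
print(bucket(age, warning_min=30, critical_min=120))  # -> warn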
valid
BranchThread.run
run your main spider here as for branch spider result data, you can return everything or do whatever with it in your own code :return: None
threads_creator/threads/branch_thread.py
def run(self):
    """run your main spider here

    as for branch spider result data,
    you can return everything or do whatever with it
    in your own code

    :return: None
    """
    config = config_creator()
    debug = config.debug
    branch_thread_sleep = config.branch_thread_sleep
    while 1:
        url = self.branch_queue.get()
        if debug:
            print('branch thread-{} start'.format(url))
        branch_spider = self.branch_spider(url)
        sleep(random.randrange(*branch_thread_sleep))
        branch_spider.request_page()
        if debug:
            print('branch thread-{} end'.format(url))
        self.branch_queue.task_done()
def run(self):
    """run your main spider here

    as for branch spider result data,
    you can return everything or do whatever with it
    in your own code

    :return: None
    """
    config = config_creator()
    debug = config.debug
    branch_thread_sleep = config.branch_thread_sleep
    while 1:
        url = self.branch_queue.get()
        if debug:
            print('branch thread-{} start'.format(url))
        branch_spider = self.branch_spider(url)
        sleep(random.randrange(*branch_thread_sleep))
        branch_spider.request_page()
        if debug:
            print('branch thread-{} end'.format(url))
        self.branch_queue.task_done()
[ "run", "your", "main", "spider", "here", "as", "for", "branch", "spider", "result", "data", "you", "can", "return", "everything", "or", "do", "whatever", "with", "it", "in", "your", "own", "code" ]
ecmadao/threads-creator
python
https://github.com/ecmadao/threads-creator/blob/f081091425d4382e5e9776c395c20e1af2332657/threads_creator/threads/branch_thread.py#L20-L39
[ "def", "run", "(", "self", ")", ":", "config", "=", "config_creator", "(", ")", "debug", "=", "config", ".", "debug", "branch_thread_sleep", "=", "config", ".", "branch_thread_sleep", "while", "1", ":", "url", "=", "self", ".", "branch_queue", ".", "get", "(", ")", "if", "debug", ":", "print", "(", "'branch thread-{} start'", ".", "format", "(", "url", ")", ")", "branch_spider", "=", "self", ".", "branch_spider", "(", "url", ")", "sleep", "(", "random", ".", "randrange", "(", "*", "branch_thread_sleep", ")", ")", "branch_spider", ".", "request_page", "(", ")", "if", "debug", ":", "print", "(", "'branch thread-{} end'", ".", "format", "(", "url", ")", ")", "self", ".", "branch_queue", ".", "task_done", "(", ")" ]
f081091425d4382e5e9776c395c20e1af2332657
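BranchThread.run is a standard queue-worker loop: block on get(), process the item, then call task_done() so a join() elsewhere can tell when the queue drains. A generic sketch of the same loop with the stdlib queue module; the print stands in for branch_spider.request_page():

import queue
import threading

q = queue.Queue()

def worker():
    while True:
        url = q.get()
        print('processing', url)  # stand-in for the spider work
        q.task_done()

threading.Thread(target=worker, daemon=True).start()
for u in ('http://a.example', 'http://b.example'):
    q.put(u)
q.join()  # returns once task_done() has been called for every item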
valid
get_version
Read version info from a file without importing it
setup.py
def get_version(relpath):
    """Read version info from a file without importing it"""
    from os.path import dirname, join

    if '__file__' not in globals():
        # Allows using the function interactively
        root = '.'
    else:
        root = dirname(__file__)

    # The code below reads a text file with unknown encoding in
    # a Python 2/3 compatible way. Reading this text file
    # without specifying encoding will fail in Python 3 on some
    # systems (see http://goo.gl/5XmOH). Specifying encoding as
    # an open() parameter is incompatible with Python 2.

    # cp437 is the encoding without missing points, safe against:
    #   UnicodeDecodeError: 'charmap' codec can't decode byte...
    for line in open(join(root, relpath), 'rb'):
        line = line.decode('cp437')
        if '__version__' in line:
            if '"' in line:
                # __version__ = "0.9"
                return line.split('"')[1]
            elif "'" in line:
                return line.split("'")[1]
def get_version(relpath):
    """Read version info from a file without importing it"""
    from os.path import dirname, join

    if '__file__' not in globals():
        # Allows using the function interactively
        root = '.'
    else:
        root = dirname(__file__)

    # The code below reads a text file with unknown encoding in
    # a Python 2/3 compatible way. Reading this text file
    # without specifying encoding will fail in Python 3 on some
    # systems (see http://goo.gl/5XmOH). Specifying encoding as
    # an open() parameter is incompatible with Python 2.

    # cp437 is the encoding without missing points, safe against:
    #   UnicodeDecodeError: 'charmap' codec can't decode byte...
    for line in open(join(root, relpath), 'rb'):
        line = line.decode('cp437')
        if '__version__' in line:
            if '"' in line:
                # __version__ = "0.9"
                return line.split('"')[1]
            elif "'" in line:
                return line.split("'")[1]
[ "Read", "version", "info", "from", "a", "file", "without", "importing", "it" ]
nyaruka/python-librato-bg
python
https://github.com/nyaruka/python-librato-bg/blob/e541092838694de31d256becea8391a9cfe086c7/setup.py#L32-L58
[ "def", "get_version", "(", "relpath", ")", ":", "from", "os", ".", "path", "import", "dirname", ",", "join", "if", "'__file__'", "not", "in", "globals", "(", ")", ":", "# Allow to use function interactively", "root", "=", "'.'", "else", ":", "root", "=", "dirname", "(", "__file__", ")", "# The code below reads text file with unknown encoding in", "# in Python2/3 compatible way. Reading this text file", "# without specifying encoding will fail in Python 3 on some", "# systems (see http://goo.gl/5XmOH). Specifying encoding as", "# open() parameter is incompatible with Python 2", "# cp437 is the encoding without missing points, safe against:", "# UnicodeDecodeError: 'charmap' codec can't decode byte...", "for", "line", "in", "open", "(", "join", "(", "root", ",", "relpath", ")", ",", "'rb'", ")", ":", "line", "=", "line", ".", "decode", "(", "'cp437'", ")", "if", "'__version__'", "in", "line", ":", "if", "'\"'", "in", "line", ":", "# __version__ = \"0.9\"", "return", "line", ".", "split", "(", "'\"'", ")", "[", "1", "]", "elif", "\"'\"", "in", "line", ":", "return", "line", ".", "split", "(", "\"'\"", ")", "[", "1", "]" ]
e541092838694de31d256becea8391a9cfe086c7
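A hedged usage sketch for get_version: point it at a file, relative to setup.py, containing a line like __version__ = "0.9". The module path below is an assumption for illustration, not taken from the repo:

# Hypothetical layout: the version string lives in librato_bg/__init__.py
version = get_version('librato_bg/__init__.py')
print(version)  # e.g. '0.9'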
valid
MakeDescriptor
Make a protobuf Descriptor given a DescriptorProto protobuf. Handles nested descriptors. Note that this is limited to the scope of defining a message inside of another message. Composite fields can currently only be resolved if the message is defined in the same scope as the field. Args: desc_proto: The descriptor_pb2.DescriptorProto protobuf message. package: Optional package name for the new message Descriptor (string). build_file_if_cpp: Update the C++ descriptor pool if api matches. Set to False on recursion, so no duplicates are created. syntax: The syntax/semantics that should be used. Set to "proto3" to get proto3 field presence semantics. Returns: A Descriptor for protobuf messages.
typy/google/protobuf/descriptor.py
def MakeDescriptor(desc_proto, package='', build_file_if_cpp=True,
                   syntax=None):
  """Make a protobuf Descriptor given a DescriptorProto protobuf.

  Handles nested descriptors. Note that this is limited to the scope of
  defining a message inside of another message. Composite fields can
  currently only be resolved if the message is defined in the same scope
  as the field.

  Args:
    desc_proto: The descriptor_pb2.DescriptorProto protobuf message.
    package: Optional package name for the new message Descriptor (string).
    build_file_if_cpp: Update the C++ descriptor pool if api matches.
      Set to False on recursion, so no duplicates are created.
    syntax: The syntax/semantics that should be used.  Set to "proto3" to get
      proto3 field presence semantics.
  Returns:
    A Descriptor for protobuf messages.
  """
  if api_implementation.Type() == 'cpp' and build_file_if_cpp:
    # The C++ implementation requires all descriptors to be backed by the same
    # definition in the C++ descriptor pool. To do this, we build a
    # FileDescriptorProto with the same definition as this descriptor and build
    # it into the pool.
    from typy.google.protobuf import descriptor_pb2
    file_descriptor_proto = descriptor_pb2.FileDescriptorProto()
    file_descriptor_proto.message_type.add().MergeFrom(desc_proto)

    # Generate a random name for this proto file to prevent conflicts with any
    # imported ones. We need to specify a file name so the descriptor pool
    # accepts our FileDescriptorProto, but it is not important what that file
    # name is actually set to.
    proto_name = str(uuid.uuid4())

    if package:
      file_descriptor_proto.name = os.path.join(package.replace('.', '/'),
                                                proto_name + '.proto')
      file_descriptor_proto.package = package
    else:
      file_descriptor_proto.name = proto_name + '.proto'
    _message.default_pool.Add(file_descriptor_proto)
    result = _message.default_pool.FindFileByName(file_descriptor_proto.name)

    if _USE_C_DESCRIPTORS:
      return result.message_types_by_name[desc_proto.name]

  full_message_name = [desc_proto.name]
  if package:
    full_message_name.insert(0, package)

  # Create Descriptors for enum types
  enum_types = {}
  for enum_proto in desc_proto.enum_type:
    full_name = '.'.join(full_message_name + [enum_proto.name])
    enum_desc = EnumDescriptor(
        enum_proto.name, full_name, None, [
            EnumValueDescriptor(enum_val.name, ii, enum_val.number)
            for ii, enum_val in enumerate(enum_proto.value)])
    enum_types[full_name] = enum_desc

  # Create Descriptors for nested types
  nested_types = {}
  for nested_proto in desc_proto.nested_type:
    full_name = '.'.join(full_message_name + [nested_proto.name])
    # Nested types are just those defined inside of the message, not all types
    # used by fields in the message, so no loops are possible here.
    nested_desc = MakeDescriptor(nested_proto,
                                 package='.'.join(full_message_name),
                                 build_file_if_cpp=False,
                                 syntax=syntax)
    nested_types[full_name] = nested_desc

  fields = []
  for field_proto in desc_proto.field:
    full_name = '.'.join(full_message_name + [field_proto.name])
    enum_desc = None
    nested_desc = None
    if field_proto.HasField('type_name'):
      type_name = field_proto.type_name
      full_type_name = '.'.join(full_message_name +
                                [type_name[type_name.rfind('.')+1:]])
      if full_type_name in nested_types:
        nested_desc = nested_types[full_type_name]
      elif full_type_name in enum_types:
        enum_desc = enum_types[full_type_name]
    # Else type_name references a non-local type, which isn't implemented
    field = FieldDescriptor(
        field_proto.name, full_name, field_proto.number - 1,
        field_proto.number, field_proto.type,
        FieldDescriptor.ProtoTypeToCppProtoType(field_proto.type),
        field_proto.label, None, nested_desc, enum_desc, None, False, None,
        options=field_proto.options, has_default_value=False)
    fields.append(field)

  desc_name = '.'.join(full_message_name)
  return Descriptor(desc_proto.name, desc_name, None, None, fields,
                    list(nested_types.values()), list(enum_types.values()),
                    [], options=desc_proto.options)
def MakeDescriptor(desc_proto, package='', build_file_if_cpp=True,
                   syntax=None):
  """Make a protobuf Descriptor given a DescriptorProto protobuf.

  Handles nested descriptors. Note that this is limited to the scope of
  defining a message inside of another message. Composite fields can
  currently only be resolved if the message is defined in the same scope
  as the field.

  Args:
    desc_proto: The descriptor_pb2.DescriptorProto protobuf message.
    package: Optional package name for the new message Descriptor (string).
    build_file_if_cpp: Update the C++ descriptor pool if api matches.
      Set to False on recursion, so no duplicates are created.
    syntax: The syntax/semantics that should be used.  Set to "proto3" to get
      proto3 field presence semantics.
  Returns:
    A Descriptor for protobuf messages.
  """
  if api_implementation.Type() == 'cpp' and build_file_if_cpp:
    # The C++ implementation requires all descriptors to be backed by the same
    # definition in the C++ descriptor pool. To do this, we build a
    # FileDescriptorProto with the same definition as this descriptor and build
    # it into the pool.
    from typy.google.protobuf import descriptor_pb2
    file_descriptor_proto = descriptor_pb2.FileDescriptorProto()
    file_descriptor_proto.message_type.add().MergeFrom(desc_proto)

    # Generate a random name for this proto file to prevent conflicts with any
    # imported ones. We need to specify a file name so the descriptor pool
    # accepts our FileDescriptorProto, but it is not important what that file
    # name is actually set to.
    proto_name = str(uuid.uuid4())

    if package:
      file_descriptor_proto.name = os.path.join(package.replace('.', '/'),
                                                proto_name + '.proto')
      file_descriptor_proto.package = package
    else:
      file_descriptor_proto.name = proto_name + '.proto'
    _message.default_pool.Add(file_descriptor_proto)
    result = _message.default_pool.FindFileByName(file_descriptor_proto.name)

    if _USE_C_DESCRIPTORS:
      return result.message_types_by_name[desc_proto.name]

  full_message_name = [desc_proto.name]
  if package:
    full_message_name.insert(0, package)

  # Create Descriptors for enum types
  enum_types = {}
  for enum_proto in desc_proto.enum_type:
    full_name = '.'.join(full_message_name + [enum_proto.name])
    enum_desc = EnumDescriptor(
        enum_proto.name, full_name, None, [
            EnumValueDescriptor(enum_val.name, ii, enum_val.number)
            for ii, enum_val in enumerate(enum_proto.value)])
    enum_types[full_name] = enum_desc

  # Create Descriptors for nested types
  nested_types = {}
  for nested_proto in desc_proto.nested_type:
    full_name = '.'.join(full_message_name + [nested_proto.name])
    # Nested types are just those defined inside of the message, not all types
    # used by fields in the message, so no loops are possible here.
    nested_desc = MakeDescriptor(nested_proto,
                                 package='.'.join(full_message_name),
                                 build_file_if_cpp=False,
                                 syntax=syntax)
    nested_types[full_name] = nested_desc

  fields = []
  for field_proto in desc_proto.field:
    full_name = '.'.join(full_message_name + [field_proto.name])
    enum_desc = None
    nested_desc = None
    if field_proto.HasField('type_name'):
      type_name = field_proto.type_name
      full_type_name = '.'.join(full_message_name +
                                [type_name[type_name.rfind('.')+1:]])
      if full_type_name in nested_types:
        nested_desc = nested_types[full_type_name]
      elif full_type_name in enum_types:
        enum_desc = enum_types[full_type_name]
    # Else type_name references a non-local type, which isn't implemented
    field = FieldDescriptor(
        field_proto.name, full_name, field_proto.number - 1,
        field_proto.number, field_proto.type,
        FieldDescriptor.ProtoTypeToCppProtoType(field_proto.type),
        field_proto.label, None, nested_desc, enum_desc, None, False, None,
        options=field_proto.options, has_default_value=False)
    fields.append(field)

  desc_name = '.'.join(full_message_name)
  return Descriptor(desc_proto.name, desc_name, None, None, fields,
                    list(nested_types.values()), list(enum_types.values()),
                    [], options=desc_proto.options)
[ "Make", "a", "protobuf", "Descriptor", "given", "a", "DescriptorProto", "protobuf", "." ]
ibelie/typy
python
https://github.com/ibelie/typy/blob/3616845fb91459aacd8df6bf82c5d91f4542bee7/typy/google/protobuf/descriptor.py#L875-L971
[ "def", "MakeDescriptor", "(", "desc_proto", ",", "package", "=", "''", ",", "build_file_if_cpp", "=", "True", ",", "syntax", "=", "None", ")", ":", "if", "api_implementation", ".", "Type", "(", ")", "==", "'cpp'", "and", "build_file_if_cpp", ":", "# The C++ implementation requires all descriptors to be backed by the same", "# definition in the C++ descriptor pool. To do this, we build a", "# FileDescriptorProto with the same definition as this descriptor and build", "# it into the pool.", "from", "typy", ".", "google", ".", "protobuf", "import", "descriptor_pb2", "file_descriptor_proto", "=", "descriptor_pb2", ".", "FileDescriptorProto", "(", ")", "file_descriptor_proto", ".", "message_type", ".", "add", "(", ")", ".", "MergeFrom", "(", "desc_proto", ")", "# Generate a random name for this proto file to prevent conflicts with any", "# imported ones. We need to specify a file name so the descriptor pool", "# accepts our FileDescriptorProto, but it is not important what that file", "# name is actually set to.", "proto_name", "=", "str", "(", "uuid", ".", "uuid4", "(", ")", ")", "if", "package", ":", "file_descriptor_proto", ".", "name", "=", "os", ".", "path", ".", "join", "(", "package", ".", "replace", "(", "'.'", ",", "'/'", ")", ",", "proto_name", "+", "'.proto'", ")", "file_descriptor_proto", ".", "package", "=", "package", "else", ":", "file_descriptor_proto", ".", "name", "=", "proto_name", "+", "'.proto'", "_message", ".", "default_pool", ".", "Add", "(", "file_descriptor_proto", ")", "result", "=", "_message", ".", "default_pool", ".", "FindFileByName", "(", "file_descriptor_proto", ".", "name", ")", "if", "_USE_C_DESCRIPTORS", ":", "return", "result", ".", "message_types_by_name", "[", "desc_proto", ".", "name", "]", "full_message_name", "=", "[", "desc_proto", ".", "name", "]", "if", "package", ":", "full_message_name", ".", "insert", "(", "0", ",", "package", ")", "# Create Descriptors for enum types", "enum_types", "=", "{", "}", "for", "enum_proto", "in", "desc_proto", ".", "enum_type", ":", "full_name", "=", "'.'", ".", "join", "(", "full_message_name", "+", "[", "enum_proto", ".", "name", "]", ")", "enum_desc", "=", "EnumDescriptor", "(", "enum_proto", ".", "name", ",", "full_name", ",", "None", ",", "[", "EnumValueDescriptor", "(", "enum_val", ".", "name", ",", "ii", ",", "enum_val", ".", "number", ")", "for", "ii", ",", "enum_val", "in", "enumerate", "(", "enum_proto", ".", "value", ")", "]", ")", "enum_types", "[", "full_name", "]", "=", "enum_desc", "# Create Descriptors for nested types", "nested_types", "=", "{", "}", "for", "nested_proto", "in", "desc_proto", ".", "nested_type", ":", "full_name", "=", "'.'", ".", "join", "(", "full_message_name", "+", "[", "nested_proto", ".", "name", "]", ")", "# Nested types are just those defined inside of the message, not all types", "# used by fields in the message, so no loops are possible here.", "nested_desc", "=", "MakeDescriptor", "(", "nested_proto", ",", "package", "=", "'.'", ".", "join", "(", "full_message_name", ")", ",", "build_file_if_cpp", "=", "False", ",", "syntax", "=", "syntax", ")", "nested_types", "[", "full_name", "]", "=", "nested_desc", "fields", "=", "[", "]", "for", "field_proto", "in", "desc_proto", ".", "field", ":", "full_name", "=", "'.'", ".", "join", "(", "full_message_name", "+", "[", "field_proto", ".", "name", "]", ")", "enum_desc", "=", "None", "nested_desc", "=", "None", "if", "field_proto", ".", "HasField", "(", "'type_name'", ")", ":", "type_name", "=", "field_proto", ".", "type_name", 
"full_type_name", "=", "'.'", ".", "join", "(", "full_message_name", "+", "[", "type_name", "[", "type_name", ".", "rfind", "(", "'.'", ")", "+", "1", ":", "]", "]", ")", "if", "full_type_name", "in", "nested_types", ":", "nested_desc", "=", "nested_types", "[", "full_type_name", "]", "elif", "full_type_name", "in", "enum_types", ":", "enum_desc", "=", "enum_types", "[", "full_type_name", "]", "# Else type_name references a non-local type, which isn't implemented", "field", "=", "FieldDescriptor", "(", "field_proto", ".", "name", ",", "full_name", ",", "field_proto", ".", "number", "-", "1", ",", "field_proto", ".", "number", ",", "field_proto", ".", "type", ",", "FieldDescriptor", ".", "ProtoTypeToCppProtoType", "(", "field_proto", ".", "type", ")", ",", "field_proto", ".", "label", ",", "None", ",", "nested_desc", ",", "enum_desc", ",", "None", ",", "False", ",", "None", ",", "options", "=", "field_proto", ".", "options", ",", "has_default_value", "=", "False", ")", "fields", ".", "append", "(", "field", ")", "desc_name", "=", "'.'", ".", "join", "(", "full_message_name", ")", "return", "Descriptor", "(", "desc_proto", ".", "name", ",", "desc_name", ",", "None", ",", "None", ",", "fields", ",", "list", "(", "nested_types", ".", "values", "(", ")", ")", ",", "list", "(", "enum_types", ".", "values", "(", ")", ")", ",", "[", "]", ",", "options", "=", "desc_proto", ".", "options", ")" ]
3616845fb91459aacd8df6bf82c5d91f4542bee7
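A hedged sketch of driving MakeDescriptor: build a DescriptorProto by hand and ask for a Descriptor back. This assumes the vendored typy.google.protobuf.descriptor_pb2 mirrors stock protobuf's module of the same name:

from typy.google.protobuf import descriptor_pb2

proto = descriptor_pb2.DescriptorProto()
proto.name = 'Pair'
field = proto.field.add()          # one string field named 'key'
field.name = 'key'
field.number = 1
field.type = descriptor_pb2.FieldDescriptorProto.TYPE_STRING
field.label = descriptor_pb2.FieldDescriptorProto.LABEL_OPTIONAL

desc = MakeDescriptor(proto, package='demo')
print(desc.full_name)                  # demo.Pair
print([f.name for f in desc.fields])   # ['key']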
valid
_NestedDescriptorBase.GetTopLevelContainingType
Returns the root if this is a nested type, or itself if it's the root.
typy/google/protobuf/descriptor.py
def GetTopLevelContainingType(self):
  """Returns the root if this is a nested type, or itself if it's the root."""
  desc = self
  while desc.containing_type is not None:
    desc = desc.containing_type
  return desc
def GetTopLevelContainingType(self):
  """Returns the root if this is a nested type, or itself if it's the root."""
  desc = self
  while desc.containing_type is not None:
    desc = desc.containing_type
  return desc
[ "Returns", "the", "root", "if", "this", "is", "a", "nested", "type", "or", "itself", "if", "its", "the", "root", "." ]
ibelie/typy
python
https://github.com/ibelie/typy/blob/3616845fb91459aacd8df6bf82c5d91f4542bee7/typy/google/protobuf/descriptor.py#L174-L179
[ "def", "GetTopLevelContainingType", "(", "self", ")", ":", "desc", "=", "self", "while", "desc", ".", "containing_type", "is", "not", "None", ":", "desc", "=", "desc", ".", "containing_type", "return", "desc" ]
3616845fb91459aacd8df6bf82c5d91f4542bee7
valid
ServiceDescriptor.FindMethodByName
Searches for the specified method, and returns its descriptor.
typy/google/protobuf/descriptor.py
def FindMethodByName(self, name):
  """Searches for the specified method, and returns its descriptor."""
  for method in self.methods:
    if name == method.name:
      return method
  return None
def FindMethodByName(self, name):
  """Searches for the specified method, and returns its descriptor."""
  for method in self.methods:
    if name == method.name:
      return method
  return None
[ "Searches", "for", "the", "specified", "method", "and", "returns", "its", "descriptor", "." ]
ibelie/typy
python
https://github.com/ibelie/typy/blob/3616845fb91459aacd8df6bf82c5d91f4542bee7/typy/google/protobuf/descriptor.py#L725-L730
[ "def", "FindMethodByName", "(", "self", ",", "name", ")", ":", "for", "method", "in", "self", ".", "methods", ":", "if", "name", "==", "method", ".", "name", ":", "return", "method", "return", "None" ]
3616845fb91459aacd8df6bf82c5d91f4542bee7
valid
main
Register your own mode and handle method here.
scripts/check_winrm.py
def main():
    """Register your own mode and handle method here."""
    plugin = Register()
    if plugin.args.option == 'sqlserverlocks':
        plugin.sqlserverlocks_handle()
    else:
        plugin.unknown("Unknown actions.")
def main():
    """Register your own mode and handle method here."""
    plugin = Register()
    if plugin.args.option == 'sqlserverlocks':
        plugin.sqlserverlocks_handle()
    else:
        plugin.unknown("Unknown actions.")
[ "Register", "your", "own", "mode", "and", "handle", "method", "here", "." ]
crazy-canux/arguspy
python
https://github.com/crazy-canux/arguspy/blob/e9486b5df61978a990d56bf43de35f3a4cdefcc3/scripts/check_winrm.py#L115-L121
[ "def", "main", "(", ")", ":", "plugin", "=", "Register", "(", ")", "if", "plugin", ".", "args", ".", "option", "==", "'sqlserverlocks'", ":", "plugin", ".", "sqlserverlocks_handle", "(", ")", "else", ":", "plugin", ".", "unknown", "(", "\"Unknown actions.\"", ")" ]
e9486b5df61978a990d56bf43de35f3a4cdefcc3
valid
MessageToJson
Converts protobuf message to JSON format. Args: message: The protocol buffers message instance to serialize. including_default_value_fields: If True, singular primitive fields, repeated fields, and map fields will always be serialized. If False, only serialize non-empty fields. Singular message fields and oneof fields are not affected by this option. Returns: A string containing the JSON formatted protocol buffer message.
typy/google/protobuf/json_format.py
def MessageToJson(message, including_default_value_fields=False):
  """Converts protobuf message to JSON format.

  Args:
    message: The protocol buffers message instance to serialize.
    including_default_value_fields: If True, singular primitive fields,
        repeated fields, and map fields will always be serialized.  If
        False, only serialize non-empty fields.  Singular message fields
        and oneof fields are not affected by this option.

  Returns:
    A string containing the JSON formatted protocol buffer message.
  """
  js = _MessageToJsonObject(message, including_default_value_fields)
  return json.dumps(js, indent=2)
def MessageToJson(message, including_default_value_fields=False):
  """Converts protobuf message to JSON format.

  Args:
    message: The protocol buffers message instance to serialize.
    including_default_value_fields: If True, singular primitive fields,
        repeated fields, and map fields will always be serialized.  If
        False, only serialize non-empty fields.  Singular message fields
        and oneof fields are not affected by this option.

  Returns:
    A string containing the JSON formatted protocol buffer message.
  """
  js = _MessageToJsonObject(message, including_default_value_fields)
  return json.dumps(js, indent=2)
[ "Converts", "protobuf", "message", "to", "JSON", "format", "." ]
ibelie/typy
python
https://github.com/ibelie/typy/blob/3616845fb91459aacd8df6bf82c5d91f4542bee7/typy/google/protobuf/json_format.py#L80-L94
[ "def", "MessageToJson", "(", "message", ",", "including_default_value_fields", "=", "False", ")", ":", "js", "=", "_MessageToJsonObject", "(", "message", ",", "including_default_value_fields", ")", "return", "json", ".", "dumps", "(", "js", ",", "indent", "=", "2", ")" ]
3616845fb91459aacd8df6bf82c5d91f4542bee7
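A hedged usage sketch for MessageToJson, using the Struct well-known type so the example needs no generated code; it assumes the vendored package ships struct_pb2 with the stock dict-style mixin:

from typy.google.protobuf import struct_pb2

msg = struct_pb2.Struct()
msg['name'] = 'demo'       # Struct supports dict-style assignment
msg['count'] = 3
print(MessageToJson(msg))  # prints a two-space-indented JSON object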
valid
_MessageToJsonObject
Converts message to an object according to Proto3 JSON Specification.
typy/google/protobuf/json_format.py
def _MessageToJsonObject(message, including_default_value_fields):
  """Converts message to an object according to Proto3 JSON Specification."""
  message_descriptor = message.DESCRIPTOR
  full_name = message_descriptor.full_name
  if _IsWrapperMessage(message_descriptor):
    return _WrapperMessageToJsonObject(message)
  if full_name in _WKTJSONMETHODS:
    return _WKTJSONMETHODS[full_name][0](
        message, including_default_value_fields)
  js = {}
  return _RegularMessageToJsonObject(
      message, js, including_default_value_fields)
def _MessageToJsonObject(message, including_default_value_fields):
  """Converts message to an object according to Proto3 JSON Specification."""
  message_descriptor = message.DESCRIPTOR
  full_name = message_descriptor.full_name
  if _IsWrapperMessage(message_descriptor):
    return _WrapperMessageToJsonObject(message)
  if full_name in _WKTJSONMETHODS:
    return _WKTJSONMETHODS[full_name][0](
        message, including_default_value_fields)
  js = {}
  return _RegularMessageToJsonObject(
      message, js, including_default_value_fields)
[ "Converts", "message", "to", "an", "object", "according", "to", "Proto3", "JSON", "Specification", "." ]
ibelie/typy
python
https://github.com/ibelie/typy/blob/3616845fb91459aacd8df6bf82c5d91f4542bee7/typy/google/protobuf/json_format.py#L97-L108
[ "def", "_MessageToJsonObject", "(", "message", ",", "including_default_value_fields", ")", ":", "message_descriptor", "=", "message", ".", "DESCRIPTOR", "full_name", "=", "message_descriptor", ".", "full_name", "if", "_IsWrapperMessage", "(", "message_descriptor", ")", ":", "return", "_WrapperMessageToJsonObject", "(", "message", ")", "if", "full_name", "in", "_WKTJSONMETHODS", ":", "return", "_WKTJSONMETHODS", "[", "full_name", "]", "[", "0", "]", "(", "message", ",", "including_default_value_fields", ")", "js", "=", "{", "}", "return", "_RegularMessageToJsonObject", "(", "message", ",", "js", ",", "including_default_value_fields", ")" ]
3616845fb91459aacd8df6bf82c5d91f4542bee7
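_MessageToJsonObject dispatches on the message's full_name through a table of well-known-type handlers before falling back to field-by-field conversion. A generic sketch of that dispatch-table shape, with made-up handler bodies:

_HANDLERS = {
    'google.protobuf.Timestamp': lambda msg: 'RFC 3339 string',
    'google.protobuf.Duration':  lambda msg: 'seconds-with-suffix string',
}

def convert(full_name, msg):
    handler = _HANDLERS.get(full_name)
    if handler is not None:
        return handler(msg)   # special-cased well-known type
    return {'...': 'regular field-by-field conversion'}

print(convert('google.protobuf.Timestamp', None))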