Dataset schema (field, type, value range):

partition          stringclasses   3 values
func_name          stringlengths   1 - 134
docstring          stringlengths   1 - 46.9k
path               stringlengths   4 - 223
original_string    stringlengths   75 - 104k
code               stringlengths   75 - 104k
docstring_tokens   listlengths     1 - 1.97k
repo               stringlengths   7 - 55
language           stringclasses   1 value
url                stringlengths   87 - 315
code_tokens        listlengths     19 - 28.4k
sha                stringlengths   40 - 40
test
Backend.metadata
Add metadata to an item. It adds metadata to a given item such as how and when it was fetched. The contents from the original item will be stored under the 'data' keyword. :param item: an item fetched by a backend :param filter_classified: sets if classified fields were filtered
perceval/backend.py
def metadata(self, item, filter_classified=False):
    """Add metadata to an item.

    It adds metadata to a given item such as how and when it was
    fetched. The contents from the original item will be stored
    under the 'data' keyword.

    :param item: an item fetched by a backend
    :param filter_classified: sets if classified fields were filtered
    """
    item = {
        'backend_name': self.__class__.__name__,
        'backend_version': self.version,
        'perceval_version': __version__,
        'timestamp': datetime_utcnow().timestamp(),
        'origin': self.origin,
        'uuid': uuid(self.origin, self.metadata_id(item)),
        'updated_on': self.metadata_updated_on(item),
        'classified_fields_filtered': self.classified_fields if filter_classified else None,
        'category': self.metadata_category(item),
        'tag': self.tag,
        'data': item,
    }

    return item
[ "Add", "metadata", "to", "an", "item", "." ]
chaoss/grimoirelab-perceval
python
https://github.com/chaoss/grimoirelab-perceval/blob/41c908605e88b7ebc3a536c643fa0f212eaf9e0e/perceval/backend.py#L211-L235
[ "def", "metadata", "(", "self", ",", "item", ",", "filter_classified", "=", "False", ")", ":", "item", "=", "{", "'backend_name'", ":", "self", ".", "__class__", ".", "__name__", ",", "'backend_version'", ":", "self", ".", "version", ",", "'perceval_version'", ":", "__version__", ",", "'timestamp'", ":", "datetime_utcnow", "(", ")", ".", "timestamp", "(", ")", ",", "'origin'", ":", "self", ".", "origin", ",", "'uuid'", ":", "uuid", "(", "self", ".", "origin", ",", "self", ".", "metadata_id", "(", "item", ")", ")", ",", "'updated_on'", ":", "self", ".", "metadata_updated_on", "(", "item", ")", ",", "'classified_fields_filtered'", ":", "self", ".", "classified_fields", "if", "filter_classified", "else", "None", ",", "'category'", ":", "self", ".", "metadata_category", "(", "item", ")", ",", "'tag'", ":", "self", ".", "tag", ",", "'data'", ":", "item", ",", "}", "return", "item" ]
41c908605e88b7ebc3a536c643fa0f212eaf9e0e
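For orientation, here is a minimal sketch of the envelope `Backend.metadata` produces: a plain dict whose keys mirror the ones built above. Every value is a hypothetical placeholder (the origin URL, versions, and UUID are illustrative, not taken from a real fetch).

# Illustrative shape of a wrapped item; all values below are placeholders.
wrapped_item = {
    'backend_name': 'Git',                       # self.__class__.__name__
    'backend_version': '1.0.0',                  # hypothetical backend version
    'perceval_version': '1.0.0',                 # hypothetical perceval version
    'timestamp': 1700000000.0,                   # UTC time when the item was fetched
    'origin': 'https://example.com/repo.git',    # hypothetical origin URI
    'uuid': 'f1d2d2f924e986ac86fdf7b36c94bcdf32beec15',  # uuid(origin, metadata_id(item))
    'updated_on': 1700000000.0,                  # metadata_updated_on(item)
    'classified_fields_filtered': None,          # list of filtered fields, or None
    'category': 'commit',                        # metadata_category(item)
    'tag': 'https://example.com/repo.git',       # backend tag (often the origin)
    'data': {'message': 'the original item goes here'},
}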
test
BackendCommandArgumentParser.parse
Parse a list of arguments. Parse argument strings needed to run a backend command. The result will be an `argparse.Namespace` object populated with the values obtained after the validation of the parameters. :param args: argument strings :result: an object with the parsed values
perceval/backend.py
def parse(self, *args):
    """Parse a list of arguments.

    Parse argument strings needed to run a backend command. The result
    will be an `argparse.Namespace` object populated with the values
    obtained after the validation of the parameters.

    :param args: argument strings

    :result: an object with the parsed values
    """
    parsed_args = self.parser.parse_args(args)

    # Category was not set, remove it
    if parsed_args.category is None:
        delattr(parsed_args, 'category')

    if self._from_date:
        parsed_args.from_date = str_to_datetime(parsed_args.from_date)

    if self._to_date and parsed_args.to_date:
        parsed_args.to_date = str_to_datetime(parsed_args.to_date)

    if self._archive and parsed_args.archived_since:
        parsed_args.archived_since = str_to_datetime(parsed_args.archived_since)

    if self._archive and parsed_args.fetch_archive and parsed_args.no_archive:
        raise AttributeError("fetch-archive and no-archive arguments are not compatible")

    if self._archive and parsed_args.fetch_archive and not parsed_args.category:
        raise AttributeError("fetch-archive needs a category to work with")

    # Set aliases
    for alias, arg in self.aliases.items():
        if (alias not in parsed_args) and (arg in parsed_args):
            value = getattr(parsed_args, arg, None)
            setattr(parsed_args, alias, value)

    return parsed_args
[ "Parse", "a", "list", "of", "arguments", "." ]
chaoss/grimoirelab-perceval
python
https://github.com/chaoss/grimoirelab-perceval/blob/41c908605e88b7ebc3a536c643fa0f212eaf9e0e/perceval/backend.py#L340-L375
[ "def", "parse", "(", "self", ",", "*", "args", ")", ":", "parsed_args", "=", "self", ".", "parser", ".", "parse_args", "(", "args", ")", "# Category was not set, remove it", "if", "parsed_args", ".", "category", "is", "None", ":", "delattr", "(", "parsed_args", ",", "'category'", ")", "if", "self", ".", "_from_date", ":", "parsed_args", ".", "from_date", "=", "str_to_datetime", "(", "parsed_args", ".", "from_date", ")", "if", "self", ".", "_to_date", "and", "parsed_args", ".", "to_date", ":", "parsed_args", ".", "to_date", "=", "str_to_datetime", "(", "parsed_args", ".", "to_date", ")", "if", "self", ".", "_archive", "and", "parsed_args", ".", "archived_since", ":", "parsed_args", ".", "archived_since", "=", "str_to_datetime", "(", "parsed_args", ".", "archived_since", ")", "if", "self", ".", "_archive", "and", "parsed_args", ".", "fetch_archive", "and", "parsed_args", ".", "no_archive", ":", "raise", "AttributeError", "(", "\"fetch-archive and no-archive arguments are not compatible\"", ")", "if", "self", ".", "_archive", "and", "parsed_args", ".", "fetch_archive", "and", "not", "parsed_args", ".", "category", ":", "raise", "AttributeError", "(", "\"fetch-archive needs a category to work with\"", ")", "# Set aliases", "for", "alias", ",", "arg", "in", "self", ".", "aliases", ".", "items", "(", ")", ":", "if", "(", "alias", "not", "in", "parsed_args", ")", "and", "(", "arg", "in", "parsed_args", ")", ":", "value", "=", "getattr", "(", "parsed_args", ",", "arg", ",", "None", ")", "setattr", "(", "parsed_args", ",", "alias", ",", "value", ")", "return", "parsed_args" ]
41c908605e88b7ebc3a536c643fa0f212eaf9e0e
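A hedged usage sketch of `parse`, assuming the constructor usage shown in `GitCommand.setup_cmd_parser` further below (a list of categories plus `from_date`/`to_date` flags) and assuming the base parser defines `--from-date` with a default; argument strings are passed exactly as they would appear on the command line.

from perceval.backend import BackendCommandArgumentParser

parser = BackendCommandArgumentParser(['commit'], from_date=True, to_date=True)

# Argument strings are validated and converted; --from-date becomes a datetime.
args = parser.parse('--from-date', '2020-01-01')
print(args.from_date)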
test
BackendCommandArgumentParser._set_auth_arguments
Activate authentication arguments parsing
perceval/backend.py
def _set_auth_arguments(self, basic_auth=True, token_auth=False):
    """Activate authentication arguments parsing"""

    group = self.parser.add_argument_group('authentication arguments')

    if basic_auth:
        group.add_argument('-u', '--backend-user', dest='user',
                           help="backend user")
        group.add_argument('-p', '--backend-password', dest='password',
                           help="backend password")

    if token_auth:
        group.add_argument('-t', '--api-token', dest='api_token',
                           help="backend authentication token / API key")
[ "Activate", "authentication", "arguments", "parsing" ]
chaoss/grimoirelab-perceval
python
https://github.com/chaoss/grimoirelab-perceval/blob/41c908605e88b7ebc3a536c643fa0f212eaf9e0e/perceval/backend.py#L377-L389
[ "def", "_set_auth_arguments", "(", "self", ",", "basic_auth", "=", "True", ",", "token_auth", "=", "False", ")", ":", "group", "=", "self", ".", "parser", ".", "add_argument_group", "(", "'authentication arguments'", ")", "if", "basic_auth", ":", "group", ".", "add_argument", "(", "'-u'", ",", "'--backend-user'", ",", "dest", "=", "'user'", ",", "help", "=", "\"backend user\"", ")", "group", ".", "add_argument", "(", "'-p'", ",", "'--backend-password'", ",", "dest", "=", "'password'", ",", "help", "=", "\"backend password\"", ")", "if", "token_auth", ":", "group", ".", "add_argument", "(", "'-t'", ",", "'--api-token'", ",", "dest", "=", "'api_token'", ",", "help", "=", "\"backend authentication token / API key\"", ")" ]
41c908605e88b7ebc3a536c643fa0f212eaf9e0e
test
BackendCommandArgumentParser._set_archive_arguments
Activate archive arguments parsing
perceval/backend.py
def _set_archive_arguments(self):
    """Activate archive arguments parsing"""

    group = self.parser.add_argument_group('archive arguments')
    group.add_argument('--archive-path', dest='archive_path', default=None,
                       help="directory path to the archives")
    group.add_argument('--no-archive', dest='no_archive', action='store_true',
                       help="do not archive data")
    group.add_argument('--fetch-archive', dest='fetch_archive', action='store_true',
                       help="fetch data from the archives")
    group.add_argument('--archived-since', dest='archived_since', default='1970-01-01',
                       help="retrieve items archived since the given date")
[ "Activate", "archive", "arguments", "parsing" ]
chaoss/grimoirelab-perceval
python
https://github.com/chaoss/grimoirelab-perceval/blob/41c908605e88b7ebc3a536c643fa0f212eaf9e0e/perceval/backend.py#L391-L402
[ "def", "_set_archive_arguments", "(", "self", ")", ":", "group", "=", "self", ".", "parser", ".", "add_argument_group", "(", "'archive arguments'", ")", "group", ".", "add_argument", "(", "'--archive-path'", ",", "dest", "=", "'archive_path'", ",", "default", "=", "None", ",", "help", "=", "\"directory path to the archives\"", ")", "group", ".", "add_argument", "(", "'--no-archive'", ",", "dest", "=", "'no_archive'", ",", "action", "=", "'store_true'", ",", "help", "=", "\"do not archive data\"", ")", "group", ".", "add_argument", "(", "'--fetch-archive'", ",", "dest", "=", "'fetch_archive'", ",", "action", "=", "'store_true'", ",", "help", "=", "\"fetch data from the archives\"", ")", "group", ".", "add_argument", "(", "'--archived-since'", ",", "dest", "=", "'archived_since'", ",", "default", "=", "'1970-01-01'", ",", "help", "=", "\"retrieve items archived since the given date\"", ")" ]
41c908605e88b7ebc3a536c643fa0f212eaf9e0e
test
BackendCommandArgumentParser._set_output_arguments
Activate output arguments parsing
perceval/backend.py
def _set_output_arguments(self):
    """Activate output arguments parsing"""

    group = self.parser.add_argument_group('output arguments')
    group.add_argument('-o', '--output', type=argparse.FileType('w'),
                       dest='outfile', default=sys.stdout,
                       help="output file")
    group.add_argument('--json-line', dest='json_line', action='store_true',
                       help="produce a JSON line for each output item")
[ "Activate", "output", "arguments", "parsing" ]
chaoss/grimoirelab-perceval
python
https://github.com/chaoss/grimoirelab-perceval/blob/41c908605e88b7ebc3a536c643fa0f212eaf9e0e/perceval/backend.py#L404-L412
[ "def", "_set_output_arguments", "(", "self", ")", ":", "group", "=", "self", ".", "parser", ".", "add_argument_group", "(", "'output arguments'", ")", "group", ".", "add_argument", "(", "'-o'", ",", "'--output'", ",", "type", "=", "argparse", ".", "FileType", "(", "'w'", ")", ",", "dest", "=", "'outfile'", ",", "default", "=", "sys", ".", "stdout", ",", "help", "=", "\"output file\"", ")", "group", ".", "add_argument", "(", "'--json-line'", ",", "dest", "=", "'json_line'", ",", "action", "=", "'store_true'", ",", "help", "=", "\"produce a JSON line for each output item\"", ")" ]
41c908605e88b7ebc3a536c643fa0f212eaf9e0e
test
BackendCommand.run
Fetch and write items. This method runs the backend to fetch the items from the given origin. Items are converted to JSON objects and written to the defined output. If `fetch-archive` parameter was given as an argument during the initialization of the instance, the items will be retrieved using the archive manager.
perceval/backend.py
def run(self):
    """Fetch and write items.

    This method runs the backend to fetch the items from the given
    origin. Items are converted to JSON objects and written to the
    defined output.

    If `fetch-archive` parameter was given as an argument during
    the initialization of the instance, the items will be retrieved
    using the archive manager.
    """
    backend_args = vars(self.parsed_args)
    category = backend_args.pop('category', None)
    filter_classified = backend_args.pop('filter_classified', False)
    archived_since = backend_args.pop('archived_since', None)

    if self.archive_manager and self.parsed_args.fetch_archive:
        items = fetch_from_archive(self.BACKEND, backend_args,
                                   self.archive_manager,
                                   category,
                                   archived_since)
    else:
        items = fetch(self.BACKEND, backend_args, category,
                      filter_classified=filter_classified,
                      manager=self.archive_manager)

    try:
        for item in items:
            if self.json_line:
                obj = json.dumps(item, separators=(',', ':'), sort_keys=True)
            else:
                obj = json.dumps(item, indent=4, sort_keys=True)
            self.outfile.write(obj)
            self.outfile.write('\n')
    except IOError as e:
        raise RuntimeError(str(e))
    except Exception as e:
        raise RuntimeError(str(e))
[ "Fetch", "and", "write", "items", "." ]
chaoss/grimoirelab-perceval
python
https://github.com/chaoss/grimoirelab-perceval/blob/41c908605e88b7ebc3a536c643fa0f212eaf9e0e/perceval/backend.py#L445-L482
[ "def", "run", "(", "self", ")", ":", "backend_args", "=", "vars", "(", "self", ".", "parsed_args", ")", "category", "=", "backend_args", ".", "pop", "(", "'category'", ",", "None", ")", "filter_classified", "=", "backend_args", ".", "pop", "(", "'filter_classified'", ",", "False", ")", "archived_since", "=", "backend_args", ".", "pop", "(", "'archived_since'", ",", "None", ")", "if", "self", ".", "archive_manager", "and", "self", ".", "parsed_args", ".", "fetch_archive", ":", "items", "=", "fetch_from_archive", "(", "self", ".", "BACKEND", ",", "backend_args", ",", "self", ".", "archive_manager", ",", "category", ",", "archived_since", ")", "else", ":", "items", "=", "fetch", "(", "self", ".", "BACKEND", ",", "backend_args", ",", "category", ",", "filter_classified", "=", "filter_classified", ",", "manager", "=", "self", ".", "archive_manager", ")", "try", ":", "for", "item", "in", "items", ":", "if", "self", ".", "json_line", ":", "obj", "=", "json", ".", "dumps", "(", "item", ",", "separators", "=", "(", "','", ",", "':'", ")", ",", "sort_keys", "=", "True", ")", "else", ":", "obj", "=", "json", ".", "dumps", "(", "item", ",", "indent", "=", "4", ",", "sort_keys", "=", "True", ")", "self", ".", "outfile", ".", "write", "(", "obj", ")", "self", ".", "outfile", ".", "write", "(", "'\\n'", ")", "except", "IOError", "as", "e", ":", "raise", "RuntimeError", "(", "str", "(", "e", ")", ")", "except", "Exception", "as", "e", ":", "raise", "RuntimeError", "(", "str", "(", "e", ")", ")" ]
41c908605e88b7ebc3a536c643fa0f212eaf9e0e
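The two `json.dumps` calls above are the only difference between the `--json-line` output and the default output; a small standalone illustration with a placeholder item:

import json

item = {'uuid': 'f1d2d2f9', 'data': {'message': 'hello'}}   # placeholder item

# --json-line given: compact, one JSON object per line (JSON Lines style)
print(json.dumps(item, separators=(',', ':'), sort_keys=True))

# default: pretty-printed with a 4-space indent
print(json.dumps(item, indent=4, sort_keys=True))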
test
BackendCommand._initialize_archive
Initialize archive based on the parsed parameters
perceval/backend.py
def _initialize_archive(self):
    """Initialize archive based on the parsed parameters"""

    if 'archive_path' not in self.parsed_args:
        manager = None
    elif self.parsed_args.no_archive:
        manager = None
    else:
        if not self.parsed_args.archive_path:
            archive_path = os.path.expanduser(ARCHIVES_DEFAULT_PATH)
        else:
            archive_path = self.parsed_args.archive_path

        manager = ArchiveManager(archive_path)

    self.archive_manager = manager
[ "Initialize", "archive", "based", "on", "the", "parsed", "parameters" ]
chaoss/grimoirelab-perceval
python
https://github.com/chaoss/grimoirelab-perceval/blob/41c908605e88b7ebc3a536c643fa0f212eaf9e0e/perceval/backend.py#L492-L507
[ "def", "_initialize_archive", "(", "self", ")", ":", "if", "'archive_path'", "not", "in", "self", ".", "parsed_args", ":", "manager", "=", "None", "elif", "self", ".", "parsed_args", ".", "no_archive", ":", "manager", "=", "None", "else", ":", "if", "not", "self", ".", "parsed_args", ".", "archive_path", ":", "archive_path", "=", "os", ".", "path", ".", "expanduser", "(", "ARCHIVES_DEFAULT_PATH", ")", "else", ":", "archive_path", "=", "self", ".", "parsed_args", ".", "archive_path", "manager", "=", "ArchiveManager", "(", "archive_path", ")", "self", ".", "archive_manager", "=", "manager" ]
41c908605e88b7ebc3a536c643fa0f212eaf9e0e
test
MBox.fetch_items
Fetch the messages :param category: the category of items to fetch :param kwargs: backend arguments :returns: a generator of items
perceval/backends/core/mbox.py
def fetch_items(self, category, **kwargs):
    """Fetch the messages

    :param category: the category of items to fetch
    :param kwargs: backend arguments

    :returns: a generator of items
    """
    from_date = kwargs['from_date']

    logger.info("Looking for messages from '%s' on '%s' since %s",
                self.uri, self.dirpath, str(from_date))

    mailing_list = MailingList(self.uri, self.dirpath)

    messages = self._fetch_and_parse_messages(mailing_list, from_date)

    for message in messages:
        yield message

    logger.info("Fetch process completed")
[ "Fetch", "the", "messages" ]
chaoss/grimoirelab-perceval
python
https://github.com/chaoss/grimoirelab-perceval/blob/41c908605e88b7ebc3a536c643fa0f212eaf9e0e/perceval/backends/core/mbox.py#L98-L118
[ "def", "fetch_items", "(", "self", ",", "category", ",", "*", "*", "kwargs", ")", ":", "from_date", "=", "kwargs", "[", "'from_date'", "]", "logger", ".", "info", "(", "\"Looking for messages from '%s' on '%s' since %s\"", ",", "self", ".", "uri", ",", "self", ".", "dirpath", ",", "str", "(", "from_date", ")", ")", "mailing_list", "=", "MailingList", "(", "self", ".", "uri", ",", "self", ".", "dirpath", ")", "messages", "=", "self", ".", "_fetch_and_parse_messages", "(", "mailing_list", ",", "from_date", ")", "for", "message", "in", "messages", ":", "yield", "message", "logger", ".", "info", "(", "\"Fetch process completed\"", ")" ]
41c908605e88b7ebc3a536c643fa0f212eaf9e0e
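A hedged sketch of driving the MBox backend directly. The `(uri, dirpath)` constructor arguments follow the attributes used above (`self.uri`, `self.dirpath`) but are an assumption, since the `__init__` is not part of this excerpt, and the URI and directory are placeholders.

from perceval.backends.core.mbox import MBox

# Assumed constructor: a label URI for the archives plus a local directory of mbox files.
backend = MBox('http://example.com/mailing-list', '/tmp/example-mboxes')

for item in backend.fetch():
    # Items are wrapped by Backend.metadata(); the raw message sits under 'data'.
    print(item['data']['Message-ID'])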
test
MBox.metadata_updated_on
Extracts the update time from a MBox item. The timestamp used is extracted from 'Date' field in its several forms. This date is converted to UNIX timestamp format. :param item: item generated by the backend :returns: a UNIX timestamp
perceval/backends/core/mbox.py
def metadata_updated_on(item):
    """Extracts the update time from a MBox item.

    The timestamp used is extracted from 'Date' field in its
    several forms. This date is converted to UNIX timestamp
    format.

    :param item: item generated by the backend

    :returns: a UNIX timestamp
    """
    ts = item[MBox.DATE_FIELD]
    ts = str_to_datetime(ts)

    return ts.timestamp()
[ "Extracts", "the", "update", "time", "from", "a", "MBox", "item", "." ]
chaoss/grimoirelab-perceval
python
https://github.com/chaoss/grimoirelab-perceval/blob/41c908605e88b7ebc3a536c643fa0f212eaf9e0e/perceval/backends/core/mbox.py#L143-L157
[ "def", "metadata_updated_on", "(", "item", ")", ":", "ts", "=", "item", "[", "MBox", ".", "DATE_FIELD", "]", "ts", "=", "str_to_datetime", "(", "ts", ")", "return", "ts", ".", "timestamp", "(", ")" ]
41c908605e88b7ebc3a536c643fa0f212eaf9e0e
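`str_to_datetime` comes from the GrimoireLab toolkit and accepts the 'Date' header in several forms; as a quick standard-library-only illustration of the same conversion for an RFC 2822 date (the header value here is a made-up example):

from email.utils import parsedate_to_datetime

date_header = 'Mon, 2 Sep 2019 10:00:00 +0200'       # typical mbox 'Date' value
ts = parsedate_to_datetime(date_header).timestamp()   # UNIX timestamp, as a float
print(ts)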
test
MBox.parse_mbox
Parse a mbox file. This method parses a mbox file and returns an iterator of dictionaries. Each one of these contains an email message. :param filepath: path of the mbox to parse :returns: generator of messages; each message is stored in a dictionary of type `requests.structures.CaseInsensitiveDict`
perceval/backends/core/mbox.py
def parse_mbox(filepath):
    """Parse a mbox file.

    This method parses a mbox file and returns an iterator of
    dictionaries. Each one of these contains an email message.

    :param filepath: path of the mbox to parse

    :returns: generator of messages; each message is stored in a
        dictionary of type `requests.structures.CaseInsensitiveDict`
    """
    mbox = _MBox(filepath, create=False)

    for msg in mbox:
        message = message_to_dict(msg)
        yield message
[ "Parse", "a", "mbox", "file", "." ]
chaoss/grimoirelab-perceval
python
https://github.com/chaoss/grimoirelab-perceval/blob/41c908605e88b7ebc3a536c643fa0f212eaf9e0e/perceval/backends/core/mbox.py#L169-L184
[ "def", "parse_mbox", "(", "filepath", ")", ":", "mbox", "=", "_MBox", "(", "filepath", ",", "create", "=", "False", ")", "for", "msg", "in", "mbox", ":", "message", "=", "message_to_dict", "(", "msg", ")", "yield", "message" ]
41c908605e88b7ebc3a536c643fa0f212eaf9e0e
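`_MBox` above subclasses the standard library's mbox reader to add encoding fallbacks; a plain stdlib sketch of the same iteration, assuming a local file named `archive.mbox` already exists:

import mailbox

for msg in mailbox.mbox('archive.mbox', create=False):
    # Each message exposes its headers through the email.message API.
    print(msg.get('Message-ID'), msg.get('Date'))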
test
MBox._fetch_and_parse_messages
Fetch and parse the messages from a mailing list
perceval/backends/core/mbox.py
def _fetch_and_parse_messages(self, mailing_list, from_date):
    """Fetch and parse the messages from a mailing list"""

    from_date = datetime_to_utc(from_date)

    nmsgs, imsgs, tmsgs = (0, 0, 0)

    for mbox in mailing_list.mboxes:
        tmp_path = None

        try:
            tmp_path = self._copy_mbox(mbox)

            for message in self.parse_mbox(tmp_path):
                tmsgs += 1

                if not self._validate_message(message):
                    imsgs += 1
                    continue

                # Ignore those messages sent before the given date
                dt = str_to_datetime(message[MBox.DATE_FIELD])

                if dt < from_date:
                    logger.debug("Message %s sent before %s; skipped",
                                 message['unixfrom'], str(from_date))
                    tmsgs -= 1
                    continue

                # Convert 'CaseInsensitiveDict' to dict
                message = self._casedict_to_dict(message)

                nmsgs += 1
                logger.debug("Message %s parsed", message['unixfrom'])

                yield message
        except (OSError, EOFError) as e:
            logger.warning("Ignoring %s mbox due to: %s", mbox.filepath, str(e))
        except Exception as e:
            if tmp_path and os.path.exists(tmp_path):
                os.remove(tmp_path)
            raise e
        finally:
            if tmp_path and os.path.exists(tmp_path):
                os.remove(tmp_path)

    logger.info("Done. %s/%s messages fetched; %s ignored",
                nmsgs, tmsgs, imsgs)
[ "Fetch", "and", "parse", "the", "messages", "from", "a", "mailing", "list" ]
chaoss/grimoirelab-perceval
python
https://github.com/chaoss/grimoirelab-perceval/blob/41c908605e88b7ebc3a536c643fa0f212eaf9e0e/perceval/backends/core/mbox.py#L189-L236
[ "def", "_fetch_and_parse_messages", "(", "self", ",", "mailing_list", ",", "from_date", ")", ":", "from_date", "=", "datetime_to_utc", "(", "from_date", ")", "nmsgs", ",", "imsgs", ",", "tmsgs", "=", "(", "0", ",", "0", ",", "0", ")", "for", "mbox", "in", "mailing_list", ".", "mboxes", ":", "tmp_path", "=", "None", "try", ":", "tmp_path", "=", "self", ".", "_copy_mbox", "(", "mbox", ")", "for", "message", "in", "self", ".", "parse_mbox", "(", "tmp_path", ")", ":", "tmsgs", "+=", "1", "if", "not", "self", ".", "_validate_message", "(", "message", ")", ":", "imsgs", "+=", "1", "continue", "# Ignore those messages sent before the given date", "dt", "=", "str_to_datetime", "(", "message", "[", "MBox", ".", "DATE_FIELD", "]", ")", "if", "dt", "<", "from_date", ":", "logger", ".", "debug", "(", "\"Message %s sent before %s; skipped\"", ",", "message", "[", "'unixfrom'", "]", ",", "str", "(", "from_date", ")", ")", "tmsgs", "-=", "1", "continue", "# Convert 'CaseInsensitiveDict' to dict", "message", "=", "self", ".", "_casedict_to_dict", "(", "message", ")", "nmsgs", "+=", "1", "logger", ".", "debug", "(", "\"Message %s parsed\"", ",", "message", "[", "'unixfrom'", "]", ")", "yield", "message", "except", "(", "OSError", ",", "EOFError", ")", "as", "e", ":", "logger", ".", "warning", "(", "\"Ignoring %s mbox due to: %s\"", ",", "mbox", ".", "filepath", ",", "str", "(", "e", ")", ")", "except", "Exception", "as", "e", ":", "if", "tmp_path", "and", "os", ".", "path", ".", "exists", "(", "tmp_path", ")", ":", "os", ".", "remove", "(", "tmp_path", ")", "raise", "e", "finally", ":", "if", "tmp_path", "and", "os", ".", "path", ".", "exists", "(", "tmp_path", ")", ":", "os", ".", "remove", "(", "tmp_path", ")", "logger", ".", "info", "(", "\"Done. %s/%s messages fetched; %s ignored\"", ",", "nmsgs", ",", "tmsgs", ",", "imsgs", ")" ]
41c908605e88b7ebc3a536c643fa0f212eaf9e0e
test
MBox._copy_mbox
Copy the contents of a mbox to a temporary file
perceval/backends/core/mbox.py
def _copy_mbox(self, mbox):
    """Copy the contents of a mbox to a temporary file"""

    tmp_path = tempfile.mktemp(prefix='perceval_')

    with mbox.container as f_in:
        with open(tmp_path, mode='wb') as f_out:
            for l in f_in:
                f_out.write(l)
    return tmp_path
[ "Copy", "the", "contents", "of", "a", "mbox", "to", "a", "temporary", "file" ]
chaoss/grimoirelab-perceval
python
https://github.com/chaoss/grimoirelab-perceval/blob/41c908605e88b7ebc3a536c643fa0f212eaf9e0e/perceval/backends/core/mbox.py#L238-L247
[ "def", "_copy_mbox", "(", "self", ",", "mbox", ")", ":", "tmp_path", "=", "tempfile", ".", "mktemp", "(", "prefix", "=", "'perceval_'", ")", "with", "mbox", ".", "container", "as", "f_in", ":", "with", "open", "(", "tmp_path", ",", "mode", "=", "'wb'", ")", "as", "f_out", ":", "for", "l", "in", "f_in", ":", "f_out", ".", "write", "(", "l", ")", "return", "tmp_path" ]
41c908605e88b7ebc3a536c643fa0f212eaf9e0e
test
MBox._validate_message
Check if the given message has the mandatory fields
perceval/backends/core/mbox.py
def _validate_message(self, message):
    """Check if the given message has the mandatory fields"""

    # This check is "case insensitive" because we're
    # using 'CaseInsensitiveDict' from requests.structures
    # module to store the contents of a message.
    if self.MESSAGE_ID_FIELD not in message:
        logger.warning("Field 'Message-ID' not found in message %s; ignoring",
                       message['unixfrom'])
        return False

    if not message[self.MESSAGE_ID_FIELD]:
        logger.warning("Field 'Message-ID' is empty in message %s; ignoring",
                       message['unixfrom'])
        return False

    if self.DATE_FIELD not in message:
        logger.warning("Field 'Date' not found in message %s; ignoring",
                       message['unixfrom'])
        return False

    if not message[self.DATE_FIELD]:
        logger.warning("Field 'Date' is empty in message %s; ignoring",
                       message['unixfrom'])
        return False

    try:
        str_to_datetime(message[self.DATE_FIELD])
    except InvalidDateError:
        logger.warning("Invalid date %s in message %s; ignoring",
                       message[self.DATE_FIELD], message['unixfrom'])
        return False

    return True
[ "Check", "if", "the", "given", "message", "has", "the", "mandatory", "fields" ]
chaoss/grimoirelab-perceval
python
https://github.com/chaoss/grimoirelab-perceval/blob/41c908605e88b7ebc3a536c643fa0f212eaf9e0e/perceval/backends/core/mbox.py#L249-L282
[ "def", "_validate_message", "(", "self", ",", "message", ")", ":", "# This check is \"case insensitive\" because we're", "# using 'CaseInsensitiveDict' from requests.structures", "# module to store the contents of a message.", "if", "self", ".", "MESSAGE_ID_FIELD", "not", "in", "message", ":", "logger", ".", "warning", "(", "\"Field 'Message-ID' not found in message %s; ignoring\"", ",", "message", "[", "'unixfrom'", "]", ")", "return", "False", "if", "not", "message", "[", "self", ".", "MESSAGE_ID_FIELD", "]", ":", "logger", ".", "warning", "(", "\"Field 'Message-ID' is empty in message %s; ignoring\"", ",", "message", "[", "'unixfrom'", "]", ")", "return", "False", "if", "self", ".", "DATE_FIELD", "not", "in", "message", ":", "logger", ".", "warning", "(", "\"Field 'Date' not found in message %s; ignoring\"", ",", "message", "[", "'unixfrom'", "]", ")", "return", "False", "if", "not", "message", "[", "self", ".", "DATE_FIELD", "]", ":", "logger", ".", "warning", "(", "\"Field 'Date' is empty in message %s; ignoring\"", ",", "message", "[", "'unixfrom'", "]", ")", "return", "False", "try", ":", "str_to_datetime", "(", "message", "[", "self", ".", "DATE_FIELD", "]", ")", "except", "InvalidDateError", ":", "logger", ".", "warning", "(", "\"Invalid date %s in message %s; ignoring\"", ",", "message", "[", "self", ".", "DATE_FIELD", "]", ",", "message", "[", "'unixfrom'", "]", ")", "return", "False", "return", "True" ]
41c908605e88b7ebc3a536c643fa0f212eaf9e0e
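Minimal examples of the kind of input `_validate_message` receives; `CaseInsensitiveDict` is the structure named in the comment above, so header lookups work regardless of case. The addresses and IDs are made up.

from requests.structures import CaseInsensitiveDict

valid = CaseInsensitiveDict({
    'unixfrom': 'jdoe@example.com Mon Sep  2 10:00:00 2019',
    'Message-ID': '<1234@example.com>',
    'Date': 'Mon, 2 Sep 2019 10:00:00 +0200',
})
print('message-id' in valid)   # True: lookups are case insensitive

# Missing 'Date' header: _validate_message would log a warning and return False.
invalid = CaseInsensitiveDict({
    'unixfrom': 'jdoe@example.com Mon Sep  2 10:00:00 2019',
    'Message-ID': '<5678@example.com>',
})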
test
MBox._casedict_to_dict
Convert a message in CaseInsensitiveDict to dict. This method also converts well known problematic headers, such as Message-ID and Date to a common name.
perceval/backends/core/mbox.py
def _casedict_to_dict(self, message):
    """Convert a message in CaseInsensitiveDict to dict.

    This method also converts well known problematic headers,
    such as Message-ID and Date to a common name.
    """
    message_id = message.pop(self.MESSAGE_ID_FIELD)
    date = message.pop(self.DATE_FIELD)

    msg = {k: v for k, v in message.items()}
    msg[self.MESSAGE_ID_FIELD] = message_id
    msg[self.DATE_FIELD] = date

    return msg
[ "Convert", "a", "message", "in", "CaseInsensitiveDict", "to", "dict", "." ]
chaoss/grimoirelab-perceval
python
https://github.com/chaoss/grimoirelab-perceval/blob/41c908605e88b7ebc3a536c643fa0f212eaf9e0e/perceval/backends/core/mbox.py#L284-L297
[ "def", "_casedict_to_dict", "(", "self", ",", "message", ")", ":", "message_id", "=", "message", ".", "pop", "(", "self", ".", "MESSAGE_ID_FIELD", ")", "date", "=", "message", ".", "pop", "(", "self", ".", "DATE_FIELD", ")", "msg", "=", "{", "k", ":", "v", "for", "k", ",", "v", "in", "message", ".", "items", "(", ")", "}", "msg", "[", "self", ".", "MESSAGE_ID_FIELD", "]", "=", "message_id", "msg", "[", "self", ".", "DATE_FIELD", "]", "=", "date", "return", "msg" ]
41c908605e88b7ebc3a536c643fa0f212eaf9e0e
test
_MBox.get_message
Return a Message representation or raise a KeyError.
perceval/backends/core/mbox.py
def get_message(self, key):
    """Return a Message representation or raise a KeyError."""

    start, stop = self._lookup(key)
    self._file.seek(start)

    from_line = self._file.readline().replace(mailbox.linesep, b'')
    string = self._file.read(stop - self._file.tell())
    msg = self._message_factory(string.replace(mailbox.linesep, b'\n'))

    try:
        msg.set_from(from_line[5:].decode('ascii'))
        return msg
    except UnicodeDecodeError:
        pass

    try:
        msg.set_from(from_line[5:].decode('utf-8'))
    except UnicodeDecodeError:
        msg.set_from(from_line[5:].decode('iso-8859-1'))

    return msg
[ "Return", "a", "Message", "representation", "or", "raise", "a", "KeyError", "." ]
chaoss/grimoirelab-perceval
python
https://github.com/chaoss/grimoirelab-perceval/blob/41c908605e88b7ebc3a536c643fa0f212eaf9e0e/perceval/backends/core/mbox.py#L303-L323
[ "def", "get_message", "(", "self", ",", "key", ")", ":", "start", ",", "stop", "=", "self", ".", "_lookup", "(", "key", ")", "self", ".", "_file", ".", "seek", "(", "start", ")", "from_line", "=", "self", ".", "_file", ".", "readline", "(", ")", ".", "replace", "(", "mailbox", ".", "linesep", ",", "b''", ")", "string", "=", "self", ".", "_file", ".", "read", "(", "stop", "-", "self", ".", "_file", ".", "tell", "(", ")", ")", "msg", "=", "self", ".", "_message_factory", "(", "string", ".", "replace", "(", "mailbox", ".", "linesep", ",", "b'\\n'", ")", ")", "try", ":", "msg", ".", "set_from", "(", "from_line", "[", "5", ":", "]", ".", "decode", "(", "'ascii'", ")", ")", "return", "msg", "except", "UnicodeDecodeError", ":", "pass", "try", ":", "msg", ".", "set_from", "(", "from_line", "[", "5", ":", "]", ".", "decode", "(", "'utf-8'", ")", ")", "except", "UnicodeDecodeError", ":", "msg", ".", "set_from", "(", "from_line", "[", "5", ":", "]", ".", "decode", "(", "'iso-8859-1'", ")", ")", "return", "msg" ]
41c908605e88b7ebc3a536c643fa0f212eaf9e0e
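The decode cascade above (ASCII, then UTF-8, then ISO-8859-1) can be written as a small helper; a sketch of the same idea, noting that Latin-1 accepts any byte sequence, so the chain always terminates:

def decode_from_line(raw: bytes) -> str:
    """Decode an mbox 'From ' line, mirroring the fallback order above."""
    for encoding in ('ascii', 'utf-8', 'iso-8859-1'):
        try:
            return raw.decode(encoding)
        except UnicodeDecodeError:
            continue
    # Unreachable in practice: iso-8859-1 maps every possible byte value.
    return raw.decode('iso-8859-1', errors='replace')


print(decode_from_line(b'jos\xe9@example.com'))   # falls through to iso-8859-1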
test
MailingList.mboxes
Get the mboxes managed by this mailing list. Returns the archives sorted by name. :returns: a list of `.MBoxArchive` objects
perceval/backends/core/mbox.py
def mboxes(self):
    """Get the mboxes managed by this mailing list.

    Returns the archives sorted by name.

    :returns: a list of `.MBoxArchive` objects
    """
    archives = []

    if os.path.isfile(self.dirpath):
        try:
            archives.append(MBoxArchive(self.dirpath))
        except OSError as e:
            logger.warning("Ignoring %s mbox due to: %s", self.dirpath, str(e))
    else:
        for root, _, files in os.walk(self.dirpath):
            for filename in sorted(files):
                try:
                    location = os.path.join(root, filename)
                    archives.append(MBoxArchive(location))
                except OSError as e:
                    logger.warning("Ignoring %s mbox due to: %s", filename, str(e))
    return archives
[ "Get", "the", "mboxes", "managed", "by", "this", "mailing", "list", "." ]
chaoss/grimoirelab-perceval
python
https://github.com/chaoss/grimoirelab-perceval/blob/41c908605e88b7ebc3a536c643fa0f212eaf9e0e/perceval/backends/core/mbox.py#L400-L422
[ "def", "mboxes", "(", "self", ")", ":", "archives", "=", "[", "]", "if", "os", ".", "path", ".", "isfile", "(", "self", ".", "dirpath", ")", ":", "try", ":", "archives", ".", "append", "(", "MBoxArchive", "(", "self", ".", "dirpath", ")", ")", "except", "OSError", "as", "e", ":", "logger", ".", "warning", "(", "\"Ignoring %s mbox due to: %s\"", ",", "self", ".", "dirpath", ",", "str", "(", "e", ")", ")", "else", ":", "for", "root", ",", "_", ",", "files", "in", "os", ".", "walk", "(", "self", ".", "dirpath", ")", ":", "for", "filename", "in", "sorted", "(", "files", ")", ":", "try", ":", "location", "=", "os", ".", "path", ".", "join", "(", "root", ",", "filename", ")", "archives", ".", "append", "(", "MBoxArchive", "(", "location", ")", ")", "except", "OSError", "as", "e", ":", "logger", ".", "warning", "(", "\"Ignoring %s mbox due to: %s\"", ",", "filename", ",", "str", "(", "e", ")", ")", "return", "archives" ]
41c908605e88b7ebc3a536c643fa0f212eaf9e0e
test
Git.fetch
Fetch commits. The method retrieves from a Git repository or a log file a list of commits. Commits are returned in the same order they were obtained. When `from_date` parameter is given it returns items committed since the given date. The list of `branches` is a list of strings, with the names of the branches to fetch. If the list of branches is empty, no commit is fetched. If the list of branches is None, all commits for all branches will be fetched. The parameter `latest_items` returns only those commits which are new since the last time this method was called. The parameter `no_update` returns all commits without performing an update of the repository before. Take into account that `from_date` and `branches` are ignored when the commits are fetched from a Git log file or when `latest_items` flag is set. The class raises a `RepositoryError` exception when an error occurs accessing the repository. :param category: the category of items to fetch :param from_date: obtain commits newer than a specific date (inclusive) :param to_date: obtain commits older than a specific date :param branches: names of branches to fetch from (default: None) :param latest_items: sync with the repository to fetch only the newest commits :param no_update: if enabled, don't update the repo with the latest changes :returns: a generator of commits
perceval/backends/core/git.py
def fetch(self, category=CATEGORY_COMMIT, from_date=DEFAULT_DATETIME,
          to_date=DEFAULT_LAST_DATETIME, branches=None,
          latest_items=False, no_update=False):
    """Fetch commits.

    The method retrieves from a Git repository or a log file
    a list of commits. Commits are returned in the same order
    they were obtained.

    When `from_date` parameter is given it returns items committed
    since the given date.

    The list of `branches` is a list of strings, with the names of
    the branches to fetch. If the list of branches is empty, no
    commit is fetched. If the list of branches is None, all commits
    for all branches will be fetched.

    The parameter `latest_items` returns only those commits which
    are new since the last time this method was called.

    The parameter `no_update` returns all commits without performing
    an update of the repository before.

    Take into account that `from_date` and `branches` are ignored
    when the commits are fetched from a Git log file or when
    `latest_items` flag is set.

    The class raises a `RepositoryError` exception when an error
    occurs accessing the repository.

    :param category: the category of items to fetch
    :param from_date: obtain commits newer than a specific date
        (inclusive)
    :param to_date: obtain commits older than a specific date
    :param branches: names of branches to fetch from (default: None)
    :param latest_items: sync with the repository to fetch only the
        newest commits
    :param no_update: if enabled, don't update the repo with the
        latest changes

    :returns: a generator of commits
    """
    if not from_date:
        from_date = DEFAULT_DATETIME
    if not to_date:
        to_date = DEFAULT_LAST_DATETIME

    kwargs = {
        'from_date': from_date,
        'to_date': to_date,
        'branches': branches,
        'latest_items': latest_items,
        'no_update': no_update
    }
    items = super().fetch(category, **kwargs)

    return items
[ "Fetch", "commits", "." ]
chaoss/grimoirelab-perceval
python
https://github.com/chaoss/grimoirelab-perceval/blob/41c908605e88b7ebc3a536c643fa0f212eaf9e0e/perceval/backends/core/git.py#L78-L132
[ "def", "fetch", "(", "self", ",", "category", "=", "CATEGORY_COMMIT", ",", "from_date", "=", "DEFAULT_DATETIME", ",", "to_date", "=", "DEFAULT_LAST_DATETIME", ",", "branches", "=", "None", ",", "latest_items", "=", "False", ",", "no_update", "=", "False", ")", ":", "if", "not", "from_date", ":", "from_date", "=", "DEFAULT_DATETIME", "if", "not", "to_date", ":", "to_date", "=", "DEFAULT_LAST_DATETIME", "kwargs", "=", "{", "'from_date'", ":", "from_date", ",", "'to_date'", ":", "to_date", ",", "'branches'", ":", "branches", ",", "'latest_items'", ":", "latest_items", ",", "'no_update'", ":", "no_update", "}", "items", "=", "super", "(", ")", ".", "fetch", "(", "category", ",", "*", "*", "kwargs", ")", "return", "items" ]
41c908605e88b7ebc3a536c643fa0f212eaf9e0e
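A hedged usage sketch of the Git backend; the `(uri, gitpath)` constructor arguments and the `data.commit` key are assumptions based on the conventions visible in this excerpt, and the repository URL and local path are placeholders.

from perceval.backends.core.git import Git

# Assumed constructor: origin URI plus a local path where the clone is kept.
repo = Git('https://example.com/repo.git', '/tmp/example-repo-git')

for item in repo.fetch(branches=['master']):
    # Items are wrapped by Backend.metadata(); the parsed commit sits under 'data'.
    print(item['data']['commit'])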
test
Git.fetch_items
Fetch the commits :param category: the category of items to fetch :param kwargs: backend arguments :returns: a generator of items
perceval/backends/core/git.py
def fetch_items(self, category, **kwargs):
    """Fetch the commits

    :param category: the category of items to fetch
    :param kwargs: backend arguments

    :returns: a generator of items
    """
    from_date = kwargs['from_date']
    to_date = kwargs['to_date']
    branches = kwargs['branches']
    latest_items = kwargs['latest_items']
    no_update = kwargs['no_update']

    ncommits = 0

    try:
        if os.path.isfile(self.gitpath):
            commits = self.__fetch_from_log()
        else:
            commits = self.__fetch_from_repo(from_date, to_date, branches,
                                             latest_items, no_update)

        for commit in commits:
            yield commit
            ncommits += 1
    except EmptyRepositoryError:
        pass

    logger.info("Fetch process completed: %s commits fetched", ncommits)
[ "Fetch", "the", "commits" ]
chaoss/grimoirelab-perceval
python
https://github.com/chaoss/grimoirelab-perceval/blob/41c908605e88b7ebc3a536c643fa0f212eaf9e0e/perceval/backends/core/git.py#L134-L164
[ "def", "fetch_items", "(", "self", ",", "category", ",", "*", "*", "kwargs", ")", ":", "from_date", "=", "kwargs", "[", "'from_date'", "]", "to_date", "=", "kwargs", "[", "'to_date'", "]", "branches", "=", "kwargs", "[", "'branches'", "]", "latest_items", "=", "kwargs", "[", "'latest_items'", "]", "no_update", "=", "kwargs", "[", "'no_update'", "]", "ncommits", "=", "0", "try", ":", "if", "os", ".", "path", ".", "isfile", "(", "self", ".", "gitpath", ")", ":", "commits", "=", "self", ".", "__fetch_from_log", "(", ")", "else", ":", "commits", "=", "self", ".", "__fetch_from_repo", "(", "from_date", ",", "to_date", ",", "branches", ",", "latest_items", ",", "no_update", ")", "for", "commit", "in", "commits", ":", "yield", "commit", "ncommits", "+=", "1", "except", "EmptyRepositoryError", ":", "pass", "logger", ".", "info", "(", "\"Fetch process completed: %s commits fetched\"", ",", "ncommits", ")" ]
41c908605e88b7ebc3a536c643fa0f212eaf9e0e
test
Git.parse_git_log_from_file
Parse a Git log file. The method parses the Git log file and returns an iterator of dictionaries. Each one of these contains a commit. :param filepath: path to the log file :returns: a generator of parsed commits :raises ParseError: raised when the format of the Git log file is invalid :raises OSError: raised when an error occurs reading the given file
perceval/backends/core/git.py
def parse_git_log_from_file(filepath):
    """Parse a Git log file.

    The method parses the Git log file and returns an iterator of
    dictionaries. Each one of these contains a commit.

    :param filepath: path to the log file

    :returns: a generator of parsed commits

    :raises ParseError: raised when the format of the Git log file
        is invalid
    :raises OSError: raised when an error occurs reading the
        given file
    """
    with open(filepath, 'r', errors='surrogateescape',
              newline=os.linesep) as f:
        parser = GitParser(f)

        for commit in parser.parse():
            yield commit
[ "Parse", "a", "Git", "log", "file", "." ]
chaoss/grimoirelab-perceval
python
https://github.com/chaoss/grimoirelab-perceval/blob/41c908605e88b7ebc3a536c643fa0f212eaf9e0e/perceval/backends/core/git.py#L215-L235
[ "def", "parse_git_log_from_file", "(", "filepath", ")", ":", "with", "open", "(", "filepath", ",", "'r'", ",", "errors", "=", "'surrogateescape'", ",", "newline", "=", "os", ".", "linesep", ")", "as", "f", ":", "parser", "=", "GitParser", "(", "f", ")", "for", "commit", "in", "parser", ".", "parse", "(", ")", ":", "yield", "commit" ]
41c908605e88b7ebc3a536c643fa0f212eaf9e0e
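A hedged sketch of parsing a pre-generated log file. The file must already be in the layout `GitParser` expects (the format Perceval itself asks `git log` for), which this excerpt does not spell out, and the path used here is hypothetical.

from perceval.backends.core.git import Git

# '/tmp/example-repo.log' is a hypothetical, previously generated Git log file.
for commit in Git.parse_git_log_from_file('/tmp/example-repo.log'):
    print(commit['commit'], commit.get('Author'))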
test
GitCommand._pre_init
Initialize repositories directory path
perceval/backends/core/git.py
def _pre_init(self):
    """Initialize repositories directory path"""

    if self.parsed_args.git_log:
        git_path = self.parsed_args.git_log
    elif not self.parsed_args.git_path:
        base_path = os.path.expanduser('~/.perceval/repositories/')
        processed_uri = self.parsed_args.uri.lstrip('/')
        git_path = os.path.join(base_path, processed_uri) + '-git'
    else:
        git_path = self.parsed_args.git_path

    setattr(self.parsed_args, 'gitpath', git_path)
[ "Initialize", "repositories", "directory", "path" ]
chaoss/grimoirelab-perceval
python
https://github.com/chaoss/grimoirelab-perceval/blob/41c908605e88b7ebc3a536c643fa0f212eaf9e0e/perceval/backends/core/git.py#L330-L342
[ "def", "_pre_init", "(", "self", ")", ":", "if", "self", ".", "parsed_args", ".", "git_log", ":", "git_path", "=", "self", ".", "parsed_args", ".", "git_log", "elif", "not", "self", ".", "parsed_args", ".", "git_path", ":", "base_path", "=", "os", ".", "path", ".", "expanduser", "(", "'~/.perceval/repositories/'", ")", "processed_uri", "=", "self", ".", "parsed_args", ".", "uri", ".", "lstrip", "(", "'/'", ")", "git_path", "=", "os", ".", "path", ".", "join", "(", "base_path", ",", "processed_uri", ")", "+", "'-git'", "else", ":", "git_path", "=", "self", ".", "parsed_args", ".", "git_path", "setattr", "(", "self", ".", "parsed_args", ",", "'gitpath'", ",", "git_path", ")" ]
41c908605e88b7ebc3a536c643fa0f212eaf9e0e
test
GitCommand.setup_cmd_parser
Returns the Git argument parser.
perceval/backends/core/git.py
def setup_cmd_parser(cls):
    """Returns the Git argument parser."""

    parser = BackendCommandArgumentParser(cls.BACKEND.CATEGORIES,
                                          from_date=True,
                                          to_date=True)

    # Optional arguments
    group = parser.parser.add_argument_group('Git arguments')
    group.add_argument('--branches', dest='branches',
                       nargs='+', type=str, default=None,
                       help="Fetch commits only from these branches")

    # Mutual exclusive parameters
    exgroup = group.add_mutually_exclusive_group()
    exgroup.add_argument('--git-path', dest='git_path',
                         help="Path where the Git repository will be cloned")
    exgroup.add_argument('--git-log', dest='git_log',
                         help="Path to the Git log file")

    exgroup_fetch = group.add_mutually_exclusive_group()
    exgroup_fetch.add_argument('--latest-items', dest='latest_items',
                               action='store_true',
                               help="Fetch latest commits added to the repository")
    exgroup_fetch.add_argument('--no-update', dest='no_update',
                               action='store_true',
                               help="Fetch all commits without updating the repository")

    # Required arguments
    parser.parser.add_argument('uri',
                               help="URI of the Git log repository")

    return parser
[ "Returns", "the", "Git", "argument", "parser", "." ]
chaoss/grimoirelab-perceval
python
https://github.com/chaoss/grimoirelab-perceval/blob/41c908605e88b7ebc3a536c643fa0f212eaf9e0e/perceval/backends/core/git.py#L345-L377
[ "def", "setup_cmd_parser", "(", "cls", ")", ":", "parser", "=", "BackendCommandArgumentParser", "(", "cls", ".", "BACKEND", ".", "CATEGORIES", ",", "from_date", "=", "True", ",", "to_date", "=", "True", ")", "# Optional arguments", "group", "=", "parser", ".", "parser", ".", "add_argument_group", "(", "'Git arguments'", ")", "group", ".", "add_argument", "(", "'--branches'", ",", "dest", "=", "'branches'", ",", "nargs", "=", "'+'", ",", "type", "=", "str", ",", "default", "=", "None", ",", "help", "=", "\"Fetch commits only from these branches\"", ")", "# Mutual exclusive parameters", "exgroup", "=", "group", ".", "add_mutually_exclusive_group", "(", ")", "exgroup", ".", "add_argument", "(", "'--git-path'", ",", "dest", "=", "'git_path'", ",", "help", "=", "\"Path where the Git repository will be cloned\"", ")", "exgroup", ".", "add_argument", "(", "'--git-log'", ",", "dest", "=", "'git_log'", ",", "help", "=", "\"Path to the Git log file\"", ")", "exgroup_fetch", "=", "group", ".", "add_mutually_exclusive_group", "(", ")", "exgroup_fetch", ".", "add_argument", "(", "'--latest-items'", ",", "dest", "=", "'latest_items'", ",", "action", "=", "'store_true'", ",", "help", "=", "\"Fetch latest commits added to the repository\"", ")", "exgroup_fetch", ".", "add_argument", "(", "'--no-update'", ",", "dest", "=", "'no_update'", ",", "action", "=", "'store_true'", ",", "help", "=", "\"Fetch all commits without updating the repository\"", ")", "# Required arguments", "parser", ".", "parser", ".", "add_argument", "(", "'uri'", ",", "help", "=", "\"URI of the Git log repository\"", ")", "return", "parser" ]
41c908605e88b7ebc3a536c643fa0f212eaf9e0e
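A minimal sketch of how the parser returned by GitCommand.setup_cmd_parser might be used; the URI and option values are hypothetical, and it assumes BackendCommandArgumentParser.parse accepts the argument strings and returns an argparse.Namespace.

parser = GitCommand.setup_cmd_parser()
args = parser.parse('--git-path', '/tmp/perceval.git',
                    'https://github.com/chaoss/grimoirelab-perceval')
print(args.uri, args.git_path)   # parsed positional URI and clone path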
test
GitParser.parse
Parse the Git log stream.
perceval/backends/core/git.py
def parse(self): """Parse the Git log stream.""" for line in self.stream: line = line.rstrip('\n') parsed = False self.nline += 1 while not parsed: parsed = self.handlers[self.state](line) if self.state == self.COMMIT and self.commit: commit = self._build_commit() logger.debug("Commit %s parsed", commit['commit']) yield commit # Return the last commit, if any if self.commit: commit = self._build_commit() logger.debug("Commit %s parsed", commit['commit']) yield commit
def parse(self): """Parse the Git log stream.""" for line in self.stream: line = line.rstrip('\n') parsed = False self.nline += 1 while not parsed: parsed = self.handlers[self.state](line) if self.state == self.COMMIT and self.commit: commit = self._build_commit() logger.debug("Commit %s parsed", commit['commit']) yield commit # Return the last commit, if any if self.commit: commit = self._build_commit() logger.debug("Commit %s parsed", commit['commit']) yield commit
[ "Parse", "the", "Git", "log", "stream", "." ]
chaoss/grimoirelab-perceval
python
https://github.com/chaoss/grimoirelab-perceval/blob/41c908605e88b7ebc3a536c643fa0f212eaf9e0e/perceval/backends/core/git.py#L525-L545
[ "def", "parse", "(", "self", ")", ":", "for", "line", "in", "self", ".", "stream", ":", "line", "=", "line", ".", "rstrip", "(", "'\\n'", ")", "parsed", "=", "False", "self", ".", "nline", "+=", "1", "while", "not", "parsed", ":", "parsed", "=", "self", ".", "handlers", "[", "self", ".", "state", "]", "(", "line", ")", "if", "self", ".", "state", "==", "self", ".", "COMMIT", "and", "self", ".", "commit", ":", "commit", "=", "self", ".", "_build_commit", "(", ")", "logger", ".", "debug", "(", "\"Commit %s parsed\"", ",", "commit", "[", "'commit'", "]", ")", "yield", "commit", "# Return the last commit, if any", "if", "self", ".", "commit", ":", "commit", "=", "self", ".", "_build_commit", "(", ")", "logger", ".", "debug", "(", "\"Commit %s parsed\"", ",", "commit", "[", "'commit'", "]", ")", "yield", "commit" ]
41c908605e88b7ebc3a536c643fa0f212eaf9e0e
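A minimal sketch of parsing a pre-generated log file with GitParser; the file path is hypothetical and the log is assumed to have been produced with the pretty-print options used by GitRepository.log.

with open('/tmp/perceval-gitlog.txt', errors='surrogateescape') as fd:
    for commit in GitParser(fd).parse():
        # each yielded item is a dict with the raw commit fields
        print(commit['commit'])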
test
GitParser.__get_old_filepath
Get the old filepath of a moved/renamed file. Moved or renamed files can be found in the log with any of the following patterns: 'old_name => new_name' '{old_prefix => new_prefix}/name' 'name/{old_suffix => new_suffix}' This method returns the filepath before the file was moved or renamed.
perceval/backends/core/git.py
def __get_old_filepath(self, f): """Get the old filepath of a moved/renamed file. Moved or renamed files can be found in the log with any of the next patterns: 'old_name => new_name' '{old_prefix => new_prefix}/name' 'name/{old_suffix => new_suffix}' This method returns the filepath before the file was moved or renamed. """ i = f.find('{') j = f.find('}') if i > -1 and j > -1: prefix = f[0:i] inner = f[i + 1:f.find(' => ', i)] suffix = f[j + 1:] return prefix + inner + suffix elif ' => ' in f: return f.split(' => ')[0] else: return f
def __get_old_filepath(self, f): """Get the old filepath of a moved/renamed file. Moved or renamed files can be found in the log with any of the next patterns: 'old_name => new_name' '{old_prefix => new_prefix}/name' 'name/{old_suffix => new_suffix}' This method returns the filepath before the file was moved or renamed. """ i = f.find('{') j = f.find('}') if i > -1 and j > -1: prefix = f[0:i] inner = f[i + 1:f.find(' => ', i)] suffix = f[j + 1:] return prefix + inner + suffix elif ' => ' in f: return f.split(' => ')[0] else: return f
[ "Get", "the", "old", "filepath", "of", "a", "moved", "/", "renamed", "file", "." ]
chaoss/grimoirelab-perceval
python
https://github.com/chaoss/grimoirelab-perceval/blob/41c908605e88b7ebc3a536c643fa0f212eaf9e0e/perceval/backends/core/git.py#L704-L727
[ "def", "__get_old_filepath", "(", "self", ",", "f", ")", ":", "i", "=", "f", ".", "find", "(", "'{'", ")", "j", "=", "f", ".", "find", "(", "'}'", ")", "if", "i", ">", "-", "1", "and", "j", ">", "-", "1", ":", "prefix", "=", "f", "[", "0", ":", "i", "]", "inner", "=", "f", "[", "i", "+", "1", ":", "f", ".", "find", "(", "' => '", ",", "i", ")", "]", "suffix", "=", "f", "[", "j", "+", "1", ":", "]", "return", "prefix", "+", "inner", "+", "suffix", "elif", "' => '", "in", "f", ":", "return", "f", ".", "split", "(", "' => '", ")", "[", "0", "]", "else", ":", "return", "f" ]
41c908605e88b7ebc3a536c643fa0f212eaf9e0e
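A standalone replica of the rename-resolution logic above, shown only to illustrate the three log patterns it handles; it is not part of the Perceval API.

def old_filepath(f):
    i, j = f.find('{'), f.find('}')
    if i > -1 and j > -1:
        # '{old_prefix => new_prefix}/name' or 'name/{old_suffix => new_suffix}'
        return f[0:i] + f[i + 1:f.find(' => ', i)] + f[j + 1:]
    elif ' => ' in f:
        # 'old_name => new_name'
        return f.split(' => ')[0]
    return f

assert old_filepath('old_name => new_name') == 'old_name'
assert old_filepath('{old_prefix => new_prefix}/name') == 'old_prefix/name'
assert old_filepath('name/{old_suffix => new_suffix}') == 'name/old_suffix'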
test
GitRepository.clone
Clone a Git repository. Make a bare copy of the repository stored in `uri` into `dirpath`. The repository may be either local or remote. :param uri: URI of the repository :param dirpath: directory where the repository will be cloned :returns: a `GitRepository` instance for the cloned repository :raises RepositoryError: when an error occurs cloning the given repository
perceval/backends/core/git.py
def clone(cls, uri, dirpath): """Clone a Git repository. Make a bare copy of the repository stored in `uri` into `dirpath`. The repository would be either local or remote. :param uri: URI of the repository :param dirtpath: directory where the repository will be cloned :returns: a `GitRepository` class having cloned the repository :raises RepositoryError: when an error occurs cloning the given repository """ cmd = ['git', 'clone', '--bare', uri, dirpath] env = { 'LANG': 'C', 'HOME': os.getenv('HOME', '') } cls._exec(cmd, env=env) logger.debug("Git %s repository cloned into %s", uri, dirpath) return cls(uri, dirpath)
def clone(cls, uri, dirpath): """Clone a Git repository. Make a bare copy of the repository stored in `uri` into `dirpath`. The repository would be either local or remote. :param uri: URI of the repository :param dirtpath: directory where the repository will be cloned :returns: a `GitRepository` class having cloned the repository :raises RepositoryError: when an error occurs cloning the given repository """ cmd = ['git', 'clone', '--bare', uri, dirpath] env = { 'LANG': 'C', 'HOME': os.getenv('HOME', '') } cls._exec(cmd, env=env) logger.debug("Git %s repository cloned into %s", uri, dirpath) return cls(uri, dirpath)
[ "Clone", "a", "Git", "repository", "." ]
chaoss/grimoirelab-perceval
python
https://github.com/chaoss/grimoirelab-perceval/blob/41c908605e88b7ebc3a536c643fa0f212eaf9e0e/perceval/backends/core/git.py#L804-L829
[ "def", "clone", "(", "cls", ",", "uri", ",", "dirpath", ")", ":", "cmd", "=", "[", "'git'", ",", "'clone'", ",", "'--bare'", ",", "uri", ",", "dirpath", "]", "env", "=", "{", "'LANG'", ":", "'C'", ",", "'HOME'", ":", "os", ".", "getenv", "(", "'HOME'", ",", "''", ")", "}", "cls", ".", "_exec", "(", "cmd", ",", "env", "=", "env", ")", "logger", ".", "debug", "(", "\"Git %s repository cloned into %s\"", ",", "uri", ",", "dirpath", ")", "return", "cls", "(", "uri", ",", "dirpath", ")" ]
41c908605e88b7ebc3a536c643fa0f212eaf9e0e
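A minimal usage sketch for GitRepository.clone; the URI and target directory are hypothetical.

repo = GitRepository.clone('https://github.com/chaoss/grimoirelab-perceval',
                           '/tmp/grimoirelab-perceval-git')
print(repo.uri, repo.dirpath)   # the instance keeps both the URI and the clone path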
test
GitRepository.count_objects
Count the objects of a repository. The method returns the total number of objects (packed and unpacked) available on the repository. :raises RepositoryError: when an error occurs counting the objects of a repository
perceval/backends/core/git.py
def count_objects(self): """Count the objects of a repository. The method returns the total number of objects (packed and unpacked) available on the repository. :raises RepositoryError: when an error occurs counting the objects of a repository """ cmd_count = ['git', 'count-objects', '-v'] outs = self._exec(cmd_count, cwd=self.dirpath, env=self.gitenv) outs = outs.decode('utf-8', errors='surrogateescape').rstrip() try: cobjs = {k: v for k, v in (x.split(': ') for x in outs.split('\n'))} nobjs = int(cobjs['count']) + int(cobjs['in-pack']) except KeyError as e: error = "unable to parse 'count-objects' output; reason: '%s' entry not found" \ % e.args[0] raise RepositoryError(cause=error) except ValueError as e: error = "unable to parse 'count-objects' output; reason: %s" % str(e) raise RepositoryError(cause=error) logger.debug("Git %s repository has %s objects", self.uri, str(nobjs)) return nobjs
def count_objects(self): """Count the objects of a repository. The method returns the total number of objects (packed and unpacked) available on the repository. :raises RepositoryError: when an error occurs counting the objects of a repository """ cmd_count = ['git', 'count-objects', '-v'] outs = self._exec(cmd_count, cwd=self.dirpath, env=self.gitenv) outs = outs.decode('utf-8', errors='surrogateescape').rstrip() try: cobjs = {k: v for k, v in (x.split(': ') for x in outs.split('\n'))} nobjs = int(cobjs['count']) + int(cobjs['in-pack']) except KeyError as e: error = "unable to parse 'count-objects' output; reason: '%s' entry not found" \ % e.args[0] raise RepositoryError(cause=error) except ValueError as e: error = "unable to parse 'count-objects' output; reason: %s" % str(e) raise RepositoryError(cause=error) logger.debug("Git %s repository has %s objects", self.uri, str(nobjs)) return nobjs
[ "Count", "the", "objects", "of", "a", "repository", "." ]
chaoss/grimoirelab-perceval
python
https://github.com/chaoss/grimoirelab-perceval/blob/41c908605e88b7ebc3a536c643fa0f212eaf9e0e/perceval/backends/core/git.py#L831-L859
[ "def", "count_objects", "(", "self", ")", ":", "cmd_count", "=", "[", "'git'", ",", "'count-objects'", ",", "'-v'", "]", "outs", "=", "self", ".", "_exec", "(", "cmd_count", ",", "cwd", "=", "self", ".", "dirpath", ",", "env", "=", "self", ".", "gitenv", ")", "outs", "=", "outs", ".", "decode", "(", "'utf-8'", ",", "errors", "=", "'surrogateescape'", ")", ".", "rstrip", "(", ")", "try", ":", "cobjs", "=", "{", "k", ":", "v", "for", "k", ",", "v", "in", "(", "x", ".", "split", "(", "': '", ")", "for", "x", "in", "outs", ".", "split", "(", "'\\n'", ")", ")", "}", "nobjs", "=", "int", "(", "cobjs", "[", "'count'", "]", ")", "+", "int", "(", "cobjs", "[", "'in-pack'", "]", ")", "except", "KeyError", "as", "e", ":", "error", "=", "\"unable to parse 'count-objects' output; reason: '%s' entry not found\"", "%", "e", ".", "args", "[", "0", "]", "raise", "RepositoryError", "(", "cause", "=", "error", ")", "except", "ValueError", "as", "e", ":", "error", "=", "\"unable to parse 'count-objects' output; reason: %s\"", "%", "str", "(", "e", ")", "raise", "RepositoryError", "(", "cause", "=", "error", ")", "logger", ".", "debug", "(", "\"Git %s repository has %s objects\"", ",", "self", ".", "uri", ",", "str", "(", "nobjs", ")", ")", "return", "nobjs" ]
41c908605e88b7ebc3a536c643fa0f212eaf9e0e
test
GitRepository.is_detached
Check if the repo is in a detached state. The repository is in a detached state when HEAD is not a symbolic reference. :returns: whether the repository is detached or not :raises RepositoryError: when an error occurs checking the state of the repository
perceval/backends/core/git.py
def is_detached(self): """Check if the repo is in a detached state. The repository is in a detached state when HEAD is not a symbolic reference. :returns: whether the repository is detached or not :raises RepositoryError: when an error occurs checking the state of the repository """ cmd_sym = ['git', 'symbolic-ref', 'HEAD'] try: self._exec(cmd_sym, cwd=self.dirpath, env=self.gitenv) except RepositoryError as e: if e.msg.find("ref HEAD is not a symbolic ref") == -1: raise e return True else: return False
def is_detached(self): """Check if the repo is in a detached state. The repository is in a detached state when HEAD is not a symbolic reference. :returns: whether the repository is detached or not :raises RepositoryError: when an error occurs checking the state of the repository """ cmd_sym = ['git', 'symbolic-ref', 'HEAD'] try: self._exec(cmd_sym, cwd=self.dirpath, env=self.gitenv) except RepositoryError as e: if e.msg.find("ref HEAD is not a symbolic ref") == -1: raise e return True else: return False
[ "Check", "if", "the", "repo", "is", "in", "a", "detached", "state", "." ]
chaoss/grimoirelab-perceval
python
https://github.com/chaoss/grimoirelab-perceval/blob/41c908605e88b7ebc3a536c643fa0f212eaf9e0e/perceval/backends/core/git.py#L861-L881
[ "def", "is_detached", "(", "self", ")", ":", "cmd_sym", "=", "[", "'git'", ",", "'symbolic-ref'", ",", "'HEAD'", "]", "try", ":", "self", ".", "_exec", "(", "cmd_sym", ",", "cwd", "=", "self", ".", "dirpath", ",", "env", "=", "self", ".", "gitenv", ")", "except", "RepositoryError", "as", "e", ":", "if", "e", ".", "msg", ".", "find", "(", "\"ref HEAD is not a symbolic ref\"", ")", "==", "-", "1", ":", "raise", "e", "return", "True", "else", ":", "return", "False" ]
41c908605e88b7ebc3a536c643fa0f212eaf9e0e
test
GitRepository.update
Update the repository from its remote. Calling this method synchronizes the repository with its remote using the 'fetch' command for 'heads' refs. Any commit stored only in the local copy will be removed; refs will be overwritten. :raises RepositoryError: when an error occurs updating the repository
perceval/backends/core/git.py
def update(self): """Update repository from its remote. Calling this method, the repository will be synchronized with the remote repository using 'fetch' command for 'heads' refs. Any commit stored in the local copy will be removed; refs will be overwritten. :raises RepositoryError: when an error occurs updating the repository """ cmd_update = ['git', 'fetch', 'origin', '+refs/heads/*:refs/heads/*', '--prune'] self._exec(cmd_update, cwd=self.dirpath, env=self.gitenv) logger.debug("Git %s repository updated into %s", self.uri, self.dirpath)
def update(self): """Update repository from its remote. Calling this method, the repository will be synchronized with the remote repository using 'fetch' command for 'heads' refs. Any commit stored in the local copy will be removed; refs will be overwritten. :raises RepositoryError: when an error occurs updating the repository """ cmd_update = ['git', 'fetch', 'origin', '+refs/heads/*:refs/heads/*', '--prune'] self._exec(cmd_update, cwd=self.dirpath, env=self.gitenv) logger.debug("Git %s repository updated into %s", self.uri, self.dirpath)
[ "Update", "repository", "from", "its", "remote", "." ]
chaoss/grimoirelab-perceval
python
https://github.com/chaoss/grimoirelab-perceval/blob/41c908605e88b7ebc3a536c643fa0f212eaf9e0e/perceval/backends/core/git.py#L895-L910
[ "def", "update", "(", "self", ")", ":", "cmd_update", "=", "[", "'git'", ",", "'fetch'", ",", "'origin'", ",", "'+refs/heads/*:refs/heads/*'", ",", "'--prune'", "]", "self", ".", "_exec", "(", "cmd_update", ",", "cwd", "=", "self", ".", "dirpath", ",", "env", "=", "self", ".", "gitenv", ")", "logger", ".", "debug", "(", "\"Git %s repository updated into %s\"", ",", "self", ".", "uri", ",", "self", ".", "dirpath", ")" ]
41c908605e88b7ebc3a536c643fa0f212eaf9e0e
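A short sketch of the maintenance helpers documented in the three records above (count_objects, is_detached and update), assuming repo is the GitRepository instance cloned in the earlier example; the combination shown is illustrative, not a prescribed workflow.

nobjs = repo.count_objects()     # total number of packed and unpacked objects
if not repo.is_detached():       # HEAD is a symbolic reference
    repo.update()                # fetch 'heads' refs from origin, pruning stale ones
print(nobjs)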
test
GitRepository.sync
Keep the repository in sync. This method will synchronize the repository with its 'origin', fetching the newest objects and updating references. It uses low-level commands which make it possible to keep track of which things have changed in the repository. The method also returns a list of hashes related to the new commits fetched during the process. :returns: list of new commits :raises RepositoryError: when an error occurs synchronizing the repository
perceval/backends/core/git.py
def sync(self): """Keep the repository in sync. This method will synchronize the repository with its 'origin', fetching newest objects and updating references. It uses low level commands which allow to keep track of which things have changed in the repository. The method also returns a list of hashes related to the new commits fetched during the process. :returns: list of new commits :raises RepositoryError: when an error occurs synchronizing the repository """ pack_name, refs = self._fetch_pack() if pack_name: commits = self._read_commits_from_pack(pack_name) else: commits = [] logger.debug("Git repository %s (%s) does not have any new object", self.uri, self.dirpath) self._update_references(refs) logger.debug("Git repository %s (%s) is synced", self.uri, self.dirpath) return commits
def sync(self): """Keep the repository in sync. This method will synchronize the repository with its 'origin', fetching newest objects and updating references. It uses low level commands which allow to keep track of which things have changed in the repository. The method also returns a list of hashes related to the new commits fetched during the process. :returns: list of new commits :raises RepositoryError: when an error occurs synchronizing the repository """ pack_name, refs = self._fetch_pack() if pack_name: commits = self._read_commits_from_pack(pack_name) else: commits = [] logger.debug("Git repository %s (%s) does not have any new object", self.uri, self.dirpath) self._update_references(refs) logger.debug("Git repository %s (%s) is synced", self.uri, self.dirpath) return commits
[ "Keep", "the", "repository", "in", "sync", "." ]
chaoss/grimoirelab-perceval
python
https://github.com/chaoss/grimoirelab-perceval/blob/41c908605e88b7ebc3a536c643fa0f212eaf9e0e/perceval/backends/core/git.py#L912-L942
[ "def", "sync", "(", "self", ")", ":", "pack_name", ",", "refs", "=", "self", ".", "_fetch_pack", "(", ")", "if", "pack_name", ":", "commits", "=", "self", ".", "_read_commits_from_pack", "(", "pack_name", ")", "else", ":", "commits", "=", "[", "]", "logger", ".", "debug", "(", "\"Git repository %s (%s) does not have any new object\"", ",", "self", ".", "uri", ",", "self", ".", "dirpath", ")", "self", ".", "_update_references", "(", "refs", ")", "logger", ".", "debug", "(", "\"Git repository %s (%s) is synced\"", ",", "self", ".", "uri", ",", "self", ".", "dirpath", ")", "return", "commits" ]
41c908605e88b7ebc3a536c643fa0f212eaf9e0e
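A minimal sketch of incremental synchronization with sync(), again assuming repo is an existing GitRepository instance.

new_commits = repo.sync()        # hashes of commits fetched in this run
for commit_hash in new_commits:
    print('new commit:', commit_hash)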
test
GitRepository.rev_list
Read the list of commits from the repository. The list of branches is a list of strings, with the names of the branches to fetch. If the list of branches is empty, no commit is fetched. If the list of branches is None, all commits for all branches will be fetched. The method returns the Git rev-list of the repository using the following options: git rev-list --topo-order :param branches: names of branches to fetch from (default: None) :raises EmptyRepositoryError: when the repository is empty and the action cannot be performed :raises RepositoryError: when an error occurs executing the command
perceval/backends/core/git.py
def rev_list(self, branches=None): """Read the list commits from the repository The list of branches is a list of strings, with the names of the branches to fetch. If the list of branches is empty, no commit is fetched. If the list of branches is None, all commits for all branches will be fetched. The method returns the Git rev-list of the repository using the following options: git rev-list --topo-order :param branches: names of branches to fetch from (default: None) :raises EmptyRepositoryError: when the repository is empty and the action cannot be performed :raises RepositoryError: when an error occurs executing the command """ if self.is_empty(): logger.warning("Git %s repository is empty; unable to get the rev-list", self.uri) raise EmptyRepositoryError(repository=self.uri) cmd_rev_list = ['git', 'rev-list', '--topo-order'] if branches is None: cmd_rev_list.extend(['--branches', '--tags', '--remotes=origin']) elif len(branches) == 0: cmd_rev_list.extend(['--branches', '--tags', '--max-count=0']) else: branches = ['refs/heads/' + branch for branch in branches] cmd_rev_list.extend(branches) for line in self._exec_nb(cmd_rev_list, cwd=self.dirpath, env=self.gitenv): yield line.rstrip('\n') logger.debug("Git rev-list fetched from %s repository (%s)", self.uri, self.dirpath)
def rev_list(self, branches=None): """Read the list commits from the repository The list of branches is a list of strings, with the names of the branches to fetch. If the list of branches is empty, no commit is fetched. If the list of branches is None, all commits for all branches will be fetched. The method returns the Git rev-list of the repository using the following options: git rev-list --topo-order :param branches: names of branches to fetch from (default: None) :raises EmptyRepositoryError: when the repository is empty and the action cannot be performed :raises RepositoryError: when an error occurs executing the command """ if self.is_empty(): logger.warning("Git %s repository is empty; unable to get the rev-list", self.uri) raise EmptyRepositoryError(repository=self.uri) cmd_rev_list = ['git', 'rev-list', '--topo-order'] if branches is None: cmd_rev_list.extend(['--branches', '--tags', '--remotes=origin']) elif len(branches) == 0: cmd_rev_list.extend(['--branches', '--tags', '--max-count=0']) else: branches = ['refs/heads/' + branch for branch in branches] cmd_rev_list.extend(branches) for line in self._exec_nb(cmd_rev_list, cwd=self.dirpath, env=self.gitenv): yield line.rstrip('\n') logger.debug("Git rev-list fetched from %s repository (%s)", self.uri, self.dirpath)
[ "Read", "the", "list", "commits", "from", "the", "repository" ]
chaoss/grimoirelab-perceval
python
https://github.com/chaoss/grimoirelab-perceval/blob/41c908605e88b7ebc3a536c643fa0f212eaf9e0e/perceval/backends/core/git.py#L944-L982
[ "def", "rev_list", "(", "self", ",", "branches", "=", "None", ")", ":", "if", "self", ".", "is_empty", "(", ")", ":", "logger", ".", "warning", "(", "\"Git %s repository is empty; unable to get the rev-list\"", ",", "self", ".", "uri", ")", "raise", "EmptyRepositoryError", "(", "repository", "=", "self", ".", "uri", ")", "cmd_rev_list", "=", "[", "'git'", ",", "'rev-list'", ",", "'--topo-order'", "]", "if", "branches", "is", "None", ":", "cmd_rev_list", ".", "extend", "(", "[", "'--branches'", ",", "'--tags'", ",", "'--remotes=origin'", "]", ")", "elif", "len", "(", "branches", ")", "==", "0", ":", "cmd_rev_list", ".", "extend", "(", "[", "'--branches'", ",", "'--tags'", ",", "'--max-count=0'", "]", ")", "else", ":", "branches", "=", "[", "'refs/heads/'", "+", "branch", "for", "branch", "in", "branches", "]", "cmd_rev_list", ".", "extend", "(", "branches", ")", "for", "line", "in", "self", ".", "_exec_nb", "(", "cmd_rev_list", ",", "cwd", "=", "self", ".", "dirpath", ",", "env", "=", "self", ".", "gitenv", ")", ":", "yield", "line", ".", "rstrip", "(", "'\\n'", ")", "logger", ".", "debug", "(", "\"Git rev-list fetched from %s repository (%s)\"", ",", "self", ".", "uri", ",", "self", ".", "dirpath", ")" ]
41c908605e88b7ebc3a536c643fa0f212eaf9e0e
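A minimal sketch of rev_list(); passing branches=None walks every branch, tag and origin remote, while an explicit list restricts the walk, as described above. repo is assumed to be an existing GitRepository instance.

for commit_hash in repo.rev_list(branches=['master']):
    print(commit_hash)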
test
GitRepository.log
Read the commit log from the repository. The method returns the Git log of the repository using the following options: git log --raw --numstat --pretty=fuller --decorate=full --all --reverse --topo-order --parents -M -C -c --remotes=origin When `from_date` is given, it gets the commits equal to or newer than that date. This date is given as a datetime object. The list of branches is a list of strings, with the names of the branches to fetch. If the list of branches is empty, no commit is fetched. If the list of branches is None, all commits for all branches will be fetched. :param from_date: fetch commits newer than a specific date (inclusive) :param branches: names of branches to fetch from (default: None) :param encoding: encode the log using this format :returns: a generator where each item is a line from the log :raises EmptyRepositoryError: when the repository is empty and the action cannot be performed :raises RepositoryError: when an error occurs fetching the log
perceval/backends/core/git.py
def log(self, from_date=None, to_date=None, branches=None, encoding='utf-8'): """Read the commit log from the repository. The method returns the Git log of the repository using the following options: git log --raw --numstat --pretty=fuller --decorate=full --all --reverse --topo-order --parents -M -C -c --remotes=origin When `from_date` is given, it gets the commits equal or older than that date. This date is given in a datetime object. The list of branches is a list of strings, with the names of the branches to fetch. If the list of branches is empty, no commit is fetched. If the list of branches is None, all commits for all branches will be fetched. :param from_date: fetch commits newer than a specific date (inclusive) :param branches: names of branches to fetch from (default: None) :param encoding: encode the log using this format :returns: a generator where each item is a line from the log :raises EmptyRepositoryError: when the repository is empty and the action cannot be performed :raises RepositoryError: when an error occurs fetching the log """ if self.is_empty(): logger.warning("Git %s repository is empty; unable to get the log", self.uri) raise EmptyRepositoryError(repository=self.uri) cmd_log = ['git', 'log', '--reverse', '--topo-order'] cmd_log.extend(self.GIT_PRETTY_OUTPUT_OPTS) if from_date: dt = from_date.strftime("%Y-%m-%d %H:%M:%S %z") cmd_log.append('--since=' + dt) if to_date: dt = to_date.strftime("%Y-%m-%d %H:%M:%S %z") cmd_log.append('--until=' + dt) if branches is None: cmd_log.extend(['--branches', '--tags', '--remotes=origin']) elif len(branches) == 0: cmd_log.append('--max-count=0') else: branches = ['refs/heads/' + branch for branch in branches] cmd_log.extend(branches) for line in self._exec_nb(cmd_log, cwd=self.dirpath, env=self.gitenv): yield line logger.debug("Git log fetched from %s repository (%s)", self.uri, self.dirpath)
def log(self, from_date=None, to_date=None, branches=None, encoding='utf-8'): """Read the commit log from the repository. The method returns the Git log of the repository using the following options: git log --raw --numstat --pretty=fuller --decorate=full --all --reverse --topo-order --parents -M -C -c --remotes=origin When `from_date` is given, it gets the commits equal or older than that date. This date is given in a datetime object. The list of branches is a list of strings, with the names of the branches to fetch. If the list of branches is empty, no commit is fetched. If the list of branches is None, all commits for all branches will be fetched. :param from_date: fetch commits newer than a specific date (inclusive) :param branches: names of branches to fetch from (default: None) :param encoding: encode the log using this format :returns: a generator where each item is a line from the log :raises EmptyRepositoryError: when the repository is empty and the action cannot be performed :raises RepositoryError: when an error occurs fetching the log """ if self.is_empty(): logger.warning("Git %s repository is empty; unable to get the log", self.uri) raise EmptyRepositoryError(repository=self.uri) cmd_log = ['git', 'log', '--reverse', '--topo-order'] cmd_log.extend(self.GIT_PRETTY_OUTPUT_OPTS) if from_date: dt = from_date.strftime("%Y-%m-%d %H:%M:%S %z") cmd_log.append('--since=' + dt) if to_date: dt = to_date.strftime("%Y-%m-%d %H:%M:%S %z") cmd_log.append('--until=' + dt) if branches is None: cmd_log.extend(['--branches', '--tags', '--remotes=origin']) elif len(branches) == 0: cmd_log.append('--max-count=0') else: branches = ['refs/heads/' + branch for branch in branches] cmd_log.extend(branches) for line in self._exec_nb(cmd_log, cwd=self.dirpath, env=self.gitenv): yield line logger.debug("Git log fetched from %s repository (%s)", self.uri, self.dirpath)
[ "Read", "the", "commit", "log", "from", "the", "repository", "." ]
chaoss/grimoirelab-perceval
python
https://github.com/chaoss/grimoirelab-perceval/blob/41c908605e88b7ebc3a536c643fa0f212eaf9e0e/perceval/backends/core/git.py#L984-L1041
[ "def", "log", "(", "self", ",", "from_date", "=", "None", ",", "to_date", "=", "None", ",", "branches", "=", "None", ",", "encoding", "=", "'utf-8'", ")", ":", "if", "self", ".", "is_empty", "(", ")", ":", "logger", ".", "warning", "(", "\"Git %s repository is empty; unable to get the log\"", ",", "self", ".", "uri", ")", "raise", "EmptyRepositoryError", "(", "repository", "=", "self", ".", "uri", ")", "cmd_log", "=", "[", "'git'", ",", "'log'", ",", "'--reverse'", ",", "'--topo-order'", "]", "cmd_log", ".", "extend", "(", "self", ".", "GIT_PRETTY_OUTPUT_OPTS", ")", "if", "from_date", ":", "dt", "=", "from_date", ".", "strftime", "(", "\"%Y-%m-%d %H:%M:%S %z\"", ")", "cmd_log", ".", "append", "(", "'--since='", "+", "dt", ")", "if", "to_date", ":", "dt", "=", "to_date", ".", "strftime", "(", "\"%Y-%m-%d %H:%M:%S %z\"", ")", "cmd_log", ".", "append", "(", "'--until='", "+", "dt", ")", "if", "branches", "is", "None", ":", "cmd_log", ".", "extend", "(", "[", "'--branches'", ",", "'--tags'", ",", "'--remotes=origin'", "]", ")", "elif", "len", "(", "branches", ")", "==", "0", ":", "cmd_log", ".", "append", "(", "'--max-count=0'", ")", "else", ":", "branches", "=", "[", "'refs/heads/'", "+", "branch", "for", "branch", "in", "branches", "]", "cmd_log", ".", "extend", "(", "branches", ")", "for", "line", "in", "self", ".", "_exec_nb", "(", "cmd_log", ",", "cwd", "=", "self", ".", "dirpath", ",", "env", "=", "self", ".", "gitenv", ")", ":", "yield", "line", "logger", ".", "debug", "(", "\"Git log fetched from %s repository (%s)\"", ",", "self", ".", "uri", ",", "self", ".", "dirpath", ")" ]
41c908605e88b7ebc3a536c643fa0f212eaf9e0e
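A minimal sketch that combines log() with GitParser to obtain structured commits newer than a given date; repo is assumed to be an existing GitRepository instance and the date is hypothetical.

import datetime

since = datetime.datetime(2019, 1, 1, tzinfo=datetime.timezone.utc)
log_lines = repo.log(from_date=since, branches=['master'])
for commit in GitParser(log_lines).parse():
    print(commit['commit'])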
test
GitRepository.show
Show the data of a set of commits. The method returns the output of Git show command for a set of commits using the following options: git show --raw --numstat --pretty=fuller --decorate=full --parents -M -C -c [<commit>...<commit>] When the list of commits is empty, the command will return data about the last commit, like the default behaviour of `git show`. :param commits: list of commits to show data :param encoding: encode the output using this format :returns: a generator where each item is a line from the show output :raises EmptyRepositoryError: when the repository is empty and the action cannot be performed :raises RepositoryError: when an error occurs fetching the show output
perceval/backends/core/git.py
def show(self, commits=None, encoding='utf-8'): """Show the data of a set of commits. The method returns the output of Git show command for a set of commits using the following options: git show --raw --numstat --pretty=fuller --decorate=full --parents -M -C -c [<commit>...<commit>] When the list of commits is empty, the command will return data about the last commit, like the default behaviour of `git show`. :param commits: list of commits to show data :param encoding: encode the output using this format :returns: a generator where each item is a line from the show output :raises EmptyRepositoryError: when the repository is empty and the action cannot be performed :raises RepositoryError: when an error occurs fetching the show output """ if self.is_empty(): logger.warning("Git %s repository is empty; unable to run show", self.uri) raise EmptyRepositoryError(repository=self.uri) if commits is None: commits = [] cmd_show = ['git', 'show'] cmd_show.extend(self.GIT_PRETTY_OUTPUT_OPTS) cmd_show.extend(commits) for line in self._exec_nb(cmd_show, cwd=self.dirpath, env=self.gitenv): yield line logger.debug("Git show fetched from %s repository (%s)", self.uri, self.dirpath)
def show(self, commits=None, encoding='utf-8'): """Show the data of a set of commits. The method returns the output of Git show command for a set of commits using the following options: git show --raw --numstat --pretty=fuller --decorate=full --parents -M -C -c [<commit>...<commit>] When the list of commits is empty, the command will return data about the last commit, like the default behaviour of `git show`. :param commits: list of commits to show data :param encoding: encode the output using this format :returns: a generator where each item is a line from the show output :raises EmptyRepositoryError: when the repository is empty and the action cannot be performed :raises RepositoryError: when an error occurs fetching the show output """ if self.is_empty(): logger.warning("Git %s repository is empty; unable to run show", self.uri) raise EmptyRepositoryError(repository=self.uri) if commits is None: commits = [] cmd_show = ['git', 'show'] cmd_show.extend(self.GIT_PRETTY_OUTPUT_OPTS) cmd_show.extend(commits) for line in self._exec_nb(cmd_show, cwd=self.dirpath, env=self.gitenv): yield line logger.debug("Git show fetched from %s repository (%s)", self.uri, self.dirpath)
[ "Show", "the", "data", "of", "a", "set", "of", "commits", "." ]
chaoss/grimoirelab-perceval
python
https://github.com/chaoss/grimoirelab-perceval/blob/41c908605e88b7ebc3a536c643fa0f212eaf9e0e/perceval/backends/core/git.py#L1043-L1081
[ "def", "show", "(", "self", ",", "commits", "=", "None", ",", "encoding", "=", "'utf-8'", ")", ":", "if", "self", ".", "is_empty", "(", ")", ":", "logger", ".", "warning", "(", "\"Git %s repository is empty; unable to run show\"", ",", "self", ".", "uri", ")", "raise", "EmptyRepositoryError", "(", "repository", "=", "self", ".", "uri", ")", "if", "commits", "is", "None", ":", "commits", "=", "[", "]", "cmd_show", "=", "[", "'git'", ",", "'show'", "]", "cmd_show", ".", "extend", "(", "self", ".", "GIT_PRETTY_OUTPUT_OPTS", ")", "cmd_show", ".", "extend", "(", "commits", ")", "for", "line", "in", "self", ".", "_exec_nb", "(", "cmd_show", ",", "cwd", "=", "self", ".", "dirpath", ",", "env", "=", "self", ".", "gitenv", ")", ":", "yield", "line", "logger", ".", "debug", "(", "\"Git show fetched from %s repository (%s)\"", ",", "self", ".", "uri", ",", "self", ".", "dirpath", ")" ]
41c908605e88b7ebc3a536c643fa0f212eaf9e0e
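A minimal sketch of show(); the commit hash is hypothetical, and passing an empty list falls back to the last commit, as described above. repo is assumed to be an existing GitRepository instance.

for line in repo.show(commits=['7debcf8a2f57f86663809c58b5c07a398be7674c']):
    print(line, end='')   # lines already carry their newline characters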
test
GitRepository._fetch_pack
Fetch changes and store them in a pack.
perceval/backends/core/git.py
def _fetch_pack(self): """Fetch changes and store them in a pack.""" def prepare_refs(refs): return [ref.hash.encode('utf-8') for ref in refs if not ref.refname.endswith('^{}')] def determine_wants(refs): remote_refs = prepare_refs(self._discover_refs(remote=True)) local_refs = prepare_refs(self._discover_refs()) wants = [ref for ref in remote_refs if ref not in local_refs] return wants client, repo_path = dulwich.client.get_transport_and_path(self.uri) repo = dulwich.repo.Repo(self.dirpath) fd = io.BytesIO() local_refs = self._discover_refs() graph_walker = _GraphWalker(local_refs) result = client.fetch_pack(repo_path, determine_wants, graph_walker, fd.write) refs = [GitRef(ref_hash.decode('utf-8'), ref_name.decode('utf-8')) for ref_name, ref_hash in result.refs.items()] if len(fd.getvalue()) > 0: fd.seek(0) pack = repo.object_store.add_thin_pack(fd.read, None) pack_name = pack.name().decode('utf-8') else: pack_name = None return (pack_name, refs)
def _fetch_pack(self): """Fetch changes and store them in a pack.""" def prepare_refs(refs): return [ref.hash.encode('utf-8') for ref in refs if not ref.refname.endswith('^{}')] def determine_wants(refs): remote_refs = prepare_refs(self._discover_refs(remote=True)) local_refs = prepare_refs(self._discover_refs()) wants = [ref for ref in remote_refs if ref not in local_refs] return wants client, repo_path = dulwich.client.get_transport_and_path(self.uri) repo = dulwich.repo.Repo(self.dirpath) fd = io.BytesIO() local_refs = self._discover_refs() graph_walker = _GraphWalker(local_refs) result = client.fetch_pack(repo_path, determine_wants, graph_walker, fd.write) refs = [GitRef(ref_hash.decode('utf-8'), ref_name.decode('utf-8')) for ref_name, ref_hash in result.refs.items()] if len(fd.getvalue()) > 0: fd.seek(0) pack = repo.object_store.add_thin_pack(fd.read, None) pack_name = pack.name().decode('utf-8') else: pack_name = None return (pack_name, refs)
[ "Fetch", "changes", "and", "store", "them", "in", "a", "pack", "." ]
chaoss/grimoirelab-perceval
python
https://github.com/chaoss/grimoirelab-perceval/blob/41c908605e88b7ebc3a536c643fa0f212eaf9e0e/perceval/backends/core/git.py#L1083-L1117
[ "def", "_fetch_pack", "(", "self", ")", ":", "def", "prepare_refs", "(", "refs", ")", ":", "return", "[", "ref", ".", "hash", ".", "encode", "(", "'utf-8'", ")", "for", "ref", "in", "refs", "if", "not", "ref", ".", "refname", ".", "endswith", "(", "'^{}'", ")", "]", "def", "determine_wants", "(", "refs", ")", ":", "remote_refs", "=", "prepare_refs", "(", "self", ".", "_discover_refs", "(", "remote", "=", "True", ")", ")", "local_refs", "=", "prepare_refs", "(", "self", ".", "_discover_refs", "(", ")", ")", "wants", "=", "[", "ref", "for", "ref", "in", "remote_refs", "if", "ref", "not", "in", "local_refs", "]", "return", "wants", "client", ",", "repo_path", "=", "dulwich", ".", "client", ".", "get_transport_and_path", "(", "self", ".", "uri", ")", "repo", "=", "dulwich", ".", "repo", ".", "Repo", "(", "self", ".", "dirpath", ")", "fd", "=", "io", ".", "BytesIO", "(", ")", "local_refs", "=", "self", ".", "_discover_refs", "(", ")", "graph_walker", "=", "_GraphWalker", "(", "local_refs", ")", "result", "=", "client", ".", "fetch_pack", "(", "repo_path", ",", "determine_wants", ",", "graph_walker", ",", "fd", ".", "write", ")", "refs", "=", "[", "GitRef", "(", "ref_hash", ".", "decode", "(", "'utf-8'", ")", ",", "ref_name", ".", "decode", "(", "'utf-8'", ")", ")", "for", "ref_name", ",", "ref_hash", "in", "result", ".", "refs", ".", "items", "(", ")", "]", "if", "len", "(", "fd", ".", "getvalue", "(", ")", ")", ">", "0", ":", "fd", ".", "seek", "(", "0", ")", "pack", "=", "repo", ".", "object_store", ".", "add_thin_pack", "(", "fd", ".", "read", ",", "None", ")", "pack_name", "=", "pack", ".", "name", "(", ")", ".", "decode", "(", "'utf-8'", ")", "else", ":", "pack_name", "=", "None", "return", "(", "pack_name", ",", "refs", ")" ]
41c908605e88b7ebc3a536c643fa0f212eaf9e0e
test
GitRepository._read_commits_from_pack
Read the commits of a pack.
perceval/backends/core/git.py
def _read_commits_from_pack(self, packet_name): """Read the commits of a pack.""" filepath = 'objects/pack/pack-' + packet_name cmd_verify_pack = ['git', 'verify-pack', '-v', filepath] outs = self._exec(cmd_verify_pack, cwd=self.dirpath, env=self.gitenv) outs = outs.decode('utf-8', errors='surrogateescape').rstrip() lines = [line.split(' ') for line in outs.split('\n')] # Commits usually come in the pack ordered from newest to oldest commits = [parts[0] for parts in lines if parts[1] == 'commit'] commits.reverse() return commits
def _read_commits_from_pack(self, packet_name): """Read the commits of a pack.""" filepath = 'objects/pack/pack-' + packet_name cmd_verify_pack = ['git', 'verify-pack', '-v', filepath] outs = self._exec(cmd_verify_pack, cwd=self.dirpath, env=self.gitenv) outs = outs.decode('utf-8', errors='surrogateescape').rstrip() lines = [line.split(' ') for line in outs.split('\n')] # Commits usually come in the pack ordered from newest to oldest commits = [parts[0] for parts in lines if parts[1] == 'commit'] commits.reverse() return commits
[ "Read", "the", "commits", "of", "a", "pack", "." ]
chaoss/grimoirelab-perceval
python
https://github.com/chaoss/grimoirelab-perceval/blob/41c908605e88b7ebc3a536c643fa0f212eaf9e0e/perceval/backends/core/git.py#L1119-L1135
[ "def", "_read_commits_from_pack", "(", "self", ",", "packet_name", ")", ":", "filepath", "=", "'objects/pack/pack-'", "+", "packet_name", "cmd_verify_pack", "=", "[", "'git'", ",", "'verify-pack'", ",", "'-v'", ",", "filepath", "]", "outs", "=", "self", ".", "_exec", "(", "cmd_verify_pack", ",", "cwd", "=", "self", ".", "dirpath", ",", "env", "=", "self", ".", "gitenv", ")", "outs", "=", "outs", ".", "decode", "(", "'utf-8'", ",", "errors", "=", "'surrogateescape'", ")", ".", "rstrip", "(", ")", "lines", "=", "[", "line", ".", "split", "(", "' '", ")", "for", "line", "in", "outs", ".", "split", "(", "'\\n'", ")", "]", "# Commits usually come in the pack ordered from newest to oldest", "commits", "=", "[", "parts", "[", "0", "]", "for", "parts", "in", "lines", "if", "parts", "[", "1", "]", "==", "'commit'", "]", "commits", ".", "reverse", "(", ")", "return", "commits" ]
41c908605e88b7ebc3a536c643fa0f212eaf9e0e
test
GitRepository._update_references
Update references removing old ones.
perceval/backends/core/git.py
def _update_references(self, refs): """Update references removing old ones.""" new_refs = [ref.refname for ref in refs] # Delete old references for old_ref in self._discover_refs(): if not old_ref.refname.startswith('refs/heads/'): continue if old_ref.refname in new_refs: continue self._update_ref(old_ref, delete=True) # Update new references for new_ref in refs: refname = new_ref.refname if refname.endswith('^{}'): logger.debug("Annotated tag %s ignored for updating in sync process", refname) continue elif not refname.startswith('refs/heads/') and not refname.startswith('refs/tags/'): logger.debug("Reference %s not needed; ignored for updating in sync process", refname) continue else: self._update_ref(new_ref) # Prune repository to remove old branches cmd = ['git', 'remote', 'prune', 'origin'] self._exec(cmd, cwd=self.dirpath, env=self.gitenv)
def _update_references(self, refs): """Update references removing old ones.""" new_refs = [ref.refname for ref in refs] # Delete old references for old_ref in self._discover_refs(): if not old_ref.refname.startswith('refs/heads/'): continue if old_ref.refname in new_refs: continue self._update_ref(old_ref, delete=True) # Update new references for new_ref in refs: refname = new_ref.refname if refname.endswith('^{}'): logger.debug("Annotated tag %s ignored for updating in sync process", refname) continue elif not refname.startswith('refs/heads/') and not refname.startswith('refs/tags/'): logger.debug("Reference %s not needed; ignored for updating in sync process", refname) continue else: self._update_ref(new_ref) # Prune repository to remove old branches cmd = ['git', 'remote', 'prune', 'origin'] self._exec(cmd, cwd=self.dirpath, env=self.gitenv)
[ "Update", "references", "removing", "old", "ones", "." ]
chaoss/grimoirelab-perceval
python
https://github.com/chaoss/grimoirelab-perceval/blob/41c908605e88b7ebc3a536c643fa0f212eaf9e0e/perceval/backends/core/git.py#L1137-L1167
[ "def", "_update_references", "(", "self", ",", "refs", ")", ":", "new_refs", "=", "[", "ref", ".", "refname", "for", "ref", "in", "refs", "]", "# Delete old references", "for", "old_ref", "in", "self", ".", "_discover_refs", "(", ")", ":", "if", "not", "old_ref", ".", "refname", ".", "startswith", "(", "'refs/heads/'", ")", ":", "continue", "if", "old_ref", ".", "refname", "in", "new_refs", ":", "continue", "self", ".", "_update_ref", "(", "old_ref", ",", "delete", "=", "True", ")", "# Update new references", "for", "new_ref", "in", "refs", ":", "refname", "=", "new_ref", ".", "refname", "if", "refname", ".", "endswith", "(", "'^{}'", ")", ":", "logger", ".", "debug", "(", "\"Annotated tag %s ignored for updating in sync process\"", ",", "refname", ")", "continue", "elif", "not", "refname", ".", "startswith", "(", "'refs/heads/'", ")", "and", "not", "refname", ".", "startswith", "(", "'refs/tags/'", ")", ":", "logger", ".", "debug", "(", "\"Reference %s not needed; ignored for updating in sync process\"", ",", "refname", ")", "continue", "else", ":", "self", ".", "_update_ref", "(", "new_ref", ")", "# Prune repository to remove old branches", "cmd", "=", "[", "'git'", ",", "'remote'", ",", "'prune'", ",", "'origin'", "]", "self", ".", "_exec", "(", "cmd", ",", "cwd", "=", "self", ".", "dirpath", ",", "env", "=", "self", ".", "gitenv", ")" ]
41c908605e88b7ebc3a536c643fa0f212eaf9e0e
test
GitRepository._discover_refs
Get the current list of local or remote refs.
perceval/backends/core/git.py
def _discover_refs(self, remote=False): """Get the current list of local or remote refs.""" if remote: cmd_refs = ['git', 'ls-remote', '-h', '-t', '--exit-code', 'origin'] sep = '\t' ignored_error_codes = [2] else: # Check first whether the local repo is empty; # Running 'show-ref' in empty repos gives an error if self.is_empty(): raise EmptyRepositoryError(repository=self.uri) cmd_refs = ['git', 'show-ref', '--heads', '--tags'] sep = ' ' ignored_error_codes = [1] # Error codes returned when no matching refs (i.e, no heads # or tags) are found in a repository will be ignored. Otherwise, # the full process would fail for those situations. outs = self._exec(cmd_refs, cwd=self.dirpath, env=self.gitenv, ignored_error_codes=ignored_error_codes) outs = outs.decode('utf-8', errors='surrogateescape').rstrip() outs = outs.split('\n') if outs else [] refs = [] for line in outs: data = line.split(sep) ref = GitRef(data[0], data[1]) refs.append(ref) return refs
def _discover_refs(self, remote=False): """Get the current list of local or remote refs.""" if remote: cmd_refs = ['git', 'ls-remote', '-h', '-t', '--exit-code', 'origin'] sep = '\t' ignored_error_codes = [2] else: # Check first whether the local repo is empty; # Running 'show-ref' in empty repos gives an error if self.is_empty(): raise EmptyRepositoryError(repository=self.uri) cmd_refs = ['git', 'show-ref', '--heads', '--tags'] sep = ' ' ignored_error_codes = [1] # Error codes returned when no matching refs (i.e, no heads # or tags) are found in a repository will be ignored. Otherwise, # the full process would fail for those situations. outs = self._exec(cmd_refs, cwd=self.dirpath, env=self.gitenv, ignored_error_codes=ignored_error_codes) outs = outs.decode('utf-8', errors='surrogateescape').rstrip() outs = outs.split('\n') if outs else [] refs = [] for line in outs: data = line.split(sep) ref = GitRef(data[0], data[1]) refs.append(ref) return refs
[ "Get", "the", "current", "list", "of", "local", "or", "remote", "refs", "." ]
chaoss/grimoirelab-perceval
python
https://github.com/chaoss/grimoirelab-perceval/blob/41c908605e88b7ebc3a536c643fa0f212eaf9e0e/perceval/backends/core/git.py#L1169-L1202
[ "def", "_discover_refs", "(", "self", ",", "remote", "=", "False", ")", ":", "if", "remote", ":", "cmd_refs", "=", "[", "'git'", ",", "'ls-remote'", ",", "'-h'", ",", "'-t'", ",", "'--exit-code'", ",", "'origin'", "]", "sep", "=", "'\\t'", "ignored_error_codes", "=", "[", "2", "]", "else", ":", "# Check first whether the local repo is empty;", "# Running 'show-ref' in empty repos gives an error", "if", "self", ".", "is_empty", "(", ")", ":", "raise", "EmptyRepositoryError", "(", "repository", "=", "self", ".", "uri", ")", "cmd_refs", "=", "[", "'git'", ",", "'show-ref'", ",", "'--heads'", ",", "'--tags'", "]", "sep", "=", "' '", "ignored_error_codes", "=", "[", "1", "]", "# Error codes returned when no matching refs (i.e, no heads", "# or tags) are found in a repository will be ignored. Otherwise,", "# the full process would fail for those situations.", "outs", "=", "self", ".", "_exec", "(", "cmd_refs", ",", "cwd", "=", "self", ".", "dirpath", ",", "env", "=", "self", ".", "gitenv", ",", "ignored_error_codes", "=", "ignored_error_codes", ")", "outs", "=", "outs", ".", "decode", "(", "'utf-8'", ",", "errors", "=", "'surrogateescape'", ")", ".", "rstrip", "(", ")", "outs", "=", "outs", ".", "split", "(", "'\\n'", ")", "if", "outs", "else", "[", "]", "refs", "=", "[", "]", "for", "line", "in", "outs", ":", "data", "=", "line", ".", "split", "(", "sep", ")", "ref", "=", "GitRef", "(", "data", "[", "0", "]", ",", "data", "[", "1", "]", ")", "refs", ".", "append", "(", "ref", ")", "return", "refs" ]
41c908605e88b7ebc3a536c643fa0f212eaf9e0e
test
GitRepository._update_ref
Update a reference.
perceval/backends/core/git.py
def _update_ref(self, ref, delete=False): """Update a reference.""" cmd = ['git', 'update-ref'] if delete: cmd.extend(['-d', ref.refname]) action = 'deleted' else: cmd.extend([ref.refname, ref.hash]) action = 'updated to %s' % ref.hash try: self._exec(cmd, cwd=self.dirpath, env=self.gitenv) except RepositoryError as e: logger.warning("Git %s ref could not be %s during sync process in %s (%s); skipped", ref.refname, action, self.uri, self.dirpath) else: logger.debug("Git %s ref %s in %s (%s)", ref.refname, action, self.uri, self.dirpath)
def _update_ref(self, ref, delete=False): """Update a reference.""" cmd = ['git', 'update-ref'] if delete: cmd.extend(['-d', ref.refname]) action = 'deleted' else: cmd.extend([ref.refname, ref.hash]) action = 'updated to %s' % ref.hash try: self._exec(cmd, cwd=self.dirpath, env=self.gitenv) except RepositoryError as e: logger.warning("Git %s ref could not be %s during sync process in %s (%s); skipped", ref.refname, action, self.uri, self.dirpath) else: logger.debug("Git %s ref %s in %s (%s)", ref.refname, action, self.uri, self.dirpath)
[ "Update", "a", "reference", "." ]
chaoss/grimoirelab-perceval
python
https://github.com/chaoss/grimoirelab-perceval/blob/41c908605e88b7ebc3a536c643fa0f212eaf9e0e/perceval/backends/core/git.py#L1204-L1223
[ "def", "_update_ref", "(", "self", ",", "ref", ",", "delete", "=", "False", ")", ":", "cmd", "=", "[", "'git'", ",", "'update-ref'", "]", "if", "delete", ":", "cmd", ".", "extend", "(", "[", "'-d'", ",", "ref", ".", "refname", "]", ")", "action", "=", "'deleted'", "else", ":", "cmd", ".", "extend", "(", "[", "ref", ".", "refname", ",", "ref", ".", "hash", "]", ")", "action", "=", "'updated to %s'", "%", "ref", ".", "hash", "try", ":", "self", ".", "_exec", "(", "cmd", ",", "cwd", "=", "self", ".", "dirpath", ",", "env", "=", "self", ".", "gitenv", ")", "except", "RepositoryError", "as", "e", ":", "logger", ".", "warning", "(", "\"Git %s ref could not be %s during sync process in %s (%s); skipped\"", ",", "ref", ".", "refname", ",", "action", ",", "self", ".", "uri", ",", "self", ".", "dirpath", ")", "else", ":", "logger", ".", "debug", "(", "\"Git %s ref %s in %s (%s)\"", ",", "ref", ".", "refname", ",", "action", ",", "self", ".", "uri", ",", "self", ".", "dirpath", ")" ]
41c908605e88b7ebc3a536c643fa0f212eaf9e0e
test
GitRepository._exec_nb
Run a command with a non-blocking call. Execute the `cmd` command with a non-blocking call. The command will be run in the directory set by `cwd`. Environment variables can be set using the `env` dictionary. The output is returned as an iterator where each item is a decoded line of the output. :returns: an iterator with the output of the command, one decoded line per item :raises RepositoryError: when an error occurs running the command
perceval/backends/core/git.py
def _exec_nb(self, cmd, cwd=None, env=None, encoding='utf-8'): """Run a command with a non blocking call. Execute `cmd` command with a non blocking call. The command will be run in the directory set by `cwd`. Enviroment variables can be set using the `env` dictionary. The output data is returned as encoded bytes in an iterator. Each item will be a line of the output. :returns: an iterator with the output of the command as encoded bytes :raises RepositoryError: when an error occurs running the command """ self.failed_message = None logger.debug("Running command %s (cwd: %s, env: %s)", ' '.join(cmd), cwd, str(env)) try: self.proc = subprocess.Popen(cmd, stdout=subprocess.PIPE, stderr=subprocess.PIPE, cwd=cwd, env=env) err_thread = threading.Thread(target=self._read_stderr, kwargs={'encoding': encoding}, daemon=True) err_thread.start() for line in self.proc.stdout: yield line.decode(encoding, errors='surrogateescape') err_thread.join() self.proc.communicate() self.proc.stdout.close() self.proc.stderr.close() except OSError as e: err_thread.join() raise RepositoryError(cause=str(e)) if self.proc.returncode != 0: cause = "git command - %s (return code: %d)" % \ (self.failed_message, self.proc.returncode) raise RepositoryError(cause=cause)
def _exec_nb(self, cmd, cwd=None, env=None, encoding='utf-8'): """Run a command with a non blocking call. Execute `cmd` command with a non blocking call. The command will be run in the directory set by `cwd`. Enviroment variables can be set using the `env` dictionary. The output data is returned as encoded bytes in an iterator. Each item will be a line of the output. :returns: an iterator with the output of the command as encoded bytes :raises RepositoryError: when an error occurs running the command """ self.failed_message = None logger.debug("Running command %s (cwd: %s, env: %s)", ' '.join(cmd), cwd, str(env)) try: self.proc = subprocess.Popen(cmd, stdout=subprocess.PIPE, stderr=subprocess.PIPE, cwd=cwd, env=env) err_thread = threading.Thread(target=self._read_stderr, kwargs={'encoding': encoding}, daemon=True) err_thread.start() for line in self.proc.stdout: yield line.decode(encoding, errors='surrogateescape') err_thread.join() self.proc.communicate() self.proc.stdout.close() self.proc.stderr.close() except OSError as e: err_thread.join() raise RepositoryError(cause=str(e)) if self.proc.returncode != 0: cause = "git command - %s (return code: %d)" % \ (self.failed_message, self.proc.returncode) raise RepositoryError(cause=cause)
[ "Run", "a", "command", "with", "a", "non", "blocking", "call", "." ]
chaoss/grimoirelab-perceval
python
https://github.com/chaoss/grimoirelab-perceval/blob/41c908605e88b7ebc3a536c643fa0f212eaf9e0e/perceval/backends/core/git.py#L1225-L1267
[ "def", "_exec_nb", "(", "self", ",", "cmd", ",", "cwd", "=", "None", ",", "env", "=", "None", ",", "encoding", "=", "'utf-8'", ")", ":", "self", ".", "failed_message", "=", "None", "logger", ".", "debug", "(", "\"Running command %s (cwd: %s, env: %s)\"", ",", "' '", ".", "join", "(", "cmd", ")", ",", "cwd", ",", "str", "(", "env", ")", ")", "try", ":", "self", ".", "proc", "=", "subprocess", ".", "Popen", "(", "cmd", ",", "stdout", "=", "subprocess", ".", "PIPE", ",", "stderr", "=", "subprocess", ".", "PIPE", ",", "cwd", "=", "cwd", ",", "env", "=", "env", ")", "err_thread", "=", "threading", ".", "Thread", "(", "target", "=", "self", ".", "_read_stderr", ",", "kwargs", "=", "{", "'encoding'", ":", "encoding", "}", ",", "daemon", "=", "True", ")", "err_thread", ".", "start", "(", ")", "for", "line", "in", "self", ".", "proc", ".", "stdout", ":", "yield", "line", ".", "decode", "(", "encoding", ",", "errors", "=", "'surrogateescape'", ")", "err_thread", ".", "join", "(", ")", "self", ".", "proc", ".", "communicate", "(", ")", "self", ".", "proc", ".", "stdout", ".", "close", "(", ")", "self", ".", "proc", ".", "stderr", ".", "close", "(", ")", "except", "OSError", "as", "e", ":", "err_thread", ".", "join", "(", ")", "raise", "RepositoryError", "(", "cause", "=", "str", "(", "e", ")", ")", "if", "self", ".", "proc", ".", "returncode", "!=", "0", ":", "cause", "=", "\"git command - %s (return code: %d)\"", "%", "(", "self", ".", "failed_message", ",", "self", ".", "proc", ".", "returncode", ")", "raise", "RepositoryError", "(", "cause", "=", "cause", ")" ]
41c908605e88b7ebc3a536c643fa0f212eaf9e0e
test
GitRepository._read_stderr
Reads self.proc.stderr. Usually, this should be read in a thread, to prevent the read from stdout from blocking if the stderr buffer is filled and this function is not called because the program is busy in the stdout reading loop. Reads self.proc.stderr (self.proc is the subprocess running the git command), and reads / writes self.failed_message (the message sent to stderr when git fails, usually one line).
perceval/backends/core/git.py
def _read_stderr(self, encoding='utf-8'): """Reads self.proc.stderr. Usually, this should be read in a thread, to prevent blocking the read from stdout of the stderr buffer is filled, and this function is not called becuase the program is busy in the stderr reading loop. Reads self.proc.stderr (self.proc is the subprocess running the git command), and reads / writes self.failed_message (the message sent to stderr when git fails, usually one line). """ for line in self.proc.stderr: err_line = line.decode(encoding, errors='surrogateescape') if self.proc.returncode != 0: # If the subprocess didn't finish successfully, we expect # the last line in stderr to provide the cause if self.failed_message is not None: # We had a message, there is a newer line, print it logger.debug("Git log stderr: " + self.failed_message) self.failed_message = err_line else: # The subprocess is successfully up to now, print the line logger.debug("Git log stderr: " + err_line)
def _read_stderr(self, encoding='utf-8'):
        """Reads self.proc.stderr.

        Usually, this should be read in a thread, to prevent blocking the
        read from stdout if the stderr buffer is filled while this function
        is not called, because the program is busy in the stdout reading loop.

        Reads self.proc.stderr (self.proc is the subprocess running
        the git command), and reads / writes self.failed_message
        (the message sent to stderr when git fails, usually one line).
        """
        for line in self.proc.stderr:
            err_line = line.decode(encoding, errors='surrogateescape')

            if self.proc.returncode != 0:
                # If the subprocess didn't finish successfully, we expect
                # the last line in stderr to provide the cause
                if self.failed_message is not None:
                    # We had a message, there is a newer line, print it
                    logger.debug("Git log stderr: " + self.failed_message)
                self.failed_message = err_line
            else:
                # The subprocess is successfully up to now, print the line
                logger.debug("Git log stderr: " + err_line)
[ "Reads", "self", ".", "proc", ".", "stderr", "." ]
chaoss/grimoirelab-perceval
python
https://github.com/chaoss/grimoirelab-perceval/blob/41c908605e88b7ebc3a536c643fa0f212eaf9e0e/perceval/backends/core/git.py#L1269-L1293
[ "def", "_read_stderr", "(", "self", ",", "encoding", "=", "'utf-8'", ")", ":", "for", "line", "in", "self", ".", "proc", ".", "stderr", ":", "err_line", "=", "line", ".", "decode", "(", "encoding", ",", "errors", "=", "'surrogateescape'", ")", "if", "self", ".", "proc", ".", "returncode", "!=", "0", ":", "# If the subprocess didn't finish successfully, we expect", "# the last line in stderr to provide the cause", "if", "self", ".", "failed_message", "is", "not", "None", ":", "# We had a message, there is a newer line, print it", "logger", ".", "debug", "(", "\"Git log stderr: \"", "+", "self", ".", "failed_message", ")", "self", ".", "failed_message", "=", "err_line", "else", ":", "# The subprocess is successfully up to now, print the line", "logger", ".", "debug", "(", "\"Git log stderr: \"", "+", "err_line", ")" ]
41c908605e88b7ebc3a536c643fa0f212eaf9e0e
test
GitRepository._exec
Run a command. Execute `cmd` command in the directory set by `cwd`. Environment variables can be set using the `env` dictionary. The output data is returned as encoded bytes. Commands whose return status codes are non-zero will be treated as failed. Error codes considered as valid can be ignored by giving them in the `ignored_error_codes` list. :returns: the output of the command as encoded bytes :raises RepositoryError: when an error occurs running the command
perceval/backends/core/git.py
def _exec(cmd, cwd=None, env=None, ignored_error_codes=None, encoding='utf-8'):
        """Run a command.

        Execute `cmd` command in the directory set by `cwd`. Environment
        variables can be set using the `env` dictionary. The output
        data is returned as encoded bytes.

        Commands whose return status codes are non-zero will be treated as
        failed. Error codes considered as valid can be ignored by giving
        them in the `ignored_error_codes` list.

        :returns: the output of the command as encoded bytes
        :raises RepositoryError: when an error occurs running the command
        """
        if ignored_error_codes is None:
            ignored_error_codes = []

        logger.debug("Running command %s (cwd: %s, env: %s)",
                     ' '.join(cmd), cwd, str(env))

        try:
            proc = subprocess.Popen(cmd, stdout=subprocess.PIPE,
                                    stderr=subprocess.PIPE,
                                    cwd=cwd, env=env)
            (outs, errs) = proc.communicate()
        except OSError as e:
            raise RepositoryError(cause=str(e))

        if proc.returncode != 0 and proc.returncode not in ignored_error_codes:
            err = errs.decode(encoding, errors='surrogateescape')
            cause = "git command - %s" % err
            raise RepositoryError(cause=cause)
        else:
            logger.debug(errs.decode(encoding, errors='surrogateescape'))

        return outs
def _exec(cmd, cwd=None, env=None, ignored_error_codes=None, encoding='utf-8'):
        """Run a command.

        Execute `cmd` command in the directory set by `cwd`. Environment
        variables can be set using the `env` dictionary. The output
        data is returned as encoded bytes.

        Commands whose return status codes are non-zero will be treated as
        failed. Error codes considered as valid can be ignored by giving
        them in the `ignored_error_codes` list.

        :returns: the output of the command as encoded bytes
        :raises RepositoryError: when an error occurs running the command
        """
        if ignored_error_codes is None:
            ignored_error_codes = []

        logger.debug("Running command %s (cwd: %s, env: %s)",
                     ' '.join(cmd), cwd, str(env))

        try:
            proc = subprocess.Popen(cmd, stdout=subprocess.PIPE,
                                    stderr=subprocess.PIPE,
                                    cwd=cwd, env=env)
            (outs, errs) = proc.communicate()
        except OSError as e:
            raise RepositoryError(cause=str(e))

        if proc.returncode != 0 and proc.returncode not in ignored_error_codes:
            err = errs.decode(encoding, errors='surrogateescape')
            cause = "git command - %s" % err
            raise RepositoryError(cause=cause)
        else:
            logger.debug(errs.decode(encoding, errors='surrogateescape'))

        return outs
[ "Run", "a", "command", "." ]
chaoss/grimoirelab-perceval
python
https://github.com/chaoss/grimoirelab-perceval/blob/41c908605e88b7ebc3a536c643fa0f212eaf9e0e/perceval/backends/core/git.py#L1296-L1333
[ "def", "_exec", "(", "cmd", ",", "cwd", "=", "None", ",", "env", "=", "None", ",", "ignored_error_codes", "=", "None", ",", "encoding", "=", "'utf-8'", ")", ":", "if", "ignored_error_codes", "is", "None", ":", "ignored_error_codes", "=", "[", "]", "logger", ".", "debug", "(", "\"Running command %s (cwd: %s, env: %s)\"", ",", "' '", ".", "join", "(", "cmd", ")", ",", "cwd", ",", "str", "(", "env", ")", ")", "try", ":", "proc", "=", "subprocess", ".", "Popen", "(", "cmd", ",", "stdout", "=", "subprocess", ".", "PIPE", ",", "stderr", "=", "subprocess", ".", "PIPE", ",", "cwd", "=", "cwd", ",", "env", "=", "env", ")", "(", "outs", ",", "errs", ")", "=", "proc", ".", "communicate", "(", ")", "except", "OSError", "as", "e", ":", "raise", "RepositoryError", "(", "cause", "=", "str", "(", "e", ")", ")", "if", "proc", ".", "returncode", "!=", "0", "and", "proc", ".", "returncode", "not", "in", "ignored_error_codes", ":", "err", "=", "errs", ".", "decode", "(", "encoding", ",", "errors", "=", "'surrogateescape'", ")", "cause", "=", "\"git command - %s\"", "%", "err", "raise", "RepositoryError", "(", "cause", "=", "cause", ")", "else", ":", "logger", ".", "debug", "(", "errs", ".", "decode", "(", "encoding", ",", "errors", "=", "'surrogateescape'", ")", ")", "return", "outs" ]
41c908605e88b7ebc3a536c643fa0f212eaf9e0e
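The blocking variant above accepts ignored_error_codes so callers can treat selected non-zero exit statuses as success. A minimal sketch of why that matters, using plain subprocess and git config, whose --get subcommand exits with code 1 when a key is simply unset (the key name below is a made-up example):

import subprocess


def exec_blocking(cmd, ignored_error_codes=None, encoding='utf-8'):
    """Run a command, tolerating the exit codes listed in ignored_error_codes."""
    ignored_error_codes = ignored_error_codes or []
    proc = subprocess.Popen(cmd, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
    outs, errs = proc.communicate()
    if proc.returncode != 0 and proc.returncode not in ignored_error_codes:
        raise RuntimeError(errs.decode(encoding, errors='surrogateescape'))
    return outs


# Exit code 1 here only means "key not set", so it is treated as a valid outcome
value = exec_blocking(['git', 'config', '--get', 'example.missing.key'],
                      ignored_error_codes=[1])
print(repr(value))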
test
Twitter.fetch
Fetch the tweets from the server. This method fetches tweets from the TwitterSearch API published in the last seven days. :param category: the category of items to fetch :param since_id: if not null, it returns results with an ID greater than the specified ID :param max_id: if not null, it returns results with an ID less than the specified ID :param geocode: if enabled, returns tweets by users located at latitude,longitude,"mi"|"km" :param lang: if enabled, restricts tweets to the given language, given by an ISO 639-1 code :param include_entities: if disabled, it excludes entities node :param tweets_type: type of tweets returned. Default is “mixed”, others are "recent" and "popular" :returns: a generator of tweets
perceval/backends/core/twitter.py
def fetch(self, category=CATEGORY_TWEET, since_id=None, max_id=None, geocode=None, lang=None,
              include_entities=True, tweets_type=TWEET_TYPE_MIXED):
        """Fetch the tweets from the server.

        This method fetches tweets from the TwitterSearch API published in the last seven days.

        :param category: the category of items to fetch
        :param since_id: if not null, it returns results with an ID greater than the specified ID
        :param max_id: if not null, it returns results with an ID less than the specified ID
        :param geocode: if enabled, returns tweets by users located at latitude,longitude,"mi"|"km"
        :param lang: if enabled, restricts tweets to the given language, given by an ISO 639-1 code
        :param include_entities: if disabled, it excludes entities node
        :param tweets_type: type of tweets returned. Default is “mixed”, others are "recent" and "popular"

        :returns: a generator of tweets
        """
        kwargs = {"since_id": since_id, "max_id": max_id, "geocode": geocode, "lang": lang,
                  "include_entities": include_entities, "result_type": tweets_type}
        items = super().fetch(category, **kwargs)

        return items
def fetch(self, category=CATEGORY_TWEET, since_id=None, max_id=None, geocode=None, lang=None,
              include_entities=True, tweets_type=TWEET_TYPE_MIXED):
        """Fetch the tweets from the server.

        This method fetches tweets from the TwitterSearch API published in the last seven days.

        :param category: the category of items to fetch
        :param since_id: if not null, it returns results with an ID greater than the specified ID
        :param max_id: if not null, it returns results with an ID less than the specified ID
        :param geocode: if enabled, returns tweets by users located at latitude,longitude,"mi"|"km"
        :param lang: if enabled, restricts tweets to the given language, given by an ISO 639-1 code
        :param include_entities: if disabled, it excludes entities node
        :param tweets_type: type of tweets returned. Default is “mixed”, others are "recent" and "popular"

        :returns: a generator of tweets
        """
        kwargs = {"since_id": since_id, "max_id": max_id, "geocode": geocode, "lang": lang,
                  "include_entities": include_entities, "result_type": tweets_type}
        items = super().fetch(category, **kwargs)

        return items
[ "Fetch", "the", "tweets", "from", "the", "server", "." ]
chaoss/grimoirelab-perceval
python
https://github.com/chaoss/grimoirelab-perceval/blob/41c908605e88b7ebc3a536c643fa0f212eaf9e0e/perceval/backends/core/twitter.py#L99-L124
[ "def", "fetch", "(", "self", ",", "category", "=", "CATEGORY_TWEET", ",", "since_id", "=", "None", ",", "max_id", "=", "None", ",", "geocode", "=", "None", ",", "lang", "=", "None", ",", "include_entities", "=", "True", ",", "tweets_type", "=", "TWEET_TYPE_MIXED", ")", ":", "kwargs", "=", "{", "\"since_id\"", ":", "since_id", ",", "\"max_id\"", ":", "max_id", ",", "\"geocode\"", ":", "geocode", ",", "\"lang\"", ":", "lang", ",", "\"include_entities\"", ":", "include_entities", ",", "\"result_type\"", ":", "tweets_type", "}", "items", "=", "super", "(", ")", ".", "fetch", "(", "category", ",", "*", "*", "kwargs", ")", "return", "items" ]
41c908605e88b7ebc3a536c643fa0f212eaf9e0e
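A sketch of driving the Twitter backend described above directly from Python. The constructor arguments are inferred from the attributes these records use (self.query, self.api_token) and may not match the real signature exactly; the token and query are placeholders, and 'id'/'text' are standard fields of the Twitter search payload rather than values taken from these records.

from perceval.backends.core.twitter import Twitter

# Constructor signature is assumed from the attributes used in these records;
# check the class __init__ for the exact parameters.
backend = Twitter('grimoirelab', api_token='XXXX', sleep_for_rate=True)

for item in backend.fetch(lang='en', tweets_type='recent'):
    tweet = item['data']  # the raw tweet is stored under 'data' by the backend
    print(tweet['id'], tweet['text'][:80])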
test
Twitter.fetch_items
Fetch the tweets :param category: the category of items to fetch :param kwargs: backend arguments :returns: a generator of items
perceval/backends/core/twitter.py
def fetch_items(self, category, **kwargs): """Fetch the tweets :param category: the category of items to fetch :param kwargs: backend arguments :returns: a generator of items """ since_id = kwargs['since_id'] max_id = kwargs['max_id'] geocode = kwargs['geocode'] lang = kwargs['lang'] entities = kwargs['include_entities'] tweets_type = kwargs['result_type'] logger.info("Fetching tweets %s from %s to %s", self.query, str(since_id), str(max_id) if max_id else '--') tweets_ids = [] min_date = None max_date = None group_tweets = self.client.tweets(self.query, since_id=since_id, max_id=max_id, geocode=geocode, lang=lang, include_entities=entities, result_type=tweets_type) for tweets in group_tweets: for i in range(len(tweets)): tweet = tweets[i] tweets_ids.append(tweet['id']) if tweets[-1] == tweet: min_date = str_to_datetime(tweets[-1]['created_at']) if tweets[0] == tweet and not max_date: max_date = str_to_datetime(tweets[0]['created_at']) yield tweet logger.info("Fetch process completed: %s (unique %s) tweets fetched, from %s to %s", len(tweets_ids), len(list(set(tweets_ids))), min_date, max_date)
def fetch_items(self, category, **kwargs): """Fetch the tweets :param category: the category of items to fetch :param kwargs: backend arguments :returns: a generator of items """ since_id = kwargs['since_id'] max_id = kwargs['max_id'] geocode = kwargs['geocode'] lang = kwargs['lang'] entities = kwargs['include_entities'] tweets_type = kwargs['result_type'] logger.info("Fetching tweets %s from %s to %s", self.query, str(since_id), str(max_id) if max_id else '--') tweets_ids = [] min_date = None max_date = None group_tweets = self.client.tweets(self.query, since_id=since_id, max_id=max_id, geocode=geocode, lang=lang, include_entities=entities, result_type=tweets_type) for tweets in group_tweets: for i in range(len(tweets)): tweet = tweets[i] tweets_ids.append(tweet['id']) if tweets[-1] == tweet: min_date = str_to_datetime(tweets[-1]['created_at']) if tweets[0] == tweet and not max_date: max_date = str_to_datetime(tweets[0]['created_at']) yield tweet logger.info("Fetch process completed: %s (unique %s) tweets fetched, from %s to %s", len(tweets_ids), len(list(set(tweets_ids))), min_date, max_date)
[ "Fetch", "the", "tweets" ]
chaoss/grimoirelab-perceval
python
https://github.com/chaoss/grimoirelab-perceval/blob/41c908605e88b7ebc3a536c643fa0f212eaf9e0e/perceval/backends/core/twitter.py#L126-L165
[ "def", "fetch_items", "(", "self", ",", "category", ",", "*", "*", "kwargs", ")", ":", "since_id", "=", "kwargs", "[", "'since_id'", "]", "max_id", "=", "kwargs", "[", "'max_id'", "]", "geocode", "=", "kwargs", "[", "'geocode'", "]", "lang", "=", "kwargs", "[", "'lang'", "]", "entities", "=", "kwargs", "[", "'include_entities'", "]", "tweets_type", "=", "kwargs", "[", "'result_type'", "]", "logger", ".", "info", "(", "\"Fetching tweets %s from %s to %s\"", ",", "self", ".", "query", ",", "str", "(", "since_id", ")", ",", "str", "(", "max_id", ")", "if", "max_id", "else", "'--'", ")", "tweets_ids", "=", "[", "]", "min_date", "=", "None", "max_date", "=", "None", "group_tweets", "=", "self", ".", "client", ".", "tweets", "(", "self", ".", "query", ",", "since_id", "=", "since_id", ",", "max_id", "=", "max_id", ",", "geocode", "=", "geocode", ",", "lang", "=", "lang", ",", "include_entities", "=", "entities", ",", "result_type", "=", "tweets_type", ")", "for", "tweets", "in", "group_tweets", ":", "for", "i", "in", "range", "(", "len", "(", "tweets", ")", ")", ":", "tweet", "=", "tweets", "[", "i", "]", "tweets_ids", ".", "append", "(", "tweet", "[", "'id'", "]", ")", "if", "tweets", "[", "-", "1", "]", "==", "tweet", ":", "min_date", "=", "str_to_datetime", "(", "tweets", "[", "-", "1", "]", "[", "'created_at'", "]", ")", "if", "tweets", "[", "0", "]", "==", "tweet", "and", "not", "max_date", ":", "max_date", "=", "str_to_datetime", "(", "tweets", "[", "0", "]", "[", "'created_at'", "]", ")", "yield", "tweet", "logger", ".", "info", "(", "\"Fetch process completed: %s (unique %s) tweets fetched, from %s to %s\"", ",", "len", "(", "tweets_ids", ")", ",", "len", "(", "list", "(", "set", "(", "tweets_ids", ")", ")", ")", ",", "min_date", ",", "max_date", ")" ]
41c908605e88b7ebc3a536c643fa0f212eaf9e0e
test
Twitter._init_client
Init client
perceval/backends/core/twitter.py
def _init_client(self, from_archive=False): """Init client""" return TwitterClient(self.api_token, self.max_items, self.sleep_for_rate, self.min_rate_to_sleep, self.sleep_time, self.archive, from_archive)
def _init_client(self, from_archive=False): """Init client""" return TwitterClient(self.api_token, self.max_items, self.sleep_for_rate, self.min_rate_to_sleep, self.sleep_time, self.archive, from_archive)
[ "Init", "client" ]
chaoss/grimoirelab-perceval
python
https://github.com/chaoss/grimoirelab-perceval/blob/41c908605e88b7ebc3a536c643fa0f212eaf9e0e/perceval/backends/core/twitter.py#L214-L219
[ "def", "_init_client", "(", "self", ",", "from_archive", "=", "False", ")", ":", "return", "TwitterClient", "(", "self", ".", "api_token", ",", "self", ".", "max_items", ",", "self", ".", "sleep_for_rate", ",", "self", ".", "min_rate_to_sleep", ",", "self", ".", "sleep_time", ",", "self", ".", "archive", ",", "from_archive", ")" ]
41c908605e88b7ebc3a536c643fa0f212eaf9e0e
test
TwitterClient.tweets
Fetch tweets for a given query between since_id and max_id. :param query: query to fetch tweets :param since_id: if not null, it returns results with an ID greater than the specified ID :param max_id: if not null, it returns results with an ID less than the specified ID :param geocode: if enabled, returns tweets by users located at latitude,longitude,"mi"|"km" :param lang: if enabled, restricts tweets to the given language, given by an ISO 639-1 code :param include_entities: if disabled, it excludes entities node :param result_type: type of tweets returned. Default is “mixed”, others are "recent" and "popular" :returns: a generator of tweets
perceval/backends/core/twitter.py
def tweets(self, query, since_id=None, max_id=None, geocode=None, lang=None, include_entities=True, result_type=TWEET_TYPE_MIXED): """Fetch tweets for a given query between since_id and max_id. :param query: query to fetch tweets :param since_id: if not null, it returns results with an ID greater than the specified ID :param max_id: if not null, it returns results with an ID less than the specified ID :param geocode: if enabled, returns tweets by users located at latitude,longitude,"mi"|"km" :param lang: if enabled, restricts tweets to the given language, given by an ISO 639-1 code :param include_entities: if disabled, it excludes entities node :param result_type: type of tweets returned. Default is “mixed”, others are "recent" and "popular" :returns: a generator of tweets """ resource = self.base_url params = {'q': query, 'count': self.max_items} if since_id: params['since_id'] = since_id if max_id: params['max_id'] = max_id if geocode: params['geocode'] = geocode if lang: params['lang'] = lang params['include_entities'] = include_entities params['result_type'] = result_type while True: raw_tweets = self._fetch(resource, params=params) tweets = json.loads(raw_tweets) if not tweets['statuses']: break params['max_id'] = tweets['statuses'][-1]['id'] - 1 yield tweets['statuses']
def tweets(self, query, since_id=None, max_id=None, geocode=None, lang=None, include_entities=True, result_type=TWEET_TYPE_MIXED): """Fetch tweets for a given query between since_id and max_id. :param query: query to fetch tweets :param since_id: if not null, it returns results with an ID greater than the specified ID :param max_id: if not null, it returns results with an ID less than the specified ID :param geocode: if enabled, returns tweets by users located at latitude,longitude,"mi"|"km" :param lang: if enabled, restricts tweets to the given language, given by an ISO 639-1 code :param include_entities: if disabled, it excludes entities node :param result_type: type of tweets returned. Default is “mixed”, others are "recent" and "popular" :returns: a generator of tweets """ resource = self.base_url params = {'q': query, 'count': self.max_items} if since_id: params['since_id'] = since_id if max_id: params['max_id'] = max_id if geocode: params['geocode'] = geocode if lang: params['lang'] = lang params['include_entities'] = include_entities params['result_type'] = result_type while True: raw_tweets = self._fetch(resource, params=params) tweets = json.loads(raw_tweets) if not tweets['statuses']: break params['max_id'] = tweets['statuses'][-1]['id'] - 1 yield tweets['statuses']
[ "Fetch", "tweets", "for", "a", "given", "query", "between", "since_id", "and", "max_id", "." ]
chaoss/grimoirelab-perceval
python
https://github.com/chaoss/grimoirelab-perceval/blob/41c908605e88b7ebc3a536c643fa0f212eaf9e0e/perceval/backends/core/twitter.py#L274-L315
[ "def", "tweets", "(", "self", ",", "query", ",", "since_id", "=", "None", ",", "max_id", "=", "None", ",", "geocode", "=", "None", ",", "lang", "=", "None", ",", "include_entities", "=", "True", ",", "result_type", "=", "TWEET_TYPE_MIXED", ")", ":", "resource", "=", "self", ".", "base_url", "params", "=", "{", "'q'", ":", "query", ",", "'count'", ":", "self", ".", "max_items", "}", "if", "since_id", ":", "params", "[", "'since_id'", "]", "=", "since_id", "if", "max_id", ":", "params", "[", "'max_id'", "]", "=", "max_id", "if", "geocode", ":", "params", "[", "'geocode'", "]", "=", "geocode", "if", "lang", ":", "params", "[", "'lang'", "]", "=", "lang", "params", "[", "'include_entities'", "]", "=", "include_entities", "params", "[", "'result_type'", "]", "=", "result_type", "while", "True", ":", "raw_tweets", "=", "self", ".", "_fetch", "(", "resource", ",", "params", "=", "params", ")", "tweets", "=", "json", ".", "loads", "(", "raw_tweets", ")", "if", "not", "tweets", "[", "'statuses'", "]", ":", "break", "params", "[", "'max_id'", "]", "=", "tweets", "[", "'statuses'", "]", "[", "-", "1", "]", "[", "'id'", "]", "-", "1", "yield", "tweets", "[", "'statuses'", "]" ]
41c908605e88b7ebc3a536c643fa0f212eaf9e0e
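The while loop in tweets() above implements max_id pagination: after each page, the lowest tweet id seen minus one becomes the next max_id, so the client walks backwards through the timeline until an empty page is returned. The same pattern, reduced to a toy fetch function with fake pages:

def paginate_by_max_id(fetch_page):
    """Yield pages, asking each time only for items older than the last one seen."""
    max_id = None
    while True:
        statuses = fetch_page(max_id=max_id)
        if not statuses:
            break
        max_id = statuses[-1]['id'] - 1  # next request: strictly older tweets
        yield statuses


# Fake pages standing in for the Twitter search API responses
fake_pages = iter([[{'id': 10}, {'id': 9}], [{'id': 8}], []])

for page in paginate_by_max_id(lambda max_id: next(fake_pages)):
    print([status['id'] for status in page])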
test
TwitterClient._fetch
Fetch a resource. Method to fetch the contents of a resource from the API for the given parameters. The method returns the text of the response. :param url: the endpoint of the API :param params: parameters to filter :returns: the text of the response
perceval/backends/core/twitter.py
def _fetch(self, url, params):
        """Fetch a resource.

        Method to fetch the contents of a resource from the API for the
        given parameters. The method returns the text of the response.

        :param url: the endpoint of the API
        :param params: parameters to filter

        :returns: the text of the response
        """
        if not self.from_archive:
            self.sleep_for_rate_limit()

        headers = {'Authorization': 'Bearer ' + self.api_key}
        r = self.fetch(url, payload=params, headers=headers)

        if not self.from_archive:
            self.update_rate_limit(r)

        return r.text
def _fetch(self, url, params):
        """Fetch a resource.

        Method to fetch the contents of a resource from the API for the
        given parameters. The method returns the text of the response.

        :param url: the endpoint of the API
        :param params: parameters to filter

        :returns: the text of the response
        """
        if not self.from_archive:
            self.sleep_for_rate_limit()

        headers = {'Authorization': 'Bearer ' + self.api_key}
        r = self.fetch(url, payload=params, headers=headers)

        if not self.from_archive:
            self.update_rate_limit(r)

        return r.text
[ "Fetch", "a", "resource", "." ]
chaoss/grimoirelab-perceval
python
https://github.com/chaoss/grimoirelab-perceval/blob/41c908605e88b7ebc3a536c643fa0f212eaf9e0e/perceval/backends/core/twitter.py#L317-L338
[ "def", "_fetch", "(", "self", ",", "url", ",", "params", ")", ":", "if", "not", "self", ".", "from_archive", ":", "self", ".", "sleep_for_rate_limit", "(", ")", "headers", "=", "{", "'Authorization'", ":", "'Bearer '", "+", "self", ".", "api_key", "}", "r", "=", "self", ".", "fetch", "(", "url", ",", "payload", "=", "params", ",", "headers", "=", "headers", ")", "if", "not", "self", ".", "from_archive", ":", "self", ".", "update_rate_limit", "(", "r", ")", "return", "r", ".", "text" ]
41c908605e88b7ebc3a536c643fa0f212eaf9e0e
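_fetch above only adds a Bearer token header before delegating to the shared HTTP client. Outside perceval, the equivalent request with the requests library would look roughly like this; the endpoint URL and token are assumptions, not values taken from these records:

import requests

API_KEY = 'XXXX'  # placeholder bearer token
SEARCH_URL = 'https://api.twitter.com/1.1/search/tweets.json'  # assumed endpoint

response = requests.get(SEARCH_URL,
                        params={'q': 'grimoirelab', 'count': 100},
                        headers={'Authorization': 'Bearer ' + API_KEY})
response.raise_for_status()
print(len(response.json().get('statuses', [])))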
test
TwitterCommand.setup_cmd_parser
Returns the Twitter argument parser.
perceval/backends/core/twitter.py
def setup_cmd_parser(cls): """Returns the Twitter argument parser.""" parser = BackendCommandArgumentParser(cls.BACKEND.CATEGORIES, token_auth=True, archive=True) # Backend token is required action = parser.parser._option_string_actions['--api-token'] action.required = True # Meetup options group = parser.parser.add_argument_group('Twitter arguments') group.add_argument('--max-items', dest='max_items', type=int, default=MAX_ITEMS, help="Maximum number of items requested on the same query") group.add_argument('--no-entities', dest='include_entities', action='store_false', help=" Exclude entities node") group.add_argument('--geo-code', dest='geocode', help="Select tweets by users located at latitude,longitude,radius") group.add_argument('--lang', dest='lang', help="Select tweets to the given language in ISO 639-1 code") group.add_argument('--tweets-type', dest='tweets_type', default=TWEET_TYPE_MIXED, help="Type of tweets returned. Default is 'mixed', others are 'recent' and 'popular'") group.add_argument('--sleep-for-rate', dest='sleep_for_rate', action='store_true', help="sleep for getting more rate") group.add_argument('--min-rate-to-sleep', dest='min_rate_to_sleep', default=MIN_RATE_LIMIT, type=int, help="sleep until reset when the rate limit reaches this value") group.add_argument('--sleep-time', dest='sleep_time', default=SLEEP_TIME, type=int, help="minimun sleeping time to avoid too many request exception") # Required arguments parser.parser.add_argument('query', help="Search query including operators, max 500 chars") return parser
def setup_cmd_parser(cls): """Returns the Twitter argument parser.""" parser = BackendCommandArgumentParser(cls.BACKEND.CATEGORIES, token_auth=True, archive=True) # Backend token is required action = parser.parser._option_string_actions['--api-token'] action.required = True # Meetup options group = parser.parser.add_argument_group('Twitter arguments') group.add_argument('--max-items', dest='max_items', type=int, default=MAX_ITEMS, help="Maximum number of items requested on the same query") group.add_argument('--no-entities', dest='include_entities', action='store_false', help=" Exclude entities node") group.add_argument('--geo-code', dest='geocode', help="Select tweets by users located at latitude,longitude,radius") group.add_argument('--lang', dest='lang', help="Select tweets to the given language in ISO 639-1 code") group.add_argument('--tweets-type', dest='tweets_type', default=TWEET_TYPE_MIXED, help="Type of tweets returned. Default is 'mixed', others are 'recent' and 'popular'") group.add_argument('--sleep-for-rate', dest='sleep_for_rate', action='store_true', help="sleep for getting more rate") group.add_argument('--min-rate-to-sleep', dest='min_rate_to_sleep', default=MIN_RATE_LIMIT, type=int, help="sleep until reset when the rate limit reaches this value") group.add_argument('--sleep-time', dest='sleep_time', default=SLEEP_TIME, type=int, help="minimun sleeping time to avoid too many request exception") # Required arguments parser.parser.add_argument('query', help="Search query including operators, max 500 chars") return parser
[ "Returns", "the", "Twitter", "argument", "parser", "." ]
chaoss/grimoirelab-perceval
python
https://github.com/chaoss/grimoirelab-perceval/blob/41c908605e88b7ebc3a536c643fa0f212eaf9e0e/perceval/backends/core/twitter.py#L347-L386
[ "def", "setup_cmd_parser", "(", "cls", ")", ":", "parser", "=", "BackendCommandArgumentParser", "(", "cls", ".", "BACKEND", ".", "CATEGORIES", ",", "token_auth", "=", "True", ",", "archive", "=", "True", ")", "# Backend token is required", "action", "=", "parser", ".", "parser", ".", "_option_string_actions", "[", "'--api-token'", "]", "action", ".", "required", "=", "True", "# Meetup options", "group", "=", "parser", ".", "parser", ".", "add_argument_group", "(", "'Twitter arguments'", ")", "group", ".", "add_argument", "(", "'--max-items'", ",", "dest", "=", "'max_items'", ",", "type", "=", "int", ",", "default", "=", "MAX_ITEMS", ",", "help", "=", "\"Maximum number of items requested on the same query\"", ")", "group", ".", "add_argument", "(", "'--no-entities'", ",", "dest", "=", "'include_entities'", ",", "action", "=", "'store_false'", ",", "help", "=", "\" Exclude entities node\"", ")", "group", ".", "add_argument", "(", "'--geo-code'", ",", "dest", "=", "'geocode'", ",", "help", "=", "\"Select tweets by users located at latitude,longitude,radius\"", ")", "group", ".", "add_argument", "(", "'--lang'", ",", "dest", "=", "'lang'", ",", "help", "=", "\"Select tweets to the given language in ISO 639-1 code\"", ")", "group", ".", "add_argument", "(", "'--tweets-type'", ",", "dest", "=", "'tweets_type'", ",", "default", "=", "TWEET_TYPE_MIXED", ",", "help", "=", "\"Type of tweets returned. Default is 'mixed', others are 'recent' and 'popular'\"", ")", "group", ".", "add_argument", "(", "'--sleep-for-rate'", ",", "dest", "=", "'sleep_for_rate'", ",", "action", "=", "'store_true'", ",", "help", "=", "\"sleep for getting more rate\"", ")", "group", ".", "add_argument", "(", "'--min-rate-to-sleep'", ",", "dest", "=", "'min_rate_to_sleep'", ",", "default", "=", "MIN_RATE_LIMIT", ",", "type", "=", "int", ",", "help", "=", "\"sleep until reset when the rate limit reaches this value\"", ")", "group", ".", "add_argument", "(", "'--sleep-time'", ",", "dest", "=", "'sleep_time'", ",", "default", "=", "SLEEP_TIME", ",", "type", "=", "int", ",", "help", "=", "\"minimun sleeping time to avoid too many request exception\"", ")", "# Required arguments", "parser", ".", "parser", ".", "add_argument", "(", "'query'", ",", "help", "=", "\"Search query including operators, max 500 chars\"", ")", "return", "parser" ]
41c908605e88b7ebc3a536c643fa0f212eaf9e0e
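Assuming the argument parser built above can be exercised directly (its parse() method takes a list of argument strings, as the BackendCommandArgumentParser docstring elsewhere in this dataset indicates), a quick way to check which options end up in the namespace; the token and query values are placeholders:

from perceval.backends.core.twitter import TwitterCommand

parser = TwitterCommand.setup_cmd_parser()
args = parser.parse(['--api-token', 'XXXX',      # required by this parser
                     '--max-items', '200',
                     '--tweets-type', 'recent',
                     'grimoirelab'])             # positional search query

print(args.query, args.max_items, args.tweets_type, args.sleep_for_rate)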
test
GoogleHits.fetch
Fetch data from Google API. The method retrieves a list of hits for some given keywords using the Google API. :param category: the category of items to fetch :returns: a generator of data
perceval/backends/core/googlehits.py
def fetch(self, category=CATEGORY_HITS): """Fetch data from Google API. The method retrieves a list of hits for some given keywords using the Google API. :param category: the category of items to fetch :returns: a generator of data """ kwargs = {} items = super().fetch(category, **kwargs) return items
def fetch(self, category=CATEGORY_HITS): """Fetch data from Google API. The method retrieves a list of hits for some given keywords using the Google API. :param category: the category of items to fetch :returns: a generator of data """ kwargs = {} items = super().fetch(category, **kwargs) return items
[ "Fetch", "data", "from", "Google", "API", "." ]
chaoss/grimoirelab-perceval
python
https://github.com/chaoss/grimoirelab-perceval/blob/41c908605e88b7ebc3a536c643fa0f212eaf9e0e/perceval/backends/core/googlehits.py#L81-L94
[ "def", "fetch", "(", "self", ",", "category", "=", "CATEGORY_HITS", ")", ":", "kwargs", "=", "{", "}", "items", "=", "super", "(", ")", ".", "fetch", "(", "category", ",", "*", "*", "kwargs", ")", "return", "items" ]
41c908605e88b7ebc3a536c643fa0f212eaf9e0e
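A usage sketch for the GoogleHits backend above. The constructor is assumed to take the list of keywords (the records only show self.keywords being used), and the 'keywords'/'hits' fields come from the __parse_hits record further below.

from perceval.backends.core.googlehits import GoogleHits

# The __init__ signature is assumed; the records only show that the backend
# keeps the search terms in self.keywords.
backend = GoogleHits(['chaoss', 'grimoirelab'])

for item in backend.fetch():
    hit = item['data']
    print(hit['keywords'], hit['hits'])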
test
GoogleHits.fetch_items
Fetch Google hit items :param category: the category of items to fetch :param kwargs: backend arguments :returns: a generator of items
perceval/backends/core/googlehits.py
def fetch_items(self, category, **kwargs): """Fetch Google hit items :param category: the category of items to fetch :param kwargs: backend arguments :returns: a generator of items """ logger.info("Fetching data for '%s'", self.keywords) hits_raw = self.client.hits(self.keywords) hits = self.__parse_hits(hits_raw) yield hits logger.info("Fetch process completed")
def fetch_items(self, category, **kwargs): """Fetch Google hit items :param category: the category of items to fetch :param kwargs: backend arguments :returns: a generator of items """ logger.info("Fetching data for '%s'", self.keywords) hits_raw = self.client.hits(self.keywords) hits = self.__parse_hits(hits_raw) yield hits logger.info("Fetch process completed")
[ "Fetch", "Google", "hit", "items" ]
chaoss/grimoirelab-perceval
python
https://github.com/chaoss/grimoirelab-perceval/blob/41c908605e88b7ebc3a536c643fa0f212eaf9e0e/perceval/backends/core/googlehits.py#L96-L111
[ "def", "fetch_items", "(", "self", ",", "category", ",", "*", "*", "kwargs", ")", ":", "logger", ".", "info", "(", "\"Fetching data for '%s'\"", ",", "self", ".", "keywords", ")", "hits_raw", "=", "self", ".", "client", ".", "hits", "(", "self", ".", "keywords", ")", "hits", "=", "self", ".", "__parse_hits", "(", "hits_raw", ")", "yield", "hits", "logger", ".", "info", "(", "\"Fetch process completed\"", ")" ]
41c908605e88b7ebc3a536c643fa0f212eaf9e0e
test
GoogleHits._init_client
Init client
perceval/backends/core/googlehits.py
def _init_client(self, from_archive=False): """Init client""" return GoogleHitsClient(self.sleep_time, self.max_retries, archive=self.archive, from_archive=from_archive)
def _init_client(self, from_archive=False): """Init client""" return GoogleHitsClient(self.sleep_time, self.max_retries, archive=self.archive, from_archive=from_archive)
[ "Init", "client" ]
chaoss/grimoirelab-perceval
python
https://github.com/chaoss/grimoirelab-perceval/blob/41c908605e88b7ebc3a536c643fa0f212eaf9e0e/perceval/backends/core/googlehits.py#L158-L162
[ "def", "_init_client", "(", "self", ",", "from_archive", "=", "False", ")", ":", "return", "GoogleHitsClient", "(", "self", ".", "sleep_time", ",", "self", ".", "max_retries", ",", "archive", "=", "self", ".", "archive", ",", "from_archive", "=", "from_archive", ")" ]
41c908605e88b7ebc3a536c643fa0f212eaf9e0e
test
GoogleHits.__parse_hits
Parse the hits returned by the Google Search API
perceval/backends/core/googlehits.py
def __parse_hits(self, hit_raw): """Parse the hits returned by the Google Search API""" # Create the soup and get the desired div bs_result = bs4.BeautifulSoup(hit_raw, 'html.parser') hit_string = bs_result.find("div", id="resultStats").text # Remove commas or dots hit_string = hit_string.replace(',', u'') hit_string = hit_string.replace('.', u'') fetched_on = datetime_utcnow().timestamp() id_args = self.keywords[:] id_args.append(str(fetched_on)) hits_json = { 'fetched_on': fetched_on, 'id': uuid(*id_args), 'keywords': self.keywords, 'type': 'googleSearchHits' } if not hit_string: logger.warning("No hits for %s", self.keywords) hits_json['hits'] = 0 return hits_json str_hits = re.search(r'\d+', hit_string).group(0) hits = int(str_hits) hits_json['hits'] = hits return hits_json
def __parse_hits(self, hit_raw): """Parse the hits returned by the Google Search API""" # Create the soup and get the desired div bs_result = bs4.BeautifulSoup(hit_raw, 'html.parser') hit_string = bs_result.find("div", id="resultStats").text # Remove commas or dots hit_string = hit_string.replace(',', u'') hit_string = hit_string.replace('.', u'') fetched_on = datetime_utcnow().timestamp() id_args = self.keywords[:] id_args.append(str(fetched_on)) hits_json = { 'fetched_on': fetched_on, 'id': uuid(*id_args), 'keywords': self.keywords, 'type': 'googleSearchHits' } if not hit_string: logger.warning("No hits for %s", self.keywords) hits_json['hits'] = 0 return hits_json str_hits = re.search(r'\d+', hit_string).group(0) hits = int(str_hits) hits_json['hits'] = hits return hits_json
[ "Parse", "the", "hits", "returned", "by", "the", "Google", "Search", "API" ]
chaoss/grimoirelab-perceval
python
https://github.com/chaoss/grimoirelab-perceval/blob/41c908605e88b7ebc3a536c643fa0f212eaf9e0e/perceval/backends/core/googlehits.py#L164-L196
[ "def", "__parse_hits", "(", "self", ",", "hit_raw", ")", ":", "# Create the soup and get the desired div", "bs_result", "=", "bs4", ".", "BeautifulSoup", "(", "hit_raw", ",", "'html.parser'", ")", "hit_string", "=", "bs_result", ".", "find", "(", "\"div\"", ",", "id", "=", "\"resultStats\"", ")", ".", "text", "# Remove commas or dots", "hit_string", "=", "hit_string", ".", "replace", "(", "','", ",", "u''", ")", "hit_string", "=", "hit_string", ".", "replace", "(", "'.'", ",", "u''", ")", "fetched_on", "=", "datetime_utcnow", "(", ")", ".", "timestamp", "(", ")", "id_args", "=", "self", ".", "keywords", "[", ":", "]", "id_args", ".", "append", "(", "str", "(", "fetched_on", ")", ")", "hits_json", "=", "{", "'fetched_on'", ":", "fetched_on", ",", "'id'", ":", "uuid", "(", "*", "id_args", ")", ",", "'keywords'", ":", "self", ".", "keywords", ",", "'type'", ":", "'googleSearchHits'", "}", "if", "not", "hit_string", ":", "logger", ".", "warning", "(", "\"No hits for %s\"", ",", "self", ".", "keywords", ")", "hits_json", "[", "'hits'", "]", "=", "0", "return", "hits_json", "str_hits", "=", "re", ".", "search", "(", "r'\\d+'", ",", "hit_string", ")", ".", "group", "(", "0", ")", "hits", "=", "int", "(", "str_hits", ")", "hits_json", "[", "'hits'", "]", "=", "hits", "return", "hits_json" ]
41c908605e88b7ebc3a536c643fa0f212eaf9e0e
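The parsing step above boils down to: find the div with id resultStats, strip thousands separators, and take the first run of digits. A self-contained demonstration on a hand-written stand-in for the results page markup:

import re

import bs4

# Minimal stand-in for the Google results page parsed by __parse_hits
html = '<html><body><div id="resultStats">About 1,340,000 results</div></body></html>'

soup = bs4.BeautifulSoup(html, 'html.parser')
hit_string = soup.find('div', id='resultStats').text
hit_string = hit_string.replace(',', '').replace('.', '')

hits = int(re.search(r'\d+', hit_string).group(0))
print(hits)  # 1340000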
test
GoogleHitsClient.hits
Fetch information about a list of keywords.
perceval/backends/core/googlehits.py
def hits(self, keywords): """Fetch information about a list of keywords.""" if len(keywords) == 1: query_str = keywords[0] else: query_str = ' '.join([k for k in keywords]) logger.info("Fetching hits for '%s'", query_str) params = {'q': query_str} # Make the request req = self.fetch(GOOGLE_SEARCH_URL, payload=params) return req.text
def hits(self, keywords): """Fetch information about a list of keywords.""" if len(keywords) == 1: query_str = keywords[0] else: query_str = ' '.join([k for k in keywords]) logger.info("Fetching hits for '%s'", query_str) params = {'q': query_str} # Make the request req = self.fetch(GOOGLE_SEARCH_URL, payload=params) return req.text
[ "Fetch", "information", "about", "a", "list", "of", "keywords", "." ]
chaoss/grimoirelab-perceval
python
https://github.com/chaoss/grimoirelab-perceval/blob/41c908605e88b7ebc3a536c643fa0f212eaf9e0e/perceval/backends/core/googlehits.py#L219-L233
[ "def", "hits", "(", "self", ",", "keywords", ")", ":", "if", "len", "(", "keywords", ")", "==", "1", ":", "query_str", "=", "keywords", "[", "0", "]", "else", ":", "query_str", "=", "' '", ".", "join", "(", "[", "k", "for", "k", "in", "keywords", "]", ")", "logger", ".", "info", "(", "\"Fetching hits for '%s'\"", ",", "query_str", ")", "params", "=", "{", "'q'", ":", "query_str", "}", "# Make the request", "req", "=", "self", ".", "fetch", "(", "GOOGLE_SEARCH_URL", ",", "payload", "=", "params", ")", "return", "req", ".", "text" ]
41c908605e88b7ebc3a536c643fa0f212eaf9e0e
test
GitHub.fetch
Fetch the issues/pull requests from the repository. The method retrieves, from a GitHub repository, the issues/pull requests updated since the given date. :param category: the category of items to fetch :param from_date: obtain issues/pull requests updated since this date :param to_date: obtain issues/pull requests until a specific date (included) :returns: a generator of issues
perceval/backends/core/github.py
def fetch(self, category=CATEGORY_ISSUE, from_date=DEFAULT_DATETIME, to_date=DEFAULT_LAST_DATETIME): """Fetch the issues/pull requests from the repository. The method retrieves, from a GitHub repository, the issues/pull requests updated since the given date. :param category: the category of items to fetch :param from_date: obtain issues/pull requests updated since this date :param to_date: obtain issues/pull requests until a specific date (included) :returns: a generator of issues """ if not from_date: from_date = DEFAULT_DATETIME if not to_date: to_date = DEFAULT_LAST_DATETIME from_date = datetime_to_utc(from_date) to_date = datetime_to_utc(to_date) kwargs = { 'from_date': from_date, 'to_date': to_date } items = super().fetch(category, **kwargs) return items
def fetch(self, category=CATEGORY_ISSUE, from_date=DEFAULT_DATETIME, to_date=DEFAULT_LAST_DATETIME): """Fetch the issues/pull requests from the repository. The method retrieves, from a GitHub repository, the issues/pull requests updated since the given date. :param category: the category of items to fetch :param from_date: obtain issues/pull requests updated since this date :param to_date: obtain issues/pull requests until a specific date (included) :returns: a generator of issues """ if not from_date: from_date = DEFAULT_DATETIME if not to_date: to_date = DEFAULT_LAST_DATETIME from_date = datetime_to_utc(from_date) to_date = datetime_to_utc(to_date) kwargs = { 'from_date': from_date, 'to_date': to_date } items = super().fetch(category, **kwargs) return items
[ "Fetch", "the", "issues", "/", "pull", "requests", "from", "the", "repository", "." ]
chaoss/grimoirelab-perceval
python
https://github.com/chaoss/grimoirelab-perceval/blob/41c908605e88b7ebc3a536c643fa0f212eaf9e0e/perceval/backends/core/github.py#L117-L143
[ "def", "fetch", "(", "self", ",", "category", "=", "CATEGORY_ISSUE", ",", "from_date", "=", "DEFAULT_DATETIME", ",", "to_date", "=", "DEFAULT_LAST_DATETIME", ")", ":", "if", "not", "from_date", ":", "from_date", "=", "DEFAULT_DATETIME", "if", "not", "to_date", ":", "to_date", "=", "DEFAULT_LAST_DATETIME", "from_date", "=", "datetime_to_utc", "(", "from_date", ")", "to_date", "=", "datetime_to_utc", "(", "to_date", ")", "kwargs", "=", "{", "'from_date'", ":", "from_date", ",", "'to_date'", ":", "to_date", "}", "items", "=", "super", "(", ")", ".", "fetch", "(", "category", ",", "*", "*", "kwargs", ")", "return", "items" ]
41c908605e88b7ebc3a536c643fa0f212eaf9e0e
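A sketch of fetching issues updated since a given date with the GitHub backend above. The constructor arguments mirror the attributes passed to GitHubClient in the _init_client record below, but the exact __init__ signature is an assumption; owner, repository and token are placeholders, and 'number'/'state'/'updated_at' are standard GitHub issue fields.

import datetime

from perceval.backends.core.github import GitHub

# owner/repository/api_token follow the attributes used by _init_client below;
# check the class __init__ for the exact signature.
backend = GitHub(owner='chaoss', repository='grimoirelab-perceval',
                 api_token='XXXX', sleep_for_rate=True)

since = datetime.datetime(2019, 1, 1)
for item in backend.fetch(from_date=since):   # default category: issues
    issue = item['data']
    print(issue['number'], issue['state'], issue['updated_at'])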
test
GitHub.fetch_items
Fetch the items (issues or pull_requests) :param category: the category of items to fetch :param kwargs: backend arguments :returns: a generator of items
perceval/backends/core/github.py
def fetch_items(self, category, **kwargs): """Fetch the items (issues or pull_requests) :param category: the category of items to fetch :param kwargs: backend arguments :returns: a generator of items """ from_date = kwargs['from_date'] to_date = kwargs['to_date'] if category == CATEGORY_ISSUE: items = self.__fetch_issues(from_date, to_date) elif category == CATEGORY_PULL_REQUEST: items = self.__fetch_pull_requests(from_date, to_date) else: items = self.__fetch_repo_info() return items
def fetch_items(self, category, **kwargs): """Fetch the items (issues or pull_requests) :param category: the category of items to fetch :param kwargs: backend arguments :returns: a generator of items """ from_date = kwargs['from_date'] to_date = kwargs['to_date'] if category == CATEGORY_ISSUE: items = self.__fetch_issues(from_date, to_date) elif category == CATEGORY_PULL_REQUEST: items = self.__fetch_pull_requests(from_date, to_date) else: items = self.__fetch_repo_info() return items
[ "Fetch", "the", "items", "(", "issues", "or", "pull_requests", ")" ]
chaoss/grimoirelab-perceval
python
https://github.com/chaoss/grimoirelab-perceval/blob/41c908605e88b7ebc3a536c643fa0f212eaf9e0e/perceval/backends/core/github.py#L145-L163
[ "def", "fetch_items", "(", "self", ",", "category", ",", "*", "*", "kwargs", ")", ":", "from_date", "=", "kwargs", "[", "'from_date'", "]", "to_date", "=", "kwargs", "[", "'to_date'", "]", "if", "category", "==", "CATEGORY_ISSUE", ":", "items", "=", "self", ".", "__fetch_issues", "(", "from_date", ",", "to_date", ")", "elif", "category", "==", "CATEGORY_PULL_REQUEST", ":", "items", "=", "self", ".", "__fetch_pull_requests", "(", "from_date", ",", "to_date", ")", "else", ":", "items", "=", "self", ".", "__fetch_repo_info", "(", ")", "return", "items" ]
41c908605e88b7ebc3a536c643fa0f212eaf9e0e
test
GitHub.metadata_updated_on
Extracts the update time from a GitHub item. The timestamp used is extracted from 'updated_at' field. This date is converted to UNIX timestamp format. As GitHub dates are in UTC the conversion is straightforward. :param item: item generated by the backend :returns: a UNIX timestamp
perceval/backends/core/github.py
def metadata_updated_on(item): """Extracts the update time from a GitHub item. The timestamp used is extracted from 'updated_at' field. This date is converted to UNIX timestamp format. As GitHub dates are in UTC the conversion is straightforward. :param item: item generated by the backend :returns: a UNIX timestamp """ if "forks_count" in item: return item['fetched_on'] else: ts = item['updated_at'] ts = str_to_datetime(ts) return ts.timestamp()
def metadata_updated_on(item): """Extracts the update time from a GitHub item. The timestamp used is extracted from 'updated_at' field. This date is converted to UNIX timestamp format. As GitHub dates are in UTC the conversion is straightforward. :param item: item generated by the backend :returns: a UNIX timestamp """ if "forks_count" in item: return item['fetched_on'] else: ts = item['updated_at'] ts = str_to_datetime(ts) return ts.timestamp()
[ "Extracts", "the", "update", "time", "from", "a", "GitHub", "item", "." ]
chaoss/grimoirelab-perceval
python
https://github.com/chaoss/grimoirelab-perceval/blob/41c908605e88b7ebc3a536c643fa0f212eaf9e0e/perceval/backends/core/github.py#L191-L208
[ "def", "metadata_updated_on", "(", "item", ")", ":", "if", "\"forks_count\"", "in", "item", ":", "return", "item", "[", "'fetched_on'", "]", "else", ":", "ts", "=", "item", "[", "'updated_at'", "]", "ts", "=", "str_to_datetime", "(", "ts", ")", "return", "ts", ".", "timestamp", "(", ")" ]
41c908605e88b7ebc3a536c643fa0f212eaf9e0e
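A small check of the conversion described above, built with the standard library instead of grimoirelab's str_to_datetime: an ISO-8601 UTC value like GitHub's updated_at maps directly onto a UNIX timestamp.

import datetime

# GitHub returns UTC strings such as '2019-01-01T12:00:00Z'; here the same
# instant is built directly to avoid depending on str_to_datetime.
updated_at = datetime.datetime(2019, 1, 1, 12, 0, 0,
                               tzinfo=datetime.timezone.utc)
print(updated_at.timestamp())  # 1546344000.0, the value stored as 'updated_on'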
test
GitHub.metadata_category
Extracts the category from a GitHub item. This backend generates three types of item: 'issue', 'pull_request' and repository information (CATEGORY_REPO).
perceval/backends/core/github.py
def metadata_category(item):
        """Extracts the category from a GitHub item.

        This backend generates three types of item: 'issue',
        'pull_request' and repository information (CATEGORY_REPO).
        """
        if "base" in item:
            category = CATEGORY_PULL_REQUEST
        elif "forks_count" in item:
            category = CATEGORY_REPO
        else:
            category = CATEGORY_ISSUE

        return category
def metadata_category(item):
        """Extracts the category from a GitHub item.

        This backend generates three types of item: 'issue',
        'pull_request' and repository information (CATEGORY_REPO).
        """
        if "base" in item:
            category = CATEGORY_PULL_REQUEST
        elif "forks_count" in item:
            category = CATEGORY_REPO
        else:
            category = CATEGORY_ISSUE

        return category
[ "Extracts", "the", "category", "from", "a", "GitHub", "item", "." ]
chaoss/grimoirelab-perceval
python
https://github.com/chaoss/grimoirelab-perceval/blob/41c908605e88b7ebc3a536c643fa0f212eaf9e0e/perceval/backends/core/github.py#L211-L225
[ "def", "metadata_category", "(", "item", ")", ":", "if", "\"base\"", "in", "item", ":", "category", "=", "CATEGORY_PULL_REQUEST", "elif", "\"forks_count\"", "in", "item", ":", "category", "=", "CATEGORY_REPO", "else", ":", "category", "=", "CATEGORY_ISSUE", "return", "category" ]
41c908605e88b7ebc3a536c643fa0f212eaf9e0e
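The category detection above relies on marker keys that only one kind of payload contains: pull requests carry 'base', repository info carries 'forks_count', and everything else is treated as an issue. A toy reproduction of that dispatch (the literal category strings below are stand-ins for the CATEGORY_* constants):

samples = [
    {'base': {'ref': 'master'}, 'updated_at': '2019-01-01T00:00:00Z'},  # pull request
    {'forks_count': 42, 'fetched_on': 0.0},                             # repository info
    {'title': 'Crash on start', 'updated_at': '2019-01-01T00:00:00Z'},  # issue
]

for item in samples:
    if 'base' in item:
        category = 'pull_request'   # stand-in for CATEGORY_PULL_REQUEST
    elif 'forks_count' in item:
        category = 'repo'           # stand-in for CATEGORY_REPO
    else:
        category = 'issue'          # stand-in for CATEGORY_ISSUE
    print(category)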
test
GitHub._init_client
Init client
perceval/backends/core/github.py
def _init_client(self, from_archive=False): """Init client""" return GitHubClient(self.owner, self.repository, self.api_token, self.base_url, self.sleep_for_rate, self.min_rate_to_sleep, self.sleep_time, self.max_retries, self.archive, from_archive)
def _init_client(self, from_archive=False): """Init client""" return GitHubClient(self.owner, self.repository, self.api_token, self.base_url, self.sleep_for_rate, self.min_rate_to_sleep, self.sleep_time, self.max_retries, self.archive, from_archive)
[ "Init", "client" ]
chaoss/grimoirelab-perceval
python
https://github.com/chaoss/grimoirelab-perceval/blob/41c908605e88b7ebc3a536c643fa0f212eaf9e0e/perceval/backends/core/github.py#L227-L233
[ "def", "_init_client", "(", "self", ",", "from_archive", "=", "False", ")", ":", "return", "GitHubClient", "(", "self", ".", "owner", ",", "self", ".", "repository", ",", "self", ".", "api_token", ",", "self", ".", "base_url", ",", "self", ".", "sleep_for_rate", ",", "self", ".", "min_rate_to_sleep", ",", "self", ".", "sleep_time", ",", "self", ".", "max_retries", ",", "self", ".", "archive", ",", "from_archive", ")" ]
41c908605e88b7ebc3a536c643fa0f212eaf9e0e
test
GitHub.__fetch_issues
Fetch the issues
perceval/backends/core/github.py
def __fetch_issues(self, from_date, to_date): """Fetch the issues""" issues_groups = self.client.issues(from_date=from_date) for raw_issues in issues_groups: issues = json.loads(raw_issues) for issue in issues: if str_to_datetime(issue['updated_at']) > to_date: return self.__init_extra_issue_fields(issue) for field in TARGET_ISSUE_FIELDS: if not issue[field]: continue if field == 'user': issue[field + '_data'] = self.__get_user(issue[field]['login']) elif field == 'assignee': issue[field + '_data'] = self.__get_issue_assignee(issue[field]) elif field == 'assignees': issue[field + '_data'] = self.__get_issue_assignees(issue[field]) elif field == 'comments': issue[field + '_data'] = self.__get_issue_comments(issue['number']) elif field == 'reactions': issue[field + '_data'] = \ self.__get_issue_reactions(issue['number'], issue['reactions']['total_count']) yield issue
def __fetch_issues(self, from_date, to_date): """Fetch the issues""" issues_groups = self.client.issues(from_date=from_date) for raw_issues in issues_groups: issues = json.loads(raw_issues) for issue in issues: if str_to_datetime(issue['updated_at']) > to_date: return self.__init_extra_issue_fields(issue) for field in TARGET_ISSUE_FIELDS: if not issue[field]: continue if field == 'user': issue[field + '_data'] = self.__get_user(issue[field]['login']) elif field == 'assignee': issue[field + '_data'] = self.__get_issue_assignee(issue[field]) elif field == 'assignees': issue[field + '_data'] = self.__get_issue_assignees(issue[field]) elif field == 'comments': issue[field + '_data'] = self.__get_issue_comments(issue['number']) elif field == 'reactions': issue[field + '_data'] = \ self.__get_issue_reactions(issue['number'], issue['reactions']['total_count']) yield issue
[ "Fetch", "the", "issues" ]
chaoss/grimoirelab-perceval
python
https://github.com/chaoss/grimoirelab-perceval/blob/41c908605e88b7ebc3a536c643fa0f212eaf9e0e/perceval/backends/core/github.py#L235-L265
[ "def", "__fetch_issues", "(", "self", ",", "from_date", ",", "to_date", ")", ":", "issues_groups", "=", "self", ".", "client", ".", "issues", "(", "from_date", "=", "from_date", ")", "for", "raw_issues", "in", "issues_groups", ":", "issues", "=", "json", ".", "loads", "(", "raw_issues", ")", "for", "issue", "in", "issues", ":", "if", "str_to_datetime", "(", "issue", "[", "'updated_at'", "]", ")", ">", "to_date", ":", "return", "self", ".", "__init_extra_issue_fields", "(", "issue", ")", "for", "field", "in", "TARGET_ISSUE_FIELDS", ":", "if", "not", "issue", "[", "field", "]", ":", "continue", "if", "field", "==", "'user'", ":", "issue", "[", "field", "+", "'_data'", "]", "=", "self", ".", "__get_user", "(", "issue", "[", "field", "]", "[", "'login'", "]", ")", "elif", "field", "==", "'assignee'", ":", "issue", "[", "field", "+", "'_data'", "]", "=", "self", ".", "__get_issue_assignee", "(", "issue", "[", "field", "]", ")", "elif", "field", "==", "'assignees'", ":", "issue", "[", "field", "+", "'_data'", "]", "=", "self", ".", "__get_issue_assignees", "(", "issue", "[", "field", "]", ")", "elif", "field", "==", "'comments'", ":", "issue", "[", "field", "+", "'_data'", "]", "=", "self", ".", "__get_issue_comments", "(", "issue", "[", "'number'", "]", ")", "elif", "field", "==", "'reactions'", ":", "issue", "[", "field", "+", "'_data'", "]", "=", "self", ".", "__get_issue_reactions", "(", "issue", "[", "'number'", "]", ",", "issue", "[", "'reactions'", "]", "[", "'total_count'", "]", ")", "yield", "issue" ]
41c908605e88b7ebc3a536c643fa0f212eaf9e0e
test
GitHub.__fetch_pull_requests
Fetch the pull requests
perceval/backends/core/github.py
def __fetch_pull_requests(self, from_date, to_date): """Fetch the pull requests""" raw_pulls = self.client.pulls(from_date=from_date) for raw_pull in raw_pulls: pull = json.loads(raw_pull) if str_to_datetime(pull['updated_at']) > to_date: return self.__init_extra_pull_fields(pull) for field in TARGET_PULL_FIELDS: if not pull[field]: continue if field == 'user': pull[field + '_data'] = self.__get_user(pull[field]['login']) elif field == 'merged_by': pull[field + '_data'] = self.__get_user(pull[field]['login']) elif field == 'review_comments': pull[field + '_data'] = self.__get_pull_review_comments(pull['number']) elif field == 'requested_reviewers': pull[field + '_data'] = self.__get_pull_requested_reviewers(pull['number']) elif field == 'commits': pull[field + '_data'] = self.__get_pull_commits(pull['number']) yield pull
def __fetch_pull_requests(self, from_date, to_date): """Fetch the pull requests""" raw_pulls = self.client.pulls(from_date=from_date) for raw_pull in raw_pulls: pull = json.loads(raw_pull) if str_to_datetime(pull['updated_at']) > to_date: return self.__init_extra_pull_fields(pull) for field in TARGET_PULL_FIELDS: if not pull[field]: continue if field == 'user': pull[field + '_data'] = self.__get_user(pull[field]['login']) elif field == 'merged_by': pull[field + '_data'] = self.__get_user(pull[field]['login']) elif field == 'review_comments': pull[field + '_data'] = self.__get_pull_review_comments(pull['number']) elif field == 'requested_reviewers': pull[field + '_data'] = self.__get_pull_requested_reviewers(pull['number']) elif field == 'commits': pull[field + '_data'] = self.__get_pull_commits(pull['number']) yield pull
[ "Fetch", "the", "pull", "requests" ]
chaoss/grimoirelab-perceval
python
https://github.com/chaoss/grimoirelab-perceval/blob/41c908605e88b7ebc3a536c643fa0f212eaf9e0e/perceval/backends/core/github.py#L267-L294
[ "def", "__fetch_pull_requests", "(", "self", ",", "from_date", ",", "to_date", ")", ":", "raw_pulls", "=", "self", ".", "client", ".", "pulls", "(", "from_date", "=", "from_date", ")", "for", "raw_pull", "in", "raw_pulls", ":", "pull", "=", "json", ".", "loads", "(", "raw_pull", ")", "if", "str_to_datetime", "(", "pull", "[", "'updated_at'", "]", ")", ">", "to_date", ":", "return", "self", ".", "__init_extra_pull_fields", "(", "pull", ")", "for", "field", "in", "TARGET_PULL_FIELDS", ":", "if", "not", "pull", "[", "field", "]", ":", "continue", "if", "field", "==", "'user'", ":", "pull", "[", "field", "+", "'_data'", "]", "=", "self", ".", "__get_user", "(", "pull", "[", "field", "]", "[", "'login'", "]", ")", "elif", "field", "==", "'merged_by'", ":", "pull", "[", "field", "+", "'_data'", "]", "=", "self", ".", "__get_user", "(", "pull", "[", "field", "]", "[", "'login'", "]", ")", "elif", "field", "==", "'review_comments'", ":", "pull", "[", "field", "+", "'_data'", "]", "=", "self", ".", "__get_pull_review_comments", "(", "pull", "[", "'number'", "]", ")", "elif", "field", "==", "'requested_reviewers'", ":", "pull", "[", "field", "+", "'_data'", "]", "=", "self", ".", "__get_pull_requested_reviewers", "(", "pull", "[", "'number'", "]", ")", "elif", "field", "==", "'commits'", ":", "pull", "[", "field", "+", "'_data'", "]", "=", "self", ".", "__get_pull_commits", "(", "pull", "[", "'number'", "]", ")", "yield", "pull" ]
41c908605e88b7ebc3a536c643fa0f212eaf9e0e
test
GitHub.__fetch_repo_info
Get repo info about stars, watchers and forks
perceval/backends/core/github.py
def __fetch_repo_info(self): """Get repo info about stars, watchers and forks""" raw_repo = self.client.repo() repo = json.loads(raw_repo) fetched_on = datetime_utcnow() repo['fetched_on'] = fetched_on.timestamp() yield repo
def __fetch_repo_info(self): """Get repo info about stars, watchers and forks""" raw_repo = self.client.repo() repo = json.loads(raw_repo) fetched_on = datetime_utcnow() repo['fetched_on'] = fetched_on.timestamp() yield repo
[ "Get", "repo", "info", "about", "stars", "watchers", "and", "forks" ]
chaoss/grimoirelab-perceval
python
https://github.com/chaoss/grimoirelab-perceval/blob/41c908605e88b7ebc3a536c643fa0f212eaf9e0e/perceval/backends/core/github.py#L296-L305
[ "def", "__fetch_repo_info", "(", "self", ")", ":", "raw_repo", "=", "self", ".", "client", ".", "repo", "(", ")", "repo", "=", "json", ".", "loads", "(", "raw_repo", ")", "fetched_on", "=", "datetime_utcnow", "(", ")", "repo", "[", "'fetched_on'", "]", "=", "fetched_on", ".", "timestamp", "(", ")", "yield", "repo" ]
41c908605e88b7ebc3a536c643fa0f212eaf9e0e
test
GitHub.__get_issue_reactions
Get issue reactions
perceval/backends/core/github.py
def __get_issue_reactions(self, issue_number, total_count): """Get issue reactions""" reactions = [] if total_count == 0: return reactions group_reactions = self.client.issue_reactions(issue_number) for raw_reactions in group_reactions: for reaction in json.loads(raw_reactions): reaction['user_data'] = self.__get_user(reaction['user']['login']) reactions.append(reaction) return reactions
def __get_issue_reactions(self, issue_number, total_count): """Get issue reactions""" reactions = [] if total_count == 0: return reactions group_reactions = self.client.issue_reactions(issue_number) for raw_reactions in group_reactions: for reaction in json.loads(raw_reactions): reaction['user_data'] = self.__get_user(reaction['user']['login']) reactions.append(reaction) return reactions
[ "Get", "issue", "reactions" ]
chaoss/grimoirelab-perceval
python
https://github.com/chaoss/grimoirelab-perceval/blob/41c908605e88b7ebc3a536c643fa0f212eaf9e0e/perceval/backends/core/github.py#L307-L323
[ "def", "__get_issue_reactions", "(", "self", ",", "issue_number", ",", "total_count", ")", ":", "reactions", "=", "[", "]", "if", "total_count", "==", "0", ":", "return", "reactions", "group_reactions", "=", "self", ".", "client", ".", "issue_reactions", "(", "issue_number", ")", "for", "raw_reactions", "in", "group_reactions", ":", "for", "reaction", "in", "json", ".", "loads", "(", "raw_reactions", ")", ":", "reaction", "[", "'user_data'", "]", "=", "self", ".", "__get_user", "(", "reaction", "[", "'user'", "]", "[", "'login'", "]", ")", "reactions", ".", "append", "(", "reaction", ")", "return", "reactions" ]
41c908605e88b7ebc3a536c643fa0f212eaf9e0e
test
GitHub.__get_issue_comments
Get issue comments
perceval/backends/core/github.py
def __get_issue_comments(self, issue_number): """Get issue comments""" comments = [] group_comments = self.client.issue_comments(issue_number) for raw_comments in group_comments: for comment in json.loads(raw_comments): comment_id = comment.get('id') comment['user_data'] = self.__get_user(comment['user']['login']) comment['reactions_data'] = \ self.__get_issue_comment_reactions(comment_id, comment['reactions']['total_count']) comments.append(comment) return comments
def __get_issue_comments(self, issue_number): """Get issue comments""" comments = [] group_comments = self.client.issue_comments(issue_number) for raw_comments in group_comments: for comment in json.loads(raw_comments): comment_id = comment.get('id') comment['user_data'] = self.__get_user(comment['user']['login']) comment['reactions_data'] = \ self.__get_issue_comment_reactions(comment_id, comment['reactions']['total_count']) comments.append(comment) return comments
[ "Get", "issue", "comments" ]
chaoss/grimoirelab-perceval
python
https://github.com/chaoss/grimoirelab-perceval/blob/41c908605e88b7ebc3a536c643fa0f212eaf9e0e/perceval/backends/core/github.py#L325-L340
[ "def", "__get_issue_comments", "(", "self", ",", "issue_number", ")", ":", "comments", "=", "[", "]", "group_comments", "=", "self", ".", "client", ".", "issue_comments", "(", "issue_number", ")", "for", "raw_comments", "in", "group_comments", ":", "for", "comment", "in", "json", ".", "loads", "(", "raw_comments", ")", ":", "comment_id", "=", "comment", ".", "get", "(", "'id'", ")", "comment", "[", "'user_data'", "]", "=", "self", ".", "__get_user", "(", "comment", "[", "'user'", "]", "[", "'login'", "]", ")", "comment", "[", "'reactions_data'", "]", "=", "self", ".", "__get_issue_comment_reactions", "(", "comment_id", ",", "comment", "[", "'reactions'", "]", "[", "'total_count'", "]", ")", "comments", ".", "append", "(", "comment", ")", "return", "comments" ]
41c908605e88b7ebc3a536c643fa0f212eaf9e0e
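The method above enriches every raw issue comment with two extra keys, 'user_data' and 'reactions_data', before returning it. A minimal consumer sketch of that enriched shape (the helper name and the 'unknown' fallback are illustrative, not part of the backend):

def summarize_comment_reactions(comments):
    """Count enriched reactions per commenter for a list of issue comments."""
    totals = {}
    for comment in comments:
        author = (comment.get('user_data') or {}).get('login', 'unknown')
        totals[author] = totals.get(author, 0) + len(comment.get('reactions_data') or [])
    return totals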
test
GitHub.__get_issue_comment_reactions
Get reactions on issue comments
perceval/backends/core/github.py
def __get_issue_comment_reactions(self, comment_id, total_count): """Get reactions on issue comments""" reactions = [] if total_count == 0: return reactions group_reactions = self.client.issue_comment_reactions(comment_id) for raw_reactions in group_reactions: for reaction in json.loads(raw_reactions): reaction['user_data'] = self.__get_user(reaction['user']['login']) reactions.append(reaction) return reactions
def __get_issue_comment_reactions(self, comment_id, total_count): """Get reactions on issue comments""" reactions = [] if total_count == 0: return reactions group_reactions = self.client.issue_comment_reactions(comment_id) for raw_reactions in group_reactions: for reaction in json.loads(raw_reactions): reaction['user_data'] = self.__get_user(reaction['user']['login']) reactions.append(reaction) return reactions
[ "Get", "reactions", "on", "issue", "comments" ]
chaoss/grimoirelab-perceval
python
https://github.com/chaoss/grimoirelab-perceval/blob/41c908605e88b7ebc3a536c643fa0f212eaf9e0e/perceval/backends/core/github.py#L342-L358
[ "def", "__get_issue_comment_reactions", "(", "self", ",", "comment_id", ",", "total_count", ")", ":", "reactions", "=", "[", "]", "if", "total_count", "==", "0", ":", "return", "reactions", "group_reactions", "=", "self", ".", "client", ".", "issue_comment_reactions", "(", "comment_id", ")", "for", "raw_reactions", "in", "group_reactions", ":", "for", "reaction", "in", "json", ".", "loads", "(", "raw_reactions", ")", ":", "reaction", "[", "'user_data'", "]", "=", "self", ".", "__get_user", "(", "reaction", "[", "'user'", "]", "[", "'login'", "]", ")", "reactions", ".", "append", "(", "reaction", ")", "return", "reactions" ]
41c908605e88b7ebc3a536c643fa0f212eaf9e0e
test
GitHub.__get_issue_assignees
Get issue assignees
perceval/backends/core/github.py
def __get_issue_assignees(self, raw_assignees): """Get issue assignees""" assignees = [] for ra in raw_assignees: assignees.append(self.__get_user(ra['login'])) return assignees
def __get_issue_assignees(self, raw_assignees): """Get issue assignees""" assignees = [] for ra in raw_assignees: assignees.append(self.__get_user(ra['login'])) return assignees
[ "Get", "issue", "assignees" ]
chaoss/grimoirelab-perceval
python
https://github.com/chaoss/grimoirelab-perceval/blob/41c908605e88b7ebc3a536c643fa0f212eaf9e0e/perceval/backends/core/github.py#L367-L374
[ "def", "__get_issue_assignees", "(", "self", ",", "raw_assignees", ")", ":", "assignees", "=", "[", "]", "for", "ra", "in", "raw_assignees", ":", "assignees", ".", "append", "(", "self", ".", "__get_user", "(", "ra", "[", "'login'", "]", ")", ")", "return", "assignees" ]
41c908605e88b7ebc3a536c643fa0f212eaf9e0e
test
GitHub.__get_pull_requested_reviewers
Get pull request requested reviewers
perceval/backends/core/github.py
def __get_pull_requested_reviewers(self, pr_number): """Get pull request requested reviewers""" requested_reviewers = [] group_requested_reviewers = self.client.pull_requested_reviewers(pr_number) for raw_requested_reviewers in group_requested_reviewers: group_requested_reviewers = json.loads(raw_requested_reviewers) for requested_reviewer in group_requested_reviewers['users']: user_data = self.__get_user(requested_reviewer['login']) requested_reviewers.append(user_data) return requested_reviewers
def __get_pull_requested_reviewers(self, pr_number): """Get pull request requested reviewers""" requested_reviewers = [] group_requested_reviewers = self.client.pull_requested_reviewers(pr_number) for raw_requested_reviewers in group_requested_reviewers: group_requested_reviewers = json.loads(raw_requested_reviewers) for requested_reviewer in group_requested_reviewers['users']: user_data = self.__get_user(requested_reviewer['login']) requested_reviewers.append(user_data) return requested_reviewers
[ "Get", "pull", "request", "requested", "reviewers" ]
chaoss/grimoirelab-perceval
python
https://github.com/chaoss/grimoirelab-perceval/blob/41c908605e88b7ebc3a536c643fa0f212eaf9e0e/perceval/backends/core/github.py#L376-L389
[ "def", "__get_pull_requested_reviewers", "(", "self", ",", "pr_number", ")", ":", "requested_reviewers", "=", "[", "]", "group_requested_reviewers", "=", "self", ".", "client", ".", "pull_requested_reviewers", "(", "pr_number", ")", "for", "raw_requested_reviewers", "in", "group_requested_reviewers", ":", "group_requested_reviewers", "=", "json", ".", "loads", "(", "raw_requested_reviewers", ")", "for", "requested_reviewer", "in", "group_requested_reviewers", "[", "'users'", "]", ":", "user_data", "=", "self", ".", "__get_user", "(", "requested_reviewer", "[", "'login'", "]", ")", "requested_reviewers", ".", "append", "(", "user_data", ")", "return", "requested_reviewers" ]
41c908605e88b7ebc3a536c643fa0f212eaf9e0e
test
GitHub.__get_pull_commits
Get pull request commit hashes
perceval/backends/core/github.py
def __get_pull_commits(self, pr_number): """Get pull request commit hashes""" hashes = [] group_pull_commits = self.client.pull_commits(pr_number) for raw_pull_commits in group_pull_commits: for commit in json.loads(raw_pull_commits): commit_hash = commit['sha'] hashes.append(commit_hash) return hashes
def __get_pull_commits(self, pr_number): """Get pull request commit hashes""" hashes = [] group_pull_commits = self.client.pull_commits(pr_number) for raw_pull_commits in group_pull_commits: for commit in json.loads(raw_pull_commits): commit_hash = commit['sha'] hashes.append(commit_hash) return hashes
[ "Get", "pull", "request", "commit", "hashes" ]
chaoss/grimoirelab-perceval
python
https://github.com/chaoss/grimoirelab-perceval/blob/41c908605e88b7ebc3a536c643fa0f212eaf9e0e/perceval/backends/core/github.py#L391-L403
[ "def", "__get_pull_commits", "(", "self", ",", "pr_number", ")", ":", "hashes", "=", "[", "]", "group_pull_commits", "=", "self", ".", "client", ".", "pull_commits", "(", "pr_number", ")", "for", "raw_pull_commits", "in", "group_pull_commits", ":", "for", "commit", "in", "json", ".", "loads", "(", "raw_pull_commits", ")", ":", "commit_hash", "=", "commit", "[", "'sha'", "]", "hashes", ".", "append", "(", "commit_hash", ")", "return", "hashes" ]
41c908605e88b7ebc3a536c643fa0f212eaf9e0e
test
GitHub.__get_pull_review_comments
Get pull request review comments
perceval/backends/core/github.py
def __get_pull_review_comments(self, pr_number): """Get pull request review comments""" comments = [] group_comments = self.client.pull_review_comments(pr_number) for raw_comments in group_comments: for comment in json.loads(raw_comments): comment_id = comment.get('id') user = comment.get('user', None) if not user: logger.warning("Missing user info for %s", comment['url']) comment['user_data'] = None else: comment['user_data'] = self.__get_user(user['login']) comment['reactions_data'] = \ self.__get_pull_review_comment_reactions(comment_id, comment['reactions']['total_count']) comments.append(comment) return comments
def __get_pull_review_comments(self, pr_number): """Get pull request review comments""" comments = [] group_comments = self.client.pull_review_comments(pr_number) for raw_comments in group_comments: for comment in json.loads(raw_comments): comment_id = comment.get('id') user = comment.get('user', None) if not user: logger.warning("Missing user info for %s", comment['url']) comment['user_data'] = None else: comment['user_data'] = self.__get_user(user['login']) comment['reactions_data'] = \ self.__get_pull_review_comment_reactions(comment_id, comment['reactions']['total_count']) comments.append(comment) return comments
[ "Get", "pull", "request", "review", "comments" ]
chaoss/grimoirelab-perceval
python
https://github.com/chaoss/grimoirelab-perceval/blob/41c908605e88b7ebc3a536c643fa0f212eaf9e0e/perceval/backends/core/github.py#L405-L427
[ "def", "__get_pull_review_comments", "(", "self", ",", "pr_number", ")", ":", "comments", "=", "[", "]", "group_comments", "=", "self", ".", "client", ".", "pull_review_comments", "(", "pr_number", ")", "for", "raw_comments", "in", "group_comments", ":", "for", "comment", "in", "json", ".", "loads", "(", "raw_comments", ")", ":", "comment_id", "=", "comment", ".", "get", "(", "'id'", ")", "user", "=", "comment", ".", "get", "(", "'user'", ",", "None", ")", "if", "not", "user", ":", "logger", ".", "warning", "(", "\"Missing user info for %s\"", ",", "comment", "[", "'url'", "]", ")", "comment", "[", "'user_data'", "]", "=", "None", "else", ":", "comment", "[", "'user_data'", "]", "=", "self", ".", "__get_user", "(", "user", "[", "'login'", "]", ")", "comment", "[", "'reactions_data'", "]", "=", "self", ".", "__get_pull_review_comment_reactions", "(", "comment_id", ",", "comment", "[", "'reactions'", "]", "[", "'total_count'", "]", ")", "comments", ".", "append", "(", "comment", ")", "return", "comments" ]
41c908605e88b7ebc3a536c643fa0f212eaf9e0e
test
GitHub.__get_pull_review_comment_reactions
Get pull review comment reactions
perceval/backends/core/github.py
def __get_pull_review_comment_reactions(self, comment_id, total_count): """Get pull review comment reactions""" reactions = [] if total_count == 0: return reactions group_reactions = self.client.pull_review_comment_reactions(comment_id) for raw_reactions in group_reactions: for reaction in json.loads(raw_reactions): reaction['user_data'] = self.__get_user(reaction['user']['login']) reactions.append(reaction) return reactions
def __get_pull_review_comment_reactions(self, comment_id, total_count): """Get pull review comment reactions""" reactions = [] if total_count == 0: return reactions group_reactions = self.client.pull_review_comment_reactions(comment_id) for raw_reactions in group_reactions: for reaction in json.loads(raw_reactions): reaction['user_data'] = self.__get_user(reaction['user']['login']) reactions.append(reaction) return reactions
[ "Get", "pull", "review", "comment", "reactions" ]
chaoss/grimoirelab-perceval
python
https://github.com/chaoss/grimoirelab-perceval/blob/41c908605e88b7ebc3a536c643fa0f212eaf9e0e/perceval/backends/core/github.py#L429-L445
[ "def", "__get_pull_review_comment_reactions", "(", "self", ",", "comment_id", ",", "total_count", ")", ":", "reactions", "=", "[", "]", "if", "total_count", "==", "0", ":", "return", "reactions", "group_reactions", "=", "self", ".", "client", ".", "pull_review_comment_reactions", "(", "comment_id", ")", "for", "raw_reactions", "in", "group_reactions", ":", "for", "reaction", "in", "json", ".", "loads", "(", "raw_reactions", ")", ":", "reaction", "[", "'user_data'", "]", "=", "self", ".", "__get_user", "(", "reaction", "[", "'user'", "]", "[", "'login'", "]", ")", "reactions", ".", "append", "(", "reaction", ")", "return", "reactions" ]
41c908605e88b7ebc3a536c643fa0f212eaf9e0e
test
GitHub.__get_user
Get user and org data for the login
perceval/backends/core/github.py
def __get_user(self, login): """Get user and org data for the login""" user = {} if not login: return user user_raw = self.client.user(login) user = json.loads(user_raw) user_orgs_raw = \ self.client.user_orgs(login) user['organizations'] = json.loads(user_orgs_raw) return user
def __get_user(self, login): """Get user and org data for the login""" user = {} if not login: return user user_raw = self.client.user(login) user = json.loads(user_raw) user_orgs_raw = \ self.client.user_orgs(login) user['organizations'] = json.loads(user_orgs_raw) return user
[ "Get", "user", "and", "org", "data", "for", "the", "login" ]
chaoss/grimoirelab-perceval
python
https://github.com/chaoss/grimoirelab-perceval/blob/41c908605e88b7ebc3a536c643fa0f212eaf9e0e/perceval/backends/core/github.py#L447-L461
[ "def", "__get_user", "(", "self", ",", "login", ")", ":", "user", "=", "{", "}", "if", "not", "login", ":", "return", "user", "user_raw", "=", "self", ".", "client", ".", "user", "(", "login", ")", "user", "=", "json", ".", "loads", "(", "user_raw", ")", "user_orgs_raw", "=", "self", ".", "client", ".", "user_orgs", "(", "login", ")", "user", "[", "'organizations'", "]", "=", "json", ".", "loads", "(", "user_orgs_raw", ")", "return", "user" ]
41c908605e88b7ebc3a536c643fa0f212eaf9e0e
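__get_user() merges a user profile with the user's public organizations under an 'organizations' key. A rough standalone sketch of the same enrichment with plain requests, assuming the public GitHub endpoints /users/{login} and /users/{login}/orgs and omitting the token, caching and rate-limit handling that GitHubClient provides:

import requests

def fetch_user_with_orgs(login, base_url="https://api.github.com"):
    # Sketch only: no authentication, caching or error handling as in GitHubClient.
    user = requests.get("{}/users/{}".format(base_url, login)).json()
    user['organizations'] = requests.get("{}/users/{}/orgs".format(base_url, login)).json()
    return user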
test
GitHubClient.issue_reactions
Get reactions of an issue
perceval/backends/core/github.py
def issue_reactions(self, issue_number): """Get reactions of an issue""" payload = { 'per_page': PER_PAGE, 'direction': 'asc', 'sort': 'updated' } path = urijoin("issues", str(issue_number), "reactions") return self.fetch_items(path, payload)
def issue_reactions(self, issue_number): """Get reactions of an issue""" payload = { 'per_page': PER_PAGE, 'direction': 'asc', 'sort': 'updated' } path = urijoin("issues", str(issue_number), "reactions") return self.fetch_items(path, payload)
[ "Get", "reactions", "of", "an", "issue" ]
chaoss/grimoirelab-perceval
python
https://github.com/chaoss/grimoirelab-perceval/blob/41c908605e88b7ebc3a536c643fa0f212eaf9e0e/perceval/backends/core/github.py#L542-L552
[ "def", "issue_reactions", "(", "self", ",", "issue_number", ")", ":", "payload", "=", "{", "'per_page'", ":", "PER_PAGE", ",", "'direction'", ":", "'asc'", ",", "'sort'", ":", "'updated'", "}", "path", "=", "urijoin", "(", "\"issues\"", ",", "str", "(", "issue_number", ")", ",", "\"reactions\"", ")", "return", "self", ".", "fetch_items", "(", "path", ",", "payload", ")" ]
41c908605e88b7ebc3a536c643fa0f212eaf9e0e
test
GitHubClient.issues
Fetch the issues from the repository. The method retrieves, from a GitHub repository, the issues updated since the given date. :param from_date: obtain issues updated since this date :returns: a generator of issues
perceval/backends/core/github.py
def issues(self, from_date=None): """Fetch the issues from the repository. The method retrieves, from a GitHub repository, the issues updated since the given date. :param from_date: obtain issues updated since this date :returns: a generator of issues """ payload = { 'state': 'all', 'per_page': PER_PAGE, 'direction': 'asc', 'sort': 'updated'} if from_date: payload['since'] = from_date.isoformat() path = urijoin("issues") return self.fetch_items(path, payload)
def issues(self, from_date=None): """Fetch the issues from the repository. The method retrieves, from a GitHub repository, the issues updated since the given date. :param from_date: obtain issues updated since this date :returns: a generator of issues """ payload = { 'state': 'all', 'per_page': PER_PAGE, 'direction': 'asc', 'sort': 'updated'} if from_date: payload['since'] = from_date.isoformat() path = urijoin("issues") return self.fetch_items(path, payload)
[ "Fetch", "the", "issues", "from", "the", "repository", "." ]
chaoss/grimoirelab-perceval
python
https://github.com/chaoss/grimoirelab-perceval/blob/41c908605e88b7ebc3a536c643fa0f212eaf9e0e/perceval/backends/core/github.py#L578-L598
[ "def", "issues", "(", "self", ",", "from_date", "=", "None", ")", ":", "payload", "=", "{", "'state'", ":", "'all'", ",", "'per_page'", ":", "PER_PAGE", ",", "'direction'", ":", "'asc'", ",", "'sort'", ":", "'updated'", "}", "if", "from_date", ":", "payload", "[", "'since'", "]", "=", "from_date", ".", "isoformat", "(", ")", "path", "=", "urijoin", "(", "\"issues\"", ")", "return", "self", ".", "fetch_items", "(", "path", ",", "payload", ")" ]
41c908605e88b7ebc3a536c643fa0f212eaf9e0e
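issues() yields raw JSON pages, each of which decodes to a list of issues, so callers parse every page themselves. A usage sketch, assuming `client` is an already-configured GitHubClient instance (setup omitted):

import json
from datetime import datetime, timezone

since = datetime(2019, 1, 1, tzinfo=timezone.utc)
for raw_page in client.issues(from_date=since):   # `client`: an initialized GitHubClient
    for issue in json.loads(raw_page):             # each page is a JSON list of issue dicts
        print(issue['number'], issue['updated_at'])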
test
GitHubClient.pulls
Fetch the pull requests from the repository. The method retrieves, from a GitHub repository, the pull requests updated since the given date. :param from_date: obtain pull requests updated since this date :returns: a generator of pull requests
perceval/backends/core/github.py
def pulls(self, from_date=None): """Fetch the pull requests from the repository. The method retrieves, from a GitHub repository, the pull requests updated since the given date. :param from_date: obtain pull requests updated since this date :returns: a generator of pull requests """ issues_groups = self.issues(from_date=from_date) for raw_issues in issues_groups: issues = json.loads(raw_issues) for issue in issues: if "pull_request" not in issue: continue pull_number = issue["number"] path = urijoin(self.base_url, 'repos', self.owner, self.repository, "pulls", pull_number) r = self.fetch(path) pull = r.text yield pull
def pulls(self, from_date=None): """Fetch the pull requests from the repository. The method retrieves, from a GitHub repository, the pull requests updated since the given date. :param from_date: obtain pull requests updated since this date :returns: a generator of pull requests """ issues_groups = self.issues(from_date=from_date) for raw_issues in issues_groups: issues = json.loads(raw_issues) for issue in issues: if "pull_request" not in issue: continue pull_number = issue["number"] path = urijoin(self.base_url, 'repos', self.owner, self.repository, "pulls", pull_number) r = self.fetch(path) pull = r.text yield pull
[ "Fetch", "the", "pull", "requests", "from", "the", "repository", "." ]
chaoss/grimoirelab-perceval
python
https://github.com/chaoss/grimoirelab-perceval/blob/41c908605e88b7ebc3a536c643fa0f212eaf9e0e/perceval/backends/core/github.py#L600-L625
[ "def", "pulls", "(", "self", ",", "from_date", "=", "None", ")", ":", "issues_groups", "=", "self", ".", "issues", "(", "from_date", "=", "from_date", ")", "for", "raw_issues", "in", "issues_groups", ":", "issues", "=", "json", ".", "loads", "(", "raw_issues", ")", "for", "issue", "in", "issues", ":", "if", "\"pull_request\"", "not", "in", "issue", ":", "continue", "pull_number", "=", "issue", "[", "\"number\"", "]", "path", "=", "urijoin", "(", "self", ".", "base_url", ",", "'repos'", ",", "self", ".", "owner", ",", "self", ".", "repository", ",", "\"pulls\"", ",", "pull_number", ")", "r", "=", "self", ".", "fetch", "(", "path", ")", "pull", "=", "r", ".", "text", "yield", "pull" ]
41c908605e88b7ebc3a536c643fa0f212eaf9e0e
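pulls() discovers pull requests by scanning the issues listing for items that carry a 'pull_request' key and then fetching each pull individually. The filtering step in isolation, as a small helper (the function name is illustrative):

import json

def pull_numbers(issue_pages):
    """Yield the numbers of issues that are actually pull requests."""
    for raw_page in issue_pages:
        for issue in json.loads(raw_page):
            if 'pull_request' in issue:
                yield issue['number']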
test
GitHubClient.repo
Get repository data
perceval/backends/core/github.py
def repo(self): """Get repository data""" path = urijoin(self.base_url, 'repos', self.owner, self.repository) r = self.fetch(path) repo = r.text return repo
def repo(self): """Get repository data""" path = urijoin(self.base_url, 'repos', self.owner, self.repository) r = self.fetch(path) repo = r.text return repo
[ "Get", "repository", "data" ]
chaoss/grimoirelab-perceval
python
https://github.com/chaoss/grimoirelab-perceval/blob/41c908605e88b7ebc3a536c643fa0f212eaf9e0e/perceval/backends/core/github.py#L627-L635
[ "def", "repo", "(", "self", ")", ":", "path", "=", "urijoin", "(", "self", ".", "base_url", ",", "'repos'", ",", "self", ".", "owner", ",", "self", ".", "repository", ")", "r", "=", "self", ".", "fetch", "(", "path", ")", "repo", "=", "r", ".", "text", "return", "repo" ]
41c908605e88b7ebc3a536c643fa0f212eaf9e0e
test
GitHubClient.pull_requested_reviewers
Get pull requested reviewers
perceval/backends/core/github.py
def pull_requested_reviewers(self, pr_number): """Get pull requested reviewers""" requested_reviewers_url = urijoin("pulls", str(pr_number), "requested_reviewers") return self.fetch_items(requested_reviewers_url, {})
def pull_requested_reviewers(self, pr_number): """Get pull requested reviewers""" requested_reviewers_url = urijoin("pulls", str(pr_number), "requested_reviewers") return self.fetch_items(requested_reviewers_url, {})
[ "Get", "pull", "requested", "reviewers" ]
chaoss/grimoirelab-perceval
python
https://github.com/chaoss/grimoirelab-perceval/blob/41c908605e88b7ebc3a536c643fa0f212eaf9e0e/perceval/backends/core/github.py#L637-L641
[ "def", "pull_requested_reviewers", "(", "self", ",", "pr_number", ")", ":", "requested_reviewers_url", "=", "urijoin", "(", "\"pulls\"", ",", "str", "(", "pr_number", ")", ",", "\"requested_reviewers\"", ")", "return", "self", ".", "fetch_items", "(", "requested_reviewers_url", ",", "{", "}", ")" ]
41c908605e88b7ebc3a536c643fa0f212eaf9e0e
test
GitHubClient.pull_commits
Get pull request commits
perceval/backends/core/github.py
def pull_commits(self, pr_number): """Get pull request commits""" payload = { 'per_page': PER_PAGE, } commit_url = urijoin("pulls", str(pr_number), "commits") return self.fetch_items(commit_url, payload)
def pull_commits(self, pr_number): """Get pull request commits""" payload = { 'per_page': PER_PAGE, } commit_url = urijoin("pulls", str(pr_number), "commits") return self.fetch_items(commit_url, payload)
[ "Get", "pull", "request", "commits" ]
chaoss/grimoirelab-perceval
python
https://github.com/chaoss/grimoirelab-perceval/blob/41c908605e88b7ebc3a536c643fa0f212eaf9e0e/perceval/backends/core/github.py#L643-L651
[ "def", "pull_commits", "(", "self", ",", "pr_number", ")", ":", "payload", "=", "{", "'per_page'", ":", "PER_PAGE", ",", "}", "commit_url", "=", "urijoin", "(", "\"pulls\"", ",", "str", "(", "pr_number", ")", ",", "\"commits\"", ")", "return", "self", ".", "fetch_items", "(", "commit_url", ",", "payload", ")" ]
41c908605e88b7ebc3a536c643fa0f212eaf9e0e
test
GitHubClient.pull_review_comments
Get pull request review comments
perceval/backends/core/github.py
def pull_review_comments(self, pr_number): """Get pull request review comments""" payload = { 'per_page': PER_PAGE, 'direction': 'asc', 'sort': 'updated' } comments_url = urijoin("pulls", str(pr_number), "comments") return self.fetch_items(comments_url, payload)
def pull_review_comments(self, pr_number): """Get pull request review comments""" payload = { 'per_page': PER_PAGE, 'direction': 'asc', 'sort': 'updated' } comments_url = urijoin("pulls", str(pr_number), "comments") return self.fetch_items(comments_url, payload)
[ "Get", "pull", "request", "review", "comments" ]
chaoss/grimoirelab-perceval
python
https://github.com/chaoss/grimoirelab-perceval/blob/41c908605e88b7ebc3a536c643fa0f212eaf9e0e/perceval/backends/core/github.py#L653-L663
[ "def", "pull_review_comments", "(", "self", ",", "pr_number", ")", ":", "payload", "=", "{", "'per_page'", ":", "PER_PAGE", ",", "'direction'", ":", "'asc'", ",", "'sort'", ":", "'updated'", "}", "comments_url", "=", "urijoin", "(", "\"pulls\"", ",", "str", "(", "pr_number", ")", ",", "\"comments\"", ")", "return", "self", ".", "fetch_items", "(", "comments_url", ",", "payload", ")" ]
41c908605e88b7ebc3a536c643fa0f212eaf9e0e
test
GitHubClient.pull_review_comment_reactions
Get reactions of a review comment
perceval/backends/core/github.py
def pull_review_comment_reactions(self, comment_id): """Get reactions of a review comment""" payload = { 'per_page': PER_PAGE, 'direction': 'asc', 'sort': 'updated' } path = urijoin("pulls", "comments", str(comment_id), "reactions") return self.fetch_items(path, payload)
def pull_review_comment_reactions(self, comment_id): """Get reactions of a review comment""" payload = { 'per_page': PER_PAGE, 'direction': 'asc', 'sort': 'updated' } path = urijoin("pulls", "comments", str(comment_id), "reactions") return self.fetch_items(path, payload)
[ "Get", "reactions", "of", "a", "review", "comment" ]
chaoss/grimoirelab-perceval
python
https://github.com/chaoss/grimoirelab-perceval/blob/41c908605e88b7ebc3a536c643fa0f212eaf9e0e/perceval/backends/core/github.py#L665-L675
[ "def", "pull_review_comment_reactions", "(", "self", ",", "comment_id", ")", ":", "payload", "=", "{", "'per_page'", ":", "PER_PAGE", ",", "'direction'", ":", "'asc'", ",", "'sort'", ":", "'updated'", "}", "path", "=", "urijoin", "(", "\"pulls\"", ",", "\"comments\"", ",", "str", "(", "comment_id", ")", ",", "\"reactions\"", ")", "return", "self", ".", "fetch_items", "(", "path", ",", "payload", ")" ]
41c908605e88b7ebc3a536c643fa0f212eaf9e0e
test
GitHubClient.user
Get the user information and update the user cache
perceval/backends/core/github.py
def user(self, login): """Get the user information and update the user cache""" user = None if login in self._users: return self._users[login] url_user = urijoin(self.base_url, 'users', login) logging.info("Getting info for %s" % (url_user)) r = self.fetch(url_user) user = r.text self._users[login] = user return user
def user(self, login): """Get the user information and update the user cache""" user = None if login in self._users: return self._users[login] url_user = urijoin(self.base_url, 'users', login) logging.info("Getting info for %s" % (url_user)) r = self.fetch(url_user) user = r.text self._users[login] = user return user
[ "Get", "the", "user", "information", "and", "update", "the", "user", "cache" ]
chaoss/grimoirelab-perceval
python
https://github.com/chaoss/grimoirelab-perceval/blob/41c908605e88b7ebc3a536c643fa0f212eaf9e0e/perceval/backends/core/github.py#L677-L692
[ "def", "user", "(", "self", ",", "login", ")", ":", "user", "=", "None", "if", "login", "in", "self", ".", "_users", ":", "return", "self", ".", "_users", "[", "login", "]", "url_user", "=", "urijoin", "(", "self", ".", "base_url", ",", "'users'", ",", "login", ")", "logging", ".", "info", "(", "\"Getting info for %s\"", "%", "(", "url_user", ")", ")", "r", "=", "self", ".", "fetch", "(", "url_user", ")", "user", "=", "r", ".", "text", "self", ".", "_users", "[", "login", "]", "=", "user", "return", "user" ]
41c908605e88b7ebc3a536c643fa0f212eaf9e0e
test
GitHubClient.user_orgs
Get the user public organizations
perceval/backends/core/github.py
def user_orgs(self, login): """Get the user public organizations""" if login in self._users_orgs: return self._users_orgs[login] url = urijoin(self.base_url, 'users', login, 'orgs') try: r = self.fetch(url) orgs = r.text except requests.exceptions.HTTPError as error: # 404 not found is wrongly received sometimes if error.response.status_code == 404: logger.error("Can't get github login orgs: %s", error) orgs = '[]' else: raise error self._users_orgs[login] = orgs return orgs
def user_orgs(self, login): """Get the user public organizations""" if login in self._users_orgs: return self._users_orgs[login] url = urijoin(self.base_url, 'users', login, 'orgs') try: r = self.fetch(url) orgs = r.text except requests.exceptions.HTTPError as error: # 404 not found is wrongly received sometimes if error.response.status_code == 404: logger.error("Can't get github login orgs: %s", error) orgs = '[]' else: raise error self._users_orgs[login] = orgs return orgs
[ "Get", "the", "user", "public", "organizations" ]
chaoss/grimoirelab-perceval
python
https://github.com/chaoss/grimoirelab-perceval/blob/41c908605e88b7ebc3a536c643fa0f212eaf9e0e/perceval/backends/core/github.py#L694-L713
[ "def", "user_orgs", "(", "self", ",", "login", ")", ":", "if", "login", "in", "self", ".", "_users_orgs", ":", "return", "self", ".", "_users_orgs", "[", "login", "]", "url", "=", "urijoin", "(", "self", ".", "base_url", ",", "'users'", ",", "login", ",", "'orgs'", ")", "try", ":", "r", "=", "self", ".", "fetch", "(", "url", ")", "orgs", "=", "r", ".", "text", "except", "requests", ".", "exceptions", ".", "HTTPError", "as", "error", ":", "# 404 not found is wrongly received sometimes", "if", "error", ".", "response", ".", "status_code", "==", "404", ":", "logger", ".", "error", "(", "\"Can't get github login orgs: %s\"", ",", "error", ")", "orgs", "=", "'[]'", "else", ":", "raise", "error", "self", ".", "_users_orgs", "[", "login", "]", "=", "orgs", "return", "orgs" ]
41c908605e88b7ebc3a536c643fa0f212eaf9e0e
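user_orgs() treats an occasional spurious 404 as an empty organization list instead of failing. The same fallback pattern with plain requests, as a hedged sketch against the public GitHub endpoint, with no token handling:

import requests

def orgs_or_empty(login, base_url="https://api.github.com"):
    response = requests.get("{}/users/{}/orgs".format(base_url, login))
    if response.status_code == 404:   # tolerate the occasional bogus 404
        return '[]'
    response.raise_for_status()
    return response.text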
test
GitHubClient._get_token_rate_limit
Return token's remaining API points
perceval/backends/core/github.py
def _get_token_rate_limit(self, token): """Return token's remaining API points""" rate_url = urijoin(self.base_url, "rate_limit") self.session.headers.update({'Authorization': 'token ' + token}) remaining = 0 try: headers = super().fetch(rate_url).headers if self.rate_limit_header in headers: remaining = int(headers[self.rate_limit_header]) except requests.exceptions.HTTPError as error: logger.warning("Rate limit not initialized: %s", error) return remaining
def _get_token_rate_limit(self, token): """Return token's remaining API points""" rate_url = urijoin(self.base_url, "rate_limit") self.session.headers.update({'Authorization': 'token ' + token}) remaining = 0 try: headers = super().fetch(rate_url).headers if self.rate_limit_header in headers: remaining = int(headers[self.rate_limit_header]) except requests.exceptions.HTTPError as error: logger.warning("Rate limit not initialized: %s", error) return remaining
[ "Return", "token", "s", "remaining", "API", "points" ]
chaoss/grimoirelab-perceval
python
https://github.com/chaoss/grimoirelab-perceval/blob/41c908605e88b7ebc3a536c643fa0f212eaf9e0e/perceval/backends/core/github.py#L772-L784
[ "def", "_get_token_rate_limit", "(", "self", ",", "token", ")", ":", "rate_url", "=", "urijoin", "(", "self", ".", "base_url", ",", "\"rate_limit\"", ")", "self", ".", "session", ".", "headers", ".", "update", "(", "{", "'Authorization'", ":", "'token '", "+", "token", "}", ")", "remaining", "=", "0", "try", ":", "headers", "=", "super", "(", ")", ".", "fetch", "(", "rate_url", ")", ".", "headers", "if", "self", ".", "rate_limit_header", "in", "headers", ":", "remaining", "=", "int", "(", "headers", "[", "self", ".", "rate_limit_header", "]", ")", "except", "requests", ".", "exceptions", ".", "HTTPError", "as", "error", ":", "logger", ".", "warning", "(", "\"Rate limit not initialized: %s\"", ",", "error", ")", "return", "remaining" ]
41c908605e88b7ebc3a536c643fa0f212eaf9e0e
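_get_token_rate_limit() reads the remaining quota from a header of the rate_limit response; the rate_limit_header attribute is defined elsewhere in the client, and the X-RateLimit-Remaining name used below is the documented GitHub header, assumed here. A standalone sketch:

import requests

def remaining_points(token, base_url="https://api.github.com"):
    # Sketch: query the rate_limit endpoint and read the remaining-points header.
    headers = {'Authorization': 'token ' + token}
    response = requests.get("{}/rate_limit".format(base_url), headers=headers)
    return int(response.headers.get('X-RateLimit-Remaining', 0))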
test
GitHubClient._get_tokens_rate_limits
Return array of all tokens remaining API points
perceval/backends/core/github.py
def _get_tokens_rate_limits(self): """Return array of all tokens remaining API points""" remainings = [0] * self.n_tokens # Turn off archiving when checking rates, because that would cause # archive key conflict (the same URLs giving different responses) arch = self.archive self.archive = None for idx, token in enumerate(self.tokens): # Pass flag to skip disabling archiving because this function does it remainings[idx] = self._get_token_rate_limit(token) # Restore archiving to whatever state it was self.archive = arch logger.debug("Remaining API points: {}".format(remainings)) return remainings
def _get_tokens_rate_limits(self): """Return array of all tokens remaining API points""" remainings = [0] * self.n_tokens # Turn off archiving when checking rates, because that would cause # archive key conflict (the same URLs giving different responses) arch = self.archive self.archive = None for idx, token in enumerate(self.tokens): # Pass flag to skip disabling archiving because this function does it remainings[idx] = self._get_token_rate_limit(token) # Restore archiving to whatever state it was self.archive = arch logger.debug("Remaining API points: {}".format(remainings)) return remainings
[ "Return", "array", "of", "all", "tokens", "remaining", "API", "points" ]
chaoss/grimoirelab-perceval
python
https://github.com/chaoss/grimoirelab-perceval/blob/41c908605e88b7ebc3a536c643fa0f212eaf9e0e/perceval/backends/core/github.py#L786-L800
[ "def", "_get_tokens_rate_limits", "(", "self", ")", ":", "remainings", "=", "[", "0", "]", "*", "self", ".", "n_tokens", "# Turn off archiving when checking rates, because that would cause", "# archive key conflict (the same URLs giving different responses)", "arch", "=", "self", ".", "archive", "self", ".", "archive", "=", "None", "for", "idx", ",", "token", "in", "enumerate", "(", "self", ".", "tokens", ")", ":", "# Pass flag to skip disabling archiving because this function doies it", "remainings", "[", "idx", "]", "=", "self", ".", "_get_token_rate_limit", "(", "token", ")", "# Restore archiving to whatever state it was", "self", ".", "archive", "=", "arch", "logger", ".", "debug", "(", "\"Remaining API points: {}\"", ".", "format", "(", "remainings", ")", ")", "return", "remainings" ]
41c908605e88b7ebc3a536c643fa0f212eaf9e0e
test
GitHubClient._choose_best_api_token
Check all API tokens defined and choose one with most remaining API points
perceval/backends/core/github.py
def _choose_best_api_token(self): """Check all API tokens defined and choose one with most remaining API points""" # Return if no tokens given if self.n_tokens == 0: return # If multiple tokens given, choose best token_idx = 0 if self.n_tokens > 1: remainings = self._get_tokens_rate_limits() token_idx = remainings.index(max(remainings)) logger.debug("Remaining API points: {}, chosen index: {}".format(remainings, token_idx)) # If we have any tokens - use best of them self.current_token = self.tokens[token_idx] self.session.headers.update({'Authorization': 'token ' + self.current_token}) # Update rate limit data for the current token self._update_current_rate_limit()
def _choose_best_api_token(self): """Check all API tokens defined and choose one with most remaining API points""" # Return if no tokens given if self.n_tokens == 0: return # If multiple tokens given, choose best token_idx = 0 if self.n_tokens > 1: remainings = self._get_tokens_rate_limits() token_idx = remainings.index(max(remainings)) logger.debug("Remaining API points: {}, chosen index: {}".format(remainings, token_idx)) # If we have any tokens - use best of them self.current_token = self.tokens[token_idx] self.session.headers.update({'Authorization': 'token ' + self.current_token}) # Update rate limit data for the current token self._update_current_rate_limit()
[ "Check", "all", "API", "tokens", "defined", "and", "choose", "one", "with", "most", "remaining", "API", "points" ]
chaoss/grimoirelab-perceval
python
https://github.com/chaoss/grimoirelab-perceval/blob/41c908605e88b7ebc3a536c643fa0f212eaf9e0e/perceval/backends/core/github.py#L802-L820
[ "def", "_choose_best_api_token", "(", "self", ")", ":", "# Return if no tokens given", "if", "self", ".", "n_tokens", "==", "0", ":", "return", "# If multiple tokens given, choose best", "token_idx", "=", "0", "if", "self", ".", "n_tokens", ">", "1", ":", "remainings", "=", "self", ".", "_get_tokens_rate_limits", "(", ")", "token_idx", "=", "remainings", ".", "index", "(", "max", "(", "remainings", ")", ")", "logger", ".", "debug", "(", "\"Remaining API points: {}, choosen index: {}\"", ".", "format", "(", "remainings", ",", "token_idx", ")", ")", "# If we have any tokens - use best of them", "self", ".", "current_token", "=", "self", ".", "tokens", "[", "token_idx", "]", "self", ".", "session", ".", "headers", ".", "update", "(", "{", "'Authorization'", ":", "'token '", "+", "self", ".", "current_token", "}", ")", "# Update rate limit data for the current token", "self", ".", "_update_current_rate_limit", "(", ")" ]
41c908605e88b7ebc3a536c643fa0f212eaf9e0e
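When several tokens are configured, _choose_best_api_token() reduces to an argmax over the remaining points of each token. The selection itself as a pure function:

def best_token_index(remainings):
    """Index of the token with the most remaining API points (first one wins on ties)."""
    return max(range(len(remainings)), key=lambda i: remainings[i])

# e.g. best_token_index([120, 4990, 310]) == 1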
test
GitHubClient._need_check_tokens
Check if we need to switch GitHub API tokens
perceval/backends/core/github.py
def _need_check_tokens(self): """Check if we need to switch GitHub API tokens""" if self.n_tokens <= 1 or self.rate_limit is None: return False elif self.last_rate_limit_checked is None: self.last_rate_limit_checked = self.rate_limit return True # If approaching minimum rate limit for sleep approaching_limit = float(self.min_rate_to_sleep) * (1.0 + TOKEN_USAGE_BEFORE_SWITCH) + 1 if self.rate_limit <= approaching_limit: self.last_rate_limit_checked = self.rate_limit return True # Only switch token when used predefined factor of the current token's remaining API points ratio = float(self.rate_limit) / float(self.last_rate_limit_checked) if ratio < 1.0 - TOKEN_USAGE_BEFORE_SWITCH: self.last_rate_limit_checked = self.rate_limit return True elif ratio > 1.0: self.last_rate_limit_checked = self.rate_limit return False else: return False
def _need_check_tokens(self): """Check if we need to switch GitHub API tokens""" if self.n_tokens <= 1 or self.rate_limit is None: return False elif self.last_rate_limit_checked is None: self.last_rate_limit_checked = self.rate_limit return True # If approaching minimum rate limit for sleep approaching_limit = float(self.min_rate_to_sleep) * (1.0 + TOKEN_USAGE_BEFORE_SWITCH) + 1 if self.rate_limit <= approaching_limit: self.last_rate_limit_checked = self.rate_limit return True # Only switch token when used predefined factor of the current token's remaining API points ratio = float(self.rate_limit) / float(self.last_rate_limit_checked) if ratio < 1.0 - TOKEN_USAGE_BEFORE_SWITCH: self.last_rate_limit_checked = self.rate_limit return True elif ratio > 1.0: self.last_rate_limit_checked = self.rate_limit return False else: return False
[ "Check", "if", "we", "need", "to", "switch", "GitHub", "API", "tokens" ]
chaoss/grimoirelab-perceval
python
https://github.com/chaoss/grimoirelab-perceval/blob/41c908605e88b7ebc3a536c643fa0f212eaf9e0e/perceval/backends/core/github.py#L822-L846
[ "def", "_need_check_tokens", "(", "self", ")", ":", "if", "self", ".", "n_tokens", "<=", "1", "or", "self", ".", "rate_limit", "is", "None", ":", "return", "False", "elif", "self", ".", "last_rate_limit_checked", "is", "None", ":", "self", ".", "last_rate_limit_checked", "=", "self", ".", "rate_limit", "return", "True", "# If approaching minimum rate limit for sleep", "approaching_limit", "=", "float", "(", "self", ".", "min_rate_to_sleep", ")", "*", "(", "1.0", "+", "TOKEN_USAGE_BEFORE_SWITCH", ")", "+", "1", "if", "self", ".", "rate_limit", "<=", "approaching_limit", ":", "self", ".", "last_rate_limit_checked", "=", "self", ".", "rate_limit", "return", "True", "# Only switch token when used predefined factor of the current token's remaining API points", "ratio", "=", "float", "(", "self", ".", "rate_limit", ")", "/", "float", "(", "self", ".", "last_rate_limit_checked", ")", "if", "ratio", "<", "1.0", "-", "TOKEN_USAGE_BEFORE_SWITCH", ":", "self", ".", "last_rate_limit_checked", "=", "self", ".", "rate_limit", "return", "True", "elif", "ratio", ">", "1.0", ":", "self", ".", "last_rate_limit_checked", "=", "self", ".", "rate_limit", "return", "False", "else", ":", "return", "False" ]
41c908605e88b7ebc3a536c643fa0f212eaf9e0e
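_need_check_tokens() decides to re-evaluate the tokens either when the remaining quota approaches the sleep threshold or once a fixed fraction of the quota seen at the last check has been used. The same rule as a pure function; TOKEN_USAGE_BEFORE_SWITCH comes from the module and the 0.1 default below is only illustrative:

def should_recheck(rate_limit, last_checked, min_rate_to_sleep, usage_before_switch=0.1):
    # Assumes more than one token is configured (the method also short-circuits on n_tokens <= 1).
    if last_checked is None:
        return True
    if rate_limit <= min_rate_to_sleep * (1.0 + usage_before_switch) + 1:
        return True
    return rate_limit / last_checked < 1.0 - usage_before_switch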
test
GitHubClient._update_current_rate_limit
Update rate limits data for the current token
perceval/backends/core/github.py
def _update_current_rate_limit(self): """Update rate limits data for the current token""" url = urijoin(self.base_url, "rate_limit") try: # Turn off archiving when checking rates, because that would cause # archive key conflict (the same URLs giving different responses) arch = self.archive self.archive = None response = super().fetch(url) self.archive = arch self.update_rate_limit(response) self.last_rate_limit_checked = self.rate_limit except requests.exceptions.HTTPError as error: if error.response.status_code == 404: logger.warning("Rate limit not initialized: %s", error) else: raise error
def _update_current_rate_limit(self): """Update rate limits data for the current token""" url = urijoin(self.base_url, "rate_limit") try: # Turn off archiving when checking rates, because that would cause # archive key conflict (the same URLs giving different responses) arch = self.archive self.archive = None response = super().fetch(url) self.archive = arch self.update_rate_limit(response) self.last_rate_limit_checked = self.rate_limit except requests.exceptions.HTTPError as error: if error.response.status_code == 404: logger.warning("Rate limit not initialized: %s", error) else: raise error
[ "Update", "rate", "limits", "data", "for", "the", "current", "token" ]
chaoss/grimoirelab-perceval
python
https://github.com/chaoss/grimoirelab-perceval/blob/41c908605e88b7ebc3a536c643fa0f212eaf9e0e/perceval/backends/core/github.py#L848-L865
[ "def", "_update_current_rate_limit", "(", "self", ")", ":", "url", "=", "urijoin", "(", "self", ".", "base_url", ",", "\"rate_limit\"", ")", "try", ":", "# Turn off archiving when checking rates, because that would cause", "# archive key conflict (the same URLs giving different responses)", "arch", "=", "self", ".", "archive", "self", ".", "archive", "=", "None", "response", "=", "super", "(", ")", ".", "fetch", "(", "url", ")", "self", ".", "archive", "=", "arch", "self", ".", "update_rate_limit", "(", "response", ")", "self", ".", "last_rate_limit_checked", "=", "self", ".", "rate_limit", "except", "requests", ".", "exceptions", ".", "HTTPError", "as", "error", ":", "if", "error", ".", "response", ".", "status_code", "==", "404", ":", "logger", ".", "warning", "(", "\"Rate limit not initialized: %s\"", ",", "error", ")", "else", ":", "raise", "error" ]
41c908605e88b7ebc3a536c643fa0f212eaf9e0e
test
Archive.init_metadata
Init metadata information. Metadata is composed of basic information needed to identify where archived data came from and how it can be retrieved and built into Perceval items. :param: origin: identifier of the repository :param: backend_name: name of the backend :param: backend_version: version of the backend :param: category: category of the items fetched :param: backend_params: dict representation of the fetch parameters raises ArchiveError: when an error occurs initializing the metadata
perceval/archive.py
def init_metadata(self, origin, backend_name, backend_version, category, backend_params): """Init metadata information. Metadata is composed of basic information needed to identify where archived data came from and how it can be retrieved and built into Perceval items. :param: origin: identifier of the repository :param: backend_name: name of the backend :param: backend_version: version of the backend :param: category: category of the items fetched :param: backend_params: dict representation of the fetch parameters raises ArchiveError: when an error occurs initializing the metadata """ created_on = datetime_to_utc(datetime_utcnow()) created_on_dumped = created_on.isoformat() backend_params_dumped = pickle.dumps(backend_params, 0) metadata = (origin, backend_name, backend_version, category, backend_params_dumped, created_on_dumped,) try: cursor = self._db.cursor() insert_stmt = "INSERT INTO " + self.METADATA_TABLE + " "\ "(origin, backend_name, backend_version, " \ "category, backend_params, created_on) " \ "VALUES (?, ?, ?, ?, ?, ?)" cursor.execute(insert_stmt, metadata) self._db.commit() cursor.close() except sqlite3.DatabaseError as e: msg = "metadata initialization error; cause: %s" % str(e) raise ArchiveError(cause=msg) self.origin = origin self.backend_name = backend_name self.backend_version = backend_version self.category = category self.backend_params = backend_params self.created_on = created_on logger.debug("Metadata of archive %s initialized to %s", self.archive_path, metadata)
def init_metadata(self, origin, backend_name, backend_version, category, backend_params): """Init metadata information. Metadata is composed of basic information needed to identify where archived data came from and how it can be retrieved and built into Perceval items. :param: origin: identifier of the repository :param: backend_name: name of the backend :param: backend_version: version of the backend :param: category: category of the items fetched :param: backend_params: dict representation of the fetch parameters raises ArchiveError: when an error occurs initializing the metadata """ created_on = datetime_to_utc(datetime_utcnow()) created_on_dumped = created_on.isoformat() backend_params_dumped = pickle.dumps(backend_params, 0) metadata = (origin, backend_name, backend_version, category, backend_params_dumped, created_on_dumped,) try: cursor = self._db.cursor() insert_stmt = "INSERT INTO " + self.METADATA_TABLE + " "\ "(origin, backend_name, backend_version, " \ "category, backend_params, created_on) " \ "VALUES (?, ?, ?, ?, ?, ?)" cursor.execute(insert_stmt, metadata) self._db.commit() cursor.close() except sqlite3.DatabaseError as e: msg = "metadata initialization error; cause: %s" % str(e) raise ArchiveError(cause=msg) self.origin = origin self.backend_name = backend_name self.backend_version = backend_version self.category = category self.backend_params = backend_params self.created_on = created_on logger.debug("Metadata of archive %s initialized to %s", self.archive_path, metadata)
[ "Init", "metadata", "information", "." ]
chaoss/grimoirelab-perceval
python
https://github.com/chaoss/grimoirelab-perceval/blob/41c908605e88b7ebc3a536c643fa0f212eaf9e0e/perceval/archive.py#L106-L151
[ "def", "init_metadata", "(", "self", ",", "origin", ",", "backend_name", ",", "backend_version", ",", "category", ",", "backend_params", ")", ":", "created_on", "=", "datetime_to_utc", "(", "datetime_utcnow", "(", ")", ")", "created_on_dumped", "=", "created_on", ".", "isoformat", "(", ")", "backend_params_dumped", "=", "pickle", ".", "dumps", "(", "backend_params", ",", "0", ")", "metadata", "=", "(", "origin", ",", "backend_name", ",", "backend_version", ",", "category", ",", "backend_params_dumped", ",", "created_on_dumped", ",", ")", "try", ":", "cursor", "=", "self", ".", "_db", ".", "cursor", "(", ")", "insert_stmt", "=", "\"INSERT INTO \"", "+", "self", ".", "METADATA_TABLE", "+", "\" \"", "\"(origin, backend_name, backend_version, \"", "\"category, backend_params, created_on) \"", "\"VALUES (?, ?, ?, ?, ?, ?)\"", "cursor", ".", "execute", "(", "insert_stmt", ",", "metadata", ")", "self", ".", "_db", ".", "commit", "(", ")", "cursor", ".", "close", "(", ")", "except", "sqlite3", ".", "DatabaseError", "as", "e", ":", "msg", "=", "\"metadata initialization error; cause: %s\"", "%", "str", "(", "e", ")", "raise", "ArchiveError", "(", "cause", "=", "msg", ")", "self", ".", "origin", "=", "origin", "self", ".", "backend_name", "=", "backend_name", "self", ".", "backend_version", "=", "backend_version", "self", ".", "category", "=", "category", "self", ".", "backend_params", "=", "backend_params", "self", ".", "created_on", "=", "created_on", "logger", ".", "debug", "(", "\"Metadata of archive %s initialized to %s\"", ",", "self", ".", "archive_path", ",", "metadata", ")" ]
41c908605e88b7ebc3a536c643fa0f212eaf9e0e
test
Archive.store
Store a raw item in this archive. The method will store `data` content in this archive. The unique identifier for that item will be generated using the rest of the parameters. :param uri: request URI :param payload: request payload :param headers: request headers :param data: data to store in this archive :raises ArchiveError: when an error occurs storing the given data
perceval/archive.py
def store(self, uri, payload, headers, data): """Store a raw item in this archive. The method will store `data` content in this archive. The unique identifier for that item will be generated using the rest of the parameters. :param uri: request URI :param payload: request payload :param headers: request headers :param data: data to store in this archive :raises ArchiveError: when an error occurs storing the given data """ hashcode = self.make_hashcode(uri, payload, headers) payload_dump = pickle.dumps(payload, 0) headers_dump = pickle.dumps(headers, 0) data_dump = pickle.dumps(data, 0) logger.debug("Archiving %s with %s %s %s in %s", hashcode, uri, payload, headers, self.archive_path) try: cursor = self._db.cursor() insert_stmt = "INSERT INTO " + self.ARCHIVE_TABLE + " (" \ "id, hashcode, uri, payload, headers, data) " \ "VALUES(?,?,?,?,?,?)" cursor.execute(insert_stmt, (None, hashcode, uri, payload_dump, headers_dump, data_dump)) self._db.commit() cursor.close() except sqlite3.IntegrityError as e: msg = "data storage error; cause: duplicated entry %s" % hashcode raise ArchiveError(cause=msg) except sqlite3.DatabaseError as e: msg = "data storage error; cause: %s" % str(e) raise ArchiveError(cause=msg) logger.debug("%s data archived in %s", hashcode, self.archive_path)
def store(self, uri, payload, headers, data): """Store a raw item in this archive. The method will store `data` content in this archive. The unique identifier for that item will be generated using the rest of the parameters. :param uri: request URI :param payload: request payload :param headers: request headers :param data: data to store in this archive :raises ArchiveError: when an error occurs storing the given data """ hashcode = self.make_hashcode(uri, payload, headers) payload_dump = pickle.dumps(payload, 0) headers_dump = pickle.dumps(headers, 0) data_dump = pickle.dumps(data, 0) logger.debug("Archiving %s with %s %s %s in %s", hashcode, uri, payload, headers, self.archive_path) try: cursor = self._db.cursor() insert_stmt = "INSERT INTO " + self.ARCHIVE_TABLE + " (" \ "id, hashcode, uri, payload, headers, data) " \ "VALUES(?,?,?,?,?,?)" cursor.execute(insert_stmt, (None, hashcode, uri, payload_dump, headers_dump, data_dump)) self._db.commit() cursor.close() except sqlite3.IntegrityError as e: msg = "data storage error; cause: duplicated entry %s" % hashcode raise ArchiveError(cause=msg) except sqlite3.DatabaseError as e: msg = "data storage error; cause: %s" % str(e) raise ArchiveError(cause=msg) logger.debug("%s data archived in %s", hashcode, self.archive_path)
[ "Store", "a", "raw", "item", "in", "this", "archive", "." ]
chaoss/grimoirelab-perceval
python
https://github.com/chaoss/grimoirelab-perceval/blob/41c908605e88b7ebc3a536c643fa0f212eaf9e0e/perceval/archive.py#L153-L191
[ "def", "store", "(", "self", ",", "uri", ",", "payload", ",", "headers", ",", "data", ")", ":", "hashcode", "=", "self", ".", "make_hashcode", "(", "uri", ",", "payload", ",", "headers", ")", "payload_dump", "=", "pickle", ".", "dumps", "(", "payload", ",", "0", ")", "headers_dump", "=", "pickle", ".", "dumps", "(", "headers", ",", "0", ")", "data_dump", "=", "pickle", ".", "dumps", "(", "data", ",", "0", ")", "logger", ".", "debug", "(", "\"Archiving %s with %s %s %s in %s\"", ",", "hashcode", ",", "uri", ",", "payload", ",", "headers", ",", "self", ".", "archive_path", ")", "try", ":", "cursor", "=", "self", ".", "_db", ".", "cursor", "(", ")", "insert_stmt", "=", "\"INSERT INTO \"", "+", "self", ".", "ARCHIVE_TABLE", "+", "\" (\"", "\"id, hashcode, uri, payload, headers, data) \"", "\"VALUES(?,?,?,?,?,?)\"", "cursor", ".", "execute", "(", "insert_stmt", ",", "(", "None", ",", "hashcode", ",", "uri", ",", "payload_dump", ",", "headers_dump", ",", "data_dump", ")", ")", "self", ".", "_db", ".", "commit", "(", ")", "cursor", ".", "close", "(", ")", "except", "sqlite3", ".", "IntegrityError", "as", "e", ":", "msg", "=", "\"data storage error; cause: duplicated entry %s\"", "%", "hashcode", "raise", "ArchiveError", "(", "cause", "=", "msg", ")", "except", "sqlite3", ".", "DatabaseError", "as", "e", ":", "msg", "=", "\"data storage error; cause: %s\"", "%", "str", "(", "e", ")", "raise", "ArchiveError", "(", "cause", "=", "msg", ")", "logger", ".", "debug", "(", "\"%s data archived in %s\"", ",", "hashcode", ",", "self", ".", "archive_path", ")" ]
41c908605e88b7ebc3a536c643fa0f212eaf9e0e
test
Archive.retrieve
Retrieve a raw item from the archive. The method will return the `data` content corresponding to the hashcode derived from the given parameters. :param uri: request URI :param payload: request payload :param headers: request headers :returns: the archived data :raises ArchiveError: when an error occurs retrieving data
perceval/archive.py
def retrieve(self, uri, payload, headers): """Retrieve a raw item from the archive. The method will return the `data` content corresponding to the hashcode derived from the given parameters. :param uri: request URI :param payload: request payload :param headers: request headers :returns: the archived data :raises ArchiveError: when an error occurs retrieving data """ hashcode = self.make_hashcode(uri, payload, headers) logger.debug("Retrieving entry %s with %s %s %s in %s", hashcode, uri, payload, headers, self.archive_path) self._db.row_factory = sqlite3.Row try: cursor = self._db.cursor() select_stmt = "SELECT data " \ "FROM " + self.ARCHIVE_TABLE + " " \ "WHERE hashcode = ?" cursor.execute(select_stmt, (hashcode,)) row = cursor.fetchone() cursor.close() except sqlite3.DatabaseError as e: msg = "data retrieval error; cause: %s" % str(e) raise ArchiveError(cause=msg) if row: found = pickle.loads(row['data']) else: msg = "entry %s not found in archive %s" % (hashcode, self.archive_path) raise ArchiveError(cause=msg) return found
def retrieve(self, uri, payload, headers): """Retrieve a raw item from the archive. The method will return the `data` content corresponding to the hashcode derived from the given parameters. :param uri: request URI :param payload: request payload :param headers: request headers :returns: the archived data :raises ArchiveError: when an error occurs retrieving data """ hashcode = self.make_hashcode(uri, payload, headers) logger.debug("Retrieving entry %s with %s %s %s in %s", hashcode, uri, payload, headers, self.archive_path) self._db.row_factory = sqlite3.Row try: cursor = self._db.cursor() select_stmt = "SELECT data " \ "FROM " + self.ARCHIVE_TABLE + " " \ "WHERE hashcode = ?" cursor.execute(select_stmt, (hashcode,)) row = cursor.fetchone() cursor.close() except sqlite3.DatabaseError as e: msg = "data retrieval error; cause: %s" % str(e) raise ArchiveError(cause=msg) if row: found = pickle.loads(row['data']) else: msg = "entry %s not found in archive %s" % (hashcode, self.archive_path) raise ArchiveError(cause=msg) return found
[ "Retrieve", "a", "raw", "item", "from", "the", "archive", "." ]
chaoss/grimoirelab-perceval
python
https://github.com/chaoss/grimoirelab-perceval/blob/41c908605e88b7ebc3a536c643fa0f212eaf9e0e/perceval/archive.py#L193-L232
[ "def", "retrieve", "(", "self", ",", "uri", ",", "payload", ",", "headers", ")", ":", "hashcode", "=", "self", ".", "make_hashcode", "(", "uri", ",", "payload", ",", "headers", ")", "logger", ".", "debug", "(", "\"Retrieving entry %s with %s %s %s in %s\"", ",", "hashcode", ",", "uri", ",", "payload", ",", "headers", ",", "self", ".", "archive_path", ")", "self", ".", "_db", ".", "row_factory", "=", "sqlite3", ".", "Row", "try", ":", "cursor", "=", "self", ".", "_db", ".", "cursor", "(", ")", "select_stmt", "=", "\"SELECT data \"", "\"FROM \"", "+", "self", ".", "ARCHIVE_TABLE", "+", "\" \"", "\"WHERE hashcode = ?\"", "cursor", ".", "execute", "(", "select_stmt", ",", "(", "hashcode", ",", ")", ")", "row", "=", "cursor", ".", "fetchone", "(", ")", "cursor", ".", "close", "(", ")", "except", "sqlite3", ".", "DatabaseError", "as", "e", ":", "msg", "=", "\"data retrieval error; cause: %s\"", "%", "str", "(", "e", ")", "raise", "ArchiveError", "(", "cause", "=", "msg", ")", "if", "row", ":", "found", "=", "pickle", ".", "loads", "(", "row", "[", "'data'", "]", ")", "else", ":", "msg", "=", "\"entry %s not found in archive %s\"", "%", "(", "hashcode", ",", "self", ".", "archive_path", ")", "raise", "ArchiveError", "(", "cause", "=", "msg", ")", "return", "found" ]
41c908605e88b7ebc3a536c643fa0f212eaf9e0e
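store() and retrieve() address entries by the hashcode derived from (uri, payload, headers), so a round trip only needs the same three arguments. A usage sketch assuming the Archive class from perceval.archive and an illustrative file path:

from perceval.archive import Archive

archive = Archive.create('/tmp/example-archive.sqlite3')   # raises ArchiveError if the file already exists
uri = 'https://api.github.com/repos/chaoss/grimoirelab-perceval/issues'
payload = {'state': 'all', 'per_page': 100}
headers = {'Accept': 'application/vnd.github.v3+json'}

archive.store(uri, payload, headers, data={'items': []})
cached = archive.retrieve(uri, payload, headers)           # same three arguments -> same hashcode
assert cached == {'items': []}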
test
Archive.create
Create a brand new archive. Call this method to create a new and empty archive. It will initialize the storage file in the path defined by `archive_path`. :param archive_path: absolute path where the archive file will be created :raises ArchiveError: when the archive file already exists
perceval/archive.py
def create(cls, archive_path): """Create a brand new archive. Call this method to create a new and empty archive. It will initialize the storage file in the path defined by `archive_path`. :param archive_path: absolute path where the archive file will be created :raises ArchiveError: when the archive file already exists """ if os.path.exists(archive_path): msg = "archive %s already exists; remove it before creating a new one" raise ArchiveError(cause=msg % (archive_path)) conn = sqlite3.connect(archive_path) cursor = conn.cursor() cursor.execute(cls.METADATA_CREATE_STMT) cursor.execute(cls.ARCHIVE_CREATE_STMT) conn.commit() cursor.close() conn.close() logger.debug("Creating archive %s", archive_path) archive = cls(archive_path) logger.debug("Archive %s was created", archive_path) return archive
def create(cls, archive_path): """Create a brand new archive. Call this method to create a new and empty archive. It will initialize the storage file in the path defined by `archive_path`. :param archive_path: absolute path where the archive file will be created :raises ArchiveError: when the archive file already exists """ if os.path.exists(archive_path): msg = "archive %s already exists; remove it before creating a new one" raise ArchiveError(cause=msg % (archive_path)) conn = sqlite3.connect(archive_path) cursor = conn.cursor() cursor.execute(cls.METADATA_CREATE_STMT) cursor.execute(cls.ARCHIVE_CREATE_STMT) conn.commit() cursor.close() conn.close() logger.debug("Creating archive %s", archive_path) archive = cls(archive_path) logger.debug("Achive %s was created", archive_path) return archive
[ "Create", "a", "brand", "new", "archive", "." ]
chaoss/grimoirelab-perceval
python
https://github.com/chaoss/grimoirelab-perceval/blob/41c908605e88b7ebc3a536c643fa0f212eaf9e0e/perceval/archive.py#L235-L263
[ "def", "create", "(", "cls", ",", "archive_path", ")", ":", "if", "os", ".", "path", ".", "exists", "(", "archive_path", ")", ":", "msg", "=", "\"archive %s already exists; remove it before creating a new one\"", "raise", "ArchiveError", "(", "cause", "=", "msg", "%", "(", "archive_path", ")", ")", "conn", "=", "sqlite3", ".", "connect", "(", "archive_path", ")", "cursor", "=", "conn", ".", "cursor", "(", ")", "cursor", ".", "execute", "(", "cls", ".", "METADATA_CREATE_STMT", ")", "cursor", ".", "execute", "(", "cls", ".", "ARCHIVE_CREATE_STMT", ")", "conn", ".", "commit", "(", ")", "cursor", ".", "close", "(", ")", "conn", ".", "close", "(", ")", "logger", ".", "debug", "(", "\"Creating archive %s\"", ",", "archive_path", ")", "archive", "=", "cls", "(", "archive_path", ")", "logger", ".", "debug", "(", "\"Achive %s was created\"", ",", "archive_path", ")", "return", "archive" ]
41c908605e88b7ebc3a536c643fa0f212eaf9e0e
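A minimal sketch of Archive.create, showing that it initializes a fresh SQLite storage file and refuses to overwrite an existing one. The temporary directory and file name are illustrative only; the error module path is an assumption.

import os
import tempfile

from perceval.archive import Archive
from perceval.errors import ArchiveError  # assumed location of ArchiveError

archive_path = os.path.join(tempfile.mkdtemp(), 'items.sqlite3')

archive = Archive.create(archive_path)   # creates the file and its tables
print(os.path.exists(archive_path))      # True

try:
    Archive.create(archive_path)         # a second call on the same path fails
except ArchiveError as error:
    print(error)                         # archive ... already exists; remove it before creating a new one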
test
Archive.make_hashcode
Generate a SHA1 based on the given arguments. Hashcodes created by this method will be used as unique identifiers for the raw items or resources stored by this archive. :param uri: URI to the resource :param payload: payload of the request needed to fetch the resource :param headers: headers of the request needed to fetch the resource :returns: a SHA1 hash code
perceval/archive.py
def make_hashcode(uri, payload, headers): """Generate a SHA1 based on the given arguments. Hashcodes created by this method will used as unique identifiers for the raw items or resources stored by this archive. :param uri: URI to the resource :param payload: payload of the request needed to fetch the resource :param headers: headers of the request needed to fetch the resource :returns: a SHA1 hash code """ def dict_to_json_str(data): return json.dumps(data, sort_keys=True) content = ':'.join([uri, dict_to_json_str(payload), dict_to_json_str(headers)]) hashcode = hashlib.sha1(content.encode('utf-8')) return hashcode.hexdigest()
def make_hashcode(uri, payload, headers): """Generate a SHA1 based on the given arguments. Hashcodes created by this method will used as unique identifiers for the raw items or resources stored by this archive. :param uri: URI to the resource :param payload: payload of the request needed to fetch the resource :param headers: headers of the request needed to fetch the resource :returns: a SHA1 hash code """ def dict_to_json_str(data): return json.dumps(data, sort_keys=True) content = ':'.join([uri, dict_to_json_str(payload), dict_to_json_str(headers)]) hashcode = hashlib.sha1(content.encode('utf-8')) return hashcode.hexdigest()
[ "Generate", "a", "SHA1", "based", "on", "the", "given", "arguments", "." ]
chaoss/grimoirelab-perceval
python
https://github.com/chaoss/grimoirelab-perceval/blob/41c908605e88b7ebc3a536c643fa0f212eaf9e0e/perceval/archive.py#L266-L283
[ "def", "make_hashcode", "(", "uri", ",", "payload", ",", "headers", ")", ":", "def", "dict_to_json_str", "(", "data", ")", ":", "return", "json", ".", "dumps", "(", "data", ",", "sort_keys", "=", "True", ")", "content", "=", "':'", ".", "join", "(", "[", "uri", ",", "dict_to_json_str", "(", "payload", ")", ",", "dict_to_json_str", "(", "headers", ")", "]", ")", "hashcode", "=", "hashlib", ".", "sha1", "(", "content", ".", "encode", "(", "'utf-8'", ")", ")", "return", "hashcode", ".", "hexdigest", "(", ")" ]
41c908605e88b7ebc3a536c643fa0f212eaf9e0e
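A short sketch of make_hashcode. Because payload and headers are serialized with sort_keys=True, the resulting SHA1 does not depend on dictionary key order; the call below assumes the method is exposed statically, as its signature without self suggests.

from perceval.archive import Archive

uri = 'https://example.com/api/items'

h1 = Archive.make_hashcode(uri, {'page': 1, 'per_page': 100}, {'Accept': 'application/json'})
h2 = Archive.make_hashcode(uri, {'per_page': 100, 'page': 1}, {'Accept': 'application/json'})

print(h1)        # 40-character hexadecimal SHA1 digest
print(h1 == h2)  # True: key order does not change the hashcode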
test
Archive._verify_archive
Check whether the archive is valid or not. This method will check if tables were created and if they contain valid data.
perceval/archive.py
def _verify_archive(self): """Check whether the archive is valid or not. This method will check if tables were created and if they contain valid data. """ nentries = self._count_table_rows(self.ARCHIVE_TABLE) nmetadata = self._count_table_rows(self.METADATA_TABLE) if nmetadata > 1: msg = "archive %s metadata corrupted; multiple metadata entries" % (self.archive_path) raise ArchiveError(cause=msg) if nmetadata == 0 and nentries > 0: msg = "archive %s metadata is empty but %s entries were achived" % (self.archive_path) raise ArchiveError(cause=msg) logger.debug("Integrity of archive %s OK; entries: %s rows, metadata: %s rows", self.archive_path, nentries, nmetadata)
def _verify_archive(self): """Check whether the archive is valid or not. This method will check if tables were created and if they contain valid data. """ nentries = self._count_table_rows(self.ARCHIVE_TABLE) nmetadata = self._count_table_rows(self.METADATA_TABLE) if nmetadata > 1: msg = "archive %s metadata corrupted; multiple metadata entries" % (self.archive_path) raise ArchiveError(cause=msg) if nmetadata == 0 and nentries > 0: msg = "archive %s metadata is empty but %s entries were achived" % (self.archive_path) raise ArchiveError(cause=msg) logger.debug("Integrity of archive %s OK; entries: %s rows, metadata: %s rows", self.archive_path, nentries, nmetadata)
[ "Check", "whether", "the", "archive", "is", "valid", "or", "not", "." ]
chaoss/grimoirelab-perceval
python
https://github.com/chaoss/grimoirelab-perceval/blob/41c908605e88b7ebc3a536c643fa0f212eaf9e0e/perceval/archive.py#L285-L302
[ "def", "_verify_archive", "(", "self", ")", ":", "nentries", "=", "self", ".", "_count_table_rows", "(", "self", ".", "ARCHIVE_TABLE", ")", "nmetadata", "=", "self", ".", "_count_table_rows", "(", "self", ".", "METADATA_TABLE", ")", "if", "nmetadata", ">", "1", ":", "msg", "=", "\"archive %s metadata corrupted; multiple metadata entries\"", "%", "(", "self", ".", "archive_path", ")", "raise", "ArchiveError", "(", "cause", "=", "msg", ")", "if", "nmetadata", "==", "0", "and", "nentries", ">", "0", ":", "msg", "=", "\"archive %s metadata is empty but %s entries were achived\"", "%", "(", "self", ".", "archive_path", ")", "raise", "ArchiveError", "(", "cause", "=", "msg", ")", "logger", ".", "debug", "(", "\"Integrity of archive %s OK; entries: %s rows, metadata: %s rows\"", ",", "self", ".", "archive_path", ",", "nentries", ",", "nmetadata", ")" ]
41c908605e88b7ebc3a536c643fa0f212eaf9e0e
test
Archive._load_metadata
Load metadata from the archive file
perceval/archive.py
def _load_metadata(self): """Load metadata from the archive file""" logger.debug("Loading metadata infomation of archive %s", self.archive_path) cursor = self._db.cursor() select_stmt = "SELECT origin, backend_name, backend_version, " \ "category, backend_params, created_on " \ "FROM " + self.METADATA_TABLE + " " \ "LIMIT 1" cursor.execute(select_stmt) row = cursor.fetchone() cursor.close() if row: self.origin = row[0] self.backend_name = row[1] self.backend_version = row[2] self.category = row[3] self.backend_params = pickle.loads(row[4]) self.created_on = str_to_datetime(row[5]) else: logger.debug("Metadata of archive %s was empty", self.archive_path) logger.debug("Metadata of archive %s loaded", self.archive_path)
def _load_metadata(self): """Load metadata from the archive file""" logger.debug("Loading metadata infomation of archive %s", self.archive_path) cursor = self._db.cursor() select_stmt = "SELECT origin, backend_name, backend_version, " \ "category, backend_params, created_on " \ "FROM " + self.METADATA_TABLE + " " \ "LIMIT 1" cursor.execute(select_stmt) row = cursor.fetchone() cursor.close() if row: self.origin = row[0] self.backend_name = row[1] self.backend_version = row[2] self.category = row[3] self.backend_params = pickle.loads(row[4]) self.created_on = str_to_datetime(row[5]) else: logger.debug("Metadata of archive %s was empty", self.archive_path) logger.debug("Metadata of archive %s loaded", self.archive_path)
[ "Load", "metadata", "from", "the", "archive", "file" ]
chaoss/grimoirelab-perceval
python
https://github.com/chaoss/grimoirelab-perceval/blob/41c908605e88b7ebc3a536c643fa0f212eaf9e0e/perceval/archive.py#L304-L328
[ "def", "_load_metadata", "(", "self", ")", ":", "logger", ".", "debug", "(", "\"Loading metadata infomation of archive %s\"", ",", "self", ".", "archive_path", ")", "cursor", "=", "self", ".", "_db", ".", "cursor", "(", ")", "select_stmt", "=", "\"SELECT origin, backend_name, backend_version, \"", "\"category, backend_params, created_on \"", "\"FROM \"", "+", "self", ".", "METADATA_TABLE", "+", "\" \"", "\"LIMIT 1\"", "cursor", ".", "execute", "(", "select_stmt", ")", "row", "=", "cursor", ".", "fetchone", "(", ")", "cursor", ".", "close", "(", ")", "if", "row", ":", "self", ".", "origin", "=", "row", "[", "0", "]", "self", ".", "backend_name", "=", "row", "[", "1", "]", "self", ".", "backend_version", "=", "row", "[", "2", "]", "self", ".", "category", "=", "row", "[", "3", "]", "self", ".", "backend_params", "=", "pickle", ".", "loads", "(", "row", "[", "4", "]", ")", "self", ".", "created_on", "=", "str_to_datetime", "(", "row", "[", "5", "]", ")", "else", ":", "logger", ".", "debug", "(", "\"Metadata of archive %s was empty\"", ",", "self", ".", "archive_path", ")", "logger", ".", "debug", "(", "\"Metadata of archive %s loaded\"", ",", "self", ".", "archive_path", ")" ]
41c908605e88b7ebc3a536c643fa0f212eaf9e0e
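A sketch of what _load_metadata exposes once an existing archive is opened. It assumes the metadata table was populated earlier (for instance through an init_metadata step not included in this excerpt) and that Archive's constructor loads it; when the table is empty these attributes are expected to stay unset. The path and printed values are hypothetical.

from perceval.archive import Archive

archive = Archive('/tmp/archives/ab/cdef0123456789.sqlite3')  # hypothetical existing archive

print(archive.origin)        # e.g. 'https://example.com/project.git'
print(archive.backend_name)  # e.g. 'git'
print(archive.category)      # e.g. 'commit'
print(archive.created_on)    # datetime parsed with str_to_datetime()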
test
Archive._count_table_rows
Fetch the number of rows in a table
perceval/archive.py
def _count_table_rows(self, table_name): """Fetch the number of rows in a table""" cursor = self._db.cursor() select_stmt = "SELECT COUNT(*) FROM " + table_name try: cursor.execute(select_stmt) row = cursor.fetchone() except sqlite3.DatabaseError as e: msg = "invalid archive file; cause: %s" % str(e) raise ArchiveError(cause=msg) finally: cursor.close() return row[0]
def _count_table_rows(self, table_name): """Fetch the number of rows in a table""" cursor = self._db.cursor() select_stmt = "SELECT COUNT(*) FROM " + table_name try: cursor.execute(select_stmt) row = cursor.fetchone() except sqlite3.DatabaseError as e: msg = "invalid archive file; cause: %s" % str(e) raise ArchiveError(cause=msg) finally: cursor.close() return row[0]
[ "Fetch", "the", "number", "of", "rows", "in", "a", "table" ]
chaoss/grimoirelab-perceval
python
https://github.com/chaoss/grimoirelab-perceval/blob/41c908605e88b7ebc3a536c643fa0f212eaf9e0e/perceval/archive.py#L330-L345
[ "def", "_count_table_rows", "(", "self", ",", "table_name", ")", ":", "cursor", "=", "self", ".", "_db", ".", "cursor", "(", ")", "select_stmt", "=", "\"SELECT COUNT(*) FROM \"", "+", "table_name", "try", ":", "cursor", ".", "execute", "(", "select_stmt", ")", "row", "=", "cursor", ".", "fetchone", "(", ")", "except", "sqlite3", ".", "DatabaseError", "as", "e", ":", "msg", "=", "\"invalid archive file; cause: %s\"", "%", "str", "(", "e", ")", "raise", "ArchiveError", "(", "cause", "=", "msg", ")", "finally", ":", "cursor", ".", "close", "(", ")", "return", "row", "[", "0", "]" ]
41c908605e88b7ebc3a536c643fa0f212eaf9e0e
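A standalone sketch of the counting pattern used by _count_table_rows. SQLite cannot bind table names as parameters, so the name is concatenated into the statement; this is safe in the class above only because the argument always comes from the trusted ARCHIVE_TABLE and METADATA_TABLE constants. The archive path below is hypothetical.

import sqlite3

from perceval.archive import Archive

def count_rows(db_path, table_name):
    # table_name must be a trusted identifier, never user input
    conn = sqlite3.connect(db_path)
    try:
        cursor = conn.execute("SELECT COUNT(*) FROM " + table_name)
        return cursor.fetchone()[0]
    finally:
        conn.close()

print(count_rows('/tmp/archives/ab/cdef0123456789.sqlite3', Archive.ARCHIVE_TABLE))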
test
ArchiveManager.create_archive
Create a new archive. The method creates in the filesystem a brand new archive whose name is a random UUID4 hex string. The first byte of that string will be the name of the subdirectory; the remaining bytes, the archive name. :returns: a new `Archive` object :raises ArchiveManagerError: when an error occurs creating the new archive
perceval/archive.py
def create_archive(self): """Create a new archive. The method creates in the filesystem a brand new archive with a random SHA1 as its name. The first byte of the hashcode will be the name of the subdirectory; the remaining bytes, the archive name. :returns: a new `Archive` object :raises ArchiveManagerError: when an error occurs creating the new archive """ hashcode = uuid.uuid4().hex archive_dir = os.path.join(self.dirpath, hashcode[0:2]) archive_name = hashcode[2:] + self.STORAGE_EXT archive_path = os.path.join(archive_dir, archive_name) if not os.path.exists(archive_dir): os.makedirs(archive_dir) try: archive = Archive.create(archive_path) except ArchiveError as e: raise ArchiveManagerError(cause=str(e)) return archive
def create_archive(self): """Create a new archive. The method creates in the filesystem a brand new archive with a random SHA1 as its name. The first byte of the hashcode will be the name of the subdirectory; the remaining bytes, the archive name. :returns: a new `Archive` object :raises ArchiveManagerError: when an error occurs creating the new archive """ hashcode = uuid.uuid4().hex archive_dir = os.path.join(self.dirpath, hashcode[0:2]) archive_name = hashcode[2:] + self.STORAGE_EXT archive_path = os.path.join(archive_dir, archive_name) if not os.path.exists(archive_dir): os.makedirs(archive_dir) try: archive = Archive.create(archive_path) except ArchiveError as e: raise ArchiveManagerError(cause=str(e)) return archive
[ "Create", "a", "new", "archive", "." ]
chaoss/grimoirelab-perceval
python
https://github.com/chaoss/grimoirelab-perceval/blob/41c908605e88b7ebc3a536c643fa0f212eaf9e0e/perceval/archive.py#L368-L394
[ "def", "create_archive", "(", "self", ")", ":", "hashcode", "=", "uuid", ".", "uuid4", "(", ")", ".", "hex", "archive_dir", "=", "os", ".", "path", ".", "join", "(", "self", ".", "dirpath", ",", "hashcode", "[", "0", ":", "2", "]", ")", "archive_name", "=", "hashcode", "[", "2", ":", "]", "+", "self", ".", "STORAGE_EXT", "archive_path", "=", "os", ".", "path", ".", "join", "(", "archive_dir", ",", "archive_name", ")", "if", "not", "os", ".", "path", ".", "exists", "(", "archive_dir", ")", ":", "os", ".", "makedirs", "(", "archive_dir", ")", "try", ":", "archive", "=", "Archive", ".", "create", "(", "archive_path", ")", "except", "ArchiveError", "as", "e", ":", "raise", "ArchiveManagerError", "(", "cause", "=", "str", "(", "e", ")", ")", "return", "archive" ]
41c908605e88b7ebc3a536c643fa0f212eaf9e0e
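A sketch of ArchiveManager.create_archive, assuming the manager is constructed with the base directory as its argument (the constructor is not part of this excerpt). Each new archive lands in a two-character subdirectory taken from a random UUID4 hex string.

import os
import tempfile

from perceval.archive import ArchiveManager

manager = ArchiveManager(tempfile.mkdtemp())  # base directory assumed as constructor argument
archive = manager.create_archive()

# e.g. 'a3/1f2c9...<storage extension>': the first two hex chars name the subdirectory
print(os.path.relpath(archive.archive_path, manager.dirpath))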
test
ArchiveManager.remove_archive
Remove an archive. This method deletes from the filesystem the archive stored in `archive_path`. :param archive_path: path to the archive :raises ArchiveManagerError: when an error occurs removing the archive
perceval/archive.py
def remove_archive(self, archive_path): """Remove an archive. This method deletes from the filesystem the archive stored in `archive_path`. :param archive_path: path to the archive :raises ArchiveManangerError: when an error occurs removing the archive """ try: Archive(archive_path) except ArchiveError as e: raise ArchiveManagerError(cause=str(e)) os.remove(archive_path)
def remove_archive(self, archive_path): """Remove an archive. This method deletes from the filesystem the archive stored in `archive_path`. :param archive_path: path to the archive :raises ArchiveManangerError: when an error occurs removing the archive """ try: Archive(archive_path) except ArchiveError as e: raise ArchiveManagerError(cause=str(e)) os.remove(archive_path)
[ "Remove", "an", "archive", "." ]
chaoss/grimoirelab-perceval
python
https://github.com/chaoss/grimoirelab-perceval/blob/41c908605e88b7ebc3a536c643fa0f212eaf9e0e/perceval/archive.py#L396-L412
[ "def", "remove_archive", "(", "self", ",", "archive_path", ")", ":", "try", ":", "Archive", "(", "archive_path", ")", "except", "ArchiveError", "as", "e", ":", "raise", "ArchiveManagerError", "(", "cause", "=", "str", "(", "e", ")", ")", "os", ".", "remove", "(", "archive_path", ")" ]
41c908605e88b7ebc3a536c643fa0f212eaf9e0e
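A sketch of remove_archive. The method first validates that the target really is an archive (a failed validation is wrapped into ArchiveManagerError) and only then deletes the file; the manager constructor argument is again assumed to be the base directory.

import os
import tempfile

from perceval.archive import ArchiveManager

manager = ArchiveManager(tempfile.mkdtemp())
archive = manager.create_archive()

manager.remove_archive(archive.archive_path)
print(os.path.exists(archive.archive_path))  # False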
test
ArchiveManager.search
Search archives. Get the archives which store data based on the given parameters. These parameters define the origin of the data (`origin`), how it was fetched (`backend_name`) and its type (`category`). Only those archives created on or after `archived_after` will be returned. The method returns a list with the file paths to those archives. The list is sorted by the date of creation of each archive. :param origin: data origin :param backend_name: backend used to fetch data :param category: type of the items fetched by the backend :param archived_after: get archives created on or after this date :returns: a list with archive names which match the search criteria
perceval/archive.py
def search(self, origin, backend_name, category, archived_after): """Search archives. Get the archives which store data based on the given parameters. These parameters define which the origin was (`origin`), how data was fetched (`backend_name`) and data type ('category'). Only those archives created on or after `archived_after` will be returned. The method returns a list with the file paths to those archives. The list is sorted by the date of creation of each archive. :param origin: data origin :param backend_name: backed used to fetch data :param category: type of the items fetched by the backend :param archived_after: get archives created on or after this date :returns: a list with archive names which match the search criteria """ archives = self._search_archives(origin, backend_name, category, archived_after) archives = [(fp, date) for fp, date in archives] archives = [fp for fp, _ in sorted(archives, key=lambda x: x[1])] return archives
def search(self, origin, backend_name, category, archived_after): """Search archives. Get the archives which store data based on the given parameters. These parameters define which the origin was (`origin`), how data was fetched (`backend_name`) and data type ('category'). Only those archives created on or after `archived_after` will be returned. The method returns a list with the file paths to those archives. The list is sorted by the date of creation of each archive. :param origin: data origin :param backend_name: backed used to fetch data :param category: type of the items fetched by the backend :param archived_after: get archives created on or after this date :returns: a list with archive names which match the search criteria """ archives = self._search_archives(origin, backend_name, category, archived_after) archives = [(fp, date) for fp, date in archives] archives = [fp for fp, _ in sorted(archives, key=lambda x: x[1])] return archives
[ "Search", "archives", "." ]
chaoss/grimoirelab-perceval
python
https://github.com/chaoss/grimoirelab-perceval/blob/41c908605e88b7ebc3a536c643fa0f212eaf9e0e/perceval/archive.py#L414-L438
[ "def", "search", "(", "self", ",", "origin", ",", "backend_name", ",", "category", ",", "archived_after", ")", ":", "archives", "=", "self", ".", "_search_archives", "(", "origin", ",", "backend_name", ",", "category", ",", "archived_after", ")", "archives", "=", "[", "(", "fp", ",", "date", ")", "for", "fp", ",", "date", "in", "archives", "]", "archives", "=", "[", "fp", "for", "fp", ",", "_", "in", "sorted", "(", "archives", ",", "key", "=", "lambda", "x", ":", "x", "[", "1", "]", ")", "]", "return", "archives" ]
41c908605e88b7ebc3a536c643fa0f212eaf9e0e
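A sketch of ArchiveManager.search. The filters must match the metadata stored in each archive, and only archives created on or after archived_after are returned, oldest first. The base directory, origin, backend and date are illustrative; a timezone-aware date is used because created_on is parsed from the stored metadata and is expected to carry timezone information.

import datetime

from perceval.archive import ArchiveManager

manager = ArchiveManager('/tmp/archives')  # base directory assumed as constructor argument

paths = manager.search(origin='https://example.com/project.git',
                       backend_name='git',
                       category='commit',
                       archived_after=datetime.datetime(2019, 1, 1,
                                                        tzinfo=datetime.timezone.utc))

for path in paths:  # sorted by creation date, oldest archive first
    print(path)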
test
ArchiveManager._search_archives
Search archives using filters.
perceval/archive.py
def _search_archives(self, origin, backend_name, category, archived_after): """Search archives using filters.""" for archive_path in self._search_files(): try: archive = Archive(archive_path) except ArchiveError: continue match = archive.origin == origin and \ archive.backend_name == backend_name and \ archive.category == category and \ archive.created_on >= archived_after if not match: continue yield archive_path, archive.created_on
def _search_archives(self, origin, backend_name, category, archived_after): """Search archives using filters.""" for archive_path in self._search_files(): try: archive = Archive(archive_path) except ArchiveError: continue match = archive.origin == origin and \ archive.backend_name == backend_name and \ archive.category == category and \ archive.created_on >= archived_after if not match: continue yield archive_path, archive.created_on
[ "Search", "archives", "using", "filters", "." ]
chaoss/grimoirelab-perceval
python
https://github.com/chaoss/grimoirelab-perceval/blob/41c908605e88b7ebc3a536c643fa0f212eaf9e0e/perceval/archive.py#L440-L457
[ "def", "_search_archives", "(", "self", ",", "origin", ",", "backend_name", ",", "category", ",", "archived_after", ")", ":", "for", "archive_path", "in", "self", ".", "_search_files", "(", ")", ":", "try", ":", "archive", "=", "Archive", "(", "archive_path", ")", "except", "ArchiveError", ":", "continue", "match", "=", "archive", ".", "origin", "==", "origin", "and", "archive", ".", "backend_name", "==", "backend_name", "and", "archive", ".", "category", "==", "category", "and", "archive", ".", "created_on", ">=", "archived_after", "if", "not", "match", ":", "continue", "yield", "archive_path", ",", "archive", ".", "created_on" ]
41c908605e88b7ebc3a536c643fa0f212eaf9e0e
test
ArchiveManager._search_files
Retrieve the file paths stored under the base path.
perceval/archive.py
def _search_files(self): """Retrieve the file paths stored under the base path.""" for root, _, files in os.walk(self.dirpath): for filename in files: location = os.path.join(root, filename) yield location
def _search_files(self): """Retrieve the file paths stored under the base path.""" for root, _, files in os.walk(self.dirpath): for filename in files: location = os.path.join(root, filename) yield location
[ "Retrieve", "the", "file", "paths", "stored", "under", "the", "base", "path", "." ]
chaoss/grimoirelab-perceval
python
https://github.com/chaoss/grimoirelab-perceval/blob/41c908605e88b7ebc3a536c643fa0f212eaf9e0e/perceval/archive.py#L459-L465
[ "def", "_search_files", "(", "self", ")", ":", "for", "root", ",", "_", ",", "files", "in", "os", ".", "walk", "(", "self", ".", "dirpath", ")", ":", "for", "filename", "in", "files", ":", "location", "=", "os", ".", "path", ".", "join", "(", "root", ",", "filename", ")", "yield", "location" ]
41c908605e88b7ebc3a536c643fa0f212eaf9e0e
test
check_compressed_file_type
Check if filename is a compressed file supported by the tool. This function uses magic numbers (first four bytes) to determine the type of the file. Supported types are 'gz', 'bz2' and 'zip'. When the filetype is not supported, the function returns `None`. :param filepath: path to the file :returns: 'gz', 'bz2' or 'zip'; `None` if the type is not supported
perceval/utils.py
def check_compressed_file_type(filepath): """Check if filename is a compressed file supported by the tool. This function uses magic numbers (first four bytes) to determine the type of the file. Supported types are 'gz' and 'bz2'. When the filetype is not supported, the function returns `None`. :param filepath: path to the file :returns: 'gz' or 'bz2'; `None` if the type is not supported """ def compressed_file_type(content): magic_dict = { b'\x1f\x8b\x08': 'gz', b'\x42\x5a\x68': 'bz2', b'PK\x03\x04': 'zip' } for magic, filetype in magic_dict.items(): if content.startswith(magic): return filetype return None with open(filepath, mode='rb') as f: magic_number = f.read(4) return compressed_file_type(magic_number)
def check_compressed_file_type(filepath): """Check if filename is a compressed file supported by the tool. This function uses magic numbers (first four bytes) to determine the type of the file. Supported types are 'gz' and 'bz2'. When the filetype is not supported, the function returns `None`. :param filepath: path to the file :returns: 'gz' or 'bz2'; `None` if the type is not supported """ def compressed_file_type(content): magic_dict = { b'\x1f\x8b\x08': 'gz', b'\x42\x5a\x68': 'bz2', b'PK\x03\x04': 'zip' } for magic, filetype in magic_dict.items(): if content.startswith(magic): return filetype return None with open(filepath, mode='rb') as f: magic_number = f.read(4) return compressed_file_type(magic_number)
[ "Check", "if", "filename", "is", "a", "compressed", "file", "supported", "by", "the", "tool", "." ]
chaoss/grimoirelab-perceval
python
https://github.com/chaoss/grimoirelab-perceval/blob/41c908605e88b7ebc3a536c643fa0f212eaf9e0e/perceval/utils.py#L50-L76
[ "def", "check_compressed_file_type", "(", "filepath", ")", ":", "def", "compressed_file_type", "(", "content", ")", ":", "magic_dict", "=", "{", "b'\\x1f\\x8b\\x08'", ":", "'gz'", ",", "b'\\x42\\x5a\\x68'", ":", "'bz2'", ",", "b'PK\\x03\\x04'", ":", "'zip'", "}", "for", "magic", ",", "filetype", "in", "magic_dict", ".", "items", "(", ")", ":", "if", "content", ".", "startswith", "(", "magic", ")", ":", "return", "filetype", "return", "None", "with", "open", "(", "filepath", ",", "mode", "=", "'rb'", ")", "as", "f", ":", "magic_number", "=", "f", ".", "read", "(", "4", ")", "return", "compressed_file_type", "(", "magic_number", ")" ]
41c908605e88b7ebc3a536c643fa0f212eaf9e0e
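A quick sketch of check_compressed_file_type using files generated on the fly: gzip and bzip2 content is recognized by its magic number, while plain text falls through to None.

import bz2
import gzip
import os
import tempfile

from perceval.utils import check_compressed_file_type

tmpdir = tempfile.mkdtemp()

gz_path = os.path.join(tmpdir, 'sample.gz')
with gzip.open(gz_path, 'wb') as fd:
    fd.write(b'hello')

bz2_path = os.path.join(tmpdir, 'sample.bz2')
with bz2.open(bz2_path, 'wb') as fd:
    fd.write(b'hello')

txt_path = os.path.join(tmpdir, 'sample.txt')
with open(txt_path, 'wb') as fd:
    fd.write(b'hello')

print(check_compressed_file_type(gz_path))   # 'gz'
print(check_compressed_file_type(bz2_path))  # 'bz2'
print(check_compressed_file_type(txt_path))  # None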
test
months_range
Generate a months range. Generator of months starting on `from_date` until `to_date`. Each returned item is a tuple of two datetime objects like in (month, month+1). Thus, the result will follow the sequence: ((fd, fd+1), (fd+1, fd+2), ..., (td-2, td-1), (td-1, td)) :param from_date: generate dates starting on this month :param to_date: generate dates until this month :result: a generator of months range
perceval/utils.py
def months_range(from_date, to_date): """Generate a months range. Generator of months starting on `from_date` util `to_date`. Each returned item is a tuple of two datatime objects like in (month, month+1). Thus, the result will follow the sequence: ((fd, fd+1), (fd+1, fd+2), ..., (td-2, td-1), (td-1, td)) :param from_date: generate dates starting on this month :param to_date: generate dates until this month :result: a generator of months range """ start = datetime.datetime(from_date.year, from_date.month, 1) end = datetime.datetime(to_date.year, to_date.month, 1) month_gen = dateutil.rrule.rrule(freq=dateutil.rrule.MONTHLY, dtstart=start, until=end) months = [d for d in month_gen] pos = 0 for x in range(1, len(months)): yield months[pos], months[x] pos = x
def months_range(from_date, to_date): """Generate a months range. Generator of months starting on `from_date` util `to_date`. Each returned item is a tuple of two datatime objects like in (month, month+1). Thus, the result will follow the sequence: ((fd, fd+1), (fd+1, fd+2), ..., (td-2, td-1), (td-1, td)) :param from_date: generate dates starting on this month :param to_date: generate dates until this month :result: a generator of months range """ start = datetime.datetime(from_date.year, from_date.month, 1) end = datetime.datetime(to_date.year, to_date.month, 1) month_gen = dateutil.rrule.rrule(freq=dateutil.rrule.MONTHLY, dtstart=start, until=end) months = [d for d in month_gen] pos = 0 for x in range(1, len(months)): yield months[pos], months[x] pos = x
[ "Generate", "a", "months", "range", "." ]
chaoss/grimoirelab-perceval
python
https://github.com/chaoss/grimoirelab-perceval/blob/41c908605e88b7ebc3a536c643fa0f212eaf9e0e/perceval/utils.py#L79-L102
[ "def", "months_range", "(", "from_date", ",", "to_date", ")", ":", "start", "=", "datetime", ".", "datetime", "(", "from_date", ".", "year", ",", "from_date", ".", "month", ",", "1", ")", "end", "=", "datetime", ".", "datetime", "(", "to_date", ".", "year", ",", "to_date", ".", "month", ",", "1", ")", "month_gen", "=", "dateutil", ".", "rrule", ".", "rrule", "(", "freq", "=", "dateutil", ".", "rrule", ".", "MONTHLY", ",", "dtstart", "=", "start", ",", "until", "=", "end", ")", "months", "=", "[", "d", "for", "d", "in", "month_gen", "]", "pos", "=", "0", "for", "x", "in", "range", "(", "1", ",", "len", "(", "months", ")", ")", ":", "yield", "months", "[", "pos", "]", ",", "months", "[", "x", "]", "pos", "=", "x" ]
41c908605e88b7ebc3a536c643fa0f212eaf9e0e
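A short sketch of months_range showing the sliding pairs of month boundaries it yields; note that the last pair ends on the first day of to_date's month.

import datetime

from perceval.utils import months_range

for start, end in months_range(datetime.datetime(2018, 11, 15),
                               datetime.datetime(2019, 2, 3)):
    print(start.date(), '->', end.date())

# 2018-11-01 -> 2018-12-01
# 2018-12-01 -> 2019-01-01
# 2019-01-01 -> 2019-02-01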
test
message_to_dict
Convert an email message into a dictionary. This function transforms an `email.message.Message` object into a dictionary. Headers are stored as key:value pairs while the body of the message is stored inside the `body` key. The body may have two other keys inside, 'plain' for plain text messages and 'html' for HTML encoded messages. The returned dictionary has the type `requests.structures.CaseInsensitiveDict` because the same headers can appear with different case formats in the same message. :param msg: email message of type `email.message.Message` :returns: dictionary of type `requests.structures.CaseInsensitiveDict` :raises ParseError: when an error occurs transforming the message to a dictionary
perceval/utils.py
def message_to_dict(msg): """Convert an email message into a dictionary. This function transforms an `email.message.Message` object into a dictionary. Headers are stored as key:value pairs while the body of the message is stored inside `body` key. Body may have two other keys inside, 'plain', for plain body messages and 'html', for HTML encoded messages. The returned dictionary has the type `requests.structures.CaseInsensitiveDict` due to same headers with different case formats can appear in the same message. :param msg: email message of type `email.message.Message` :returns : dictionary of type `requests.structures.CaseInsensitiveDict` :raises ParseError: when an error occurs transforming the message to a dictionary """ def parse_headers(msg): headers = {} for header, value in msg.items(): hv = [] for text, charset in email.header.decode_header(value): if type(text) == bytes: charset = charset if charset else 'utf-8' try: text = text.decode(charset, errors='surrogateescape') except (UnicodeError, LookupError): # Try again with a 7bit encoding text = text.decode('ascii', errors='surrogateescape') hv.append(text) v = ' '.join(hv) headers[header] = v if v else None return headers def parse_payload(msg): body = {} if not msg.is_multipart(): payload = decode_payload(msg) subtype = msg.get_content_subtype() body[subtype] = [payload] else: # Include all the attached texts if it is multipart # Ignores binary parts by default for part in email.iterators.typed_subpart_iterator(msg): payload = decode_payload(part) subtype = part.get_content_subtype() body.setdefault(subtype, []).append(payload) return {k: '\n'.join(v) for k, v in body.items()} def decode_payload(msg_or_part): charset = msg_or_part.get_content_charset('utf-8') payload = msg_or_part.get_payload(decode=True) try: payload = payload.decode(charset, errors='surrogateescape') except (UnicodeError, LookupError): # Try again with a 7bit encoding payload = payload.decode('ascii', errors='surrogateescape') return payload # The function starts here message = requests.structures.CaseInsensitiveDict() if isinstance(msg, mailbox.mboxMessage): message['unixfrom'] = msg.get_from() else: message['unixfrom'] = None try: for k, v in parse_headers(msg).items(): message[k] = v message['body'] = parse_payload(msg) except UnicodeError as e: raise ParseError(cause=str(e)) return message
def message_to_dict(msg): """Convert an email message into a dictionary. This function transforms an `email.message.Message` object into a dictionary. Headers are stored as key:value pairs while the body of the message is stored inside `body` key. Body may have two other keys inside, 'plain', for plain body messages and 'html', for HTML encoded messages. The returned dictionary has the type `requests.structures.CaseInsensitiveDict` due to same headers with different case formats can appear in the same message. :param msg: email message of type `email.message.Message` :returns : dictionary of type `requests.structures.CaseInsensitiveDict` :raises ParseError: when an error occurs transforming the message to a dictionary """ def parse_headers(msg): headers = {} for header, value in msg.items(): hv = [] for text, charset in email.header.decode_header(value): if type(text) == bytes: charset = charset if charset else 'utf-8' try: text = text.decode(charset, errors='surrogateescape') except (UnicodeError, LookupError): # Try again with a 7bit encoding text = text.decode('ascii', errors='surrogateescape') hv.append(text) v = ' '.join(hv) headers[header] = v if v else None return headers def parse_payload(msg): body = {} if not msg.is_multipart(): payload = decode_payload(msg) subtype = msg.get_content_subtype() body[subtype] = [payload] else: # Include all the attached texts if it is multipart # Ignores binary parts by default for part in email.iterators.typed_subpart_iterator(msg): payload = decode_payload(part) subtype = part.get_content_subtype() body.setdefault(subtype, []).append(payload) return {k: '\n'.join(v) for k, v in body.items()} def decode_payload(msg_or_part): charset = msg_or_part.get_content_charset('utf-8') payload = msg_or_part.get_payload(decode=True) try: payload = payload.decode(charset, errors='surrogateescape') except (UnicodeError, LookupError): # Try again with a 7bit encoding payload = payload.decode('ascii', errors='surrogateescape') return payload # The function starts here message = requests.structures.CaseInsensitiveDict() if isinstance(msg, mailbox.mboxMessage): message['unixfrom'] = msg.get_from() else: message['unixfrom'] = None try: for k, v in parse_headers(msg).items(): message[k] = v message['body'] = parse_payload(msg) except UnicodeError as e: raise ParseError(cause=str(e)) return message
[ "Convert", "an", "email", "message", "into", "a", "dictionary", "." ]
chaoss/grimoirelab-perceval
python
https://github.com/chaoss/grimoirelab-perceval/blob/41c908605e88b7ebc3a536c643fa0f212eaf9e0e/perceval/utils.py#L105-L189
[ "def", "message_to_dict", "(", "msg", ")", ":", "def", "parse_headers", "(", "msg", ")", ":", "headers", "=", "{", "}", "for", "header", ",", "value", "in", "msg", ".", "items", "(", ")", ":", "hv", "=", "[", "]", "for", "text", ",", "charset", "in", "email", ".", "header", ".", "decode_header", "(", "value", ")", ":", "if", "type", "(", "text", ")", "==", "bytes", ":", "charset", "=", "charset", "if", "charset", "else", "'utf-8'", "try", ":", "text", "=", "text", ".", "decode", "(", "charset", ",", "errors", "=", "'surrogateescape'", ")", "except", "(", "UnicodeError", ",", "LookupError", ")", ":", "# Try again with a 7bit encoding", "text", "=", "text", ".", "decode", "(", "'ascii'", ",", "errors", "=", "'surrogateescape'", ")", "hv", ".", "append", "(", "text", ")", "v", "=", "' '", ".", "join", "(", "hv", ")", "headers", "[", "header", "]", "=", "v", "if", "v", "else", "None", "return", "headers", "def", "parse_payload", "(", "msg", ")", ":", "body", "=", "{", "}", "if", "not", "msg", ".", "is_multipart", "(", ")", ":", "payload", "=", "decode_payload", "(", "msg", ")", "subtype", "=", "msg", ".", "get_content_subtype", "(", ")", "body", "[", "subtype", "]", "=", "[", "payload", "]", "else", ":", "# Include all the attached texts if it is multipart", "# Ignores binary parts by default", "for", "part", "in", "email", ".", "iterators", ".", "typed_subpart_iterator", "(", "msg", ")", ":", "payload", "=", "decode_payload", "(", "part", ")", "subtype", "=", "part", ".", "get_content_subtype", "(", ")", "body", ".", "setdefault", "(", "subtype", ",", "[", "]", ")", ".", "append", "(", "payload", ")", "return", "{", "k", ":", "'\\n'", ".", "join", "(", "v", ")", "for", "k", ",", "v", "in", "body", ".", "items", "(", ")", "}", "def", "decode_payload", "(", "msg_or_part", ")", ":", "charset", "=", "msg_or_part", ".", "get_content_charset", "(", "'utf-8'", ")", "payload", "=", "msg_or_part", ".", "get_payload", "(", "decode", "=", "True", ")", "try", ":", "payload", "=", "payload", ".", "decode", "(", "charset", ",", "errors", "=", "'surrogateescape'", ")", "except", "(", "UnicodeError", ",", "LookupError", ")", ":", "# Try again with a 7bit encoding", "payload", "=", "payload", ".", "decode", "(", "'ascii'", ",", "errors", "=", "'surrogateescape'", ")", "return", "payload", "# The function starts here", "message", "=", "requests", ".", "structures", ".", "CaseInsensitiveDict", "(", ")", "if", "isinstance", "(", "msg", ",", "mailbox", ".", "mboxMessage", ")", ":", "message", "[", "'unixfrom'", "]", "=", "msg", ".", "get_from", "(", ")", "else", ":", "message", "[", "'unixfrom'", "]", "=", "None", "try", ":", "for", "k", ",", "v", "in", "parse_headers", "(", "msg", ")", ".", "items", "(", ")", ":", "message", "[", "k", "]", "=", "v", "message", "[", "'body'", "]", "=", "parse_payload", "(", "msg", ")", "except", "UnicodeError", "as", "e", ":", "raise", "ParseError", "(", "cause", "=", "str", "(", "e", ")", ")", "return", "message" ]
41c908605e88b7ebc3a536c643fa0f212eaf9e0e
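A minimal sketch of message_to_dict on a plain-text message built with the standard library: headers become case-insensitive keys and the body text ends up under ['body']['plain'].

import email

from perceval.utils import message_to_dict

raw = (
    "From: Alice <alice@example.com>\n"
    "To: list@example.com\n"
    "Subject: Hello\n"
    "Content-Type: text/plain; charset=utf-8\n"
    "\n"
    "Hi everyone!\n"
)

msg = email.message_from_string(raw)
parsed = message_to_dict(msg)

print(parsed['subject'])        # 'Hello' (header lookup is case-insensitive)
print(parsed['body']['plain'])  # 'Hi everyone!\n'
print(parsed['unixfrom'])       # None for messages that do not come from an mbox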
test
remove_invalid_xml_chars
Remove control and invalid characters from an xml stream. Looks for invalid characters and substitutes them with whitespaces. This solution is based on these two posts: Olemis Lang's response on StackOverflow (http://stackoverflow.com/questions/1707890) and lawlesst's on GitHub Gist (https://gist.github.com/lawlesst/4110923), which is based on the previous answer. :param raw_xml: XML stream :returns: a purged XML stream
perceval/utils.py
def remove_invalid_xml_chars(raw_xml): """Remove control and invalid characters from an xml stream. Looks for invalid characters and subtitutes them with whitespaces. This solution is based on these two posts: Olemis Lang's reponse on StackOverflow (http://stackoverflow.com/questions/1707890) and lawlesst's on GitHub Gist (https://gist.github.com/lawlesst/4110923), that is based on the previous answer. :param xml: XML stream :returns: a purged XML stream """ illegal_unichrs = [(0x00, 0x08), (0x0B, 0x1F), (0x7F, 0x84), (0x86, 0x9F)] illegal_ranges = ['%s-%s' % (chr(low), chr(high)) for (low, high) in illegal_unichrs if low < sys.maxunicode] illegal_xml_re = re.compile('[%s]' % ''.join(illegal_ranges)) purged_xml = '' for c in raw_xml: if illegal_xml_re.search(c) is not None: c = ' ' purged_xml += c return purged_xml
def remove_invalid_xml_chars(raw_xml): """Remove control and invalid characters from an xml stream. Looks for invalid characters and subtitutes them with whitespaces. This solution is based on these two posts: Olemis Lang's reponse on StackOverflow (http://stackoverflow.com/questions/1707890) and lawlesst's on GitHub Gist (https://gist.github.com/lawlesst/4110923), that is based on the previous answer. :param xml: XML stream :returns: a purged XML stream """ illegal_unichrs = [(0x00, 0x08), (0x0B, 0x1F), (0x7F, 0x84), (0x86, 0x9F)] illegal_ranges = ['%s-%s' % (chr(low), chr(high)) for (low, high) in illegal_unichrs if low < sys.maxunicode] illegal_xml_re = re.compile('[%s]' % ''.join(illegal_ranges)) purged_xml = '' for c in raw_xml: if illegal_xml_re.search(c) is not None: c = ' ' purged_xml += c return purged_xml
[ "Remove", "control", "and", "invalid", "characters", "from", "an", "xml", "stream", "." ]
chaoss/grimoirelab-perceval
python
https://github.com/chaoss/grimoirelab-perceval/blob/41c908605e88b7ebc3a536c643fa0f212eaf9e0e/perceval/utils.py#L192-L221
[ "def", "remove_invalid_xml_chars", "(", "raw_xml", ")", ":", "illegal_unichrs", "=", "[", "(", "0x00", ",", "0x08", ")", ",", "(", "0x0B", ",", "0x1F", ")", ",", "(", "0x7F", ",", "0x84", ")", ",", "(", "0x86", ",", "0x9F", ")", "]", "illegal_ranges", "=", "[", "'%s-%s'", "%", "(", "chr", "(", "low", ")", ",", "chr", "(", "high", ")", ")", "for", "(", "low", ",", "high", ")", "in", "illegal_unichrs", "if", "low", "<", "sys", ".", "maxunicode", "]", "illegal_xml_re", "=", "re", ".", "compile", "(", "'[%s]'", "%", "''", ".", "join", "(", "illegal_ranges", ")", ")", "purged_xml", "=", "''", "for", "c", "in", "raw_xml", ":", "if", "illegal_xml_re", ".", "search", "(", "c", ")", "is", "not", "None", ":", "c", "=", "' '", "purged_xml", "+=", "c", "return", "purged_xml" ]
41c908605e88b7ebc3a536c643fa0f212eaf9e0e
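A tiny sketch of remove_invalid_xml_chars: control characters such as NUL or backspace are replaced with spaces so the stream can be parsed afterwards.

from perceval.utils import remove_invalid_xml_chars

raw = '<comment>broken\x00text\x08here</comment>'
print(remove_invalid_xml_chars(raw))
# '<comment>broken text here</comment>'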
test
xml_to_dict
Convert an XML stream into a dictionary. This function transforms an XML stream into a dictionary. The attributes are stored as single elements while child nodes are stored into lists. The text node is stored using the special key '__text__'. This code is based on Winston Ewert's solution to this problem. See http://codereview.stackexchange.com/questions/10400/convert-elementtree-to-dict for more info. The code was licensed as cc by-sa 3.0. :param raw_xml: XML stream :returns: a dict with the XML data :raises ParseError: raised when an error occurs parsing the given XML stream
perceval/utils.py
def xml_to_dict(raw_xml): """Convert a XML stream into a dictionary. This function transforms a xml stream into a dictionary. The attributes are stored as single elements while child nodes are stored into lists. The text node is stored using the special key '__text__'. This code is based on Winston Ewert's solution to this problem. See http://codereview.stackexchange.com/questions/10400/convert-elementtree-to-dict for more info. The code was licensed as cc by-sa 3.0. :param raw_xml: XML stream :returns: a dict with the XML data :raises ParseError: raised when an error occurs parsing the given XML stream """ def node_to_dict(node): d = {} d.update(node.items()) text = getattr(node, 'text', None) if text is not None: d['__text__'] = text childs = {} for child in node: childs.setdefault(child.tag, []).append(node_to_dict(child)) d.update(childs.items()) return d purged_xml = remove_invalid_xml_chars(raw_xml) try: tree = xml.etree.ElementTree.fromstring(purged_xml) except xml.etree.ElementTree.ParseError as e: cause = "XML stream %s" % (str(e)) raise ParseError(cause=cause) d = node_to_dict(tree) return d
def xml_to_dict(raw_xml): """Convert a XML stream into a dictionary. This function transforms a xml stream into a dictionary. The attributes are stored as single elements while child nodes are stored into lists. The text node is stored using the special key '__text__'. This code is based on Winston Ewert's solution to this problem. See http://codereview.stackexchange.com/questions/10400/convert-elementtree-to-dict for more info. The code was licensed as cc by-sa 3.0. :param raw_xml: XML stream :returns: a dict with the XML data :raises ParseError: raised when an error occurs parsing the given XML stream """ def node_to_dict(node): d = {} d.update(node.items()) text = getattr(node, 'text', None) if text is not None: d['__text__'] = text childs = {} for child in node: childs.setdefault(child.tag, []).append(node_to_dict(child)) d.update(childs.items()) return d purged_xml = remove_invalid_xml_chars(raw_xml) try: tree = xml.etree.ElementTree.fromstring(purged_xml) except xml.etree.ElementTree.ParseError as e: cause = "XML stream %s" % (str(e)) raise ParseError(cause=cause) d = node_to_dict(tree) return d
[ "Convert", "a", "XML", "stream", "into", "a", "dictionary", "." ]
chaoss/grimoirelab-perceval
python
https://github.com/chaoss/grimoirelab-perceval/blob/41c908605e88b7ebc3a536c643fa0f212eaf9e0e/perceval/utils.py#L224-L270
[ "def", "xml_to_dict", "(", "raw_xml", ")", ":", "def", "node_to_dict", "(", "node", ")", ":", "d", "=", "{", "}", "d", ".", "update", "(", "node", ".", "items", "(", ")", ")", "text", "=", "getattr", "(", "node", ",", "'text'", ",", "None", ")", "if", "text", "is", "not", "None", ":", "d", "[", "'__text__'", "]", "=", "text", "childs", "=", "{", "}", "for", "child", "in", "node", ":", "childs", ".", "setdefault", "(", "child", ".", "tag", ",", "[", "]", ")", ".", "append", "(", "node_to_dict", "(", "child", ")", ")", "d", ".", "update", "(", "childs", ".", "items", "(", ")", ")", "return", "d", "purged_xml", "=", "remove_invalid_xml_chars", "(", "raw_xml", ")", "try", ":", "tree", "=", "xml", ".", "etree", ".", "ElementTree", ".", "fromstring", "(", "purged_xml", ")", "except", "xml", ".", "etree", ".", "ElementTree", ".", "ParseError", "as", "e", ":", "cause", "=", "\"XML stream %s\"", "%", "(", "str", "(", "e", ")", ")", "raise", "ParseError", "(", "cause", "=", "cause", ")", "d", "=", "node_to_dict", "(", "tree", ")", "return", "d" ]
41c908605e88b7ebc3a536c643fa0f212eaf9e0e
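A brief sketch of xml_to_dict: attributes become plain keys, child elements are grouped into lists by tag, and element text is kept under the special '__text__' key.

from perceval.utils import xml_to_dict

raw = """<bug id="42">
  <comment author="alice">First</comment>
  <comment author="bob">Second</comment>
</bug>"""

parsed = xml_to_dict(raw)

print(parsed['id'])                      # '42'
print(len(parsed['comment']))            # 2
print(parsed['comment'][0]['author'])    # 'alice'
print(parsed['comment'][0]['__text__'])  # 'First'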