Dataset columns (type and value-length range):
  partition: stringclasses (3 values)
  func_name: stringlengths (1 to 134)
  docstring: stringlengths (1 to 46.9k)
  path: stringlengths (4 to 223)
  original_string: stringlengths (75 to 104k)
  code: stringlengths (75 to 104k)
  docstring_tokens: listlengths (1 to 1.97k)
  repo: stringlengths (7 to 55)
  language: stringclasses (1 value)
  url: stringlengths (87 to 315)
  code_tokens: listlengths (19 to 28.4k)
  sha: stringlengths (fixed length 40)
test
BigQueryBaseCursor.delete_dataset
Delete a BigQuery dataset in your project. :param project_id: The name of the project where we have the dataset. :type project_id: str :param dataset_id: The dataset to be deleted. :type dataset_id: str :return:
airflow/contrib/hooks/bigquery_hook.py
def delete_dataset(self, project_id, dataset_id):
    """
    Delete a BigQuery dataset in your project.

    :param project_id: The name of the project where we have the dataset.
    :type project_id: str
    :param dataset_id: The dataset to be deleted.
    :type dataset_id: str
    :return:
    """
    project_id = project_id if project_id is not None else self.project_id
    self.log.info('Deleting from project: %s Dataset:%s',
                  project_id, dataset_id)

    try:
        self.service.datasets().delete(
            projectId=project_id,
            datasetId=dataset_id).execute(num_retries=self.num_retries)
        self.log.info('Dataset deleted successfully: In project %s '
                      'Dataset %s', project_id, dataset_id)
    except HttpError as err:
        raise AirflowException(
            'BigQuery job failed. Error was: {}'.format(err.content)
        )
[ "Delete", "a", "dataset", "of", "Big", "query", "in", "your", "project", ".", ":", "param", "project_id", ":", "The", "name", "of", "the", "project", "where", "we", "have", "the", "dataset", ".", ":", "type", "project_id", ":", "str", ":", "param", "dataset_id", ":", "The", "dataset", "to", "be", "delete", ".", ":", "type", "dataset_id", ":", "str", ":", "return", ":" ]
apache/airflow
python
https://github.com/apache/airflow/blob/b69c686ad8a0c89b9136bb4b31767257eb7b2597/airflow/contrib/hooks/bigquery_hook.py#L1600-L1623
[ "def", "delete_dataset", "(", "self", ",", "project_id", ",", "dataset_id", ")", ":", "project_id", "=", "project_id", "if", "project_id", "is", "not", "None", "else", "self", ".", "project_id", "self", ".", "log", ".", "info", "(", "'Deleting from project: %s Dataset:%s'", ",", "project_id", ",", "dataset_id", ")", "try", ":", "self", ".", "service", ".", "datasets", "(", ")", ".", "delete", "(", "projectId", "=", "project_id", ",", "datasetId", "=", "dataset_id", ")", ".", "execute", "(", "num_retries", "=", "self", ".", "num_retries", ")", "self", ".", "log", ".", "info", "(", "'Dataset deleted successfully: In project %s '", "'Dataset %s'", ",", "project_id", ",", "dataset_id", ")", "except", "HttpError", "as", "err", ":", "raise", "AirflowException", "(", "'BigQuery job failed. Error was: {}'", ".", "format", "(", "err", ".", "content", ")", ")" ]
b69c686ad8a0c89b9136bb4b31767257eb7b2597
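A minimal usage sketch for the method above, assuming a configured Airflow connection named my_gcp_conn; the project and dataset identifiers are placeholders, not values from this record:

from airflow.contrib.hooks.bigquery_hook import BigQueryHook

# The hook's cursor exposes the BigQueryBaseCursor methods shown in these records.
hook = BigQueryHook(bigquery_conn_id='my_gcp_conn')
cursor = hook.get_conn().cursor()
cursor.delete_dataset(project_id='my-project', dataset_id='scratch_dataset')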
test
BigQueryBaseCursor.get_dataset
Method returns dataset_resource if the dataset exists and raises a 404 error if the dataset does not exist :param dataset_id: The BigQuery Dataset ID :type dataset_id: str :param project_id: The GCP Project ID :type project_id: str :return: dataset_resource .. seealso:: For more information, see Dataset Resource content: https://cloud.google.com/bigquery/docs/reference/rest/v2/datasets#resource
airflow/contrib/hooks/bigquery_hook.py
def get_dataset(self, dataset_id, project_id=None):
    """
    Method returns dataset_resource if the dataset exists and raises a 404
    error if the dataset does not exist

    :param dataset_id: The BigQuery Dataset ID
    :type dataset_id: str
    :param project_id: The GCP Project ID
    :type project_id: str
    :return: dataset_resource

        .. seealso::
            For more information, see Dataset Resource content:
            https://cloud.google.com/bigquery/docs/reference/rest/v2/datasets#resource
    """
    if not dataset_id or not isinstance(dataset_id, str):
        raise ValueError("dataset_id argument must be provided and has "
                         "a type 'str'. You provided: {}".format(dataset_id))

    dataset_project_id = project_id if project_id else self.project_id

    try:
        dataset_resource = self.service.datasets().get(
            datasetId=dataset_id, projectId=dataset_project_id).execute(
            num_retries=self.num_retries)
        self.log.info("Dataset Resource: %s", dataset_resource)
    except HttpError as err:
        raise AirflowException(
            'BigQuery job failed. Error was: {}'.format(err.content))

    return dataset_resource
[ "Method", "returns", "dataset_resource", "if", "dataset", "exist", "and", "raised", "404", "error", "if", "dataset", "does", "not", "exist" ]
apache/airflow
python
https://github.com/apache/airflow/blob/b69c686ad8a0c89b9136bb4b31767257eb7b2597/airflow/contrib/hooks/bigquery_hook.py#L1625-L1655
[ "def", "get_dataset", "(", "self", ",", "dataset_id", ",", "project_id", "=", "None", ")", ":", "if", "not", "dataset_id", "or", "not", "isinstance", "(", "dataset_id", ",", "str", ")", ":", "raise", "ValueError", "(", "\"dataset_id argument must be provided and has \"", "\"a type 'str'. You provided: {}\"", ".", "format", "(", "dataset_id", ")", ")", "dataset_project_id", "=", "project_id", "if", "project_id", "else", "self", ".", "project_id", "try", ":", "dataset_resource", "=", "self", ".", "service", ".", "datasets", "(", ")", ".", "get", "(", "datasetId", "=", "dataset_id", ",", "projectId", "=", "dataset_project_id", ")", ".", "execute", "(", "num_retries", "=", "self", ".", "num_retries", ")", "self", ".", "log", ".", "info", "(", "\"Dataset Resource: %s\"", ",", "dataset_resource", ")", "except", "HttpError", "as", "err", ":", "raise", "AirflowException", "(", "'BigQuery job failed. Error was: {}'", ".", "format", "(", "err", ".", "content", ")", ")", "return", "dataset_resource" ]
b69c686ad8a0c89b9136bb4b31767257eb7b2597
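A hedged example of calling get_dataset through the same cursor, again with a hypothetical connection id and identifiers:

from airflow.contrib.hooks.bigquery_hook import BigQueryHook

hook = BigQueryHook(bigquery_conn_id='my_gcp_conn')
cursor = hook.get_conn().cursor()

# Returns the Dataset Resource dict described in the docstring above.
dataset = cursor.get_dataset(dataset_id='my_dataset', project_id='my-project')
print(dataset['datasetReference']['datasetId'])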
test
BigQueryBaseCursor.get_datasets_list
Method returns the full list of BigQuery datasets in the current project .. seealso:: For more information, see: https://cloud.google.com/bigquery/docs/reference/rest/v2/datasets/list :param project_id: the Google Cloud project whose datasets should be listed :type project_id: str :return: datasets_list Example of returned datasets_list: :: [ { "kind":"bigquery#dataset", "location":"US", "id":"your-project:dataset_2_test", "datasetReference":{ "projectId":"your-project", "datasetId":"dataset_2_test" } }, { "kind":"bigquery#dataset", "location":"US", "id":"your-project:dataset_1_test", "datasetReference":{ "projectId":"your-project", "datasetId":"dataset_1_test" } } ]
airflow/contrib/hooks/bigquery_hook.py
def get_datasets_list(self, project_id=None):
    """
    Method returns the full list of BigQuery datasets in the current project

    .. seealso::
        For more information, see:
        https://cloud.google.com/bigquery/docs/reference/rest/v2/datasets/list

    :param project_id: the Google Cloud project whose datasets should be listed
    :type project_id: str
    :return: datasets_list

        Example of returned datasets_list: ::

            [
               {
                  "kind":"bigquery#dataset",
                  "location":"US",
                  "id":"your-project:dataset_2_test",
                  "datasetReference":{
                     "projectId":"your-project",
                     "datasetId":"dataset_2_test"
                  }
               },
               {
                  "kind":"bigquery#dataset",
                  "location":"US",
                  "id":"your-project:dataset_1_test",
                  "datasetReference":{
                     "projectId":"your-project",
                     "datasetId":"dataset_1_test"
                  }
               }
            ]
    """
    dataset_project_id = project_id if project_id else self.project_id

    try:
        datasets_list = self.service.datasets().list(
            projectId=dataset_project_id).execute(
            num_retries=self.num_retries)['datasets']
        self.log.info("Datasets List: %s", datasets_list)
    except HttpError as err:
        raise AirflowException(
            'BigQuery job failed. Error was: {}'.format(err.content))

    return datasets_list
[ "Method", "returns", "full", "list", "of", "BigQuery", "datasets", "in", "the", "current", "project" ]
apache/airflow
python
https://github.com/apache/airflow/blob/b69c686ad8a0c89b9136bb4b31767257eb7b2597/airflow/contrib/hooks/bigquery_hook.py#L1657-L1703
[ "def", "get_datasets_list", "(", "self", ",", "project_id", "=", "None", ")", ":", "dataset_project_id", "=", "project_id", "if", "project_id", "else", "self", ".", "project_id", "try", ":", "datasets_list", "=", "self", ".", "service", ".", "datasets", "(", ")", ".", "list", "(", "projectId", "=", "dataset_project_id", ")", ".", "execute", "(", "num_retries", "=", "self", ".", "num_retries", ")", "[", "'datasets'", "]", "self", ".", "log", ".", "info", "(", "\"Datasets List: %s\"", ",", "datasets_list", ")", "except", "HttpError", "as", "err", ":", "raise", "AirflowException", "(", "'BigQuery job failed. Error was: {}'", ".", "format", "(", "err", ".", "content", ")", ")", "return", "datasets_list" ]
b69c686ad8a0c89b9136bb4b31767257eb7b2597
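A short sketch of listing datasets with the method above (hypothetical connection id and project). Note that the implementation indexes the 'datasets' key directly, so a project with no datasets raises a KeyError rather than returning an empty list:

from airflow.contrib.hooks.bigquery_hook import BigQueryHook

hook = BigQueryHook(bigquery_conn_id='my_gcp_conn')
cursor = hook.get_conn().cursor()
for ds in cursor.get_datasets_list(project_id='my-project'):
    print(ds['datasetReference']['datasetId'])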
test
BigQueryBaseCursor.insert_all
Method to stream data into BigQuery one record at a time without needing to run a load job .. seealso:: For more information, see: https://cloud.google.com/bigquery/docs/reference/rest/v2/tabledata/insertAll :param project_id: The name of the project where we have the table :type project_id: str :param dataset_id: The name of the dataset where we have the table :type dataset_id: str :param table_id: The name of the table :type table_id: str :param rows: the rows to insert :type rows: list **Example of rows**: rows=[{"json": {"a_key": "a_value_0"}}, {"json": {"a_key": "a_value_1"}}] :param ignore_unknown_values: [Optional] Accept rows that contain values that do not match the schema. The unknown values are ignored. The default value is false, which treats unknown values as errors. :type ignore_unknown_values: bool :param skip_invalid_rows: [Optional] Insert all valid rows of a request, even if invalid rows exist. The default value is false, which causes the entire request to fail if any invalid rows exist. :type skip_invalid_rows: bool :param fail_on_error: [Optional] Force the task to fail if any errors occur. The default value is false, which indicates the task should not fail even if any insertion errors occur. :type fail_on_error: bool
airflow/contrib/hooks/bigquery_hook.py
def insert_all(self, project_id, dataset_id, table_id,
               rows, ignore_unknown_values=False,
               skip_invalid_rows=False, fail_on_error=False):
    """
    Method to stream data into BigQuery one record at a time without needing
    to run a load job

    .. seealso::
        For more information, see:
        https://cloud.google.com/bigquery/docs/reference/rest/v2/tabledata/insertAll

    :param project_id: The name of the project where we have the table
    :type project_id: str
    :param dataset_id: The name of the dataset where we have the table
    :type dataset_id: str
    :param table_id: The name of the table
    :type table_id: str
    :param rows: the rows to insert
    :type rows: list

        **Example of rows**:
            rows=[{"json": {"a_key": "a_value_0"}}, {"json": {"a_key": "a_value_1"}}]

    :param ignore_unknown_values: [Optional] Accept rows that contain values
        that do not match the schema. The unknown values are ignored.
        The default value is false, which treats unknown values as errors.
    :type ignore_unknown_values: bool
    :param skip_invalid_rows: [Optional] Insert all valid rows of a request,
        even if invalid rows exist. The default value is false, which causes
        the entire request to fail if any invalid rows exist.
    :type skip_invalid_rows: bool
    :param fail_on_error: [Optional] Force the task to fail if any errors occur.
        The default value is false, which indicates the task should not fail
        even if any insertion errors occur.
    :type fail_on_error: bool
    """
    dataset_project_id = project_id if project_id else self.project_id

    body = {
        "rows": rows,
        "ignoreUnknownValues": ignore_unknown_values,
        "kind": "bigquery#tableDataInsertAllRequest",
        "skipInvalidRows": skip_invalid_rows,
    }

    try:
        self.log.info(
            'Inserting %s row(s) into Table %s:%s.%s',
            len(rows), dataset_project_id, dataset_id, table_id
        )

        resp = self.service.tabledata().insertAll(
            projectId=dataset_project_id, datasetId=dataset_id,
            tableId=table_id, body=body
        ).execute(num_retries=self.num_retries)

        if 'insertErrors' not in resp:
            self.log.info(
                'All row(s) inserted successfully: %s:%s.%s',
                dataset_project_id, dataset_id, table_id
            )
        else:
            error_msg = '{} insert error(s) occurred: {}:{}.{}. Details: {}'.format(
                len(resp['insertErrors']),
                dataset_project_id, dataset_id, table_id, resp['insertErrors'])
            if fail_on_error:
                raise AirflowException(
                    'BigQuery job failed. Error was: {}'.format(error_msg)
                )
            self.log.info(error_msg)
    except HttpError as err:
        raise AirflowException(
            'BigQuery job failed. Error was: {}'.format(err.content)
        )
[ "Method", "to", "stream", "data", "into", "BigQuery", "one", "record", "at", "a", "time", "without", "needing", "to", "run", "a", "load", "job" ]
apache/airflow
python
https://github.com/apache/airflow/blob/b69c686ad8a0c89b9136bb4b31767257eb7b2597/airflow/contrib/hooks/bigquery_hook.py#L1705-L1779
[ "def", "insert_all", "(", "self", ",", "project_id", ",", "dataset_id", ",", "table_id", ",", "rows", ",", "ignore_unknown_values", "=", "False", ",", "skip_invalid_rows", "=", "False", ",", "fail_on_error", "=", "False", ")", ":", "dataset_project_id", "=", "project_id", "if", "project_id", "else", "self", ".", "project_id", "body", "=", "{", "\"rows\"", ":", "rows", ",", "\"ignoreUnknownValues\"", ":", "ignore_unknown_values", ",", "\"kind\"", ":", "\"bigquery#tableDataInsertAllRequest\"", ",", "\"skipInvalidRows\"", ":", "skip_invalid_rows", ",", "}", "try", ":", "self", ".", "log", ".", "info", "(", "'Inserting %s row(s) into Table %s:%s.%s'", ",", "len", "(", "rows", ")", ",", "dataset_project_id", ",", "dataset_id", ",", "table_id", ")", "resp", "=", "self", ".", "service", ".", "tabledata", "(", ")", ".", "insertAll", "(", "projectId", "=", "dataset_project_id", ",", "datasetId", "=", "dataset_id", ",", "tableId", "=", "table_id", ",", "body", "=", "body", ")", ".", "execute", "(", "num_retries", "=", "self", ".", "num_retries", ")", "if", "'insertErrors'", "not", "in", "resp", ":", "self", ".", "log", ".", "info", "(", "'All row(s) inserted successfully: %s:%s.%s'", ",", "dataset_project_id", ",", "dataset_id", ",", "table_id", ")", "else", ":", "error_msg", "=", "'{} insert error(s) occurred: {}:{}.{}. Details: {}'", ".", "format", "(", "len", "(", "resp", "[", "'insertErrors'", "]", ")", ",", "dataset_project_id", ",", "dataset_id", ",", "table_id", ",", "resp", "[", "'insertErrors'", "]", ")", "if", "fail_on_error", ":", "raise", "AirflowException", "(", "'BigQuery job failed. Error was: {}'", ".", "format", "(", "error_msg", ")", ")", "self", ".", "log", ".", "info", "(", "error_msg", ")", "except", "HttpError", "as", "err", ":", "raise", "AirflowException", "(", "'BigQuery job failed. Error was: {}'", ".", "format", "(", "err", ".", "content", ")", ")" ]
b69c686ad8a0c89b9136bb4b31767257eb7b2597
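A usage sketch for the streaming insert above, reusing the rows format from the docstring; the connection id, project, dataset and table names are placeholders:

from airflow.contrib.hooks.bigquery_hook import BigQueryHook

hook = BigQueryHook(bigquery_conn_id='my_gcp_conn')
cursor = hook.get_conn().cursor()

rows = [
    {"json": {"a_key": "a_value_0"}},
    {"json": {"a_key": "a_value_1"}},
]
cursor.insert_all(
    project_id='my-project',
    dataset_id='my_dataset',
    table_id='my_table',
    rows=rows,
    fail_on_error=True,  # raise instead of only logging insert errors
)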
test
BigQueryCursor.execute
Executes a BigQuery query, and returns the job ID. :param operation: The query to execute. :type operation: str :param parameters: Parameters to substitute into the query. :type parameters: dict
airflow/contrib/hooks/bigquery_hook.py
def execute(self, operation, parameters=None):
    """
    Executes a BigQuery query, and returns the job ID.

    :param operation: The query to execute.
    :type operation: str
    :param parameters: Parameters to substitute into the query.
    :type parameters: dict
    """
    sql = _bind_parameters(operation, parameters) if parameters else operation
    self.job_id = self.run_query(sql)
[ "Executes", "a", "BigQuery", "query", "and", "returns", "the", "job", "ID", "." ]
apache/airflow
python
https://github.com/apache/airflow/blob/b69c686ad8a0c89b9136bb4b31767257eb7b2597/airflow/contrib/hooks/bigquery_hook.py#L1819-L1830
[ "def", "execute", "(", "self", ",", "operation", ",", "parameters", "=", "None", ")", ":", "sql", "=", "_bind_parameters", "(", "operation", ",", "parameters", ")", "if", "parameters", "else", "operation", "self", ".", "job_id", "=", "self", ".", "run_query", "(", "sql", ")" ]
b69c686ad8a0c89b9136bb4b31767257eb7b2597
test
BigQueryCursor.executemany
Execute a BigQuery query multiple times with different parameters. :param operation: The query to execute. :type operation: str :param seq_of_parameters: List of dictionary parameters to substitute into the query. :type seq_of_parameters: list
airflow/contrib/hooks/bigquery_hook.py
def executemany(self, operation, seq_of_parameters):
    """
    Execute a BigQuery query multiple times with different parameters.

    :param operation: The query to execute.
    :type operation: str
    :param seq_of_parameters: List of dictionary parameters to substitute into
        the query.
    :type seq_of_parameters: list
    """
    for parameters in seq_of_parameters:
        self.execute(operation, parameters)
[ "Execute", "a", "BigQuery", "query", "multiple", "times", "with", "different", "parameters", "." ]
apache/airflow
python
https://github.com/apache/airflow/blob/b69c686ad8a0c89b9136bb4b31767257eb7b2597/airflow/contrib/hooks/bigquery_hook.py#L1832-L1843
[ "def", "executemany", "(", "self", ",", "operation", ",", "seq_of_parameters", ")", ":", "for", "parameters", "in", "seq_of_parameters", ":", "self", ".", "execute", "(", "operation", ",", "parameters", ")" ]
b69c686ad8a0c89b9136bb4b31767257eb7b2597
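execute and executemany follow the PEP 249 pattern; parameters are substituted client-side by _bind_parameters using %(name)s placeholders. A minimal sketch, assuming this Airflow version's BigQueryHook accepts use_legacy_sql and that the table names are placeholders:

from airflow.contrib.hooks.bigquery_hook import BigQueryHook

hook = BigQueryHook(bigquery_conn_id='my_gcp_conn', use_legacy_sql=False)
cursor = hook.get_conn().cursor()

# One query, one parameter set.
cursor.execute(
    'SELECT * FROM `my-project.my_dataset.my_table` WHERE id = %(id)s',
    {'id': 42},
)

# Same statement executed once per parameter set.
cursor.executemany(
    'DELETE FROM `my-project.my_dataset.my_table` WHERE id = %(id)s',
    [{'id': 1}, {'id': 2}],
)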
test
BigQueryCursor.next
Helper method for fetchone, which returns the next row from a buffer. If the buffer is empty, attempts to paginate through the result set for the next page, and load it into the buffer.
airflow/contrib/hooks/bigquery_hook.py
def next(self):
    """
    Helper method for fetchone, which returns the next row from a buffer.
    If the buffer is empty, attempts to paginate through the result set for
    the next page, and load it into the buffer.
    """
    if not self.job_id:
        return None

    if len(self.buffer) == 0:
        if self.all_pages_loaded:
            return None

        query_results = (self.service.jobs().getQueryResults(
            projectId=self.project_id,
            jobId=self.job_id,
            pageToken=self.page_token).execute(num_retries=self.num_retries))

        if 'rows' in query_results and query_results['rows']:
            self.page_token = query_results.get('pageToken')
            fields = query_results['schema']['fields']
            col_types = [field['type'] for field in fields]
            rows = query_results['rows']

            for dict_row in rows:
                typed_row = ([
                    _bq_cast(vs['v'], col_types[idx])
                    for idx, vs in enumerate(dict_row['f'])
                ])
                self.buffer.append(typed_row)

            if not self.page_token:
                self.all_pages_loaded = True

        else:
            # Reset all state since we've exhausted the results.
            self.page_token = None
            self.job_id = None
            self.page_token = None
            return None

    return self.buffer.pop(0)
[ "Helper", "method", "for", "fetchone", "which", "returns", "the", "next", "row", "from", "a", "buffer", ".", "If", "the", "buffer", "is", "empty", "attempts", "to", "paginate", "through", "the", "result", "set", "for", "the", "next", "page", "and", "load", "it", "into", "the", "buffer", "." ]
apache/airflow
python
https://github.com/apache/airflow/blob/b69c686ad8a0c89b9136bb4b31767257eb7b2597/airflow/contrib/hooks/bigquery_hook.py#L1849-L1890
[ "def", "next", "(", "self", ")", ":", "if", "not", "self", ".", "job_id", ":", "return", "None", "if", "len", "(", "self", ".", "buffer", ")", "==", "0", ":", "if", "self", ".", "all_pages_loaded", ":", "return", "None", "query_results", "=", "(", "self", ".", "service", ".", "jobs", "(", ")", ".", "getQueryResults", "(", "projectId", "=", "self", ".", "project_id", ",", "jobId", "=", "self", ".", "job_id", ",", "pageToken", "=", "self", ".", "page_token", ")", ".", "execute", "(", "num_retries", "=", "self", ".", "num_retries", ")", ")", "if", "'rows'", "in", "query_results", "and", "query_results", "[", "'rows'", "]", ":", "self", ".", "page_token", "=", "query_results", ".", "get", "(", "'pageToken'", ")", "fields", "=", "query_results", "[", "'schema'", "]", "[", "'fields'", "]", "col_types", "=", "[", "field", "[", "'type'", "]", "for", "field", "in", "fields", "]", "rows", "=", "query_results", "[", "'rows'", "]", "for", "dict_row", "in", "rows", ":", "typed_row", "=", "(", "[", "_bq_cast", "(", "vs", "[", "'v'", "]", ",", "col_types", "[", "idx", "]", ")", "for", "idx", ",", "vs", "in", "enumerate", "(", "dict_row", "[", "'f'", "]", ")", "]", ")", "self", ".", "buffer", ".", "append", "(", "typed_row", ")", "if", "not", "self", ".", "page_token", ":", "self", ".", "all_pages_loaded", "=", "True", "else", ":", "# Reset all state since we've exhausted the results.", "self", ".", "page_token", "=", "None", "self", ".", "job_id", "=", "None", "self", ".", "page_token", "=", "None", "return", "None", "return", "self", ".", "buffer", ".", "pop", "(", "0", ")" ]
b69c686ad8a0c89b9136bb4b31767257eb7b2597
test
BigQueryCursor.fetchmany
Fetch the next set of rows of a query result, returning a sequence of sequences (e.g. a list of tuples). An empty sequence is returned when no more rows are available. The number of rows to fetch per call is specified by the parameter. If it is not given, the cursor's arraysize determines the number of rows to be fetched. The method should try to fetch as many rows as indicated by the size parameter. If this is not possible due to the specified number of rows not being available, fewer rows may be returned. An :py:class:`~pyhive.exc.Error` (or subclass) exception is raised if the previous call to :py:meth:`execute` did not produce any result set or no call was issued yet.
airflow/contrib/hooks/bigquery_hook.py
def fetchmany(self, size=None):
    """
    Fetch the next set of rows of a query result, returning a sequence of
    sequences (e.g. a list of tuples). An empty sequence is returned when no
    more rows are available. The number of rows to fetch per call is specified
    by the parameter. If it is not given, the cursor's arraysize determines
    the number of rows to be fetched. The method should try to fetch as many
    rows as indicated by the size parameter. If this is not possible due to
    the specified number of rows not being available, fewer rows may be
    returned. An :py:class:`~pyhive.exc.Error` (or subclass) exception is
    raised if the previous call to :py:meth:`execute` did not produce any
    result set or no call was issued yet.
    """
    if size is None:
        size = self.arraysize
    result = []
    for _ in range(size):
        one = self.fetchone()
        if one is None:
            break
        else:
            result.append(one)
    return result
[ "Fetch", "the", "next", "set", "of", "rows", "of", "a", "query", "result", "returning", "a", "sequence", "of", "sequences", "(", "e", ".", "g", ".", "a", "list", "of", "tuples", ")", ".", "An", "empty", "sequence", "is", "returned", "when", "no", "more", "rows", "are", "available", ".", "The", "number", "of", "rows", "to", "fetch", "per", "call", "is", "specified", "by", "the", "parameter", ".", "If", "it", "is", "not", "given", "the", "cursor", "s", "arraysize", "determines", "the", "number", "of", "rows", "to", "be", "fetched", ".", "The", "method", "should", "try", "to", "fetch", "as", "many", "rows", "as", "indicated", "by", "the", "size", "parameter", ".", "If", "this", "is", "not", "possible", "due", "to", "the", "specified", "number", "of", "rows", "not", "being", "available", "fewer", "rows", "may", "be", "returned", ".", "An", ":", "py", ":", "class", ":", "~pyhive", ".", "exc", ".", "Error", "(", "or", "subclass", ")", "exception", "is", "raised", "if", "the", "previous", "call", "to", ":", "py", ":", "meth", ":", "execute", "did", "not", "produce", "any", "result", "set", "or", "no", "call", "was", "issued", "yet", "." ]
apache/airflow
python
https://github.com/apache/airflow/blob/b69c686ad8a0c89b9136bb4b31767257eb7b2597/airflow/contrib/hooks/bigquery_hook.py#L1892-L1913
[ "def", "fetchmany", "(", "self", ",", "size", "=", "None", ")", ":", "if", "size", "is", "None", ":", "size", "=", "self", ".", "arraysize", "result", "=", "[", "]", "for", "_", "in", "range", "(", "size", ")", ":", "one", "=", "self", ".", "fetchone", "(", ")", "if", "one", "is", "None", ":", "break", "else", ":", "result", ".", "append", "(", "one", ")", "return", "result" ]
b69c686ad8a0c89b9136bb4b31767257eb7b2597
test
BigQueryCursor.fetchall
Fetch all (remaining) rows of a query result, returning them as a sequence of sequences (e.g. a list of tuples).
airflow/contrib/hooks/bigquery_hook.py
def fetchall(self):
    """
    Fetch all (remaining) rows of a query result, returning them as a sequence
    of sequences (e.g. a list of tuples).
    """
    result = []
    while True:
        one = self.fetchone()
        if one is None:
            break
        else:
            result.append(one)
    return result
[ "Fetch", "all", "(", "remaining", ")", "rows", "of", "a", "query", "result", "returning", "them", "as", "a", "sequence", "of", "sequences", "(", "e", ".", "g", ".", "a", "list", "of", "tuples", ")", "." ]
apache/airflow
python
https://github.com/apache/airflow/blob/b69c686ad8a0c89b9136bb4b31767257eb7b2597/airflow/contrib/hooks/bigquery_hook.py#L1915-L1927
[ "def", "fetchall", "(", "self", ")", ":", "result", "=", "[", "]", "while", "True", ":", "one", "=", "self", ".", "fetchone", "(", ")", "if", "one", "is", "None", ":", "break", "else", ":", "result", ".", "append", "(", "one", ")", "return", "result" ]
b69c686ad8a0c89b9136bb4b31767257eb7b2597
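Together with next/fetchone, the fetch helpers give the usual DB-API access pattern; a brief sketch under the same placeholder assumptions as the execute example above:

from airflow.contrib.hooks.bigquery_hook import BigQueryHook

hook = BigQueryHook(bigquery_conn_id='my_gcp_conn', use_legacy_sql=False)
cursor = hook.get_conn().cursor()
cursor.execute('SELECT name, value FROM `my-project.my_dataset.my_table`')

first = cursor.fetchone()      # one typed row, or None when exhausted
batch = cursor.fetchmany(500)  # up to 500 further rows
rest = cursor.fetchall()       # everything that remains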
test
configure_manifest_files
Loads the manifest file and registers the `url_for_asset_` template tag. :param app: :return:
airflow/www/static_config.py
def configure_manifest_files(app):
    """
    Loads the manifest file and registers the `url_for_asset_` template tag.

    :param app:
    :return:
    """

    def parse_manifest_json():
        # noinspection PyBroadException
        try:
            global manifest
            manifest_file = os.path.join(os.path.dirname(__file__),
                                         'static/dist/manifest.json')
            with open(manifest_file, 'r') as f:
                manifest.update(json.load(f))

                for k in manifest.keys():
                    manifest[k] = os.path.join("dist", manifest[k])
        except Exception:
            print("Please make sure to build the frontend in "
                  "static/ directory and restart the server")
            pass

    def get_asset_url(filename):
        if app.debug:
            parse_manifest_json()
        return url_for('static', filename=manifest.get(filename, ''))

    parse_manifest_json()

    @app.context_processor
    def get_url_for_asset():
        """
        Template tag to return the asset URL.
        WebPack renders the assets after minification and modification
        under the static/dist folder.
        This template tag reads the asset name in manifest.json and returns
        the appropriate file.
        """
        return dict(url_for_asset=get_asset_url)
[ "Loads", "the", "manifest", "file", "and", "register", "the", "url_for_asset_", "template", "tag", ".", ":", "param", "app", ":", ":", "return", ":" ]
apache/airflow
python
https://github.com/apache/airflow/blob/b69c686ad8a0c89b9136bb4b31767257eb7b2597/airflow/www/static_config.py#L29-L68
[ "def", "configure_manifest_files", "(", "app", ")", ":", "def", "parse_manifest_json", "(", ")", ":", "# noinspection PyBroadException", "try", ":", "global", "manifest", "manifest_file", "=", "os", ".", "path", ".", "join", "(", "os", ".", "path", ".", "dirname", "(", "__file__", ")", ",", "'static/dist/manifest.json'", ")", "with", "open", "(", "manifest_file", ",", "'r'", ")", "as", "f", ":", "manifest", ".", "update", "(", "json", ".", "load", "(", "f", ")", ")", "for", "k", "in", "manifest", ".", "keys", "(", ")", ":", "manifest", "[", "k", "]", "=", "os", ".", "path", ".", "join", "(", "\"dist\"", ",", "manifest", "[", "k", "]", ")", "except", "Exception", ":", "print", "(", "\"Please make sure to build the frontend in \"", "\"static/ directory and restart the server\"", ")", "pass", "def", "get_asset_url", "(", "filename", ")", ":", "if", "app", ".", "debug", ":", "parse_manifest_json", "(", ")", "return", "url_for", "(", "'static'", ",", "filename", "=", "manifest", ".", "get", "(", "filename", ",", "''", ")", ")", "parse_manifest_json", "(", ")", "@", "app", ".", "context_processor", "def", "get_url_for_asset", "(", ")", ":", "\"\"\"\n Template tag to return the asset URL.\n WebPack renders the assets after minification and modification\n under the static/dist folder.\n This template tag reads the asset name in manifest.json and returns\n the appropriate file.\n \"\"\"", "return", "dict", "(", "url_for_asset", "=", "get_asset_url", ")" ]
b69c686ad8a0c89b9136bb4b31767257eb7b2597
test
PostgresToGoogleCloudStorageOperator._query_postgres
Queries Postgres and returns a cursor to the results.
airflow/contrib/operators/postgres_to_gcs_operator.py
def _query_postgres(self):
    """
    Queries Postgres and returns a cursor to the results.
    """
    postgres = PostgresHook(postgres_conn_id=self.postgres_conn_id)
    conn = postgres.get_conn()
    cursor = conn.cursor()
    cursor.execute(self.sql, self.parameters)
    return cursor
[ "Queries", "Postgres", "and", "returns", "a", "cursor", "to", "the", "results", "." ]
apache/airflow
python
https://github.com/apache/airflow/blob/b69c686ad8a0c89b9136bb4b31767257eb7b2597/airflow/contrib/operators/postgres_to_gcs_operator.py#L114-L122
[ "def", "_query_postgres", "(", "self", ")", ":", "postgres", "=", "PostgresHook", "(", "postgres_conn_id", "=", "self", ".", "postgres_conn_id", ")", "conn", "=", "postgres", ".", "get_conn", "(", ")", "cursor", "=", "conn", ".", "cursor", "(", ")", "cursor", ".", "execute", "(", "self", ".", "sql", ",", "self", ".", "parameters", ")", "return", "cursor" ]
b69c686ad8a0c89b9136bb4b31767257eb7b2597
test
PostgresToGoogleCloudStorageOperator._write_local_data_files
Takes a cursor, and writes results to a local file. :return: A dictionary where keys are filenames to be used as object names in GCS, and values are file handles to local files that contain the data for the GCS objects.
airflow/contrib/operators/postgres_to_gcs_operator.py
def _write_local_data_files(self, cursor):
    """
    Takes a cursor, and writes results to a local file.

    :return: A dictionary where keys are filenames to be used as object
        names in GCS, and values are file handles to local files that
        contain the data for the GCS objects.
    """
    schema = list(map(lambda schema_tuple: schema_tuple[0], cursor.description))
    tmp_file_handles = {}
    row_no = 0

    def _create_new_file():
        handle = NamedTemporaryFile(delete=True)
        filename = self.filename.format(len(tmp_file_handles))
        tmp_file_handles[filename] = handle
        return handle

    # Don't create a file if there is nothing to write
    if cursor.rowcount > 0:
        tmp_file_handle = _create_new_file()

        for row in cursor:
            # Convert datetime objects to utc seconds, and decimals to floats
            row = map(self.convert_types, row)
            row_dict = dict(zip(schema, row))

            s = json.dumps(row_dict, sort_keys=True).encode('utf-8')
            tmp_file_handle.write(s)

            # Append newline to make dumps BigQuery compatible.
            tmp_file_handle.write(b'\n')

            # Stop if the file exceeds the file size limit.
            if tmp_file_handle.tell() >= self.approx_max_file_size_bytes:
                tmp_file_handle = _create_new_file()
            row_no += 1

    self.log.info('Received %s rows over %s files', row_no, len(tmp_file_handles))

    return tmp_file_handles
[ "Takes", "a", "cursor", "and", "writes", "results", "to", "a", "local", "file", "." ]
apache/airflow
python
https://github.com/apache/airflow/blob/b69c686ad8a0c89b9136bb4b31767257eb7b2597/airflow/contrib/operators/postgres_to_gcs_operator.py#L124-L164
[ "def", "_write_local_data_files", "(", "self", ",", "cursor", ")", ":", "schema", "=", "list", "(", "map", "(", "lambda", "schema_tuple", ":", "schema_tuple", "[", "0", "]", ",", "cursor", ".", "description", ")", ")", "tmp_file_handles", "=", "{", "}", "row_no", "=", "0", "def", "_create_new_file", "(", ")", ":", "handle", "=", "NamedTemporaryFile", "(", "delete", "=", "True", ")", "filename", "=", "self", ".", "filename", ".", "format", "(", "len", "(", "tmp_file_handles", ")", ")", "tmp_file_handles", "[", "filename", "]", "=", "handle", "return", "handle", "# Don't create a file if there is nothing to write", "if", "cursor", ".", "rowcount", ">", "0", ":", "tmp_file_handle", "=", "_create_new_file", "(", ")", "for", "row", "in", "cursor", ":", "# Convert datetime objects to utc seconds, and decimals to floats", "row", "=", "map", "(", "self", ".", "convert_types", ",", "row", ")", "row_dict", "=", "dict", "(", "zip", "(", "schema", ",", "row", ")", ")", "s", "=", "json", ".", "dumps", "(", "row_dict", ",", "sort_keys", "=", "True", ")", ".", "encode", "(", "'utf-8'", ")", "tmp_file_handle", ".", "write", "(", "s", ")", "# Append newline to make dumps BigQuery compatible.", "tmp_file_handle", ".", "write", "(", "b'\\n'", ")", "# Stop if the file exceeds the file size limit.", "if", "tmp_file_handle", ".", "tell", "(", ")", ">=", "self", ".", "approx_max_file_size_bytes", ":", "tmp_file_handle", "=", "_create_new_file", "(", ")", "row_no", "+=", "1", "self", ".", "log", ".", "info", "(", "'Received %s rows over %s files'", ",", "row_no", ",", "len", "(", "tmp_file_handles", ")", ")", "return", "tmp_file_handles" ]
b69c686ad8a0c89b9136bb4b31767257eb7b2597
test
PostgresToGoogleCloudStorageOperator._write_local_schema_file
Takes a cursor, and writes the BigQuery schema for the results to a local file system. :return: A dictionary where key is a filename to be used as an object name in GCS, and values are file handles to local files that contain the BigQuery schema fields in .json format.
airflow/contrib/operators/postgres_to_gcs_operator.py
def _write_local_schema_file(self, cursor):
    """
    Takes a cursor, and writes the BigQuery schema for the results to a
    local file system.

    :return: A dictionary where key is a filename to be used as an object
        name in GCS, and values are file handles to local files that
        contain the BigQuery schema fields in .json format.
    """
    schema = []
    for field in cursor.description:
        # See PEP 249 for details about the description tuple.
        field_name = field[0]
        field_type = self.type_map(field[1])
        field_mode = 'REPEATED' if field[1] in (1009, 1005, 1007,
                                                1016) else 'NULLABLE'
        schema.append({
            'name': field_name,
            'type': field_type,
            'mode': field_mode,
        })

    self.log.info('Using schema for %s: %s', self.schema_filename, schema)
    tmp_schema_file_handle = NamedTemporaryFile(delete=True)
    s = json.dumps(schema, sort_keys=True).encode('utf-8')
    tmp_schema_file_handle.write(s)
    return {self.schema_filename: tmp_schema_file_handle}
[ "Takes", "a", "cursor", "and", "writes", "the", "BigQuery", "schema", "for", "the", "results", "to", "a", "local", "file", "system", "." ]
apache/airflow
python
https://github.com/apache/airflow/blob/b69c686ad8a0c89b9136bb4b31767257eb7b2597/airflow/contrib/operators/postgres_to_gcs_operator.py#L166-L192
[ "def", "_write_local_schema_file", "(", "self", ",", "cursor", ")", ":", "schema", "=", "[", "]", "for", "field", "in", "cursor", ".", "description", ":", "# See PEP 249 for details about the description tuple.", "field_name", "=", "field", "[", "0", "]", "field_type", "=", "self", ".", "type_map", "(", "field", "[", "1", "]", ")", "field_mode", "=", "'REPEATED'", "if", "field", "[", "1", "]", "in", "(", "1009", ",", "1005", ",", "1007", ",", "1016", ")", "else", "'NULLABLE'", "schema", ".", "append", "(", "{", "'name'", ":", "field_name", ",", "'type'", ":", "field_type", ",", "'mode'", ":", "field_mode", ",", "}", ")", "self", ".", "log", ".", "info", "(", "'Using schema for %s: %s'", ",", "self", ".", "schema_filename", ",", "schema", ")", "tmp_schema_file_handle", "=", "NamedTemporaryFile", "(", "delete", "=", "True", ")", "s", "=", "json", ".", "dumps", "(", "schema", ",", "sort_keys", "=", "True", ")", ".", "encode", "(", "'utf-8'", ")", "tmp_schema_file_handle", ".", "write", "(", "s", ")", "return", "{", "self", ".", "schema_filename", ":", "tmp_schema_file_handle", "}" ]
b69c686ad8a0c89b9136bb4b31767257eb7b2597
test
PostgresToGoogleCloudStorageOperator.convert_types
Takes a value from Postgres, and converts it to a value that's safe for JSON/Google Cloud Storage/BigQuery. Dates are converted to UTC seconds. Decimals are converted to floats. Times are converted to seconds.
airflow/contrib/operators/postgres_to_gcs_operator.py
def convert_types(cls, value):
    """
    Takes a value from Postgres, and converts it to a value that's safe for
    JSON/Google Cloud Storage/BigQuery. Dates are converted to UTC seconds.
    Decimals are converted to floats. Times are converted to seconds.
    """
    if type(value) in (datetime.datetime, datetime.date):
        return time.mktime(value.timetuple())
    elif type(value) == datetime.time:
        formated_time = time.strptime(str(value), "%H:%M:%S")
        return datetime.timedelta(
            hours=formated_time.tm_hour,
            minutes=formated_time.tm_min,
            seconds=formated_time.tm_sec).seconds
    elif isinstance(value, Decimal):
        return float(value)
    else:
        return value
[ "Takes", "a", "value", "from", "Postgres", "and", "converts", "it", "to", "a", "value", "that", "s", "safe", "for", "JSON", "/", "Google", "Cloud", "Storage", "/", "BigQuery", ".", "Dates", "are", "converted", "to", "UTC", "seconds", ".", "Decimals", "are", "converted", "to", "floats", ".", "Times", "are", "converted", "to", "seconds", "." ]
apache/airflow
python
https://github.com/apache/airflow/blob/b69c686ad8a0c89b9136bb4b31767257eb7b2597/airflow/contrib/operators/postgres_to_gcs_operator.py#L207-L224
[ "def", "convert_types", "(", "cls", ",", "value", ")", ":", "if", "type", "(", "value", ")", "in", "(", "datetime", ".", "datetime", ",", "datetime", ".", "date", ")", ":", "return", "time", ".", "mktime", "(", "value", ".", "timetuple", "(", ")", ")", "elif", "type", "(", "value", ")", "==", "datetime", ".", "time", ":", "formated_time", "=", "time", ".", "strptime", "(", "str", "(", "value", ")", ",", "\"%H:%M:%S\"", ")", "return", "datetime", ".", "timedelta", "(", "hours", "=", "formated_time", ".", "tm_hour", ",", "minutes", "=", "formated_time", ".", "tm_min", ",", "seconds", "=", "formated_time", ".", "tm_sec", ")", ".", "seconds", "elif", "isinstance", "(", "value", ",", "Decimal", ")", ":", "return", "float", "(", "value", ")", "else", ":", "return", "value" ]
b69c686ad8a0c89b9136bb4b31767257eb7b2597
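To illustrate the conversions without importing the operator (which pulls in the Postgres and GCP dependencies), here is a standalone function that mirrors the classmethod above, for illustration only:

import datetime
import time
from decimal import Decimal

def convert_types(value):
    # Mirrors the rules above: dates/datetimes -> epoch seconds,
    # times -> seconds since midnight, Decimal -> float, anything else unchanged.
    if type(value) in (datetime.datetime, datetime.date):
        return time.mktime(value.timetuple())
    elif type(value) == datetime.time:
        parsed = time.strptime(str(value), "%H:%M:%S")
        return datetime.timedelta(hours=parsed.tm_hour,
                                  minutes=parsed.tm_min,
                                  seconds=parsed.tm_sec).seconds
    elif isinstance(value, Decimal):
        return float(value)
    return value

print(convert_types(datetime.time(1, 2, 3)))  # 3723
print(convert_types(Decimal('1.50')))         # 1.5

Note that time.mktime interprets the timetuple in the worker's local timezone, so the "UTC seconds" promised by the docstring only holds when the worker itself runs in UTC.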
test
_make_intermediate_dirs
Create all the intermediate directories in a remote host :param sftp_client: A Paramiko SFTP client. :param remote_directory: Absolute Path of the directory containing the file :return:
airflow/contrib/operators/sftp_operator.py
def _make_intermediate_dirs(sftp_client, remote_directory):
    """
    Create all the intermediate directories in a remote host

    :param sftp_client: A Paramiko SFTP client.
    :param remote_directory: Absolute Path of the directory containing the file
    :return:
    """
    if remote_directory == '/':
        sftp_client.chdir('/')
        return
    if remote_directory == '':
        return
    try:
        sftp_client.chdir(remote_directory)
    except IOError:
        dirname, basename = os.path.split(remote_directory.rstrip('/'))
        _make_intermediate_dirs(sftp_client, dirname)
        sftp_client.mkdir(basename)
        sftp_client.chdir(basename)
        return
[ "Create", "all", "the", "intermediate", "directories", "in", "a", "remote", "host" ]
apache/airflow
python
https://github.com/apache/airflow/blob/b69c686ad8a0c89b9136bb4b31767257eb7b2597/airflow/contrib/operators/sftp_operator.py#L160-L180
[ "def", "_make_intermediate_dirs", "(", "sftp_client", ",", "remote_directory", ")", ":", "if", "remote_directory", "==", "'/'", ":", "sftp_client", ".", "chdir", "(", "'/'", ")", "return", "if", "remote_directory", "==", "''", ":", "return", "try", ":", "sftp_client", ".", "chdir", "(", "remote_directory", ")", "except", "IOError", ":", "dirname", ",", "basename", "=", "os", ".", "path", ".", "split", "(", "remote_directory", ".", "rstrip", "(", "'/'", ")", ")", "_make_intermediate_dirs", "(", "sftp_client", ",", "dirname", ")", "sftp_client", ".", "mkdir", "(", "basename", ")", "sftp_client", ".", "chdir", "(", "basename", ")", "return" ]
b69c686ad8a0c89b9136bb4b31767257eb7b2597
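A possible way to use the helper before uploading a file with Paramiko; the host, credentials and paths are placeholders, and the import assumes the function stays module-level in sftp_operator.py as shown in this record:

import paramiko

from airflow.contrib.operators.sftp_operator import _make_intermediate_dirs

ssh = paramiko.SSHClient()
ssh.set_missing_host_key_policy(paramiko.AutoAddPolicy())
ssh.connect('sftp.example.com', username='user', password='secret')
sftp = ssh.open_sftp()

# Create /data/incoming/2019 level by level if any part is missing, then upload.
_make_intermediate_dirs(sftp_client=sftp, remote_directory='/data/incoming/2019')
sftp.put('local.csv', '/data/incoming/2019/local.csv')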
test
SQSHook.create_queue
Create queue using connection object :param queue_name: name of the queue. :type queue_name: str :param attributes: additional attributes for the queue (default: None) For details of the attributes parameter see :py:meth:`botocore.client.SQS.create_queue` :type attributes: dict :return: dict with the information about the queue For details of the returned value see :py:meth:`botocore.client.SQS.create_queue` :rtype: dict
airflow/contrib/hooks/aws_sqs_hook.py
def create_queue(self, queue_name, attributes=None):
    """
    Create queue using connection object

    :param queue_name: name of the queue.
    :type queue_name: str
    :param attributes: additional attributes for the queue (default: None)
        For details of the attributes parameter see :py:meth:`botocore.client.SQS.create_queue`
    :type attributes: dict

    :return: dict with the information about the queue
        For details of the returned value see :py:meth:`botocore.client.SQS.create_queue`
    :rtype: dict
    """
    return self.get_conn().create_queue(QueueName=queue_name,
                                        Attributes=attributes or {})
[ "Create", "queue", "using", "connection", "object" ]
apache/airflow
python
https://github.com/apache/airflow/blob/b69c686ad8a0c89b9136bb4b31767257eb7b2597/airflow/contrib/hooks/aws_sqs_hook.py#L34-L48
[ "def", "create_queue", "(", "self", ",", "queue_name", ",", "attributes", "=", "None", ")", ":", "return", "self", ".", "get_conn", "(", ")", ".", "create_queue", "(", "QueueName", "=", "queue_name", ",", "Attributes", "=", "attributes", "or", "{", "}", ")" ]
b69c686ad8a0c89b9136bb4b31767257eb7b2597
test
SQSHook.send_message
Send message to the queue :param queue_url: queue url :type queue_url: str :param message_body: the contents of the message :type message_body: str :param delay_seconds: seconds to delay the message :type delay_seconds: int :param message_attributes: additional attributes for the message (default: None) For details of the attributes parameter see :py:meth:`botocore.client.SQS.send_message` :type message_attributes: dict :return: dict with the information about the message sent For details of the returned value see :py:meth:`botocore.client.SQS.send_message` :rtype: dict
airflow/contrib/hooks/aws_sqs_hook.py
def send_message(self, queue_url, message_body, delay_seconds=0, message_attributes=None):
    """
    Send message to the queue

    :param queue_url: queue url
    :type queue_url: str
    :param message_body: the contents of the message
    :type message_body: str
    :param delay_seconds: seconds to delay the message
    :type delay_seconds: int
    :param message_attributes: additional attributes for the message (default: None)
        For details of the attributes parameter see :py:meth:`botocore.client.SQS.send_message`
    :type message_attributes: dict

    :return: dict with the information about the message sent
        For details of the returned value see :py:meth:`botocore.client.SQS.send_message`
    :rtype: dict
    """
    return self.get_conn().send_message(QueueUrl=queue_url,
                                        MessageBody=message_body,
                                        DelaySeconds=delay_seconds,
                                        MessageAttributes=message_attributes or {})
[ "Send", "message", "to", "the", "queue" ]
apache/airflow
python
https://github.com/apache/airflow/blob/b69c686ad8a0c89b9136bb4b31767257eb7b2597/airflow/contrib/hooks/aws_sqs_hook.py#L50-L71
[ "def", "send_message", "(", "self", ",", "queue_url", ",", "message_body", ",", "delay_seconds", "=", "0", ",", "message_attributes", "=", "None", ")", ":", "return", "self", ".", "get_conn", "(", ")", ".", "send_message", "(", "QueueUrl", "=", "queue_url", ",", "MessageBody", "=", "message_body", ",", "DelaySeconds", "=", "delay_seconds", ",", "MessageAttributes", "=", "message_attributes", "or", "{", "}", ")" ]
b69c686ad8a0c89b9136bb4b31767257eb7b2597
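The two SQSHook records above are thin wrappers over the botocore SQS client, so a usage sketch is short. This assumes Airflow with the contrib hooks installed and an 'aws_default' connection holding valid AWS credentials; the queue name, attributes and message body are illustrative only.

from airflow.contrib.hooks.aws_sqs_hook import SQSHook

hook = SQSHook(aws_conn_id='aws_default')

# create_queue returns the raw botocore response, which includes 'QueueUrl'.
queue = hook.create_queue('airflow-demo-queue', attributes={'DelaySeconds': '0'})
queue_url = queue['QueueUrl']

# send_message forwards body, delay and attributes straight to botocore
# and returns its response dict (contains 'MessageId', 'MD5OfMessageBody', ...).
response = hook.send_message(
    queue_url=queue_url,
    message_body='hello from Airflow',
    delay_seconds=0,
    message_attributes={})
print(response['MessageId'])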
test
_integrate_plugins
Integrate plugins to the context
airflow/hooks/__init__.py
def _integrate_plugins(): """Integrate plugins to the context""" from airflow.plugins_manager import hooks_modules for hooks_module in hooks_modules: sys.modules[hooks_module.__name__] = hooks_module globals()[hooks_module._name] = hooks_module
def _integrate_plugins(): """Integrate plugins to the context""" from airflow.plugins_manager import hooks_modules for hooks_module in hooks_modules: sys.modules[hooks_module.__name__] = hooks_module globals()[hooks_module._name] = hooks_module
[ "Integrate", "plugins", "to", "the", "context" ]
apache/airflow
python
https://github.com/apache/airflow/blob/b69c686ad8a0c89b9136bb4b31767257eb7b2597/airflow/hooks/__init__.py#L27-L32
[ "def", "_integrate_plugins", "(", ")", ":", "from", "airflow", ".", "plugins_manager", "import", "hooks_modules", "for", "hooks_module", "in", "hooks_modules", ":", "sys", ".", "modules", "[", "hooks_module", ".", "__name__", "]", "=", "hooks_module", "globals", "(", ")", "[", "hooks_module", ".", "_name", "]", "=", "hooks_module" ]
b69c686ad8a0c89b9136bb4b31767257eb7b2597
test
BaseTaskRunner.run_command
Run the task command. :param run_with: list of tokens to run the task command with e.g. ``['bash', '-c']`` :type run_with: list :param join_args: whether to concatenate the list of command tokens e.g. ``['airflow', 'run']`` vs ``['airflow run']`` :param join_args: bool :return: the process that was run :rtype: subprocess.Popen
airflow/task/task_runner/base_task_runner.py
def run_command(self, run_with=None, join_args=False): """ Run the task command. :param run_with: list of tokens to run the task command with e.g. ``['bash', '-c']`` :type run_with: list :param join_args: whether to concatenate the list of command tokens e.g. ``['airflow', 'run']`` vs ``['airflow run']`` :param join_args: bool :return: the process that was run :rtype: subprocess.Popen """ run_with = run_with or [] cmd = [" ".join(self._command)] if join_args else self._command full_cmd = run_with + cmd self.log.info('Running: %s', full_cmd) proc = subprocess.Popen( full_cmd, stdout=subprocess.PIPE, stderr=subprocess.STDOUT, universal_newlines=True, close_fds=True, env=os.environ.copy(), preexec_fn=os.setsid ) # Start daemon thread to read subprocess logging output log_reader = threading.Thread( target=self._read_task_logs, args=(proc.stdout,), ) log_reader.daemon = True log_reader.start() return proc
def run_command(self, run_with=None, join_args=False): """ Run the task command. :param run_with: list of tokens to run the task command with e.g. ``['bash', '-c']`` :type run_with: list :param join_args: whether to concatenate the list of command tokens e.g. ``['airflow', 'run']`` vs ``['airflow run']`` :param join_args: bool :return: the process that was run :rtype: subprocess.Popen """ run_with = run_with or [] cmd = [" ".join(self._command)] if join_args else self._command full_cmd = run_with + cmd self.log.info('Running: %s', full_cmd) proc = subprocess.Popen( full_cmd, stdout=subprocess.PIPE, stderr=subprocess.STDOUT, universal_newlines=True, close_fds=True, env=os.environ.copy(), preexec_fn=os.setsid ) # Start daemon thread to read subprocess logging output log_reader = threading.Thread( target=self._read_task_logs, args=(proc.stdout,), ) log_reader.daemon = True log_reader.start() return proc
[ "Run", "the", "task", "command", "." ]
apache/airflow
python
https://github.com/apache/airflow/blob/b69c686ad8a0c89b9136bb4b31767257eb7b2597/airflow/task/task_runner/base_task_runner.py#L101-L135
[ "def", "run_command", "(", "self", ",", "run_with", "=", "None", ",", "join_args", "=", "False", ")", ":", "run_with", "=", "run_with", "or", "[", "]", "cmd", "=", "[", "\" \"", ".", "join", "(", "self", ".", "_command", ")", "]", "if", "join_args", "else", "self", ".", "_command", "full_cmd", "=", "run_with", "+", "cmd", "self", ".", "log", ".", "info", "(", "'Running: %s'", ",", "full_cmd", ")", "proc", "=", "subprocess", ".", "Popen", "(", "full_cmd", ",", "stdout", "=", "subprocess", ".", "PIPE", ",", "stderr", "=", "subprocess", ".", "STDOUT", ",", "universal_newlines", "=", "True", ",", "close_fds", "=", "True", ",", "env", "=", "os", ".", "environ", ".", "copy", "(", ")", ",", "preexec_fn", "=", "os", ".", "setsid", ")", "# Start daemon thread to read subprocess logging output", "log_reader", "=", "threading", ".", "Thread", "(", "target", "=", "self", ".", "_read_task_logs", ",", "args", "=", "(", "proc", ".", "stdout", ",", ")", ",", ")", "log_reader", ".", "daemon", "=", "True", "log_reader", ".", "start", "(", ")", "return", "proc" ]
b69c686ad8a0c89b9136bb4b31767257eb7b2597
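The run_with / join_args interplay in run_command is the part that is easiest to misread, so here is a standalone sketch (plain Python, no Airflow required) that reproduces only the command-assembly step; the task command tokens are made up.

def assemble(command, run_with=None, join_args=False):
    # Mirrors the first lines of BaseTaskRunner.run_command above.
    run_with = run_with or []
    cmd = [" ".join(command)] if join_args else command
    return run_with + cmd

task_cmd = ['airflow', 'run', 'example_dag', 'example_task', '2019-01-01T00:00:00']

print(assemble(task_cmd))
# -> ['airflow', 'run', 'example_dag', 'example_task', '2019-01-01T00:00:00']

print(assemble(task_cmd, run_with=['bash', '-c'], join_args=True))
# -> ['bash', '-c', 'airflow run example_dag example_task 2019-01-01T00:00:00']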
test
BaseTaskRunner.on_finish
A callback that should be called when this is done running.
airflow/task/task_runner/base_task_runner.py
def on_finish(self): """ A callback that should be called when this is done running. """ if self._cfg_path and os.path.isfile(self._cfg_path): if self.run_as_user: subprocess.call(['sudo', 'rm', self._cfg_path], close_fds=True) else: os.remove(self._cfg_path)
def on_finish(self): """ A callback that should be called when this is done running. """ if self._cfg_path and os.path.isfile(self._cfg_path): if self.run_as_user: subprocess.call(['sudo', 'rm', self._cfg_path], close_fds=True) else: os.remove(self._cfg_path)
[ "A", "callback", "that", "should", "be", "called", "when", "this", "is", "done", "running", "." ]
apache/airflow
python
https://github.com/apache/airflow/blob/b69c686ad8a0c89b9136bb4b31767257eb7b2597/airflow/task/task_runner/base_task_runner.py#L157-L165
[ "def", "on_finish", "(", "self", ")", ":", "if", "self", ".", "_cfg_path", "and", "os", ".", "path", ".", "isfile", "(", "self", ".", "_cfg_path", ")", ":", "if", "self", ".", "run_as_user", ":", "subprocess", ".", "call", "(", "[", "'sudo'", ",", "'rm'", ",", "self", ".", "_cfg_path", "]", ",", "close_fds", "=", "True", ")", "else", ":", "os", ".", "remove", "(", "self", ".", "_cfg_path", ")" ]
b69c686ad8a0c89b9136bb4b31767257eb7b2597
test
_main
Parse options and process commands
airflow/_vendor/nvd3/NVD3Chart.py
def _main(): """ Parse options and process commands """ # Parse arguments usage = "usage: nvd3.py [options]" parser = OptionParser(usage=usage, version=("python-nvd3 - Charts generator with " "nvd3.js and d3.js")) parser.add_option("-q", "--quiet", action="store_false", dest="verbose", default=True, help="don't print messages to stdout") (options, args) = parser.parse_args()
def _main(): """ Parse options and process commands """ # Parse arguments usage = "usage: nvd3.py [options]" parser = OptionParser(usage=usage, version=("python-nvd3 - Charts generator with " "nvd3.js and d3.js")) parser.add_option("-q", "--quiet", action="store_false", dest="verbose", default=True, help="don't print messages to stdout") (options, args) = parser.parse_args()
[ "Parse", "options", "and", "process", "commands" ]
apache/airflow
python
https://github.com/apache/airflow/blob/b69c686ad8a0c89b9136bb4b31767257eb7b2597/airflow/_vendor/nvd3/NVD3Chart.py#L488-L501
[ "def", "_main", "(", ")", ":", "# Parse arguments", "usage", "=", "\"usage: nvd3.py [options]\"", "parser", "=", "OptionParser", "(", "usage", "=", "usage", ",", "version", "=", "(", "\"python-nvd3 - Charts generator with \"", "\"nvd3.js and d3.js\"", ")", ")", "parser", ".", "add_option", "(", "\"-q\"", ",", "\"--quiet\"", ",", "action", "=", "\"store_false\"", ",", "dest", "=", "\"verbose\"", ",", "default", "=", "True", ",", "help", "=", "\"don't print messages to stdout\"", ")", "(", "options", ",", "args", ")", "=", "parser", ".", "parse_args", "(", ")" ]
b69c686ad8a0c89b9136bb4b31767257eb7b2597
test
NVD3Chart.add_serie
add serie - Series are list of data that will be plotted y {1, 2, 3, 4, 5} / x {1, 2, 3, 4, 5} **Attributes**: * ``name`` - set Serie name * ``x`` - x-axis data * ``y`` - y-axis data kwargs: * ``shape`` - for scatterChart, you can set different shapes (circle, triangle etc...) * ``size`` - for scatterChart, you can set size of different shapes * ``type`` - for multiChart, type should be bar * ``bar`` - to display bars in Chart * ``color_list`` - define list of colors which will be used by pieChart * ``color`` - set axis color * ``disabled`` - extra: * ``tooltip`` - set tooltip flag * ``date_format`` - set date_format for tooltip if x-axis is in date format
airflow/_vendor/nvd3/NVD3Chart.py
def add_serie(self, y, x, name=None, extra=None, **kwargs): """ add serie - Series are list of data that will be plotted y {1, 2, 3, 4, 5} / x {1, 2, 3, 4, 5} **Attributes**: * ``name`` - set Serie name * ``x`` - x-axis data * ``y`` - y-axis data kwargs: * ``shape`` - for scatterChart, you can set different shapes (circle, triangle etc...) * ``size`` - for scatterChart, you can set size of different shapes * ``type`` - for multiChart, type should be bar * ``bar`` - to display bars in Chart * ``color_list`` - define list of colors which will be used by pieChart * ``color`` - set axis color * ``disabled`` - extra: * ``tooltip`` - set tooltip flag * ``date_format`` - set date_format for tooltip if x-axis is in date format """ if not name: name = "Serie %d" % (self.serie_no) # For scatterChart shape & size fields are added in serie if 'shape' in kwargs or 'size' in kwargs: csize = kwargs.get('size', 1) cshape = kwargs.get('shape', 'circle') serie = [{ 'x': x[i], 'y': j, 'shape': cshape, 'size': csize[i] if isinstance(csize, list) else csize } for i, j in enumerate(y)] else: if self.model == 'pieChart': serie = [{'label': x[i], 'value': y} for i, y in enumerate(y)] else: serie = [{'x': x[i], 'y': y} for i, y in enumerate(y)] data_keyvalue = {'values': serie, 'key': name} # multiChart # Histogram type='bar' for the series if 'type' in kwargs and kwargs['type']: data_keyvalue['type'] = kwargs['type'] # Define on which Y axis the serie is related # a chart can have 2 Y axis, left and right, by default only one Y Axis is used if 'yaxis' in kwargs and kwargs['yaxis']: data_keyvalue['yAxis'] = kwargs['yaxis'] else: if self.model != 'pieChart': data_keyvalue['yAxis'] = '1' if 'bar' in kwargs and kwargs['bar']: data_keyvalue['bar'] = 'true' if 'disabled' in kwargs and kwargs['disabled']: data_keyvalue['disabled'] = 'true' if 'color' in kwargs and kwargs['color']: data_keyvalue['color'] = kwargs['color'] if extra: if self.model == 'pieChart': if 'color_list' in extra and extra['color_list']: self.color_list = extra['color_list'] if extra.get('date_format'): self.charttooltip_dateformat = extra['date_format'] if extra.get('tooltip'): self.custom_tooltip_flag = True if self.model != 'pieChart': _start = extra['tooltip']['y_start'] _end = extra['tooltip']['y_end'] _start = ("'" + str(_start) + "' + ") if _start else '' _end = (" + '" + str(_end) + "'") if _end else '' if self.model == 'linePlusBarChart': if self.tooltip_condition_string: self.tooltip_condition_string += stab(5) self.tooltip_condition_string += stab(0) + "if(key.indexOf('" + name + "') > -1 ){\n" +\ stab(6) + "var y = " + _start + " String(graph.point.y) " + _end + ";\n" +\ stab(5) + "}\n" elif self.model == 'cumulativeLineChart': self.tooltip_condition_string += stab(0) + "if(key == '" + name + "'){\n" +\ stab(6) + "var y = " + _start + " String(e) " + _end + ";\n" +\ stab(5) + "}\n" else: self.tooltip_condition_string += stab(5) + "if(key == '" + name + "'){\n" +\ stab(6) + "var y = " + _start + " String(graph.point.y) " + _end + ";\n" +\ stab(5) + "}\n" if self.model == 'pieChart': _start = extra['tooltip']['y_start'] _end = extra['tooltip']['y_end'] _start = ("'" + str(_start) + "' + ") if _start else '' _end = (" + '" + str(_end) + "'") if _end else '' self.tooltip_condition_string += "var y = " + _start + " String(y) " + _end + ";\n" # Increment series counter & append self.serie_no += 1 self.series.append(data_keyvalue)
def add_serie(self, y, x, name=None, extra=None, **kwargs): """ add serie - Series are list of data that will be plotted y {1, 2, 3, 4, 5} / x {1, 2, 3, 4, 5} **Attributes**: * ``name`` - set Serie name * ``x`` - x-axis data * ``y`` - y-axis data kwargs: * ``shape`` - for scatterChart, you can set different shapes (circle, triangle etc...) * ``size`` - for scatterChart, you can set size of different shapes * ``type`` - for multiChart, type should be bar * ``bar`` - to display bars in Chart * ``color_list`` - define list of colors which will be used by pieChart * ``color`` - set axis color * ``disabled`` - extra: * ``tooltip`` - set tooltip flag * ``date_format`` - set date_format for tooltip if x-axis is in date format """ if not name: name = "Serie %d" % (self.serie_no) # For scatterChart shape & size fields are added in serie if 'shape' in kwargs or 'size' in kwargs: csize = kwargs.get('size', 1) cshape = kwargs.get('shape', 'circle') serie = [{ 'x': x[i], 'y': j, 'shape': cshape, 'size': csize[i] if isinstance(csize, list) else csize } for i, j in enumerate(y)] else: if self.model == 'pieChart': serie = [{'label': x[i], 'value': y} for i, y in enumerate(y)] else: serie = [{'x': x[i], 'y': y} for i, y in enumerate(y)] data_keyvalue = {'values': serie, 'key': name} # multiChart # Histogram type='bar' for the series if 'type' in kwargs and kwargs['type']: data_keyvalue['type'] = kwargs['type'] # Define on which Y axis the serie is related # a chart can have 2 Y axis, left and right, by default only one Y Axis is used if 'yaxis' in kwargs and kwargs['yaxis']: data_keyvalue['yAxis'] = kwargs['yaxis'] else: if self.model != 'pieChart': data_keyvalue['yAxis'] = '1' if 'bar' in kwargs and kwargs['bar']: data_keyvalue['bar'] = 'true' if 'disabled' in kwargs and kwargs['disabled']: data_keyvalue['disabled'] = 'true' if 'color' in kwargs and kwargs['color']: data_keyvalue['color'] = kwargs['color'] if extra: if self.model == 'pieChart': if 'color_list' in extra and extra['color_list']: self.color_list = extra['color_list'] if extra.get('date_format'): self.charttooltip_dateformat = extra['date_format'] if extra.get('tooltip'): self.custom_tooltip_flag = True if self.model != 'pieChart': _start = extra['tooltip']['y_start'] _end = extra['tooltip']['y_end'] _start = ("'" + str(_start) + "' + ") if _start else '' _end = (" + '" + str(_end) + "'") if _end else '' if self.model == 'linePlusBarChart': if self.tooltip_condition_string: self.tooltip_condition_string += stab(5) self.tooltip_condition_string += stab(0) + "if(key.indexOf('" + name + "') > -1 ){\n" +\ stab(6) + "var y = " + _start + " String(graph.point.y) " + _end + ";\n" +\ stab(5) + "}\n" elif self.model == 'cumulativeLineChart': self.tooltip_condition_string += stab(0) + "if(key == '" + name + "'){\n" +\ stab(6) + "var y = " + _start + " String(e) " + _end + ";\n" +\ stab(5) + "}\n" else: self.tooltip_condition_string += stab(5) + "if(key == '" + name + "'){\n" +\ stab(6) + "var y = " + _start + " String(graph.point.y) " + _end + ";\n" +\ stab(5) + "}\n" if self.model == 'pieChart': _start = extra['tooltip']['y_start'] _end = extra['tooltip']['y_end'] _start = ("'" + str(_start) + "' + ") if _start else '' _end = (" + '" + str(_end) + "'") if _end else '' self.tooltip_condition_string += "var y = " + _start + " String(y) " + _end + ";\n" # Increment series counter & append self.serie_no += 1 self.series.append(data_keyvalue)
[ "add", "serie", "-", "Series", "are", "list", "of", "data", "that", "will", "be", "plotted", "y", "{", "1", "2", "3", "4", "5", "}", "/", "x", "{", "1", "2", "3", "4", "5", "}" ]
apache/airflow
python
https://github.com/apache/airflow/blob/b69c686ad8a0c89b9136bb4b31767257eb7b2597/airflow/_vendor/nvd3/NVD3Chart.py#L179-L294
[ "def", "add_serie", "(", "self", ",", "y", ",", "x", ",", "name", "=", "None", ",", "extra", "=", "None", ",", "*", "*", "kwargs", ")", ":", "if", "not", "name", ":", "name", "=", "\"Serie %d\"", "%", "(", "self", ".", "serie_no", ")", "# For scatterChart shape & size fields are added in serie", "if", "'shape'", "in", "kwargs", "or", "'size'", "in", "kwargs", ":", "csize", "=", "kwargs", ".", "get", "(", "'size'", ",", "1", ")", "cshape", "=", "kwargs", ".", "get", "(", "'shape'", ",", "'circle'", ")", "serie", "=", "[", "{", "'x'", ":", "x", "[", "i", "]", ",", "'y'", ":", "j", ",", "'shape'", ":", "cshape", ",", "'size'", ":", "csize", "[", "i", "]", "if", "isinstance", "(", "csize", ",", "list", ")", "else", "csize", "}", "for", "i", ",", "j", "in", "enumerate", "(", "y", ")", "]", "else", ":", "if", "self", ".", "model", "==", "'pieChart'", ":", "serie", "=", "[", "{", "'label'", ":", "x", "[", "i", "]", ",", "'value'", ":", "y", "}", "for", "i", ",", "y", "in", "enumerate", "(", "y", ")", "]", "else", ":", "serie", "=", "[", "{", "'x'", ":", "x", "[", "i", "]", ",", "'y'", ":", "y", "}", "for", "i", ",", "y", "in", "enumerate", "(", "y", ")", "]", "data_keyvalue", "=", "{", "'values'", ":", "serie", ",", "'key'", ":", "name", "}", "# multiChart", "# Histogram type='bar' for the series", "if", "'type'", "in", "kwargs", "and", "kwargs", "[", "'type'", "]", ":", "data_keyvalue", "[", "'type'", "]", "=", "kwargs", "[", "'type'", "]", "# Define on which Y axis the serie is related", "# a chart can have 2 Y axis, left and right, by default only one Y Axis is used", "if", "'yaxis'", "in", "kwargs", "and", "kwargs", "[", "'yaxis'", "]", ":", "data_keyvalue", "[", "'yAxis'", "]", "=", "kwargs", "[", "'yaxis'", "]", "else", ":", "if", "self", ".", "model", "!=", "'pieChart'", ":", "data_keyvalue", "[", "'yAxis'", "]", "=", "'1'", "if", "'bar'", "in", "kwargs", "and", "kwargs", "[", "'bar'", "]", ":", "data_keyvalue", "[", "'bar'", "]", "=", "'true'", "if", "'disabled'", "in", "kwargs", "and", "kwargs", "[", "'disabled'", "]", ":", "data_keyvalue", "[", "'disabled'", "]", "=", "'true'", "if", "'color'", "in", "kwargs", "and", "kwargs", "[", "'color'", "]", ":", "data_keyvalue", "[", "'color'", "]", "=", "kwargs", "[", "'color'", "]", "if", "extra", ":", "if", "self", ".", "model", "==", "'pieChart'", ":", "if", "'color_list'", "in", "extra", "and", "extra", "[", "'color_list'", "]", ":", "self", ".", "color_list", "=", "extra", "[", "'color_list'", "]", "if", "extra", ".", "get", "(", "'date_format'", ")", ":", "self", ".", "charttooltip_dateformat", "=", "extra", "[", "'date_format'", "]", "if", "extra", ".", "get", "(", "'tooltip'", ")", ":", "self", ".", "custom_tooltip_flag", "=", "True", "if", "self", ".", "model", "!=", "'pieChart'", ":", "_start", "=", "extra", "[", "'tooltip'", "]", "[", "'y_start'", "]", "_end", "=", "extra", "[", "'tooltip'", "]", "[", "'y_end'", "]", "_start", "=", "(", "\"'\"", "+", "str", "(", "_start", ")", "+", "\"' + \"", ")", "if", "_start", "else", "''", "_end", "=", "(", "\" + '\"", "+", "str", "(", "_end", ")", "+", "\"'\"", ")", "if", "_end", "else", "''", "if", "self", ".", "model", "==", "'linePlusBarChart'", ":", "if", "self", ".", "tooltip_condition_string", ":", "self", ".", "tooltip_condition_string", "+=", "stab", "(", "5", ")", "self", ".", "tooltip_condition_string", "+=", "stab", "(", "0", ")", "+", "\"if(key.indexOf('\"", "+", "name", "+", "\"') > -1 ){\\n\"", "+", "stab", "(", "6", ")", "+", "\"var y = \"", "+", "_start", "+", "\" 
String(graph.point.y) \"", "+", "_end", "+", "\";\\n\"", "+", "stab", "(", "5", ")", "+", "\"}\\n\"", "elif", "self", ".", "model", "==", "'cumulativeLineChart'", ":", "self", ".", "tooltip_condition_string", "+=", "stab", "(", "0", ")", "+", "\"if(key == '\"", "+", "name", "+", "\"'){\\n\"", "+", "stab", "(", "6", ")", "+", "\"var y = \"", "+", "_start", "+", "\" String(e) \"", "+", "_end", "+", "\";\\n\"", "+", "stab", "(", "5", ")", "+", "\"}\\n\"", "else", ":", "self", ".", "tooltip_condition_string", "+=", "stab", "(", "5", ")", "+", "\"if(key == '\"", "+", "name", "+", "\"'){\\n\"", "+", "stab", "(", "6", ")", "+", "\"var y = \"", "+", "_start", "+", "\" String(graph.point.y) \"", "+", "_end", "+", "\";\\n\"", "+", "stab", "(", "5", ")", "+", "\"}\\n\"", "if", "self", ".", "model", "==", "'pieChart'", ":", "_start", "=", "extra", "[", "'tooltip'", "]", "[", "'y_start'", "]", "_end", "=", "extra", "[", "'tooltip'", "]", "[", "'y_end'", "]", "_start", "=", "(", "\"'\"", "+", "str", "(", "_start", ")", "+", "\"' + \"", ")", "if", "_start", "else", "''", "_end", "=", "(", "\" + '\"", "+", "str", "(", "_end", ")", "+", "\"'\"", ")", "if", "_end", "else", "''", "self", ".", "tooltip_condition_string", "+=", "\"var y = \"", "+", "_start", "+", "\" String(y) \"", "+", "_end", "+", "\";\\n\"", "# Increment series counter & append", "self", ".", "serie_no", "+=", "1", "self", ".", "series", ".", "append", "(", "data_keyvalue", ")" ]
b69c686ad8a0c89b9136bb4b31767257eb7b2597
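add_serie is normally reached through one of the concrete chart classes; a short sketch of a pie chart, assuming the standalone python-nvd3 package (the upstream of this vendored module) is installed. The data and the ' cal' tooltip suffix are illustrative.

from nvd3 import pieChart

chart = pieChart(name='fruit_share', height=400, width=400)
xdata = ['Orange', 'Banana', 'Pear', 'Kiwi']
ydata = [3, 4, 0, 1]

# extra['tooltip'] feeds the custom tooltip string built inside add_serie.
chart.add_serie(y=ydata, x=xdata,
                extra={'tooltip': {'y_start': '', 'y_end': ' cal'}})
chart.buildhtml()

# chart.htmlcontent now holds a complete page: the <svg> container from
# buildcontainer plus the javascript emitted by buildjschart.
with open('fruit_share.html', 'w') as f:
    f.write(chart.htmlcontent)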
test
NVD3Chart.buildcontent
Build HTML content only, no header or body tags. To be useful this will usually require the attribute `juqery_on_ready` to be set which will wrap the js in $(function(){<regular_js>};)
airflow/_vendor/nvd3/NVD3Chart.py
def buildcontent(self): """Build HTML content only, no header or body tags. To be useful this will usually require the attribute `juqery_on_ready` to be set which will wrap the js in $(function(){<regular_js>};) """ self.buildcontainer() # if the subclass has a method buildjs this method will be # called instead of the method defined here # when this subclass method is entered it does call # the method buildjschart defined here self.buildjschart() self.htmlcontent = self.template_content_nvd3.render(chart=self)
def buildcontent(self): """Build HTML content only, no header or body tags. To be useful this will usually require the attribute `juqery_on_ready` to be set which will wrap the js in $(function(){<regular_js>};) """ self.buildcontainer() # if the subclass has a method buildjs this method will be # called instead of the method defined here # when this subclass method is entered it does call # the method buildjschart defined here self.buildjschart() self.htmlcontent = self.template_content_nvd3.render(chart=self)
[ "Build", "HTML", "content", "only", "no", "header", "or", "body", "tags", ".", "To", "be", "useful", "this", "will", "usually", "require", "the", "attribute", "juqery_on_ready", "to", "be", "set", "which", "will", "wrap", "the", "js", "in", "$", "(", "function", "()", "{", "<regular_js", ">", "}", ";", ")" ]
apache/airflow
python
https://github.com/apache/airflow/blob/b69c686ad8a0c89b9136bb4b31767257eb7b2597/airflow/_vendor/nvd3/NVD3Chart.py#L350-L361
[ "def", "buildcontent", "(", "self", ")", ":", "self", ".", "buildcontainer", "(", ")", "# if the subclass has a method buildjs this method will be", "# called instead of the method defined here", "# when this subclass method is entered it does call", "# the method buildjschart defined here", "self", ".", "buildjschart", "(", ")", "self", ".", "htmlcontent", "=", "self", ".", "template_content_nvd3", ".", "render", "(", "chart", "=", "self", ")" ]
b69c686ad8a0c89b9136bb4b31767257eb7b2597
test
NVD3Chart.buildhtml
Build the HTML page Create the htmlheader with css / js Create html page Add Js code for nvd3
airflow/_vendor/nvd3/NVD3Chart.py
def buildhtml(self): """Build the HTML page Create the htmlheader with css / js Create html page Add Js code for nvd3 """ self.buildcontent() self.content = self.htmlcontent self.htmlcontent = self.template_page_nvd3.render(chart=self)
def buildhtml(self): """Build the HTML page Create the htmlheader with css / js Create html page Add Js code for nvd3 """ self.buildcontent() self.content = self.htmlcontent self.htmlcontent = self.template_page_nvd3.render(chart=self)
[ "Build", "the", "HTML", "page", "Create", "the", "htmlheader", "with", "css", "/", "js", "Create", "html", "page", "Add", "Js", "code", "for", "nvd3" ]
apache/airflow
python
https://github.com/apache/airflow/blob/b69c686ad8a0c89b9136bb4b31767257eb7b2597/airflow/_vendor/nvd3/NVD3Chart.py#L363-L371
[ "def", "buildhtml", "(", "self", ")", ":", "self", ".", "buildcontent", "(", ")", "self", ".", "content", "=", "self", ".", "htmlcontent", "self", ".", "htmlcontent", "=", "self", ".", "template_page_nvd3", ".", "render", "(", "chart", "=", "self", ")" ]
b69c686ad8a0c89b9136bb4b31767257eb7b2597
test
NVD3Chart.buildhtmlheader
generate HTML header content
airflow/_vendor/nvd3/NVD3Chart.py
def buildhtmlheader(self): """generate HTML header content""" self.htmlheader = '' # If the JavaScript assets have already been injected, don't bother re-sourcing them. global _js_initialized if '_js_initialized' not in globals() or not _js_initialized: for css in self.header_css: self.htmlheader += css for js in self.header_js: self.htmlheader += js
def buildhtmlheader(self): """generate HTML header content""" self.htmlheader = '' # If the JavaScript assets have already been injected, don't bother re-sourcing them. global _js_initialized if '_js_initialized' not in globals() or not _js_initialized: for css in self.header_css: self.htmlheader += css for js in self.header_js: self.htmlheader += js
[ "generate", "HTML", "header", "content" ]
apache/airflow
python
https://github.com/apache/airflow/blob/b69c686ad8a0c89b9136bb4b31767257eb7b2597/airflow/_vendor/nvd3/NVD3Chart.py#L374-L383
[ "def", "buildhtmlheader", "(", "self", ")", ":", "self", ".", "htmlheader", "=", "''", "# If the JavaScript assets have already been injected, don't bother re-sourcing them.", "global", "_js_initialized", "if", "'_js_initialized'", "not", "in", "globals", "(", ")", "or", "not", "_js_initialized", ":", "for", "css", "in", "self", ".", "header_css", ":", "self", ".", "htmlheader", "+=", "css", "for", "js", "in", "self", ".", "header_js", ":", "self", ".", "htmlheader", "+=", "js" ]
b69c686ad8a0c89b9136bb4b31767257eb7b2597
test
NVD3Chart.buildcontainer
generate HTML div
airflow/_vendor/nvd3/NVD3Chart.py
def buildcontainer(self): """generate HTML div""" if self.container: return # Create SVG div with style if self.width: if self.width[-1] != '%': self.style += 'width:%spx;' % self.width else: self.style += 'width:%s;' % self.width if self.height: if self.height[-1] != '%': self.style += 'height:%spx;' % self.height else: self.style += 'height:%s;' % self.height if self.style: self.style = 'style="%s"' % self.style self.container = self.containerheader + \ '<div id="%s"><svg %s></svg></div>\n' % (self.name, self.style)
def buildcontainer(self): """generate HTML div""" if self.container: return # Create SVG div with style if self.width: if self.width[-1] != '%': self.style += 'width:%spx;' % self.width else: self.style += 'width:%s;' % self.width if self.height: if self.height[-1] != '%': self.style += 'height:%spx;' % self.height else: self.style += 'height:%s;' % self.height if self.style: self.style = 'style="%s"' % self.style self.container = self.containerheader + \ '<div id="%s"><svg %s></svg></div>\n' % (self.name, self.style)
[ "generate", "HTML", "div" ]
apache/airflow
python
https://github.com/apache/airflow/blob/b69c686ad8a0c89b9136bb4b31767257eb7b2597/airflow/_vendor/nvd3/NVD3Chart.py#L385-L405
[ "def", "buildcontainer", "(", "self", ")", ":", "if", "self", ".", "container", ":", "return", "# Create SVG div with style", "if", "self", ".", "width", ":", "if", "self", ".", "width", "[", "-", "1", "]", "!=", "'%'", ":", "self", ".", "style", "+=", "'width:%spx;'", "%", "self", ".", "width", "else", ":", "self", ".", "style", "+=", "'width:%s;'", "%", "self", ".", "width", "if", "self", ".", "height", ":", "if", "self", ".", "height", "[", "-", "1", "]", "!=", "'%'", ":", "self", ".", "style", "+=", "'height:%spx;'", "%", "self", ".", "height", "else", ":", "self", ".", "style", "+=", "'height:%s;'", "%", "self", ".", "height", "if", "self", ".", "style", ":", "self", ".", "style", "=", "'style=\"%s\"'", "%", "self", ".", "style", "self", ".", "container", "=", "self", ".", "containerheader", "+", "'<div id=\"%s\"><svg %s></svg></div>\\n'", "%", "(", "self", ".", "name", ",", "self", ".", "style", ")" ]
b69c686ad8a0c89b9136bb4b31767257eb7b2597
test
NVD3Chart.buildjschart
generate javascript code for the chart
airflow/_vendor/nvd3/NVD3Chart.py
def buildjschart(self): """generate javascript code for the chart""" self.jschart = '' # add custom tooltip string in jschart # default condition (if build_custom_tooltip is not called explicitly with date_flag=True) if self.tooltip_condition_string == '': self.tooltip_condition_string = 'var y = String(graph.point.y);\n' # Include data self.series_js = json.dumps(self.series)
def buildjschart(self): """generate javascript code for the chart""" self.jschart = '' # add custom tooltip string in jschart # default condition (if build_custom_tooltip is not called explicitly with date_flag=True) if self.tooltip_condition_string == '': self.tooltip_condition_string = 'var y = String(graph.point.y);\n' # Include data self.series_js = json.dumps(self.series)
[ "generate", "javascript", "code", "for", "the", "chart" ]
apache/airflow
python
https://github.com/apache/airflow/blob/b69c686ad8a0c89b9136bb4b31767257eb7b2597/airflow/_vendor/nvd3/NVD3Chart.py#L407-L417
[ "def", "buildjschart", "(", "self", ")", ":", "self", ".", "jschart", "=", "''", "# add custom tooltip string in jschart", "# default condition (if build_custom_tooltip is not called explicitly with date_flag=True)", "if", "self", ".", "tooltip_condition_string", "==", "''", ":", "self", ".", "tooltip_condition_string", "=", "'var y = String(graph.point.y);\\n'", "# Include data", "self", ".", "series_js", "=", "json", ".", "dumps", "(", "self", ".", "series", ")" ]
b69c686ad8a0c89b9136bb4b31767257eb7b2597
test
NVD3Chart.create_x_axis
Create X-axis
airflow/_vendor/nvd3/NVD3Chart.py
def create_x_axis(self, name, label=None, format=None, date=False, custom_format=False): """Create X-axis""" axis = {} if custom_format and format: axis['tickFormat'] = format elif format: if format == 'AM_PM': axis['tickFormat'] = "function(d) { return get_am_pm(parseInt(d)); }" else: axis['tickFormat'] = "d3.format(',%s')" % format if label: axis['axisLabel'] = "'" + label + "'" # date format : see https://github.com/mbostock/d3/wiki/Time-Formatting if date: self.dateformat = format axis['tickFormat'] = ("function(d) { return d3.time.format('%s')" "(new Date(parseInt(d))) }\n" "" % self.dateformat) # flag is the x Axis is a date if name[0] == 'x': self.x_axis_date = True # Add new axis to list of axis self.axislist[name] = axis # Create x2Axis if focus_enable if name == "xAxis" and self.focus_enable: self.axislist['x2Axis'] = axis
def create_x_axis(self, name, label=None, format=None, date=False, custom_format=False): """Create X-axis""" axis = {} if custom_format and format: axis['tickFormat'] = format elif format: if format == 'AM_PM': axis['tickFormat'] = "function(d) { return get_am_pm(parseInt(d)); }" else: axis['tickFormat'] = "d3.format(',%s')" % format if label: axis['axisLabel'] = "'" + label + "'" # date format : see https://github.com/mbostock/d3/wiki/Time-Formatting if date: self.dateformat = format axis['tickFormat'] = ("function(d) { return d3.time.format('%s')" "(new Date(parseInt(d))) }\n" "" % self.dateformat) # flag is the x Axis is a date if name[0] == 'x': self.x_axis_date = True # Add new axis to list of axis self.axislist[name] = axis # Create x2Axis if focus_enable if name == "xAxis" and self.focus_enable: self.axislist['x2Axis'] = axis
[ "Create", "X", "-", "axis" ]
apache/airflow
python
https://github.com/apache/airflow/blob/b69c686ad8a0c89b9136bb4b31767257eb7b2597/airflow/_vendor/nvd3/NVD3Chart.py#L419-L448
[ "def", "create_x_axis", "(", "self", ",", "name", ",", "label", "=", "None", ",", "format", "=", "None", ",", "date", "=", "False", ",", "custom_format", "=", "False", ")", ":", "axis", "=", "{", "}", "if", "custom_format", "and", "format", ":", "axis", "[", "'tickFormat'", "]", "=", "format", "elif", "format", ":", "if", "format", "==", "'AM_PM'", ":", "axis", "[", "'tickFormat'", "]", "=", "\"function(d) { return get_am_pm(parseInt(d)); }\"", "else", ":", "axis", "[", "'tickFormat'", "]", "=", "\"d3.format(',%s')\"", "%", "format", "if", "label", ":", "axis", "[", "'axisLabel'", "]", "=", "\"'\"", "+", "label", "+", "\"'\"", "# date format : see https://github.com/mbostock/d3/wiki/Time-Formatting", "if", "date", ":", "self", ".", "dateformat", "=", "format", "axis", "[", "'tickFormat'", "]", "=", "(", "\"function(d) { return d3.time.format('%s')\"", "\"(new Date(parseInt(d))) }\\n\"", "\"\"", "%", "self", ".", "dateformat", ")", "# flag is the x Axis is a date", "if", "name", "[", "0", "]", "==", "'x'", ":", "self", ".", "x_axis_date", "=", "True", "# Add new axis to list of axis", "self", ".", "axislist", "[", "name", "]", "=", "axis", "# Create x2Axis if focus_enable", "if", "name", "==", "\"xAxis\"", "and", "self", ".", "focus_enable", ":", "self", ".", "axislist", "[", "'x2Axis'", "]", "=", "axis" ]
b69c686ad8a0c89b9136bb4b31767257eb7b2597
test
NVD3Chart.create_y_axis
Create Y-axis
airflow/_vendor/nvd3/NVD3Chart.py
def create_y_axis(self, name, label=None, format=None, custom_format=False): """ Create Y-axis """ axis = {} if custom_format and format: axis['tickFormat'] = format elif format: axis['tickFormat'] = "d3.format(',%s')" % format if label: axis['axisLabel'] = "'" + label + "'" # Add new axis to list of axis self.axislist[name] = axis
def create_y_axis(self, name, label=None, format=None, custom_format=False): """ Create Y-axis """ axis = {} if custom_format and format: axis['tickFormat'] = format elif format: axis['tickFormat'] = "d3.format(',%s')" % format if label: axis['axisLabel'] = "'" + label + "'" # Add new axis to list of axis self.axislist[name] = axis
[ "Create", "Y", "-", "axis" ]
apache/airflow
python
https://github.com/apache/airflow/blob/b69c686ad8a0c89b9136bb4b31767257eb7b2597/airflow/_vendor/nvd3/NVD3Chart.py#L450-L465
[ "def", "create_y_axis", "(", "self", ",", "name", ",", "label", "=", "None", ",", "format", "=", "None", ",", "custom_format", "=", "False", ")", ":", "axis", "=", "{", "}", "if", "custom_format", "and", "format", ":", "axis", "[", "'tickFormat'", "]", "=", "format", "elif", "format", ":", "axis", "[", "'tickFormat'", "]", "=", "\"d3.format(',%s')\"", "%", "format", "if", "label", ":", "axis", "[", "'axisLabel'", "]", "=", "\"'\"", "+", "label", "+", "\"'\"", "# Add new axis to list of axis", "self", ".", "axislist", "[", "name", "]", "=", "axis" ]
b69c686ad8a0c89b9136bb4b31767257eb7b2597
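create_x_axis and create_y_axis only record d3 format strings in self.axislist; the templates pick them up at render time. A small sketch, again assuming the standalone python-nvd3 package, with arbitrary chart name, formats and labels.

from nvd3 import lineChart

# x_is_date=True makes the chart call create_x_axis with date=True,
# so the tick formatter wraps d3.time.format.
chart = lineChart(name='timings', x_is_date=True, x_axis_format='%d %b %Y')

# Re-defining the y axis directly, as in the create_y_axis record above.
chart.create_y_axis('yAxis', label='seconds', format='.02f')

print(chart.axislist['xAxis']['tickFormat'])
print(chart.axislist['yAxis'])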
test
TemplateMixin.buildcontent
Build HTML content only, no header or body tags. To be useful this will usually require the attribute `juqery_on_ready` to be set which will wrap the js in $(function(){<regular_js>};)
airflow/_vendor/nvd3/NVD3Chart.py
def buildcontent(self): """Build HTML content only, no header or body tags. To be useful this will usually require the attribute `juqery_on_ready` to be set which will wrap the js in $(function(){<regular_js>};) """ self.buildcontainer() # if the subclass has a method buildjs this method will be # called instead of the method defined here # when this subclass method is entered it does call # the method buildjschart defined here self.buildjschart() self.htmlcontent = self.template_chart_nvd3.render(chart=self)
def buildcontent(self): """Build HTML content only, no header or body tags. To be useful this will usually require the attribute `juqery_on_ready` to be set which will wrap the js in $(function(){<regular_js>};) """ self.buildcontainer() # if the subclass has a method buildjs this method will be # called instead of the method defined here # when this subclass method is entered it does call # the method buildjschart defined here self.buildjschart() self.htmlcontent = self.template_chart_nvd3.render(chart=self)
[ "Build", "HTML", "content", "only", "no", "header", "or", "body", "tags", ".", "To", "be", "useful", "this", "will", "usually", "require", "the", "attribute", "juqery_on_ready", "to", "be", "set", "which", "will", "wrap", "the", "js", "in", "$", "(", "function", "()", "{", "<regular_js", ">", "}", ";", ")" ]
apache/airflow
python
https://github.com/apache/airflow/blob/b69c686ad8a0c89b9136bb4b31767257eb7b2597/airflow/_vendor/nvd3/NVD3Chart.py#L474-L485
[ "def", "buildcontent", "(", "self", ")", ":", "self", ".", "buildcontainer", "(", ")", "# if the subclass has a method buildjs this method will be", "# called instead of the method defined here", "# when this subclass method is entered it does call", "# the method buildjschart defined here", "self", ".", "buildjschart", "(", ")", "self", ".", "htmlcontent", "=", "self", ".", "template_chart_nvd3", ".", "render", "(", "chart", "=", "self", ")" ]
b69c686ad8a0c89b9136bb4b31767257eb7b2597
test
SqliteHook.get_conn
Returns a sqlite connection object
airflow/hooks/sqlite_hook.py
def get_conn(self): """ Returns a sqlite connection object """ conn = self.get_connection(self.sqlite_conn_id) conn = sqlite3.connect(conn.host) return conn
def get_conn(self): """ Returns a sqlite connection object """ conn = self.get_connection(self.sqlite_conn_id) conn = sqlite3.connect(conn.host) return conn
[ "Returns", "a", "sqlite", "connection", "object" ]
apache/airflow
python
https://github.com/apache/airflow/blob/b69c686ad8a0c89b9136bb4b31767257eb7b2597/airflow/hooks/sqlite_hook.py#L35-L41
[ "def", "get_conn", "(", "self", ")", ":", "conn", "=", "self", ".", "get_connection", "(", "self", ".", "sqlite_conn_id", ")", "conn", "=", "sqlite3", ".", "connect", "(", "conn", ".", "host", ")", "return", "conn" ]
b69c686ad8a0c89b9136bb4b31767257eb7b2597
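get_conn treats the connection's host field as the path of the sqlite database file. A usage sketch, assuming a 'sqlite_default' connection whose host points at an existing .db file; the query simply lists the tables.

from airflow.hooks.sqlite_hook import SqliteHook

hook = SqliteHook(sqlite_conn_id='sqlite_default')
conn = hook.get_conn()
try:
    cursor = conn.cursor()
    cursor.execute("SELECT name FROM sqlite_master WHERE type='table'")
    print(cursor.fetchall())
finally:
    conn.close()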
test
action_logging
Decorator to log user actions
airflow/www/decorators.py
def action_logging(f): """ Decorator to log user actions """ @functools.wraps(f) def wrapper(*args, **kwargs): with create_session() as session: if g.user.is_anonymous: user = 'anonymous' else: user = g.user.username log = Log( event=f.__name__, task_instance=None, owner=user, extra=str(list(request.args.items())), task_id=request.args.get('task_id'), dag_id=request.args.get('dag_id')) if 'execution_date' in request.args: log.execution_date = pendulum.parse( request.args.get('execution_date')) session.add(log) return f(*args, **kwargs) return wrapper
def action_logging(f): """ Decorator to log user actions """ @functools.wraps(f) def wrapper(*args, **kwargs): with create_session() as session: if g.user.is_anonymous: user = 'anonymous' else: user = g.user.username log = Log( event=f.__name__, task_instance=None, owner=user, extra=str(list(request.args.items())), task_id=request.args.get('task_id'), dag_id=request.args.get('dag_id')) if 'execution_date' in request.args: log.execution_date = pendulum.parse( request.args.get('execution_date')) session.add(log) return f(*args, **kwargs) return wrapper
[ "Decorator", "to", "log", "user", "actions" ]
apache/airflow
python
https://github.com/apache/airflow/blob/b69c686ad8a0c89b9136bb4b31767257eb7b2597/airflow/www/decorators.py#L29-L58
[ "def", "action_logging", "(", "f", ")", ":", "@", "functools", ".", "wraps", "(", "f", ")", "def", "wrapper", "(", "*", "args", ",", "*", "*", "kwargs", ")", ":", "with", "create_session", "(", ")", "as", "session", ":", "if", "g", ".", "user", ".", "is_anonymous", ":", "user", "=", "'anonymous'", "else", ":", "user", "=", "g", ".", "user", ".", "username", "log", "=", "Log", "(", "event", "=", "f", ".", "__name__", ",", "task_instance", "=", "None", ",", "owner", "=", "user", ",", "extra", "=", "str", "(", "list", "(", "request", ".", "args", ".", "items", "(", ")", ")", ")", ",", "task_id", "=", "request", ".", "args", ".", "get", "(", "'task_id'", ")", ",", "dag_id", "=", "request", ".", "args", ".", "get", "(", "'dag_id'", ")", ")", "if", "'execution_date'", "in", "request", ".", "args", ":", "log", ".", "execution_date", "=", "pendulum", ".", "parse", "(", "request", ".", "args", ".", "get", "(", "'execution_date'", ")", ")", "session", ".", "add", "(", "log", ")", "return", "f", "(", "*", "args", ",", "*", "*", "kwargs", ")", "return", "wrapper" ]
b69c686ad8a0c89b9136bb4b31767257eb7b2597
test
gzipped
Decorator to make a view compressed
airflow/www/decorators.py
def gzipped(f): """ Decorator to make a view compressed """ @functools.wraps(f) def view_func(*args, **kwargs): @after_this_request def zipper(response): accept_encoding = request.headers.get('Accept-Encoding', '') if 'gzip' not in accept_encoding.lower(): return response response.direct_passthrough = False if (response.status_code < 200 or response.status_code >= 300 or 'Content-Encoding' in response.headers): return response gzip_buffer = IO() gzip_file = gzip.GzipFile(mode='wb', fileobj=gzip_buffer) gzip_file.write(response.data) gzip_file.close() response.data = gzip_buffer.getvalue() response.headers['Content-Encoding'] = 'gzip' response.headers['Vary'] = 'Accept-Encoding' response.headers['Content-Length'] = len(response.data) return response return f(*args, **kwargs) return view_func
def gzipped(f): """ Decorator to make a view compressed """ @functools.wraps(f) def view_func(*args, **kwargs): @after_this_request def zipper(response): accept_encoding = request.headers.get('Accept-Encoding', '') if 'gzip' not in accept_encoding.lower(): return response response.direct_passthrough = False if (response.status_code < 200 or response.status_code >= 300 or 'Content-Encoding' in response.headers): return response gzip_buffer = IO() gzip_file = gzip.GzipFile(mode='wb', fileobj=gzip_buffer) gzip_file.write(response.data) gzip_file.close() response.data = gzip_buffer.getvalue() response.headers['Content-Encoding'] = 'gzip' response.headers['Vary'] = 'Accept-Encoding' response.headers['Content-Length'] = len(response.data) return response return f(*args, **kwargs) return view_func
[ "Decorator", "to", "make", "a", "view", "compressed" ]
apache/airflow
python
https://github.com/apache/airflow/blob/b69c686ad8a0c89b9136bb4b31767257eb7b2597/airflow/www/decorators.py#L61-L94
[ "def", "gzipped", "(", "f", ")", ":", "@", "functools", ".", "wraps", "(", "f", ")", "def", "view_func", "(", "*", "args", ",", "*", "*", "kwargs", ")", ":", "@", "after_this_request", "def", "zipper", "(", "response", ")", ":", "accept_encoding", "=", "request", ".", "headers", ".", "get", "(", "'Accept-Encoding'", ",", "''", ")", "if", "'gzip'", "not", "in", "accept_encoding", ".", "lower", "(", ")", ":", "return", "response", "response", ".", "direct_passthrough", "=", "False", "if", "(", "response", ".", "status_code", "<", "200", "or", "response", ".", "status_code", ">=", "300", "or", "'Content-Encoding'", "in", "response", ".", "headers", ")", ":", "return", "response", "gzip_buffer", "=", "IO", "(", ")", "gzip_file", "=", "gzip", ".", "GzipFile", "(", "mode", "=", "'wb'", ",", "fileobj", "=", "gzip_buffer", ")", "gzip_file", ".", "write", "(", "response", ".", "data", ")", "gzip_file", ".", "close", "(", ")", "response", ".", "data", "=", "gzip_buffer", ".", "getvalue", "(", ")", "response", ".", "headers", "[", "'Content-Encoding'", "]", "=", "'gzip'", "response", ".", "headers", "[", "'Vary'", "]", "=", "'Accept-Encoding'", "response", ".", "headers", "[", "'Content-Length'", "]", "=", "len", "(", "response", ".", "data", ")", "return", "response", "return", "f", "(", "*", "args", ",", "*", "*", "kwargs", ")", "return", "view_func" ]
b69c686ad8a0c89b9136bb4b31767257eb7b2597
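The gzipped decorator body is plain Flask machinery; the self-contained sketch below re-implements the same after_this_request pattern outside Airflow (it is not the Airflow module itself), with a made-up app and route.

import gzip
from io import BytesIO

from flask import Flask, after_this_request, request

app = Flask(__name__)

@app.route('/big')
def big_response():
    @after_this_request
    def zipper(response):
        # Only compress when the client advertises gzip support.
        if 'gzip' not in request.headers.get('Accept-Encoding', '').lower():
            return response
        response.direct_passthrough = False
        buf = BytesIO()
        with gzip.GzipFile(mode='wb', fileobj=buf) as gz:
            gz.write(response.data)
        response.data = buf.getvalue()
        response.headers['Content-Encoding'] = 'gzip'
        response.headers['Vary'] = 'Accept-Encoding'
        response.headers['Content-Length'] = len(response.data)
        return response
    return 'x' * 100000

# e.g. curl --compressed -H 'Accept-Encoding: gzip' http://localhost:5000/big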
test
has_dag_access
Decorator to check whether the user has read / write permission on the dag.
airflow/www/decorators.py
def has_dag_access(**dag_kwargs): """ Decorator to check whether the user has read / write permission on the dag. """ def decorator(f): @functools.wraps(f) def wrapper(self, *args, **kwargs): has_access = self.appbuilder.sm.has_access dag_id = request.args.get('dag_id') # if it is false, we need to check whether user has write access on the dag can_dag_edit = dag_kwargs.get('can_dag_edit', False) # 1. check whether the user has can_dag_edit permissions on all_dags # 2. if 1 false, check whether the user # has can_dag_edit permissions on the dag # 3. if 2 false, check whether it is can_dag_read view, # and whether user has the permissions if ( has_access('can_dag_edit', 'all_dags') or has_access('can_dag_edit', dag_id) or (not can_dag_edit and (has_access('can_dag_read', 'all_dags') or has_access('can_dag_read', dag_id)))): return f(self, *args, **kwargs) else: flash("Access is Denied", "danger") return redirect(url_for(self.appbuilder.sm.auth_view. __class__.__name__ + ".login")) return wrapper return decorator
def has_dag_access(**dag_kwargs): """ Decorator to check whether the user has read / write permission on the dag. """ def decorator(f): @functools.wraps(f) def wrapper(self, *args, **kwargs): has_access = self.appbuilder.sm.has_access dag_id = request.args.get('dag_id') # if it is false, we need to check whether user has write access on the dag can_dag_edit = dag_kwargs.get('can_dag_edit', False) # 1. check whether the user has can_dag_edit permissions on all_dags # 2. if 1 false, check whether the user # has can_dag_edit permissions on the dag # 3. if 2 false, check whether it is can_dag_read view, # and whether user has the permissions if ( has_access('can_dag_edit', 'all_dags') or has_access('can_dag_edit', dag_id) or (not can_dag_edit and (has_access('can_dag_read', 'all_dags') or has_access('can_dag_read', dag_id)))): return f(self, *args, **kwargs) else: flash("Access is Denied", "danger") return redirect(url_for(self.appbuilder.sm.auth_view. __class__.__name__ + ".login")) return wrapper return decorator
[ "Decorator", "to", "check", "whether", "the", "user", "has", "read", "/", "write", "permission", "on", "the", "dag", "." ]
apache/airflow
python
https://github.com/apache/airflow/blob/b69c686ad8a0c89b9136bb4b31767257eb7b2597/airflow/www/decorators.py#L97-L127
[ "def", "has_dag_access", "(", "*", "*", "dag_kwargs", ")", ":", "def", "decorator", "(", "f", ")", ":", "@", "functools", ".", "wraps", "(", "f", ")", "def", "wrapper", "(", "self", ",", "*", "args", ",", "*", "*", "kwargs", ")", ":", "has_access", "=", "self", ".", "appbuilder", ".", "sm", ".", "has_access", "dag_id", "=", "request", ".", "args", ".", "get", "(", "'dag_id'", ")", "# if it is false, we need to check whether user has write access on the dag", "can_dag_edit", "=", "dag_kwargs", ".", "get", "(", "'can_dag_edit'", ",", "False", ")", "# 1. check whether the user has can_dag_edit permissions on all_dags", "# 2. if 1 false, check whether the user", "# has can_dag_edit permissions on the dag", "# 3. if 2 false, check whether it is can_dag_read view,", "# and whether user has the permissions", "if", "(", "has_access", "(", "'can_dag_edit'", ",", "'all_dags'", ")", "or", "has_access", "(", "'can_dag_edit'", ",", "dag_id", ")", "or", "(", "not", "can_dag_edit", "and", "(", "has_access", "(", "'can_dag_read'", ",", "'all_dags'", ")", "or", "has_access", "(", "'can_dag_read'", ",", "dag_id", ")", ")", ")", ")", ":", "return", "f", "(", "self", ",", "*", "args", ",", "*", "*", "kwargs", ")", "else", ":", "flash", "(", "\"Access is Denied\"", ",", "\"danger\"", ")", "return", "redirect", "(", "url_for", "(", "self", ".", "appbuilder", ".", "sm", ".", "auth_view", ".", "__class__", ".", "__name__", "+", "\".login\"", ")", ")", "return", "wrapper", "return", "decorator" ]
b69c686ad8a0c89b9136bb4b31767257eb7b2597
test
get_last_dagrun
Returns the last dag run for a dag, None if there was none. Last dag run can be any type of run eg. scheduled or backfilled. Overridden DagRuns are ignored.
airflow/models/dag.py
def get_last_dagrun(dag_id, session, include_externally_triggered=False): """ Returns the last dag run for a dag, None if there was none. Last dag run can be any type of run eg. scheduled or backfilled. Overridden DagRuns are ignored. """ DR = DagRun query = session.query(DR).filter(DR.dag_id == dag_id) if not include_externally_triggered: query = query.filter(DR.external_trigger == False) # noqa query = query.order_by(DR.execution_date.desc()) return query.first()
def get_last_dagrun(dag_id, session, include_externally_triggered=False): """ Returns the last dag run for a dag, None if there was none. Last dag run can be any type of run eg. scheduled or backfilled. Overridden DagRuns are ignored. """ DR = DagRun query = session.query(DR).filter(DR.dag_id == dag_id) if not include_externally_triggered: query = query.filter(DR.external_trigger == False) # noqa query = query.order_by(DR.execution_date.desc()) return query.first()
[ "Returns", "the", "last", "dag", "run", "for", "a", "dag", "None", "if", "there", "was", "none", ".", "Last", "dag", "run", "can", "be", "any", "type", "of", "run", "eg", ".", "scheduled", "or", "backfilled", ".", "Overridden", "DagRuns", "are", "ignored", "." ]
apache/airflow
python
https://github.com/apache/airflow/blob/b69c686ad8a0c89b9136bb4b31767257eb7b2597/airflow/models/dag.py#L64-L75
[ "def", "get_last_dagrun", "(", "dag_id", ",", "session", ",", "include_externally_triggered", "=", "False", ")", ":", "DR", "=", "DagRun", "query", "=", "session", ".", "query", "(", "DR", ")", ".", "filter", "(", "DR", ".", "dag_id", "==", "dag_id", ")", "if", "not", "include_externally_triggered", ":", "query", "=", "query", ".", "filter", "(", "DR", ".", "external_trigger", "==", "False", ")", "# noqa", "query", "=", "query", ".", "order_by", "(", "DR", ".", "execution_date", ".", "desc", "(", ")", ")", "return", "query", ".", "first", "(", ")" ]
b69c686ad8a0c89b9136bb4b31767257eb7b2597
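get_last_dagrun takes an explicit session, so callers usually wrap it in create_session. A minimal sketch, assuming an initialized Airflow metadata database; 'tutorial' stands in for a real dag_id.

from airflow.models.dag import get_last_dagrun
from airflow.utils.db import create_session

with create_session() as session:
    last_run = get_last_dagrun('tutorial', session,
                               include_externally_triggered=True)
    if last_run:
        print(last_run.run_id, last_run.execution_date, last_run.state)
    else:
        print('no runs yet')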
test
DagModel.create_dagrun
Creates a dag run from this dag including the tasks associated with this dag. Returns the dag run. :param run_id: defines the the run id for this dag run :type run_id: str :param execution_date: the execution date of this dag run :type execution_date: datetime.datetime :param state: the state of the dag run :type state: airflow.utils.state.State :param start_date: the date this dag run should be evaluated :type start_date: datetime.datetime :param external_trigger: whether this dag run is externally triggered :type external_trigger: bool :param session: database session :type session: sqlalchemy.orm.session.Session
airflow/models/dag.py
def create_dagrun(self, run_id, state, execution_date, start_date=None, external_trigger=False, conf=None, session=None): """ Creates a dag run from this dag including the tasks associated with this dag. Returns the dag run. :param run_id: defines the the run id for this dag run :type run_id: str :param execution_date: the execution date of this dag run :type execution_date: datetime.datetime :param state: the state of the dag run :type state: airflow.utils.state.State :param start_date: the date this dag run should be evaluated :type start_date: datetime.datetime :param external_trigger: whether this dag run is externally triggered :type external_trigger: bool :param session: database session :type session: sqlalchemy.orm.session.Session """ return self.get_dag().create_dagrun(run_id=run_id, state=state, execution_date=execution_date, start_date=start_date, external_trigger=external_trigger, conf=conf, session=session)
def create_dagrun(self, run_id, state, execution_date, start_date=None, external_trigger=False, conf=None, session=None): """ Creates a dag run from this dag including the tasks associated with this dag. Returns the dag run. :param run_id: defines the the run id for this dag run :type run_id: str :param execution_date: the execution date of this dag run :type execution_date: datetime.datetime :param state: the state of the dag run :type state: airflow.utils.state.State :param start_date: the date this dag run should be evaluated :type start_date: datetime.datetime :param external_trigger: whether this dag run is externally triggered :type external_trigger: bool :param session: database session :type session: sqlalchemy.orm.session.Session """ return self.get_dag().create_dagrun(run_id=run_id, state=state, execution_date=execution_date, start_date=start_date, external_trigger=external_trigger, conf=conf, session=session)
[ "Creates", "a", "dag", "run", "from", "this", "dag", "including", "the", "tasks", "associated", "with", "this", "dag", ".", "Returns", "the", "dag", "run", "." ]
apache/airflow
python
https://github.com/apache/airflow/blob/b69c686ad8a0c89b9136bb4b31767257eb7b2597/airflow/models/dag.py#L1494-L1526
[ "def", "create_dagrun", "(", "self", ",", "run_id", ",", "state", ",", "execution_date", ",", "start_date", "=", "None", ",", "external_trigger", "=", "False", ",", "conf", "=", "None", ",", "session", "=", "None", ")", ":", "return", "self", ".", "get_dag", "(", ")", ".", "create_dagrun", "(", "run_id", "=", "run_id", ",", "state", "=", "state", ",", "execution_date", "=", "execution_date", ",", "start_date", "=", "start_date", ",", "external_trigger", "=", "external_trigger", ",", "conf", "=", "conf", ",", "session", "=", "session", ")" ]
b69c686ad8a0c89b9136bb4b31767257eb7b2597
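DagModel.create_dagrun only delegates to the parsed DAG, so it needs both the metadata row and a loadable DAG file. A heavily hedged sketch, assuming an initialized Airflow environment in which a DAG with id 'tutorial' exists and is importable; the run_id naming is arbitrary.

from airflow.models import DagModel
from airflow.utils import timezone
from airflow.utils.db import create_session
from airflow.utils.state import State

with create_session() as session:
    orm_dag = session.query(DagModel).filter(DagModel.dag_id == 'tutorial').first()
    execution_date = timezone.utcnow()
    run = orm_dag.create_dagrun(
        run_id='manual__' + execution_date.isoformat(),
        state=State.RUNNING,
        execution_date=execution_date,
        external_trigger=True,
        session=session)
    print(run)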
test
SQSPublishOperator.execute
Publish the message to SQS queue :param context: the context object :type context: dict :return: dict with information about the message sent For details of the returned dict see :py:meth:`botocore.client.SQS.send_message` :rtype: dict
airflow/contrib/operators/aws_sqs_publish_operator.py
def execute(self, context): """ Publish the message to SQS queue :param context: the context object :type context: dict :return: dict with information about the message sent For details of the returned dict see :py:meth:`botocore.client.SQS.send_message` :rtype: dict """ hook = SQSHook(aws_conn_id=self.aws_conn_id) result = hook.send_message(queue_url=self.sqs_queue, message_body=self.message_content, delay_seconds=self.delay_seconds, message_attributes=self.message_attributes) self.log.info('result is send_message is %s', result) return result
def execute(self, context): """ Publish the message to SQS queue :param context: the context object :type context: dict :return: dict with information about the message sent For details of the returned dict see :py:meth:`botocore.client.SQS.send_message` :rtype: dict """ hook = SQSHook(aws_conn_id=self.aws_conn_id) result = hook.send_message(queue_url=self.sqs_queue, message_body=self.message_content, delay_seconds=self.delay_seconds, message_attributes=self.message_attributes) self.log.info('result is send_message is %s', result) return result
[ "Publish", "the", "message", "to", "SQS", "queue" ]
apache/airflow
python
https://github.com/apache/airflow/blob/b69c686ad8a0c89b9136bb4b31767257eb7b2597/airflow/contrib/operators/aws_sqs_publish_operator.py#L61-L81
[ "def", "execute", "(", "self", ",", "context", ")", ":", "hook", "=", "SQSHook", "(", "aws_conn_id", "=", "self", ".", "aws_conn_id", ")", "result", "=", "hook", ".", "send_message", "(", "queue_url", "=", "self", ".", "sqs_queue", ",", "message_body", "=", "self", ".", "message_content", ",", "delay_seconds", "=", "self", ".", "delay_seconds", ",", "message_attributes", "=", "self", ".", "message_attributes", ")", "self", ".", "log", ".", "info", "(", "'result is send_message is %s'", ",", "result", ")", "return", "result" ]
b69c686ad8a0c89b9136bb4b31767257eb7b2597
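execute reads everything from operator attributes, so the operator is normally declared inside a DAG file. A sketch with made-up dag/task ids and queue URL; the constructor arguments are inferred from the attributes execute uses and may differ in detail.

from datetime import datetime

from airflow import DAG
from airflow.contrib.operators.aws_sqs_publish_operator import SQSPublishOperator

dag = DAG('sqs_demo', schedule_interval=None, start_date=datetime(2019, 1, 1))

publish = SQSPublishOperator(
    task_id='publish_to_sqs',
    sqs_queue='https://sqs.us-east-1.amazonaws.com/123456789012/airflow-demo-queue',
    message_content='hello from Airflow',
    message_attributes={},
    delay_seconds=0,
    aws_conn_id='aws_default',
    dag=dag)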
test
generate_pages
Generates the HTML for a paging component using a similar logic to the paging auto-generated by Flask managed views. The paging component defines a number of pages visible in the pager (window) and once the user goes to a page beyond the largest visible, it would scroll to the right the page numbers and keeps the current one in the middle of the pager component. When in the last pages, the pages won't scroll and just keep moving until the last page. Pager also contains <first, previous, ..., next, last> pages. This component takes into account custom parameters such as search and showPaused, which could be added to the pages link in order to maintain the state between client and server. It also allows to make a bookmark on a specific paging state. :param current_page: the current page number, 0-indexed :param num_of_pages: the total number of pages :param search: the search query string, if any :param showPaused: false if paused dags will be hidden, otherwise true to show them :param window: the number of pages to be shown in the paging component (7 default) :return: the HTML string of the paging component
airflow/www/utils.py
def generate_pages(current_page, num_of_pages, search=None, showPaused=None, window=7): """ Generates the HTML for a paging component using a similar logic to the paging auto-generated by Flask managed views. The paging component defines a number of pages visible in the pager (window) and once the user goes to a page beyond the largest visible, it would scroll to the right the page numbers and keeps the current one in the middle of the pager component. When in the last pages, the pages won't scroll and just keep moving until the last page. Pager also contains <first, previous, ..., next, last> pages. This component takes into account custom parameters such as search and showPaused, which could be added to the pages link in order to maintain the state between client and server. It also allows to make a bookmark on a specific paging state. :param current_page: the current page number, 0-indexed :param num_of_pages: the total number of pages :param search: the search query string, if any :param showPaused: false if paused dags will be hidden, otherwise true to show them :param window: the number of pages to be shown in the paging component (7 default) :return: the HTML string of the paging component """ void_link = 'javascript:void(0)' first_node = Markup("""<li class="paginate_button {disabled}" id="dags_first"> <a href="{href_link}" aria-controls="dags" data-dt-idx="0" tabindex="0">&laquo;</a> </li>""") previous_node = Markup("""<li class="paginate_button previous {disabled}" id="dags_previous"> <a href="{href_link}" aria-controls="dags" data-dt-idx="0" tabindex="0">&lt;</a> </li>""") next_node = Markup("""<li class="paginate_button next {disabled}" id="dags_next"> <a href="{href_link}" aria-controls="dags" data-dt-idx="3" tabindex="0">&gt;</a> </li>""") last_node = Markup("""<li class="paginate_button {disabled}" id="dags_last"> <a href="{href_link}" aria-controls="dags" data-dt-idx="3" tabindex="0">&raquo;</a> </li>""") page_node = Markup("""<li class="paginate_button {is_active}"> <a href="{href_link}" aria-controls="dags" data-dt-idx="2" tabindex="0">{page_num}</a> </li>""") output = [Markup('<ul class="pagination" style="margin-top:0px;">')] is_disabled = 'disabled' if current_page <= 0 else '' output.append(first_node.format(href_link="?{}" .format(get_params(page=0, search=search, showPaused=showPaused)), disabled=is_disabled)) page_link = void_link if current_page > 0: page_link = '?{}'.format(get_params(page=(current_page - 1), search=search, showPaused=showPaused)) output.append(previous_node.format(href_link=page_link, disabled=is_disabled)) mid = int(window / 2) last_page = num_of_pages - 1 if current_page <= mid or num_of_pages < window: pages = [i for i in range(0, min(num_of_pages, window))] elif mid < current_page < last_page - mid: pages = [i for i in range(current_page - mid, current_page + mid + 1)] else: pages = [i for i in range(num_of_pages - window, last_page + 1)] def is_current(current, page): return page == current for page in pages: vals = { 'is_active': 'active' if is_current(current_page, page) else '', 'href_link': void_link if is_current(current_page, page) else '?{}'.format(get_params(page=page, search=search, showPaused=showPaused)), 'page_num': page + 1 } output.append(page_node.format(**vals)) is_disabled = 'disabled' if current_page >= num_of_pages - 1 else '' page_link = (void_link if current_page >= num_of_pages - 1 else '?{}'.format(get_params(page=current_page + 1, search=search, showPaused=showPaused))) output.append(next_node.format(href_link=page_link, disabled=is_disabled)) output.append(last_node.format(href_link="?{}" .format(get_params(page=last_page, search=search, showPaused=showPaused)), disabled=is_disabled)) output.append(Markup('</ul>')) return Markup('\n'.join(output))
def generate_pages(current_page, num_of_pages, search=None, showPaused=None, window=7): """ Generates the HTML for a paging component using a similar logic to the paging auto-generated by Flask managed views. The paging component defines a number of pages visible in the pager (window) and once the user goes to a page beyond the largest visible, it would scroll to the right the page numbers and keeps the current one in the middle of the pager component. When in the last pages, the pages won't scroll and just keep moving until the last page. Pager also contains <first, previous, ..., next, last> pages. This component takes into account custom parameters such as search and showPaused, which could be added to the pages link in order to maintain the state between client and server. It also allows to make a bookmark on a specific paging state. :param current_page: the current page number, 0-indexed :param num_of_pages: the total number of pages :param search: the search query string, if any :param showPaused: false if paused dags will be hidden, otherwise true to show them :param window: the number of pages to be shown in the paging component (7 default) :return: the HTML string of the paging component """ void_link = 'javascript:void(0)' first_node = Markup("""<li class="paginate_button {disabled}" id="dags_first"> <a href="{href_link}" aria-controls="dags" data-dt-idx="0" tabindex="0">&laquo;</a> </li>""") previous_node = Markup("""<li class="paginate_button previous {disabled}" id="dags_previous"> <a href="{href_link}" aria-controls="dags" data-dt-idx="0" tabindex="0">&lt;</a> </li>""") next_node = Markup("""<li class="paginate_button next {disabled}" id="dags_next"> <a href="{href_link}" aria-controls="dags" data-dt-idx="3" tabindex="0">&gt;</a> </li>""") last_node = Markup("""<li class="paginate_button {disabled}" id="dags_last"> <a href="{href_link}" aria-controls="dags" data-dt-idx="3" tabindex="0">&raquo;</a> </li>""") page_node = Markup("""<li class="paginate_button {is_active}"> <a href="{href_link}" aria-controls="dags" data-dt-idx="2" tabindex="0">{page_num}</a> </li>""") output = [Markup('<ul class="pagination" style="margin-top:0px;">')] is_disabled = 'disabled' if current_page <= 0 else '' output.append(first_node.format(href_link="?{}" .format(get_params(page=0, search=search, showPaused=showPaused)), disabled=is_disabled)) page_link = void_link if current_page > 0: page_link = '?{}'.format(get_params(page=(current_page - 1), search=search, showPaused=showPaused)) output.append(previous_node.format(href_link=page_link, disabled=is_disabled)) mid = int(window / 2) last_page = num_of_pages - 1 if current_page <= mid or num_of_pages < window: pages = [i for i in range(0, min(num_of_pages, window))] elif mid < current_page < last_page - mid: pages = [i for i in range(current_page - mid, current_page + mid + 1)] else: pages = [i for i in range(num_of_pages - window, last_page + 1)] def is_current(current, page): return page == current for page in pages: vals = { 'is_active': 'active' if is_current(current_page, page) else '', 'href_link': void_link if is_current(current_page, page) else '?{}'.format(get_params(page=page, search=search, showPaused=showPaused)), 'page_num': page + 1 } output.append(page_node.format(**vals)) is_disabled = 'disabled' if current_page >= num_of_pages - 1 else '' page_link = (void_link if current_page >= num_of_pages - 1 else '?{}'.format(get_params(page=current_page + 1, search=search, showPaused=showPaused))) 
output.append(next_node.format(href_link=page_link, disabled=is_disabled)) output.append(last_node.format(href_link="?{}" .format(get_params(page=last_page, search=search, showPaused=showPaused)), disabled=is_disabled)) output.append(Markup('</ul>')) return Markup('\n'.join(output))
[ "Generates", "the", "HTML", "for", "a", "paging", "component", "using", "a", "similar", "logic", "to", "the", "paging", "auto", "-", "generated", "by", "Flask", "managed", "views", ".", "The", "paging", "component", "defines", "a", "number", "of", "pages", "visible", "in", "the", "pager", "(", "window", ")", "and", "once", "the", "user", "goes", "to", "a", "page", "beyond", "the", "largest", "visible", "it", "would", "scroll", "to", "the", "right", "the", "page", "numbers", "and", "keeps", "the", "current", "one", "in", "the", "middle", "of", "the", "pager", "component", ".", "When", "in", "the", "last", "pages", "the", "pages", "won", "t", "scroll", "and", "just", "keep", "moving", "until", "the", "last", "page", ".", "Pager", "also", "contains", "<first", "previous", "...", "next", "last", ">", "pages", ".", "This", "component", "takes", "into", "account", "custom", "parameters", "such", "as", "search", "and", "showPaused", "which", "could", "be", "added", "to", "the", "pages", "link", "in", "order", "to", "maintain", "the", "state", "between", "client", "and", "server", ".", "It", "also", "allows", "to", "make", "a", "bookmark", "on", "a", "specific", "paging", "state", ".", ":", "param", "current_page", ":", "the", "current", "page", "number", "0", "-", "indexed", ":", "param", "num_of_pages", ":", "the", "total", "number", "of", "pages", ":", "param", "search", ":", "the", "search", "query", "string", "if", "any", ":", "param", "showPaused", ":", "false", "if", "paused", "dags", "will", "be", "hidden", "otherwise", "true", "to", "show", "them", ":", "param", "window", ":", "the", "number", "of", "pages", "to", "be", "shown", "in", "the", "paging", "component", "(", "7", "default", ")", ":", "return", ":", "the", "HTML", "string", "of", "the", "paging", "component" ]
apache/airflow
python
https://github.com/apache/airflow/blob/b69c686ad8a0c89b9136bb4b31767257eb7b2597/airflow/www/utils.py#L79-L185
[ "def", "generate_pages", "(", "current_page", ",", "num_of_pages", ",", "search", "=", "None", ",", "showPaused", "=", "None", ",", "window", "=", "7", ")", ":", "void_link", "=", "'javascript:void(0)'", "first_node", "=", "Markup", "(", "\"\"\"<li class=\"paginate_button {disabled}\" id=\"dags_first\">\n <a href=\"{href_link}\" aria-controls=\"dags\" data-dt-idx=\"0\" tabindex=\"0\">&laquo;</a>\n</li>\"\"\"", ")", "previous_node", "=", "Markup", "(", "\"\"\"<li class=\"paginate_button previous {disabled}\" id=\"dags_previous\">\n <a href=\"{href_link}\" aria-controls=\"dags\" data-dt-idx=\"0\" tabindex=\"0\">&lt;</a>\n</li>\"\"\"", ")", "next_node", "=", "Markup", "(", "\"\"\"<li class=\"paginate_button next {disabled}\" id=\"dags_next\">\n <a href=\"{href_link}\" aria-controls=\"dags\" data-dt-idx=\"3\" tabindex=\"0\">&gt;</a>\n</li>\"\"\"", ")", "last_node", "=", "Markup", "(", "\"\"\"<li class=\"paginate_button {disabled}\" id=\"dags_last\">\n <a href=\"{href_link}\" aria-controls=\"dags\" data-dt-idx=\"3\" tabindex=\"0\">&raquo;</a>\n</li>\"\"\"", ")", "page_node", "=", "Markup", "(", "\"\"\"<li class=\"paginate_button {is_active}\">\n <a href=\"{href_link}\" aria-controls=\"dags\" data-dt-idx=\"2\" tabindex=\"0\">{page_num}</a>\n</li>\"\"\"", ")", "output", "=", "[", "Markup", "(", "'<ul class=\"pagination\" style=\"margin-top:0px;\">'", ")", "]", "is_disabled", "=", "'disabled'", "if", "current_page", "<=", "0", "else", "''", "output", ".", "append", "(", "first_node", ".", "format", "(", "href_link", "=", "\"?{}\"", ".", "format", "(", "get_params", "(", "page", "=", "0", ",", "search", "=", "search", ",", "showPaused", "=", "showPaused", ")", ")", ",", "disabled", "=", "is_disabled", ")", ")", "page_link", "=", "void_link", "if", "current_page", ">", "0", ":", "page_link", "=", "'?{}'", ".", "format", "(", "get_params", "(", "page", "=", "(", "current_page", "-", "1", ")", ",", "search", "=", "search", ",", "showPaused", "=", "showPaused", ")", ")", "output", ".", "append", "(", "previous_node", ".", "format", "(", "href_link", "=", "page_link", ",", "disabled", "=", "is_disabled", ")", ")", "mid", "=", "int", "(", "window", "/", "2", ")", "last_page", "=", "num_of_pages", "-", "1", "if", "current_page", "<=", "mid", "or", "num_of_pages", "<", "window", ":", "pages", "=", "[", "i", "for", "i", "in", "range", "(", "0", ",", "min", "(", "num_of_pages", ",", "window", ")", ")", "]", "elif", "mid", "<", "current_page", "<", "last_page", "-", "mid", ":", "pages", "=", "[", "i", "for", "i", "in", "range", "(", "current_page", "-", "mid", ",", "current_page", "+", "mid", "+", "1", ")", "]", "else", ":", "pages", "=", "[", "i", "for", "i", "in", "range", "(", "num_of_pages", "-", "window", ",", "last_page", "+", "1", ")", "]", "def", "is_current", "(", "current", ",", "page", ")", ":", "return", "page", "==", "current", "for", "page", "in", "pages", ":", "vals", "=", "{", "'is_active'", ":", "'active'", "if", "is_current", "(", "current_page", ",", "page", ")", "else", "''", ",", "'href_link'", ":", "void_link", "if", "is_current", "(", "current_page", ",", "page", ")", "else", "'?{}'", ".", "format", "(", "get_params", "(", "page", "=", "page", ",", "search", "=", "search", ",", "showPaused", "=", "showPaused", ")", ")", ",", "'page_num'", ":", "page", "+", "1", "}", "output", ".", "append", "(", "page_node", ".", "format", "(", "*", "*", "vals", ")", ")", "is_disabled", "=", "'disabled'", "if", "current_page", ">=", "num_of_pages", "-", "1", "else", "''", "page_link", "=", "(", 
"void_link", "if", "current_page", ">=", "num_of_pages", "-", "1", "else", "'?{}'", ".", "format", "(", "get_params", "(", "page", "=", "current_page", "+", "1", ",", "search", "=", "search", ",", "showPaused", "=", "showPaused", ")", ")", ")", "output", ".", "append", "(", "next_node", ".", "format", "(", "href_link", "=", "page_link", ",", "disabled", "=", "is_disabled", ")", ")", "output", ".", "append", "(", "last_node", ".", "format", "(", "href_link", "=", "\"?{}\"", ".", "format", "(", "get_params", "(", "page", "=", "last_page", ",", "search", "=", "search", ",", "showPaused", "=", "showPaused", ")", ")", ",", "disabled", "=", "is_disabled", ")", ")", "output", ".", "append", "(", "Markup", "(", "'</ul>'", ")", ")", "return", "Markup", "(", "'\\n'", ".", "join", "(", "output", ")", ")" ]
b69c686ad8a0c89b9136bb4b31767257eb7b2597
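A minimal usage sketch of generate_pages (an editorial illustration, not part of the record above). It assumes an Airflow 1.10-era installation where airflow.www.utils exposes this helper; the page, search and showPaused values are made up.

from airflow.www.utils import generate_pages

# Render a pager for 10 pages while sitting on the third page (0-indexed page 2).
# search and showPaused are carried into every generated link via get_params.
pager_html = generate_pages(
    current_page=2,
    num_of_pages=10,
    search="example_dag",
    showPaused=True,
    window=7,          # seven page buttons visible at once (the default)
)
print(pager_html)      # Markup wrapping the <ul class="pagination"> ... </ul> block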
test
json_response
returns a json response from a json serializable python object
airflow/www/utils.py
def json_response(obj): """ returns a json response from a json serializable python object """ return Response( response=json.dumps( obj, indent=4, cls=AirflowJsonEncoder), status=200, mimetype="application/json")
def json_response(obj): """ returns a json response from a json serializable python object """ return Response( response=json.dumps( obj, indent=4, cls=AirflowJsonEncoder), status=200, mimetype="application/json")
[ "returns", "a", "json", "response", "from", "a", "json", "serializable", "python", "object" ]
apache/airflow
python
https://github.com/apache/airflow/blob/b69c686ad8a0c89b9136bb4b31767257eb7b2597/airflow/www/utils.py#L193-L201
[ "def", "json_response", "(", "obj", ")", ":", "return", "Response", "(", "response", "=", "json", ".", "dumps", "(", "obj", ",", "indent", "=", "4", ",", "cls", "=", "AirflowJsonEncoder", ")", ",", "status", "=", "200", ",", "mimetype", "=", "\"application/json\"", ")" ]
b69c686ad8a0c89b9136bb4b31767257eb7b2597
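A small sketch of how json_response might be returned from a Flask view (the route and payload are hypothetical; it assumes the Airflow webserver's Flask environment is importable).

from flask import Flask
from airflow.www.utils import json_response

app = Flask(__name__)

@app.route("/health")
def health():
    # json_response serializes with AirflowJsonEncoder and returns a flask.Response,
    # so it can be returned directly from a view.
    return json_response({"status": "ok", "checks": ["metadatabase", "scheduler"]})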
test
open_maybe_zipped
Opens the given file. If the path contains a folder with a .zip suffix, then the folder is treated as a zip archive, opening the file inside the archive. :return: a file object, as in `open`, or as in `ZipFile.open`.
airflow/www/utils.py
def open_maybe_zipped(f, mode='r'): """ Opens the given file. If the path contains a folder with a .zip suffix, then the folder is treated as a zip archive, opening the file inside the archive. :return: a file object, as in `open`, or as in `ZipFile.open`. """ _, archive, filename = ZIP_REGEX.search(f).groups() if archive and zipfile.is_zipfile(archive): return zipfile.ZipFile(archive, mode=mode).open(filename) else: return io.open(f, mode=mode)
def open_maybe_zipped(f, mode='r'): """ Opens the given file. If the path contains a folder with a .zip suffix, then the folder is treated as a zip archive, opening the file inside the archive. :return: a file object, as in `open`, or as in `ZipFile.open`. """ _, archive, filename = ZIP_REGEX.search(f).groups() if archive and zipfile.is_zipfile(archive): return zipfile.ZipFile(archive, mode=mode).open(filename) else: return io.open(f, mode=mode)
[ "Opens", "the", "given", "file", ".", "If", "the", "path", "contains", "a", "folder", "with", "a", ".", "zip", "suffix", "then", "the", "folder", "is", "treated", "as", "a", "zip", "archive", "opening", "the", "file", "inside", "the", "archive", "." ]
apache/airflow
python
https://github.com/apache/airflow/blob/b69c686ad8a0c89b9136bb4b31767257eb7b2597/airflow/www/utils.py#L207-L219
[ "def", "open_maybe_zipped", "(", "f", ",", "mode", "=", "'r'", ")", ":", "_", ",", "archive", ",", "filename", "=", "ZIP_REGEX", ".", "search", "(", "f", ")", ".", "groups", "(", ")", "if", "archive", "and", "zipfile", ".", "is_zipfile", "(", "archive", ")", ":", "return", "zipfile", ".", "ZipFile", "(", "archive", ",", "mode", "=", "mode", ")", ".", "open", "(", "filename", ")", "else", ":", "return", "io", ".", "open", "(", "f", ",", "mode", "=", "mode", ")" ]
b69c686ad8a0c89b9136bb4b31767257eb7b2597
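A usage sketch for open_maybe_zipped with made-up paths; the second call assumes a DAG packaged inside a .zip bundle, which is the case this helper exists for.

from airflow.www.utils import open_maybe_zipped

# Regular file on disk: behaves like io.open and yields text in mode 'r'.
with open_maybe_zipped("/usr/local/airflow/dags/my_dag.py") as f:
    source = f.read()

# File living inside a zipped DAG bundle: the .zip component is detected by
# ZIP_REGEX and the member is opened through ZipFile.open (which yields bytes).
with open_maybe_zipped("/usr/local/airflow/dags/my_bundle.zip/my_dag.py") as f:
    packaged_source = f.read()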
test
make_cache_key
Used by cache to get a unique key per URL
airflow/www/utils.py
def make_cache_key(*args, **kwargs): """ Used by cache to get a unique key per URL """ path = request.path args = str(hash(frozenset(request.args.items()))) return (path + args).encode('ascii', 'ignore')
def make_cache_key(*args, **kwargs): """ Used by cache to get a unique key per URL """ path = request.path args = str(hash(frozenset(request.args.items()))) return (path + args).encode('ascii', 'ignore')
[ "Used", "by", "cache", "to", "get", "a", "unique", "key", "per", "URL" ]
apache/airflow
python
https://github.com/apache/airflow/blob/b69c686ad8a0c89b9136bb4b31767257eb7b2597/airflow/www/utils.py#L222-L228
[ "def", "make_cache_key", "(", "*", "args", ",", "*", "*", "kwargs", ")", ":", "path", "=", "request", ".", "path", "args", "=", "str", "(", "hash", "(", "frozenset", "(", "request", ".", "args", ".", "items", "(", ")", ")", ")", ")", "return", "(", "path", "+", "args", ")", ".", "encode", "(", "'ascii'", ",", "'ignore'", ")" ]
b69c686ad8a0c89b9136bb4b31767257eb7b2597
test
CloudVideoIntelligenceHook.get_conn
Returns Gcp Video Intelligence Service client :rtype: google.cloud.videointelligence_v1.VideoIntelligenceServiceClient
airflow/contrib/hooks/gcp_video_intelligence_hook.py
def get_conn(self): """ Returns Gcp Video Intelligence Service client :rtype: google.cloud.videointelligence_v1.VideoIntelligenceServiceClient """ if not self._conn: self._conn = VideoIntelligenceServiceClient(credentials=self._get_credentials()) return self._conn
def get_conn(self): """ Returns Gcp Video Intelligence Service client :rtype: google.cloud.videointelligence_v1.VideoIntelligenceServiceClient """ if not self._conn: self._conn = VideoIntelligenceServiceClient(credentials=self._get_credentials()) return self._conn
[ "Returns", "Gcp", "Video", "Intelligence", "Service", "client" ]
apache/airflow
python
https://github.com/apache/airflow/blob/b69c686ad8a0c89b9136bb4b31767257eb7b2597/airflow/contrib/hooks/gcp_video_intelligence_hook.py#L41-L49
[ "def", "get_conn", "(", "self", ")", ":", "if", "not", "self", ".", "_conn", ":", "self", ".", "_conn", "=", "VideoIntelligenceServiceClient", "(", "credentials", "=", "self", ".", "_get_credentials", "(", ")", ")", "return", "self", ".", "_conn" ]
b69c686ad8a0c89b9136bb4b31767257eb7b2597
test
CloudVideoIntelligenceHook.annotate_video
Performs video annotation. :param input_uri: Input video location. Currently, only Google Cloud Storage URIs are supported, which must be specified in the following format: ``gs://bucket-id/object-id``. :type input_uri: str :param input_content: The video data bytes. If unset, the input video(s) should be specified via ``input_uri``. If set, ``input_uri`` should be unset. :type input_content: bytes :param features: Requested video annotation features. :type features: list[google.cloud.videointelligence_v1.VideoIntelligenceServiceClient.enums.Feature] :param output_uri: Optional, location where the output (in JSON format) should be stored. Currently, only Google Cloud Storage URIs are supported, which must be specified in the following format: ``gs://bucket-id/object-id``. :type output_uri: str :param video_context: Optional, Additional video context and/or feature-specific parameters. :type video_context: dict or google.cloud.videointelligence_v1.types.VideoContext :param location: Optional, cloud region where annotation should take place. Supported cloud regions: us-east1, us-west1, europe-west1, asia-east1. If no region is specified, a region will be determined based on video file location. :type location: str :param retry: Retry object used to determine when/if to retry requests. If None is specified, requests will not be retried. :type retry: google.api_core.retry.Retry :param timeout: Optional, The amount of time, in seconds, to wait for the request to complete. Note that if retry is specified, the timeout applies to each individual attempt. :type timeout: float :param metadata: Optional, Additional metadata that is provided to the method. :type metadata: seq[tuple[str, str]]
airflow/contrib/hooks/gcp_video_intelligence_hook.py
def annotate_video( self, input_uri=None, input_content=None, features=None, video_context=None, output_uri=None, location=None, retry=None, timeout=None, metadata=None, ): """ Performs video annotation. :param input_uri: Input video location. Currently, only Google Cloud Storage URIs are supported, which must be specified in the following format: ``gs://bucket-id/object-id``. :type input_uri: str :param input_content: The video data bytes. If unset, the input video(s) should be specified via ``input_uri``. If set, ``input_uri`` should be unset. :type input_content: bytes :param features: Requested video annotation features. :type features: list[google.cloud.videointelligence_v1.VideoIntelligenceServiceClient.enums.Feature] :param output_uri: Optional, location where the output (in JSON format) should be stored. Currently, only Google Cloud Storage URIs are supported, which must be specified in the following format: ``gs://bucket-id/object-id``. :type output_uri: str :param video_context: Optional, Additional video context and/or feature-specific parameters. :type video_context: dict or google.cloud.videointelligence_v1.types.VideoContext :param location: Optional, cloud region where annotation should take place. Supported cloud regions: us-east1, us-west1, europe-west1, asia-east1. If no region is specified, a region will be determined based on video file location. :type location: str :param retry: Retry object used to determine when/if to retry requests. If None is specified, requests will not be retried. :type retry: google.api_core.retry.Retry :param timeout: Optional, The amount of time, in seconds, to wait for the request to complete. Note that if retry is specified, the timeout applies to each individual attempt. :type timeout: float :param metadata: Optional, Additional metadata that is provided to the method. :type metadata: seq[tuple[str, str]] """ client = self.get_conn() return client.annotate_video( input_uri=input_uri, input_content=input_content, features=features, video_context=video_context, output_uri=output_uri, location_id=location, retry=retry, timeout=timeout, metadata=metadata, )
def annotate_video( self, input_uri=None, input_content=None, features=None, video_context=None, output_uri=None, location=None, retry=None, timeout=None, metadata=None, ): """ Performs video annotation. :param input_uri: Input video location. Currently, only Google Cloud Storage URIs are supported, which must be specified in the following format: ``gs://bucket-id/object-id``. :type input_uri: str :param input_content: The video data bytes. If unset, the input video(s) should be specified via ``input_uri``. If set, ``input_uri`` should be unset. :type input_content: bytes :param features: Requested video annotation features. :type features: list[google.cloud.videointelligence_v1.VideoIntelligenceServiceClient.enums.Feature] :param output_uri: Optional, location where the output (in JSON format) should be stored. Currently, only Google Cloud Storage URIs are supported, which must be specified in the following format: ``gs://bucket-id/object-id``. :type output_uri: str :param video_context: Optional, Additional video context and/or feature-specific parameters. :type video_context: dict or google.cloud.videointelligence_v1.types.VideoContext :param location: Optional, cloud region where annotation should take place. Supported cloud regions: us-east1, us-west1, europe-west1, asia-east1. If no region is specified, a region will be determined based on video file location. :type location: str :param retry: Retry object used to determine when/if to retry requests. If None is specified, requests will not be retried. :type retry: google.api_core.retry.Retry :param timeout: Optional, The amount of time, in seconds, to wait for the request to complete. Note that if retry is specified, the timeout applies to each individual attempt. :type timeout: float :param metadata: Optional, Additional metadata that is provided to the method. :type metadata: seq[tuple[str, str]] """ client = self.get_conn() return client.annotate_video( input_uri=input_uri, input_content=input_content, features=features, video_context=video_context, output_uri=output_uri, location_id=location, retry=retry, timeout=timeout, metadata=metadata, )
[ "Performs", "video", "annotation", "." ]
apache/airflow
python
https://github.com/apache/airflow/blob/b69c686ad8a0c89b9136bb4b31767257eb7b2597/airflow/contrib/hooks/gcp_video_intelligence_hook.py#L51-L105
[ "def", "annotate_video", "(", "self", ",", "input_uri", "=", "None", ",", "input_content", "=", "None", ",", "features", "=", "None", ",", "video_context", "=", "None", ",", "output_uri", "=", "None", ",", "location", "=", "None", ",", "retry", "=", "None", ",", "timeout", "=", "None", ",", "metadata", "=", "None", ",", ")", ":", "client", "=", "self", ".", "get_conn", "(", ")", "return", "client", ".", "annotate_video", "(", "input_uri", "=", "input_uri", ",", "input_content", "=", "input_content", ",", "features", "=", "features", ",", "video_context", "=", "video_context", ",", "output_uri", "=", "output_uri", ",", "location_id", "=", "location", ",", "retry", "=", "retry", ",", "timeout", "=", "timeout", ",", "metadata", "=", "metadata", ",", ")" ]
b69c686ad8a0c89b9136bb4b31767257eb7b2597
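A hypothetical call to annotate_video. The connection ID, bucket and object names are assumptions, and the enums import reflects the pre-2.0 google-cloud-videointelligence client that this contrib hook targets.

from airflow.contrib.hooks.gcp_video_intelligence_hook import CloudVideoIntelligenceHook
from google.cloud.videointelligence_v1 import enums

hook = CloudVideoIntelligenceHook(gcp_conn_id="google_cloud_default")

# Kick off label detection on a GCS-hosted video and write the JSON output back to GCS.
operation = hook.annotate_video(
    input_uri="gs://my-bucket/videos/demo.mp4",
    features=[enums.Feature.LABEL_DETECTION],
    output_uri="gs://my-bucket/video-annotations/",
)
result = operation.result(timeout=600)   # block until the long-running operation completes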
test
OpsgenieAlertHook._get_api_key
Get Opsgenie api_key for creating alert
airflow/contrib/hooks/opsgenie_alert_hook.py
def _get_api_key(self): """ Get Opsgenie api_key for creating alert """ conn = self.get_connection(self.http_conn_id) api_key = conn.password if not api_key: raise AirflowException('Opsgenie API Key is required for this hook, ' 'please check your conn_id configuration.') return api_key
def _get_api_key(self): """ Get Opsgenie api_key for creating alert """ conn = self.get_connection(self.http_conn_id) api_key = conn.password if not api_key: raise AirflowException('Opsgenie API Key is required for this hook, ' 'please check your conn_id configuration.') return api_key
[ "Get", "Opsgenie", "api_key", "for", "creating", "alert" ]
apache/airflow
python
https://github.com/apache/airflow/blob/b69c686ad8a0c89b9136bb4b31767257eb7b2597/airflow/contrib/hooks/opsgenie_alert_hook.py#L50-L59
[ "def", "_get_api_key", "(", "self", ")", ":", "conn", "=", "self", ".", "get_connection", "(", "self", ".", "http_conn_id", ")", "api_key", "=", "conn", ".", "password", "if", "not", "api_key", ":", "raise", "AirflowException", "(", "'Opsgenie API Key is required for this hook, '", "'please check your conn_id configuration.'", ")", "return", "api_key" ]
b69c686ad8a0c89b9136bb4b31767257eb7b2597
test
OpsgenieAlertHook.get_conn
Overwrite HttpHook get_conn because this hook just needs base_url and headers, and does not need generic params :param headers: additional headers to be passed through as a dictionary :type headers: dict
airflow/contrib/hooks/opsgenie_alert_hook.py
def get_conn(self, headers=None): """ Overwrite HttpHook get_conn because this hook just needs base_url and headers, and does not need generic params :param headers: additional headers to be passed through as a dictionary :type headers: dict """ conn = self.get_connection(self.http_conn_id) self.base_url = conn.host if conn.host else 'https://api.opsgenie.com' session = requests.Session() if headers: session.headers.update(headers) return session
def get_conn(self, headers=None): """ Overwrite HttpHook get_conn because this hook just needs base_url and headers, and does not need generic params :param headers: additional headers to be passed through as a dictionary :type headers: dict """ conn = self.get_connection(self.http_conn_id) self.base_url = conn.host if conn.host else 'https://api.opsgenie.com' session = requests.Session() if headers: session.headers.update(headers) return session
[ "Overwrite", "HttpHook", "get_conn", "because", "this", "hook", "just", "needs", "base_url", "and", "headers", "and", "does", "not", "need", "generic", "params" ]
apache/airflow
python
https://github.com/apache/airflow/blob/b69c686ad8a0c89b9136bb4b31767257eb7b2597/airflow/contrib/hooks/opsgenie_alert_hook.py#L61-L74
[ "def", "get_conn", "(", "self", ",", "headers", "=", "None", ")", ":", "conn", "=", "self", ".", "get_connection", "(", "self", ".", "http_conn_id", ")", "self", ".", "base_url", "=", "conn", ".", "host", "if", "conn", ".", "host", "else", "'https://api.opsgenie.com'", "session", "=", "requests", ".", "Session", "(", ")", "if", "headers", ":", "session", ".", "headers", ".", "update", "(", "headers", ")", "return", "session" ]
b69c686ad8a0c89b9136bb4b31767257eb7b2597
test
OpsgenieAlertHook.execute
Execute the Opsgenie Alert call :param payload: Opsgenie API Create Alert payload values See https://docs.opsgenie.com/docs/alert-api#section-create-alert :type payload: dict
airflow/contrib/hooks/opsgenie_alert_hook.py
def execute(self, payload={}): """ Execute the Opsgenie Alert call :param payload: Opsgenie API Create Alert payload values See https://docs.opsgenie.com/docs/alert-api#section-create-alert :type payload: dict """ api_key = self._get_api_key() return self.run(endpoint='v2/alerts', data=json.dumps(payload), headers={'Content-Type': 'application/json', 'Authorization': 'GenieKey %s' % api_key})
def execute(self, payload={}): """ Execute the Opsgenie Alert call :param payload: Opsgenie API Create Alert payload values See https://docs.opsgenie.com/docs/alert-api#section-create-alert :type payload: dict """ api_key = self._get_api_key() return self.run(endpoint='v2/alerts', data=json.dumps(payload), headers={'Content-Type': 'application/json', 'Authorization': 'GenieKey %s' % api_key})
[ "Execute", "the", "Opsgenie", "Alert", "call" ]
apache/airflow
python
https://github.com/apache/airflow/blob/b69c686ad8a0c89b9136bb4b31767257eb7b2597/airflow/contrib/hooks/opsgenie_alert_hook.py#L76-L88
[ "def", "execute", "(", "self", ",", "payload", "=", "{", "}", ")", ":", "api_key", "=", "self", ".", "_get_api_key", "(", ")", "return", "self", ".", "run", "(", "endpoint", "=", "'v2/alerts'", ",", "data", "=", "json", ".", "dumps", "(", "payload", ")", ",", "headers", "=", "{", "'Content-Type'", ":", "'application/json'", ",", "'Authorization'", ":", "'GenieKey %s'", "%", "api_key", "}", ")" ]
b69c686ad8a0c89b9136bb4b31767257eb7b2597
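A usage sketch for the hook's execute method; the connection ID and alert fields are illustrative. The hook is constructed the way the operator record further below does it, passing the connection ID as the first positional argument.

from airflow.contrib.hooks.opsgenie_alert_hook import OpsgenieAlertHook

# The connection's password field is expected to hold the Opsgenie API key.
hook = OpsgenieAlertHook("opsgenie_default")
response = hook.execute(payload={
    "message": "Nightly ETL failed on task load_orders",
    "priority": "P2",
    "tags": ["airflow", "etl"],
})
print(response.status_code)   # execute() returns the requests.Response from HttpHook.run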
test
BashSensor.poke
Execute the bash command in a temporary directory which will be cleaned afterwards
airflow/contrib/sensors/bash_sensor.py
def poke(self, context): """ Execute the bash command in a temporary directory which will be cleaned afterwards """ bash_command = self.bash_command self.log.info("Tmp dir root location: \n %s", gettempdir()) with TemporaryDirectory(prefix='airflowtmp') as tmp_dir: with NamedTemporaryFile(dir=tmp_dir, prefix=self.task_id) as f: f.write(bytes(bash_command, 'utf_8')) f.flush() fname = f.name script_location = tmp_dir + "/" + fname self.log.info("Temporary script location: %s", script_location) self.log.info("Running command: %s", bash_command) sp = Popen( ['bash', fname], stdout=PIPE, stderr=STDOUT, close_fds=True, cwd=tmp_dir, env=self.env, preexec_fn=os.setsid) self.sp = sp self.log.info("Output:") line = '' for line in iter(sp.stdout.readline, b''): line = line.decode(self.output_encoding).strip() self.log.info(line) sp.wait() self.log.info("Command exited with return code %s", sp.returncode) return not sp.returncode
def poke(self, context): """ Execute the bash command in a temporary directory which will be cleaned afterwards """ bash_command = self.bash_command self.log.info("Tmp dir root location: \n %s", gettempdir()) with TemporaryDirectory(prefix='airflowtmp') as tmp_dir: with NamedTemporaryFile(dir=tmp_dir, prefix=self.task_id) as f: f.write(bytes(bash_command, 'utf_8')) f.flush() fname = f.name script_location = tmp_dir + "/" + fname self.log.info("Temporary script location: %s", script_location) self.log.info("Running command: %s", bash_command) sp = Popen( ['bash', fname], stdout=PIPE, stderr=STDOUT, close_fds=True, cwd=tmp_dir, env=self.env, preexec_fn=os.setsid) self.sp = sp self.log.info("Output:") line = '' for line in iter(sp.stdout.readline, b''): line = line.decode(self.output_encoding).strip() self.log.info(line) sp.wait() self.log.info("Command exited with return code %s", sp.returncode) return not sp.returncode
[ "Execute", "the", "bash", "command", "in", "a", "temporary", "directory", "which", "will", "be", "cleaned", "afterwards" ]
apache/airflow
python
https://github.com/apache/airflow/blob/b69c686ad8a0c89b9136bb4b31767257eb7b2597/airflow/contrib/sensors/bash_sensor.py#L60-L91
[ "def", "poke", "(", "self", ",", "context", ")", ":", "bash_command", "=", "self", ".", "bash_command", "self", ".", "log", ".", "info", "(", "\"Tmp dir root location: \\n %s\"", ",", "gettempdir", "(", ")", ")", "with", "TemporaryDirectory", "(", "prefix", "=", "'airflowtmp'", ")", "as", "tmp_dir", ":", "with", "NamedTemporaryFile", "(", "dir", "=", "tmp_dir", ",", "prefix", "=", "self", ".", "task_id", ")", "as", "f", ":", "f", ".", "write", "(", "bytes", "(", "bash_command", ",", "'utf_8'", ")", ")", "f", ".", "flush", "(", ")", "fname", "=", "f", ".", "name", "script_location", "=", "tmp_dir", "+", "\"/\"", "+", "fname", "self", ".", "log", ".", "info", "(", "\"Temporary script location: %s\"", ",", "script_location", ")", "self", ".", "log", ".", "info", "(", "\"Running command: %s\"", ",", "bash_command", ")", "sp", "=", "Popen", "(", "[", "'bash'", ",", "fname", "]", ",", "stdout", "=", "PIPE", ",", "stderr", "=", "STDOUT", ",", "close_fds", "=", "True", ",", "cwd", "=", "tmp_dir", ",", "env", "=", "self", ".", "env", ",", "preexec_fn", "=", "os", ".", "setsid", ")", "self", ".", "sp", "=", "sp", "self", ".", "log", ".", "info", "(", "\"Output:\"", ")", "line", "=", "''", "for", "line", "in", "iter", "(", "sp", ".", "stdout", ".", "readline", ",", "b''", ")", ":", "line", "=", "line", ".", "decode", "(", "self", ".", "output_encoding", ")", ".", "strip", "(", ")", "self", ".", "log", ".", "info", "(", "line", ")", "sp", ".", "wait", "(", ")", "self", ".", "log", ".", "info", "(", "\"Command exited with return code %s\"", ",", "sp", ".", "returncode", ")", "return", "not", "sp", ".", "returncode" ]
b69c686ad8a0c89b9136bb4b31767257eb7b2597
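A sketch of how this sensor might be wired into a DAG; the DAG ID, file path and timing values are made up. poke returns True when the command exits with status 0, so `test -f` makes the sensor wait for a file to appear.

from datetime import datetime
from airflow import DAG
from airflow.contrib.sensors.bash_sensor import BashSensor

with DAG("example_bash_sensor", start_date=datetime(2019, 1, 1), schedule_interval=None) as dag:
    wait_for_flag = BashSensor(
        task_id="wait_for_flag",
        bash_command="test -f /tmp/data_ready.flag",  # exit code 0 -> poke() returns True
        poke_interval=30,                             # re-run the command every 30 seconds
        timeout=60 * 60,                              # give up after an hour
    )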
test
OpsgenieAlertOperator._build_opsgenie_payload
Construct the Opsgenie JSON payload. All relevant parameters are combined here into a valid Opsgenie JSON payload. :return: Opsgenie payload (dict) to send
airflow/contrib/operators/opsgenie_alert_operator.py
def _build_opsgenie_payload(self): """ Construct the Opsgenie JSON payload. All relevant parameters are combined here to a valid Opsgenie JSON payload. :return: Opsgenie payload (dict) to send """ payload = {} for key in [ "message", "alias", "description", "responders", "visibleTo", "actions", "tags", "details", "entity", "source", "priority", "user", "note" ]: val = getattr(self, key) if val: payload[key] = val return payload
def _build_opsgenie_payload(self): """ Construct the Opsgenie JSON payload. All relevant parameters are combined here to a valid Opsgenie JSON payload. :return: Opsgenie payload (dict) to send """ payload = {} for key in [ "message", "alias", "description", "responders", "visibleTo", "actions", "tags", "details", "entity", "source", "priority", "user", "note" ]: val = getattr(self, key) if val: payload[key] = val return payload
[ "Construct", "the", "Opsgenie", "JSON", "payload", ".", "All", "relevant", "parameters", "are", "combined", "here", "to", "a", "valid", "Opsgenie", "JSON", "payload", "." ]
apache/airflow
python
https://github.com/apache/airflow/blob/b69c686ad8a0c89b9136bb4b31767257eb7b2597/airflow/contrib/operators/opsgenie_alert_operator.py#L107-L124
[ "def", "_build_opsgenie_payload", "(", "self", ")", ":", "payload", "=", "{", "}", "for", "key", "in", "[", "\"message\"", ",", "\"alias\"", ",", "\"description\"", ",", "\"responders\"", ",", "\"visibleTo\"", ",", "\"actions\"", ",", "\"tags\"", ",", "\"details\"", ",", "\"entity\"", ",", "\"source\"", ",", "\"priority\"", ",", "\"user\"", ",", "\"note\"", "]", ":", "val", "=", "getattr", "(", "self", ",", "key", ")", "if", "val", ":", "payload", "[", "key", "]", "=", "val", "return", "payload" ]
b69c686ad8a0c89b9136bb4b31767257eb7b2597
test
OpsgenieAlertOperator.execute
Call the OpsgenieAlertHook to post message
airflow/contrib/operators/opsgenie_alert_operator.py
def execute(self, context): """ Call the OpsgenieAlertHook to post message """ self.hook = OpsgenieAlertHook(self.opsgenie_conn_id) self.hook.execute(self._build_opsgenie_payload())
def execute(self, context): """ Call the OpsgenieAlertHook to post message """ self.hook = OpsgenieAlertHook(self.opsgenie_conn_id) self.hook.execute(self._build_opsgenie_payload())
[ "Call", "the", "OpsgenieAlertHook", "to", "post", "message" ]
apache/airflow
python
https://github.com/apache/airflow/blob/b69c686ad8a0c89b9136bb4b31767257eb7b2597/airflow/contrib/operators/opsgenie_alert_operator.py#L126-L131
[ "def", "execute", "(", "self", ",", "context", ")", ":", "self", ".", "hook", "=", "OpsgenieAlertHook", "(", "self", ".", "opsgenie_conn_id", ")", "self", ".", "hook", ".", "execute", "(", "self", ".", "_build_opsgenie_payload", "(", ")", ")" ]
b69c686ad8a0c89b9136bb4b31767257eb7b2597
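A DAG-level usage sketch for the operator; the DAG ID, connection ID and alert fields are assumptions. Only the fields set here end up in the payload built by _build_opsgenie_payload above.

from datetime import datetime
from airflow import DAG
from airflow.contrib.operators.opsgenie_alert_operator import OpsgenieAlertOperator

with DAG("example_opsgenie_alert", start_date=datetime(2019, 1, 1), schedule_interval=None) as dag:
    page_oncall = OpsgenieAlertOperator(
        task_id="page_oncall",
        opsgenie_conn_id="opsgenie_default",
        message="Nightly load failed",      # the only required alert field
        priority="P2",
        tags=["airflow", "production"],
    )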
test
AWSAthenaHook.get_conn
Check whether an AWS Athena client connection already exists, otherwise create one, and return it. :return: boto3 Athena client
airflow/contrib/hooks/aws_athena_hook.py
def get_conn(self): """ check if aws conn exists already or create one and return it :return: boto3 session """ if not self.conn: self.conn = self.get_client_type('athena') return self.conn
def get_conn(self): """ check if aws conn exists already or create one and return it :return: boto3 session """ if not self.conn: self.conn = self.get_client_type('athena') return self.conn
[ "check", "if", "aws", "conn", "exists", "already", "or", "create", "one", "and", "return", "it" ]
apache/airflow
python
https://github.com/apache/airflow/blob/b69c686ad8a0c89b9136bb4b31767257eb7b2597/airflow/contrib/hooks/aws_athena_hook.py#L43-L51
[ "def", "get_conn", "(", "self", ")", ":", "if", "not", "self", ".", "conn", ":", "self", ".", "conn", "=", "self", ".", "get_client_type", "(", "'athena'", ")", "return", "self", ".", "conn" ]
b69c686ad8a0c89b9136bb4b31767257eb7b2597
test
AWSAthenaHook.run_query
Run a Presto query on Athena with the provided config and return the submitted query_execution_id. :param query: Presto query to run :type query: str :param query_context: Context in which the query needs to be run :type query_context: dict :param result_configuration: Dict with the path to store results in and config related to encryption :type result_configuration: dict :param client_request_token: Unique token created by the user to avoid multiple executions of the same query :type client_request_token: str :return: str
airflow/contrib/hooks/aws_athena_hook.py
def run_query(self, query, query_context, result_configuration, client_request_token=None): """ Run Presto query on athena with provided config and return submitted query_execution_id :param query: Presto query to run :type query: str :param query_context: Context in which query need to be run :type query_context: dict :param result_configuration: Dict with path to store results in and config related to encryption :type result_configuration: dict :param client_request_token: Unique token created by user to avoid multiple executions of same query :type client_request_token: str :return: str """ response = self.conn.start_query_execution(QueryString=query, ClientRequestToken=client_request_token, QueryExecutionContext=query_context, ResultConfiguration=result_configuration) query_execution_id = response['QueryExecutionId'] return query_execution_id
def run_query(self, query, query_context, result_configuration, client_request_token=None): """ Run Presto query on athena with provided config and return submitted query_execution_id :param query: Presto query to run :type query: str :param query_context: Context in which query need to be run :type query_context: dict :param result_configuration: Dict with path to store results in and config related to encryption :type result_configuration: dict :param client_request_token: Unique token created by user to avoid multiple executions of same query :type client_request_token: str :return: str """ response = self.conn.start_query_execution(QueryString=query, ClientRequestToken=client_request_token, QueryExecutionContext=query_context, ResultConfiguration=result_configuration) query_execution_id = response['QueryExecutionId'] return query_execution_id
[ "Run", "Presto", "query", "on", "athena", "with", "provided", "config", "and", "return", "submitted", "query_execution_id" ]
apache/airflow
python
https://github.com/apache/airflow/blob/b69c686ad8a0c89b9136bb4b31767257eb7b2597/airflow/contrib/hooks/aws_athena_hook.py#L53-L72
[ "def", "run_query", "(", "self", ",", "query", ",", "query_context", ",", "result_configuration", ",", "client_request_token", "=", "None", ")", ":", "response", "=", "self", ".", "conn", ".", "start_query_execution", "(", "QueryString", "=", "query", ",", "ClientRequestToken", "=", "client_request_token", ",", "QueryExecutionContext", "=", "query_context", ",", "ResultConfiguration", "=", "result_configuration", ")", "query_execution_id", "=", "response", "[", "'QueryExecutionId'", "]", "return", "query_execution_id" ]
b69c686ad8a0c89b9136bb4b31767257eb7b2597
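A minimal sketch of submitting a query through this hook; the connection ID, database and S3 output location are hypothetical, and the constructor arguments are assumed to match the 1.10-era hook (aws_conn_id, sleep_time). Note that run_query reads self.conn directly, so get_conn() has to be called first, and a client request token is supplied explicitly for idempotency.

from uuid import uuid4
from airflow.contrib.hooks.aws_athena_hook import AWSAthenaHook

hook = AWSAthenaHook(aws_conn_id="aws_default", sleep_time=30)
hook.get_conn()   # run_query uses self.conn, so the Athena client must exist first

query_execution_id = hook.run_query(
    query="SELECT COUNT(*) FROM events",
    query_context={"Database": "analytics"},
    result_configuration={"OutputLocation": "s3://my-athena-results/airflow/"},
    client_request_token=str(uuid4()),   # idempotency token for start_query_execution
)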
test
AWSAthenaHook.check_query_status
Fetch the status of submitted athena query. Returns None or one of valid query states. :param query_execution_id: Id of submitted athena query :type query_execution_id: str :return: str
airflow/contrib/hooks/aws_athena_hook.py
def check_query_status(self, query_execution_id): """ Fetch the status of submitted athena query. Returns None or one of valid query states. :param query_execution_id: Id of submitted athena query :type query_execution_id: str :return: str """ response = self.conn.get_query_execution(QueryExecutionId=query_execution_id) state = None try: state = response['QueryExecution']['Status']['State'] except Exception as ex: self.log.error('Exception while getting query state', ex) finally: return state
def check_query_status(self, query_execution_id): """ Fetch the status of submitted athena query. Returns None or one of valid query states. :param query_execution_id: Id of submitted athena query :type query_execution_id: str :return: str """ response = self.conn.get_query_execution(QueryExecutionId=query_execution_id) state = None try: state = response['QueryExecution']['Status']['State'] except Exception as ex: self.log.error('Exception while getting query state', ex) finally: return state
[ "Fetch", "the", "status", "of", "submitted", "athena", "query", ".", "Returns", "None", "or", "one", "of", "valid", "query", "states", "." ]
apache/airflow
python
https://github.com/apache/airflow/blob/b69c686ad8a0c89b9136bb4b31767257eb7b2597/airflow/contrib/hooks/aws_athena_hook.py#L74-L89
[ "def", "check_query_status", "(", "self", ",", "query_execution_id", ")", ":", "response", "=", "self", ".", "conn", ".", "get_query_execution", "(", "QueryExecutionId", "=", "query_execution_id", ")", "state", "=", "None", "try", ":", "state", "=", "response", "[", "'QueryExecution'", "]", "[", "'Status'", "]", "[", "'State'", "]", "except", "Exception", "as", "ex", ":", "self", ".", "log", ".", "error", "(", "'Exception while getting query state'", ",", "ex", ")", "finally", ":", "return", "state" ]
b69c686ad8a0c89b9136bb4b31767257eb7b2597
test
AWSAthenaHook.get_query_results
Fetch submitted Athena query results. Returns None if the query is in an intermediate or failed/cancelled state, otherwise a dict of the query output. :param query_execution_id: Id of the submitted Athena query :type query_execution_id: str :return: dict
airflow/contrib/hooks/aws_athena_hook.py
def get_query_results(self, query_execution_id): """ Fetch submitted athena query results. returns none if query is in intermediate state or failed/cancelled state else dict of query output :param query_execution_id: Id of submitted athena query :type query_execution_id: str :return: dict """ query_state = self.check_query_status(query_execution_id) if query_state is None: self.log.error('Invalid Query state') return None elif query_state in self.INTERMEDIATE_STATES or query_state in self.FAILURE_STATES: self.log.error('Query is in {state} state. Cannot fetch results'.format(state=query_state)) return None return self.conn.get_query_results(QueryExecutionId=query_execution_id)
def get_query_results(self, query_execution_id): """ Fetch submitted athena query results. returns none if query is in intermediate state or failed/cancelled state else dict of query output :param query_execution_id: Id of submitted athena query :type query_execution_id: str :return: dict """ query_state = self.check_query_status(query_execution_id) if query_state is None: self.log.error('Invalid Query state') return None elif query_state in self.INTERMEDIATE_STATES or query_state in self.FAILURE_STATES: self.log.error('Query is in {state} state. Cannot fetch results'.format(state=query_state)) return None return self.conn.get_query_results(QueryExecutionId=query_execution_id)
[ "Fetch", "submitted", "athena", "query", "results", ".", "returns", "none", "if", "query", "is", "in", "intermediate", "state", "or", "failed", "/", "cancelled", "state", "else", "dict", "of", "query", "output" ]
apache/airflow
python
https://github.com/apache/airflow/blob/b69c686ad8a0c89b9136bb4b31767257eb7b2597/airflow/contrib/hooks/aws_athena_hook.py#L91-L107
[ "def", "get_query_results", "(", "self", ",", "query_execution_id", ")", ":", "query_state", "=", "self", ".", "check_query_status", "(", "query_execution_id", ")", "if", "query_state", "is", "None", ":", "self", ".", "log", ".", "error", "(", "'Invalid Query state'", ")", "return", "None", "elif", "query_state", "in", "self", ".", "INTERMEDIATE_STATES", "or", "query_state", "in", "self", ".", "FAILURE_STATES", ":", "self", ".", "log", ".", "error", "(", "'Query is in {state} state. Cannot fetch results'", ".", "format", "(", "state", "=", "query_state", ")", ")", "return", "None", "return", "self", ".", "conn", ".", "get_query_results", "(", "QueryExecutionId", "=", "query_execution_id", ")" ]
b69c686ad8a0c89b9136bb4b31767257eb7b2597
test
AWSAthenaHook.poll_query_status
Poll the status of submitted athena query until query state reaches final state. Returns one of the final states :param query_execution_id: Id of submitted athena query :type query_execution_id: str :param max_tries: Number of times to poll for query state before function exits :type max_tries: int :return: str
airflow/contrib/hooks/aws_athena_hook.py
def poll_query_status(self, query_execution_id, max_tries=None): """ Poll the status of submitted athena query until query state reaches final state. Returns one of the final states :param query_execution_id: Id of submitted athena query :type query_execution_id: str :param max_tries: Number of times to poll for query state before function exits :type max_tries: int :return: str """ try_number = 1 final_query_state = None # Query state when query reaches final state or max_tries reached while True: query_state = self.check_query_status(query_execution_id) if query_state is None: self.log.info('Trial {try_number}: Invalid query state. Retrying again'.format( try_number=try_number)) elif query_state in self.INTERMEDIATE_STATES: self.log.info('Trial {try_number}: Query is still in an intermediate state - {state}' .format(try_number=try_number, state=query_state)) else: self.log.info('Trial {try_number}: Query execution completed. Final state is {state}' .format(try_number=try_number, state=query_state)) final_query_state = query_state break if max_tries and try_number >= max_tries: # Break loop if max_tries reached final_query_state = query_state break try_number += 1 sleep(self.sleep_time) return final_query_state
def poll_query_status(self, query_execution_id, max_tries=None): """ Poll the status of submitted athena query until query state reaches final state. Returns one of the final states :param query_execution_id: Id of submitted athena query :type query_execution_id: str :param max_tries: Number of times to poll for query state before function exits :type max_tries: int :return: str """ try_number = 1 final_query_state = None # Query state when query reaches final state or max_tries reached while True: query_state = self.check_query_status(query_execution_id) if query_state is None: self.log.info('Trial {try_number}: Invalid query state. Retrying again'.format( try_number=try_number)) elif query_state in self.INTERMEDIATE_STATES: self.log.info('Trial {try_number}: Query is still in an intermediate state - {state}' .format(try_number=try_number, state=query_state)) else: self.log.info('Trial {try_number}: Query execution completed. Final state is {state}' .format(try_number=try_number, state=query_state)) final_query_state = query_state break if max_tries and try_number >= max_tries: # Break loop if max_tries reached final_query_state = query_state break try_number += 1 sleep(self.sleep_time) return final_query_state
[ "Poll", "the", "status", "of", "submitted", "athena", "query", "until", "query", "state", "reaches", "final", "state", ".", "Returns", "one", "of", "the", "final", "states" ]
apache/airflow
python
https://github.com/apache/airflow/blob/b69c686ad8a0c89b9136bb4b31767257eb7b2597/airflow/contrib/hooks/aws_athena_hook.py#L109-L140
[ "def", "poll_query_status", "(", "self", ",", "query_execution_id", ",", "max_tries", "=", "None", ")", ":", "try_number", "=", "1", "final_query_state", "=", "None", "# Query state when query reaches final state or max_tries reached", "while", "True", ":", "query_state", "=", "self", ".", "check_query_status", "(", "query_execution_id", ")", "if", "query_state", "is", "None", ":", "self", ".", "log", ".", "info", "(", "'Trial {try_number}: Invalid query state. Retrying again'", ".", "format", "(", "try_number", "=", "try_number", ")", ")", "elif", "query_state", "in", "self", ".", "INTERMEDIATE_STATES", ":", "self", ".", "log", ".", "info", "(", "'Trial {try_number}: Query is still in an intermediate state - {state}'", ".", "format", "(", "try_number", "=", "try_number", ",", "state", "=", "query_state", ")", ")", "else", ":", "self", ".", "log", ".", "info", "(", "'Trial {try_number}: Query execution completed. Final state is {state}'", ".", "format", "(", "try_number", "=", "try_number", ",", "state", "=", "query_state", ")", ")", "final_query_state", "=", "query_state", "break", "if", "max_tries", "and", "try_number", ">=", "max_tries", ":", "# Break loop if max_tries reached", "final_query_state", "=", "query_state", "break", "try_number", "+=", "1", "sleep", "(", "self", ".", "sleep_time", ")", "return", "final_query_state" ]
b69c686ad8a0c89b9136bb4b31767257eb7b2597
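Continuing the hypothetical Athena sketch above (reusing hook and query_execution_id): poll until a final state is reached and fetch rows only on success. The state names are the standard Athena ones (QUEUED, RUNNING, SUCCEEDED, FAILED, CANCELLED).

final_state = hook.poll_query_status(query_execution_id, max_tries=20)

if final_state == "SUCCEEDED":
    results = hook.get_query_results(query_execution_id)
    for row in results["ResultSet"]["Rows"]:
        # Each row is a dict with a 'Data' list of column values.
        print([col.get("VarCharValue") for col in row["Data"]])
else:
    print("Query ended in state %s, no results fetched" % final_state)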
test
SFTPHook.get_conn
Returns an SFTP connection object
airflow/contrib/hooks/sftp_hook.py
def get_conn(self): """ Returns an SFTP connection object """ if self.conn is None: cnopts = pysftp.CnOpts() if self.no_host_key_check: cnopts.hostkeys = None cnopts.compression = self.compress conn_params = { 'host': self.remote_host, 'port': self.port, 'username': self.username, 'cnopts': cnopts } if self.password and self.password.strip(): conn_params['password'] = self.password if self.key_file: conn_params['private_key'] = self.key_file if self.private_key_pass: conn_params['private_key_pass'] = self.private_key_pass self.conn = pysftp.Connection(**conn_params) return self.conn
def get_conn(self): """ Returns an SFTP connection object """ if self.conn is None: cnopts = pysftp.CnOpts() if self.no_host_key_check: cnopts.hostkeys = None cnopts.compression = self.compress conn_params = { 'host': self.remote_host, 'port': self.port, 'username': self.username, 'cnopts': cnopts } if self.password and self.password.strip(): conn_params['password'] = self.password if self.key_file: conn_params['private_key'] = self.key_file if self.private_key_pass: conn_params['private_key_pass'] = self.private_key_pass self.conn = pysftp.Connection(**conn_params) return self.conn
[ "Returns", "an", "SFTP", "connection", "object" ]
apache/airflow
python
https://github.com/apache/airflow/blob/b69c686ad8a0c89b9136bb4b31767257eb7b2597/airflow/contrib/hooks/sftp_hook.py#L92-L115
[ "def", "get_conn", "(", "self", ")", ":", "if", "self", ".", "conn", "is", "None", ":", "cnopts", "=", "pysftp", ".", "CnOpts", "(", ")", "if", "self", ".", "no_host_key_check", ":", "cnopts", ".", "hostkeys", "=", "None", "cnopts", ".", "compression", "=", "self", ".", "compress", "conn_params", "=", "{", "'host'", ":", "self", ".", "remote_host", ",", "'port'", ":", "self", ".", "port", ",", "'username'", ":", "self", ".", "username", ",", "'cnopts'", ":", "cnopts", "}", "if", "self", ".", "password", "and", "self", ".", "password", ".", "strip", "(", ")", ":", "conn_params", "[", "'password'", "]", "=", "self", ".", "password", "if", "self", ".", "key_file", ":", "conn_params", "[", "'private_key'", "]", "=", "self", ".", "key_file", "if", "self", ".", "private_key_pass", ":", "conn_params", "[", "'private_key_pass'", "]", "=", "self", ".", "private_key_pass", "self", ".", "conn", "=", "pysftp", ".", "Connection", "(", "*", "*", "conn_params", ")", "return", "self", ".", "conn" ]
b69c686ad8a0c89b9136bb4b31767257eb7b2597
test
SFTPHook.describe_directory
Returns a dictionary of {filename: {attributes}} for all files on the remote system (where the MLSD command is supported). :param path: full path to the remote directory :type path: str
airflow/contrib/hooks/sftp_hook.py
def describe_directory(self, path): """ Returns a dictionary of {filename: {attributes}} for all files on the remote system (where the MLSD command is supported). :param path: full path to the remote directory :type path: str """ conn = self.get_conn() flist = conn.listdir_attr(path) files = {} for f in flist: modify = datetime.datetime.fromtimestamp( f.st_mtime).strftime('%Y%m%d%H%M%S') files[f.filename] = { 'size': f.st_size, 'type': 'dir' if stat.S_ISDIR(f.st_mode) else 'file', 'modify': modify} return files
def describe_directory(self, path): """ Returns a dictionary of {filename: {attributes}} for all files on the remote system (where the MLSD command is supported). :param path: full path to the remote directory :type path: str """ conn = self.get_conn() flist = conn.listdir_attr(path) files = {} for f in flist: modify = datetime.datetime.fromtimestamp( f.st_mtime).strftime('%Y%m%d%H%M%S') files[f.filename] = { 'size': f.st_size, 'type': 'dir' if stat.S_ISDIR(f.st_mode) else 'file', 'modify': modify} return files
[ "Returns", "a", "dictionary", "of", "{", "filename", ":", "{", "attributes", "}}", "for", "all", "files", "on", "the", "remote", "system", "(", "where", "the", "MLSD", "command", "is", "supported", ")", ".", ":", "param", "path", ":", "full", "path", "to", "the", "remote", "directory", ":", "type", "path", ":", "str" ]
apache/airflow
python
https://github.com/apache/airflow/blob/b69c686ad8a0c89b9136bb4b31767257eb7b2597/airflow/contrib/hooks/sftp_hook.py#L126-L143
[ "def", "describe_directory", "(", "self", ",", "path", ")", ":", "conn", "=", "self", ".", "get_conn", "(", ")", "flist", "=", "conn", ".", "listdir_attr", "(", "path", ")", "files", "=", "{", "}", "for", "f", "in", "flist", ":", "modify", "=", "datetime", ".", "datetime", ".", "fromtimestamp", "(", "f", ".", "st_mtime", ")", ".", "strftime", "(", "'%Y%m%d%H%M%S'", ")", "files", "[", "f", ".", "filename", "]", "=", "{", "'size'", ":", "f", ".", "st_size", ",", "'type'", ":", "'dir'", "if", "stat", ".", "S_ISDIR", "(", "f", ".", "st_mode", ")", "else", "'file'", ",", "'modify'", ":", "modify", "}", "return", "files" ]
b69c686ad8a0c89b9136bb4b31767257eb7b2597
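A usage sketch for the SFTP hook's directory listing; the connection ID and remote path are made up, and it assumes the hook takes the connection ID as ftp_conn_id (its 1.10-era default being 'sftp_default').

from airflow.contrib.hooks.sftp_hook import SFTPHook

hook = SFTPHook(ftp_conn_id="sftp_default")
entries = hook.describe_directory("/upload/incoming")

for name, attrs in entries.items():
    # attrs carries 'size', 'type' ('dir' or 'file') and 'modify' as %Y%m%d%H%M%S
    print(name, attrs["type"], attrs["size"], attrs["modify"])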
test
SFTPHook.list_directory
Returns a list of files on the remote system. :param path: full path to the remote directory to list :type path: str
airflow/contrib/hooks/sftp_hook.py
def list_directory(self, path): """ Returns a list of files on the remote system. :param path: full path to the remote directory to list :type path: str """ conn = self.get_conn() files = conn.listdir(path) return files
def list_directory(self, path): """ Returns a list of files on the remote system. :param path: full path to the remote directory to list :type path: str """ conn = self.get_conn() files = conn.listdir(path) return files
[ "Returns", "a", "list", "of", "files", "on", "the", "remote", "system", ".", ":", "param", "path", ":", "full", "path", "to", "the", "remote", "directory", "to", "list", ":", "type", "path", ":", "str" ]
apache/airflow
python
https://github.com/apache/airflow/blob/b69c686ad8a0c89b9136bb4b31767257eb7b2597/airflow/contrib/hooks/sftp_hook.py#L145-L153
[ "def", "list_directory", "(", "self", ",", "path", ")", ":", "conn", "=", "self", ".", "get_conn", "(", ")", "files", "=", "conn", ".", "listdir", "(", "path", ")", "return", "files" ]
b69c686ad8a0c89b9136bb4b31767257eb7b2597
test
SFTPHook.create_directory
Creates a directory on the remote system. :param path: full path to the remote directory to create :type path: str :param mode: int representation of octal mode for directory
airflow/contrib/hooks/sftp_hook.py
def create_directory(self, path, mode=777): """ Creates a directory on the remote system. :param path: full path to the remote directory to create :type path: str :param mode: int representation of octal mode for directory """ conn = self.get_conn() conn.mkdir(path, mode)
def create_directory(self, path, mode=777): """ Creates a directory on the remote system. :param path: full path to the remote directory to create :type path: str :param mode: int representation of octal mode for directory """ conn = self.get_conn() conn.mkdir(path, mode)
[ "Creates", "a", "directory", "on", "the", "remote", "system", ".", ":", "param", "path", ":", "full", "path", "to", "the", "remote", "directory", "to", "create", ":", "type", "path", ":", "str", ":", "param", "mode", ":", "int", "representation", "of", "octal", "mode", "for", "directory" ]
apache/airflow
python
https://github.com/apache/airflow/blob/b69c686ad8a0c89b9136bb4b31767257eb7b2597/airflow/contrib/hooks/sftp_hook.py#L155-L163
[ "def", "create_directory", "(", "self", ",", "path", ",", "mode", "=", "777", ")", ":", "conn", "=", "self", ".", "get_conn", "(", ")", "conn", ".", "mkdir", "(", "path", ",", "mode", ")" ]
b69c686ad8a0c89b9136bb4b31767257eb7b2597
test
SFTPHook.retrieve_file
Transfers the remote file to a local location. If local_full_path is a string path, the file will be put at that location :param remote_full_path: full path to the remote file :type remote_full_path: str :param local_full_path: full path to the local file :type local_full_path: str
airflow/contrib/hooks/sftp_hook.py
def retrieve_file(self, remote_full_path, local_full_path): """ Transfers the remote file to a local location. If local_full_path is a string path, the file will be put at that location :param remote_full_path: full path to the remote file :type remote_full_path: str :param local_full_path: full path to the local file :type local_full_path: str """ conn = self.get_conn() self.log.info('Retrieving file from FTP: %s', remote_full_path) conn.get(remote_full_path, local_full_path) self.log.info('Finished retrieving file from FTP: %s', remote_full_path)
def retrieve_file(self, remote_full_path, local_full_path): """ Transfers the remote file to a local location. If local_full_path is a string path, the file will be put at that location :param remote_full_path: full path to the remote file :type remote_full_path: str :param local_full_path: full path to the local file :type local_full_path: str """ conn = self.get_conn() self.log.info('Retrieving file from FTP: %s', remote_full_path) conn.get(remote_full_path, local_full_path) self.log.info('Finished retrieving file from FTP: %s', remote_full_path)
[ "Transfers", "the", "remote", "file", "to", "a", "local", "location", ".", "If", "local_full_path", "is", "a", "string", "path", "the", "file", "will", "be", "put", "at", "that", "location", ":", "param", "remote_full_path", ":", "full", "path", "to", "the", "remote", "file", ":", "type", "remote_full_path", ":", "str", ":", "param", "local_full_path", ":", "full", "path", "to", "the", "local", "file", ":", "type", "local_full_path", ":", "str" ]
apache/airflow
python
https://github.com/apache/airflow/blob/b69c686ad8a0c89b9136bb4b31767257eb7b2597/airflow/contrib/hooks/sftp_hook.py#L174-L187
[ "def", "retrieve_file", "(", "self", ",", "remote_full_path", ",", "local_full_path", ")", ":", "conn", "=", "self", ".", "get_conn", "(", ")", "self", ".", "log", ".", "info", "(", "'Retrieving file from FTP: %s'", ",", "remote_full_path", ")", "conn", ".", "get", "(", "remote_full_path", ",", "local_full_path", ")", "self", ".", "log", ".", "info", "(", "'Finished retrieving file from FTP: %s'", ",", "remote_full_path", ")" ]
b69c686ad8a0c89b9136bb4b31767257eb7b2597
test
SFTPHook.store_file
Transfers a local file to the remote location. If local_full_path is a string path, the file will be read from that location :param remote_full_path: full path to the remote file :type remote_full_path: str :param local_full_path: full path to the local file :type local_full_path: str
airflow/contrib/hooks/sftp_hook.py
def store_file(self, remote_full_path, local_full_path): """ Transfers a local file to the remote location. If local_full_path_or_buffer is a string path, the file will be read from that location :param remote_full_path: full path to the remote file :type remote_full_path: str :param local_full_path: full path to the local file :type local_full_path: str """ conn = self.get_conn() conn.put(local_full_path, remote_full_path)
def store_file(self, remote_full_path, local_full_path): """ Transfers a local file to the remote location. If local_full_path_or_buffer is a string path, the file will be read from that location :param remote_full_path: full path to the remote file :type remote_full_path: str :param local_full_path: full path to the local file :type local_full_path: str """ conn = self.get_conn() conn.put(local_full_path, remote_full_path)
[ "Transfers", "a", "local", "file", "to", "the", "remote", "location", ".", "If", "local_full_path_or_buffer", "is", "a", "string", "path", "the", "file", "will", "be", "read", "from", "that", "location", ":", "param", "remote_full_path", ":", "full", "path", "to", "the", "remote", "file", ":", "type", "remote_full_path", ":", "str", ":", "param", "local_full_path", ":", "full", "path", "to", "the", "local", "file", ":", "type", "local_full_path", ":", "str" ]
apache/airflow
python
https://github.com/apache/airflow/blob/b69c686ad8a0c89b9136bb4b31767257eb7b2597/airflow/contrib/hooks/sftp_hook.py#L189-L200
[ "def", "store_file", "(", "self", ",", "remote_full_path", ",", "local_full_path", ")", ":", "conn", "=", "self", ".", "get_conn", "(", ")", "conn", ".", "put", "(", "local_full_path", ",", "remote_full_path", ")" ]
b69c686ad8a0c89b9136bb4b31767257eb7b2597
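The SFTPHook records above (create_directory, retrieve_file, store_file) combine naturally into a round trip; a hedged sketch, assuming the same 'sftp_default' connection and made-up paths:

from airflow.contrib.hooks.sftp_hook import SFTPHook

hook = SFTPHook(ftp_conn_id='sftp_default')
hook.create_directory('/upload/2019-01-01')                              # default mode as in the signature above
hook.store_file('/upload/2019-01-01/data.csv', '/tmp/data.csv')          # local -> remote
hook.retrieve_file('/upload/2019-01-01/data.csv', '/tmp/data_copy.csv')  # remote -> local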
test
ZendeskHook.__handle_rate_limit_exception
Sleep for the time specified in the exception. If not specified, wait for 60 seconds.
airflow/hooks/zendesk_hook.py
def __handle_rate_limit_exception(self, rate_limit_exception): """ Sleep for the time specified in the exception. If not specified, wait for 60 seconds. """ retry_after = int( rate_limit_exception.response.headers.get('Retry-After', 60)) self.log.info( "Hit Zendesk API rate limit. Pausing for %s seconds", retry_after ) time.sleep(retry_after)
def __handle_rate_limit_exception(self, rate_limit_exception): """ Sleep for the time specified in the exception. If not specified, wait for 60 seconds. """ retry_after = int( rate_limit_exception.response.headers.get('Retry-After', 60)) self.log.info( "Hit Zendesk API rate limit. Pausing for %s seconds", retry_after ) time.sleep(retry_after)
[ "Sleep", "for", "the", "time", "specified", "in", "the", "exception", ".", "If", "not", "specified", "wait", "for", "60", "seconds", "." ]
apache/airflow
python
https://github.com/apache/airflow/blob/b69c686ad8a0c89b9136bb4b31767257eb7b2597/airflow/hooks/zendesk_hook.py#L39-L50
[ "def", "__handle_rate_limit_exception", "(", "self", ",", "rate_limit_exception", ")", ":", "retry_after", "=", "int", "(", "rate_limit_exception", ".", "response", ".", "headers", ".", "get", "(", "'Retry-After'", ",", "60", ")", ")", "self", ".", "log", ".", "info", "(", "\"Hit Zendesk API rate limit. Pausing for %s seconds\"", ",", "retry_after", ")", "time", ".", "sleep", "(", "retry_after", ")" ]
b69c686ad8a0c89b9136bb4b31767257eb7b2597
test
ZendeskHook.call
Call Zendesk API and return results :param path: The Zendesk API to call :param query: Query parameters :param get_all_pages: Accumulate results over all pages before returning. Due to strict rate limiting, this can often timeout. Waits for recommended period between tries after a timeout. :param side_loading: Retrieve related records as part of a single request. In order to enable side-loading, add an 'include' query parameter containing a comma-separated list of resources to load. For more information on side-loading see https://developer.zendesk.com/rest_api/docs/core/side_loading
airflow/hooks/zendesk_hook.py
def call(self, path, query=None, get_all_pages=True, side_loading=False): """ Call Zendesk API and return results :param path: The Zendesk API to call :param query: Query parameters :param get_all_pages: Accumulate results over all pages before returning. Due to strict rate limiting, this can often timeout. Waits for recommended period between tries after a timeout. :param side_loading: Retrieve related records as part of a single request. In order to enable side-loading, add an 'include' query parameter containing a comma-separated list of resources to load. For more information on side-loading see https://developer.zendesk.com/rest_api/docs/core/side_loading """ zendesk = self.get_conn() first_request_successful = False while not first_request_successful: try: results = zendesk.call(path, query) first_request_successful = True except RateLimitError as rle: self.__handle_rate_limit_exception(rle) # Find the key with the results keys = [path.split("/")[-1].split(".json")[0]] next_page = results['next_page'] if side_loading: keys += query['include'].split(',') results = {key: results[key] for key in keys} if get_all_pages: while next_page is not None: try: # Need to split because the next page URL has # `github.zendesk...` # in it, but the call function needs it removed. next_url = next_page.split(self.__url)[1] self.log.info("Calling %s", next_url) more_res = zendesk.call(next_url) for key in results: results[key].extend(more_res[key]) if next_page == more_res['next_page']: # Unfortunately zdesk doesn't always throw ZendeskError # when we are done getting all the data. Sometimes the # next just refers to the current set of results. # Hence, need to deal with this special case break else: next_page = more_res['next_page'] except RateLimitError as rle: self.__handle_rate_limit_exception(rle) except ZendeskError as ze: if b"Use a start_time older than 5 minutes" in ze.msg: # We have pretty up to date data break else: raise ze return results
def call(self, path, query=None, get_all_pages=True, side_loading=False): """ Call Zendesk API and return results :param path: The Zendesk API to call :param query: Query parameters :param get_all_pages: Accumulate results over all pages before returning. Due to strict rate limiting, this can often timeout. Waits for recommended period between tries after a timeout. :param side_loading: Retrieve related records as part of a single request. In order to enable side-loading, add an 'include' query parameter containing a comma-separated list of resources to load. For more information on side-loading see https://developer.zendesk.com/rest_api/docs/core/side_loading """ zendesk = self.get_conn() first_request_successful = False while not first_request_successful: try: results = zendesk.call(path, query) first_request_successful = True except RateLimitError as rle: self.__handle_rate_limit_exception(rle) # Find the key with the results keys = [path.split("/")[-1].split(".json")[0]] next_page = results['next_page'] if side_loading: keys += query['include'].split(',') results = {key: results[key] for key in keys} if get_all_pages: while next_page is not None: try: # Need to split because the next page URL has # `github.zendesk...` # in it, but the call function needs it removed. next_url = next_page.split(self.__url)[1] self.log.info("Calling %s", next_url) more_res = zendesk.call(next_url) for key in results: results[key].extend(more_res[key]) if next_page == more_res['next_page']: # Unfortunately zdesk doesn't always throw ZendeskError # when we are done getting all the data. Sometimes the # next just refers to the current set of results. # Hence, need to deal with this special case break else: next_page = more_res['next_page'] except RateLimitError as rle: self.__handle_rate_limit_exception(rle) except ZendeskError as ze: if b"Use a start_time older than 5 minutes" in ze.msg: # We have pretty up to date data break else: raise ze return results
[ "Call", "Zendesk", "API", "and", "return", "results" ]
apache/airflow
python
https://github.com/apache/airflow/blob/b69c686ad8a0c89b9136bb4b31767257eb7b2597/airflow/hooks/zendesk_hook.py#L52-L112
[ "def", "call", "(", "self", ",", "path", ",", "query", "=", "None", ",", "get_all_pages", "=", "True", ",", "side_loading", "=", "False", ")", ":", "zendesk", "=", "self", ".", "get_conn", "(", ")", "first_request_successful", "=", "False", "while", "not", "first_request_successful", ":", "try", ":", "results", "=", "zendesk", ".", "call", "(", "path", ",", "query", ")", "first_request_successful", "=", "True", "except", "RateLimitError", "as", "rle", ":", "self", ".", "__handle_rate_limit_exception", "(", "rle", ")", "# Find the key with the results", "keys", "=", "[", "path", ".", "split", "(", "\"/\"", ")", "[", "-", "1", "]", ".", "split", "(", "\".json\"", ")", "[", "0", "]", "]", "next_page", "=", "results", "[", "'next_page'", "]", "if", "side_loading", ":", "keys", "+=", "query", "[", "'include'", "]", ".", "split", "(", "','", ")", "results", "=", "{", "key", ":", "results", "[", "key", "]", "for", "key", "in", "keys", "}", "if", "get_all_pages", ":", "while", "next_page", "is", "not", "None", ":", "try", ":", "# Need to split because the next page URL has", "# `github.zendesk...`", "# in it, but the call function needs it removed.", "next_url", "=", "next_page", ".", "split", "(", "self", ".", "__url", ")", "[", "1", "]", "self", ".", "log", ".", "info", "(", "\"Calling %s\"", ",", "next_url", ")", "more_res", "=", "zendesk", ".", "call", "(", "next_url", ")", "for", "key", "in", "results", ":", "results", "[", "key", "]", ".", "extend", "(", "more_res", "[", "key", "]", ")", "if", "next_page", "==", "more_res", "[", "'next_page'", "]", ":", "# Unfortunately zdesk doesn't always throw ZendeskError", "# when we are done getting all the data. Sometimes the", "# next just refers to the current set of results.", "# Hence, need to deal with this special case", "break", "else", ":", "next_page", "=", "more_res", "[", "'next_page'", "]", "except", "RateLimitError", "as", "rle", ":", "self", ".", "__handle_rate_limit_exception", "(", "rle", ")", "except", "ZendeskError", "as", "ze", ":", "if", "b\"Use a start_time older than 5 minutes\"", "in", "ze", ".", "msg", ":", "# We have pretty up to date data", "break", "else", ":", "raise", "ze", "return", "results" ]
b69c686ad8a0c89b9136bb4b31767257eb7b2597
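A hedged usage sketch for ZendeskHook.call with side-loading; the connection id, endpoint and resource names are assumptions, but the result keys follow from the path-splitting logic shown in the code above:

from airflow.hooks.zendesk_hook import ZendeskHook

hook = ZendeskHook(zendesk_conn_id='zendesk_default')
results = hook.call(
    '/api/v2/tickets.json',
    query={'include': 'users'},   # enables side-loading of the related users
    get_all_pages=True,
    side_loading=True,
)
tickets = results['tickets']      # key derived from the last path segment ('tickets.json' -> 'tickets')
users = results['users']          # side-loaded resource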
test
AwsGlueCatalogHook.get_partitions
Retrieves the partition values for a table. :param database_name: The name of the catalog database where the partitions reside. :type database_name: str :param table_name: The name of the partitions' table. :type table_name: str :param expression: An expression filtering the partitions to be returned. Please see official AWS documentation for further information. https://docs.aws.amazon.com/glue/latest/dg/aws-glue-api-catalog-partitions.html#aws-glue-api-catalog-partitions-GetPartitions :type expression: str :param page_size: pagination size :type page_size: int :param max_items: maximum items to return :type max_items: int :return: set of partition values where each value is a tuple since a partition may be composed of multiple columns. For example: ``{('2018-01-01','1'), ('2018-01-01','2')}``
airflow/contrib/hooks/aws_glue_catalog_hook.py
def get_partitions(self, database_name, table_name, expression='', page_size=None, max_items=None): """ Retrieves the partition values for a table. :param database_name: The name of the catalog database where the partitions reside. :type database_name: str :param table_name: The name of the partitions' table. :type table_name: str :param expression: An expression filtering the partitions to be returned. Please see official AWS documentation for further information. https://docs.aws.amazon.com/glue/latest/dg/aws-glue-api-catalog-partitions.html#aws-glue-api-catalog-partitions-GetPartitions :type expression: str :param page_size: pagination size :type page_size: int :param max_items: maximum items to return :type max_items: int :return: set of partition values where each value is a tuple since a partition may be composed of multiple columns. For example: ``{('2018-01-01','1'), ('2018-01-01','2')}`` """ config = { 'PageSize': page_size, 'MaxItems': max_items, } paginator = self.get_conn().get_paginator('get_partitions') response = paginator.paginate( DatabaseName=database_name, TableName=table_name, Expression=expression, PaginationConfig=config ) partitions = set() for page in response: for p in page['Partitions']: partitions.add(tuple(p['Values'])) return partitions
def get_partitions(self, database_name, table_name, expression='', page_size=None, max_items=None): """ Retrieves the partition values for a table. :param database_name: The name of the catalog database where the partitions reside. :type database_name: str :param table_name: The name of the partitions' table. :type table_name: str :param expression: An expression filtering the partitions to be returned. Please see official AWS documentation for further information. https://docs.aws.amazon.com/glue/latest/dg/aws-glue-api-catalog-partitions.html#aws-glue-api-catalog-partitions-GetPartitions :type expression: str :param page_size: pagination size :type page_size: int :param max_items: maximum items to return :type max_items: int :return: set of partition values where each value is a tuple since a partition may be composed of multiple columns. For example: ``{('2018-01-01','1'), ('2018-01-01','2')}`` """ config = { 'PageSize': page_size, 'MaxItems': max_items, } paginator = self.get_conn().get_paginator('get_partitions') response = paginator.paginate( DatabaseName=database_name, TableName=table_name, Expression=expression, PaginationConfig=config ) partitions = set() for page in response: for p in page['Partitions']: partitions.add(tuple(p['Values'])) return partitions
[ "Retrieves", "the", "partition", "values", "for", "a", "table", "." ]
apache/airflow
python
https://github.com/apache/airflow/blob/b69c686ad8a0c89b9136bb4b31767257eb7b2597/airflow/contrib/hooks/aws_glue_catalog_hook.py#L50-L93
[ "def", "get_partitions", "(", "self", ",", "database_name", ",", "table_name", ",", "expression", "=", "''", ",", "page_size", "=", "None", ",", "max_items", "=", "None", ")", ":", "config", "=", "{", "'PageSize'", ":", "page_size", ",", "'MaxItems'", ":", "max_items", ",", "}", "paginator", "=", "self", ".", "get_conn", "(", ")", ".", "get_paginator", "(", "'get_partitions'", ")", "response", "=", "paginator", ".", "paginate", "(", "DatabaseName", "=", "database_name", ",", "TableName", "=", "table_name", ",", "Expression", "=", "expression", ",", "PaginationConfig", "=", "config", ")", "partitions", "=", "set", "(", ")", "for", "page", "in", "response", ":", "for", "p", "in", "page", "[", "'Partitions'", "]", ":", "partitions", ".", "add", "(", "tuple", "(", "p", "[", "'Values'", "]", ")", ")", "return", "partitions" ]
b69c686ad8a0c89b9136bb4b31767257eb7b2597
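A minimal sketch of get_partitions, assuming an 'aws_default' connection and the example database/table used by the neighbouring records:

from airflow.contrib.hooks.aws_glue_catalog_hook import AwsGlueCatalogHook

hook = AwsGlueCatalogHook(aws_conn_id='aws_default', region_name='us-east-1')
partitions = hook.get_partitions(
    database_name='airflow',
    table_name='static_babynames_partitioned',
    expression="ds='2015-01-01'",
    page_size=100,
)
# e.g. {('2015-01-01',)} -- one tuple per partition, one element per partition column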
test
AwsGlueCatalogHook.check_for_partition
Checks whether a partition exists :param database_name: Name of hive database (schema) @table belongs to :type database_name: str :param table_name: Name of hive table @partition belongs to :type table_name: str :param expression: Expression that matches the partitions to check for (e.g. `a = 'b' AND c = 'd'`) :type expression: str :rtype: bool >>> hook = AwsGlueCatalogHook() >>> t = 'static_babynames_partitioned' >>> hook.check_for_partition('airflow', t, "ds='2015-01-01'") True
airflow/contrib/hooks/aws_glue_catalog_hook.py
def check_for_partition(self, database_name, table_name, expression): """ Checks whether a partition exists :param database_name: Name of hive database (schema) @table belongs to :type database_name: str :param table_name: Name of hive table @partition belongs to :type table_name: str :expression: Expression that matches the partitions to check for (eg `a = 'b' AND c = 'd'`) :type expression: str :rtype: bool >>> hook = AwsGlueCatalogHook() >>> t = 'static_babynames_partitioned' >>> hook.check_for_partition('airflow', t, "ds='2015-01-01'") True """ partitions = self.get_partitions(database_name, table_name, expression, max_items=1) if partitions: return True else: return False
def check_for_partition(self, database_name, table_name, expression): """ Checks whether a partition exists :param database_name: Name of hive database (schema) @table belongs to :type database_name: str :param table_name: Name of hive table @partition belongs to :type table_name: str :expression: Expression that matches the partitions to check for (eg `a = 'b' AND c = 'd'`) :type expression: str :rtype: bool >>> hook = AwsGlueCatalogHook() >>> t = 'static_babynames_partitioned' >>> hook.check_for_partition('airflow', t, "ds='2015-01-01'") True """ partitions = self.get_partitions(database_name, table_name, expression, max_items=1) if partitions: return True else: return False
[ "Checks", "whether", "a", "partition", "exists" ]
apache/airflow
python
https://github.com/apache/airflow/blob/b69c686ad8a0c89b9136bb4b31767257eb7b2597/airflow/contrib/hooks/aws_glue_catalog_hook.py#L95-L118
[ "def", "check_for_partition", "(", "self", ",", "database_name", ",", "table_name", ",", "expression", ")", ":", "partitions", "=", "self", ".", "get_partitions", "(", "database_name", ",", "table_name", ",", "expression", ",", "max_items", "=", "1", ")", "if", "partitions", ":", "return", "True", "else", ":", "return", "False" ]
b69c686ad8a0c89b9136bb4b31767257eb7b2597
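check_for_partition can gate downstream work on partition availability; a short sketch under the same assumptions as above:

from airflow.contrib.hooks.aws_glue_catalog_hook import AwsGlueCatalogHook

hook = AwsGlueCatalogHook(aws_conn_id='aws_default')
if hook.check_for_partition('airflow', 'static_babynames_partitioned', "ds='2015-01-01'"):
    print('partition is present, safe to run the downstream load')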
test
AwsGlueCatalogHook.get_table
Get the information of the table :param database_name: Name of hive database (schema) @table belongs to :type database_name: str :param table_name: Name of hive table :type table_name: str :rtype: dict >>> hook = AwsGlueCatalogHook() >>> r = hook.get_table('db', 'table_foo') >>> r['Name'] = 'table_foo'
airflow/contrib/hooks/aws_glue_catalog_hook.py
def get_table(self, database_name, table_name): """ Get the information of the table :param database_name: Name of hive database (schema) @table belongs to :type database_name: str :param table_name: Name of hive table :type table_name: str :rtype: dict >>> hook = AwsGlueCatalogHook() >>> r = hook.get_table('db', 'table_foo') >>> r['Name'] = 'table_foo' """ result = self.get_conn().get_table(DatabaseName=database_name, Name=table_name) return result['Table']
def get_table(self, database_name, table_name): """ Get the information of the table :param database_name: Name of hive database (schema) @table belongs to :type database_name: str :param table_name: Name of hive table :type table_name: str :rtype: dict >>> hook = AwsGlueCatalogHook() >>> r = hook.get_table('db', 'table_foo') >>> r['Name'] = 'table_foo' """ result = self.get_conn().get_table(DatabaseName=database_name, Name=table_name) return result['Table']
[ "Get", "the", "information", "of", "the", "table" ]
apache/airflow
python
https://github.com/apache/airflow/blob/b69c686ad8a0c89b9136bb4b31767257eb7b2597/airflow/contrib/hooks/aws_glue_catalog_hook.py#L120-L137
[ "def", "get_table", "(", "self", ",", "database_name", ",", "table_name", ")", ":", "result", "=", "self", ".", "get_conn", "(", ")", ".", "get_table", "(", "DatabaseName", "=", "database_name", ",", "Name", "=", "table_name", ")", "return", "result", "[", "'Table'", "]" ]
b69c686ad8a0c89b9136bb4b31767257eb7b2597
test
AwsGlueCatalogHook.get_table_location
Get the physical location of the table :param database_name: Name of hive database (schema) @table belongs to :type database_name: str :param table_name: Name of hive table :type table_name: str :return: str
airflow/contrib/hooks/aws_glue_catalog_hook.py
def get_table_location(self, database_name, table_name): """ Get the physical location of the table :param database_name: Name of hive database (schema) @table belongs to :type database_name: str :param table_name: Name of hive table :type table_name: str :return: str """ table = self.get_table(database_name, table_name) return table['StorageDescriptor']['Location']
def get_table_location(self, database_name, table_name): """ Get the physical location of the table :param database_name: Name of hive database (schema) @table belongs to :type database_name: str :param table_name: Name of hive table :type table_name: str :return: str """ table = self.get_table(database_name, table_name) return table['StorageDescriptor']['Location']
[ "Get", "the", "physical", "location", "of", "the", "table" ]
apache/airflow
python
https://github.com/apache/airflow/blob/b69c686ad8a0c89b9136bb4b31767257eb7b2597/airflow/contrib/hooks/aws_glue_catalog_hook.py#L139-L152
[ "def", "get_table_location", "(", "self", ",", "database_name", ",", "table_name", ")", ":", "table", "=", "self", ".", "get_table", "(", "database_name", ",", "table_name", ")", "return", "table", "[", "'StorageDescriptor'", "]", "[", "'Location'", "]" ]
b69c686ad8a0c89b9136bb4b31767257eb7b2597
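A hedged sketch combining get_table and get_table_location; the database, table and S3 location are illustrative:

from airflow.contrib.hooks.aws_glue_catalog_hook import AwsGlueCatalogHook

hook = AwsGlueCatalogHook(aws_conn_id='aws_default')
table = hook.get_table('airflow', 'static_babynames_partitioned')
print(table['Name'])   # 'static_babynames_partitioned'
location = hook.get_table_location('airflow', 'static_babynames_partitioned')
print(location)        # e.g. 's3://my-bucket/static_babynames_partitioned/'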
test
RedshiftHook.cluster_status
Return status of a cluster :param cluster_identifier: unique identifier of a cluster :type cluster_identifier: str
airflow/contrib/hooks/redshift_hook.py
def cluster_status(self, cluster_identifier): """ Return status of a cluster :param cluster_identifier: unique identifier of a cluster :type cluster_identifier: str """ conn = self.get_conn() try: response = conn.describe_clusters( ClusterIdentifier=cluster_identifier)['Clusters'] return response[0]['ClusterStatus'] if response else None except conn.exceptions.ClusterNotFoundFault: return 'cluster_not_found'
def cluster_status(self, cluster_identifier): """ Return status of a cluster :param cluster_identifier: unique identifier of a cluster :type cluster_identifier: str """ conn = self.get_conn() try: response = conn.describe_clusters( ClusterIdentifier=cluster_identifier)['Clusters'] return response[0]['ClusterStatus'] if response else None except conn.exceptions.ClusterNotFoundFault: return 'cluster_not_found'
[ "Return", "status", "of", "a", "cluster" ]
apache/airflow
python
https://github.com/apache/airflow/blob/b69c686ad8a0c89b9136bb4b31767257eb7b2597/airflow/contrib/hooks/redshift_hook.py#L31-L44
[ "def", "cluster_status", "(", "self", ",", "cluster_identifier", ")", ":", "conn", "=", "self", ".", "get_conn", "(", ")", "try", ":", "response", "=", "conn", ".", "describe_clusters", "(", "ClusterIdentifier", "=", "cluster_identifier", ")", "[", "'Clusters'", "]", "return", "response", "[", "0", "]", "[", "'ClusterStatus'", "]", "if", "response", "else", "None", "except", "conn", ".", "exceptions", ".", "ClusterNotFoundFault", ":", "return", "'cluster_not_found'" ]
b69c686ad8a0c89b9136bb4b31767257eb7b2597
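A small sketch of cluster_status, assuming an 'aws_default' connection and a made-up cluster identifier; the status strings come from the Redshift API, except 'cluster_not_found', which the hook returns itself:

from airflow.contrib.hooks.redshift_hook import RedshiftHook

hook = RedshiftHook(aws_conn_id='aws_default')
status = hook.cluster_status('my-redshift-cluster')
if status == 'cluster_not_found':
    print('nothing to check, the cluster does not exist')
elif status != 'available':
    print('cluster exists but is not ready yet: %s' % status)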
test
RedshiftHook.delete_cluster
Delete a cluster and optionally create a snapshot :param cluster_identifier: unique identifier of a cluster :type cluster_identifier: str :param skip_final_cluster_snapshot: determines cluster snapshot creation :type skip_final_cluster_snapshot: bool :param final_cluster_snapshot_identifier: name of final cluster snapshot :type final_cluster_snapshot_identifier: str
airflow/contrib/hooks/redshift_hook.py
def delete_cluster( self, cluster_identifier, skip_final_cluster_snapshot=True, final_cluster_snapshot_identifier=''): """ Delete a cluster and optionally create a snapshot :param cluster_identifier: unique identifier of a cluster :type cluster_identifier: str :param skip_final_cluster_snapshot: determines cluster snapshot creation :type skip_final_cluster_snapshot: bool :param final_cluster_snapshot_identifier: name of final cluster snapshot :type final_cluster_snapshot_identifier: str """ response = self.get_conn().delete_cluster( ClusterIdentifier=cluster_identifier, SkipFinalClusterSnapshot=skip_final_cluster_snapshot, FinalClusterSnapshotIdentifier=final_cluster_snapshot_identifier ) return response['Cluster'] if response['Cluster'] else None
def delete_cluster( self, cluster_identifier, skip_final_cluster_snapshot=True, final_cluster_snapshot_identifier=''): """ Delete a cluster and optionally create a snapshot :param cluster_identifier: unique identifier of a cluster :type cluster_identifier: str :param skip_final_cluster_snapshot: determines cluster snapshot creation :type skip_final_cluster_snapshot: bool :param final_cluster_snapshot_identifier: name of final cluster snapshot :type final_cluster_snapshot_identifier: str """ response = self.get_conn().delete_cluster( ClusterIdentifier=cluster_identifier, SkipFinalClusterSnapshot=skip_final_cluster_snapshot, FinalClusterSnapshotIdentifier=final_cluster_snapshot_identifier ) return response['Cluster'] if response['Cluster'] else None
[ "Delete", "a", "cluster", "and", "optionally", "create", "a", "snapshot" ]
apache/airflow
python
https://github.com/apache/airflow/blob/b69c686ad8a0c89b9136bb4b31767257eb7b2597/airflow/contrib/hooks/redshift_hook.py#L46-L66
[ "def", "delete_cluster", "(", "self", ",", "cluster_identifier", ",", "skip_final_cluster_snapshot", "=", "True", ",", "final_cluster_snapshot_identifier", "=", "''", ")", ":", "response", "=", "self", ".", "get_conn", "(", ")", ".", "delete_cluster", "(", "ClusterIdentifier", "=", "cluster_identifier", ",", "SkipFinalClusterSnapshot", "=", "skip_final_cluster_snapshot", ",", "FinalClusterSnapshotIdentifier", "=", "final_cluster_snapshot_identifier", ")", "return", "response", "[", "'Cluster'", "]", "if", "response", "[", "'Cluster'", "]", "else", "None" ]
b69c686ad8a0c89b9136bb4b31767257eb7b2597
test
RedshiftHook.describe_cluster_snapshots
Gets a list of snapshots for a cluster :param cluster_identifier: unique identifier of a cluster :type cluster_identifier: str
airflow/contrib/hooks/redshift_hook.py
def describe_cluster_snapshots(self, cluster_identifier): """ Gets a list of snapshots for a cluster :param cluster_identifier: unique identifier of a cluster :type cluster_identifier: str """ response = self.get_conn().describe_cluster_snapshots( ClusterIdentifier=cluster_identifier ) if 'Snapshots' not in response: return None snapshots = response['Snapshots'] snapshots = filter(lambda x: x['Status'], snapshots) snapshots.sort(key=lambda x: x['SnapshotCreateTime'], reverse=True) return snapshots
def describe_cluster_snapshots(self, cluster_identifier): """ Gets a list of snapshots for a cluster :param cluster_identifier: unique identifier of a cluster :type cluster_identifier: str """ response = self.get_conn().describe_cluster_snapshots( ClusterIdentifier=cluster_identifier ) if 'Snapshots' not in response: return None snapshots = response['Snapshots'] snapshots = filter(lambda x: x['Status'], snapshots) snapshots.sort(key=lambda x: x['SnapshotCreateTime'], reverse=True) return snapshots
[ "Gets", "a", "list", "of", "snapshots", "for", "a", "cluster" ]
apache/airflow
python
https://github.com/apache/airflow/blob/b69c686ad8a0c89b9136bb4b31767257eb7b2597/airflow/contrib/hooks/redshift_hook.py#L68-L83
[ "def", "describe_cluster_snapshots", "(", "self", ",", "cluster_identifier", ")", ":", "response", "=", "self", ".", "get_conn", "(", ")", ".", "describe_cluster_snapshots", "(", "ClusterIdentifier", "=", "cluster_identifier", ")", "if", "'Snapshots'", "not", "in", "response", ":", "return", "None", "snapshots", "=", "response", "[", "'Snapshots'", "]", "snapshots", "=", "filter", "(", "lambda", "x", ":", "x", "[", "'Status'", "]", ",", "snapshots", ")", "snapshots", ".", "sort", "(", "key", "=", "lambda", "x", ":", "x", "[", "'SnapshotCreateTime'", "]", ",", "reverse", "=", "True", ")", "return", "snapshots" ]
b69c686ad8a0c89b9136bb4b31767257eb7b2597
test
RedshiftHook.restore_from_cluster_snapshot
Restores a cluster from its snapshot :param cluster_identifier: unique identifier of a cluster :type cluster_identifier: str :param snapshot_identifier: unique identifier for a snapshot of a cluster :type snapshot_identifier: str
airflow/contrib/hooks/redshift_hook.py
def restore_from_cluster_snapshot(self, cluster_identifier, snapshot_identifier): """ Restores a cluster from its snapshot :param cluster_identifier: unique identifier of a cluster :type cluster_identifier: str :param snapshot_identifier: unique identifier for a snapshot of a cluster :type snapshot_identifier: str """ response = self.get_conn().restore_from_cluster_snapshot( ClusterIdentifier=cluster_identifier, SnapshotIdentifier=snapshot_identifier ) return response['Cluster'] if response['Cluster'] else None
def restore_from_cluster_snapshot(self, cluster_identifier, snapshot_identifier): """ Restores a cluster from its snapshot :param cluster_identifier: unique identifier of a cluster :type cluster_identifier: str :param snapshot_identifier: unique identifier for a snapshot of a cluster :type snapshot_identifier: str """ response = self.get_conn().restore_from_cluster_snapshot( ClusterIdentifier=cluster_identifier, SnapshotIdentifier=snapshot_identifier ) return response['Cluster'] if response['Cluster'] else None
[ "Restores", "a", "cluster", "from", "its", "snapshot" ]
apache/airflow
python
https://github.com/apache/airflow/blob/b69c686ad8a0c89b9136bb4b31767257eb7b2597/airflow/contrib/hooks/redshift_hook.py#L85-L98
[ "def", "restore_from_cluster_snapshot", "(", "self", ",", "cluster_identifier", ",", "snapshot_identifier", ")", ":", "response", "=", "self", ".", "get_conn", "(", ")", ".", "restore_from_cluster_snapshot", "(", "ClusterIdentifier", "=", "cluster_identifier", ",", "SnapshotIdentifier", "=", "snapshot_identifier", ")", "return", "response", "[", "'Cluster'", "]", "if", "response", "[", "'Cluster'", "]", "else", "None" ]
b69c686ad8a0c89b9136bb4b31767257eb7b2597
test
RedshiftHook.create_cluster_snapshot
Creates a snapshot of a cluster :param snapshot_identifier: unique identifier for a snapshot of a cluster :type snapshot_identifier: str :param cluster_identifier: unique identifier of a cluster :type cluster_identifier: str
airflow/contrib/hooks/redshift_hook.py
def create_cluster_snapshot(self, snapshot_identifier, cluster_identifier): """ Creates a snapshot of a cluster :param snapshot_identifier: unique identifier for a snapshot of a cluster :type snapshot_identifier: str :param cluster_identifier: unique identifier of a cluster :type cluster_identifier: str """ response = self.get_conn().create_cluster_snapshot( SnapshotIdentifier=snapshot_identifier, ClusterIdentifier=cluster_identifier, ) return response['Snapshot'] if response['Snapshot'] else None
def create_cluster_snapshot(self, snapshot_identifier, cluster_identifier): """ Creates a snapshot of a cluster :param snapshot_identifier: unique identifier for a snapshot of a cluster :type snapshot_identifier: str :param cluster_identifier: unique identifier of a cluster :type cluster_identifier: str """ response = self.get_conn().create_cluster_snapshot( SnapshotIdentifier=snapshot_identifier, ClusterIdentifier=cluster_identifier, ) return response['Snapshot'] if response['Snapshot'] else None
[ "Creates", "a", "snapshot", "of", "a", "cluster" ]
apache/airflow
python
https://github.com/apache/airflow/blob/b69c686ad8a0c89b9136bb4b31767257eb7b2597/airflow/contrib/hooks/redshift_hook.py#L100-L113
[ "def", "create_cluster_snapshot", "(", "self", ",", "snapshot_identifier", ",", "cluster_identifier", ")", ":", "response", "=", "self", ".", "get_conn", "(", ")", ".", "create_cluster_snapshot", "(", "SnapshotIdentifier", "=", "snapshot_identifier", ",", "ClusterIdentifier", "=", "cluster_identifier", ",", ")", "return", "response", "[", "'Snapshot'", "]", "if", "response", "[", "'Snapshot'", "]", "else", "None" ]
b69c686ad8a0c89b9136bb4b31767257eb7b2597
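A hedged sketch tying together create_cluster_snapshot, restore_from_cluster_snapshot and delete_cluster; all identifiers below are made up:

from airflow.contrib.hooks.redshift_hook import RedshiftHook

hook = RedshiftHook(aws_conn_id='aws_default')

# take a snapshot of the existing cluster
hook.create_cluster_snapshot('nightly-2019-01-01', 'my-redshift-cluster')

# restore that snapshot into a new cluster, then drop the old one without a final snapshot
hook.restore_from_cluster_snapshot('my-redshift-cluster-restored', 'nightly-2019-01-01')
hook.delete_cluster('my-redshift-cluster', skip_final_cluster_snapshot=True)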
test
SlackAPIOperator.execute
SlackAPIOperator calls will not fail even if the call is unsuccessful. They should not prevent a DAG from completing successfully
airflow/operators/slack_operator.py
def execute(self, **kwargs): """ SlackAPIOperator calls will not fail even if the call is not unsuccessful. It should not prevent a DAG from completing in success """ if not self.api_params: self.construct_api_call_params() slack = SlackHook(token=self.token, slack_conn_id=self.slack_conn_id) slack.call(self.method, self.api_params)
def execute(self, **kwargs): """ SlackAPIOperator calls will not fail even if the call is not unsuccessful. It should not prevent a DAG from completing in success """ if not self.api_params: self.construct_api_call_params() slack = SlackHook(token=self.token, slack_conn_id=self.slack_conn_id) slack.call(self.method, self.api_params)
[ "SlackAPIOperator", "calls", "will", "not", "fail", "even", "if", "the", "call", "is", "not", "unsuccessful", ".", "It", "should", "not", "prevent", "a", "DAG", "from", "completing", "in", "success" ]
apache/airflow
python
https://github.com/apache/airflow/blob/b69c686ad8a0c89b9136bb4b31767257eb7b2597/airflow/operators/slack_operator.py#L79-L87
[ "def", "execute", "(", "self", ",", "*", "*", "kwargs", ")", ":", "if", "not", "self", ".", "api_params", ":", "self", ".", "construct_api_call_params", "(", ")", "slack", "=", "SlackHook", "(", "token", "=", "self", ".", "token", ",", "slack_conn_id", "=", "self", ".", "slack_conn_id", ")", "slack", ".", "call", "(", "self", ".", "method", ",", "self", ".", "api_params", ")" ]
b69c686ad8a0c89b9136bb4b31767257eb7b2597
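SlackAPIOperator.execute expects api_params to be filled in, typically by a subclass overriding construct_api_call_params; the subclass name, channel and connection id below are hypothetical assumptions:

from airflow.operators.slack_operator import SlackAPIOperator

class SlackAlertOperator(SlackAPIOperator):
    def construct_api_call_params(self):
        # build api_params lazily, right before the call is made in execute()
        self.api_params = {'channel': '#alerts', 'text': 'DAG finished'}

notify = SlackAlertOperator(
    task_id='notify_slack',
    slack_conn_id='slack_default',
    method='chat.postMessage',
)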
test
PodGenerator.add_volume
Args: volume (Volume):
airflow/contrib/kubernetes/pod_generator.py
def add_volume(self, volume): """ Args: volume (Volume): """ self._add_volume(name=volume.name, configs=volume.configs)
def add_volume(self, volume): """ Args: volume (Volume): """ self._add_volume(name=volume.name, configs=volume.configs)
[ "Args", ":", "volume", "(", "Volume", ")", ":" ]
apache/airflow
python
https://github.com/apache/airflow/blob/b69c686ad8a0c89b9136bb4b31767257eb7b2597/airflow/contrib/kubernetes/pod_generator.py#L67-L73
[ "def", "add_volume", "(", "self", ",", "volume", ")", ":", "self", ".", "_add_volume", "(", "name", "=", "volume", ".", "name", ",", "configs", "=", "volume", ".", "configs", ")" ]
b69c686ad8a0c89b9136bb4b31767257eb7b2597
test
PodGenerator.add_mount
Args: volume_mount (VolumeMount):
airflow/contrib/kubernetes/pod_generator.py
def add_mount(self, volume_mount): """ Args: volume_mount (VolumeMount): """ self._add_mount( name=volume_mount.name, mount_path=volume_mount.mount_path, sub_path=volume_mount.sub_path, read_only=volume_mount.read_only )
def add_mount(self, volume_mount): """ Args: volume_mount (VolumeMount): """ self._add_mount( name=volume_mount.name, mount_path=volume_mount.mount_path, sub_path=volume_mount.sub_path, read_only=volume_mount.read_only )
[ "Args", ":", "volume_mount", "(", "VolumeMount", ")", ":" ]
apache/airflow
python
https://github.com/apache/airflow/blob/b69c686ad8a0c89b9136bb4b31767257eb7b2597/airflow/contrib/kubernetes/pod_generator.py#L124-L135
[ "def", "add_mount", "(", "self", ",", "volume_mount", ")", ":", "self", ".", "_add_mount", "(", "name", "=", "volume_mount", ".", "name", ",", "mount_path", "=", "volume_mount", ".", "mount_path", ",", "sub_path", "=", "volume_mount", ".", "sub_path", ",", "read_only", "=", "volume_mount", ".", "read_only", ")" ]
b69c686ad8a0c89b9136bb4b31767257eb7b2597
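A hedged sketch of the two PodGenerator records above, assuming the Volume and VolumeMount classes from airflow.contrib.kubernetes and a made-up persistent volume claim:

from airflow.contrib.kubernetes.pod_generator import PodGenerator
from airflow.contrib.kubernetes.volume import Volume
from airflow.contrib.kubernetes.volume_mount import VolumeMount

volume = Volume(name='data-volume',
                configs={'persistentVolumeClaim': {'claimName': 'data-pvc'}})
mount = VolumeMount(name='data-volume',
                    mount_path='/data',
                    sub_path=None,
                    read_only=False)

generator = PodGenerator()
generator.add_volume(volume)
generator.add_mount(mount)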
test
EmrHook.create_job_flow
Creates a job flow using the config from the EMR connection. The keys of the connection's json extra hash may be arguments of the boto3 run_job_flow method. Overrides for this config may be passed as the job_flow_overrides.
airflow/contrib/hooks/emr_hook.py
def create_job_flow(self, job_flow_overrides): """ Creates a job flow using the config from the EMR connection. Keys of the json extra hash may have the arguments of the boto3 run_job_flow method. Overrides for this config may be passed as the job_flow_overrides. """ if not self.emr_conn_id: raise AirflowException('emr_conn_id must be present to use create_job_flow') emr_conn = self.get_connection(self.emr_conn_id) config = emr_conn.extra_dejson.copy() config.update(job_flow_overrides) response = self.get_conn().run_job_flow(**config) return response
def create_job_flow(self, job_flow_overrides): """ Creates a job flow using the config from the EMR connection. Keys of the json extra hash may have the arguments of the boto3 run_job_flow method. Overrides for this config may be passed as the job_flow_overrides. """ if not self.emr_conn_id: raise AirflowException('emr_conn_id must be present to use create_job_flow') emr_conn = self.get_connection(self.emr_conn_id) config = emr_conn.extra_dejson.copy() config.update(job_flow_overrides) response = self.get_conn().run_job_flow(**config) return response
[ "Creates", "a", "job", "flow", "using", "the", "config", "from", "the", "EMR", "connection", ".", "Keys", "of", "the", "json", "extra", "hash", "may", "have", "the", "arguments", "of", "the", "boto3", "run_job_flow", "method", ".", "Overrides", "for", "this", "config", "may", "be", "passed", "as", "the", "job_flow_overrides", "." ]
apache/airflow
python
https://github.com/apache/airflow/blob/b69c686ad8a0c89b9136bb4b31767257eb7b2597/airflow/contrib/hooks/emr_hook.py#L39-L57
[ "def", "create_job_flow", "(", "self", ",", "job_flow_overrides", ")", ":", "if", "not", "self", ".", "emr_conn_id", ":", "raise", "AirflowException", "(", "'emr_conn_id must be present to use create_job_flow'", ")", "emr_conn", "=", "self", ".", "get_connection", "(", "self", ".", "emr_conn_id", ")", "config", "=", "emr_conn", ".", "extra_dejson", ".", "copy", "(", ")", "config", ".", "update", "(", "job_flow_overrides", ")", "response", "=", "self", ".", "get_conn", "(", ")", ".", "run_job_flow", "(", "*", "*", "config", ")", "return", "response" ]
b69c686ad8a0c89b9136bb4b31767257eb7b2597
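A hedged sketch of create_job_flow; the connection ids and the override keys (which mirror boto3's run_job_flow arguments) are illustrative:

from airflow.contrib.hooks.emr_hook import EmrHook

hook = EmrHook(emr_conn_id='emr_default', aws_conn_id='aws_default')
response = hook.create_job_flow({
    'Name': 'nightly-spark-job',
    'ReleaseLabel': 'emr-5.20.0',
    'Instances': {
        'InstanceCount': 3,
        'KeepJobFlowAliveWhenNoSteps': False,
    },
})
job_flow_id = response['JobFlowId']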
test
HdfsSensor.filter_for_filesize
Will test the filepath result and check whether its size is at least self.filesize :param result: a list of dicts returned by Snakebite ls :param size: the file size in MB a file should be at least to trigger True :return: the list of dicts whose files match the size criteria
airflow/sensors/hdfs_sensor.py
def filter_for_filesize(result, size=None): """ Will test the filepath result and test if its size is at least self.filesize :param result: a list of dicts returned by Snakebite ls :param size: the file size in MB a file should be at least to trigger True :return: (bool) depending on the matching criteria """ if size: log = LoggingMixin().log log.debug( 'Filtering for file size >= %s in files: %s', size, map(lambda x: x['path'], result) ) size *= settings.MEGABYTE result = [x for x in result if x['length'] >= size] log.debug('HdfsSensor.poke: after size filter result is %s', result) return result
def filter_for_filesize(result, size=None): """ Will test the filepath result and test if its size is at least self.filesize :param result: a list of dicts returned by Snakebite ls :param size: the file size in MB a file should be at least to trigger True :return: (bool) depending on the matching criteria """ if size: log = LoggingMixin().log log.debug( 'Filtering for file size >= %s in files: %s', size, map(lambda x: x['path'], result) ) size *= settings.MEGABYTE result = [x for x in result if x['length'] >= size] log.debug('HdfsSensor.poke: after size filter result is %s', result) return result
[ "Will", "test", "the", "filepath", "result", "and", "test", "if", "its", "size", "is", "at", "least", "self", ".", "filesize" ]
apache/airflow
python
https://github.com/apache/airflow/blob/b69c686ad8a0c89b9136bb4b31767257eb7b2597/airflow/sensors/hdfs_sensor.py#L59-L76
[ "def", "filter_for_filesize", "(", "result", ",", "size", "=", "None", ")", ":", "if", "size", ":", "log", "=", "LoggingMixin", "(", ")", ".", "log", "log", ".", "debug", "(", "'Filtering for file size >= %s in files: %s'", ",", "size", ",", "map", "(", "lambda", "x", ":", "x", "[", "'path'", "]", ",", "result", ")", ")", "size", "*=", "settings", ".", "MEGABYTE", "result", "=", "[", "x", "for", "x", "in", "result", "if", "x", "[", "'length'", "]", ">=", "size", "]", "log", ".", "debug", "(", "'HdfsSensor.poke: after size filter result is %s'", ",", "result", ")", "return", "result" ]
b69c686ad8a0c89b9136bb4b31767257eb7b2597
test
HdfsSensor.filter_for_ignored_ext
Will filter the result, if instructed to do so, to remove entries that match the ignored criteria :param result: list of dicts returned by Snakebite ls :type result: list[dict] :param ignored_ext: list of ignored extensions :type ignored_ext: list :param ignore_copying: shall we ignore files with the ignored extensions (e.g. files still being copied)? :type ignore_copying: bool :return: list of dicts which were not removed :rtype: list[dict]
airflow/sensors/hdfs_sensor.py
def filter_for_ignored_ext(result, ignored_ext, ignore_copying): """ Will filter if instructed to do so the result to remove matching criteria :param result: list of dicts returned by Snakebite ls :type result: list[dict] :param ignored_ext: list of ignored extensions :type ignored_ext: list :param ignore_copying: shall we ignore ? :type ignore_copying: bool :return: list of dicts which were not removed :rtype: list[dict] """ if ignore_copying: log = LoggingMixin().log regex_builder = r"^.*\.(%s$)$" % '$|'.join(ignored_ext) ignored_extensions_regex = re.compile(regex_builder) log.debug( 'Filtering result for ignored extensions: %s in files %s', ignored_extensions_regex.pattern, map(lambda x: x['path'], result) ) result = [x for x in result if not ignored_extensions_regex.match(x['path'])] log.debug('HdfsSensor.poke: after ext filter result is %s', result) return result
def filter_for_ignored_ext(result, ignored_ext, ignore_copying): """ Will filter if instructed to do so the result to remove matching criteria :param result: list of dicts returned by Snakebite ls :type result: list[dict] :param ignored_ext: list of ignored extensions :type ignored_ext: list :param ignore_copying: shall we ignore ? :type ignore_copying: bool :return: list of dicts which were not removed :rtype: list[dict] """ if ignore_copying: log = LoggingMixin().log regex_builder = r"^.*\.(%s$)$" % '$|'.join(ignored_ext) ignored_extensions_regex = re.compile(regex_builder) log.debug( 'Filtering result for ignored extensions: %s in files %s', ignored_extensions_regex.pattern, map(lambda x: x['path'], result) ) result = [x for x in result if not ignored_extensions_regex.match(x['path'])] log.debug('HdfsSensor.poke: after ext filter result is %s', result) return result
[ "Will", "filter", "if", "instructed", "to", "do", "so", "the", "result", "to", "remove", "matching", "criteria" ]
apache/airflow
python
https://github.com/apache/airflow/blob/b69c686ad8a0c89b9136bb4b31767257eb7b2597/airflow/sensors/hdfs_sensor.py#L79-L102
[ "def", "filter_for_ignored_ext", "(", "result", ",", "ignored_ext", ",", "ignore_copying", ")", ":", "if", "ignore_copying", ":", "log", "=", "LoggingMixin", "(", ")", ".", "log", "regex_builder", "=", "r\"^.*\\.(%s$)$\"", "%", "'$|'", ".", "join", "(", "ignored_ext", ")", "ignored_extensions_regex", "=", "re", ".", "compile", "(", "regex_builder", ")", "log", ".", "debug", "(", "'Filtering result for ignored extensions: %s in files %s'", ",", "ignored_extensions_regex", ".", "pattern", ",", "map", "(", "lambda", "x", ":", "x", "[", "'path'", "]", ",", "result", ")", ")", "result", "=", "[", "x", "for", "x", "in", "result", "if", "not", "ignored_extensions_regex", ".", "match", "(", "x", "[", "'path'", "]", ")", "]", "log", ".", "debug", "(", "'HdfsSensor.poke: after ext filter result is %s'", ",", "result", ")", "return", "result" ]
b69c686ad8a0c89b9136bb4b31767257eb7b2597
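The two static filters above can be chained on a Snakebite ls() result; a sketch with fabricated file entries, where '_COPYING_' is an assumed ignored extension:

from airflow.sensors.hdfs_sensor import HdfsSensor

ls_result = [
    {'path': '/data/2019-01-01/part-0000.csv', 'length': 5 * 1024 * 1024},
    {'path': '/data/2019-01-01/part-0001.csv._COPYING_', 'length': 3 * 1024 * 1024},
]

big_enough = HdfsSensor.filter_for_filesize(ls_result, size=1)   # keeps files of at least 1 MB
settled = HdfsSensor.filter_for_ignored_ext(big_enough, ['_COPYING_'], ignore_copying=True)
# settled now only contains the finished part-0000.csv entry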
test
MongoToS3Operator.execute
Executed by task_instance at runtime
airflow/contrib/operators/mongo_to_s3.py
def execute(self, context): """ Executed by task_instance at runtime """ s3_conn = S3Hook(self.s3_conn_id) # Grab collection and execute query according to whether or not it is a pipeline if self.is_pipeline: results = MongoHook(self.mongo_conn_id).aggregate( mongo_collection=self.mongo_collection, aggregate_query=self.mongo_query, mongo_db=self.mongo_db ) else: results = MongoHook(self.mongo_conn_id).find( mongo_collection=self.mongo_collection, query=self.mongo_query, mongo_db=self.mongo_db ) # Performs transform then stringifies the docs results into json format docs_str = self._stringify(self.transform(results)) # Load Into S3 s3_conn.load_string( string_data=docs_str, key=self.s3_key, bucket_name=self.s3_bucket, replace=self.replace ) return True
def execute(self, context): """ Executed by task_instance at runtime """ s3_conn = S3Hook(self.s3_conn_id) # Grab collection and execute query according to whether or not it is a pipeline if self.is_pipeline: results = MongoHook(self.mongo_conn_id).aggregate( mongo_collection=self.mongo_collection, aggregate_query=self.mongo_query, mongo_db=self.mongo_db ) else: results = MongoHook(self.mongo_conn_id).find( mongo_collection=self.mongo_collection, query=self.mongo_query, mongo_db=self.mongo_db ) # Performs transform then stringifies the docs results into json format docs_str = self._stringify(self.transform(results)) # Load Into S3 s3_conn.load_string( string_data=docs_str, key=self.s3_key, bucket_name=self.s3_bucket, replace=self.replace ) return True
[ "Executed", "by", "task_instance", "at", "runtime" ]
apache/airflow
python
https://github.com/apache/airflow/blob/b69c686ad8a0c89b9136bb4b31767257eb7b2597/airflow/contrib/operators/mongo_to_s3.py#L71-L103
[ "def", "execute", "(", "self", ",", "context", ")", ":", "s3_conn", "=", "S3Hook", "(", "self", ".", "s3_conn_id", ")", "# Grab collection and execute query according to whether or not it is a pipeline", "if", "self", ".", "is_pipeline", ":", "results", "=", "MongoHook", "(", "self", ".", "mongo_conn_id", ")", ".", "aggregate", "(", "mongo_collection", "=", "self", ".", "mongo_collection", ",", "aggregate_query", "=", "self", ".", "mongo_query", ",", "mongo_db", "=", "self", ".", "mongo_db", ")", "else", ":", "results", "=", "MongoHook", "(", "self", ".", "mongo_conn_id", ")", ".", "find", "(", "mongo_collection", "=", "self", ".", "mongo_collection", ",", "query", "=", "self", ".", "mongo_query", ",", "mongo_db", "=", "self", ".", "mongo_db", ")", "# Performs transform then stringifies the docs results into json format", "docs_str", "=", "self", ".", "_stringify", "(", "self", ".", "transform", "(", "results", ")", ")", "# Load Into S3", "s3_conn", ".", "load_string", "(", "string_data", "=", "docs_str", ",", "key", "=", "self", ".", "s3_key", ",", "bucket_name", "=", "self", ".", "s3_bucket", ",", "replace", "=", "self", ".", "replace", ")", "return", "True" ]
b69c686ad8a0c89b9136bb4b31767257eb7b2597
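A hedged DAG-level sketch for MongoToS3Operator.execute; the constructor argument names are assumed from the attributes the method reads, and all ids, names and keys are illustrative:

from airflow.contrib.operators.mongo_to_s3 import MongoToS3Operator

export_orders = MongoToS3Operator(
    task_id='export_orders',
    mongo_conn_id='mongo_default',
    s3_conn_id='aws_default',
    mongo_collection='orders',
    mongo_query={'status': 'shipped'},   # a plain dict, so find() is used rather than aggregate()
    s3_bucket='my-export-bucket',
    s3_key='exports/orders/{{ ds }}.json',
)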
test
MongoToS3Operator._stringify
Takes an iterable (pymongo Cursor or Array) containing dictionaries and returns a stringified version using python join
airflow/contrib/operators/mongo_to_s3.py
def _stringify(iterable, joinable='\n'): """ Takes an iterable (pymongo Cursor or Array) containing dictionaries and returns a stringified version using python join """ return joinable.join( [json.dumps(doc, default=json_util.default) for doc in iterable] )
def _stringify(iterable, joinable='\n'): """ Takes an iterable (pymongo Cursor or Array) containing dictionaries and returns a stringified version using python join """ return joinable.join( [json.dumps(doc, default=json_util.default) for doc in iterable] )
[ "Takes", "an", "iterable", "(", "pymongo", "Cursor", "or", "Array", ")", "containing", "dictionaries", "and", "returns", "a", "stringified", "version", "using", "python", "join" ]
apache/airflow
python
https://github.com/apache/airflow/blob/b69c686ad8a0c89b9136bb4b31767257eb7b2597/airflow/contrib/operators/mongo_to_s3.py#L106-L113
[ "def", "_stringify", "(", "iterable", ",", "joinable", "=", "'\\n'", ")", ":", "return", "joinable", ".", "join", "(", "[", "json", ".", "dumps", "(", "doc", ",", "default", "=", "json_util", ".", "default", ")", "for", "doc", "in", "iterable", "]", ")" ]
b69c686ad8a0c89b9136bb4b31767257eb7b2597
test
get_pool
Get pool by a given name.
airflow/api/common/experimental/pool.py
def get_pool(name, session=None): """Get pool by a given name.""" if not (name and name.strip()): raise AirflowBadRequest("Pool name shouldn't be empty") pool = session.query(Pool).filter_by(pool=name).first() if pool is None: raise PoolNotFound("Pool '%s' doesn't exist" % name) return pool
def get_pool(name, session=None): """Get pool by a given name.""" if not (name and name.strip()): raise AirflowBadRequest("Pool name shouldn't be empty") pool = session.query(Pool).filter_by(pool=name).first() if pool is None: raise PoolNotFound("Pool '%s' doesn't exist" % name) return pool
[ "Get", "pool", "by", "a", "given", "name", "." ]
apache/airflow
python
https://github.com/apache/airflow/blob/b69c686ad8a0c89b9136bb4b31767257eb7b2597/airflow/api/common/experimental/pool.py#L26-L35
[ "def", "get_pool", "(", "name", ",", "session", "=", "None", ")", ":", "if", "not", "(", "name", "and", "name", ".", "strip", "(", ")", ")", ":", "raise", "AirflowBadRequest", "(", "\"Pool name shouldn't be empty\"", ")", "pool", "=", "session", ".", "query", "(", "Pool", ")", ".", "filter_by", "(", "pool", "=", "name", ")", ".", "first", "(", ")", "if", "pool", "is", "None", ":", "raise", "PoolNotFound", "(", "\"Pool '%s' doesn't exist\"", "%", "name", ")", "return", "pool" ]
b69c686ad8a0c89b9136bb4b31767257eb7b2597
test
create_pool
Create a pool with the given parameters.
airflow/api/common/experimental/pool.py
def create_pool(name, slots, description, session=None): """Create a pool with a given parameters.""" if not (name and name.strip()): raise AirflowBadRequest("Pool name shouldn't be empty") try: slots = int(slots) except ValueError: raise AirflowBadRequest("Bad value for `slots`: %s" % slots) session.expire_on_commit = False pool = session.query(Pool).filter_by(pool=name).first() if pool is None: pool = Pool(pool=name, slots=slots, description=description) session.add(pool) else: pool.slots = slots pool.description = description session.commit() return pool
def create_pool(name, slots, description, session=None): """Create a pool with a given parameters.""" if not (name and name.strip()): raise AirflowBadRequest("Pool name shouldn't be empty") try: slots = int(slots) except ValueError: raise AirflowBadRequest("Bad value for `slots`: %s" % slots) session.expire_on_commit = False pool = session.query(Pool).filter_by(pool=name).first() if pool is None: pool = Pool(pool=name, slots=slots, description=description) session.add(pool) else: pool.slots = slots pool.description = description session.commit() return pool
[ "Create", "a", "pool", "with", "a", "given", "parameters", "." ]
apache/airflow
python
https://github.com/apache/airflow/blob/b69c686ad8a0c89b9136bb4b31767257eb7b2597/airflow/api/common/experimental/pool.py#L45-L66
[ "def", "create_pool", "(", "name", ",", "slots", ",", "description", ",", "session", "=", "None", ")", ":", "if", "not", "(", "name", "and", "name", ".", "strip", "(", ")", ")", ":", "raise", "AirflowBadRequest", "(", "\"Pool name shouldn't be empty\"", ")", "try", ":", "slots", "=", "int", "(", "slots", ")", "except", "ValueError", ":", "raise", "AirflowBadRequest", "(", "\"Bad value for `slots`: %s\"", "%", "slots", ")", "session", ".", "expire_on_commit", "=", "False", "pool", "=", "session", ".", "query", "(", "Pool", ")", ".", "filter_by", "(", "pool", "=", "name", ")", ".", "first", "(", ")", "if", "pool", "is", "None", ":", "pool", "=", "Pool", "(", "pool", "=", "name", ",", "slots", "=", "slots", ",", "description", "=", "description", ")", "session", ".", "add", "(", "pool", ")", "else", ":", "pool", ".", "slots", "=", "slots", "pool", ".", "description", "=", "description", "session", ".", "commit", "(", ")", "return", "pool" ]
b69c686ad8a0c89b9136bb4b31767257eb7b2597
test
delete_pool
Delete pool by a given name.
airflow/api/common/experimental/pool.py
def delete_pool(name, session=None): """Delete pool by a given name.""" if not (name and name.strip()): raise AirflowBadRequest("Pool name shouldn't be empty") pool = session.query(Pool).filter_by(pool=name).first() if pool is None: raise PoolNotFound("Pool '%s' doesn't exist" % name) session.delete(pool) session.commit() return pool
def delete_pool(name, session=None): """Delete pool by a given name.""" if not (name and name.strip()): raise AirflowBadRequest("Pool name shouldn't be empty") pool = session.query(Pool).filter_by(pool=name).first() if pool is None: raise PoolNotFound("Pool '%s' doesn't exist" % name) session.delete(pool) session.commit() return pool
[ "Delete", "pool", "by", "a", "given", "name", "." ]
apache/airflow
python
https://github.com/apache/airflow/blob/b69c686ad8a0c89b9136bb4b31767257eb7b2597/airflow/api/common/experimental/pool.py#L70-L82
[ "def", "delete_pool", "(", "name", ",", "session", "=", "None", ")", ":", "if", "not", "(", "name", "and", "name", ".", "strip", "(", ")", ")", ":", "raise", "AirflowBadRequest", "(", "\"Pool name shouldn't be empty\"", ")", "pool", "=", "session", ".", "query", "(", "Pool", ")", ".", "filter_by", "(", "pool", "=", "name", ")", ".", "first", "(", ")", "if", "pool", "is", "None", ":", "raise", "PoolNotFound", "(", "\"Pool '%s' doesn't exist\"", "%", "name", ")", "session", ".", "delete", "(", "pool", ")", "session", ".", "commit", "(", ")", "return", "pool" ]
b69c686ad8a0c89b9136bb4b31767257eb7b2597
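The three pool helpers above form a small CRUD API; a minimal sketch (the pool name and slot count are arbitrary, and the session argument is supplied automatically when omitted):

from airflow.api.common.experimental.pool import create_pool, get_pool, delete_pool

create_pool(name='etl_pool', slots=4, description='limit concurrent ETL tasks')
pool = get_pool(name='etl_pool')
print(pool.pool, pool.slots)   # 'etl_pool' 4
delete_pool(name='etl_pool')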
test
GKEClusterHook._dict_to_proto
Converts a python dictionary to the proto supplied :param py_dict: The dictionary to convert :type py_dict: dict :param proto: The proto object to merge with dictionary :type proto: protobuf :return: The provided proto, populated from the dictionary :raises: ParseError: On JSON parsing problems.
airflow/contrib/hooks/gcp_container_hook.py
def _dict_to_proto(py_dict, proto): """ Converts a python dictionary to the proto supplied :param py_dict: The dictionary to convert :type py_dict: dict :param proto: The proto object to merge with dictionary :type proto: protobuf :return: A parsed python dictionary in provided proto format :raises: ParseError: On JSON parsing problems. """ dict_json_str = json.dumps(py_dict) return json_format.Parse(dict_json_str, proto)
def _dict_to_proto(py_dict, proto): """ Converts a python dictionary to the proto supplied :param py_dict: The dictionary to convert :type py_dict: dict :param proto: The proto object to merge with dictionary :type proto: protobuf :return: A parsed python dictionary in provided proto format :raises: ParseError: On JSON parsing problems. """ dict_json_str = json.dumps(py_dict) return json_format.Parse(dict_json_str, proto)
[ "Converts", "a", "python", "dictionary", "to", "the", "proto", "supplied" ]
apache/airflow
python
https://github.com/apache/airflow/blob/b69c686ad8a0c89b9136bb4b31767257eb7b2597/airflow/contrib/hooks/gcp_container_hook.py#L57-L70
[ "def", "_dict_to_proto", "(", "py_dict", ",", "proto", ")", ":", "dict_json_str", "=", "json", ".", "dumps", "(", "py_dict", ")", "return", "json_format", ".", "Parse", "(", "dict_json_str", ",", "proto", ")" ]
b69c686ad8a0c89b9136bb4b31767257eb7b2597
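_dict_to_proto is a thin wrapper over json_format.Parse; a sketch using google.protobuf's Struct purely as a stand-in proto that accepts arbitrary JSON-like fields:

from google.protobuf import struct_pb2

from airflow.contrib.hooks.gcp_container_hook import GKEClusterHook

node_pool_dict = {'name': 'my-node-pool', 'initial_node_count': 3}   # illustrative values
proto = GKEClusterHook._dict_to_proto(node_pool_dict, struct_pb2.Struct())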
test
GKEClusterHook.wait_for_operation
Given an operation, continuously fetches the status from Google Cloud until it either completes or fails :param operation: The Operation to wait for :type operation: google.cloud.container_V1.gapic.enums.Operation :param project_id: Google Cloud Platform project ID :type project_id: str :return: A new, updated operation fetched from Google Cloud
airflow/contrib/hooks/gcp_container_hook.py
def wait_for_operation(self, operation, project_id=None): """ Given an operation, continuously fetches the status from Google Cloud until either completion or an error occurring :param operation: The Operation to wait for :type operation: google.cloud.container_V1.gapic.enums.Operation :param project_id: Google Cloud Platform project ID :type project_id: str :return: A new, updated operation fetched from Google Cloud """ self.log.info("Waiting for OPERATION_NAME %s", operation.name) time.sleep(OPERATIONAL_POLL_INTERVAL) while operation.status != Operation.Status.DONE: if operation.status == Operation.Status.RUNNING or operation.status == \ Operation.Status.PENDING: time.sleep(OPERATIONAL_POLL_INTERVAL) else: raise exceptions.GoogleCloudError( "Operation has failed with status: %s" % operation.status) # To update status of operation operation = self.get_operation(operation.name, project_id=project_id or self.project_id) return operation
def wait_for_operation(self, operation, project_id=None): """ Given an operation, continuously fetches the status from Google Cloud until either completion or an error occurring :param operation: The Operation to wait for :type operation: google.cloud.container_V1.gapic.enums.Operation :param project_id: Google Cloud Platform project ID :type project_id: str :return: A new, updated operation fetched from Google Cloud """ self.log.info("Waiting for OPERATION_NAME %s", operation.name) time.sleep(OPERATIONAL_POLL_INTERVAL) while operation.status != Operation.Status.DONE: if operation.status == Operation.Status.RUNNING or operation.status == \ Operation.Status.PENDING: time.sleep(OPERATIONAL_POLL_INTERVAL) else: raise exceptions.GoogleCloudError( "Operation has failed with status: %s" % operation.status) # To update status of operation operation = self.get_operation(operation.name, project_id=project_id or self.project_id) return operation
[ "Given", "an", "operation", "continuously", "fetches", "the", "status", "from", "Google", "Cloud", "until", "either", "completion", "or", "an", "error", "occurring" ]
apache/airflow
python
https://github.com/apache/airflow/blob/b69c686ad8a0c89b9136bb4b31767257eb7b2597/airflow/contrib/hooks/gcp_container_hook.py#L72-L94
[ "def", "wait_for_operation", "(", "self", ",", "operation", ",", "project_id", "=", "None", ")", ":", "self", ".", "log", ".", "info", "(", "\"Waiting for OPERATION_NAME %s\"", ",", "operation", ".", "name", ")", "time", ".", "sleep", "(", "OPERATIONAL_POLL_INTERVAL", ")", "while", "operation", ".", "status", "!=", "Operation", ".", "Status", ".", "DONE", ":", "if", "operation", ".", "status", "==", "Operation", ".", "Status", ".", "RUNNING", "or", "operation", ".", "status", "==", "Operation", ".", "Status", ".", "PENDING", ":", "time", ".", "sleep", "(", "OPERATIONAL_POLL_INTERVAL", ")", "else", ":", "raise", "exceptions", ".", "GoogleCloudError", "(", "\"Operation has failed with status: %s\"", "%", "operation", ".", "status", ")", "# To update status of operation", "operation", "=", "self", ".", "get_operation", "(", "operation", ".", "name", ",", "project_id", "=", "project_id", "or", "self", ".", "project_id", ")", "return", "operation" ]
b69c686ad8a0c89b9136bb4b31767257eb7b2597
test
GKEClusterHook.get_operation
Fetches the operation from Google Cloud :param operation_name: Name of operation to fetch :type operation_name: str :param project_id: Google Cloud Platform project ID :type project_id: str :return: The new, updated operation from Google Cloud
airflow/contrib/hooks/gcp_container_hook.py
def get_operation(self, operation_name, project_id=None): """ Fetches the operation from Google Cloud :param operation_name: Name of operation to fetch :type operation_name: str :param project_id: Google Cloud Platform project ID :type project_id: str :return: The new, updated operation from Google Cloud """ return self.get_client().get_operation(project_id=project_id or self.project_id, zone=self.location, operation_id=operation_name)
def get_operation(self, operation_name, project_id=None): """ Fetches the operation from Google Cloud :param operation_name: Name of operation to fetch :type operation_name: str :param project_id: Google Cloud Platform project ID :type project_id: str :return: The new, updated operation from Google Cloud """ return self.get_client().get_operation(project_id=project_id or self.project_id, zone=self.location, operation_id=operation_name)
[ "Fetches", "the", "operation", "from", "Google", "Cloud" ]
apache/airflow
python
https://github.com/apache/airflow/blob/b69c686ad8a0c89b9136bb4b31767257eb7b2597/airflow/contrib/hooks/gcp_container_hook.py#L96-L108
[ "def", "get_operation", "(", "self", ",", "operation_name", ",", "project_id", "=", "None", ")", ":", "return", "self", ".", "get_client", "(", ")", ".", "get_operation", "(", "project_id", "=", "project_id", "or", "self", ".", "project_id", ",", "zone", "=", "self", ".", "location", ",", "operation_id", "=", "operation_name", ")" ]
b69c686ad8a0c89b9136bb4b31767257eb7b2597
test
GKEClusterHook._append_label
Append labels to provided Cluster Protobuf Labels must fit the regex ``[a-z]([-a-z0-9]*[a-z0-9])?`` (current airflow version string follows semantic versioning spec: x.y.z). :param cluster_proto: The proto to append resource_label airflow version to :type cluster_proto: google.cloud.container_v1.types.Cluster :param key: The key label :type key: str :param val: :type val: str :return: The cluster proto updated with new label
airflow/contrib/hooks/gcp_container_hook.py
def _append_label(cluster_proto, key, val): """ Append labels to provided Cluster Protobuf Labels must fit the regex ``[a-z]([-a-z0-9]*[a-z0-9])?`` (current airflow version string follows semantic versioning spec: x.y.z). :param cluster_proto: The proto to append resource_label airflow version to :type cluster_proto: google.cloud.container_v1.types.Cluster :param key: The key label :type key: str :param val: :type val: str :return: The cluster proto updated with new label """ val = val.replace('.', '-').replace('+', '-') cluster_proto.resource_labels.update({key: val}) return cluster_proto
def _append_label(cluster_proto, key, val): """ Append labels to provided Cluster Protobuf Labels must fit the regex ``[a-z]([-a-z0-9]*[a-z0-9])?`` (current airflow version string follows semantic versioning spec: x.y.z). :param cluster_proto: The proto to append resource_label airflow version to :type cluster_proto: google.cloud.container_v1.types.Cluster :param key: The key label :type key: str :param val: :type val: str :return: The cluster proto updated with new label """ val = val.replace('.', '-').replace('+', '-') cluster_proto.resource_labels.update({key: val}) return cluster_proto
[ "Append", "labels", "to", "provided", "Cluster", "Protobuf" ]
apache/airflow
python
https://github.com/apache/airflow/blob/b69c686ad8a0c89b9136bb4b31767257eb7b2597/airflow/contrib/hooks/gcp_container_hook.py#L111-L129
[ "def", "_append_label", "(", "cluster_proto", ",", "key", ",", "val", ")", ":", "val", "=", "val", ".", "replace", "(", "'.'", ",", "'-'", ")", ".", "replace", "(", "'+'", ",", "'-'", ")", "cluster_proto", ".", "resource_labels", ".", "update", "(", "{", "key", ":", "val", "}", ")", "return", "cluster_proto" ]
b69c686ad8a0c89b9136bb4b31767257eb7b2597
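A quick illustration of the sanitisation performed above: GKE resource label values must match [a-z]([-a-z0-9]*[a-z0-9])?, so the dots (and any local-version '+') in an Airflow version string are mapped to hyphens, and create_cluster prefixes a 'v' so the value starts with a letter. The version string below is only an example value.

version_string = '1.10.2+composer'           # example value only
label_value = ('v' + version_string).replace('.', '-').replace('+', '-')
print(label_value)                            # prints: v1-10-2-composer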
test
GKEClusterHook.delete_cluster
Deletes the cluster, including the Kubernetes endpoint and all worker nodes. Firewalls and routes that were configured during cluster creation are also deleted. Other Google Compute Engine resources that might be in use by the cluster (e.g. load balancer resources) will not be deleted if they weren’t present at the initial create time. :param name: The name of the cluster to delete :type name: str :param project_id: Google Cloud Platform project ID :type project_id: str :param retry: Retry object used to determine when/if to retry requests. If None is specified, requests will not be retried. :type retry: google.api_core.retry.Retry :param timeout: The amount of time, in seconds, to wait for the request to complete. Note that if retry is specified, the timeout applies to each individual attempt. :type timeout: float :return: The full url to the delete operation if successful, else None
airflow/contrib/hooks/gcp_container_hook.py
def delete_cluster(self, name, project_id=None, retry=DEFAULT, timeout=DEFAULT): """ Deletes the cluster, including the Kubernetes endpoint and all worker nodes. Firewalls and routes that were configured during cluster creation are also deleted. Other Google Compute Engine resources that might be in use by the cluster (e.g. load balancer resources) will not be deleted if they weren’t present at the initial create time. :param name: The name of the cluster to delete :type name: str :param project_id: Google Cloud Platform project ID :type project_id: str :param retry: Retry object used to determine when/if to retry requests. If None is specified, requests will not be retried. :type retry: google.api_core.retry.Retry :param timeout: The amount of time, in seconds, to wait for the request to complete. Note that if retry is specified, the timeout applies to each individual attempt. :type timeout: float :return: The full url to the delete operation if successful, else None """ self.log.info( "Deleting (project_id=%s, zone=%s, cluster_id=%s)", self.project_id, self.location, name ) try: op = self.get_client().delete_cluster(project_id=project_id or self.project_id, zone=self.location, cluster_id=name, retry=retry, timeout=timeout) op = self.wait_for_operation(op) # Returns server-defined url for the resource return op.self_link except NotFound as error: self.log.info('Assuming Success: %s', error.message)
def delete_cluster(self, name, project_id=None, retry=DEFAULT, timeout=DEFAULT): """ Deletes the cluster, including the Kubernetes endpoint and all worker nodes. Firewalls and routes that were configured during cluster creation are also deleted. Other Google Compute Engine resources that might be in use by the cluster (e.g. load balancer resources) will not be deleted if they weren’t present at the initial create time. :param name: The name of the cluster to delete :type name: str :param project_id: Google Cloud Platform project ID :type project_id: str :param retry: Retry object used to determine when/if to retry requests. If None is specified, requests will not be retried. :type retry: google.api_core.retry.Retry :param timeout: The amount of time, in seconds, to wait for the request to complete. Note that if retry is specified, the timeout applies to each individual attempt. :type timeout: float :return: The full url to the delete operation if successful, else None """ self.log.info( "Deleting (project_id=%s, zone=%s, cluster_id=%s)", self.project_id, self.location, name ) try: op = self.get_client().delete_cluster(project_id=project_id or self.project_id, zone=self.location, cluster_id=name, retry=retry, timeout=timeout) op = self.wait_for_operation(op) # Returns server-defined url for the resource return op.self_link except NotFound as error: self.log.info('Assuming Success: %s', error.message)
[ "Deletes", "the", "cluster", "including", "the", "Kubernetes", "endpoint", "and", "all", "worker", "nodes", ".", "Firewalls", "and", "routes", "that", "were", "configured", "during", "cluster", "creation", "are", "also", "deleted", ".", "Other", "Google", "Compute", "Engine", "resources", "that", "might", "be", "in", "use", "by", "the", "cluster", "(", "e", ".", "g", ".", "load", "balancer", "resources", ")", "will", "not", "be", "deleted", "if", "they", "weren’t", "present", "at", "the", "initial", "create", "time", "." ]
apache/airflow
python
https://github.com/apache/airflow/blob/b69c686ad8a0c89b9136bb4b31767257eb7b2597/airflow/contrib/hooks/gcp_container_hook.py#L131-L168
[ "def", "delete_cluster", "(", "self", ",", "name", ",", "project_id", "=", "None", ",", "retry", "=", "DEFAULT", ",", "timeout", "=", "DEFAULT", ")", ":", "self", ".", "log", ".", "info", "(", "\"Deleting (project_id=%s, zone=%s, cluster_id=%s)\"", ",", "self", ".", "project_id", ",", "self", ".", "location", ",", "name", ")", "try", ":", "op", "=", "self", ".", "get_client", "(", ")", ".", "delete_cluster", "(", "project_id", "=", "project_id", "or", "self", ".", "project_id", ",", "zone", "=", "self", ".", "location", ",", "cluster_id", "=", "name", ",", "retry", "=", "retry", ",", "timeout", "=", "timeout", ")", "op", "=", "self", ".", "wait_for_operation", "(", "op", ")", "# Returns server-defined url for the resource", "return", "op", ".", "self_link", "except", "NotFound", "as", "error", ":", "self", ".", "log", ".", "info", "(", "'Assuming Success: %s'", ",", "error", ".", "message", ")" ]
b69c686ad8a0c89b9136bb4b31767257eb7b2597
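A hedged usage sketch for the delete path above. The constructor arguments (connection id and compute zone) and the cluster name are illustrative assumptions, not values taken from the record.

from airflow.contrib.hooks.gcp_container_hook import GKEClusterHook

hook = GKEClusterHook(gcp_conn_id='google_cloud_default', location='europe-west1-b')
# Returns the operation's self_link on success; returns None if the cluster was
# already gone, since a NotFound error is treated as success.
operation_url = hook.delete_cluster(name='example-cluster')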
test
GKEClusterHook.create_cluster
Creates a cluster, consisting of the specified number and type of Google Compute Engine instances. :param cluster: A Cluster protobuf or dict. If dict is provided, it must be of the same form as the protobuf message :class:`google.cloud.container_v1.types.Cluster` :type cluster: dict or google.cloud.container_v1.types.Cluster :param project_id: Google Cloud Platform project ID :type project_id: str :param retry: A retry object (``google.api_core.retry.Retry``) used to retry requests. If None is specified, requests will not be retried. :type retry: google.api_core.retry.Retry :param timeout: The amount of time, in seconds, to wait for the request to complete. Note that if retry is specified, the timeout applies to each individual attempt. :type timeout: float :return: The full url to the new, or existing, cluster :raises: ParseError: On JSON parsing problems when trying to convert dict AirflowException: cluster is not dict type nor Cluster proto type
airflow/contrib/hooks/gcp_container_hook.py
def create_cluster(self, cluster, project_id=None, retry=DEFAULT, timeout=DEFAULT): """ Creates a cluster, consisting of the specified number and type of Google Compute Engine instances. :param cluster: A Cluster protobuf or dict. If dict is provided, it must be of the same form as the protobuf message :class:`google.cloud.container_v1.types.Cluster` :type cluster: dict or google.cloud.container_v1.types.Cluster :param project_id: Google Cloud Platform project ID :type project_id: str :param retry: A retry object (``google.api_core.retry.Retry``) used to retry requests. If None is specified, requests will not be retried. :type retry: google.api_core.retry.Retry :param timeout: The amount of time, in seconds, to wait for the request to complete. Note that if retry is specified, the timeout applies to each individual attempt. :type timeout: float :return: The full url to the new, or existing, cluster :raises: ParseError: On JSON parsing problems when trying to convert dict AirflowException: cluster is not dict type nor Cluster proto type """ if isinstance(cluster, dict): cluster_proto = Cluster() cluster = self._dict_to_proto(py_dict=cluster, proto=cluster_proto) elif not isinstance(cluster, Cluster): raise AirflowException( "cluster is not instance of Cluster proto or python dict") self._append_label(cluster, 'airflow-version', 'v' + version.version) self.log.info( "Creating (project_id=%s, zone=%s, cluster_name=%s)", self.project_id, self.location, cluster.name ) try: op = self.get_client().create_cluster(project_id=project_id or self.project_id, zone=self.location, cluster=cluster, retry=retry, timeout=timeout) op = self.wait_for_operation(op) return op.target_link except AlreadyExists as error: self.log.info('Assuming Success: %s', error.message) return self.get_cluster(name=cluster.name).self_link
def create_cluster(self, cluster, project_id=None, retry=DEFAULT, timeout=DEFAULT): """ Creates a cluster, consisting of the specified number and type of Google Compute Engine instances. :param cluster: A Cluster protobuf or dict. If dict is provided, it must be of the same form as the protobuf message :class:`google.cloud.container_v1.types.Cluster` :type cluster: dict or google.cloud.container_v1.types.Cluster :param project_id: Google Cloud Platform project ID :type project_id: str :param retry: A retry object (``google.api_core.retry.Retry``) used to retry requests. If None is specified, requests will not be retried. :type retry: google.api_core.retry.Retry :param timeout: The amount of time, in seconds, to wait for the request to complete. Note that if retry is specified, the timeout applies to each individual attempt. :type timeout: float :return: The full url to the new, or existing, cluster :raises: ParseError: On JSON parsing problems when trying to convert dict AirflowException: cluster is not dict type nor Cluster proto type """ if isinstance(cluster, dict): cluster_proto = Cluster() cluster = self._dict_to_proto(py_dict=cluster, proto=cluster_proto) elif not isinstance(cluster, Cluster): raise AirflowException( "cluster is not instance of Cluster proto or python dict") self._append_label(cluster, 'airflow-version', 'v' + version.version) self.log.info( "Creating (project_id=%s, zone=%s, cluster_name=%s)", self.project_id, self.location, cluster.name ) try: op = self.get_client().create_cluster(project_id=project_id or self.project_id, zone=self.location, cluster=cluster, retry=retry, timeout=timeout) op = self.wait_for_operation(op) return op.target_link except AlreadyExists as error: self.log.info('Assuming Success: %s', error.message) return self.get_cluster(name=cluster.name).self_link
[ "Creates", "a", "cluster", "consisting", "of", "the", "specified", "number", "and", "type", "of", "Google", "Compute", "Engine", "instances", "." ]
apache/airflow
python
https://github.com/apache/airflow/blob/b69c686ad8a0c89b9136bb4b31767257eb7b2597/airflow/contrib/hooks/gcp_container_hook.py#L170-L219
[ "def", "create_cluster", "(", "self", ",", "cluster", ",", "project_id", "=", "None", ",", "retry", "=", "DEFAULT", ",", "timeout", "=", "DEFAULT", ")", ":", "if", "isinstance", "(", "cluster", ",", "dict", ")", ":", "cluster_proto", "=", "Cluster", "(", ")", "cluster", "=", "self", ".", "_dict_to_proto", "(", "py_dict", "=", "cluster", ",", "proto", "=", "cluster_proto", ")", "elif", "not", "isinstance", "(", "cluster", ",", "Cluster", ")", ":", "raise", "AirflowException", "(", "\"cluster is not instance of Cluster proto or python dict\"", ")", "self", ".", "_append_label", "(", "cluster", ",", "'airflow-version'", ",", "'v'", "+", "version", ".", "version", ")", "self", ".", "log", ".", "info", "(", "\"Creating (project_id=%s, zone=%s, cluster_name=%s)\"", ",", "self", ".", "project_id", ",", "self", ".", "location", ",", "cluster", ".", "name", ")", "try", ":", "op", "=", "self", ".", "get_client", "(", ")", ".", "create_cluster", "(", "project_id", "=", "project_id", "or", "self", ".", "project_id", ",", "zone", "=", "self", ".", "location", ",", "cluster", "=", "cluster", ",", "retry", "=", "retry", ",", "timeout", "=", "timeout", ")", "op", "=", "self", ".", "wait_for_operation", "(", "op", ")", "return", "op", ".", "target_link", "except", "AlreadyExists", "as", "error", ":", "self", ".", "log", ".", "info", "(", "'Assuming Success: %s'", ",", "error", ".", "message", ")", "return", "self", ".", "get_cluster", "(", "name", "=", "cluster", ".", "name", ")", ".", "self_link" ]
b69c686ad8a0c89b9136bb4b31767257eb7b2597
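A companion sketch for cluster creation, passing a plain dict that the hook converts to a Cluster proto via _dict_to_proto. The connection id, zone, and cluster spec are placeholder assumptions.

from airflow.contrib.hooks.gcp_container_hook import GKEClusterHook

hook = GKEClusterHook(gcp_conn_id='google_cloud_default', location='us-central1-a')
cluster_spec = {'name': 'example-cluster', 'initial_node_count': 1}
# Returns the target_link of the create operation, or the existing cluster's
# self_link if a cluster with that name already exists.
cluster_url = hook.create_cluster(cluster=cluster_spec)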
test
GKEClusterHook.get_cluster
Gets details of specified cluster :param name: The name of the cluster to retrieve :type name: str :param project_id: Google Cloud Platform project ID :type project_id: str :param retry: A retry object used to retry requests. If None is specified, requests will not be retried. :type retry: google.api_core.retry.Retry :param timeout: The amount of time, in seconds, to wait for the request to complete. Note that if retry is specified, the timeout applies to each individual attempt. :type timeout: float :return: google.cloud.container_v1.types.Cluster
airflow/contrib/hooks/gcp_container_hook.py
def get_cluster(self, name, project_id=None, retry=DEFAULT, timeout=DEFAULT): """ Gets details of specified cluster :param name: The name of the cluster to retrieve :type name: str :param project_id: Google Cloud Platform project ID :type project_id: str :param retry: A retry object used to retry requests. If None is specified, requests will not be retried. :type retry: google.api_core.retry.Retry :param timeout: The amount of time, in seconds, to wait for the request to complete. Note that if retry is specified, the timeout applies to each individual attempt. :type timeout: float :return: google.cloud.container_v1.types.Cluster """ self.log.info( "Fetching cluster (project_id=%s, zone=%s, cluster_name=%s)", project_id or self.project_id, self.location, name ) return self.get_client().get_cluster(project_id=project_id or self.project_id, zone=self.location, cluster_id=name, retry=retry, timeout=timeout).self_link
def get_cluster(self, name, project_id=None, retry=DEFAULT, timeout=DEFAULT): """ Gets details of specified cluster :param name: The name of the cluster to retrieve :type name: str :param project_id: Google Cloud Platform project ID :type project_id: str :param retry: A retry object used to retry requests. If None is specified, requests will not be retried. :type retry: google.api_core.retry.Retry :param timeout: The amount of time, in seconds, to wait for the request to complete. Note that if retry is specified, the timeout applies to each individual attempt. :type timeout: float :return: google.cloud.container_v1.types.Cluster """ self.log.info( "Fetching cluster (project_id=%s, zone=%s, cluster_name=%s)", project_id or self.project_id, self.location, name ) return self.get_client().get_cluster(project_id=project_id or self.project_id, zone=self.location, cluster_id=name, retry=retry, timeout=timeout).self_link
[ "Gets", "details", "of", "specified", "cluster" ]
apache/airflow
python
https://github.com/apache/airflow/blob/b69c686ad8a0c89b9136bb4b31767257eb7b2597/airflow/contrib/hooks/gcp_container_hook.py#L221-L247
[ "def", "get_cluster", "(", "self", ",", "name", ",", "project_id", "=", "None", ",", "retry", "=", "DEFAULT", ",", "timeout", "=", "DEFAULT", ")", ":", "self", ".", "log", ".", "info", "(", "\"Fetching cluster (project_id=%s, zone=%s, cluster_name=%s)\"", ",", "project_id", "or", "self", ".", "project_id", ",", "self", ".", "location", ",", "name", ")", "return", "self", ".", "get_client", "(", ")", ".", "get_cluster", "(", "project_id", "=", "project_id", "or", "self", ".", "project_id", ",", "zone", "=", "self", ".", "location", ",", "cluster_id", "=", "name", ",", "retry", "=", "retry", ",", "timeout", "=", "timeout", ")", ".", "self_link" ]
b69c686ad8a0c89b9136bb4b31767257eb7b2597
test
DiscordWebhookHook._get_webhook_endpoint
Given a Discord http_conn_id, return the default webhook endpoint or override if a webhook_endpoint is manually supplied. :param http_conn_id: The provided connection ID :param webhook_endpoint: The manually provided webhook endpoint :return: Webhook endpoint (str) to use
airflow/contrib/hooks/discord_webhook_hook.py
def _get_webhook_endpoint(self, http_conn_id, webhook_endpoint): """ Given a Discord http_conn_id, return the default webhook endpoint or override if a webhook_endpoint is manually supplied. :param http_conn_id: The provided connection ID :param webhook_endpoint: The manually provided webhook endpoint :return: Webhook endpoint (str) to use """ if webhook_endpoint: endpoint = webhook_endpoint elif http_conn_id: conn = self.get_connection(http_conn_id) extra = conn.extra_dejson endpoint = extra.get('webhook_endpoint', '') else: raise AirflowException('Cannot get webhook endpoint: No valid Discord ' 'webhook endpoint or http_conn_id supplied.') # make sure endpoint matches the expected Discord webhook format if not re.match('^webhooks/[0-9]+/[a-zA-Z0-9_-]+$', endpoint): raise AirflowException('Expected Discord webhook endpoint in the form ' 'of "webhooks/{webhook.id}/{webhook.token}".') return endpoint
def _get_webhook_endpoint(self, http_conn_id, webhook_endpoint): """ Given a Discord http_conn_id, return the default webhook endpoint or override if a webhook_endpoint is manually supplied. :param http_conn_id: The provided connection ID :param webhook_endpoint: The manually provided webhook endpoint :return: Webhook endpoint (str) to use """ if webhook_endpoint: endpoint = webhook_endpoint elif http_conn_id: conn = self.get_connection(http_conn_id) extra = conn.extra_dejson endpoint = extra.get('webhook_endpoint', '') else: raise AirflowException('Cannot get webhook endpoint: No valid Discord ' 'webhook endpoint or http_conn_id supplied.') # make sure endpoint matches the expected Discord webhook format if not re.match('^webhooks/[0-9]+/[a-zA-Z0-9_-]+$', endpoint): raise AirflowException('Expected Discord webhook endpoint in the form ' 'of "webhooks/{webhook.id}/{webhook.token}".') return endpoint
[ "Given", "a", "Discord", "http_conn_id", "return", "the", "default", "webhook", "endpoint", "or", "override", "if", "a", "webhook_endpoint", "is", "manually", "supplied", "." ]
apache/airflow
python
https://github.com/apache/airflow/blob/b69c686ad8a0c89b9136bb4b31767257eb7b2597/airflow/contrib/hooks/discord_webhook_hook.py#L76-L100
[ "def", "_get_webhook_endpoint", "(", "self", ",", "http_conn_id", ",", "webhook_endpoint", ")", ":", "if", "webhook_endpoint", ":", "endpoint", "=", "webhook_endpoint", "elif", "http_conn_id", ":", "conn", "=", "self", ".", "get_connection", "(", "http_conn_id", ")", "extra", "=", "conn", ".", "extra_dejson", "endpoint", "=", "extra", ".", "get", "(", "'webhook_endpoint'", ",", "''", ")", "else", ":", "raise", "AirflowException", "(", "'Cannot get webhook endpoint: No valid Discord '", "'webhook endpoint or http_conn_id supplied.'", ")", "# make sure endpoint matches the expected Discord webhook format", "if", "not", "re", ".", "match", "(", "'^webhooks/[0-9]+/[a-zA-Z0-9_-]+$'", ",", "endpoint", ")", ":", "raise", "AirflowException", "(", "'Expected Discord webhook endpoint in the form '", "'of \"webhooks/{webhook.id}/{webhook.token}\".'", ")", "return", "endpoint" ]
b69c686ad8a0c89b9136bb4b31767257eb7b2597
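A stand-alone check of the endpoint format enforced above: only the relative "webhooks/{id}/{token}" form passes, so a full Discord URL stored by mistake is rejected. The id and token below are made up.

import re

pattern = r'^webhooks/[0-9]+/[a-zA-Z0-9_-]+$'
print(bool(re.match(pattern, 'webhooks/123456789/abcDEF_ghi-JKL')))                  # True
print(bool(re.match(pattern, 'https://discordapp.com/api/webhooks/123456789/abc')))  # False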
test
DiscordWebhookHook._build_discord_payload
Construct the Discord JSON payload. All relevant parameters are combined here to a valid Discord JSON payload. :return: Discord payload (str) to send
airflow/contrib/hooks/discord_webhook_hook.py
def _build_discord_payload(self): """ Construct the Discord JSON payload. All relevant parameters are combined here to a valid Discord JSON payload. :return: Discord payload (str) to send """ payload = {} if self.username: payload['username'] = self.username if self.avatar_url: payload['avatar_url'] = self.avatar_url payload['tts'] = self.tts if len(self.message) <= 2000: payload['content'] = self.message else: raise AirflowException('Discord message length must be 2000 or fewer ' 'characters.') return json.dumps(payload)
def _build_discord_payload(self): """ Construct the Discord JSON payload. All relevant parameters are combined here to a valid Discord JSON payload. :return: Discord payload (str) to send """ payload = {} if self.username: payload['username'] = self.username if self.avatar_url: payload['avatar_url'] = self.avatar_url payload['tts'] = self.tts if len(self.message) <= 2000: payload['content'] = self.message else: raise AirflowException('Discord message length must be 2000 or fewer ' 'characters.') return json.dumps(payload)
[ "Construct", "the", "Discord", "JSON", "payload", ".", "All", "relevant", "parameters", "are", "combined", "here", "to", "a", "valid", "Discord", "JSON", "payload", "." ]
apache/airflow
python
https://github.com/apache/airflow/blob/b69c686ad8a0c89b9136bb4b31767257eb7b2597/airflow/contrib/hooks/discord_webhook_hook.py#L102-L124
[ "def", "_build_discord_payload", "(", "self", ")", ":", "payload", "=", "{", "}", "if", "self", ".", "username", ":", "payload", "[", "'username'", "]", "=", "self", ".", "username", "if", "self", ".", "avatar_url", ":", "payload", "[", "'avatar_url'", "]", "=", "self", ".", "avatar_url", "payload", "[", "'tts'", "]", "=", "self", ".", "tts", "if", "len", "(", "self", ".", "message", ")", "<=", "2000", ":", "payload", "[", "'content'", "]", "=", "self", ".", "message", "else", ":", "raise", "AirflowException", "(", "'Discord message length must be 2000 or fewer '", "'characters.'", ")", "return", "json", ".", "dumps", "(", "payload", ")" ]
b69c686ad8a0c89b9136bb4b31767257eb7b2597
test
DiscordWebhookHook.execute
Execute the Discord webhook call
airflow/contrib/hooks/discord_webhook_hook.py
def execute(self): """ Execute the Discord webhook call """ proxies = {} if self.proxy: # we only need https proxy for Discord proxies = {'https': self.proxy} discord_payload = self._build_discord_payload() self.run(endpoint=self.webhook_endpoint, data=discord_payload, headers={'Content-type': 'application/json'}, extra_options={'proxies': proxies})
def execute(self): """ Execute the Discord webhook call """ proxies = {} if self.proxy: # we only need https proxy for Discord proxies = {'https': self.proxy} discord_payload = self._build_discord_payload() self.run(endpoint=self.webhook_endpoint, data=discord_payload, headers={'Content-type': 'application/json'}, extra_options={'proxies': proxies})
[ "Execute", "the", "Discord", "webhook", "call" ]
apache/airflow
python
https://github.com/apache/airflow/blob/b69c686ad8a0c89b9136bb4b31767257eb7b2597/airflow/contrib/hooks/discord_webhook_hook.py#L126-L140
[ "def", "execute", "(", "self", ")", ":", "proxies", "=", "{", "}", "if", "self", ".", "proxy", ":", "# we only need https proxy for Discord", "proxies", "=", "{", "'https'", ":", "self", ".", "proxy", "}", "discord_payload", "=", "self", ".", "_build_discord_payload", "(", ")", "self", ".", "run", "(", "endpoint", "=", "self", ".", "webhook_endpoint", ",", "data", "=", "discord_payload", ",", "headers", "=", "{", "'Content-type'", ":", "'application/json'", "}", ",", "extra_options", "=", "{", "'proxies'", ":", "proxies", "}", ")" ]
b69c686ad8a0c89b9136bb4b31767257eb7b2597
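Putting the three Discord methods together, a hedged end-to-end sketch: the connection id, endpoint, and message are placeholders, the constructor keywords are assumed to mirror the attribute names used above, and in practice the endpoint is usually read from the connection's webhook_endpoint extra rather than passed explicitly.

from airflow.contrib.hooks.discord_webhook_hook import DiscordWebhookHook

hook = DiscordWebhookHook(
    http_conn_id='discord_default',
    webhook_endpoint='webhooks/123456789/abcDEF_ghi-JKL',
    message='Airflow DAG finished successfully',
    username='airflow-bot',
)
hook.execute()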
test
GoogleCloudKMSHook.encrypt
Encrypts a plaintext message using Google Cloud KMS. :param key_name: The Resource Name for the key (or key version) to be used for encryption. Of the form ``projects/*/locations/*/keyRings/*/cryptoKeys/**`` :type key_name: str :param plaintext: The message to be encrypted. :type plaintext: bytes :param authenticated_data: Optional additional authenticated data that must also be provided to decrypt the message. :type authenticated_data: bytes :return: The base 64 encoded ciphertext of the original message. :rtype: str
airflow/contrib/hooks/gcp_kms_hook.py
def encrypt(self, key_name, plaintext, authenticated_data=None): """ Encrypts a plaintext message using Google Cloud KMS. :param key_name: The Resource Name for the key (or key version) to be used for encryption. Of the form ``projects/*/locations/*/keyRings/*/cryptoKeys/**`` :type key_name: str :param plaintext: The message to be encrypted. :type plaintext: bytes :param authenticated_data: Optional additional authenticated data that must also be provided to decrypt the message. :type authenticated_data: bytes :return: The base 64 encoded ciphertext of the original message. :rtype: str """ keys = self.get_conn().projects().locations().keyRings().cryptoKeys() body = {'plaintext': _b64encode(plaintext)} if authenticated_data: body['additionalAuthenticatedData'] = _b64encode(authenticated_data) request = keys.encrypt(name=key_name, body=body) response = request.execute(num_retries=self.num_retries) ciphertext = response['ciphertext'] return ciphertext
def encrypt(self, key_name, plaintext, authenticated_data=None): """ Encrypts a plaintext message using Google Cloud KMS. :param key_name: The Resource Name for the key (or key version) to be used for encryption. Of the form ``projects/*/locations/*/keyRings/*/cryptoKeys/**`` :type key_name: str :param plaintext: The message to be encrypted. :type plaintext: bytes :param authenticated_data: Optional additional authenticated data that must also be provided to decrypt the message. :type authenticated_data: bytes :return: The base 64 encoded ciphertext of the original message. :rtype: str """ keys = self.get_conn().projects().locations().keyRings().cryptoKeys() body = {'plaintext': _b64encode(plaintext)} if authenticated_data: body['additionalAuthenticatedData'] = _b64encode(authenticated_data) request = keys.encrypt(name=key_name, body=body) response = request.execute(num_retries=self.num_retries) ciphertext = response['ciphertext'] return ciphertext
[ "Encrypts", "a", "plaintext", "message", "using", "Google", "Cloud", "KMS", "." ]
apache/airflow
python
https://github.com/apache/airflow/blob/b69c686ad8a0c89b9136bb4b31767257eb7b2597/airflow/contrib/hooks/gcp_kms_hook.py#L58-L83
[ "def", "encrypt", "(", "self", ",", "key_name", ",", "plaintext", ",", "authenticated_data", "=", "None", ")", ":", "keys", "=", "self", ".", "get_conn", "(", ")", ".", "projects", "(", ")", ".", "locations", "(", ")", ".", "keyRings", "(", ")", ".", "cryptoKeys", "(", ")", "body", "=", "{", "'plaintext'", ":", "_b64encode", "(", "plaintext", ")", "}", "if", "authenticated_data", ":", "body", "[", "'additionalAuthenticatedData'", "]", "=", "_b64encode", "(", "authenticated_data", ")", "request", "=", "keys", ".", "encrypt", "(", "name", "=", "key_name", ",", "body", "=", "body", ")", "response", "=", "request", ".", "execute", "(", "num_retries", "=", "self", ".", "num_retries", ")", "ciphertext", "=", "response", "[", "'ciphertext'", "]", "return", "ciphertext" ]
b69c686ad8a0c89b9136bb4b31767257eb7b2597
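A hedged usage sketch for the KMS hook above; the key resource name is a placeholder, the constructor argument is assumed to follow the usual GCP hook convention, and plain bytes are passed because the base-64 encoding happens inside the hook.

from airflow.contrib.hooks.gcp_kms_hook import GoogleCloudKMSHook

hook = GoogleCloudKMSHook(gcp_conn_id='google_cloud_default')
key_name = ('projects/example-project/locations/global/'
            'keyRings/example-ring/cryptoKeys/example-key')
# Returns the ciphertext as a base-64 encoded string.
ciphertext = hook.encrypt(key_name=key_name, plaintext=b'connection-password')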
test
SqoopHook.Popen
Remote Popen :param cmd: command to remotely execute :param kwargs: extra arguments to Popen (see subprocess.Popen) :return: handle to subprocess
airflow/contrib/hooks/sqoop_hook.py
def Popen(self, cmd, **kwargs): """ Remote Popen :param cmd: command to remotely execute :param kwargs: extra arguments to Popen (see subprocess.Popen) :return: handle to subprocess """ masked_cmd = ' '.join(self.cmd_mask_password(cmd)) self.log.info("Executing command: {}".format(masked_cmd)) self.sp = subprocess.Popen( cmd, stdout=subprocess.PIPE, stderr=subprocess.STDOUT, **kwargs) for line in iter(self.sp.stdout): self.log.info(line.strip()) self.sp.wait() self.log.info("Command exited with return code %s", self.sp.returncode) if self.sp.returncode: raise AirflowException("Sqoop command failed: {}".format(masked_cmd))
def Popen(self, cmd, **kwargs): """ Remote Popen :param cmd: command to remotely execute :param kwargs: extra arguments to Popen (see subprocess.Popen) :return: handle to subprocess """ masked_cmd = ' '.join(self.cmd_mask_password(cmd)) self.log.info("Executing command: {}".format(masked_cmd)) self.sp = subprocess.Popen( cmd, stdout=subprocess.PIPE, stderr=subprocess.STDOUT, **kwargs) for line in iter(self.sp.stdout): self.log.info(line.strip()) self.sp.wait() self.log.info("Command exited with return code %s", self.sp.returncode) if self.sp.returncode: raise AirflowException("Sqoop command failed: {}".format(masked_cmd))
[ "Remote", "Popen" ]
apache/airflow
python
https://github.com/apache/airflow/blob/b69c686ad8a0c89b9136bb4b31767257eb7b2597/airflow/contrib/hooks/sqoop_hook.py#L92-L116
[ "def", "Popen", "(", "self", ",", "cmd", ",", "*", "*", "kwargs", ")", ":", "masked_cmd", "=", "' '", ".", "join", "(", "self", ".", "cmd_mask_password", "(", "cmd", ")", ")", "self", ".", "log", ".", "info", "(", "\"Executing command: {}\"", ".", "format", "(", "masked_cmd", ")", ")", "self", ".", "sp", "=", "subprocess", ".", "Popen", "(", "cmd", ",", "stdout", "=", "subprocess", ".", "PIPE", ",", "stderr", "=", "subprocess", ".", "STDOUT", ",", "*", "*", "kwargs", ")", "for", "line", "in", "iter", "(", "self", ".", "sp", ".", "stdout", ")", ":", "self", ".", "log", ".", "info", "(", "line", ".", "strip", "(", ")", ")", "self", ".", "sp", ".", "wait", "(", ")", "self", ".", "log", ".", "info", "(", "\"Command exited with return code %s\"", ",", "self", ".", "sp", ".", "returncode", ")", "if", "self", ".", "sp", ".", "returncode", ":", "raise", "AirflowException", "(", "\"Sqoop command failed: {}\"", ".", "format", "(", "masked_cmd", ")", ")" ]
b69c686ad8a0c89b9136bb4b31767257eb7b2597
test
SqoopHook.import_table
Imports table from remote location to target dir. Arguments are copies of direct sqoop command line arguments :param table: Table to read :param target_dir: HDFS destination dir :param append: Append data to an existing dataset in HDFS :param file_type: "avro", "sequence", "text" or "parquet". Imports data into the specified format. Defaults to text. :param columns: <col,col,col…> Columns to import from table :param split_by: Column of the table used to split work units :param where: WHERE clause to use during import :param direct: Use direct connector if exists for the database :param driver: Manually specify JDBC driver class to use :param extra_import_options: Extra import options to pass as dict. If a key doesn't have a value, just pass an empty string to it. Don't include prefix of -- for sqoop options.
airflow/contrib/hooks/sqoop_hook.py
def import_table(self, table, target_dir=None, append=False, file_type="text", columns=None, split_by=None, where=None, direct=False, driver=None, extra_import_options=None): """ Imports table from remote location to target dir. Arguments are copies of direct sqoop command line arguments :param table: Table to read :param target_dir: HDFS destination dir :param append: Append data to an existing dataset in HDFS :param file_type: "avro", "sequence", "text" or "parquet". Imports data into the specified format. Defaults to text. :param columns: <col,col,col…> Columns to import from table :param split_by: Column of the table used to split work units :param where: WHERE clause to use during import :param direct: Use direct connector if exists for the database :param driver: Manually specify JDBC driver class to use :param extra_import_options: Extra import options to pass as dict. If a key doesn't have a value, just pass an empty string to it. Don't include prefix of -- for sqoop options. """ cmd = self._import_cmd(target_dir, append, file_type, split_by, direct, driver, extra_import_options) cmd += ["--table", table] if columns: cmd += ["--columns", columns] if where: cmd += ["--where", where] self.Popen(cmd)
def import_table(self, table, target_dir=None, append=False, file_type="text", columns=None, split_by=None, where=None, direct=False, driver=None, extra_import_options=None): """ Imports table from remote location to target dir. Arguments are copies of direct sqoop command line arguments :param table: Table to read :param target_dir: HDFS destination dir :param append: Append data to an existing dataset in HDFS :param file_type: "avro", "sequence", "text" or "parquet". Imports data into the specified format. Defaults to text. :param columns: <col,col,col…> Columns to import from table :param split_by: Column of the table used to split work units :param where: WHERE clause to use during import :param direct: Use direct connector if exists for the database :param driver: Manually specify JDBC driver class to use :param extra_import_options: Extra import options to pass as dict. If a key doesn't have a value, just pass an empty string to it. Don't include prefix of -- for sqoop options. """ cmd = self._import_cmd(target_dir, append, file_type, split_by, direct, driver, extra_import_options) cmd += ["--table", table] if columns: cmd += ["--columns", columns] if where: cmd += ["--where", where] self.Popen(cmd)
[ "Imports", "table", "from", "remote", "location", "to", "target", "dir", ".", "Arguments", "are", "copies", "of", "direct", "sqoop", "command", "line", "arguments" ]
apache/airflow
python
https://github.com/apache/airflow/blob/b69c686ad8a0c89b9136bb4b31767257eb7b2597/airflow/contrib/hooks/sqoop_hook.py#L202-L233
[ "def", "import_table", "(", "self", ",", "table", ",", "target_dir", "=", "None", ",", "append", "=", "False", ",", "file_type", "=", "\"text\"", ",", "columns", "=", "None", ",", "split_by", "=", "None", ",", "where", "=", "None", ",", "direct", "=", "False", ",", "driver", "=", "None", ",", "extra_import_options", "=", "None", ")", ":", "cmd", "=", "self", ".", "_import_cmd", "(", "target_dir", ",", "append", ",", "file_type", ",", "split_by", ",", "direct", ",", "driver", ",", "extra_import_options", ")", "cmd", "+=", "[", "\"--table\"", ",", "table", "]", "if", "columns", ":", "cmd", "+=", "[", "\"--columns\"", ",", "columns", "]", "if", "where", ":", "cmd", "+=", "[", "\"--where\"", ",", "where", "]", "self", ".", "Popen", "(", "cmd", ")" ]
b69c686ad8a0c89b9136bb4b31767257eb7b2597
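A hedged call sketch for the Sqoop import above; the connection id, table, paths, and options are placeholders, and the JDBC details are expected to live on the Airflow connection the hook reads.

from airflow.contrib.hooks.sqoop_hook import SqoopHook

hook = SqoopHook(conn_id='sqoop_default')
hook.import_table(
    table='orders',
    target_dir='/user/hive/warehouse/orders',
    file_type='parquet',
    split_by='order_id',
    where="order_date >= '2019-01-01'",
    extra_import_options={'fetch-size': '1000'},
)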
test
SqoopHook.import_query
Imports a specific query from the rdbms to hdfs :param query: Free format query to run :param target_dir: HDFS destination dir :param append: Append data to an existing dataset in HDFS :param file_type: "avro", "sequence", "text" or "parquet" Imports data to hdfs into the specified format. Defaults to text. :param split_by: Column of the table used to split work units :param direct: Use direct import fast path :param driver: Manually specify JDBC driver class to use :param extra_import_options: Extra import options to pass as dict. If a key doesn't have a value, just pass an empty string to it. Don't include prefix of -- for sqoop options.
airflow/contrib/hooks/sqoop_hook.py
def import_query(self, query, target_dir, append=False, file_type="text", split_by=None, direct=None, driver=None, extra_import_options=None): """ Imports a specific query from the rdbms to hdfs :param query: Free format query to run :param target_dir: HDFS destination dir :param append: Append data to an existing dataset in HDFS :param file_type: "avro", "sequence", "text" or "parquet" Imports data to hdfs into the specified format. Defaults to text. :param split_by: Column of the table used to split work units :param direct: Use direct import fast path :param driver: Manually specify JDBC driver class to use :param extra_import_options: Extra import options to pass as dict. If a key doesn't have a value, just pass an empty string to it. Don't include prefix of -- for sqoop options. """ cmd = self._import_cmd(target_dir, append, file_type, split_by, direct, driver, extra_import_options) cmd += ["--query", query] self.Popen(cmd)
def import_query(self, query, target_dir, append=False, file_type="text", split_by=None, direct=None, driver=None, extra_import_options=None): """ Imports a specific query from the rdbms to hdfs :param query: Free format query to run :param target_dir: HDFS destination dir :param append: Append data to an existing dataset in HDFS :param file_type: "avro", "sequence", "text" or "parquet" Imports data to hdfs into the specified format. Defaults to text. :param split_by: Column of the table used to split work units :param direct: Use direct import fast path :param driver: Manually specify JDBC driver class to use :param extra_import_options: Extra import options to pass as dict. If a key doesn't have a value, just pass an empty string to it. Don't include prefix of -- for sqoop options. """ cmd = self._import_cmd(target_dir, append, file_type, split_by, direct, driver, extra_import_options) cmd += ["--query", query] self.Popen(cmd)
[ "Imports", "a", "specific", "query", "from", "the", "rdbms", "to", "hdfs" ]
apache/airflow
python
https://github.com/apache/airflow/blob/b69c686ad8a0c89b9136bb4b31767257eb7b2597/airflow/contrib/hooks/sqoop_hook.py#L235-L256
[ "def", "import_query", "(", "self", ",", "query", ",", "target_dir", ",", "append", "=", "False", ",", "file_type", "=", "\"text\"", ",", "split_by", "=", "None", ",", "direct", "=", "None", ",", "driver", "=", "None", ",", "extra_import_options", "=", "None", ")", ":", "cmd", "=", "self", ".", "_import_cmd", "(", "target_dir", ",", "append", ",", "file_type", ",", "split_by", ",", "direct", ",", "driver", ",", "extra_import_options", ")", "cmd", "+=", "[", "\"--query\"", ",", "query", "]", "self", ".", "Popen", "(", "cmd", ")" ]
b69c686ad8a0c89b9136bb4b31767257eb7b2597
test
SqoopHook.export_table
Exports Hive table to remote location. Arguments are copies of direct sqoop command line Arguments :param table: Table remote destination :param export_dir: Hive table to export :param input_null_string: The string to be interpreted as null for string columns :param input_null_non_string: The string to be interpreted as null for non-string columns :param staging_table: The table in which data will be staged before being inserted into the destination table :param clear_staging_table: Indicate that any data present in the staging table can be deleted :param enclosed_by: Sets a required field enclosing character :param escaped_by: Sets the escape character :param input_fields_terminated_by: Sets the field separator character :param input_lines_terminated_by: Sets the end-of-line character :param input_optionally_enclosed_by: Sets a field enclosing character :param batch: Use batch mode for underlying statement execution :param relaxed_isolation: Transaction isolation to read uncommitted for the mappers :param extra_export_options: Extra export options to pass as dict. If a key doesn't have a value, just pass an empty string to it. Don't include prefix of -- for sqoop options.
airflow/contrib/hooks/sqoop_hook.py
def export_table(self, table, export_dir, input_null_string, input_null_non_string, staging_table, clear_staging_table, enclosed_by, escaped_by, input_fields_terminated_by, input_lines_terminated_by, input_optionally_enclosed_by, batch, relaxed_isolation, extra_export_options=None): """ Exports Hive table to remote location. Arguments are copies of direct sqoop command line Arguments :param table: Table remote destination :param export_dir: Hive table to export :param input_null_string: The string to be interpreted as null for string columns :param input_null_non_string: The string to be interpreted as null for non-string columns :param staging_table: The table in which data will be staged before being inserted into the destination table :param clear_staging_table: Indicate that any data present in the staging table can be deleted :param enclosed_by: Sets a required field enclosing character :param escaped_by: Sets the escape character :param input_fields_terminated_by: Sets the field separator character :param input_lines_terminated_by: Sets the end-of-line character :param input_optionally_enclosed_by: Sets a field enclosing character :param batch: Use batch mode for underlying statement execution :param relaxed_isolation: Transaction isolation to read uncommitted for the mappers :param extra_export_options: Extra export options to pass as dict. If a key doesn't have a value, just pass an empty string to it. Don't include prefix of -- for sqoop options. """ cmd = self._export_cmd(table, export_dir, input_null_string, input_null_non_string, staging_table, clear_staging_table, enclosed_by, escaped_by, input_fields_terminated_by, input_lines_terminated_by, input_optionally_enclosed_by, batch, relaxed_isolation, extra_export_options) self.Popen(cmd)
def export_table(self, table, export_dir, input_null_string, input_null_non_string, staging_table, clear_staging_table, enclosed_by, escaped_by, input_fields_terminated_by, input_lines_terminated_by, input_optionally_enclosed_by, batch, relaxed_isolation, extra_export_options=None): """ Exports Hive table to remote location. Arguments are copies of direct sqoop command line Arguments :param table: Table remote destination :param export_dir: Hive table to export :param input_null_string: The string to be interpreted as null for string columns :param input_null_non_string: The string to be interpreted as null for non-string columns :param staging_table: The table in which data will be staged before being inserted into the destination table :param clear_staging_table: Indicate that any data present in the staging table can be deleted :param enclosed_by: Sets a required field enclosing character :param escaped_by: Sets the escape character :param input_fields_terminated_by: Sets the field separator character :param input_lines_terminated_by: Sets the end-of-line character :param input_optionally_enclosed_by: Sets a field enclosing character :param batch: Use batch mode for underlying statement execution :param relaxed_isolation: Transaction isolation to read uncommitted for the mappers :param extra_export_options: Extra export options to pass as dict. If a key doesn't have a value, just pass an empty string to it. Don't include prefix of -- for sqoop options. """ cmd = self._export_cmd(table, export_dir, input_null_string, input_null_non_string, staging_table, clear_staging_table, enclosed_by, escaped_by, input_fields_terminated_by, input_lines_terminated_by, input_optionally_enclosed_by, batch, relaxed_isolation, extra_export_options) self.Popen(cmd)
[ "Exports", "Hive", "table", "to", "remote", "location", ".", "Arguments", "are", "copies", "of", "direct", "sqoop", "command", "line", "Arguments" ]
apache/airflow
python
https://github.com/apache/airflow/blob/b69c686ad8a0c89b9136bb4b31767257eb7b2597/airflow/contrib/hooks/sqoop_hook.py#L314-L355
[ "def", "export_table", "(", "self", ",", "table", ",", "export_dir", ",", "input_null_string", ",", "input_null_non_string", ",", "staging_table", ",", "clear_staging_table", ",", "enclosed_by", ",", "escaped_by", ",", "input_fields_terminated_by", ",", "input_lines_terminated_by", ",", "input_optionally_enclosed_by", ",", "batch", ",", "relaxed_isolation", ",", "extra_export_options", "=", "None", ")", ":", "cmd", "=", "self", ".", "_export_cmd", "(", "table", ",", "export_dir", ",", "input_null_string", ",", "input_null_non_string", ",", "staging_table", ",", "clear_staging_table", ",", "enclosed_by", ",", "escaped_by", ",", "input_fields_terminated_by", ",", "input_lines_terminated_by", ",", "input_optionally_enclosed_by", ",", "batch", ",", "relaxed_isolation", ",", "extra_export_options", ")", "self", ".", "Popen", "(", "cmd", ")" ]
b69c686ad8a0c89b9136bb4b31767257eb7b2597
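For the export direction, a hedged sketch in which every value is a placeholder; this signature takes all of its arguments explicitly, so they are spelled out keyword by keyword.

from airflow.contrib.hooks.sqoop_hook import SqoopHook

hook = SqoopHook(conn_id='sqoop_default')
hook.export_table(
    table='orders',
    export_dir='/user/hive/warehouse/orders',
    input_null_string='\\N',
    input_null_non_string='\\N',
    staging_table='orders_staging',
    clear_staging_table=True,
    enclosed_by='"',
    escaped_by='\\',
    input_fields_terminated_by=',',
    input_lines_terminated_by='\\n',
    input_optionally_enclosed_by='"',
    batch=True,
    relaxed_isolation=False,
)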
test
GCPTextToSpeechHook.get_conn
Retrieves connection to Cloud Text to Speech. :return: Google Cloud Text to Speech client object. :rtype: google.cloud.texttospeech_v1.TextToSpeechClient
airflow/contrib/hooks/gcp_text_to_speech_hook.py
def get_conn(self): """ Retrieves connection to Cloud Text to Speech. :return: Google Cloud Text to Speech client object. :rtype: google.cloud.texttospeech_v1.TextToSpeechClient """ if not self._client: self._client = TextToSpeechClient(credentials=self._get_credentials()) return self._client
def get_conn(self): """ Retrieves connection to Cloud Text to Speech. :return: Google Cloud Text to Speech client object. :rtype: google.cloud.texttospeech_v1.TextToSpeechClient """ if not self._client: self._client = TextToSpeechClient(credentials=self._get_credentials()) return self._client
[ "Retrieves", "connection", "to", "Cloud", "Text", "to", "Speech", "." ]
apache/airflow
python
https://github.com/apache/airflow/blob/b69c686ad8a0c89b9136bb4b31767257eb7b2597/airflow/contrib/hooks/gcp_text_to_speech_hook.py#L42-L51
[ "def", "get_conn", "(", "self", ")", ":", "if", "not", "self", ".", "_client", ":", "self", ".", "_client", "=", "TextToSpeechClient", "(", "credentials", "=", "self", ".", "_get_credentials", "(", ")", ")", "return", "self", ".", "_client" ]
b69c686ad8a0c89b9136bb4b31767257eb7b2597
test
GCPTextToSpeechHook.synthesize_speech
Synthesizes text input :param input_data: text input to be synthesized. See more: https://googleapis.github.io/google-cloud-python/latest/texttospeech/gapic/v1/types.html#google.cloud.texttospeech_v1.types.SynthesisInput :type input_data: dict or google.cloud.texttospeech_v1.types.SynthesisInput :param voice: configuration of voice to be used in synthesis. See more: https://googleapis.github.io/google-cloud-python/latest/texttospeech/gapic/v1/types.html#google.cloud.texttospeech_v1.types.VoiceSelectionParams :type voice: dict or google.cloud.texttospeech_v1.types.VoiceSelectionParams :param audio_config: configuration of the synthesized audio. See more: https://googleapis.github.io/google-cloud-python/latest/texttospeech/gapic/v1/types.html#google.cloud.texttospeech_v1.types.AudioConfig :type audio_config: dict or google.cloud.texttospeech_v1.types.AudioConfig :return: SynthesizeSpeechResponse See more: https://googleapis.github.io/google-cloud-python/latest/texttospeech/gapic/v1/types.html#google.cloud.texttospeech_v1.types.SynthesizeSpeechResponse :rtype: object :param retry: (Optional) A retry object used to retry requests. If None is specified, requests will not be retried. :type retry: google.api_core.retry.Retry :param timeout: (Optional) The amount of time, in seconds, to wait for the request to complete. Note that if retry is specified, the timeout applies to each individual attempt. :type timeout: float
airflow/contrib/hooks/gcp_text_to_speech_hook.py
def synthesize_speech(self, input_data, voice, audio_config, retry=None, timeout=None): """ Synthesizes text input :param input_data: text input to be synthesized. See more: https://googleapis.github.io/google-cloud-python/latest/texttospeech/gapic/v1/types.html#google.cloud.texttospeech_v1.types.SynthesisInput :type input_data: dict or google.cloud.texttospeech_v1.types.SynthesisInput :param voice: configuration of voice to be used in synthesis. See more: https://googleapis.github.io/google-cloud-python/latest/texttospeech/gapic/v1/types.html#google.cloud.texttospeech_v1.types.VoiceSelectionParams :type voice: dict or google.cloud.texttospeech_v1.types.VoiceSelectionParams :param audio_config: configuration of the synthesized audio. See more: https://googleapis.github.io/google-cloud-python/latest/texttospeech/gapic/v1/types.html#google.cloud.texttospeech_v1.types.AudioConfig :type audio_config: dict or google.cloud.texttospeech_v1.types.AudioConfig :return: SynthesizeSpeechResponse See more: https://googleapis.github.io/google-cloud-python/latest/texttospeech/gapic/v1/types.html#google.cloud.texttospeech_v1.types.SynthesizeSpeechResponse :rtype: object :param retry: (Optional) A retry object used to retry requests. If None is specified, requests will not be retried. :type retry: google.api_core.retry.Retry :param timeout: (Optional) The amount of time, in seconds, to wait for the request to complete. Note that if retry is specified, the timeout applies to each individual attempt. :type timeout: float """ client = self.get_conn() self.log.info("Synthesizing input: %s" % input_data) return client.synthesize_speech( input_=input_data, voice=voice, audio_config=audio_config, retry=retry, timeout=timeout )
def synthesize_speech(self, input_data, voice, audio_config, retry=None, timeout=None): """ Synthesizes text input :param input_data: text input to be synthesized. See more: https://googleapis.github.io/google-cloud-python/latest/texttospeech/gapic/v1/types.html#google.cloud.texttospeech_v1.types.SynthesisInput :type input_data: dict or google.cloud.texttospeech_v1.types.SynthesisInput :param voice: configuration of voice to be used in synthesis. See more: https://googleapis.github.io/google-cloud-python/latest/texttospeech/gapic/v1/types.html#google.cloud.texttospeech_v1.types.VoiceSelectionParams :type voice: dict or google.cloud.texttospeech_v1.types.VoiceSelectionParams :param audio_config: configuration of the synthesized audio. See more: https://googleapis.github.io/google-cloud-python/latest/texttospeech/gapic/v1/types.html#google.cloud.texttospeech_v1.types.AudioConfig :type audio_config: dict or google.cloud.texttospeech_v1.types.AudioConfig :return: SynthesizeSpeechResponse See more: https://googleapis.github.io/google-cloud-python/latest/texttospeech/gapic/v1/types.html#google.cloud.texttospeech_v1.types.SynthesizeSpeechResponse :rtype: object :param retry: (Optional) A retry object used to retry requests. If None is specified, requests will not be retried. :type retry: google.api_core.retry.Retry :param timeout: (Optional) The amount of time, in seconds, to wait for the request to complete. Note that if retry is specified, the timeout applies to each individual attempt. :type timeout: float """ client = self.get_conn() self.log.info("Synthesizing input: %s" % input_data) return client.synthesize_speech( input_=input_data, voice=voice, audio_config=audio_config, retry=retry, timeout=timeout )
[ "Synthesizes", "text", "input" ]
apache/airflow
python
https://github.com/apache/airflow/blob/b69c686ad8a0c89b9136bb4b31767257eb7b2597/airflow/contrib/hooks/gcp_text_to_speech_hook.py#L53-L80
[ "def", "synthesize_speech", "(", "self", ",", "input_data", ",", "voice", ",", "audio_config", ",", "retry", "=", "None", ",", "timeout", "=", "None", ")", ":", "client", "=", "self", ".", "get_conn", "(", ")", "self", ".", "log", ".", "info", "(", "\"Synthesizing input: %s\"", "%", "input_data", ")", "return", "client", ".", "synthesize_speech", "(", "input_", "=", "input_data", ",", "voice", "=", "voice", ",", "audio_config", "=", "audio_config", ",", "retry", "=", "retry", ",", "timeout", "=", "timeout", ")" ]
b69c686ad8a0c89b9136bb4b31767257eb7b2597
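A hedged usage sketch for the text-to-speech call above; the dict arguments follow the request types linked in the docstring, and every value here is a placeholder.

from airflow.contrib.hooks.gcp_text_to_speech_hook import GCPTextToSpeechHook

hook = GCPTextToSpeechHook(gcp_conn_id='google_cloud_default')
response = hook.synthesize_speech(
    input_data={'text': 'The DAG has finished'},
    voice={'language_code': 'en-US', 'ssml_gender': 'FEMALE'},
    audio_config={'audio_encoding': 'LINEAR16'},
)
# The raw audio bytes live on the response object.
audio_bytes = response.audio_content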
test
S3TaskHandler.close
Close and upload local log file to remote storage S3.
airflow/utils/log/s3_task_handler.py
def close(self): """ Close and upload local log file to remote storage S3. """ # When application exit, system shuts down all handlers by # calling close method. Here we check if logger is already # closed to prevent uploading the log to remote storage multiple # times when `logging.shutdown` is called. if self.closed: return super().close() if not self.upload_on_close: return local_loc = os.path.join(self.local_base, self.log_relative_path) remote_loc = os.path.join(self.remote_base, self.log_relative_path) if os.path.exists(local_loc): # read log and remove old logs to get just the latest additions with open(local_loc, 'r') as logfile: log = logfile.read() self.s3_write(log, remote_loc) # Mark closed so we don't double write if close is called twice self.closed = True
def close(self): """ Close and upload local log file to remote storage S3. """ # When application exit, system shuts down all handlers by # calling close method. Here we check if logger is already # closed to prevent uploading the log to remote storage multiple # times when `logging.shutdown` is called. if self.closed: return super().close() if not self.upload_on_close: return local_loc = os.path.join(self.local_base, self.log_relative_path) remote_loc = os.path.join(self.remote_base, self.log_relative_path) if os.path.exists(local_loc): # read log and remove old logs to get just the latest additions with open(local_loc, 'r') as logfile: log = logfile.read() self.s3_write(log, remote_loc) # Mark closed so we don't double write if close is called twice self.closed = True
[ "Close", "and", "upload", "local", "log", "file", "to", "remote", "storage", "S3", "." ]
apache/airflow
python
https://github.com/apache/airflow/blob/b69c686ad8a0c89b9136bb4b31767257eb7b2597/airflow/utils/log/s3_task_handler.py#L62-L87
[ "def", "close", "(", "self", ")", ":", "# When application exit, system shuts down all handlers by", "# calling close method. Here we check if logger is already", "# closed to prevent uploading the log to remote storage multiple", "# times when `logging.shutdown` is called.", "if", "self", ".", "closed", ":", "return", "super", "(", ")", ".", "close", "(", ")", "if", "not", "self", ".", "upload_on_close", ":", "return", "local_loc", "=", "os", ".", "path", ".", "join", "(", "self", ".", "local_base", ",", "self", ".", "log_relative_path", ")", "remote_loc", "=", "os", ".", "path", ".", "join", "(", "self", ".", "remote_base", ",", "self", ".", "log_relative_path", ")", "if", "os", ".", "path", ".", "exists", "(", "local_loc", ")", ":", "# read log and remove old logs to get just the latest additions", "with", "open", "(", "local_loc", ",", "'r'", ")", "as", "logfile", ":", "log", "=", "logfile", ".", "read", "(", ")", "self", ".", "s3_write", "(", "log", ",", "remote_loc", ")", "# Mark closed so we don't double write if close is called twice", "self", ".", "closed", "=", "True" ]
b69c686ad8a0c89b9136bb4b31767257eb7b2597
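The S3 task handler above is normally wired in through Airflow's remote-logging settings rather than instantiated by hand; the sketch below constructs one directly only to show the pieces involved, with the paths, bucket, and filename template as assumed placeholder values.

from airflow.utils.log.s3_task_handler import S3TaskHandler

handler = S3TaskHandler(
    base_log_folder='/usr/local/airflow/logs',
    s3_log_folder='s3://example-bucket/airflow/logs',
    filename_template='{{ ti.dag_id }}/{{ ti.task_id }}/{{ ts }}/{{ try_number }}.log',
)
# In a real deployment this handler is activated via the logging configuration and the
# remote logging options, and close() then uploads the local log file to S3 as shown above.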