partition
stringclasses
3 values
func_name
stringlengths
1
134
docstring
stringlengths
1
46.9k
path
stringlengths
4
223
original_string
stringlengths
75
104k
code
stringlengths
75
104k
docstring_tokens
listlengths
1
1.97k
repo
stringlengths
7
55
language
stringclasses
1 value
url
stringlengths
87
315
code_tokens
listlengths
19
28.4k
sha
stringlengths
40
40
test
Result.asdict
r"""Convert Result to dict. Parameters: rawkey(bool): * True: dict key is Descriptor instance * False: dict key is str Returns: dict
mordred/_base/result.py
def asdict(self, rawkey=False): r"""Convert Result to dict. Parameters: rawkey(bool): * True: dict key is Descriptor instance * False: dict key is str Returns: dict """ if rawkey: return dict(self.items()) else: return { str(k): v for k, v in self.items() }
def asdict(self, rawkey=False): r"""Convert Result to dict. Parameters: rawkey(bool): * True: dict key is Descriptor instance * False: dict key is str Returns: dict """ if rawkey: return dict(self.items()) else: return { str(k): v for k, v in self.items() }
[ "r", "Convert", "Result", "to", "dict", "." ]
mordred-descriptor/mordred
python
https://github.com/mordred-descriptor/mordred/blob/2848b088fd7b6735590242b5e22573babc724f10/mordred/_base/result.py#L97-L115
[ "def", "asdict", "(", "self", ",", "rawkey", "=", "False", ")", ":", "if", "rawkey", ":", "return", "dict", "(", "self", ".", "items", "(", ")", ")", "else", ":", "return", "{", "str", "(", "k", ")", ":", "v", "for", "k", ",", "v", "in", "self", ".", "items", "(", ")", "}" ]
2848b088fd7b6735590242b5e22573babc724f10
test
Result.name
r"""Access descriptor value by descriptor name or instance. >>> from mordred import Calculator, descriptors >>> from rdkit import Chem >>> result = Calculator(descriptors)(Chem.MolFromSmiles("C1CCCCC1")) >>> result.name["C2SP3"] 6
mordred/_base/result.py
def name(self): r"""Access descriptor value by descriptor name or instance. >>> from mordred import Calculator, descriptors >>> from rdkit import Chem >>> result = Calculator(descriptors)(Chem.MolFromSmiles("C1CCCCC1")) >>> result.name["C2SP3"] 6 """ if self._name_to_value is None: self._name_to_value = {str(d): v for d, v in zip(self._descriptors, self._values)} return GetValueByName(self._name_to_value)
def name(self): r"""Access descriptor value by descriptor name or instance. >>> from mordred import Calculator, descriptors >>> from rdkit import Chem >>> result = Calculator(descriptors)(Chem.MolFromSmiles("C1CCCCC1")) >>> result.name["C2SP3"] 6 """ if self._name_to_value is None: self._name_to_value = {str(d): v for d, v in zip(self._descriptors, self._values)} return GetValueByName(self._name_to_value)
[ "r", "Access", "descriptor", "value", "by", "descriptor", "name", "or", "instance", "." ]
mordred-descriptor/mordred
python
https://github.com/mordred-descriptor/mordred/blob/2848b088fd7b6735590242b5e22573babc724f10/mordred/_base/result.py#L130-L143
[ "def", "name", "(", "self", ")", ":", "if", "self", ".", "_name_to_value", "is", "None", ":", "self", ".", "_name_to_value", "=", "{", "str", "(", "d", ")", ":", "v", "for", "d", ",", "v", "in", "zip", "(", "self", ".", "_descriptors", ",", "self", ".", "_values", ")", "}", "return", "GetValueByName", "(", "self", ".", "_name_to_value", ")" ]
2848b088fd7b6735590242b5e22573babc724f10
test
log_calls
Decorator to log function calls.
s4cmd.py
def log_calls(func): '''Decorator to log function calls.''' def wrapper(*args, **kargs): callStr = "%s(%s)" % (func.__name__, ", ".join([repr(p) for p in args] + ["%s=%s" % (k, repr(v)) for (k, v) in list(kargs.items())])) debug(">> %s", callStr) ret = func(*args, **kargs) debug("<< %s: %s", callStr, repr(ret)) return ret return wrapper
def log_calls(func): '''Decorator to log function calls.''' def wrapper(*args, **kargs): callStr = "%s(%s)" % (func.__name__, ", ".join([repr(p) for p in args] + ["%s=%s" % (k, repr(v)) for (k, v) in list(kargs.items())])) debug(">> %s", callStr) ret = func(*args, **kargs) debug("<< %s: %s", callStr, repr(ret)) return ret return wrapper
[ "Decorator", "to", "log", "function", "calls", "." ]
bloomreach/s4cmd
python
https://github.com/bloomreach/s4cmd/blob/bb51075bf43703e7cd95aa39288cf7732ec13a6d/s4cmd.py#L124-L132
[ "def", "log_calls", "(", "func", ")", ":", "def", "wrapper", "(", "*", "args", ",", "*", "*", "kargs", ")", ":", "callStr", "=", "\"%s(%s)\"", "%", "(", "func", ".", "__name__", ",", "\", \"", ".", "join", "(", "[", "repr", "(", "p", ")", "for", "p", "in", "args", "]", "+", "[", "\"%s=%s\"", "%", "(", "k", ",", "repr", "(", "v", ")", ")", "for", "(", "k", ",", "v", ")", "in", "list", "(", "kargs", ".", "items", "(", ")", ")", "]", ")", ")", "debug", "(", "\">> %s\"", ",", "callStr", ")", "ret", "=", "func", "(", "*", "args", ",", "*", "*", "kargs", ")", "debug", "(", "\"<< %s: %s\"", ",", "callStr", ",", "repr", "(", "ret", ")", ")", "return", "ret", "return", "wrapper" ]
bb51075bf43703e7cd95aa39288cf7732ec13a6d
test
synchronized
Decorator to synchronize function.
s4cmd.py
def synchronized(func): '''Decorator to synchronize function.''' func.__lock__ = threading.Lock() def synced_func(*args, **kargs): with func.__lock__: return func(*args, **kargs) return synced_func
def synchronized(func): '''Decorator to synchronize function.''' func.__lock__ = threading.Lock() def synced_func(*args, **kargs): with func.__lock__: return func(*args, **kargs) return synced_func
[ "Decorator", "to", "synchronize", "function", "." ]
bloomreach/s4cmd
python
https://github.com/bloomreach/s4cmd/blob/bb51075bf43703e7cd95aa39288cf7732ec13a6d/s4cmd.py#L138-L144
[ "def", "synchronized", "(", "func", ")", ":", "func", ".", "__lock__", "=", "threading", ".", "Lock", "(", ")", "def", "synced_func", "(", "*", "args", ",", "*", "*", "kargs", ")", ":", "with", "func", ".", "__lock__", ":", "return", "func", "(", "*", "args", ",", "*", "*", "kargs", ")", "return", "synced_func" ]
bb51075bf43703e7cd95aa39288cf7732ec13a6d
test
progress
Show current progress message to stderr. This function will remember the previous message so that next time, it will clear the previous message before showing next one.
s4cmd.py
def progress(msg, *args): '''Show current progress message to stderr. This function will remember the previous message so that next time, it will clear the previous message before showing next one. ''' # Don't show any progress if the output is directed to a file. if not (sys.stdout.isatty() and sys.stderr.isatty()): return text = (msg % args) if progress.prev_message: sys.stderr.write(' ' * len(progress.prev_message) + '\r') sys.stderr.write(text + '\r') progress.prev_message = text
def progress(msg, *args): '''Show current progress message to stderr. This function will remember the previous message so that next time, it will clear the previous message before showing next one. ''' # Don't show any progress if the output is directed to a file. if not (sys.stdout.isatty() and sys.stderr.isatty()): return text = (msg % args) if progress.prev_message: sys.stderr.write(' ' * len(progress.prev_message) + '\r') sys.stderr.write(text + '\r') progress.prev_message = text
[ "Show", "current", "progress", "message", "to", "stderr", ".", "This", "function", "will", "remember", "the", "previous", "message", "so", "that", "next", "time", "it", "will", "clear", "the", "previous", "message", "before", "showing", "next", "one", "." ]
bloomreach/s4cmd
python
https://github.com/bloomreach/s4cmd/blob/bb51075bf43703e7cd95aa39288cf7732ec13a6d/s4cmd.py#L151-L164
[ "def", "progress", "(", "msg", ",", "*", "args", ")", ":", "# Don't show any progress if the output is directed to a file.", "if", "not", "(", "sys", ".", "stdout", ".", "isatty", "(", ")", "and", "sys", ".", "stderr", ".", "isatty", "(", ")", ")", ":", "return", "text", "=", "(", "msg", "%", "args", ")", "if", "progress", ".", "prev_message", ":", "sys", ".", "stderr", ".", "write", "(", "' '", "*", "len", "(", "progress", ".", "prev_message", ")", "+", "'\\r'", ")", "sys", ".", "stderr", ".", "write", "(", "text", "+", "'\\r'", ")", "progress", ".", "prev_message", "=", "text" ]
bb51075bf43703e7cd95aa39288cf7732ec13a6d
test
message
Program message output.
s4cmd.py
def message(msg, *args): '''Program message output.''' clear_progress() text = (msg % args) sys.stdout.write(text + '\n')
def message(msg, *args): '''Program message output.''' clear_progress() text = (msg % args) sys.stdout.write(text + '\n')
[ "Program", "message", "output", "." ]
bloomreach/s4cmd
python
https://github.com/bloomreach/s4cmd/blob/bb51075bf43703e7cd95aa39288cf7732ec13a6d/s4cmd.py#L169-L173
[ "def", "message", "(", "msg", ",", "*", "args", ")", ":", "clear_progress", "(", ")", "text", "=", "(", "msg", "%", "args", ")", "sys", ".", "stdout", ".", "write", "(", "text", "+", "'\\n'", ")" ]
bb51075bf43703e7cd95aa39288cf7732ec13a6d
test
fail
Utility function to handle runtime failures gracefully. Show concise information if possible, then terminate program.
s4cmd.py
def fail(message, exc_info=None, status=1, stacktrace=False): '''Utility function to handle runtime failures gracefully. Show concise information if possible, then terminate program. ''' text = message if exc_info: text += str(exc_info) error(text) if stacktrace: error(traceback.format_exc()) clean_tempfiles() if __name__ == '__main__': sys.exit(status) else: raise RuntimeError(status)
def fail(message, exc_info=None, status=1, stacktrace=False): '''Utility function to handle runtime failures gracefully. Show concise information if possible, then terminate program. ''' text = message if exc_info: text += str(exc_info) error(text) if stacktrace: error(traceback.format_exc()) clean_tempfiles() if __name__ == '__main__': sys.exit(status) else: raise RuntimeError(status)
[ "Utility", "function", "to", "handle", "runtime", "failures", "gracefully", ".", "Show", "concise", "information", "if", "possible", "then", "terminate", "program", "." ]
bloomreach/s4cmd
python
https://github.com/bloomreach/s4cmd/blob/bb51075bf43703e7cd95aa39288cf7732ec13a6d/s4cmd.py#L175-L189
[ "def", "fail", "(", "message", ",", "exc_info", "=", "None", ",", "status", "=", "1", ",", "stacktrace", "=", "False", ")", ":", "text", "=", "message", "if", "exc_info", ":", "text", "+=", "str", "(", "exc_info", ")", "error", "(", "text", ")", "if", "stacktrace", ":", "error", "(", "traceback", ".", "format_exc", "(", ")", ")", "clean_tempfiles", "(", ")", "if", "__name__", "==", "'__main__'", ":", "sys", ".", "exit", "(", "status", ")", "else", ":", "raise", "RuntimeError", "(", "status", ")" ]
bb51075bf43703e7cd95aa39288cf7732ec13a6d
test
tempfile_get
Get a temp filename for atomic download.
s4cmd.py
def tempfile_get(target): '''Get a temp filename for atomic download.''' fn = '%s-%s.tmp' % (target, ''.join(random.Random().sample("0123456789abcdefghijklmnopqrstuvwxyz", 15))) TEMP_FILES.add(fn) return fn
def tempfile_get(target): '''Get a temp filename for atomic download.''' fn = '%s-%s.tmp' % (target, ''.join(random.Random().sample("0123456789abcdefghijklmnopqrstuvwxyz", 15))) TEMP_FILES.add(fn) return fn
[ "Get", "a", "temp", "filename", "for", "atomic", "download", "." ]
bloomreach/s4cmd
python
https://github.com/bloomreach/s4cmd/blob/bb51075bf43703e7cd95aa39288cf7732ec13a6d/s4cmd.py#L192-L196
[ "def", "tempfile_get", "(", "target", ")", ":", "fn", "=", "'%s-%s.tmp'", "%", "(", "target", ",", "''", ".", "join", "(", "random", ".", "Random", "(", ")", ".", "sample", "(", "\"0123456789abcdefghijklmnopqrstuvwxyz\"", ",", "15", ")", ")", ")", "TEMP_FILES", ".", "add", "(", "fn", ")", "return", "fn" ]
bb51075bf43703e7cd95aa39288cf7732ec13a6d
test
tempfile_set
Atomically rename and clean tempfile
s4cmd.py
def tempfile_set(tempfile, target): '''Atomically rename and clean tempfile''' if target: os.rename(tempfile, target) else: os.unlink(tempfile) if target in TEMP_FILES: TEMP_FILES.remove(tempfile)
def tempfile_set(tempfile, target): '''Atomically rename and clean tempfile''' if target: os.rename(tempfile, target) else: os.unlink(tempfile) if target in TEMP_FILES: TEMP_FILES.remove(tempfile)
[ "Atomically", "rename", "and", "clean", "tempfile" ]
bloomreach/s4cmd
python
https://github.com/bloomreach/s4cmd/blob/bb51075bf43703e7cd95aa39288cf7732ec13a6d/s4cmd.py#L199-L207
[ "def", "tempfile_set", "(", "tempfile", ",", "target", ")", ":", "if", "target", ":", "os", ".", "rename", "(", "tempfile", ",", "target", ")", "else", ":", "os", ".", "unlink", "(", "tempfile", ")", "if", "target", "in", "TEMP_FILES", ":", "TEMP_FILES", ".", "remove", "(", "tempfile", ")" ]
bb51075bf43703e7cd95aa39288cf7732ec13a6d
test
clean_tempfiles
Clean up temp files
s4cmd.py
def clean_tempfiles(): '''Clean up temp files''' for fn in TEMP_FILES: if os.path.exists(fn): os.unlink(fn)
def clean_tempfiles(): '''Clean up temp files''' for fn in TEMP_FILES: if os.path.exists(fn): os.unlink(fn)
[ "Clean", "up", "temp", "files" ]
bloomreach/s4cmd
python
https://github.com/bloomreach/s4cmd/blob/bb51075bf43703e7cd95aa39288cf7732ec13a6d/s4cmd.py#L209-L213
[ "def", "clean_tempfiles", "(", ")", ":", "for", "fn", "in", "TEMP_FILES", ":", "if", "os", ".", "path", ".", "exists", "(", "fn", ")", ":", "os", ".", "unlink", "(", "fn", ")" ]
bb51075bf43703e7cd95aa39288cf7732ec13a6d
test
S4cmdLoggingClass.get_loggers
Return a list of the logger methods: (debug, info, warn, error)
s4cmd.py
def get_loggers(self): '''Return a list of the logger methods: (debug, info, warn, error)''' return self.log.debug, self.log.info, self.log.warn, self.log.error
def get_loggers(self): '''Return a list of the logger methods: (debug, info, warn, error)''' return self.log.debug, self.log.info, self.log.warn, self.log.error
[ "Return", "a", "list", "of", "the", "logger", "methods", ":", "(", "debug", "info", "warn", "error", ")" ]
bloomreach/s4cmd
python
https://github.com/bloomreach/s4cmd/blob/bb51075bf43703e7cd95aa39288cf7732ec13a6d/s4cmd.py#L111-L114
[ "def", "get_loggers", "(", "self", ")", ":", "return", "self", ".", "log", ".", "debug", ",", "self", ".", "log", ".", "info", ",", "self", ".", "log", ".", "warn", ",", "self", ".", "log", ".", "error" ]
bb51075bf43703e7cd95aa39288cf7732ec13a6d
test
S3URL.get_fixed_path
Get the fixed part of the path without wildcard
s4cmd.py
def get_fixed_path(self): '''Get the fixed part of the path without wildcard''' pi = self.path.split(PATH_SEP) fi = [] for p in pi: if '*' in p or '?' in p: break fi.append(p) return PATH_SEP.join(fi)
def get_fixed_path(self): '''Get the fixed part of the path without wildcard''' pi = self.path.split(PATH_SEP) fi = [] for p in pi: if '*' in p or '?' in p: break fi.append(p) return PATH_SEP.join(fi)
[ "Get", "the", "fixed", "part", "of", "the", "path", "without", "wildcard" ]
bloomreach/s4cmd
python
https://github.com/bloomreach/s4cmd/blob/bb51075bf43703e7cd95aa39288cf7732ec13a6d/s4cmd.py#L233-L241
[ "def", "get_fixed_path", "(", "self", ")", ":", "pi", "=", "self", ".", "path", ".", "split", "(", "PATH_SEP", ")", "fi", "=", "[", "]", "for", "p", "in", "pi", ":", "if", "'*'", "in", "p", "or", "'?'", "in", "p", ":", "break", "fi", ".", "append", "(", "p", ")", "return", "PATH_SEP", ".", "join", "(", "fi", ")" ]
bb51075bf43703e7cd95aa39288cf7732ec13a6d
test
BotoClient.get_legal_params
Given a API name, list all legal parameters using boto3 service model.
s4cmd.py
def get_legal_params(self, method): '''Given a API name, list all legal parameters using boto3 service model.''' if method not in self.client.meta.method_to_api_mapping: # Injected methods. Ignore. return [] api = self.client.meta.method_to_api_mapping[method] shape = self.client.meta.service_model.operation_model(api).input_shape if shape is None: # No params needed for this API. return [] return shape.members.keys()
def get_legal_params(self, method): '''Given a API name, list all legal parameters using boto3 service model.''' if method not in self.client.meta.method_to_api_mapping: # Injected methods. Ignore. return [] api = self.client.meta.method_to_api_mapping[method] shape = self.client.meta.service_model.operation_model(api).input_shape if shape is None: # No params needed for this API. return [] return shape.members.keys()
[ "Given", "a", "API", "name", "list", "all", "legal", "parameters", "using", "boto3", "service", "model", "." ]
bloomreach/s4cmd
python
https://github.com/bloomreach/s4cmd/blob/bb51075bf43703e7cd95aa39288cf7732ec13a6d/s4cmd.py#L410-L420
[ "def", "get_legal_params", "(", "self", ",", "method", ")", ":", "if", "method", "not", "in", "self", ".", "client", ".", "meta", ".", "method_to_api_mapping", ":", "# Injected methods. Ignore.", "return", "[", "]", "api", "=", "self", ".", "client", ".", "meta", ".", "method_to_api_mapping", "[", "method", "]", "shape", "=", "self", ".", "client", ".", "meta", ".", "service_model", ".", "operation_model", "(", "api", ")", ".", "input_shape", "if", "shape", "is", "None", ":", "# No params needed for this API.", "return", "[", "]", "return", "shape", ".", "members", ".", "keys", "(", ")" ]
bb51075bf43703e7cd95aa39288cf7732ec13a6d
test
BotoClient.merge_opt_params
Combine existing parameters with extra options supplied from command line options. Carefully merge special type of parameter if needed.
s4cmd.py
def merge_opt_params(self, method, kargs): '''Combine existing parameters with extra options supplied from command line options. Carefully merge special type of parameter if needed. ''' for key in self.legal_params[method]: if not hasattr(self.opt, key) or getattr(self.opt, key) is None: continue if key in kargs and type(kargs[key]) == dict: assert(type(getattr(self.opt, key)) == dict) # Merge two dictionaries. for k, v in getattr(self.opt, key).iteritems(): kargs[key][k] = v else: # Overwrite values. kargs[key] = getattr(self.opt, key) return kargs
def merge_opt_params(self, method, kargs): '''Combine existing parameters with extra options supplied from command line options. Carefully merge special type of parameter if needed. ''' for key in self.legal_params[method]: if not hasattr(self.opt, key) or getattr(self.opt, key) is None: continue if key in kargs and type(kargs[key]) == dict: assert(type(getattr(self.opt, key)) == dict) # Merge two dictionaries. for k, v in getattr(self.opt, key).iteritems(): kargs[key][k] = v else: # Overwrite values. kargs[key] = getattr(self.opt, key) return kargs
[ "Combine", "existing", "parameters", "with", "extra", "options", "supplied", "from", "command", "line", "options", ".", "Carefully", "merge", "special", "type", "of", "parameter", "if", "needed", "." ]
bloomreach/s4cmd
python
https://github.com/bloomreach/s4cmd/blob/bb51075bf43703e7cd95aa39288cf7732ec13a6d/s4cmd.py#L422-L438
[ "def", "merge_opt_params", "(", "self", ",", "method", ",", "kargs", ")", ":", "for", "key", "in", "self", ".", "legal_params", "[", "method", "]", ":", "if", "not", "hasattr", "(", "self", ".", "opt", ",", "key", ")", "or", "getattr", "(", "self", ".", "opt", ",", "key", ")", "is", "None", ":", "continue", "if", "key", "in", "kargs", "and", "type", "(", "kargs", "[", "key", "]", ")", "==", "dict", ":", "assert", "(", "type", "(", "getattr", "(", "self", ".", "opt", ",", "key", ")", ")", "==", "dict", ")", "# Merge two dictionaries.", "for", "k", ",", "v", "in", "getattr", "(", "self", ".", "opt", ",", "key", ")", ".", "iteritems", "(", ")", ":", "kargs", "[", "key", "]", "[", "k", "]", "=", "v", "else", ":", "# Overwrite values.", "kargs", "[", "key", "]", "=", "getattr", "(", "self", ".", "opt", ",", "key", ")", "return", "kargs" ]
bb51075bf43703e7cd95aa39288cf7732ec13a6d
test
BotoClient.add_options
Add the whole list of API parameters into optparse.
s4cmd.py
def add_options(parser): '''Add the whole list of API parameters into optparse.''' for param, param_type, param_doc in BotoClient.EXTRA_CLIENT_PARAMS: parser.add_option('--API-' + param, help=param_doc, type=param_type, dest=param)
def add_options(parser): '''Add the whole list of API parameters into optparse.''' for param, param_type, param_doc in BotoClient.EXTRA_CLIENT_PARAMS: parser.add_option('--API-' + param, help=param_doc, type=param_type, dest=param)
[ "Add", "the", "whole", "list", "of", "API", "parameters", "into", "optparse", "." ]
bloomreach/s4cmd
python
https://github.com/bloomreach/s4cmd/blob/bb51075bf43703e7cd95aa39288cf7732ec13a6d/s4cmd.py#L441-L444
[ "def", "add_options", "(", "parser", ")", ":", "for", "param", ",", "param_type", ",", "param_doc", "in", "BotoClient", ".", "EXTRA_CLIENT_PARAMS", ":", "parser", ".", "add_option", "(", "'--API-'", "+", "param", ",", "help", "=", "param_doc", ",", "type", "=", "param_type", ",", "dest", "=", "param", ")" ]
bb51075bf43703e7cd95aa39288cf7732ec13a6d
test
TaskQueue.join
Override original join() with a timeout and handle keyboard interrupt.
s4cmd.py
def join(self): '''Override original join() with a timeout and handle keyboard interrupt.''' self.all_tasks_done.acquire() try: while self.unfinished_tasks: self.all_tasks_done.wait(1000) # Child thread has exceptions, fail main thread too. if self.exc_info: fail('[Thread Failure] ', exc_info=self.exc_info) except KeyboardInterrupt: raise Failure('Interrupted by user') finally: self.all_tasks_done.release()
def join(self): '''Override original join() with a timeout and handle keyboard interrupt.''' self.all_tasks_done.acquire() try: while self.unfinished_tasks: self.all_tasks_done.wait(1000) # Child thread has exceptions, fail main thread too. if self.exc_info: fail('[Thread Failure] ', exc_info=self.exc_info) except KeyboardInterrupt: raise Failure('Interrupted by user') finally: self.all_tasks_done.release()
[ "Override", "original", "join", "()", "with", "a", "timeout", "and", "handle", "keyboard", "interrupt", "." ]
bloomreach/s4cmd
python
https://github.com/bloomreach/s4cmd/blob/bb51075bf43703e7cd95aa39288cf7732ec13a6d/s4cmd.py#L460-L473
[ "def", "join", "(", "self", ")", ":", "self", ".", "all_tasks_done", ".", "acquire", "(", ")", "try", ":", "while", "self", ".", "unfinished_tasks", ":", "self", ".", "all_tasks_done", ".", "wait", "(", "1000", ")", "# Child thread has exceptions, fail main thread too.", "if", "self", ".", "exc_info", ":", "fail", "(", "'[Thread Failure] '", ",", "exc_info", "=", "self", ".", "exc_info", ")", "except", "KeyboardInterrupt", ":", "raise", "Failure", "(", "'Interrupted by user'", ")", "finally", ":", "self", ".", "all_tasks_done", ".", "release", "(", ")" ]
bb51075bf43703e7cd95aa39288cf7732ec13a6d
test
TaskQueue.terminate
Terminate all threads by deleting the queue and forcing the child threads to quit.
s4cmd.py
def terminate(self, exc_info=None): '''Terminate all threads by deleting the queue and forcing the child threads to quit. ''' if exc_info: self.exc_info = exc_info try: while self.get_nowait(): self.task_done() except Queue.Empty: pass
def terminate(self, exc_info=None): '''Terminate all threads by deleting the queue and forcing the child threads to quit. ''' if exc_info: self.exc_info = exc_info try: while self.get_nowait(): self.task_done() except Queue.Empty: pass
[ "Terminate", "all", "threads", "by", "deleting", "the", "queue", "and", "forcing", "the", "child", "threads", "to", "quit", "." ]
bloomreach/s4cmd
python
https://github.com/bloomreach/s4cmd/blob/bb51075bf43703e7cd95aa39288cf7732ec13a6d/s4cmd.py#L475-L485
[ "def", "terminate", "(", "self", ",", "exc_info", "=", "None", ")", ":", "if", "exc_info", ":", "self", ".", "exc_info", "=", "exc_info", "try", ":", "while", "self", ".", "get_nowait", "(", ")", ":", "self", ".", "task_done", "(", ")", "except", "Queue", ".", "Empty", ":", "pass" ]
bb51075bf43703e7cd95aa39288cf7732ec13a6d
test
ThreadPool.add_task
Utility function to add a single task into task queue
s4cmd.py
def add_task(self, func_name, *args, **kargs): '''Utility function to add a single task into task queue''' self.tasks.put((func_name, 0, args, kargs))
def add_task(self, func_name, *args, **kargs): '''Utility function to add a single task into task queue''' self.tasks.put((func_name, 0, args, kargs))
[ "Utility", "function", "to", "add", "a", "single", "task", "into", "task", "queue" ]
bloomreach/s4cmd
python
https://github.com/bloomreach/s4cmd/blob/bb51075bf43703e7cd95aa39288cf7732ec13a6d/s4cmd.py#L588-L590
[ "def", "add_task", "(", "self", ",", "func_name", ",", "*", "args", ",", "*", "*", "kargs", ")", ":", "self", ".", "tasks", ".", "put", "(", "(", "func_name", ",", "0", ",", "args", ",", "kargs", ")", ")" ]
bb51075bf43703e7cd95aa39288cf7732ec13a6d
test
ThreadPool.join
Utility function to wait all tasks to complete
s4cmd.py
def join(self): '''Utility function to wait all tasks to complete''' self.tasks.join() # Force each thread to break loop. for worker in self.workers: self.tasks.put(None) # Wait for all thread to terminate. for worker in self.workers: worker.join() worker.s3 = None
def join(self): '''Utility function to wait all tasks to complete''' self.tasks.join() # Force each thread to break loop. for worker in self.workers: self.tasks.put(None) # Wait for all thread to terminate. for worker in self.workers: worker.join() worker.s3 = None
[ "Utility", "function", "to", "wait", "all", "tasks", "to", "complete" ]
bloomreach/s4cmd
python
https://github.com/bloomreach/s4cmd/blob/bb51075bf43703e7cd95aa39288cf7732ec13a6d/s4cmd.py#L592-L603
[ "def", "join", "(", "self", ")", ":", "self", ".", "tasks", ".", "join", "(", ")", "# Force each thread to break loop.", "for", "worker", "in", "self", ".", "workers", ":", "self", ".", "tasks", ".", "put", "(", "None", ")", "# Wait for all thread to terminate.", "for", "worker", "in", "self", ".", "workers", ":", "worker", ".", "join", "(", ")", "worker", ".", "s3", "=", "None" ]
bb51075bf43703e7cd95aa39288cf7732ec13a6d
test
ThreadPool.processed
Increase the processed task counter and show progress message
s4cmd.py
def processed(self): '''Increase the processed task counter and show progress message''' self.processed_tasks += 1 qsize = self.tasks.qsize() if qsize > 0: progress('[%d task(s) completed, %d remaining, %d thread(s)]', self.processed_tasks, qsize, len(self.workers)) else: progress('[%d task(s) completed, %d thread(s)]', self.processed_tasks, len(self.workers))
def processed(self): '''Increase the processed task counter and show progress message''' self.processed_tasks += 1 qsize = self.tasks.qsize() if qsize > 0: progress('[%d task(s) completed, %d remaining, %d thread(s)]', self.processed_tasks, qsize, len(self.workers)) else: progress('[%d task(s) completed, %d thread(s)]', self.processed_tasks, len(self.workers))
[ "Increase", "the", "processed", "task", "counter", "and", "show", "progress", "message" ]
bloomreach/s4cmd
python
https://github.com/bloomreach/s4cmd/blob/bb51075bf43703e7cd95aa39288cf7732ec13a6d/s4cmd.py#L606-L613
[ "def", "processed", "(", "self", ")", ":", "self", ".", "processed_tasks", "+=", "1", "qsize", "=", "self", ".", "tasks", ".", "qsize", "(", ")", "if", "qsize", ">", "0", ":", "progress", "(", "'[%d task(s) completed, %d remaining, %d thread(s)]'", ",", "self", ".", "processed_tasks", ",", "qsize", ",", "len", "(", "self", ".", "workers", ")", ")", "else", ":", "progress", "(", "'[%d task(s) completed, %d thread(s)]'", ",", "self", ".", "processed_tasks", ",", "len", "(", "self", ".", "workers", ")", ")" ]
bb51075bf43703e7cd95aa39288cf7732ec13a6d
test
S3Handler.s3_keys_from_env
Retrieve S3 access keys from the environment, or None if not present.
s4cmd.py
def s3_keys_from_env(): '''Retrieve S3 access keys from the environment, or None if not present.''' env = os.environ if S3_ACCESS_KEY_NAME in env and S3_SECRET_KEY_NAME in env: keys = (env[S3_ACCESS_KEY_NAME], env[S3_SECRET_KEY_NAME]) debug("read S3 keys from environment") return keys else: return None
def s3_keys_from_env(): '''Retrieve S3 access keys from the environment, or None if not present.''' env = os.environ if S3_ACCESS_KEY_NAME in env and S3_SECRET_KEY_NAME in env: keys = (env[S3_ACCESS_KEY_NAME], env[S3_SECRET_KEY_NAME]) debug("read S3 keys from environment") return keys else: return None
[ "Retrieve", "S3", "access", "keys", "from", "the", "environment", "or", "None", "if", "not", "present", "." ]
bloomreach/s4cmd
python
https://github.com/bloomreach/s4cmd/blob/bb51075bf43703e7cd95aa39288cf7732ec13a6d/s4cmd.py#L625-L633
[ "def", "s3_keys_from_env", "(", ")", ":", "env", "=", "os", ".", "environ", "if", "S3_ACCESS_KEY_NAME", "in", "env", "and", "S3_SECRET_KEY_NAME", "in", "env", ":", "keys", "=", "(", "env", "[", "S3_ACCESS_KEY_NAME", "]", ",", "env", "[", "S3_SECRET_KEY_NAME", "]", ")", "debug", "(", "\"read S3 keys from environment\"", ")", "return", "keys", "else", ":", "return", "None" ]
bb51075bf43703e7cd95aa39288cf7732ec13a6d
test
S3Handler.s3_keys_from_cmdline
Retrieve S3 access keys from the command line, or None if not present.
s4cmd.py
def s3_keys_from_cmdline(opt): '''Retrieve S3 access keys from the command line, or None if not present.''' if opt.access_key != None and opt.secret_key != None: keys = (opt.access_key, opt.secret_key) debug("read S3 keys from commandline") return keys else: return None
def s3_keys_from_cmdline(opt): '''Retrieve S3 access keys from the command line, or None if not present.''' if opt.access_key != None and opt.secret_key != None: keys = (opt.access_key, opt.secret_key) debug("read S3 keys from commandline") return keys else: return None
[ "Retrieve", "S3", "access", "keys", "from", "the", "command", "line", "or", "None", "if", "not", "present", "." ]
bloomreach/s4cmd
python
https://github.com/bloomreach/s4cmd/blob/bb51075bf43703e7cd95aa39288cf7732ec13a6d/s4cmd.py#L636-L643
[ "def", "s3_keys_from_cmdline", "(", "opt", ")", ":", "if", "opt", ".", "access_key", "!=", "None", "and", "opt", ".", "secret_key", "!=", "None", ":", "keys", "=", "(", "opt", ".", "access_key", ",", "opt", ".", "secret_key", ")", "debug", "(", "\"read S3 keys from commandline\"", ")", "return", "keys", "else", ":", "return", "None" ]
bb51075bf43703e7cd95aa39288cf7732ec13a6d
test
S3Handler.s3_keys_from_s3cfg
Retrieve S3 access key settings from s3cmd's config file, if present; otherwise return None.
s4cmd.py
def s3_keys_from_s3cfg(opt): '''Retrieve S3 access key settings from s3cmd's config file, if present; otherwise return None.''' try: if opt.s3cfg != None: s3cfg_path = "%s" % opt.s3cfg else: s3cfg_path = "%s/.s3cfg" % os.environ["HOME"] if not os.path.exists(s3cfg_path): return None config = ConfigParser.ConfigParser() config.read(s3cfg_path) keys = config.get("default", "access_key"), config.get("default", "secret_key") debug("read S3 keys from %s file", s3cfg_path) return keys except Exception as e: info("could not read S3 keys from %s file; skipping (%s)", s3cfg_path, e) return None
def s3_keys_from_s3cfg(opt): '''Retrieve S3 access key settings from s3cmd's config file, if present; otherwise return None.''' try: if opt.s3cfg != None: s3cfg_path = "%s" % opt.s3cfg else: s3cfg_path = "%s/.s3cfg" % os.environ["HOME"] if not os.path.exists(s3cfg_path): return None config = ConfigParser.ConfigParser() config.read(s3cfg_path) keys = config.get("default", "access_key"), config.get("default", "secret_key") debug("read S3 keys from %s file", s3cfg_path) return keys except Exception as e: info("could not read S3 keys from %s file; skipping (%s)", s3cfg_path, e) return None
[ "Retrieve", "S3", "access", "key", "settings", "from", "s3cmd", "s", "config", "file", "if", "present", ";", "otherwise", "return", "None", "." ]
bloomreach/s4cmd
python
https://github.com/bloomreach/s4cmd/blob/bb51075bf43703e7cd95aa39288cf7732ec13a6d/s4cmd.py#L646-L662
[ "def", "s3_keys_from_s3cfg", "(", "opt", ")", ":", "try", ":", "if", "opt", ".", "s3cfg", "!=", "None", ":", "s3cfg_path", "=", "\"%s\"", "%", "opt", ".", "s3cfg", "else", ":", "s3cfg_path", "=", "\"%s/.s3cfg\"", "%", "os", ".", "environ", "[", "\"HOME\"", "]", "if", "not", "os", ".", "path", ".", "exists", "(", "s3cfg_path", ")", ":", "return", "None", "config", "=", "ConfigParser", ".", "ConfigParser", "(", ")", "config", ".", "read", "(", "s3cfg_path", ")", "keys", "=", "config", ".", "get", "(", "\"default\"", ",", "\"access_key\"", ")", ",", "config", ".", "get", "(", "\"default\"", ",", "\"secret_key\"", ")", "debug", "(", "\"read S3 keys from %s file\"", ",", "s3cfg_path", ")", "return", "keys", "except", "Exception", "as", "e", ":", "info", "(", "\"could not read S3 keys from %s file; skipping (%s)\"", ",", "s3cfg_path", ",", "e", ")", "return", "None" ]
bb51075bf43703e7cd95aa39288cf7732ec13a6d
test
S3Handler.init_s3_keys
Initialize s3 access keys from environment variable or s3cfg config file.
s4cmd.py
def init_s3_keys(opt): '''Initialize s3 access keys from environment variable or s3cfg config file.''' S3Handler.S3_KEYS = S3Handler.s3_keys_from_cmdline(opt) or S3Handler.s3_keys_from_env() \ or S3Handler.s3_keys_from_s3cfg(opt)
def init_s3_keys(opt): '''Initialize s3 access keys from environment variable or s3cfg config file.''' S3Handler.S3_KEYS = S3Handler.s3_keys_from_cmdline(opt) or S3Handler.s3_keys_from_env() \ or S3Handler.s3_keys_from_s3cfg(opt)
[ "Initialize", "s3", "access", "keys", "from", "environment", "variable", "or", "s3cfg", "config", "file", "." ]
bloomreach/s4cmd
python
https://github.com/bloomreach/s4cmd/blob/bb51075bf43703e7cd95aa39288cf7732ec13a6d/s4cmd.py#L665-L668
[ "def", "init_s3_keys", "(", "opt", ")", ":", "S3Handler", ".", "S3_KEYS", "=", "S3Handler", ".", "s3_keys_from_cmdline", "(", "opt", ")", "or", "S3Handler", ".", "s3_keys_from_env", "(", ")", "or", "S3Handler", ".", "s3_keys_from_s3cfg", "(", "opt", ")" ]
bb51075bf43703e7cd95aa39288cf7732ec13a6d
test
S3Handler.connect
Connect to S3 storage
s4cmd.py
def connect(self): '''Connect to S3 storage''' try: if S3Handler.S3_KEYS: self.s3 = BotoClient(self.opt, S3Handler.S3_KEYS[0], S3Handler.S3_KEYS[1]) else: self.s3 = BotoClient(self.opt) except Exception as e: raise RetryFailure('Unable to connect to s3: %s' % e)
def connect(self): '''Connect to S3 storage''' try: if S3Handler.S3_KEYS: self.s3 = BotoClient(self.opt, S3Handler.S3_KEYS[0], S3Handler.S3_KEYS[1]) else: self.s3 = BotoClient(self.opt) except Exception as e: raise RetryFailure('Unable to connect to s3: %s' % e)
[ "Connect", "to", "S3", "storage" ]
bloomreach/s4cmd
python
https://github.com/bloomreach/s4cmd/blob/bb51075bf43703e7cd95aa39288cf7732ec13a6d/s4cmd.py#L680-L688
[ "def", "connect", "(", "self", ")", ":", "try", ":", "if", "S3Handler", ".", "S3_KEYS", ":", "self", ".", "s3", "=", "BotoClient", "(", "self", ".", "opt", ",", "S3Handler", ".", "S3_KEYS", "[", "0", "]", ",", "S3Handler", ".", "S3_KEYS", "[", "1", "]", ")", "else", ":", "self", ".", "s3", "=", "BotoClient", "(", "self", ".", "opt", ")", "except", "Exception", "as", "e", ":", "raise", "RetryFailure", "(", "'Unable to connect to s3: %s'", "%", "e", ")" ]
bb51075bf43703e7cd95aa39288cf7732ec13a6d
test
S3Handler.list_buckets
List all buckets
s4cmd.py
def list_buckets(self): '''List all buckets''' result = [] for bucket in self.s3.list_buckets().get('Buckets') or []: result.append({ 'name': S3URL.combine('s3', bucket['Name'], ''), 'is_dir': True, 'size': 0, 'last_modified': bucket['CreationDate'] }) return result
def list_buckets(self): '''List all buckets''' result = [] for bucket in self.s3.list_buckets().get('Buckets') or []: result.append({ 'name': S3URL.combine('s3', bucket['Name'], ''), 'is_dir': True, 'size': 0, 'last_modified': bucket['CreationDate'] }) return result
[ "List", "all", "buckets" ]
bloomreach/s4cmd
python
https://github.com/bloomreach/s4cmd/blob/bb51075bf43703e7cd95aa39288cf7732ec13a6d/s4cmd.py#L691-L701
[ "def", "list_buckets", "(", "self", ")", ":", "result", "=", "[", "]", "for", "bucket", "in", "self", ".", "s3", ".", "list_buckets", "(", ")", ".", "get", "(", "'Buckets'", ")", "or", "[", "]", ":", "result", ".", "append", "(", "{", "'name'", ":", "S3URL", ".", "combine", "(", "'s3'", ",", "bucket", "[", "'Name'", "]", ",", "''", ")", ",", "'is_dir'", ":", "True", ",", "'size'", ":", "0", ",", "'last_modified'", ":", "bucket", "[", "'CreationDate'", "]", "}", ")", "return", "result" ]
bb51075bf43703e7cd95aa39288cf7732ec13a6d
test
S3Handler.s3walk
Walk through a S3 directory. This function initiate a walk with a basedir. It also supports multiple wildcards.
s4cmd.py
def s3walk(self, basedir, show_dir=None): '''Walk through a S3 directory. This function initiate a walk with a basedir. It also supports multiple wildcards. ''' # Provide the default value from command line if no override. if not show_dir: show_dir = self.opt.show_dir # trailing slash normalization, this is for the reason that we want # ls 's3://foo/bar/' has the same result as 's3://foo/bar'. Since we # call partial_match() to check wildcards, we need to ensure the number # of slashes stays the same when we do this. if basedir[-1] == PATH_SEP: basedir = basedir[0:-1] s3url = S3URL(basedir) result = [] pool = ThreadPool(ThreadUtil, self.opt) pool.s3walk(s3url, s3url.get_fixed_path(), s3url.path, result) pool.join() # automatic directory detection if not show_dir and len(result) == 1 and result[0]['is_dir']: path = result[0]['name'] s3url = S3URL(path) result = [] pool = ThreadPool(ThreadUtil, self.opt) pool.s3walk(s3url, s3url.get_fixed_path(), s3url.path, result) pool.join() def compare(x, y): '''Comparator for ls output''' result = -cmp(x['is_dir'], y['is_dir']) if result != 0: return result return cmp(x['name'], y['name']) return sorted(result, key=cmp_to_key(compare))
def s3walk(self, basedir, show_dir=None): '''Walk through a S3 directory. This function initiate a walk with a basedir. It also supports multiple wildcards. ''' # Provide the default value from command line if no override. if not show_dir: show_dir = self.opt.show_dir # trailing slash normalization, this is for the reason that we want # ls 's3://foo/bar/' has the same result as 's3://foo/bar'. Since we # call partial_match() to check wildcards, we need to ensure the number # of slashes stays the same when we do this. if basedir[-1] == PATH_SEP: basedir = basedir[0:-1] s3url = S3URL(basedir) result = [] pool = ThreadPool(ThreadUtil, self.opt) pool.s3walk(s3url, s3url.get_fixed_path(), s3url.path, result) pool.join() # automatic directory detection if not show_dir and len(result) == 1 and result[0]['is_dir']: path = result[0]['name'] s3url = S3URL(path) result = [] pool = ThreadPool(ThreadUtil, self.opt) pool.s3walk(s3url, s3url.get_fixed_path(), s3url.path, result) pool.join() def compare(x, y): '''Comparator for ls output''' result = -cmp(x['is_dir'], y['is_dir']) if result != 0: return result return cmp(x['name'], y['name']) return sorted(result, key=cmp_to_key(compare))
[ "Walk", "through", "a", "S3", "directory", ".", "This", "function", "initiate", "a", "walk", "with", "a", "basedir", ".", "It", "also", "supports", "multiple", "wildcards", "." ]
bloomreach/s4cmd
python
https://github.com/bloomreach/s4cmd/blob/bb51075bf43703e7cd95aa39288cf7732ec13a6d/s4cmd.py#L704-L741
[ "def", "s3walk", "(", "self", ",", "basedir", ",", "show_dir", "=", "None", ")", ":", "# Provide the default value from command line if no override.", "if", "not", "show_dir", ":", "show_dir", "=", "self", ".", "opt", ".", "show_dir", "# trailing slash normalization, this is for the reason that we want", "# ls 's3://foo/bar/' has the same result as 's3://foo/bar'. Since we", "# call partial_match() to check wildcards, we need to ensure the number", "# of slashes stays the same when we do this.", "if", "basedir", "[", "-", "1", "]", "==", "PATH_SEP", ":", "basedir", "=", "basedir", "[", "0", ":", "-", "1", "]", "s3url", "=", "S3URL", "(", "basedir", ")", "result", "=", "[", "]", "pool", "=", "ThreadPool", "(", "ThreadUtil", ",", "self", ".", "opt", ")", "pool", ".", "s3walk", "(", "s3url", ",", "s3url", ".", "get_fixed_path", "(", ")", ",", "s3url", ".", "path", ",", "result", ")", "pool", ".", "join", "(", ")", "# automatic directory detection", "if", "not", "show_dir", "and", "len", "(", "result", ")", "==", "1", "and", "result", "[", "0", "]", "[", "'is_dir'", "]", ":", "path", "=", "result", "[", "0", "]", "[", "'name'", "]", "s3url", "=", "S3URL", "(", "path", ")", "result", "=", "[", "]", "pool", "=", "ThreadPool", "(", "ThreadUtil", ",", "self", ".", "opt", ")", "pool", ".", "s3walk", "(", "s3url", ",", "s3url", ".", "get_fixed_path", "(", ")", ",", "s3url", ".", "path", ",", "result", ")", "pool", ".", "join", "(", ")", "def", "compare", "(", "x", ",", "y", ")", ":", "'''Comparator for ls output'''", "result", "=", "-", "cmp", "(", "x", "[", "'is_dir'", "]", ",", "y", "[", "'is_dir'", "]", ")", "if", "result", "!=", "0", ":", "return", "result", "return", "cmp", "(", "x", "[", "'name'", "]", ",", "y", "[", "'name'", "]", ")", "return", "sorted", "(", "result", ",", "key", "=", "cmp_to_key", "(", "compare", ")", ")" ]
bb51075bf43703e7cd95aa39288cf7732ec13a6d
test
S3Handler.local_walk
Walk through local directories from root basedir
s4cmd.py
def local_walk(self, basedir): '''Walk through local directories from root basedir''' result = [] for root, dirs, files in os.walk(basedir): for f in files: result.append(os.path.join(root, f)) return result
def local_walk(self, basedir): '''Walk through local directories from root basedir''' result = [] for root, dirs, files in os.walk(basedir): for f in files: result.append(os.path.join(root, f)) return result
[ "Walk", "through", "local", "directories", "from", "root", "basedir" ]
bloomreach/s4cmd
python
https://github.com/bloomreach/s4cmd/blob/bb51075bf43703e7cd95aa39288cf7732ec13a6d/s4cmd.py#L744-L751
[ "def", "local_walk", "(", "self", ",", "basedir", ")", ":", "result", "=", "[", "]", "for", "root", ",", "dirs", ",", "files", "in", "os", ".", "walk", "(", "basedir", ")", ":", "for", "f", "in", "files", ":", "result", ".", "append", "(", "os", ".", "path", ".", "join", "(", "root", ",", "f", ")", ")", "return", "result" ]
bb51075bf43703e7cd95aa39288cf7732ec13a6d
test
S3Handler.get_basename
Unix style basename. This fuction will return 'bar' for '/foo/bar/' instead of empty string. It is used to normalize the input trailing slash.
s4cmd.py
def get_basename(self, path): '''Unix style basename. This fuction will return 'bar' for '/foo/bar/' instead of empty string. It is used to normalize the input trailing slash. ''' if path[-1] == PATH_SEP: path = path[0:-1] return os.path.basename(path)
def get_basename(self, path): '''Unix style basename. This fuction will return 'bar' for '/foo/bar/' instead of empty string. It is used to normalize the input trailing slash. ''' if path[-1] == PATH_SEP: path = path[0:-1] return os.path.basename(path)
[ "Unix", "style", "basename", ".", "This", "fuction", "will", "return", "bar", "for", "/", "foo", "/", "bar", "/", "instead", "of", "empty", "string", ".", "It", "is", "used", "to", "normalize", "the", "input", "trailing", "slash", "." ]
bloomreach/s4cmd
python
https://github.com/bloomreach/s4cmd/blob/bb51075bf43703e7cd95aa39288cf7732ec13a6d/s4cmd.py#L754-L761
[ "def", "get_basename", "(", "self", ",", "path", ")", ":", "if", "path", "[", "-", "1", "]", "==", "PATH_SEP", ":", "path", "=", "path", "[", "0", ":", "-", "1", "]", "return", "os", ".", "path", ".", "basename", "(", "path", ")" ]
bb51075bf43703e7cd95aa39288cf7732ec13a6d
test
S3Handler.source_expand
Expand the wildcards for an S3 path. This emulates the shall expansion for wildcards if the input is local path.
s4cmd.py
def source_expand(self, source): '''Expand the wildcards for an S3 path. This emulates the shall expansion for wildcards if the input is local path. ''' result = [] if not isinstance(source, list): source = [source] for src in source: # XXX Hacky: We need to disable recursive when we expand the input # parameters, need to pass this as an override parameter if # provided. tmp = self.opt.recursive self.opt.recursive = False result += [f['name'] for f in self.s3walk(src, True)] self.opt.recursive = tmp if (len(result) == 0) and (not self.opt.ignore_empty_source): fail("[Runtime Failure] Source doesn't exist.") return result
def source_expand(self, source): '''Expand the wildcards for an S3 path. This emulates the shall expansion for wildcards if the input is local path. ''' result = [] if not isinstance(source, list): source = [source] for src in source: # XXX Hacky: We need to disable recursive when we expand the input # parameters, need to pass this as an override parameter if # provided. tmp = self.opt.recursive self.opt.recursive = False result += [f['name'] for f in self.s3walk(src, True)] self.opt.recursive = tmp if (len(result) == 0) and (not self.opt.ignore_empty_source): fail("[Runtime Failure] Source doesn't exist.") return result
[ "Expand", "the", "wildcards", "for", "an", "S3", "path", ".", "This", "emulates", "the", "shall", "expansion", "for", "wildcards", "if", "the", "input", "is", "local", "path", "." ]
bloomreach/s4cmd
python
https://github.com/bloomreach/s4cmd/blob/bb51075bf43703e7cd95aa39288cf7732ec13a6d/s4cmd.py#L763-L784
[ "def", "source_expand", "(", "self", ",", "source", ")", ":", "result", "=", "[", "]", "if", "not", "isinstance", "(", "source", ",", "list", ")", ":", "source", "=", "[", "source", "]", "for", "src", "in", "source", ":", "# XXX Hacky: We need to disable recursive when we expand the input", "# parameters, need to pass this as an override parameter if", "# provided.", "tmp", "=", "self", ".", "opt", ".", "recursive", "self", ".", "opt", ".", "recursive", "=", "False", "result", "+=", "[", "f", "[", "'name'", "]", "for", "f", "in", "self", ".", "s3walk", "(", "src", ",", "True", ")", "]", "self", ".", "opt", ".", "recursive", "=", "tmp", "if", "(", "len", "(", "result", ")", "==", "0", ")", "and", "(", "not", "self", ".", "opt", ".", "ignore_empty_source", ")", ":", "fail", "(", "\"[Runtime Failure] Source doesn't exist.\"", ")", "return", "result" ]
bb51075bf43703e7cd95aa39288cf7732ec13a6d
test
S3Handler.put_single_file
Upload a single file or a directory by adding a task into queue
s4cmd.py
def put_single_file(self, pool, source, target): '''Upload a single file or a directory by adding a task into queue''' if os.path.isdir(source): if self.opt.recursive: for f in (f for f in self.local_walk(source) if not os.path.isdir(f)): target_url = S3URL(target) # deal with ./ or ../ here by normalizing the path. joined_path = os.path.normpath(os.path.join(target_url.path, os.path.relpath(f, source))) pool.upload(f, S3URL.combine('s3', target_url.bucket, joined_path)) else: message('omitting directory "%s".' % source) else: pool.upload(source, target)
def put_single_file(self, pool, source, target): '''Upload a single file or a directory by adding a task into queue''' if os.path.isdir(source): if self.opt.recursive: for f in (f for f in self.local_walk(source) if not os.path.isdir(f)): target_url = S3URL(target) # deal with ./ or ../ here by normalizing the path. joined_path = os.path.normpath(os.path.join(target_url.path, os.path.relpath(f, source))) pool.upload(f, S3URL.combine('s3', target_url.bucket, joined_path)) else: message('omitting directory "%s".' % source) else: pool.upload(source, target)
[ "Upload", "a", "single", "file", "or", "a", "directory", "by", "adding", "a", "task", "into", "queue" ]
bloomreach/s4cmd
python
https://github.com/bloomreach/s4cmd/blob/bb51075bf43703e7cd95aa39288cf7732ec13a6d/s4cmd.py#L787-L799
[ "def", "put_single_file", "(", "self", ",", "pool", ",", "source", ",", "target", ")", ":", "if", "os", ".", "path", ".", "isdir", "(", "source", ")", ":", "if", "self", ".", "opt", ".", "recursive", ":", "for", "f", "in", "(", "f", "for", "f", "in", "self", ".", "local_walk", "(", "source", ")", "if", "not", "os", ".", "path", ".", "isdir", "(", "f", ")", ")", ":", "target_url", "=", "S3URL", "(", "target", ")", "# deal with ./ or ../ here by normalizing the path.", "joined_path", "=", "os", ".", "path", ".", "normpath", "(", "os", ".", "path", ".", "join", "(", "target_url", ".", "path", ",", "os", ".", "path", ".", "relpath", "(", "f", ",", "source", ")", ")", ")", "pool", ".", "upload", "(", "f", ",", "S3URL", ".", "combine", "(", "'s3'", ",", "target_url", ".", "bucket", ",", "joined_path", ")", ")", "else", ":", "message", "(", "'omitting directory \"%s\".'", "%", "source", ")", "else", ":", "pool", ".", "upload", "(", "source", ",", "target", ")" ]
bb51075bf43703e7cd95aa39288cf7732ec13a6d
test
S3Handler.put_files
Upload files to S3. This function can handle multiple file upload if source is a list. Also, it works for recursive mode which copy all files and keep the directory structure under the given source directory.
s4cmd.py
def put_files(self, source, target): '''Upload files to S3. This function can handle multiple file upload if source is a list. Also, it works for recursive mode which copy all files and keep the directory structure under the given source directory. ''' pool = ThreadPool(ThreadUtil, self.opt) if not isinstance(source, list): source = [source] if target[-1] == PATH_SEP: for src in source: self.put_single_file(pool, src, os.path.join(target, self.get_basename(src))) else: if len(source) == 1: self.put_single_file(pool, source[0], target) else: raise Failure('Target "%s" is not a directory (with a trailing slash).' % target) pool.join()
def put_files(self, source, target): '''Upload files to S3. This function can handle multiple file upload if source is a list. Also, it works for recursive mode which copy all files and keep the directory structure under the given source directory. ''' pool = ThreadPool(ThreadUtil, self.opt) if not isinstance(source, list): source = [source] if target[-1] == PATH_SEP: for src in source: self.put_single_file(pool, src, os.path.join(target, self.get_basename(src))) else: if len(source) == 1: self.put_single_file(pool, source[0], target) else: raise Failure('Target "%s" is not a directory (with a trailing slash).' % target) pool.join()
[ "Upload", "files", "to", "S3", ".", "This", "function", "can", "handle", "multiple", "file", "upload", "if", "source", "is", "a", "list", ".", "Also", "it", "works", "for", "recursive", "mode", "which", "copy", "all", "files", "and", "keep", "the", "directory", "structure", "under", "the", "given", "source", "directory", "." ]
bloomreach/s4cmd
python
https://github.com/bloomreach/s4cmd/blob/bb51075bf43703e7cd95aa39288cf7732ec13a6d/s4cmd.py#L802-L821
[ "def", "put_files", "(", "self", ",", "source", ",", "target", ")", ":", "pool", "=", "ThreadPool", "(", "ThreadUtil", ",", "self", ".", "opt", ")", "if", "not", "isinstance", "(", "source", ",", "list", ")", ":", "source", "=", "[", "source", "]", "if", "target", "[", "-", "1", "]", "==", "PATH_SEP", ":", "for", "src", "in", "source", ":", "self", ".", "put_single_file", "(", "pool", ",", "src", ",", "os", ".", "path", ".", "join", "(", "target", ",", "self", ".", "get_basename", "(", "src", ")", ")", ")", "else", ":", "if", "len", "(", "source", ")", "==", "1", ":", "self", ".", "put_single_file", "(", "pool", ",", "source", "[", "0", "]", ",", "target", ")", "else", ":", "raise", "Failure", "(", "'Target \"%s\" is not a directory (with a trailing slash).'", "%", "target", ")", "pool", ".", "join", "(", ")" ]
bb51075bf43703e7cd95aa39288cf7732ec13a6d
test
S3Handler.create_bucket
Use the create_bucket API to create a new bucket
s4cmd.py
def create_bucket(self, source): '''Use the create_bucket API to create a new bucket''' s3url = S3URL(source) message('Creating %s', source) if not self.opt.dry_run: resp = self.s3.create_bucket(Bucket=s3url.bucket) if resp['ResponseMetadata']["HTTPStatusCode"] == 200: message('Done.') else: raise Failure('Unable to create bucket %s' % source)
def create_bucket(self, source): '''Use the create_bucket API to create a new bucket''' s3url = S3URL(source) message('Creating %s', source) if not self.opt.dry_run: resp = self.s3.create_bucket(Bucket=s3url.bucket) if resp['ResponseMetadata']["HTTPStatusCode"] == 200: message('Done.') else: raise Failure('Unable to create bucket %s' % source)
[ "Use", "the", "create_bucket", "API", "to", "create", "a", "new", "bucket" ]
bloomreach/s4cmd
python
https://github.com/bloomreach/s4cmd/blob/bb51075bf43703e7cd95aa39288cf7732ec13a6d/s4cmd.py#L824-L834
[ "def", "create_bucket", "(", "self", ",", "source", ")", ":", "s3url", "=", "S3URL", "(", "source", ")", "message", "(", "'Creating %s'", ",", "source", ")", "if", "not", "self", ".", "opt", ".", "dry_run", ":", "resp", "=", "self", ".", "s3", ".", "create_bucket", "(", "Bucket", "=", "s3url", ".", "bucket", ")", "if", "resp", "[", "'ResponseMetadata'", "]", "[", "\"HTTPStatusCode\"", "]", "==", "200", ":", "message", "(", "'Done.'", ")", "else", ":", "raise", "Failure", "(", "'Unable to create bucket %s'", "%", "source", ")" ]
bb51075bf43703e7cd95aa39288cf7732ec13a6d
test
S3Handler.update_privilege
Get privileges from metadata of the source in s3, and apply them to target
s4cmd.py
def update_privilege(self, obj, target): '''Get privileges from metadata of the source in s3, and apply them to target''' if 'privilege' in obj['Metadata']: os.chmod(target, int(obj['Metadata']['privilege'], 8))
def update_privilege(self, obj, target): '''Get privileges from metadata of the source in s3, and apply them to target''' if 'privilege' in obj['Metadata']: os.chmod(target, int(obj['Metadata']['privilege'], 8))
[ "Get", "privileges", "from", "metadata", "of", "the", "source", "in", "s3", "and", "apply", "them", "to", "target" ]
bloomreach/s4cmd
python
https://github.com/bloomreach/s4cmd/blob/bb51075bf43703e7cd95aa39288cf7732ec13a6d/s4cmd.py#L838-L841
[ "def", "update_privilege", "(", "self", ",", "obj", ",", "target", ")", ":", "if", "'privilege'", "in", "obj", "[", "'Metadata'", "]", ":", "os", ".", "chmod", "(", "target", ",", "int", "(", "obj", "[", "'Metadata'", "]", "[", "'privilege'", "]", ",", "8", ")", ")" ]
bb51075bf43703e7cd95aa39288cf7732ec13a6d
test
S3Handler.print_files
Print out a series of files
s4cmd.py
def print_files(self, source): '''Print out a series of files''' sources = self.source_expand(source) for source in sources: s3url = S3URL(source) response = self.s3.get_object(Bucket=s3url.bucket, Key=s3url.path) message('%s', response['Body'].read())
def print_files(self, source): '''Print out a series of files''' sources = self.source_expand(source) for source in sources: s3url = S3URL(source) response = self.s3.get_object(Bucket=s3url.bucket, Key=s3url.path) message('%s', response['Body'].read())
[ "Print", "out", "a", "series", "of", "files" ]
bloomreach/s4cmd
python
https://github.com/bloomreach/s4cmd/blob/bb51075bf43703e7cd95aa39288cf7732ec13a6d/s4cmd.py#L844-L851
[ "def", "print_files", "(", "self", ",", "source", ")", ":", "sources", "=", "self", ".", "source_expand", "(", "source", ")", "for", "source", "in", "sources", ":", "s3url", "=", "S3URL", "(", "source", ")", "response", "=", "self", ".", "s3", ".", "get_object", "(", "Bucket", "=", "s3url", ".", "bucket", ",", "Key", "=", "s3url", ".", "path", ")", "message", "(", "'%s'", ",", "response", "[", "'Body'", "]", ".", "read", "(", ")", ")" ]
bb51075bf43703e7cd95aa39288cf7732ec13a6d
test
S3Handler.get_single_file
Download a single file or a directory by adding a task into queue
s4cmd.py
def get_single_file(self, pool, source, target): '''Download a single file or a directory by adding a task into queue''' if source[-1] == PATH_SEP: if self.opt.recursive: basepath = S3URL(source).path for f in (f for f in self.s3walk(source) if not f['is_dir']): pool.download(f['name'], os.path.join(target, os.path.relpath(S3URL(f['name']).path, basepath))) else: message('omitting directory "%s".' % source) else: pool.download(source, target)
def get_single_file(self, pool, source, target): '''Download a single file or a directory by adding a task into queue''' if source[-1] == PATH_SEP: if self.opt.recursive: basepath = S3URL(source).path for f in (f for f in self.s3walk(source) if not f['is_dir']): pool.download(f['name'], os.path.join(target, os.path.relpath(S3URL(f['name']).path, basepath))) else: message('omitting directory "%s".' % source) else: pool.download(source, target)
[ "Download", "a", "single", "file", "or", "a", "directory", "by", "adding", "a", "task", "into", "queue" ]
bloomreach/s4cmd
python
https://github.com/bloomreach/s4cmd/blob/bb51075bf43703e7cd95aa39288cf7732ec13a6d/s4cmd.py#L854-L864
[ "def", "get_single_file", "(", "self", ",", "pool", ",", "source", ",", "target", ")", ":", "if", "source", "[", "-", "1", "]", "==", "PATH_SEP", ":", "if", "self", ".", "opt", ".", "recursive", ":", "basepath", "=", "S3URL", "(", "source", ")", ".", "path", "for", "f", "in", "(", "f", "for", "f", "in", "self", ".", "s3walk", "(", "source", ")", "if", "not", "f", "[", "'is_dir'", "]", ")", ":", "pool", ".", "download", "(", "f", "[", "'name'", "]", ",", "os", ".", "path", ".", "join", "(", "target", ",", "os", ".", "path", ".", "relpath", "(", "S3URL", "(", "f", "[", "'name'", "]", ")", ".", "path", ",", "basepath", ")", ")", ")", "else", ":", "message", "(", "'omitting directory \"%s\".'", "%", "source", ")", "else", ":", "pool", ".", "download", "(", "source", ",", "target", ")" ]
bb51075bf43703e7cd95aa39288cf7732ec13a6d
test
S3Handler.get_files
Download files. This function can handle multiple files if source S3 URL has wildcard characters. It also handles recursive mode by download all files and keep the directory structure.
s4cmd.py
def get_files(self, source, target): '''Download files. This function can handle multiple files if source S3 URL has wildcard characters. It also handles recursive mode by download all files and keep the directory structure. ''' pool = ThreadPool(ThreadUtil, self.opt) source = self.source_expand(source) if os.path.isdir(target): for src in source: self.get_single_file(pool, src, os.path.join(target, self.get_basename(S3URL(src).path))) else: if len(source) > 1: raise Failure('Target "%s" is not a directory.' % target) # Get file if it exists on s3 otherwise do nothing elif len(source) == 1: self.get_single_file(pool, source[0], target) else: #Source expand may return empty list only if ignore-empty-source is set to true pass pool.join()
def get_files(self, source, target): '''Download files. This function can handle multiple files if source S3 URL has wildcard characters. It also handles recursive mode by download all files and keep the directory structure. ''' pool = ThreadPool(ThreadUtil, self.opt) source = self.source_expand(source) if os.path.isdir(target): for src in source: self.get_single_file(pool, src, os.path.join(target, self.get_basename(S3URL(src).path))) else: if len(source) > 1: raise Failure('Target "%s" is not a directory.' % target) # Get file if it exists on s3 otherwise do nothing elif len(source) == 1: self.get_single_file(pool, source[0], target) else: #Source expand may return empty list only if ignore-empty-source is set to true pass pool.join()
[ "Download", "files", ".", "This", "function", "can", "handle", "multiple", "files", "if", "source", "S3", "URL", "has", "wildcard", "characters", ".", "It", "also", "handles", "recursive", "mode", "by", "download", "all", "files", "and", "keep", "the", "directory", "structure", "." ]
bloomreach/s4cmd
python
https://github.com/bloomreach/s4cmd/blob/bb51075bf43703e7cd95aa39288cf7732ec13a6d/s4cmd.py#L867-L889
[ "def", "get_files", "(", "self", ",", "source", ",", "target", ")", ":", "pool", "=", "ThreadPool", "(", "ThreadUtil", ",", "self", ".", "opt", ")", "source", "=", "self", ".", "source_expand", "(", "source", ")", "if", "os", ".", "path", ".", "isdir", "(", "target", ")", ":", "for", "src", "in", "source", ":", "self", ".", "get_single_file", "(", "pool", ",", "src", ",", "os", ".", "path", ".", "join", "(", "target", ",", "self", ".", "get_basename", "(", "S3URL", "(", "src", ")", ".", "path", ")", ")", ")", "else", ":", "if", "len", "(", "source", ")", ">", "1", ":", "raise", "Failure", "(", "'Target \"%s\" is not a directory.'", "%", "target", ")", "# Get file if it exists on s3 otherwise do nothing", "elif", "len", "(", "source", ")", "==", "1", ":", "self", ".", "get_single_file", "(", "pool", ",", "source", "[", "0", "]", ",", "target", ")", "else", ":", "#Source expand may return empty list only if ignore-empty-source is set to true", "pass", "pool", ".", "join", "(", ")" ]
bb51075bf43703e7cd95aa39288cf7732ec13a6d
test
S3Handler.delete_removed_files
Remove remote files that are not present in the local source. (Obsolete) It is used for old sync command now.
s4cmd.py
def delete_removed_files(self, source, target): '''Remove remote files that are not present in the local source. (Obsolete) It is used for old sync command now. ''' message("Deleting files found in %s and not in %s", source, target) if os.path.isdir(source): unecessary = [] basepath = S3URL(target).path for f in [f for f in self.s3walk(target) if not f['is_dir']]: local_name = os.path.join(source, os.path.relpath(S3URL(f['name']).path, basepath)) if not os.path.isfile(local_name): message("%s not found locally, adding to delete queue", local_name) unecessary.append(f['name']) if len(unecessary) > 0: pool = ThreadPool(ThreadUtil, self.opt) for del_file in unecessary: pool.delete(del_file) pool.join() else: raise Failure('Source "%s" is not a directory.' % target)
def delete_removed_files(self, source, target): '''Remove remote files that are not present in the local source. (Obsolete) It is used for old sync command now. ''' message("Deleting files found in %s and not in %s", source, target) if os.path.isdir(source): unecessary = [] basepath = S3URL(target).path for f in [f for f in self.s3walk(target) if not f['is_dir']]: local_name = os.path.join(source, os.path.relpath(S3URL(f['name']).path, basepath)) if not os.path.isfile(local_name): message("%s not found locally, adding to delete queue", local_name) unecessary.append(f['name']) if len(unecessary) > 0: pool = ThreadPool(ThreadUtil, self.opt) for del_file in unecessary: pool.delete(del_file) pool.join() else: raise Failure('Source "%s" is not a directory.' % target)
[ "Remove", "remote", "files", "that", "are", "not", "present", "in", "the", "local", "source", ".", "(", "Obsolete", ")", "It", "is", "used", "for", "old", "sync", "command", "now", "." ]
bloomreach/s4cmd
python
https://github.com/bloomreach/s4cmd/blob/bb51075bf43703e7cd95aa39288cf7732ec13a6d/s4cmd.py#L892-L911
[ "def", "delete_removed_files", "(", "self", ",", "source", ",", "target", ")", ":", "message", "(", "\"Deleting files found in %s and not in %s\"", ",", "source", ",", "target", ")", "if", "os", ".", "path", ".", "isdir", "(", "source", ")", ":", "unecessary", "=", "[", "]", "basepath", "=", "S3URL", "(", "target", ")", ".", "path", "for", "f", "in", "[", "f", "for", "f", "in", "self", ".", "s3walk", "(", "target", ")", "if", "not", "f", "[", "'is_dir'", "]", "]", ":", "local_name", "=", "os", ".", "path", ".", "join", "(", "source", ",", "os", ".", "path", ".", "relpath", "(", "S3URL", "(", "f", "[", "'name'", "]", ")", ".", "path", ",", "basepath", ")", ")", "if", "not", "os", ".", "path", ".", "isfile", "(", "local_name", ")", ":", "message", "(", "\"%s not found locally, adding to delete queue\"", ",", "local_name", ")", "unecessary", ".", "append", "(", "f", "[", "'name'", "]", ")", "if", "len", "(", "unecessary", ")", ">", "0", ":", "pool", "=", "ThreadPool", "(", "ThreadUtil", ",", "self", ".", "opt", ")", "for", "del_file", "in", "unecessary", ":", "pool", ".", "delete", "(", "del_file", ")", "pool", ".", "join", "(", ")", "else", ":", "raise", "Failure", "(", "'Source \"%s\" is not a directory.'", "%", "target", ")" ]
bb51075bf43703e7cd95aa39288cf7732ec13a6d
test
S3Handler.cp_single_file
Copy a single file or a directory by adding a task into queue
s4cmd.py
def cp_single_file(self, pool, source, target, delete_source): '''Copy a single file or a directory by adding a task into queue''' if source[-1] == PATH_SEP: if self.opt.recursive: basepath = S3URL(source).path for f in (f for f in self.s3walk(source) if not f['is_dir']): pool.copy(f['name'], os.path.join(target, os.path.relpath(S3URL(f['name']).path, basepath)), delete_source=delete_source) else: message('omitting directory "%s".' % source) else: pool.copy(source, target, delete_source=delete_source)
def cp_single_file(self, pool, source, target, delete_source): '''Copy a single file or a directory by adding a task into queue''' if source[-1] == PATH_SEP: if self.opt.recursive: basepath = S3URL(source).path for f in (f for f in self.s3walk(source) if not f['is_dir']): pool.copy(f['name'], os.path.join(target, os.path.relpath(S3URL(f['name']).path, basepath)), delete_source=delete_source) else: message('omitting directory "%s".' % source) else: pool.copy(source, target, delete_source=delete_source)
[ "Copy", "a", "single", "file", "or", "a", "directory", "by", "adding", "a", "task", "into", "queue" ]
bloomreach/s4cmd
python
https://github.com/bloomreach/s4cmd/blob/bb51075bf43703e7cd95aa39288cf7732ec13a6d/s4cmd.py#L914-L924
[ "def", "cp_single_file", "(", "self", ",", "pool", ",", "source", ",", "target", ",", "delete_source", ")", ":", "if", "source", "[", "-", "1", "]", "==", "PATH_SEP", ":", "if", "self", ".", "opt", ".", "recursive", ":", "basepath", "=", "S3URL", "(", "source", ")", ".", "path", "for", "f", "in", "(", "f", "for", "f", "in", "self", ".", "s3walk", "(", "source", ")", "if", "not", "f", "[", "'is_dir'", "]", ")", ":", "pool", ".", "copy", "(", "f", "[", "'name'", "]", ",", "os", ".", "path", ".", "join", "(", "target", ",", "os", ".", "path", ".", "relpath", "(", "S3URL", "(", "f", "[", "'name'", "]", ")", ".", "path", ",", "basepath", ")", ")", ",", "delete_source", "=", "delete_source", ")", "else", ":", "message", "(", "'omitting directory \"%s\".'", "%", "source", ")", "else", ":", "pool", ".", "copy", "(", "source", ",", "target", ",", "delete_source", "=", "delete_source", ")" ]
bb51075bf43703e7cd95aa39288cf7732ec13a6d
test
S3Handler.cp_files
Copy files This function can handle multiple files if source S3 URL has wildcard characters. It also handles recursive mode by copying all files and keep the directory structure.
s4cmd.py
def cp_files(self, source, target, delete_source=False): '''Copy files This function can handle multiple files if source S3 URL has wildcard characters. It also handles recursive mode by copying all files and keep the directory structure. ''' pool = ThreadPool(ThreadUtil, self.opt) source = self.source_expand(source) if target[-1] == PATH_SEP: for src in source: self.cp_single_file(pool, src, os.path.join(target, self.get_basename(S3URL(src).path)), delete_source) else: if len(source) > 1: raise Failure('Target "%s" is not a directory (with a trailing slash).' % target) # Copy file if it exists otherwise do nothing elif len(source) == 1: self.cp_single_file(pool, source[0], target, delete_source) else: # Source expand may return empty list only if ignore-empty-source is set to true pass pool.join()
def cp_files(self, source, target, delete_source=False): '''Copy files This function can handle multiple files if source S3 URL has wildcard characters. It also handles recursive mode by copying all files and keep the directory structure. ''' pool = ThreadPool(ThreadUtil, self.opt) source = self.source_expand(source) if target[-1] == PATH_SEP: for src in source: self.cp_single_file(pool, src, os.path.join(target, self.get_basename(S3URL(src).path)), delete_source) else: if len(source) > 1: raise Failure('Target "%s" is not a directory (with a trailing slash).' % target) # Copy file if it exists otherwise do nothing elif len(source) == 1: self.cp_single_file(pool, source[0], target, delete_source) else: # Source expand may return empty list only if ignore-empty-source is set to true pass pool.join()
[ "Copy", "files", "This", "function", "can", "handle", "multiple", "files", "if", "source", "S3", "URL", "has", "wildcard", "characters", ".", "It", "also", "handles", "recursive", "mode", "by", "copying", "all", "files", "and", "keep", "the", "directory", "structure", "." ]
bloomreach/s4cmd
python
https://github.com/bloomreach/s4cmd/blob/bb51075bf43703e7cd95aa39288cf7732ec13a6d/s4cmd.py#L927-L949
[ "def", "cp_files", "(", "self", ",", "source", ",", "target", ",", "delete_source", "=", "False", ")", ":", "pool", "=", "ThreadPool", "(", "ThreadUtil", ",", "self", ".", "opt", ")", "source", "=", "self", ".", "source_expand", "(", "source", ")", "if", "target", "[", "-", "1", "]", "==", "PATH_SEP", ":", "for", "src", "in", "source", ":", "self", ".", "cp_single_file", "(", "pool", ",", "src", ",", "os", ".", "path", ".", "join", "(", "target", ",", "self", ".", "get_basename", "(", "S3URL", "(", "src", ")", ".", "path", ")", ")", ",", "delete_source", ")", "else", ":", "if", "len", "(", "source", ")", ">", "1", ":", "raise", "Failure", "(", "'Target \"%s\" is not a directory (with a trailing slash).'", "%", "target", ")", "# Copy file if it exists otherwise do nothing", "elif", "len", "(", "source", ")", "==", "1", ":", "self", ".", "cp_single_file", "(", "pool", ",", "source", "[", "0", "]", ",", "target", ",", "delete_source", ")", "else", ":", "# Source expand may return empty list only if ignore-empty-source is set to true", "pass", "pool", ".", "join", "(", ")" ]
bb51075bf43703e7cd95aa39288cf7732ec13a6d
test
S3Handler.del_files
Delete files on S3
s4cmd.py
def del_files(self, source): '''Delete files on S3''' src_files = [] for obj in self.s3walk(source): if not obj['is_dir']: # ignore directories src_files.append(obj['name']) pool = ThreadPool(ThreadUtil, self.opt) pool.batch_delete(src_files) pool.join()
def del_files(self, source): '''Delete files on S3''' src_files = [] for obj in self.s3walk(source): if not obj['is_dir']: # ignore directories src_files.append(obj['name']) pool = ThreadPool(ThreadUtil, self.opt) pool.batch_delete(src_files) pool.join()
[ "Delete", "files", "on", "S3" ]
bloomreach/s4cmd
python
https://github.com/bloomreach/s4cmd/blob/bb51075bf43703e7cd95aa39288cf7732ec13a6d/s4cmd.py#L952-L961
[ "def", "del_files", "(", "self", ",", "source", ")", ":", "src_files", "=", "[", "]", "for", "obj", "in", "self", ".", "s3walk", "(", "source", ")", ":", "if", "not", "obj", "[", "'is_dir'", "]", ":", "# ignore directories", "src_files", ".", "append", "(", "obj", "[", "'name'", "]", ")", "pool", "=", "ThreadPool", "(", "ThreadUtil", ",", "self", ".", "opt", ")", "pool", ".", "batch_delete", "(", "src_files", ")", "pool", ".", "join", "(", ")" ]
bb51075bf43703e7cd95aa39288cf7732ec13a6d
test
S3Handler.relative_dir_walk
Generic version of directory walk. Return file list without base path for comparison.
s4cmd.py
def relative_dir_walk(self, dir): '''Generic version of directory walk. Return file list without base path for comparison. ''' result = [] if S3URL.is_valid(dir): basepath = S3URL(dir).path for f in (f for f in self.s3walk(dir) if not f['is_dir']): result.append(os.path.relpath(S3URL(f['name']).path, basepath)) else: for f in (f for f in self.local_walk(dir) if not os.path.isdir(f)): result.append(os.path.relpath(f, dir)) return result
def relative_dir_walk(self, dir): '''Generic version of directory walk. Return file list without base path for comparison. ''' result = [] if S3URL.is_valid(dir): basepath = S3URL(dir).path for f in (f for f in self.s3walk(dir) if not f['is_dir']): result.append(os.path.relpath(S3URL(f['name']).path, basepath)) else: for f in (f for f in self.local_walk(dir) if not os.path.isdir(f)): result.append(os.path.relpath(f, dir)) return result
[ "Generic", "version", "of", "directory", "walk", ".", "Return", "file", "list", "without", "base", "path", "for", "comparison", "." ]
bloomreach/s4cmd
python
https://github.com/bloomreach/s4cmd/blob/bb51075bf43703e7cd95aa39288cf7732ec13a6d/s4cmd.py#L964-L978
[ "def", "relative_dir_walk", "(", "self", ",", "dir", ")", ":", "result", "=", "[", "]", "if", "S3URL", ".", "is_valid", "(", "dir", ")", ":", "basepath", "=", "S3URL", "(", "dir", ")", ".", "path", "for", "f", "in", "(", "f", "for", "f", "in", "self", ".", "s3walk", "(", "dir", ")", "if", "not", "f", "[", "'is_dir'", "]", ")", ":", "result", ".", "append", "(", "os", ".", "path", ".", "relpath", "(", "S3URL", "(", "f", "[", "'name'", "]", ")", ".", "path", ",", "basepath", ")", ")", "else", ":", "for", "f", "in", "(", "f", "for", "f", "in", "self", ".", "local_walk", "(", "dir", ")", "if", "not", "os", ".", "path", ".", "isdir", "(", "f", ")", ")", ":", "result", ".", "append", "(", "os", ".", "path", ".", "relpath", "(", "f", ",", "dir", ")", ")", "return", "result" ]
bb51075bf43703e7cd95aa39288cf7732ec13a6d
test
S3Handler.dsync_files
Sync directory to directory.
s4cmd.py
def dsync_files(self, source, target): '''Sync directory to directory.''' src_s3_url = S3URL.is_valid(source) dst_s3_url = S3URL.is_valid(target) source_list = self.relative_dir_walk(source) if len(source_list) == 0 or '.' in source_list: raise Failure('Sync command need to sync directory to directory.') sync_list = [(os.path.join(source, f), os.path.join(target, f)) for f in source_list] pool = ThreadPool(ThreadUtil, self.opt) if src_s3_url and not dst_s3_url: for src, dest in sync_list: pool.download(src, dest) elif not src_s3_url and dst_s3_url: for src, dest in sync_list: pool.upload(src, dest) elif src_s3_url and dst_s3_url: for src, dest in sync_list: pool.copy(src, dest) else: raise InvalidArgument('Cannot sync two local directories.') pool.join() if self.opt.delete_removed: target_list = self.relative_dir_walk(target) remove_list = [os.path.join(target, f) for f in (set(target_list) - set(source_list))] if S3URL.is_valid(target): pool = ThreadPool(ThreadUtil, self.opt) pool.batch_delete(remove_list) pool.join() else: for f in remove_list: try: os.unlink(f) message('Delete %s', f) except: pass
def dsync_files(self, source, target): '''Sync directory to directory.''' src_s3_url = S3URL.is_valid(source) dst_s3_url = S3URL.is_valid(target) source_list = self.relative_dir_walk(source) if len(source_list) == 0 or '.' in source_list: raise Failure('Sync command need to sync directory to directory.') sync_list = [(os.path.join(source, f), os.path.join(target, f)) for f in source_list] pool = ThreadPool(ThreadUtil, self.opt) if src_s3_url and not dst_s3_url: for src, dest in sync_list: pool.download(src, dest) elif not src_s3_url and dst_s3_url: for src, dest in sync_list: pool.upload(src, dest) elif src_s3_url and dst_s3_url: for src, dest in sync_list: pool.copy(src, dest) else: raise InvalidArgument('Cannot sync two local directories.') pool.join() if self.opt.delete_removed: target_list = self.relative_dir_walk(target) remove_list = [os.path.join(target, f) for f in (set(target_list) - set(source_list))] if S3URL.is_valid(target): pool = ThreadPool(ThreadUtil, self.opt) pool.batch_delete(remove_list) pool.join() else: for f in remove_list: try: os.unlink(f) message('Delete %s', f) except: pass
[ "Sync", "directory", "to", "directory", "." ]
bloomreach/s4cmd
python
https://github.com/bloomreach/s4cmd/blob/bb51075bf43703e7cd95aa39288cf7732ec13a6d/s4cmd.py#L981-L1020
[ "def", "dsync_files", "(", "self", ",", "source", ",", "target", ")", ":", "src_s3_url", "=", "S3URL", ".", "is_valid", "(", "source", ")", "dst_s3_url", "=", "S3URL", ".", "is_valid", "(", "target", ")", "source_list", "=", "self", ".", "relative_dir_walk", "(", "source", ")", "if", "len", "(", "source_list", ")", "==", "0", "or", "'.'", "in", "source_list", ":", "raise", "Failure", "(", "'Sync command need to sync directory to directory.'", ")", "sync_list", "=", "[", "(", "os", ".", "path", ".", "join", "(", "source", ",", "f", ")", ",", "os", ".", "path", ".", "join", "(", "target", ",", "f", ")", ")", "for", "f", "in", "source_list", "]", "pool", "=", "ThreadPool", "(", "ThreadUtil", ",", "self", ".", "opt", ")", "if", "src_s3_url", "and", "not", "dst_s3_url", ":", "for", "src", ",", "dest", "in", "sync_list", ":", "pool", ".", "download", "(", "src", ",", "dest", ")", "elif", "not", "src_s3_url", "and", "dst_s3_url", ":", "for", "src", ",", "dest", "in", "sync_list", ":", "pool", ".", "upload", "(", "src", ",", "dest", ")", "elif", "src_s3_url", "and", "dst_s3_url", ":", "for", "src", ",", "dest", "in", "sync_list", ":", "pool", ".", "copy", "(", "src", ",", "dest", ")", "else", ":", "raise", "InvalidArgument", "(", "'Cannot sync two local directories.'", ")", "pool", ".", "join", "(", ")", "if", "self", ".", "opt", ".", "delete_removed", ":", "target_list", "=", "self", ".", "relative_dir_walk", "(", "target", ")", "remove_list", "=", "[", "os", ".", "path", ".", "join", "(", "target", ",", "f", ")", "for", "f", "in", "(", "set", "(", "target_list", ")", "-", "set", "(", "source_list", ")", ")", "]", "if", "S3URL", ".", "is_valid", "(", "target", ")", ":", "pool", "=", "ThreadPool", "(", "ThreadUtil", ",", "self", ".", "opt", ")", "pool", ".", "batch_delete", "(", "remove_list", ")", "pool", ".", "join", "(", ")", "else", ":", "for", "f", "in", "remove_list", ":", "try", ":", "os", ".", "unlink", "(", "f", ")", "message", "(", "'Delete %s'", ",", "f", ")", 
"except", ":", "pass" ]
bb51075bf43703e7cd95aa39288cf7732ec13a6d
test
S3Handler.sync_files
Sync files to S3. Does implement deletions if syncing TO s3. Currently identical to get/put -r -f --sync-check with exception of deletions.
s4cmd.py
def sync_files(self, source, target): '''Sync files to S3. Does implement deletions if syncing TO s3. Currently identical to get/put -r -f --sync-check with exception of deletions. ''' src_s3_url = S3URL.is_valid(source) dst_s3_url = S3URL.is_valid(target) if src_s3_url and not dst_s3_url: self.get_files(source, target) elif not src_s3_url and dst_s3_url: self.put_files(source, target) if self.opt.delete_removed: self.delete_removed_files(source, target) elif src_s3_url and dst_s3_url: self.cp_files(source, target) else: raise InvalidArgument('No S3 URI provided')
def sync_files(self, source, target): '''Sync files to S3. Does implement deletions if syncing TO s3. Currently identical to get/put -r -f --sync-check with exception of deletions. ''' src_s3_url = S3URL.is_valid(source) dst_s3_url = S3URL.is_valid(target) if src_s3_url and not dst_s3_url: self.get_files(source, target) elif not src_s3_url and dst_s3_url: self.put_files(source, target) if self.opt.delete_removed: self.delete_removed_files(source, target) elif src_s3_url and dst_s3_url: self.cp_files(source, target) else: raise InvalidArgument('No S3 URI provided')
[ "Sync", "files", "to", "S3", ".", "Does", "implement", "deletions", "if", "syncing", "TO", "s3", ".", "Currently", "identical", "to", "get", "/", "put", "-", "r", "-", "f", "--", "sync", "-", "check", "with", "exception", "of", "deletions", "." ]
bloomreach/s4cmd
python
https://github.com/bloomreach/s4cmd/blob/bb51075bf43703e7cd95aa39288cf7732ec13a6d/s4cmd.py#L1023-L1039
[ "def", "sync_files", "(", "self", ",", "source", ",", "target", ")", ":", "src_s3_url", "=", "S3URL", ".", "is_valid", "(", "source", ")", "dst_s3_url", "=", "S3URL", ".", "is_valid", "(", "target", ")", "if", "src_s3_url", "and", "not", "dst_s3_url", ":", "self", ".", "get_files", "(", "source", ",", "target", ")", "elif", "not", "src_s3_url", "and", "dst_s3_url", ":", "self", ".", "put_files", "(", "source", ",", "target", ")", "if", "self", ".", "opt", ".", "delete_removed", ":", "self", ".", "delete_removed_files", "(", "source", ",", "target", ")", "elif", "src_s3_url", "and", "dst_s3_url", ":", "self", ".", "cp_files", "(", "source", ",", "target", ")", "else", ":", "raise", "InvalidArgument", "(", "'No S3 URI provided'", ")" ]
bb51075bf43703e7cd95aa39288cf7732ec13a6d
test
S3Handler.size
Get the size component of the given s3url. If it is a directory, combine the sizes of all the files under that directory. Subdirectories will not be counted unless --recursive option is set.
s4cmd.py
def size(self, source): '''Get the size component of the given s3url. If it is a directory, combine the sizes of all the files under that directory. Subdirectories will not be counted unless --recursive option is set. ''' result = [] for src in self.source_expand(source): size = 0 for f in self.s3walk(src): size += f['size'] result.append((src, size)) return result
def size(self, source): '''Get the size component of the given s3url. If it is a directory, combine the sizes of all the files under that directory. Subdirectories will not be counted unless --recursive option is set. ''' result = [] for src in self.source_expand(source): size = 0 for f in self.s3walk(src): size += f['size'] result.append((src, size)) return result
[ "Get", "the", "size", "component", "of", "the", "given", "s3url", ".", "If", "it", "is", "a", "directory", "combine", "the", "sizes", "of", "all", "the", "files", "under", "that", "directory", ".", "Subdirectories", "will", "not", "be", "counted", "unless", "--", "recursive", "option", "is", "set", "." ]
bloomreach/s4cmd
python
https://github.com/bloomreach/s4cmd/blob/bb51075bf43703e7cd95aa39288cf7732ec13a6d/s4cmd.py#L1042-L1055
[ "def", "size", "(", "self", ",", "source", ")", ":", "result", "=", "[", "]", "for", "src", "in", "self", ".", "source_expand", "(", "source", ")", ":", "size", "=", "0", "for", "f", "in", "self", ".", "s3walk", "(", "src", ")", ":", "size", "+=", "f", "[", "'size'", "]", "result", ".", "append", "(", "(", "src", ",", "size", ")", ")", "return", "result" ]
bb51075bf43703e7cd95aa39288cf7732ec13a6d
test
LocalMD5Cache.file_hash
Calculate MD5 hash code for a local file
s4cmd.py
def file_hash(self, filename, block_size=2**20): '''Calculate MD5 hash code for a local file''' m = hashlib.md5() with open(filename, 'rb') as f: while True: data = f.read(block_size) if not data: break m.update(data) return m.hexdigest()
def file_hash(self, filename, block_size=2**20): '''Calculate MD5 hash code for a local file''' m = hashlib.md5() with open(filename, 'rb') as f: while True: data = f.read(block_size) if not data: break m.update(data) return m.hexdigest()
[ "Calculate", "MD5", "hash", "code", "for", "a", "local", "file" ]
bloomreach/s4cmd
python
https://github.com/bloomreach/s4cmd/blob/bb51075bf43703e7cd95aa39288cf7732ec13a6d/s4cmd.py#L1065-L1074
[ "def", "file_hash", "(", "self", ",", "filename", ",", "block_size", "=", "2", "**", "20", ")", ":", "m", "=", "hashlib", ".", "md5", "(", ")", "with", "open", "(", "filename", ",", "'rb'", ")", "as", "f", ":", "while", "True", ":", "data", "=", "f", ".", "read", "(", "block_size", ")", "if", "not", "data", ":", "break", "m", ".", "update", "(", "data", ")", "return", "m", ".", "hexdigest", "(", ")" ]
bb51075bf43703e7cd95aa39288cf7732ec13a6d
test
LocalMD5Cache.get_md5
Get or calculate MD5 value of the local file.
s4cmd.py
def get_md5(self): '''Get or calculate MD5 value of the local file.''' if self.md5 is None: self.md5 = self.file_hash(self.filename) return self.md5
def get_md5(self): '''Get or calculate MD5 value of the local file.''' if self.md5 is None: self.md5 = self.file_hash(self.filename) return self.md5
[ "Get", "or", "calculate", "MD5", "value", "of", "the", "local", "file", "." ]
bloomreach/s4cmd
python
https://github.com/bloomreach/s4cmd/blob/bb51075bf43703e7cd95aa39288cf7732ec13a6d/s4cmd.py#L1076-L1080
[ "def", "get_md5", "(", "self", ")", ":", "if", "self", ".", "md5", "is", "None", ":", "self", ".", "md5", "=", "self", ".", "file_hash", "(", "self", ".", "filename", ")", "return", "self", ".", "md5" ]
bb51075bf43703e7cd95aa39288cf7732ec13a6d
test
ThreadUtil.mkdirs
Ensure all directories are created for a given target file.
s4cmd.py
def mkdirs(self, target): '''Ensure all directories are created for a given target file.''' path = os.path.dirname(target) if path and path != PATH_SEP and not os.path.isdir(path): # Multi-threading means there will be intervleaved execution # between the check and creation of the directory. try: os.makedirs(path) except OSError as ose: if ose.errno != errno.EEXIST: raise Failure('Unable to create directory (%s)' % (path,))
def mkdirs(self, target): '''Ensure all directories are created for a given target file.''' path = os.path.dirname(target) if path and path != PATH_SEP and not os.path.isdir(path): # Multi-threading means there will be intervleaved execution # between the check and creation of the directory. try: os.makedirs(path) except OSError as ose: if ose.errno != errno.EEXIST: raise Failure('Unable to create directory (%s)' % (path,))
[ "Ensure", "all", "directories", "are", "created", "for", "a", "given", "target", "file", "." ]
bloomreach/s4cmd
python
https://github.com/bloomreach/s4cmd/blob/bb51075bf43703e7cd95aa39288cf7732ec13a6d/s4cmd.py#L1114-L1124
[ "def", "mkdirs", "(", "self", ",", "target", ")", ":", "path", "=", "os", ".", "path", ".", "dirname", "(", "target", ")", "if", "path", "and", "path", "!=", "PATH_SEP", "and", "not", "os", ".", "path", ".", "isdir", "(", "path", ")", ":", "# Multi-threading means there will be intervleaved execution", "# between the check and creation of the directory.", "try", ":", "os", ".", "makedirs", "(", "path", ")", "except", "OSError", "as", "ose", ":", "if", "ose", ".", "errno", "!=", "errno", ".", "EEXIST", ":", "raise", "Failure", "(", "'Unable to create directory (%s)'", "%", "(", "path", ",", ")", ")" ]
bb51075bf43703e7cd95aa39288cf7732ec13a6d
test
ThreadUtil.sync_check
Check MD5 for a local file and a remote file. Return True if they have the same md5 hash, otherwise False.
s4cmd.py
def sync_check(self, md5cache, remoteKey): '''Check MD5 for a local file and a remote file. Return True if they have the same md5 hash, otherwise False. ''' if not remoteKey: return False if not os.path.exists(md5cache.filename): return False localmd5 = md5cache.get_md5() # check multiple md5 locations return ('ETag' in remoteKey and remoteKey['ETag'] == '"%s"' % localmd5) or \ ('md5' in remoteKey and remoteKey['md5'] == localmd5) or \ ('md5' in remoteKey['Metadata'] and remoteKey['Metadata']['md5'] == localmd5)
def sync_check(self, md5cache, remoteKey): '''Check MD5 for a local file and a remote file. Return True if they have the same md5 hash, otherwise False. ''' if not remoteKey: return False if not os.path.exists(md5cache.filename): return False localmd5 = md5cache.get_md5() # check multiple md5 locations return ('ETag' in remoteKey and remoteKey['ETag'] == '"%s"' % localmd5) or \ ('md5' in remoteKey and remoteKey['md5'] == localmd5) or \ ('md5' in remoteKey['Metadata'] and remoteKey['Metadata']['md5'] == localmd5)
[ "Check", "MD5", "for", "a", "local", "file", "and", "a", "remote", "file", ".", "Return", "True", "if", "they", "have", "the", "same", "md5", "hash", "otherwise", "False", "." ]
bloomreach/s4cmd
python
https://github.com/bloomreach/s4cmd/blob/bb51075bf43703e7cd95aa39288cf7732ec13a6d/s4cmd.py#L1127-L1140
[ "def", "sync_check", "(", "self", ",", "md5cache", ",", "remoteKey", ")", ":", "if", "not", "remoteKey", ":", "return", "False", "if", "not", "os", ".", "path", ".", "exists", "(", "md5cache", ".", "filename", ")", ":", "return", "False", "localmd5", "=", "md5cache", ".", "get_md5", "(", ")", "# check multiple md5 locations", "return", "(", "'ETag'", "in", "remoteKey", "and", "remoteKey", "[", "'ETag'", "]", "==", "'\"%s\"'", "%", "localmd5", ")", "or", "(", "'md5'", "in", "remoteKey", "and", "remoteKey", "[", "'md5'", "]", "==", "localmd5", ")", "or", "(", "'md5'", "in", "remoteKey", "[", "'Metadata'", "]", "and", "remoteKey", "[", "'Metadata'", "]", "[", "'md5'", "]", "==", "localmd5", ")" ]
bb51075bf43703e7cd95aa39288cf7732ec13a6d
test
ThreadUtil.partial_match
Partially match a path and a filter_path with wildcards. This function will return True if this path partially match a filter path. This is used for walking through directories with multiple level wildcard.
s4cmd.py
def partial_match(self, path, filter_path): '''Partially match a path and a filter_path with wildcards. This function will return True if this path partially match a filter path. This is used for walking through directories with multiple level wildcard. ''' if not path or not filter_path: return True # trailing slash normalization if path[-1] == PATH_SEP: path = path[0:-1] if filter_path[-1] == PATH_SEP: filter_path += '*' pi = path.split(PATH_SEP) fi = filter_path.split(PATH_SEP) # Here, if we are in recursive mode, we allow the pi to be longer than fi. # Otherwise, length of pi should be equal or less than the lenght of fi. min_len = min(len(pi), len(fi)) matched = fnmatch.fnmatch(PATH_SEP.join(pi[0:min_len]), PATH_SEP.join(fi[0:min_len])) return matched and (self.opt.recursive or len(pi) <= len(fi))
def partial_match(self, path, filter_path): '''Partially match a path and a filter_path with wildcards. This function will return True if this path partially match a filter path. This is used for walking through directories with multiple level wildcard. ''' if not path or not filter_path: return True # trailing slash normalization if path[-1] == PATH_SEP: path = path[0:-1] if filter_path[-1] == PATH_SEP: filter_path += '*' pi = path.split(PATH_SEP) fi = filter_path.split(PATH_SEP) # Here, if we are in recursive mode, we allow the pi to be longer than fi. # Otherwise, length of pi should be equal or less than the lenght of fi. min_len = min(len(pi), len(fi)) matched = fnmatch.fnmatch(PATH_SEP.join(pi[0:min_len]), PATH_SEP.join(fi[0:min_len])) return matched and (self.opt.recursive or len(pi) <= len(fi))
[ "Partially", "match", "a", "path", "and", "a", "filter_path", "with", "wildcards", ".", "This", "function", "will", "return", "True", "if", "this", "path", "partially", "match", "a", "filter", "path", ".", "This", "is", "used", "for", "walking", "through", "directories", "with", "multiple", "level", "wildcard", "." ]
bloomreach/s4cmd
python
https://github.com/bloomreach/s4cmd/blob/bb51075bf43703e7cd95aa39288cf7732ec13a6d/s4cmd.py#L1143-L1164
[ "def", "partial_match", "(", "self", ",", "path", ",", "filter_path", ")", ":", "if", "not", "path", "or", "not", "filter_path", ":", "return", "True", "# trailing slash normalization", "if", "path", "[", "-", "1", "]", "==", "PATH_SEP", ":", "path", "=", "path", "[", "0", ":", "-", "1", "]", "if", "filter_path", "[", "-", "1", "]", "==", "PATH_SEP", ":", "filter_path", "+=", "'*'", "pi", "=", "path", ".", "split", "(", "PATH_SEP", ")", "fi", "=", "filter_path", ".", "split", "(", "PATH_SEP", ")", "# Here, if we are in recursive mode, we allow the pi to be longer than fi.", "# Otherwise, length of pi should be equal or less than the lenght of fi.", "min_len", "=", "min", "(", "len", "(", "pi", ")", ",", "len", "(", "fi", ")", ")", "matched", "=", "fnmatch", ".", "fnmatch", "(", "PATH_SEP", ".", "join", "(", "pi", "[", "0", ":", "min_len", "]", ")", ",", "PATH_SEP", ".", "join", "(", "fi", "[", "0", ":", "min_len", "]", ")", ")", "return", "matched", "and", "(", "self", ".", "opt", ".", "recursive", "or", "len", "(", "pi", ")", "<=", "len", "(", "fi", ")", ")" ]
bb51075bf43703e7cd95aa39288cf7732ec13a6d
test
ThreadUtil.s3walk
Thread worker for s3walk. Recursively walk into all subdirectories if they still match the filter path partially.
s4cmd.py
def s3walk(self, s3url, s3dir, filter_path, result): '''Thread worker for s3walk. Recursively walk into all subdirectories if they still match the filter path partially. ''' paginator = self.s3.get_paginator('list_objects') filter_path_level = filter_path.count(PATH_SEP) for page in paginator.paginate(Bucket=s3url.bucket, Prefix=s3dir, Delimiter=PATH_SEP, PaginationConfig={'PageSize': 1000}): # Get subdirectories first. for obj in page.get('CommonPrefixes') or []: obj_name = obj['Prefix'] if not self.partial_match(obj_name, filter_path): continue if self.opt.recursive or (obj_name.count(PATH_SEP) != filter_path_level + 1): self.pool.s3walk(s3url, obj_name, filter_path, result) else: self.conditional(result, { 'name': S3URL.combine(s3url.proto, s3url.bucket, obj_name), 'is_dir': True, 'size': 0, 'last_modified': None }) # Then get all items in this folder. for obj in page.get('Contents') or []: obj_name = obj['Key'] if not self.partial_match(obj_name, filter_path): continue if self.opt.recursive or obj_name.count(PATH_SEP) == filter_path_level: self.conditional(result, { 'name': S3URL.combine(s3url.proto, s3url.bucket, obj_name), 'is_dir': False, 'size': obj['Size'], 'last_modified': obj['LastModified'] })
def s3walk(self, s3url, s3dir, filter_path, result): '''Thread worker for s3walk. Recursively walk into all subdirectories if they still match the filter path partially. ''' paginator = self.s3.get_paginator('list_objects') filter_path_level = filter_path.count(PATH_SEP) for page in paginator.paginate(Bucket=s3url.bucket, Prefix=s3dir, Delimiter=PATH_SEP, PaginationConfig={'PageSize': 1000}): # Get subdirectories first. for obj in page.get('CommonPrefixes') or []: obj_name = obj['Prefix'] if not self.partial_match(obj_name, filter_path): continue if self.opt.recursive or (obj_name.count(PATH_SEP) != filter_path_level + 1): self.pool.s3walk(s3url, obj_name, filter_path, result) else: self.conditional(result, { 'name': S3URL.combine(s3url.proto, s3url.bucket, obj_name), 'is_dir': True, 'size': 0, 'last_modified': None }) # Then get all items in this folder. for obj in page.get('Contents') or []: obj_name = obj['Key'] if not self.partial_match(obj_name, filter_path): continue if self.opt.recursive or obj_name.count(PATH_SEP) == filter_path_level: self.conditional(result, { 'name': S3URL.combine(s3url.proto, s3url.bucket, obj_name), 'is_dir': False, 'size': obj['Size'], 'last_modified': obj['LastModified'] })
[ "Thread", "worker", "for", "s3walk", ".", "Recursively", "walk", "into", "all", "subdirectories", "if", "they", "still", "match", "the", "filter", "path", "partially", "." ]
bloomreach/s4cmd
python
https://github.com/bloomreach/s4cmd/blob/bb51075bf43703e7cd95aa39288cf7732ec13a6d/s4cmd.py#L1167-L1206
[ "def", "s3walk", "(", "self", ",", "s3url", ",", "s3dir", ",", "filter_path", ",", "result", ")", ":", "paginator", "=", "self", ".", "s3", ".", "get_paginator", "(", "'list_objects'", ")", "filter_path_level", "=", "filter_path", ".", "count", "(", "PATH_SEP", ")", "for", "page", "in", "paginator", ".", "paginate", "(", "Bucket", "=", "s3url", ".", "bucket", ",", "Prefix", "=", "s3dir", ",", "Delimiter", "=", "PATH_SEP", ",", "PaginationConfig", "=", "{", "'PageSize'", ":", "1000", "}", ")", ":", "# Get subdirectories first.", "for", "obj", "in", "page", ".", "get", "(", "'CommonPrefixes'", ")", "or", "[", "]", ":", "obj_name", "=", "obj", "[", "'Prefix'", "]", "if", "not", "self", ".", "partial_match", "(", "obj_name", ",", "filter_path", ")", ":", "continue", "if", "self", ".", "opt", ".", "recursive", "or", "(", "obj_name", ".", "count", "(", "PATH_SEP", ")", "!=", "filter_path_level", "+", "1", ")", ":", "self", ".", "pool", ".", "s3walk", "(", "s3url", ",", "obj_name", ",", "filter_path", ",", "result", ")", "else", ":", "self", ".", "conditional", "(", "result", ",", "{", "'name'", ":", "S3URL", ".", "combine", "(", "s3url", ".", "proto", ",", "s3url", ".", "bucket", ",", "obj_name", ")", ",", "'is_dir'", ":", "True", ",", "'size'", ":", "0", ",", "'last_modified'", ":", "None", "}", ")", "# Then get all items in this folder.", "for", "obj", "in", "page", ".", "get", "(", "'Contents'", ")", "or", "[", "]", ":", "obj_name", "=", "obj", "[", "'Key'", "]", "if", "not", "self", ".", "partial_match", "(", "obj_name", ",", "filter_path", ")", ":", "continue", "if", "self", ".", "opt", ".", "recursive", "or", "obj_name", ".", "count", "(", "PATH_SEP", ")", "==", "filter_path_level", ":", "self", ".", "conditional", "(", "result", ",", "{", "'name'", ":", "S3URL", ".", "combine", "(", "s3url", ".", "proto", ",", "s3url", ".", "bucket", ",", "obj_name", ")", ",", "'is_dir'", ":", "False", ",", "'size'", ":", "obj", "[", "'Size'", "]", ",", "'last_modified'", ":", 
"obj", "[", "'LastModified'", "]", "}", ")" ]
bb51075bf43703e7cd95aa39288cf7732ec13a6d
test
ThreadUtil.conditional
Check all file item with given conditions.
s4cmd.py
def conditional(self, result, obj): '''Check all file item with given conditions.''' fileonly = (self.opt.last_modified_before is not None) or (self.opt.last_modified_after is not None) if obj['is_dir']: if not fileonly: result.append(obj) return if (self.opt.last_modified_before is not None) and obj['last_modified'] >= self.opt.last_modified_before: return if (self.opt.last_modified_after is not None) and obj['last_modified'] <= self.opt.last_modified_after: return result.append(obj)
def conditional(self, result, obj): '''Check all file item with given conditions.''' fileonly = (self.opt.last_modified_before is not None) or (self.opt.last_modified_after is not None) if obj['is_dir']: if not fileonly: result.append(obj) return if (self.opt.last_modified_before is not None) and obj['last_modified'] >= self.opt.last_modified_before: return if (self.opt.last_modified_after is not None) and obj['last_modified'] <= self.opt.last_modified_after: return result.append(obj)
[ "Check", "all", "file", "item", "with", "given", "conditions", "." ]
bloomreach/s4cmd
python
https://github.com/bloomreach/s4cmd/blob/bb51075bf43703e7cd95aa39288cf7732ec13a6d/s4cmd.py#L1208-L1223
[ "def", "conditional", "(", "self", ",", "result", ",", "obj", ")", ":", "fileonly", "=", "(", "self", ".", "opt", ".", "last_modified_before", "is", "not", "None", ")", "or", "(", "self", ".", "opt", ".", "last_modified_after", "is", "not", "None", ")", "if", "obj", "[", "'is_dir'", "]", ":", "if", "not", "fileonly", ":", "result", ".", "append", "(", "obj", ")", "return", "if", "(", "self", ".", "opt", ".", "last_modified_before", "is", "not", "None", ")", "and", "obj", "[", "'last_modified'", "]", ">=", "self", ".", "opt", ".", "last_modified_before", ":", "return", "if", "(", "self", ".", "opt", ".", "last_modified_after", "is", "not", "None", ")", "and", "obj", "[", "'last_modified'", "]", "<=", "self", ".", "opt", ".", "last_modified_after", ":", "return", "result", ".", "append", "(", "obj", ")" ]
bb51075bf43703e7cd95aa39288cf7732ec13a6d
test
ThreadUtil.get_file_splits
Get file splits for upload/download/copy operation.
s4cmd.py
def get_file_splits(self, id, source, target, fsize, splitsize): '''Get file splits for upload/download/copy operation.''' pos = 0 part = 1 # S3 part id starts from 1 mpi = ThreadUtil.MultipartItem(id) splits = [] while pos < fsize: chunk = min(splitsize, fsize - pos) assert(chunk > 0) splits.append((source, target, mpi, pos, chunk, part)) part += 1 pos += chunk mpi.total = len(splits) return splits
def get_file_splits(self, id, source, target, fsize, splitsize): '''Get file splits for upload/download/copy operation.''' pos = 0 part = 1 # S3 part id starts from 1 mpi = ThreadUtil.MultipartItem(id) splits = [] while pos < fsize: chunk = min(splitsize, fsize - pos) assert(chunk > 0) splits.append((source, target, mpi, pos, chunk, part)) part += 1 pos += chunk mpi.total = len(splits) return splits
[ "Get", "file", "splits", "for", "upload", "/", "download", "/", "copy", "operation", "." ]
bloomreach/s4cmd
python
https://github.com/bloomreach/s4cmd/blob/bb51075bf43703e7cd95aa39288cf7732ec13a6d/s4cmd.py#L1260-L1275
[ "def", "get_file_splits", "(", "self", ",", "id", ",", "source", ",", "target", ",", "fsize", ",", "splitsize", ")", ":", "pos", "=", "0", "part", "=", "1", "# S3 part id starts from 1", "mpi", "=", "ThreadUtil", ".", "MultipartItem", "(", "id", ")", "splits", "=", "[", "]", "while", "pos", "<", "fsize", ":", "chunk", "=", "min", "(", "splitsize", ",", "fsize", "-", "pos", ")", "assert", "(", "chunk", ">", "0", ")", "splits", ".", "append", "(", "(", "source", ",", "target", ",", "mpi", ",", "pos", ",", "chunk", ",", "part", ")", ")", "part", "+=", "1", "pos", "+=", "chunk", "mpi", ".", "total", "=", "len", "(", "splits", ")", "return", "splits" ]
bb51075bf43703e7cd95aa39288cf7732ec13a6d
test
ThreadUtil.get_file_privilege
Get privileges of a local file
s4cmd.py
def get_file_privilege(self, source): '''Get privileges of a local file''' try: return str(oct(os.stat(source).st_mode)[-3:]) except Exception as e: raise Failure('Could not get stat for %s, error_message = %s', source, e)
def get_file_privilege(self, source): '''Get privileges of a local file''' try: return str(oct(os.stat(source).st_mode)[-3:]) except Exception as e: raise Failure('Could not get stat for %s, error_message = %s', source, e)
[ "Get", "privileges", "of", "a", "local", "file" ]
bloomreach/s4cmd
python
https://github.com/bloomreach/s4cmd/blob/bb51075bf43703e7cd95aa39288cf7732ec13a6d/s4cmd.py#L1278-L1283
[ "def", "get_file_privilege", "(", "self", ",", "source", ")", ":", "try", ":", "return", "str", "(", "oct", "(", "os", ".", "stat", "(", "source", ")", ".", "st_mode", ")", "[", "-", "3", ":", "]", ")", "except", "Exception", "as", "e", ":", "raise", "Failure", "(", "'Could not get stat for %s, error_message = %s'", ",", "source", ",", "e", ")" ]
bb51075bf43703e7cd95aa39288cf7732ec13a6d
test
ThreadUtil.lookup
Get the s3 object with the S3 URL. Return None if not exist.
s4cmd.py
def lookup(self, s3url): '''Get the s3 object with the S3 URL. Return None if not exist.''' try: return self.s3.head_object(Bucket=s3url.bucket, Key=s3url.path) except BotoClient.ClientError as e: if e.response['ResponseMetadata']['HTTPStatusCode'] == 404: return None else: raise e
def lookup(self, s3url): '''Get the s3 object with the S3 URL. Return None if not exist.''' try: return self.s3.head_object(Bucket=s3url.bucket, Key=s3url.path) except BotoClient.ClientError as e: if e.response['ResponseMetadata']['HTTPStatusCode'] == 404: return None else: raise e
[ "Get", "the", "s3", "object", "with", "the", "S3", "URL", ".", "Return", "None", "if", "not", "exist", "." ]
bloomreach/s4cmd
python
https://github.com/bloomreach/s4cmd/blob/bb51075bf43703e7cd95aa39288cf7732ec13a6d/s4cmd.py#L1286-L1294
[ "def", "lookup", "(", "self", ",", "s3url", ")", ":", "try", ":", "return", "self", ".", "s3", ".", "head_object", "(", "Bucket", "=", "s3url", ".", "bucket", ",", "Key", "=", "s3url", ".", "path", ")", "except", "BotoClient", ".", "ClientError", "as", "e", ":", "if", "e", ".", "response", "[", "'ResponseMetadata'", "]", "[", "'HTTPStatusCode'", "]", "==", "404", ":", "return", "None", "else", ":", "raise", "e" ]
bb51075bf43703e7cd95aa39288cf7732ec13a6d
test
ThreadUtil.read_file_chunk
Read local file chunk
s4cmd.py
def read_file_chunk(self, source, pos, chunk): '''Read local file chunk''' if chunk==0: return StringIO() data = None with open(source, 'rb') as f: f.seek(pos) data = f.read(chunk) if not data: raise Failure('Unable to read data from source: %s' % source) return StringIO(data)
def read_file_chunk(self, source, pos, chunk): '''Read local file chunk''' if chunk==0: return StringIO() data = None with open(source, 'rb') as f: f.seek(pos) data = f.read(chunk) if not data: raise Failure('Unable to read data from source: %s' % source) return StringIO(data)
[ "Read", "local", "file", "chunk" ]
bloomreach/s4cmd
python
https://github.com/bloomreach/s4cmd/blob/bb51075bf43703e7cd95aa39288cf7732ec13a6d/s4cmd.py#L1297-L1307
[ "def", "read_file_chunk", "(", "self", ",", "source", ",", "pos", ",", "chunk", ")", ":", "if", "chunk", "==", "0", ":", "return", "StringIO", "(", ")", "data", "=", "None", "with", "open", "(", "source", ",", "'rb'", ")", "as", "f", ":", "f", ".", "seek", "(", "pos", ")", "data", "=", "f", ".", "read", "(", "chunk", ")", "if", "not", "data", ":", "raise", "Failure", "(", "'Unable to read data from source: %s'", "%", "source", ")", "return", "StringIO", "(", "data", ")" ]
bb51075bf43703e7cd95aa39288cf7732ec13a6d
test
ThreadUtil.upload
Thread worker for upload operation.
s4cmd.py
def upload(self, source, target, mpi=None, pos=0, chunk=0, part=0): '''Thread worker for upload operation.''' s3url = S3URL(target) obj = self.lookup(s3url) # Initialization: Set up multithreaded uploads. if not mpi: fsize = os.path.getsize(source) md5cache = LocalMD5Cache(source) # optional checks if self.opt.dry_run: message('%s => %s', source, target) return elif self.opt.sync_check and self.sync_check(md5cache, obj): message('%s => %s (synced)', source, target) return elif not self.opt.force and obj: raise Failure('File already exists: %s' % target) if fsize < self.opt.max_singlepart_upload_size: data = self.read_file_chunk(source, 0, fsize) self.s3.put_object(Bucket=s3url.bucket, Key=s3url.path, Body=data, Metadata={'md5': md5cache.get_md5(), 'privilege': self.get_file_privilege(source)}) message('%s => %s', source, target) return # Here we need to have our own md5 value because multipart upload calculates # different md5 values. response = self.s3.create_multipart_upload(Bucket=s3url.bucket, Key=s3url.path, Metadata={'md5': md5cache.get_md5(), 'privilege': self.get_file_privilege(source)}) upload_id = response['UploadId'] for args in self.get_file_splits(upload_id, source, target, fsize, self.opt.multipart_split_size): self.pool.upload(*args) return data = self.read_file_chunk(source, pos, chunk) response = self.s3.upload_part(Bucket=s3url.bucket, Key=s3url.path, UploadId=mpi.id, Body=data, PartNumber=part) # Finalize if mpi.complete({'ETag': response['ETag'], 'PartNumber': part}): try: self.s3.complete_multipart_upload(Bucket=s3url.bucket, Key=s3url.path, UploadId=mpi.id, MultipartUpload={'Parts': mpi.sorted_parts()}) message('%s => %s', source, target) except Exception as e: message('Unable to complete upload: %s', str(e)) self.s3.abort_multipart_upload(Bucket=s3url.bucket, Key=s3url.path, UploadId=mpi.id) raise RetryFailure('Upload failed: Unable to complete upload %s.' % source)
def upload(self, source, target, mpi=None, pos=0, chunk=0, part=0): '''Thread worker for upload operation.''' s3url = S3URL(target) obj = self.lookup(s3url) # Initialization: Set up multithreaded uploads. if not mpi: fsize = os.path.getsize(source) md5cache = LocalMD5Cache(source) # optional checks if self.opt.dry_run: message('%s => %s', source, target) return elif self.opt.sync_check and self.sync_check(md5cache, obj): message('%s => %s (synced)', source, target) return elif not self.opt.force and obj: raise Failure('File already exists: %s' % target) if fsize < self.opt.max_singlepart_upload_size: data = self.read_file_chunk(source, 0, fsize) self.s3.put_object(Bucket=s3url.bucket, Key=s3url.path, Body=data, Metadata={'md5': md5cache.get_md5(), 'privilege': self.get_file_privilege(source)}) message('%s => %s', source, target) return # Here we need to have our own md5 value because multipart upload calculates # different md5 values. response = self.s3.create_multipart_upload(Bucket=s3url.bucket, Key=s3url.path, Metadata={'md5': md5cache.get_md5(), 'privilege': self.get_file_privilege(source)}) upload_id = response['UploadId'] for args in self.get_file_splits(upload_id, source, target, fsize, self.opt.multipart_split_size): self.pool.upload(*args) return data = self.read_file_chunk(source, pos, chunk) response = self.s3.upload_part(Bucket=s3url.bucket, Key=s3url.path, UploadId=mpi.id, Body=data, PartNumber=part) # Finalize if mpi.complete({'ETag': response['ETag'], 'PartNumber': part}): try: self.s3.complete_multipart_upload(Bucket=s3url.bucket, Key=s3url.path, UploadId=mpi.id, MultipartUpload={'Parts': mpi.sorted_parts()}) message('%s => %s', source, target) except Exception as e: message('Unable to complete upload: %s', str(e)) self.s3.abort_multipart_upload(Bucket=s3url.bucket, Key=s3url.path, UploadId=mpi.id) raise RetryFailure('Upload failed: Unable to complete upload %s.' % source)
[ "Thread", "worker", "for", "upload", "operation", "." ]
bloomreach/s4cmd
python
https://github.com/bloomreach/s4cmd/blob/bb51075bf43703e7cd95aa39288cf7732ec13a6d/s4cmd.py#L1310-L1363
[ "def", "upload", "(", "self", ",", "source", ",", "target", ",", "mpi", "=", "None", ",", "pos", "=", "0", ",", "chunk", "=", "0", ",", "part", "=", "0", ")", ":", "s3url", "=", "S3URL", "(", "target", ")", "obj", "=", "self", ".", "lookup", "(", "s3url", ")", "# Initialization: Set up multithreaded uploads.", "if", "not", "mpi", ":", "fsize", "=", "os", ".", "path", ".", "getsize", "(", "source", ")", "md5cache", "=", "LocalMD5Cache", "(", "source", ")", "# optional checks", "if", "self", ".", "opt", ".", "dry_run", ":", "message", "(", "'%s => %s'", ",", "source", ",", "target", ")", "return", "elif", "self", ".", "opt", ".", "sync_check", "and", "self", ".", "sync_check", "(", "md5cache", ",", "obj", ")", ":", "message", "(", "'%s => %s (synced)'", ",", "source", ",", "target", ")", "return", "elif", "not", "self", ".", "opt", ".", "force", "and", "obj", ":", "raise", "Failure", "(", "'File already exists: %s'", "%", "target", ")", "if", "fsize", "<", "self", ".", "opt", ".", "max_singlepart_upload_size", ":", "data", "=", "self", ".", "read_file_chunk", "(", "source", ",", "0", ",", "fsize", ")", "self", ".", "s3", ".", "put_object", "(", "Bucket", "=", "s3url", ".", "bucket", ",", "Key", "=", "s3url", ".", "path", ",", "Body", "=", "data", ",", "Metadata", "=", "{", "'md5'", ":", "md5cache", ".", "get_md5", "(", ")", ",", "'privilege'", ":", "self", ".", "get_file_privilege", "(", "source", ")", "}", ")", "message", "(", "'%s => %s'", ",", "source", ",", "target", ")", "return", "# Here we need to have our own md5 value because multipart upload calculates", "# different md5 values.", "response", "=", "self", ".", "s3", ".", "create_multipart_upload", "(", "Bucket", "=", "s3url", ".", "bucket", ",", "Key", "=", "s3url", ".", "path", ",", "Metadata", "=", "{", "'md5'", ":", "md5cache", ".", "get_md5", "(", ")", ",", "'privilege'", ":", "self", ".", "get_file_privilege", "(", "source", ")", "}", ")", "upload_id", "=", "response", "[", "'UploadId'", "]", "for", 
"args", "in", "self", ".", "get_file_splits", "(", "upload_id", ",", "source", ",", "target", ",", "fsize", ",", "self", ".", "opt", ".", "multipart_split_size", ")", ":", "self", ".", "pool", ".", "upload", "(", "*", "args", ")", "return", "data", "=", "self", ".", "read_file_chunk", "(", "source", ",", "pos", ",", "chunk", ")", "response", "=", "self", ".", "s3", ".", "upload_part", "(", "Bucket", "=", "s3url", ".", "bucket", ",", "Key", "=", "s3url", ".", "path", ",", "UploadId", "=", "mpi", ".", "id", ",", "Body", "=", "data", ",", "PartNumber", "=", "part", ")", "# Finalize", "if", "mpi", ".", "complete", "(", "{", "'ETag'", ":", "response", "[", "'ETag'", "]", ",", "'PartNumber'", ":", "part", "}", ")", ":", "try", ":", "self", ".", "s3", ".", "complete_multipart_upload", "(", "Bucket", "=", "s3url", ".", "bucket", ",", "Key", "=", "s3url", ".", "path", ",", "UploadId", "=", "mpi", ".", "id", ",", "MultipartUpload", "=", "{", "'Parts'", ":", "mpi", ".", "sorted_parts", "(", ")", "}", ")", "message", "(", "'%s => %s'", ",", "source", ",", "target", ")", "except", "Exception", "as", "e", ":", "message", "(", "'Unable to complete upload: %s'", ",", "str", "(", "e", ")", ")", "self", ".", "s3", ".", "abort_multipart_upload", "(", "Bucket", "=", "s3url", ".", "bucket", ",", "Key", "=", "s3url", ".", "path", ",", "UploadId", "=", "mpi", ".", "id", ")", "raise", "RetryFailure", "(", "'Upload failed: Unable to complete upload %s.'", "%", "source", ")" ]
bb51075bf43703e7cd95aa39288cf7732ec13a6d
test
ThreadUtil._verify_file_size
Verify the file size of the downloaded file.
s4cmd.py
def _verify_file_size(self, obj, downloaded_file): '''Verify the file size of the downloaded file.''' file_size = os.path.getsize(downloaded_file) if int(obj['ContentLength']) != file_size: raise RetryFailure('Downloaded file size inconsistent: %s' % (repr(obj)))
def _verify_file_size(self, obj, downloaded_file): '''Verify the file size of the downloaded file.''' file_size = os.path.getsize(downloaded_file) if int(obj['ContentLength']) != file_size: raise RetryFailure('Downloaded file size inconsistent: %s' % (repr(obj)))
[ "Verify", "the", "file", "size", "of", "the", "downloaded", "file", "." ]
bloomreach/s4cmd
python
https://github.com/bloomreach/s4cmd/blob/bb51075bf43703e7cd95aa39288cf7732ec13a6d/s4cmd.py#L1366-L1370
[ "def", "_verify_file_size", "(", "self", ",", "obj", ",", "downloaded_file", ")", ":", "file_size", "=", "os", ".", "path", ".", "getsize", "(", "downloaded_file", ")", "if", "int", "(", "obj", "[", "'ContentLength'", "]", ")", "!=", "file_size", ":", "raise", "RetryFailure", "(", "'Downloaded file size inconsistent: %s'", "%", "(", "repr", "(", "obj", ")", ")", ")" ]
bb51075bf43703e7cd95aa39288cf7732ec13a6d
test
ThreadUtil.write_file_chunk
Write local file chunk
s4cmd.py
def write_file_chunk(self, target, pos, chunk, body): '''Write local file chunk''' fd = os.open(target, os.O_CREAT | os.O_WRONLY) try: os.lseek(fd, pos, os.SEEK_SET) data = body.read(chunk) num_bytes_written = os.write(fd, data) if(num_bytes_written != len(data)): raise RetryFailure('Number of bytes written inconsistent: %s != %s' % (num_bytes_written, sys.getsizeof(data))) finally: os.close(fd)
def write_file_chunk(self, target, pos, chunk, body): '''Write local file chunk''' fd = os.open(target, os.O_CREAT | os.O_WRONLY) try: os.lseek(fd, pos, os.SEEK_SET) data = body.read(chunk) num_bytes_written = os.write(fd, data) if(num_bytes_written != len(data)): raise RetryFailure('Number of bytes written inconsistent: %s != %s' % (num_bytes_written, sys.getsizeof(data))) finally: os.close(fd)
[ "Write", "local", "file", "chunk" ]
bloomreach/s4cmd
python
https://github.com/bloomreach/s4cmd/blob/bb51075bf43703e7cd95aa39288cf7732ec13a6d/s4cmd.py#L1373-L1383
[ "def", "write_file_chunk", "(", "self", ",", "target", ",", "pos", ",", "chunk", ",", "body", ")", ":", "fd", "=", "os", ".", "open", "(", "target", ",", "os", ".", "O_CREAT", "|", "os", ".", "O_WRONLY", ")", "try", ":", "os", ".", "lseek", "(", "fd", ",", "pos", ",", "os", ".", "SEEK_SET", ")", "data", "=", "body", ".", "read", "(", "chunk", ")", "num_bytes_written", "=", "os", ".", "write", "(", "fd", ",", "data", ")", "if", "(", "num_bytes_written", "!=", "len", "(", "data", ")", ")", ":", "raise", "RetryFailure", "(", "'Number of bytes written inconsistent: %s != %s'", "%", "(", "num_bytes_written", ",", "sys", ".", "getsizeof", "(", "data", ")", ")", ")", "finally", ":", "os", ".", "close", "(", "fd", ")" ]
bb51075bf43703e7cd95aa39288cf7732ec13a6d
test
ThreadUtil.download
Thread worker for download operation.
s4cmd.py
def download(self, source, target, mpi=None, pos=0, chunk=0, part=0): '''Thread worker for download operation.''' s3url = S3URL(source) obj = self.lookup(s3url) if obj is None: raise Failure('The obj "%s" does not exists.' % (s3url.path,)) # Initialization: Set up multithreaded downloads. if not mpi: # optional checks if self.opt.dry_run: message('%s => %s', source, target) return elif self.opt.sync_check and self.sync_check(LocalMD5Cache(target), obj): message('%s => %s (synced)', source, target) return elif not self.opt.force and os.path.exists(target): raise Failure('File already exists: %s' % target) fsize = int(obj['ContentLength']) # Small file optimization. if fsize < self.opt.max_singlepart_download_size: # Create a single part to chain back main download operation. mpi = ThreadUtil.MultipartItem(tempfile_get(target)) mpi.total = 1 pos = 0 chunk = fsize # Continue as one part download. else: # Here we use temp filename as the id of mpi. for args in self.get_file_splits(tempfile_get(target), source, target, fsize, self.opt.multipart_split_size): self.pool.download(*args) return tempfile = mpi.id if self.opt.recursive: self.mkdirs(tempfile) # Download part of the file, range is inclusive. response = self.s3.get_object(Bucket=s3url.bucket, Key=s3url.path, Range='bytes=%d-%d' % (pos, pos + chunk - 1)) self.write_file_chunk(tempfile, pos, chunk, response['Body']) # Finalize if mpi.complete({'PartNumber': part}): try: self.update_privilege(obj, tempfile) self._verify_file_size(obj, tempfile) tempfile_set(tempfile, target) message('%s => %s', source, target) except Exception as e: # Note that we don't retry in this case, because # We are going to remove the temp file, and if we # retry here with original parameters (wrapped in # the task item), it would fail anyway tempfile_set(tempfile, None) raise Failure('Download Failure: %s, Source: %s.' % (e.message, source))
def download(self, source, target, mpi=None, pos=0, chunk=0, part=0): '''Thread worker for download operation.''' s3url = S3URL(source) obj = self.lookup(s3url) if obj is None: raise Failure('The obj "%s" does not exists.' % (s3url.path,)) # Initialization: Set up multithreaded downloads. if not mpi: # optional checks if self.opt.dry_run: message('%s => %s', source, target) return elif self.opt.sync_check and self.sync_check(LocalMD5Cache(target), obj): message('%s => %s (synced)', source, target) return elif not self.opt.force and os.path.exists(target): raise Failure('File already exists: %s' % target) fsize = int(obj['ContentLength']) # Small file optimization. if fsize < self.opt.max_singlepart_download_size: # Create a single part to chain back main download operation. mpi = ThreadUtil.MultipartItem(tempfile_get(target)) mpi.total = 1 pos = 0 chunk = fsize # Continue as one part download. else: # Here we use temp filename as the id of mpi. for args in self.get_file_splits(tempfile_get(target), source, target, fsize, self.opt.multipart_split_size): self.pool.download(*args) return tempfile = mpi.id if self.opt.recursive: self.mkdirs(tempfile) # Download part of the file, range is inclusive. response = self.s3.get_object(Bucket=s3url.bucket, Key=s3url.path, Range='bytes=%d-%d' % (pos, pos + chunk - 1)) self.write_file_chunk(tempfile, pos, chunk, response['Body']) # Finalize if mpi.complete({'PartNumber': part}): try: self.update_privilege(obj, tempfile) self._verify_file_size(obj, tempfile) tempfile_set(tempfile, target) message('%s => %s', source, target) except Exception as e: # Note that we don't retry in this case, because # We are going to remove the temp file, and if we # retry here with original parameters (wrapped in # the task item), it would fail anyway tempfile_set(tempfile, None) raise Failure('Download Failure: %s, Source: %s.' % (e.message, source))
[ "Thread", "worker", "for", "download", "operation", "." ]
bloomreach/s4cmd
python
https://github.com/bloomreach/s4cmd/blob/bb51075bf43703e7cd95aa39288cf7732ec13a6d/s4cmd.py#L1386-L1442
[ "def", "download", "(", "self", ",", "source", ",", "target", ",", "mpi", "=", "None", ",", "pos", "=", "0", ",", "chunk", "=", "0", ",", "part", "=", "0", ")", ":", "s3url", "=", "S3URL", "(", "source", ")", "obj", "=", "self", ".", "lookup", "(", "s3url", ")", "if", "obj", "is", "None", ":", "raise", "Failure", "(", "'The obj \"%s\" does not exists.'", "%", "(", "s3url", ".", "path", ",", ")", ")", "# Initialization: Set up multithreaded downloads.", "if", "not", "mpi", ":", "# optional checks", "if", "self", ".", "opt", ".", "dry_run", ":", "message", "(", "'%s => %s'", ",", "source", ",", "target", ")", "return", "elif", "self", ".", "opt", ".", "sync_check", "and", "self", ".", "sync_check", "(", "LocalMD5Cache", "(", "target", ")", ",", "obj", ")", ":", "message", "(", "'%s => %s (synced)'", ",", "source", ",", "target", ")", "return", "elif", "not", "self", ".", "opt", ".", "force", "and", "os", ".", "path", ".", "exists", "(", "target", ")", ":", "raise", "Failure", "(", "'File already exists: %s'", "%", "target", ")", "fsize", "=", "int", "(", "obj", "[", "'ContentLength'", "]", ")", "# Small file optimization.", "if", "fsize", "<", "self", ".", "opt", ".", "max_singlepart_download_size", ":", "# Create a single part to chain back main download operation.", "mpi", "=", "ThreadUtil", ".", "MultipartItem", "(", "tempfile_get", "(", "target", ")", ")", "mpi", ".", "total", "=", "1", "pos", "=", "0", "chunk", "=", "fsize", "# Continue as one part download.", "else", ":", "# Here we use temp filename as the id of mpi.", "for", "args", "in", "self", ".", "get_file_splits", "(", "tempfile_get", "(", "target", ")", ",", "source", ",", "target", ",", "fsize", ",", "self", ".", "opt", ".", "multipart_split_size", ")", ":", "self", ".", "pool", ".", "download", "(", "*", "args", ")", "return", "tempfile", "=", "mpi", ".", "id", "if", "self", ".", "opt", ".", "recursive", ":", "self", ".", "mkdirs", "(", "tempfile", ")", "# Download part of the file, range is 
inclusive.", "response", "=", "self", ".", "s3", ".", "get_object", "(", "Bucket", "=", "s3url", ".", "bucket", ",", "Key", "=", "s3url", ".", "path", ",", "Range", "=", "'bytes=%d-%d'", "%", "(", "pos", ",", "pos", "+", "chunk", "-", "1", ")", ")", "self", ".", "write_file_chunk", "(", "tempfile", ",", "pos", ",", "chunk", ",", "response", "[", "'Body'", "]", ")", "# Finalize", "if", "mpi", ".", "complete", "(", "{", "'PartNumber'", ":", "part", "}", ")", ":", "try", ":", "self", ".", "update_privilege", "(", "obj", ",", "tempfile", ")", "self", ".", "_verify_file_size", "(", "obj", ",", "tempfile", ")", "tempfile_set", "(", "tempfile", ",", "target", ")", "message", "(", "'%s => %s'", ",", "source", ",", "target", ")", "except", "Exception", "as", "e", ":", "# Note that we don't retry in this case, because", "# We are going to remove the temp file, and if we", "# retry here with original parameters (wrapped in", "# the task item), it would fail anyway", "tempfile_set", "(", "tempfile", ",", "None", ")", "raise", "Failure", "(", "'Download Failure: %s, Source: %s.'", "%", "(", "e", ".", "message", ",", "source", ")", ")" ]
bb51075bf43703e7cd95aa39288cf7732ec13a6d
test
ThreadUtil.copy
Copy a single file from source to target using boto S3 library.
s4cmd.py
def copy(self, source, target, mpi=None, pos=0, chunk=0, part=0, delete_source=False): '''Copy a single file from source to target using boto S3 library.''' if self.opt.dry_run: message('%s => %s' % (source, target)) return source_url = S3URL(source) target_url = S3URL(target) if not mpi: obj = self.lookup(source_url) fsize = int(obj['ContentLength']) if fsize < self.opt.max_singlepart_copy_size: self.s3.copy_object(Bucket=target_url.bucket, Key=target_url.path, CopySource={'Bucket': source_url.bucket, 'Key': source_url.path}) message('%s => %s' % (source, target)) if delete_source: self.delete(source) return response = self.s3.create_multipart_upload(Bucket=target_url.bucket, Key=target_url.path, Metadata=obj['Metadata']) upload_id = response['UploadId'] for args in self.get_file_splits(upload_id, source, target, fsize, self.opt.multipart_split_size): self.pool.copy(*args, delete_source=delete_source) return response = self.s3.upload_part_copy(Bucket=target_url.bucket, Key=target_url.path, CopySource={'Bucket': source_url.bucket, 'Key': source_url.path}, CopySourceRange='bytes=%d-%d' % (pos, pos + chunk - 1), UploadId=mpi.id, PartNumber=part) if mpi.complete({'ETag': response['CopyPartResult']['ETag'], 'PartNumber': part}): try: # Finalize copy operation. self.s3.complete_multipart_upload(Bucket=target_url.bucket, Key=target_url.path, UploadId=mpi.id, MultipartUpload={'Parts': mpi.sorted_parts()}) if delete_source: self.delete(source) message('%s => %s' % (source, target)) except Exception as e: message('Unable to complete upload: %s', str(e)) self.s3.abort_multipart_upload(Bucket=source_url.bucket, Key=source_url.path, UploadId=mpi.id) raise RetryFailure('Copy failed: Unable to complete copy %s.' % source)
def copy(self, source, target, mpi=None, pos=0, chunk=0, part=0, delete_source=False): '''Copy a single file from source to target using boto S3 library.''' if self.opt.dry_run: message('%s => %s' % (source, target)) return source_url = S3URL(source) target_url = S3URL(target) if not mpi: obj = self.lookup(source_url) fsize = int(obj['ContentLength']) if fsize < self.opt.max_singlepart_copy_size: self.s3.copy_object(Bucket=target_url.bucket, Key=target_url.path, CopySource={'Bucket': source_url.bucket, 'Key': source_url.path}) message('%s => %s' % (source, target)) if delete_source: self.delete(source) return response = self.s3.create_multipart_upload(Bucket=target_url.bucket, Key=target_url.path, Metadata=obj['Metadata']) upload_id = response['UploadId'] for args in self.get_file_splits(upload_id, source, target, fsize, self.opt.multipart_split_size): self.pool.copy(*args, delete_source=delete_source) return response = self.s3.upload_part_copy(Bucket=target_url.bucket, Key=target_url.path, CopySource={'Bucket': source_url.bucket, 'Key': source_url.path}, CopySourceRange='bytes=%d-%d' % (pos, pos + chunk - 1), UploadId=mpi.id, PartNumber=part) if mpi.complete({'ETag': response['CopyPartResult']['ETag'], 'PartNumber': part}): try: # Finalize copy operation. self.s3.complete_multipart_upload(Bucket=target_url.bucket, Key=target_url.path, UploadId=mpi.id, MultipartUpload={'Parts': mpi.sorted_parts()}) if delete_source: self.delete(source) message('%s => %s' % (source, target)) except Exception as e: message('Unable to complete upload: %s', str(e)) self.s3.abort_multipart_upload(Bucket=source_url.bucket, Key=source_url.path, UploadId=mpi.id) raise RetryFailure('Copy failed: Unable to complete copy %s.' % source)
[ "Copy", "a", "single", "file", "from", "source", "to", "target", "using", "boto", "S3", "library", "." ]
bloomreach/s4cmd
python
https://github.com/bloomreach/s4cmd/blob/bb51075bf43703e7cd95aa39288cf7732ec13a6d/s4cmd.py#L1445-L1497
[ "def", "copy", "(", "self", ",", "source", ",", "target", ",", "mpi", "=", "None", ",", "pos", "=", "0", ",", "chunk", "=", "0", ",", "part", "=", "0", ",", "delete_source", "=", "False", ")", ":", "if", "self", ".", "opt", ".", "dry_run", ":", "message", "(", "'%s => %s'", "%", "(", "source", ",", "target", ")", ")", "return", "source_url", "=", "S3URL", "(", "source", ")", "target_url", "=", "S3URL", "(", "target", ")", "if", "not", "mpi", ":", "obj", "=", "self", ".", "lookup", "(", "source_url", ")", "fsize", "=", "int", "(", "obj", "[", "'ContentLength'", "]", ")", "if", "fsize", "<", "self", ".", "opt", ".", "max_singlepart_copy_size", ":", "self", ".", "s3", ".", "copy_object", "(", "Bucket", "=", "target_url", ".", "bucket", ",", "Key", "=", "target_url", ".", "path", ",", "CopySource", "=", "{", "'Bucket'", ":", "source_url", ".", "bucket", ",", "'Key'", ":", "source_url", ".", "path", "}", ")", "message", "(", "'%s => %s'", "%", "(", "source", ",", "target", ")", ")", "if", "delete_source", ":", "self", ".", "delete", "(", "source", ")", "return", "response", "=", "self", ".", "s3", ".", "create_multipart_upload", "(", "Bucket", "=", "target_url", ".", "bucket", ",", "Key", "=", "target_url", ".", "path", ",", "Metadata", "=", "obj", "[", "'Metadata'", "]", ")", "upload_id", "=", "response", "[", "'UploadId'", "]", "for", "args", "in", "self", ".", "get_file_splits", "(", "upload_id", ",", "source", ",", "target", ",", "fsize", ",", "self", ".", "opt", ".", "multipart_split_size", ")", ":", "self", ".", "pool", ".", "copy", "(", "*", "args", ",", "delete_source", "=", "delete_source", ")", "return", "response", "=", "self", ".", "s3", ".", "upload_part_copy", "(", "Bucket", "=", "target_url", ".", "bucket", ",", "Key", "=", "target_url", ".", "path", ",", "CopySource", "=", "{", "'Bucket'", ":", "source_url", ".", "bucket", ",", "'Key'", ":", "source_url", ".", "path", "}", ",", "CopySourceRange", "=", "'bytes=%d-%d'", "%", "(", "pos", ",", "pos", "+", 
"chunk", "-", "1", ")", ",", "UploadId", "=", "mpi", ".", "id", ",", "PartNumber", "=", "part", ")", "if", "mpi", ".", "complete", "(", "{", "'ETag'", ":", "response", "[", "'CopyPartResult'", "]", "[", "'ETag'", "]", ",", "'PartNumber'", ":", "part", "}", ")", ":", "try", ":", "# Finalize copy operation.", "self", ".", "s3", ".", "complete_multipart_upload", "(", "Bucket", "=", "target_url", ".", "bucket", ",", "Key", "=", "target_url", ".", "path", ",", "UploadId", "=", "mpi", ".", "id", ",", "MultipartUpload", "=", "{", "'Parts'", ":", "mpi", ".", "sorted_parts", "(", ")", "}", ")", "if", "delete_source", ":", "self", ".", "delete", "(", "source", ")", "message", "(", "'%s => %s'", "%", "(", "source", ",", "target", ")", ")", "except", "Exception", "as", "e", ":", "message", "(", "'Unable to complete upload: %s'", ",", "str", "(", "e", ")", ")", "self", ".", "s3", ".", "abort_multipart_upload", "(", "Bucket", "=", "source_url", ".", "bucket", ",", "Key", "=", "source_url", ".", "path", ",", "UploadId", "=", "mpi", ".", "id", ")", "raise", "RetryFailure", "(", "'Copy failed: Unable to complete copy %s.'", "%", "source", ")" ]
bb51075bf43703e7cd95aa39288cf7732ec13a6d
test
ThreadUtil.delete
Thread worker for download operation.
s4cmd.py
def delete(self, source): '''Thread worker for download operation.''' s3url = S3URL(source) message('Delete %s', source) if not self.opt.dry_run: self.s3.delete_object(Bucket=s3url.bucket, Key=s3url.path)
def delete(self, source): '''Thread worker for download operation.''' s3url = S3URL(source) message('Delete %s', source) if not self.opt.dry_run: self.s3.delete_object(Bucket=s3url.bucket, Key=s3url.path)
[ "Thread", "worker", "for", "download", "operation", "." ]
bloomreach/s4cmd
python
https://github.com/bloomreach/s4cmd/blob/bb51075bf43703e7cd95aa39288cf7732ec13a6d/s4cmd.py#L1500-L1506
[ "def", "delete", "(", "self", ",", "source", ")", ":", "s3url", "=", "S3URL", "(", "source", ")", "message", "(", "'Delete %s'", ",", "source", ")", "if", "not", "self", ".", "opt", ".", "dry_run", ":", "self", ".", "s3", ".", "delete_object", "(", "Bucket", "=", "s3url", ".", "bucket", ",", "Key", "=", "s3url", ".", "path", ")" ]
bb51075bf43703e7cd95aa39288cf7732ec13a6d
test
ThreadUtil.batch_delete
Delete a list of files in batch of batch_delete_size (default=1000).
s4cmd.py
def batch_delete(self, sources): '''Delete a list of files in batch of batch_delete_size (default=1000).''' assert(type(sources) == list) if len(sources) == 0: return elif len(sources) == 1: self.delete(sources[0]) elif len(sources) > self.opt.batch_delete_size: for i in range(0, len(sources), self.opt.batch_delete_size): self.pool.batch_delete(sources[i:i+self.opt.batch_delete_size]) else: bucket = S3URL(sources[0]).bucket deletes = [] for source in sources: s3url = S3URL(source) if s3url.bucket != bucket: raise Failure('Unable to delete keys in different bucket %s and %s.' % (s3url.bucket, bucket)) deletes.append({'Key': s3url.path}) response = self.s3.delete_objects(Bucket=bucket, Delete={'Objects': deletes}) # Output result of deletion. for res in response.get('Deleted') or []: message('Delete %s', S3URL.combine('s3', bucket, res['Key'])) for err in response.get('Errors') or []: message('Error deleting %s, code(%s) %s', S3URL.combine('s3', bucket, res['Key']), err['Code'], err['Message']) if response.get('Errors') is not None: raise RetryFailure('Unable to complete deleting %d files.' % len(response.get('Errors')))
def batch_delete(self, sources): '''Delete a list of files in batch of batch_delete_size (default=1000).''' assert(type(sources) == list) if len(sources) == 0: return elif len(sources) == 1: self.delete(sources[0]) elif len(sources) > self.opt.batch_delete_size: for i in range(0, len(sources), self.opt.batch_delete_size): self.pool.batch_delete(sources[i:i+self.opt.batch_delete_size]) else: bucket = S3URL(sources[0]).bucket deletes = [] for source in sources: s3url = S3URL(source) if s3url.bucket != bucket: raise Failure('Unable to delete keys in different bucket %s and %s.' % (s3url.bucket, bucket)) deletes.append({'Key': s3url.path}) response = self.s3.delete_objects(Bucket=bucket, Delete={'Objects': deletes}) # Output result of deletion. for res in response.get('Deleted') or []: message('Delete %s', S3URL.combine('s3', bucket, res['Key'])) for err in response.get('Errors') or []: message('Error deleting %s, code(%s) %s', S3URL.combine('s3', bucket, res['Key']), err['Code'], err['Message']) if response.get('Errors') is not None: raise RetryFailure('Unable to complete deleting %d files.' % len(response.get('Errors')))
[ "Delete", "a", "list", "of", "files", "in", "batch", "of", "batch_delete_size", "(", "default", "=", "1000", ")", "." ]
bloomreach/s4cmd
python
https://github.com/bloomreach/s4cmd/blob/bb51075bf43703e7cd95aa39288cf7732ec13a6d/s4cmd.py#L1509-L1539
[ "def", "batch_delete", "(", "self", ",", "sources", ")", ":", "assert", "(", "type", "(", "sources", ")", "==", "list", ")", "if", "len", "(", "sources", ")", "==", "0", ":", "return", "elif", "len", "(", "sources", ")", "==", "1", ":", "self", ".", "delete", "(", "sources", "[", "0", "]", ")", "elif", "len", "(", "sources", ")", ">", "self", ".", "opt", ".", "batch_delete_size", ":", "for", "i", "in", "range", "(", "0", ",", "len", "(", "sources", ")", ",", "self", ".", "opt", ".", "batch_delete_size", ")", ":", "self", ".", "pool", ".", "batch_delete", "(", "sources", "[", "i", ":", "i", "+", "self", ".", "opt", ".", "batch_delete_size", "]", ")", "else", ":", "bucket", "=", "S3URL", "(", "sources", "[", "0", "]", ")", ".", "bucket", "deletes", "=", "[", "]", "for", "source", "in", "sources", ":", "s3url", "=", "S3URL", "(", "source", ")", "if", "s3url", ".", "bucket", "!=", "bucket", ":", "raise", "Failure", "(", "'Unable to delete keys in different bucket %s and %s.'", "%", "(", "s3url", ".", "bucket", ",", "bucket", ")", ")", "deletes", ".", "append", "(", "{", "'Key'", ":", "s3url", ".", "path", "}", ")", "response", "=", "self", ".", "s3", ".", "delete_objects", "(", "Bucket", "=", "bucket", ",", "Delete", "=", "{", "'Objects'", ":", "deletes", "}", ")", "# Output result of deletion.", "for", "res", "in", "response", ".", "get", "(", "'Deleted'", ")", "or", "[", "]", ":", "message", "(", "'Delete %s'", ",", "S3URL", ".", "combine", "(", "'s3'", ",", "bucket", ",", "res", "[", "'Key'", "]", ")", ")", "for", "err", "in", "response", ".", "get", "(", "'Errors'", ")", "or", "[", "]", ":", "message", "(", "'Error deleting %s, code(%s) %s'", ",", "S3URL", ".", "combine", "(", "'s3'", ",", "bucket", ",", "res", "[", "'Key'", "]", ")", ",", "err", "[", "'Code'", "]", ",", "err", "[", "'Message'", "]", ")", "if", "response", ".", "get", "(", "'Errors'", ")", "is", "not", "None", ":", "raise", "RetryFailure", "(", "'Unable to complete deleting %d files.'", "%", 
"len", "(", "response", ".", "get", "(", "'Errors'", ")", ")", ")" ]
bb51075bf43703e7cd95aa39288cf7732ec13a6d
test
CommandHandler.run
Main entry to handle commands. Dispatch to individual command handler.
s4cmd.py
def run(self, args): '''Main entry to handle commands. Dispatch to individual command handler.''' if len(args) == 0: raise InvalidArgument('No command provided') cmd = args[0] if cmd + '_handler' in CommandHandler.__dict__: CommandHandler.__dict__[cmd + '_handler'](self, args) else: raise InvalidArgument('Unknown command %s' % cmd)
def run(self, args): '''Main entry to handle commands. Dispatch to individual command handler.''' if len(args) == 0: raise InvalidArgument('No command provided') cmd = args[0] if cmd + '_handler' in CommandHandler.__dict__: CommandHandler.__dict__[cmd + '_handler'](self, args) else: raise InvalidArgument('Unknown command %s' % cmd)
[ "Main", "entry", "to", "handle", "commands", ".", "Dispatch", "to", "individual", "command", "handler", "." ]
bloomreach/s4cmd
python
https://github.com/bloomreach/s4cmd/blob/bb51075bf43703e7cd95aa39288cf7732ec13a6d/s4cmd.py#L1551-L1559
[ "def", "run", "(", "self", ",", "args", ")", ":", "if", "len", "(", "args", ")", "==", "0", ":", "raise", "InvalidArgument", "(", "'No command provided'", ")", "cmd", "=", "args", "[", "0", "]", "if", "cmd", "+", "'_handler'", "in", "CommandHandler", ".", "__dict__", ":", "CommandHandler", ".", "__dict__", "[", "cmd", "+", "'_handler'", "]", "(", "self", ",", "args", ")", "else", ":", "raise", "InvalidArgument", "(", "'Unknown command %s'", "%", "cmd", ")" ]
bb51075bf43703e7cd95aa39288cf7732ec13a6d
test
CommandHandler.validate
Validate input parameters with given format. This function also checks for wildcards for recursive mode.
s4cmd.py
def validate(self, format, args): '''Validate input parameters with given format. This function also checks for wildcards for recursive mode. ''' fmtMap = { 'cmd': 'Command', 's3': 's3 path', 'local': 'local path' } fmts = format.split('|') if len(fmts) != len(args): raise InvalidArgument('Invalid number of parameters') for i, fmt in enumerate(fmts): valid = False for f in fmt.split(','): if f == 'cmd' and args[i] + '_handler' in CommandHandler.__dict__: valid = True if f == 's3' and S3URL.is_valid(args[i]): valid = True if f == 'local' and not S3URL.is_valid(args[i]): valid = True if not valid: raise InvalidArgument('Invalid parameter: %s, %s expected' % (args[i], fmtMap[fmt.split(',')[0]]))
def validate(self, format, args): '''Validate input parameters with given format. This function also checks for wildcards for recursive mode. ''' fmtMap = { 'cmd': 'Command', 's3': 's3 path', 'local': 'local path' } fmts = format.split('|') if len(fmts) != len(args): raise InvalidArgument('Invalid number of parameters') for i, fmt in enumerate(fmts): valid = False for f in fmt.split(','): if f == 'cmd' and args[i] + '_handler' in CommandHandler.__dict__: valid = True if f == 's3' and S3URL.is_valid(args[i]): valid = True if f == 'local' and not S3URL.is_valid(args[i]): valid = True if not valid: raise InvalidArgument('Invalid parameter: %s, %s expected' % (args[i], fmtMap[fmt.split(',')[0]]))
[ "Validate", "input", "parameters", "with", "given", "format", ".", "This", "function", "also", "checks", "for", "wildcards", "for", "recursive", "mode", "." ]
bloomreach/s4cmd
python
https://github.com/bloomreach/s4cmd/blob/bb51075bf43703e7cd95aa39288cf7732ec13a6d/s4cmd.py#L1566-L1589
[ "def", "validate", "(", "self", ",", "format", ",", "args", ")", ":", "fmtMap", "=", "{", "'cmd'", ":", "'Command'", ",", "'s3'", ":", "'s3 path'", ",", "'local'", ":", "'local path'", "}", "fmts", "=", "format", ".", "split", "(", "'|'", ")", "if", "len", "(", "fmts", ")", "!=", "len", "(", "args", ")", ":", "raise", "InvalidArgument", "(", "'Invalid number of parameters'", ")", "for", "i", ",", "fmt", "in", "enumerate", "(", "fmts", ")", ":", "valid", "=", "False", "for", "f", "in", "fmt", ".", "split", "(", "','", ")", ":", "if", "f", "==", "'cmd'", "and", "args", "[", "i", "]", "+", "'_handler'", "in", "CommandHandler", ".", "__dict__", ":", "valid", "=", "True", "if", "f", "==", "'s3'", "and", "S3URL", ".", "is_valid", "(", "args", "[", "i", "]", ")", ":", "valid", "=", "True", "if", "f", "==", "'local'", "and", "not", "S3URL", ".", "is_valid", "(", "args", "[", "i", "]", ")", ":", "valid", "=", "True", "if", "not", "valid", ":", "raise", "InvalidArgument", "(", "'Invalid parameter: %s, %s expected'", "%", "(", "args", "[", "i", "]", ",", "fmtMap", "[", "fmt", ".", "split", "(", "','", ")", "[", "0", "]", "]", ")", ")" ]
bb51075bf43703e7cd95aa39288cf7732ec13a6d
test
CommandHandler.pretty_print
Pretty print the result of s3walk. Here we calculate the maximum width of each column and align them.
s4cmd.py
def pretty_print(self, objlist): '''Pretty print the result of s3walk. Here we calculate the maximum width of each column and align them. ''' def normalize_time(timestamp): '''Normalize the timestamp format for pretty print.''' if timestamp is None: return ' ' * 16 return TIMESTAMP_FORMAT % (timestamp.year, timestamp.month, timestamp.day, timestamp.hour, timestamp.minute) cwidth = [0, 0, 0] format = '%%%ds %%%ds %%-%ds' # Calculate maximum width for each column. result = [] for obj in objlist: last_modified = normalize_time(obj['last_modified']) size = str(obj['size']) if not obj['is_dir'] else 'DIR' name = obj['name'] item = (last_modified, size, name) for i, value in enumerate(item): if cwidth[i] < len(value): cwidth[i] = len(value) result.append(item) # Format output. for item in result: text = (format % tuple(cwidth)) % item message('%s', text.rstrip())
def pretty_print(self, objlist): '''Pretty print the result of s3walk. Here we calculate the maximum width of each column and align them. ''' def normalize_time(timestamp): '''Normalize the timestamp format for pretty print.''' if timestamp is None: return ' ' * 16 return TIMESTAMP_FORMAT % (timestamp.year, timestamp.month, timestamp.day, timestamp.hour, timestamp.minute) cwidth = [0, 0, 0] format = '%%%ds %%%ds %%-%ds' # Calculate maximum width for each column. result = [] for obj in objlist: last_modified = normalize_time(obj['last_modified']) size = str(obj['size']) if not obj['is_dir'] else 'DIR' name = obj['name'] item = (last_modified, size, name) for i, value in enumerate(item): if cwidth[i] < len(value): cwidth[i] = len(value) result.append(item) # Format output. for item in result: text = (format % tuple(cwidth)) % item message('%s', text.rstrip())
[ "Pretty", "print", "the", "result", "of", "s3walk", ".", "Here", "we", "calculate", "the", "maximum", "width", "of", "each", "column", "and", "align", "them", "." ]
bloomreach/s4cmd
python
https://github.com/bloomreach/s4cmd/blob/bb51075bf43703e7cd95aa39288cf7732ec13a6d/s4cmd.py#L1592-L1622
[ "def", "pretty_print", "(", "self", ",", "objlist", ")", ":", "def", "normalize_time", "(", "timestamp", ")", ":", "'''Normalize the timestamp format for pretty print.'''", "if", "timestamp", "is", "None", ":", "return", "' '", "*", "16", "return", "TIMESTAMP_FORMAT", "%", "(", "timestamp", ".", "year", ",", "timestamp", ".", "month", ",", "timestamp", ".", "day", ",", "timestamp", ".", "hour", ",", "timestamp", ".", "minute", ")", "cwidth", "=", "[", "0", ",", "0", ",", "0", "]", "format", "=", "'%%%ds %%%ds %%-%ds'", "# Calculate maximum width for each column.", "result", "=", "[", "]", "for", "obj", "in", "objlist", ":", "last_modified", "=", "normalize_time", "(", "obj", "[", "'last_modified'", "]", ")", "size", "=", "str", "(", "obj", "[", "'size'", "]", ")", "if", "not", "obj", "[", "'is_dir'", "]", "else", "'DIR'", "name", "=", "obj", "[", "'name'", "]", "item", "=", "(", "last_modified", ",", "size", ",", "name", ")", "for", "i", ",", "value", "in", "enumerate", "(", "item", ")", ":", "if", "cwidth", "[", "i", "]", "<", "len", "(", "value", ")", ":", "cwidth", "[", "i", "]", "=", "len", "(", "value", ")", "result", ".", "append", "(", "item", ")", "# Format output.", "for", "item", "in", "result", ":", "text", "=", "(", "format", "%", "tuple", "(", "cwidth", ")", ")", "%", "item", "message", "(", "'%s'", ",", "text", ".", "rstrip", "(", ")", ")" ]
bb51075bf43703e7cd95aa39288cf7732ec13a6d
test
CommandHandler.ls_handler
Handler for ls command
s4cmd.py
def ls_handler(self, args): '''Handler for ls command''' if len(args) == 1: self.pretty_print(self.s3handler().list_buckets()) return self.validate('cmd|s3', args) self.pretty_print(self.s3handler().s3walk(args[1]))
def ls_handler(self, args): '''Handler for ls command''' if len(args) == 1: self.pretty_print(self.s3handler().list_buckets()) return self.validate('cmd|s3', args) self.pretty_print(self.s3handler().s3walk(args[1]))
[ "Handler", "for", "ls", "command" ]
bloomreach/s4cmd
python
https://github.com/bloomreach/s4cmd/blob/bb51075bf43703e7cd95aa39288cf7732ec13a6d/s4cmd.py#L1625-L1632
[ "def", "ls_handler", "(", "self", ",", "args", ")", ":", "if", "len", "(", "args", ")", "==", "1", ":", "self", ".", "pretty_print", "(", "self", ".", "s3handler", "(", ")", ".", "list_buckets", "(", ")", ")", "return", "self", ".", "validate", "(", "'cmd|s3'", ",", "args", ")", "self", ".", "pretty_print", "(", "self", ".", "s3handler", "(", ")", ".", "s3walk", "(", "args", "[", "1", "]", ")", ")" ]
bb51075bf43703e7cd95aa39288cf7732ec13a6d
test
CommandHandler.mb_handler
Handler for mb command
s4cmd.py
def mb_handler(self, args): '''Handler for mb command''' if len(args) == 1: raise InvalidArgument('No s3 bucketname provided') self.validate('cmd|s3', args) self.s3handler().create_bucket(args[1])
def mb_handler(self, args): '''Handler for mb command''' if len(args) == 1: raise InvalidArgument('No s3 bucketname provided') self.validate('cmd|s3', args) self.s3handler().create_bucket(args[1])
[ "Handler", "for", "mb", "command" ]
bloomreach/s4cmd
python
https://github.com/bloomreach/s4cmd/blob/bb51075bf43703e7cd95aa39288cf7732ec13a6d/s4cmd.py#L1635-L1641
[ "def", "mb_handler", "(", "self", ",", "args", ")", ":", "if", "len", "(", "args", ")", "==", "1", ":", "raise", "InvalidArgument", "(", "'No s3 bucketname provided'", ")", "self", ".", "validate", "(", "'cmd|s3'", ",", "args", ")", "self", ".", "s3handler", "(", ")", ".", "create_bucket", "(", "args", "[", "1", "]", ")" ]
bb51075bf43703e7cd95aa39288cf7732ec13a6d
test
CommandHandler.put_handler
Handler for put command
s4cmd.py
def put_handler(self, args): '''Handler for put command''' # Special check for shell expansion if len(args) < 3: raise InvalidArgument('Invalid number of parameters') self.validate('|'.join(['cmd'] + ['local'] * (len(args) - 2) + ['s3']), args) source = args[1:-1] # shell expansion target = args[-1] self.s3handler().put_files(source, target)
def put_handler(self, args): '''Handler for put command''' # Special check for shell expansion if len(args) < 3: raise InvalidArgument('Invalid number of parameters') self.validate('|'.join(['cmd'] + ['local'] * (len(args) - 2) + ['s3']), args) source = args[1:-1] # shell expansion target = args[-1] self.s3handler().put_files(source, target)
[ "Handler", "for", "put", "command" ]
bloomreach/s4cmd
python
https://github.com/bloomreach/s4cmd/blob/bb51075bf43703e7cd95aa39288cf7732ec13a6d/s4cmd.py#L1644-L1655
[ "def", "put_handler", "(", "self", ",", "args", ")", ":", "# Special check for shell expansion", "if", "len", "(", "args", ")", "<", "3", ":", "raise", "InvalidArgument", "(", "'Invalid number of parameters'", ")", "self", ".", "validate", "(", "'|'", ".", "join", "(", "[", "'cmd'", "]", "+", "[", "'local'", "]", "*", "(", "len", "(", "args", ")", "-", "2", ")", "+", "[", "'s3'", "]", ")", ",", "args", ")", "source", "=", "args", "[", "1", ":", "-", "1", "]", "# shell expansion", "target", "=", "args", "[", "-", "1", "]", "self", ".", "s3handler", "(", ")", ".", "put_files", "(", "source", ",", "target", ")" ]
bb51075bf43703e7cd95aa39288cf7732ec13a6d
test
CommandHandler.get_handler
Handler for get command
s4cmd.py
def get_handler(self, args): '''Handler for get command''' # Special case when we don't have target directory. if len(args) == 2: args += ['.'] self.validate('cmd|s3|local', args) source = args[1] target = args[2] self.s3handler().get_files(source, target)
def get_handler(self, args): '''Handler for get command''' # Special case when we don't have target directory. if len(args) == 2: args += ['.'] self.validate('cmd|s3|local', args) source = args[1] target = args[2] self.s3handler().get_files(source, target)
[ "Handler", "for", "get", "command" ]
bloomreach/s4cmd
python
https://github.com/bloomreach/s4cmd/blob/bb51075bf43703e7cd95aa39288cf7732ec13a6d/s4cmd.py#L1658-L1668
[ "def", "get_handler", "(", "self", ",", "args", ")", ":", "# Special case when we don't have target directory.", "if", "len", "(", "args", ")", "==", "2", ":", "args", "+=", "[", "'.'", "]", "self", ".", "validate", "(", "'cmd|s3|local'", ",", "args", ")", "source", "=", "args", "[", "1", "]", "target", "=", "args", "[", "2", "]", "self", ".", "s3handler", "(", ")", ".", "get_files", "(", "source", ",", "target", ")" ]
bb51075bf43703e7cd95aa39288cf7732ec13a6d
test
CommandHandler.cat_handler
Handler for cat command
s4cmd.py
def cat_handler(self, args): '''Handler for cat command''' self.validate('cmd|s3', args) source = args[1] self.s3handler().print_files(source)
def cat_handler(self, args): '''Handler for cat command''' self.validate('cmd|s3', args) source = args[1] self.s3handler().print_files(source)
[ "Handler", "for", "cat", "command" ]
bloomreach/s4cmd
python
https://github.com/bloomreach/s4cmd/blob/bb51075bf43703e7cd95aa39288cf7732ec13a6d/s4cmd.py#L1671-L1677
[ "def", "cat_handler", "(", "self", ",", "args", ")", ":", "self", ".", "validate", "(", "'cmd|s3'", ",", "args", ")", "source", "=", "args", "[", "1", "]", "self", ".", "s3handler", "(", ")", ".", "print_files", "(", "source", ")" ]
bb51075bf43703e7cd95aa39288cf7732ec13a6d
test
CommandHandler.dsync_handler
Handler for dsync command.
s4cmd.py
def dsync_handler(self, args): '''Handler for dsync command.''' self.opt.recursive = True self.opt.sync_check = True self.opt.force = True self.validate('cmd|s3,local|s3,local', args) source = args[1] target = args[2] self.s3handler().dsync_files(source, target)
def dsync_handler(self, args): '''Handler for dsync command.''' self.opt.recursive = True self.opt.sync_check = True self.opt.force = True self.validate('cmd|s3,local|s3,local', args) source = args[1] target = args[2] self.s3handler().dsync_files(source, target)
[ "Handler", "for", "dsync", "command", "." ]
bloomreach/s4cmd
python
https://github.com/bloomreach/s4cmd/blob/bb51075bf43703e7cd95aa39288cf7732ec13a6d/s4cmd.py#L1680-L1690
[ "def", "dsync_handler", "(", "self", ",", "args", ")", ":", "self", ".", "opt", ".", "recursive", "=", "True", "self", ".", "opt", ".", "sync_check", "=", "True", "self", ".", "opt", ".", "force", "=", "True", "self", ".", "validate", "(", "'cmd|s3,local|s3,local'", ",", "args", ")", "source", "=", "args", "[", "1", "]", "target", "=", "args", "[", "2", "]", "self", ".", "s3handler", "(", ")", ".", "dsync_files", "(", "source", ",", "target", ")" ]
bb51075bf43703e7cd95aa39288cf7732ec13a6d
test
CommandHandler.sync_handler
Handler for sync command. XXX Here we emulate sync command with get/put -r -f --sync-check. So it doesn't provide delete operation.
s4cmd.py
def sync_handler(self, args): '''Handler for sync command. XXX Here we emulate sync command with get/put -r -f --sync-check. So it doesn't provide delete operation. ''' self.opt.recursive = True self.opt.sync_check = True self.opt.force = True self.validate('cmd|s3,local|s3,local', args) source = args[1] target = args[2] self.s3handler().sync_files(source, target)
def sync_handler(self, args): '''Handler for sync command. XXX Here we emulate sync command with get/put -r -f --sync-check. So it doesn't provide delete operation. ''' self.opt.recursive = True self.opt.sync_check = True self.opt.force = True self.validate('cmd|s3,local|s3,local', args) source = args[1] target = args[2] self.s3handler().sync_files(source, target)
[ "Handler", "for", "sync", "command", ".", "XXX", "Here", "we", "emulate", "sync", "command", "with", "get", "/", "put", "-", "r", "-", "f", "--", "sync", "-", "check", ".", "So", "it", "doesn", "t", "provide", "delete", "operation", "." ]
bloomreach/s4cmd
python
https://github.com/bloomreach/s4cmd/blob/bb51075bf43703e7cd95aa39288cf7732ec13a6d/s4cmd.py#L1693-L1706
[ "def", "sync_handler", "(", "self", ",", "args", ")", ":", "self", ".", "opt", ".", "recursive", "=", "True", "self", ".", "opt", ".", "sync_check", "=", "True", "self", ".", "opt", ".", "force", "=", "True", "self", ".", "validate", "(", "'cmd|s3,local|s3,local'", ",", "args", ")", "source", "=", "args", "[", "1", "]", "target", "=", "args", "[", "2", "]", "self", ".", "s3handler", "(", ")", ".", "sync_files", "(", "source", ",", "target", ")" ]
bb51075bf43703e7cd95aa39288cf7732ec13a6d
test
CommandHandler.cp_handler
Handler for cp command
s4cmd.py
def cp_handler(self, args): '''Handler for cp command''' self.validate('cmd|s3|s3', args) source = args[1] target = args[2] self.s3handler().cp_files(source, target)
def cp_handler(self, args): '''Handler for cp command''' self.validate('cmd|s3|s3', args) source = args[1] target = args[2] self.s3handler().cp_files(source, target)
[ "Handler", "for", "cp", "command" ]
bloomreach/s4cmd
python
https://github.com/bloomreach/s4cmd/blob/bb51075bf43703e7cd95aa39288cf7732ec13a6d/s4cmd.py#L1709-L1715
[ "def", "cp_handler", "(", "self", ",", "args", ")", ":", "self", ".", "validate", "(", "'cmd|s3|s3'", ",", "args", ")", "source", "=", "args", "[", "1", "]", "target", "=", "args", "[", "2", "]", "self", ".", "s3handler", "(", ")", ".", "cp_files", "(", "source", ",", "target", ")" ]
bb51075bf43703e7cd95aa39288cf7732ec13a6d
test
CommandHandler.mv_handler
Handler for mv command
s4cmd.py
def mv_handler(self, args): '''Handler for mv command''' self.validate('cmd|s3|s3', args) source = args[1] target = args[2] self.s3handler().cp_files(source, target, delete_source=True)
def mv_handler(self, args): '''Handler for mv command''' self.validate('cmd|s3|s3', args) source = args[1] target = args[2] self.s3handler().cp_files(source, target, delete_source=True)
[ "Handler", "for", "mv", "command" ]
bloomreach/s4cmd
python
https://github.com/bloomreach/s4cmd/blob/bb51075bf43703e7cd95aa39288cf7732ec13a6d/s4cmd.py#L1718-L1724
[ "def", "mv_handler", "(", "self", ",", "args", ")", ":", "self", ".", "validate", "(", "'cmd|s3|s3'", ",", "args", ")", "source", "=", "args", "[", "1", "]", "target", "=", "args", "[", "2", "]", "self", ".", "s3handler", "(", ")", ".", "cp_files", "(", "source", ",", "target", ",", "delete_source", "=", "True", ")" ]
bb51075bf43703e7cd95aa39288cf7732ec13a6d
test
CommandHandler.del_handler
Handler for del command
s4cmd.py
def del_handler(self, args): '''Handler for del command''' self.validate('cmd|s3', args) source = args[1] self.s3handler().del_files(source)
def del_handler(self, args): '''Handler for del command''' self.validate('cmd|s3', args) source = args[1] self.s3handler().del_files(source)
[ "Handler", "for", "del", "command" ]
bloomreach/s4cmd
python
https://github.com/bloomreach/s4cmd/blob/bb51075bf43703e7cd95aa39288cf7732ec13a6d/s4cmd.py#L1727-L1731
[ "def", "del_handler", "(", "self", ",", "args", ")", ":", "self", ".", "validate", "(", "'cmd|s3'", ",", "args", ")", "source", "=", "args", "[", "1", "]", "self", ".", "s3handler", "(", ")", ".", "del_files", "(", "source", ")" ]
bb51075bf43703e7cd95aa39288cf7732ec13a6d
test
CommandHandler.du_handler
Handler for size command
s4cmd.py
def du_handler(self, args): '''Handler for size command''' for src, size in self.s3handler().size(args[1:]): message('%s\t%s' % (size, src))
def du_handler(self, args): '''Handler for size command''' for src, size in self.s3handler().size(args[1:]): message('%s\t%s' % (size, src))
[ "Handler", "for", "size", "command" ]
bloomreach/s4cmd
python
https://github.com/bloomreach/s4cmd/blob/bb51075bf43703e7cd95aa39288cf7732ec13a6d/s4cmd.py#L1734-L1737
[ "def", "du_handler", "(", "self", ",", "args", ")", ":", "for", "src", ",", "size", "in", "self", ".", "s3handler", "(", ")", ".", "size", "(", "args", "[", "1", ":", "]", ")", ":", "message", "(", "'%s\\t%s'", "%", "(", "size", ",", "src", ")", ")" ]
bb51075bf43703e7cd95aa39288cf7732ec13a6d
test
CommandHandler._totalsize_handler
Handler of total_size command
s4cmd.py
def _totalsize_handler(self, args): '''Handler of total_size command''' total_size = 0 for src, size in self.s3handler().size(args[1:]): total_size += size message(str(total_size))
def _totalsize_handler(self, args): '''Handler of total_size command''' total_size = 0 for src, size in self.s3handler().size(args[1:]): total_size += size message(str(total_size))
[ "Handler", "of", "total_size", "command" ]
bloomreach/s4cmd
python
https://github.com/bloomreach/s4cmd/blob/bb51075bf43703e7cd95aa39288cf7732ec13a6d/s4cmd.py#L1740-L1745
[ "def", "_totalsize_handler", "(", "self", ",", "args", ")", ":", "total_size", "=", "0", "for", "src", ",", "size", "in", "self", ".", "s3handler", "(", ")", ".", "size", "(", "args", "[", "1", ":", "]", ")", ":", "total_size", "+=", "size", "message", "(", "str", "(", "total_size", ")", ")" ]
bb51075bf43703e7cd95aa39288cf7732ec13a6d
test
ExtendedOptParser.match_date
Search for date information in the string
s4cmd.py
def match_date(self, value): '''Search for date information in the string''' m = self.REGEX_DATE.search(value) date = datetime.datetime.utcnow().date() if m: date = datetime.date(int(m.group(1)), int(m.group(2)), int(m.group(3))) value = self.REGEX_DATE.sub('', value) return (date, value)
def match_date(self, value): '''Search for date information in the string''' m = self.REGEX_DATE.search(value) date = datetime.datetime.utcnow().date() if m: date = datetime.date(int(m.group(1)), int(m.group(2)), int(m.group(3))) value = self.REGEX_DATE.sub('', value) return (date, value)
[ "Search", "for", "date", "information", "in", "the", "string" ]
bloomreach/s4cmd
python
https://github.com/bloomreach/s4cmd/blob/bb51075bf43703e7cd95aa39288cf7732ec13a6d/s4cmd.py#L1754-L1761
[ "def", "match_date", "(", "self", ",", "value", ")", ":", "m", "=", "self", ".", "REGEX_DATE", ".", "search", "(", "value", ")", "date", "=", "datetime", ".", "datetime", ".", "utcnow", "(", ")", ".", "date", "(", ")", "if", "m", ":", "date", "=", "datetime", ".", "date", "(", "int", "(", "m", ".", "group", "(", "1", ")", ")", ",", "int", "(", "m", ".", "group", "(", "2", ")", ")", ",", "int", "(", "m", ".", "group", "(", "3", ")", ")", ")", "value", "=", "self", ".", "REGEX_DATE", ".", "sub", "(", "''", ",", "value", ")", "return", "(", "date", ",", "value", ")" ]
bb51075bf43703e7cd95aa39288cf7732ec13a6d
test
ExtendedOptParser.match_time
Search for time information in the string
s4cmd.py
def match_time(self, value): '''Search for time information in the string''' m = self.REGEX_TIME.search(value) time = datetime.datetime.utcnow().time() if m: time = datetime.time(int(m.group(1)), int(m.group(2))) value = self.REGEX_TIME.sub('', value) return (time, value)
def match_time(self, value): '''Search for time information in the string''' m = self.REGEX_TIME.search(value) time = datetime.datetime.utcnow().time() if m: time = datetime.time(int(m.group(1)), int(m.group(2))) value = self.REGEX_TIME.sub('', value) return (time, value)
[ "Search", "for", "time", "information", "in", "the", "string" ]
bloomreach/s4cmd
python
https://github.com/bloomreach/s4cmd/blob/bb51075bf43703e7cd95aa39288cf7732ec13a6d/s4cmd.py#L1763-L1770
[ "def", "match_time", "(", "self", ",", "value", ")", ":", "m", "=", "self", ".", "REGEX_TIME", ".", "search", "(", "value", ")", "time", "=", "datetime", ".", "datetime", ".", "utcnow", "(", ")", ".", "time", "(", ")", "if", "m", ":", "time", "=", "datetime", ".", "time", "(", "int", "(", "m", ".", "group", "(", "1", ")", ")", ",", "int", "(", "m", ".", "group", "(", "2", ")", ")", ")", "value", "=", "self", ".", "REGEX_TIME", ".", "sub", "(", "''", ",", "value", ")", "return", "(", "time", ",", "value", ")" ]
bb51075bf43703e7cd95aa39288cf7732ec13a6d
test
ExtendedOptParser.match_delta
Search for timedelta information in the string
s4cmd.py
def match_delta(self, value): '''Search for timedelta information in the string''' m = self.REGEX_DELTA.search(value) delta = datetime.timedelta(days=0) if m: d = int(m.group(1)) if m.group(3) == 'ago' or m.group(3) == 'before': d = -d if m.group(2) == 'minute': delta = datetime.timedelta(minutes=d) elif m.group(2) == 'hour': delta = datetime.timedelta(hours=d) elif m.group(2) == 'day': delta = datetime.timedelta(days=d) elif m.group(2) == 'week': delta = datetime.timedelta(weeks=d) value = self.REGEX_DELTA.sub('', value) return (delta, value)
def match_delta(self, value): '''Search for timedelta information in the string''' m = self.REGEX_DELTA.search(value) delta = datetime.timedelta(days=0) if m: d = int(m.group(1)) if m.group(3) == 'ago' or m.group(3) == 'before': d = -d if m.group(2) == 'minute': delta = datetime.timedelta(minutes=d) elif m.group(2) == 'hour': delta = datetime.timedelta(hours=d) elif m.group(2) == 'day': delta = datetime.timedelta(days=d) elif m.group(2) == 'week': delta = datetime.timedelta(weeks=d) value = self.REGEX_DELTA.sub('', value) return (delta, value)
[ "Search", "for", "timedelta", "information", "in", "the", "string" ]
bloomreach/s4cmd
python
https://github.com/bloomreach/s4cmd/blob/bb51075bf43703e7cd95aa39288cf7732ec13a6d/s4cmd.py#L1772-L1790
[ "def", "match_delta", "(", "self", ",", "value", ")", ":", "m", "=", "self", ".", "REGEX_DELTA", ".", "search", "(", "value", ")", "delta", "=", "datetime", ".", "timedelta", "(", "days", "=", "0", ")", "if", "m", ":", "d", "=", "int", "(", "m", ".", "group", "(", "1", ")", ")", "if", "m", ".", "group", "(", "3", ")", "==", "'ago'", "or", "m", ".", "group", "(", "3", ")", "==", "'before'", ":", "d", "=", "-", "d", "if", "m", ".", "group", "(", "2", ")", "==", "'minute'", ":", "delta", "=", "datetime", ".", "timedelta", "(", "minutes", "=", "d", ")", "elif", "m", ".", "group", "(", "2", ")", "==", "'hour'", ":", "delta", "=", "datetime", ".", "timedelta", "(", "hours", "=", "d", ")", "elif", "m", ".", "group", "(", "2", ")", "==", "'day'", ":", "delta", "=", "datetime", ".", "timedelta", "(", "days", "=", "d", ")", "elif", "m", ".", "group", "(", "2", ")", "==", "'week'", ":", "delta", "=", "datetime", ".", "timedelta", "(", "weeks", "=", "d", ")", "value", "=", "self", ".", "REGEX_DELTA", ".", "sub", "(", "''", ",", "value", ")", "return", "(", "delta", ",", "value", ")" ]
bb51075bf43703e7cd95aa39288cf7732ec13a6d
test
ExtendedOptParser.check_dict
Take json as dictionary parameter
s4cmd.py
def check_dict(self, opt, value):
    '''Take json as dictionary parameter'''
    try:
        return json.loads(value)
    except (TypeError, ValueError):
        # json.loads raises ValueError (JSONDecodeError) on malformed text
        # and TypeError on non-string input; the original bare `except:`
        # also swallowed KeyboardInterrupt/SystemExit, which must propagate.
        raise optparse.OptionValueError(
            "Option %s: invalid dict value: %r" % (opt, value))
[ "Take", "json", "as", "dictionary", "parameter" ]
bloomreach/s4cmd
python
https://github.com/bloomreach/s4cmd/blob/bb51075bf43703e7cd95aa39288cf7732ec13a6d/s4cmd.py#L1805-L1810
[ "def", "check_dict", "(", "self", ",", "opt", ",", "value", ")", ":", "try", ":", "return", "json", ".", "loads", "(", "value", ")", "except", ":", "raise", "optparse", ".", "OptionValueError", "(", "\"Option %s: invalid dict value: %r\"", "%", "(", "opt", ",", "value", ")", ")" ]
bb51075bf43703e7cd95aa39288cf7732ec13a6d
test
XiaomiGatewayDiscovery.discover_gateways
Discover gateways using multicast
xiaomi_gateway/__init__.py
def discover_gateways(self):
    """Discover gateways using multicast"""
    _socket = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
    _socket.settimeout(5.0)
    if self._interface != 'any':
        _socket.bind((self._interface, 0))

    # Pass 1: register statically configured gateways (host/port/sid all set).
    for gateway in self._gateways_config:
        host = gateway.get('host')
        port = gateway.get('port')
        sid = gateway.get('sid')

        if not (host and port and sid):
            continue
        try:
            ip_address = socket.gethostbyname(host)
            if gateway.get('disable'):
                _LOGGER.info(
                    'Xiaomi Gateway %s is disabled by configuration', sid)
                self.disabled_gateways.append(ip_address)
                continue
            _LOGGER.info(
                'Xiaomi Gateway %s configured at IP %s:%s', sid, ip_address, port)

            self.gateways[ip_address] = XiaomiGateway(
                ip_address, port, sid, gateway.get('key'),
                self._device_discovery_retries, self._interface,
                gateway.get('proto'))
        except OSError as error:
            _LOGGER.error(
                "Could not resolve %s: %s", host, error)

    # Pass 2: multicast a "whois" and collect replies until the 5 s timeout.
    try:
        _socket.sendto('{"cmd":"whois"}'.encode(),
                       (self.MULTICAST_ADDRESS, self.GATEWAY_DISCOVERY_PORT))

        while True:
            data, (ip_add, _) = _socket.recvfrom(1024)
            # BUG FIX: the original tested `len(data) is None`, which is
            # always False (len() returns an int). Skip empty datagrams and
            # addresses we already know about (configured or disabled).
            if not data or ip_add in self.gateways or ip_add in self.disabled_gateways:
                continue

            resp = json.loads(data.decode())
            if resp["cmd"] != 'iam':
                _LOGGER.error("Response does not match return cmd")
                continue

            if resp["model"] not in GATEWAY_MODELS:
                _LOGGER.error("Response must be gateway model")
                continue

            # Match the reply against the configuration: a config entry with
            # no sid supplies a default key; a matching sid may also disable.
            disabled = False
            gateway_key = None
            for gateway in self._gateways_config:
                sid = gateway.get('sid')
                if sid is None or sid == resp["sid"]:
                    gateway_key = gateway.get('key')
                if sid and sid == resp['sid'] and gateway.get('disable'):
                    disabled = True
            sid = resp["sid"]

            if disabled:
                _LOGGER.info(
                    "Xiaomi Gateway %s is disabled by configuration", sid)
                self.disabled_gateways.append(ip_add)
            else:
                _LOGGER.info('Xiaomi Gateway %s found at IP %s', sid, ip_add)
                self.gateways[ip_add] = XiaomiGateway(
                    ip_add, resp["port"], sid, gateway_key,
                    self._device_discovery_retries, self._interface,
                    resp.get("proto_version"))

    except socket.timeout:
        _LOGGER.info("Gateway discovery finished in 5 seconds")

    _socket.close()
[ "Discover", "gateways", "using", "multicast" ]
Danielhiversen/PyXiaomiGateway
python
https://github.com/Danielhiversen/PyXiaomiGateway/blob/21b38ab972d67402f2124dba02101ddfd8d9e0b4/xiaomi_gateway/__init__.py#L41-L119
[ "def", "discover_gateways", "(", "self", ")", ":", "_socket", "=", "socket", ".", "socket", "(", "socket", ".", "AF_INET", ",", "socket", ".", "SOCK_DGRAM", ")", "_socket", ".", "settimeout", "(", "5.0", ")", "if", "self", ".", "_interface", "!=", "'any'", ":", "_socket", ".", "bind", "(", "(", "self", ".", "_interface", ",", "0", ")", ")", "for", "gateway", "in", "self", ".", "_gateways_config", ":", "host", "=", "gateway", ".", "get", "(", "'host'", ")", "port", "=", "gateway", ".", "get", "(", "'port'", ")", "sid", "=", "gateway", ".", "get", "(", "'sid'", ")", "if", "not", "(", "host", "and", "port", "and", "sid", ")", ":", "continue", "try", ":", "ip_address", "=", "socket", ".", "gethostbyname", "(", "host", ")", "if", "gateway", ".", "get", "(", "'disable'", ")", ":", "_LOGGER", ".", "info", "(", "'Xiaomi Gateway %s is disabled by configuration'", ",", "sid", ")", "self", ".", "disabled_gateways", ".", "append", "(", "ip_address", ")", "continue", "_LOGGER", ".", "info", "(", "'Xiaomi Gateway %s configured at IP %s:%s'", ",", "sid", ",", "ip_address", ",", "port", ")", "self", ".", "gateways", "[", "ip_address", "]", "=", "XiaomiGateway", "(", "ip_address", ",", "port", ",", "sid", ",", "gateway", ".", "get", "(", "'key'", ")", ",", "self", ".", "_device_discovery_retries", ",", "self", ".", "_interface", ",", "gateway", ".", "get", "(", "'proto'", ")", ")", "except", "OSError", "as", "error", ":", "_LOGGER", ".", "error", "(", "\"Could not resolve %s: %s\"", ",", "host", ",", "error", ")", "try", ":", "_socket", ".", "sendto", "(", "'{\"cmd\":\"whois\"}'", ".", "encode", "(", ")", ",", "(", "self", ".", "MULTICAST_ADDRESS", ",", "self", ".", "GATEWAY_DISCOVERY_PORT", ")", ")", "while", "True", ":", "data", ",", "(", "ip_add", ",", "_", ")", "=", "_socket", ".", "recvfrom", "(", "1024", ")", "if", "len", "(", "data", ")", "is", "None", "or", "ip_add", "in", "self", ".", "gateways", ":", "continue", "if", "ip_add", "in", "self", ".", "gateways", ".", 
"keys", "(", ")", "or", "ip_add", "in", "self", ".", "disabled_gateways", ":", "continue", "resp", "=", "json", ".", "loads", "(", "data", ".", "decode", "(", ")", ")", "if", "resp", "[", "\"cmd\"", "]", "!=", "'iam'", ":", "_LOGGER", ".", "error", "(", "\"Response does not match return cmd\"", ")", "continue", "if", "resp", "[", "\"model\"", "]", "not", "in", "GATEWAY_MODELS", ":", "_LOGGER", ".", "error", "(", "\"Response must be gateway model\"", ")", "continue", "disabled", "=", "False", "gateway_key", "=", "None", "for", "gateway", "in", "self", ".", "_gateways_config", ":", "sid", "=", "gateway", ".", "get", "(", "'sid'", ")", "if", "sid", "is", "None", "or", "sid", "==", "resp", "[", "\"sid\"", "]", ":", "gateway_key", "=", "gateway", ".", "get", "(", "'key'", ")", "if", "sid", "and", "sid", "==", "resp", "[", "'sid'", "]", "and", "gateway", ".", "get", "(", "'disable'", ")", ":", "disabled", "=", "True", "sid", "=", "resp", "[", "\"sid\"", "]", "if", "disabled", ":", "_LOGGER", ".", "info", "(", "\"Xiaomi Gateway %s is disabled by configuration\"", ",", "sid", ")", "self", ".", "disabled_gateways", ".", "append", "(", "ip_add", ")", "else", ":", "_LOGGER", ".", "info", "(", "'Xiaomi Gateway %s found at IP %s'", ",", "sid", ",", "ip_add", ")", "self", ".", "gateways", "[", "ip_add", "]", "=", "XiaomiGateway", "(", "ip_add", ",", "resp", "[", "\"port\"", "]", ",", "sid", ",", "gateway_key", ",", "self", ".", "_device_discovery_retries", ",", "self", ".", "_interface", ",", "resp", "[", "\"proto_version\"", "]", "if", "\"proto_version\"", "in", "resp", "else", "None", ")", "except", "socket", ".", "timeout", ":", "_LOGGER", ".", "info", "(", "\"Gateway discovery finished in 5 seconds\"", ")", "_socket", ".", "close", "(", ")" ]
21b38ab972d67402f2124dba02101ddfd8d9e0b4
test
XiaomiGatewayDiscovery.listen
Start listening.
xiaomi_gateway/__init__.py
def listen(self):
    """Start listening."""
    _LOGGER.info('Creating Multicast Socket')
    # Open the multicast socket and spin up a daemon thread that pumps
    # incoming gateway messages until stop_listen() is called.
    self._mcastsocket = self._create_mcast_socket()
    self._listening = True
    worker = Thread(target=self._listen_to_msg, args=())
    self._threads.append(worker)
    worker.daemon = True
    worker.start()
[ "Start", "listening", "." ]
Danielhiversen/PyXiaomiGateway
python
https://github.com/Danielhiversen/PyXiaomiGateway/blob/21b38ab972d67402f2124dba02101ddfd8d9e0b4/xiaomi_gateway/__init__.py#L142-L151
[ "def", "listen", "(", "self", ")", ":", "_LOGGER", ".", "info", "(", "'Creating Multicast Socket'", ")", "self", ".", "_mcastsocket", "=", "self", ".", "_create_mcast_socket", "(", ")", "self", ".", "_listening", "=", "True", "thread", "=", "Thread", "(", "target", "=", "self", ".", "_listen_to_msg", ",", "args", "=", "(", ")", ")", "self", ".", "_threads", ".", "append", "(", "thread", ")", "thread", ".", "daemon", "=", "True", "thread", ".", "start", "(", ")" ]
21b38ab972d67402f2124dba02101ddfd8d9e0b4
test
XiaomiGatewayDiscovery.stop_listen
Stop listening.
xiaomi_gateway/__init__.py
def stop_listen(self):
    """Stop listening."""
    # Flip the flag first so the listener loop exits, then release the
    # socket and wait for every worker thread to finish.
    self._listening = False

    if self._mcastsocket is not None:
        _LOGGER.info('Closing multisocket')
        self._mcastsocket.close()
        self._mcastsocket = None

    for worker in self._threads:
        worker.join()
[ "Stop", "listening", "." ]
Danielhiversen/PyXiaomiGateway
python
https://github.com/Danielhiversen/PyXiaomiGateway/blob/21b38ab972d67402f2124dba02101ddfd8d9e0b4/xiaomi_gateway/__init__.py#L153-L163
[ "def", "stop_listen", "(", "self", ")", ":", "self", ".", "_listening", "=", "False", "if", "self", ".", "_mcastsocket", "is", "not", "None", ":", "_LOGGER", ".", "info", "(", "'Closing multisocket'", ")", "self", ".", "_mcastsocket", ".", "close", "(", ")", "self", ".", "_mcastsocket", "=", "None", "for", "thread", "in", "self", ".", "_threads", ":", "thread", ".", "join", "(", ")" ]
21b38ab972d67402f2124dba02101ddfd8d9e0b4
test
XiaomiGateway.write_to_hub
Send data to gateway to turn on / off device
xiaomi_gateway/__init__.py
def write_to_hub(self, sid, **kwargs):
    """Send data to gateway to turn on / off device"""
    if self.key is None:
        _LOGGER.error('Gateway Key is not provided. Can not send commands to the gateway.')
        return False

    data = dict(kwargs)

    if not self.token:
        _LOGGER.debug('Gateway Token was not obtained yet. Cannot send commands to the gateway.')
        return False

    cmd = dict()
    cmd['cmd'] = 'write'
    cmd['sid'] = sid

    def _sign_and_send():
        # Sign and transmit `cmd`. The original duplicated this whole
        # sequence for the token-refresh retry; one helper keeps both
        # attempts identical. Protocol 1.x puts the key inside "data";
        # 2.x signs the command itself and wraps the payload in "params".
        if int(self.proto[0:1]) == 1:
            data['key'] = self._get_key()
            cmd['data'] = data
            resp = self._send_cmd(json.dumps(cmd), "write_ack")
        else:
            cmd['key'] = self._get_key()
            cmd['params'] = [data]
            resp = self._send_cmd(json.dumps(cmd), "write_rsp")
        _LOGGER.debug("write_ack << %s", resp)
        return resp

    resp = _sign_and_send()
    if _validate_data(resp):
        return True
    if not _validate_keyerror(resp):
        return False

    # If 'invalid key' message we ask for a new token and retry once.
    resp = self._send_cmd('{"cmd" : "get_id_list"}', "get_id_list_ack") if int(self.proto[0:1]) == 1 \
        else self._send_cmd('{"cmd" : "discovery"}', "discovery_rsp")
    _LOGGER.debug("get_id_list << %s", resp)

    if resp is None or "token" not in resp:
        _LOGGER.error('No new token from gateway. Can not send commands to the gateway.')
        return False

    self.token = resp['token']
    return _validate_data(_sign_and_send())
[ "Send", "data", "to", "gateway", "to", "turn", "on", "/", "off", "device" ]
Danielhiversen/PyXiaomiGateway
python
https://github.com/Danielhiversen/PyXiaomiGateway/blob/21b38ab972d67402f2124dba02101ddfd8d9e0b4/xiaomi_gateway/__init__.py#L330-L377
[ "def", "write_to_hub", "(", "self", ",", "sid", ",", "*", "*", "kwargs", ")", ":", "if", "self", ".", "key", "is", "None", ":", "_LOGGER", ".", "error", "(", "'Gateway Key is not provided. Can not send commands to the gateway.'", ")", "return", "False", "data", "=", "{", "}", "for", "key", "in", "kwargs", ":", "data", "[", "key", "]", "=", "kwargs", "[", "key", "]", "if", "not", "self", ".", "token", ":", "_LOGGER", ".", "debug", "(", "'Gateway Token was not obtained yet. Cannot send commands to the gateway.'", ")", "return", "False", "cmd", "=", "dict", "(", ")", "cmd", "[", "'cmd'", "]", "=", "'write'", "cmd", "[", "'sid'", "]", "=", "sid", "if", "int", "(", "self", ".", "proto", "[", "0", ":", "1", "]", ")", "==", "1", ":", "data", "[", "'key'", "]", "=", "self", ".", "_get_key", "(", ")", "cmd", "[", "'data'", "]", "=", "data", "else", ":", "cmd", "[", "'key'", "]", "=", "self", ".", "_get_key", "(", ")", "cmd", "[", "'params'", "]", "=", "[", "data", "]", "resp", "=", "self", ".", "_send_cmd", "(", "json", ".", "dumps", "(", "cmd", ")", ",", "\"write_ack\"", ")", "if", "int", "(", "self", ".", "proto", "[", "0", ":", "1", "]", ")", "==", "1", "else", "self", ".", "_send_cmd", "(", "json", ".", "dumps", "(", "cmd", ")", ",", "\"write_rsp\"", ")", "_LOGGER", ".", "debug", "(", "\"write_ack << %s\"", ",", "resp", ")", "if", "_validate_data", "(", "resp", ")", ":", "return", "True", "if", "not", "_validate_keyerror", "(", "resp", ")", ":", "return", "False", "# If 'invalid key' message we ask for a new token", "resp", "=", "self", ".", "_send_cmd", "(", "'{\"cmd\" : \"get_id_list\"}'", ",", "\"get_id_list_ack\"", ")", "if", "int", "(", "self", ".", "proto", "[", "0", ":", "1", "]", ")", "==", "1", "else", "self", ".", "_send_cmd", "(", "'{\"cmd\" : \"discovery\"}'", ",", "\"discovery_rsp\"", ")", "_LOGGER", ".", "debug", "(", "\"get_id_list << %s\"", ",", "resp", ")", "if", "resp", "is", "None", "or", "\"token\"", "not", "in", "resp", ":", "_LOGGER", ".", 
"error", "(", "'No new token from gateway. Can not send commands to the gateway.'", ")", "return", "False", "self", ".", "token", "=", "resp", "[", "'token'", "]", "if", "int", "(", "self", ".", "proto", "[", "0", ":", "1", "]", ")", "==", "1", ":", "data", "[", "'key'", "]", "=", "self", ".", "_get_key", "(", ")", "cmd", "[", "'data'", "]", "=", "data", "else", ":", "cmd", "[", "'key'", "]", "=", "self", ".", "_get_key", "(", ")", "cmd", "[", "'params'", "]", "=", "[", "data", "]", "resp", "=", "self", ".", "_send_cmd", "(", "json", ".", "dumps", "(", "cmd", ")", ",", "\"write_ack\"", ")", "if", "int", "(", "self", ".", "proto", "[", "0", ":", "1", "]", ")", "==", "1", "else", "self", ".", "_send_cmd", "(", "json", ".", "dumps", "(", "cmd", ")", ",", "\"write_rsp\"", ")", "_LOGGER", ".", "debug", "(", "\"write_ack << %s\"", ",", "resp", ")", "return", "_validate_data", "(", "resp", ")" ]
21b38ab972d67402f2124dba02101ddfd8d9e0b4
test
XiaomiGateway.get_from_hub
Get data from gateway
xiaomi_gateway/__init__.py
def get_from_hub(self, sid):
    """Get data from gateway"""
    cmd = '{ "cmd":"read","sid":"' + sid + '"}'
    # Protocol 1.x acknowledges reads with "read_ack", 2.x with "read_rsp".
    expected_ack = "read_ack" if int(self.proto[0:1]) == 1 else "read_rsp"
    resp = self._send_cmd(cmd, expected_ack)
    _LOGGER.debug("read_ack << %s", resp)
    # Dispatch the reply through the normal push path.
    return self.push_data(resp)
[ "Get", "data", "from", "gateway" ]
Danielhiversen/PyXiaomiGateway
python
https://github.com/Danielhiversen/PyXiaomiGateway/blob/21b38ab972d67402f2124dba02101ddfd8d9e0b4/xiaomi_gateway/__init__.py#L379-L384
[ "def", "get_from_hub", "(", "self", ",", "sid", ")", ":", "cmd", "=", "'{ \"cmd\":\"read\",\"sid\":\"'", "+", "sid", "+", "'\"}'", "resp", "=", "self", ".", "_send_cmd", "(", "cmd", ",", "\"read_ack\"", ")", "if", "int", "(", "self", ".", "proto", "[", "0", ":", "1", "]", ")", "==", "1", "else", "self", ".", "_send_cmd", "(", "cmd", ",", "\"read_rsp\"", ")", "_LOGGER", ".", "debug", "(", "\"read_ack << %s\"", ",", "resp", ")", "return", "self", ".", "push_data", "(", "resp", ")" ]
21b38ab972d67402f2124dba02101ddfd8d9e0b4
test
XiaomiGateway.push_data
Push data broadcasted from gateway to device
xiaomi_gateway/__init__.py
def push_data(self, data):
    """Push data broadcasted from gateway to device"""
    if not _validate_data(data):
        return False

    # Protocol 1.x carries a JSON string in "data"; 2.x carries a
    # list of single-pair dicts in "params".
    if int(self.proto[0:1]) == 1:
        jdata = json.loads(data['data'])
    else:
        jdata = _list2map(data['params'])
    if jdata is None:
        return False

    # Fan the update out to every callback registered for this device.
    for callback in self.callbacks[data['sid']]:
        callback(jdata, data)
    return True
[ "Push", "data", "broadcasted", "from", "gateway", "to", "device" ]
Danielhiversen/PyXiaomiGateway
python
https://github.com/Danielhiversen/PyXiaomiGateway/blob/21b38ab972d67402f2124dba02101ddfd8d9e0b4/xiaomi_gateway/__init__.py#L386-L396
[ "def", "push_data", "(", "self", ",", "data", ")", ":", "if", "not", "_validate_data", "(", "data", ")", ":", "return", "False", "jdata", "=", "json", ".", "loads", "(", "data", "[", "'data'", "]", ")", "if", "int", "(", "self", ".", "proto", "[", "0", ":", "1", "]", ")", "==", "1", "else", "_list2map", "(", "data", "[", "'params'", "]", ")", "if", "jdata", "is", "None", ":", "return", "False", "sid", "=", "data", "[", "'sid'", "]", "for", "func", "in", "self", ".", "callbacks", "[", "sid", "]", ":", "func", "(", "jdata", ",", "data", ")", "return", "True" ]
21b38ab972d67402f2124dba02101ddfd8d9e0b4
test
XiaomiGateway._get_key
Get key using token from gateway
xiaomi_gateway/__init__.py
def _get_key(self):
    """Get key using token from gateway"""
    # AES-CBC with the fixed IV mandated by the Xiaomi gateway protocol.
    init_vector = bytes(bytearray.fromhex('17996d093d28ddb3ba695a2e6f58562e'))
    cipher = Cipher(algorithms.AES(self.key.encode()), modes.CBC(init_vector),
                    backend=default_backend())
    encryptor = cipher.encryptor()
    ciphertext = encryptor.update(self.token.encode()) + encryptor.finalize()

    # Render the ciphertext as lowercase hex. On Python 2 the result is a
    # str of chars (needs ord()); on Python 3 iterating bytes yields ints.
    if isinstance(ciphertext, str):
        return ''.join('{:02x}'.format(ord(ch)) for ch in ciphertext)
    return ''.join('{:02x}'.format(byte) for byte in ciphertext)
[ "Get", "key", "using", "token", "from", "gateway" ]
Danielhiversen/PyXiaomiGateway
python
https://github.com/Danielhiversen/PyXiaomiGateway/blob/21b38ab972d67402f2124dba02101ddfd8d9e0b4/xiaomi_gateway/__init__.py#L398-L406
[ "def", "_get_key", "(", "self", ")", ":", "init_vector", "=", "bytes", "(", "bytearray", ".", "fromhex", "(", "'17996d093d28ddb3ba695a2e6f58562e'", ")", ")", "encryptor", "=", "Cipher", "(", "algorithms", ".", "AES", "(", "self", ".", "key", ".", "encode", "(", ")", ")", ",", "modes", ".", "CBC", "(", "init_vector", ")", ",", "backend", "=", "default_backend", "(", ")", ")", ".", "encryptor", "(", ")", "ciphertext", "=", "encryptor", ".", "update", "(", "self", ".", "token", ".", "encode", "(", ")", ")", "+", "encryptor", ".", "finalize", "(", ")", "if", "isinstance", "(", "ciphertext", ",", "str", ")", ":", "# For Python 2 compatibility", "return", "''", ".", "join", "(", "'{:02x}'", ".", "format", "(", "ord", "(", "x", ")", ")", "for", "x", "in", "ciphertext", ")", "return", "''", ".", "join", "(", "'{:02x}'", ".", "format", "(", "x", ")", "for", "x", "in", "ciphertext", ")" ]
21b38ab972d67402f2124dba02101ddfd8d9e0b4
test
train
Train your awesome model. :param hparams: The arguments to run the model with.
examples/pytorch_hpc_example.py
def train(hparams, *args):
    """Train your awesome model.

    :param hparams: The arguments to run the model with.
    """
    # Set up the experiment and record every hyperparameter.
    exp = Experiment(
        name=hparams.test_tube_exp_name,
        save_dir=hparams.log_path,  # Location to save the metrics.
        autosave=False,
    )
    exp.argparse(hparams)

    # Fake a training loop: multiply random matrices and log the result.
    x = torch.rand((1, hparams.x_val))
    for train_step in range(0, 100):
        y = torch.rand((hparams.x_val, 1))
        out = x.mm(y)
        exp.log({'fake_err': out.item()})

    # Persist the experiment to disk when finished.
    exp.save()
[ "Train", "your", "awesome", "model", "." ]
williamFalcon/test-tube
python
https://github.com/williamFalcon/test-tube/blob/db5a47067a854f76d89f8066582023c1e184bccb/examples/pytorch_hpc_example.py#L8-L30
[ "def", "train", "(", "hparams", ",", "*", "args", ")", ":", "# Initialize experiments and track all the hyperparameters", "exp", "=", "Experiment", "(", "name", "=", "hparams", ".", "test_tube_exp_name", ",", "# Location to save the metrics.", "save_dir", "=", "hparams", ".", "log_path", ",", "autosave", "=", "False", ",", ")", "exp", ".", "argparse", "(", "hparams", ")", "# Pretend to train.", "x", "=", "torch", ".", "rand", "(", "(", "1", ",", "hparams", ".", "x_val", ")", ")", "for", "train_step", "in", "range", "(", "0", ",", "100", ")", ":", "y", "=", "torch", ".", "rand", "(", "(", "hparams", ".", "x_val", ",", "1", ")", ")", "out", "=", "x", ".", "mm", "(", "y", ")", "exp", ".", "log", "(", "{", "'fake_err'", ":", "out", ".", "item", "(", ")", "}", ")", "# Save exp when .", "exp", ".", "save", "(", ")" ]
db5a47067a854f76d89f8066582023c1e184bccb
test
train
Train your awesome model. :param hparams: The arguments to run the model with.
examples/hpc_cpu_example.py
def train(hparams, *args):
    """Train your awesome model.

    :param hparams: The arguments to run the model with.
    """
    # Track every hyperparameter in a test-tube Experiment.
    exp = Experiment(
        name=hparams.test_tube_exp_name,
        save_dir=hparams.log_path,  # Location to save the metrics.
        # The experiment version is optional, but using the one
        # from SLURM means the exp will not collide with other
        # versions if SLURM runs multiple at once.
        version=hparams.hpc_exp_number,
        autosave=False,
    )
    exp.argparse(hparams)

    # Pretend to train.
    x = hparams.x_val
    for train_step in range(0, 100):
        y = hparams.y_val
        out = x * y
        # NOTE(review): .item() assumes x_val/y_val are numpy/tensor-style
        # scalars; plain Python numbers have no .item() -- TODO confirm.
        exp.log({'fake_err': out.item()})  # Log metrics.

    # Persist the experiment to disk when finished.
    exp.save()
[ "Train", "your", "awesome", "model", "." ]
williamFalcon/test-tube
python
https://github.com/williamFalcon/test-tube/blob/db5a47067a854f76d89f8066582023c1e184bccb/examples/hpc_cpu_example.py#L5-L31
[ "def", "train", "(", "hparams", ",", "*", "args", ")", ":", "# Initialize experiments and track all the hyperparameters", "exp", "=", "Experiment", "(", "name", "=", "hparams", ".", "test_tube_exp_name", ",", "# Location to save the metrics.", "save_dir", "=", "hparams", ".", "log_path", ",", "# The experiment version is optional, but using the one ", "# from SLURM means the exp will not collide with other", "# versions if SLURM runs multiple at once.", "version", "=", "hparams", ".", "hpc_exp_number", ",", "autosave", "=", "False", ",", ")", "exp", ".", "argparse", "(", "hparams", ")", "# Pretend to train.", "x", "=", "hparams", ".", "x_val", "for", "train_step", "in", "range", "(", "0", ",", "100", ")", ":", "y", "=", "hparams", ".", "y_val", "out", "=", "x", "*", "y", "exp", ".", "log", "(", "{", "'fake_err'", ":", "out", ".", "item", "(", ")", "}", ")", "# Log metrics.", "# Save exp when done.", "exp", ".", "save", "(", ")" ]
db5a47067a854f76d89f8066582023c1e184bccb
test
exception_handler
Called by RQ when there is a failure in a worker. NOTE: Make sure that in your RQ worker process, rollbar.init() has been called with handler='blocking'. The default handler, 'thread', does not work from inside an RQ worker.
rollbar/contrib/rq/__init__.py
def exception_handler(job, *exc_info):
    """
    Called by RQ when there is a failure in a worker.

    NOTE: Make sure that in your RQ worker process, rollbar.init() has been called with
    handler='blocking'. The default handler, 'thread', does not work from inside an RQ worker.
    """
    # Capture the job's metadata alongside the exception.
    job_info = job.to_dict()

    # job_info['data'] is the pickled representation of the job, and doesn't json-serialize well.
    # repr() works nicely.
    job_info['data'] = repr(job_info['data'])

    rollbar.report_exc_info(
        exc_info,
        extra_data={'job': job_info},
        payload_data={'framework': 'rq'},
    )

    # continue to the next handler
    return True
[ "Called", "by", "RQ", "when", "there", "is", "a", "failure", "in", "a", "worker", "." ]
rollbar/pyrollbar
python
https://github.com/rollbar/pyrollbar/blob/33ef2e723a33d09dd6302f978f4a3908be95b9d2/rollbar/contrib/rq/__init__.py#L36-L55
[ "def", "exception_handler", "(", "job", ",", "*", "exc_info", ")", ":", "# Report data about the job with the exception.", "job_info", "=", "job", ".", "to_dict", "(", ")", "# job_info['data'] is the pickled representation of the job, and doesn't json-serialize well.", "# repr() works nicely.", "job_info", "[", "'data'", "]", "=", "repr", "(", "job_info", "[", "'data'", "]", ")", "extra_data", "=", "{", "'job'", ":", "job_info", "}", "payload_data", "=", "{", "'framework'", ":", "'rq'", "}", "rollbar", ".", "report_exc_info", "(", "exc_info", ",", "extra_data", "=", "extra_data", ",", "payload_data", "=", "payload_data", ")", "# continue to the next handler", "return", "True" ]
33ef2e723a33d09dd6302f978f4a3908be95b9d2
test
patch_debugtoolbar
Patches the pyramid_debugtoolbar (if installed) to display a link to the related rollbar item.
rollbar/contrib/pyramid/__init__.py
def patch_debugtoolbar(settings):
    """
    Patches the pyramid_debugtoolbar (if installed) to display a link
    to the related rollbar item.
    """
    try:
        from pyramid_debugtoolbar import tbtools
    except ImportError:
        # Toolbar not installed; nothing to patch.
        return

    web_base = settings.get('rollbar.web_base', DEFAULT_WEB_BASE)
    # Drop a single trailing slash so URL joining below stays clean.
    if web_base.endswith('/'):
        web_base = web_base[:-1]

    def insert_rollbar_console(request, html):
        # Insert the Rollbar link right after the closing </h1>.
        item_uuid = request.environ.get('rollbar.uuid')
        if not item_uuid:
            return html

        url = '%s/item/uuid/?uuid=%s' % (web_base, item_uuid)
        link = '<a style="color:white;" href="%s">View in Rollbar</a>' % url
        new_data = "<h2>Rollbar: %s</h2>" % link
        insertion_marker = "</h1>"
        return html.replace(insertion_marker, insertion_marker + new_data, 1)

    # Wrap tbtools.Traceback.render_full so every rendered traceback page
    # carries the Rollbar link.
    old_render_full = tbtools.Traceback.render_full

    def new_render_full(self, request, *args, **kw):
        rendered = old_render_full(self, request, *args, **kw)
        return insert_rollbar_console(request, rendered)

    tbtools.Traceback.render_full = new_render_full
[ "Patches", "the", "pyramid_debugtoolbar", "(", "if", "installed", ")", "to", "display", "a", "link", "to", "the", "related", "rollbar", "item", "." ]
rollbar/pyrollbar
python
https://github.com/rollbar/pyrollbar/blob/33ef2e723a33d09dd6302f978f4a3908be95b9d2/rollbar/contrib/pyramid/__init__.py#L76-L109
[ "def", "patch_debugtoolbar", "(", "settings", ")", ":", "try", ":", "from", "pyramid_debugtoolbar", "import", "tbtools", "except", "ImportError", ":", "return", "rollbar_web_base", "=", "settings", ".", "get", "(", "'rollbar.web_base'", ",", "DEFAULT_WEB_BASE", ")", "if", "rollbar_web_base", ".", "endswith", "(", "'/'", ")", ":", "rollbar_web_base", "=", "rollbar_web_base", "[", ":", "-", "1", "]", "def", "insert_rollbar_console", "(", "request", ",", "html", ")", ":", "# insert after the closing </h1>", "item_uuid", "=", "request", ".", "environ", ".", "get", "(", "'rollbar.uuid'", ")", "if", "not", "item_uuid", ":", "return", "html", "url", "=", "'%s/item/uuid/?uuid=%s'", "%", "(", "rollbar_web_base", ",", "item_uuid", ")", "link", "=", "'<a style=\"color:white;\" href=\"%s\">View in Rollbar</a>'", "%", "url", "new_data", "=", "\"<h2>Rollbar: %s</h2>\"", "%", "link", "insertion_marker", "=", "\"</h1>\"", "replacement", "=", "insertion_marker", "+", "new_data", "return", "html", ".", "replace", "(", "insertion_marker", ",", "replacement", ",", "1", ")", "# patch tbtools.Traceback.render_full", "old_render_full", "=", "tbtools", ".", "Traceback", ".", "render_full", "def", "new_render_full", "(", "self", ",", "request", ",", "*", "args", ",", "*", "*", "kw", ")", ":", "html", "=", "old_render_full", "(", "self", ",", "request", ",", "*", "args", ",", "*", "*", "kw", ")", "return", "insert_rollbar_console", "(", "request", ",", "html", ")", "tbtools", ".", "Traceback", ".", "render_full", "=", "new_render_full" ]
33ef2e723a33d09dd6302f978f4a3908be95b9d2
test
includeme
Pyramid entry point
rollbar/contrib/pyramid/__init__.py
def includeme(config): """ Pyramid entry point """ settings = config.registry.settings config.add_tween('rollbar.contrib.pyramid.rollbar_tween_factory', over=EXCVIEW) # run patch_debugtoolbar, unless they disabled it if asbool(settings.get('rollbar.patch_debugtoolbar', True)): patch_debugtoolbar(settings) def hook(request, data): data['framework'] = 'pyramid' if request: request.environ['rollbar.uuid'] = data['uuid'] if request.matched_route: data['context'] = request.matched_route.name rollbar.BASE_DATA_HOOK = hook kw = parse_settings(settings) access_token = kw.pop('access_token') environment = kw.pop('environment', 'production') if kw.get('scrub_fields'): kw['scrub_fields'] = set([str.strip(x) for x in kw.get('scrub_fields').split('\n') if x]) if kw.get('exception_level_filters'): r = DottedNameResolver() exception_level_filters = [] for line in kw.get('exception_level_filters').split('\n'): if line: dotted_path, level = line.split() try: cls = r.resolve(dotted_path) exception_level_filters.append((cls, level)) except ImportError: log.error('Could not import %r' % dotted_path) kw['exception_level_filters'] = exception_level_filters kw['enabled'] = asbool(kw.get('enabled', True)) rollbar.init(access_token, environment, **kw)
def includeme(config): """ Pyramid entry point """ settings = config.registry.settings config.add_tween('rollbar.contrib.pyramid.rollbar_tween_factory', over=EXCVIEW) # run patch_debugtoolbar, unless they disabled it if asbool(settings.get('rollbar.patch_debugtoolbar', True)): patch_debugtoolbar(settings) def hook(request, data): data['framework'] = 'pyramid' if request: request.environ['rollbar.uuid'] = data['uuid'] if request.matched_route: data['context'] = request.matched_route.name rollbar.BASE_DATA_HOOK = hook kw = parse_settings(settings) access_token = kw.pop('access_token') environment = kw.pop('environment', 'production') if kw.get('scrub_fields'): kw['scrub_fields'] = set([str.strip(x) for x in kw.get('scrub_fields').split('\n') if x]) if kw.get('exception_level_filters'): r = DottedNameResolver() exception_level_filters = [] for line in kw.get('exception_level_filters').split('\n'): if line: dotted_path, level = line.split() try: cls = r.resolve(dotted_path) exception_level_filters.append((cls, level)) except ImportError: log.error('Could not import %r' % dotted_path) kw['exception_level_filters'] = exception_level_filters kw['enabled'] = asbool(kw.get('enabled', True)) rollbar.init(access_token, environment, **kw)
[ "Pyramid", "entry", "point" ]
rollbar/pyrollbar
python
https://github.com/rollbar/pyrollbar/blob/33ef2e723a33d09dd6302f978f4a3908be95b9d2/rollbar/contrib/pyramid/__init__.py#L112-L160
[ "def", "includeme", "(", "config", ")", ":", "settings", "=", "config", ".", "registry", ".", "settings", "config", ".", "add_tween", "(", "'rollbar.contrib.pyramid.rollbar_tween_factory'", ",", "over", "=", "EXCVIEW", ")", "# run patch_debugtoolbar, unless they disabled it", "if", "asbool", "(", "settings", ".", "get", "(", "'rollbar.patch_debugtoolbar'", ",", "True", ")", ")", ":", "patch_debugtoolbar", "(", "settings", ")", "def", "hook", "(", "request", ",", "data", ")", ":", "data", "[", "'framework'", "]", "=", "'pyramid'", "if", "request", ":", "request", ".", "environ", "[", "'rollbar.uuid'", "]", "=", "data", "[", "'uuid'", "]", "if", "request", ".", "matched_route", ":", "data", "[", "'context'", "]", "=", "request", ".", "matched_route", ".", "name", "rollbar", ".", "BASE_DATA_HOOK", "=", "hook", "kw", "=", "parse_settings", "(", "settings", ")", "access_token", "=", "kw", ".", "pop", "(", "'access_token'", ")", "environment", "=", "kw", ".", "pop", "(", "'environment'", ",", "'production'", ")", "if", "kw", ".", "get", "(", "'scrub_fields'", ")", ":", "kw", "[", "'scrub_fields'", "]", "=", "set", "(", "[", "str", ".", "strip", "(", "x", ")", "for", "x", "in", "kw", ".", "get", "(", "'scrub_fields'", ")", ".", "split", "(", "'\\n'", ")", "if", "x", "]", ")", "if", "kw", ".", "get", "(", "'exception_level_filters'", ")", ":", "r", "=", "DottedNameResolver", "(", ")", "exception_level_filters", "=", "[", "]", "for", "line", "in", "kw", ".", "get", "(", "'exception_level_filters'", ")", ".", "split", "(", "'\\n'", ")", ":", "if", "line", ":", "dotted_path", ",", "level", "=", "line", ".", "split", "(", ")", "try", ":", "cls", "=", "r", ".", "resolve", "(", "dotted_path", ")", "exception_level_filters", ".", "append", "(", "(", "cls", ",", "level", ")", ")", "except", "ImportError", ":", "log", ".", "error", "(", "'Could not import %r'", "%", "dotted_path", ")", "kw", "[", "'exception_level_filters'", "]", "=", "exception_level_filters", "kw", "[", 
"'enabled'", "]", "=", "asbool", "(", "kw", ".", "get", "(", "'enabled'", ",", "True", ")", ")", "rollbar", ".", "init", "(", "access_token", ",", "environment", ",", "*", "*", "kw", ")" ]
33ef2e723a33d09dd6302f978f4a3908be95b9d2
test
RollbarNotifierMiddleware._ensure_log_handler
If there's no log configuration, set up a default handler.
rollbar/contrib/django/middleware.py
def _ensure_log_handler(self): """ If there's no log configuration, set up a default handler. """ if log.handlers: return handler = logging.StreamHandler() formatter = logging.Formatter( '%(asctime)s %(levelname)-5.5s [%(name)s][%(threadName)s] %(message)s') handler.setFormatter(formatter) log.addHandler(handler)
def _ensure_log_handler(self): """ If there's no log configuration, set up a default handler. """ if log.handlers: return handler = logging.StreamHandler() formatter = logging.Formatter( '%(asctime)s %(levelname)-5.5s [%(name)s][%(threadName)s] %(message)s') handler.setFormatter(formatter) log.addHandler(handler)
[ "If", "there", "s", "no", "log", "configuration", "set", "up", "a", "default", "handler", "." ]
rollbar/pyrollbar
python
https://github.com/rollbar/pyrollbar/blob/33ef2e723a33d09dd6302f978f4a3908be95b9d2/rollbar/contrib/django/middleware.py#L236-L246
[ "def", "_ensure_log_handler", "(", "self", ")", ":", "if", "log", ".", "handlers", ":", "return", "handler", "=", "logging", ".", "StreamHandler", "(", ")", "formatter", "=", "logging", ".", "Formatter", "(", "'%(asctime)s %(levelname)-5.5s [%(name)s][%(threadName)s] %(message)s'", ")", "handler", ".", "setFormatter", "(", "formatter", ")", "log", ".", "addHandler", "(", "handler", ")" ]
33ef2e723a33d09dd6302f978f4a3908be95b9d2
test
get_request
Get the current request object. Implementation varies on library support. Modified below when we know which framework is being used.
rollbar/__init__.py
def get_request(): """ Get the current request object. Implementation varies on library support. Modified below when we know which framework is being used. """ # TODO(cory): add in a generic _get_locals_request() which # will iterate up through the call stack and look for a variable # that appears to be valid request object. for fn in (_get_bottle_request, _get_flask_request, _get_pyramid_request, _get_pylons_request): try: req = fn() if req is not None: return req except: pass return None
def get_request(): """ Get the current request object. Implementation varies on library support. Modified below when we know which framework is being used. """ # TODO(cory): add in a generic _get_locals_request() which # will iterate up through the call stack and look for a variable # that appears to be valid request object. for fn in (_get_bottle_request, _get_flask_request, _get_pyramid_request, _get_pylons_request): try: req = fn() if req is not None: return req except: pass return None
[ "Get", "the", "current", "request", "object", ".", "Implementation", "varies", "on", "library", "support", ".", "Modified", "below", "when", "we", "know", "which", "framework", "is", "being", "used", "." ]
rollbar/pyrollbar
python
https://github.com/rollbar/pyrollbar/blob/33ef2e723a33d09dd6302f978f4a3908be95b9d2/rollbar/__init__.py#L140-L161
[ "def", "get_request", "(", ")", ":", "# TODO(cory): add in a generic _get_locals_request() which", "# will iterate up through the call stack and look for a variable", "# that appears to be valid request object.", "for", "fn", "in", "(", "_get_bottle_request", ",", "_get_flask_request", ",", "_get_pyramid_request", ",", "_get_pylons_request", ")", ":", "try", ":", "req", "=", "fn", "(", ")", "if", "req", "is", "not", "None", ":", "return", "req", "except", ":", "pass", "return", "None" ]
33ef2e723a33d09dd6302f978f4a3908be95b9d2
test
init
Saves configuration variables in this module's SETTINGS. access_token: project access token. Get this from the Rollbar UI: - click "Settings" in the top nav - click "Projects" in the left nav - copy-paste the appropriate token. environment: environment name. Can be any string; suggestions: 'production', 'development', 'staging', 'yourname' **kw: provided keyword arguments will override keys in SETTINGS.
rollbar/__init__.py
def init(access_token, environment='production', scrub_fields=None, url_fields=None, **kw): """ Saves configuration variables in this module's SETTINGS. access_token: project access token. Get this from the Rollbar UI: - click "Settings" in the top nav - click "Projects" in the left nav - copy-paste the appropriate token. environment: environment name. Can be any string; suggestions: 'production', 'development', 'staging', 'yourname' **kw: provided keyword arguments will override keys in SETTINGS. """ global SETTINGS, agent_log, _initialized, _transforms, _serialize_transform, _threads if scrub_fields is not None: SETTINGS['scrub_fields'] = list(scrub_fields) if url_fields is not None: SETTINGS['url_fields'] = list(url_fields) # Merge the extra config settings into SETTINGS SETTINGS = dict_merge(SETTINGS, kw) if _initialized: # NOTE: Temp solution to not being able to re-init. # New versions of pyrollbar will support re-initialization # via the (not-yet-implemented) configure() method. if not SETTINGS.get('suppress_reinit_warning'): log.warning('Rollbar already initialized. Ignoring re-init.') return SETTINGS['access_token'] = access_token SETTINGS['environment'] = environment if SETTINGS.get('allow_logging_basic_config'): logging.basicConfig() if SETTINGS.get('handler') == 'agent': agent_log = _create_agent_log() # We will perform these transforms in order: # 1. Serialize the payload to be all python built-in objects # 2. Scrub the payloads based on the key suffixes in SETTINGS['scrub_fields'] # 3. Scrub URLs in the payload for keys that end with 'url' # 4. Optional - If local variable gathering is enabled, transform the # trace frame values using the ShortReprTransform. 
_serialize_transform = SerializableTransform(safe_repr=SETTINGS['locals']['safe_repr'], whitelist_types=SETTINGS['locals']['whitelisted_types']) _transforms = [ ScrubRedactTransform(), _serialize_transform, ScrubTransform(suffixes=[(field,) for field in SETTINGS['scrub_fields']], redact_char='*'), ScrubUrlTransform(suffixes=[(field,) for field in SETTINGS['url_fields']], params_to_scrub=SETTINGS['scrub_fields']) ] # A list of key prefixes to apply our shortener transform to. The request # being included in the body key is old behavior and is being retained for # backwards compatibility. shortener_keys = [ ('request', 'POST'), ('request', 'json'), ('body', 'request', 'POST'), ('body', 'request', 'json'), ] if SETTINGS['locals']['enabled']: shortener_keys.append(('body', 'trace', 'frames', '*', 'code')) shortener_keys.append(('body', 'trace', 'frames', '*', 'args', '*')) shortener_keys.append(('body', 'trace', 'frames', '*', 'kwargs', '*')) shortener_keys.append(('body', 'trace', 'frames', '*', 'locals', '*')) shortener_keys.extend(SETTINGS['shortener_keys']) shortener = ShortenerTransform(safe_repr=SETTINGS['locals']['safe_repr'], keys=shortener_keys, **SETTINGS['locals']['sizes']) _transforms.append(shortener) _threads = queue.Queue() events.reset() filters.add_builtin_filters(SETTINGS) _initialized = True
def init(access_token, environment='production', scrub_fields=None, url_fields=None, **kw): """ Saves configuration variables in this module's SETTINGS. access_token: project access token. Get this from the Rollbar UI: - click "Settings" in the top nav - click "Projects" in the left nav - copy-paste the appropriate token. environment: environment name. Can be any string; suggestions: 'production', 'development', 'staging', 'yourname' **kw: provided keyword arguments will override keys in SETTINGS. """ global SETTINGS, agent_log, _initialized, _transforms, _serialize_transform, _threads if scrub_fields is not None: SETTINGS['scrub_fields'] = list(scrub_fields) if url_fields is not None: SETTINGS['url_fields'] = list(url_fields) # Merge the extra config settings into SETTINGS SETTINGS = dict_merge(SETTINGS, kw) if _initialized: # NOTE: Temp solution to not being able to re-init. # New versions of pyrollbar will support re-initialization # via the (not-yet-implemented) configure() method. if not SETTINGS.get('suppress_reinit_warning'): log.warning('Rollbar already initialized. Ignoring re-init.') return SETTINGS['access_token'] = access_token SETTINGS['environment'] = environment if SETTINGS.get('allow_logging_basic_config'): logging.basicConfig() if SETTINGS.get('handler') == 'agent': agent_log = _create_agent_log() # We will perform these transforms in order: # 1. Serialize the payload to be all python built-in objects # 2. Scrub the payloads based on the key suffixes in SETTINGS['scrub_fields'] # 3. Scrub URLs in the payload for keys that end with 'url' # 4. Optional - If local variable gathering is enabled, transform the # trace frame values using the ShortReprTransform. 
_serialize_transform = SerializableTransform(safe_repr=SETTINGS['locals']['safe_repr'], whitelist_types=SETTINGS['locals']['whitelisted_types']) _transforms = [ ScrubRedactTransform(), _serialize_transform, ScrubTransform(suffixes=[(field,) for field in SETTINGS['scrub_fields']], redact_char='*'), ScrubUrlTransform(suffixes=[(field,) for field in SETTINGS['url_fields']], params_to_scrub=SETTINGS['scrub_fields']) ] # A list of key prefixes to apply our shortener transform to. The request # being included in the body key is old behavior and is being retained for # backwards compatibility. shortener_keys = [ ('request', 'POST'), ('request', 'json'), ('body', 'request', 'POST'), ('body', 'request', 'json'), ] if SETTINGS['locals']['enabled']: shortener_keys.append(('body', 'trace', 'frames', '*', 'code')) shortener_keys.append(('body', 'trace', 'frames', '*', 'args', '*')) shortener_keys.append(('body', 'trace', 'frames', '*', 'kwargs', '*')) shortener_keys.append(('body', 'trace', 'frames', '*', 'locals', '*')) shortener_keys.extend(SETTINGS['shortener_keys']) shortener = ShortenerTransform(safe_repr=SETTINGS['locals']['safe_repr'], keys=shortener_keys, **SETTINGS['locals']['sizes']) _transforms.append(shortener) _threads = queue.Queue() events.reset() filters.add_builtin_filters(SETTINGS) _initialized = True
[ "Saves", "configuration", "variables", "in", "this", "module", "s", "SETTINGS", "." ]
rollbar/pyrollbar
python
https://github.com/rollbar/pyrollbar/blob/33ef2e723a33d09dd6302f978f4a3908be95b9d2/rollbar/__init__.py#L291-L370
[ "def", "init", "(", "access_token", ",", "environment", "=", "'production'", ",", "scrub_fields", "=", "None", ",", "url_fields", "=", "None", ",", "*", "*", "kw", ")", ":", "global", "SETTINGS", ",", "agent_log", ",", "_initialized", ",", "_transforms", ",", "_serialize_transform", ",", "_threads", "if", "scrub_fields", "is", "not", "None", ":", "SETTINGS", "[", "'scrub_fields'", "]", "=", "list", "(", "scrub_fields", ")", "if", "url_fields", "is", "not", "None", ":", "SETTINGS", "[", "'url_fields'", "]", "=", "list", "(", "url_fields", ")", "# Merge the extra config settings into SETTINGS", "SETTINGS", "=", "dict_merge", "(", "SETTINGS", ",", "kw", ")", "if", "_initialized", ":", "# NOTE: Temp solution to not being able to re-init.", "# New versions of pyrollbar will support re-initialization", "# via the (not-yet-implemented) configure() method.", "if", "not", "SETTINGS", ".", "get", "(", "'suppress_reinit_warning'", ")", ":", "log", ".", "warning", "(", "'Rollbar already initialized. Ignoring re-init.'", ")", "return", "SETTINGS", "[", "'access_token'", "]", "=", "access_token", "SETTINGS", "[", "'environment'", "]", "=", "environment", "if", "SETTINGS", ".", "get", "(", "'allow_logging_basic_config'", ")", ":", "logging", ".", "basicConfig", "(", ")", "if", "SETTINGS", ".", "get", "(", "'handler'", ")", "==", "'agent'", ":", "agent_log", "=", "_create_agent_log", "(", ")", "# We will perform these transforms in order:", "# 1. Serialize the payload to be all python built-in objects", "# 2. Scrub the payloads based on the key suffixes in SETTINGS['scrub_fields']", "# 3. Scrub URLs in the payload for keys that end with 'url'", "# 4. 
Optional - If local variable gathering is enabled, transform the", "# trace frame values using the ShortReprTransform.", "_serialize_transform", "=", "SerializableTransform", "(", "safe_repr", "=", "SETTINGS", "[", "'locals'", "]", "[", "'safe_repr'", "]", ",", "whitelist_types", "=", "SETTINGS", "[", "'locals'", "]", "[", "'whitelisted_types'", "]", ")", "_transforms", "=", "[", "ScrubRedactTransform", "(", ")", ",", "_serialize_transform", ",", "ScrubTransform", "(", "suffixes", "=", "[", "(", "field", ",", ")", "for", "field", "in", "SETTINGS", "[", "'scrub_fields'", "]", "]", ",", "redact_char", "=", "'*'", ")", ",", "ScrubUrlTransform", "(", "suffixes", "=", "[", "(", "field", ",", ")", "for", "field", "in", "SETTINGS", "[", "'url_fields'", "]", "]", ",", "params_to_scrub", "=", "SETTINGS", "[", "'scrub_fields'", "]", ")", "]", "# A list of key prefixes to apply our shortener transform to. The request", "# being included in the body key is old behavior and is being retained for", "# backwards compatibility.", "shortener_keys", "=", "[", "(", "'request'", ",", "'POST'", ")", ",", "(", "'request'", ",", "'json'", ")", ",", "(", "'body'", ",", "'request'", ",", "'POST'", ")", ",", "(", "'body'", ",", "'request'", ",", "'json'", ")", ",", "]", "if", "SETTINGS", "[", "'locals'", "]", "[", "'enabled'", "]", ":", "shortener_keys", ".", "append", "(", "(", "'body'", ",", "'trace'", ",", "'frames'", ",", "'*'", ",", "'code'", ")", ")", "shortener_keys", ".", "append", "(", "(", "'body'", ",", "'trace'", ",", "'frames'", ",", "'*'", ",", "'args'", ",", "'*'", ")", ")", "shortener_keys", ".", "append", "(", "(", "'body'", ",", "'trace'", ",", "'frames'", ",", "'*'", ",", "'kwargs'", ",", "'*'", ")", ")", "shortener_keys", ".", "append", "(", "(", "'body'", ",", "'trace'", ",", "'frames'", ",", "'*'", ",", "'locals'", ",", "'*'", ")", ")", "shortener_keys", ".", "extend", "(", "SETTINGS", "[", "'shortener_keys'", "]", ")", "shortener", "=", "ShortenerTransform", "(", 
"safe_repr", "=", "SETTINGS", "[", "'locals'", "]", "[", "'safe_repr'", "]", ",", "keys", "=", "shortener_keys", ",", "*", "*", "SETTINGS", "[", "'locals'", "]", "[", "'sizes'", "]", ")", "_transforms", ".", "append", "(", "shortener", ")", "_threads", "=", "queue", ".", "Queue", "(", ")", "events", ".", "reset", "(", ")", "filters", ".", "add_builtin_filters", "(", "SETTINGS", ")", "_initialized", "=", "True" ]
33ef2e723a33d09dd6302f978f4a3908be95b9d2
test
lambda_function
Decorator for making error handling on AWS Lambda easier
rollbar/__init__.py
def lambda_function(f): """ Decorator for making error handling on AWS Lambda easier """ @functools.wraps(f) def wrapper(event, context): global _CURRENT_LAMBDA_CONTEXT _CURRENT_LAMBDA_CONTEXT = context try: result = f(event, context) return wait(lambda: result) except: cls, exc, trace = sys.exc_info() report_exc_info((cls, exc, trace.tb_next)) wait() raise return wrapper
def lambda_function(f): """ Decorator for making error handling on AWS Lambda easier """ @functools.wraps(f) def wrapper(event, context): global _CURRENT_LAMBDA_CONTEXT _CURRENT_LAMBDA_CONTEXT = context try: result = f(event, context) return wait(lambda: result) except: cls, exc, trace = sys.exc_info() report_exc_info((cls, exc, trace.tb_next)) wait() raise return wrapper
[ "Decorator", "for", "making", "error", "handling", "on", "AWS", "Lambda", "easier" ]
rollbar/pyrollbar
python
https://github.com/rollbar/pyrollbar/blob/33ef2e723a33d09dd6302f978f4a3908be95b9d2/rollbar/__init__.py#L373-L389
[ "def", "lambda_function", "(", "f", ")", ":", "@", "functools", ".", "wraps", "(", "f", ")", "def", "wrapper", "(", "event", ",", "context", ")", ":", "global", "_CURRENT_LAMBDA_CONTEXT", "_CURRENT_LAMBDA_CONTEXT", "=", "context", "try", ":", "result", "=", "f", "(", "event", ",", "context", ")", "return", "wait", "(", "lambda", ":", "result", ")", "except", ":", "cls", ",", "exc", ",", "trace", "=", "sys", ".", "exc_info", "(", ")", "report_exc_info", "(", "(", "cls", ",", "exc", ",", "trace", ".", "tb_next", ")", ")", "wait", "(", ")", "raise", "return", "wrapper" ]
33ef2e723a33d09dd6302f978f4a3908be95b9d2
test
report_exc_info
Reports an exception to Rollbar, using exc_info (from calling sys.exc_info()) exc_info: optional, should be the result of calling sys.exc_info(). If omitted, sys.exc_info() will be called here. request: optional, a WebOb, Werkzeug-based or Sanic request object. extra_data: optional, will be included in the 'custom' section of the payload payload_data: optional, dict that will override values in the final payload (e.g. 'level' or 'fingerprint') kw: provided for legacy purposes; unused. Example usage: rollbar.init(access_token='YOUR_PROJECT_ACCESS_TOKEN') try: do_something() except: rollbar.report_exc_info(sys.exc_info(), request, {'foo': 'bar'}, {'level': 'warning'})
rollbar/__init__.py
def report_exc_info(exc_info=None, request=None, extra_data=None, payload_data=None, level=None, **kw): """ Reports an exception to Rollbar, using exc_info (from calling sys.exc_info()) exc_info: optional, should be the result of calling sys.exc_info(). If omitted, sys.exc_info() will be called here. request: optional, a WebOb, Werkzeug-based or Sanic request object. extra_data: optional, will be included in the 'custom' section of the payload payload_data: optional, dict that will override values in the final payload (e.g. 'level' or 'fingerprint') kw: provided for legacy purposes; unused. Example usage: rollbar.init(access_token='YOUR_PROJECT_ACCESS_TOKEN') try: do_something() except: rollbar.report_exc_info(sys.exc_info(), request, {'foo': 'bar'}, {'level': 'warning'}) """ if exc_info is None: exc_info = sys.exc_info() try: return _report_exc_info(exc_info, request, extra_data, payload_data, level=level) except Exception as e: log.exception("Exception while reporting exc_info to Rollbar. %r", e)
def report_exc_info(exc_info=None, request=None, extra_data=None, payload_data=None, level=None, **kw): """ Reports an exception to Rollbar, using exc_info (from calling sys.exc_info()) exc_info: optional, should be the result of calling sys.exc_info(). If omitted, sys.exc_info() will be called here. request: optional, a WebOb, Werkzeug-based or Sanic request object. extra_data: optional, will be included in the 'custom' section of the payload payload_data: optional, dict that will override values in the final payload (e.g. 'level' or 'fingerprint') kw: provided for legacy purposes; unused. Example usage: rollbar.init(access_token='YOUR_PROJECT_ACCESS_TOKEN') try: do_something() except: rollbar.report_exc_info(sys.exc_info(), request, {'foo': 'bar'}, {'level': 'warning'}) """ if exc_info is None: exc_info = sys.exc_info() try: return _report_exc_info(exc_info, request, extra_data, payload_data, level=level) except Exception as e: log.exception("Exception while reporting exc_info to Rollbar. %r", e)
[ "Reports", "an", "exception", "to", "Rollbar", "using", "exc_info", "(", "from", "calling", "sys", ".", "exc_info", "()", ")" ]
rollbar/pyrollbar
python
https://github.com/rollbar/pyrollbar/blob/33ef2e723a33d09dd6302f978f4a3908be95b9d2/rollbar/__init__.py#L392-L417
[ "def", "report_exc_info", "(", "exc_info", "=", "None", ",", "request", "=", "None", ",", "extra_data", "=", "None", ",", "payload_data", "=", "None", ",", "level", "=", "None", ",", "*", "*", "kw", ")", ":", "if", "exc_info", "is", "None", ":", "exc_info", "=", "sys", ".", "exc_info", "(", ")", "try", ":", "return", "_report_exc_info", "(", "exc_info", ",", "request", ",", "extra_data", ",", "payload_data", ",", "level", "=", "level", ")", "except", "Exception", "as", "e", ":", "log", ".", "exception", "(", "\"Exception while reporting exc_info to Rollbar. %r\"", ",", "e", ")" ]
33ef2e723a33d09dd6302f978f4a3908be95b9d2
test
report_message
Reports an arbitrary string message to Rollbar. message: the string body of the message level: level to report at. One of: 'critical', 'error', 'warning', 'info', 'debug' request: the request object for the context of the message extra_data: dictionary of params to include with the message. 'body' is reserved. payload_data: param names to pass in the 'data' level of the payload; overrides defaults.
rollbar/__init__.py
def report_message(message, level='error', request=None, extra_data=None, payload_data=None): """ Reports an arbitrary string message to Rollbar. message: the string body of the message level: level to report at. One of: 'critical', 'error', 'warning', 'info', 'debug' request: the request object for the context of the message extra_data: dictionary of params to include with the message. 'body' is reserved. payload_data: param names to pass in the 'data' level of the payload; overrides defaults. """ try: return _report_message(message, level, request, extra_data, payload_data) except Exception as e: log.exception("Exception while reporting message to Rollbar. %r", e)
def report_message(message, level='error', request=None, extra_data=None, payload_data=None): """ Reports an arbitrary string message to Rollbar. message: the string body of the message level: level to report at. One of: 'critical', 'error', 'warning', 'info', 'debug' request: the request object for the context of the message extra_data: dictionary of params to include with the message. 'body' is reserved. payload_data: param names to pass in the 'data' level of the payload; overrides defaults. """ try: return _report_message(message, level, request, extra_data, payload_data) except Exception as e: log.exception("Exception while reporting message to Rollbar. %r", e)
[ "Reports", "an", "arbitrary", "string", "message", "to", "Rollbar", "." ]
rollbar/pyrollbar
python
https://github.com/rollbar/pyrollbar/blob/33ef2e723a33d09dd6302f978f4a3908be95b9d2/rollbar/__init__.py#L420-L433
[ "def", "report_message", "(", "message", ",", "level", "=", "'error'", ",", "request", "=", "None", ",", "extra_data", "=", "None", ",", "payload_data", "=", "None", ")", ":", "try", ":", "return", "_report_message", "(", "message", ",", "level", ",", "request", ",", "extra_data", ",", "payload_data", ")", "except", "Exception", "as", "e", ":", "log", ".", "exception", "(", "\"Exception while reporting message to Rollbar. %r\"", ",", "e", ")" ]
33ef2e723a33d09dd6302f978f4a3908be95b9d2