All rows in this section come from the `test` partition of the split, and `original_string` and `code` are identical strings in every row. The schema:

| column | type | size range |
|---|---|---|
| partition | string | 3 classes |
| func_name | string | 1–134 chars |
| docstring | string | 1–46.9k chars |
| path | string | 4–223 chars |
| original_string | string | 75–104k chars |
| code | string | 75–104k chars |
| docstring_tokens | list | 1–1.97k items |
| repo | string | 7–55 chars |
| language | string | 1 class (python) |
| url | string | 87–315 chars |
| code_tokens | list | 19–28.4k items |
| sha | string | 40 chars |
---

**walk_dirs** · test · `pgcontents/utils/sync.py`

```python
def walk_dirs(mgr, dirs):
"""
Recursive helper for walk.
"""
for directory in dirs:
children = mgr.get(
directory,
content=True,
type='directory',
)['content']
dirs, files = map(sorted, _separate_dirs_files(children))
yield directory, dirs, files
if dirs:
for entry in walk_dirs(mgr, dirs):
yield entry
```

- repo: quantopian/pgcontents · language: python
- url: https://github.com/quantopian/pgcontents/blob/ed36268b7917332d16868208e1e565742a8753e1/pgcontents/utils/sync.py#L53-L67
- sha: ed36268b7917332d16868208e1e565742a8753e1
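The generator above leans on a `_separate_dirs_files` helper that this row does not include. A runnable sketch of how the traversal composes, with an invented dict-backed stand-in for the contents manager (`FakeManager` and the sample tree are illustrations, not pgcontents API):

```python
def _separate_dirs_files(models):
    # Assumed behaviour of the unshown helper: split a directory
    # listing into (directory paths, file paths).
    dirs = [m['path'] for m in models if m['type'] == 'directory']
    files = [m['path'] for m in models if m['type'] != 'directory']
    return dirs, files

class FakeManager(object):
    """Invented stand-in exposing the one method walk_dirs calls."""
    def __init__(self, tree):
        self.tree = tree  # maps a directory path to its child models

    def get(self, path, content=True, type='directory'):
        return {'content': self.tree[path]}

tree = {
    '': [{'path': 'notebooks', 'type': 'directory'},
         {'path': 'README.md', 'type': 'file'}],
    'notebooks': [{'path': 'notebooks/a.ipynb', 'type': 'file'}],
}
for triple in walk_dirs(FakeManager(tree), ['']):
    print(triple)
# ('', ['notebooks'], ['README.md'])
# ('notebooks', [], ['notebooks/a.ipynb'])
```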
---

**walk_files** · test · `pgcontents/utils/sync.py`

```python
def walk_files(mgr):
"""
Iterate over all files visible to ``mgr``.
"""
    # Delegate to walk(); calling walk_files here would recurse forever.
    for dir_, subdirs, files in walk(mgr):
for file_ in files:
yield file_
```

- repo: quantopian/pgcontents · language: python
- url: https://github.com/quantopian/pgcontents/blob/ed36268b7917332d16868208e1e565742a8753e1/pgcontents/utils/sync.py#L70-L76
- sha: ed36268b7917332d16868208e1e565742a8753e1
---

**walk_files_with_content** · test · `pgcontents/utils/sync.py`

```python
def walk_files_with_content(mgr):
"""
Iterate over the contents of all files visible to ``mgr``.
"""
for _, _, files in walk(mgr):
for f in files:
yield mgr.get(f, content=True)
```

- repo: quantopian/pgcontents · language: python
- url: https://github.com/quantopian/pgcontents/blob/ed36268b7917332d16868208e1e565742a8753e1/pgcontents/utils/sync.py#L79-L85
- sha: ed36268b7917332d16868208e1e565742a8753e1
---

**reencrypt_all_users** · test · `pgcontents/utils/sync.py`

```python
def reencrypt_all_users(engine,
old_crypto_factory,
new_crypto_factory,
logger):
"""
Re-encrypt data for all users.
This function is idempotent, meaning that it should be possible to apply
the same re-encryption process multiple times without having any effect on
the database. Idempotency is achieved by first attempting to decrypt with
the old crypto and falling back to the new crypto on failure.
An important consequence of this strategy is that **decrypting** a database
is not supported with this function, because ``NoEncryption.decrypt``
always succeeds. To decrypt an already-encrypted database, use
``unencrypt_all_users`` instead.
It is, however, possible to perform an initial encryption of a database by
passing a function returning a ``NoEncryption`` as ``old_crypto_factory``.
Parameters
----------
engine : SQLAlchemy.engine
Engine encapsulating database connections.
old_crypto_factory : function[str -> Any]
A function from user_id to an object providing the interface required
by PostgresContentsManager.crypto. Results of this will be used for
decryption of existing database content.
new_crypto_factory : function[str -> Any]
A function from user_id to an object providing the interface required
by PostgresContentsManager.crypto. Results of this will be used for
re-encryption of database content.
This **must not** return instances of ``NoEncryption``. Use
``unencrypt_all_users`` if you want to unencrypt a database.
logger : logging.Logger, optional
        A logger to use during re-encryption.
See Also
--------
reencrypt_user
unencrypt_all_users
"""
logger.info("Beginning re-encryption for all users.")
for user_id in all_user_ids(engine):
reencrypt_single_user(
engine,
user_id,
old_crypto=old_crypto_factory(user_id),
new_crypto=new_crypto_factory(user_id),
logger=logger,
)
logger.info("Finished re-encryption for all users.")
```

- repo: quantopian/pgcontents · language: python
- url: https://github.com/quantopian/pgcontents/blob/ed36268b7917332d16868208e1e565742a8753e1/pgcontents/utils/sync.py#L96-L148
- sha: ed36268b7917332d16868208e1e565742a8753e1
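The two factory arguments simply map a user id to an object with `encrypt`/`decrypt` methods. A hedged sketch of the initial-encryption case the docstring describes, where `DummyCrypto` is an invented stand-in (pgcontents ships real crypto classes, such as the `NoEncryption` the docstring mentions):

```python
import logging

class NoEncryption(object):
    # Per the docstring: decrypt always succeeds, nothing is ciphered.
    def encrypt(self, s):
        return s

    def decrypt(self, s):
        return s

class DummyCrypto(object):
    # Invented per-user crypto; only the interface matters here.
    def __init__(self, key):
        self.key = key

    def encrypt(self, s):
        return s  # a real implementation would encrypt with self.key

    def decrypt(self, s):
        return s

reencrypt_all_users(
    engine,  # an existing SQLAlchemy engine is assumed
    old_crypto_factory=lambda user_id: NoEncryption(),
    new_crypto_factory=lambda user_id: DummyCrypto(key=user_id),
    logger=logging.getLogger(__name__),
)
```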
---

**reencrypt_single_user** · test · `pgcontents/utils/sync.py`

```python
def reencrypt_single_user(engine, user_id, old_crypto, new_crypto, logger):
"""
Re-encrypt all files and checkpoints for a single user.
"""
# Use FallbackCrypto so that we're re-entrant if we halt partway through.
crypto = FallbackCrypto([new_crypto, old_crypto])
reencrypt_user_content(
engine=engine,
user_id=user_id,
old_decrypt_func=crypto.decrypt,
new_encrypt_func=crypto.encrypt,
logger=logger,
)
```

- repo: quantopian/pgcontents · language: python
- url: https://github.com/quantopian/pgcontents/blob/ed36268b7917332d16868208e1e565742a8753e1/pgcontents/utils/sync.py#L151-L164
- sha: ed36268b7917332d16868208e1e565742a8753e1
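`FallbackCrypto` is what makes the pass re-entrant: encryption always uses the new crypto, while decryption tries the new crypto first and falls back to the old one. Its implementation is not part of this row; a minimal sketch of the pattern:

```python
class FallbackCrypto(object):
    """Minimal sketch of the fallback pattern, not the actual pgcontents class."""
    def __init__(self, cryptos):
        self.cryptos = cryptos

    def encrypt(self, s):
        # Always encrypt with the first (newest) crypto.
        return self.cryptos[0].encrypt(s)

    def decrypt(self, s):
        # Try each crypto in order; only the last failure propagates.
        for crypto in self.cryptos[:-1]:
            try:
                return crypto.decrypt(s)
            except Exception:
                continue
        return self.cryptos[-1].decrypt(s)
```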
---

**unencrypt_all_users** · test · `pgcontents/utils/sync.py`

```python
def unencrypt_all_users(engine, old_crypto_factory, logger):
"""
Unencrypt data for all users.
Parameters
----------
engine : SQLAlchemy.engine
Engine encapsulating database connections.
old_crypto_factory : function[str -> Any]
A function from user_id to an object providing the interface required
by PostgresContentsManager.crypto. Results of this will be used for
decryption of existing database content.
logger : logging.Logger, optional
        A logger to use during re-encryption.
"""
logger.info("Beginning re-encryption for all users.")
for user_id in all_user_ids(engine):
unencrypt_single_user(
engine=engine,
user_id=user_id,
old_crypto=old_crypto_factory(user_id),
logger=logger,
)
logger.info("Finished re-encryption for all users.")
```

- repo: quantopian/pgcontents · language: python
- url: https://github.com/quantopian/pgcontents/blob/ed36268b7917332d16868208e1e565742a8753e1/pgcontents/utils/sync.py#L167-L190
- sha: ed36268b7917332d16868208e1e565742a8753e1
---

**unencrypt_single_user** · test · `pgcontents/utils/sync.py`

```python
def unencrypt_single_user(engine, user_id, old_crypto, logger):
"""
Unencrypt all files and checkpoints for a single user.
"""
reencrypt_user_content(
engine=engine,
user_id=user_id,
old_decrypt_func=old_crypto.decrypt,
new_encrypt_func=lambda s: s,
logger=logger,
)
```

- repo: quantopian/pgcontents · language: python
- url: https://github.com/quantopian/pgcontents/blob/ed36268b7917332d16868208e1e565742a8753e1/pgcontents/utils/sync.py#L193-L203
- sha: ed36268b7917332d16868208e1e565742a8753e1
---

**temp_alembic_ini** · test · `pgcontents/utils/migrate.py`

```python
def temp_alembic_ini(alembic_dir_location, sqlalchemy_url):
"""
Temporarily write an alembic.ini file for use with alembic migration
scripts.
"""
with TemporaryDirectory() as tempdir:
alembic_ini_filename = join(tempdir, 'temp_alembic.ini')
with open(alembic_ini_filename, 'w') as f:
f.write(
ALEMBIC_INI_TEMPLATE.format(
alembic_dir_location=alembic_dir_location,
sqlalchemy_url=sqlalchemy_url,
)
)
yield alembic_ini_filename
```

- repo: quantopian/pgcontents · language: python
- url: https://github.com/quantopian/pgcontents/blob/ed36268b7917332d16868208e1e565742a8753e1/pgcontents/utils/migrate.py#L17-L31
- sha: ed36268b7917332d16868208e1e565742a8753e1
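As extracted, this is a bare generator; it is presumably written for `contextlib.contextmanager`, whose decorator would sit on the line above the `def` and fall outside the extracted span. Assuming that decorator and an `ALEMBIC_INI_TEMPLATE` with `{alembic_dir_location}`/`{sqlalchemy_url}` placeholders, usage looks like:

```python
from contextlib import contextmanager

# Restore the decorator the extraction presumably dropped.
temp_alembic_ini = contextmanager(temp_alembic_ini)

with temp_alembic_ini('migrations/', 'postgresql://localhost/notebooks') as ini:
    print(ini)  # .../temp_alembic.ini; the file disappears when the block exits
```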
---

**upgrade** · test · `pgcontents/utils/migrate.py`

```python
def upgrade(db_url, revision):
"""
Upgrade the given database to revision.
"""
with temp_alembic_ini(ALEMBIC_DIR_LOCATION, db_url) as alembic_ini:
subprocess.check_call(
['alembic', '-c', alembic_ini, 'upgrade', revision]
)
```

- repo: quantopian/pgcontents · language: python
- url: https://github.com/quantopian/pgcontents/blob/ed36268b7917332d16868208e1e565742a8753e1/pgcontents/utils/migrate.py#L34-L41
- sha: ed36268b7917332d16868208e1e565742a8753e1
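`'head'` is the standard Alembic target for the newest revision, and `subprocess.check_call` raises `CalledProcessError` if the migration fails, so errors are not silently swallowed:

```python
# Bring a database all the way up to the latest revision (db URL assumed).
upgrade('postgresql://localhost/notebooks', 'head')
```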
---

**AuthorMixin.get_author_string** · test · `dispatch/modules/content/mixins.py`

```python
def get_author_string(self, links=False):
    """Returns list of authors as a comma-separated
    string (with 'and' before last author)."""
    # `links` is overloaded: normally a bool toggling HTML links, but
    # get_author_type_string passes a pre-grouped author list through it.
    saved_args = locals()
    saved_args = saved_args['links']
def format_author(author):
if links and author.person.slug:
return '<a href="/authors/%s/">%s</a>' % (author.person.slug, author.person.full_name)
return author.person.full_name
if links == True or links == False:
authors = map(format_author, self.authors.all())
else:
authors = map(format_author, saved_args)
if not authors:
return ""
elif len(authors) == 1:
# If this is the only author, just return author name
return authors[0]
return ", ".join(authors[0:-1]) + " and " + authors[-1]
```

- repo: ubyssey/dispatch · language: python
- url: https://github.com/ubyssey/dispatch/blob/8da6084fe61726f20e9cf675190480cfc45ee764/dispatch/modules/content/mixins.py#L27-L49
- sha: 8da6084fe61726f20e9cf675190480cfc45ee764
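The comma-plus-`and` joining in the final lines is independent of Django and easy to check in isolation:

```python
def join_names(names):
    # Same joining rule as the tail of get_author_string.
    if not names:
        return ""
    elif len(names) == 1:
        return names[0]
    return ", ".join(names[0:-1]) + " and " + names[-1]

assert join_names([]) == ""
assert join_names(["Ada"]) == "Ada"
assert join_names(["Ada", "Grace"]) == "Ada and Grace"
assert join_names(["Ada", "Grace", "Edsger"]) == "Ada, Grace and Edsger"
```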
---

**AuthorMixin.get_author_type_string** · test · `dispatch/modules/content/mixins.py`

```python
def get_author_type_string(self):
"""Returns list of authors as a comma-separated string
sorted by author type (with 'and' before last author)."""
authorTypeString = ''
aStringA = ''
aStringB = ''
aStringC = ''
aStringD = ''
authors = dict((k, list(v)) for k, v in groupby(self.authors.all(), lambda a: a.type))
for author in authors:
if author == 'author':
aStringA += 'Written by ' + self.get_author_string(authors['author'])
if author == 'photographer':
aStringB += 'Photos by ' + self.get_author_string(authors['photographer'])
if author == 'illustrator':
aStringC += 'Illustrations by ' + self.get_author_string(authors['illustrator'])
if author == 'videographer':
aStringD += 'Videos by ' + self.get_author_string(authors['videographer'])
if aStringA != '':
authorTypeString += aStringA
if aStringB != '':
authorTypeString += ', ' + aStringB
if aStringC != '':
authorTypeString += ', ' + aStringC
if aStringD != '':
authorTypeString += ', ' + aStringD
return authorTypeString
```

- repo: ubyssey/dispatch · language: python
- url: https://github.com/ubyssey/dispatch/blob/8da6084fe61726f20e9cf675190480cfc45ee764/dispatch/modules/content/mixins.py#L51-L79
- sha: 8da6084fe61726f20e9cf675190480cfc45ee764
---

**ContentSerializer.sanitize_block** · test · `dispatch/api/serializers.py`

```python
def sanitize_block(self, block):
"""Santizes the data for the given block.
If block has a matching embed serializer, use the `to_internal_value` method."""
embed_type = block.get('type', None)
data = block.get('data', {})
serializer = self.serializers.get(embed_type, None)
if serializer is None:
return block
block['data'] = serializer.to_internal_value(data)
return block
```

- repo: ubyssey/dispatch · language: python
- url: https://github.com/ubyssey/dispatch/blob/8da6084fe61726f20e9cf675190480cfc45ee764/dispatch/api/serializers.py#L449-L462
- sha: 8da6084fe61726f20e9cf675190480cfc45ee764
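The method is a small dispatch table keyed on the block's `type`; blocks with no registered serializer pass through untouched. A sketch with an invented serializer (not one of dispatch's real embed serializers):

```python
class ShoutSerializer(object):
    # Invented embed serializer exposing the one method sanitize_block uses.
    def to_internal_value(self, data):
        return {key: value.upper() for key, value in data.items()}

class Content(object):
    serializers = {'quote': ShoutSerializer()}
    sanitize_block = sanitize_block  # reuse the function above as a method

c = Content()
print(c.sanitize_block({'type': 'quote', 'data': {'text': 'hi'}}))
# {'type': 'quote', 'data': {'text': 'HI'}}
print(c.sanitize_block({'type': 'mystery', 'data': {'text': 'hi'}}))
# returned unchanged: no serializer is registered for 'mystery'
```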
---

**ContentSerializer.queue_instance** · test · `dispatch/api/serializers.py`

```python
def queue_instance(self, embed_type, data):
"""Queue an instance to be fetched from the database."""
serializer = self.serializers.get(embed_type, None)
if serializer is None:
return
instance_id = serializer.get_id(data)
if embed_type not in self.ids:
self.ids[embed_type] = []
self.ids[embed_type].append(instance_id)
```

- repo: ubyssey/dispatch · language: python
- url: https://github.com/ubyssey/dispatch/blob/8da6084fe61726f20e9cf675190480cfc45ee764/dispatch/api/serializers.py#L464-L477
- sha: 8da6084fe61726f20e9cf675190480cfc45ee764
---

**ContentSerializer.load_instances** · test · `dispatch/api/serializers.py`

```python
def load_instances(self, embed_type, ids):
"""Fetch all queued instances of type `embed_type`, save results
to `self.instances`"""
serializer = self.serializers.get(embed_type, None)
if serializer is None:
return
self.instances[embed_type] = serializer.fetch(ids)
```

- repo: ubyssey/dispatch · language: python
- url: https://github.com/ubyssey/dispatch/blob/8da6084fe61726f20e9cf675190480cfc45ee764/dispatch/api/serializers.py#L479-L488
- sha: 8da6084fe61726f20e9cf675190480cfc45ee764
---

**ContentSerializer.insert_instance** · test · `dispatch/api/serializers.py`

```python
def insert_instance(self, block):
"""Insert a fetched instance into embed block."""
embed_type = block.get('type', None)
data = block.get('data', {})
serializer = self.serializers.get(embed_type, None)
if serializer is None:
return block
try:
instance_id = serializer.get_id(data)
instance = self.instances[embed_type][instance_id]
data[embed_type] = serializer.serialize(instance)
    except Exception:
        # Lookup failed (missing id or unfetched instance): blank the embed.
        data[embed_type] = None
block['data'] = data
return block
```

- repo: ubyssey/dispatch · language: python
- url: https://github.com/ubyssey/dispatch/blob/8da6084fe61726f20e9cf675190480cfc45ee764/dispatch/api/serializers.py#L490-L509
- sha: 8da6084fe61726f20e9cf675190480cfc45ee764
---

**ContentSerializer.load_data** · test · `dispatch/api/serializers.py`

```python
def load_data(self):
"""Load data in bulk for each embed block."""
for embed_type in self.ids.keys():
self.load_instances(embed_type, self.ids[embed_type])
```

- repo: ubyssey/dispatch · language: python
- url: https://github.com/ubyssey/dispatch/blob/8da6084fe61726f20e9cf675190480cfc45ee764/dispatch/api/serializers.py#L517-L521
- sha: 8da6084fe61726f20e9cf675190480cfc45ee764
---

**ZoneSerializer.validate** · test · `dispatch/api/serializers.py`

```python
def validate(self, data):
"""Perform validation of the widget data"""
from dispatch.theme import ThemeManager
errors = {}
if data.get('widget') is not None:
try:
widget = ThemeManager.Widgets.get(data['widget'])
except WidgetNotFound as e:
errors['widget'] = str(e)
else:
for field in widget.fields:
field_data = data['data'].get(field.name)
if field_data is not None:
try:
field.validate(field_data)
except InvalidField as e:
errors[field.name] = str(e)
elif field.required:
errors[field.name] = '%s is required' % field.label
if errors:
raise ValidationError(errors)
return data
```

- repo: ubyssey/dispatch · language: python
- url: https://github.com/ubyssey/dispatch/blob/8da6084fe61726f20e9cf675190480cfc45ee764/dispatch/api/serializers.py#L889-L918
- sha: 8da6084fe61726f20e9cf675190480cfc45ee764
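The loop collects one message per offending field and raises a single `ValidationError` at the end, so a client sees every problem at once instead of fixing them one request at a time. The same accumulate-then-raise pattern in isolation, with invented field specs in place of dispatch's field classes:

```python
def validate_fields(fields, payload):
    # fields maps name -> (required, predicate); collect every failure.
    errors = {}
    for name, (required, check) in fields.items():
        value = payload.get(name)
        if value is not None:
            if not check(value):
                errors[name] = '%s is invalid' % name
        elif required:
            errors[name] = '%s is required' % name
    if errors:
        raise ValueError(errors)
    return payload

fields = {'title': (True, lambda v: isinstance(v, str)),
          'count': (False, lambda v: isinstance(v, int))}
validate_fields(fields, {'title': 'Hello'})    # passes
# validate_fields(fields, {'count': 'three'})  # ValueError listing both fields
```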
---

**admin** · test · `dispatch/admin/urls.py`

```python
def admin(request):
"""Render HTML entry point for manager app."""
context = {
'api_url': settings.API_URL,
'app_js_bundle': 'manager-%s.js' % dispatch.__version__,
'app_css_bundle': 'manager-%s.css' % dispatch.__version__
}
return render_to_response('manager/index.html', context)
```

- repo: ubyssey/dispatch · language: python
- url: https://github.com/ubyssey/dispatch/blob/8da6084fe61726f20e9cf675190480cfc45ee764/dispatch/admin/urls.py#L8-L16
- sha: 8da6084fe61726f20e9cf675190480cfc45ee764
---

**Template.to_json** · test · `dispatch/theme/templates.py`

```python
def to_json(self):
"""Return JSON representation for this template"""
result = {}
for field in self.fields:
result[field.name] = field.to_json(self.data.get(field.name))
return result
```

- repo: ubyssey/dispatch · language: python
- url: https://github.com/ubyssey/dispatch/blob/8da6084fe61726f20e9cf675190480cfc45ee764/dispatch/theme/templates.py#L14-L21
- sha: 8da6084fe61726f20e9cf675190480cfc45ee764
---

**DispatchModelSerializer.hide_authenticated_fields** · test · `dispatch/api/mixins.py`

```python
def hide_authenticated_fields(self):
"""Hides authenticated_fields if request context is missing or
user is not authenticated"""
authenticated_fields = getattr(self.Meta, 'authenticated_fields', [])
if not self.is_authenticated():
for field in authenticated_fields:
self.fields.pop(field)
```

- repo: ubyssey/dispatch · language: python
- url: https://github.com/ubyssey/dispatch/blob/8da6084fe61726f20e9cf675190480cfc45ee764/dispatch/api/mixins.py#L98-L105
- sha: 8da6084fe61726f20e9cf675190480cfc45ee764
---

**DispatchModelSerializer.exclude_fields** · test · `dispatch/api/mixins.py`

```python
def exclude_fields(self):
"""Excludes fields that are included in the queryparameters"""
request = self.context.get('request')
if request:
exclude = request.query_params.get('exclude', None)
if exclude is None: return
excluded_fields = exclude.split(',')
for field in excluded_fields:
self.fields.pop(field)
```

- repo: ubyssey/dispatch · language: python
- url: https://github.com/ubyssey/dispatch/blob/8da6084fe61726f20e9cf675190480cfc45ee764/dispatch/api/mixins.py#L107-L116
- sha: 8da6084fe61726f20e9cf675190480cfc45ee764
---

**PublishableManager.get** · test · `dispatch/modules/content/managers.py`

```python
def get(self, *args, **kwargs):
"""Get the latest article with the given primary key."""
if 'pk' in kwargs:
kwargs['parent'] = kwargs['pk']
kwargs['head'] = True
del kwargs['pk']
"""If the url requested includes the querystring parameters 'version' and 'preview_id',
get the article with the specified version and preview_id.
Otherwise, get the published version of the article.
"""
if 'request' in kwargs:
request = kwargs['request']
version = request.GET.get('version', None)
preview_id = request.GET.get('preview_id', None)
if (version is not None) and (preview_id is not None):
kwargs['revision_id'] = version
kwargs['preview_id'] = preview_id
del kwargs['is_published']
del kwargs['request']
return super(PublishableManager, self).get(*args, **kwargs)
```

- repo: ubyssey/dispatch · language: python
- url: https://github.com/ubyssey/dispatch/blob/8da6084fe61726f20e9cf675190480cfc45ee764/dispatch/modules/content/managers.py#L6-L31
- sha: 8da6084fe61726f20e9cf675190480cfc45ee764
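The effect is that a plain primary-key lookup is transparently rewritten into a head-revision lookup before the stock `Manager.get` runs. Assuming an `Article` model that uses this manager:

```python
# Article.objects.get(pk=5) is rewritten by the override into
# Article.objects.get(parent=5, head=True):
# "the newest revision whose root article is 5".
article = Article.objects.get(pk=5)
```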
---

**ArticleViewSet.get_queryset** · test · `dispatch/api/views.py`

```python
def get_queryset(self):
"""Optionally restricts the returned articles by filtering against a `topic`
query parameter in the URL."""
# Get base queryset from DispatchPublishableMixin
queryset = self.get_publishable_queryset()
# Optimize queries by prefetching related data
queryset = queryset \
.select_related('featured_image', 'featured_video', 'topic', 'section', 'subsection') \
.prefetch_related(
'tags',
'featured_image__image__authors',
'authors'
)
queryset = queryset.order_by('-updated_at')
q = self.request.query_params.get('q', None)
section = self.request.query_params.get('section', None)
tags = self.request.query_params.getlist('tags', None)
author = self.request.query_params.get('author', None)
if q is not None:
queryset = queryset.filter(headline__icontains=q)
if section is not None:
queryset = queryset.filter(section_id=section)
if tags is not None:
for tag in tags:
queryset = queryset.filter(tags__id=tag)
if author is not None:
queryset = queryset.filter(authors__person_id=author)
return queryset
```

- repo: ubyssey/dispatch · language: python
- url: https://github.com/ubyssey/dispatch/blob/8da6084fe61726f20e9cf675190480cfc45ee764/dispatch/api/views.py#L76-L112
- sha: 8da6084fe61726f20e9cf675190480cfc45ee764
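A Django subtlety in the tag handling: each tag gets its own `.filter()` call, and chained filters on a to-many relation are ANDed across separate joins, so an article must carry every requested tag. The same loop in isolation:

```python
def filter_by_all_tags(queryset, tag_ids):
    # One .filter() per tag ANDs the conditions across separate joins:
    # only articles carrying every tag in tag_ids survive.
    for tag_id in tag_ids:
        queryset = queryset.filter(tags__id=tag_id)
    return queryset

# By contrast, queryset.filter(tags__id__in=tag_ids) would match
# articles carrying ANY one of the tags.
```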
---

**PageViewSet.get_queryset** · test · `dispatch/api/views.py`

```python
def get_queryset(self):
"""Only display unpublished content to authenticated users, filter by
query parameter if present."""
# Get base queryset from DispatchPublishableMixin
queryset = self.get_publishable_queryset()
queryset = queryset.order_by('-updated_at')
# Optionally filter by a query parameter
q = self.request.query_params.get('q')
if q:
queryset = queryset.filter(title__icontains=q)
return queryset
```

- repo: ubyssey/dispatch · language: python
- url: https://github.com/ubyssey/dispatch/blob/8da6084fe61726f20e9cf675190480cfc45ee764/dispatch/api/views.py#L138-L153
- sha: 8da6084fe61726f20e9cf675190480cfc45ee764
---

**NullBooleanField.get_attribute** · test · `dispatch/api/fields.py`

```python
def get_attribute(self, instance):
"""Overrides the default get_attribute method to convert None values to False."""
attr = super(NullBooleanField, self).get_attribute(instance)
return True if attr else False
```

- repo: ubyssey/dispatch · language: python
- url: https://github.com/ubyssey/dispatch/blob/8da6084fe61726f20e9cf675190480cfc45ee764/dispatch/api/fields.py#L16-L20
- sha: 8da6084fe61726f20e9cf675190480cfc45ee764
---

**validate_widget** · test · `dispatch/theme/validators.py`

```python
def validate_widget(widget):
"""Checks that the given widget contains the required fields"""
if not has_valid_id(widget):
raise InvalidWidget("%s must contain a valid 'id' attribute" % widget.__name__)
if not has_valid_name(widget):
raise InvalidWidget("%s must contain a valid 'name' attribute" % widget.__name__)
if not has_valid_template(widget):
raise InvalidWidget("%s must contain a valid 'template' attribute" % widget.__name__)
if not hasattr(widget, 'zones') or not widget.zones:
raise InvalidWidget("%s must be compatible with at least one zone" % widget.__name__)
```

- repo: ubyssey/dispatch · language: python
- url: https://github.com/ubyssey/dispatch/blob/8da6084fe61726f20e9cf675190480cfc45ee764/dispatch/theme/validators.py#L20-L33
- sha: 8da6084fe61726f20e9cf675190480cfc45ee764
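A widget class passes only if it carries `id`, `name`, `template`, and a non-empty `zones`. A minimal conforming sketch, assuming the unshown `has_valid_*` helpers merely check that each attribute exists and is well-formed (real dispatch widgets reference zone classes rather than plain strings):

```python
class SidebarWidget(object):
    # Invented example widget with the attributes validate_widget checks.
    id = 'sidebar-widget'
    name = 'Sidebar Widget'
    template = 'widgets/sidebar.html'
    zones = ['sidebar']  # must be compatible with at least one zone

validate_widget(SidebarWidget)  # raises InvalidWidget if anything is missing
```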
---

**validate_zone** · test · `dispatch/theme/validators.py`

```python
def validate_zone(zone):
"""Checks that the given zone contains the required fields"""
if not has_valid_id(zone):
raise InvalidZone("%s must contain a valid 'id' attribute" % zone.__name__)
if not has_valid_name(zone):
raise InvalidZone("%s must contain a valid 'name' attribute" % zone.__name__)
```

- repo: ubyssey/dispatch · language: python
- url: https://github.com/ubyssey/dispatch/blob/8da6084fe61726f20e9cf675190480cfc45ee764/dispatch/theme/validators.py#L35-L42
- sha: 8da6084fe61726f20e9cf675190480cfc45ee764
---

**is_valid_uuid** · test · `dispatch/theme/validators.py`

```python
def is_valid_uuid(id):
"""Return True if id is a valid UUID, False otherwise."""
if not isinstance(id, basestring):
return False
try:
val = UUID(id, version=4)
except ValueError:
return False
return True
```

- repo: ubyssey/dispatch · language: python
- url: https://github.com/ubyssey/dispatch/blob/8da6084fe61726f20e9cf675190480cfc45ee764/dispatch/theme/validators.py#L49-L59
- sha: 8da6084fe61726f20e9cf675190480cfc45ee764
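`basestring` pins this to Python 2 (on Python 3 the equivalent check is `isinstance(id, str)`). Note also that `UUID(id, version=4)` coerces the version bits rather than verifying them, so any well-formed UUID string passes, not just version 4:

```python
assert is_valid_uuid('4d36e96e-e325-11ce-bfc1-08002be10318')  # a v1 string still passes
assert not is_valid_uuid('not-a-uuid')   # ValueError inside UUID() -> False
assert not is_valid_uuid(42)             # non-strings are rejected up front
```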
---

**User.get_permissions** · test · `dispatch/modules/auth/models.py`

```python
def get_permissions(self):
"""Returns the user's permissions."""
permissions = ''
if self.groups.filter(name='Admin').exists() or self.is_superuser:
permissions = 'admin'
return permissions
```

- repo: ubyssey/dispatch · language: python
- url: https://github.com/ubyssey/dispatch/blob/8da6084fe61726f20e9cf675190480cfc45ee764/dispatch/modules/auth/models.py#L48-L55
- sha: 8da6084fe61726f20e9cf675190480cfc45ee764
---

**User.modify_permissions** · test · `dispatch/modules/auth/models.py`

```python
def modify_permissions(self, permissions):
"""Modify the user's permissions."""
group = Group.objects.get(name='Admin')
if permissions == 'admin':
self.groups.add(group)
else:
self.groups.remove(group)
|
def modify_permissions(self, permissions):
"""Modify the user's permissions."""
group = Group.objects.get(name='Admin')
if permissions == 'admin':
self.groups.add(group)
else:
self.groups.remove(group)
|
[
"Modify",
"the",
"user",
"s",
"permissions",
"."
] |
ubyssey/dispatch
|
python
|
https://github.com/ubyssey/dispatch/blob/8da6084fe61726f20e9cf675190480cfc45ee764/dispatch/modules/auth/models.py#L57-L65
|
[
"def",
"modify_permissions",
"(",
"self",
",",
"permissions",
")",
":",
"group",
"=",
"Group",
".",
"objects",
".",
"get",
"(",
"name",
"=",
"'Admin'",
")",
"if",
"permissions",
"==",
"'admin'",
":",
"self",
".",
"groups",
".",
"add",
"(",
"group",
")",
"else",
":",
"self",
".",
"groups",
".",
"remove",
"(",
"group",
")"
] |
8da6084fe61726f20e9cf675190480cfc45ee764
|
test
|
AuthorValidator
|
Raise a ValidationError if data does not match the author format.
|
dispatch/api/validators.py
|
def AuthorValidator(data):
"""Raise a ValidationError if data does not match the author format."""
if not isinstance(data, list):
# Convert single instance to a list
data = [data]
for author in data:
if 'person' not in author:
raise ValidationError('An author must contain a person.')
if 'type' in author and not isinstance(author['type'], basestring):
# If type is defined, it should be a string
raise ValidationError('The author type must be a string.')
|
def AuthorValidator(data):
"""Raise a ValidationError if data does not match the author format."""
if not isinstance(data, list):
# Convert single instance to a list
data = [data]
for author in data:
if 'person' not in author:
raise ValidationError('An author must contain a person.')
if 'type' in author and not isinstance(author['type'], basestring):
# If type is defined, it should be a string
raise ValidationError('The author type must be a string.')
|
[
"Raise",
"a",
"ValidationError",
"if",
"data",
"does",
"not",
"match",
"the",
"author",
"format",
"."
] |
ubyssey/dispatch
|
python
|
https://github.com/ubyssey/dispatch/blob/8da6084fe61726f20e9cf675190480cfc45ee764/dispatch/api/validators.py#L89-L100
|
[
"def",
"AuthorValidator",
"(",
"data",
")",
":",
"if",
"not",
"isinstance",
"(",
"data",
",",
"list",
")",
":",
"# Convert single instance to a list",
"data",
"=",
"[",
"data",
"]",
"for",
"author",
"in",
"data",
":",
"if",
"'person'",
"not",
"in",
"author",
":",
"raise",
"ValidationError",
"(",
"'An author must contain a person.'",
")",
"if",
"'type'",
"in",
"author",
"and",
"not",
"isinstance",
"(",
"author",
"[",
"'type'",
"]",
",",
"basestring",
")",
":",
"# If type is defined, it should be a string",
"raise",
"ValidationError",
"(",
"'The author type must be a string.'",
")"
] |
8da6084fe61726f20e9cf675190480cfc45ee764
|
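The snippet above relies on a common normalize-to-list idiom: wrap a single dict in a list so one validation loop handles both input shapes. A standalone sketch (the ValidationError class here is a stand-in for the DRF exception the module actually raises):

class ValidationError(Exception):
    pass

def validate_authors(data):
    if not isinstance(data, list):
        data = [data]  # wrap a single author dict in a list
    for author in data:
        if 'person' not in author:
            raise ValidationError('An author must contain a person.')
    return data

print(validate_authors({'person': 1}))                   # [{'person': 1}]
print(validate_authors([{'person': 1}, {'person': 2}]))  # both validated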
test
|
Zone.save
|
Save widget data for this zone.
|
dispatch/theme/widgets.py
|
def save(self, validated_data):
"""Save widget data for this zone."""
(zone, created) = ZoneModel.objects.get_or_create(zone_id=self.id)
zone.widget_id = validated_data['widget']
zone.data = validated_data['data']
# Call widget before-save hook on nested widgets
for key in list(zone.data.keys()):
if isinstance(zone.data[key], dict) and ('id' in zone.data[key].keys()) and ('data' in zone.data[key].keys()):
zone.data[key]['data'] = self.before_save(zone.data[key]['id'], zone.data[key]['data'])
# Call widget before-save hook
zone.data = self.before_save(zone.widget_id, zone.data)
return zone.save()
|
def save(self, validated_data):
"""Save widget data for this zone."""
(zone, created) = ZoneModel.objects.get_or_create(zone_id=self.id)
zone.widget_id = validated_data['widget']
zone.data = validated_data['data']
# Call widget before-save hook on nested widgets
for key in list(zone.data.keys()):
if isinstance(zone.data[key], dict) and ('id' in zone.data[key].keys()) and ('data' in zone.data[key].keys()):
zone.data[key]['data'] = self.before_save(zone.data[key]['id'], zone.data[key]['data'])
# Call widget before-save hook
zone.data = self.before_save(zone.widget_id, zone.data)
return zone.save()
|
[
"Save",
"widget",
"data",
"for",
"this",
"zone",
"."
] |
ubyssey/dispatch
|
python
|
https://github.com/ubyssey/dispatch/blob/8da6084fe61726f20e9cf675190480cfc45ee764/dispatch/theme/widgets.py#L76-L92
|
[
"def",
"save",
"(",
"self",
",",
"validated_data",
")",
":",
"(",
"zone",
",",
"created",
")",
"=",
"ZoneModel",
".",
"objects",
".",
"get_or_create",
"(",
"zone_id",
"=",
"self",
".",
"id",
")",
"zone",
".",
"widget_id",
"=",
"validated_data",
"[",
"'widget'",
"]",
"zone",
".",
"data",
"=",
"validated_data",
"[",
"'data'",
"]",
"# Call widget before-save hook on nested widgets",
"for",
"key",
"in",
"list",
"(",
"zone",
".",
"data",
".",
"keys",
"(",
")",
")",
":",
"if",
"isinstance",
"(",
"zone",
".",
"data",
"[",
"key",
"]",
",",
"dict",
")",
"and",
"(",
"'id'",
"in",
"zone",
".",
"data",
"[",
"key",
"]",
".",
"keys",
"(",
")",
")",
"and",
"(",
"'data'",
"in",
"zone",
".",
"data",
"[",
"key",
"]",
".",
"keys",
"(",
")",
")",
":",
"zone",
".",
"data",
"[",
"key",
"]",
"[",
"'data'",
"]",
"=",
"self",
".",
"before_save",
"(",
"zone",
".",
"data",
"[",
"key",
"]",
"[",
"'id'",
"]",
",",
"zone",
".",
"data",
"[",
"key",
"]",
"[",
"'data'",
"]",
")",
"# Call widget before-save hook",
"zone",
".",
"data",
"=",
"self",
".",
"before_save",
"(",
"zone",
".",
"widget_id",
",",
"zone",
".",
"data",
")",
"return",
"zone",
".",
"save",
"(",
")"
] |
8da6084fe61726f20e9cf675190480cfc45ee764
|
test
|
Widget.get_data
|
Returns data from each field.
|
dispatch/theme/widgets.py
|
def get_data(self):
"""Returns data from each field."""
result = {}
for field in self.fields:
result[field.name] = self.data.get(field.name)
return result
|
def get_data(self):
"""Returns data from each field."""
result = {}
for field in self.fields:
result[field.name] = self.data.get(field.name)
return result
|
[
"Returns",
"data",
"from",
"each",
"field",
"."
] |
ubyssey/dispatch
|
python
|
https://github.com/ubyssey/dispatch/blob/8da6084fe61726f20e9cf675190480cfc45ee764/dispatch/theme/widgets.py#L114-L121
|
[
"def",
"get_data",
"(",
"self",
")",
":",
"result",
"=",
"{",
"}",
"for",
"field",
"in",
"self",
".",
"fields",
":",
"result",
"[",
"field",
".",
"name",
"]",
"=",
"self",
".",
"data",
".",
"get",
"(",
"field",
".",
"name",
")",
"return",
"result"
] |
8da6084fe61726f20e9cf675190480cfc45ee764
|
test
|
Widget.prepare_data
|
Prepare widget data for template.
|
dispatch/theme/widgets.py
|
def prepare_data(self):
"""Prepare widget data for template."""
result = {}
for field in self.fields:
data = self.data.get(field.name)
result[field.name] = field.prepare_data(data)
return result
|
def prepare_data(self):
"""Prepare widget data for template."""
result = {}
for field in self.fields:
data = self.data.get(field.name)
result[field.name] = field.prepare_data(data)
return result
|
[
"Prepare",
"widget",
"data",
"for",
"template",
"."
] |
ubyssey/dispatch
|
python
|
https://github.com/ubyssey/dispatch/blob/8da6084fe61726f20e9cf675190480cfc45ee764/dispatch/theme/widgets.py#L132-L140
|
[
"def",
"prepare_data",
"(",
"self",
")",
":",
"result",
"=",
"{",
"}",
"for",
"field",
"in",
"self",
".",
"fields",
":",
"data",
"=",
"self",
".",
"data",
".",
"get",
"(",
"field",
".",
"name",
")",
"result",
"[",
"field",
".",
"name",
"]",
"=",
"field",
".",
"prepare_data",
"(",
"data",
")",
"return",
"result"
] |
8da6084fe61726f20e9cf675190480cfc45ee764
|
test
|
Widget.render
|
Renders the widget as HTML.
|
dispatch/theme/widgets.py
|
def render(self, data=None, add_context=None):
"""Renders the widget as HTML."""
template = loader.get_template(self.template)
if not data:
data = self.context(self.prepare_data())
if add_context is not None:
for key, value in add_context.iteritems():
if key in self.accepted_keywords:
data[key] = value
return template.render(data)
|
def render(self, data=None, add_context=None):
"""Renders the widget as HTML."""
template = loader.get_template(self.template)
if not data:
data = self.context(self.prepare_data())
if add_context is not None:
for key, value in add_context.iteritems():
if key in self.accepted_keywords:
data[key] = value
return template.render(data)
|
[
"Renders",
"the",
"widget",
"as",
"HTML",
"."
] |
ubyssey/dispatch
|
python
|
https://github.com/ubyssey/dispatch/blob/8da6084fe61726f20e9cf675190480cfc45ee764/dispatch/theme/widgets.py#L142-L154
|
[
"def",
"render",
"(",
"self",
",",
"data",
"=",
"None",
",",
"add_context",
"=",
"None",
")",
":",
"template",
"=",
"loader",
".",
"get_template",
"(",
"self",
".",
"template",
")",
"if",
"not",
"data",
":",
"data",
"=",
"self",
".",
"context",
"(",
"self",
".",
"prepare_data",
"(",
")",
")",
"if",
"add_context",
"is",
"not",
"None",
":",
"for",
"key",
",",
"value",
"in",
"add_context",
".",
"iteritems",
"(",
")",
":",
"if",
"key",
"in",
"self",
".",
"accepted_keywords",
":",
"data",
"[",
"key",
"]",
"=",
"value",
"return",
"template",
".",
"render",
"(",
"data",
")"
] |
8da6084fe61726f20e9cf675190480cfc45ee764
|
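Two details in render() deserve a note: iteritems() is Python 2 only (items() on Python 3), and add_context is filtered against a whitelist so arbitrary template-tag keyword arguments cannot clobber the widget context. A minimal sketch of that filtering step, with illustrative names:

accepted_keywords = ('article', 'section')

def merge_context(data, add_context):
    # Only whitelisted keys make it into the template context.
    for key, value in add_context.items():
        if key in accepted_keywords:
            data[key] = value
    return data

ctx = merge_context({'title': 'Hi'}, {'article': 42, 'evil': 'x'})
print(ctx)  # {'title': 'Hi', 'article': 42} -- 'evil' was dropped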
test
|
content_to_html
|
Returns article/page content as HTML
|
dispatch/modules/content/render.py
|
def content_to_html(content, article_id):
"""Returns artilce/page content as HTML"""
def render_node(html, node, index):
"""Renders node as HTML"""
if node['type'] == 'paragraph':
return html + '<p>%s</p>' % node['data']
else:
if node['type'] == 'ad':
id = 'div-gpt-ad-1443288719995-' + str(10 + index) + '-' + str(article_id)
dfp_type = 'Intra_Article_' + str(index + 1)
size = 'banner'
if node['data'] == 'mobile':
size = 'box'
newString = '<div class="o-article-embed__advertisement"><div class="o-advertisement o-advertisment--banner o-advertisement--center"><div class="adslot" id="' + id + '" data-size="' + size + '" data-dfp="' + dfp_type + '"></div></div></div>'
return html + '<div class="o-article-embed o-article-embed--advertisement">%s</div>\n' % newString
try:
if node['type'] == 'poll':
node['type'] = 'widget'
node['data']['data'] = node['data']
return html + embeds.render(node['type'], node['data'])
except EmbedException:
return html
html = ''
index = 0
for node in content:
html = render_node(html, node, index)
if (node['type'] == 'ad'):
index += 1
# return mark_safe(reduce(render_node, content, ''))
return mark_safe(html)
|
def content_to_html(content, article_id):
"""Returns artilce/page content as HTML"""
def render_node(html, node, index):
"""Renders node as HTML"""
if node['type'] == 'paragraph':
return html + '<p>%s</p>' % node['data']
else:
if node['type'] == 'ad':
id = 'div-gpt-ad-1443288719995-' + str(10 + index) + '-' + str(article_id)
dfp_type = 'Intra_Article_' + str(index + 1)
size = 'banner'
if node['data'] == 'mobile':
size = 'box'
newString = '<div class="o-article-embed__advertisement"><div class="o-advertisement o-advertisment--banner o-advertisement--center"><div class="adslot" id="' + id + '" data-size="' + size + '" data-dfp="' + dfp_type + '"></div></div></div>'
return html + '<div class="o-article-embed o-article-embed--advertisement">%s</div>\n' % newString
try:
if node['type'] == 'poll':
node['type'] = 'widget'
node['data']['data'] = node['data']
return html + embeds.render(node['type'], node['data'])
except EmbedException:
return html
html = ''
index = 0
for node in content:
html = render_node(html, node, index)
if (node['type'] == 'ad'):
index += 1
# return mark_safe(reduce(render_node, content, ''))
return mark_safe(html)
|
[
"Returns",
"artilce",
"/",
"page",
"content",
"as",
"HTML"
] |
ubyssey/dispatch
|
python
|
https://github.com/ubyssey/dispatch/blob/8da6084fe61726f20e9cf675190480cfc45ee764/dispatch/modules/content/render.py#L5-L38
|
[
"def",
"content_to_html",
"(",
"content",
",",
"article_id",
")",
":",
"def",
"render_node",
"(",
"html",
",",
"node",
",",
"index",
")",
":",
"\"\"\"Renders node as HTML\"\"\"",
"if",
"node",
"[",
"'type'",
"]",
"==",
"'paragraph'",
":",
"return",
"html",
"+",
"'<p>%s</p>'",
"%",
"node",
"[",
"'data'",
"]",
"else",
":",
"if",
"node",
"[",
"'type'",
"]",
"==",
"'ad'",
":",
"id",
"=",
"'div-gpt-ad-1443288719995-'",
"+",
"str",
"(",
"10",
"+",
"index",
")",
"+",
"'-'",
"+",
"str",
"(",
"article_id",
")",
"dfp_type",
"=",
"'Intra_Article_'",
"+",
"str",
"(",
"index",
"+",
"1",
")",
"size",
"=",
"'banner'",
"if",
"node",
"[",
"'data'",
"]",
"==",
"'mobile'",
":",
"size",
"=",
"'box'",
"newString",
"=",
"'<div class=\"o-article-embed__advertisement\"><div class=\"o-advertisement o-advertisment--banner o-advertisement--center\"><div class=\"adslot\" id=\"'",
"+",
"id",
"+",
"'\" data-size=\"'",
"+",
"size",
"+",
"'\" data-dfp=\"'",
"+",
"dfp_type",
"+",
"'\"></div></div></div>'",
"return",
"html",
"+",
"'<div class=\"o-article-embed o-article-embed--advertisement\">%s</div>\\n'",
"%",
"newString",
"try",
":",
"if",
"node",
"[",
"'type'",
"]",
"==",
"'poll'",
":",
"node",
"[",
"'type'",
"]",
"=",
"'widget'",
"node",
"[",
"'data'",
"]",
"[",
"'data'",
"]",
"=",
"node",
"[",
"'data'",
"]",
"return",
"html",
"+",
"embeds",
".",
"render",
"(",
"node",
"[",
"'type'",
"]",
",",
"node",
"[",
"'data'",
"]",
")",
"except",
"EmbedException",
":",
"return",
"html",
"html",
"=",
"''",
"index",
"=",
"0",
"for",
"node",
"in",
"content",
":",
"html",
"=",
"render_node",
"(",
"html",
",",
"node",
",",
"index",
")",
"if",
"(",
"node",
"[",
"'type'",
"]",
"==",
"'ad'",
")",
":",
"index",
"+=",
"1",
"# return mark_safe(reduce(render_node, content, ''))",
"return",
"mark_safe",
"(",
"html",
")"
] |
8da6084fe61726f20e9cf675190480cfc45ee764
|
test
|
content_to_json
|
Returns article/page content as JSON
|
dispatch/modules/content/render.py
|
def content_to_json(content):
"""Returns article/page content as JSON"""
def render_node(node):
"""Renders node as JSON"""
if node['type'] == 'paragraph':
return node
else:
return {
'type': node['type'],
'data': embeds.to_json(node['type'], node['data'])
}
return map(render_node, content)
|
def content_to_json(content):
"""Returns article/page content as JSON"""
def render_node(node):
"""Renders node as JSON"""
if node['type'] == 'paragraph':
return node
else:
return {
'type': node['type'],
'data': embeds.to_json(node['type'], node['data'])
}
return map(render_node, content)
|
[
"Returns",
"article",
"/",
"page",
"content",
"as",
"JSON"
] |
ubyssey/dispatch
|
python
|
https://github.com/ubyssey/dispatch/blob/8da6084fe61726f20e9cf675190480cfc45ee764/dispatch/modules/content/render.py#L40-L54
|
[
"def",
"content_to_json",
"(",
"content",
")",
":",
"def",
"render_node",
"(",
"node",
")",
":",
"\"\"\"Renders node as JSON\"\"\"",
"if",
"node",
"[",
"'type'",
"]",
"==",
"'paragraph'",
":",
"return",
"node",
"else",
":",
"return",
"{",
"'type'",
":",
"node",
"[",
"'type'",
"]",
",",
"'data'",
":",
"embeds",
".",
"to_json",
"(",
"node",
"[",
"'type'",
"]",
",",
"node",
"[",
"'data'",
"]",
")",
"}",
"return",
"map",
"(",
"render_node",
",",
"content",
")"
] |
8da6084fe61726f20e9cf675190480cfc45ee764
|
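One portability caveat: on Python 3, map() returns a lazy iterator rather than a list, so callers that serialize the result directly would need list(map(...)) or a comprehension. A sketch of the same transform with embeds.to_json replaced by an inline stand-in:

def render_node(node):
    if node['type'] == 'paragraph':
        return node
    return {'type': node['type'], 'data': {'rendered': node['data']}}

content = [{'type': 'paragraph', 'data': 'hello'},
           {'type': 'image', 'data': 7}]
print([render_node(n) for n in content])  # eager list, JSON-serializable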
test
|
BaseIntegration.get_settings
|
Retrieves the settings for this integration as a dictionary.
Removes all hidden fields if show_hidden=False
|
dispatch/modules/integrations/integrations.py
|
def get_settings(cls, show_hidden=False):
"""
Retrieves the settings for this integration as a dictionary.
Removes all hidden fields if show_hidden=False
"""
settings = Integration.objects.get_settings(cls.ID)
if not show_hidden:
for field in cls.HIDDEN_FIELDS:
settings.pop(field, None)
return settings
|
def get_settings(cls, show_hidden=False):
"""
Retrieves the settings for this integration as a dictionary.
Removes all hidden fields if show_hidden=False
"""
settings = Integration.objects.get_settings(cls.ID)
if not show_hidden:
for field in cls.HIDDEN_FIELDS:
settings.pop(field, None)
return settings
|
[
"Retrieves",
"the",
"settings",
"for",
"this",
"integration",
"as",
"a",
"dictionary",
"."
] |
ubyssey/dispatch
|
python
|
https://github.com/ubyssey/dispatch/blob/8da6084fe61726f20e9cf675190480cfc45ee764/dispatch/modules/integrations/integrations.py#L44-L56
|
[
"def",
"get_settings",
"(",
"cls",
",",
"show_hidden",
"=",
"False",
")",
":",
"settings",
"=",
"Integration",
".",
"objects",
".",
"get_settings",
"(",
"cls",
".",
"ID",
")",
"if",
"not",
"show_hidden",
":",
"for",
"field",
"in",
"cls",
".",
"HIDDEN_FIELDS",
":",
"settings",
".",
"pop",
"(",
"field",
",",
"None",
")",
"return",
"settings"
] |
8da6084fe61726f20e9cf675190480cfc45ee764
|
test
|
FacebookInstantArticlesIntegration.callback
|
Receive OAuth callback request from Facebook.
|
dispatch/modules/integrations/integrations.py
|
def callback(cls, user, query):
"""Receive OAuth callback request from Facebook."""
# Get settings for this integration
settings = cls.get_settings(show_hidden=True)
fb = Facebook()
payload = {
'client_id': settings['client_id'],
'client_secret': settings['client_secret'],
'code': query['code'],
'redirect_uri': cls.REDIRECT_URI
}
try:
# Authenticate with Facebook
fb.get_access_token(payload)
# Fetch pages belonging to authenticated user
pages = fb.list_pages('me')
except FacebookAPIError, e:
raise IntegrationCallbackError(e.message)
return {
'pages': pages
}
|
def callback(cls, user, query):
"""Receive OAuth callback request from Facebook."""
# Get settings for this integration
settings = cls.get_settings(show_hidden=True)
fb = Facebook()
payload = {
'client_id': settings['client_id'],
'client_secret': settings['client_secret'],
'code': query['code'],
'redirect_uri': cls.REDIRECT_URI
}
try:
# Authenticate with Facebook
fb.get_access_token(payload)
# Fetch pages belonging to authenticated user
pages = fb.list_pages('me')
except FacebookAPIError, e:
raise IntegrationCallbackError(e.message)
return {
'pages': pages
}
|
[
"Receive",
"OAuth",
"callback",
"request",
"from",
"Facebook",
"."
] |
ubyssey/dispatch
|
python
|
https://github.com/ubyssey/dispatch/blob/8da6084fe61726f20e9cf675190480cfc45ee764/dispatch/modules/integrations/integrations.py#L91-L119
|
[
"def",
"callback",
"(",
"cls",
",",
"user",
",",
"query",
")",
":",
"# Get settings for this integration",
"settings",
"=",
"cls",
".",
"get_settings",
"(",
"show_hidden",
"=",
"True",
")",
"fb",
"=",
"Facebook",
"(",
")",
"payload",
"=",
"{",
"'client_id'",
":",
"settings",
"[",
"'client_id'",
"]",
",",
"'client_secret'",
":",
"settings",
"[",
"'client_secret'",
"]",
",",
"'code'",
":",
"query",
"[",
"'code'",
"]",
",",
"'redirect_uri'",
":",
"cls",
".",
"REDIRECT_URI",
"}",
"try",
":",
"# Authenticate with Facebook",
"fb",
".",
"get_access_token",
"(",
"payload",
")",
"# Fetch pages belonging to authenticated user",
"pages",
"=",
"fb",
".",
"list_pages",
"(",
"'me'",
")",
"except",
"FacebookAPIError",
",",
"e",
":",
"raise",
"IntegrationCallbackError",
"(",
"e",
".",
"message",
")",
"return",
"{",
"'pages'",
":",
"pages",
"}"
] |
8da6084fe61726f20e9cf675190480cfc45ee764
|
test
|
IntegrationManager.get_settings
|
Return settings for given integration as a dictionary.
|
dispatch/modules/integrations/managers.py
|
def get_settings(self, integration_id):
"""Return settings for given integration as a dictionary."""
try:
integration = self.get(integration_id=integration_id)
return json.loads(integration.settings)
except (self.model.DoesNotExist, ValueError):
return {}
|
def get_settings(self, integration_id):
"""Return settings for given integration as a dictionary."""
try:
integration = self.get(integration_id=integration_id)
return json.loads(integration.settings)
except (self.model.DoesNotExist, ValueError):
return {}
|
[
"Return",
"settings",
"for",
"given",
"integration",
"as",
"a",
"dictionary",
"."
] |
ubyssey/dispatch
|
python
|
https://github.com/ubyssey/dispatch/blob/8da6084fe61726f20e9cf675190480cfc45ee764/dispatch/modules/integrations/managers.py#L7-L14
|
[
"def",
"get_settings",
"(",
"self",
",",
"integration_id",
")",
":",
"try",
":",
"integration",
"=",
"self",
".",
"get",
"(",
"integration_id",
"=",
"integration_id",
")",
"return",
"json",
".",
"loads",
"(",
"integration",
".",
"settings",
")",
"except",
"(",
"self",
".",
"model",
".",
"DoesNotExist",
",",
"ValueError",
")",
":",
"return",
"{",
"}"
] |
8da6084fe61726f20e9cf675190480cfc45ee764
|
test
|
IntegrationManager.update_settings
|
Updates settings for given integration.
|
dispatch/modules/integrations/managers.py
|
def update_settings(self, integration_id, settings):
"""Updates settings for given integration."""
(integration, created) = self.get_or_create(integration_id=integration_id)
try:
current_settings = json.loads(integration.settings)
except ValueError:
current_settings = {}
current_settings.update(settings)
integration.settings = json.dumps(current_settings)
integration.save()
|
def update_settings(self, integration_id, settings):
"""Updates settings for given integration."""
(integration, created) = self.get_or_create(integration_id=integration_id)
try:
current_settings = json.loads(integration.settings)
except ValueError:
current_settings = {}
current_settings.update(settings)
integration.settings = json.dumps(current_settings)
integration.save()
|
[
"Updates",
"settings",
"for",
"given",
"integration",
"."
] |
ubyssey/dispatch
|
python
|
https://github.com/ubyssey/dispatch/blob/8da6084fe61726f20e9cf675190480cfc45ee764/dispatch/modules/integrations/managers.py#L16-L30
|
[
"def",
"update_settings",
"(",
"self",
",",
"integration_id",
",",
"settings",
")",
":",
"(",
"integration",
",",
"created",
")",
"=",
"self",
".",
"get_or_create",
"(",
"integration_id",
"=",
"integration_id",
")",
"try",
":",
"current_settings",
"=",
"json",
".",
"loads",
"(",
"integration",
".",
"settings",
")",
"except",
"ValueError",
":",
"current_settings",
"=",
"{",
"}",
"current_settings",
".",
"update",
"(",
"settings",
")",
"integration",
".",
"settings",
"=",
"json",
".",
"dumps",
"(",
"current_settings",
")",
"integration",
".",
"save",
"(",
")"
] |
8da6084fe61726f20e9cf675190480cfc45ee764
|
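The method above is a read-modify-write merge over a JSON text column: parse the stored blob (falling back to an empty dict on bad data), overlay the new keys, and serialize back. A standalone sketch of just that merge:

import json

def merge_settings(stored, updates):
    try:
        current = json.loads(stored)
    except ValueError:  # json.JSONDecodeError subclasses ValueError
        current = {}
    current.update(updates)
    return json.dumps(current)

print(merge_settings('{"a": 1}', {'b': 2}))  # {"a": 1, "b": 2}
print(merge_settings('not json', {'b': 2}))  # {"b": 2}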
test
|
signup
|
Handles requests to the user signup page.
|
dispatch/admin/views.py
|
def signup(request, uuid=None):
"""Handles requests to the user signup page."""
invite = get_object_or_404(Invite.objects.all(), id=uuid)
if invite.expiration_date < timezone.now():
invite.delete()
raise Http404('This page does not exist.')
if request.method == 'POST':
form = SignUpForm(request.POST)
if form.is_valid():
user = form.save(commit=False)
user.email = invite.email
user.person = invite.person
user.save()
if invite.permissions == 'admin':
group = Group.objects.get(name='Admin')
user.groups.add(group)
invite.delete()
return redirect('dispatch-admin')
else:
return render(
request,
'registration/signup.html',
{
'form': form,
'email': invite.email
}
)
else:
form = SignUpForm()
return render(
request,
'registration/signup.html',
{
'form': form,
'email': invite.email
}
)
|
def signup(request, uuid=None):
"""Handles requests to the user signup page."""
invite = get_object_or_404(Invite.objects.all(), id=uuid)
if invite.expiration_date < timezone.now():
invite.delete()
raise Http404('This page does not exist.')
if request.method == 'POST':
form = SignUpForm(request.POST)
if form.is_valid():
user = form.save(commit=False)
user.email = invite.email
user.person = invite.person
user.save()
if invite.permissions == 'admin':
group = Group.objects.get(name='Admin')
user.groups.add(group)
invite.delete()
return redirect('dispatch-admin')
else:
return render(
request,
'registration/signup.html',
{
'form': form,
'email': invite.email
}
)
else:
form = SignUpForm()
return render(
request,
'registration/signup.html',
{
'form': form,
'email': invite.email
}
)
|
[
"Handles",
"requests",
"to",
"the",
"user",
"signup",
"page",
"."
] |
ubyssey/dispatch
|
python
|
https://github.com/ubyssey/dispatch/blob/8da6084fe61726f20e9cf675190480cfc45ee764/dispatch/admin/views.py#L14-L60
|
[
"def",
"signup",
"(",
"request",
",",
"uuid",
"=",
"None",
")",
":",
"invite",
"=",
"get_object_or_404",
"(",
"Invite",
".",
"objects",
".",
"all",
"(",
")",
",",
"id",
"=",
"uuid",
")",
"if",
"invite",
".",
"expiration_date",
"<",
"timezone",
".",
"now",
"(",
")",
":",
"invite",
".",
"delete",
"(",
")",
"raise",
"Http404",
"(",
"'This page does not exist.'",
")",
"if",
"request",
".",
"method",
"==",
"'POST'",
":",
"form",
"=",
"SignUpForm",
"(",
"request",
".",
"POST",
")",
"if",
"form",
".",
"is_valid",
"(",
")",
":",
"user",
"=",
"form",
".",
"save",
"(",
"commit",
"=",
"False",
")",
"user",
".",
"email",
"=",
"invite",
".",
"email",
"user",
".",
"person",
"=",
"invite",
".",
"person",
"user",
".",
"save",
"(",
")",
"if",
"invite",
".",
"permissions",
"==",
"'admin'",
":",
"group",
"=",
"Group",
".",
"objects",
".",
"get",
"(",
"name",
"=",
"'Admin'",
")",
"user",
".",
"groups",
".",
"add",
"(",
"group",
")",
"invite",
".",
"delete",
"(",
")",
"return",
"redirect",
"(",
"'dispatch-admin'",
")",
"else",
":",
"return",
"render",
"(",
"request",
",",
"'registration/signup.html'",
",",
"{",
"'form'",
":",
"form",
",",
"'email'",
":",
"invite",
".",
"email",
"}",
")",
"else",
":",
"form",
"=",
"SignUpForm",
"(",
")",
"return",
"render",
"(",
"request",
",",
"'registration/signup.html'",
",",
"{",
"'form'",
":",
"form",
",",
"'email'",
":",
"invite",
".",
"email",
"}",
")"
] |
8da6084fe61726f20e9cf675190480cfc45ee764
|
test
|
maptag
|
Returns the HTML produced from enclosing each item in
`contents` in a tag of type `tagname`
|
dispatch/modules/content/embeds.py
|
def maptag(tagname, contents):
"""Returns the HTML produced from enclosing each item in
`contents` in a tag of type `tagname`"""
return u''.join(tag(tagname, item) for item in contents)
|
def maptag(tagname, contents):
"""Returns the HTML produced from enclosing each item in
`contents` in a tag of type `tagname`"""
return u''.join(tag(tagname, item) for item in contents)
|
[
"Returns",
"the",
"HTML",
"produced",
"from",
"enclosing",
"each",
"item",
"in",
"contents",
"in",
"a",
"tag",
"of",
"type",
"tagname"
] |
ubyssey/dispatch
|
python
|
https://github.com/ubyssey/dispatch/blob/8da6084fe61726f20e9cf675190480cfc45ee764/dispatch/modules/content/embeds.py#L31-L34
|
[
"def",
"maptag",
"(",
"tagname",
",",
"contents",
")",
":",
"return",
"u''",
".",
"join",
"(",
"tag",
"(",
"tagname",
",",
"item",
")",
"for",
"item",
"in",
"contents",
")"
] |
8da6084fe61726f20e9cf675190480cfc45ee764
|
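A quick usage sketch, assuming a tag() helper that wraps content in an HTML element (the same module defines one; this stand-in only illustrates the shape):

def tag(tagname, content):
    return u'<{0}>{1}</{0}>'.format(tagname, content)

def maptag(tagname, contents):
    return u''.join(tag(tagname, item) for item in contents)

print(maptag('li', ['one', 'two']))  # <li>one</li><li>two</li>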
test
|
zone
|
Renders the contents of the zone with given zone_id.
|
dispatch/templatetags/dispatch_tags.py
|
def zone(zone_id, **kwargs):
"""Renders the contents of the zone with given zone_id."""
try:
zone = ThemeManager.Zones.get(zone_id)
except ZoneNotFound:
return ''
try:
return zone.widget.render(add_context=kwargs)
except (WidgetNotFound, AttributeError):
pass
return ''
|
def zone(zone_id, **kwargs):
"""Renders the contents of the zone with given zone_id."""
try:
zone = ThemeManager.Zones.get(zone_id)
except ZoneNotFound:
return ''
try:
return zone.widget.render(add_context=kwargs)
except (WidgetNotFound, AttributeError):
pass
return ''
|
[
"Renders",
"the",
"contents",
"of",
"the",
"zone",
"with",
"given",
"zone_id",
"."
] |
ubyssey/dispatch
|
python
|
https://github.com/ubyssey/dispatch/blob/8da6084fe61726f20e9cf675190480cfc45ee764/dispatch/templatetags/dispatch_tags.py#L10-L23
|
[
"def",
"zone",
"(",
"zone_id",
",",
"*",
"*",
"kwargs",
")",
":",
"try",
":",
"zone",
"=",
"ThemeManager",
".",
"Zones",
".",
"get",
"(",
"zone_id",
")",
"except",
"ZoneNotFound",
":",
"return",
"''",
"try",
":",
"return",
"zone",
".",
"widget",
".",
"render",
"(",
"add_context",
"=",
"kwargs",
")",
"except",
"(",
"WidgetNotFound",
",",
"AttributeError",
")",
":",
"pass",
"return",
"''"
] |
8da6084fe61726f20e9cf675190480cfc45ee764
|
test
|
Publishable.save
|
Handles the saving/updating of a Publishable instance.
Arguments:
revision - if True, a new version of this Publishable will be created.
|
dispatch/modules/content/models.py
|
def save(self, revision=True, *args, **kwargs):
"""
Handles the saving/updating of a Publishable instance.
Arguments:
revision - if True, a new version of this Publishable will be created.
"""
if revision:
# If this is a revision, set it to be the head of the list and increment the revision id
self.head = True
self.revision_id += 1
previous_revision = self.get_previous_revision()
if not self.is_parent():
# If this is a revision, delete the old head of the list.
type(self).objects \
.filter(parent=self.parent, head=True) \
.update(head=None)
# Clear the instance id to force Django to save a new instance.
# Both fields (pk, id) required for this to work -- something to do with model inheritance
self.pk = None
self.id = None
# New version is unpublished by default
self.is_published = None
# Set created_at to current time, but only for first version
if not self.created_at:
self.created_at = timezone.now()
self.updated_at = timezone.now()
if revision:
self.updated_at = timezone.now()
super(Publishable, self).save(*args, **kwargs)
# Update the parent foreign key
if not self.parent:
self.parent = self
super(Publishable, self).save(update_fields=['parent'])
if revision:
# Set latest version for all articles
type(self).objects \
.filter(parent=self.parent) \
.update(latest_version=self.revision_id)
self.latest_version = self.revision_id
return self
|
def save(self, revision=True, *args, **kwargs):
"""
Handles the saving/updating of a Publishable instance.
Arguments:
revision - if True, a new version of this Publishable will be created.
"""
if revision:
# If this is a revision, set it to be the head of the list and increment the revision id
self.head = True
self.revision_id += 1
previous_revision = self.get_previous_revision()
if not self.is_parent():
# If this is a revision, delete the old head of the list.
type(self).objects \
.filter(parent=self.parent, head=True) \
.update(head=None)
# Clear the instance id to force Django to save a new instance.
# Both fields (pk, id) required for this to work -- something to do with model inheritance
self.pk = None
self.id = None
# New version is unpublished by default
self.is_published = None
# Set created_at to current time, but only for first version
if not self.created_at:
self.created_at = timezone.now()
self.updated_at = timezone.now()
if revision:
self.updated_at = timezone.now()
super(Publishable, self).save(*args, **kwargs)
# Update the parent foreign key
if not self.parent:
self.parent = self
super(Publishable, self).save(update_fields=['parent'])
if revision:
# Set latest version for all articles
type(self).objects \
.filter(parent=self.parent) \
.update(latest_version=self.revision_id)
self.latest_version = self.revision_id
return self
|
[
"Handles",
"the",
"saving",
"/",
"updating",
"of",
"a",
"Publishable",
"instance",
"."
] |
ubyssey/dispatch
|
python
|
https://github.com/ubyssey/dispatch/blob/8da6084fe61726f20e9cf675190480cfc45ee764/dispatch/modules/content/models.py#L170-L222
|
[
"def",
"save",
"(",
"self",
",",
"revision",
"=",
"True",
",",
"*",
"args",
",",
"*",
"*",
"kwargs",
")",
":",
"if",
"revision",
":",
"# If this is a revision, set it to be the head of the list and increment the revision id",
"self",
".",
"head",
"=",
"True",
"self",
".",
"revision_id",
"+=",
"1",
"previous_revision",
"=",
"self",
".",
"get_previous_revision",
"(",
")",
"if",
"not",
"self",
".",
"is_parent",
"(",
")",
":",
"# If this is a revision, delete the old head of the list.",
"type",
"(",
"self",
")",
".",
"objects",
".",
"filter",
"(",
"parent",
"=",
"self",
".",
"parent",
",",
"head",
"=",
"True",
")",
".",
"update",
"(",
"head",
"=",
"None",
")",
"# Clear the instance id to force Django to save a new instance.",
"# Both fields (pk, id) required for this to work -- something to do with model inheritance",
"self",
".",
"pk",
"=",
"None",
"self",
".",
"id",
"=",
"None",
"# New version is unpublished by default",
"self",
".",
"is_published",
"=",
"None",
"# Set created_at to current time, but only for first version",
"if",
"not",
"self",
".",
"created_at",
":",
"self",
".",
"created_at",
"=",
"timezone",
".",
"now",
"(",
")",
"self",
".",
"updated_at",
"=",
"timezone",
".",
"now",
"(",
")",
"if",
"revision",
":",
"self",
".",
"updated_at",
"=",
"timezone",
".",
"now",
"(",
")",
"super",
"(",
"Publishable",
",",
"self",
")",
".",
"save",
"(",
"*",
"args",
",",
"*",
"*",
"kwargs",
")",
"# Update the parent foreign key",
"if",
"not",
"self",
".",
"parent",
":",
"self",
".",
"parent",
"=",
"self",
"super",
"(",
"Publishable",
",",
"self",
")",
".",
"save",
"(",
"update_fields",
"=",
"[",
"'parent'",
"]",
")",
"if",
"revision",
":",
"# Set latest version for all articles",
"type",
"(",
"self",
")",
".",
"objects",
".",
"filter",
"(",
"parent",
"=",
"self",
".",
"parent",
")",
".",
"update",
"(",
"latest_version",
"=",
"self",
".",
"revision_id",
")",
"self",
".",
"latest_version",
"=",
"self",
".",
"revision_id",
"return",
"self"
] |
8da6084fe61726f20e9cf675190480cfc45ee764
|
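The revision scheme above boils down to: demote the current head row, bump revision_id, insert the new version as a fresh row sharing the same parent, then stamp latest_version across the whole family. A toy illustration of that bookkeeping without Django, using a plain list as storage (field names mirror the model; everything else is illustrative):

rows = []

def save_revision(parent_id):
    revision_id = max((r['revision_id'] for r in rows
                       if r['parent'] == parent_id), default=0) + 1
    for r in rows:
        if r['parent'] == parent_id:
            r['head'] = False                 # demote the old head
            r['latest_version'] = revision_id
    rows.append({'parent': parent_id, 'revision_id': revision_id,
                 'head': True, 'latest_version': revision_id})

save_revision(1)
save_revision(1)
print([(r['revision_id'], r['head']) for r in rows])  # [(1, False), (2, True)]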
test
|
Publishable.save_featured_image
|
Handles saving the featured image.
If data is None, the featured image will be removed.
`data` should be a dictionary with the following format:
{
'image_id': int,
'caption': str,
'credit': str
}
|
dispatch/modules/content/models.py
|
def save_featured_image(self, data):
"""
Handles saving the featured image.
If data is None, the featured image will be removed.
`data` should be a dictionary with the following format:
{
'image_id': int,
'caption': str,
'credit': str
}
"""
attachment = self.featured_image
if data is None:
if attachment:
attachment.delete()
self.featured_image = None
return
if data['image_id'] is None:
if attachment:
attachment.delete()
self.featured_image = None
return
if not attachment:
attachment = ImageAttachment()
attachment.image_id = data.get('image_id', attachment.image_id)
attachment.caption = data.get('caption', None)
attachment.credit = data.get('credit', None)
instance_type = str(type(self)).lower()
setattr(attachment, instance_type, self)
attachment.save()
self.featured_image = attachment
|
def save_featured_image(self, data):
"""
Handles saving the featured image.
If data is None, the featured image will be removed.
`data` should be a dictionary with the following format:
{
'image_id': int,
'caption': str,
'credit': str
}
"""
attachment = self.featured_image
if data is None:
if attachment:
attachment.delete()
self.featured_image = None
return
if data['image_id'] is None:
if attachment:
attachment.delete()
self.featured_image = None
return
if not attachment:
attachment = ImageAttachment()
attachment.image_id = data.get('image_id', attachment.image_id)
attachment.caption = data.get('caption', None)
attachment.credit = data.get('credit', None)
instance_type = str(type(self)).lower()
setattr(attachment, instance_type, self)
attachment.save()
self.featured_image = attachment
|
[
"Handles",
"saving",
"the",
"featured",
"image",
"."
] |
ubyssey/dispatch
|
python
|
https://github.com/ubyssey/dispatch/blob/8da6084fe61726f20e9cf675190480cfc45ee764/dispatch/modules/content/models.py#L231-L274
|
[
"def",
"save_featured_image",
"(",
"self",
",",
"data",
")",
":",
"attachment",
"=",
"self",
".",
"featured_image",
"if",
"data",
"is",
"None",
":",
"if",
"attachment",
":",
"attachment",
".",
"delete",
"(",
")",
"self",
".",
"featured_image",
"=",
"None",
"return",
"if",
"data",
"[",
"'image_id'",
"]",
"is",
"None",
":",
"if",
"attachment",
":",
"attachment",
".",
"delete",
"(",
")",
"self",
".",
"featured_image",
"=",
"None",
"return",
"if",
"not",
"attachment",
":",
"attachment",
"=",
"ImageAttachment",
"(",
")",
"attachment",
".",
"image_id",
"=",
"data",
".",
"get",
"(",
"'image_id'",
",",
"attachment",
".",
"image_id",
")",
"attachment",
".",
"caption",
"=",
"data",
".",
"get",
"(",
"'caption'",
",",
"None",
")",
"attachment",
".",
"credit",
"=",
"data",
".",
"get",
"(",
"'credit'",
",",
"None",
")",
"instance_type",
"=",
"str",
"(",
"type",
"(",
"self",
")",
")",
".",
"lower",
"(",
")",
"setattr",
"(",
"attachment",
",",
"instance_type",
",",
"self",
")",
"attachment",
".",
"save",
"(",
")",
"self",
".",
"featured_image",
"=",
"attachment"
] |
8da6084fe61726f20e9cf675190480cfc45ee764
|
test
|
Article.save_subsection
|
Save the subsection to the parent article
|
dispatch/modules/content/models.py
|
def save_subsection(self, subsection_id):
""" Save the subsection to the parent article """
Article.objects.filter(parent_id=self.parent.id).update(subsection_id=subsection_id)
|
def save_subsection(self, subsection_id):
""" Save the subsection to the parent article """
Article.objects.filter(parent_id=self.parent.id).update(subsection_id=subsection_id)
|
[
"Save",
"the",
"subsection",
"to",
"the",
"parent",
"article"
] |
ubyssey/dispatch
|
python
|
https://github.com/ubyssey/dispatch/blob/8da6084fe61726f20e9cf675190480cfc45ee764/dispatch/modules/content/models.py#L407-L409
|
[
"def",
"save_subsection",
"(",
"self",
",",
"subsection_id",
")",
":",
"Article",
".",
"objects",
".",
"filter",
"(",
"parent_id",
"=",
"self",
".",
"parent",
".",
"id",
")",
".",
"update",
"(",
"subsection_id",
"=",
"subsection_id",
")"
] |
8da6084fe61726f20e9cf675190480cfc45ee764
|
test
|
Image.get_extension
|
Returns the file extension.
|
dispatch/modules/content/models.py
|
def get_extension(self):
"""Returns the file extension."""
ext = os.path.splitext(self.img.name)[1]
if ext:
# Remove period from extension
return ext[1:]
return ext
|
def get_extension(self):
"""Returns the file extension."""
ext = os.path.splitext(self.img.name)[1]
if ext:
# Remove period from extension
return ext[1:]
return ext
|
[
"Returns",
"the",
"file",
"extension",
"."
] |
ubyssey/dispatch
|
python
|
https://github.com/ubyssey/dispatch/blob/8da6084fe61726f20e9cf675190480cfc45ee764/dispatch/modules/content/models.py#L497-L503
|
[
"def",
"get_extension",
"(",
"self",
")",
":",
"ext",
"=",
"os",
".",
"path",
".",
"splitext",
"(",
"self",
".",
"img",
".",
"name",
")",
"[",
"1",
"]",
"if",
"ext",
":",
"# Remove period from extension",
"return",
"ext",
"[",
"1",
":",
"]",
"return",
"ext"
] |
8da6084fe61726f20e9cf675190480cfc45ee764
|
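The ext[1:] slice exists because os.path.splitext keeps the leading dot:

import os

print(os.path.splitext('photos/cat.jpg')[1])      # '.jpg'
print(os.path.splitext('photos/cat.jpg')[1][1:])  # 'jpg'
print(os.path.splitext('photos/noext')[1])        # '' (falsy, returned as-is)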
test
|
Image.get_medium_url
|
Returns the medium size image URL.
|
dispatch/modules/content/models.py
|
def get_medium_url(self):
"""Returns the medium size image URL."""
if self.is_gif():
return self.get_absolute_url()
return '%s%s-%s.jpg' % (settings.MEDIA_URL, self.get_name(), 'medium')
|
def get_medium_url(self):
"""Returns the medium size image URL."""
if self.is_gif():
return self.get_absolute_url()
return '%s%s-%s.jpg' % (settings.MEDIA_URL, self.get_name(), 'medium')
|
[
"Returns",
"the",
"medium",
"size",
"image",
"URL",
"."
] |
ubyssey/dispatch
|
python
|
https://github.com/ubyssey/dispatch/blob/8da6084fe61726f20e9cf675190480cfc45ee764/dispatch/modules/content/models.py#L509-L513
|
[
"def",
"get_medium_url",
"(",
"self",
")",
":",
"if",
"self",
".",
"is_gif",
"(",
")",
":",
"return",
"self",
".",
"get_absolute_url",
"(",
")",
"return",
"'%s%s-%s.jpg'",
"%",
"(",
"settings",
".",
"MEDIA_URL",
",",
"self",
".",
"get_name",
"(",
")",
",",
"'medium'",
")"
] |
8da6084fe61726f20e9cf675190480cfc45ee764
|
test
|
Image.save
|
Custom save method to process thumbnails and save image dimensions.
|
dispatch/modules/content/models.py
|
def save(self, **kwargs):
"""Custom save method to process thumbnails and save image dimensions."""
is_new = self.pk is None
if is_new:
# Make filenames lowercase
self.img.name = self.img.name.lower()
# Call super method
super(Image, self).save(**kwargs)
if is_new and self.img:
data = self.img.read()
if not data:
return
image = Img.open(StringIO.StringIO(data))
self.width, self.height = image.size
super(Image, self).save()
name = self.get_name()
ext = self.get_extension()
for size in self.SIZES.keys():
self.save_thumbnail(image, self.SIZES[size], name, size, ext)
|
def save(self, **kwargs):
"""Custom save method to process thumbnails and save image dimensions."""
is_new = self.pk is None
if is_new:
# Make filenames lowercase
self.img.name = self.img.name.lower()
# Call super method
super(Image, self).save(**kwargs)
if is_new and self.img:
data = self.img.read()
if not data:
return
image = Img.open(StringIO.StringIO(data))
self.width, self.height = image.size
super(Image, self).save()
name = self.get_name()
ext = self.get_extension()
for size in self.SIZES.keys():
self.save_thumbnail(image, self.SIZES[size], name, size, ext)
|
[
"Custom",
"save",
"method",
"to",
"process",
"thumbnails",
"and",
"save",
"image",
"dimensions",
"."
] |
ubyssey/dispatch
|
python
|
https://github.com/ubyssey/dispatch/blob/8da6084fe61726f20e9cf675190480cfc45ee764/dispatch/modules/content/models.py#L520-L547
|
[
"def",
"save",
"(",
"self",
",",
"*",
"*",
"kwargs",
")",
":",
"is_new",
"=",
"self",
".",
"pk",
"is",
"None",
"if",
"is_new",
":",
"# Make filenames lowercase",
"self",
".",
"img",
".",
"name",
"=",
"self",
".",
"img",
".",
"name",
".",
"lower",
"(",
")",
"# Call super method",
"super",
"(",
"Image",
",",
"self",
")",
".",
"save",
"(",
"*",
"*",
"kwargs",
")",
"if",
"is_new",
"and",
"self",
".",
"img",
":",
"data",
"=",
"self",
".",
"img",
".",
"read",
"(",
")",
"if",
"not",
"data",
":",
"return",
"image",
"=",
"Img",
".",
"open",
"(",
"StringIO",
".",
"StringIO",
"(",
"data",
")",
")",
"self",
".",
"width",
",",
"self",
".",
"height",
"=",
"image",
".",
"size",
"super",
"(",
"Image",
",",
"self",
")",
".",
"save",
"(",
")",
"name",
"=",
"self",
".",
"get_name",
"(",
")",
"ext",
"=",
"self",
".",
"get_extension",
"(",
")",
"for",
"size",
"in",
"self",
".",
"SIZES",
".",
"keys",
"(",
")",
":",
"self",
".",
"save_thumbnail",
"(",
"image",
",",
"self",
".",
"SIZES",
"[",
"size",
"]",
",",
"name",
",",
"size",
",",
"ext",
")"
] |
8da6084fe61726f20e9cf675190480cfc45ee764
|
test
|
Image.save_thumbnail
|
Processes and saves a resized thumbnail version of the image.
|
dispatch/modules/content/models.py
|
def save_thumbnail(self, image, size, name, label, file_type):
"""Processes and saves a resized thumbnail version of the image."""
width, height = size
(imw, imh) = image.size
# If image is larger than thumbnail size, resize image
if (imw > width) or (imh > height):
image.thumbnail(size, Img.ANTIALIAS)
# Attach new thumbnail label to image filename
name = "%s-%s.jpg" % (name, label)
# Image.save format takes JPEG not jpg
if file_type in self.JPG_FORMATS:
file_type = 'JPEG'
# Write new thumbnail to StringIO object
image_io = StringIO.StringIO()
image.save(image_io, format=file_type, quality=75)
# Convert StringIO object to Django File object
thumb_file = InMemoryUploadedFile(image_io, None, name, 'image/jpeg', image_io.len, None)
# Save the new file to the default storage system
default_storage.save(name, thumb_file)
|
def save_thumbnail(self, image, size, name, label, file_type):
"""Processes and saves a resized thumbnail version of the image."""
width, height = size
(imw, imh) = image.size
# If image is larger than thumbnail size, resize image
if (imw > width) or (imh > height):
image.thumbnail(size, Img.ANTIALIAS)
# Attach new thumbnail label to image filename
name = "%s-%s.jpg" % (name, label)
# Image.save format takes JPEG not jpg
if file_type in self.JPG_FORMATS:
file_type = 'JPEG'
# Write new thumbnail to StringIO object
image_io = StringIO.StringIO()
image.save(image_io, format=file_type, quality=75)
# Convert StringIO object to Django File object
thumb_file = InMemoryUploadedFile(image_io, None, name, 'image/jpeg', image_io.len, None)
# Save the new file to the default storage system
default_storage.save(name, thumb_file)
|
[
"Processes",
"and",
"saves",
"a",
"resized",
"thumbnail",
"version",
"of",
"the",
"image",
"."
] |
ubyssey/dispatch
|
python
|
https://github.com/ubyssey/dispatch/blob/8da6084fe61726f20e9cf675190480cfc45ee764/dispatch/modules/content/models.py#L549-L573
|
[
"def",
"save_thumbnail",
"(",
"self",
",",
"image",
",",
"size",
",",
"name",
",",
"label",
",",
"file_type",
")",
":",
"width",
",",
"height",
"=",
"size",
"(",
"imw",
",",
"imh",
")",
"=",
"image",
".",
"size",
"# If image is larger than thumbnail size, resize image",
"if",
"(",
"imw",
">",
"width",
")",
"or",
"(",
"imh",
">",
"height",
")",
":",
"image",
".",
"thumbnail",
"(",
"size",
",",
"Img",
".",
"ANTIALIAS",
")",
"# Attach new thumbnail label to image filename",
"name",
"=",
"\"%s-%s.jpg\"",
"%",
"(",
"name",
",",
"label",
")",
"# Image.save format takes JPEG not jpg",
"if",
"file_type",
"in",
"self",
".",
"JPG_FORMATS",
":",
"file_type",
"=",
"'JPEG'",
"# Write new thumbnail to StringIO object",
"image_io",
"=",
"StringIO",
".",
"StringIO",
"(",
")",
"image",
".",
"save",
"(",
"image_io",
",",
"format",
"=",
"file_type",
",",
"quality",
"=",
"75",
")",
"# Convert StringIO object to Django File object",
"thumb_file",
"=",
"InMemoryUploadedFile",
"(",
"image_io",
",",
"None",
",",
"name",
",",
"'image/jpeg'",
",",
"image_io",
".",
"len",
",",
"None",
")",
"# Save the new file to the default storage system",
"default_storage",
".",
"save",
"(",
"name",
",",
"thumb_file",
")"
] |
8da6084fe61726f20e9cf675190480cfc45ee764
|
test
|
MySQL.init_app
|
Initialize the `app` for use with this
:class:`~flask_mysqldb.MySQL` class.
This is called automatically if `app` is passed to
:meth:`~MySQL.__init__`.
:param flask.Flask app: the application to configure for use with
this :class:`~flask_mysqldb.MySQL` class.
|
flask_mysqldb/__init__.py
|
def init_app(self, app):
"""Initialize the `app` for use with this
:class:`~flask_mysqldb.MySQL` class.
This is called automatically if `app` is passed to
:meth:`~MySQL.__init__`.
:param flask.Flask app: the application to configure for use with
this :class:`~flask_mysqldb.MySQL` class.
"""
app.config.setdefault('MYSQL_HOST', 'localhost')
app.config.setdefault('MYSQL_USER', None)
app.config.setdefault('MYSQL_PASSWORD', None)
app.config.setdefault('MYSQL_DB', None)
app.config.setdefault('MYSQL_PORT', 3306)
app.config.setdefault('MYSQL_UNIX_SOCKET', None)
app.config.setdefault('MYSQL_CONNECT_TIMEOUT', 10)
app.config.setdefault('MYSQL_READ_DEFAULT_FILE', None)
app.config.setdefault('MYSQL_USE_UNICODE', True)
app.config.setdefault('MYSQL_CHARSET', 'utf8')
app.config.setdefault('MYSQL_SQL_MODE', None)
app.config.setdefault('MYSQL_CURSORCLASS', None)
if hasattr(app, 'teardown_appcontext'):
app.teardown_appcontext(self.teardown)
|
def init_app(self, app):
"""Initialize the `app` for use with this
:class:`~flask_mysqldb.MySQL` class.
This is called automatically if `app` is passed to
:meth:`~MySQL.__init__`.
:param flask.Flask app: the application to configure for use with
this :class:`~flask_mysqldb.MySQL` class.
"""
app.config.setdefault('MYSQL_HOST', 'localhost')
app.config.setdefault('MYSQL_USER', None)
app.config.setdefault('MYSQL_PASSWORD', None)
app.config.setdefault('MYSQL_DB', None)
app.config.setdefault('MYSQL_PORT', 3306)
app.config.setdefault('MYSQL_UNIX_SOCKET', None)
app.config.setdefault('MYSQL_CONNECT_TIMEOUT', 10)
app.config.setdefault('MYSQL_READ_DEFAULT_FILE', None)
app.config.setdefault('MYSQL_USE_UNICODE', True)
app.config.setdefault('MYSQL_CHARSET', 'utf8')
app.config.setdefault('MYSQL_SQL_MODE', None)
app.config.setdefault('MYSQL_CURSORCLASS', None)
if hasattr(app, 'teardown_appcontext'):
app.teardown_appcontext(self.teardown)
|
[
"Initialize",
"the",
"app",
"for",
"use",
"with",
"this",
":",
"class",
":",
"~flask_mysqldb",
".",
"MySQL",
"class",
".",
"This",
"is",
"called",
"automatically",
"if",
"app",
"is",
"passed",
"to",
":",
"meth",
":",
"~MySQL",
".",
"__init__",
"."
] |
admiralobvious/flask-mysqldb
|
python
|
https://github.com/admiralobvious/flask-mysqldb/blob/418c794e9b031addd026f29312865403baea55a0/flask_mysqldb/__init__.py#L13-L37
|
[
"def",
"init_app",
"(",
"self",
",",
"app",
")",
":",
"app",
".",
"config",
".",
"setdefault",
"(",
"'MYSQL_HOST'",
",",
"'localhost'",
")",
"app",
".",
"config",
".",
"setdefault",
"(",
"'MYSQL_USER'",
",",
"None",
")",
"app",
".",
"config",
".",
"setdefault",
"(",
"'MYSQL_PASSWORD'",
",",
"None",
")",
"app",
".",
"config",
".",
"setdefault",
"(",
"'MYSQL_DB'",
",",
"None",
")",
"app",
".",
"config",
".",
"setdefault",
"(",
"'MYSQL_PORT'",
",",
"3306",
")",
"app",
".",
"config",
".",
"setdefault",
"(",
"'MYSQL_UNIX_SOCKET'",
",",
"None",
")",
"app",
".",
"config",
".",
"setdefault",
"(",
"'MYSQL_CONNECT_TIMEOUT'",
",",
"10",
")",
"app",
".",
"config",
".",
"setdefault",
"(",
"'MYSQL_READ_DEFAULT_FILE'",
",",
"None",
")",
"app",
".",
"config",
".",
"setdefault",
"(",
"'MYSQL_USE_UNICODE'",
",",
"True",
")",
"app",
".",
"config",
".",
"setdefault",
"(",
"'MYSQL_CHARSET'",
",",
"'utf8'",
")",
"app",
".",
"config",
".",
"setdefault",
"(",
"'MYSQL_SQL_MODE'",
",",
"None",
")",
"app",
".",
"config",
".",
"setdefault",
"(",
"'MYSQL_CURSORCLASS'",
",",
"None",
")",
"if",
"hasattr",
"(",
"app",
",",
"'teardown_appcontext'",
")",
":",
"app",
".",
"teardown_appcontext",
"(",
"self",
".",
"teardown",
")"
] |
418c794e9b031addd026f29312865403baea55a0
|
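This is the standard Flask extension pattern: setdefault fills in config keys the user did not set, and the teardown hook closes the per-context connection. Typical usage with an application factory (assuming Flask and flask-mysqldb are installed):

from flask import Flask
from flask_mysqldb import MySQL

mysql = MySQL()  # no app yet; bound later via init_app

def create_app():
    app = Flask(__name__)
    app.config['MYSQL_DB'] = 'example'  # unset keys fall back to setdefault
    mysql.init_app(app)                 # fills defaults, registers teardown
    return app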
test
|
MySQL.connection
|
Attempts to connect to the MySQL server.
:return: Bound MySQL connection object if successful or ``None`` if
unsuccessful.
|
flask_mysqldb/__init__.py
|
def connection(self):
"""Attempts to connect to the MySQL server.
:return: Bound MySQL connection object if successful or ``None`` if
unsuccessful.
"""
ctx = _app_ctx_stack.top
if ctx is not None:
if not hasattr(ctx, 'mysql_db'):
ctx.mysql_db = self.connect
return ctx.mysql_db
|
def connection(self):
"""Attempts to connect to the MySQL server.
:return: Bound MySQL connection object if successful or ``None`` if
unsuccessful.
"""
ctx = _app_ctx_stack.top
if ctx is not None:
if not hasattr(ctx, 'mysql_db'):
ctx.mysql_db = self.connect
return ctx.mysql_db
|
[
"Attempts",
"to",
"connect",
"to",
"the",
"MySQL",
"server",
"."
] |
admiralobvious/flask-mysqldb
|
python
|
https://github.com/admiralobvious/flask-mysqldb/blob/418c794e9b031addd026f29312865403baea55a0/flask_mysqldb/__init__.py#L84-L95
|
[
"def",
"connection",
"(",
"self",
")",
":",
"ctx",
"=",
"_app_ctx_stack",
".",
"top",
"if",
"ctx",
"is",
"not",
"None",
":",
"if",
"not",
"hasattr",
"(",
"ctx",
",",
"'mysql_db'",
")",
":",
"ctx",
".",
"mysql_db",
"=",
"self",
".",
"connect",
"return",
"ctx",
".",
"mysql_db"
] |
418c794e9b031addd026f29312865403baea55a0
|
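The property memoizes one connection per application context: the first access creates it, later accesses in the same context reuse it, and the teardown registered in init_app closes it. A generic sketch of that caching idiom using Flask's g object (the connect() factory is a placeholder):

from flask import Flask, g

app = Flask(__name__)

def connect():
    return object()  # placeholder for a real DB connection

def get_db():
    if 'db' not in g:  # g is scoped to the current app context
        g.db = connect()
    return g.db

with app.app_context():
    print(get_db() is get_db())  # True: same handle within one context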
test
|
BandwidthLimiter.get_bandwith_limited_stream
|
Wraps a fileobj in a bandwidth limited stream wrapper
:type fileobj: file-like obj
:param fileobj: The file-like obj to wrap
:type transfer_coordinator: s3transfer.futures.TransferCoordinator
:param transfer_coordinator: The coordinator for the general transfer
that the wrapped stream is a part of
:type enabled: boolean
:param enabled: Whether bandwidth limiting should be enabled to start
|
s3transfer/bandwidth.py
|
def get_bandwith_limited_stream(self, fileobj, transfer_coordinator,
enabled=True):
"""Wraps a fileobj in a bandwidth limited stream wrapper
:type fileobj: file-like obj
:param fileobj: The file-like obj to wrap
:type transfer_coordinator: s3transfer.futures.TransferCoordinator
:param transfer_coordinator: The coordinator for the general transfer
that the wrapped stream is a part of
:type enabled: boolean
:param enabled: Whether bandwidth limiting should be enabled to start
"""
stream = BandwidthLimitedStream(
fileobj, self._leaky_bucket, transfer_coordinator,
self._time_utils)
if not enabled:
stream.disable_bandwidth_limiting()
return stream
|
def get_bandwith_limited_stream(self, fileobj, transfer_coordinator,
enabled=True):
"""Wraps a fileobj in a bandwidth limited stream wrapper
:type fileobj: file-like obj
:param fileobj: The file-like obj to wrap
:type transfer_coordinator: s3transfer.futures.TransferCoordinator
:param transfer_coordinator: The coordinator for the general transfer
that the wrapped stream is a part of
:type enabled: boolean
:param enabled: Whether bandwidth limiting should be enabled to start
"""
stream = BandwidthLimitedStream(
fileobj, self._leaky_bucket, transfer_coordinator,
self._time_utils)
if not enabled:
stream.disable_bandwidth_limiting()
return stream
|
[
"Wraps",
"a",
"fileobj",
"in",
"a",
"bandwidth",
"limited",
"stream",
"wrapper"
] |
boto/s3transfer
|
python
|
https://github.com/boto/s3transfer/blob/2aead638c8385d8ae0b1756b2de17e8fad45fffa/s3transfer/bandwidth.py#L78-L97
|
[
"def",
"get_bandwith_limited_stream",
"(",
"self",
",",
"fileobj",
",",
"transfer_coordinator",
",",
"enabled",
"=",
"True",
")",
":",
"stream",
"=",
"BandwidthLimitedStream",
"(",
"fileobj",
",",
"self",
".",
"_leaky_bucket",
",",
"transfer_coordinator",
",",
"self",
".",
"_time_utils",
")",
"if",
"not",
"enabled",
":",
"stream",
".",
"disable_bandwidth_limiting",
"(",
")",
"return",
"stream"
] |
2aead638c8385d8ae0b1756b2de17e8fad45fffa
|
test
|
BandwidthLimitedStream.read
|
Read a specified amount
Reads will only be throttled if bandwidth limiting is enabled.
|
s3transfer/bandwidth.py
|
def read(self, amount):
"""Read a specified amount
Reads will only be throttled if bandwidth limiting is enabled.
"""
if not self._bandwidth_limiting_enabled:
return self._fileobj.read(amount)
# We do not want to be calling consume on every read as the read
# amounts can be small causing the lock of the leaky bucket to
# introduce noticeable overhead. So instead we keep track of
# how many bytes we have seen and only call consume once we pass a
# certain threshold.
self._bytes_seen += amount
if self._bytes_seen < self._bytes_threshold:
return self._fileobj.read(amount)
self._consume_through_leaky_bucket()
return self._fileobj.read(amount)
|
def read(self, amount):
"""Read a specified amount
Reads will only be throttled if bandwidth limiting is enabled.
"""
if not self._bandwidth_limiting_enabled:
return self._fileobj.read(amount)
# We do not want to be calling consume on every read as the read
# amounts can be small causing the lock of the leaky bucket to
# introduce noticeable overhead. So instead we keep track of
# how many bytes we have seen and only call consume once we pass a
# certain threshold.
self._bytes_seen += amount
if self._bytes_seen < self._bytes_threshold:
return self._fileobj.read(amount)
self._consume_through_leaky_bucket()
return self._fileobj.read(amount)
|
[
"Read",
"a",
"specified",
"amount"
] |
boto/s3transfer
|
python
|
https://github.com/boto/s3transfer/blob/2aead638c8385d8ae0b1756b2de17e8fad45fffa/s3transfer/bandwidth.py#L138-L156
|
[
"def",
"read",
"(",
"self",
",",
"amount",
")",
":",
"if",
"not",
"self",
".",
"_bandwidth_limiting_enabled",
":",
"return",
"self",
".",
"_fileobj",
".",
"read",
"(",
"amount",
")",
"# We do not want to be calling consume on every read as the read",
"# amounts can be small causing the lock of the leaky bucket to",
"# introduce noticeable overhead. So instead we keep track of",
"# how many bytes we have seen and only call consume once we pass a",
"# certain threshold.",
"self",
".",
"_bytes_seen",
"+=",
"amount",
"if",
"self",
".",
"_bytes_seen",
"<",
"self",
".",
"_bytes_threshold",
":",
"return",
"self",
".",
"_fileobj",
".",
"read",
"(",
"amount",
")",
"self",
".",
"_consume_through_leaky_bucket",
"(",
")",
"return",
"self",
".",
"_fileobj",
".",
"read",
"(",
"amount",
")"
] |
2aead638c8385d8ae0b1756b2de17e8fad45fffa
|
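The comment in read() describes a batching optimization: rather than taking the rate limiter's lock on every small read, bytes are tallied locally and the limiter is consulted only once a threshold is crossed. A standalone sketch of that idea (consume is a stand-in for the leaky-bucket call):

class BatchedThrottle(object):
    def __init__(self, threshold, consume):
        self._threshold = threshold
        self._consume = consume
        self._bytes_seen = 0

    def record(self, amount):
        self._bytes_seen += amount
        if self._bytes_seen >= self._threshold:
            self._consume(self._bytes_seen)  # one locked call per batch
            self._bytes_seen = 0

calls = []
t = BatchedThrottle(1024, calls.append)
for _ in range(5):
    t.record(300)  # five reads...
print(calls)       # [1200] -- but only one consume() call so far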
test
|
LeakyBucket.consume
|
Consume a requested amount
:type amt: int
:param amt: The amount of bytes to request to consume
:type request_token: RequestToken
:param request_token: The token associated to the consumption
request that is used to identify the request. So if a
RequestExceededException is raised the token should be used
in subsequent retry consume() request.
:raises RequestExceededException: If the consumption amount would
exceed the maximum allocated bandwidth
:rtype: int
:returns: The amount consumed
|
s3transfer/bandwidth.py
|
def consume(self, amt, request_token):
"""Consume an a requested amount
:type amt: int
:param amt: The amount of bytes to request to consume
:type request_token: RequestToken
:param request_token: The token associated to the consumption
request that is used to identify the request. So if a
RequestExceededException is raised the token should be used
in subsequent retry consume() request.
:raises RequestExceededException: If the consumption amount would
exceed the maximum allocated bandwidth
:rtype: int
:returns: The amount consumed
"""
with self._lock:
time_now = self._time_utils.time()
if self._consumption_scheduler.is_scheduled(request_token):
return self._release_requested_amt_for_scheduled_request(
amt, request_token, time_now)
elif self._projected_to_exceed_max_rate(amt, time_now):
self._raise_request_exceeded_exception(
amt, request_token, time_now)
else:
return self._release_requested_amt(amt, time_now)
|
def consume(self, amt, request_token):
"""Consume an a requested amount
:type amt: int
:param amt: The amount of bytes to request to consume
:type request_token: RequestToken
:param request_token: The token associated to the consumption
request that is used to identify the request. So if a
RequestExceededException is raised the token should be used
in subsequent retry consume() request.
:raises RequestExceededException: If the consumption amount would
exceed the maximum allocated bandwidth
:rtype: int
:returns: The amount consumed
"""
with self._lock:
time_now = self._time_utils.time()
if self._consumption_scheduler.is_scheduled(request_token):
return self._release_requested_amt_for_scheduled_request(
amt, request_token, time_now)
elif self._projected_to_exceed_max_rate(amt, time_now):
self._raise_request_exceeded_exception(
amt, request_token, time_now)
else:
return self._release_requested_amt(amt, time_now)
|
[
"Consume",
"an",
"a",
"requested",
"amount"
] |
boto/s3transfer
|
python
|
https://github.com/boto/s3transfer/blob/2aead638c8385d8ae0b1756b2de17e8fad45fffa/s3transfer/bandwidth.py#L237-L264
|
[
"def",
"consume",
"(",
"self",
",",
"amt",
",",
"request_token",
")",
":",
"with",
"self",
".",
"_lock",
":",
"time_now",
"=",
"self",
".",
"_time_utils",
".",
"time",
"(",
")",
"if",
"self",
".",
"_consumption_scheduler",
".",
"is_scheduled",
"(",
"request_token",
")",
":",
"return",
"self",
".",
"_release_requested_amt_for_scheduled_request",
"(",
"amt",
",",
"request_token",
",",
"time_now",
")",
"elif",
"self",
".",
"_projected_to_exceed_max_rate",
"(",
"amt",
",",
"time_now",
")",
":",
"self",
".",
"_raise_request_exceeded_exception",
"(",
"amt",
",",
"request_token",
",",
"time_now",
")",
"else",
":",
"return",
"self",
".",
"_release_requested_amt",
"(",
"amt",
",",
"time_now",
")"
] |
2aead638c8385d8ae0b1756b2de17e8fad45fffa
|
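consume() is designed around token reuse: a RequestExceededException signals the caller to retry later with the same RequestToken so the bucket can recognize the scheduled request. A hedged sketch of that retry loop, assuming the exception carries a retry_time attribute as suggested by this module:

import time

from s3transfer.bandwidth import RequestExceededException, RequestToken

def consume_with_retries(leaky_bucket, amt):
    # One token per logical consumption request; reuse it on every retry.
    token = RequestToken()
    while True:
        try:
            return leaky_bucket.consume(amt, token)
        except RequestExceededException as e:
            # Back off for the suggested duration, then retry with the
            # same token so is_scheduled(request_token) becomes True.
            time.sleep(e.retry_time)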
test
|
ConsumptionScheduler.schedule_consumption
|
Schedules a wait time to be able to consume an amount
:type amt: int
:param amt: The amount of bytes scheduled to be consumed
:type token: RequestToken
:param token: The token associated to the consumption
request that is used to identify the request.
:type time_to_consume: float
:param time_to_consume: The desired time it should take for that
specific request amount to be consumed, regardless of previously
scheduled consumption requests
:rtype: float
:returns: The amount of time to wait for the specific request before
actually consuming the specified amount.
|
s3transfer/bandwidth.py
|
def schedule_consumption(self, amt, token, time_to_consume):
"""Schedules a wait time to be able to consume an amount
:type amt: int
:param amt: The amount of bytes scheduled to be consumed
:type token: RequestToken
:param token: The token associated to the consumption
request that is used to identify the request.
:type time_to_consume: float
:param time_to_consume: The desired time it should take for that
        specific request amount to be consumed, regardless of previously
scheduled consumption requests
:rtype: float
:returns: The amount of time to wait for the specific request before
actually consuming the specified amount.
"""
self._total_wait += time_to_consume
self._tokens_to_scheduled_consumption[token] = {
'wait_duration': self._total_wait,
'time_to_consume': time_to_consume,
}
return self._total_wait
|
def schedule_consumption(self, amt, token, time_to_consume):
"""Schedules a wait time to be able to consume an amount
:type amt: int
:param amt: The amount of bytes scheduled to be consumed
:type token: RequestToken
:param token: The token associated to the consumption
request that is used to identify the request.
:type time_to_consume: float
:param time_to_consume: The desired time it should take for that
        specific request amount to be consumed, regardless of previously
scheduled consumption requests
:rtype: float
:returns: The amount of time to wait for the specific request before
actually consuming the specified amount.
"""
self._total_wait += time_to_consume
self._tokens_to_scheduled_consumption[token] = {
'wait_duration': self._total_wait,
'time_to_consume': time_to_consume,
}
return self._total_wait
|
[
"Schedules",
"a",
"wait",
"time",
"to",
"be",
"able",
"to",
"consume",
"an",
"amount"
] |
boto/s3transfer
|
python
|
https://github.com/boto/s3transfer/blob/2aead638c8385d8ae0b1756b2de17e8fad45fffa/s3transfer/bandwidth.py#L303-L327
|
[
"def",
"schedule_consumption",
"(",
"self",
",",
"amt",
",",
"token",
",",
"time_to_consume",
")",
":",
"self",
".",
"_total_wait",
"+=",
"time_to_consume",
"self",
".",
"_tokens_to_scheduled_consumption",
"[",
"token",
"]",
"=",
"{",
"'wait_duration'",
":",
"self",
".",
"_total_wait",
",",
"'time_to_consume'",
":",
"time_to_consume",
",",
"}",
"return",
"self",
".",
"_total_wait"
] |
2aead638c8385d8ae0b1756b2de17e8fad45fffa
|
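The running total in schedule_consumption() is simple queueing arithmetic: each new request waits behind everything already scheduled, and process_scheduled_consumption() (next entry) subtracts that time back out once a request completes. A standalone illustration of the accumulation:

total_wait = 0.0
for time_to_consume in (0.5, 0.25, 1.0):
    total_wait += time_to_consume
    print(total_wait)   # 0.5, then 0.75, then 1.75 seconds of queued wait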
test
|
ConsumptionScheduler.process_scheduled_consumption
|
Processes a scheduled consumption request that has completed
:type token: RequestToken
:param token: The token associated to the consumption
request that is used to identify the request.
|
s3transfer/bandwidth.py
|
def process_scheduled_consumption(self, token):
"""Processes a scheduled consumption request that has completed
:type token: RequestToken
:param token: The token associated to the consumption
request that is used to identify the request.
"""
scheduled_retry = self._tokens_to_scheduled_consumption.pop(token)
self._total_wait = max(
self._total_wait - scheduled_retry['time_to_consume'], 0)
|
def process_scheduled_consumption(self, token):
"""Processes a scheduled consumption request that has completed
:type token: RequestToken
:param token: The token associated to the consumption
request that is used to identify the request.
"""
scheduled_retry = self._tokens_to_scheduled_consumption.pop(token)
self._total_wait = max(
self._total_wait - scheduled_retry['time_to_consume'], 0)
|
[
"Processes",
"a",
"scheduled",
"consumption",
"request",
"that",
"has",
"completed"
] |
boto/s3transfer
|
python
|
https://github.com/boto/s3transfer/blob/2aead638c8385d8ae0b1756b2de17e8fad45fffa/s3transfer/bandwidth.py#L329-L338
|
[
"def",
"process_scheduled_consumption",
"(",
"self",
",",
"token",
")",
":",
"scheduled_retry",
"=",
"self",
".",
"_tokens_to_scheduled_consumption",
".",
"pop",
"(",
"token",
")",
"self",
".",
"_total_wait",
"=",
"max",
"(",
"self",
".",
"_total_wait",
"-",
"scheduled_retry",
"[",
"'time_to_consume'",
"]",
",",
"0",
")"
] |
2aead638c8385d8ae0b1756b2de17e8fad45fffa
|
test
|
BandwidthRateTracker.get_projected_rate
|
Get the projected rate using a provided amount and time
:type amt: int
:param amt: The proposed amount to consume
:type time_at_consumption: float
:param time_at_consumption: The proposed time to consume at
:rtype: float
:returns: The consumption rate if that amt and time were consumed
|
s3transfer/bandwidth.py
|
def get_projected_rate(self, amt, time_at_consumption):
"""Get the projected rate using a provided amount and time
:type amt: int
:param amt: The proposed amount to consume
:type time_at_consumption: float
:param time_at_consumption: The proposed time to consume at
:rtype: float
:returns: The consumption rate if that amt and time were consumed
"""
if self._last_time is None:
return 0.0
return self._calculate_exponential_moving_average_rate(
amt, time_at_consumption)
|
def get_projected_rate(self, amt, time_at_consumption):
"""Get the projected rate using a provided amount and time
:type amt: int
:param amt: The proposed amount to consume
:type time_at_consumption: float
:param time_at_consumption: The proposed time to consume at
:rtype: float
:returns: The consumption rate if that amt and time were consumed
"""
if self._last_time is None:
return 0.0
return self._calculate_exponential_moving_average_rate(
amt, time_at_consumption)
|
[
"Get",
"the",
"projected",
"rate",
"using",
"a",
"provided",
"amount",
"and",
"time"
] |
boto/s3transfer
|
python
|
https://github.com/boto/s3transfer/blob/2aead638c8385d8ae0b1756b2de17e8fad45fffa/s3transfer/bandwidth.py#L369-L384
|
[
"def",
"get_projected_rate",
"(",
"self",
",",
"amt",
",",
"time_at_consumption",
")",
":",
"if",
"self",
".",
"_last_time",
"is",
"None",
":",
"return",
"0.0",
"return",
"self",
".",
"_calculate_exponential_moving_average_rate",
"(",
"amt",
",",
"time_at_consumption",
")"
] |
2aead638c8385d8ae0b1756b2de17e8fad45fffa
|
test
|
BandwidthRateTracker.record_consumption_rate
|
Record the consumption rate based on amount and time point
:type amt: int
:param amt: The amount that got consumed
:type time_at_consumption: float
:param time_at_consumption: The time at which the amount was consumed
|
s3transfer/bandwidth.py
|
def record_consumption_rate(self, amt, time_at_consumption):
"""Record the consumption rate based off amount and time point
:type amt: int
:param amt: The amount that got consumed
:type time_at_consumption: float
:param time_at_consumption: The time at which the amount was consumed
"""
if self._last_time is None:
self._last_time = time_at_consumption
self._current_rate = 0.0
return
self._current_rate = self._calculate_exponential_moving_average_rate(
amt, time_at_consumption)
self._last_time = time_at_consumption
|
def record_consumption_rate(self, amt, time_at_consumption):
"""Record the consumption rate based off amount and time point
:type amt: int
:param amt: The amount that got consumed
:type time_at_consumption: float
:param time_at_consumption: The time at which the amount was consumed
"""
if self._last_time is None:
self._last_time = time_at_consumption
self._current_rate = 0.0
return
self._current_rate = self._calculate_exponential_moving_average_rate(
amt, time_at_consumption)
self._last_time = time_at_consumption
|
[
"Record",
"the",
"consumption",
"rate",
"based",
"off",
"amount",
"and",
"time",
"point"
] |
boto/s3transfer
|
python
|
https://github.com/boto/s3transfer/blob/2aead638c8385d8ae0b1756b2de17e8fad45fffa/s3transfer/bandwidth.py#L386-L401
|
[
"def",
"record_consumption_rate",
"(",
"self",
",",
"amt",
",",
"time_at_consumption",
")",
":",
"if",
"self",
".",
"_last_time",
"is",
"None",
":",
"self",
".",
"_last_time",
"=",
"time_at_consumption",
"self",
".",
"_current_rate",
"=",
"0.0",
"return",
"self",
".",
"_current_rate",
"=",
"self",
".",
"_calculate_exponential_moving_average_rate",
"(",
"amt",
",",
"time_at_consumption",
")",
"self",
".",
"_last_time",
"=",
"time_at_consumption"
] |
2aead638c8385d8ae0b1756b2de17e8fad45fffa
|
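Both get_projected_rate() and record_consumption_rate() defer to an exponential moving average helper. The formula below is a sketch inferred from the method names, with an assumed smoothing factor alpha; it is not copied from the s3transfer source.

def ema_rate(current_rate, amt, time_delta, alpha=0.8):
    # Blend this sample's instantaneous rate with the running average.
    new_sample = amt / time_delta
    return alpha * new_sample + (1 - alpha) * current_rate

rate = 0.0
rate = ema_rate(rate, amt=1024, time_delta=0.5)   # 1638.4 bytes/sec
print(rate)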
test
|
ProcessPoolDownloader.download_file
|
Downloads the object's contents to a file
:type bucket: str
:param bucket: The name of the bucket to download from
:type key: str
:param key: The name of the key to download from
:type filename: str
:param filename: The name of a file to download to.
:type extra_args: dict
:param extra_args: Extra arguments that may be passed to the
client operation
:type expected_size: int
:param expected_size: The expected size in bytes of the download. If
provided, the downloader will not call HeadObject to determine the
object's size and will use the provided value instead. The size is
needed to determine whether to do a multipart download.
:rtype: s3transfer.futures.TransferFuture
:returns: Transfer future representing the download
|
s3transfer/processpool.py
|
def download_file(self, bucket, key, filename, extra_args=None,
expected_size=None):
"""Downloads the object's contents to a file
:type bucket: str
:param bucket: The name of the bucket to download from
:type key: str
:param key: The name of the key to download from
:type filename: str
:param filename: The name of a file to download to.
:type extra_args: dict
:param extra_args: Extra arguments that may be passed to the
client operation
:type expected_size: int
:param expected_size: The expected size in bytes of the download. If
provided, the downloader will not call HeadObject to determine the
        object's size and will use the provided value instead. The size is
needed to determine whether to do a multipart download.
:rtype: s3transfer.futures.TransferFuture
:returns: Transfer future representing the download
"""
self._start_if_needed()
if extra_args is None:
extra_args = {}
self._validate_all_known_args(extra_args)
transfer_id = self._transfer_monitor.notify_new_transfer()
download_file_request = DownloadFileRequest(
transfer_id=transfer_id, bucket=bucket, key=key,
filename=filename, extra_args=extra_args,
expected_size=expected_size,
)
logger.debug(
'Submitting download file request: %s.', download_file_request)
self._download_request_queue.put(download_file_request)
call_args = CallArgs(
bucket=bucket, key=key, filename=filename, extra_args=extra_args,
expected_size=expected_size)
future = self._get_transfer_future(transfer_id, call_args)
return future
|
def download_file(self, bucket, key, filename, extra_args=None,
expected_size=None):
"""Downloads the object's contents to a file
:type bucket: str
:param bucket: The name of the bucket to download from
:type key: str
:param key: The name of the key to download from
:type filename: str
:param filename: The name of a file to download to.
:type extra_args: dict
:param extra_args: Extra arguments that may be passed to the
client operation
:type expected_size: int
:param expected_size: The expected size in bytes of the download. If
provided, the downloader will not call HeadObject to determine the
        object's size and will use the provided value instead. The size is
needed to determine whether to do a multipart download.
:rtype: s3transfer.futures.TransferFuture
:returns: Transfer future representing the download
"""
self._start_if_needed()
if extra_args is None:
extra_args = {}
self._validate_all_known_args(extra_args)
transfer_id = self._transfer_monitor.notify_new_transfer()
download_file_request = DownloadFileRequest(
transfer_id=transfer_id, bucket=bucket, key=key,
filename=filename, extra_args=extra_args,
expected_size=expected_size,
)
logger.debug(
'Submitting download file request: %s.', download_file_request)
self._download_request_queue.put(download_file_request)
call_args = CallArgs(
bucket=bucket, key=key, filename=filename, extra_args=extra_args,
expected_size=expected_size)
future = self._get_transfer_future(transfer_id, call_args)
return future
|
[
"Downloads",
"the",
"object",
"s",
"contents",
"to",
"a",
"file"
] |
boto/s3transfer
|
python
|
https://github.com/boto/s3transfer/blob/2aead638c8385d8ae0b1756b2de17e8fad45fffa/s3transfer/processpool.py#L321-L364
|
[
"def",
"download_file",
"(",
"self",
",",
"bucket",
",",
"key",
",",
"filename",
",",
"extra_args",
"=",
"None",
",",
"expected_size",
"=",
"None",
")",
":",
"self",
".",
"_start_if_needed",
"(",
")",
"if",
"extra_args",
"is",
"None",
":",
"extra_args",
"=",
"{",
"}",
"self",
".",
"_validate_all_known_args",
"(",
"extra_args",
")",
"transfer_id",
"=",
"self",
".",
"_transfer_monitor",
".",
"notify_new_transfer",
"(",
")",
"download_file_request",
"=",
"DownloadFileRequest",
"(",
"transfer_id",
"=",
"transfer_id",
",",
"bucket",
"=",
"bucket",
",",
"key",
"=",
"key",
",",
"filename",
"=",
"filename",
",",
"extra_args",
"=",
"extra_args",
",",
"expected_size",
"=",
"expected_size",
",",
")",
"logger",
".",
"debug",
"(",
"'Submitting download file request: %s.'",
",",
"download_file_request",
")",
"self",
".",
"_download_request_queue",
".",
"put",
"(",
"download_file_request",
")",
"call_args",
"=",
"CallArgs",
"(",
"bucket",
"=",
"bucket",
",",
"key",
"=",
"key",
",",
"filename",
"=",
"filename",
",",
"extra_args",
"=",
"extra_args",
",",
"expected_size",
"=",
"expected_size",
")",
"future",
"=",
"self",
".",
"_get_transfer_future",
"(",
"transfer_id",
",",
"call_args",
")",
"return",
"future"
] |
2aead638c8385d8ae0b1756b2de17e8fad45fffa
|
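A hedged end-to-end usage sketch for download_file(). The bucket, key, and filename are placeholders; using ProcessPoolDownloader as a context manager (so worker processes are shut down cleanly) follows the class's documented pattern but is stated here as an assumption.

from s3transfer.processpool import ProcessPoolDownloader

with ProcessPoolDownloader() as downloader:
    future = downloader.download_file(
        bucket='my-bucket', key='my-key', filename='/tmp/my-key')
    future.result()   # block until the download finishes or raises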
test
|
TransferMonitor.poll_for_result
|
Poll for the result of a transfer
:param transfer_id: Unique identifier for the transfer
:return: If the transfer succeeded, it will return the result. If the
transfer failed, it will raise the exception associated to the
failure.
|
s3transfer/processpool.py
|
def poll_for_result(self, transfer_id):
"""Poll for the result of a transfer
:param transfer_id: Unique identifier for the transfer
:return: If the transfer succeeded, it will return the result. If the
transfer failed, it will raise the exception associated to the
failure.
"""
self._transfer_states[transfer_id].wait_till_done()
exception = self._transfer_states[transfer_id].exception
if exception:
raise exception
return None
|
def poll_for_result(self, transfer_id):
"""Poll for the result of a transfer
:param transfer_id: Unique identifier for the transfer
:return: If the transfer succeeded, it will return the result. If the
transfer failed, it will raise the exception associated to the
failure.
"""
self._transfer_states[transfer_id].wait_till_done()
exception = self._transfer_states[transfer_id].exception
if exception:
raise exception
return None
|
[
"Poll",
"for",
"the",
"result",
"of",
"a",
"transfer"
] |
boto/s3transfer
|
python
|
https://github.com/boto/s3transfer/blob/2aead638c8385d8ae0b1756b2de17e8fad45fffa/s3transfer/processpool.py#L596-L608
|
[
"def",
"poll_for_result",
"(",
"self",
",",
"transfer_id",
")",
":",
"self",
".",
"_transfer_states",
"[",
"transfer_id",
"]",
".",
"wait_till_done",
"(",
")",
"exception",
"=",
"self",
".",
"_transfer_states",
"[",
"transfer_id",
"]",
".",
"exception",
"if",
"exception",
":",
"raise",
"exception",
"return",
"None"
] |
2aead638c8385d8ae0b1756b2de17e8fad45fffa
|
test
|
calculate_range_parameter
|
Calculate the range parameter for multipart downloads/copies
:type part_size: int
:param part_size: The size of the part
:type part_index: int
:param part_index: The index at which this part starts. This index starts
at zero
:type num_parts: int
:param num_parts: The total number of parts in the transfer
:returns: The value to use for Range parameter on downloads or
the CopySourceRange parameter for copies
|
s3transfer/utils.py
|
def calculate_range_parameter(part_size, part_index, num_parts,
total_size=None):
"""Calculate the range parameter for multipart downloads/copies
:type part_size: int
:param part_size: The size of the part
:type part_index: int
    :param part_index: The index at which this part starts. This index starts
at zero
:type num_parts: int
:param num_parts: The total number of parts in the transfer
:returns: The value to use for Range parameter on downloads or
the CopySourceRange parameter for copies
"""
# Used to calculate the Range parameter
start_range = part_index * part_size
if part_index == num_parts - 1:
end_range = ''
if total_size is not None:
end_range = str(total_size - 1)
else:
end_range = start_range + part_size - 1
range_param = 'bytes=%s-%s' % (start_range, end_range)
return range_param
|
def calculate_range_parameter(part_size, part_index, num_parts,
total_size=None):
"""Calculate the range parameter for multipart downloads/copies
:type part_size: int
:param part_size: The size of the part
:type part_index: int
    :param part_index: The index at which this part starts. This index starts
at zero
:type num_parts: int
:param num_parts: The total number of parts in the transfer
:returns: The value to use for Range parameter on downloads or
the CopySourceRange parameter for copies
"""
# Used to calculate the Range parameter
start_range = part_index * part_size
if part_index == num_parts - 1:
end_range = ''
if total_size is not None:
end_range = str(total_size - 1)
else:
end_range = start_range + part_size - 1
range_param = 'bytes=%s-%s' % (start_range, end_range)
return range_param
|
[
"Calculate",
"the",
"range",
"parameter",
"for",
"multipart",
"downloads",
"/",
"copies"
] |
boto/s3transfer
|
python
|
https://github.com/boto/s3transfer/blob/2aead638c8385d8ae0b1756b2de17e8fad45fffa/s3transfer/utils.py#L69-L95
|
[
"def",
"calculate_range_parameter",
"(",
"part_size",
",",
"part_index",
",",
"num_parts",
",",
"total_size",
"=",
"None",
")",
":",
"# Used to calculate the Range parameter",
"start_range",
"=",
"part_index",
"*",
"part_size",
"if",
"part_index",
"==",
"num_parts",
"-",
"1",
":",
"end_range",
"=",
"''",
"if",
"total_size",
"is",
"not",
"None",
":",
"end_range",
"=",
"str",
"(",
"total_size",
"-",
"1",
")",
"else",
":",
"end_range",
"=",
"start_range",
"+",
"part_size",
"-",
"1",
"range_param",
"=",
"'bytes=%s-%s'",
"%",
"(",
"start_range",
",",
"end_range",
")",
"return",
"range_param"
] |
2aead638c8385d8ae0b1756b2de17e8fad45fffa
|
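Concrete inputs make the branching in calculate_range_parameter() easy to see, in particular the open-ended range for the final part when total_size is unknown. The import path matches the path field of this entry.

from s3transfer.utils import calculate_range_parameter

print(calculate_range_parameter(8, 0, 3))                 # 'bytes=0-7'
print(calculate_range_parameter(8, 1, 3))                 # 'bytes=8-15'
print(calculate_range_parameter(8, 2, 3))                 # 'bytes=16-' (open-ended last part)
print(calculate_range_parameter(8, 2, 3, total_size=20))  # 'bytes=16-19'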
test
|
get_callbacks
|
Retrieves callbacks from a subscriber
:type transfer_future: s3transfer.futures.TransferFuture
:param transfer_future: The transfer future the subscriber is associated
to.
:type callback_type: str
:param callback_type: The type of callback to retrieve from the subscriber.
Valid types include:
* 'queued'
* 'progress'
* 'done'
:returns: A list of callbacks for the type specified. All callbacks are
preinjected with the transfer future.
|
s3transfer/utils.py
|
def get_callbacks(transfer_future, callback_type):
"""Retrieves callbacks from a subscriber
:type transfer_future: s3transfer.futures.TransferFuture
:param transfer_future: The transfer future the subscriber is associated
to.
:type callback_type: str
:param callback_type: The type of callback to retrieve from the subscriber.
Valid types include:
* 'queued'
* 'progress'
* 'done'
:returns: A list of callbacks for the type specified. All callbacks are
preinjected with the transfer future.
"""
callbacks = []
for subscriber in transfer_future.meta.call_args.subscribers:
callback_name = 'on_' + callback_type
if hasattr(subscriber, callback_name):
callbacks.append(
functools.partial(
getattr(subscriber, callback_name),
future=transfer_future
)
)
return callbacks
|
def get_callbacks(transfer_future, callback_type):
"""Retrieves callbacks from a subscriber
:type transfer_future: s3transfer.futures.TransferFuture
:param transfer_future: The transfer future the subscriber is associated
to.
:type callback_type: str
:param callback_type: The type of callback to retrieve from the subscriber.
Valid types include:
* 'queued'
* 'progress'
* 'done'
:returns: A list of callbacks for the type specified. All callbacks are
preinjected with the transfer future.
"""
callbacks = []
for subscriber in transfer_future.meta.call_args.subscribers:
callback_name = 'on_' + callback_type
if hasattr(subscriber, callback_name):
callbacks.append(
functools.partial(
getattr(subscriber, callback_name),
future=transfer_future
)
)
return callbacks
|
[
"Retrieves",
"callbacks",
"from",
"a",
"subscriber"
] |
boto/s3transfer
|
python
|
https://github.com/boto/s3transfer/blob/2aead638c8385d8ae0b1756b2de17e8fad45fffa/s3transfer/utils.py#L98-L125
|
[
"def",
"get_callbacks",
"(",
"transfer_future",
",",
"callback_type",
")",
":",
"callbacks",
"=",
"[",
"]",
"for",
"subscriber",
"in",
"transfer_future",
".",
"meta",
".",
"call_args",
".",
"subscribers",
":",
"callback_name",
"=",
"'on_'",
"+",
"callback_type",
"if",
"hasattr",
"(",
"subscriber",
",",
"callback_name",
")",
":",
"callbacks",
".",
"append",
"(",
"functools",
".",
"partial",
"(",
"getattr",
"(",
"subscriber",
",",
"callback_name",
")",
",",
"future",
"=",
"transfer_future",
")",
")",
"return",
"callbacks"
] |
2aead638c8385d8ae0b1756b2de17e8fad45fffa
|
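The lookup convention in get_callbacks() is just 'on_' + callback_type resolved with hasattr/getattr and pre-bound with functools.partial. A self-contained sketch; DemoSubscriber is illustrative, and real code would subclass s3transfer.subscribers.BaseSubscriber.

import functools

class DemoSubscriber:
    def on_progress(self, future, bytes_transferred, **kwargs):
        print('progress:', bytes_transferred)

subscriber = DemoSubscriber()
callback_name = 'on_' + 'progress'
if hasattr(subscriber, callback_name):
    # Pre-inject the transfer future, exactly as get_callbacks() does.
    cb = functools.partial(getattr(subscriber, callback_name), future=None)
    cb(bytes_transferred=42)   # prints: progress: 42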
test
|
get_filtered_dict
|
Gets a dictionary filtered by whitelisted keys
:param original_dict: The original dictionary of arguments to source keys
and values.
:param whitelisted_keys: A list of keys to include in the filtered
dictionary.
:returns: A dictionary containing key/values from the original dictionary
whose key was included in the whitelist
|
s3transfer/utils.py
|
def get_filtered_dict(original_dict, whitelisted_keys):
"""Gets a dictionary filtered by whitelisted keys
:param original_dict: The original dictionary of arguments to source keys
and values.
    :param whitelisted_keys: A list of keys to include in the filtered
dictionary.
:returns: A dictionary containing key/values from the original dictionary
whose key was included in the whitelist
"""
filtered_dict = {}
for key, value in original_dict.items():
if key in whitelisted_keys:
filtered_dict[key] = value
return filtered_dict
|
def get_filtered_dict(original_dict, whitelisted_keys):
"""Gets a dictionary filtered by whitelisted keys
:param original_dict: The original dictionary of arguments to source keys
and values.
    :param whitelisted_keys: A list of keys to include in the filtered
dictionary.
:returns: A dictionary containing key/values from the original dictionary
whose key was included in the whitelist
"""
filtered_dict = {}
for key, value in original_dict.items():
if key in whitelisted_keys:
filtered_dict[key] = value
return filtered_dict
|
[
"Gets",
"a",
"dictionary",
"filtered",
"by",
"whitelisted",
"keys"
] |
boto/s3transfer
|
python
|
https://github.com/boto/s3transfer/blob/2aead638c8385d8ae0b1756b2de17e8fad45fffa/s3transfer/utils.py#L144-L159
|
[
"def",
"get_filtered_dict",
"(",
"original_dict",
",",
"whitelisted_keys",
")",
":",
"filtered_dict",
"=",
"{",
"}",
"for",
"key",
",",
"value",
"in",
"original_dict",
".",
"items",
"(",
")",
":",
"if",
"key",
"in",
"whitelisted_keys",
":",
"filtered_dict",
"[",
"key",
"]",
"=",
"value",
"return",
"filtered_dict"
] |
2aead638c8385d8ae0b1756b2de17e8fad45fffa
|
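get_filtered_dict() is a plain whitelist filter; a quick usage example, assuming the import path shown in this entry:

from s3transfer.utils import get_filtered_dict

args = {'Bucket': 'b', 'Key': 'k', 'ACL': 'private'}
print(get_filtered_dict(args, ['Bucket', 'Key']))   # {'Bucket': 'b', 'Key': 'k'}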
test
|
CountCallbackInvoker.decrement
|
Decrement the count by one
|
s3transfer/utils.py
|
def decrement(self):
"""Decrement the count by one"""
with self._lock:
if self._count == 0:
raise RuntimeError(
'Counter is at zero. It cannot dip below zero')
self._count -= 1
if self._is_finalized and self._count == 0:
self._callback()
|
def decrement(self):
"""Decrement the count by one"""
with self._lock:
if self._count == 0:
raise RuntimeError(
'Counter is at zero. It cannot dip below zero')
self._count -= 1
if self._is_finalized and self._count == 0:
self._callback()
|
[
"Decrement",
"the",
"count",
"by",
"one"
] |
boto/s3transfer
|
python
|
https://github.com/boto/s3transfer/blob/2aead638c8385d8ae0b1756b2de17e8fad45fffa/s3transfer/utils.py#L219-L227
|
[
"def",
"decrement",
"(",
"self",
")",
":",
"with",
"self",
".",
"_lock",
":",
"if",
"self",
".",
"_count",
"==",
"0",
":",
"raise",
"RuntimeError",
"(",
"'Counter is at zero. It cannot dip below zero'",
")",
"self",
".",
"_count",
"-=",
"1",
"if",
"self",
".",
"_is_finalized",
"and",
"self",
".",
"_count",
"==",
"0",
":",
"self",
".",
"_callback",
"(",
")"
] |
2aead638c8385d8ae0b1756b2de17e8fad45fffa
|
test
|
CountCallbackInvoker.finalize
|
Finalize the counter
Once finalized, the counter can never be incremented and the callback
can be invoked once the count reaches zero
|
s3transfer/utils.py
|
def finalize(self):
"""Finalize the counter
    Once finalized, the counter can never be incremented and the callback
can be invoked once the count reaches zero
"""
with self._lock:
self._is_finalized = True
if self._count == 0:
self._callback()
|
def finalize(self):
"""Finalize the counter
    Once finalized, the counter can never be incremented and the callback
can be invoked once the count reaches zero
"""
with self._lock:
self._is_finalized = True
if self._count == 0:
self._callback()
|
[
"Finalize",
"the",
"counter"
] |
boto/s3transfer
|
python
|
https://github.com/boto/s3transfer/blob/2aead638c8385d8ae0b1756b2de17e8fad45fffa/s3transfer/utils.py#L229-L238
|
[
"def",
"finalize",
"(",
"self",
")",
":",
"with",
"self",
".",
"_lock",
":",
"self",
".",
"_is_finalized",
"=",
"True",
"if",
"self",
".",
"_count",
"==",
"0",
":",
"self",
".",
"_callback",
"(",
")"
] |
2aead638c8385d8ae0b1756b2de17e8fad45fffa
|
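decrement() and finalize() combine so the callback fires only after finalize() has been called and the count has drained to zero. A hedged sketch; the single-callback constructor and the increment() method are assumptions based on the methods shown in these entries.

from s3transfer.utils import CountCallbackInvoker

invoker = CountCallbackInvoker(lambda: print('all work accounted for'))
invoker.increment()   # register one unit of outstanding work
invoker.increment()
invoker.finalize()    # promise that no further increments will follow
invoker.decrement()   # count is 1; callback does not fire yet
invoker.decrement()   # count reaches 0 after finalize -> callback fires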
test
|
OSUtils.is_special_file
|
Checks to see if a file is a special UNIX file.
It checks if the file is a character special device, block special
device, FIFO, or socket.
:param filename: Name of the file
:returns: True if the file is a special file. False if it is not.
|
s3transfer/utils.py
|
def is_special_file(cls, filename):
"""Checks to see if a file is a special UNIX file.
It checks if the file is a character special device, block special
device, FIFO, or socket.
:param filename: Name of the file
    :returns: True if the file is a special file. False if it is not.
"""
# If it does not exist, it must be a new file so it cannot be
# a special file.
if not os.path.exists(filename):
return False
mode = os.stat(filename).st_mode
# Character special device.
if stat.S_ISCHR(mode):
return True
# Block special device
if stat.S_ISBLK(mode):
return True
# Named pipe / FIFO
if stat.S_ISFIFO(mode):
return True
# Socket.
if stat.S_ISSOCK(mode):
return True
return False
|
def is_special_file(cls, filename):
"""Checks to see if a file is a special UNIX file.
It checks if the file is a character special device, block special
device, FIFO, or socket.
:param filename: Name of the file
    :returns: True if the file is a special file. False if it is not.
"""
# If it does not exist, it must be a new file so it cannot be
# a special file.
if not os.path.exists(filename):
return False
mode = os.stat(filename).st_mode
# Character special device.
if stat.S_ISCHR(mode):
return True
# Block special device
if stat.S_ISBLK(mode):
return True
# Named pipe / FIFO
if stat.S_ISFIFO(mode):
return True
# Socket.
if stat.S_ISSOCK(mode):
return True
return False
|
[
"Checks",
"to",
"see",
"if",
"a",
"file",
"is",
"a",
"special",
"UNIX",
"file",
"."
] |
boto/s3transfer
|
python
|
https://github.com/boto/s3transfer/blob/2aead638c8385d8ae0b1756b2de17e8fad45fffa/s3transfer/utils.py#L273-L300
|
[
"def",
"is_special_file",
"(",
"cls",
",",
"filename",
")",
":",
"# If it does not exist, it must be a new file so it cannot be",
"# a special file.",
"if",
"not",
"os",
".",
"path",
".",
"exists",
"(",
"filename",
")",
":",
"return",
"False",
"mode",
"=",
"os",
".",
"stat",
"(",
"filename",
")",
".",
"st_mode",
"# Character special device.",
"if",
"stat",
".",
"S_ISCHR",
"(",
"mode",
")",
":",
"return",
"True",
"# Block special device",
"if",
"stat",
".",
"S_ISBLK",
"(",
"mode",
")",
":",
"return",
"True",
"# Named pipe / FIFO",
"if",
"stat",
".",
"S_ISFIFO",
"(",
"mode",
")",
":",
"return",
"True",
"# Socket.",
"if",
"stat",
".",
"S_ISSOCK",
"(",
"mode",
")",
":",
"return",
"True",
"return",
"False"
] |
2aead638c8385d8ae0b1756b2de17e8fad45fffa
|
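A POSIX-only demonstration of is_special_file() using a named pipe; os.mkfifo does not exist on Windows, so treat this as a Unix sketch.

import os
import tempfile

from s3transfer.utils import OSUtils

path = os.path.join(tempfile.mkdtemp(), 'fifo')
os.mkfifo(path)
print(OSUtils.is_special_file(path))         # True: the file is a FIFO
print(OSUtils.is_special_file(path + 'x'))   # False: the path does not exist
os.unlink(path)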
test
|
ReadFileChunk.from_filename
|
Convenience factory function to create from a filename.
:type start_byte: int
:param start_byte: The first byte from which to start reading.
:type chunk_size: int
:param chunk_size: The max chunk size to read. Trying to read
past the end of the chunk size will behave like you've
reached the end of the file.
:type full_file_size: int
:param full_file_size: The entire content length associated
with ``fileobj``.
:type callbacks: function(amount_read)
:param callbacks: Called whenever data is read from this object.
:type enable_callbacks: bool
:param enable_callbacks: Indicate whether to invoke callback
during read() calls.
:rtype: ``ReadFileChunk``
:return: A new instance of ``ReadFileChunk``
|
s3transfer/utils.py
|
def from_filename(cls, filename, start_byte, chunk_size, callbacks=None,
enable_callbacks=True):
"""Convenience factory function to create from a filename.
:type start_byte: int
:param start_byte: The first byte from which to start reading.
:type chunk_size: int
:param chunk_size: The max chunk size to read. Trying to read
        past the end of the chunk size will behave like you've
reached the end of the file.
:type full_file_size: int
:param full_file_size: The entire content length associated
with ``fileobj``.
:type callbacks: function(amount_read)
:param callbacks: Called whenever data is read from this object.
:type enable_callbacks: bool
:param enable_callbacks: Indicate whether to invoke callback
during read() calls.
:rtype: ``ReadFileChunk``
:return: A new instance of ``ReadFileChunk``
"""
f = open(filename, 'rb')
f.seek(start_byte)
file_size = os.fstat(f.fileno()).st_size
return cls(f, chunk_size, file_size, callbacks, enable_callbacks)
|
def from_filename(cls, filename, start_byte, chunk_size, callbacks=None,
enable_callbacks=True):
"""Convenience factory function to create from a filename.
:type start_byte: int
:param start_byte: The first byte from which to start reading.
:type chunk_size: int
:param chunk_size: The max chunk size to read. Trying to read
pass the end of the chunk size will behave like you've
reached the end of the file.
:type full_file_size: int
:param full_file_size: The entire content length associated
with ``fileobj``.
:type callbacks: function(amount_read)
:param callbacks: Called whenever data is read from this object.
:type enable_callbacks: bool
:param enable_callbacks: Indicate whether to invoke callback
during read() calls.
:rtype: ``ReadFileChunk``
:return: A new instance of ``ReadFileChunk``
"""
f = open(filename, 'rb')
f.seek(start_byte)
file_size = os.fstat(f.fileno()).st_size
return cls(f, chunk_size, file_size, callbacks, enable_callbacks)
|
[
"Convenience",
"factory",
"function",
"to",
"create",
"from",
"a",
"filename",
"."
] |
boto/s3transfer
|
python
|
https://github.com/boto/s3transfer/blob/2aead638c8385d8ae0b1756b2de17e8fad45fffa/s3transfer/utils.py#L432-L462
|
[
"def",
"from_filename",
"(",
"cls",
",",
"filename",
",",
"start_byte",
",",
"chunk_size",
",",
"callbacks",
"=",
"None",
",",
"enable_callbacks",
"=",
"True",
")",
":",
"f",
"=",
"open",
"(",
"filename",
",",
"'rb'",
")",
"f",
".",
"seek",
"(",
"start_byte",
")",
"file_size",
"=",
"os",
".",
"fstat",
"(",
"f",
".",
"fileno",
"(",
")",
")",
".",
"st_size",
"return",
"cls",
"(",
"f",
",",
"chunk_size",
",",
"file_size",
",",
"callbacks",
",",
"enable_callbacks",
")"
] |
2aead638c8385d8ae0b1756b2de17e8fad45fffa
|
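from_filename() opens the file, seeks, and hands off to the ReadFileChunk constructor, so reads are bounded by chunk_size regardless of the file's length. A small usage sketch with illustrative paths and sizes:

import tempfile

from s3transfer.utils import ReadFileChunk

with tempfile.NamedTemporaryFile(delete=False) as f:
    f.write(b'0123456789')

chunk = ReadFileChunk.from_filename(f.name, start_byte=2, chunk_size=4)
print(chunk.read())   # b'2345': reading stops at the 4-byte chunk boundary
chunk.close()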
test
|
TaskSemaphore.acquire
|
Acquire the semaphore
:param tag: A tag identifying what is acquiring the semaphore. Note
that this is not really needed to directly use this class but is
needed for API compatibility with the SlidingWindowSemaphore
implementation.
:param blocking: If True, block until it can be acquired. If False,
do not block and raise an exception if it cannot be acquired.
:returns: A token (can be None) to use when releasing the semaphore
|
s3transfer/utils.py
|
def acquire(self, tag, blocking=True):
"""Acquire the semaphore
:param tag: A tag identifying what is acquiring the semaphore. Note
that this is not really needed to directly use this class but is
needed for API compatibility with the SlidingWindowSemaphore
implementation.
    :param blocking: If True, block until it can be acquired. If False,
        do not block and raise an exception if it cannot be acquired.
:returns: A token (can be None) to use when releasing the semaphore
"""
logger.debug("Acquiring %s", tag)
if not self._semaphore.acquire(blocking):
raise NoResourcesAvailable("Cannot acquire tag '%s'" % tag)
|
def acquire(self, tag, blocking=True):
"""Acquire the semaphore
:param tag: A tag identifying what is acquiring the semaphore. Note
that this is not really needed to directly use this class but is
needed for API compatibility with the SlidingWindowSemaphore
implementation.
    :param blocking: If True, block until it can be acquired. If False,
        do not block and raise an exception if it cannot be acquired.
:returns: A token (can be None) to use when releasing the semaphore
"""
logger.debug("Acquiring %s", tag)
if not self._semaphore.acquire(blocking):
raise NoResourcesAvailable("Cannot acquire tag '%s'" % tag)
|
[
"Acquire",
"the",
"semaphore"
] |
boto/s3transfer
|
python
|
https://github.com/boto/s3transfer/blob/2aead638c8385d8ae0b1756b2de17e8fad45fffa/s3transfer/utils.py#L562-L576
|
[
"def",
"acquire",
"(",
"self",
",",
"tag",
",",
"blocking",
"=",
"True",
")",
":",
"logger",
".",
"debug",
"(",
"\"Acquiring %s\"",
",",
"tag",
")",
"if",
"not",
"self",
".",
"_semaphore",
".",
"acquire",
"(",
"blocking",
")",
":",
"raise",
"NoResourcesAvailable",
"(",
"\"Cannot acquire tag '%s'\"",
"%",
"tag",
")"
] |
2aead638c8385d8ae0b1756b2de17e8fad45fffa
|
test
|
TaskSemaphore.release
|
Release the semaphore
:param tag: A tag identifying what is releasing the semaphore
:param acquire_token: The token returned from when the semaphore was
acquired. Note that this is not really needed to directly use this
class but is needed for API compatibility with the
SlidingWindowSemaphore implementation.
|
s3transfer/utils.py
|
def release(self, tag, acquire_token):
"""Release the semaphore
:param tag: A tag identifying what is releasing the semaphore
:param acquire_token: The token returned from when the semaphore was
acquired. Note that this is not really needed to directly use this
class but is needed for API compatibility with the
SlidingWindowSemaphore implementation.
"""
logger.debug("Releasing acquire %s/%s" % (tag, acquire_token))
self._semaphore.release()
|
def release(self, tag, acquire_token):
"""Release the semaphore
:param tag: A tag identifying what is releasing the semaphore
:param acquire_token: The token returned from when the semaphore was
acquired. Note that this is not really needed to directly use this
class but is needed for API compatibility with the
SlidingWindowSemaphore implementation.
"""
logger.debug("Releasing acquire %s/%s" % (tag, acquire_token))
self._semaphore.release()
|
[
"Release",
"the",
"semaphore"
] |
boto/s3transfer
|
python
|
https://github.com/boto/s3transfer/blob/2aead638c8385d8ae0b1756b2de17e8fad45fffa/s3transfer/utils.py#L578-L588
|
[
"def",
"release",
"(",
"self",
",",
"tag",
",",
"acquire_token",
")",
":",
"logger",
".",
"debug",
"(",
"\"Releasing acquire %s/%s\"",
"%",
"(",
"tag",
",",
"acquire_token",
")",
")",
"self",
".",
"_semaphore",
".",
"release",
"(",
")"
] |
2aead638c8385d8ae0b1756b2de17e8fad45fffa
|
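acquire() and release() pair as shown below. The integer-count constructor and the import path for NoResourcesAvailable are assumptions; per the docstrings above, the token for this plain semaphore may simply be None.

from s3transfer.utils import TaskSemaphore
from s3transfer.exceptions import NoResourcesAvailable

sem = TaskSemaphore(1)
token = sem.acquire('transfer-1')            # holds the only slot
try:
    sem.acquire('transfer-2', blocking=False)
except NoResourcesAvailable:
    print('no free slot')                    # the slot is already held
finally:
    sem.release('transfer-1', token)         # releases the slot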
test
|
ChunksizeAdjuster.adjust_chunksize
|
Get a chunksize close to current that fits within all S3 limits.
:type current_chunksize: int
:param current_chunksize: The currently configured chunksize.
:type file_size: int or None
:param file_size: The size of the file to upload. This might be None
if the object being transferred has an unknown size.
:returns: A valid chunksize that fits within configured limits.
|
s3transfer/utils.py
|
def adjust_chunksize(self, current_chunksize, file_size=None):
"""Get a chunksize close to current that fits within all S3 limits.
:type current_chunksize: int
:param current_chunksize: The currently configured chunksize.
:type file_size: int or None
:param file_size: The size of the file to upload. This might be None
if the object being transferred has an unknown size.
:returns: A valid chunksize that fits within configured limits.
"""
chunksize = current_chunksize
if file_size is not None:
chunksize = self._adjust_for_max_parts(chunksize, file_size)
return self._adjust_for_chunksize_limits(chunksize)
|
def adjust_chunksize(self, current_chunksize, file_size=None):
"""Get a chunksize close to current that fits within all S3 limits.
:type current_chunksize: int
:param current_chunksize: The currently configured chunksize.
:type file_size: int or None
:param file_size: The size of the file to upload. This might be None
if the object being transferred has an unknown size.
:returns: A valid chunksize that fits within configured limits.
"""
chunksize = current_chunksize
if file_size is not None:
chunksize = self._adjust_for_max_parts(chunksize, file_size)
return self._adjust_for_chunksize_limits(chunksize)
|
[
"Get",
"a",
"chunksize",
"close",
"to",
"current",
"that",
"fits",
"within",
"all",
"S3",
"limits",
"."
] |
boto/s3transfer
|
python
|
https://github.com/boto/s3transfer/blob/2aead638c8385d8ae0b1756b2de17e8fad45fffa/s3transfer/utils.py#L693-L708
|
[
"def",
"adjust_chunksize",
"(",
"self",
",",
"current_chunksize",
",",
"file_size",
"=",
"None",
")",
":",
"chunksize",
"=",
"current_chunksize",
"if",
"file_size",
"is",
"not",
"None",
":",
"chunksize",
"=",
"self",
".",
"_adjust_for_max_parts",
"(",
"chunksize",
",",
"file_size",
")",
"return",
"self",
".",
"_adjust_for_chunksize_limits",
"(",
"chunksize",
")"
] |
2aead638c8385d8ae0b1756b2de17e8fad45fffa
|
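adjust_chunksize() exists mainly to keep huge transfers under S3's part-count and part-size limits; a small part size on a very large file must grow. The zero-argument constructor is an assumption (the real defaults come from constants in s3transfer's utils module).

from s3transfer.utils import ChunksizeAdjuster

MB = 1024 * 1024
adjuster = ChunksizeAdjuster()
# 8 MiB parts on a 1 TiB file would mean ~131,072 parts, far beyond the
# 10,000-part limit, so the returned chunksize is larger than 8 MiB.
print(adjuster.adjust_chunksize(8 * MB, file_size=1024 * 1024 * MB))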
test
|
DownloadOutputManager.queue_file_io_task
|
Queue IO write for submission to the IO executor.
This method accepts an IO executor and information about the
downloaded data, and handles submitting this to the IO executor.
This method may defer submission to the IO executor if necessary.
|
s3transfer/download.py
|
def queue_file_io_task(self, fileobj, data, offset):
"""Queue IO write for submission to the IO executor.
This method accepts an IO executor and information about the
downloaded data, and handles submitting this to the IO executor.
This method may defer submission to the IO executor if necessary.
"""
self._transfer_coordinator.submit(
self._io_executor,
self.get_io_write_task(fileobj, data, offset)
)
|
def queue_file_io_task(self, fileobj, data, offset):
"""Queue IO write for submission to the IO executor.
This method accepts an IO executor and information about the
downloaded data, and handles submitting this to the IO executor.
This method may defer submission to the IO executor if necessary.
"""
self._transfer_coordinator.submit(
self._io_executor,
self.get_io_write_task(fileobj, data, offset)
)
|
[
"Queue",
"IO",
"write",
"for",
"submission",
"to",
"the",
"IO",
"executor",
"."
] |
boto/s3transfer
|
python
|
https://github.com/boto/s3transfer/blob/2aead638c8385d8ae0b1756b2de17e8fad45fffa/s3transfer/download.py#L91-L103
|
[
"def",
"queue_file_io_task",
"(",
"self",
",",
"fileobj",
",",
"data",
",",
"offset",
")",
":",
"self",
".",
"_transfer_coordinator",
".",
"submit",
"(",
"self",
".",
"_io_executor",
",",
"self",
".",
"get_io_write_task",
"(",
"fileobj",
",",
"data",
",",
"offset",
")",
")"
] |
2aead638c8385d8ae0b1756b2de17e8fad45fffa
|
test
|
DownloadOutputManager.get_io_write_task
|
Get an IO write task for the requested set of data
This task can be run immediately or be submitted to the IO executor
for it to run.
:type fileobj: file-like object
:param fileobj: The file-like object to write to
:type data: bytes
:param data: The data to write out
:type offset: integer
:param offset: The offset to write the data to in the file-like object
:returns: An IO task to be used to write data to a file-like object
|
s3transfer/download.py
|
def get_io_write_task(self, fileobj, data, offset):
"""Get an IO write task for the requested set of data
    This task can be run immediately or be submitted to the IO executor
for it to run.
:type fileobj: file-like object
:param fileobj: The file-like object to write to
:type data: bytes
:param data: The data to write out
:type offset: integer
:param offset: The offset to write the data to in the file-like object
:returns: An IO task to be used to write data to a file-like object
"""
return IOWriteTask(
self._transfer_coordinator,
main_kwargs={
'fileobj': fileobj,
'data': data,
'offset': offset,
}
)
|
def get_io_write_task(self, fileobj, data, offset):
"""Get an IO write task for the requested set of data
    This task can be run immediately or be submitted to the IO executor
for it to run.
:type fileobj: file-like object
:param fileobj: The file-like object to write to
:type data: bytes
:param data: The data to write out
:type offset: integer
:param offset: The offset to write the data to in the file-like object
:returns: An IO task to be used to write data to a file-like object
"""
return IOWriteTask(
self._transfer_coordinator,
main_kwargs={
'fileobj': fileobj,
'data': data,
'offset': offset,
}
)
|
[
"Get",
"an",
"IO",
"write",
"task",
"for",
"the",
"requested",
"set",
"of",
"data"
] |
boto/s3transfer
|
python
|
https://github.com/boto/s3transfer/blob/2aead638c8385d8ae0b1756b2de17e8fad45fffa/s3transfer/download.py#L105-L129
|
[
"def",
"get_io_write_task",
"(",
"self",
",",
"fileobj",
",",
"data",
",",
"offset",
")",
":",
"return",
"IOWriteTask",
"(",
"self",
".",
"_transfer_coordinator",
",",
"main_kwargs",
"=",
"{",
"'fileobj'",
":",
"fileobj",
",",
"'data'",
":",
"data",
",",
"'offset'",
":",
"offset",
",",
"}",
")"
] |
2aead638c8385d8ae0b1756b2de17e8fad45fffa
|
test
|
DownloadSubmissionTask._get_download_output_manager_cls
|
Retrieves a class for managing output for a download
:type transfer_future: s3transfer.futures.TransferFuture
:param transfer_future: The transfer future for the request
:type osutil: s3transfer.utils.OSUtils
:param osutil: The os utility associated to the transfer
:rtype: class of DownloadOutputManager
:returns: The appropriate class to use for managing a specific type of
input for downloads.
|
s3transfer/download.py
|
def _get_download_output_manager_cls(self, transfer_future, osutil):
"""Retrieves a class for managing output for a download
:type transfer_future: s3transfer.futures.TransferFuture
:param transfer_future: The transfer future for the request
:type osutil: s3transfer.utils.OSUtils
:param osutil: The os utility associated to the transfer
:rtype: class of DownloadOutputManager
:returns: The appropriate class to use for managing a specific type of
input for downloads.
"""
download_manager_resolver_chain = [
DownloadSpecialFilenameOutputManager,
DownloadFilenameOutputManager,
DownloadSeekableOutputManager,
DownloadNonSeekableOutputManager,
]
fileobj = transfer_future.meta.call_args.fileobj
for download_manager_cls in download_manager_resolver_chain:
if download_manager_cls.is_compatible(fileobj, osutil):
return download_manager_cls
raise RuntimeError(
'Output %s of type: %s is not supported.' % (
fileobj, type(fileobj)))
|
def _get_download_output_manager_cls(self, transfer_future, osutil):
"""Retrieves a class for managing output for a download
:type transfer_future: s3transfer.futures.TransferFuture
:param transfer_future: The transfer future for the request
:type osutil: s3transfer.utils.OSUtils
:param osutil: The os utility associated to the transfer
:rtype: class of DownloadOutputManager
:returns: The appropriate class to use for managing a specific type of
input for downloads.
"""
download_manager_resolver_chain = [
DownloadSpecialFilenameOutputManager,
DownloadFilenameOutputManager,
DownloadSeekableOutputManager,
DownloadNonSeekableOutputManager,
]
fileobj = transfer_future.meta.call_args.fileobj
for download_manager_cls in download_manager_resolver_chain:
if download_manager_cls.is_compatible(fileobj, osutil):
return download_manager_cls
raise RuntimeError(
'Output %s of type: %s is not supported.' % (
fileobj, type(fileobj)))
|
[
"Retrieves",
"a",
"class",
"for",
"managing",
"output",
"for",
"a",
"download"
] |
boto/s3transfer
|
python
|
https://github.com/boto/s3transfer/blob/2aead638c8385d8ae0b1756b2de17e8fad45fffa/s3transfer/download.py#L283-L309
|
[
"def",
"_get_download_output_manager_cls",
"(",
"self",
",",
"transfer_future",
",",
"osutil",
")",
":",
"download_manager_resolver_chain",
"=",
"[",
"DownloadSpecialFilenameOutputManager",
",",
"DownloadFilenameOutputManager",
",",
"DownloadSeekableOutputManager",
",",
"DownloadNonSeekableOutputManager",
",",
"]",
"fileobj",
"=",
"transfer_future",
".",
"meta",
".",
"call_args",
".",
"fileobj",
"for",
"download_manager_cls",
"in",
"download_manager_resolver_chain",
":",
"if",
"download_manager_cls",
".",
"is_compatible",
"(",
"fileobj",
",",
"osutil",
")",
":",
"return",
"download_manager_cls",
"raise",
"RuntimeError",
"(",
"'Output %s of type: %s is not supported.'",
"%",
"(",
"fileobj",
",",
"type",
"(",
"fileobj",
")",
")",
")"
] |
2aead638c8385d8ae0b1756b2de17e8fad45fffa
|
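_get_download_output_manager_cls() is a resolver chain: walk an ordered list of candidate classes and return the first whose compatibility check accepts the output. A standalone sketch of the pattern with illustrative classes (the real is_compatible also takes an osutil argument):

class _Filename:
    @classmethod
    def is_compatible(cls, fileobj):
        return isinstance(fileobj, str)

class _Seekable:
    @classmethod
    def is_compatible(cls, fileobj):
        return hasattr(fileobj, 'seek')

def resolve(fileobj, chain=(_Filename, _Seekable)):
    for candidate in chain:
        if candidate.is_compatible(fileobj):
            return candidate
    raise RuntimeError(
        'Output %s of type: %s is not supported.' % (fileobj, type(fileobj)))

print(resolve('/tmp/out').__name__)   # _Filename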
test
|
DownloadSubmissionTask._submit
|
:param client: The client associated with the transfer manager
:type config: s3transfer.manager.TransferConfig
:param config: The transfer config associated with the transfer
manager
:type osutil: s3transfer.utils.OSUtil
:param osutil: The os utility associated to the transfer manager
:type request_executor: s3transfer.futures.BoundedExecutor
:param request_executor: The request executor associated with the
transfer manager
:type io_executor: s3transfer.futures.BoundedExecutor
:param io_executor: The io executor associated with the
transfer manager
:type transfer_future: s3transfer.futures.TransferFuture
:param transfer_future: The transfer future associated with the
transfer request that tasks are being submitted for
:type bandwidth_limiter: s3transfer.bandwidth.BandwidthLimiter
:param bandwidth_limiter: The bandwidth limiter to use when
downloading streams
|
s3transfer/download.py
|
def _submit(self, client, config, osutil, request_executor, io_executor,
transfer_future, bandwidth_limiter=None):
"""
:param client: The client associated with the transfer manager
:type config: s3transfer.manager.TransferConfig
:param config: The transfer config associated with the transfer
manager
:type osutil: s3transfer.utils.OSUtil
:param osutil: The os utility associated to the transfer manager
:type request_executor: s3transfer.futures.BoundedExecutor
:param request_executor: The request executor associated with the
transfer manager
:type io_executor: s3transfer.futures.BoundedExecutor
:param io_executor: The io executor associated with the
transfer manager
:type transfer_future: s3transfer.futures.TransferFuture
:param transfer_future: The transfer future associated with the
transfer request that tasks are being submitted for
:type bandwidth_limiter: s3transfer.bandwidth.BandwidthLimiter
:param bandwidth_limiter: The bandwidth limiter to use when
downloading streams
"""
if transfer_future.meta.size is None:
# If a size was not provided figure out the size for the
# user.
response = client.head_object(
Bucket=transfer_future.meta.call_args.bucket,
Key=transfer_future.meta.call_args.key,
**transfer_future.meta.call_args.extra_args
)
transfer_future.meta.provide_transfer_size(
response['ContentLength'])
download_output_manager = self._get_download_output_manager_cls(
transfer_future, osutil)(osutil, self._transfer_coordinator,
io_executor)
# If it is greater than threshold do a ranged download, otherwise
# do a regular GetObject download.
if transfer_future.meta.size < config.multipart_threshold:
self._submit_download_request(
client, config, osutil, request_executor, io_executor,
download_output_manager, transfer_future, bandwidth_limiter)
else:
self._submit_ranged_download_request(
client, config, osutil, request_executor, io_executor,
download_output_manager, transfer_future, bandwidth_limiter)
|
def _submit(self, client, config, osutil, request_executor, io_executor,
transfer_future, bandwidth_limiter=None):
"""
:param client: The client associated with the transfer manager
:type config: s3transfer.manager.TransferConfig
:param config: The transfer config associated with the transfer
manager
:type osutil: s3transfer.utils.OSUtil
:param osutil: The os utility associated to the transfer manager
:type request_executor: s3transfer.futures.BoundedExecutor
:param request_executor: The request executor associated with the
transfer manager
:type io_executor: s3transfer.futures.BoundedExecutor
:param io_executor: The io executor associated with the
transfer manager
:type transfer_future: s3transfer.futures.TransferFuture
:param transfer_future: The transfer future associated with the
transfer request that tasks are being submitted for
:type bandwidth_limiter: s3transfer.bandwidth.BandwidthLimiter
:param bandwidth_limiter: The bandwidth limiter to use when
downloading streams
"""
if transfer_future.meta.size is None:
# If a size was not provided figure out the size for the
# user.
response = client.head_object(
Bucket=transfer_future.meta.call_args.bucket,
Key=transfer_future.meta.call_args.key,
**transfer_future.meta.call_args.extra_args
)
transfer_future.meta.provide_transfer_size(
response['ContentLength'])
download_output_manager = self._get_download_output_manager_cls(
transfer_future, osutil)(osutil, self._transfer_coordinator,
io_executor)
# If it is greater than threshold do a ranged download, otherwise
# do a regular GetObject download.
if transfer_future.meta.size < config.multipart_threshold:
self._submit_download_request(
client, config, osutil, request_executor, io_executor,
download_output_manager, transfer_future, bandwidth_limiter)
else:
self._submit_ranged_download_request(
client, config, osutil, request_executor, io_executor,
download_output_manager, transfer_future, bandwidth_limiter)
|
[
":",
"param",
"client",
":",
"The",
"client",
"associated",
"with",
"the",
"transfer",
"manager"
] |
boto/s3transfer
|
python
|
https://github.com/boto/s3transfer/blob/2aead638c8385d8ae0b1756b2de17e8fad45fffa/s3transfer/download.py#L311-L363
|
[
"def",
"_submit",
"(",
"self",
",",
"client",
",",
"config",
",",
"osutil",
",",
"request_executor",
",",
"io_executor",
",",
"transfer_future",
",",
"bandwidth_limiter",
"=",
"None",
")",
":",
"if",
"transfer_future",
".",
"meta",
".",
"size",
"is",
"None",
":",
"# If a size was not provided figure out the size for the",
"# user.",
"response",
"=",
"client",
".",
"head_object",
"(",
"Bucket",
"=",
"transfer_future",
".",
"meta",
".",
"call_args",
".",
"bucket",
",",
"Key",
"=",
"transfer_future",
".",
"meta",
".",
"call_args",
".",
"key",
",",
"*",
"*",
"transfer_future",
".",
"meta",
".",
"call_args",
".",
"extra_args",
")",
"transfer_future",
".",
"meta",
".",
"provide_transfer_size",
"(",
"response",
"[",
"'ContentLength'",
"]",
")",
"download_output_manager",
"=",
"self",
".",
"_get_download_output_manager_cls",
"(",
"transfer_future",
",",
"osutil",
")",
"(",
"osutil",
",",
"self",
".",
"_transfer_coordinator",
",",
"io_executor",
")",
"# If it is greater than threshold do a ranged download, otherwise",
"# do a regular GetObject download.",
"if",
"transfer_future",
".",
"meta",
".",
"size",
"<",
"config",
".",
"multipart_threshold",
":",
"self",
".",
"_submit_download_request",
"(",
"client",
",",
"config",
",",
"osutil",
",",
"request_executor",
",",
"io_executor",
",",
"download_output_manager",
",",
"transfer_future",
",",
"bandwidth_limiter",
")",
"else",
":",
"self",
".",
"_submit_ranged_download_request",
"(",
"client",
",",
"config",
",",
"osutil",
",",
"request_executor",
",",
"io_executor",
",",
"download_output_manager",
",",
"transfer_future",
",",
"bandwidth_limiter",
")"
] |
2aead638c8385d8ae0b1756b2de17e8fad45fffa
|
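The size branch in _submit() reduces to one comparison: anything below multipart_threshold is fetched with a single GetObject, anything at or above it is split into ranged GETs. Illustrative values:

multipart_threshold = 8 * 1024 * 1024   # hypothetical config value
for size in (1024, 64 * 1024 * 1024):
    strategy = ('single GetObject' if size < multipart_threshold
                else 'ranged download')
    print(size, '->', strategy)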
test
|
GetObjectTask._main
|
Downloads an object and places content into io queue
:param client: The client to use when calling GetObject
:param bucket: The bucket to download from
:param key: The key to download from
:param fileobj: The file handle to write content to
:param extra_args: Any extra arguments to include in the GetObject request
:param callbacks: List of progress callbacks to invoke on download
:param max_attempts: The number of retries to do when downloading
:param download_output_manager: The download output manager associated
with the current download.
:param io_chunksize: The size of each io chunk to read from the
download stream and queue in the io queue.
:param start_index: The location in the file to start writing the
content of the key to.
:param bandwidth_limiter: The bandwidth limiter to use when throttling
the downloading of data in streams.
|
s3transfer/download.py
|
def _main(self, client, bucket, key, fileobj, extra_args, callbacks,
max_attempts, download_output_manager, io_chunksize,
start_index=0, bandwidth_limiter=None):
"""Downloads an object and places content into io queue
:param client: The client to use when calling GetObject
:param bucket: The bucket to download from
:param key: The key to download from
:param fileobj: The file handle to write content to
    :param extra_args: Any extra arguments to include in the GetObject request
:param callbacks: List of progress callbacks to invoke on download
:param max_attempts: The number of retries to do when downloading
:param download_output_manager: The download output manager associated
with the current download.
:param io_chunksize: The size of each io chunk to read from the
download stream and queue in the io queue.
:param start_index: The location in the file to start writing the
content of the key to.
:param bandwidth_limiter: The bandwidth limiter to use when throttling
the downloading of data in streams.
"""
last_exception = None
for i in range(max_attempts):
try:
response = client.get_object(
Bucket=bucket, Key=key, **extra_args)
streaming_body = StreamReaderProgress(
response['Body'], callbacks)
if bandwidth_limiter:
streaming_body = \
bandwidth_limiter.get_bandwith_limited_stream(
streaming_body, self._transfer_coordinator)
current_index = start_index
chunks = DownloadChunkIterator(streaming_body, io_chunksize)
for chunk in chunks:
# If the transfer is done because of a cancellation
# or error somewhere else, stop trying to submit more
# data to be written and break out of the download.
if not self._transfer_coordinator.done():
self._handle_io(
download_output_manager, fileobj, chunk,
current_index
)
current_index += len(chunk)
else:
return
return
except S3_RETRYABLE_DOWNLOAD_ERRORS as e:
logger.debug("Retrying exception caught (%s), "
"retrying request, (attempt %s / %s)", e, i,
max_attempts, exc_info=True)
last_exception = e
# Also invoke the progress callbacks to indicate that we
# are trying to download the stream again and all progress
# for this GetObject has been lost.
invoke_progress_callbacks(
callbacks, start_index - current_index)
continue
raise RetriesExceededError(last_exception)
|
def _main(self, client, bucket, key, fileobj, extra_args, callbacks,
max_attempts, download_output_manager, io_chunksize,
start_index=0, bandwidth_limiter=None):
"""Downloads an object and places content into io queue
:param client: The client to use when calling GetObject
:param bucket: The bucket to download from
:param key: The key to download from
:param fileobj: The file handle to write content to
    :param extra_args: Any extra arguments to include in GetObject request
:param callbacks: List of progress callbacks to invoke on download
:param max_attempts: The number of retries to do when downloading
:param download_output_manager: The download output manager associated
with the current download.
:param io_chunksize: The size of each io chunk to read from the
download stream and queue in the io queue.
:param start_index: The location in the file to start writing the
content of the key to.
:param bandwidth_limiter: The bandwidth limiter to use when throttling
the downloading of data in streams.
"""
last_exception = None
for i in range(max_attempts):
try:
response = client.get_object(
Bucket=bucket, Key=key, **extra_args)
streaming_body = StreamReaderProgress(
response['Body'], callbacks)
if bandwidth_limiter:
streaming_body = \
bandwidth_limiter.get_bandwith_limited_stream(
streaming_body, self._transfer_coordinator)
current_index = start_index
chunks = DownloadChunkIterator(streaming_body, io_chunksize)
for chunk in chunks:
# If the transfer is done because of a cancellation
# or error somewhere else, stop trying to submit more
# data to be written and break out of the download.
if not self._transfer_coordinator.done():
self._handle_io(
download_output_manager, fileobj, chunk,
current_index
)
current_index += len(chunk)
else:
return
return
except S3_RETRYABLE_DOWNLOAD_ERRORS as e:
logger.debug("Retrying exception caught (%s), "
"retrying request, (attempt %s / %s)", e, i,
max_attempts, exc_info=True)
last_exception = e
# Also invoke the progress callbacks to indicate that we
# are trying to download the stream again and all progress
# for this GetObject has been lost.
invoke_progress_callbacks(
callbacks, start_index - current_index)
continue
raise RetriesExceededError(last_exception)
|
[
"Downloads",
"an",
"object",
"and",
"places",
"content",
"into",
"io",
"queue"
] |
boto/s3transfer
|
python
|
https://github.com/boto/s3transfer/blob/2aead638c8385d8ae0b1756b2de17e8fad45fffa/s3transfer/download.py#L488-L547
|
[
"def",
"_main",
"(",
"self",
",",
"client",
",",
"bucket",
",",
"key",
",",
"fileobj",
",",
"extra_args",
",",
"callbacks",
",",
"max_attempts",
",",
"download_output_manager",
",",
"io_chunksize",
",",
"start_index",
"=",
"0",
",",
"bandwidth_limiter",
"=",
"None",
")",
":",
"last_exception",
"=",
"None",
"for",
"i",
"in",
"range",
"(",
"max_attempts",
")",
":",
"try",
":",
"response",
"=",
"client",
".",
"get_object",
"(",
"Bucket",
"=",
"bucket",
",",
"Key",
"=",
"key",
",",
"*",
"*",
"extra_args",
")",
"streaming_body",
"=",
"StreamReaderProgress",
"(",
"response",
"[",
"'Body'",
"]",
",",
"callbacks",
")",
"if",
"bandwidth_limiter",
":",
"streaming_body",
"=",
"bandwidth_limiter",
".",
"get_bandwith_limited_stream",
"(",
"streaming_body",
",",
"self",
".",
"_transfer_coordinator",
")",
"current_index",
"=",
"start_index",
"chunks",
"=",
"DownloadChunkIterator",
"(",
"streaming_body",
",",
"io_chunksize",
")",
"for",
"chunk",
"in",
"chunks",
":",
"# If the transfer is done because of a cancellation",
"# or error somewhere else, stop trying to submit more",
"# data to be written and break out of the download.",
"if",
"not",
"self",
".",
"_transfer_coordinator",
".",
"done",
"(",
")",
":",
"self",
".",
"_handle_io",
"(",
"download_output_manager",
",",
"fileobj",
",",
"chunk",
",",
"current_index",
")",
"current_index",
"+=",
"len",
"(",
"chunk",
")",
"else",
":",
"return",
"return",
"except",
"S3_RETRYABLE_DOWNLOAD_ERRORS",
"as",
"e",
":",
"logger",
".",
"debug",
"(",
"\"Retrying exception caught (%s), \"",
"\"retrying request, (attempt %s / %s)\"",
",",
"e",
",",
"i",
",",
"max_attempts",
",",
"exc_info",
"=",
"True",
")",
"last_exception",
"=",
"e",
"# Also invoke the progress callbacks to indicate that we",
"# are trying to download the stream again and all progress",
"# for this GetObject has been lost.",
"invoke_progress_callbacks",
"(",
"callbacks",
",",
"start_index",
"-",
"current_index",
")",
"continue",
"raise",
"RetriesExceededError",
"(",
"last_exception",
")"
] |
2aead638c8385d8ae0b1756b2de17e8fad45fffa
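A standalone sketch of the retry-and-rewind pattern GetObjectTask._main uses: on a retryable error the request is reissued from the start index and progress callbacks are rolled back by the bytes already counted. The fetch_stream and write_chunk callables and the RETRYABLE tuple below are illustrative placeholders, not part of s3transfer's API.

import logging

logger = logging.getLogger(__name__)

# Stand-in for S3_RETRYABLE_DOWNLOAD_ERRORS.
RETRYABLE = (ConnectionError, TimeoutError)

def download_with_retries(fetch_stream, write_chunk, callbacks,
                          max_attempts=5, chunksize=8192, start_index=0):
    last_exception = None
    for attempt in range(max_attempts):
        current_index = start_index
        try:
            stream = fetch_stream()  # e.g. wraps client.get_object(...)['Body']
            for chunk in iter(lambda: stream.read(chunksize), b''):
                write_chunk(chunk, current_index)
                current_index += len(chunk)
            return
        except RETRYABLE as e:
            logger.debug('Retryable error %s (attempt %s/%s)',
                         e, attempt + 1, max_attempts, exc_info=True)
            last_exception = e
            # Roll progress back: report a negative delta equal to the
            # bytes counted during the failed attempt.
            for callback in callbacks:
                callback(start_index - current_index)
    raise RuntimeError('max retries exceeded') from last_exception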
|
test
|
IOWriteTask._main
|
Pulls off an io queue to write contents to a file
:param fileobj: The file handle to write content to
:param data: The data to write
:param offset: The offset to write the data to.
|
s3transfer/download.py
|
def _main(self, fileobj, data, offset):
"""Pulls off an io queue to write contents to a file
:param fileobj: The file handle to write content to
:param data: The data to write
:param offset: The offset to write the data to.
"""
fileobj.seek(offset)
fileobj.write(data)
|
def _main(self, fileobj, data, offset):
"""Pulls off an io queue to write contents to a file
:param fileobj: The file handle to write content to
:param data: The data to write
:param offset: The offset to write the data to.
"""
fileobj.seek(offset)
fileobj.write(data)
|
[
"Pulls",
"off",
"an",
"io",
"queue",
"to",
"write",
"contents",
"to",
"a",
"file"
] |
boto/s3transfer
|
python
|
https://github.com/boto/s3transfer/blob/2aead638c8385d8ae0b1756b2de17e8fad45fffa/s3transfer/download.py#L566-L574
|
[
"def",
"_main",
"(",
"self",
",",
"fileobj",
",",
"data",
",",
"offset",
")",
":",
"fileobj",
".",
"seek",
"(",
"offset",
")",
"fileobj",
".",
"write",
"(",
"data",
")"
] |
2aead638c8385d8ae0b1756b2de17e8fad45fffa
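The seek-then-write pattern above is what lets several download workers fill disjoint regions of one shared file handle. A minimal demonstration with an in-memory buffer:

import io

fileobj = io.BytesIO(b'\x00' * 10)
# Chunks may arrive out of order; each one carries its own offset.
for offset, data in [(5, b'world'), (0, b'hello')]:
    fileobj.seek(offset)
    fileobj.write(data)
print(fileobj.getvalue())  # b'helloworld'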
|
test
|
DeferQueue.request_writes
|
Request any available writes given new incoming data.
You call this method by providing new data along with the
offset associated with the data. If that new data unlocks
any contiguous writes that can now be submitted, this
method will return all applicable writes.
This is done with one method call so you don't have to
make two method calls (put(), get()), each of which acquires a lock.
|
s3transfer/download.py
|
def request_writes(self, offset, data):
"""Request any available writes given new incoming data.
You call this method by providing new data along with the
offset associated with the data. If that new data unlocks
any contiguous writes that can now be submitted, this
method will return all applicable writes.
    This is done with one method call so you don't have to
    make two method calls (put(), get()), each of which acquires a lock.
"""
if offset < self._next_offset:
# This is a request for a write that we've already
# seen. This can happen in the event of a retry
        # where if we retry at offset N/2, we'll requeue
# offsets 0-N/2 again.
return []
writes = []
if offset in self._pending_offsets:
# We've already queued this offset so this request is
# a duplicate. In this case we should ignore
# this request and prefer what's already queued.
return []
heapq.heappush(self._writes, (offset, data))
self._pending_offsets.add(offset)
while self._writes and self._writes[0][0] == self._next_offset:
next_write = heapq.heappop(self._writes)
writes.append({'offset': next_write[0], 'data': next_write[1]})
self._pending_offsets.remove(next_write[0])
self._next_offset += len(next_write[1])
return writes
|
def request_writes(self, offset, data):
"""Request any available writes given new incoming data.
You call this method by providing new data along with the
offset associated with the data. If that new data unlocks
any contiguous writes that can now be submitted, this
method will return all applicable writes.
    This is done with one method call so you don't have to
    make two method calls (put(), get()), each of which acquires a lock.
"""
if offset < self._next_offset:
# This is a request for a write that we've already
# seen. This can happen in the event of a retry
        # where if we retry at offset N/2, we'll requeue
# offsets 0-N/2 again.
return []
writes = []
if offset in self._pending_offsets:
# We've already queued this offset so this request is
# a duplicate. In this case we should ignore
# this request and prefer what's already queued.
return []
heapq.heappush(self._writes, (offset, data))
self._pending_offsets.add(offset)
while self._writes and self._writes[0][0] == self._next_offset:
next_write = heapq.heappop(self._writes)
writes.append({'offset': next_write[0], 'data': next_write[1]})
self._pending_offsets.remove(next_write[0])
self._next_offset += len(next_write[1])
return writes
|
[
"Request",
"any",
"available",
"writes",
"given",
"new",
"incoming",
"data",
"."
] |
boto/s3transfer
|
python
|
https://github.com/boto/s3transfer/blob/2aead638c8385d8ae0b1756b2de17e8fad45fffa/s3transfer/download.py#L680-L712
|
[
"def",
"request_writes",
"(",
"self",
",",
"offset",
",",
"data",
")",
":",
"if",
"offset",
"<",
"self",
".",
"_next_offset",
":",
"# This is a request for a write that we've already",
"# seen. This can happen in the event of a retry",
"# where if we retry at at offset N/2, we'll requeue",
"# offsets 0-N/2 again.",
"return",
"[",
"]",
"writes",
"=",
"[",
"]",
"if",
"offset",
"in",
"self",
".",
"_pending_offsets",
":",
"# We've already queued this offset so this request is",
"# a duplicate. In this case we should ignore",
"# this request and prefer what's already queued.",
"return",
"[",
"]",
"heapq",
".",
"heappush",
"(",
"self",
".",
"_writes",
",",
"(",
"offset",
",",
"data",
")",
")",
"self",
".",
"_pending_offsets",
".",
"add",
"(",
"offset",
")",
"while",
"self",
".",
"_writes",
"and",
"self",
".",
"_writes",
"[",
"0",
"]",
"[",
"0",
"]",
"==",
"self",
".",
"_next_offset",
":",
"next_write",
"=",
"heapq",
".",
"heappop",
"(",
"self",
".",
"_writes",
")",
"writes",
".",
"append",
"(",
"{",
"'offset'",
":",
"next_write",
"[",
"0",
"]",
",",
"'data'",
":",
"next_write",
"[",
"1",
"]",
"}",
")",
"self",
".",
"_pending_offsets",
".",
"remove",
"(",
"next_write",
"[",
"0",
"]",
")",
"self",
".",
"_next_offset",
"+=",
"len",
"(",
"next_write",
"[",
"1",
"]",
")",
"return",
"writes"
] |
2aead638c8385d8ae0b1756b2de17e8fad45fffa
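To make the contract above concrete, here is a small interaction with DeferQueue (assuming the class shown above is importable from s3transfer.download): out-of-order offsets are buffered, and each write is released only once the contiguous prefix before it is complete.

from s3transfer.download import DeferQueue

q = DeferQueue()
print(q.request_writes(5, b'world'))  # [] -- offset 5 is not yet writable
print(q.request_writes(0, b'hello'))  # both writes come back, in offset order
print(q.request_writes(0, b'hello'))  # [] -- offset 0 was already submitted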
|
test
|
seekable
|
Backwards compat function to determine if a fileobj is seekable
:param fileobj: The file-like object to determine if seekable
:returns: True, if seekable. False, otherwise.
|
s3transfer/compat.py
|
def seekable(fileobj):
"""Backwards compat function to determine if a fileobj is seekable
:param fileobj: The file-like object to determine if seekable
:returns: True, if seekable. False, otherwise.
"""
# If the fileobj has a seekable attr, try calling the seekable()
# method on it.
if hasattr(fileobj, 'seekable'):
return fileobj.seekable()
    # If there is no seekable attr, check whether the object supports
    # seek and tell. If it does, try seeking to the current position.
elif hasattr(fileobj, 'seek') and hasattr(fileobj, 'tell'):
try:
fileobj.seek(0, 1)
return True
except (OSError, IOError):
# If an io related error was thrown then it is not seekable.
return False
# Else, the fileobj is not seekable
return False
|
def seekable(fileobj):
"""Backwards compat function to determine if a fileobj is seekable
:param fileobj: The file-like object to determine if seekable
:returns: True, if seekable. False, otherwise.
"""
# If the fileobj has a seekable attr, try calling the seekable()
# method on it.
if hasattr(fileobj, 'seekable'):
return fileobj.seekable()
    # If there is no seekable attr, check whether the object supports
    # seek and tell. If it does, try seeking to the current position.
elif hasattr(fileobj, 'seek') and hasattr(fileobj, 'tell'):
try:
fileobj.seek(0, 1)
return True
except (OSError, IOError):
# If an io related error was thrown then it is not seekable.
return False
# Else, the fileobj is not seekable
return False
|
[
"Backwards",
"compat",
"function",
"to",
"determine",
"if",
"a",
"fileobj",
"is",
"seekable"
] |
boto/s3transfer
|
python
|
https://github.com/boto/s3transfer/blob/2aead638c8385d8ae0b1756b2de17e8fad45fffa/s3transfer/compat.py#L57-L78
|
[
"def",
"seekable",
"(",
"fileobj",
")",
":",
"# If the fileobj has a seekable attr, try calling the seekable()",
"# method on it.",
"if",
"hasattr",
"(",
"fileobj",
",",
"'seekable'",
")",
":",
"return",
"fileobj",
".",
"seekable",
"(",
")",
"# If there is no seekable attr, check if the object can be seeked",
"# or telled. If it can, try to seek to the current position.",
"elif",
"hasattr",
"(",
"fileobj",
",",
"'seek'",
")",
"and",
"hasattr",
"(",
"fileobj",
",",
"'tell'",
")",
":",
"try",
":",
"fileobj",
".",
"seek",
"(",
"0",
",",
"1",
")",
"return",
"True",
"except",
"(",
"OSError",
",",
"IOError",
")",
":",
"# If an io related error was thrown then it is not seekable.",
"return",
"False",
"# Else, the fileobj is not seekable",
"return",
"False"
] |
2aead638c8385d8ae0b1756b2de17e8fad45fffa
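A quick check of seekable() against a couple of file-like objects: BytesIO reports True through its own seekable() method, while a minimal read-only wrapper with no seek/tell falls through to False.

import io

from s3transfer.compat import seekable

class ReadOnlyStream(object):
    def read(self, amount=-1):
        return b''

print(seekable(io.BytesIO(b'data')))  # True
print(seekable(ReadOnlyStream()))     # False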
|
test
|
TransferManager.upload
|
Uploads a file to S3
:type fileobj: str or seekable file-like object
:param fileobj: The name of a file to upload or a seekable file-like
object to upload. It is recommended to use a filename because
file-like objects may result in higher memory usage.
:type bucket: str
:param bucket: The name of the bucket to upload to
:type key: str
:param key: The name of the key to upload to
:type extra_args: dict
:param extra_args: Extra arguments that may be passed to the
client operation
:type subscribers: list(s3transfer.subscribers.BaseSubscriber)
:param subscribers: The list of subscribers to be invoked in the
    order provided based on the events emitted during the process of
the transfer request.
:rtype: s3transfer.futures.TransferFuture
:returns: Transfer future representing the upload
|
s3transfer/manager.py
|
def upload(self, fileobj, bucket, key, extra_args=None, subscribers=None):
"""Uploads a file to S3
:type fileobj: str or seekable file-like object
:param fileobj: The name of a file to upload or a seekable file-like
object to upload. It is recommended to use a filename because
file-like objects may result in higher memory usage.
:type bucket: str
:param bucket: The name of the bucket to upload to
:type key: str
:param key: The name of the key to upload to
:type extra_args: dict
:param extra_args: Extra arguments that may be passed to the
client operation
:type subscribers: list(s3transfer.subscribers.BaseSubscriber)
:param subscribers: The list of subscribers to be invoked in the
        order provided based on the events emitted during the process of
the transfer request.
:rtype: s3transfer.futures.TransferFuture
:returns: Transfer future representing the upload
"""
if extra_args is None:
extra_args = {}
if subscribers is None:
subscribers = []
self._validate_all_known_args(extra_args, self.ALLOWED_UPLOAD_ARGS)
call_args = CallArgs(
fileobj=fileobj, bucket=bucket, key=key, extra_args=extra_args,
subscribers=subscribers
)
extra_main_kwargs = {}
if self._bandwidth_limiter:
extra_main_kwargs['bandwidth_limiter'] = self._bandwidth_limiter
return self._submit_transfer(
call_args, UploadSubmissionTask, extra_main_kwargs)
|
def upload(self, fileobj, bucket, key, extra_args=None, subscribers=None):
"""Uploads a file to S3
:type fileobj: str or seekable file-like object
:param fileobj: The name of a file to upload or a seekable file-like
object to upload. It is recommended to use a filename because
file-like objects may result in higher memory usage.
:type bucket: str
:param bucket: The name of the bucket to upload to
:type key: str
:param key: The name of the key to upload to
:type extra_args: dict
:param extra_args: Extra arguments that may be passed to the
client operation
:type subscribers: list(s3transfer.subscribers.BaseSubscriber)
:param subscribers: The list of subscribers to be invoked in the
        order provided based on the events emitted during the process of
the transfer request.
:rtype: s3transfer.futures.TransferFuture
:returns: Transfer future representing the upload
"""
if extra_args is None:
extra_args = {}
if subscribers is None:
subscribers = []
self._validate_all_known_args(extra_args, self.ALLOWED_UPLOAD_ARGS)
call_args = CallArgs(
fileobj=fileobj, bucket=bucket, key=key, extra_args=extra_args,
subscribers=subscribers
)
extra_main_kwargs = {}
if self._bandwidth_limiter:
extra_main_kwargs['bandwidth_limiter'] = self._bandwidth_limiter
return self._submit_transfer(
call_args, UploadSubmissionTask, extra_main_kwargs)
|
[
"Uploads",
"a",
"file",
"to",
"S3"
] |
boto/s3transfer
|
python
|
https://github.com/boto/s3transfer/blob/2aead638c8385d8ae0b1756b2de17e8fad45fffa/s3transfer/manager.py#L264-L303
|
[
"def",
"upload",
"(",
"self",
",",
"fileobj",
",",
"bucket",
",",
"key",
",",
"extra_args",
"=",
"None",
",",
"subscribers",
"=",
"None",
")",
":",
"if",
"extra_args",
"is",
"None",
":",
"extra_args",
"=",
"{",
"}",
"if",
"subscribers",
"is",
"None",
":",
"subscribers",
"=",
"[",
"]",
"self",
".",
"_validate_all_known_args",
"(",
"extra_args",
",",
"self",
".",
"ALLOWED_UPLOAD_ARGS",
")",
"call_args",
"=",
"CallArgs",
"(",
"fileobj",
"=",
"fileobj",
",",
"bucket",
"=",
"bucket",
",",
"key",
"=",
"key",
",",
"extra_args",
"=",
"extra_args",
",",
"subscribers",
"=",
"subscribers",
")",
"extra_main_kwargs",
"=",
"{",
"}",
"if",
"self",
".",
"_bandwidth_limiter",
":",
"extra_main_kwargs",
"[",
"'bandwidth_limiter'",
"]",
"=",
"self",
".",
"_bandwidth_limiter",
"return",
"self",
".",
"_submit_transfer",
"(",
"call_args",
",",
"UploadSubmissionTask",
",",
"extra_main_kwargs",
")"
] |
2aead638c8385d8ae0b1756b2de17e8fad45fffa
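A minimal end-to-end use of upload(); the bucket name and local path are placeholders. Using the manager as a context manager ensures shutdown() runs when the block exits.

import boto3

from s3transfer.manager import TransferManager

client = boto3.client('s3')
with TransferManager(client) as manager:
    future = manager.upload('/tmp/example.txt', 'my-bucket', 'example.txt')
    future.result()  # blocks until the upload finishes; raises on failure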
|
test
|
TransferManager.download
|
Downloads a file from S3
:type bucket: str
:param bucket: The name of the bucket to download from
:type key: str
:param key: The name of the key to download from
:type fileobj: str or seekable file-like object
:param fileobj: The name of a file to download or a seekable file-like
object to download. It is recommended to use a filename because
file-like objects may result in higher memory usage.
:type extra_args: dict
:param extra_args: Extra arguments that may be passed to the
client operation
:type subscribers: list(s3transfer.subscribers.BaseSubscriber)
:param subscribers: The list of subscribers to be invoked in the
    order provided based on the events emitted during the process of
the transfer request.
:rtype: s3transfer.futures.TransferFuture
:returns: Transfer future representing the download
|
s3transfer/manager.py
|
def download(self, bucket, key, fileobj, extra_args=None,
subscribers=None):
"""Downloads a file from S3
:type bucket: str
:param bucket: The name of the bucket to download from
:type key: str
:param key: The name of the key to download from
:type fileobj: str or seekable file-like object
:param fileobj: The name of a file to download or a seekable file-like
object to download. It is recommended to use a filename because
file-like objects may result in higher memory usage.
:type extra_args: dict
:param extra_args: Extra arguments that may be passed to the
client operation
:type subscribers: list(s3transfer.subscribers.BaseSubscriber)
:param subscribers: The list of subscribers to be invoked in the
        order provided based on the events emitted during the process of
the transfer request.
:rtype: s3transfer.futures.TransferFuture
:returns: Transfer future representing the download
"""
if extra_args is None:
extra_args = {}
if subscribers is None:
subscribers = []
self._validate_all_known_args(extra_args, self.ALLOWED_DOWNLOAD_ARGS)
call_args = CallArgs(
bucket=bucket, key=key, fileobj=fileobj, extra_args=extra_args,
subscribers=subscribers
)
extra_main_kwargs = {'io_executor': self._io_executor}
if self._bandwidth_limiter:
extra_main_kwargs['bandwidth_limiter'] = self._bandwidth_limiter
return self._submit_transfer(
call_args, DownloadSubmissionTask, extra_main_kwargs)
|
def download(self, bucket, key, fileobj, extra_args=None,
subscribers=None):
"""Downloads a file from S3
:type bucket: str
:param bucket: The name of the bucket to download from
:type key: str
:param key: The name of the key to download from
:type fileobj: str or seekable file-like object
:param fileobj: The name of a file to download or a seekable file-like
object to download. It is recommended to use a filename because
file-like objects may result in higher memory usage.
:type extra_args: dict
:param extra_args: Extra arguments that may be passed to the
client operation
:type subscribers: list(s3transfer.subscribers.BaseSubscriber)
:param subscribers: The list of subscribers to be invoked in the
        order provided based on the events emitted during the process of
the transfer request.
:rtype: s3transfer.futures.TransferFuture
:returns: Transfer future representing the download
"""
if extra_args is None:
extra_args = {}
if subscribers is None:
subscribers = []
self._validate_all_known_args(extra_args, self.ALLOWED_DOWNLOAD_ARGS)
call_args = CallArgs(
bucket=bucket, key=key, fileobj=fileobj, extra_args=extra_args,
subscribers=subscribers
)
extra_main_kwargs = {'io_executor': self._io_executor}
if self._bandwidth_limiter:
extra_main_kwargs['bandwidth_limiter'] = self._bandwidth_limiter
return self._submit_transfer(
call_args, DownloadSubmissionTask, extra_main_kwargs)
|
[
"Downloads",
"a",
"file",
"from",
"S3"
] |
boto/s3transfer
|
python
|
https://github.com/boto/s3transfer/blob/2aead638c8385d8ae0b1756b2de17e8fad45fffa/s3transfer/manager.py#L305-L345
|
[
"def",
"download",
"(",
"self",
",",
"bucket",
",",
"key",
",",
"fileobj",
",",
"extra_args",
"=",
"None",
",",
"subscribers",
"=",
"None",
")",
":",
"if",
"extra_args",
"is",
"None",
":",
"extra_args",
"=",
"{",
"}",
"if",
"subscribers",
"is",
"None",
":",
"subscribers",
"=",
"[",
"]",
"self",
".",
"_validate_all_known_args",
"(",
"extra_args",
",",
"self",
".",
"ALLOWED_DOWNLOAD_ARGS",
")",
"call_args",
"=",
"CallArgs",
"(",
"bucket",
"=",
"bucket",
",",
"key",
"=",
"key",
",",
"fileobj",
"=",
"fileobj",
",",
"extra_args",
"=",
"extra_args",
",",
"subscribers",
"=",
"subscribers",
")",
"extra_main_kwargs",
"=",
"{",
"'io_executor'",
":",
"self",
".",
"_io_executor",
"}",
"if",
"self",
".",
"_bandwidth_limiter",
":",
"extra_main_kwargs",
"[",
"'bandwidth_limiter'",
"]",
"=",
"self",
".",
"_bandwidth_limiter",
"return",
"self",
".",
"_submit_transfer",
"(",
"call_args",
",",
"DownloadSubmissionTask",
",",
"extra_main_kwargs",
")"
] |
2aead638c8385d8ae0b1756b2de17e8fad45fffa
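The download() call mirrors upload() with the bucket and key first; extra_args below passes VersionId, one of the allowed download arguments. All names and the version id are placeholders.

import boto3

from s3transfer.manager import TransferManager

client = boto3.client('s3')
with TransferManager(client) as manager:
    future = manager.download(
        'my-bucket', 'example.txt', '/tmp/example.txt',
        extra_args={'VersionId': 'placeholder-version-id'})
    future.result()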
|
test
|
TransferManager.copy
|
Copies a file in S3
:type copy_source: dict
:param copy_source: The name of the source bucket, key name of the
source object, and optional version ID of the source object. The
dictionary format is:
``{'Bucket': 'bucket', 'Key': 'key', 'VersionId': 'id'}``. Note
that the ``VersionId`` key is optional and may be omitted.
:type bucket: str
:param bucket: The name of the bucket to copy to
:type key: str
:param key: The name of the key to copy to
:type extra_args: dict
:param extra_args: Extra arguments that may be passed to the
client operation
:type subscribers: a list of subscribers
:param subscribers: The list of subscribers to be invoked in the
    order provided based on the events emitted during the process of
the transfer request.
:type source_client: botocore or boto3 Client
:param source_client: The client to be used for operations that
may happen at the source object. For example, this client is
used for the head_object that determines the size of the copy.
If no client is provided, the transfer manager's client is used
as the client for the source object.
:rtype: s3transfer.futures.TransferFuture
:returns: Transfer future representing the copy
|
s3transfer/manager.py
|
def copy(self, copy_source, bucket, key, extra_args=None,
subscribers=None, source_client=None):
"""Copies a file in S3
:type copy_source: dict
:param copy_source: The name of the source bucket, key name of the
source object, and optional version ID of the source object. The
dictionary format is:
``{'Bucket': 'bucket', 'Key': 'key', 'VersionId': 'id'}``. Note
that the ``VersionId`` key is optional and may be omitted.
:type bucket: str
:param bucket: The name of the bucket to copy to
:type key: str
:param key: The name of the key to copy to
:type extra_args: dict
:param extra_args: Extra arguments that may be passed to the
client operation
:type subscribers: a list of subscribers
:param subscribers: The list of subscribers to be invoked in the
        order provided based on the events emitted during the process of
the transfer request.
:type source_client: botocore or boto3 Client
    :param source_client: The client to be used for operations that
may happen at the source object. For example, this client is
used for the head_object that determines the size of the copy.
If no client is provided, the transfer manager's client is used
as the client for the source object.
:rtype: s3transfer.futures.TransferFuture
:returns: Transfer future representing the copy
"""
if extra_args is None:
extra_args = {}
if subscribers is None:
subscribers = []
if source_client is None:
source_client = self._client
self._validate_all_known_args(extra_args, self.ALLOWED_COPY_ARGS)
call_args = CallArgs(
copy_source=copy_source, bucket=bucket, key=key,
extra_args=extra_args, subscribers=subscribers,
source_client=source_client
)
return self._submit_transfer(call_args, CopySubmissionTask)
|
def copy(self, copy_source, bucket, key, extra_args=None,
subscribers=None, source_client=None):
"""Copies a file in S3
:type copy_source: dict
:param copy_source: The name of the source bucket, key name of the
source object, and optional version ID of the source object. The
dictionary format is:
``{'Bucket': 'bucket', 'Key': 'key', 'VersionId': 'id'}``. Note
that the ``VersionId`` key is optional and may be omitted.
:type bucket: str
:param bucket: The name of the bucket to copy to
:type key: str
:param key: The name of the key to copy to
:type extra_args: dict
:param extra_args: Extra arguments that may be passed to the
client operation
:type subscribers: a list of subscribers
:param subscribers: The list of subscribers to be invoked in the
        order provided based on the events emitted during the process of
the transfer request.
:type source_client: botocore or boto3 Client
    :param source_client: The client to be used for operations that
may happen at the source object. For example, this client is
used for the head_object that determines the size of the copy.
If no client is provided, the transfer manager's client is used
as the client for the source object.
:rtype: s3transfer.futures.TransferFuture
:returns: Transfer future representing the copy
"""
if extra_args is None:
extra_args = {}
if subscribers is None:
subscribers = []
if source_client is None:
source_client = self._client
self._validate_all_known_args(extra_args, self.ALLOWED_COPY_ARGS)
call_args = CallArgs(
copy_source=copy_source, bucket=bucket, key=key,
extra_args=extra_args, subscribers=subscribers,
source_client=source_client
)
return self._submit_transfer(call_args, CopySubmissionTask)
|
[
"Copies",
"a",
"file",
"in",
"S3"
] |
boto/s3transfer
|
python
|
https://github.com/boto/s3transfer/blob/2aead638c8385d8ae0b1756b2de17e8fad45fffa/s3transfer/manager.py#L347-L395
|
[
"def",
"copy",
"(",
"self",
",",
"copy_source",
",",
"bucket",
",",
"key",
",",
"extra_args",
"=",
"None",
",",
"subscribers",
"=",
"None",
",",
"source_client",
"=",
"None",
")",
":",
"if",
"extra_args",
"is",
"None",
":",
"extra_args",
"=",
"{",
"}",
"if",
"subscribers",
"is",
"None",
":",
"subscribers",
"=",
"[",
"]",
"if",
"source_client",
"is",
"None",
":",
"source_client",
"=",
"self",
".",
"_client",
"self",
".",
"_validate_all_known_args",
"(",
"extra_args",
",",
"self",
".",
"ALLOWED_COPY_ARGS",
")",
"call_args",
"=",
"CallArgs",
"(",
"copy_source",
"=",
"copy_source",
",",
"bucket",
"=",
"bucket",
",",
"key",
"=",
"key",
",",
"extra_args",
"=",
"extra_args",
",",
"subscribers",
"=",
"subscribers",
",",
"source_client",
"=",
"source_client",
")",
"return",
"self",
".",
"_submit_transfer",
"(",
"call_args",
",",
"CopySubmissionTask",
")"
] |
2aead638c8385d8ae0b1756b2de17e8fad45fffa
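Copying follows the copy_source dict format described in the docstring; source and destination names below are placeholders.

import boto3

from s3transfer.manager import TransferManager

client = boto3.client('s3')
with TransferManager(client) as manager:
    future = manager.copy(
        copy_source={'Bucket': 'src-bucket', 'Key': 'src-key'},
        bucket='dest-bucket', key='dest-key')
    future.result()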
|
test
|
TransferManager.delete
|
Delete an S3 object.
:type bucket: str
:param bucket: The name of the bucket.
:type key: str
:param key: The name of the S3 object to delete.
:type extra_args: dict
:param extra_args: Extra arguments that may be passed to the
DeleteObject call.
:type subscribers: list
:param subscribers: A list of subscribers to be invoked during the
process of the transfer request. Note that the ``on_progress``
callback is not invoked during object deletion.
:rtype: s3transfer.futures.TransferFuture
:return: Transfer future representing the deletion.
|
s3transfer/manager.py
|
def delete(self, bucket, key, extra_args=None, subscribers=None):
"""Delete an S3 object.
:type bucket: str
:param bucket: The name of the bucket.
:type key: str
:param key: The name of the S3 object to delete.
:type extra_args: dict
:param extra_args: Extra arguments that may be passed to the
DeleteObject call.
:type subscribers: list
:param subscribers: A list of subscribers to be invoked during the
process of the transfer request. Note that the ``on_progress``
callback is not invoked during object deletion.
:rtype: s3transfer.futures.TransferFuture
:return: Transfer future representing the deletion.
"""
if extra_args is None:
extra_args = {}
if subscribers is None:
subscribers = []
self._validate_all_known_args(extra_args, self.ALLOWED_DELETE_ARGS)
call_args = CallArgs(
bucket=bucket, key=key, extra_args=extra_args,
subscribers=subscribers
)
return self._submit_transfer(call_args, DeleteSubmissionTask)
|
def delete(self, bucket, key, extra_args=None, subscribers=None):
"""Delete an S3 object.
:type bucket: str
:param bucket: The name of the bucket.
:type key: str
:param key: The name of the S3 object to delete.
:type extra_args: dict
:param extra_args: Extra arguments that may be passed to the
DeleteObject call.
:type subscribers: list
:param subscribers: A list of subscribers to be invoked during the
process of the transfer request. Note that the ``on_progress``
callback is not invoked during object deletion.
:rtype: s3transfer.futures.TransferFuture
:return: Transfer future representing the deletion.
"""
if extra_args is None:
extra_args = {}
if subscribers is None:
subscribers = []
self._validate_all_known_args(extra_args, self.ALLOWED_DELETE_ARGS)
call_args = CallArgs(
bucket=bucket, key=key, extra_args=extra_args,
subscribers=subscribers
)
return self._submit_transfer(call_args, DeleteSubmissionTask)
|
[
"Delete",
"an",
"S3",
"object",
"."
] |
boto/s3transfer
|
python
|
https://github.com/boto/s3transfer/blob/2aead638c8385d8ae0b1756b2de17e8fad45fffa/s3transfer/manager.py#L397-L428
|
[
"def",
"delete",
"(",
"self",
",",
"bucket",
",",
"key",
",",
"extra_args",
"=",
"None",
",",
"subscribers",
"=",
"None",
")",
":",
"if",
"extra_args",
"is",
"None",
":",
"extra_args",
"=",
"{",
"}",
"if",
"subscribers",
"is",
"None",
":",
"subscribers",
"=",
"[",
"]",
"self",
".",
"_validate_all_known_args",
"(",
"extra_args",
",",
"self",
".",
"ALLOWED_DELETE_ARGS",
")",
"call_args",
"=",
"CallArgs",
"(",
"bucket",
"=",
"bucket",
",",
"key",
"=",
"key",
",",
"extra_args",
"=",
"extra_args",
",",
"subscribers",
"=",
"subscribers",
")",
"return",
"self",
".",
"_submit_transfer",
"(",
"call_args",
",",
"DeleteSubmissionTask",
")"
] |
2aead638c8385d8ae0b1756b2de17e8fad45fffa
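delete() accepts the same subscriber hooks as the other operations; since on_progress never fires for deletions, an on_done subscriber is the natural fit. BaseSubscriber is s3transfer's documented subscriber base class; bucket and key are placeholders.

import boto3

from s3transfer.manager import TransferManager
from s3transfer.subscribers import BaseSubscriber

class DoneSubscriber(BaseSubscriber):
    def on_done(self, future, **kwargs):
        print('delete finished')

client = boto3.client('s3')
with TransferManager(client) as manager:
    manager.delete('my-bucket', 'stale-key',
                   subscribers=[DoneSubscriber()]).result()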
|
test
|
TransferManager.shutdown
|
Shutdown the TransferManager
It will wait till all transfers complete before it completely shuts
down.
:type cancel: boolean
:param cancel: If True, calls TransferFuture.cancel() for
    all in-progress transfers. This is useful if you want the
    shutdown to happen more quickly.
:type cancel_msg: str
:param cancel_msg: The message to specify if canceling all in-progress
transfers.
|
s3transfer/manager.py
|
def shutdown(self, cancel=False, cancel_msg=''):
"""Shutdown the TransferManager
It will wait till all transfers complete before it completely shuts
down.
:type cancel: boolean
:param cancel: If True, calls TransferFuture.cancel() for
        all in-progress transfers. This is useful if you want the
        shutdown to happen more quickly.
:type cancel_msg: str
:param cancel_msg: The message to specify if canceling all in-progress
transfers.
"""
self._shutdown(cancel, cancel, cancel_msg)
|
def shutdown(self, cancel=False, cancel_msg=''):
"""Shutdown the TransferManager
It will wait till all transfers complete before it completely shuts
down.
:type cancel: boolean
:param cancel: If True, calls TransferFuture.cancel() for
        all in-progress transfers. This is useful if you want the
        shutdown to happen more quickly.
:type cancel_msg: str
:param cancel_msg: The message to specify if canceling all in-progress
transfers.
"""
self._shutdown(cancel, cancel, cancel_msg)
|
[
"Shutdown",
"the",
"TransferManager"
] |
boto/s3transfer
|
python
|
https://github.com/boto/s3transfer/blob/2aead638c8385d8ae0b1756b2de17e8fad45fffa/s3transfer/manager.py#L531-L546
|
[
"def",
"shutdown",
"(",
"self",
",",
"cancel",
"=",
"False",
",",
"cancel_msg",
"=",
"''",
")",
":",
"self",
".",
"_shutdown",
"(",
"cancel",
",",
"cancel",
",",
"cancel_msg",
")"
] |
2aead638c8385d8ae0b1756b2de17e8fad45fffa
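When the manager is not used as a context manager, shutdown() must be called explicitly; cancel=True aborts in-flight transfers instead of waiting them out. The upload arguments are placeholders.

import boto3

from s3transfer.manager import TransferManager

client = boto3.client('s3')
manager = TransferManager(client)
try:
    manager.upload('/tmp/big-file.bin', 'my-bucket', 'big-file.bin')
finally:
    # Cancel anything still in flight rather than waiting for it.
    manager.shutdown(cancel=True, cancel_msg='shutting down early')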
|
test
|
TransferCoordinatorController.cancel
|
Cancels all in-progress transfers
This cancels the in-progress transfers by calling cancel() on all
tracked transfer coordinators.
:param msg: The message to pass on to each transfer coordinator that
gets cancelled.
:param exc_type: The type of exception to set for the cancellation
|
s3transfer/manager.py
|
def cancel(self, msg='', exc_type=CancelledError):
"""Cancels all inprogress transfers
This cancels the inprogress transfers by calling cancel() on all
tracked transfer coordinators.
:param msg: The message to pass on to each transfer coordinator that
gets cancelled.
:param exc_type: The type of exception to set for the cancellation
"""
for transfer_coordinator in self.tracked_transfer_coordinators:
transfer_coordinator.cancel(msg, exc_type)
|
def cancel(self, msg='', exc_type=CancelledError):
"""Cancels all inprogress transfers
This cancels the inprogress transfers by calling cancel() on all
tracked transfer coordinators.
:param msg: The message to pass on to each transfer coordinator that
gets cancelled.
:param exc_type: The type of exception to set for the cancellation
"""
for transfer_coordinator in self.tracked_transfer_coordinators:
transfer_coordinator.cancel(msg, exc_type)
|
[
"Cancels",
"all",
"inprogress",
"transfers"
] |
boto/s3transfer
|
python
|
https://github.com/boto/s3transfer/blob/2aead638c8385d8ae0b1756b2de17e8fad45fffa/s3transfer/manager.py#L615-L627
|
[
"def",
"cancel",
"(",
"self",
",",
"msg",
"=",
"''",
",",
"exc_type",
"=",
"CancelledError",
")",
":",
"for",
"transfer_coordinator",
"in",
"self",
".",
"tracked_transfer_coordinators",
":",
"transfer_coordinator",
".",
"cancel",
"(",
"msg",
",",
"exc_type",
")"
] |
2aead638c8385d8ae0b1756b2de17e8fad45fffa
|
test
|
TransferCoordinatorController.wait
|
Wait until there are no more in-progress transfers
This will not stop when failures are encountered, nor will it propagate
any errors from failed transfers, but it can be interrupted with
a KeyboardInterrupt.
|
s3transfer/manager.py
|
def wait(self):
"""Wait until there are no more inprogress transfers
This will not stop when failures are encountered and not propogate any
of these errors from failed transfers, but it can be interrupted with
a KeyboardInterrupt.
"""
try:
transfer_coordinator = None
for transfer_coordinator in self.tracked_transfer_coordinators:
transfer_coordinator.result()
except KeyboardInterrupt:
logger.debug('Received KeyboardInterrupt in wait()')
# If Keyboard interrupt is raised while waiting for
# the result, then exit out of the wait and raise the
# exception
if transfer_coordinator:
logger.debug(
'On KeyboardInterrupt was waiting for %s',
transfer_coordinator)
raise
except Exception:
# A general exception could have been thrown because
# of result(). We just want to ignore this and continue
# because we at least know that the transfer coordinator
# has completed.
pass
|
def wait(self):
"""Wait until there are no more inprogress transfers
This will not stop when failures are encountered and not propogate any
of these errors from failed transfers, but it can be interrupted with
a KeyboardInterrupt.
"""
try:
transfer_coordinator = None
for transfer_coordinator in self.tracked_transfer_coordinators:
transfer_coordinator.result()
except KeyboardInterrupt:
logger.debug('Received KeyboardInterrupt in wait()')
# If Keyboard interrupt is raised while waiting for
# the result, then exit out of the wait and raise the
# exception
if transfer_coordinator:
logger.debug(
'On KeyboardInterrupt was waiting for %s',
transfer_coordinator)
raise
except Exception:
# A general exception could have been thrown because
# of result(). We just want to ignore this and continue
# because we at least know that the transfer coordinator
# has completed.
pass
|
[
"Wait",
"until",
"there",
"are",
"no",
"more",
"inprogress",
"transfers"
] |
boto/s3transfer
|
python
|
https://github.com/boto/s3transfer/blob/2aead638c8385d8ae0b1756b2de17e8fad45fffa/s3transfer/manager.py#L629-L655
|
[
"def",
"wait",
"(",
"self",
")",
":",
"try",
":",
"transfer_coordinator",
"=",
"None",
"for",
"transfer_coordinator",
"in",
"self",
".",
"tracked_transfer_coordinators",
":",
"transfer_coordinator",
".",
"result",
"(",
")",
"except",
"KeyboardInterrupt",
":",
"logger",
".",
"debug",
"(",
"'Received KeyboardInterrupt in wait()'",
")",
"# If Keyboard interrupt is raised while waiting for",
"# the result, then exit out of the wait and raise the",
"# exception",
"if",
"transfer_coordinator",
":",
"logger",
".",
"debug",
"(",
"'On KeyboardInterrupt was waiting for %s'",
",",
"transfer_coordinator",
")",
"raise",
"except",
"Exception",
":",
"# A general exception could have been thrown because",
"# of result(). We just want to ignore this and continue",
"# because we at least know that the transfer coordinator",
"# has completed.",
"pass"
] |
2aead638c8385d8ae0b1756b2de17e8fad45fffa
|
test
|
UploadNonSeekableInputManager._read
|
Reads a specific amount of data from a stream and returns it. If there
is any data in initial_data, that will be popped out first.
:type fileobj: A file-like object that implements read
:param fileobj: The stream to read from.
:type amount: int
:param amount: The number of bytes to read from the stream.
:type truncate: bool
:param truncate: Whether or not to truncate initial_data after
reading from it.
:return: Data read from initial_data first, then from the stream.
|
s3transfer/upload.py
|
def _read(self, fileobj, amount, truncate=True):
"""
Reads a specific amount of data from a stream and returns it. If there
is any data in initial_data, that will be popped out first.
:type fileobj: A file-like object that implements read
:param fileobj: The stream to read from.
:type amount: int
:param amount: The number of bytes to read from the stream.
:type truncate: bool
:param truncate: Whether or not to truncate initial_data after
reading from it.
    :return: Data read from initial_data first, then from the stream.
"""
    # If the initial data is empty, we simply read from the fileobj
if len(self._initial_data) == 0:
return fileobj.read(amount)
# If the requested number of bytes is less than the amount of
# initial data, pull entirely from initial data.
if amount <= len(self._initial_data):
data = self._initial_data[:amount]
# Truncate initial data so we don't hang onto the data longer
# than we need.
if truncate:
self._initial_data = self._initial_data[amount:]
return data
# At this point there is some initial data left, but not enough to
# satisfy the number of bytes requested. Pull out the remaining
# initial data and read the rest from the fileobj.
amount_to_read = amount - len(self._initial_data)
data = self._initial_data + fileobj.read(amount_to_read)
# Zero out initial data so we don't hang onto the data any more.
if truncate:
self._initial_data = b''
return data
|
def _read(self, fileobj, amount, truncate=True):
"""
Reads a specific amount of data from a stream and returns it. If there
is any data in initial_data, that will be popped out first.
:type fileobj: A file-like object that implements read
:param fileobj: The stream to read from.
:type amount: int
:param amount: The number of bytes to read from the stream.
:type truncate: bool
:param truncate: Whether or not to truncate initial_data after
reading from it.
    :return: Data read from initial_data first, then from the stream.
"""
    # If the initial data is empty, we simply read from the fileobj
if len(self._initial_data) == 0:
return fileobj.read(amount)
# If the requested number of bytes is less than the amount of
# initial data, pull entirely from initial data.
if amount <= len(self._initial_data):
data = self._initial_data[:amount]
# Truncate initial data so we don't hang onto the data longer
# than we need.
if truncate:
self._initial_data = self._initial_data[amount:]
return data
# At this point there is some initial data left, but not enough to
# satisfy the number of bytes requested. Pull out the remaining
# initial data and read the rest from the fileobj.
amount_to_read = amount - len(self._initial_data)
data = self._initial_data + fileobj.read(amount_to_read)
# Zero out initial data so we don't hang onto the data any more.
if truncate:
self._initial_data = b''
return data
|
[
"Reads",
"a",
"specific",
"amount",
"of",
"data",
"from",
"a",
"stream",
"and",
"returns",
"it",
".",
"If",
"there",
"is",
"any",
"data",
"in",
"initial_data",
"that",
"will",
"be",
"popped",
"out",
"first",
"."
] |
boto/s3transfer
|
python
|
https://github.com/boto/s3transfer/blob/2aead638c8385d8ae0b1756b2de17e8fad45fffa/s3transfer/upload.py#L421-L461
|
[
"def",
"_read",
"(",
"self",
",",
"fileobj",
",",
"amount",
",",
"truncate",
"=",
"True",
")",
":",
"# If the the initial data is empty, we simply read from the fileobj",
"if",
"len",
"(",
"self",
".",
"_initial_data",
")",
"==",
"0",
":",
"return",
"fileobj",
".",
"read",
"(",
"amount",
")",
"# If the requested number of bytes is less than the amount of",
"# initial data, pull entirely from initial data.",
"if",
"amount",
"<=",
"len",
"(",
"self",
".",
"_initial_data",
")",
":",
"data",
"=",
"self",
".",
"_initial_data",
"[",
":",
"amount",
"]",
"# Truncate initial data so we don't hang onto the data longer",
"# than we need.",
"if",
"truncate",
":",
"self",
".",
"_initial_data",
"=",
"self",
".",
"_initial_data",
"[",
"amount",
":",
"]",
"return",
"data",
"# At this point there is some initial data left, but not enough to",
"# satisfy the number of bytes requested. Pull out the remaining",
"# initial data and read the rest from the fileobj.",
"amount_to_read",
"=",
"amount",
"-",
"len",
"(",
"self",
".",
"_initial_data",
")",
"data",
"=",
"self",
".",
"_initial_data",
"+",
"fileobj",
".",
"read",
"(",
"amount_to_read",
")",
"# Zero out initial data so we don't hang onto the data any more.",
"if",
"truncate",
":",
"self",
".",
"_initial_data",
"=",
"b''",
"return",
"data"
] |
2aead638c8385d8ae0b1756b2de17e8fad45fffa
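A self-contained toy version of the buffer-first read logic in _read: bytes are served from an in-memory prefix until it is exhausted, then from the underlying stream. BufferedPrefixReader is an illustrative stand-in, not an s3transfer class.

import io

class BufferedPrefixReader(object):
    def __init__(self, fileobj, initial_data=b''):
        self._fileobj = fileobj
        self._initial_data = initial_data

    def read(self, amount):
        # No prefix left: read straight from the stream.
        if not self._initial_data:
            return self._fileobj.read(amount)
        # The prefix alone can satisfy the request.
        if amount <= len(self._initial_data):
            data = self._initial_data[:amount]
            self._initial_data = self._initial_data[amount:]
            return data
        # Drain the prefix, then top up from the stream.
        data = self._initial_data + self._fileobj.read(
            amount - len(self._initial_data))
        self._initial_data = b''
        return data

reader = BufferedPrefixReader(io.BytesIO(b'stream'), initial_data=b'buffered ')
print(reader.read(4))  # b'buff' -- entirely from the prefix
print(reader.read(8))  # b'ered str' -- prefix remainder plus stream bytes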
|
test
|
UploadNonSeekableInputManager._wrap_data
|
Wraps data with the interrupt reader and the file chunk reader.
:type data: bytes
:param data: The data to wrap.
:type callbacks: list
:param callbacks: The callbacks associated with the transfer future.
:type close_callbacks: list
:param close_callbacks: The callbacks to be called when closing the
wrapper for the data.
:return: Fully wrapped data.
|
s3transfer/upload.py
|
def _wrap_data(self, data, callbacks, close_callbacks):
"""
Wraps data with the interrupt reader and the file chunk reader.
:type data: bytes
:param data: The data to wrap.
:type callbacks: list
:param callbacks: The callbacks associated with the transfer future.
:type close_callbacks: list
:param close_callbacks: The callbacks to be called when closing the
wrapper for the data.
:return: Fully wrapped data.
"""
fileobj = self._wrap_fileobj(six.BytesIO(data))
return self._osutil.open_file_chunk_reader_from_fileobj(
fileobj=fileobj, chunk_size=len(data), full_file_size=len(data),
callbacks=callbacks, close_callbacks=close_callbacks)
|
def _wrap_data(self, data, callbacks, close_callbacks):
"""
Wraps data with the interrupt reader and the file chunk reader.
:type data: bytes
:param data: The data to wrap.
:type callbacks: list
:param callbacks: The callbacks associated with the transfer future.
:type close_callbacks: list
:param close_callbacks: The callbacks to be called when closing the
wrapper for the data.
:return: Fully wrapped data.
"""
fileobj = self._wrap_fileobj(six.BytesIO(data))
return self._osutil.open_file_chunk_reader_from_fileobj(
fileobj=fileobj, chunk_size=len(data), full_file_size=len(data),
callbacks=callbacks, close_callbacks=close_callbacks)
|
[
"Wraps",
"data",
"with",
"the",
"interrupt",
"reader",
"and",
"the",
"file",
"chunk",
"reader",
"."
] |
boto/s3transfer
|
python
|
https://github.com/boto/s3transfer/blob/2aead638c8385d8ae0b1756b2de17e8fad45fffa/s3transfer/upload.py#L463-L482
|
[
"def",
"_wrap_data",
"(",
"self",
",",
"data",
",",
"callbacks",
",",
"close_callbacks",
")",
":",
"fileobj",
"=",
"self",
".",
"_wrap_fileobj",
"(",
"six",
".",
"BytesIO",
"(",
"data",
")",
")",
"return",
"self",
".",
"_osutil",
".",
"open_file_chunk_reader_from_fileobj",
"(",
"fileobj",
"=",
"fileobj",
",",
"chunk_size",
"=",
"len",
"(",
"data",
")",
",",
"full_file_size",
"=",
"len",
"(",
"data",
")",
",",
"callbacks",
"=",
"callbacks",
",",
"close_callbacks",
"=",
"close_callbacks",
")"
] |
2aead638c8385d8ae0b1756b2de17e8fad45fffa
|
test
|
UploadSubmissionTask._get_upload_input_manager_cls
|
Retrieves a class for managing input for an upload based on file type
:type transfer_future: s3transfer.futures.TransferFuture
:param transfer_future: The transfer future for the request
:rtype: class of UploadInputManager
:returns: The appropriate class to use for managing a specific type of
input for uploads.
|
s3transfer/upload.py
|
def _get_upload_input_manager_cls(self, transfer_future):
"""Retrieves a class for managing input for an upload based on file type
:type transfer_future: s3transfer.futures.TransferFuture
:param transfer_future: The transfer future for the request
:rtype: class of UploadInputManager
:returns: The appropriate class to use for managing a specific type of
input for uploads.
"""
upload_manager_resolver_chain = [
UploadFilenameInputManager,
UploadSeekableInputManager,
UploadNonSeekableInputManager
]
fileobj = transfer_future.meta.call_args.fileobj
for upload_manager_cls in upload_manager_resolver_chain:
if upload_manager_cls.is_compatible(fileobj):
return upload_manager_cls
raise RuntimeError(
'Input %s of type: %s is not supported.' % (
fileobj, type(fileobj)))
|
def _get_upload_input_manager_cls(self, transfer_future):
"""Retrieves a class for managing input for an upload based on file type
:type transfer_future: s3transfer.futures.TransferFuture
:param transfer_future: The transfer future for the request
:rtype: class of UploadInputManager
:returns: The appropriate class to use for managing a specific type of
input for uploads.
"""
upload_manager_resolver_chain = [
UploadFilenameInputManager,
UploadSeekableInputManager,
UploadNonSeekableInputManager
]
fileobj = transfer_future.meta.call_args.fileobj
for upload_manager_cls in upload_manager_resolver_chain:
if upload_manager_cls.is_compatible(fileobj):
return upload_manager_cls
raise RuntimeError(
'Input %s of type: %s is not supported.' % (
fileobj, type(fileobj)))
|
[
"Retrieves",
"a",
"class",
"for",
"managing",
"input",
"for",
"an",
"upload",
"based",
"on",
"file",
"type"
] |
boto/s3transfer
|
python
|
https://github.com/boto/s3transfer/blob/2aead638c8385d8ae0b1756b2de17e8fad45fffa/s3transfer/upload.py#L499-L521
|
[
"def",
"_get_upload_input_manager_cls",
"(",
"self",
",",
"transfer_future",
")",
":",
"upload_manager_resolver_chain",
"=",
"[",
"UploadFilenameInputManager",
",",
"UploadSeekableInputManager",
",",
"UploadNonSeekableInputManager",
"]",
"fileobj",
"=",
"transfer_future",
".",
"meta",
".",
"call_args",
".",
"fileobj",
"for",
"upload_manager_cls",
"in",
"upload_manager_resolver_chain",
":",
"if",
"upload_manager_cls",
".",
"is_compatible",
"(",
"fileobj",
")",
":",
"return",
"upload_manager_cls",
"raise",
"RuntimeError",
"(",
"'Input %s of type: %s is not supported.'",
"%",
"(",
"fileobj",
",",
"type",
"(",
"fileobj",
")",
")",
")"
] |
2aead638c8385d8ae0b1756b2de17e8fad45fffa
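The resolver chain above is a first-match dispatch ordered from the most specific input type to the most general; a toy version with plain predicates shows the same shape.

import io

def pick_handler(fileobj):
    # Ordered most specific to most general, like the resolver chain.
    chain = [
        (lambda f: isinstance(f, str), 'filename'),
        (lambda f: hasattr(f, 'seek') and hasattr(f, 'tell'), 'seekable'),
        (lambda f: hasattr(f, 'read'), 'nonseekable'),
    ]
    for predicate, name in chain:
        if predicate(fileobj):
            return name
    raise RuntimeError(
        'Input %s of type: %s is not supported.' % (fileobj, type(fileobj)))

print(pick_handler('/tmp/file.txt'))   # filename
print(pick_handler(io.BytesIO(b'x')))  # seekable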
|
test
|
UploadSubmissionTask._submit
|
:param client: The client associated with the transfer manager
:type config: s3transfer.manager.TransferConfig
:param config: The transfer config associated with the transfer
manager
:type osutil: s3transfer.utils.OSUtil
:param osutil: The os utility associated with the transfer manager
:type request_executor: s3transfer.futures.BoundedExecutor
:param request_executor: The request executor associated with the
transfer manager
:type transfer_future: s3transfer.futures.TransferFuture
:param transfer_future: The transfer future associated with the
transfer request that tasks are being submitted for
|
s3transfer/upload.py
|
def _submit(self, client, config, osutil, request_executor,
transfer_future, bandwidth_limiter=None):
"""
:param client: The client associated with the transfer manager
:type config: s3transfer.manager.TransferConfig
:param config: The transfer config associated with the transfer
manager
:type osutil: s3transfer.utils.OSUtil
    :param osutil: The os utility associated with the transfer manager
:type request_executor: s3transfer.futures.BoundedExecutor
:param request_executor: The request executor associated with the
transfer manager
:type transfer_future: s3transfer.futures.TransferFuture
:param transfer_future: The transfer future associated with the
transfer request that tasks are being submitted for
"""
upload_input_manager = self._get_upload_input_manager_cls(
transfer_future)(
osutil, self._transfer_coordinator, bandwidth_limiter)
# Determine the size if it was not provided
if transfer_future.meta.size is None:
upload_input_manager.provide_transfer_size(transfer_future)
# Do a multipart upload if needed, otherwise do a regular put object.
if not upload_input_manager.requires_multipart_upload(
transfer_future, config):
self._submit_upload_request(
client, config, osutil, request_executor, transfer_future,
upload_input_manager)
else:
self._submit_multipart_request(
client, config, osutil, request_executor, transfer_future,
upload_input_manager)
|
def _submit(self, client, config, osutil, request_executor,
transfer_future, bandwidth_limiter=None):
"""
:param client: The client associated with the transfer manager
:type config: s3transfer.manager.TransferConfig
:param config: The transfer config associated with the transfer
manager
:type osutil: s3transfer.utils.OSUtil
    :param osutil: The os utility associated with the transfer manager
:type request_executor: s3transfer.futures.BoundedExecutor
:param request_executor: The request executor associated with the
transfer manager
:type transfer_future: s3transfer.futures.TransferFuture
:param transfer_future: The transfer future associated with the
transfer request that tasks are being submitted for
"""
upload_input_manager = self._get_upload_input_manager_cls(
transfer_future)(
osutil, self._transfer_coordinator, bandwidth_limiter)
# Determine the size if it was not provided
if transfer_future.meta.size is None:
upload_input_manager.provide_transfer_size(transfer_future)
# Do a multipart upload if needed, otherwise do a regular put object.
if not upload_input_manager.requires_multipart_upload(
transfer_future, config):
self._submit_upload_request(
client, config, osutil, request_executor, transfer_future,
upload_input_manager)
else:
self._submit_multipart_request(
client, config, osutil, request_executor, transfer_future,
upload_input_manager)
|
[
":",
"param",
"client",
":",
"The",
"client",
"associated",
"with",
"the",
"transfer",
"manager"
] |
boto/s3transfer
|
python
|
https://github.com/boto/s3transfer/blob/2aead638c8385d8ae0b1756b2de17e8fad45fffa/s3transfer/upload.py#L523-L560
|
[
"def",
"_submit",
"(",
"self",
",",
"client",
",",
"config",
",",
"osutil",
",",
"request_executor",
",",
"transfer_future",
",",
"bandwidth_limiter",
"=",
"None",
")",
":",
"upload_input_manager",
"=",
"self",
".",
"_get_upload_input_manager_cls",
"(",
"transfer_future",
")",
"(",
"osutil",
",",
"self",
".",
"_transfer_coordinator",
",",
"bandwidth_limiter",
")",
"# Determine the size if it was not provided",
"if",
"transfer_future",
".",
"meta",
".",
"size",
"is",
"None",
":",
"upload_input_manager",
".",
"provide_transfer_size",
"(",
"transfer_future",
")",
"# Do a multipart upload if needed, otherwise do a regular put object.",
"if",
"not",
"upload_input_manager",
".",
"requires_multipart_upload",
"(",
"transfer_future",
",",
"config",
")",
":",
"self",
".",
"_submit_upload_request",
"(",
"client",
",",
"config",
",",
"osutil",
",",
"request_executor",
",",
"transfer_future",
",",
"upload_input_manager",
")",
"else",
":",
"self",
".",
"_submit_multipart_request",
"(",
"client",
",",
"config",
",",
"osutil",
",",
"request_executor",
",",
"transfer_future",
",",
"upload_input_manager",
")"
] |
2aead638c8385d8ae0b1756b2de17e8fad45fffa
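The multipart-or-not branch in _submit ultimately reduces to a size threshold comparison; a sketch with explicit numbers (8 MB is TransferConfig's default multipart_threshold) makes the decision visible.

MB = 1024 ** 2
multipart_threshold = 8 * MB  # TransferConfig default

def choose_strategy(size):
    # Mirrors requires_multipart_upload: sizes at or above the
    # threshold take the multipart path.
    return 'multipart' if size >= multipart_threshold else 'put_object'

print(choose_strategy(5 * MB))    # put_object
print(choose_strategy(100 * MB))  # multipart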
|
test
|
PutObjectTask._main
|
:param client: The client to use when calling PutObject
:param fileobj: The file to upload.
:param bucket: The name of the bucket to upload to
:param key: The name of the key to upload to
:param extra_args: A dictionary of any extra arguments that may be
used in the upload.
|
s3transfer/upload.py
|
def _main(self, client, fileobj, bucket, key, extra_args):
"""
:param client: The client to use when calling PutObject
:param fileobj: The file to upload.
:param bucket: The name of the bucket to upload to
:param key: The name of the key to upload to
:param extra_args: A dictionary of any extra arguments that may be
used in the upload.
"""
with fileobj as body:
client.put_object(Bucket=bucket, Key=key, Body=body, **extra_args)
|
def _main(self, client, fileobj, bucket, key, extra_args):
"""
:param client: The client to use when calling PutObject
:param fileobj: The file to upload.
:param bucket: The name of the bucket to upload to
:param key: The name of the key to upload to
:param extra_args: A dictionary of any extra arguments that may be
used in the upload.
"""
with fileobj as body:
client.put_object(Bucket=bucket, Key=key, Body=body, **extra_args)
|
[
":",
"param",
"client",
":",
"The",
"client",
"to",
"use",
"when",
"calling",
"PutObject",
":",
"param",
"fileobj",
":",
"The",
"file",
"to",
"upload",
".",
":",
"param",
"bucket",
":",
"The",
"name",
"of",
"the",
"bucket",
"to",
"upload",
"to",
":",
"param",
"key",
":",
"The",
"name",
"of",
"the",
"key",
"to",
"upload",
"to",
":",
"param",
"extra_args",
":",
"A",
"dictionary",
"of",
"any",
"extra",
"arguments",
"that",
"may",
"be",
"used",
"in",
"the",
"upload",
"."
] |
boto/s3transfer
|
python
|
https://github.com/boto/s3transfer/blob/2aead638c8385d8ae0b1756b2de17e8fad45fffa/s3transfer/upload.py#L682-L692
|
[
"def",
"_main",
"(",
"self",
",",
"client",
",",
"fileobj",
",",
"bucket",
",",
"key",
",",
"extra_args",
")",
":",
"with",
"fileobj",
"as",
"body",
":",
"client",
".",
"put_object",
"(",
"Bucket",
"=",
"bucket",
",",
"Key",
"=",
"key",
",",
"Body",
"=",
"body",
",",
"*",
"*",
"extra_args",
")"
] |
2aead638c8385d8ae0b1756b2de17e8fad45fffa
|
test
|
UploadPartTask._main
|
:param client: The client to use when calling UploadPart
:param fileobj: The file to upload.
:param bucket: The name of the bucket to upload to
:param key: The name of the key to upload to
:param upload_id: The id of the upload
:param part_number: The number representing the part of the multipart
upload
:param extra_args: A dictionary of any extra arguments that may be
used in the upload.
:rtype: dict
:returns: A dictionary representing a part::
{'ETag': etag_value, 'PartNumber': part_number}
This value can be appended to a list to be used to complete
the multipart upload.
|
s3transfer/upload.py
|
def _main(self, client, fileobj, bucket, key, upload_id, part_number,
extra_args):
"""
    :param client: The client to use when calling UploadPart
:param fileobj: The file to upload.
:param bucket: The name of the bucket to upload to
:param key: The name of the key to upload to
:param upload_id: The id of the upload
:param part_number: The number representing the part of the multipart
upload
:param extra_args: A dictionary of any extra arguments that may be
used in the upload.
:rtype: dict
:returns: A dictionary representing a part::
    {'ETag': etag_value, 'PartNumber': part_number}
This value can be appended to a list to be used to complete
the multipart upload.
"""
with fileobj as body:
response = client.upload_part(
Bucket=bucket, Key=key,
UploadId=upload_id, PartNumber=part_number,
Body=body, **extra_args)
etag = response['ETag']
return {'ETag': etag, 'PartNumber': part_number}
|
def _main(self, client, fileobj, bucket, key, upload_id, part_number,
extra_args):
"""
    :param client: The client to use when calling UploadPart
:param fileobj: The file to upload.
:param bucket: The name of the bucket to upload to
:param key: The name of the key to upload to
:param upload_id: The id of the upload
:param part_number: The number representing the part of the multipart
upload
:param extra_args: A dictionary of any extra arguments that may be
used in the upload.
:rtype: dict
:returns: A dictionary representing a part::
        {'ETag': etag_value, 'PartNumber': part_number}
This value can be appended to a list to be used to complete
the multipart upload.
"""
with fileobj as body:
response = client.upload_part(
Bucket=bucket, Key=key,
UploadId=upload_id, PartNumber=part_number,
Body=body, **extra_args)
etag = response['ETag']
return {'ETag': etag, 'PartNumber': part_number}
|
[
":",
"param",
"client",
":",
"The",
"client",
"to",
"use",
"when",
"calling",
"PutObject",
":",
"param",
"fileobj",
":",
"The",
"file",
"to",
"upload",
".",
":",
"param",
"bucket",
":",
"The",
"name",
"of",
"the",
"bucket",
"to",
"upload",
"to",
":",
"param",
"key",
":",
"The",
"name",
"of",
"the",
"key",
"to",
"upload",
"to",
":",
"param",
"upload_id",
":",
"The",
"id",
"of",
"the",
"upload",
":",
"param",
"part_number",
":",
"The",
"number",
"representing",
"the",
"part",
"of",
"the",
"multipart",
"upload",
":",
"param",
"extra_args",
":",
"A",
"dictionary",
"of",
"any",
"extra",
"arguments",
"that",
"may",
"be",
"used",
"in",
"the",
"upload",
"."
] |
boto/s3transfer
|
python
|
https://github.com/boto/s3transfer/blob/2aead638c8385d8ae0b1756b2de17e8fad45fffa/s3transfer/upload.py#L697-L724
|
[
"def",
"_main",
"(",
"self",
",",
"client",
",",
"fileobj",
",",
"bucket",
",",
"key",
",",
"upload_id",
",",
"part_number",
",",
"extra_args",
")",
":",
"with",
"fileobj",
"as",
"body",
":",
"response",
"=",
"client",
".",
"upload_part",
"(",
"Bucket",
"=",
"bucket",
",",
"Key",
"=",
"key",
",",
"UploadId",
"=",
"upload_id",
",",
"PartNumber",
"=",
"part_number",
",",
"Body",
"=",
"body",
",",
"*",
"*",
"extra_args",
")",
"etag",
"=",
"response",
"[",
"'ETag'",
"]",
"return",
"{",
"'ETag'",
":",
"etag",
",",
"'PartNumber'",
":",
"part_number",
"}"
] |
2aead638c8385d8ae0b1756b2de17e8fad45fffa
|
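A sketch of how the {'ETag', 'PartNumber'} dictionaries returned above are typically accumulated and handed to complete_multipart_upload, assuming boto3 with valid credentials; the bucket, key, file name, and 8 MiB part size are hypothetical.

import boto3

client = boto3.client('s3')
bucket, key = 'example-bucket', 'big/object.bin'  # hypothetical names

mpu = client.create_multipart_upload(Bucket=bucket, Key=key)
parts = []
with open('object.bin', 'rb') as f:
    part_number = 1
    while True:
        chunk = f.read(8 * 1024 * 1024)  # parts must be >= 5 MiB except the last
        if not chunk:
            break
        response = client.upload_part(
            Bucket=bucket, Key=key,
            UploadId=mpu['UploadId'], PartNumber=part_number,
            Body=chunk)
        # The same {'ETag', 'PartNumber'} shape the task returns.
        parts.append({'ETag': response['ETag'], 'PartNumber': part_number})
        part_number += 1

client.complete_multipart_upload(
    Bucket=bucket, Key=key, UploadId=mpu['UploadId'],
    MultipartUpload={'Parts': parts})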
test
|
TransferFuture.set_exception
|
Sets the exception on the future.
|
s3transfer/futures.py
|
def set_exception(self, exception):
"""Sets the exception on the future."""
if not self.done():
raise TransferNotDoneError(
'set_exception can only be called once the transfer is '
'complete.')
self._coordinator.set_exception(exception, override=True)
|
def set_exception(self, exception):
"""Sets the exception on the future."""
if not self.done():
raise TransferNotDoneError(
'set_exception can only be called once the transfer is '
'complete.')
self._coordinator.set_exception(exception, override=True)
|
[
"Sets",
"the",
"exception",
"on",
"the",
"future",
"."
] |
boto/s3transfer
|
python
|
https://github.com/boto/s3transfer/blob/2aead638c8385d8ae0b1756b2de17e8fad45fffa/s3transfer/futures.py#L114-L120
|
[
"def",
"set_exception",
"(",
"self",
",",
"exception",
")",
":",
"if",
"not",
"self",
".",
"done",
"(",
")",
":",
"raise",
"TransferNotDoneError",
"(",
"'set_exception can only be called once the transfer is '",
"'complete.'",
")",
"self",
".",
"_coordinator",
".",
"set_exception",
"(",
"exception",
",",
"override",
"=",
"True",
")"
] |
2aead638c8385d8ae0b1756b2de17e8fad45fffa
|
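A sketch of the override behavior, assuming s3transfer is installed and that TransferFuture and TransferCoordinator accept the keyword arguments shown; constructing them directly like this is unusual outside the library's own tests.

from s3transfer.futures import TransferCoordinator, TransferFuture

coordinator = TransferCoordinator(transfer_id=1)
future = TransferFuture(coordinator=coordinator)

# Fail the transfer and announce completion so done() is True.
coordinator.set_exception(ValueError('first failure'))
coordinator.announce_done()

# Now the future-level override is permitted and replaces the exception.
future.set_exception(RuntimeError('amended failure'))
try:
    future.result()
except RuntimeError as exc:
    print(exc)  # amended failure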
test
|
TransferCoordinator.set_result
|
Set a result for the TransferFuture
Implies that the TransferFuture succeeded. This will always set a
result because it is invoked on the final task where there is only
ever one final task and it is run at the very end of a transfer
process. So if a result is being set for this final task, the transfer
succeeded even if something came along and canceled the transfer
on the final task.
|
s3transfer/futures.py
|
def set_result(self, result):
"""Set a result for the TransferFuture
Implies that the TransferFuture succeeded. This will always set a
result because it is invoked on the final task where there is only
    ever one final task and it is run at the very end of a transfer
process. So if a result is being set for this final task, the transfer
    succeeded even if something came along and canceled the transfer
on the final task.
"""
with self._lock:
self._exception = None
self._result = result
self._status = 'success'
|
def set_result(self, result):
"""Set a result for the TransferFuture
Implies that the TransferFuture succeeded. This will always set a
result because it is invoked on the final task where there is only
    ever one final task and it is run at the very end of a transfer
process. So if a result is being set for this final task, the transfer
    succeeded even if something came along and canceled the transfer
on the final task.
"""
with self._lock:
self._exception = None
self._result = result
self._status = 'success'
|
[
"Set",
"a",
"result",
"for",
"the",
"TransferFuture"
] |
boto/s3transfer
|
python
|
https://github.com/boto/s3transfer/blob/2aead638c8385d8ae0b1756b2de17e8fad45fffa/s3transfer/futures.py#L221-L234
|
[
"def",
"set_result",
"(",
"self",
",",
"result",
")",
":",
"with",
"self",
".",
"_lock",
":",
"self",
".",
"_exception",
"=",
"None",
"self",
".",
"_result",
"=",
"result",
"self",
".",
"_status",
"=",
"'success'"
] |
2aead638c8385d8ae0b1756b2de17e8fad45fffa
|
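A sketch of the "final task always wins" behavior the docstring describes: a cancellation is overwritten by the final result. Assumes s3transfer is installed and that the status property reads back the internal state.

from s3transfer.futures import TransferCoordinator

coordinator = TransferCoordinator(transfer_id=2)
coordinator.cancel('superseded')         # status becomes 'cancelled'
coordinator.set_result('final-answer')   # the final task still wins
coordinator.announce_done()

print(coordinator.status)    # success
print(coordinator.result())  # final-answer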
test
|
TransferCoordinator.set_exception
|
Set an exception for the TransferFuture
Implies the TransferFuture failed.
:param exception: The exception that caused the transfer to fail.
:param override: If True, override any existing state.
|
s3transfer/futures.py
|
def set_exception(self, exception, override=False):
"""Set an exception for the TransferFuture
Implies the TransferFuture failed.
    :param exception: The exception that caused the transfer to fail.
:param override: If True, override any existing state.
"""
with self._lock:
if not self.done() or override:
self._exception = exception
self._status = 'failed'
|
def set_exception(self, exception, override=False):
"""Set an exception for the TransferFuture
Implies the TransferFuture failed.
    :param exception: The exception that caused the transfer to fail.
:param override: If True, override any existing state.
"""
with self._lock:
if not self.done() or override:
self._exception = exception
self._status = 'failed'
|
[
"Set",
"an",
"exception",
"for",
"the",
"TransferFuture"
] |
boto/s3transfer
|
python
|
https://github.com/boto/s3transfer/blob/2aead638c8385d8ae0b1756b2de17e8fad45fffa/s3transfer/futures.py#L236-L247
|
[
"def",
"set_exception",
"(",
"self",
",",
"exception",
",",
"override",
"=",
"False",
")",
":",
"with",
"self",
".",
"_lock",
":",
"if",
"not",
"self",
".",
"done",
"(",
")",
"or",
"override",
":",
"self",
".",
"_exception",
"=",
"exception",
"self",
".",
"_status",
"=",
"'failed'"
] |
2aead638c8385d8ae0b1756b2de17e8fad45fffa
|
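A sketch contrasting the default no-override behavior with override=True; assumes s3transfer is installed and that an exception property exposes the stored exception.

from s3transfer.futures import TransferCoordinator

coordinator = TransferCoordinator(transfer_id=3)
coordinator.set_exception(ValueError('first'))      # status -> 'failed'
coordinator.set_exception(RuntimeError('ignored'))  # already done, no override: dropped
coordinator.set_exception(RuntimeError('kept'), override=True)
print(type(coordinator.exception).__name__)  # RuntimeError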
test
|
TransferCoordinator.result
|
Waits until TransferFuture is done and returns the result
If the TransferFuture succeeded, it will return the result. If the
TransferFuture failed, it will raise the exception associated with the
failure.
|
s3transfer/futures.py
|
def result(self):
"""Waits until TransferFuture is done and returns the result
If the TransferFuture succeeded, it will return the result. If the
    TransferFuture failed, it will raise the exception associated with the
failure.
"""
# Doing a wait() with no timeout cannot be interrupted in python2 but
# can be interrupted in python3 so we just wait with the largest
    # possible integer value, which is on the scale of billions of
# years...
self._done_event.wait(MAXINT)
# Once done waiting, raise an exception if present or return the
# final result.
if self._exception:
raise self._exception
return self._result
|
def result(self):
"""Waits until TransferFuture is done and returns the result
If the TransferFuture succeeded, it will return the result. If the
    TransferFuture failed, it will raise the exception associated with the
failure.
"""
# Doing a wait() with no timeout cannot be interrupted in python2 but
# can be interrupted in python3 so we just wait with the largest
    # possible integer value, which is on the scale of billions of
# years...
self._done_event.wait(MAXINT)
# Once done waiting, raise an exception if present or return the
# final result.
if self._exception:
raise self._exception
return self._result
|
[
"Waits",
"until",
"TransferFuture",
"is",
"done",
"and",
"returns",
"the",
"result"
] |
boto/s3transfer
|
python
|
https://github.com/boto/s3transfer/blob/2aead638c8385d8ae0b1756b2de17e8fad45fffa/s3transfer/futures.py#L249-L266
|
[
"def",
"result",
"(",
"self",
")",
":",
"# Doing a wait() with no timeout cannot be interrupted in python2 but",
"# can be interrupted in python3 so we just wait with the largest",
"# possible value integer value, which is on the scale of billions of",
"# years...",
"self",
".",
"_done_event",
".",
"wait",
"(",
"MAXINT",
")",
"# Once done waiting, raise an exception if present or return the",
"# final result.",
"if",
"self",
".",
"_exception",
":",
"raise",
"self",
".",
"_exception",
"return",
"self",
".",
"_result"
] |
2aead638c8385d8ae0b1756b2de17e8fad45fffa
|
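A sketch of the blocking behavior: result() waits on the done event, so completion can be signaled from another thread. Assumes s3transfer is installed; the 0.1 s timer is arbitrary.

import threading
from s3transfer.futures import TransferCoordinator

coordinator = TransferCoordinator(transfer_id=4)

def finish():
    coordinator.set_result('payload')
    coordinator.announce_done()

threading.Timer(0.1, finish).start()
print(coordinator.result())  # blocks ~100 ms, then prints: payload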
test
|
TransferCoordinator.cancel
|
Cancels the TransferFuture
:param msg: The message to attach to the cancellation
:param exc_type: The type of exception to set for the cancellation
|
s3transfer/futures.py
|
def cancel(self, msg='', exc_type=CancelledError):
"""Cancels the TransferFuture
:param msg: The message to attach to the cancellation
:param exc_type: The type of exception to set for the cancellation
"""
with self._lock:
if not self.done():
should_announce_done = False
logger.debug('%s cancel(%s) called', self, msg)
self._exception = exc_type(msg)
if self._status == 'not-started':
should_announce_done = True
self._status = 'cancelled'
if should_announce_done:
self.announce_done()
|
def cancel(self, msg='', exc_type=CancelledError):
"""Cancels the TransferFuture
:param msg: The message to attach to the cancellation
:param exc_type: The type of exception to set for the cancellation
"""
with self._lock:
if not self.done():
should_announce_done = False
logger.debug('%s cancel(%s) called', self, msg)
self._exception = exc_type(msg)
if self._status == 'not-started':
should_announce_done = True
self._status = 'cancelled'
if should_announce_done:
self.announce_done()
|
[
"Cancels",
"the",
"TransferFuture"
] |
boto/s3transfer
|
python
|
https://github.com/boto/s3transfer/blob/2aead638c8385d8ae0b1756b2de17e8fad45fffa/s3transfer/futures.py#L268-L283
|
[
"def",
"cancel",
"(",
"self",
",",
"msg",
"=",
"''",
",",
"exc_type",
"=",
"CancelledError",
")",
":",
"with",
"self",
".",
"_lock",
":",
"if",
"not",
"self",
".",
"done",
"(",
")",
":",
"should_announce_done",
"=",
"False",
"logger",
".",
"debug",
"(",
"'%s cancel(%s) called'",
",",
"self",
",",
"msg",
")",
"self",
".",
"_exception",
"=",
"exc_type",
"(",
"msg",
")",
"if",
"self",
".",
"_status",
"==",
"'not-started'",
":",
"should_announce_done",
"=",
"True",
"self",
".",
"_status",
"=",
"'cancelled'",
"if",
"should_announce_done",
":",
"self",
".",
"announce_done",
"(",
")"
] |
2aead638c8385d8ae0b1756b2de17e8fad45fffa
|
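A sketch of cancelling a transfer that never started; per the code above, a not-started cancel announces done itself, so result() raises immediately rather than blocking. Assumes s3transfer is installed.

from s3transfer.futures import TransferCoordinator
from s3transfer.exceptions import CancelledError

coordinator = TransferCoordinator(transfer_id=5)
coordinator.cancel('user requested abort')  # not-started, so done is announced

try:
    coordinator.result()
except CancelledError as exc:
    print(exc)  # user requested abort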
test
|
TransferCoordinator.submit
|
Submits a task to a provided executor
:type executor: s3transfer.futures.BoundedExecutor
:param executor: The executor to submit the callable to
:type task: s3transfer.tasks.Task
:param task: The task to submit to the executor
:type tag: s3transfer.futures.TaskTag
:param tag: A tag to associate to the submitted task
:rtype: concurrent.futures.Future
:returns: A future representing the submitted task
|
s3transfer/futures.py
|
def submit(self, executor, task, tag=None):
"""Submits a task to a provided executor
:type executor: s3transfer.futures.BoundedExecutor
:param executor: The executor to submit the callable to
:type task: s3transfer.tasks.Task
:param task: The task to submit to the executor
:type tag: s3transfer.futures.TaskTag
:param tag: A tag to associate to the submitted task
:rtype: concurrent.futures.Future
:returns: A future representing the submitted task
"""
logger.debug(
"Submitting task %s to executor %s for transfer request: %s." % (
task, executor, self.transfer_id)
)
future = executor.submit(task, tag=tag)
    # Add this created future to the list of associated futures just
# in case it is needed during cleanups.
self.add_associated_future(future)
future.add_done_callback(
FunctionContainer(self.remove_associated_future, future))
return future
|
def submit(self, executor, task, tag=None):
"""Submits a task to a provided executor
:type executor: s3transfer.futures.BoundedExecutor
:param executor: The executor to submit the callable to
:type task: s3transfer.tasks.Task
:param task: The task to submit to the executor
:type tag: s3transfer.futures.TaskTag
:param tag: A tag to associate to the submitted task
:rtype: concurrent.futures.Future
:returns: A future representing the submitted task
"""
logger.debug(
"Submitting task %s to executor %s for transfer request: %s." % (
task, executor, self.transfer_id)
)
future = executor.submit(task, tag=tag)
    # Add this created future to the list of associated futures just
# in case it is needed during cleanups.
self.add_associated_future(future)
future.add_done_callback(
FunctionContainer(self.remove_associated_future, future))
return future
|
[
"Submits",
"a",
"task",
"to",
"a",
"provided",
"executor"
] |
boto/s3transfer
|
python
|
https://github.com/boto/s3transfer/blob/2aead638c8385d8ae0b1756b2de17e8fad45fffa/s3transfer/futures.py#L301-L326
|
[
"def",
"submit",
"(",
"self",
",",
"executor",
",",
"task",
",",
"tag",
"=",
"None",
")",
":",
"logger",
".",
"debug",
"(",
"\"Submitting task %s to executor %s for transfer request: %s.\"",
"%",
"(",
"task",
",",
"executor",
",",
"self",
".",
"transfer_id",
")",
")",
"future",
"=",
"executor",
".",
"submit",
"(",
"task",
",",
"tag",
"=",
"tag",
")",
"# Add this created future to the list of associated future just",
"# in case it is needed during cleanups.",
"self",
".",
"add_associated_future",
"(",
"future",
")",
"future",
".",
"add_done_callback",
"(",
"FunctionContainer",
"(",
"self",
".",
"remove_associated_future",
",",
"future",
")",
")",
"return",
"future"
] |
2aead638c8385d8ae0b1756b2de17e8fad45fffa
|
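submit() needs a BoundedExecutor and a Task subclass, so this sketch reproduces only its bookkeeping pattern with the standard library: track each in-flight future and drop it from a done callback. The in_flight set and untrack helper are hypothetical names.

from concurrent.futures import ThreadPoolExecutor

in_flight = set()

def untrack(future):
    in_flight.discard(future)

with ThreadPoolExecutor(max_workers=2) as executor:
    future = executor.submit(pow, 2, 10)
    in_flight.add(future)
    future.add_done_callback(untrack)  # called with the future when it completes
    print(future.result())  # 1024

print(in_flight)  # set()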
test
|
TransferCoordinator.add_done_callback
|
Add a done callback to be invoked when transfer is done
|
s3transfer/futures.py
|
def add_done_callback(self, function, *args, **kwargs):
"""Add a done callback to be invoked when transfer is done"""
with self._done_callbacks_lock:
self._done_callbacks.append(
FunctionContainer(function, *args, **kwargs)
)
|
def add_done_callback(self, function, *args, **kwargs):
"""Add a done callback to be invoked when transfer is done"""
with self._done_callbacks_lock:
self._done_callbacks.append(
FunctionContainer(function, *args, **kwargs)
)
|
[
"Add",
"a",
"done",
"callback",
"to",
"be",
"invoked",
"when",
"transfer",
"is",
"done"
] |
boto/s3transfer
|
python
|
https://github.com/boto/s3transfer/blob/2aead638c8385d8ae0b1756b2de17e8fad45fffa/s3transfer/futures.py#L346-L351
|
[
"def",
"add_done_callback",
"(",
"self",
",",
"function",
",",
"*",
"args",
",",
"*",
"*",
"kwargs",
")",
":",
"with",
"self",
".",
"_done_callbacks_lock",
":",
"self",
".",
"_done_callbacks",
".",
"append",
"(",
"FunctionContainer",
"(",
"function",
",",
"*",
"args",
",",
"*",
"*",
"kwargs",
")",
")"
] |
2aead638c8385d8ae0b1756b2de17e8fad45fffa
|
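A sketch of queueing a done callback and firing it via announce_done(), assuming s3transfer is installed; the callback arguments are arbitrary.

from s3transfer.futures import TransferCoordinator

coordinator = TransferCoordinator(transfer_id=6)
coordinator.add_done_callback(print, 'transfer', 'finished')

coordinator.set_result(None)
coordinator.announce_done()  # runs the queued callback: prints "transfer finished"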