| partition (stringclasses, 3 values) | func_name (stringlengths, 1 to 134) | docstring (stringlengths, 1 to 46.9k) | path (stringlengths, 4 to 223) | original_string (stringlengths, 75 to 104k) | code (stringlengths, 75 to 104k) | docstring_tokens (listlengths, 1 to 1.97k) | repo (stringlengths, 7 to 55) | language (stringclasses, 1 value) | url (stringlengths, 87 to 315) | code_tokens (listlengths, 19 to 28.4k) | sha (stringlengths, 40 to 40) |
|---|---|---|---|---|---|---|---|---|---|---|---|
test
|
pack
|
Packs a sub field's array into another array using a mask
Parameters
----------
array : numpy.ndarray
The array into which the sub field array will be packed
sub_field_array : numpy.ndarray
The sub field array to pack
mask : int (e.g. 0b00001111)
Mask of the sub field
inplace : bool, optional
If True, array is modified in place; otherwise a new packed array is returned (the default is False)
Raises
------
OverflowError
If the sub field array contains values greater than the mask's number of bits allows
|
pylas/point/packing.py
|
def pack(array, sub_field_array, mask, inplace=False):
""" Packs a sub field's array into another array using a mask
Parameters
----------
array : numpy.ndarray
The array into which the sub field array will be packed
sub_field_array : numpy.ndarray
The sub field array to pack
mask : int (e.g. 0b00001111)
Mask of the sub field
inplace : bool, optional
If True, array is modified in place; otherwise a new packed array is returned (the default is False)
Raises
------
OverflowError
If the sub field array contains values greater than the mask's number of bits allows
"""
lsb = least_significant_bit(mask)
max_value = int(mask >> lsb)
if sub_field_array.max() > max_value:
raise OverflowError(
"value ({}) is greater than allowed (max: {})".format(
sub_field_array.max(), max_value
)
)
if inplace:
array[:] = array & ~mask
array[:] = array | ((sub_field_array << lsb) & mask).astype(array.dtype)
else:
array = array & ~mask
return array | ((sub_field_array << lsb) & mask).astype(array.dtype)
|
def pack(array, sub_field_array, mask, inplace=False):
""" Packs a sub field's array into another array using a mask
Parameters
----------
array : numpy.ndarray
The array into which the sub field array will be packed
sub_field_array : numpy.ndarray
The sub field array to pack
mask : int (e.g. 0b00001111)
Mask of the sub field
inplace : bool, optional
If True, array is modified in place; otherwise a new packed array is returned (the default is False)
Raises
------
OverflowError
If the sub field array contains values greater than the mask's number of bits allows
"""
lsb = least_significant_bit(mask)
max_value = int(mask >> lsb)
if sub_field_array.max() > max_value:
raise OverflowError(
"value ({}) is greater than allowed (max: {})".format(
sub_field_array.max(), max_value
)
)
if inplace:
array[:] = array & ~mask
array[:] = array | ((sub_field_array << lsb) & mask).astype(array.dtype)
else:
array = array & ~mask
return array | ((sub_field_array << lsb) & mask).astype(array.dtype)
|
[
"Packs",
"a",
"sub",
"field",
"s",
"array",
"into",
"another",
"array",
"using",
"a",
"mask"
] |
tmontaigu/pylas
|
python
|
https://github.com/tmontaigu/pylas/blob/8335a1a7d7677f0e4bc391bb6fa3c75b42ed5b06/pylas/point/packing.py#L30-L63
|
[
"def",
"pack",
"(",
"array",
",",
"sub_field_array",
",",
"mask",
",",
"inplace",
"=",
"False",
")",
":",
"lsb",
"=",
"least_significant_bit",
"(",
"mask",
")",
"max_value",
"=",
"int",
"(",
"mask",
">>",
"lsb",
")",
"if",
"sub_field_array",
".",
"max",
"(",
")",
">",
"max_value",
":",
"raise",
"OverflowError",
"(",
"\"value ({}) is greater than allowed (max: {})\"",
".",
"format",
"(",
"sub_field_array",
".",
"max",
"(",
")",
",",
"max_value",
")",
")",
"if",
"inplace",
":",
"array",
"[",
":",
"]",
"=",
"array",
"&",
"~",
"mask",
"array",
"[",
":",
"]",
"=",
"array",
"|",
"(",
"(",
"sub_field_array",
"<<",
"lsb",
")",
"&",
"mask",
")",
".",
"astype",
"(",
"array",
".",
"dtype",
")",
"else",
":",
"array",
"=",
"array",
"&",
"~",
"mask",
"return",
"array",
"|",
"(",
"(",
"sub_field_array",
"<<",
"lsb",
")",
"&",
"mask",
")",
".",
"astype",
"(",
"array",
".",
"dtype",
")"
] |
8335a1a7d7677f0e4bc391bb6fa3c75b42ed5b06
|
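A minimal sketch of the masked packing above, assuming a standalone `least_significant_bit` helper (pylas has its own; this one is written only for the example) and made-up sub-field values:

```python
import numpy as np

def least_significant_bit(mask):
    # Index of the lowest set bit: 0b00001111 -> 0, 0b11100000 -> 5.
    return (mask & -mask).bit_length() - 1

# Composed byte that holds several sub fields; pack values into bits 0-3.
composed = np.zeros(4, dtype=np.uint8)
sub_field = np.array([1, 2, 3, 4], dtype=np.uint8)
mask = 0b00001111

lsb = least_significant_bit(mask)                 # 0
inv_mask = np.uint8(~mask & 0xFF)                 # 0b11110000, kept in uint8 range
packed = (composed & inv_mask) | ((sub_field << lsb) & mask).astype(composed.dtype)
print(packed)                                     # [1 2 3 4]

# Unpacking is the reverse: mask the composed field, then shift back down.
print((packed & mask) >> lsb)                     # [1 2 3 4]
```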
test
|
lost_dimensions
|
Returns a list of the names of the dimensions that will be lost
when converting from point_fmt_in to point_fmt_out
|
pylas/point/format.py
|
def lost_dimensions(point_fmt_in, point_fmt_out):
""" Returns a list of the names of the dimensions that will be lost
when converting from point_fmt_in to point_fmt_out
"""
unpacked_dims_in = PointFormat(point_fmt_in).dtype
unpacked_dims_out = PointFormat(point_fmt_out).dtype
out_dims = unpacked_dims_out.fields
completely_lost = []
for dim_name in unpacked_dims_in.names:
if dim_name not in out_dims:
completely_lost.append(dim_name)
return completely_lost
|
def lost_dimensions(point_fmt_in, point_fmt_out):
""" Returns a list of the names of the dimensions that will be lost
when converting from point_fmt_in to point_fmt_out
"""
unpacked_dims_in = PointFormat(point_fmt_in).dtype
unpacked_dims_out = PointFormat(point_fmt_out).dtype
out_dims = unpacked_dims_out.fields
completely_lost = []
for dim_name in unpacked_dims_in.names:
if dim_name not in out_dims:
completely_lost.append(dim_name)
return completely_lost
|
[
"Returns",
"a",
"list",
"of",
"the",
"names",
"of",
"the",
"dimensions",
"that",
"will",
"be",
"lost",
"when",
"converting",
"from",
"point_fmt_in",
"to",
"point_fmt_out"
] |
tmontaigu/pylas
|
python
|
https://github.com/tmontaigu/pylas/blob/8335a1a7d7677f0e4bc391bb6fa3c75b42ed5b06/pylas/point/format.py#L150-L163
|
[
"def",
"lost_dimensions",
"(",
"point_fmt_in",
",",
"point_fmt_out",
")",
":",
"unpacked_dims_in",
"=",
"PointFormat",
"(",
"point_fmt_in",
")",
".",
"dtype",
"unpacked_dims_out",
"=",
"PointFormat",
"(",
"point_fmt_out",
")",
".",
"dtype",
"out_dims",
"=",
"unpacked_dims_out",
".",
"fields",
"completely_lost",
"=",
"[",
"]",
"for",
"dim_name",
"in",
"unpacked_dims_in",
".",
"names",
":",
"if",
"dim_name",
"not",
"in",
"out_dims",
":",
"completely_lost",
".",
"append",
"(",
"dim_name",
")",
"return",
"completely_lost"
] |
8335a1a7d7677f0e4bc391bb6fa3c75b42ed5b06
|
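The comparison is just a set difference over structured-dtype field names; a small sketch with made-up stand-ins for `PointFormat(fmt).dtype` (real LAS point formats carry more fields):

```python
import numpy as np

# Hypothetical "in" and "out" point record layouts.
fmt_in = np.dtype([("X", "i4"), ("Y", "i4"), ("Z", "i4"), ("gps_time", "f8")])
fmt_out = np.dtype([("X", "i4"), ("Y", "i4"), ("Z", "i4")])

lost = [name for name in fmt_in.names if name not in fmt_out.fields]
print(lost)  # ['gps_time'] would be dropped by the conversion
```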
test
|
PointFormat.dtype
|
Returns the numpy.dtype used to store the point records in a numpy array
.. note::
The dtype corresponds to the dtype with sub_fields *packed* into their
composed fields
|
pylas/point/format.py
|
def dtype(self):
""" Returns the numpy.dtype used to store the point records in a numpy array
.. note::
The dtype corresponds to the dtype with sub_fields *packed* into their
composed fields
"""
dtype = self._access_dict(dims.ALL_POINT_FORMATS_DTYPE, self.id)
dtype = self._dtype_add_extra_dims(dtype)
return dtype
|
def dtype(self):
""" Returns the numpy.dtype used to store the point records in a numpy array
.. note::
The dtype corresponds to the dtype with sub_fields *packed* into their
composed fields
"""
dtype = self._access_dict(dims.ALL_POINT_FORMATS_DTYPE, self.id)
dtype = self._dtype_add_extra_dims(dtype)
return dtype
|
[
"Returns",
"the",
"numpy",
".",
"dtype",
"used",
"to",
"store",
"the",
"point",
"records",
"in",
"a",
"numpy",
"array"
] |
tmontaigu/pylas
|
python
|
https://github.com/tmontaigu/pylas/blob/8335a1a7d7677f0e4bc391bb6fa3c75b42ed5b06/pylas/point/format.py#L41-L52
|
[
"def",
"dtype",
"(",
"self",
")",
":",
"dtype",
"=",
"self",
".",
"_access_dict",
"(",
"dims",
".",
"ALL_POINT_FORMATS_DTYPE",
",",
"self",
".",
"id",
")",
"dtype",
"=",
"self",
".",
"_dtype_add_extra_dims",
"(",
"dtype",
")",
"return",
"dtype"
] |
8335a1a7d7677f0e4bc391bb6fa3c75b42ed5b06
|
test
|
PointFormat.unpacked_dtype
|
Returns the numpy.dtype used to store the point records in a numpy array
.. note::
The dtype corresponds to the dtype with sub_fields *unpacked*
|
pylas/point/format.py
|
def unpacked_dtype(self):
""" Returns the numpy.dtype used to store the point records in a numpy array
.. note::
The dtype corresponds to the dtype with sub_fields *unpacked*
"""
dtype = self._access_dict(dims.UNPACKED_POINT_FORMATS_DTYPES, self.id)
dtype = self._dtype_add_extra_dims(dtype)
return dtype
|
def unpacked_dtype(self):
""" Returns the numpy.dtype used to store the point records in a numpy array
.. note::
The dtype corresponds to the dtype with sub_fields *unpacked*
"""
dtype = self._access_dict(dims.UNPACKED_POINT_FORMATS_DTYPES, self.id)
dtype = self._dtype_add_extra_dims(dtype)
return dtype
|
[
"Returns",
"the",
"numpy",
".",
"dtype",
"used",
"to",
"store",
"the",
"point",
"records",
"in",
"a",
"numpy",
"array"
] |
tmontaigu/pylas
|
python
|
https://github.com/tmontaigu/pylas/blob/8335a1a7d7677f0e4bc391bb6fa3c75b42ed5b06/pylas/point/format.py#L55-L65
|
[
"def",
"unpacked_dtype",
"(",
"self",
")",
":",
"dtype",
"=",
"self",
".",
"_access_dict",
"(",
"dims",
".",
"UNPACKED_POINT_FORMATS_DTYPES",
",",
"self",
".",
"id",
")",
"dtype",
"=",
"self",
".",
"_dtype_add_extra_dims",
"(",
"dtype",
")",
"return",
"dtype"
] |
8335a1a7d7677f0e4bc391bb6fa3c75b42ed5b06
|
test
|
PointFormat.sub_fields
|
Returns a dict of the sub fields for this point format
Returns
-------
Dict[str, Tuple[str, SubField]]
maps a sub field name to its composed dimension with additional information
|
pylas/point/format.py
|
def sub_fields(self):
""" Returns a dict of the sub fields for this point format
Returns
-------
Dict[str, Tuple[str, SubField]]
maps a sub field name to its composed dimension with additional information
"""
sub_fields_dict = {}
for composed_dim_name, sub_fields in self.composed_fields.items():
for sub_field in sub_fields:
sub_fields_dict[sub_field.name] = (composed_dim_name, sub_field)
return sub_fields_dict
|
def sub_fields(self):
""" Returns a dict of the sub fields for this point format
Returns
-------
Dict[str, Tuple[str, SubField]]
maps a sub field name to its composed dimension with additional information
"""
sub_fields_dict = {}
for composed_dim_name, sub_fields in self.composed_fields.items():
for sub_field in sub_fields:
sub_fields_dict[sub_field.name] = (composed_dim_name, sub_field)
return sub_fields_dict
|
[
"Returns",
"a",
"dict",
"of",
"the",
"sub",
"fields",
"for",
"this",
"point",
"format"
] |
tmontaigu/pylas
|
python
|
https://github.com/tmontaigu/pylas/blob/8335a1a7d7677f0e4bc391bb6fa3c75b42ed5b06/pylas/point/format.py#L80-L93
|
[
"def",
"sub_fields",
"(",
"self",
")",
":",
"sub_fields_dict",
"=",
"{",
"}",
"for",
"composed_dim_name",
",",
"sub_fields",
"in",
"self",
".",
"composed_fields",
".",
"items",
"(",
")",
":",
"for",
"sub_field",
"in",
"sub_fields",
":",
"sub_fields_dict",
"[",
"sub_field",
".",
"name",
"]",
"=",
"(",
"composed_dim_name",
",",
"sub_field",
")",
"return",
"sub_fields_dict"
] |
8335a1a7d7677f0e4bc391bb6fa3c75b42ed5b06
|
test
|
PointFormat.num_extra_bytes
|
Returns the number of extra bytes
|
pylas/point/format.py
|
def num_extra_bytes(self):
""" Returns the number of extra bytes
"""
return sum(np.dtype(extra_dim[1]).itemsize for extra_dim in self.extra_dims)
|
def num_extra_bytes(self):
""" Returns the number of extra bytes
"""
return sum(np.dtype(extra_dim[1]).itemsize for extra_dim in self.extra_dims)
|
[
"Returns",
"the",
"number",
"of",
"extra",
"bytes"
] |
tmontaigu/pylas
|
python
|
https://github.com/tmontaigu/pylas/blob/8335a1a7d7677f0e4bc391bb6fa3c75b42ed5b06/pylas/point/format.py#L102-L105
|
[
"def",
"num_extra_bytes",
"(",
"self",
")",
":",
"return",
"sum",
"(",
"np",
".",
"dtype",
"(",
"extra_dim",
"[",
"1",
"]",
")",
".",
"itemsize",
"for",
"extra_dim",
"in",
"self",
".",
"extra_dims",
")"
] |
8335a1a7d7677f0e4bc391bb6fa3c75b42ed5b06
|
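The sum is simply over the itemsize of each extra dimension's dtype; a tiny illustration with hypothetical extra dimensions stored as (name, type-string) tuples, as the `extra_dim[1]` indexing above suggests:

```python
import numpy as np

extra_dims = [("confidence", "u1"), ("reflectance", "f8")]  # hypothetical extras
print(sum(np.dtype(t).itemsize for _, t in extra_dims))     # 1 + 8 = 9 extra bytes
```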
test
|
PointFormat.has_waveform_packet
|
Returns True if the point format has waveform packet dimensions
|
pylas/point/format.py
|
def has_waveform_packet(self):
""" Returns True if the point format has waveform packet dimensions
"""
dimensions = set(self.dimension_names)
return all(name in dimensions for name in dims.WAVEFORM_FIELDS_NAMES)
|
def has_waveform_packet(self):
""" Returns True if the point format has waveform packet dimensions
"""
dimensions = set(self.dimension_names)
return all(name in dimensions for name in dims.WAVEFORM_FIELDS_NAMES)
|
[
"Returns",
"True",
"if",
"the",
"point",
"format",
"has",
"waveform",
"packet",
"dimensions"
] |
tmontaigu/pylas
|
python
|
https://github.com/tmontaigu/pylas/blob/8335a1a7d7677f0e4bc391bb6fa3c75b42ed5b06/pylas/point/format.py#L108-L112
|
[
"def",
"has_waveform_packet",
"(",
"self",
")",
":",
"dimensions",
"=",
"set",
"(",
"self",
".",
"dimension_names",
")",
"return",
"all",
"(",
"name",
"in",
"dimensions",
"for",
"name",
"in",
"dims",
".",
"WAVEFORM_FIELDS_NAMES",
")"
] |
8335a1a7d7677f0e4bc391bb6fa3c75b42ed5b06
|
test
|
main
|
Console script for satel_integra.
|
satel_integra/cli.py
|
def main(port, ip, command, loglevel):
"""Console script for satel_integra."""
numeric_level = getattr(logging, loglevel.upper(), None)
if not isinstance(numeric_level, int):
raise ValueError('Invalid log level: %s' % loglevel)
logging.basicConfig(level=numeric_level)
click.echo("Demo of satel_integra library")
if command == "demo":
demo(ip, port)
|
def main(port, ip, command, loglevel):
"""Console script for satel_integra."""
numeric_level = getattr(logging, loglevel.upper(), None)
if not isinstance(numeric_level, int):
raise ValueError('Invalid log level: %s' % loglevel)
logging.basicConfig(level=numeric_level)
click.echo("Demo of satel_integra library")
if command == "demo":
demo(ip, port)
|
[
"Console",
"script",
"for",
"satel_integra",
"."
] |
c-soft/satel_integra
|
python
|
https://github.com/c-soft/satel_integra/blob/3b6d2020d1e10dc5aa40f30ee4ecc0f3a053eb3c/satel_integra/cli.py#L16-L26
|
[
"def",
"main",
"(",
"port",
",",
"ip",
",",
"command",
",",
"loglevel",
")",
":",
"numeric_level",
"=",
"getattr",
"(",
"logging",
",",
"loglevel",
".",
"upper",
"(",
")",
",",
"None",
")",
"if",
"not",
"isinstance",
"(",
"numeric_level",
",",
"int",
")",
":",
"raise",
"ValueError",
"(",
"'Invalid log level: %s'",
"%",
"loglevel",
")",
"logging",
".",
"basicConfig",
"(",
"level",
"=",
"numeric_level",
")",
"click",
".",
"echo",
"(",
"\"Demo of satel_integra library\"",
")",
"if",
"command",
"==",
"\"demo\"",
":",
"demo",
"(",
"ip",
",",
"port",
")"
] |
3b6d2020d1e10dc5aa40f30ee4ecc0f3a053eb3c
|
test
|
checksum
|
Function to calculate checksum as per Satel manual.
|
satel_integra/satel_integra.py
|
def checksum(command):
"""Function to calculate checksum as per Satel manual."""
crc = 0x147A
for b in command:
# rotate (crc 1 bit left)
crc = ((crc << 1) & 0xFFFF) | (crc & 0x8000) >> 15
crc = crc ^ 0xFFFF
crc = (crc + (crc >> 8) + b) & 0xFFFF
return crc
|
def checksum(command):
"""Function to calculate checksum as per Satel manual."""
crc = 0x147A
for b in command:
# rotate (crc 1 bit left)
crc = ((crc << 1) & 0xFFFF) | (crc & 0x8000) >> 15
crc = crc ^ 0xFFFF
crc = (crc + (crc >> 8) + b) & 0xFFFF
return crc
|
[
"Function",
"to",
"calculate",
"checksum",
"as",
"per",
"Satel",
"manual",
"."
] |
c-soft/satel_integra
|
python
|
https://github.com/c-soft/satel_integra/blob/3b6d2020d1e10dc5aa40f30ee4ecc0f3a053eb3c/satel_integra/satel_integra.py#L12-L20
|
[
"def",
"checksum",
"(",
"command",
")",
":",
"crc",
"=",
"0x147A",
"for",
"b",
"in",
"command",
":",
"# rotate (crc 1 bit left)",
"crc",
"=",
"(",
"(",
"crc",
"<<",
"1",
")",
"&",
"0xFFFF",
")",
"|",
"(",
"crc",
"&",
"0x8000",
")",
">>",
"15",
"crc",
"=",
"crc",
"^",
"0xFFFF",
"crc",
"=",
"(",
"crc",
"+",
"(",
"crc",
">>",
"8",
")",
"+",
"b",
")",
"&",
"0xFFFF",
"return",
"crc"
] |
3b6d2020d1e10dc5aa40f30ee4ecc0f3a053eb3c
|
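A quick sanity run of the routine above, with the definition repeated so the snippet stands alone; the sample payload is the "read alarm status" command used by `keep_alive` further down:

```python
def checksum(command):
    """Function to calculate checksum as per Satel manual."""
    crc = 0x147A
    for b in command:
        # rotate (crc 1 bit left)
        crc = ((crc << 1) & 0xFFFF) | (crc & 0x8000) >> 15
        crc = crc ^ 0xFFFF
        crc = (crc + (crc >> 8) + b) & 0xFFFF
    return crc

print(hex(checksum(b'\xEE\x01\x01')))  # 0x6308 with this implementation
```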
test
|
print_hex
|
Debugging method to print out frames in hex.
|
satel_integra/satel_integra.py
|
def print_hex(data):
"""Debugging method to print out frames in hex."""
hex_msg = ""
for c in data:
hex_msg += "\\x" + format(c, "02x")
_LOGGER.debug(hex_msg)
|
def print_hex(data):
"""Debugging method to print out frames in hex."""
hex_msg = ""
for c in data:
hex_msg += "\\x" + format(c, "02x")
_LOGGER.debug(hex_msg)
|
[
"Debugging",
"method",
"to",
"print",
"out",
"frames",
"in",
"hex",
"."
] |
c-soft/satel_integra
|
python
|
https://github.com/c-soft/satel_integra/blob/3b6d2020d1e10dc5aa40f30ee4ecc0f3a053eb3c/satel_integra/satel_integra.py#L23-L28
|
[
"def",
"print_hex",
"(",
"data",
")",
":",
"hex_msg",
"=",
"\"\"",
"for",
"c",
"in",
"data",
":",
"hex_msg",
"+=",
"\"\\\\x\"",
"+",
"format",
"(",
"c",
",",
"\"02x\"",
")",
"_LOGGER",
".",
"debug",
"(",
"hex_msg",
")"
] |
3b6d2020d1e10dc5aa40f30ee4ecc0f3a053eb3c
|
test
|
verify_and_strip
|
Verify checksum and strip header and footer of received frame.
|
satel_integra/satel_integra.py
|
def verify_and_strip(resp):
"""Verify checksum and strip header and footer of received frame."""
if resp[0:2] != b'\xFE\xFE':
_LOGGER.error("Houston, we got problem:")
print_hex(resp)
raise Exception("Wrong header - got %X%X" % (resp[0], resp[1]))
if resp[-2:] != b'\xFE\x0D':
raise Exception("Wrong footer - got %X%X" % (resp[-2], resp[-1]))
output = resp[2:-2].replace(b'\xFE\xF0', b'\xFE')
c = checksum(bytearray(output[0:-2]))
if (256 * output[-2:-1][0] + output[-1:][0]) != c:
raise Exception("Wrong checksum - got %d expected %d" % (
(256 * output[-2:-1][0] + output[-1:][0]), c))
return output[0:-2]
|
def verify_and_strip(resp):
"""Verify checksum and strip header and footer of received frame."""
if resp[0:2] != b'\xFE\xFE':
_LOGGER.error("Houston, we got problem:")
print_hex(resp)
raise Exception("Wrong header - got %X%X" % (resp[0], resp[1]))
if resp[-2:] != b'\xFE\x0D':
raise Exception("Wrong footer - got %X%X" % (resp[-2], resp[-1]))
output = resp[2:-2].replace(b'\xFE\xF0', b'\xFE')
c = checksum(bytearray(output[0:-2]))
if (256 * output[-2:-1][0] + output[-1:][0]) != c:
raise Exception("Wrong checksum - got %d expected %d" % (
(256 * output[-2:-1][0] + output[-1:][0]), c))
return output[0:-2]
|
[
"Verify",
"checksum",
"and",
"strip",
"header",
"and",
"footer",
"of",
"received",
"frame",
"."
] |
c-soft/satel_integra
|
python
|
https://github.com/c-soft/satel_integra/blob/3b6d2020d1e10dc5aa40f30ee4ecc0f3a053eb3c/satel_integra/satel_integra.py#L31-L47
|
[
"def",
"verify_and_strip",
"(",
"resp",
")",
":",
"if",
"resp",
"[",
"0",
":",
"2",
"]",
"!=",
"b'\\xFE\\xFE'",
":",
"_LOGGER",
".",
"error",
"(",
"\"Houston, we got problem:\"",
")",
"print_hex",
"(",
"resp",
")",
"raise",
"Exception",
"(",
"\"Wrong header - got %X%X\"",
"%",
"(",
"resp",
"[",
"0",
"]",
",",
"resp",
"[",
"1",
"]",
")",
")",
"if",
"resp",
"[",
"-",
"2",
":",
"]",
"!=",
"b'\\xFE\\x0D'",
":",
"raise",
"Exception",
"(",
"\"Wrong footer - got %X%X\"",
"%",
"(",
"resp",
"[",
"-",
"2",
"]",
",",
"resp",
"[",
"-",
"1",
"]",
")",
")",
"output",
"=",
"resp",
"[",
"2",
":",
"-",
"2",
"]",
".",
"replace",
"(",
"b'\\xFE\\xF0'",
",",
"b'\\xFE'",
")",
"c",
"=",
"checksum",
"(",
"bytearray",
"(",
"output",
"[",
"0",
":",
"-",
"2",
"]",
")",
")",
"if",
"(",
"256",
"*",
"output",
"[",
"-",
"2",
":",
"-",
"1",
"]",
"[",
"0",
"]",
"+",
"output",
"[",
"-",
"1",
":",
"]",
"[",
"0",
"]",
")",
"!=",
"c",
":",
"raise",
"Exception",
"(",
"\"Wrong checksum - got %d expected %d\"",
"%",
"(",
"(",
"256",
"*",
"output",
"[",
"-",
"2",
":",
"-",
"1",
"]",
"[",
"0",
"]",
"+",
"output",
"[",
"-",
"1",
":",
"]",
"[",
"0",
"]",
")",
",",
"c",
")",
")",
"return",
"output",
"[",
"0",
":",
"-",
"2",
"]"
] |
3b6d2020d1e10dc5aa40f30ee4ecc0f3a053eb3c
|
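The `output[-2:-1][0]` / `output[-1:][0]` slicing above is just a bytes-safe way of reading the big-endian checksum word from the end of the frame; a small equivalent:

```python
output = bytes([0xEE, 0x01, 0x01, 0x63, 0x08])  # payload followed by two checksum bytes
received = 256 * output[-2:-1][0] + output[-1:][0]
assert received == int.from_bytes(output[-2:], "big") == 0x6308
```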
test
|
list_set_bits
|
Return list of positions of bits set to one in given data.
This method is used to read e.g. violated zones. They are marked by ones
on respective bit positions - as per Satel manual.
|
satel_integra/satel_integra.py
|
def list_set_bits(r, expected_length):
"""Return list of positions of bits set to one in given data.
This method is used to read e.g. violated zones. They are marked by ones
on respective bit positions - as per Satel manual.
"""
set_bit_numbers = []
bit_index = 0x1
assert (len(r) == expected_length + 1)
for b in r[1:]:
for i in range(8):
if ((b >> i) & 1) == 1:
set_bit_numbers.append(bit_index)
bit_index += 1
return set_bit_numbers
|
def list_set_bits(r, expected_length):
"""Return list of positions of bits set to one in given data.
This method is used to read e.g. violated zones. They are marked by ones
on respective bit positions - as per Satel manual.
"""
set_bit_numbers = []
bit_index = 0x1
assert (len(r) == expected_length + 1)
for b in r[1:]:
for i in range(8):
if ((b >> i) & 1) == 1:
set_bit_numbers.append(bit_index)
bit_index += 1
return set_bit_numbers
|
[
"Return",
"list",
"of",
"positions",
"of",
"bits",
"set",
"to",
"one",
"in",
"given",
"data",
"."
] |
c-soft/satel_integra
|
python
|
https://github.com/c-soft/satel_integra/blob/3b6d2020d1e10dc5aa40f30ee4ecc0f3a053eb3c/satel_integra/satel_integra.py#L50-L66
|
[
"def",
"list_set_bits",
"(",
"r",
",",
"expected_length",
")",
":",
"set_bit_numbers",
"=",
"[",
"]",
"bit_index",
"=",
"0x1",
"assert",
"(",
"len",
"(",
"r",
")",
"==",
"expected_length",
"+",
"1",
")",
"for",
"b",
"in",
"r",
"[",
"1",
":",
"]",
":",
"for",
"i",
"in",
"range",
"(",
"8",
")",
":",
"if",
"(",
"(",
"b",
">>",
"i",
")",
"&",
"1",
")",
"==",
"1",
":",
"set_bit_numbers",
".",
"append",
"(",
"bit_index",
")",
"bit_index",
"+=",
"1",
"return",
"set_bit_numbers"
] |
3b6d2020d1e10dc5aa40f30ee4ecc0f3a053eb3c
|
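A standalone run of the helper above (reindented here, since the listing strips indentation); the first byte of a response is the command code, and set bits in the remaining bytes map to 1-based zone or output numbers:

```python
def list_set_bits(r, expected_length):
    set_bit_numbers = []
    bit_index = 0x1
    assert (len(r) == expected_length + 1)
    for b in r[1:]:
        for i in range(8):
            if ((b >> i) & 1) == 1:
                set_bit_numbers.append(bit_index)
            bit_index += 1
    return set_bit_numbers

# Command byte 0x00, then 0b00000101 (bits 1 and 3) and 0b00000001 (bit 9).
print(list_set_bits(bytes([0x00, 0b00000101, 0b00000001]), 2))  # [1, 3, 9]
```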
test
|
generate_query
|
Add header, checksum and footer to command data.
|
satel_integra/satel_integra.py
|
def generate_query(command):
"""Add header, checksum and footer to command data."""
data = bytearray(command)
c = checksum(data)
data.append(c >> 8)
data.append(c & 0xFF)
data.replace(b'\xFE', b'\xFE\xF0')
data = bytearray.fromhex("FEFE") + data + bytearray.fromhex("FE0D")
return data
|
def generate_query(command):
"""Add header, checksum and footer to command data."""
data = bytearray(command)
c = checksum(data)
data.append(c >> 8)
data.append(c & 0xFF)
data.replace(b'\xFE', b'\xFE\xF0')
data = bytearray.fromhex("FEFE") + data + bytearray.fromhex("FE0D")
return data
|
[
"Add",
"header",
"checksum",
"and",
"footer",
"to",
"command",
"data",
"."
] |
c-soft/satel_integra
|
python
|
https://github.com/c-soft/satel_integra/blob/3b6d2020d1e10dc5aa40f30ee4ecc0f3a053eb3c/satel_integra/satel_integra.py#L69-L78
|
[
"def",
"generate_query",
"(",
"command",
")",
":",
"data",
"=",
"bytearray",
"(",
"command",
")",
"c",
"=",
"checksum",
"(",
"data",
")",
"data",
".",
"append",
"(",
"c",
">>",
"8",
")",
"data",
".",
"append",
"(",
"c",
"&",
"0xFF",
")",
"data",
".",
"replace",
"(",
"b'\\xFE'",
",",
"b'\\xFE\\xF0'",
")",
"data",
"=",
"bytearray",
".",
"fromhex",
"(",
"\"FEFE\"",
")",
"+",
"data",
"+",
"bytearray",
".",
"fromhex",
"(",
"\"FE0D\"",
")",
"return",
"data"
] |
3b6d2020d1e10dc5aa40f30ee4ecc0f3a053eb3c
|
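Putting `checksum` and `generate_query` together (both repeated from the rows above, with one added comment) shows the frame layout: an FE FE header, the payload, a big-endian checksum, and an FE 0D footer. As written, `data.replace(...)` returns a new bytearray that is discarded, so the FE-escaping step does not alter `data`; that has no effect on this payload, which contains no 0xFE bytes:

```python
def checksum(command):
    crc = 0x147A
    for b in command:
        crc = ((crc << 1) & 0xFFFF) | (crc & 0x8000) >> 15
        crc = crc ^ 0xFFFF
        crc = (crc + (crc >> 8) + b) & 0xFFFF
    return crc

def generate_query(command):
    """Add header, checksum and footer to command data."""
    data = bytearray(command)
    c = checksum(data)
    data.append(c >> 8)
    data.append(c & 0xFF)
    data.replace(b'\xFE', b'\xFE\xF0')  # returns a new bytearray; the result is not kept
    data = bytearray.fromhex("FEFE") + data + bytearray.fromhex("FE0D")
    return data

frame = generate_query(b'\xEE\x01\x01')  # "read alarm status" payload
print(frame.hex())                       # fefeee01016308fe0d
# Feeding this frame to verify_and_strip above recovers b'\xEE\x01\x01'.
```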
test
|
demo
|
Basic demo of the monitoring capabilities.
|
satel_integra/satel_integra.py
|
def demo(host, port):
"""Basic demo of the monitoring capabilities."""
# logging.basicConfig(level=logging.DEBUG)
loop = asyncio.get_event_loop()
stl = AsyncSatel(host,
port,
loop,
[1, 2, 3, 4, 5, 6, 7, 8, 12, 13, 14, 15, 16, 17, 18, 19,
20, 21, 22, 23, 25, 26, 27, 28, 29, 30],
[8, 9, 10]
)
loop.run_until_complete(stl.connect())
loop.create_task(stl.arm("3333", 1))
loop.create_task(stl.disarm("3333"))
loop.create_task(stl.keep_alive())
loop.create_task(stl.monitor_status())
loop.run_forever()
loop.close()
|
def demo(host, port):
"""Basic demo of the monitoring capabilities."""
# logging.basicConfig(level=logging.DEBUG)
loop = asyncio.get_event_loop()
stl = AsyncSatel(host,
port,
loop,
[1, 2, 3, 4, 5, 6, 7, 8, 12, 13, 14, 15, 16, 17, 18, 19,
20, 21, 22, 23, 25, 26, 27, 28, 29, 30],
[8, 9, 10]
)
loop.run_until_complete(stl.connect())
loop.create_task(stl.arm("3333", 1))
loop.create_task(stl.disarm("3333"))
loop.create_task(stl.keep_alive())
loop.create_task(stl.monitor_status())
loop.run_forever()
loop.close()
|
[
"Basic",
"demo",
"of",
"the",
"monitoring",
"capabilities",
"."
] |
c-soft/satel_integra
|
python
|
https://github.com/c-soft/satel_integra/blob/3b6d2020d1e10dc5aa40f30ee4ecc0f3a053eb3c/satel_integra/satel_integra.py#L453-L473
|
[
"def",
"demo",
"(",
"host",
",",
"port",
")",
":",
"# logging.basicConfig(level=logging.DEBUG)",
"loop",
"=",
"asyncio",
".",
"get_event_loop",
"(",
")",
"stl",
"=",
"AsyncSatel",
"(",
"host",
",",
"port",
",",
"loop",
",",
"[",
"1",
",",
"2",
",",
"3",
",",
"4",
",",
"5",
",",
"6",
",",
"7",
",",
"8",
",",
"12",
",",
"13",
",",
"14",
",",
"15",
",",
"16",
",",
"17",
",",
"18",
",",
"19",
",",
"20",
",",
"21",
",",
"22",
",",
"23",
",",
"25",
",",
"26",
",",
"27",
",",
"28",
",",
"29",
",",
"30",
"]",
",",
"[",
"8",
",",
"9",
",",
"10",
"]",
")",
"loop",
".",
"run_until_complete",
"(",
"stl",
".",
"connect",
"(",
")",
")",
"loop",
".",
"create_task",
"(",
"stl",
".",
"arm",
"(",
"\"3333\"",
",",
"1",
")",
")",
"loop",
".",
"create_task",
"(",
"stl",
".",
"disarm",
"(",
"\"3333\"",
")",
")",
"loop",
".",
"create_task",
"(",
"stl",
".",
"keep_alive",
"(",
")",
")",
"loop",
".",
"create_task",
"(",
"stl",
".",
"monitor_status",
"(",
")",
")",
"loop",
".",
"run_forever",
"(",
")",
"loop",
".",
"close",
"(",
")"
] |
3b6d2020d1e10dc5aa40f30ee4ecc0f3a053eb3c
|
test
|
AsyncSatel.connect
|
Make a TCP connection to the alarm system.
|
satel_integra/satel_integra.py
|
async def connect(self):
"""Make a TCP connection to the alarm system."""
_LOGGER.debug("Connecting...")
try:
self._reader, self._writer = await asyncio.open_connection(
self._host, self._port, loop=self._loop)
_LOGGER.debug("sucess connecting...")
except Exception as e:
_LOGGER.warning(
"Exception during connecting: %s.", e)
self._writer = None
self._reader = None
return False
return True
|
async def connect(self):
"""Make a TCP connection to the alarm system."""
_LOGGER.debug("Connecting...")
try:
self._reader, self._writer = await asyncio.open_connection(
self._host, self._port, loop=self._loop)
_LOGGER.debug("sucess connecting...")
except Exception as e:
_LOGGER.warning(
"Exception during connecting: %s.", e)
self._writer = None
self._reader = None
return False
return True
|
[
"Make",
"a",
"TCP",
"connection",
"to",
"the",
"alarm",
"system",
"."
] |
c-soft/satel_integra
|
python
|
https://github.com/c-soft/satel_integra/blob/3b6d2020d1e10dc5aa40f30ee4ecc0f3a053eb3c/satel_integra/satel_integra.py#L170-L186
|
[
"async",
"def",
"connect",
"(",
"self",
")",
":",
"_LOGGER",
".",
"debug",
"(",
"\"Connecting...\"",
")",
"try",
":",
"self",
".",
"_reader",
",",
"self",
".",
"_writer",
"=",
"await",
"asyncio",
".",
"open_connection",
"(",
"self",
".",
"_host",
",",
"self",
".",
"_port",
",",
"loop",
"=",
"self",
".",
"_loop",
")",
"_LOGGER",
".",
"debug",
"(",
"\"sucess connecting...\"",
")",
"except",
"Exception",
"as",
"e",
":",
"_LOGGER",
".",
"warning",
"(",
"\"Exception during connecting: %s.\"",
",",
"e",
")",
"self",
".",
"_writer",
"=",
"None",
"self",
".",
"_reader",
"=",
"None",
"return",
"False",
"return",
"True"
] |
3b6d2020d1e10dc5aa40f30ee4ecc0f3a053eb3c
|
test
|
AsyncSatel.start_monitoring
|
Start monitoring for interesting events.
|
satel_integra/satel_integra.py
|
async def start_monitoring(self):
"""Start monitoring for interesting events."""
data = generate_query(
b'\x7F\x01\xDC\x99\x80\x00\x04\x00\x00\x00\x00\x00\x00')
await self._send_data(data)
resp = await self._read_data()
if resp is None:
_LOGGER.warning("Start monitoring - no data!")
return
if resp[1:2] != b'\xFF':
_LOGGER.warning("Monitoring not accepted.")
|
async def start_monitoring(self):
"""Start monitoring for interesting events."""
data = generate_query(
b'\x7F\x01\xDC\x99\x80\x00\x04\x00\x00\x00\x00\x00\x00')
await self._send_data(data)
resp = await self._read_data()
if resp is None:
_LOGGER.warning("Start monitoring - no data!")
return
if resp[1:2] != b'\xFF':
_LOGGER.warning("Monitoring not accepted.")
|
[
"Start",
"monitoring",
"for",
"interesting",
"events",
"."
] |
c-soft/satel_integra
|
python
|
https://github.com/c-soft/satel_integra/blob/3b6d2020d1e10dc5aa40f30ee4ecc0f3a053eb3c/satel_integra/satel_integra.py#L188-L201
|
[
"async",
"def",
"start_monitoring",
"(",
"self",
")",
":",
"data",
"=",
"generate_query",
"(",
"b'\\x7F\\x01\\xDC\\x99\\x80\\x00\\x04\\x00\\x00\\x00\\x00\\x00\\x00'",
")",
"await",
"self",
".",
"_send_data",
"(",
"data",
")",
"resp",
"=",
"await",
"self",
".",
"_read_data",
"(",
")",
"if",
"resp",
"is",
"None",
":",
"_LOGGER",
".",
"warning",
"(",
"\"Start monitoring - no data!\"",
")",
"return",
"if",
"resp",
"[",
"1",
":",
"2",
"]",
"!=",
"b'\\xFF'",
":",
"_LOGGER",
".",
"warning",
"(",
"\"Monitoring not accepted.\"",
")"
] |
3b6d2020d1e10dc5aa40f30ee4ecc0f3a053eb3c
|
test
|
AsyncSatel._output_changed
|
0x17 outputs state 0x17 + 16/32 bytes
|
satel_integra/satel_integra.py
|
def _output_changed(self, msg):
"""0x17 outputs state 0x17 + 16/32 bytes"""
status = {"outputs": {}}
output_states = list_set_bits(msg, 32)
self.violated_outputs = output_states
_LOGGER.debug("Output states: %s, monitored outputs: %s",
output_states, self._monitored_outputs)
for output in self._monitored_outputs:
status["outputs"][output] = \
1 if output in output_states else 0
_LOGGER.debug("Returning status: %s", status)
if self._output_changed_callback:
self._output_changed_callback(status)
return status
|
def _output_changed(self, msg):
"""0x17 outputs state 0x17 + 16/32 bytes"""
status = {"outputs": {}}
output_states = list_set_bits(msg, 32)
self.violated_outputs = output_states
_LOGGER.debug("Output states: %s, monitored outputs: %s",
output_states, self._monitored_outputs)
for output in self._monitored_outputs:
status["outputs"][output] = \
1 if output in output_states else 0
_LOGGER.debug("Returning status: %s", status)
if self._output_changed_callback:
self._output_changed_callback(status)
return status
|
[
"0x17",
"outputs",
"state",
"0x17",
"+",
"16",
"/",
"32",
"bytes"
] |
c-soft/satel_integra
|
python
|
https://github.com/c-soft/satel_integra/blob/3b6d2020d1e10dc5aa40f30ee4ecc0f3a053eb3c/satel_integra/satel_integra.py#L221-L239
|
[
"def",
"_output_changed",
"(",
"self",
",",
"msg",
")",
":",
"status",
"=",
"{",
"\"outputs\"",
":",
"{",
"}",
"}",
"output_states",
"=",
"list_set_bits",
"(",
"msg",
",",
"32",
")",
"self",
".",
"violated_outputs",
"=",
"output_states",
"_LOGGER",
".",
"debug",
"(",
"\"Output states: %s, monitored outputs: %s\"",
",",
"output_states",
",",
"self",
".",
"_monitored_outputs",
")",
"for",
"output",
"in",
"self",
".",
"_monitored_outputs",
":",
"status",
"[",
"\"outputs\"",
"]",
"[",
"output",
"]",
"=",
"1",
"if",
"output",
"in",
"output_states",
"else",
"0",
"_LOGGER",
".",
"debug",
"(",
"\"Returning status: %s\"",
",",
"status",
")",
"if",
"self",
".",
"_output_changed_callback",
":",
"self",
".",
"_output_changed_callback",
"(",
"status",
")",
"return",
"status"
] |
3b6d2020d1e10dc5aa40f30ee4ecc0f3a053eb3c
|
test
|
AsyncSatel.arm
|
Send arming command to the alarm. Modes allowed: from 0 till 3.
|
satel_integra/satel_integra.py
|
async def arm(self, code, partition_list, mode=0):
"""Send arming command to the alarm. Modes allowed: from 0 till 3."""
_LOGGER.debug("Sending arm command, mode: %s!", mode)
while len(code) < 16:
code += 'F'
code_bytes = bytearray.fromhex(code)
mode_command = 0x80 + mode
data = generate_query(mode_command.to_bytes(1, 'big')
+ code_bytes
+ partition_bytes(partition_list))
await self._send_data(data)
|
async def arm(self, code, partition_list, mode=0):
"""Send arming command to the alarm. Modes allowed: from 0 till 3."""
_LOGGER.debug("Sending arm command, mode: %s!", mode)
while len(code) < 16:
code += 'F'
code_bytes = bytearray.fromhex(code)
mode_command = 0x80 + mode
data = generate_query(mode_command.to_bytes(1, 'big')
+ code_bytes
+ partition_bytes(partition_list))
await self._send_data(data)
|
[
"Send",
"arming",
"command",
"to",
"the",
"alarm",
".",
"Modes",
"allowed",
":",
"from",
"0",
"till",
"3",
"."
] |
c-soft/satel_integra
|
python
|
https://github.com/c-soft/satel_integra/blob/3b6d2020d1e10dc5aa40f30ee4ecc0f3a053eb3c/satel_integra/satel_integra.py#L284-L296
|
[
"async",
"def",
"arm",
"(",
"self",
",",
"code",
",",
"partition_list",
",",
"mode",
"=",
"0",
")",
":",
"_LOGGER",
".",
"debug",
"(",
"\"Sending arm command, mode: %s!\"",
",",
"mode",
")",
"while",
"len",
"(",
"code",
")",
"<",
"16",
":",
"code",
"+=",
"'F'",
"code_bytes",
"=",
"bytearray",
".",
"fromhex",
"(",
"code",
")",
"mode_command",
"=",
"0x80",
"+",
"mode",
"data",
"=",
"generate_query",
"(",
"mode_command",
".",
"to_bytes",
"(",
"1",
",",
"'big'",
")",
"+",
"code_bytes",
"+",
"partition_bytes",
"(",
"partition_list",
")",
")",
"await",
"self",
".",
"_send_data",
"(",
"data",
")"
] |
3b6d2020d1e10dc5aa40f30ee4ecc0f3a053eb3c
|
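The padding loop at the top stretches the user code to 16 hex digits with 'F' so it always encodes to 8 bytes, and the arming mode is folded into the command byte; a quick illustration using the "3333" code from the demo above:

```python
code = "3333"
while len(code) < 16:
    code += 'F'
print(code)                           # 3333FFFFFFFFFFFF
print(bytearray.fromhex(code).hex())  # 3333ffffffffffff, i.e. 8 bytes on the wire
print((0x80 + 1).to_bytes(1, 'big'))  # b'\x81': arm command byte for mode 1
```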
test
|
AsyncSatel.disarm
|
Send command to disarm.
|
satel_integra/satel_integra.py
|
async def disarm(self, code, partition_list):
"""Send command to disarm."""
_LOGGER.info("Sending disarm command.")
while len(code) < 16:
code += 'F'
code_bytes = bytearray.fromhex(code)
data = generate_query(b'\x84' + code_bytes
+ partition_bytes(partition_list))
await self._send_data(data)
|
async def disarm(self, code, partition_list):
"""Send command to disarm."""
_LOGGER.info("Sending disarm command.")
while len(code) < 16:
code += 'F'
code_bytes = bytearray.fromhex(code)
data = generate_query(b'\x84' + code_bytes
+ partition_bytes(partition_list))
await self._send_data(data)
|
[
"Send",
"command",
"to",
"disarm",
"."
] |
c-soft/satel_integra
|
python
|
https://github.com/c-soft/satel_integra/blob/3b6d2020d1e10dc5aa40f30ee4ecc0f3a053eb3c/satel_integra/satel_integra.py#L298-L309
|
[
"async",
"def",
"disarm",
"(",
"self",
",",
"code",
",",
"partition_list",
")",
":",
"_LOGGER",
".",
"info",
"(",
"\"Sending disarm command.\"",
")",
"while",
"len",
"(",
"code",
")",
"<",
"16",
":",
"code",
"+=",
"'F'",
"code_bytes",
"=",
"bytearray",
".",
"fromhex",
"(",
"code",
")",
"data",
"=",
"generate_query",
"(",
"b'\\x84'",
"+",
"code_bytes",
"+",
"partition_bytes",
"(",
"partition_list",
")",
")",
"await",
"self",
".",
"_send_data",
"(",
"data",
")"
] |
3b6d2020d1e10dc5aa40f30ee4ecc0f3a053eb3c
|
test
|
AsyncSatel.clear_alarm
|
Send command to clear the alarm.
|
satel_integra/satel_integra.py
|
async def clear_alarm(self, code, partition_list):
"""Send command to clear the alarm."""
_LOGGER.info("Sending clear the alarm command.")
while len(code) < 16:
code += 'F'
code_bytes = bytearray.fromhex(code)
data = generate_query(b'\x85' + code_bytes
+ partition_bytes(partition_list))
await self._send_data(data)
|
async def clear_alarm(self, code, partition_list):
"""Send command to clear the alarm."""
_LOGGER.info("Sending clear the alarm command.")
while len(code) < 16:
code += 'F'
code_bytes = bytearray.fromhex(code)
data = generate_query(b'\x85' + code_bytes
+ partition_bytes(partition_list))
await self._send_data(data)
|
[
"Send",
"command",
"to",
"clear",
"the",
"alarm",
"."
] |
c-soft/satel_integra
|
python
|
https://github.com/c-soft/satel_integra/blob/3b6d2020d1e10dc5aa40f30ee4ecc0f3a053eb3c/satel_integra/satel_integra.py#L311-L322
|
[
"async",
"def",
"clear_alarm",
"(",
"self",
",",
"code",
",",
"partition_list",
")",
":",
"_LOGGER",
".",
"info",
"(",
"\"Sending clear the alarm command.\"",
")",
"while",
"len",
"(",
"code",
")",
"<",
"16",
":",
"code",
"+=",
"'F'",
"code_bytes",
"=",
"bytearray",
".",
"fromhex",
"(",
"code",
")",
"data",
"=",
"generate_query",
"(",
"b'\\x85'",
"+",
"code_bytes",
"+",
"partition_bytes",
"(",
"partition_list",
")",
")",
"await",
"self",
".",
"_send_data",
"(",
"data",
")"
] |
3b6d2020d1e10dc5aa40f30ee4ecc0f3a053eb3c
|
test
|
AsyncSatel.set_output
|
Send output turn on command to the alarm.
|
satel_integra/satel_integra.py
|
async def set_output(self, code, output_id, state):
"""Send output turn on command to the alarm."""
"""0x88 outputs on
+ 8 bytes - user code
+ 16/32 bytes - output list
If function is accepted, function result can be
checked by observe the system state """
_LOGGER.debug("Turn on, output: %s, code: %s", output_id, code)
while len(code) < 16:
code += 'F'
code_bytes = bytearray.fromhex(code)
mode_command = 0x88 if state else 0x89
data = generate_query(mode_command.to_bytes(1, 'big') +
code_bytes +
output_bytes(output_id))
await self._send_data(data)
|
async def set_output(self, code, output_id, state):
"""Send output turn on command to the alarm."""
"""0x88 outputs on
+ 8 bytes - user code
+ 16/32 bytes - output list
If function is accepted, function result can be
checked by observe the system state """
_LOGGER.debug("Turn on, output: %s, code: %s", output_id, code)
while len(code) < 16:
code += 'F'
code_bytes = bytearray.fromhex(code)
mode_command = 0x88 if state else 0x89
data = generate_query(mode_command.to_bytes(1, 'big') +
code_bytes +
output_bytes(output_id))
await self._send_data(data)
|
[
"Send",
"output",
"turn",
"on",
"command",
"to",
"the",
"alarm",
"."
] |
c-soft/satel_integra
|
python
|
https://github.com/c-soft/satel_integra/blob/3b6d2020d1e10dc5aa40f30ee4ecc0f3a053eb3c/satel_integra/satel_integra.py#L324-L340
|
[
"async",
"def",
"set_output",
"(",
"self",
",",
"code",
",",
"output_id",
",",
"state",
")",
":",
"\"\"\"0x88 outputs on\n + 8 bytes - user code\n + 16/32 bytes - output list\n If function is accepted, function result can be\n checked by observe the system state \"\"\"",
"_LOGGER",
".",
"debug",
"(",
"\"Turn on, output: %s, code: %s\"",
",",
"output_id",
",",
"code",
")",
"while",
"len",
"(",
"code",
")",
"<",
"16",
":",
"code",
"+=",
"'F'",
"code_bytes",
"=",
"bytearray",
".",
"fromhex",
"(",
"code",
")",
"mode_command",
"=",
"0x88",
"if",
"state",
"else",
"0x89",
"data",
"=",
"generate_query",
"(",
"mode_command",
".",
"to_bytes",
"(",
"1",
",",
"'big'",
")",
"+",
"code_bytes",
"+",
"output_bytes",
"(",
"output_id",
")",
")",
"await",
"self",
".",
"_send_data",
"(",
"data",
")"
] |
3b6d2020d1e10dc5aa40f30ee4ecc0f3a053eb3c
|
test
|
AsyncSatel.keep_alive
|
A workaround for Satel Integra disconnecting after 25s.
Every interval it sends some random question to the device, ignoring
the answer - just to keep the connection alive.
|
satel_integra/satel_integra.py
|
async def keep_alive(self):
"""A workaround for Satel Integra disconnecting after 25s.
Every interval it sends some random question to the device, ignoring
the answer - just to keep the connection alive.
"""
while True:
await asyncio.sleep(self._keep_alive_timeout)
if self.closed:
return
# Command to read status of the alarm
data = generate_query(b'\xEE\x01\x01')
await self._send_data(data)
|
async def keep_alive(self):
"""A workaround for Satel Integra disconnecting after 25s.
Every interval it sends some random question to the device, ignoring
the answer - just to keep the connection alive.
"""
while True:
await asyncio.sleep(self._keep_alive_timeout)
if self.closed:
return
# Command to read status of the alarm
data = generate_query(b'\xEE\x01\x01')
await self._send_data(data)
|
[
"A",
"workaround",
"for",
"Satel",
"Integra",
"disconnecting",
"after",
"25s",
"."
] |
c-soft/satel_integra
|
python
|
https://github.com/c-soft/satel_integra/blob/3b6d2020d1e10dc5aa40f30ee4ecc0f3a053eb3c/satel_integra/satel_integra.py#L374-L386
|
[
"async",
"def",
"keep_alive",
"(",
"self",
")",
":",
"while",
"True",
":",
"await",
"asyncio",
".",
"sleep",
"(",
"self",
".",
"_keep_alive_timeout",
")",
"if",
"self",
".",
"closed",
":",
"return",
"# Command to read status of the alarm",
"data",
"=",
"generate_query",
"(",
"b'\\xEE\\x01\\x01'",
")",
"await",
"self",
".",
"_send_data",
"(",
"data",
")"
] |
3b6d2020d1e10dc5aa40f30ee4ecc0f3a053eb3c
|
test
|
AsyncSatel.monitor_status
|
Start monitoring of the alarm status.
Send a command to the satel integra to start sending updates. Read in a
loop and call the respective callbacks when messages are received.
|
satel_integra/satel_integra.py
|
async def monitor_status(self, alarm_status_callback=None,
zone_changed_callback=None,
output_changed_callback=None):
"""Start monitoring of the alarm status.
Send a command to the satel integra to start sending updates. Read in a
loop and call the respective callbacks when messages are received.
"""
self._alarm_status_callback = alarm_status_callback
self._zone_changed_callback = zone_changed_callback
self._output_changed_callback = output_changed_callback
_LOGGER.info("Starting monitor_status loop")
while not self.closed:
_LOGGER.debug("Iteration... ")
while not self.connected:
_LOGGER.info("Not connected, re-connecting... ")
await self.connect()
if not self.connected:
_LOGGER.warning("Not connected, sleeping for 10s... ")
await asyncio.sleep(self._reconnection_timeout)
continue
await self.start_monitoring()
if not self.connected:
_LOGGER.warning("Start monitoring failed, sleeping for 10s...")
await asyncio.sleep(self._reconnection_timeout)
continue
while True:
await self._update_status()
_LOGGER.debug("Got status!")
if not self.connected:
_LOGGER.info("Got connection broken, reconnecting!")
break
_LOGGER.info("Closed, quit monitoring.")
|
async def monitor_status(self, alarm_status_callback=None,
zone_changed_callback=None,
output_changed_callback=None):
"""Start monitoring of the alarm status.
Send a command to the satel integra to start sending updates. Read in a
loop and call the respective callbacks when messages are received.
"""
self._alarm_status_callback = alarm_status_callback
self._zone_changed_callback = zone_changed_callback
self._output_changed_callback = output_changed_callback
_LOGGER.info("Starting monitor_status loop")
while not self.closed:
_LOGGER.debug("Iteration... ")
while not self.connected:
_LOGGER.info("Not connected, re-connecting... ")
await self.connect()
if not self.connected:
_LOGGER.warning("Not connected, sleeping for 10s... ")
await asyncio.sleep(self._reconnection_timeout)
continue
await self.start_monitoring()
if not self.connected:
_LOGGER.warning("Start monitoring failed, sleeping for 10s...")
await asyncio.sleep(self._reconnection_timeout)
continue
while True:
await self._update_status()
_LOGGER.debug("Got status!")
if not self.connected:
_LOGGER.info("Got connection broken, reconnecting!")
break
_LOGGER.info("Closed, quit monitoring.")
|
[
"Start",
"monitoring",
"of",
"the",
"alarm",
"status",
"."
] |
c-soft/satel_integra
|
python
|
https://github.com/c-soft/satel_integra/blob/3b6d2020d1e10dc5aa40f30ee4ecc0f3a053eb3c/satel_integra/satel_integra.py#L409-L443
|
[
"async",
"def",
"monitor_status",
"(",
"self",
",",
"alarm_status_callback",
"=",
"None",
",",
"zone_changed_callback",
"=",
"None",
",",
"output_changed_callback",
"=",
"None",
")",
":",
"self",
".",
"_alarm_status_callback",
"=",
"alarm_status_callback",
"self",
".",
"_zone_changed_callback",
"=",
"zone_changed_callback",
"self",
".",
"_output_changed_callback",
"=",
"output_changed_callback",
"_LOGGER",
".",
"info",
"(",
"\"Starting monitor_status loop\"",
")",
"while",
"not",
"self",
".",
"closed",
":",
"_LOGGER",
".",
"debug",
"(",
"\"Iteration... \"",
")",
"while",
"not",
"self",
".",
"connected",
":",
"_LOGGER",
".",
"info",
"(",
"\"Not connected, re-connecting... \"",
")",
"await",
"self",
".",
"connect",
"(",
")",
"if",
"not",
"self",
".",
"connected",
":",
"_LOGGER",
".",
"warning",
"(",
"\"Not connected, sleeping for 10s... \"",
")",
"await",
"asyncio",
".",
"sleep",
"(",
"self",
".",
"_reconnection_timeout",
")",
"continue",
"await",
"self",
".",
"start_monitoring",
"(",
")",
"if",
"not",
"self",
".",
"connected",
":",
"_LOGGER",
".",
"warning",
"(",
"\"Start monitoring failed, sleeping for 10s...\"",
")",
"await",
"asyncio",
".",
"sleep",
"(",
"self",
".",
"_reconnection_timeout",
")",
"continue",
"while",
"True",
":",
"await",
"self",
".",
"_update_status",
"(",
")",
"_LOGGER",
".",
"debug",
"(",
"\"Got status!\"",
")",
"if",
"not",
"self",
".",
"connected",
":",
"_LOGGER",
".",
"info",
"(",
"\"Got connection broken, reconnecting!\"",
")",
"break",
"_LOGGER",
".",
"info",
"(",
"\"Closed, quit monitoring.\"",
")"
] |
3b6d2020d1e10dc5aa40f30ee4ecc0f3a053eb3c
|
test
|
AsyncSatel.close
|
Stop monitoring and close connection.
|
satel_integra/satel_integra.py
|
def close(self):
"""Stop monitoring and close connection."""
_LOGGER.debug("Closing...")
self.closed = True
if self.connected:
self._writer.close()
|
def close(self):
"""Stop monitoring and close connection."""
_LOGGER.debug("Closing...")
self.closed = True
if self.connected:
self._writer.close()
|
[
"Stop",
"monitoring",
"and",
"close",
"connection",
"."
] |
c-soft/satel_integra
|
python
|
https://github.com/c-soft/satel_integra/blob/3b6d2020d1e10dc5aa40f30ee4ecc0f3a053eb3c/satel_integra/satel_integra.py#L445-L450
|
[
"def",
"close",
"(",
"self",
")",
":",
"_LOGGER",
".",
"debug",
"(",
"\"Closing...\"",
")",
"self",
".",
"closed",
"=",
"True",
"if",
"self",
".",
"connected",
":",
"self",
".",
"_writer",
".",
"close",
"(",
")"
] |
3b6d2020d1e10dc5aa40f30ee4ecc0f3a053eb3c
|
test
|
PostgresContentsManager.purge_db
|
Clear all matching our user_id.
|
pgcontents/pgmanager.py
|
def purge_db(self):
"""
Clear all matching our user_id.
"""
with self.engine.begin() as db:
purge_user(db, self.user_id)
|
def purge_db(self):
"""
Clear all matching our user_id.
"""
with self.engine.begin() as db:
purge_user(db, self.user_id)
|
[
"Clear",
"all",
"matching",
"our",
"user_id",
"."
] |
quantopian/pgcontents
|
python
|
https://github.com/quantopian/pgcontents/blob/ed36268b7917332d16868208e1e565742a8753e1/pgcontents/pgmanager.py#L107-L112
|
[
"def",
"purge_db",
"(",
"self",
")",
":",
"with",
"self",
".",
"engine",
".",
"begin",
"(",
")",
"as",
"db",
":",
"purge_user",
"(",
"db",
",",
"self",
".",
"user_id",
")"
] |
ed36268b7917332d16868208e1e565742a8753e1
|
test
|
PostgresContentsManager.guess_type
|
Guess the type of a file.
If allow_directory is False, don't consider the possibility that the
file is a directory.
|
pgcontents/pgmanager.py
|
def guess_type(self, path, allow_directory=True):
"""
Guess the type of a file.
If allow_directory is False, don't consider the possibility that the
file is a directory.
"""
if path.endswith('.ipynb'):
return 'notebook'
elif allow_directory and self.dir_exists(path):
return 'directory'
else:
return 'file'
|
def guess_type(self, path, allow_directory=True):
"""
Guess the type of a file.
If allow_directory is False, don't consider the possibility that the
file is a directory.
"""
if path.endswith('.ipynb'):
return 'notebook'
elif allow_directory and self.dir_exists(path):
return 'directory'
else:
return 'file'
|
[
"Guess",
"the",
"type",
"of",
"a",
"file",
"."
] |
quantopian/pgcontents
|
python
|
https://github.com/quantopian/pgcontents/blob/ed36268b7917332d16868208e1e565742a8753e1/pgcontents/pgmanager.py#L115-L127
|
[
"def",
"guess_type",
"(",
"self",
",",
"path",
",",
"allow_directory",
"=",
"True",
")",
":",
"if",
"path",
".",
"endswith",
"(",
"'.ipynb'",
")",
":",
"return",
"'notebook'",
"elif",
"allow_directory",
"and",
"self",
".",
"dir_exists",
"(",
"path",
")",
":",
"return",
"'directory'",
"else",
":",
"return",
"'file'"
] |
ed36268b7917332d16868208e1e565742a8753e1
|
test
|
PostgresContentsManager.get_file_id
|
Get the id of a file in the database. This function is specific to
this implementation of ContentsManager and is not in the base class.
|
pgcontents/pgmanager.py
|
def get_file_id(self, path):
"""
Get the id of a file in the database. This function is specific to
this implementation of ContentsManager and is not in the base class.
"""
with self.engine.begin() as db:
try:
file_id = get_file_id(db, self.user_id, path)
except NoSuchFile:
self.no_such_entity(path)
return file_id
|
def get_file_id(self, path):
"""
Get the id of a file in the database. This function is specific to
this implementation of ContentsManager and is not in the base class.
"""
with self.engine.begin() as db:
try:
file_id = get_file_id(db, self.user_id, path)
except NoSuchFile:
self.no_such_entity(path)
return file_id
|
[
"Get",
"the",
"id",
"of",
"a",
"file",
"in",
"the",
"database",
".",
"This",
"function",
"is",
"specific",
"to",
"this",
"implementation",
"of",
"ContentsManager",
"and",
"is",
"not",
"in",
"the",
"base",
"class",
"."
] |
quantopian/pgcontents
|
python
|
https://github.com/quantopian/pgcontents/blob/ed36268b7917332d16868208e1e565742a8753e1/pgcontents/pgmanager.py#L166-L177
|
[
"def",
"get_file_id",
"(",
"self",
",",
"path",
")",
":",
"with",
"self",
".",
"engine",
".",
"begin",
"(",
")",
"as",
"db",
":",
"try",
":",
"file_id",
"=",
"get_file_id",
"(",
"db",
",",
"self",
".",
"user_id",
",",
"path",
")",
"except",
"NoSuchFile",
":",
"self",
".",
"no_such_entity",
"(",
"path",
")",
"return",
"file_id"
] |
ed36268b7917332d16868208e1e565742a8753e1
|
test
|
PostgresContentsManager._get_notebook
|
Get a notebook from the database.
|
pgcontents/pgmanager.py
|
def _get_notebook(self, path, content, format):
"""
Get a notebook from the database.
"""
with self.engine.begin() as db:
try:
record = get_file(
db,
self.user_id,
path,
content,
self.crypto.decrypt,
)
except NoSuchFile:
self.no_such_entity(path)
return self._notebook_model_from_db(record, content)
|
def _get_notebook(self, path, content, format):
"""
Get a notebook from the database.
"""
with self.engine.begin() as db:
try:
record = get_file(
db,
self.user_id,
path,
content,
self.crypto.decrypt,
)
except NoSuchFile:
self.no_such_entity(path)
return self._notebook_model_from_db(record, content)
|
[
"Get",
"a",
"notebook",
"from",
"the",
"database",
"."
] |
quantopian/pgcontents
|
python
|
https://github.com/quantopian/pgcontents/blob/ed36268b7917332d16868208e1e565742a8753e1/pgcontents/pgmanager.py#L179-L195
|
[
"def",
"_get_notebook",
"(",
"self",
",",
"path",
",",
"content",
",",
"format",
")",
":",
"with",
"self",
".",
"engine",
".",
"begin",
"(",
")",
"as",
"db",
":",
"try",
":",
"record",
"=",
"get_file",
"(",
"db",
",",
"self",
".",
"user_id",
",",
"path",
",",
"content",
",",
"self",
".",
"crypto",
".",
"decrypt",
",",
")",
"except",
"NoSuchFile",
":",
"self",
".",
"no_such_entity",
"(",
"path",
")",
"return",
"self",
".",
"_notebook_model_from_db",
"(",
"record",
",",
"content",
")"
] |
ed36268b7917332d16868208e1e565742a8753e1
|
test
|
PostgresContentsManager._notebook_model_from_db
|
Build a notebook model from database record.
|
pgcontents/pgmanager.py
|
def _notebook_model_from_db(self, record, content):
"""
Build a notebook model from database record.
"""
path = to_api_path(record['parent_name'] + record['name'])
model = base_model(path)
model['type'] = 'notebook'
model['last_modified'] = model['created'] = record['created_at']
if content:
content = reads_base64(record['content'])
self.mark_trusted_cells(content, path)
model['content'] = content
model['format'] = 'json'
self.validate_notebook_model(model)
return model
|
def _notebook_model_from_db(self, record, content):
"""
Build a notebook model from database record.
"""
path = to_api_path(record['parent_name'] + record['name'])
model = base_model(path)
model['type'] = 'notebook'
model['last_modified'] = model['created'] = record['created_at']
if content:
content = reads_base64(record['content'])
self.mark_trusted_cells(content, path)
model['content'] = content
model['format'] = 'json'
self.validate_notebook_model(model)
return model
|
[
"Build",
"a",
"notebook",
"model",
"from",
"database",
"record",
"."
] |
quantopian/pgcontents
|
python
|
https://github.com/quantopian/pgcontents/blob/ed36268b7917332d16868208e1e565742a8753e1/pgcontents/pgmanager.py#L197-L211
|
[
"def",
"_notebook_model_from_db",
"(",
"self",
",",
"record",
",",
"content",
")",
":",
"path",
"=",
"to_api_path",
"(",
"record",
"[",
"'parent_name'",
"]",
"+",
"record",
"[",
"'name'",
"]",
")",
"model",
"=",
"base_model",
"(",
"path",
")",
"model",
"[",
"'type'",
"]",
"=",
"'notebook'",
"model",
"[",
"'last_modified'",
"]",
"=",
"model",
"[",
"'created'",
"]",
"=",
"record",
"[",
"'created_at'",
"]",
"if",
"content",
":",
"content",
"=",
"reads_base64",
"(",
"record",
"[",
"'content'",
"]",
")",
"self",
".",
"mark_trusted_cells",
"(",
"content",
",",
"path",
")",
"model",
"[",
"'content'",
"]",
"=",
"content",
"model",
"[",
"'format'",
"]",
"=",
"'json'",
"self",
".",
"validate_notebook_model",
"(",
"model",
")",
"return",
"model"
] |
ed36268b7917332d16868208e1e565742a8753e1
|
test
|
PostgresContentsManager._get_directory
|
Get a directory from the database.
|
pgcontents/pgmanager.py
|
def _get_directory(self, path, content, format):
"""
Get a directory from the database.
"""
with self.engine.begin() as db:
try:
record = get_directory(
db, self.user_id, path, content
)
except NoSuchDirectory:
if self.file_exists(path):
# TODO: It's awkward/expensive to have to check this to
# return a 400 instead of 404. Consider just 404ing.
self.do_400("Wrong type: %s" % path)
else:
self.no_such_entity(path)
return self._directory_model_from_db(record, content)
|
def _get_directory(self, path, content, format):
"""
Get a directory from the database.
"""
with self.engine.begin() as db:
try:
record = get_directory(
db, self.user_id, path, content
)
except NoSuchDirectory:
if self.file_exists(path):
# TODO: It's awkward/expensive to have to check this to
# return a 400 instead of 404. Consider just 404ing.
self.do_400("Wrong type: %s" % path)
else:
self.no_such_entity(path)
return self._directory_model_from_db(record, content)
|
[
"Get",
"a",
"directory",
"from",
"the",
"database",
"."
] |
quantopian/pgcontents
|
python
|
https://github.com/quantopian/pgcontents/blob/ed36268b7917332d16868208e1e565742a8753e1/pgcontents/pgmanager.py#L213-L230
|
[
"def",
"_get_directory",
"(",
"self",
",",
"path",
",",
"content",
",",
"format",
")",
":",
"with",
"self",
".",
"engine",
".",
"begin",
"(",
")",
"as",
"db",
":",
"try",
":",
"record",
"=",
"get_directory",
"(",
"db",
",",
"self",
".",
"user_id",
",",
"path",
",",
"content",
")",
"except",
"NoSuchDirectory",
":",
"if",
"self",
".",
"file_exists",
"(",
"path",
")",
":",
"# TODO: It's awkward/expensive to have to check this to",
"# return a 400 instead of 404. Consider just 404ing.",
"self",
".",
"do_400",
"(",
"\"Wrong type: %s\"",
"%",
"path",
")",
"else",
":",
"self",
".",
"no_such_entity",
"(",
"path",
")",
"return",
"self",
".",
"_directory_model_from_db",
"(",
"record",
",",
"content",
")"
] |
ed36268b7917332d16868208e1e565742a8753e1
|
test
|
PostgresContentsManager._convert_file_records
|
Apply _notebook_model_from_db or _file_model_from_db to each entry
in file_records, depending on the result of `guess_type`.
|
pgcontents/pgmanager.py
|
def _convert_file_records(self, file_records):
"""
Apply _notebook_model_from_db or _file_model_from_db to each entry
in file_records, depending on the result of `guess_type`.
"""
for record in file_records:
type_ = self.guess_type(record['name'], allow_directory=False)
if type_ == 'notebook':
yield self._notebook_model_from_db(record, False)
elif type_ == 'file':
yield self._file_model_from_db(record, False, None)
else:
self.do_500("Unknown file type %s" % type_)
|
def _convert_file_records(self, file_records):
"""
Apply _notebook_model_from_db or _file_model_from_db to each entry
in file_records, depending on the result of `guess_type`.
"""
for record in file_records:
type_ = self.guess_type(record['name'], allow_directory=False)
if type_ == 'notebook':
yield self._notebook_model_from_db(record, False)
elif type_ == 'file':
yield self._file_model_from_db(record, False, None)
else:
self.do_500("Unknown file type %s" % type_)
|
[
"Apply",
"_notebook_model_from_db",
"or",
"_file_model_from_db",
"to",
"each",
"entry",
"in",
"file_records",
"depending",
"on",
"the",
"result",
"of",
"guess_type",
"."
] |
quantopian/pgcontents
|
python
|
https://github.com/quantopian/pgcontents/blob/ed36268b7917332d16868208e1e565742a8753e1/pgcontents/pgmanager.py#L232-L244
|
[
"def",
"_convert_file_records",
"(",
"self",
",",
"file_records",
")",
":",
"for",
"record",
"in",
"file_records",
":",
"type_",
"=",
"self",
".",
"guess_type",
"(",
"record",
"[",
"'name'",
"]",
",",
"allow_directory",
"=",
"False",
")",
"if",
"type_",
"==",
"'notebook'",
":",
"yield",
"self",
".",
"_notebook_model_from_db",
"(",
"record",
",",
"False",
")",
"elif",
"type_",
"==",
"'file'",
":",
"yield",
"self",
".",
"_file_model_from_db",
"(",
"record",
",",
"False",
",",
"None",
")",
"else",
":",
"self",
".",
"do_500",
"(",
"\"Unknown file type %s\"",
"%",
"type_",
")"
] |
ed36268b7917332d16868208e1e565742a8753e1
|
test
|
PostgresContentsManager._directory_model_from_db
|
Build a directory model from database directory record.
|
pgcontents/pgmanager.py
|
def _directory_model_from_db(self, record, content):
"""
Build a directory model from database directory record.
"""
model = base_directory_model(to_api_path(record['name']))
if content:
model['format'] = 'json'
model['content'] = list(
chain(
self._convert_file_records(record['files']),
(
self._directory_model_from_db(subdir, False)
for subdir in record['subdirs']
),
)
)
return model
|
def _directory_model_from_db(self, record, content):
"""
Build a directory model from database directory record.
"""
model = base_directory_model(to_api_path(record['name']))
if content:
model['format'] = 'json'
model['content'] = list(
chain(
self._convert_file_records(record['files']),
(
self._directory_model_from_db(subdir, False)
for subdir in record['subdirs']
),
)
)
return model
|
[
"Build",
"a",
"directory",
"model",
"from",
"database",
"directory",
"record",
"."
] |
quantopian/pgcontents
|
python
|
https://github.com/quantopian/pgcontents/blob/ed36268b7917332d16868208e1e565742a8753e1/pgcontents/pgmanager.py#L246-L262
|
[
"def",
"_directory_model_from_db",
"(",
"self",
",",
"record",
",",
"content",
")",
":",
"model",
"=",
"base_directory_model",
"(",
"to_api_path",
"(",
"record",
"[",
"'name'",
"]",
")",
")",
"if",
"content",
":",
"model",
"[",
"'format'",
"]",
"=",
"'json'",
"model",
"[",
"'content'",
"]",
"=",
"list",
"(",
"chain",
"(",
"self",
".",
"_convert_file_records",
"(",
"record",
"[",
"'files'",
"]",
")",
",",
"(",
"self",
".",
"_directory_model_from_db",
"(",
"subdir",
",",
"False",
")",
"for",
"subdir",
"in",
"record",
"[",
"'subdirs'",
"]",
")",
",",
")",
")",
"return",
"model"
] |
ed36268b7917332d16868208e1e565742a8753e1
|
test
|
PostgresContentsManager._file_model_from_db
|
Build a file model from database record.
|
pgcontents/pgmanager.py
|
def _file_model_from_db(self, record, content, format):
"""
Build a file model from database record.
"""
# TODO: Most of this is shared with _notebook_model_from_db.
path = to_api_path(record['parent_name'] + record['name'])
model = base_model(path)
model['type'] = 'file'
model['last_modified'] = model['created'] = record['created_at']
if content:
bcontent = record['content']
model['content'], model['format'], model['mimetype'] = from_b64(
path,
bcontent,
format,
)
return model
|
def _file_model_from_db(self, record, content, format):
"""
Build a file model from database record.
"""
# TODO: Most of this is shared with _notebook_model_from_db.
path = to_api_path(record['parent_name'] + record['name'])
model = base_model(path)
model['type'] = 'file'
model['last_modified'] = model['created'] = record['created_at']
if content:
bcontent = record['content']
model['content'], model['format'], model['mimetype'] = from_b64(
path,
bcontent,
format,
)
return model
|
[
"Build",
"a",
"file",
"model",
"from",
"database",
"record",
"."
] |
quantopian/pgcontents
|
python
|
https://github.com/quantopian/pgcontents/blob/ed36268b7917332d16868208e1e565742a8753e1/pgcontents/pgmanager.py#L264-L280
|
[
"def",
"_file_model_from_db",
"(",
"self",
",",
"record",
",",
"content",
",",
"format",
")",
":",
"# TODO: Most of this is shared with _notebook_model_from_db.",
"path",
"=",
"to_api_path",
"(",
"record",
"[",
"'parent_name'",
"]",
"+",
"record",
"[",
"'name'",
"]",
")",
"model",
"=",
"base_model",
"(",
"path",
")",
"model",
"[",
"'type'",
"]",
"=",
"'file'",
"model",
"[",
"'last_modified'",
"]",
"=",
"model",
"[",
"'created'",
"]",
"=",
"record",
"[",
"'created_at'",
"]",
"if",
"content",
":",
"bcontent",
"=",
"record",
"[",
"'content'",
"]",
"model",
"[",
"'content'",
"]",
",",
"model",
"[",
"'format'",
"]",
",",
"model",
"[",
"'mimetype'",
"]",
"=",
"from_b64",
"(",
"path",
",",
"bcontent",
",",
"format",
",",
")",
"return",
"model"
] |
ed36268b7917332d16868208e1e565742a8753e1
|
test
|
PostgresContentsManager._save_notebook
|
Save a notebook.
Returns a validation message.
|
pgcontents/pgmanager.py
|
def _save_notebook(self, db, model, path):
"""
Save a notebook.
Returns a validation message.
"""
nb_contents = from_dict(model['content'])
self.check_and_sign(nb_contents, path)
save_file(
db,
self.user_id,
path,
writes_base64(nb_contents),
self.crypto.encrypt,
self.max_file_size_bytes,
)
# It's awkward that this writes to the model instead of returning.
self.validate_notebook_model(model)
return model.get('message')
|
def _save_notebook(self, db, model, path):
"""
Save a notebook.
Returns a validation message.
"""
nb_contents = from_dict(model['content'])
self.check_and_sign(nb_contents, path)
save_file(
db,
self.user_id,
path,
writes_base64(nb_contents),
self.crypto.encrypt,
self.max_file_size_bytes,
)
# It's awkward that this writes to the model instead of returning.
self.validate_notebook_model(model)
return model.get('message')
|
[
"Save",
"a",
"notebook",
"."
] |
quantopian/pgcontents
|
python
|
https://github.com/quantopian/pgcontents/blob/ed36268b7917332d16868208e1e565742a8753e1/pgcontents/pgmanager.py#L301-L319
|
[
"def",
"_save_notebook",
"(",
"self",
",",
"db",
",",
"model",
",",
"path",
")",
":",
"nb_contents",
"=",
"from_dict",
"(",
"model",
"[",
"'content'",
"]",
")",
"self",
".",
"check_and_sign",
"(",
"nb_contents",
",",
"path",
")",
"save_file",
"(",
"db",
",",
"self",
".",
"user_id",
",",
"path",
",",
"writes_base64",
"(",
"nb_contents",
")",
",",
"self",
".",
"crypto",
".",
"encrypt",
",",
"self",
".",
"max_file_size_bytes",
",",
")",
"# It's awkward that this writes to the model instead of returning.",
"self",
".",
"validate_notebook_model",
"(",
"model",
")",
"return",
"model",
".",
"get",
"(",
"'message'",
")"
] |
ed36268b7917332d16868208e1e565742a8753e1
|
test
|
PostgresContentsManager._save_file
|
Save a non-notebook file.
|
pgcontents/pgmanager.py
|
def _save_file(self, db, model, path):
"""
Save a non-notebook file.
"""
save_file(
db,
self.user_id,
path,
to_b64(model['content'], model.get('format', None)),
self.crypto.encrypt,
self.max_file_size_bytes,
)
return None
|
def _save_file(self, db, model, path):
"""
Save a non-notebook file.
"""
save_file(
db,
self.user_id,
path,
to_b64(model['content'], model.get('format', None)),
self.crypto.encrypt,
self.max_file_size_bytes,
)
return None
|
[
"Save",
"a",
"non",
"-",
"notebook",
"file",
"."
] |
quantopian/pgcontents
|
python
|
https://github.com/quantopian/pgcontents/blob/ed36268b7917332d16868208e1e565742a8753e1/pgcontents/pgmanager.py#L321-L333
|
[
"def",
"_save_file",
"(",
"self",
",",
"db",
",",
"model",
",",
"path",
")",
":",
"save_file",
"(",
"db",
",",
"self",
".",
"user_id",
",",
"path",
",",
"to_b64",
"(",
"model",
"[",
"'content'",
"]",
",",
"model",
".",
"get",
"(",
"'format'",
",",
"None",
")",
")",
",",
"self",
".",
"crypto",
".",
"encrypt",
",",
"self",
".",
"max_file_size_bytes",
",",
")",
"return",
"None"
] |
ed36268b7917332d16868208e1e565742a8753e1
|
test
|
PostgresContentsManager.rename_file
|
Rename object from old_path to path.
NOTE: This method is unfortunately named on the base class. It
actually moves a file or a directory.
|
pgcontents/pgmanager.py
|
def rename_file(self, old_path, path):
"""
Rename object from old_path to path.
NOTE: This method is unfortunately named on the base class. It
actually moves a file or a directory.
"""
with self.engine.begin() as db:
try:
if self.file_exists(old_path):
rename_file(db, self.user_id, old_path, path)
elif self.dir_exists(old_path):
rename_directory(db, self.user_id, old_path, path)
else:
self.no_such_entity(path)
except (FileExists, DirectoryExists):
self.already_exists(path)
except RenameRoot as e:
self.do_409(str(e))
|
def rename_file(self, old_path, path):
"""
Rename object from old_path to path.
NOTE: This method is unfortunately named on the base class. It
actually moves a file or a directory.
"""
with self.engine.begin() as db:
try:
if self.file_exists(old_path):
rename_file(db, self.user_id, old_path, path)
elif self.dir_exists(old_path):
rename_directory(db, self.user_id, old_path, path)
else:
self.no_such_entity(path)
except (FileExists, DirectoryExists):
self.already_exists(path)
except RenameRoot as e:
self.do_409(str(e))
|
[
"Rename",
"object",
"from",
"old_path",
"to",
"path",
"."
] |
quantopian/pgcontents
|
python
|
https://github.com/quantopian/pgcontents/blob/ed36268b7917332d16868208e1e565742a8753e1/pgcontents/pgmanager.py#L380-L398
|
[
"def",
"rename_file",
"(",
"self",
",",
"old_path",
",",
"path",
")",
":",
"with",
"self",
".",
"engine",
".",
"begin",
"(",
")",
"as",
"db",
":",
"try",
":",
"if",
"self",
".",
"file_exists",
"(",
"old_path",
")",
":",
"rename_file",
"(",
"db",
",",
"self",
".",
"user_id",
",",
"old_path",
",",
"path",
")",
"elif",
"self",
".",
"dir_exists",
"(",
"old_path",
")",
":",
"rename_directory",
"(",
"db",
",",
"self",
".",
"user_id",
",",
"old_path",
",",
"path",
")",
"else",
":",
"self",
".",
"no_such_entity",
"(",
"path",
")",
"except",
"(",
"FileExists",
",",
"DirectoryExists",
")",
":",
"self",
".",
"already_exists",
"(",
"path",
")",
"except",
"RenameRoot",
"as",
"e",
":",
"self",
".",
"do_409",
"(",
"str",
"(",
"e",
")",
")"
] |
ed36268b7917332d16868208e1e565742a8753e1
|
test
|
PostgresContentsManager.delete_file
|
Delete object corresponding to path.
|
pgcontents/pgmanager.py
|
def delete_file(self, path):
"""
Delete object corresponding to path.
"""
if self.file_exists(path):
self._delete_non_directory(path)
elif self.dir_exists(path):
self._delete_directory(path)
else:
self.no_such_entity(path)
|
def delete_file(self, path):
"""
Delete object corresponding to path.
"""
if self.file_exists(path):
self._delete_non_directory(path)
elif self.dir_exists(path):
self._delete_directory(path)
else:
self.no_such_entity(path)
|
[
"Delete",
"object",
"corresponding",
"to",
"path",
"."
] |
quantopian/pgcontents
|
python
|
https://github.com/quantopian/pgcontents/blob/ed36268b7917332d16868208e1e565742a8753e1/pgcontents/pgmanager.py#L416-L425
|
[
"def",
"delete_file",
"(",
"self",
",",
"path",
")",
":",
"if",
"self",
".",
"file_exists",
"(",
"path",
")",
":",
"self",
".",
"_delete_non_directory",
"(",
"path",
")",
"elif",
"self",
".",
"dir_exists",
"(",
"path",
")",
":",
"self",
".",
"_delete_directory",
"(",
"path",
")",
"else",
":",
"self",
".",
"no_such_entity",
"(",
"path",
")"
] |
ed36268b7917332d16868208e1e565742a8753e1
|
test
|
preprocess_incoming_content
|
Apply preprocessing steps to file/notebook content that we're going to
write to the database.
Applies ``encrypt_func`` to ``content`` and checks that the result is
smaller than ``max_size_bytes``.
|
pgcontents/query.py
|
def preprocess_incoming_content(content, encrypt_func, max_size_bytes):
"""
Apply preprocessing steps to file/notebook content that we're going to
write to the database.
Applies ``encrypt_func`` to ``content`` and checks that the result is
smaller than ``max_size_bytes``.
"""
encrypted = encrypt_func(content)
if max_size_bytes != UNLIMITED and len(encrypted) > max_size_bytes:
raise FileTooLarge()
return encrypted
|
def preprocess_incoming_content(content, encrypt_func, max_size_bytes):
"""
Apply preprocessing steps to file/notebook content that we're going to
write to the database.
Applies ``encrypt_func`` to ``content`` and checks that the result is
smaller than ``max_size_bytes``.
"""
encrypted = encrypt_func(content)
if max_size_bytes != UNLIMITED and len(encrypted) > max_size_bytes:
raise FileTooLarge()
return encrypted
|
[
"Apply",
"preprocessing",
"steps",
"to",
"file",
"/",
"notebook",
"content",
"that",
"we",
"re",
"going",
"to",
"write",
"to",
"the",
"database",
"."
] |
quantopian/pgcontents
|
python
|
https://github.com/quantopian/pgcontents/blob/ed36268b7917332d16868208e1e565742a8753e1/pgcontents/query.py#L56-L67
|
[
"def",
"preprocess_incoming_content",
"(",
"content",
",",
"encrypt_func",
",",
"max_size_bytes",
")",
":",
"encrypted",
"=",
"encrypt_func",
"(",
"content",
")",
"if",
"max_size_bytes",
"!=",
"UNLIMITED",
"and",
"len",
"(",
"encrypted",
")",
">",
"max_size_bytes",
":",
"raise",
"FileTooLarge",
"(",
")",
"return",
"encrypted"
] |
ed36268b7917332d16868208e1e565742a8753e1
|
test
|
ensure_db_user
|
Add a new user if they don't already exist.
|
pgcontents/query.py
|
def ensure_db_user(db, user_id):
"""
Add a new user if they don't already exist.
"""
with ignore_unique_violation():
db.execute(
users.insert().values(id=user_id),
)
|
def ensure_db_user(db, user_id):
"""
Add a new user if they don't already exist.
"""
with ignore_unique_violation():
db.execute(
users.insert().values(id=user_id),
)
|
[
"Add",
"a",
"new",
"user",
"if",
"they",
"don",
"t",
"already",
"exist",
"."
] |
quantopian/pgcontents
|
python
|
https://github.com/quantopian/pgcontents/blob/ed36268b7917332d16868208e1e565742a8753e1/pgcontents/query.py#L86-L93
|
[
"def",
"ensure_db_user",
"(",
"db",
",",
"user_id",
")",
":",
"with",
"ignore_unique_violation",
"(",
")",
":",
"db",
".",
"execute",
"(",
"users",
".",
"insert",
"(",
")",
".",
"values",
"(",
"id",
"=",
"user_id",
")",
",",
")"
] |
ed36268b7917332d16868208e1e565742a8753e1
|
test
|
purge_user
|
Delete a user and all of their resources.
|
pgcontents/query.py
|
def purge_user(db, user_id):
"""
Delete a user and all of their resources.
"""
db.execute(files.delete().where(
files.c.user_id == user_id
))
db.execute(directories.delete().where(
directories.c.user_id == user_id
))
db.execute(users.delete().where(
users.c.id == user_id
))
|
def purge_user(db, user_id):
"""
Delete a user and all of their resources.
"""
db.execute(files.delete().where(
files.c.user_id == user_id
))
db.execute(directories.delete().where(
directories.c.user_id == user_id
))
db.execute(users.delete().where(
users.c.id == user_id
))
|
[
"Delete",
"a",
"user",
"and",
"all",
"of",
"their",
"resources",
"."
] |
quantopian/pgcontents
|
python
|
https://github.com/quantopian/pgcontents/blob/ed36268b7917332d16868208e1e565742a8753e1/pgcontents/query.py#L96-L108
|
[
"def",
"purge_user",
"(",
"db",
",",
"user_id",
")",
":",
"db",
".",
"execute",
"(",
"files",
".",
"delete",
"(",
")",
".",
"where",
"(",
"files",
".",
"c",
".",
"user_id",
"==",
"user_id",
")",
")",
"db",
".",
"execute",
"(",
"directories",
".",
"delete",
"(",
")",
".",
"where",
"(",
"directories",
".",
"c",
".",
"user_id",
"==",
"user_id",
")",
")",
"db",
".",
"execute",
"(",
"users",
".",
"delete",
"(",
")",
".",
"where",
"(",
"users",
".",
"c",
".",
"id",
"==",
"user_id",
")",
")"
] |
ed36268b7917332d16868208e1e565742a8753e1
|
test
|
create_directory
|
Create a directory.
|
pgcontents/query.py
|
def create_directory(db, user_id, api_path):
"""
Create a directory.
"""
name = from_api_dirname(api_path)
if name == '/':
parent_name = null()
parent_user_id = null()
else:
# Convert '/foo/bar/buzz/' -> '/foo/bar/'
parent_name = name[:name.rindex('/', 0, -1) + 1]
parent_user_id = user_id
db.execute(
directories.insert().values(
name=name,
user_id=user_id,
parent_name=parent_name,
parent_user_id=parent_user_id,
)
)
|
def create_directory(db, user_id, api_path):
"""
Create a directory.
"""
name = from_api_dirname(api_path)
if name == '/':
parent_name = null()
parent_user_id = null()
else:
# Convert '/foo/bar/buzz/' -> '/foo/bar/'
parent_name = name[:name.rindex('/', 0, -1) + 1]
parent_user_id = user_id
db.execute(
directories.insert().values(
name=name,
user_id=user_id,
parent_name=parent_name,
parent_user_id=parent_user_id,
)
)
|
[
"Create",
"a",
"directory",
"."
] |
quantopian/pgcontents
|
python
|
https://github.com/quantopian/pgcontents/blob/ed36268b7917332d16868208e1e565742a8753e1/pgcontents/query.py#L114-L134
|
[
"def",
"create_directory",
"(",
"db",
",",
"user_id",
",",
"api_path",
")",
":",
"name",
"=",
"from_api_dirname",
"(",
"api_path",
")",
"if",
"name",
"==",
"'/'",
":",
"parent_name",
"=",
"null",
"(",
")",
"parent_user_id",
"=",
"null",
"(",
")",
"else",
":",
"# Convert '/foo/bar/buzz/' -> '/foo/bar/'",
"parent_name",
"=",
"name",
"[",
":",
"name",
".",
"rindex",
"(",
"'/'",
",",
"0",
",",
"-",
"1",
")",
"+",
"1",
"]",
"parent_user_id",
"=",
"user_id",
"db",
".",
"execute",
"(",
"directories",
".",
"insert",
"(",
")",
".",
"values",
"(",
"name",
"=",
"name",
",",
"user_id",
"=",
"user_id",
",",
"parent_name",
"=",
"parent_name",
",",
"parent_user_id",
"=",
"parent_user_id",
",",
")",
")"
] |
ed36268b7917332d16868208e1e565742a8753e1
|
test
|
_is_in_directory
|
Return a WHERE clause that matches entries in a directory.
Parameterized on table because this clause is re-used between files and
directories.
|
pgcontents/query.py
|
def _is_in_directory(table, user_id, db_dirname):
"""
Return a WHERE clause that matches entries in a directory.
Parameterized on table because this clause is re-used between files and
directories.
"""
return and_(
table.c.parent_name == db_dirname,
table.c.user_id == user_id,
)
|
def _is_in_directory(table, user_id, db_dirname):
"""
Return a WHERE clause that matches entries in a directory.
Parameterized on table because this clause is re-used between files and
directories.
"""
return and_(
table.c.parent_name == db_dirname,
table.c.user_id == user_id,
)
|
[
"Return",
"a",
"WHERE",
"clause",
"that",
"matches",
"entries",
"in",
"a",
"directory",
"."
] |
quantopian/pgcontents
|
python
|
https://github.com/quantopian/pgcontents/blob/ed36268b7917332d16868208e1e565742a8753e1/pgcontents/query.py#L145-L155
|
[
"def",
"_is_in_directory",
"(",
"table",
",",
"user_id",
",",
"db_dirname",
")",
":",
"return",
"and_",
"(",
"table",
".",
"c",
".",
"parent_name",
"==",
"db_dirname",
",",
"table",
".",
"c",
".",
"user_id",
"==",
"user_id",
",",
")"
] |
ed36268b7917332d16868208e1e565742a8753e1
|
test
|
delete_directory
|
Delete a directory.
|
pgcontents/query.py
|
def delete_directory(db, user_id, api_path):
"""
Delete a directory.
"""
db_dirname = from_api_dirname(api_path)
try:
result = db.execute(
directories.delete().where(
and_(
directories.c.user_id == user_id,
directories.c.name == db_dirname,
)
)
)
except IntegrityError as error:
if is_foreign_key_violation(error):
raise DirectoryNotEmpty(api_path)
else:
raise
rowcount = result.rowcount
if not rowcount:
raise NoSuchDirectory(api_path)
return rowcount
|
def delete_directory(db, user_id, api_path):
"""
Delete a directory.
"""
db_dirname = from_api_dirname(api_path)
try:
result = db.execute(
directories.delete().where(
and_(
directories.c.user_id == user_id,
directories.c.name == db_dirname,
)
)
)
except IntegrityError as error:
if is_foreign_key_violation(error):
raise DirectoryNotEmpty(api_path)
else:
raise
rowcount = result.rowcount
if not rowcount:
raise NoSuchDirectory(api_path)
return rowcount
|
[
"Delete",
"a",
"directory",
"."
] |
quantopian/pgcontents
|
python
|
https://github.com/quantopian/pgcontents/blob/ed36268b7917332d16868208e1e565742a8753e1/pgcontents/query.py#L167-L191
|
[
"def",
"delete_directory",
"(",
"db",
",",
"user_id",
",",
"api_path",
")",
":",
"db_dirname",
"=",
"from_api_dirname",
"(",
"api_path",
")",
"try",
":",
"result",
"=",
"db",
".",
"execute",
"(",
"directories",
".",
"delete",
"(",
")",
".",
"where",
"(",
"and_",
"(",
"directories",
".",
"c",
".",
"user_id",
"==",
"user_id",
",",
"directories",
".",
"c",
".",
"name",
"==",
"db_dirname",
",",
")",
")",
")",
"except",
"IntegrityError",
"as",
"error",
":",
"if",
"is_foreign_key_violation",
"(",
"error",
")",
":",
"raise",
"DirectoryNotEmpty",
"(",
"api_path",
")",
"else",
":",
"raise",
"rowcount",
"=",
"result",
".",
"rowcount",
"if",
"not",
"rowcount",
":",
"raise",
"NoSuchDirectory",
"(",
"api_path",
")",
"return",
"rowcount"
] |
ed36268b7917332d16868208e1e565742a8753e1
|
test
|
_dir_exists
|
Internal implementation of dir_exists.
Expects a db-style path name.
|
pgcontents/query.py
|
def _dir_exists(db, user_id, db_dirname):
"""
Internal implementation of dir_exists.
Expects a db-style path name.
"""
return db.execute(
select(
[func.count(directories.c.name)],
).where(
and_(
directories.c.user_id == user_id,
directories.c.name == db_dirname,
),
)
).scalar() != 0
|
def _dir_exists(db, user_id, db_dirname):
"""
Internal implementation of dir_exists.
Expects a db-style path name.
"""
return db.execute(
select(
[func.count(directories.c.name)],
).where(
and_(
directories.c.user_id == user_id,
directories.c.name == db_dirname,
),
)
).scalar() != 0
|
[
"Internal",
"implementation",
"of",
"dir_exists",
"."
] |
quantopian/pgcontents
|
python
|
https://github.com/quantopian/pgcontents/blob/ed36268b7917332d16868208e1e565742a8753e1/pgcontents/query.py#L201-L216
|
[
"def",
"_dir_exists",
"(",
"db",
",",
"user_id",
",",
"db_dirname",
")",
":",
"return",
"db",
".",
"execute",
"(",
"select",
"(",
"[",
"func",
".",
"count",
"(",
"directories",
".",
"c",
".",
"name",
")",
"]",
",",
")",
".",
"where",
"(",
"and_",
"(",
"directories",
".",
"c",
".",
"user_id",
"==",
"user_id",
",",
"directories",
".",
"c",
".",
"name",
"==",
"db_dirname",
",",
")",
",",
")",
")",
".",
"scalar",
"(",
")",
"!=",
"0"
] |
ed36268b7917332d16868208e1e565742a8753e1
|
test
|
files_in_directory
|
Return files in a directory.
|
pgcontents/query.py
|
def files_in_directory(db, user_id, db_dirname):
"""
Return files in a directory.
"""
fields = _file_default_fields()
rows = db.execute(
select(
fields,
).where(
_is_in_directory(files, user_id, db_dirname),
).order_by(
files.c.user_id,
files.c.parent_name,
files.c.name,
files.c.created_at,
).distinct(
files.c.user_id, files.c.parent_name, files.c.name,
)
)
return [to_dict_no_content(fields, row) for row in rows]
|
def files_in_directory(db, user_id, db_dirname):
"""
Return files in a directory.
"""
fields = _file_default_fields()
rows = db.execute(
select(
fields,
).where(
_is_in_directory(files, user_id, db_dirname),
).order_by(
files.c.user_id,
files.c.parent_name,
files.c.name,
files.c.created_at,
).distinct(
files.c.user_id, files.c.parent_name, files.c.name,
)
)
return [to_dict_no_content(fields, row) for row in rows]
|
[
"Return",
"files",
"in",
"a",
"directory",
"."
] |
quantopian/pgcontents
|
python
|
https://github.com/quantopian/pgcontents/blob/ed36268b7917332d16868208e1e565742a8753e1/pgcontents/query.py#L219-L238
|
[
"def",
"files_in_directory",
"(",
"db",
",",
"user_id",
",",
"db_dirname",
")",
":",
"fields",
"=",
"_file_default_fields",
"(",
")",
"rows",
"=",
"db",
".",
"execute",
"(",
"select",
"(",
"fields",
",",
")",
".",
"where",
"(",
"_is_in_directory",
"(",
"files",
",",
"user_id",
",",
"db_dirname",
")",
",",
")",
".",
"order_by",
"(",
"files",
".",
"c",
".",
"user_id",
",",
"files",
".",
"c",
".",
"parent_name",
",",
"files",
".",
"c",
".",
"name",
",",
"files",
".",
"c",
".",
"created_at",
",",
")",
".",
"distinct",
"(",
"files",
".",
"c",
".",
"user_id",
",",
"files",
".",
"c",
".",
"parent_name",
",",
"files",
".",
"c",
".",
"name",
",",
")",
")",
"return",
"[",
"to_dict_no_content",
"(",
"fields",
",",
"row",
")",
"for",
"row",
"in",
"rows",
"]"
] |
ed36268b7917332d16868208e1e565742a8753e1
|
test
|
directories_in_directory
|
Return subdirectories of a directory.
|
pgcontents/query.py
|
def directories_in_directory(db, user_id, db_dirname):
"""
Return subdirectories of a directory.
"""
fields = _directory_default_fields()
rows = db.execute(
select(
fields,
).where(
_is_in_directory(directories, user_id, db_dirname),
)
)
return [to_dict_no_content(fields, row) for row in rows]
|
def directories_in_directory(db, user_id, db_dirname):
"""
Return subdirectories of a directory.
"""
fields = _directory_default_fields()
rows = db.execute(
select(
fields,
).where(
_is_in_directory(directories, user_id, db_dirname),
)
)
return [to_dict_no_content(fields, row) for row in rows]
|
[
"Return",
"subdirectories",
"of",
"a",
"directory",
"."
] |
quantopian/pgcontents
|
python
|
https://github.com/quantopian/pgcontents/blob/ed36268b7917332d16868208e1e565742a8753e1/pgcontents/query.py#L241-L253
|
[
"def",
"directories_in_directory",
"(",
"db",
",",
"user_id",
",",
"db_dirname",
")",
":",
"fields",
"=",
"_directory_default_fields",
"(",
")",
"rows",
"=",
"db",
".",
"execute",
"(",
"select",
"(",
"fields",
",",
")",
".",
"where",
"(",
"_is_in_directory",
"(",
"directories",
",",
"user_id",
",",
"db_dirname",
")",
",",
")",
")",
"return",
"[",
"to_dict_no_content",
"(",
"fields",
",",
"row",
")",
"for",
"row",
"in",
"rows",
"]"
] |
ed36268b7917332d16868208e1e565742a8753e1
|
test
|
get_directory
|
Return the names of all files/directories that are direct children of
api_dirname.
If content is False, return a bare model containing just a database-style
name.
|
pgcontents/query.py
|
def get_directory(db, user_id, api_dirname, content):
"""
Return the names of all files/directories that are direct children of
api_dirname.
If content is False, return a bare model containing just a database-style
name.
"""
db_dirname = from_api_dirname(api_dirname)
if not _dir_exists(db, user_id, db_dirname):
raise NoSuchDirectory(api_dirname)
if content:
files = files_in_directory(
db,
user_id,
db_dirname,
)
subdirectories = directories_in_directory(
db,
user_id,
db_dirname,
)
else:
files, subdirectories = None, None
# TODO: Consider using namedtuples for these return values.
return {
'name': db_dirname,
'files': files,
'subdirs': subdirectories,
}
|
def get_directory(db, user_id, api_dirname, content):
"""
Return the names of all files/directories that are direct children of
api_dirname.
If content is False, return a bare model containing just a database-style
name.
"""
db_dirname = from_api_dirname(api_dirname)
if not _dir_exists(db, user_id, db_dirname):
raise NoSuchDirectory(api_dirname)
if content:
files = files_in_directory(
db,
user_id,
db_dirname,
)
subdirectories = directories_in_directory(
db,
user_id,
db_dirname,
)
else:
files, subdirectories = None, None
# TODO: Consider using namedtuples for these return values.
return {
'name': db_dirname,
'files': files,
'subdirs': subdirectories,
}
|
[
"Return",
"the",
"names",
"of",
"all",
"files",
"/",
"directories",
"that",
"are",
"direct",
"children",
"of",
"api_dirname",
"."
] |
quantopian/pgcontents
|
python
|
https://github.com/quantopian/pgcontents/blob/ed36268b7917332d16868208e1e565742a8753e1/pgcontents/query.py#L256-L286
|
[
"def",
"get_directory",
"(",
"db",
",",
"user_id",
",",
"api_dirname",
",",
"content",
")",
":",
"db_dirname",
"=",
"from_api_dirname",
"(",
"api_dirname",
")",
"if",
"not",
"_dir_exists",
"(",
"db",
",",
"user_id",
",",
"db_dirname",
")",
":",
"raise",
"NoSuchDirectory",
"(",
"api_dirname",
")",
"if",
"content",
":",
"files",
"=",
"files_in_directory",
"(",
"db",
",",
"user_id",
",",
"db_dirname",
",",
")",
"subdirectories",
"=",
"directories_in_directory",
"(",
"db",
",",
"user_id",
",",
"db_dirname",
",",
")",
"else",
":",
"files",
",",
"subdirectories",
"=",
"None",
",",
"None",
"# TODO: Consider using namedtuples for these return values.",
"return",
"{",
"'name'",
":",
"db_dirname",
",",
"'files'",
":",
"files",
",",
"'subdirs'",
":",
"subdirectories",
",",
"}"
] |
ed36268b7917332d16868208e1e565742a8753e1
|
test
|
_file_where
|
Return a WHERE clause matching the given API path and user_id.
|
pgcontents/query.py
|
def _file_where(user_id, api_path):
"""
Return a WHERE clause matching the given API path and user_id.
"""
directory, name = split_api_filepath(api_path)
return and_(
files.c.name == name,
files.c.user_id == user_id,
files.c.parent_name == directory,
)
|
def _file_where(user_id, api_path):
"""
Return a WHERE clause matching the given API path and user_id.
"""
directory, name = split_api_filepath(api_path)
return and_(
files.c.name == name,
files.c.user_id == user_id,
files.c.parent_name == directory,
)
|
[
"Return",
"a",
"WHERE",
"clause",
"matching",
"the",
"given",
"API",
"path",
"and",
"user_id",
"."
] |
quantopian/pgcontents
|
python
|
https://github.com/quantopian/pgcontents/blob/ed36268b7917332d16868208e1e565742a8753e1/pgcontents/query.py#L292-L301
|
[
"def",
"_file_where",
"(",
"user_id",
",",
"api_path",
")",
":",
"directory",
",",
"name",
"=",
"split_api_filepath",
"(",
"api_path",
")",
"return",
"and_",
"(",
"files",
".",
"c",
".",
"name",
"==",
"name",
",",
"files",
".",
"c",
".",
"user_id",
"==",
"user_id",
",",
"files",
".",
"c",
".",
"parent_name",
"==",
"directory",
",",
")"
] |
ed36268b7917332d16868208e1e565742a8753e1
|
test
|
_select_file
|
Return a SELECT statement that returns the latest N versions of a file.
|
pgcontents/query.py
|
def _select_file(user_id, api_path, fields, limit):
"""
Return a SELECT statement that returns the latest N versions of a file.
"""
query = select(fields).where(
_file_where(user_id, api_path),
).order_by(
_file_creation_order(),
)
if limit is not None:
query = query.limit(limit)
return query
|
def _select_file(user_id, api_path, fields, limit):
"""
Return a SELECT statement that returns the latest N versions of a file.
"""
query = select(fields).where(
_file_where(user_id, api_path),
).order_by(
_file_creation_order(),
)
if limit is not None:
query = query.limit(limit)
return query
|
[
"Return",
"a",
"SELECT",
"statement",
"that",
"returns",
"the",
"latest",
"N",
"versions",
"of",
"a",
"file",
"."
] |
quantopian/pgcontents
|
python
|
https://github.com/quantopian/pgcontents/blob/ed36268b7917332d16868208e1e565742a8753e1/pgcontents/query.py#L311-L323
|
[
"def",
"_select_file",
"(",
"user_id",
",",
"api_path",
",",
"fields",
",",
"limit",
")",
":",
"query",
"=",
"select",
"(",
"fields",
")",
".",
"where",
"(",
"_file_where",
"(",
"user_id",
",",
"api_path",
")",
",",
")",
".",
"order_by",
"(",
"_file_creation_order",
"(",
")",
",",
")",
"if",
"limit",
"is",
"not",
"None",
":",
"query",
"=",
"query",
".",
"limit",
"(",
"limit",
")",
"return",
"query"
] |
ed36268b7917332d16868208e1e565742a8753e1
|
test
|
_file_default_fields
|
Default fields returned by a file query.
|
pgcontents/query.py
|
def _file_default_fields():
"""
Default fields returned by a file query.
"""
return [
files.c.name,
files.c.created_at,
files.c.parent_name,
]
|
def _file_default_fields():
"""
Default fields returned by a file query.
"""
return [
files.c.name,
files.c.created_at,
files.c.parent_name,
]
|
[
"Default",
"fields",
"returned",
"by",
"a",
"file",
"query",
"."
] |
quantopian/pgcontents
|
python
|
https://github.com/quantopian/pgcontents/blob/ed36268b7917332d16868208e1e565742a8753e1/pgcontents/query.py#L326-L334
|
[
"def",
"_file_default_fields",
"(",
")",
":",
"return",
"[",
"files",
".",
"c",
".",
"name",
",",
"files",
".",
"c",
".",
"created_at",
",",
"files",
".",
"c",
".",
"parent_name",
",",
"]"
] |
ed36268b7917332d16868208e1e565742a8753e1
|
test
|
_get_file
|
Get file data for the given user_id, path, and query_fields. The
query_fields parameter specifies which database fields should be
included in the returned file data.
|
pgcontents/query.py
|
def _get_file(db, user_id, api_path, query_fields, decrypt_func):
"""
Get file data for the given user_id, path, and query_fields. The
query_fields parameter specifies which database fields should be
included in the returned file data.
"""
result = db.execute(
_select_file(user_id, api_path, query_fields, limit=1),
).first()
if result is None:
raise NoSuchFile(api_path)
if files.c.content in query_fields:
return to_dict_with_content(query_fields, result, decrypt_func)
else:
return to_dict_no_content(query_fields, result)
|
def _get_file(db, user_id, api_path, query_fields, decrypt_func):
"""
Get file data for the given user_id, path, and query_fields. The
query_fields parameter specifies which database fields should be
included in the returned file data.
"""
result = db.execute(
_select_file(user_id, api_path, query_fields, limit=1),
).first()
if result is None:
raise NoSuchFile(api_path)
if files.c.content in query_fields:
return to_dict_with_content(query_fields, result, decrypt_func)
else:
return to_dict_no_content(query_fields, result)
|
[
"Get",
"file",
"data",
"for",
"the",
"given",
"user_id",
"path",
"and",
"query_fields",
".",
"The",
"query_fields",
"parameter",
"specifies",
"which",
"database",
"fields",
"should",
"be",
"included",
"in",
"the",
"returned",
"file",
"data",
"."
] |
quantopian/pgcontents
|
python
|
https://github.com/quantopian/pgcontents/blob/ed36268b7917332d16868208e1e565742a8753e1/pgcontents/query.py#L337-L353
|
[
"def",
"_get_file",
"(",
"db",
",",
"user_id",
",",
"api_path",
",",
"query_fields",
",",
"decrypt_func",
")",
":",
"result",
"=",
"db",
".",
"execute",
"(",
"_select_file",
"(",
"user_id",
",",
"api_path",
",",
"query_fields",
",",
"limit",
"=",
"1",
")",
",",
")",
".",
"first",
"(",
")",
"if",
"result",
"is",
"None",
":",
"raise",
"NoSuchFile",
"(",
"api_path",
")",
"if",
"files",
".",
"c",
".",
"content",
"in",
"query_fields",
":",
"return",
"to_dict_with_content",
"(",
"query_fields",
",",
"result",
",",
"decrypt_func",
")",
"else",
":",
"return",
"to_dict_no_content",
"(",
"query_fields",
",",
"result",
")"
] |
ed36268b7917332d16868208e1e565742a8753e1
|
test
|
get_file
|
Get file data for the given user_id and path.
Include content only if include_content=True.
|
pgcontents/query.py
|
def get_file(db, user_id, api_path, include_content, decrypt_func):
"""
Get file data for the given user_id and path.
Include content only if include_content=True.
"""
query_fields = _file_default_fields()
if include_content:
query_fields.append(files.c.content)
return _get_file(db, user_id, api_path, query_fields, decrypt_func)
|
def get_file(db, user_id, api_path, include_content, decrypt_func):
"""
Get file data for the given user_id and path.
Include content only if include_content=True.
"""
query_fields = _file_default_fields()
if include_content:
query_fields.append(files.c.content)
return _get_file(db, user_id, api_path, query_fields, decrypt_func)
|
[
"Get",
"file",
"data",
"for",
"the",
"given",
"user_id",
"and",
"path",
"."
] |
quantopian/pgcontents
|
python
|
https://github.com/quantopian/pgcontents/blob/ed36268b7917332d16868208e1e565742a8753e1/pgcontents/query.py#L356-L366
|
[
"def",
"get_file",
"(",
"db",
",",
"user_id",
",",
"api_path",
",",
"include_content",
",",
"decrypt_func",
")",
":",
"query_fields",
"=",
"_file_default_fields",
"(",
")",
"if",
"include_content",
":",
"query_fields",
".",
"append",
"(",
"files",
".",
"c",
".",
"content",
")",
"return",
"_get_file",
"(",
"db",
",",
"user_id",
",",
"api_path",
",",
"query_fields",
",",
"decrypt_func",
")"
] |
ed36268b7917332d16868208e1e565742a8753e1
|
test
|
get_file_id
|
Get the value in the 'id' column for the file with the given
user_id and path.
|
pgcontents/query.py
|
def get_file_id(db, user_id, api_path):
"""
Get the value in the 'id' column for the file with the given
user_id and path.
"""
return _get_file(
db,
user_id,
api_path,
[files.c.id],
unused_decrypt_func,
)['id']
|
def get_file_id(db, user_id, api_path):
"""
Get the value in the 'id' column for the file with the given
user_id and path.
"""
return _get_file(
db,
user_id,
api_path,
[files.c.id],
unused_decrypt_func,
)['id']
|
[
"Get",
"the",
"value",
"in",
"the",
"id",
"column",
"for",
"the",
"file",
"with",
"the",
"given",
"user_id",
"and",
"path",
"."
] |
quantopian/pgcontents
|
python
|
https://github.com/quantopian/pgcontents/blob/ed36268b7917332d16868208e1e565742a8753e1/pgcontents/query.py#L369-L380
|
[
"def",
"get_file_id",
"(",
"db",
",",
"user_id",
",",
"api_path",
")",
":",
"return",
"_get_file",
"(",
"db",
",",
"user_id",
",",
"api_path",
",",
"[",
"files",
".",
"c",
".",
"id",
"]",
",",
"unused_decrypt_func",
",",
")",
"[",
"'id'",
"]"
] |
ed36268b7917332d16868208e1e565742a8753e1
|
test
|
delete_file
|
Delete a file.
TODO: Consider making this a soft delete.
|
pgcontents/query.py
|
def delete_file(db, user_id, api_path):
"""
Delete a file.
TODO: Consider making this a soft delete.
"""
result = db.execute(
files.delete().where(
_file_where(user_id, api_path)
)
)
rowcount = result.rowcount
if not rowcount:
raise NoSuchFile(api_path)
return rowcount
|
def delete_file(db, user_id, api_path):
"""
Delete a file.
TODO: Consider making this a soft delete.
"""
result = db.execute(
files.delete().where(
_file_where(user_id, api_path)
)
)
rowcount = result.rowcount
if not rowcount:
raise NoSuchFile(api_path)
return rowcount
|
[
"Delete",
"a",
"file",
"."
] |
quantopian/pgcontents
|
python
|
https://github.com/quantopian/pgcontents/blob/ed36268b7917332d16868208e1e565742a8753e1/pgcontents/query.py#L383-L399
|
[
"def",
"delete_file",
"(",
"db",
",",
"user_id",
",",
"api_path",
")",
":",
"result",
"=",
"db",
".",
"execute",
"(",
"files",
".",
"delete",
"(",
")",
".",
"where",
"(",
"_file_where",
"(",
"user_id",
",",
"api_path",
")",
")",
")",
"rowcount",
"=",
"result",
".",
"rowcount",
"if",
"not",
"rowcount",
":",
"raise",
"NoSuchFile",
"(",
"api_path",
")",
"return",
"rowcount"
] |
ed36268b7917332d16868208e1e565742a8753e1
|
test
|
file_exists
|
Check if a file exists.
|
pgcontents/query.py
|
def file_exists(db, user_id, path):
"""
Check if a file exists.
"""
try:
get_file(
db,
user_id,
path,
include_content=False,
decrypt_func=unused_decrypt_func,
)
return True
except NoSuchFile:
return False
|
def file_exists(db, user_id, path):
"""
Check if a file exists.
"""
try:
get_file(
db,
user_id,
path,
include_content=False,
decrypt_func=unused_decrypt_func,
)
return True
except NoSuchFile:
return False
|
[
"Check",
"if",
"a",
"file",
"exists",
"."
] |
quantopian/pgcontents
|
python
|
https://github.com/quantopian/pgcontents/blob/ed36268b7917332d16868208e1e565742a8753e1/pgcontents/query.py#L402-L416
|
[
"def",
"file_exists",
"(",
"db",
",",
"user_id",
",",
"path",
")",
":",
"try",
":",
"get_file",
"(",
"db",
",",
"user_id",
",",
"path",
",",
"include_content",
"=",
"False",
",",
"decrypt_func",
"=",
"unused_decrypt_func",
",",
")",
"return",
"True",
"except",
"NoSuchFile",
":",
"return",
"False"
] |
ed36268b7917332d16868208e1e565742a8753e1
|
test
|
rename_file
|
Rename a file.
|
pgcontents/query.py
|
def rename_file(db, user_id, old_api_path, new_api_path):
"""
Rename a file.
"""
# Overwriting existing files is disallowed.
if file_exists(db, user_id, new_api_path):
raise FileExists(new_api_path)
old_dir, old_name = split_api_filepath(old_api_path)
new_dir, new_name = split_api_filepath(new_api_path)
if old_dir != new_dir:
raise ValueError(
dedent(
"""
Can't rename object to new directory.
Old Path: {old_api_path}
New Path: {new_api_path}
""".format(
old_api_path=old_api_path,
new_api_path=new_api_path
)
)
)
db.execute(
files.update().where(
_file_where(user_id, old_api_path),
).values(
name=new_name,
created_at=func.now(),
)
)
|
def rename_file(db, user_id, old_api_path, new_api_path):
"""
Rename a file.
"""
# Overwriting existing files is disallowed.
if file_exists(db, user_id, new_api_path):
raise FileExists(new_api_path)
old_dir, old_name = split_api_filepath(old_api_path)
new_dir, new_name = split_api_filepath(new_api_path)
if old_dir != new_dir:
raise ValueError(
dedent(
"""
Can't rename object to new directory.
Old Path: {old_api_path}
New Path: {new_api_path}
""".format(
old_api_path=old_api_path,
new_api_path=new_api_path
)
)
)
db.execute(
files.update().where(
_file_where(user_id, old_api_path),
).values(
name=new_name,
created_at=func.now(),
)
)
|
[
"Rename",
"a",
"file",
"."
] |
quantopian/pgcontents
|
python
|
https://github.com/quantopian/pgcontents/blob/ed36268b7917332d16868208e1e565742a8753e1/pgcontents/query.py#L419-L451
|
[
"def",
"rename_file",
"(",
"db",
",",
"user_id",
",",
"old_api_path",
",",
"new_api_path",
")",
":",
"# Overwriting existing files is disallowed.",
"if",
"file_exists",
"(",
"db",
",",
"user_id",
",",
"new_api_path",
")",
":",
"raise",
"FileExists",
"(",
"new_api_path",
")",
"old_dir",
",",
"old_name",
"=",
"split_api_filepath",
"(",
"old_api_path",
")",
"new_dir",
",",
"new_name",
"=",
"split_api_filepath",
"(",
"new_api_path",
")",
"if",
"old_dir",
"!=",
"new_dir",
":",
"raise",
"ValueError",
"(",
"dedent",
"(",
"\"\"\"\n Can't rename object to new directory.\n Old Path: {old_api_path}\n New Path: {new_api_path}\n \"\"\"",
".",
"format",
"(",
"old_api_path",
"=",
"old_api_path",
",",
"new_api_path",
"=",
"new_api_path",
")",
")",
")",
"db",
".",
"execute",
"(",
"files",
".",
"update",
"(",
")",
".",
"where",
"(",
"_file_where",
"(",
"user_id",
",",
"old_api_path",
")",
",",
")",
".",
"values",
"(",
"name",
"=",
"new_name",
",",
"created_at",
"=",
"func",
".",
"now",
"(",
")",
",",
")",
")"
] |
ed36268b7917332d16868208e1e565742a8753e1
|
test
|
rename_directory
|
Rename a directory.
|
pgcontents/query.py
|
def rename_directory(db, user_id, old_api_path, new_api_path):
"""
Rename a directory.
"""
old_db_path = from_api_dirname(old_api_path)
new_db_path = from_api_dirname(new_api_path)
if old_db_path == '/':
raise RenameRoot('Renaming the root directory is not permitted.')
# Overwriting existing directories is disallowed.
if _dir_exists(db, user_id, new_db_path):
raise DirectoryExists(new_api_path)
# Set this foreign key constraint to deferred so it's not violated
# when we run the first statement to update the name of the directory.
db.execute('SET CONSTRAINTS '
'pgcontents.directories_parent_user_id_fkey DEFERRED')
# Update name column for the directory that's being renamed
db.execute(
directories.update().where(
and_(
directories.c.user_id == user_id,
directories.c.name == old_db_path,
)
).values(
name=new_db_path,
)
)
# Update the name and parent_name of any descendant directories. Do
# this in a single statement so the non-deferrable check constraint
# is satisfied.
db.execute(
directories.update().where(
and_(
directories.c.user_id == user_id,
directories.c.name.startswith(old_db_path),
directories.c.parent_name.startswith(old_db_path),
)
).values(
name=func.concat(
new_db_path,
func.right(directories.c.name, -func.length(old_db_path))
),
parent_name=func.concat(
new_db_path,
func.right(
directories.c.parent_name,
-func.length(old_db_path)
)
),
)
)
|
def rename_directory(db, user_id, old_api_path, new_api_path):
"""
Rename a directory.
"""
old_db_path = from_api_dirname(old_api_path)
new_db_path = from_api_dirname(new_api_path)
if old_db_path == '/':
raise RenameRoot('Renaming the root directory is not permitted.')
# Overwriting existing directories is disallowed.
if _dir_exists(db, user_id, new_db_path):
raise DirectoryExists(new_api_path)
# Set this foreign key constraint to deferred so it's not violated
# when we run the first statement to update the name of the directory.
db.execute('SET CONSTRAINTS '
'pgcontents.directories_parent_user_id_fkey DEFERRED')
# Update name column for the directory that's being renamed
db.execute(
directories.update().where(
and_(
directories.c.user_id == user_id,
directories.c.name == old_db_path,
)
).values(
name=new_db_path,
)
)
# Update the name and parent_name of any descendant directories. Do
# this in a single statement so the non-deferrable check constraint
# is satisfied.
db.execute(
directories.update().where(
and_(
directories.c.user_id == user_id,
directories.c.name.startswith(old_db_path),
directories.c.parent_name.startswith(old_db_path),
)
).values(
name=func.concat(
new_db_path,
func.right(directories.c.name, -func.length(old_db_path))
),
parent_name=func.concat(
new_db_path,
func.right(
directories.c.parent_name,
-func.length(old_db_path)
)
),
)
)
|
[
"Rename",
"a",
"directory",
"."
] |
quantopian/pgcontents
|
python
|
https://github.com/quantopian/pgcontents/blob/ed36268b7917332d16868208e1e565742a8753e1/pgcontents/query.py#L454-L508
|
[
"def",
"rename_directory",
"(",
"db",
",",
"user_id",
",",
"old_api_path",
",",
"new_api_path",
")",
":",
"old_db_path",
"=",
"from_api_dirname",
"(",
"old_api_path",
")",
"new_db_path",
"=",
"from_api_dirname",
"(",
"new_api_path",
")",
"if",
"old_db_path",
"==",
"'/'",
":",
"raise",
"RenameRoot",
"(",
"'Renaming the root directory is not permitted.'",
")",
"# Overwriting existing directories is disallowed.",
"if",
"_dir_exists",
"(",
"db",
",",
"user_id",
",",
"new_db_path",
")",
":",
"raise",
"DirectoryExists",
"(",
"new_api_path",
")",
"# Set this foreign key constraint to deferred so it's not violated",
"# when we run the first statement to update the name of the directory.",
"db",
".",
"execute",
"(",
"'SET CONSTRAINTS '",
"'pgcontents.directories_parent_user_id_fkey DEFERRED'",
")",
"# Update name column for the directory that's being renamed",
"db",
".",
"execute",
"(",
"directories",
".",
"update",
"(",
")",
".",
"where",
"(",
"and_",
"(",
"directories",
".",
"c",
".",
"user_id",
"==",
"user_id",
",",
"directories",
".",
"c",
".",
"name",
"==",
"old_db_path",
",",
")",
")",
".",
"values",
"(",
"name",
"=",
"new_db_path",
",",
")",
")",
"# Update the name and parent_name of any descendant directories. Do",
"# this in a single statement so the non-deferrable check constraint",
"# is satisfied.",
"db",
".",
"execute",
"(",
"directories",
".",
"update",
"(",
")",
".",
"where",
"(",
"and_",
"(",
"directories",
".",
"c",
".",
"user_id",
"==",
"user_id",
",",
"directories",
".",
"c",
".",
"name",
".",
"startswith",
"(",
"old_db_path",
")",
",",
"directories",
".",
"c",
".",
"parent_name",
".",
"startswith",
"(",
"old_db_path",
")",
",",
")",
")",
".",
"values",
"(",
"name",
"=",
"func",
".",
"concat",
"(",
"new_db_path",
",",
"func",
".",
"right",
"(",
"directories",
".",
"c",
".",
"name",
",",
"-",
"func",
".",
"length",
"(",
"old_db_path",
")",
")",
")",
",",
"parent_name",
"=",
"func",
".",
"concat",
"(",
"new_db_path",
",",
"func",
".",
"right",
"(",
"directories",
".",
"c",
".",
"parent_name",
",",
"-",
"func",
".",
"length",
"(",
"old_db_path",
")",
")",
")",
",",
")",
")"
] |
ed36268b7917332d16868208e1e565742a8753e1
|
test
|
save_file
|
Save a file.
TODO: Update-then-insert is probably cheaper than insert-then-update.
|
pgcontents/query.py
|
def save_file(db, user_id, path, content, encrypt_func, max_size_bytes):
"""
Save a file.
TODO: Update-then-insert is probably cheaper than insert-then-update.
"""
content = preprocess_incoming_content(
content,
encrypt_func,
max_size_bytes,
)
directory, name = split_api_filepath(path)
with db.begin_nested() as savepoint:
try:
res = db.execute(
files.insert().values(
name=name,
user_id=user_id,
parent_name=directory,
content=content,
)
)
except IntegrityError as error:
# The file already exists, so overwrite its content with the newer
# version.
if is_unique_violation(error):
savepoint.rollback()
res = db.execute(
files.update().where(
_file_where(user_id, path),
).values(
content=content,
created_at=func.now(),
)
)
else:
# Unknown error. Reraise
raise
return res
|
def save_file(db, user_id, path, content, encrypt_func, max_size_bytes):
"""
Save a file.
TODO: Update-then-insert is probably cheaper than insert-then-update.
"""
content = preprocess_incoming_content(
content,
encrypt_func,
max_size_bytes,
)
directory, name = split_api_filepath(path)
with db.begin_nested() as savepoint:
try:
res = db.execute(
files.insert().values(
name=name,
user_id=user_id,
parent_name=directory,
content=content,
)
)
except IntegrityError as error:
# The file already exists, so overwrite its content with the newer
# version.
if is_unique_violation(error):
savepoint.rollback()
res = db.execute(
files.update().where(
_file_where(user_id, path),
).values(
content=content,
created_at=func.now(),
)
)
else:
# Unknown error. Reraise
raise
return res
|
[
"Save",
"a",
"file",
"."
] |
quantopian/pgcontents
|
python
|
https://github.com/quantopian/pgcontents/blob/ed36268b7917332d16868208e1e565742a8753e1/pgcontents/query.py#L511-L550
|
[
"def",
"save_file",
"(",
"db",
",",
"user_id",
",",
"path",
",",
"content",
",",
"encrypt_func",
",",
"max_size_bytes",
")",
":",
"content",
"=",
"preprocess_incoming_content",
"(",
"content",
",",
"encrypt_func",
",",
"max_size_bytes",
",",
")",
"directory",
",",
"name",
"=",
"split_api_filepath",
"(",
"path",
")",
"with",
"db",
".",
"begin_nested",
"(",
")",
"as",
"savepoint",
":",
"try",
":",
"res",
"=",
"db",
".",
"execute",
"(",
"files",
".",
"insert",
"(",
")",
".",
"values",
"(",
"name",
"=",
"name",
",",
"user_id",
"=",
"user_id",
",",
"parent_name",
"=",
"directory",
",",
"content",
"=",
"content",
",",
")",
")",
"except",
"IntegrityError",
"as",
"error",
":",
"# The file already exists, so overwrite its content with the newer",
"# version.",
"if",
"is_unique_violation",
"(",
"error",
")",
":",
"savepoint",
".",
"rollback",
"(",
")",
"res",
"=",
"db",
".",
"execute",
"(",
"files",
".",
"update",
"(",
")",
".",
"where",
"(",
"_file_where",
"(",
"user_id",
",",
"path",
")",
",",
")",
".",
"values",
"(",
"content",
"=",
"content",
",",
"created_at",
"=",
"func",
".",
"now",
"(",
")",
",",
")",
")",
"else",
":",
"# Unknown error. Reraise",
"raise",
"return",
"res"
] |
ed36268b7917332d16868208e1e565742a8753e1
|
test
|
generate_files
|
Create a generator of decrypted files.
Files are yielded in ascending order of their timestamp.
This function selects all current notebooks (optionally, falling within a
datetime range), decrypts them, and returns a generator yielding dicts,
each containing a decoded notebook and metadata including the user,
filepath, and timestamp.
Parameters
----------
engine : SQLAlchemy.engine
Engine encapsulating database connections.
crypto_factory : function[str -> Any]
A function from user_id to an object providing the interface required
by PostgresContentsManager.crypto. Results of this will be used for
decryption of the selected notebooks.
min_dt : datetime.datetime, optional
Minimum last modified datetime at which a file will be included.
max_dt : datetime.datetime, optional
Last modified datetime at and after which a file will be excluded.
logger : Logger, optional
|
pgcontents/query.py
|
def generate_files(engine, crypto_factory, min_dt=None, max_dt=None,
logger=None):
"""
Create a generator of decrypted files.
Files are yielded in ascending order of their timestamp.
This function selects all current notebooks (optionally, falling within a
datetime range), decrypts them, and returns a generator yielding dicts,
each containing a decoded notebook and metadata including the user,
filepath, and timestamp.
Parameters
----------
engine : SQLAlchemy.engine
Engine encapsulating database connections.
crypto_factory : function[str -> Any]
A function from user_id to an object providing the interface required
by PostgresContentsManager.crypto. Results of this will be used for
decryption of the selected notebooks.
min_dt : datetime.datetime, optional
Minimum last modified datetime at which a file will be included.
max_dt : datetime.datetime, optional
Last modified datetime at and after which a file will be excluded.
logger : Logger, optional
"""
return _generate_notebooks(files, files.c.created_at,
engine, crypto_factory, min_dt, max_dt, logger)
|
def generate_files(engine, crypto_factory, min_dt=None, max_dt=None,
logger=None):
"""
Create a generator of decrypted files.
Files are yielded in ascending order of their timestamp.
This function selects all current notebooks (optionally, falling within a
datetime range), decrypts them, and returns a generator yielding dicts,
each containing a decoded notebook and metadata including the user,
filepath, and timestamp.
Parameters
----------
engine : SQLAlchemy.engine
Engine encapsulating database connections.
crypto_factory : function[str -> Any]
A function from user_id to an object providing the interface required
by PostgresContentsManager.crypto. Results of this will be used for
decryption of the selected notebooks.
min_dt : datetime.datetime, optional
Minimum last modified datetime at which a file will be included.
max_dt : datetime.datetime, optional
Last modified datetime at and after which a file will be excluded.
logger : Logger, optional
"""
return _generate_notebooks(files, files.c.created_at,
engine, crypto_factory, min_dt, max_dt, logger)
|
[
"Create",
"a",
"generator",
"of",
"decrypted",
"files",
"."
] |
quantopian/pgcontents
|
python
|
https://github.com/quantopian/pgcontents/blob/ed36268b7917332d16868208e1e565742a8753e1/pgcontents/query.py#L553-L580
|
[
"def",
"generate_files",
"(",
"engine",
",",
"crypto_factory",
",",
"min_dt",
"=",
"None",
",",
"max_dt",
"=",
"None",
",",
"logger",
"=",
"None",
")",
":",
"return",
"_generate_notebooks",
"(",
"files",
",",
"files",
".",
"c",
".",
"created_at",
",",
"engine",
",",
"crypto_factory",
",",
"min_dt",
",",
"max_dt",
",",
"logger",
")"
] |
ed36268b7917332d16868208e1e565742a8753e1
|
test
|
purge_remote_checkpoints
|
Delete all database records for the given user_id.
|
pgcontents/query.py
|
def purge_remote_checkpoints(db, user_id):
"""
Delete all database records for the given user_id.
"""
db.execute(
remote_checkpoints.delete().where(
remote_checkpoints.c.user_id == user_id,
)
)
|
def purge_remote_checkpoints(db, user_id):
"""
Delete all database records for the given user_id.
"""
db.execute(
remote_checkpoints.delete().where(
remote_checkpoints.c.user_id == user_id,
)
)
|
[
"Delete",
"all",
"database",
"records",
"for",
"the",
"given",
"user_id",
"."
] |
quantopian/pgcontents
|
python
|
https://github.com/quantopian/pgcontents/blob/ed36268b7917332d16868208e1e565742a8753e1/pgcontents/query.py#L725-L733
|
[
"def",
"purge_remote_checkpoints",
"(",
"db",
",",
"user_id",
")",
":",
"db",
".",
"execute",
"(",
"remote_checkpoints",
".",
"delete",
"(",
")",
".",
"where",
"(",
"remote_checkpoints",
".",
"c",
".",
"user_id",
"==",
"user_id",
",",
")",
")"
] |
ed36268b7917332d16868208e1e565742a8753e1
|
test
|
generate_checkpoints
|
Create a generator of decrypted remote checkpoints.
Checkpoints are yielded in ascending order of their timestamp.
This function selects all notebook checkpoints (optionally, falling within
a datetime range), decrypts them, and returns a generator yielding dicts,
each containing a decoded notebook and metadata including the user,
filepath, and timestamp.
Parameters
----------
engine : SQLAlchemy.engine
Engine encapsulating database connections.
crypto_factory : function[str -> Any]
A function from user_id to an object providing the interface required
by PostgresContentsManager.crypto. Results of this will be used for
decryption of the selected notebooks.
min_dt : datetime.datetime, optional
Minimum last modified datetime at which a file will be included.
max_dt : datetime.datetime, optional
Last modified datetime at and after which a file will be excluded.
logger : Logger, optional
|
pgcontents/query.py
|
def generate_checkpoints(engine, crypto_factory, min_dt=None, max_dt=None,
logger=None):
"""
Create a generator of decrypted remote checkpoints.
Checkpoints are yielded in ascending order of their timestamp.
This function selects all notebook checkpoints (optionally, falling within
a datetime range), decrypts them, and returns a generator yielding dicts,
each containing a decoded notebook and metadata including the user,
filepath, and timestamp.
Parameters
----------
engine : SQLAlchemy.engine
Engine encapsulating database connections.
crypto_factory : function[str -> Any]
A function from user_id to an object providing the interface required
by PostgresContentsManager.crypto. Results of this will be used for
decryption of the selected notebooks.
min_dt : datetime.datetime, optional
Minimum last modified datetime at which a file will be included.
max_dt : datetime.datetime, optional
Last modified datetime at and after which a file will be excluded.
logger : Logger, optional
"""
return _generate_notebooks(remote_checkpoints,
remote_checkpoints.c.last_modified,
engine, crypto_factory, min_dt, max_dt, logger)
|
def generate_checkpoints(engine, crypto_factory, min_dt=None, max_dt=None,
logger=None):
"""
Create a generator of decrypted remote checkpoints.
Checkpoints are yielded in ascending order of their timestamp.
This function selects all notebook checkpoints (optionally, falling within
a datetime range), decrypts them, and returns a generator yielding dicts,
each containing a decoded notebook and metadata including the user,
filepath, and timestamp.
Parameters
----------
engine : SQLAlchemy.engine
Engine encapsulating database connections.
crypto_factory : function[str -> Any]
A function from user_id to an object providing the interface required
by PostgresContentsManager.crypto. Results of this will be used for
decryption of the selected notebooks.
min_dt : datetime.datetime, optional
Minimum last modified datetime at which a file will be included.
max_dt : datetime.datetime, optional
Last modified datetime at and after which a file will be excluded.
logger : Logger, optional
"""
return _generate_notebooks(remote_checkpoints,
remote_checkpoints.c.last_modified,
engine, crypto_factory, min_dt, max_dt, logger)
|
[
"Create",
"a",
"generator",
"of",
"decrypted",
"remote",
"checkpoints",
"."
] |
quantopian/pgcontents
|
python
|
https://github.com/quantopian/pgcontents/blob/ed36268b7917332d16868208e1e565742a8753e1/pgcontents/query.py#L736-L764
|
[
"def",
"generate_checkpoints",
"(",
"engine",
",",
"crypto_factory",
",",
"min_dt",
"=",
"None",
",",
"max_dt",
"=",
"None",
",",
"logger",
"=",
"None",
")",
":",
"return",
"_generate_notebooks",
"(",
"remote_checkpoints",
",",
"remote_checkpoints",
".",
"c",
".",
"last_modified",
",",
"engine",
",",
"crypto_factory",
",",
"min_dt",
",",
"max_dt",
",",
"logger",
")"
] |
ed36268b7917332d16868208e1e565742a8753e1
|
test
|
_generate_notebooks
|
See docstrings for `generate_files` and `generate_checkpoints`.
Parameters
----------
table : SQLAlchemy.Table
Table to fetch notebooks from, `files` or `remote_checkpoints`.
timestamp_column : SQLAlchemy.Column
`table`'s column storing timestamps, `created_at` or `last_modified`.
engine : SQLAlchemy.engine
Engine encapsulating database connections.
crypto_factory : function[str -> Any]
A function from user_id to an object providing the interface required
by PostgresContentsManager.crypto. Results of this will be used for
decryption of the selected notebooks.
min_dt : datetime.datetime
Minimum last modified datetime at which a file will be included.
max_dt : datetime.datetime
Last modified datetime at and after which a file will be excluded.
logger : Logger
|
pgcontents/query.py
|
def _generate_notebooks(table, timestamp_column,
engine, crypto_factory, min_dt, max_dt, logger):
"""
See docstrings for `generate_files` and `generate_checkpoints`.
Parameters
----------
table : SQLAlchemy.Table
Table to fetch notebooks from, `files` or `remote_checkpoints.
timestamp_column : SQLAlchemy.Column
`table`'s column storing timestamps, `created_at` or `last_modified`.
engine : SQLAlchemy.engine
Engine encapsulating database connections.
crypto_factory : function[str -> Any]
A function from user_id to an object providing the interface required
by PostgresContentsManager.crypto. Results of this will be used for
decryption of the selected notebooks.
min_dt : datetime.datetime
Minimum last modified datetime at which a file will be included.
max_dt : datetime.datetime
Last modified datetime at and after which a file will be excluded.
logger : Logger
"""
where_conds = []
if min_dt is not None:
where_conds.append(timestamp_column >= min_dt)
if max_dt is not None:
where_conds.append(timestamp_column < max_dt)
if table is files:
# Only select files that are notebooks
where_conds.append(files.c.name.like(u'%.ipynb'))
# Query for notebooks satisfying the conditions.
query = select([table]).order_by(timestamp_column)
for cond in where_conds:
query = query.where(cond)
result = engine.execute(query)
# Decrypt each notebook and yield the result.
for nb_row in result:
try:
# The decrypt function depends on the user
user_id = nb_row['user_id']
decrypt_func = crypto_factory(user_id).decrypt
nb_dict = to_dict_with_content(table.c, nb_row, decrypt_func)
if table is files:
# Correct for files schema differing somewhat from checkpoints.
nb_dict['path'] = nb_dict['parent_name'] + nb_dict['name']
nb_dict['last_modified'] = nb_dict['created_at']
# For 'content', we use `reads_base64` directly. If the db content
# format is changed from base64, the decoding should be changed
# here as well.
yield {
'id': nb_dict['id'],
'user_id': user_id,
'path': to_api_path(nb_dict['path']),
'last_modified': nb_dict['last_modified'],
'content': reads_base64(nb_dict['content']),
}
except CorruptedFile:
if logger is not None:
logger.warning(
'Corrupted file with id %d in table %s.'
% (nb_row['id'], table.name)
)
|
def _generate_notebooks(table, timestamp_column,
engine, crypto_factory, min_dt, max_dt, logger):
"""
See docstrings for `generate_files` and `generate_checkpoints`.
Parameters
----------
table : SQLAlchemy.Table
Table to fetch notebooks from, `files` or `remote_checkpoints.
timestamp_column : SQLAlchemy.Column
`table`'s column storing timestamps, `created_at` or `last_modified`.
engine : SQLAlchemy.engine
Engine encapsulating database connections.
crypto_factory : function[str -> Any]
A function from user_id to an object providing the interface required
by PostgresContentsManager.crypto. Results of this will be used for
decryption of the selected notebooks.
min_dt : datetime.datetime
Minimum last modified datetime at which a file will be included.
max_dt : datetime.datetime
Last modified datetime at and after which a file will be excluded.
logger : Logger
"""
where_conds = []
if min_dt is not None:
where_conds.append(timestamp_column >= min_dt)
if max_dt is not None:
where_conds.append(timestamp_column < max_dt)
if table is files:
# Only select files that are notebooks
where_conds.append(files.c.name.like(u'%.ipynb'))
# Query for notebooks satisfying the conditions.
query = select([table]).order_by(timestamp_column)
for cond in where_conds:
query = query.where(cond)
result = engine.execute(query)
# Decrypt each notebook and yield the result.
for nb_row in result:
try:
# The decrypt function depends on the user
user_id = nb_row['user_id']
decrypt_func = crypto_factory(user_id).decrypt
nb_dict = to_dict_with_content(table.c, nb_row, decrypt_func)
if table is files:
# Correct for files schema differing somewhat from checkpoints.
nb_dict['path'] = nb_dict['parent_name'] + nb_dict['name']
nb_dict['last_modified'] = nb_dict['created_at']
# For 'content', we use `reads_base64` directly. If the db content
# format is changed from base64, the decoding should be changed
# here as well.
yield {
'id': nb_dict['id'],
'user_id': user_id,
'path': to_api_path(nb_dict['path']),
'last_modified': nb_dict['last_modified'],
'content': reads_base64(nb_dict['content']),
}
except CorruptedFile:
if logger is not None:
logger.warning(
'Corrupted file with id %d in table %s.'
% (nb_row['id'], table.name)
)
|
[
"See",
"docstrings",
"for",
"generate_files",
"and",
"generate_checkpoints",
"."
] |
quantopian/pgcontents
|
python
|
https://github.com/quantopian/pgcontents/blob/ed36268b7917332d16868208e1e565742a8753e1/pgcontents/query.py#L770-L836
|
[
"def",
"_generate_notebooks",
"(",
"table",
",",
"timestamp_column",
",",
"engine",
",",
"crypto_factory",
",",
"min_dt",
",",
"max_dt",
",",
"logger",
")",
":",
"where_conds",
"=",
"[",
"]",
"if",
"min_dt",
"is",
"not",
"None",
":",
"where_conds",
".",
"append",
"(",
"timestamp_column",
">=",
"min_dt",
")",
"if",
"max_dt",
"is",
"not",
"None",
":",
"where_conds",
".",
"append",
"(",
"timestamp_column",
"<",
"max_dt",
")",
"if",
"table",
"is",
"files",
":",
"# Only select files that are notebooks",
"where_conds",
".",
"append",
"(",
"files",
".",
"c",
".",
"name",
".",
"like",
"(",
"u'%.ipynb'",
")",
")",
"# Query for notebooks satisfying the conditions.",
"query",
"=",
"select",
"(",
"[",
"table",
"]",
")",
".",
"order_by",
"(",
"timestamp_column",
")",
"for",
"cond",
"in",
"where_conds",
":",
"query",
"=",
"query",
".",
"where",
"(",
"cond",
")",
"result",
"=",
"engine",
".",
"execute",
"(",
"query",
")",
"# Decrypt each notebook and yield the result.",
"for",
"nb_row",
"in",
"result",
":",
"try",
":",
"# The decrypt function depends on the user",
"user_id",
"=",
"nb_row",
"[",
"'user_id'",
"]",
"decrypt_func",
"=",
"crypto_factory",
"(",
"user_id",
")",
".",
"decrypt",
"nb_dict",
"=",
"to_dict_with_content",
"(",
"table",
".",
"c",
",",
"nb_row",
",",
"decrypt_func",
")",
"if",
"table",
"is",
"files",
":",
"# Correct for files schema differing somewhat from checkpoints.",
"nb_dict",
"[",
"'path'",
"]",
"=",
"nb_dict",
"[",
"'parent_name'",
"]",
"+",
"nb_dict",
"[",
"'name'",
"]",
"nb_dict",
"[",
"'last_modified'",
"]",
"=",
"nb_dict",
"[",
"'created_at'",
"]",
"# For 'content', we use `reads_base64` directly. If the db content",
"# format is changed from base64, the decoding should be changed",
"# here as well.",
"yield",
"{",
"'id'",
":",
"nb_dict",
"[",
"'id'",
"]",
",",
"'user_id'",
":",
"user_id",
",",
"'path'",
":",
"to_api_path",
"(",
"nb_dict",
"[",
"'path'",
"]",
")",
",",
"'last_modified'",
":",
"nb_dict",
"[",
"'last_modified'",
"]",
",",
"'content'",
":",
"reads_base64",
"(",
"nb_dict",
"[",
"'content'",
"]",
")",
",",
"}",
"except",
"CorruptedFile",
":",
"if",
"logger",
"is",
"not",
"None",
":",
"logger",
".",
"warning",
"(",
"'Corrupted file with id %d in table %s.'",
"%",
"(",
"nb_row",
"[",
"'id'",
"]",
",",
"table",
".",
"name",
")",
")"
] |
ed36268b7917332d16868208e1e565742a8753e1
|
test
|
reencrypt_row_content
|
Re-encrypt a row from ``table`` with ``id`` of ``row_id``.
|
pgcontents/query.py
|
def reencrypt_row_content(db,
table,
row_id,
decrypt_func,
encrypt_func,
logger):
"""
Re-encrypt a row from ``table`` with ``id`` of ``row_id``.
"""
q = (select([table.c.content])
.with_for_update()
.where(table.c.id == row_id))
[(content,)] = db.execute(q)
logger.info("Begin encrypting %s row %s.", table.name, row_id)
db.execute(
table
.update()
.where(table.c.id == row_id)
.values(content=encrypt_func(decrypt_func(content)))
)
logger.info("Done encrypting %s row %s.", table.name, row_id)
|
def reencrypt_row_content(db,
table,
row_id,
decrypt_func,
encrypt_func,
logger):
"""
Re-encrypt a row from ``table`` with ``id`` of ``row_id``.
"""
q = (select([table.c.content])
.with_for_update()
.where(table.c.id == row_id))
[(content,)] = db.execute(q)
logger.info("Begin encrypting %s row %s.", table.name, row_id)
db.execute(
table
.update()
.where(table.c.id == row_id)
.values(content=encrypt_func(decrypt_func(content)))
)
logger.info("Done encrypting %s row %s.", table.name, row_id)
|
[
"Re",
"-",
"encrypt",
"a",
"row",
"from",
"table",
"with",
"id",
"of",
"row_id",
"."
] |
quantopian/pgcontents
|
python
|
https://github.com/quantopian/pgcontents/blob/ed36268b7917332d16868208e1e565742a8753e1/pgcontents/query.py#L842-L864
|
[
"def",
"reencrypt_row_content",
"(",
"db",
",",
"table",
",",
"row_id",
",",
"decrypt_func",
",",
"encrypt_func",
",",
"logger",
")",
":",
"q",
"=",
"(",
"select",
"(",
"[",
"table",
".",
"c",
".",
"content",
"]",
")",
".",
"with_for_update",
"(",
")",
".",
"where",
"(",
"table",
".",
"c",
".",
"id",
"==",
"row_id",
")",
")",
"[",
"(",
"content",
",",
")",
"]",
"=",
"db",
".",
"execute",
"(",
"q",
")",
"logger",
".",
"info",
"(",
"\"Begin encrypting %s row %s.\"",
",",
"table",
".",
"name",
",",
"row_id",
")",
"db",
".",
"execute",
"(",
"table",
".",
"update",
"(",
")",
".",
"where",
"(",
"table",
".",
"c",
".",
"id",
"==",
"row_id",
")",
".",
"values",
"(",
"content",
"=",
"encrypt_func",
"(",
"decrypt_func",
"(",
"content",
")",
")",
")",
")",
"logger",
".",
"info",
"(",
"\"Done encrypting %s row %s.\"",
",",
"table",
".",
"name",
",",
"row_id",
")"
] |
ed36268b7917332d16868208e1e565742a8753e1
|
test
|
select_file_ids
|
Get all file ids for a user.
|
pgcontents/query.py
|
def select_file_ids(db, user_id):
"""
Get all file ids for a user.
"""
return list(
db.execute(
select([files.c.id])
.where(files.c.user_id == user_id)
)
)
|
def select_file_ids(db, user_id):
"""
Get all file ids for a user.
"""
return list(
db.execute(
select([files.c.id])
.where(files.c.user_id == user_id)
)
)
|
[
"Get",
"all",
"file",
"ids",
"for",
"a",
"user",
"."
] |
quantopian/pgcontents
|
python
|
https://github.com/quantopian/pgcontents/blob/ed36268b7917332d16868208e1e565742a8753e1/pgcontents/query.py#L867-L876
|
[
"def",
"select_file_ids",
"(",
"db",
",",
"user_id",
")",
":",
"return",
"list",
"(",
"db",
".",
"execute",
"(",
"select",
"(",
"[",
"files",
".",
"c",
".",
"id",
"]",
")",
".",
"where",
"(",
"files",
".",
"c",
".",
"user_id",
"==",
"user_id",
")",
")",
")"
] |
ed36268b7917332d16868208e1e565742a8753e1
|
test
|
select_remote_checkpoint_ids
|
Get all checkpoint ids for a user.
|
pgcontents/query.py
|
def select_remote_checkpoint_ids(db, user_id):
"""
    Get all checkpoint ids for a user.
"""
return list(
db.execute(
select([remote_checkpoints.c.id])
.where(remote_checkpoints.c.user_id == user_id)
)
)
|
def select_remote_checkpoint_ids(db, user_id):
"""
    Get all checkpoint ids for a user.
"""
return list(
db.execute(
select([remote_checkpoints.c.id])
.where(remote_checkpoints.c.user_id == user_id)
)
)
|
[
"Get",
"all",
"file",
"ids",
"for",
"a",
"user",
"."
] |
quantopian/pgcontents
|
python
|
https://github.com/quantopian/pgcontents/blob/ed36268b7917332d16868208e1e565742a8753e1/pgcontents/query.py#L879-L888
|
[
"def",
"select_remote_checkpoint_ids",
"(",
"db",
",",
"user_id",
")",
":",
"return",
"list",
"(",
"db",
".",
"execute",
"(",
"select",
"(",
"[",
"remote_checkpoints",
".",
"c",
".",
"id",
"]",
")",
".",
"where",
"(",
"remote_checkpoints",
".",
"c",
".",
"user_id",
"==",
"user_id",
")",
")",
")"
] |
ed36268b7917332d16868208e1e565742a8753e1
|
test
|
reencrypt_user_content
|
Re-encrypt all of the files and checkpoints for a single user.
|
pgcontents/query.py
|
def reencrypt_user_content(engine,
user_id,
old_decrypt_func,
new_encrypt_func,
logger):
"""
Re-encrypt all of the files and checkpoints for a single user.
"""
logger.info("Begin re-encryption for user %s", user_id)
with engine.begin() as db:
# NOTE: Doing both of these operations in one transaction depends for
# correctness on the fact that the creation of new checkpoints always
# involves writing new data into the database from Python, rather than
# simply copying data inside the DB.
# If we change checkpoint creation so that it does an in-database copy,
# then we need to split this transaction to ensure that
# file-reencryption is complete before checkpoint-reencryption starts.
# If that doesn't happen, it will be possible for a user to create a
# new checkpoint in a transaction that hasn't seen the completed
# file-reencryption process, but we might not see that checkpoint here,
# which means that we would never update the content of that checkpoint
# to the new encryption key.
logger.info("Re-encrypting files for %s", user_id)
for (file_id,) in select_file_ids(db, user_id):
reencrypt_row_content(
db,
files,
file_id,
old_decrypt_func,
new_encrypt_func,
logger,
)
logger.info("Re-encrypting checkpoints for %s", user_id)
for (cp_id,) in select_remote_checkpoint_ids(db, user_id):
reencrypt_row_content(
db,
remote_checkpoints,
cp_id,
old_decrypt_func,
new_encrypt_func,
logger,
)
logger.info("Finished re-encryption for user %s", user_id)
|
def reencrypt_user_content(engine,
user_id,
old_decrypt_func,
new_encrypt_func,
logger):
"""
Re-encrypt all of the files and checkpoints for a single user.
"""
logger.info("Begin re-encryption for user %s", user_id)
with engine.begin() as db:
# NOTE: Doing both of these operations in one transaction depends for
# correctness on the fact that the creation of new checkpoints always
# involves writing new data into the database from Python, rather than
# simply copying data inside the DB.
# If we change checkpoint creation so that it does an in-database copy,
# then we need to split this transaction to ensure that
# file-reencryption is complete before checkpoint-reencryption starts.
# If that doesn't happen, it will be possible for a user to create a
# new checkpoint in a transaction that hasn't seen the completed
# file-reencryption process, but we might not see that checkpoint here,
# which means that we would never update the content of that checkpoint
# to the new encryption key.
logger.info("Re-encrypting files for %s", user_id)
for (file_id,) in select_file_ids(db, user_id):
reencrypt_row_content(
db,
files,
file_id,
old_decrypt_func,
new_encrypt_func,
logger,
)
logger.info("Re-encrypting checkpoints for %s", user_id)
for (cp_id,) in select_remote_checkpoint_ids(db, user_id):
reencrypt_row_content(
db,
remote_checkpoints,
cp_id,
old_decrypt_func,
new_encrypt_func,
logger,
)
logger.info("Finished re-encryption for user %s", user_id)
|
[
"Re",
"-",
"encrypt",
"all",
"of",
"the",
"files",
"and",
"checkpoints",
"for",
"a",
"single",
"user",
"."
] |
quantopian/pgcontents
|
python
|
https://github.com/quantopian/pgcontents/blob/ed36268b7917332d16868208e1e565742a8753e1/pgcontents/query.py#L891-L936
|
[
"def",
"reencrypt_user_content",
"(",
"engine",
",",
"user_id",
",",
"old_decrypt_func",
",",
"new_encrypt_func",
",",
"logger",
")",
":",
"logger",
".",
"info",
"(",
"\"Begin re-encryption for user %s\"",
",",
"user_id",
")",
"with",
"engine",
".",
"begin",
"(",
")",
"as",
"db",
":",
"# NOTE: Doing both of these operations in one transaction depends for",
"# correctness on the fact that the creation of new checkpoints always",
"# involves writing new data into the database from Python, rather than",
"# simply copying data inside the DB.",
"# If we change checkpoint creation so that it does an in-database copy,",
"# then we need to split this transaction to ensure that",
"# file-reencryption is complete before checkpoint-reencryption starts.",
"# If that doesn't happen, it will be possible for a user to create a",
"# new checkpoint in a transaction that hasn't seen the completed",
"# file-reencryption process, but we might not see that checkpoint here,",
"# which means that we would never update the content of that checkpoint",
"# to the new encryption key.",
"logger",
".",
"info",
"(",
"\"Re-encrypting files for %s\"",
",",
"user_id",
")",
"for",
"(",
"file_id",
",",
")",
"in",
"select_file_ids",
"(",
"db",
",",
"user_id",
")",
":",
"reencrypt_row_content",
"(",
"db",
",",
"files",
",",
"file_id",
",",
"old_decrypt_func",
",",
"new_encrypt_func",
",",
"logger",
",",
")",
"logger",
".",
"info",
"(",
"\"Re-encrypting checkpoints for %s\"",
",",
"user_id",
")",
"for",
"(",
"cp_id",
",",
")",
"in",
"select_remote_checkpoint_ids",
"(",
"db",
",",
"user_id",
")",
":",
"reencrypt_row_content",
"(",
"db",
",",
"remote_checkpoints",
",",
"cp_id",
",",
"old_decrypt_func",
",",
"new_encrypt_func",
",",
"logger",
",",
")",
"logger",
".",
"info",
"(",
"\"Finished re-encryption for user %s\"",
",",
"user_id",
")"
] |
ed36268b7917332d16868208e1e565742a8753e1
|
test
|
derive_single_fernet_key
|
Convert a secret key and a user ID into an encryption key to use with a
``cryptography.fernet.Fernet``.
Taken from
https://cryptography.io/en/latest/fernet/#using-passwords-with-fernet
Parameters
----------
password : unicode
ascii-encodable key to derive
user_id : unicode
ascii-encodable user_id to use as salt
|
pgcontents/crypto.py
|
def derive_single_fernet_key(password, user_id):
"""
Convert a secret key and a user ID into an encryption key to use with a
``cryptography.fernet.Fernet``.
Taken from
https://cryptography.io/en/latest/fernet/#using-passwords-with-fernet
Parameters
----------
password : unicode
ascii-encodable key to derive
user_id : unicode
ascii-encodable user_id to use as salt
"""
password = ascii_unicode_to_bytes(password)
user_id = ascii_unicode_to_bytes(user_id)
kdf = PBKDF2HMAC(
algorithm=hashes.SHA256(),
length=32,
salt=user_id,
iterations=100000,
backend=default_backend(),
)
return base64.urlsafe_b64encode(kdf.derive(password))
|
def derive_single_fernet_key(password, user_id):
"""
Convert a secret key and a user ID into an encryption key to use with a
``cryptography.fernet.Fernet``.
Taken from
https://cryptography.io/en/latest/fernet/#using-passwords-with-fernet
Parameters
----------
password : unicode
ascii-encodable key to derive
user_id : unicode
ascii-encodable user_id to use as salt
"""
password = ascii_unicode_to_bytes(password)
user_id = ascii_unicode_to_bytes(user_id)
kdf = PBKDF2HMAC(
algorithm=hashes.SHA256(),
length=32,
salt=user_id,
iterations=100000,
backend=default_backend(),
)
return base64.urlsafe_b64encode(kdf.derive(password))
|
[
"Convert",
"a",
"secret",
"key",
"and",
"a",
"user",
"ID",
"into",
"an",
"encryption",
"key",
"to",
"use",
"with",
"a",
"cryptography",
".",
"fernet",
".",
"Fernet",
"."
] |
quantopian/pgcontents
|
python
|
https://github.com/quantopian/pgcontents/blob/ed36268b7917332d16868208e1e565742a8753e1/pgcontents/crypto.py#L149-L174
|
[
"def",
"derive_single_fernet_key",
"(",
"password",
",",
"user_id",
")",
":",
"password",
"=",
"ascii_unicode_to_bytes",
"(",
"password",
")",
"user_id",
"=",
"ascii_unicode_to_bytes",
"(",
"user_id",
")",
"kdf",
"=",
"PBKDF2HMAC",
"(",
"algorithm",
"=",
"hashes",
".",
"SHA256",
"(",
")",
",",
"length",
"=",
"32",
",",
"salt",
"=",
"user_id",
",",
"iterations",
"=",
"100000",
",",
"backend",
"=",
"default_backend",
"(",
")",
",",
")",
"return",
"base64",
".",
"urlsafe_b64encode",
"(",
"kdf",
".",
"derive",
"(",
"password",
")",
")"
] |
ed36268b7917332d16868208e1e565742a8753e1
|
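A minimal, self-contained sketch of the key-derivation recipe above, assuming a reasonably recent `cryptography` release (where the `backend` argument is optional; the original passes `default_backend()` explicitly). The password and user ids are illustrative only.

import base64

from cryptography.fernet import Fernet
from cryptography.hazmat.primitives import hashes
from cryptography.hazmat.primitives.kdf.pbkdf2 import PBKDF2HMAC


def derive_key(password, user_id):
    # Same construction as derive_single_fernet_key: PBKDF2-HMAC-SHA256,
    # 32-byte output, the user id as salt, urlsafe-base64 so Fernet accepts it.
    kdf = PBKDF2HMAC(
        algorithm=hashes.SHA256(),
        length=32,
        salt=user_id.encode('ascii'),
        iterations=100000,
    )
    return base64.urlsafe_b64encode(kdf.derive(password.encode('ascii')))


key = derive_key('shared-secret', 'alice')      # illustrative values
fernet = Fernet(key)
token = fernet.encrypt(b'notebook bytes')
assert fernet.decrypt(token) == b'notebook bytes'
# The same master secret yields a different key for every user id.
assert key != derive_key('shared-secret', 'bob')
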
test
|
derive_fallback_fernet_keys
|
Derive a list of per-user Fernet keys from a list of master keys and a
username.
If a None is encountered in ``passwords``, it is forwarded.
Parameters
----------
passwords : list[unicode]
List of ascii-encodable keys to derive.
user_id : unicode or None
ascii-encodable user_id to use as salt
|
pgcontents/crypto.py
|
def derive_fallback_fernet_keys(passwords, user_id):
"""
Derive a list of per-user Fernet keys from a list of master keys and a
username.
If a None is encountered in ``passwords``, it is forwarded.
Parameters
----------
passwords : list[unicode]
List of ascii-encodable keys to derive.
user_id : unicode or None
ascii-encodable user_id to use as salt
"""
# Normally I wouldn't advocate for these kinds of assertions, but we really
# really really don't want to mess up deriving encryption keys.
assert isinstance(passwords, (list, tuple)), \
"Expected list or tuple of keys, got %s." % type(passwords)
def derive_single_allow_none(k):
if k is None:
return None
return derive_single_fernet_key(k, user_id).decode('ascii')
return list(map(derive_single_allow_none, passwords))
|
def derive_fallback_fernet_keys(passwords, user_id):
"""
Derive a list of per-user Fernet keys from a list of master keys and a
username.
If a None is encountered in ``passwords``, it is forwarded.
Parameters
----------
passwords : list[unicode]
List of ascii-encodable keys to derive.
user_id : unicode or None
ascii-encodable user_id to use as salt
"""
# Normally I wouldn't advocate for these kinds of assertions, but we really
# really really don't want to mess up deriving encryption keys.
assert isinstance(passwords, (list, tuple)), \
"Expected list or tuple of keys, got %s." % type(passwords)
def derive_single_allow_none(k):
if k is None:
return None
return derive_single_fernet_key(k, user_id).decode('ascii')
return list(map(derive_single_allow_none, passwords))
|
[
"Derive",
"a",
"list",
"of",
"per",
"-",
"user",
"Fernet",
"keys",
"from",
"a",
"list",
"of",
"master",
"keys",
"and",
"a",
"username",
"."
] |
quantopian/pgcontents
|
python
|
https://github.com/quantopian/pgcontents/blob/ed36268b7917332d16868208e1e565742a8753e1/pgcontents/crypto.py#L177-L201
|
[
"def",
"derive_fallback_fernet_keys",
"(",
"passwords",
",",
"user_id",
")",
":",
"# Normally I wouldn't advocate for these kinds of assertions, but we really",
"# really really don't want to mess up deriving encryption keys.",
"assert",
"isinstance",
"(",
"passwords",
",",
"(",
"list",
",",
"tuple",
")",
")",
",",
"\"Expected list or tuple of keys, got %s.\"",
"%",
"type",
"(",
"passwords",
")",
"def",
"derive_single_allow_none",
"(",
"k",
")",
":",
"if",
"k",
"is",
"None",
":",
"return",
"None",
"return",
"derive_single_fernet_key",
"(",
"k",
",",
"user_id",
")",
".",
"decode",
"(",
"'ascii'",
")",
"return",
"list",
"(",
"map",
"(",
"derive_single_allow_none",
",",
"passwords",
")",
")"
] |
ed36268b7917332d16868208e1e565742a8753e1
|
test
|
single_password_crypto_factory
|
Create and return a function suitable for passing as a crypto_factory to
``pgcontents.utils.sync.reencrypt_all_users``
The factory here returns a ``FernetEncryption`` that uses a key derived
from ``password`` and salted with the supplied user_id.
|
pgcontents/crypto.py
|
def single_password_crypto_factory(password):
"""
Create and return a function suitable for passing as a crypto_factory to
``pgcontents.utils.sync.reencrypt_all_users``
The factory here returns a ``FernetEncryption`` that uses a key derived
from ``password`` and salted with the supplied user_id.
"""
@memoize_single_arg
def factory(user_id):
return FernetEncryption(
Fernet(derive_single_fernet_key(password, user_id))
)
return factory
|
def single_password_crypto_factory(password):
"""
Create and return a function suitable for passing as a crypto_factory to
``pgcontents.utils.sync.reencrypt_all_users``
The factory here returns a ``FernetEncryption`` that uses a key derived
from ``password`` and salted with the supplied user_id.
"""
@memoize_single_arg
def factory(user_id):
return FernetEncryption(
Fernet(derive_single_fernet_key(password, user_id))
)
return factory
|
[
"Create",
"and",
"return",
"a",
"function",
"suitable",
"for",
"passing",
"as",
"a",
"crypto_factory",
"to",
"pgcontents",
".",
"utils",
".",
"sync",
".",
"reencrypt_all_users"
] |
quantopian/pgcontents
|
python
|
https://github.com/quantopian/pgcontents/blob/ed36268b7917332d16868208e1e565742a8753e1/pgcontents/crypto.py#L217-L230
|
[
"def",
"single_password_crypto_factory",
"(",
"password",
")",
":",
"@",
"memoize_single_arg",
"def",
"factory",
"(",
"user_id",
")",
":",
"return",
"FernetEncryption",
"(",
"Fernet",
"(",
"derive_single_fernet_key",
"(",
"password",
",",
"user_id",
")",
")",
")",
"return",
"factory"
] |
ed36268b7917332d16868208e1e565742a8753e1
|
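pgcontents' `FernetEncryption` wrapper is not part of this excerpt, so the sketch below only approximates the factory pattern: a plain `cryptography.fernet.Fernet` stands in for it, `functools.lru_cache` plays the role of `memoize_single_arg`, and the key derivation uses the standard library's PBKDF2 with the same parameters as `derive_single_fernet_key`. All names and secrets here are illustrative.

import base64
import hashlib
from functools import lru_cache

from cryptography.fernet import Fernet


def derive_key(password, user_id):
    # Stand-in for derive_single_fernet_key: PBKDF2-HMAC-SHA256, 32 bytes,
    # user id as salt, urlsafe-base64 encoded for Fernet.
    raw = hashlib.pbkdf2_hmac('sha256', password.encode('ascii'),
                              user_id.encode('ascii'), 100000, dklen=32)
    return base64.urlsafe_b64encode(raw)


def single_password_factory(password):
    @lru_cache(maxsize=None)        # plays the role of memoize_single_arg
    def factory(user_id):
        # Plain Fernet stands in for pgcontents' FernetEncryption wrapper.
        return Fernet(derive_key(password, user_id))
    return factory


make_crypto = single_password_factory('illustrative-master-secret')
assert make_crypto('alice') is make_crypto('alice')     # memoized per user
assert make_crypto('alice') is not make_crypto('bob')
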
test
|
memoize_single_arg
|
Decorator memoizing a single-argument function
|
pgcontents/crypto.py
|
def memoize_single_arg(f):
"""
Decorator memoizing a single-argument function
"""
memo = {}
@wraps(f)
def memoized_f(arg):
try:
return memo[arg]
except KeyError:
result = memo[arg] = f(arg)
return result
return memoized_f
|
def memoize_single_arg(f):
"""
Decorator memoizing a single-argument function
"""
memo = {}
@wraps(f)
def memoized_f(arg):
try:
return memo[arg]
except KeyError:
result = memo[arg] = f(arg)
return result
return memoized_f
|
[
"Decorator",
"memoizing",
"a",
"single",
"-",
"argument",
"function"
] |
quantopian/pgcontents
|
python
|
https://github.com/quantopian/pgcontents/blob/ed36268b7917332d16868208e1e565742a8753e1/pgcontents/crypto.py#L233-L246
|
[
"def",
"memoize_single_arg",
"(",
"f",
")",
":",
"memo",
"=",
"{",
"}",
"@",
"wraps",
"(",
"f",
")",
"def",
"memoized_f",
"(",
"arg",
")",
":",
"try",
":",
"return",
"memo",
"[",
"arg",
"]",
"except",
"KeyError",
":",
"result",
"=",
"memo",
"[",
"arg",
"]",
"=",
"f",
"(",
"arg",
")",
"return",
"result",
"return",
"memoized_f"
] |
ed36268b7917332d16868208e1e565742a8753e1
|
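A quick, self-contained demonstration of the decorator above; the `expensive` function and its call log are made up for the example.

from functools import wraps


def memoize_single_arg(f):
    # Same pattern as the record above: cache results keyed by the argument.
    memo = {}

    @wraps(f)
    def memoized_f(arg):
        try:
            return memo[arg]
        except KeyError:
            result = memo[arg] = f(arg)
            return result
    return memoized_f


calls = []


@memoize_single_arg
def expensive(user_id):
    calls.append(user_id)               # record how often the body runs
    return 'crypto-for-%s' % user_id


assert expensive('alice') == 'crypto-for-alice'
assert expensive('alice') == 'crypto-for-alice'
assert calls == ['alice']               # second call came from the cache
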
test
|
_get_name
|
Get the name from a column-like SQLAlchemy expression.
Works for Columns and Cast expressions.
|
pgcontents/db_utils.py
|
def _get_name(column_like):
"""
Get the name from a column-like SQLAlchemy expression.
Works for Columns and Cast expressions.
"""
if isinstance(column_like, Column):
return column_like.name
elif isinstance(column_like, Cast):
return column_like.clause.name
|
def _get_name(column_like):
"""
Get the name from a column-like SQLAlchemy expression.
Works for Columns and Cast expressions.
"""
if isinstance(column_like, Column):
return column_like.name
elif isinstance(column_like, Cast):
return column_like.clause.name
|
[
"Get",
"the",
"name",
"from",
"a",
"column",
"-",
"like",
"SQLAlchemy",
"expression",
"."
] |
quantopian/pgcontents
|
python
|
https://github.com/quantopian/pgcontents/blob/ed36268b7917332d16868208e1e565742a8753e1/pgcontents/db_utils.py#L56-L65
|
[
"def",
"_get_name",
"(",
"column_like",
")",
":",
"if",
"isinstance",
"(",
"column_like",
",",
"Column",
")",
":",
"return",
"column_like",
".",
"name",
"elif",
"isinstance",
"(",
"column_like",
",",
"Cast",
")",
":",
"return",
"column_like",
".",
"clause",
".",
"name"
] |
ed36268b7917332d16868208e1e565742a8753e1
|
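A small sketch of the dispatch above, assuming SQLAlchemy is installed; the column name is illustrative. It shows why the `Cast` branch is needed: a cast expression hides the original column one level down, under `.clause`.

from sqlalchemy import Column, Integer, String, cast
from sqlalchemy.sql.elements import Cast


def get_name(column_like):
    # Mirrors _get_name: Columns expose .name directly, Cast expressions
    # keep the wrapped column in .clause.
    if isinstance(column_like, Column):
        return column_like.name
    elif isinstance(column_like, Cast):
        return column_like.clause.name


created_at = Column('created_at', Integer)
assert get_name(created_at) == 'created_at'
assert get_name(cast(created_at, String)) == 'created_at'
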
test
|
to_dict_no_content
|
Convert a SQLAlchemy row that does not contain a 'content' field to a dict.
If row is None, return None.
Raises AssertionError if there is a field named 'content' in ``fields``.
|
pgcontents/db_utils.py
|
def to_dict_no_content(fields, row):
"""
Convert a SQLAlchemy row that does not contain a 'content' field to a dict.
If row is None, return None.
Raises AssertionError if there is a field named 'content' in ``fields``.
"""
assert(len(fields) == len(row))
field_names = list(map(_get_name, fields))
assert 'content' not in field_names, "Unexpected content field."
return dict(zip(field_names, row))
|
def to_dict_no_content(fields, row):
"""
Convert a SQLAlchemy row that does not contain a 'content' field to a dict.
If row is None, return None.
Raises AssertionError if there is a field named 'content' in ``fields``.
"""
assert(len(fields) == len(row))
field_names = list(map(_get_name, fields))
assert 'content' not in field_names, "Unexpected content field."
return dict(zip(field_names, row))
|
[
"Convert",
"a",
"SQLAlchemy",
"row",
"that",
"does",
"not",
"contain",
"a",
"content",
"field",
"to",
"a",
"dict",
"."
] |
quantopian/pgcontents
|
python
|
https://github.com/quantopian/pgcontents/blob/ed36268b7917332d16868208e1e565742a8753e1/pgcontents/db_utils.py#L68-L81
|
[
"def",
"to_dict_no_content",
"(",
"fields",
",",
"row",
")",
":",
"assert",
"(",
"len",
"(",
"fields",
")",
"==",
"len",
"(",
"row",
")",
")",
"field_names",
"=",
"list",
"(",
"map",
"(",
"_get_name",
",",
"fields",
")",
")",
"assert",
"'content'",
"not",
"in",
"field_names",
",",
"\"Unexpected content field.\"",
"return",
"dict",
"(",
"zip",
"(",
"field_names",
",",
"row",
")",
")"
] |
ed36268b7917332d16868208e1e565742a8753e1
|
test
|
to_dict_with_content
|
Convert a SQLAlchemy row that contains a 'content' field to a dict.
``decrypt_func`` will be applied to the ``content`` field of the row.
If row is None, return None.
Raises AssertionError if there is no field named 'content' in ``fields``.
|
pgcontents/db_utils.py
|
def to_dict_with_content(fields, row, decrypt_func):
"""
Convert a SQLAlchemy row that contains a 'content' field to a dict.
``decrypt_func`` will be applied to the ``content`` field of the row.
If row is None, return None.
Raises AssertionError if there is no field named 'content' in ``fields``.
"""
assert(len(fields) == len(row))
field_names = list(map(_get_name, fields))
assert 'content' in field_names, "Missing content field."
result = dict(zip(field_names, row))
result['content'] = decrypt_func(result['content'])
return result
|
def to_dict_with_content(fields, row, decrypt_func):
"""
Convert a SQLAlchemy row that contains a 'content' field to a dict.
``decrypt_func`` will be applied to the ``content`` field of the row.
If row is None, return None.
Raises AssertionError if there is no field named 'content' in ``fields``.
"""
assert(len(fields) == len(row))
field_names = list(map(_get_name, fields))
assert 'content' in field_names, "Missing content field."
result = dict(zip(field_names, row))
result['content'] = decrypt_func(result['content'])
return result
|
[
"Convert",
"a",
"SQLAlchemy",
"row",
"that",
"contains",
"a",
"content",
"field",
"to",
"a",
"dict",
"."
] |
quantopian/pgcontents
|
python
|
https://github.com/quantopian/pgcontents/blob/ed36268b7917332d16868208e1e565742a8753e1/pgcontents/db_utils.py#L84-L101
|
[
"def",
"to_dict_with_content",
"(",
"fields",
",",
"row",
",",
"decrypt_func",
")",
":",
"assert",
"(",
"len",
"(",
"fields",
")",
"==",
"len",
"(",
"row",
")",
")",
"field_names",
"=",
"list",
"(",
"map",
"(",
"_get_name",
",",
"fields",
")",
")",
"assert",
"'content'",
"in",
"field_names",
",",
"\"Missing content field.\"",
"result",
"=",
"dict",
"(",
"zip",
"(",
"field_names",
",",
"row",
")",
")",
"result",
"[",
"'content'",
"]",
"=",
"decrypt_func",
"(",
"result",
"[",
"'content'",
"]",
")",
"return",
"result"
] |
ed36268b7917332d16868208e1e565742a8753e1
|
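A standalone sketch of the zip-and-decrypt step shared by `to_dict_no_content` and `to_dict_with_content`, with plain strings standing in for SQLAlchemy column objects and a toy prefix-stripping "decryption"; the row values are illustrative.

def row_to_dict(field_names, row, decrypt_func):
    # Same idea as to_dict_with_content: pair field names with row values,
    # then decrypt only the 'content' field.
    assert len(field_names) == len(row)
    assert 'content' in field_names, "Missing content field."
    result = dict(zip(field_names, row))
    result['content'] = decrypt_func(result['content'])
    return result


fields = ['id', 'name', 'content']
row = (7, 'analysis.ipynb', b'ENC:eyJjZWxscyI6IFtdfQ==')
toy_decrypt = lambda blob: blob[len(b'ENC:'):]      # illustrative "crypto"

record = row_to_dict(fields, row, toy_decrypt)
assert record['id'] == 7
assert record['content'] == b'eyJjZWxscyI6IFtdfQ=='
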
test
|
PostgresCheckpoints.create_notebook_checkpoint
|
Create a checkpoint of the current state of a notebook
Returns a checkpoint_id for the new checkpoint.
|
pgcontents/checkpoints.py
|
def create_notebook_checkpoint(self, nb, path):
"""Create a checkpoint of the current state of a notebook
Returns a checkpoint_id for the new checkpoint.
"""
b64_content = writes_base64(nb)
with self.engine.begin() as db:
return save_remote_checkpoint(
db,
self.user_id,
path,
b64_content,
self.crypto.encrypt,
self.max_file_size_bytes,
)
|
def create_notebook_checkpoint(self, nb, path):
"""Create a checkpoint of the current state of a notebook
Returns a checkpoint_id for the new checkpoint.
"""
b64_content = writes_base64(nb)
with self.engine.begin() as db:
return save_remote_checkpoint(
db,
self.user_id,
path,
b64_content,
self.crypto.encrypt,
self.max_file_size_bytes,
)
|
[
"Create",
"a",
"checkpoint",
"of",
"the",
"current",
"state",
"of",
"a",
"notebook"
] |
quantopian/pgcontents
|
python
|
https://github.com/quantopian/pgcontents/blob/ed36268b7917332d16868208e1e565742a8753e1/pgcontents/checkpoints.py#L34-L48
|
[
"def",
"create_notebook_checkpoint",
"(",
"self",
",",
"nb",
",",
"path",
")",
":",
"b64_content",
"=",
"writes_base64",
"(",
"nb",
")",
"with",
"self",
".",
"engine",
".",
"begin",
"(",
")",
"as",
"db",
":",
"return",
"save_remote_checkpoint",
"(",
"db",
",",
"self",
".",
"user_id",
",",
"path",
",",
"b64_content",
",",
"self",
".",
"crypto",
".",
"encrypt",
",",
"self",
".",
"max_file_size_bytes",
",",
")"
] |
ed36268b7917332d16868208e1e565742a8753e1
|
test
|
PostgresCheckpoints.create_file_checkpoint
|
Create a checkpoint of the current state of a file
Returns a checkpoint_id for the new checkpoint.
|
pgcontents/checkpoints.py
|
def create_file_checkpoint(self, content, format, path):
"""Create a checkpoint of the current state of a file
Returns a checkpoint_id for the new checkpoint.
"""
try:
b64_content = to_b64(content, format)
except ValueError as e:
self.do_400(str(e))
with self.engine.begin() as db:
return save_remote_checkpoint(
db,
self.user_id,
path,
b64_content,
self.crypto.encrypt,
self.max_file_size_bytes,
)
|
def create_file_checkpoint(self, content, format, path):
"""Create a checkpoint of the current state of a file
Returns a checkpoint_id for the new checkpoint.
"""
try:
b64_content = to_b64(content, format)
except ValueError as e:
self.do_400(str(e))
with self.engine.begin() as db:
return save_remote_checkpoint(
db,
self.user_id,
path,
b64_content,
self.crypto.encrypt,
self.max_file_size_bytes,
)
|
[
"Create",
"a",
"checkpoint",
"of",
"the",
"current",
"state",
"of",
"a",
"file"
] |
quantopian/pgcontents
|
python
|
https://github.com/quantopian/pgcontents/blob/ed36268b7917332d16868208e1e565742a8753e1/pgcontents/checkpoints.py#L51-L68
|
[
"def",
"create_file_checkpoint",
"(",
"self",
",",
"content",
",",
"format",
",",
"path",
")",
":",
"try",
":",
"b64_content",
"=",
"to_b64",
"(",
"content",
",",
"format",
")",
"except",
"ValueError",
"as",
"e",
":",
"self",
".",
"do_400",
"(",
"str",
"(",
"e",
")",
")",
"with",
"self",
".",
"engine",
".",
"begin",
"(",
")",
"as",
"db",
":",
"return",
"save_remote_checkpoint",
"(",
"db",
",",
"self",
".",
"user_id",
",",
"path",
",",
"b64_content",
",",
"self",
".",
"crypto",
".",
"encrypt",
",",
"self",
".",
"max_file_size_bytes",
",",
")"
] |
ed36268b7917332d16868208e1e565742a8753e1
|
test
|
PostgresCheckpoints.delete_checkpoint
|
delete a checkpoint for a file
|
pgcontents/checkpoints.py
|
def delete_checkpoint(self, checkpoint_id, path):
"""delete a checkpoint for a file"""
with self.engine.begin() as db:
return delete_single_remote_checkpoint(
db, self.user_id, path, checkpoint_id,
)
|
def delete_checkpoint(self, checkpoint_id, path):
"""delete a checkpoint for a file"""
with self.engine.begin() as db:
return delete_single_remote_checkpoint(
db, self.user_id, path, checkpoint_id,
)
|
[
"delete",
"a",
"checkpoint",
"for",
"a",
"file"
] |
quantopian/pgcontents
|
python
|
https://github.com/quantopian/pgcontents/blob/ed36268b7917332d16868208e1e565742a8753e1/pgcontents/checkpoints.py#L71-L76
|
[
"def",
"delete_checkpoint",
"(",
"self",
",",
"checkpoint_id",
",",
"path",
")",
":",
"with",
"self",
".",
"engine",
".",
"begin",
"(",
")",
"as",
"db",
":",
"return",
"delete_single_remote_checkpoint",
"(",
"db",
",",
"self",
".",
"user_id",
",",
"path",
",",
"checkpoint_id",
",",
")"
] |
ed36268b7917332d16868208e1e565742a8753e1
|
test
|
PostgresCheckpoints.get_checkpoint_content
|
Get the content of a checkpoint.
|
pgcontents/checkpoints.py
|
def get_checkpoint_content(self, checkpoint_id, path):
"""Get the content of a checkpoint."""
with self.engine.begin() as db:
return get_remote_checkpoint(
db,
self.user_id,
path,
checkpoint_id,
self.crypto.decrypt,
)['content']
|
def get_checkpoint_content(self, checkpoint_id, path):
"""Get the content of a checkpoint."""
with self.engine.begin() as db:
return get_remote_checkpoint(
db,
self.user_id,
path,
checkpoint_id,
self.crypto.decrypt,
)['content']
|
[
"Get",
"the",
"content",
"of",
"a",
"checkpoint",
"."
] |
quantopian/pgcontents
|
python
|
https://github.com/quantopian/pgcontents/blob/ed36268b7917332d16868208e1e565742a8753e1/pgcontents/checkpoints.py#L78-L87
|
[
"def",
"get_checkpoint_content",
"(",
"self",
",",
"checkpoint_id",
",",
"path",
")",
":",
"with",
"self",
".",
"engine",
".",
"begin",
"(",
")",
"as",
"db",
":",
"return",
"get_remote_checkpoint",
"(",
"db",
",",
"self",
".",
"user_id",
",",
"path",
",",
"checkpoint_id",
",",
"self",
".",
"crypto",
".",
"decrypt",
",",
")",
"[",
"'content'",
"]"
] |
ed36268b7917332d16868208e1e565742a8753e1
|
test
|
PostgresCheckpoints.list_checkpoints
|
Return a list of checkpoints for a given file
|
pgcontents/checkpoints.py
|
def list_checkpoints(self, path):
"""Return a list of checkpoints for a given file"""
with self.engine.begin() as db:
return list_remote_checkpoints(db, self.user_id, path)
|
def list_checkpoints(self, path):
"""Return a list of checkpoints for a given file"""
with self.engine.begin() as db:
return list_remote_checkpoints(db, self.user_id, path)
|
[
"Return",
"a",
"list",
"of",
"checkpoints",
"for",
"a",
"given",
"file"
] |
quantopian/pgcontents
|
python
|
https://github.com/quantopian/pgcontents/blob/ed36268b7917332d16868208e1e565742a8753e1/pgcontents/checkpoints.py#L108-L111
|
[
"def",
"list_checkpoints",
"(",
"self",
",",
"path",
")",
":",
"with",
"self",
".",
"engine",
".",
"begin",
"(",
")",
"as",
"db",
":",
"return",
"list_remote_checkpoints",
"(",
"db",
",",
"self",
".",
"user_id",
",",
"path",
")"
] |
ed36268b7917332d16868208e1e565742a8753e1
|
test
|
PostgresCheckpoints.rename_all_checkpoints
|
Rename all checkpoints for old_path to new_path.
|
pgcontents/checkpoints.py
|
def rename_all_checkpoints(self, old_path, new_path):
"""Rename all checkpoints for old_path to new_path."""
with self.engine.begin() as db:
return move_remote_checkpoints(
db,
self.user_id,
old_path,
new_path,
)
|
def rename_all_checkpoints(self, old_path, new_path):
"""Rename all checkpoints for old_path to new_path."""
with self.engine.begin() as db:
return move_remote_checkpoints(
db,
self.user_id,
old_path,
new_path,
)
|
[
"Rename",
"all",
"checkpoints",
"for",
"old_path",
"to",
"new_path",
"."
] |
quantopian/pgcontents
|
python
|
https://github.com/quantopian/pgcontents/blob/ed36268b7917332d16868208e1e565742a8753e1/pgcontents/checkpoints.py#L114-L122
|
[
"def",
"rename_all_checkpoints",
"(",
"self",
",",
"old_path",
",",
"new_path",
")",
":",
"with",
"self",
".",
"engine",
".",
"begin",
"(",
")",
"as",
"db",
":",
"return",
"move_remote_checkpoints",
"(",
"db",
",",
"self",
".",
"user_id",
",",
"old_path",
",",
"new_path",
",",
")"
] |
ed36268b7917332d16868208e1e565742a8753e1
|
test
|
PostgresCheckpoints.delete_all_checkpoints
|
Delete all checkpoints for the given path.
|
pgcontents/checkpoints.py
|
def delete_all_checkpoints(self, path):
"""Delete all checkpoints for the given path."""
with self.engine.begin() as db:
delete_remote_checkpoints(db, self.user_id, path)
|
def delete_all_checkpoints(self, path):
"""Delete all checkpoints for the given path."""
with self.engine.begin() as db:
delete_remote_checkpoints(db, self.user_id, path)
|
[
"Delete",
"all",
"checkpoints",
"for",
"the",
"given",
"path",
"."
] |
quantopian/pgcontents
|
python
|
https://github.com/quantopian/pgcontents/blob/ed36268b7917332d16868208e1e565742a8753e1/pgcontents/checkpoints.py#L125-L128
|
[
"def",
"delete_all_checkpoints",
"(",
"self",
",",
"path",
")",
":",
"with",
"self",
".",
"engine",
".",
"begin",
"(",
")",
"as",
"db",
":",
"delete_remote_checkpoints",
"(",
"db",
",",
"self",
".",
"user_id",
",",
"path",
")"
] |
ed36268b7917332d16868208e1e565742a8753e1
|
test
|
PostgresCheckpoints.purge_db
|
Purge all database records for the current user.
|
pgcontents/checkpoints.py
|
def purge_db(self):
"""
Purge all database records for the current user.
"""
with self.engine.begin() as db:
purge_remote_checkpoints(db, self.user_id)
|
def purge_db(self):
"""
Purge all database records for the current user.
"""
with self.engine.begin() as db:
purge_remote_checkpoints(db, self.user_id)
|
[
"Purge",
"all",
"database",
"records",
"for",
"the",
"current",
"user",
"."
] |
quantopian/pgcontents
|
python
|
https://github.com/quantopian/pgcontents/blob/ed36268b7917332d16868208e1e565742a8753e1/pgcontents/checkpoints.py#L130-L135
|
[
"def",
"purge_db",
"(",
"self",
")",
":",
"with",
"self",
".",
"engine",
".",
"begin",
"(",
")",
"as",
"db",
":",
"purge_remote_checkpoints",
"(",
"db",
",",
"self",
".",
"user_id",
")"
] |
ed36268b7917332d16868208e1e565742a8753e1
|
test
|
_resolve_path
|
Resolve a path based on a dictionary of manager prefixes.
Returns a triple of (prefix, manager, manager_relative_path).
|
pgcontents/hybridmanager.py
|
def _resolve_path(path, manager_dict):
"""
Resolve a path based on a dictionary of manager prefixes.
Returns a triple of (prefix, manager, manager_relative_path).
"""
path = normalize_api_path(path)
parts = path.split('/')
# Try to find a sub-manager for the first subdirectory.
mgr = manager_dict.get(parts[0])
if mgr is not None:
return parts[0], mgr, '/'.join(parts[1:])
    # Try to use the root manager, if one was supplied.
mgr = manager_dict.get('')
if mgr is not None:
return '', mgr, path
raise HTTPError(
404,
"Couldn't resolve path [{path}] and "
"no root manager supplied!".format(path=path)
)
|
def _resolve_path(path, manager_dict):
"""
Resolve a path based on a dictionary of manager prefixes.
Returns a triple of (prefix, manager, manager_relative_path).
"""
path = normalize_api_path(path)
parts = path.split('/')
# Try to find a sub-manager for the first subdirectory.
mgr = manager_dict.get(parts[0])
if mgr is not None:
return parts[0], mgr, '/'.join(parts[1:])
    # Try to use the root manager, if one was supplied.
mgr = manager_dict.get('')
if mgr is not None:
return '', mgr, path
raise HTTPError(
404,
"Couldn't resolve path [{path}] and "
"no root manager supplied!".format(path=path)
)
|
[
"Resolve",
"a",
"path",
"based",
"on",
"a",
"dictionary",
"of",
"manager",
"prefixes",
"."
] |
quantopian/pgcontents
|
python
|
https://github.com/quantopian/pgcontents/blob/ed36268b7917332d16868208e1e565742a8753e1/pgcontents/hybridmanager.py#L18-L41
|
[
"def",
"_resolve_path",
"(",
"path",
",",
"manager_dict",
")",
":",
"path",
"=",
"normalize_api_path",
"(",
"path",
")",
"parts",
"=",
"path",
".",
"split",
"(",
"'/'",
")",
"# Try to find a sub-manager for the first subdirectory.",
"mgr",
"=",
"manager_dict",
".",
"get",
"(",
"parts",
"[",
"0",
"]",
")",
"if",
"mgr",
"is",
"not",
"None",
":",
"return",
"parts",
"[",
"0",
"]",
",",
"mgr",
",",
"'/'",
".",
"join",
"(",
"parts",
"[",
"1",
":",
"]",
")",
"# Try to find use the root manager, if one was supplied.",
"mgr",
"=",
"manager_dict",
".",
"get",
"(",
"''",
")",
"if",
"mgr",
"is",
"not",
"None",
":",
"return",
"''",
",",
"mgr",
",",
"path",
"raise",
"HTTPError",
"(",
"404",
",",
"\"Couldn't resolve path [{path}] and \"",
"\"no root manager supplied!\"",
".",
"format",
"(",
"path",
"=",
"path",
")",
")"
] |
ed36268b7917332d16868208e1e565742a8753e1
|
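A self-contained sketch of the prefix resolution above; `ValueError` stands in for tornado's `HTTPError`, normalization is reduced to stripping slashes, and the manager values are plain strings for illustration.

def resolve_path(path, managers):
    # Same lookup order as _resolve_path: try the first path segment as a
    # sub-manager prefix, then fall back to the root ('') manager.
    path = path.strip('/')
    parts = path.split('/')
    mgr = managers.get(parts[0])
    if mgr is not None:
        return parts[0], mgr, '/'.join(parts[1:])
    mgr = managers.get('')
    if mgr is not None:
        return '', mgr, path
    raise ValueError("Couldn't resolve path [%s]" % path)


managers = {'': 'root_manager', 'shared': 'postgres_manager'}
assert resolve_path('shared/proj/nb.ipynb', managers) == (
    'shared', 'postgres_manager', 'proj/nb.ipynb')
assert resolve_path('scratch.ipynb', managers) == (
    '', 'root_manager', 'scratch.ipynb')
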
test
|
_get_arg
|
Get an argument, either from kwargs or from the first entry in args.
Raises a TypeError if argname not in kwargs and len(args) == 0.
Mutates kwargs in place if the value is found in kwargs.
|
pgcontents/hybridmanager.py
|
def _get_arg(argname, args, kwargs):
"""
Get an argument, either from kwargs or from the first entry in args.
Raises a TypeError if argname not in kwargs and len(args) == 0.
Mutates kwargs in place if the value is found in kwargs.
"""
try:
return kwargs.pop(argname), args
except KeyError:
pass
try:
return args[0], args[1:]
except IndexError:
raise TypeError("No value passed for %s" % argname)
|
def _get_arg(argname, args, kwargs):
"""
Get an argument, either from kwargs or from the first entry in args.
Raises a TypeError if argname not in kwargs and len(args) == 0.
Mutates kwargs in place if the value is found in kwargs.
"""
try:
return kwargs.pop(argname), args
except KeyError:
pass
try:
return args[0], args[1:]
except IndexError:
raise TypeError("No value passed for %s" % argname)
|
[
"Get",
"an",
"argument",
"either",
"from",
"kwargs",
"or",
"from",
"the",
"first",
"entry",
"in",
"args",
".",
"Raises",
"a",
"TypeError",
"if",
"argname",
"not",
"in",
"kwargs",
"and",
"len",
"(",
"args",
")",
"==",
"0",
"."
] |
quantopian/pgcontents
|
python
|
https://github.com/quantopian/pgcontents/blob/ed36268b7917332d16868208e1e565742a8753e1/pgcontents/hybridmanager.py#L44-L58
|
[
"def",
"_get_arg",
"(",
"argname",
",",
"args",
",",
"kwargs",
")",
":",
"try",
":",
"return",
"kwargs",
".",
"pop",
"(",
"argname",
")",
",",
"args",
"except",
"KeyError",
":",
"pass",
"try",
":",
"return",
"args",
"[",
"0",
"]",
",",
"args",
"[",
"1",
":",
"]",
"except",
"IndexError",
":",
"raise",
"TypeError",
"(",
"\"No value passed for %s\"",
"%",
"argname",
")"
] |
ed36268b7917332d16868208e1e565742a8753e1
|
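A short, runnable walk through the three branches of `_get_arg`: the keyword lookup (which mutates `kwargs`), the positional fallback, and the `TypeError` when neither is supplied.

def get_arg(argname, args, kwargs):
    # Mirrors _get_arg: prefer kwargs (popping the entry), then fall back
    # to the first positional argument.
    try:
        return kwargs.pop(argname), args
    except KeyError:
        pass
    try:
        return args[0], args[1:]
    except IndexError:
        raise TypeError("No value passed for %s" % argname)


path, rest = get_arg('path', ('a/b.ipynb', 'extra'), {})
assert (path, rest) == ('a/b.ipynb', ('extra',))

kwargs = {'path': 'c.ipynb'}
path, rest = get_arg('path', (), kwargs)
assert path == 'c.ipynb' and kwargs == {}       # kwargs mutated in place

try:
    get_arg('path', (), {})
except TypeError as e:
    print(e)                                    # No value passed for path
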
test
|
_apply_prefix
|
Prefix all path entries in model with the given prefix.
|
pgcontents/hybridmanager.py
|
def _apply_prefix(prefix, model):
"""
Prefix all path entries in model with the given prefix.
"""
if not isinstance(model, dict):
raise TypeError("Expected dict for model, got %s" % type(model))
# We get unwanted leading/trailing slashes if prefix or model['path'] are
# '', both of which are legal values.
model['path'] = '/'.join((prefix, model['path'])).strip('/')
if model['type'] in ('notebook', 'file'):
return model
if model['type'] != 'directory':
raise ValueError("Unknown model type %s." % type(model))
content = model.get('content', None)
if content is not None:
for sub_model in content:
_apply_prefix(prefix, sub_model)
return model
|
def _apply_prefix(prefix, model):
"""
Prefix all path entries in model with the given prefix.
"""
if not isinstance(model, dict):
raise TypeError("Expected dict for model, got %s" % type(model))
# We get unwanted leading/trailing slashes if prefix or model['path'] are
# '', both of which are legal values.
model['path'] = '/'.join((prefix, model['path'])).strip('/')
if model['type'] in ('notebook', 'file'):
return model
if model['type'] != 'directory':
raise ValueError("Unknown model type %s." % type(model))
content = model.get('content', None)
if content is not None:
for sub_model in content:
_apply_prefix(prefix, sub_model)
return model
|
[
"Prefix",
"all",
"path",
"entries",
"in",
"model",
"with",
"the",
"given",
"prefix",
"."
] |
quantopian/pgcontents
|
python
|
https://github.com/quantopian/pgcontents/blob/ed36268b7917332d16868208e1e565742a8753e1/pgcontents/hybridmanager.py#L61-L82
|
[
"def",
"_apply_prefix",
"(",
"prefix",
",",
"model",
")",
":",
"if",
"not",
"isinstance",
"(",
"model",
",",
"dict",
")",
":",
"raise",
"TypeError",
"(",
"\"Expected dict for model, got %s\"",
"%",
"type",
"(",
"model",
")",
")",
"# We get unwanted leading/trailing slashes if prefix or model['path'] are",
"# '', both of which are legal values.",
"model",
"[",
"'path'",
"]",
"=",
"'/'",
".",
"join",
"(",
"(",
"prefix",
",",
"model",
"[",
"'path'",
"]",
")",
")",
".",
"strip",
"(",
"'/'",
")",
"if",
"model",
"[",
"'type'",
"]",
"in",
"(",
"'notebook'",
",",
"'file'",
")",
":",
"return",
"model",
"if",
"model",
"[",
"'type'",
"]",
"!=",
"'directory'",
":",
"raise",
"ValueError",
"(",
"\"Unknown model type %s.\"",
"%",
"type",
"(",
"model",
")",
")",
"content",
"=",
"model",
".",
"get",
"(",
"'content'",
",",
"None",
")",
"if",
"content",
"is",
"not",
"None",
":",
"for",
"sub_model",
"in",
"content",
":",
"_apply_prefix",
"(",
"prefix",
",",
"sub_model",
")",
"return",
"model"
] |
ed36268b7917332d16868208e1e565742a8753e1
|
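A standalone demonstration of the recursive prefixing above on a toy Jupyter-style contents model (the listing itself is made up); the only deliberate difference from the original is that the unknown-type error reports `model['type']` rather than `type(model)`.

def apply_prefix(prefix, model):
    # Same recursion as _apply_prefix: rewrite this model's path, then
    # descend into a directory's content entries when present.
    if not isinstance(model, dict):
        raise TypeError("Expected dict for model, got %s" % type(model))
    model['path'] = '/'.join((prefix, model['path'])).strip('/')
    if model['type'] in ('notebook', 'file'):
        return model
    if model['type'] != 'directory':
        raise ValueError("Unknown model type %s." % model['type'])
    content = model.get('content', None)
    if content is not None:
        for sub_model in content:
            apply_prefix(prefix, sub_model)
    return model


listing = {
    'type': 'directory', 'path': '',
    'content': [{'type': 'notebook', 'path': 'nb.ipynb'}],
}
apply_prefix('shared', listing)
assert listing['path'] == 'shared'
assert listing['content'][0]['path'] == 'shared/nb.ipynb'
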
test
|
path_dispatch1
|
Decorator for methods that accept path as a first argument.
|
pgcontents/hybridmanager.py
|
def path_dispatch1(mname, returns_model):
"""
Decorator for methods that accept path as a first argument.
"""
def _wrapper(self, *args, **kwargs):
path, args = _get_arg('path', args, kwargs)
prefix, mgr, mgr_path = _resolve_path(path, self.managers)
result = getattr(mgr, mname)(mgr_path, *args, **kwargs)
if returns_model and prefix:
return _apply_prefix(prefix, result)
else:
return result
return _wrapper
|
def path_dispatch1(mname, returns_model):
"""
Decorator for methods that accept path as a first argument.
"""
def _wrapper(self, *args, **kwargs):
path, args = _get_arg('path', args, kwargs)
prefix, mgr, mgr_path = _resolve_path(path, self.managers)
result = getattr(mgr, mname)(mgr_path, *args, **kwargs)
if returns_model and prefix:
return _apply_prefix(prefix, result)
else:
return result
return _wrapper
|
[
"Decorator",
"for",
"methods",
"that",
"accept",
"path",
"as",
"a",
"first",
"argument",
"."
] |
quantopian/pgcontents
|
python
|
https://github.com/quantopian/pgcontents/blob/ed36268b7917332d16868208e1e565742a8753e1/pgcontents/hybridmanager.py#L86-L99
|
[
"def",
"path_dispatch1",
"(",
"mname",
",",
"returns_model",
")",
":",
"def",
"_wrapper",
"(",
"self",
",",
"*",
"args",
",",
"*",
"*",
"kwargs",
")",
":",
"path",
",",
"args",
"=",
"_get_arg",
"(",
"'path'",
",",
"args",
",",
"kwargs",
")",
"prefix",
",",
"mgr",
",",
"mgr_path",
"=",
"_resolve_path",
"(",
"path",
",",
"self",
".",
"managers",
")",
"result",
"=",
"getattr",
"(",
"mgr",
",",
"mname",
")",
"(",
"mgr_path",
",",
"*",
"args",
",",
"*",
"*",
"kwargs",
")",
"if",
"returns_model",
"and",
"prefix",
":",
"return",
"_apply_prefix",
"(",
"prefix",
",",
"result",
")",
"else",
":",
"return",
"result",
"return",
"_wrapper"
] |
ed36268b7917332d16868208e1e565742a8753e1
|
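A compressed sketch of how the dispatch decorator wires a method name to whichever backend owns the path. `ToyManager`, `ToyHybrid`, and the simplified `resolve` are illustrative stand-ins; the real decorator also pulls `path` out of kwargs via `_get_arg` and re-prefixes full models with `_apply_prefix`.

def path_dispatch1(mname, returns_model):
    # Route a call to the manager owning the path; re-prefix the result.
    def _wrapper(self, path, *args, **kwargs):
        prefix, mgr, mgr_path = self.resolve(path)
        result = getattr(mgr, mname)(mgr_path, *args, **kwargs)
        if returns_model and prefix:
            result['path'] = prefix + '/' + result['path']
        return result
    return _wrapper


class ToyManager:
    # Stands in for a real ContentsManager backend.
    def get(self, path):
        return {'type': 'file', 'path': path}


class ToyHybrid:
    def __init__(self, managers):
        self.managers = managers

    def resolve(self, path):
        # Simplified _resolve_path: first segment, else the root manager.
        first, _, rest = path.partition('/')
        if first in self.managers and rest:
            return first, self.managers[first], rest
        return '', self.managers[''], path

    get = path_dispatch1('get', returns_model=True)


hybrid = ToyHybrid({'': ToyManager(), 'shared': ToyManager()})
assert hybrid.get('shared/report.ipynb')['path'] == 'shared/report.ipynb'
assert hybrid.get('local.ipynb')['path'] == 'local.ipynb'
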
test
|
path_dispatch_kwarg
|
Parameterized decorator for methods that accept path as a second
argument.
|
pgcontents/hybridmanager.py
|
def path_dispatch_kwarg(mname, path_default, returns_model):
"""
Parameterized decorator for methods that accept path as a second
argument.
"""
def _wrapper(self, path=path_default, **kwargs):
prefix, mgr, mgr_path = _resolve_path(path, self.managers)
result = getattr(mgr, mname)(path=mgr_path, **kwargs)
if returns_model and prefix:
return _apply_prefix(prefix, result)
else:
return result
return _wrapper
|
def path_dispatch_kwarg(mname, path_default, returns_model):
"""
Parameterized decorator for methods that accept path as a second
argument.
"""
def _wrapper(self, path=path_default, **kwargs):
prefix, mgr, mgr_path = _resolve_path(path, self.managers)
result = getattr(mgr, mname)(path=mgr_path, **kwargs)
if returns_model and prefix:
return _apply_prefix(prefix, result)
else:
return result
return _wrapper
|
[
"Parameterized",
"decorator",
"for",
"methods",
"that",
"accept",
"path",
"as",
"a",
"second",
"argument",
"."
] |
quantopian/pgcontents
|
python
|
https://github.com/quantopian/pgcontents/blob/ed36268b7917332d16868208e1e565742a8753e1/pgcontents/hybridmanager.py#L118-L130
|
[
"def",
"path_dispatch_kwarg",
"(",
"mname",
",",
"path_default",
",",
"returns_model",
")",
":",
"def",
"_wrapper",
"(",
"self",
",",
"path",
"=",
"path_default",
",",
"*",
"*",
"kwargs",
")",
":",
"prefix",
",",
"mgr",
",",
"mgr_path",
"=",
"_resolve_path",
"(",
"path",
",",
"self",
".",
"managers",
")",
"result",
"=",
"getattr",
"(",
"mgr",
",",
"mname",
")",
"(",
"path",
"=",
"mgr_path",
",",
"*",
"*",
"kwargs",
")",
"if",
"returns_model",
"and",
"prefix",
":",
"return",
"_apply_prefix",
"(",
"prefix",
",",
"result",
")",
"else",
":",
"return",
"result",
"return",
"_wrapper"
] |
ed36268b7917332d16868208e1e565742a8753e1
|
test
|
path_dispatch_old_new
|
Decorator for methods accepting old_path and new_path.
|
pgcontents/hybridmanager.py
|
def path_dispatch_old_new(mname, returns_model):
"""
Decorator for methods accepting old_path and new_path.
"""
def _wrapper(self, old_path, new_path, *args, **kwargs):
old_prefix, old_mgr, old_mgr_path = _resolve_path(
old_path, self.managers
)
new_prefix, new_mgr, new_mgr_path = _resolve_path(
new_path, self.managers,
)
if old_mgr is not new_mgr:
# TODO: Consider supporting this via get+delete+save.
raise HTTPError(
400,
"Can't move files between backends ({old} -> {new})".format(
old=old_path,
new=new_path,
)
)
assert new_prefix == old_prefix
result = getattr(new_mgr, mname)(
old_mgr_path,
new_mgr_path,
*args,
**kwargs
)
if returns_model and new_prefix:
return _apply_prefix(new_prefix, result)
else:
return result
return _wrapper
|
def path_dispatch_old_new(mname, returns_model):
"""
Decorator for methods accepting old_path and new_path.
"""
def _wrapper(self, old_path, new_path, *args, **kwargs):
old_prefix, old_mgr, old_mgr_path = _resolve_path(
old_path, self.managers
)
new_prefix, new_mgr, new_mgr_path = _resolve_path(
new_path, self.managers,
)
if old_mgr is not new_mgr:
# TODO: Consider supporting this via get+delete+save.
raise HTTPError(
400,
"Can't move files between backends ({old} -> {new})".format(
old=old_path,
new=new_path,
)
)
assert new_prefix == old_prefix
result = getattr(new_mgr, mname)(
old_mgr_path,
new_mgr_path,
*args,
**kwargs
)
if returns_model and new_prefix:
return _apply_prefix(new_prefix, result)
else:
return result
return _wrapper
|
[
"Decorator",
"for",
"methods",
"accepting",
"old_path",
"and",
"new_path",
"."
] |
quantopian/pgcontents
|
python
|
https://github.com/quantopian/pgcontents/blob/ed36268b7917332d16868208e1e565742a8753e1/pgcontents/hybridmanager.py#L133-L164
|
[
"def",
"path_dispatch_old_new",
"(",
"mname",
",",
"returns_model",
")",
":",
"def",
"_wrapper",
"(",
"self",
",",
"old_path",
",",
"new_path",
",",
"*",
"args",
",",
"*",
"*",
"kwargs",
")",
":",
"old_prefix",
",",
"old_mgr",
",",
"old_mgr_path",
"=",
"_resolve_path",
"(",
"old_path",
",",
"self",
".",
"managers",
")",
"new_prefix",
",",
"new_mgr",
",",
"new_mgr_path",
"=",
"_resolve_path",
"(",
"new_path",
",",
"self",
".",
"managers",
",",
")",
"if",
"old_mgr",
"is",
"not",
"new_mgr",
":",
"# TODO: Consider supporting this via get+delete+save.",
"raise",
"HTTPError",
"(",
"400",
",",
"\"Can't move files between backends ({old} -> {new})\"",
".",
"format",
"(",
"old",
"=",
"old_path",
",",
"new",
"=",
"new_path",
",",
")",
")",
"assert",
"new_prefix",
"==",
"old_prefix",
"result",
"=",
"getattr",
"(",
"new_mgr",
",",
"mname",
")",
"(",
"old_mgr_path",
",",
"new_mgr_path",
",",
"*",
"args",
",",
"*",
"*",
"kwargs",
")",
"if",
"returns_model",
"and",
"new_prefix",
":",
"return",
"_apply_prefix",
"(",
"new_prefix",
",",
"result",
")",
"else",
":",
"return",
"result",
"return",
"_wrapper"
] |
ed36268b7917332d16868208e1e565742a8753e1
|
test
|
HybridContentsManager._managers_changed
|
Strip slashes from directories before updating.
|
pgcontents/hybridmanager.py
|
def _managers_changed(self, name, old, new):
"""
Strip slashes from directories before updating.
"""
for key in new:
if '/' in key:
raise ValueError(
"Expected directory names w/o slashes. Got [%s]" % key
)
self.managers = {k.strip('/'): v for k, v in new.items()}
|
def _managers_changed(self, name, old, new):
"""
Strip slashes from directories before updating.
"""
for key in new:
if '/' in key:
raise ValueError(
"Expected directory names w/o slashes. Got [%s]" % key
)
self.managers = {k.strip('/'): v for k, v in new.items()}
|
[
"Strip",
"slashes",
"from",
"directories",
"before",
"updating",
"."
] |
quantopian/pgcontents
|
python
|
https://github.com/quantopian/pgcontents/blob/ed36268b7917332d16868208e1e565742a8753e1/pgcontents/hybridmanager.py#L195-L204
|
[
"def",
"_managers_changed",
"(",
"self",
",",
"name",
",",
"old",
",",
"new",
")",
":",
"for",
"key",
"in",
"new",
":",
"if",
"'/'",
"in",
"key",
":",
"raise",
"ValueError",
"(",
"\"Expected directory names w/o slashes. Got [%s]\"",
"%",
"key",
")",
"self",
".",
"managers",
"=",
"{",
"k",
".",
"strip",
"(",
"'/'",
")",
":",
"v",
"for",
"k",
",",
"v",
"in",
"new",
".",
"items",
"(",
")",
"}"
] |
ed36268b7917332d16868208e1e565742a8753e1
|
test
|
HybridContentsManager.get
|
Special case handling for listing root dir.
|
pgcontents/hybridmanager.py
|
def get(self, path, content=True, type=None, format=None):
"""
Special case handling for listing root dir.
"""
path = normalize_api_path(path)
if path:
return self.__get(path, content=content, type=type, format=format)
if not content:
return base_directory_model('')
extra_content = self._extra_root_dirs()
rm = self.root_manager
if rm is None:
root_model = base_directory_model('')
root_model.update(
format='json',
content=extra_content,
)
else:
root_model = rm.get(
path,
content=content,
type=type,
format=format,
)
# Append the extra directories.
root_model['content'].extend(extra_content)
return root_model
|
def get(self, path, content=True, type=None, format=None):
"""
Special case handling for listing root dir.
"""
path = normalize_api_path(path)
if path:
return self.__get(path, content=content, type=type, format=format)
if not content:
return base_directory_model('')
extra_content = self._extra_root_dirs()
rm = self.root_manager
if rm is None:
root_model = base_directory_model('')
root_model.update(
format='json',
content=extra_content,
)
else:
root_model = rm.get(
path,
content=content,
type=type,
format=format,
)
# Append the extra directories.
root_model['content'].extend(extra_content)
return root_model
|
[
"Special",
"case",
"handling",
"for",
"listing",
"root",
"dir",
"."
] |
quantopian/pgcontents
|
python
|
https://github.com/quantopian/pgcontents/blob/ed36268b7917332d16868208e1e565742a8753e1/pgcontents/hybridmanager.py#L229-L256
|
[
"def",
"get",
"(",
"self",
",",
"path",
",",
"content",
"=",
"True",
",",
"type",
"=",
"None",
",",
"format",
"=",
"None",
")",
":",
"path",
"=",
"normalize_api_path",
"(",
"path",
")",
"if",
"path",
":",
"return",
"self",
".",
"__get",
"(",
"path",
",",
"content",
"=",
"content",
",",
"type",
"=",
"type",
",",
"format",
"=",
"format",
")",
"if",
"not",
"content",
":",
"return",
"base_directory_model",
"(",
"''",
")",
"extra_content",
"=",
"self",
".",
"_extra_root_dirs",
"(",
")",
"rm",
"=",
"self",
".",
"root_manager",
"if",
"rm",
"is",
"None",
":",
"root_model",
"=",
"base_directory_model",
"(",
"''",
")",
"root_model",
".",
"update",
"(",
"format",
"=",
"'json'",
",",
"content",
"=",
"extra_content",
",",
")",
"else",
":",
"root_model",
"=",
"rm",
".",
"get",
"(",
"path",
",",
"content",
"=",
"content",
",",
"type",
"=",
"type",
",",
"format",
"=",
"format",
",",
")",
"# Append the extra directories.",
"root_model",
"[",
"'content'",
"]",
".",
"extend",
"(",
"extra_content",
")",
"return",
"root_model"
] |
ed36268b7917332d16868208e1e565742a8753e1
|
test
|
HybridContentsManager.delete
|
Ensure that roots of our managers can't be deleted. This should be
enforced by https://github.com/ipython/ipython/pull/8168, but rogue
implementations might override this behavior.
|
pgcontents/hybridmanager.py
|
def delete(self, path):
"""
Ensure that roots of our managers can't be deleted. This should be
enforced by https://github.com/ipython/ipython/pull/8168, but rogue
implementations might override this behavior.
"""
path = normalize_api_path(path)
if path in self.managers:
raise HTTPError(
400, "Can't delete root of %s" % self.managers[path]
)
return self.__delete(path)
|
def delete(self, path):
"""
Ensure that roots of our managers can't be deleted. This should be
enforced by https://github.com/ipython/ipython/pull/8168, but rogue
implementations might override this behavior.
"""
path = normalize_api_path(path)
if path in self.managers:
raise HTTPError(
400, "Can't delete root of %s" % self.managers[path]
)
return self.__delete(path)
|
[
"Ensure",
"that",
"roots",
"of",
"our",
"managers",
"can",
"t",
"be",
"deleted",
".",
"This",
"should",
"be",
"enforced",
"by",
"https",
":",
"//",
"github",
".",
"com",
"/",
"ipython",
"/",
"ipython",
"/",
"pull",
"/",
"8168",
"but",
"rogue",
"implementations",
"might",
"override",
"this",
"behavior",
"."
] |
quantopian/pgcontents
|
python
|
https://github.com/quantopian/pgcontents/blob/ed36268b7917332d16868208e1e565742a8753e1/pgcontents/hybridmanager.py#L259-L270
|
[
"def",
"delete",
"(",
"self",
",",
"path",
")",
":",
"path",
"=",
"normalize_api_path",
"(",
"path",
")",
"if",
"path",
"in",
"self",
".",
"managers",
":",
"raise",
"HTTPError",
"(",
"400",
",",
"\"Can't delete root of %s\"",
"%",
"self",
".",
"managers",
"[",
"path",
"]",
")",
"return",
"self",
".",
"__delete",
"(",
"path",
")"
] |
ed36268b7917332d16868208e1e565742a8753e1
|
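A standalone sketch of the root-protection guard in delete above: paths naming the root of a configured sub-manager are rejected before any deletion is attempted. PermissionError stands in for the HTTPError(400) raised by the original, and a simple strip('/') stands in for normalize_api_path.

def _check_delete(path, managers):
    # Reject deletion of any configured manager root.
    path = path.strip('/')
    if path in managers:
        raise PermissionError("Can't delete root of %s" % managers[path])
    return path

managers = {'': 'root-manager', 'shared': 'shared-manager'}
print(_check_delete('shared/old.ipynb', managers))  # 'shared/old.ipynb'
# _check_delete('shared', managers) raises, mirroring the 400 response.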
test
|
normalize_api_path
|
Resolve paths with '..' to normalized paths, raising an error if the final
result is outside root.
|
pgcontents/api_utils.py
|
def normalize_api_path(api_path):
"""
Resolve paths with '..' to normalized paths, raising an error if the final
result is outside root.
"""
normalized = posixpath.normpath(api_path.strip('/'))
if normalized == '.':
normalized = ''
elif normalized.startswith('..'):
raise PathOutsideRoot(normalized)
return normalized
|
def normalize_api_path(api_path):
"""
Resolve paths with '..' to normalized paths, raising an error if the final
result is outside root.
"""
normalized = posixpath.normpath(api_path.strip('/'))
if normalized == '.':
normalized = ''
elif normalized.startswith('..'):
raise PathOutsideRoot(normalized)
return normalized
|
[
"Resolve",
"paths",
"with",
"..",
"to",
"normalized",
"paths",
"raising",
"an",
"error",
"if",
"the",
"final",
"result",
"is",
"outside",
"root",
"."
] |
quantopian/pgcontents
|
python
|
https://github.com/quantopian/pgcontents/blob/ed36268b7917332d16868208e1e565742a8753e1/pgcontents/api_utils.py#L55-L65
|
[
"def",
"normalize_api_path",
"(",
"api_path",
")",
":",
"normalized",
"=",
"posixpath",
".",
"normpath",
"(",
"api_path",
".",
"strip",
"(",
"'/'",
")",
")",
"if",
"normalized",
"==",
"'.'",
":",
"normalized",
"=",
"''",
"elif",
"normalized",
".",
"startswith",
"(",
"'..'",
")",
":",
"raise",
"PathOutsideRoot",
"(",
"normalized",
")",
"return",
"normalized"
] |
ed36268b7917332d16868208e1e565742a8753e1
|
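A standalone sketch of normalize_api_path's behaviour using the same posixpath-based logic; ValueError stands in for PathOutsideRoot here.

import posixpath

def _normalize(api_path):
    # Strip surrounding slashes, collapse '..' segments, and reject paths
    # that escape the root.
    normalized = posixpath.normpath(api_path.strip('/'))
    if normalized == '.':
        normalized = ''
    elif normalized.startswith('..'):
        raise ValueError("path outside root: %s" % normalized)
    return normalized

print(_normalize('foo/bar/../baz/'))  # -> 'foo/baz'
print(_normalize('/'))                # -> ''
# _normalize('../escape') raises, as the original does with PathOutsideRoot.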
test
|
split_api_filepath
|
Split an API file path into directory and name.
|
pgcontents/api_utils.py
|
def split_api_filepath(path):
"""
Split an API file path into directory and name.
"""
parts = path.rsplit('/', 1)
if len(parts) == 1:
name = parts[0]
dirname = '/'
else:
name = parts[1]
dirname = parts[0] + '/'
return from_api_dirname(dirname), name
|
def split_api_filepath(path):
"""
Split an API file path into directory and name.
"""
parts = path.rsplit('/', 1)
if len(parts) == 1:
name = parts[0]
dirname = '/'
else:
name = parts[1]
dirname = parts[0] + '/'
return from_api_dirname(dirname), name
|
[
"Split",
"an",
"API",
"file",
"path",
"into",
"directory",
"and",
"name",
"."
] |
quantopian/pgcontents
|
python
|
https://github.com/quantopian/pgcontents/blob/ed36268b7917332d16868208e1e565742a8753e1/pgcontents/api_utils.py#L94-L106
|
[
"def",
"split_api_filepath",
"(",
"path",
")",
":",
"parts",
"=",
"path",
".",
"rsplit",
"(",
"'/'",
",",
"1",
")",
"if",
"len",
"(",
"parts",
")",
"==",
"1",
":",
"name",
"=",
"parts",
"[",
"0",
"]",
"dirname",
"=",
"'/'",
"else",
":",
"name",
"=",
"parts",
"[",
"1",
"]",
"dirname",
"=",
"parts",
"[",
"0",
"]",
"+",
"'/'",
"return",
"from_api_dirname",
"(",
"dirname",
")",
",",
"name"
] |
ed36268b7917332d16868208e1e565742a8753e1
|
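A sketch of the directory/name split performed by split_api_filepath; the real function additionally passes the dirname through from_api_dirname, which lives elsewhere in api_utils and is omitted here.

def _split(path):
    # rsplit once on '/', defaulting to the root directory when the path
    # contains no separator.
    parts = path.rsplit('/', 1)
    if len(parts) == 1:
        return '/', parts[0]
    return parts[0] + '/', parts[1]

print(_split('notebooks/demo.ipynb'))  # -> ('notebooks/', 'demo.ipynb')
print(_split('demo.ipynb'))            # -> ('/', 'demo.ipynb')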
test
|
writes_base64
|
Write a notebook as base64.
|
pgcontents/api_utils.py
|
def writes_base64(nb, version=NBFORMAT_VERSION):
"""
Write a notebook as base64.
"""
return b64encode(writes(nb, version=version).encode('utf-8'))
|
def writes_base64(nb, version=NBFORMAT_VERSION):
"""
Write a notebook as base64.
"""
return b64encode(writes(nb, version=version).encode('utf-8'))
|
[
"Write",
"a",
"notebook",
"as",
"base64",
"."
] |
quantopian/pgcontents
|
python
|
https://github.com/quantopian/pgcontents/blob/ed36268b7917332d16868208e1e565742a8753e1/pgcontents/api_utils.py#L109-L113
|
[
"def",
"writes_base64",
"(",
"nb",
",",
"version",
"=",
"NBFORMAT_VERSION",
")",
":",
"return",
"b64encode",
"(",
"writes",
"(",
"nb",
",",
"version",
"=",
"version",
")",
".",
"encode",
"(",
"'utf-8'",
")",
")"
] |
ed36268b7917332d16868208e1e565742a8753e1
|
test
|
reads_base64
|
Read a notebook from base64.
|
pgcontents/api_utils.py
|
def reads_base64(nb, as_version=NBFORMAT_VERSION):
"""
Read a notebook from base64.
"""
try:
return reads(b64decode(nb).decode('utf-8'), as_version=as_version)
except Exception as e:
raise CorruptedFile(e)
|
def reads_base64(nb, as_version=NBFORMAT_VERSION):
"""
Read a notebook from base64.
"""
try:
return reads(b64decode(nb).decode('utf-8'), as_version=as_version)
except Exception as e:
raise CorruptedFile(e)
|
[
"Read",
"a",
"notebook",
"from",
"base64",
"."
] |
quantopian/pgcontents
|
python
|
https://github.com/quantopian/pgcontents/blob/ed36268b7917332d16868208e1e565742a8753e1/pgcontents/api_utils.py#L116-L123
|
[
"def",
"reads_base64",
"(",
"nb",
",",
"as_version",
"=",
"NBFORMAT_VERSION",
")",
":",
"try",
":",
"return",
"reads",
"(",
"b64decode",
"(",
"nb",
")",
".",
"decode",
"(",
"'utf-8'",
")",
",",
"as_version",
"=",
"as_version",
")",
"except",
"Exception",
"as",
"e",
":",
"raise",
"CorruptedFile",
"(",
"e",
")"
] |
ed36268b7917332d16868208e1e565742a8753e1
|
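A hedged round-trip sketch for writes_base64 and reads_base64 above, assuming nbformat is installed and taking NBFORMAT_VERSION to be 4; nbformat's reads/writes are used directly rather than importing from pgcontents.

from base64 import b64decode, b64encode

from nbformat import reads, writes
from nbformat.v4 import new_notebook

nb = new_notebook()
# Equivalent of writes_base64(nb): serialize to JSON, then base64-encode.
encoded = b64encode(writes(nb, version=4).encode('utf-8'))
# Equivalent of reads_base64(encoded): base64-decode, then parse.
decoded = reads(b64decode(encoded).decode('utf-8'), as_version=4)
print(decoded.nbformat, len(decoded.cells))  # 4 0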
test
|
_decode_unknown_from_base64
|
Decode base64 data of unknown format.
Attempts to interpret data as utf-8, falling back to ascii on failure.
|
pgcontents/api_utils.py
|
def _decode_unknown_from_base64(path, bcontent):
"""
Decode base64 data of unknown format.
Attempts to interpret data as utf-8, falling back to ascii on failure.
"""
content = b64decode(bcontent)
try:
return (content.decode('utf-8'), 'text')
except UnicodeError:
pass
return bcontent.decode('ascii'), 'base64'
|
def _decode_unknown_from_base64(path, bcontent):
"""
Decode base64 data of unknown format.
Attempts to interpret data as utf-8, falling back to ascii on failure.
"""
content = b64decode(bcontent)
try:
return (content.decode('utf-8'), 'text')
except UnicodeError:
pass
return bcontent.decode('ascii'), 'base64'
|
[
"Decode",
"base64",
"data",
"of",
"unknown",
"format",
"."
] |
quantopian/pgcontents
|
python
|
https://github.com/quantopian/pgcontents/blob/ed36268b7917332d16868208e1e565742a8753e1/pgcontents/api_utils.py#L137-L148
|
[
"def",
"_decode_unknown_from_base64",
"(",
"path",
",",
"bcontent",
")",
":",
"content",
"=",
"b64decode",
"(",
"bcontent",
")",
"try",
":",
"return",
"(",
"content",
".",
"decode",
"(",
"'utf-8'",
")",
",",
"'text'",
")",
"except",
"UnicodeError",
":",
"pass",
"return",
"bcontent",
".",
"decode",
"(",
"'ascii'",
")",
",",
"'base64'"
] |
ed36268b7917332d16868208e1e565742a8753e1
|
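A standalone sketch of the utf-8-then-base64 fallback in _decode_unknown_from_base64; the path argument is unused by the original as well, so it is dropped here.

from base64 import b64decode, b64encode

def _decode_unknown(bcontent):
    content = b64decode(bcontent)
    try:
        return content.decode('utf-8'), 'text'
    except UnicodeError:
        # Not valid utf-8: hand the base64 text back unchanged.
        return bcontent.decode('ascii'), 'base64'

print(_decode_unknown(b64encode('héllo'.encode('utf-8'))))  # ('héllo', 'text')
print(_decode_unknown(b64encode(b'\xff\xfe\x00')))          # ('//4A', 'base64')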
test
|
from_b64
|
Decode base64 content for a file.
format:
If 'text', the contents will be decoded as UTF-8.
If 'base64', do nothing.
If not specified, try to decode as UTF-8, and fall back to base64
Returns a triple of decoded_content, format, and mimetype.
|
pgcontents/api_utils.py
|
def from_b64(path, bcontent, format):
"""
Decode base64 content for a file.
format:
If 'text', the contents will be decoded as UTF-8.
If 'base64', do nothing.
If not specified, try to decode as UTF-8, and fall back to base64
Returns a triple of decoded_content, format, and mimetype.
"""
decoders = {
'base64': lambda path, bcontent: (bcontent.decode('ascii'), 'base64'),
'text': _decode_text_from_base64,
None: _decode_unknown_from_base64,
}
try:
content, real_format = decoders[format](path, bcontent)
except HTTPError:
# Pass through HTTPErrors, since we intend for them to bubble all the
# way back to the API layer.
raise
except Exception as e:
# Anything else should be wrapped in a CorruptedFile, since it likely
# indicates misconfiguration of encryption.
raise CorruptedFile(e)
default_mimes = {
'text': 'text/plain',
'base64': 'application/octet-stream',
}
mimetype = mimetypes.guess_type(path)[0] or default_mimes[real_format]
return content, real_format, mimetype
|
def from_b64(path, bcontent, format):
"""
Decode base64 content for a file.
format:
If 'text', the contents will be decoded as UTF-8.
If 'base64', do nothing.
If not specified, try to decode as UTF-8, and fall back to base64
Returns a triple of decoded_content, format, and mimetype.
"""
decoders = {
'base64': lambda path, bcontent: (bcontent.decode('ascii'), 'base64'),
'text': _decode_text_from_base64,
None: _decode_unknown_from_base64,
}
try:
content, real_format = decoders[format](path, bcontent)
except HTTPError:
# Pass through HTTPErrors, since we intend for them to bubble all the
# way back to the API layer.
raise
except Exception as e:
# Anything else should be wrapped in a CorruptedFile, since it likely
# indicates misconfiguration of encryption.
raise CorruptedFile(e)
default_mimes = {
'text': 'text/plain',
'base64': 'application/octet-stream',
}
mimetype = mimetypes.guess_type(path)[0] or default_mimes[real_format]
return content, real_format, mimetype
|
[
"Decode",
"base64",
"content",
"for",
"a",
"file",
"."
] |
quantopian/pgcontents
|
python
|
https://github.com/quantopian/pgcontents/blob/ed36268b7917332d16868208e1e565742a8753e1/pgcontents/api_utils.py#L151-L185
|
[
"def",
"from_b64",
"(",
"path",
",",
"bcontent",
",",
"format",
")",
":",
"decoders",
"=",
"{",
"'base64'",
":",
"lambda",
"path",
",",
"bcontent",
":",
"(",
"bcontent",
".",
"decode",
"(",
"'ascii'",
")",
",",
"'base64'",
")",
",",
"'text'",
":",
"_decode_text_from_base64",
",",
"None",
":",
"_decode_unknown_from_base64",
",",
"}",
"try",
":",
"content",
",",
"real_format",
"=",
"decoders",
"[",
"format",
"]",
"(",
"path",
",",
"bcontent",
")",
"except",
"HTTPError",
":",
"# Pass through HTTPErrors, since we intend for them to bubble all the",
"# way back to the API layer.",
"raise",
"except",
"Exception",
"as",
"e",
":",
"# Anything else should be wrapped in a CorruptedFile, since it likely",
"# indicates misconfiguration of encryption.",
"raise",
"CorruptedFile",
"(",
"e",
")",
"default_mimes",
"=",
"{",
"'text'",
":",
"'text/plain'",
",",
"'base64'",
":",
"'application/octet-stream'",
",",
"}",
"mimetype",
"=",
"mimetypes",
".",
"guess_type",
"(",
"path",
")",
"[",
"0",
"]",
"or",
"default_mimes",
"[",
"real_format",
"]",
"return",
"content",
",",
"real_format",
",",
"mimetype"
] |
ed36268b7917332d16868208e1e565742a8753e1
|
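A hedged usage sketch for from_b64, assuming the pgcontents package is importable; the expected results are inferred from the decoder table and mimetypes fallback shown above, and the file names are placeholders.

from base64 import b64encode

from pgcontents.api_utils import from_b64

payload = b64encode(b'hello world')
print(from_b64('notes.txt', payload, None))
# expected: ('hello world', 'text', 'text/plain')
print(from_b64('blob.bin', payload, 'base64'))
# expected: (payload.decode('ascii'), 'base64', 'application/octet-stream')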
test
|
prefix_dirs
|
Return an iterable of all prefix directories of path, descending from root.
|
pgcontents/api_utils.py
|
def prefix_dirs(path):
"""
Return an iterable of all prefix directories of path, descending from root.
"""
_dirname = posixpath.dirname
path = path.strip('/')
out = []
while path != '':
path = _dirname(path)
out.append(path)
return reversed(out)
|
def prefix_dirs(path):
"""
Return an iterable of all prefix directories of path, descending from root.
"""
_dirname = posixpath.dirname
path = path.strip('/')
out = []
while path != '':
path = _dirname(path)
out.append(path)
return reversed(out)
|
[
"Return",
"an",
"iterable",
"of",
"all",
"prefix",
"directories",
"of",
"path",
"descending",
"from",
"root",
"."
] |
quantopian/pgcontents
|
python
|
https://github.com/quantopian/pgcontents/blob/ed36268b7917332d16868208e1e565742a8753e1/pgcontents/api_utils.py#L204-L214
|
[
"def",
"prefix_dirs",
"(",
"path",
")",
":",
"_dirname",
"=",
"posixpath",
".",
"dirname",
"path",
"=",
"path",
".",
"strip",
"(",
"'/'",
")",
"out",
"=",
"[",
"]",
"while",
"path",
"!=",
"''",
":",
"path",
"=",
"_dirname",
"(",
"path",
")",
"out",
".",
"append",
"(",
"path",
")",
"return",
"reversed",
"(",
"out",
")"
] |
ed36268b7917332d16868208e1e565742a8753e1
|
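A standalone sketch of prefix_dirs: walk upward with posixpath.dirname and return the prefixes root-first.

import posixpath

def _prefix_dirs(path):
    path = path.strip('/')
    out = []
    while path != '':
        path = posixpath.dirname(path)
        out.append(path)
    return list(reversed(out))

print(_prefix_dirs('a/b/c.ipynb'))  # -> ['', 'a', 'a/b']
print(_prefix_dirs(''))             # -> []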
test
|
outside_root_to_404
|
Decorator for converting PathOutsideRoot errors to 404s.
|
pgcontents/api_utils.py
|
def outside_root_to_404(fn):
"""
Decorator for converting PathOutsideRoot errors to 404s.
"""
@wraps(fn)
def wrapped(*args, **kwargs):
try:
return fn(*args, **kwargs)
except PathOutsideRoot as e:
raise HTTPError(404, "Path outside root: [%s]" % e.args[0])
return wrapped
|
def outside_root_to_404(fn):
"""
Decorator for converting PathOutsideRoot errors to 404s.
"""
@wraps(fn)
def wrapped(*args, **kwargs):
try:
return fn(*args, **kwargs)
except PathOutsideRoot as e:
raise HTTPError(404, "Path outside root: [%s]" % e.args[0])
return wrapped
|
[
"Decorator",
"for",
"converting",
"PathOutsideRoot",
"errors",
"to",
"404s",
"."
] |
quantopian/pgcontents
|
python
|
https://github.com/quantopian/pgcontents/blob/ed36268b7917332d16868208e1e565742a8753e1/pgcontents/api_utils.py#L217-L227
|
[
"def",
"outside_root_to_404",
"(",
"fn",
")",
":",
"@",
"wraps",
"(",
"fn",
")",
"def",
"wrapped",
"(",
"*",
"args",
",",
"*",
"*",
"kwargs",
")",
":",
"try",
":",
"return",
"fn",
"(",
"*",
"args",
",",
"*",
"*",
"kwargs",
")",
"except",
"PathOutsideRoot",
"as",
"e",
":",
"raise",
"HTTPError",
"(",
"404",
",",
"\"Path outside root: [%s]\"",
"%",
"e",
".",
"args",
"[",
"0",
"]",
")",
"return",
"wrapped"
] |
ed36268b7917332d16868208e1e565742a8753e1
|
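A hedged usage sketch for outside_root_to_404, assuming pgcontents (and its tornado dependency) is importable; resolve is a hypothetical caller used only for illustration.

from pgcontents.api_utils import normalize_api_path, outside_root_to_404

@outside_root_to_404
def resolve(api_path):
    # Any PathOutsideRoot raised below is converted to an HTTPError(404).
    return normalize_api_path(api_path)

print(resolve('notebooks/demo.ipynb'))  # 'notebooks/demo.ipynb'
# resolve('../escape') raises HTTPError(404) instead of PathOutsideRoot.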
test
|
create_user
|
Create a user.
|
pgcontents/utils/sync.py
|
def create_user(db_url, user):
"""
Create a user.
"""
PostgresCheckpoints(
db_url=db_url,
user_id=user,
create_user_on_startup=True,
)
|
def create_user(db_url, user):
"""
Create a user.
"""
PostgresCheckpoints(
db_url=db_url,
user_id=user,
create_user_on_startup=True,
)
|
[
"Create",
"a",
"user",
"."
] |
quantopian/pgcontents
|
python
|
https://github.com/quantopian/pgcontents/blob/ed36268b7917332d16868208e1e565742a8753e1/pgcontents/utils/sync.py#L17-L25
|
[
"def",
"create_user",
"(",
"db_url",
",",
"user",
")",
":",
"PostgresCheckpoints",
"(",
"db_url",
"=",
"db_url",
",",
"user_id",
"=",
"user",
",",
"create_user_on_startup",
"=",
"True",
",",
")"
] |
ed36268b7917332d16868208e1e565742a8753e1
|
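A hedged usage sketch for create_user; the connection URL and user name are placeholders, and a reachable PostgreSQL database already initialized for pgcontents is assumed.

from pgcontents.utils.sync import create_user

create_user('postgresql://localhost:5432/pgcontents', 'alice')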
test
|
_separate_dirs_files
|
Split an iterable of models into a list of file paths and a list of
directory paths.
|
pgcontents/utils/sync.py
|
def _separate_dirs_files(models):
"""
Split an iterable of models into a list of file paths and a list of
directory paths.
"""
dirs = []
files = []
for model in models:
if model['type'] == 'directory':
dirs.append(model['path'])
else:
files.append(model['path'])
return dirs, files
|
def _separate_dirs_files(models):
"""
Split an iterable of models into a list of file paths and a list of
directory paths.
"""
dirs = []
files = []
for model in models:
if model['type'] == 'directory':
dirs.append(model['path'])
else:
files.append(model['path'])
return dirs, files
|
[
"Split",
"an",
"iterable",
"of",
"models",
"into",
"a",
"list",
"of",
"file",
"paths",
"and",
"a",
"list",
"of",
"directory",
"paths",
"."
] |
quantopian/pgcontents
|
python
|
https://github.com/quantopian/pgcontents/blob/ed36268b7917332d16868208e1e565742a8753e1/pgcontents/utils/sync.py#L28-L40
|
[
"def",
"_separate_dirs_files",
"(",
"models",
")",
":",
"dirs",
"=",
"[",
"]",
"files",
"=",
"[",
"]",
"for",
"model",
"in",
"models",
":",
"if",
"model",
"[",
"'type'",
"]",
"==",
"'directory'",
":",
"dirs",
".",
"append",
"(",
"model",
"[",
"'path'",
"]",
")",
"else",
":",
"files",
".",
"append",
"(",
"model",
"[",
"'path'",
"]",
")",
"return",
"dirs",
",",
"files"
] |
ed36268b7917332d16868208e1e565742a8753e1
|
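A standalone sketch of _separate_dirs_files with a couple of toy content models of the kind the Jupyter contents API produces.

def _separate(models):
    dirs, files = [], []
    for model in models:
        (dirs if model['type'] == 'directory' else files).append(model['path'])
    return dirs, files

models = [
    {'type': 'directory', 'path': 'notebooks'},
    {'type': 'notebook', 'path': 'notebooks/demo.ipynb'},
    {'type': 'file', 'path': 'data.csv'},
]
print(_separate(models))
# -> (['notebooks'], ['notebooks/demo.ipynb', 'data.csv'])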