INSTRUCTION
stringlengths 1
8.43k
| RESPONSE
stringlengths 75
104k
|
|---|---|
Extract the extension from the given line.
|
def _extensions(self, line):
    """
    Extract the extension from the given line.

    :param line: The line from the official public suffix repository.
    :type line: str
    """
    # We work on a whitespace-free copy of the line.
    line = line.strip()

    if line.startswith("//") or "." not in line:
        # Commented lines and lines without a point do not hold a suffix.
        return

    # We convert the (possibly unicode) suffix to its punycode form.
    line = line.encode("idna").decode("utf-8")

    if line.startswith("*."):
        # We drop the leading wildcard marker (`*.`).
        line = line[2:]

    # The extension is always the last label of the suffix.
    extension = line.split(".")[-1]

    if extension not in self.public_suffix_db:
        # First suffix seen for this extension: we create its index.
        self.public_suffix_db[extension] = [line]
    else:
        # The extension already has an index: we merge the suffix into
        # it and format the result so that no duplicate remains.
        self.public_suffix_db[extension] = List(
            self.public_suffix_db[extension] + [line]
        ).format()
|
Update of the content of the :code:`public-suffix.json` file.
|
def update(self):
    """
    Update of the content of the :code:`public-suffix.json`.
    """
    if not PyFunceble.CONFIGURATION["quiet"]:
        # The quiet mode is not activated: we tell the user which
        # file we are about to update.
        print(
            "Update of %s" % PyFunceble.OUTPUTS["default_files"]["public_suffix"],
            end=" ",
        )

    for upstream_line in self._data().split("\n"):
        # We feed every line of the upstream file to the extractor.
        self._extensions(upstream_line)

    # We write the constructed database to its final destination.
    Dict(self.public_suffix_db).to_json(self.destination)

    if not PyFunceble.CONFIGURATION["quiet"]:
        # We confirm that the update ended without any issue.
        print(PyFunceble.INTERN["done"])
|
Load the public suffix database into the system.
|
def load(self):
    """
    Load the public suffix database into the system.
    """
    if not PyFunceble.INTERN["psl_db"]:
        # The public database was not loaded yet: we read the
        # destination file, convert its JSON content to a dict and
        # publish it globally.
        file_content = File(self.destination).read()
        PyFunceble.INTERN["psl_db"] = Dict().from_json(file_content)
|
Implement the standard and alphabetical sorting.
|
def standard(cls, element):
    """
    Implement the standard and alphabetical sorting.

    :param element: The element we are currently reading.
    :type element: str

    :return: The formatted element.
    :rtype: str
    """
    # Special characters are first swapped for a unique placeholder,
    # then the placeholder itself is dropped, which leaves a clean
    # string behind.
    cleaned = Regex(element, cls.regex_replace, replace_with="@funilrys").replace()
    return cleaned.replace("@funilrys", "")
|
The idea behind this method is to sort a list of domains hierarchically.
|
def hierarchical(cls, element):
    """
    The idea behind this method is to sort a list of domain hierarchicaly.

    :param element: The element we are currently reading.
    :type element: str

    :return: The formatted element.
    :rtype: str

    .. note::
        For a domain like :code:`aaa.bbb.ccc.tdl`.

        A normal sorting is done in the following order:

        1. :code:`aaa`
        2. :code:`bbb`
        3. :code:`ccc`
        4. :code:`tdl`

        This method allow the sorting to be done in the following order:

        1. :code:`tdl`
        2. :code:`ccc`
        3. :code:`bbb`
        4. :code:`aaa`
    """
    # Will host the element to sort without its full extension.
    to_sort = ""
    # Will host the full extension (public suffix and/or bare extension).
    full_extension = ""
    # We only ever work with the lower cased element.
    element = element.lower()
    # We try to get the url base.
    # NOTE(review): `is_url_valid` apparently returns the base as a str on
    # success and a non-str value otherwise — confirm against the `Check`
    # helper contract.
    url_base = Check().is_url_valid(element, return_base=True)
    if not isinstance(url_base, str):
        # The url base was not found: the element is not a URL.
        if "." in element:
            # There is a point in the parsed element.
            # The position of the first letter of the extension.
            extension_index = element.rindex(".") + 1
            # The extension itself (everything after the last point).
            extension = element[extension_index:]
            if extension in PyFunceble.INTERN["psl_db"]:
                # The extension is in the public suffix database.
                for suffix in PyFunceble.INTERN["psl_db"][extension]:
                    # We loop through the known suffixes of the extension.
                    # We prefix the suffix with a point so that we only
                    # ever match full labels.
                    formatted_suffix = "." + suffix
                    if element.endswith(formatted_suffix):
                        # The element ends with the suffix.
                        # The position of the first character of the
                        # suffix in the parsed element.
                        suffix_index = element.rindex(formatted_suffix)
                        # Everything before the suffix is what we sort.
                        to_sort = element[:suffix_index]
                        # The matched suffix becomes the full extension.
                        full_extension = suffix
                        # We got what we wanted: we stop looking.
                        break
            if not full_extension:
                # No public suffix matched.
                # We fall back to the bare extension.
                full_extension = element[extension_index:]
                # And everything before it (minus the point) is what
                # we sort.
                to_sort = element[: extension_index - 1]
            # The point has to be at the end — not at the beginning — of
            # the extension. To understand: imagine a mirror.
            full_extension += "."
            # We reverse the part to sort.
            tros_ot = to_sort[::-1]
            if "." in tros_ot:
                # There is a point in the reversed string.
                # We prefix the full extension with the label which sits
                # right before the extension (the highest remaining level).
                full_extension = (
                    tros_ot[: tros_ot.index(".")][::-1] + "." + full_extension
                )
                # We remove that label from the rest of the
                # reversed string.
                tros_ot = tros_ot[tros_ot.index(".") + 1 :]
            # * We reverse each remaining label of the parsed element.
            # and
            # * We glue all labels back together.
            #
            # Note: after this, there is no point anymore.
            reversion = full_extension + ".".join(
                [x[::-1] for x in tros_ot.split(".")]
            )
            # We remove all special characters and return the formatted string.
            return (
                Regex(reversion, cls.regex_replace, replace_with="@funilrys")
                .replace()
                .replace("@funilrys", "")
            )
        # We remove all special characters and return the formatted string.
        return (
            Regex(
                to_sort + full_extension,
                cls.regex_replace,
                replace_with="@funilrys",
            )
            .replace()
            .replace("@funilrys", "")
        )
        # There is no point in the parsed element.
        # We return the parsed element.
        return element
    # The url base was found: we locate it inside the element.
    protocol_position = element.rindex(url_base)
    # Everything before the url base is the protocol.
    protocol = element[:protocol_position]
    # We return the protocol glued to the hierarchically sorted url base.
    return protocol + cls.hierarchical(url_base)
|
Initiate the IANA database if it is not the case.
|
def load(self):
    """
    Initiate the IANA database if it is not the case.
    """
    if not PyFunceble.INTERN.get("iana_db"):
        # The global database is missing, None or empty.
        # We publish our local copy globally.
        PyFunceble.INTERN["iana_db"] = self.iana_db
|
Return the referer for the given extension.
|
def _referer(self, extension):
    """
    Return the referer for the given extension.

    :param extension: A valid domain extension.
    :type extension: str

    :return: The whois server to use to get the WHOIS record.
    :rtype: str
    """
    # We query the IANA whois server about a (dummy) hostname which
    # uses the given extension.
    iana_record = self.lookup.whois(
        PyFunceble.CONFIGURATION["iana_whois_server"], "hello.%s" % extension
    )

    if iana_record and "refer" in iana_record:
        # The record mentions a referer: we try to extract it with
        # the help of a regex.
        regex_referer = r"(?s)refer\:\s+([a-zA-Z0-9._-]+)\n"

        matched = Regex(
            iana_record, regex_referer, return_data=True, group=1
        ).match()

        if matched:
            # The extraction succeeded: we return the referer.
            return matched

    # The record was empty or did not expose a usable referer.
    # We fall back to our manual list of whois servers.
    if extension in self.manual_server:
        return self.manual_server[extension]

    # We do not know any server to call for the given extension.
    return None
|
Extract the extension from the given block and get its referer.
|
def _extensions(self):
    """
    Extract the extention from the given block.
    Plus get its referer.

    :yield:
        A :code:`(extension, referer)` tuple for every extension found
        on the downloaded IANA page.
    """
    # We download the IANA page and split it into blocks, one per
    # listed top level domain.
    upstream_lines = (
        Download(self.iana_url, return_data=True)
        .text()
        .split('<span class="domain tld">')
    )
    # The regex which extracts the extension out of the link to its
    # dedicated page (`/domains/root/db/<extension>.html`).
    regex_valid_extension = r"(/domains/root/db/)(.*)(\.html)"
    for block in upstream_lines:
        if "/domains/root/db/" in block:
            # The link is in the block.
            # We try to extract the extension.
            #
            # NOTE(review): `.match()[1]` assumes the regex always
            # matches once the link substring is present — if `match()`
            # ever returns None here this raises a TypeError. Confirm
            # against the `Regex` helper contract.
            matched = Regex(
                block, regex_valid_extension, return_data=True, rematch=True
            ).match()[1]
            if matched:
                # The extraction is not empty or None.
                # We get the referer of the extension.
                referer = self._referer(matched)
                # We yield the matched extension and its referer.
                yield (matched, referer)
|
Update the content of the :code:`iana-domains-db` file.
|
def update(self):
    """
    Update the content of the `iana-domains-db` file.
    """
    if not PyFunceble.CONFIGURATION["quiet"]:
        # The quiet mode is not activated: we let the user know
        # what we are doing.
        print("Update of iana-domains-db", end=" ")

    for extension, referer in self._extensions():
        # We loop through every (extension, referer) couple found on
        # the IANA website.
        known = extension in self.iana_db and self.iana_db[extension] == referer
        if not known:
            # The couple is new or outdated: we (re)register it.
            self.iana_db[extension] = referer

    # We save the constructed database at its final destination.
    Dict(self.iana_db).to_json(self.destination)

    if not PyFunceble.CONFIGURATION["quiet"]:
        # We confirm that the work is done without any issue.
        print(PyFunceble.INTERN["done"])
|
Search for domain or URL related to the original URL or domain.
|
def mine(self):  # pragma: no cover
    """
    Search for domain or URL related to the original URL or domain.

    :return: The mined domains or URL.
    :rtype: dict
    """
    if PyFunceble.CONFIGURATION["mining"]:
        # The mining is activated.
        try:
            # We request the currently tested element and keep the list
            # of responses (redirections) which lead to the final one.
            history = PyFunceble.requests.get(
                self.to_get,
                timeout=PyFunceble.CONFIGURATION["seconds_before_http_timeout"],
                headers=self.headers,
            ).history
            # The dictionary which will index the mined links under the
            # bare version of the tested element.
            mined = {self.to_get_bare: []}
            for element in history:
                # We loop through the redirection history.
                # We only care about the url of each response.
                element = element.url
                if PyFunceble.INTERN["to_test_type"] == "url":
                    # We are testing a full url.
                    # We keep the full url of the response.
                    to_append = Check().is_url_valid(element, return_base=False)
                elif PyFunceble.INTERN["to_test_type"] == "domain":
                    # We are testing a domain.
                    # We only keep the base of the response url.
                    to_append = Check().is_url_valid(element, return_base=True)
                else:
                    # The tested type is unknown: we cannot decide what
                    # to mine, so we stop with an exception.
                    raise Exception("Unknown tested.")
                if to_append:
                    # There is something to append.
                    if to_append.endswith(":80"):
                        # We get rid of the explicit default HTTP port.
                        to_append = to_append[:-3]
                    if to_append != self.to_get_bare:
                        # The mined element differs from the element we
                        # are globally testing: we register it.
                        mined[self.to_get_bare].append(to_append)
            if mined[self.to_get_bare]:
                # We mined at least one link: we return the whole index.
                return mined
            # Nothing was mined.
            # We return None.
            return None
        except (
            PyFunceble.requests.ConnectionError,
            PyFunceble.requests.exceptions.Timeout,
            PyFunceble.requests.exceptions.InvalidURL,
            PyFunceble.socket.timeout,
            urllib3_exceptions.InvalidHeader,
            UnicodeDecodeError,  # The probability that this happend in production is minimal.
        ):
            # The request failed: there is nothing to mine.
            # We return None.
            return None
    # The mining is deactivated.
    return None
|
Retrieve the mining informations.
|
def _retrieve(self):
    """
    Retrieve the mining informations.
    """
    if PyFunceble.CONFIGURATION["mining"]:
        # The mining is activated.
        if "mined" not in PyFunceble.INTERN:
            # No global index exists yet: we create it.
            PyFunceble.INTERN["mined"] = {}

        if PyFunceble.path.isfile(self.file):
            # Our backup file exists: we load it and only keep the
            # non-empty elements.
            data = Dict().from_json(File(self.file).read())

            for file_path, file_data in data.items():
                PyFunceble.INTERN["mined"][file_path] = {}

                for element, element_data in file_data.items():
                    if element_data:
                        PyFunceble.INTERN["mined"][file_path][
                            element
                        ] = element_data

            return

    # Either the mining is deactivated or the backup file is missing:
    # we (re)initialize the global index and stop here.
    PyFunceble.INTERN["mined"] = {}
    return
|
Backup the mined informations.
|
def _backup(self):
    """
    Backup the mined informations.
    """
    if not PyFunceble.CONFIGURATION["mining"]:
        # The mining is deactivated: there is nothing to save.
        return

    # We dump the in-memory mined informations into our backup file.
    Dict(PyFunceble.INTERN["mined"]).to_json(self.file)
|
Add the currently mined information to the mined database.
|
def _add(self, to_add):
    """
    Add the currently mined information to the
    mined "database".

    :param to_add: The element to add.
    :type to_add: dict
    """
    if PyFunceble.CONFIGURATION["mining"]:
        # The mining is activated.
        file_index = PyFunceble.INTERN["file_to_test"]

        if file_index not in PyFunceble.INTERN["mined"]:
            # The tested file path is unknown to the mined database:
            # we create its index.
            PyFunceble.INTERN["mined"][file_index] = {}

        # A short-hand to the sub-database of the tested file path.
        mined_file = PyFunceble.INTERN["mined"][file_index]

        for element in to_add:
            # We loop through every element to add.
            if element in mined_file:
                # The element is already indexed: we extend its content.
                mined_file[element].extend(to_add[element])
            else:
                # The element is not indexed yet: we create its index.
                mined_file[element] = to_add[element]

            # We format the index content in order to avoid duplicates.
            mined_file[element] = List(mined_file[element]).format()

        # We backup everything.
        self._backup()
|
Remove the currently tested element from the mining data.
|
def remove(self):
    """
    Remove the currently tested element from the mining
    data.
    """
    if PyFunceble.CONFIGURATION["mining"]:
        # The mining is activated.
        file_index = PyFunceble.INTERN["file_to_test"]

        if file_index in PyFunceble.INTERN["mined"]:
            # The currently tested file is known to the mined database.
            for element in PyFunceble.INTERN["mined"][file_index]:
                # We loop through every mined index and delete the
                # globally tested element from its content.
                if (
                    self.to_get_bare
                    in PyFunceble.INTERN["mined"][file_index][element]
                ):
                    PyFunceble.INTERN["mined"][file_index][element].remove(
                        self.to_get_bare
                    )

            # We backup everything.
            self._backup()
|
Provide the list of mined so they can be added to the list queue.
|
def list_of_mined(cls):
    """
    Provide the list of mined so they can be added to the list
    queue.

    :return: The list of mined domains or URL.
    :rtype: list
    """
    # The variable which will host our result.
    result = []

    if PyFunceble.CONFIGURATION["mining"]:
        # The mining is activated.
        file_index = PyFunceble.INTERN["file_to_test"]

        if file_index in PyFunceble.INTERN["mined"]:
            # The tested file is known to the mining database:
            # we merge the content of each of its indexes.
            for element in PyFunceble.INTERN["mined"][file_index]:
                result.extend(PyFunceble.INTERN["mined"][file_index][element])

            # We format the merged result.
            result = List(result).format()

    # We return the (formatted) result.
    return result
|
Process the logic and structuration of the mining database.
|
def process(self):  # pragma: no cover
    """
    Process the logic and structuration of the mining database.
    """
    if not PyFunceble.CONFIGURATION["mining"]:
        # The mining is deactivated: nothing to process.
        return

    # We run the mining logic.
    mined = self.mine()

    if mined:
        # The mined data is not empty nor None:
        # we add it to the global database.
        self._add(mined)

    # And we finally backup everything.
    self._backup()
|
Get and return the content of the given log file.
|
def _get_content(cls, file):
    """
    Get and return the content of the given log file.

    :param file: The file we have to get the content from.
    :type file: str

    :return: The content of the given file.
    :rtype: dict
    """
    if not PyFunceble.path.isfile(file):
        # The file does not exist: there is nothing to read.
        return {}

    # The file exists: we read it and convert its JSON content
    # to a dict.
    return Dict().from_json(File(file).read())
|
Write the content into the given file.
|
def _write_content(cls, content, file):
    """
    Write the content into the given file.

    :param content: The dict to write.
    :type content: dict

    :param file: The file to write.
    :type file: str
    """
    if PyFunceble.CONFIGURATION["no_files"]:
        # The file generation is globally deactivated: we do nothing.
        return

    if not isinstance(content, dict):
        # A non-dict content is silently replaced with an empty dict.
        content = {}

    # We save the content as JSON into the given file.
    Dict(content).to_json(file)
|
Logs the WHOIS record if needed.
|
def whois(self, record):
    """
    Logs the WHOIS record if needed.

    :param record: The record to log.
    :type record: str
    """
    if PyFunceble.CONFIGURATION["debug"] and PyFunceble.CONFIGURATION["logs"]:
        # Both the debug and the logs subsystems are activated.

        # An empty/falsy referer is normalized to None.
        referer = PyFunceble.INTERN["referer"] or None

        # We index the log entry with the current time.
        to_write = {
            self.current_time: {
                "domain": PyFunceble.INTERN["to_test"],
                "record": record,
                "referer": referer,
            }
        }

        if self.output:
            # A custom output location was given: we use it.
            output = self.output
        else:
            # We construct the default whois log file location.
            output = PyFunceble.OUTPUT_DIRECTORY
            output += PyFunceble.OUTPUTS["parent_directory"]
            output += PyFunceble.OUTPUTS["logs"]["directories"]["parent"]
            output += PyFunceble.OUTPUTS["logs"]["filenames"]["whois"]

        # We merge the new entry into the current log content and save.
        current_content = self._get_content(output)
        current_content.update(to_write)
        self._write_content(current_content, output)
|
Logs the extracted expiration date.
|
def expiration_date(self, extracted):
    """
    Logs the extracted expiration date.

    :param extracted: The extracted expiration date (from WHOIS record).
    :type extracted: str
    """
    if PyFunceble.CONFIGURATION["logs"]:
        # The logs subsystem is activated.

        # An empty/falsy referer is normalized to None.
        referer = PyFunceble.INTERN["referer"] or None

        # We index the log entry with the current time.
        to_write = {
            self.current_time: {
                "domain": PyFunceble.INTERN["to_test"],
                "expiration_date": extracted,
                "whois_server": referer,
            }
        }

        if self.output:
            # A custom output location was given: we use it.
            output = self.output
        else:
            # We construct the default expiration date log file location.
            output = PyFunceble.OUTPUT_DIRECTORY
            output += PyFunceble.OUTPUTS["parent_directory"]
            output += PyFunceble.OUTPUTS["logs"]["directories"]["parent"]
            output += PyFunceble.OUTPUTS["logs"]["filenames"]["date_format"]

        # We merge the new entry into the current log content and save.
        current_content = self._get_content(output)
        current_content.update(to_write)
        self._write_content(current_content, output)

        if PyFunceble.CONFIGURATION["share_logs"]:
            # The logs sharing is activated: we share the entry with
            # the PyFunceble API.
            PyFunceble.requests.post(
                PyFunceble.LINKS["api_date_format"],
                data=to_write[self.current_time],
            )
|
Logs the case that the referer was not found.
|
def referer_not_found(self, extension):
    """
    Logs the case that the referer was not found.

    :param extension: The extension of the domain we are testing.
    :type extension: str
    """
    if PyFunceble.CONFIGURATION["logs"]:
        # The logs subsystem is activated.

        # We index the log entry with the current time.
        to_write = {
            self.current_time: {
                "domain": PyFunceble.INTERN["to_test"],
                "extension": extension,
            }
        }

        if self.output:
            # A custom output location was given: we use it.
            output = self.output
        else:
            # We construct the default no-referer log file location.
            output = PyFunceble.OUTPUT_DIRECTORY
            output += PyFunceble.OUTPUTS["parent_directory"]
            output += PyFunceble.OUTPUTS["logs"]["directories"]["parent"]
            output += PyFunceble.OUTPUTS["logs"]["filenames"]["no_referer"]

        # We merge the new entry into the current log content and save.
        current_content = self._get_content(output)
        current_content.update(to_write)
        self._write_content(current_content, output)

        if PyFunceble.CONFIGURATION["share_logs"]:
            # The logs sharing is activated: we share the entry with
            # the PyFunceble API.
            PyFunceble.requests.post(
                PyFunceble.LINKS["api_no_referer"], data=to_write[self.current_time]
            )
|
Print information about PyFunceble and the date of generation of a file into the given path, if it doesn't already exist.
|
def _before_header(self):
    """
    Print informations about PyFunceble and the date of generation of a file
    into a given path, if doesn't exist.
    """
    if (
        not PyFunceble.CONFIGURATION["no_files"]
        and self.output
        and not PyFunceble.path.isfile(self.output)
    ):
        # * We are allowed to generate files.
        # and
        # * An output is given.
        # and
        # * The given output does not exist yet.

        # The information about what generated the file.
        link = "# File generated by %s\n" % PyFunceble.LINKS["repo"]

        # The information about the generation date of the file.
        date_of_generation = (
            "# Date of generation: %s \n\n" % PyFunceble.CURRENT_TIME
        )

        # The list of templates which also get the constructed table
        # header written right after the before-header informations.
        authorized_templates = [
            "Generic_File",
            PyFunceble.STATUS["official"]["up"],
            PyFunceble.STATUS["official"]["down"],
            PyFunceble.STATUS["official"]["invalid"],
            PyFunceble.STATUS["official"]["valid"],
            "Less",
        ]

        if self.template in authorized_templates:
            # The current template is authorized: we also write the
            # constructed table header.
            #
            # Note: the previous implementation relied on catching an
            # UnboundLocalError (raised when `header` was never
            # assigned) to detect the non-authorized case. We branch
            # explicitly instead — same behavior, no exception-driven
            # control flow.
            header = (
                self._header_constructor(self.currently_used_header, None)[0] + "\n"
            )
            File(self.output).write(link + date_of_generation + header)
        else:
            # No table header for this template: we only write the link
            # and the date of generation.
            File(self.output).write(link + date_of_generation)
|
Construct header of the table according to template.
|
def _header_constructor(
cls, data_to_print, header_separator="-", column_separator=" "
):
"""
Construct header of the table according to template.
:param data_to_print:
The list of data to print into the header of the table.
:type data_to_print: list
:param header_separator:
The separator to use between the table header and our data.
:type header_separator: str
:param colomn_separator: The separator to use between each colomns.
:type colomn_separator: str
:return: The data to print in a list format.
:rtype: list
"""
# We initiate a variable which will save the header data.
header_data = []
# We initiate a variable which will save the header sizes.
header_size = ""
# We initiate the glue to set before the size.
before_size = "%-"
# We initiate the glue to set after the size.
after_size = "s"
if header_separator:
# The header separator is not empty.
# We initiate a variable which will save the list of
# separator data.
header_separator_data = []
# We get the length of the data to print.
length_data_to_print = len(data_to_print) - 1
# We initiate an iterator.
i = 0
for data in data_to_print:
# We loop through the list of data.
# We get the size of the currently read data.
size = data_to_print[data]
# We append the data to the header data list.
header_data.append(data)
# We construct the header size.
# Note: our header size is formatted line %s-sizes
# (the s at the end is part of the formatting.)
header_size += before_size + str(size) + after_size
if i < length_data_to_print:
# The iterator is less than the length of data to print.
# We append the the colomn separator to the header size.
header_size += column_separator
if header_separator:
# The header separator is given.
# We append the right size of separator to the list of
# separator data.
header_separator_data.append(header_separator * size)
# We increase the iterator.
i += 1
if header_separator:
# The header separator is given.
return [
# We return the formatted header (like we will do with print('%s' % 'hello'))
header_size % tuple(header_data),
# We return the formatted header separator.
header_size % tuple(header_separator_data),
]
# The header separator is not given.
# We return the formetted header.
return [header_size % tuple(header_data)]
|
Management and creation of templates of header. Please consider as header the title of each columns.
|
def header(
    self, do_not_print=False
):  # pragma: no cover pylint: disable=too-many-branches
    """
    Management and creation of templates of header.
    Please consider as "header" the title of each columns.

    :param do_not_print:
        Tell us if we have to print the header or not.
    :type do_not_print: bool
    """
    if (
        not PyFunceble.CONFIGURATION["header_printed"]
        or self.template == "Percentage"
        or do_not_print
    ):
        # * The header was not already printed.
        # or
        # * The template is the `Percentage` template.
        # or
        # * We are only asked to construct (not print) the header.
        if (
            self.template.lower() in PyFunceble.STATUS["list"]["generic"]
            or self.template == "Generic_File"
        ):
            # The template is a generic one: we start from the
            # Generic header.
            to_print = self.headers["Generic"]
            if (
                self.template.lower() in PyFunceble.STATUS["list"]["generic"]
                and PyFunceble.HTTP_CODE["active"]
            ):
                # The http status code extraction is activated: the
                # `Analyze Date` column is not printed.
                to_print = Dict(to_print).remove_key("Analyze Date")
        elif self.template.lower() in PyFunceble.STATUS["list"]["up"]:
            # The template is an up status: we use the up header.
            to_print = self.headers[PyFunceble.STATUS["official"]["up"]]
        elif self.template.lower() in PyFunceble.STATUS["list"]["valid"]:
            # The template is a valid status: we use the valid header.
            to_print = self.headers[PyFunceble.STATUS["official"]["valid"]]
        elif self.template.lower() in PyFunceble.STATUS["list"]["down"]:
            # The template is a down status: we use the down header.
            to_print = self.headers[PyFunceble.STATUS["official"]["down"]]
        elif self.template.lower() in PyFunceble.STATUS["list"]["invalid"]:
            # The template is an invalid status: we use the invalid header.
            to_print = self.headers[PyFunceble.STATUS["official"]["invalid"]]
        elif (
            self.template == "Less"
            or self.template == "Percentage"
            or self.template == "HTTP"
        ):  # pylint: disable=line-too-long
            # The template is one of `Less`, `Percentage` or `HTTP`:
            # we use the header which carries the template name.
            to_print = self.headers[self.template]
            if self.template == "Less" and not PyFunceble.HTTP_CODE["active"]:
                # The http status code extraction is deactivated for the
                # `Less` template: we add the `Source` column instead.
                to_print["Source"] = 10
        if not PyFunceble.HTTP_CODE["active"]:
            # The http status code extraction is deactivated: the
            # `HTTP Code` column is not printed.
            # NOTE(review): this applies after every template branch
            # above — confirm the placement matches upstream.
            to_print = Dict(to_print).remove_key("HTTP Code")
        # We keep the constructed header around so that other methods
        # can reuse it.
        self.currently_used_header = to_print
        if not do_not_print:
            # We are authorized to print.
            # We generate the before-header informations.
            self._before_header()
            for formatted_template in self._header_constructor(to_print):
                # We loop through the formatted header lines.
                if not self.only_on_file:
                    # We print the formatted header line on screen.
                    print(formatted_template)
                if not PyFunceble.CONFIGURATION["no_files"] and self.output:
                    # An output destination is given: we also write the
                    # formatted header line into it.
                    File(self.output).write(formatted_template + "\n")
|
Construct the table of data according to given size.
|
def _data_constructor(self, size):
    """
    Construct the table of data according to given size.

    :param size: The maximal length of each string in the table.
    :type size: list

    :return:
        A dict with all information about the data and how to which what
        maximal size to print it.
    :rtype: OrderedDict

    :raises:
        :code:`Exception`
        If the data and the size does not have the same length.
    """
    # The (ordered) mapping we are going to return.
    result = PyFunceble.OrderedDict()

    if len(self.data_to_print) != len(size):
        # This should never happend. If it's happens then there is something
        # wrong from the inputed data.
        raise Exception(
            "Inputed: " + str(len(self.data_to_print)) + "; Size: " + str(len(size))
        )

    for data, data_size in zip(self.data_to_print, size):
        # We pair each data to print with its maximal printing size.
        result[data] = data_size

    # We return the constructed result.
    return result
|
Get the size of each columns from the header.
|
def _size_from_header(cls, header):
"""
Get the size of each columns from the header.
:param header:
The header template we have to get the size from.
:type header: dict
:return: The maximal size of the each data to print.
:rtype: list
"""
# We initiate the result we are going to return.
result = []
for data in header:
# We lopp through the header.
# And we append the size to our result.
result.append(header[data])
# We return the result.
return result
|
Return a colored string.
|
def _colorify(self, data):
    """
    Return colored string.

    :param data: The string to colorify.
    :type data: str

    :return: A colored string.
    :rtype: str
    """
    if self.template not in ["Generic", "Less"]:
        # The template does not need any coloration:
        # we return the data untouched.
        return data

    # The status we base the coloration on.
    status = self.data_to_print[1].lower()

    if (
        status in PyFunceble.STATUS["list"]["up"]
        or status in PyFunceble.STATUS["list"]["valid"]
    ):
        # An up/valid status is printed with a green background.
        return PyFunceble.Fore.BLACK + PyFunceble.Back.GREEN + data

    if status in PyFunceble.STATUS["list"]["down"]:
        # A down status is printed with a red background.
        return PyFunceble.Fore.BLACK + PyFunceble.Back.RED + data

    # Any other status is printed with a cyan background.
    return PyFunceble.Fore.BLACK + PyFunceble.Back.CYAN + data
|
Management of the json template.
|
def _json_print(self):  # pragma: no cover
    """
    Management of the json template.
    """
    if not self.output:
        # No output destination was given: we cannot do anything.
        raise Exception("Empty output given.")

    if not PyFunceble.path.isfile(self.output):
        # The output does not exist yet: we simply dump our data into it.
        #
        # Note: We do not have to take care if self.data_to_print is a list
        # formatted or not because this method should not be called if it is
        # not the case.
        Dict(self.data_to_print).to_json(self.output)
        return

    # The output already exists: we read its current content.
    content = Dict().from_json(File(self.output).read())

    if not isinstance(content, list):
        # The existing content is not a list: we refuse to work with it.
        raise Exception("Output not correctly formatted.")

    # We merge our data to print into the existing content.
    content.extend(self.data_to_print)

    # We (alphabetically) format the merged list.
    content = List(content).custom_format(Sort.standard)

    if PyFunceble.CONFIGURATION["hierarchical_sorting"]:
        # The hierarchical sorting is activated: we also format the
        # content hierarchically.
        content = List(content).custom_format(Sort.hierarchical)

    # We finally save the merged content into the output.
    Dict(content).to_json(self.output)
|
Management and input of data to the table.
|
def data(self):  # pragma: no cover pylint: disable=inconsistent-return-statements
    """
    Management and input of data to the table: size the columns from the
    relevant header, construct each line and print it on screen and/or
    into the output file.

    :raises:
        :code:`Exception`
            When self.data_to_print is not a list.
    """

    if isinstance(self.data_to_print, list):
        # The data to print is a list.

        # We initiate the data we are going to print.
        to_print = {}

        # We initiate the (column) sizes we are going to print with.
        to_print_size = []

        # The templates which are always printed alone.
        alone_cases = ["Percentage", "HTTP"]

        # The templates which do not need a header.
        without_header = ["FullHosts", "PlainDomain"]

        if self.template.lower() == "json":
            # The template is the json template.

            if not PyFunceble.CONFIGURATION["no_files"] and self.output:
                # * We are allowed to generate file.
                # and
                # * The given output is not empty.

                # We print the json file.
                return self._json_print()

            # We return nothing.
            return None

        if self.template not in alone_cases and self.template not in without_header:
            # * The template is not in the list of alone case.
            # and
            # * The template is not in the list of template without header.

            # We get the template we should use.
            # Note: We basically only need the self.currently_used_header to be filled.
            self.header(True)

            # And we get the size from the header.
            to_print_size = self._size_from_header(self.currently_used_header)
        elif self.template in without_header:
            # The template is in the list of template which does not need a header.

            for data in self.data_to_print:
                # We loop through the list of data to print.

                # And we construct the (spacement) size of the data to print.
                to_print_size.append(str(len(data)))
        else:
            # We get the size from the given template name.
            to_print_size = self._size_from_header(self.headers[self.template])

        # We construct and format the data to print.
        to_print = self._data_constructor(to_print_size)

        # We print the before header section.
        self._before_header()

        for data in self._header_constructor(to_print, False):
            # We loop through the formatted data.

            if self.template.lower() in PyFunceble.STATUS["list"][
                "generic"
            ] or self.template in ["Less", "Percentage"]:
                # * The template is in the list of generic status.
                # or
                # * The template is in a specific list.

                if not self.only_on_file:
                    # We are authorized to print on screen.

                    # We colorify the data to print.
                    colorified_data = self._colorify(data)

                    # And we print the data.
                    print(colorified_data)

                if not PyFunceble.CONFIGURATION["no_files"] and self.output:
                    # * We are authorized to print on any file.
                    # and
                    # * The output is given.

                    # We write our data into the printed file.
                    File(self.output).write(data + "\n")
    else:
        # This should never happen. If it happens then there is a big issue
        # around data_to_print.
        raise Exception("Please review Prints().data()")
|
Save the current time to the file.
|
def _save(self, last=False):  # pragma: no cover
    """
    Save the current time to the (execution time) log file.

    Depending on :code:`self.action`, we either record a new start time
    (:code:`start`) or close the last record with the end time and the
    calculated execution time (:code:`stop`).

    :param last:
        Tell us if we are at the very end of the file testing.
    :type last: bool
    """

    if (
        self._authorization()
        and PyFunceble.CONFIGURATION["logs"]
        and "file_to_test" in PyFunceble.INTERN
        and PyFunceble.INTERN["file_to_test"]
    ):
        # * We are authorized to work.
        # and
        # * The generation of logs is activated.
        # and
        # * We are not testing as an imported module.

        # We set the location of the file we are working with.
        self.file = (
            PyFunceble.OUTPUT_DIRECTORY
            + PyFunceble.OUTPUTS["parent_directory"]
            + PyFunceble.OUTPUTS["logs"]["directories"]["parent"]
            + PyFunceble.OUTPUTS["logs"]["filenames"]["execution_time"]
        )

        if PyFunceble.path.isfile(self.file):
            # The file we are working with exists.

            # We get its content so we can directly work with it.
            content = Dict().from_json(File(self.file).read())
        else:
            # The file we are working with does not exist.

            # We generate a dummy content.
            content = {}

        if self.action == "start":
            # The action is equal to `start`.

            if "final_total" in content and content["final_total"]:
                # The final total index exists (from a previous run).

                # We delete it.
                del content["final_total"]

            if "data" in content:
                # The data index exists.

                # We append the current start time inside it at
                # a new sublist.
                content["data"].append([PyFunceble.INTERN["start"]])
            else:
                # The data index does not exist.

                # We create the index along with the current start time.
                content["data"] = [[PyFunceble.INTERN["start"]]]
        elif self.action == "stop":
            # The action is equal to `stop`.

            try:
                # We try to work with the data index.

                # We append the end time at the end of the last element
                # of data.
                #
                # Note: It is at the end because we should have as first
                # the start time.
                content["data"][-1].append(PyFunceble.INTERN["end"])

                # We get the (very first) start time.
                start = content["data"][0][0]
                # We get the (very last) end time.
                end = content["data"][-1][-1]

                # We calculate the execution time of the test.
                content["current_total"] = self.format_execution_time(start, end)

                if last:
                    # We are at the very end of the file testing.

                    # We initiate the global execution time.
                    content["final_total"] = content["current_total"]

                    # We inform the user about the global execution time.
                    print(
                        PyFunceble.Fore.MAGENTA
                        + PyFunceble.Style.BRIGHT
                        + "Global execution time: "
                        + content["final_total"]
                    )
            except KeyError:
                # It is not possible to work with the data index because
                # it does not exist.

                # We ignore the problem.
                pass

        try:
            # We try to save the whole data at its final location.
            Dict(content).to_json(self.file)
        except FileNotFoundError:
            # The directory was not found.

            # We construct the output directory.
            DirectoryStructure()

            # And we retry to save the whole data at its final location.
            Dict(content).to_json(self.file)
|
calculate the difference between starting and ending time.
|
def _calculate(cls, start=None, end=None):
    """
    Calculate the difference between the starting and the ending time.

    :param start: A starting time.
    :type start: int|str

    :param end: An ending time.
    :type end: int|str

    :return:
        A dict with :code:`days`, :code:`hours`, :code:`minutes` and
        :code:`seconds` as indexes (each a zero-padded string).
    :rtype: dict
    """

    if start and end:
        # Both times were explicitly given: we work with them.
        time_difference = int(end) - int(start)
    else:
        # Otherwise we work with the globally saved start/end times.
        time_difference = PyFunceble.INTERN["end"] - PyFunceble.INTERN["start"]

    # We successively extract days, hours, minutes and seconds
    # from the difference (expressed in seconds).
    days, remainder = divmod(time_difference, 24 * 60 * 60)
    hours, remainder = divmod(remainder, 60 * 60)
    minutes, seconds = divmod(remainder, 60)

    # We use an ordered dict so that the d:h:m:s order is guaranteed
    # when the caller joins the values together.
    result = PyFunceble.OrderedDict()
    result["days"] = str(days).zfill(2)
    result["hours"] = str(hours).zfill(2)
    result["minutes"] = str(minutes).zfill(2)
    result["seconds"] = str(seconds).zfill(2)

    return result
|
Format the calculated time into a human readable format.
|
def format_execution_time(self, start=None, end=None):
    """
    Format the calculated time into a human readable format.

    :param start: A starting time.
    :type start: int|str

    :param end: An ending time.
    :type end: int|str

    :return: A human readable representation (dd:hh:mm:ss).
    :rtype: str
    """

    # We calculate the time components and glue them with `:`.
    calculated = self._calculate(start, end)

    return ":".join(calculated.values())
|
Return the list of file to delete.
|
def file_to_delete(cls):
    """
    Return the list of files to delete.

    :return: The full paths of every deletable file of the output directory.
    :rtype: list
    """

    # The directory we are going to walk through.
    directory = PyFunceble.OUTPUT_DIRECTORY + PyFunceble.OUTPUTS["parent_directory"]

    if not directory.endswith(PyFunceble.directory_separator):  # pragma: no cover
        # For safety, we make sure the directory ends with the
        # directory separator.
        directory += PyFunceble.directory_separator

    # The list of files we are going to return.
    result = []

    for root, _, files in PyFunceble.walk(directory):
        # We walk through the directory and every sub-directory.

        for file in files:
            if file in [".gitignore", ".keep"]:
                # The file is one of those we must keep: we skip it.
                continue

            if root.endswith(PyFunceble.directory_separator):
                # The root already ends with the separator: we can
                # glue the filename directly.
                result.append(root + file)
            else:
                # Otherwise we insert the separator between the root
                # and the filename.
                result.append(
                    root + PyFunceble.directory_separator + file
                )  # pragma: no cover

    return result
|
Set the databases files to delete.
|
def databases_to_delete(cls):  # pragma: no cover
    """
    Set the database files to delete.

    :return: The full paths of every database file.
    :rtype: list
    """

    # The directory every database file lives in.
    directory = PyFunceble.CURRENT_DIRECTORY

    # Every database file is declared under the `default_files` index
    # of the configuration.
    default_files = PyFunceble.CONFIGURATION["outputs"]["default_files"]

    # We construct the full path of each database file:
    # the directory structure, the IANA and public suffix dumps,
    # and the inactive, mining and whois databases.
    return [
        directory + default_files[database]
        for database in [
            "dir_structure",
            "iana",
            "public_suffix",
            "inactive_db",
            "mining",
            "whois_db",
        ]
    ]
|
Delete almost all discovered files.
|
def almost_everything(self, clean_all=False):
    """
    Delete almost all discovered files.

    :param clean_all:
        Tell the subsystem if we have to clean everything instead
        of almost everything.
    :type clean_all: bool
    """

    # We get the list of files to delete.
    to_delete = self.file_to_delete()

    if clean_all:  # pragma: no cover
        # A full cleanup was requested: the database files
        # are deleted too.
        to_delete.extend(self.databases_to_delete())

    for file_path in to_delete:
        # We delete every listed file.
        File(file_path).delete()

    if clean_all:  # pragma: no cover
        # After a full cleanup we reload the configuration.
        Load(PyFunceble.CURRENT_DIRECTORY)
|
Get the hash of the given file
|
def _hash_file(self, algo):
    """
    Get the hash of the given file.

    :param algo: The algorithm to use.
    :type algo: str

    :return: The hexdigest of the file content.
    :rtype: str
    """

    # We get a hasher for the requested algorithm.
    hasher = getattr(hashlib, algo)()

    with open(self.path, "rb") as file_stream:
        # We feed the hasher chunk by chunk so that huge files do not
        # have to be fully loaded into memory; the digest is identical
        # to the one of the whole content.
        for chunk in iter(lambda: file_stream.read(65536), b""):
            hasher.update(chunk)

    # We extract and return the hash.
    return hasher.hexdigest()
|
Get hash of the given data.
|
def _hash_data(self, algo):
    """
    Get the hash of the given data.

    :param algo: The algorithm to use.
    :type algo: str

    :return: The hexdigest of the data.
    :rtype: str
    """

    # hashlib.new() accepts the data directly at construction time,
    # which is equivalent to getattr(hashlib, algo)().update(data).
    return hashlib.new(algo, self.data).hexdigest()
|
Return the hash of the given file
|
def get(self):
    """
    Return the hash of the given file or data.

    :return:
        A dict of hexdigests indexed by algorithm, the selected
        hexdigest only (if :code:`self.only_hash` is set and a single
        algorithm was requested), or :code:`None` when the algorithm is
        unknown or when there is nothing to hash.
    :rtype: dict|str|None
    """

    # The variable which will save the result we are going to return.
    result = {}

    if self.algorithm not in self.valid_algorithms:
        # The parsed algorithm is not in the list of valid algorithms.
        return None  # pragma: no cover

    if self.algorithm == "all":
        # The parsed algorithm is `all`.

        # We remove `all` from the list of valid algorithms because we
        # are going to loop through that list.
        #
        # Fix: we remove it by value instead of blindly deleting the
        # first element, which silently corrupted the list whenever
        # `all` was not at index 0.
        self.valid_algorithms.remove("all")

        algorithms = list(self.valid_algorithms)
    else:
        # The parsed algorithm is a specific one.
        algorithms = [self.algorithm]

    for algo in algorithms:
        # We loop through the algorithm(s) to apply.

        if self.path and path.isfile(self.path):
            # The file path exists: we hash the file content.
            result[algo] = self._hash_file(algo)
        elif self.data:
            # No (existing) file path, but some data was given:
            # we hash the data.
            result[algo] = self._hash_data(algo)
        else:  # pragma: no cover
            # Neither an existing file path nor data was given.
            return None

    if self.algorithm != "all" and self.only_hash:
        # * A single algorithm was requested.
        # and
        # * We only have to return the selected hash.
        return result[self.algorithm]

    # We return all calculated hashes.
    return result
|
Execute the given command.
|
def execute(self):
    """
    Execute the given command and wait for its end.

    :return:
        The decoded output of the command, or its decoded error
        output if the command exited with a non-zero code.
    :rtype: str
    """

    # NOTE(review): the command string is run through the shell
    # (shell=True); it must never be built from untrusted input.
    #
    # We use a context manager so that the stdout/stderr pipes are
    # always closed, even if the communication fails.
    with Popen(self.command, stdout=PIPE, stderr=PIPE, shell=True) as process:
        # We communicate with the command and get its output and error.
        (output, error) = process.communicate()

        if process.returncode != 0:  # pragma: no cover
            # The return code is different to 0.

            # We return the decoded error.
            return self._decode_output(error)

        # The return code (or exit code if you prefer) is equal to 0.

        # We return the decoded output of the executed command.
        return self._decode_output(output)
|
Run the given command and yield its output line by line.
|
def run(self):
    """
    Run the given command and yield each line of its output one by one.

    .. note::
        The difference between this method and :code:`self.execute()`
        is that :code:`self.execute()` waits for the process to end
        in order to return its output.
    """

    with Popen(self.command, stdout=PIPE, shell=True) as process:
        # We initiate a process and parse the command to it.

        while True:
            # We loop until the end of the process output.

            # We get the current (raw) line from the process stdout.
            raw_line = process.stdout.readline()

            if not raw_line:
                # readline() only returns an empty bytes object at the
                # end of the stream: we reached the end of the output.
                break

            # Note: we use rstrip() because we are paranoid :-)
            current_line = raw_line.rstrip()

            if not current_line:
                # The stripped line is empty (it was only whitespace):
                # we do not yield it.
                #
                # Fix: previously an empty output line stopped the whole
                # iteration even when the process still had output.
                continue

            # The line is not empty: we decode and yield it.
            yield self._decode_output(current_line)
|
Remove a given key from a given dictionary.
|
def remove_key(self, key_to_remove):
    """
    Remove a given key from a given dictionary.

    :param key_to_remove: The key(s) to delete.
    :type key_to_remove: list|str

    :return:
        The dict without the given key(s), or :code:`None` when the
        main dictionary is not a dict.
    :rtype: dict|None
    """

    if isinstance(self.main_dictionnary, dict):
        # The main dictionnary is a dictionnary.

        if isinstance(key_to_remove, list):
            # The parsed key to remove is a list.

            for key in key_to_remove:
                # We loop through the list of keys to remove.

                # We delete the key from the dictionnary.
                #
                # Fix: we use pop() with a default so that an unknown
                # key is silently ignored, exactly like in the single
                # key case below (previously this raised a KeyError).
                self.main_dictionnary.pop(key, None)
        else:
            # The parsed key to remove is not a list.

            # We delete the given key from the dictionnary,
            # silently ignoring it if it does not exist.
            self.main_dictionnary.pop(key_to_remove, None)

        # We return the final dictionnary.
        return self.main_dictionnary

    # The main dictionnary is not a dictionnary.
    # We return None.
    return None
|
Rename the given keys from the given dictionary.
|
def rename_key(self, key_to_rename, strict=True):
    """
    Rename the given keys from the given dictionary.

    :param key_to_rename:
        The key(s) to rename.

        Expected format: :code:`{old: new}`
    :type key_to_rename: dict

    :param strict:
        Tell us if we have to rename the exact index (:code:`True`) or
        every index which contains the given key(s) (:code:`False`).
    :type strict: bool

    :return: The well formatted dict, or :code:`None` on bad input.
    :rtype: dict|None
    """

    if isinstance(self.main_dictionnary, dict) and isinstance(key_to_rename, dict):
        # * The given main dictionnary is a dictionnary.
        # and
        # * The given key to rename is a dictionnary.

        for old, new in key_to_rename.items():
            # We loop through the keys to rename.

            if strict:
                # The strict method is activated.

                if old in self.main_dictionnary:
                    # The old key is in the main dictionnary.

                    # We initiate the new key with the old value and
                    # remove the old key in one operation.
                    self.main_dictionnary[new] = self.main_dictionnary.pop(old)
            else:
                # The strict method is not activated: we rename every
                # index which contains the old key as a substring.

                # We initiate the map of indexes to rename.
                to_rename = {}

                for index in self.main_dictionnary:
                    # We loop through the indexes of the main dictionnary.

                    if old in index:
                        # The old key is part of the index name.

                        # We map the index to its new name.
                        #
                        # NOTE(review): `new[:-1]` drops the last char of
                        # the new name and glues what follows `old` in the
                        # index — this assumes callers pass patterns like
                        # {"old_": "new_"}; confirm against call sites.
                        to_rename.update({index: new[:-1] + index.split(old)[-1]})

                # We recurse in strict mode against the collected map in
                # order to actually rename the indexes.
                self.main_dictionnary = Dict(self.main_dictionnary).rename_key(
                    to_rename, True
                )

        # We return the final dictionnary.
        return self.main_dictionnary

    # * The given main dictionnary is not a dictionnary.
    # or
    # * The given key to rename is not a dictionnary.
    # We return None.
    return None
|
Merge the content of to_merge into the given main dictionnary.
|
def merge(self, to_merge, strict=True):
    """
    Merge the content of to_merge into the given main dictionnary.

    :param to_merge: The dictionnary to merge.
    :type to_merge: dict

    :param strict:
        Tell us if we have to strictly merge lists.

        :code:`True`: We follow index.
        :code:`False`: We follow element (content).
    :type strict: bool

    :return: The merged dict.
    :rtype: dict
    """

    # The variable which will save the merged data.
    result = {}

    for index, value in to_merge.items():
        # We loop through the dictionnary to merge.

        if index in self.main_dictionnary:
            # The currently read index also exists in the main
            # dictionnary.
            original_value = self.main_dictionnary[index]

            if isinstance(value, dict) and isinstance(original_value, dict):
                # Both sides are dictionnaries: we merge them
                # recursively.
                result[index] = Dict(original_value).merge(value)
            elif isinstance(value, list) and isinstance(original_value, list):
                # Both sides are lists: we merge them.
                result[index] = List(original_value).merge(value, strict)
            else:
                # At least one side is a plain value: the value to
                # merge wins.
                result[index] = value
        else:
            # The currently read index only exists in the dictionnary
            # to merge: we simply keep its value.
            result[index] = value

    for index, value in self.main_dictionnary.items():
        # We loop through the main dictionnary in order to keep every
        # index which was not merged previously.

        if index not in result:
            result[index] = value

    # We return the merged dictionnary.
    return result
|
Save a dictionnary into a JSON file.
|
def to_json(self, destination):
    """
    Save a dictionnary into a JSON file.

    :param destination:
        A path to a file where we're going to
        write the converted dict into a JSON format.
    :type destination: str
    """

    # We open the file with an explicit UTF-8 encoding so that
    # non-ASCII content can never raise a UnicodeEncodeError,
    # whatever the locale of the current machine is. This replaces
    # the previous duplicated try/except fallback.
    #
    # Note: We always overwrite the destination.
    with open(destination, "w", encoding="utf-8") as file:
        # We save the current dictionnary into a json format.
        dump(
            self.main_dictionnary,
            file,
            ensure_ascii=False,
            indent=4,
            sort_keys=True,
        )
|
Save a dictionnary into a YAML file.
|
def to_yaml(self, destination, flow_style=False):
    """
    Save a dictionnary into a YAML file.

    :param destination:
        A path to a file where we're going to write the
        converted dict into a YAML format.
    :type destination: str

    :param flow_style:
        The default flow style we pass to the YAML dumper.
    :type flow_style: bool
    """

    with open(destination, "w") as file:
        # We open the file we are going to write.
        # Note: We always overwrite the destination.

        # We save the current dictionnary into a YAML format.
        dump_yaml(
            self.main_dictionnary,
            file,
            encoding="utf-8",
            allow_unicode=True,
            indent=4,
            default_flow_style=flow_style,
        )
|
Fix the path of the given path.
|
def fix_path(self, splited_path=None):
    """
    Fix the path of the given path.

    :param splited_path: A list to convert to the right path.
    :type splited_path: list

    :return: The fixed path.
    :rtype: str
    """

    if splited_path:
        # A splited path was given: we glue its elements together with
        # the directory separator and make sure the result ends with a
        # separator too.
        return directory_separator.join(splited_path) + directory_separator

    if not self.directory:
        # The parsed directory is empty or None: there is nothing to
        # fix, we return it as it is.
        return self.directory

    # We split the directory on whichever separator it uses.
    if "/" in self.directory:
        parts = self.directory.split("/")
    elif "\\" in self.directory:
        parts = self.directory.split("\\")
    else:
        parts = [self.directory]

    # We call ourself again with the cleaned (no empty element)
    # splited path.
    return self.fix_path(splited_path=[part for part in parts if part])
|
Write or append data into the given file path.
|
def write(self, data_to_write, overwrite=False):
    """
    Write or append data into the given file path.

    :param data_to_write: The data to write.
    :type data_to_write: str

    :param overwrite:
        Tell us if we have to overwrite the
        content of the file we are working with.
    :type overwrite: bool
    """

    if overwrite or not path.isfile(self.file):
        # * We have to overwrite the file data.
        # or
        # * The file path does not already exist.
        #
        # We will open the file in (over)write mode.
        mode = "w"
    else:
        # The file already exists and we do not have to overwrite it:
        # we will open the file in append mode.
        mode = "a"

    # The two branches previously duplicated this whole block; only
    # the mode differed.
    with open(self.file, mode, encoding="utf-8", newline="\n") as file:
        # We prepare the file for writing.

        if data_to_write and isinstance(data_to_write, str):
            # * A data to write is given.
            # and
            # * The data to write is a string.

            # We write the string into the file.
            file.write(data_to_write)
|
Read a given file path and return its content.
|
def read(self):
    """
    Read a given file path and return its content.

    :return: The content of the given file path.
    :rtype: str
    """

    try:
        # We first try to read the file as UTF-8.
        with open(self.file, "r", encoding="utf-8") as file_stream:
            return file_stream.read()
    except UnicodeDecodeError:  # pragma: no cover
        # The file is not UTF-8 encoded: we fall back to the default
        # (locale) encoding.
        with open(self.file, "r") as file_stream:
            return file_stream.read()
|
Return a well-formatted list. Basically, it sorts the list and removes duplicates.
|
def format(self):
    """
    Return a well-formatted list. Basically, it sorts the list
    (case-insensitively) and removes duplicates.

    :return: A sorted list without duplicates.
    :rtype: list
    """

    try:
        # We deduplicate with a set and sort case-insensitively.
        return sorted(set(self.main_list), key=str.lower)
    except TypeError:  # pragma: no cover
        # The list mixes types which cannot be sorted together:
        # we return it as it is.
        return self.main_list
|
Return a well-formatted list, using key_method as the function/method which formats the elements before sorting.
|
def custom_format(self, key_method, reverse=False):
    """
    Return a well-formatted list, using key_method as the
    function/method which formats the elements before sorting.

    :param key_method:
        A function or method to use to format the
        read element before sorting.
    :type key_method: function|method

    :param reverse: Tell us if we have to reverse the list.
    :type reverse: bool

    :return: A sorted list.
    :rtype: list
    """

    try:
        # We deduplicate first, then sort with the given key.
        unique_elements = set(self.main_list)

        return sorted(unique_elements, key=key_method, reverse=reverse)
    except TypeError:  # pragma: no cover
        # The list mixes unsortable/unhashable types:
        # we return it as it is.
        return self.main_list
|
Merge to_merge into the given main list.
|
def merge(self, to_merge, strict=True):
    """
    Merge to_merge into the given main list.

    :param to_merge: The list to merge.
    :type to_merge: list

    :param strict:
        Tell us if we have to respect index (True)
        or not (False).
    :type strict: bool

    :return: The merged list.
    :rtype: list
    """

    if not strict:
        # We are not in strict mode: we follow the content.
        #
        # Note: the result is based on the main list itself, which is
        # therefore updated in place as well.
        result = self.main_list

        for element in to_merge:
            # We append every element which is not already present.
            if element not in result:
                result.append(element)

        return result

    # We are in strict mode: we follow the index.
    result = []

    for index, element in enumerate(to_merge):
        # We loop through each element of the list to merge.

        try:
            original_element = self.main_list[index]
        except IndexError:  # pragma: no cover
            # The main list is shorter than the list to merge: we keep
            # the element to merge as it is.
            result.append(element)
            continue

        if isinstance(element, dict) and isinstance(original_element, dict):
            # Both sides are dicts: we merge them.
            result.append(Dict(original_element).merge(element))
        elif isinstance(element, list) and isinstance(original_element, list):
            # Both sides are lists: we merge them recursively.
            result.append(List(original_element).merge(element))
        else:
            # At least one side is a plain value: the element to merge
            # wins.
            result.append(element)

    # We return the merged list.
    return result
|
Return a list of strings which don't match the given regex.
|
def not_matching_list(self):
    """
    Return the list of elements which don't match the given regex.

    :return: The non-matching elements (in their original order).
    :rtype: list
    """

    # We compile the regex once for the whole loop.
    compiled_regex = comp(self.regex)

    result = []

    for element in self.data:
        # We keep the element only if the regex does not match
        # its string representation.
        if not compiled_regex.search(str(element)):
            result.append(element)

    return result
|
Used to get an exploitable result of re.search.
|
def match(self):
    """
    Used to get an exploitable result of re.search/re.findall.

    :return:
        The matched data (str or list), :code:`True`/:code:`False`
        when :code:`self.return_data` is not set, or :code:`False`
        when nothing matched.
    :rtype: mixed
    """

    # We initiate this variable which is going to contain the returned data.
    result = []

    # We compile the regex string.
    to_match = comp(self.regex)

    # In case we have to use the implementation of ${BASH_REMATCH} we use
    # re.findall otherwise, we use re.search.
    if self.rematch:  # pylint: disable=no-member
        pre_result = to_match.findall(self.data)
    else:
        pre_result = to_match.search(self.data)

    if self.return_data and pre_result:  # pylint: disable=no-member
        # We have to return the matched data (and something matched).

        if self.rematch:  # pylint: disable=no-member
            for data in pre_result:
                # We loop through the findall() results.

                if isinstance(data, tuple):
                    # findall() returns tuples when the regex has more
                    # than one capture group: we flatten them.
                    result.extend(list(data))
                else:
                    result.append(data)

            if self.group != 0:  # pylint: disable=no-member
                # NOTE(review): in rematch mode, `group` is used as a
                # 0-based index into the flattened findall() result,
                # not as a re group number.
                return result[self.group]  # pylint: disable=no-member
        else:
            # Standard re.search mode: we extract (and strip) the
            # requested group.
            result = pre_result.group(
                self.group  # pylint: disable=no-member
            ).strip()

        return result

    if not self.return_data and pre_result:  # pylint: disable=no-member
        # We only have to report whether something matched.
        return True

    # Nothing matched.
    return False
|
Used to replace a matched string with another.
|
def replace(self):
    """
    Used to replace a matched string with another.

    :return: The data after replacement.
    :rtype: str
    """

    if not self.replace_with:  # pylint: disable=no-member
        # There is nothing to replace with: we return the given data
        # untouched.
        return self.data

    # We substitute (at most `occurences` times; 0 means all) every
    # match of the regex with the replacement.
    return substrings(
        self.regex,
        self.replace_with,  # pylint: disable=no-member
        self.data,
        self.occurences,  # pylint: disable=no-member
    )
|
Download the given link and return or save its :code:`requests.text` at the given destination.
|
def text(self):
    """
    Download the given link and return or save its :code:`requests.text`
    at the given destination.

    :return:
        The downloaded text if :code:`self.return_data` is set,
        :code:`True` otherwise.
    :rtype: mixed

    :raises:
        :code:`Exception`
            If the status code is not :code:`200`.
    """

    try:
        # We request the link.
        req = requests.get(self.link, verify=self.verification)

        if req.status_code == 200:
            # The request http status code is equal to 200.

            if self.return_data:
                # We have to return the data.

                # We return the link content.
                return req.text

            # We save the link content to the parsed destination.
            File(self.destination).write(req.text, overwrite=True)

            # We return True.
            return True

        # The request http status code is not equal to 200.
        # We raise an exception saying that we were unable to download.
        raise Exception("Unable to download %s." % repr(self.link))
    except requests.exceptions.ConnectionError:
        # The network could not be reached at all: we inform the user
        # and exit with a non-zero status code.
        print(Fore.RED + "No Internet connection available." + Style.RESET_ALL)
        exit(1)
|
Count the number of domain for each status.
|
def count(self):
    """
    Count the number of subjects for each status.
    """

    if not self.status:
        # No status was parsed: there is nothing to count.
        return

    # We increase the number of tested.
    PyFunceble.INTERN["counter"]["number"]["tested"] += 1

    # We compare the lowered status against the status lists.
    status = self.status.lower()

    if (
        status in PyFunceble.STATUS["list"]["up"]
        or status in PyFunceble.STATUS["list"]["valid"]
    ):
        # The status is in the list of up (or valid) status.

        # We increase the number of up.
        PyFunceble.INTERN["counter"]["number"]["up"] += 1
    elif status in PyFunceble.STATUS["list"]["down"]:
        # The status is in the list of down status.

        # We increase the number of down.
        PyFunceble.INTERN["counter"]["number"]["down"] += 1
    else:
        # The status is not in the list of up nor down status.

        # We increase the number of invalid.
        PyFunceble.INTERN["counter"]["number"]["invalid"] += 1
|
Calculate the percentage of each status.
|
def _calculate(cls):
    """
    Calculate the percentage of each status.
    """

    # The total number of tested subjects.
    tested = PyFunceble.INTERN["counter"]["number"]["tested"]

    for status in ["up", "down", "invalid"]:
        # We loop through each status we have to calculate.

        # We calculate the (floored) percentage of the current status.
        calculation = (
            PyFunceble.INTERN["counter"]["number"][status] * 100 // tested
        )

        # And we update the percentage counter of the current status.
        PyFunceble.INTERN["counter"]["percentage"].update({status: calculation})
|
Print on screen and on file the percentages for each status.
|
def log(self):
    """
    Print on screen and on file the percentages for each status.
    """

    if (
        PyFunceble.CONFIGURATION["show_percentage"]
        and PyFunceble.INTERN["counter"]["number"]["tested"] > 0
    ):
        # * We are allowed to show the percentage on screen.
        # and
        # * The number of tested is greater than 0.

        # We initiate the output file.
        output = (
            PyFunceble.OUTPUT_DIRECTORY
            + PyFunceble.OUTPUTS["parent_directory"]
            + PyFunceble.OUTPUTS["logs"]["directories"]["parent"]
            + PyFunceble.OUTPUTS["logs"]["directories"]["percentage"]
            + PyFunceble.OUTPUTS["logs"]["filenames"]["percentage"]
        )

        # We delete the output file if it does exist.
        File(output).delete()

        # We calculate the percentage of each status.
        self._calculate()

        if not PyFunceble.CONFIGURATION["quiet"]:
            # The quiet mode is NOT activated.

            # We print a new line.
            print("\n")

            # We print the percentage header on file and screen.
            Prints(None, "Percentage", output).header()

            # We construct the different lines/data to print on screen and file.
            lines_to_print = [
                [
                    PyFunceble.STATUS["official"]["up"],
                    str(PyFunceble.INTERN["counter"]["percentage"]["up"]) + "%",
                    PyFunceble.INTERN["counter"]["number"]["up"],
                ],
                [
                    PyFunceble.STATUS["official"]["down"],
                    str(PyFunceble.INTERN["counter"]["percentage"]["down"]) + "%",
                    PyFunceble.INTERN["counter"]["number"]["down"],
                ],
                [
                    PyFunceble.STATUS["official"]["invalid"],
                    str(PyFunceble.INTERN["counter"]["percentage"]["invalid"])
                    + "%",
                    PyFunceble.INTERN["counter"]["number"]["invalid"],
                ],
            ]

            if PyFunceble.CONFIGURATION["syntax"]:
                # We are checking for syntax.

                # We update the denomination of the UP status.
                lines_to_print[0][0] = PyFunceble.STATUS["official"]["valid"]

                # And we unset the INACTIVE line.
                del lines_to_print[1]

            for to_print in lines_to_print:
                # We loop through the different lines to print.
                # (one line for each status.)

                # And we print the current status line on file and screen.
                Prints(to_print, "Percentage", output).data()
    elif PyFunceble.INTERN["counter"]["number"]["tested"] > 0:
        # * We are not allowed to show the percentage on screen.
        # but
        # * The number of tested is greater than 0.

        # We run the calculation.
        # Note: The following is needed, because all counter calculation are
        # done by this class.
        self._calculate()
|
Check if the given URL is valid.
|
def is_url_valid(self, url=None, return_base=False, return_formatted=False):
"""
Check if the given URL is valid.
:param url: The url to validate.
:type url: str
:param return_base:
Allow us the return of the url base (if URL formatted correctly).
:type return_formatted: bool
:param return_formatted:
Allow us to get the URL converted to IDNA if the conversion
is activated.
:type return_formatted: bool
:return: The validity of the URL or its base.
:rtype: bool|str
"""
# We initiate a variable which will save the initial base in case
# we have to convert the base to IDNA.
initial_base = None
if url:
# The given url is not empty.
# We initiate the element to test.
to_test = url
elif self.element:
# The globaly given url is not empty.
# We initiate the element to test.
to_test = self.element
else:
# The given url is empty.
# We initiate the element to test from the globaly URl to test.
to_test = PyFunceble.INTERN["to_test"]
if to_test.startswith("http"):
# The element to test starts with http.
try:
# We initiate a regex which will match the domain or the url base.
regex = r"(^(http:\/\/|https:\/\/)(.+?(?=\/)|.+?$))"
# We extract the url base with the help of the initiated regex.
initial_base = base = Regex(
to_test, regex, return_data=True, rematch=True
).match()[2]
if PyFunceble.CONFIGURATION["idna_conversion"]:
# We have to convert the domain to IDNA.
# We convert the initial base to IDNA.
base = domain2idna(base)
# We check if the url base is a valid domain.
domain_status = self.is_domain_valid(base)
# We check if the url base is a valid IP.
ip_status = self.is_ip_valid(base)
if domain_status or ip_status:
# * The url base is a valid domain.
# and
# * The url base is a valid IP.
if PyFunceble.CONFIGURATION["idna_conversion"] and return_formatted:
# * We have to convert to IDNA.
# and
# * We have to return the converted full URL.
# We return the converted full URL.
return Regex(
to_test,
initial_base,
escape=True,
return_data=True,
replace_with=base,
occurences=1,
).replace()
if return_formatted:
# * We do not have to convert to IDNA.
# but
# * We have to return the full URL.
# We return the initially given URL.
return to_test
if return_base:
# We have to return the base of the URL.
# We return the base of the URL.
return base
# We return True.
return True
except TypeError:
pass
if return_formatted:
# We have to return an URL.
# We return the initily given URL.
return to_test
# We return False.
return False
|
Check if the given domain is valid.
|
    def is_domain_valid(
        self, domain=None, subdomain_check=False
    ): # pylint:disable=too-many-return-statements, too-many-branches
        """
        Check if the given domain is valid.

        :param domain: The domain to validate.
        :type domain: str

        :param subdomain_check:
            Activate the subdomain checking.
        :type subdomain_check: bool

        :return: The validity of the (sub-)domain.
        :rtype: bool
        """

        # We initate our regex which will match for valid domains.
        regex_valid_domains = r"^(?=.{0,253}$)(([a-z0-9][a-z0-9-]{0,61}[a-z0-9]|[a-z0-9])\.)+((?=.*[^0-9])([a-z0-9][a-z0-9-]{0,61}[a-z0-9](?:\.)?|[a-z0-9](?:\.)?))$" # pylint: disable=line-too-long

        # We initiate our regex which will match for valid subdomains.
        # NOTE: unlike the domain regex, this one also tolerates `_` in
        # the non-extension labels.
        regex_valid_subdomains = r"^(?=.{0,253}$)(([a-z0-9_][a-z0-9-_]{0,61}[a-z0-9_-]|[a-z0-9])\.)+((?=.*[^0-9])([a-z0-9][a-z0-9-]{0,61}[a-z0-9]|[a-z0-9]))$" # pylint: disable=line-too-long

        if domain:
            # A domain is given.

            # We set the element to test as the parsed domain.
            to_test = domain
        elif self.element:
            # A domain is globally given.

            # We set the globally parsed domain.
            to_test = self.element
        else:
            # A domain is not given.

            # We set the element to test as the currently tested element.
            to_test = PyFunceble.INTERN["to_test"]

        try:
            # We get the position of the last point.
            last_point_index = to_test.rindex(".")

            # And with the help of the position of the last point, we get the domain extension.
            extension = to_test[last_point_index + 1 :]

            if not extension and to_test.endswith("."):
                # The subject ends with a point (e.g `example.org.`):
                # the extension is the last non-empty label instead.
                try:
                    extension = [x for x in to_test.split(".") if x][-1]
                except IndexError:
                    pass

            if not extension or extension not in PyFunceble.INTERN["iana_db"]:
                # * The extension is not found.
                # or
                # * The extension is not into the IANA database.

                # We return False, the domain is not valid.
                return False

            if (
                Regex(to_test, regex_valid_domains, return_data=False).match()
                and not subdomain_check
            ):
                # * The element passes the domain validation.
                # and
                # * We are not checking if it is a subdomain.

                # We return True. The domain is valid.
                return True

            # The element did not pass the domain validation. That means that
            # it has an invalid character or the position of - or _ is not right.

            if extension in PyFunceble.INTERN["psl_db"]:
                # The extension is into the public suffix (psl) database.

                for suffix in PyFunceble.INTERN["psl_db"][extension]:
                    # We loop through the suffixes indexed under the extension
                    # into the psl database.

                    try:
                        # We try to get the position of the currently read suffix
                        # in the element to test.
                        suffix_index = to_test.rindex("." + suffix)

                        # We get the element to check.
                        # The idea here is to delete the suffix, then retest with our
                        # subdomains regex.
                        to_check = to_test[:suffix_index]

                        if "." not in to_check and subdomain_check:
                            # * There is no point into the new element to check.
                            # and
                            # * We are checking if it is a subdomain.

                            # We return False, it is not a subdomain.
                            return False

                        if "." in to_check and subdomain_check:
                            # * There is a point into the new element to check.
                            # and
                            # * We are checking if it is a subdomain.

                            # We return True, it is a subdomain.
                            return True

                        # We are not checking if it is a subdomain.
                        if "." in to_check:
                            # There is a point into the new element to check.

                            # We check if it passes our subdomain regex.
                            # * True: It's a valid domain.
                            # * False: It's an invalid domain.
                            return Regex(
                                to_check, regex_valid_subdomains, return_data=False
                            ).match()
                    except ValueError:
                        # In case of a value error because the position is not found,
                        # we continue to the next suffix.
                        pass

            # * The extension is not into the psl database.
            # or
            # * No indexed suffix matched the element to test.

            # We get the element before the last point.
            to_check = to_test[:last_point_index]

            if "." in to_check and subdomain_check:
                # * There is a point in to_check.
                # and
                # * We are checking if it is a subdomain.

                # We return True, it is a subdomain.
                return True

            # We are not checking if it is a subdomain.
            if "." in to_check:
                # There is a point in to_check.

                # We check if it passes our subdomain regex.
                # * True: It's a valid domain.
                # * False: It's an invalid domain.
                return Regex(
                    to_check, regex_valid_subdomains, return_data=False
                ).match()
        except (ValueError, AttributeError):
            # * ValueError: there is no point in the element to test.
            # * AttributeError: the element to test is not a string.
            # Either way, we fall through and report the domain as invalid.
            pass

        # And we return False, the domain is not valid.
        return False
|
Check if the given domain is a subdomain.
|
def is_subdomain(self, domain=None):
"""
Check if the given subdomain is a subdomain.
:param domain: The domain to validate.
:type domain: str
:return: The validity of the subdomain.
:rtype: bool
"""
if domain:
# A domain is given.
# We set the element to test as the parsed domain.
to_test = domain
elif self.element:
# A domain is globally given.
# We set the globally parsed domain.
to_test = self.element
else:
# A domain is not given.
# We set the element to test as the currently tested element.
to_test = PyFunceble.INTERN["to_test"]
# We return the status of the check.
return self.is_domain_valid(to_test, subdomain_check=True)
|
Check if the given IP is a valid IPv4.
|
def is_ip_valid(self, ip_to_check=None):
"""
Check if the given IP is a valid IPv4.
:param ip_to_check: The IP to test.
:type ip_to_check: str
:return: The validity of the IP.
:rtype: bool
.. note::
We only test IPv4 because for now we only them for now.
"""
# We initate our regex which will match for valid IPv4.
regex_ipv4 = r"^(25[0-5]|2[0-4][0-9]|[01]?[0-9][0-9]?)\.(25[0-5]|2[0-4][0-9]|[01]?[0-9][0-9]?)\.(25[0-5]|2[0-4][0-9]|[01]?[0-9][0-9]?)\.(25[0-5]|2[0-4][0-9]|[01]?[0-9][0-9]?|[0-9]{1,}\/[0-9]{1,})$" # pylint: disable=line-too-long
if ip_to_check:
# An element is localy given.
# We consider it as the element to test.
to_test = ip_to_check
elif self.element:
# An element is given globally.
# We consider it as the element to test.
to_test = self.element
else:
# An element is not localy given.
# We consider the global element to test as the element to test.
to_test = PyFunceble.INTERN["to_test"]
# We check if it passes our IPv4 regex.
# * True: It's a valid IPv4.
# * False: It's an invalid IPv4.
return Regex(to_test, regex_ipv4, return_data=False).match()
|
Check if the given IP is a valid IPv4 range.
|
def is_ip_range(self, ip_to_check=None):
"""
Check if the given IP is a valid IPv4.
:param ip_to_check: The IP to test.
:type ip_to_check: str
:return: The validity of the IP.
:rtype: bool
.. note::
We only test IPv4 because for now we only them for now.
"""
if ip_to_check:
# An element is localy given.
# We consider it as the element to test.
to_test = ip_to_check
elif self.element:
# An element is given globally.
# We consider it as the element to test.
to_test = self.element
else:
# An element is not localy given.
# We consider the global element to test as the element to test.
to_test = PyFunceble.INTERN["to_test"]
if self.is_ip_valid(to_test):
# We initate our regex which will match for valid IPv4 ranges.
regex_ipv4_range = r"^(25[0-5]|2[0-4][0-9]|[01]?[0-9][0-9]?)\.(25[0-5]|2[0-4][0-9]|[01]?[0-9][0-9]?)\.(25[0-5]|2[0-4][0-9]|[01]?[0-9][0-9]?)\.([0-9]{1,}\/[0-9]{1,})$" # pylint: disable=line-too-long
# We check if it passes our regex.
# * True: It's an IPv4 range.
# * False: It's not an IPv4 range.
return Regex(to_test, regex_ipv4_range, return_data=False).match()
return False
|
Execute the logic behind the Syntax handling.
|
def get(cls):
"""
Execute the logic behind the Syntax handling.
:return: The syntax status.
:rtype: str
"""
if PyFunceble.INTERN["to_test_type"] == "domain":
# We are testing for domain or ip.
if Check().is_domain_valid() or Check().is_ip_valid():
# * The domain is valid.
# or
# * The IP is valid.
# We handle and return the valid status.
return SyntaxStatus(PyFunceble.STATUS["official"]["valid"]).handle()
elif PyFunceble.INTERN["to_test_type"] == "url":
# We are testing for URL.
if Check().is_url_valid():
# * The url is valid.
# We handle and return the valid status.
return SyntaxStatus(PyFunceble.STATUS["official"]["valid"]).handle()
else:
raise Exception("Unknow test type.")
# We handle and return the invalid status.
return SyntaxStatus(PyFunceble.STATUS["official"]["invalid"]).handle()
|
Format the old format so it can be merged into the newer format.
|
    def _reformat_historical_formating_error(self): # pragma: no cover
        """
        Format the old format so it can be merged into the newer format.

        The old `inactive-db.json` (when found next to the current
        directory) is converted in place and then deleted; its entries are
        back-dated by 30 days so that they get retested automatically.
        """

        if PyFunceble.CONFIGURATION["inactive_database"]:
            # The database subsystem is activated.

            # We construct the possible path to an older version of the database.
            historical_formating_error = (
                PyFunceble.CURRENT_DIRECTORY + "inactive-db.json"
            )

            if PyFunceble.path.isfile(historical_formating_error):
                # The historical file already exists.

                # We get its content.
                data = Dict().from_json(File(historical_formating_error).read())

                # We initiate a variable which will save the data that is going
                # to be merged.
                data_to_parse = {}

                # We get the database keybase (one key per tested file path).
                top_keys = data.keys()

                for top_key in top_keys:
                    # We loop through the list of upper keys.

                    # We get the lowest keys (timestamps or special indexes).
                    low_keys = data[top_key].keys()

                    # We initiate the data to parse.
                    data_to_parse[top_key] = {}

                    for low_key in low_keys:
                        # We loop through the list of lower keys.

                        if low_key.isdigit():
                            # The current low key is a digit (a timestamp).

                            # We parse its content (from the old) into the new format.
                            # In between, we remove 30 days from the low_key so that
                            # it becomes in the past. This way they will be retested
                            # automatically.
                            data_to_parse[top_key][
                                int(low_key) - (self.one_day_in_seconds * 30)
                            ] = data[top_key][low_key]
                        else:
                            # The current low key is not a digit.

                            # We parse its content (from the old) into the new format.
                            # In between, we remove 30 days from the current time so that
                            # it becomes in the past. This way they will be retested
                            # automatically.
                            data_to_parse[top_key][
                                int(PyFunceble.time()) - (self.one_day_in_seconds * 30)
                            ] = data[top_key][low_key]

                if "inactive_db" in PyFunceble.INTERN:
                    # The current (new) database is not empty.

                    # We merge the content of the old into the current database.
                    PyFunceble.INTERN["inactive_db"].update(data_to_parse)
                else:
                    # The current (new) database is empty.

                    # We replace the content with data_to_parse as it is compliant
                    # with the new format.
                    PyFunceble.INTERN["inactive_db"] = data_to_parse

                # We delete the old database file.
                File(historical_formating_error).delete()
|
Merge the real database with the older one which has already been set into :code:`PyFunceble.INTERN["inactive_db"]`.
|
    def _merge(self):
        """
        Merge the real database with the older one which
        has already been set into :code:`PyFunceble.INTERN["inactive_db"]`
        """

        if PyFunceble.CONFIGURATION["inactive_database"]:
            # The database subsystem is activated.

            # We get the content of the database file.
            database_content = Dict().from_json(File(self.inactive_db_path).read())

            # We get the database top keys (one per tested file path).
            database_top_keys = database_content.keys()

            for database_top_key in database_top_keys:
                # We loop through the list of database top keys.

                if database_top_key not in PyFunceble.INTERN["inactive_db"]:
                    # The currently read top key is not already into the database.

                    # We initiate the currently read key with the same key from
                    # our database file.
                    PyFunceble.INTERN["inactive_db"][
                        database_top_key
                    ] = database_content[database_top_key]
                else:
                    # The currently read top key is already into the database.

                    # We get the list of lower indexes (timestamps / `to_test`).
                    database_low_keys = database_content[database_top_key].keys()

                    for database_low_key in database_low_keys:
                        # We loop through the lower keys.

                        if (
                            database_low_key
                            not in PyFunceble.INTERN["inactive_db"][database_top_key]
                        ): # pragma: no cover
                            # The lower key is not already into the database.

                            # We initiate the currently read low and top key with the
                            # same combination from our database file.
                            PyFunceble.INTERN["inactive_db"][database_top_key][
                                database_low_key
                            ] = database_content[database_top_key][database_low_key]
                        else:
                            # The lower key is already into the database.

                            # We extend the currently read low and top key combination
                            # with the same combination from our database file.
                            PyFunceble.INTERN["inactive_db"][database_top_key][
                                database_low_key
                            ].extend(
                                database_content[database_top_key][database_low_key]
                            )

                            # And we format the list of elements to ensure that
                            # there is no duplicate into the database content.
                            PyFunceble.INTERN["inactive_db"][database_top_key][
                                database_low_key
                            ] = List(
                                PyFunceble.INTERN["inactive_db"][database_top_key][
                                    database_low_key
                                ]
                            ).format()
|
Return the current content of the inactive-db.json file.
|
def _retrieve(self):
"""
Return the current content of the inactive-db.json file.
"""
if PyFunceble.CONFIGURATION["inactive_database"]:
# The database subsystem is activated.
# We get, format and initiate the historical database file.
self._reformat_historical_formating_error()
if PyFunceble.path.isfile(self.inactive_db_path):
# The database file exist.
# We merge our current database into already initiated one.
self._merge()
|
Save the current database into the inactive-db.json file.
|
def _backup(self):
"""
Save the current database into the inactive-db.json file.
"""
if PyFunceble.CONFIGURATION["inactive_database"]:
# The database subsystem is activated.
# We save the current database state into the database file.
Dict(PyFunceble.INTERN["inactive_db"]).to_json(self.inactive_db_path)
|
Get the timestamp where we are going to save our current list.
|
def _timestamp(self):
"""
Get the timestamp where we are going to save our current list.
:return: The timestamp to append with the currently tested element.
:rtype: int|str
"""
if PyFunceble.CONFIGURATION["inactive_database"]:
# The database subsystem is activated.
if (
"inactive_db" in PyFunceble.INTERN
and PyFunceble.INTERN["file_to_test"]
in PyFunceble.INTERN["inactive_db"]
and PyFunceble.INTERN["inactive_db"][PyFunceble.INTERN["file_to_test"]]
):
# The file we are testing is into the database and its content
# is not empty.
# We get the indexes of the current file (in the dabase).
database_keys = [
x
for x in PyFunceble.INTERN["inactive_db"][
PyFunceble.INTERN["file_to_test"]
].keys()
if x.isdigit()
]
if database_keys:
# The list of keys is not empty.
# We get the most recent date.
recent_date = max(database_keys)
else: # pragma: no cover
# The list of keys is empty.
# We return the current time.
return int(PyFunceble.time())
if int(PyFunceble.time()) > int(recent_date) + self.one_day_in_seconds:
# The most recent time was in more than one day.
# We return the current time.
return int(PyFunceble.time())
# The most recent time was in less than one day.
if int(PyFunceble.time()) < int(recent_date) + self.days_in_seconds:
# The most recent time was in less than the expected number of day for
# retesting.
# We return the most recent data.
return int(recent_date)
# The database subsystem is not activated.
# We return the current time.
return int(PyFunceble.time())
|
Save the current :code:`PyFunceble.CONFIGURATION["to_test"]` into the current timestamp.
|
def add(self):
"""
Save the current :code.`PyFunceble.CONFIGURATION['to_test']`
into the current timestamp.
"""
if PyFunceble.CONFIGURATION["inactive_database"]:
# The database subsystem is activated.
# We get the timestamp to use as index.
timestamp = str(self._timestamp())
if (
"inactive_db" in PyFunceble.INTERN
and PyFunceble.INTERN["file_to_test"]
in PyFunceble.INTERN["inactive_db"]
):
# * The file path is not into the database.
if (
timestamp
in PyFunceble.INTERN["inactive_db"][
PyFunceble.INTERN["file_to_test"]
]
):
# The timetamp is already into the database related to the file we
# are testing.
if (
PyFunceble.INTERN["to_test"]
not in PyFunceble.INTERN["inactive_db"][
PyFunceble.INTERN["file_to_test"]
][timestamp]
):
# The currently tested element is not into the database related
# to the file we are testing.
# We append the currently tested element into the database.
PyFunceble.INTERN["inactive_db"][
PyFunceble.INTERN["file_to_test"]
][timestamp].append(PyFunceble.INTERN["to_test"])
else:
# The timetamp is not into the database related to the file we
# are testing.
# We append the index and the database element into the databse
# related to the file we are testing.
PyFunceble.INTERN["inactive_db"][
PyFunceble.INTERN["file_to_test"]
].update({timestamp: [PyFunceble.INTERN["to_test"]]})
if (
"to_test"
in PyFunceble.INTERN["inactive_db"][
PyFunceble.INTERN["file_to_test"]
]
and PyFunceble.INTERN["to_test"]
in PyFunceble.INTERN["inactive_db"][
PyFunceble.INTERN["file_to_test"]
]["to_test"]
):
# * The `to_test` index is into the database related to the file we
# are testing.
# and
# * The element we are testing is into the `to_test` index related to
# the file we are testing.
# We remove the element from the list of element to test.
PyFunceble.INTERN["inactive_db"][PyFunceble.INTERN["file_to_test"]][
"to_test"
].remove(PyFunceble.INTERN["to_test"])
else:
# The file path is not into the database.
# We initiate the file path and its content into the database.
PyFunceble.INTERN["inactive_db"] = {
PyFunceble.INTERN["file_to_test"]: {
timestamp: [PyFunceble.INTERN["to_test"]]
}
}
# And we save the data into the database.
self._backup()
|
Remove all occurrences of :code:`PyFunceble.CONFIGURATION["to_test"]` from the database.
|
def remove(self):
"""
Remove all occurence of :code:`PyFunceble.CONFIGURATION['to_test']`
from the database.
"""
if PyFunceble.CONFIGURATION["inactive_database"]:
# The database subsystem is activated.
if PyFunceble.INTERN["file_to_test"] in PyFunceble.INTERN["inactive_db"]:
# The file path is into the database.
for data in PyFunceble.INTERN["inactive_db"][
PyFunceble.INTERN["file_to_test"]
]:
# We loop through the index of the file database.
if (
PyFunceble.INTERN["to_test"]
in PyFunceble.INTERN["inactive_db"][
PyFunceble.INTERN["file_to_test"]
][data]
):
# The currently tested element into the currently read index.
# We remove the currently tested element from the read index.
PyFunceble.INTERN["inactive_db"][
PyFunceble.INTERN["file_to_test"]
][data].remove(PyFunceble.INTERN["to_test"])
# And we save the data into the database.
self._backup()
|
Get the content of the database.
|
def content(cls):
"""
Get the content of the database.
:return: The content of the database.
:rtype: list
"""
# We initiate a variable which will save what we are going to return.
result = []
if (
PyFunceble.CONFIGURATION["inactive_database"]
and PyFunceble.INTERN["inactive_db"]
):
# * The database subsystem is activated.
# and
# * The database is not empty.
for key in PyFunceble.INTERN["inactive_db"][
PyFunceble.INTERN["file_to_test"]
]:
# We loop through the index of the current file database.
if key == "to_test":
# The current key is `to_test`.
# We continue to the next element.
continue
# We extend the result with the content of the currently read index.
result.extend(
PyFunceble.INTERN["inactive_db"][PyFunceble.INTERN["file_to_test"]][
key
]
)
# We return the content of the database.
return result
|
Check if the currently tested element is into the database.
|
def is_present(cls):
"""
Check if the currently tested element is into the database.
"""
if PyFunceble.CONFIGURATION["inactive_database"]:
# The database subsystem is activated.
if PyFunceble.INTERN["to_test"] in PyFunceble.INTERN[
"flatten_inactive_db"
] or (
PyFunceble.INTERN["file_to_test"] in PyFunceble.INTERN["inactive_db"]
and PyFunceble.INTERN["inactive_db"][PyFunceble.INTERN["file_to_test"]]
and "to_test"
in PyFunceble.INTERN["inactive_db"][PyFunceble.INTERN["file_to_test"]]
and PyFunceble.INTERN["to_test"]
in PyFunceble.INTERN["inactive_db"][PyFunceble.INTERN["file_to_test"]][
"to_test"
]
):
return True
return False
|
Retrieve the data from the database.
|
def _retrieve(self):
"""
Retrieve the data from the database.
"""
if self._authorization() and "whois_db" not in PyFunceble.INTERN:
# The usage of the whois database is activated.
if PyFunceble.path.isfile(self.whois_db_path):
# The database file exist.
# We merge our current database into already initiated one.
PyFunceble.INTERN["whois_db"] = Dict().from_json(
File(self.whois_db_path).read()
)
else:
# The database file does not exist.
# We initiate an empty database.
PyFunceble.INTERN["whois_db"] = {}
|
Backup the database into its file.
|
def _backup(self):
"""
Backup the database into its file.
"""
if self._authorization():
# We are authorized to work.
# We backup the current state of the datbase.
Dict(PyFunceble.INTERN["whois_db"]).to_json(self.whois_db_path)
|
Check if the element is into the database.
|
def is_in_database(self):
"""
Check if the element is into the database.
"""
if (
self._authorization()
and PyFunceble.INTERN["file_to_test"] in PyFunceble.INTERN["whois_db"]
and PyFunceble.INTERN["to_test"]
in PyFunceble.INTERN["whois_db"][PyFunceble.INTERN["file_to_test"]]
):
# * We are authorized to work.
# and
# * The given file path exist in the database.
# and
# * The element we are testing is in the database related to the
# given file path.
# We return True, the element we are testing is into the database.
return True
# * We are not authorized to work.
# or
# * The given file path does not exist in the database.
# or
# * The element we are testing is not in the database related to the
# given file path.
# We return False,the element we are testing is not into the database.
return False
|
Check if the current time is older than the one in the database.
|
def is_time_older(self):
"""
Check if the current time is older than the one in the database.
"""
if (
self._authorization()
and self.is_in_database()
and int(
PyFunceble.INTERN["whois_db"][PyFunceble.INTERN["file_to_test"]][
PyFunceble.INTERN["to_test"]
]["epoch"]
)
< int(PyFunceble.time())
):
# * We are authorized to work.
# and
# * The element we are testing is in the database.
# and
# * The epoch of the expiration date is less than our current epoch.
# The expiration date is in the past, we return True.
return True
# The expiration date is in the future, we return False.
return False
|
Get the expiration date from the database.
|
def get_expiration_date(self):
"""
Get the expiration date from the database.
:return: The expiration date from the database.
:rtype: str|None
"""
if self._authorization() and self.is_in_database() and not self.is_time_older():
# * We are authorized to work.
# and
# * The element we are testing is in the database.
# and
# * The expiration date is in the future.
# We get the expiration date from the database.
result = PyFunceble.INTERN["whois_db"][PyFunceble.INTERN["file_to_test"]][
PyFunceble.INTERN["to_test"]
]["expiration_date"]
if result:
# The expiration date from the database is not empty nor
# equal to None.
# We return it.
return result
# We return None, there is no data to work with.
return None
|
Add the currently tested element into the database.
|
    def add(self):
        """
        Add the currently tested element into the database.

        The saved entry carries the epoch of the expiration date, a
        `past`/`future` state flag and the expiration date itself; the
        database is backed up to its file afterwards.
        """

        if self._authorization():
            # We are authorized to work.

            if self.epoch < int(PyFunceble.time()):
                # The expiration epoch is before the current time:
                # the expiration date is in the past.
                state = "past"
            else:
                # Otherwise, the expiration date is in the future.
                state = "future"

            if self.is_in_database():
                # The element we are working with is in the database.

                if (
                    str(self.epoch)
                    != PyFunceble.INTERN["whois_db"][PyFunceble.INTERN["file_to_test"]][
                        PyFunceble.INTERN["to_test"]
                    ]["epoch"]
                ):
                    # The given epoch is different from the one saved.

                    # We update the whole dataset.
                    PyFunceble.INTERN["whois_db"][PyFunceble.INTERN["file_to_test"]][
                        PyFunceble.INTERN["to_test"]
                    ].update(
                        {
                            "epoch": str(self.epoch),
                            "state": state,
                            "expiration_date": self.expiration_date,
                        }
                    )
                elif self.is_time_older():
                    # The expiration date from the database is in the past.

                    if (
                        PyFunceble.INTERN["whois_db"][
                            PyFunceble.INTERN["file_to_test"]
                        ][PyFunceble.INTERN["to_test"]]["state"]
                        != "past"
                    ): # pragma: no cover
                        # The state of the element in the database is not
                        # equal to `past`.

                        # We update it to `past`.
                        PyFunceble.INTERN["whois_db"][
                            PyFunceble.INTERN["file_to_test"]
                        ][PyFunceble.INTERN["to_test"]].update({"state": "past"})
                elif (
                    PyFunceble.INTERN["whois_db"][PyFunceble.INTERN["file_to_test"]][
                        PyFunceble.INTERN["to_test"]
                    ]["state"]
                    != "future"
                ):
                    # * The expiration date from the database is in the future.
                    # and
                    # * The state of the element in the database is not
                    # equal to `future`.

                    # We update it to `future`.
                    PyFunceble.INTERN["whois_db"][PyFunceble.INTERN["file_to_test"]][
                        PyFunceble.INTERN["to_test"]
                    ].update({"state": "future"})
            else:
                # The element we are working with is not in the database.

                if (
                    not PyFunceble.INTERN["file_to_test"]
                    in PyFunceble.INTERN["whois_db"]
                ):
                    # The file path is not in the database.

                    # We initiate it.
                    PyFunceble.INTERN["whois_db"][
                        PyFunceble.INTERN["file_to_test"]
                    ] = {}

                # We create the first dataset for the tested element.
                PyFunceble.INTERN["whois_db"][PyFunceble.INTERN["file_to_test"]].update(
                    {
                        PyFunceble.INTERN["to_test"]: {
                            "epoch": str(self.epoch),
                            "state": state,
                            "expiration_date": self.expiration_date,
                        }
                    }
                )

            # We do a safety backup of our database.
            self._backup()
|
Set permissions in order to avoid issues before committing.
|
def travis_permissions(cls):
"""
Set permissions in order to avoid issues before commiting.
"""
if PyFunceble.CONFIGURATION["travis"]:
try:
build_dir = PyFunceble.environ["TRAVIS_BUILD_DIR"]
commands = [
"sudo chown -R travis:travis %s" % (build_dir),
"sudo chgrp -R travis %s" % (build_dir),
"sudo chmod -R g+rwX %s" % (build_dir),
"sudo chmod 777 -Rf %s.git"
% (build_dir + PyFunceble.directory_separator),
r"sudo find %s -type d -exec chmod g+x '{}' \;" % (build_dir),
]
for command in commands:
Command(command).execute()
if Command("git config core.sharedRepository").execute() == "":
Command("git config core.sharedRepository group").execute()
except KeyError:
pass
|
Logic behind autosave under Travis CI.
|
    def _travis(self):
        """
        Logic behind autosave under Travis CI.

        When the autosave delay is expired (or we are at the end / bypassing),
        the current state is committed, pushed to the configured branch and
        the process exits so the next build can continue the test.
        """

        if PyFunceble.CONFIGURATION["travis"]:
            # We are authorized to commit/push under Travis CI.

            try:
                # Raises the (outer) KeyError when we are not actually running
                # inside a Travis CI container.
                _ = PyFunceble.environ["TRAVIS_BUILD_DIR"]

                # Will become True once enough time has passed since the start
                # of the test to justify an intermediate commit.
                time_autorisation = False

                try:
                    time_autorisation = int(PyFunceble.time()) >= int(
                        PyFunceble.INTERN["start"]
                    ) + (int(PyFunceble.CONFIGURATION["travis_autosave_minutes"]) * 60)
                except KeyError:
                    # The start time is unknown; that is only acceptable while
                    # bypassing or before the final save.
                    if self.last and not self.bypass:
                        raise Exception(
                            "Please review the way `ExecutionTime()` is called."
                        )

                if self.last or time_autorisation or self.bypass:
                    # We reached the end of the test, the autosave delay is
                    # expired or a bypass was requested: we have to commit.

                    # We log the current percentage state.
                    Percentage().log()
                    # We fix the file permissions so git can work with them.
                    self.travis_permissions()

                    # The commit command; `%s` is replaced with the message.
                    command = 'git add --all && git commit -a -m "%s"'

                    if self.last or self.bypass:
                        # This is the final commit.

                        if PyFunceble.CONFIGURATION["command_before_end"]:
                            # A command to run before the final commit was
                            # given: we execute it and print its output.
                            for line in Command(
                                PyFunceble.CONFIGURATION["command_before_end"]
                            ).run():
                                sys_stdout.write("{}\n".format(line))

                            self.travis_permissions()

                        # `[ci skip]` avoids triggering a new build from the
                        # final commit.
                        message = (
                            PyFunceble.CONFIGURATION["travis_autosave_final_commit"]
                            + " [ci skip]"
                        )

                        Command(command % message).execute()
                    else:
                        # This is an intermediate (autosave) commit.

                        if PyFunceble.CONFIGURATION["command"]:
                            # A command to run before each autosave commit was
                            # given: we execute it and print its output.
                            for line in Command(
                                PyFunceble.CONFIGURATION["command"]
                            ).run():
                                sys_stdout.write("{}\n".format(line))

                            self.travis_permissions()

                        Command(
                            command % PyFunceble.CONFIGURATION["travis_autosave_commit"]
                        ).execute()

                    # We push to the configured branch and print git's output.
                    print(
                        Command(
                            "git push origin %s"
                            % PyFunceble.CONFIGURATION["travis_branch"]
                        ).execute()
                    )

                    # We stop the process; the build triggered by the push
                    # will continue the test.
                    exit(0)
            except KeyError:
                pass
|
Implementation of UNIX nslookup.
|
    def nslookup(cls):
        """
        Implementation of UNIX nslookup.

        :return:
            True if the address information could be requested without
            error, False otherwise.
        :rtype: bool
        """

        try:
            # We try to get the address information of the given domain or IP.

            if "current_test_data" in PyFunceble.INTERN: # pragma: no cover
                # The end-user wants more information with his test: the
                # lookup results are also collected.

                if not Check().is_ip_valid():
                    # The element we are testing is not an IP.

                    # We request the address informations.
                    request = PyFunceble.socket.getaddrinfo(
                        PyFunceble.INTERN["to_test"],
                        80,
                        0,
                        0,
                        PyFunceble.socket.IPPROTO_TCP,
                    )

                    for sequence in request:
                        # We loop through the sequence returned by the request.

                        # We append the resolved address (first member of the
                        # sockaddr tuple) into the nslookup index.
                        PyFunceble.INTERN["current_test_data"]["nslookup"].append(
                            sequence[-1][0]
                        )
                else:
                    # The element we are testing is an IP: we do a reverse
                    # lookup instead.

                    request = PyFunceble.socket.gethostbyaddr(
                        PyFunceble.INTERN["to_test"]
                    )

                    # We save the reverse lookup result (hostname, aliases,
                    # addresses) into the nslookup index.
                    PyFunceble.INTERN["current_test_data"]["nslookup"][
                        "hostname"
                    ] = request[0]
                    PyFunceble.INTERN["current_test_data"]["nslookup"][
                        "aliases"
                    ] = request[1]
                    PyFunceble.INTERN["current_test_data"]["nslookup"]["ips"] = request[
                        2
                    ]
            else:
                if not Check().is_ip_valid():
                    # The element we are testing is not an IP.

                    PyFunceble.socket.getaddrinfo(
                        PyFunceble.INTERN["to_test"],
                        80,
                        0,
                        0,
                        PyFunceble.socket.IPPROTO_TCP,
                    )
                else:
                    # The element we are testing is an IP.

                    PyFunceble.socket.gethostbyaddr(PyFunceble.INTERN["to_test"])

            # It was done successfully, we return True.
            # Note: we don't need to read the addresses, so we consider the
            # lookup successful as long as there is no error.
            return True
        except (OSError, PyFunceble.socket.herror, PyFunceble.socket.gaierror):
            # One of the listed exceptions was raised: the lookup failed.
            # We return False.
            return False
|
Implementation of UNIX whois.
|
def whois(cls, whois_server, domain=None, timeout=None):  # pragma: no cover
    """
    Implementation of UNIX whois.

    :param whois_server: The WHOIS server to use to get the record.
    :type whois_server: str

    :param domain: The domain to get the whois record from.
    :type domain: str

    :param timeout: The timeout to apply to the request.
    :type timeout: int

    :return: The whois record from the given whois server, if exist.
    :rtype: str|None
    """
    if domain is None:
        # No domain was given locally: fall back to the subject
        # currently under test.
        domain = PyFunceble.INTERN["to_test"]

    if timeout is None:
        # No timeout was given locally: fall back to the configured one.
        timeout = PyFunceble.CONFIGURATION["seconds_before_http_timeout"]

    if not whois_server:
        # No whois server was given: nothing to query.
        return None

    # We initiate the socket we will talk to the whois server with.
    req = PyFunceble.socket.socket(
        PyFunceble.socket.AF_INET, PyFunceble.socket.SOCK_STREAM
    )

    if timeout % 3 == 0:
        # The timeout is a multiple of 3: honor it as-is.
        req.settimeout(timeout)
    else:
        # Otherwise normalize the timeout to 3 seconds.
        req.settimeout(3)

    try:
        # WHOIS servers listen on TCP port 43.
        req.connect((whois_server, 43))
    except PyFunceble.socket.error:
        # Bug fix: close the socket on connection failure instead of
        # leaking the file descriptor.
        req.close()
        return None

    # We send the query, terminated by CRLF per the WHOIS protocol.
    req.send((domain + "\r\n").encode())

    # Accumulator for the raw response bytes.
    response = b""

    while True:
        try:
            # We receive the data in a buffer of 4096 bytes.
            data = req.recv(4096)
        except (PyFunceble.socket.timeout, ConnectionResetError):
            # The server timed out or reset the connection:
            # clean up and give up.
            req.close()
            return None

        response += data

        if not data:
            # An empty read means the server closed the connection.
            break

    # We are done talking to the server.
    req.close()

    try:
        # We finally decode and return the record.
        return response.decode()
    except UnicodeDecodeError:
        # Some servers answer with non UTF-8 encoded bytes; we replace
        # the undecodable characters instead of failing.
        return response.decode("utf-8", "replace")
|
Execute the logic behind the URL handling.
|
def get(cls):  # pragma: no cover
    """
    Execute the logic behind the URL handling.

    :return: The status of the URL.
    :rtype: str
    """
    if Check().is_url_valid() or PyFunceble.CONFIGURATION["local"]:
        # The URL is syntactically valid, or we are testing in/for a
        # local or private network.
        if "current_test_data" in PyFunceble.INTERN:
            PyFunceble.INTERN["current_test_data"]["url_syntax_validation"] = True

        # We fetch and store the HTTP status code of the URL.
        PyFunceble.INTERN.update({"http_code": HTTPCode().get()})

        # Status codes which make us consider the URL as active.
        up_codes = [
            *PyFunceble.HTTP_CODE["list"]["potentially_up"],
            *PyFunceble.HTTP_CODE["list"]["up"],
        ]
        # Status codes which make us consider the URL as inactive;
        # "***" is the placeholder used when no code could be extracted.
        down_codes = [*PyFunceble.HTTP_CODE["list"]["potentially_down"], "*" * 3]

        status_code = PyFunceble.INTERN["http_code"]

        if status_code in up_codes:
            # The extracted code belongs to the active codes:
            # handle and return the up status.
            return URLStatus(PyFunceble.STATUS["official"]["up"]).handle()

        if status_code in down_codes:
            # The extracted code belongs to the inactive codes:
            # handle and return the down status.
            return URLStatus(PyFunceble.STATUS["official"]["down"]).handle()

    # Either the URL is invalid, or the extracted code belongs to
    # neither list.
    if "current_test_data" in PyFunceble.INTERN:
        # The end-user asked for more information with their test:
        # record the syntax validation result.
        PyFunceble.INTERN["current_test_data"]["url_syntax_validation"] = False

    # We handle and return the invalid status.
    return URLStatus(PyFunceble.STATUS["official"]["invalid"]).handle()
|
This function will extract the version from PyFunceble/__init__.py
|
def _get_version():
    """
    Extract the version from PyFunceble/__init__.py.

    Reads the module source, captures the value of the ``VERSION``
    assignment, and keeps only the numeric dot-separated components
    (e.g. ``"1.2.3. Something"`` becomes ``"1.2.3"``).

    :return: The dotted numeric version string.
    :rtype: str
    """
    to_match = comp(r'VERSION\s=\s"(.*)"\n')

    # Bug fix: use a context manager so the file handle is always
    # closed (the previous implementation leaked it).
    with open("PyFunceble/__init__.py", encoding="utf-8") as file_stream:
        extracted = to_match.findall(file_stream.read())[0]

    return ".".join([x for x in extracted.split(".") if x.isdigit()])
|
Return the referer aka the WHOIS server of the current domain extension.
|
def get(self):
    """
    Return the referer aka the WHOIS server of the current domain extension.
    """
    if PyFunceble.CONFIGURATION["local"]:
        # We are running a test in a local network: no referer applies.
        return None

    if self.domain_extension in self.ignored_extension:
        # The extension is explicitly ignored: it has no whois server.
        return None

    if self.domain_extension not in PyFunceble.INTERN["iana_db"]:
        # The extension is unknown to the IANA database:
        # the domain is invalid.
        return False

    if PyFunceble.CONFIGURATION["no_whois"]:
        # We are not authorized to use WHOIS for the test result.
        return None

    # We get the referer from the IANA database.
    referer = PyFunceble.INTERN["iana_db"][self.domain_extension]

    if not referer:
        # The database entry is empty: log the extension and give up.
        Logs().referer_not_found(self.domain_extension)
        return None

    # The referer is filled: return it.
    return referer
|
register servicer
|
def _register_servicer(self, servicer):
    """Register a servicer, keyed by its class name.

    :param servicer: the servicer class to register
    :raises exceptions.ConfigException: when a servicer with the same
        name has already been registered.
    """
    key = servicer.__name__
    if key in self._servicers:
        raise exceptions.ConfigException(
            'servicer duplicated: {}'.format(key))
    self._servicers[key] = (self._get_servicer_add_func(servicer), servicer)
|
register extension
|
def _register_extension(self, name, ext):
    """Register an extension under *name* and initialize it.

    :param name: extension name
    :param ext: extension object
    :raises exceptions.ConfigException: when an extension with the same
        name has already been registered.
    """
    # Bug fix: check for duplicates BEFORE calling init_app. Previously
    # a duplicated extension was initialized against the app and only
    # then rejected, leaving the app mutated by a rejected extension.
    if name in self._extensions:
        raise exceptions.ConfigException(
            'extension duplicated: {}'.format(name))
    ext.init_app(self)
    self._extensions[name] = ext
|
Get current object. This is useful if you want the real object behind the proxy at a time for performance reasons or because you want to pass the object into a different context.
|
def _get_current_object(self):
    """Get current object.
    This is useful if you want the real
    object behind the proxy at a time for performance reasons or because
    you want to pass the object into a different context.
    """
    # `_Proxy__local` is the name-mangled form of a `__local` attribute;
    # object.__getattribute__ bypasses the proxy's own attribute
    # interception so we read the raw slot, not a proxied lookup.
    loc = object.__getattribute__(self, '_Proxy__local')
    if not hasattr(loc, '__release_local__'):
        # `loc` does not look like a werkzeug-style Local (no
        # __release_local__), so it is treated as a factory callable.
        # NOTE(review): assumes `self.__args` / `self.__kwargs` were set
        # by the enclosing proxy class's constructor — confirm there.
        return loc(*self.__args, **self.__kwargs)
    try:  # pragma: no cover
        # not sure what this is about
        return getattr(loc, self.__name__)
    except AttributeError:  # pragma: no cover
        raise RuntimeError('no object bound to {0.__name__}'.format(self))
|
Yield paths to standard modules.
|
def standard_paths():
    """Yield paths to standard modules.

    Walks both the platform-specific and platform-independent standard
    library directories, plus their ``lib-dynload`` subdirectory when it
    exists.
    """
    for plat_specific in (True, False):
        root = distutils.sysconfig.get_python_lib(standard_lib=True,
                                                  plat_specific=plat_specific)
        for entry in os.listdir(root):
            yield entry

        try:
            # Compiled extension modules live in lib-dynload; the
            # directory may be absent on some installations.
            for entry in os.listdir(os.path.join(root, 'lib-dynload')):
                yield entry
        except OSError:  # pragma: no cover
            pass
|
Yield standard module names.
|
def standard_package_names():
    """Yield standard module names.

    Filters out private entries, dashed distribution names, and files
    whose extension is not a recognized module suffix.
    """
    allowed_suffixes = ('so', 'py', 'pyc')
    for entry in standard_paths():
        if entry.startswith('_') or '-' in entry:
            continue
        if '.' in entry and entry.rsplit('.')[-1] not in allowed_suffixes:
            continue
        yield entry.split('.')[0]
|
Yield line numbers of unused imports.
|
def unused_import_line_numbers(messages):
    """Yield line numbers of unused imports."""
    yield from (msg.lineno
                for msg in messages
                if isinstance(msg, pyflakes.messages.UnusedImport))
|
Yield line number and module name of unused imports.
|
def unused_import_module_name(messages):
    """Yield (line number, module name) pairs of unused imports."""
    # The module name is quoted inside the pyflakes message text.
    pattern = r'\'(.+?)\''
    for msg in messages:
        if not isinstance(msg, pyflakes.messages.UnusedImport):
            continue
        match = re.search(pattern, str(msg))
        # Strip the surrounding quotes from the matched text.
        name = match.group()[1:-1]
        if name:
            yield (msg.lineno, name)
|
Yield line number of star import usage.
|
def star_import_used_line_numbers(messages):
    """Yield line number of star import usage."""
    yield from (msg.lineno
                for msg in messages
                if isinstance(msg, pyflakes.messages.ImportStarUsed))
|
Yield line number, undefined name, and its possible origin module.
|
def star_import_usage_undefined_name(messages):
    """Yield line number, undefined name, and its possible origin module."""
    for msg in messages:
        if not isinstance(msg, pyflakes.messages.ImportStarUsage):
            continue
        # message_args holds (undefined name, origin module).
        yield (msg.lineno, msg.message_args[0], msg.message_args[1])
|
Yield line numbers of unused variables.
|
def unused_variable_line_numbers(messages):
    """Yield line numbers of unused variables."""
    yield from (msg.lineno
                for msg in messages
                if isinstance(msg, pyflakes.messages.UnusedVariable))
|
Yield line numbers of duplicate keys.
|
def duplicate_key_line_numbers(messages, source):
    """Yield line numbers of duplicate keys."""
    duplicates = [
        msg for msg in messages
        if isinstance(msg, pyflakes.messages.MultiValueRepeatedKeyLiteral)]

    if duplicates:
        # Filter out complex cases. We don't want to bother trying to parse
        # this stuff and get it right. We can do it on a key-by-key basis.
        grouped = create_key_to_messages_dict(duplicates)
        lines = source.split('\n')

        for group in grouped.values():
            # Every occurrence of the key must look like a simple dict
            # entry on its line; otherwise the whole group is skipped.
            checks = [
                dict_entry_has_key(lines[msg.lineno - 1], msg.message_args[0])
                for msg in group]
            if all(checks):
                for msg in group:
                    yield msg.lineno
|
Return dict mapping the key to list of messages.
|
def create_key_to_messages_dict(messages):
    """Return dict mapping the key to list of messages.

    :param messages: message objects whose ``message_args[0]`` is the
        duplicated key.
    :return: mapping of key -> list of messages mentioning that key.
    """
    # Idiom: `list` itself is the default factory; no lambda needed.
    dictionary = collections.defaultdict(list)
    for message in messages:
        dictionary[message.message_args[0]].append(message)
    return dictionary
|
Return messages from pyflakes.
|
def check(source):
    """Return messages from pyflakes."""
    if sys.version_info[0] == 2:
        if isinstance(source, unicode):
            # Convert back to original byte string encoding, otherwise
            # pyflakes' call to compile() will complain. See PEP 263.
            # This only affects Python 2.
            try:
                source = source.encode('utf-8')
            except UnicodeError:  # pragma: no cover
                return []

    collector = ListReporter()
    try:
        pyflakes.api.check(source, filename='<string>', reporter=collector)
    except (AttributeError, RecursionError, UnicodeDecodeError):
        # pyflakes can blow up on pathological input; in that case we
        # simply report whatever was collected so far.
        pass
    return collector.messages
|
Return package name in import statement.
|
def extract_package_name(line):
    """Return package name in import statement."""
    # These characters would indicate a multi-part or compound
    # statement, which this helper does not support.
    for forbidden in ('\\', '(', ')', ';'):
        assert forbidden not in line

    if not line.lstrip().startswith(('import', 'from')):
        # Ignore doctests.
        return None

    # The second token is the dotted module path; keep its root only.
    word = line.split()[1]
    package = word.split('.')[0]
    assert ' ' not in package
    return package
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.