INSTRUCTION
stringlengths
1
8.43k
RESPONSE
stringlengths
75
104k
Creates a new price record
def add_price(self, price: PriceModel):
    """ Creates a new price record.

    Maps the given model to a DAL entity and delegates to
    `add_price_entity`, which handles insert-or-update.
    Raises ValueError when the model is falsy/None.
    """
    if not price:
        raise ValueError("Cannot add price. The received model is null!")

    entity = mappers.PriceMapper().map_model(price)
    self.add_price_entity(entity)
Adds the price
def add_price_entity(self, price: dal.Price):
    """ Adds the price entity, updating the existing row if one matches.

    A price is considered "existing" when a row with the same namespace,
    symbol, date and time is already in the database. On update, the
    currency must match; value and denominator are synced.
    """
    from decimal import Decimal

    # check if the price already exists in db.
    repo = self.get_price_repository()
    existing = (
        repo.query
        .filter(dal.Price.namespace == price.namespace)
        .filter(dal.Price.symbol == price.symbol)
        .filter(dal.Price.date == price.date)
        .filter(dal.Price.time == price.time)
        .first()
    )
    if existing:
        # Update existing price.
        self.logger.info(f"Exists: {price}")
        if price.currency != existing.currency:
            raise ValueError(
                f"The currency is different for price {price}!")
        if existing.value != price.value:
            # Compute the human-readable quote only when actually updating;
            # the original computed it unconditionally, which wasted work and
            # could raise ZeroDivisionError on denom == 0 even with no change.
            new_value = Decimal(price.value) / Decimal(price.denom)
            existing.value = price.value
            self.logger.info(f"Updating to {new_value}.")
        if existing.denom != price.denom:
            existing.denom = price.denom
    else:
        # Insert new price
        self.session.add(price)
        self.logger.info(f"Added {price}")
Download and save price online
def download_price(self, symbol: str, currency: str, agent: str) -> PriceModel:
    """ Download a price online and persist it immediately. """
    downloaded = self.__download_price(symbol, currency, agent)
    self.save()
    return downloaded
Downloads all the prices that are listed in the Security table. Accepts filter arguments: currency agent symbol namespace.
def download_prices(self, **kwargs):
    """ Downloads all the prices that are listed in the Security table.

    Accepts filter arguments: currency, agent, symbol, namespace.
    Each filter is upper-cased before matching; download failures are
    logged and do not abort the run. Changes are saved at the end.
    """
    def _upper_or_none(key):
        value = kwargs.get(key, None)
        return value.upper() if value else value

    currency = _upper_or_none('currency')
    agent = _upper_or_none('agent')
    symbol = _upper_or_none('symbol')
    namespace = _upper_or_none('namespace')

    securities = self.__get_securities(currency, agent, symbol, namespace)

    for sec in securities:
        full_symbol = f"{sec.namespace}:{sec.symbol}"
        try:
            self.__download_price(full_symbol.strip(), sec.currency, sec.updater)
        except Exception as e:
            # best-effort: keep going on per-security failures
            self.logger.error(str(e))

    self.save()
Incomplete
def import_prices(self, file_path: str, currency_symbol: str):
    """ Import prices from a CSV file. (Marked "Incomplete" by the author.) """
    from .csv import CsvParser

    assert isinstance(file_path, str)
    assert isinstance(currency_symbol, str)

    self.logger.debug(f"Importing {file_path}")

    prices = CsvParser().parse_file(file_path, currency_symbol)

    # Insert/update each parsed record.
    mapper = mappers.PriceMapper()
    counter = 0
    for model in prices:
        self.add_price_entity(mapper.map_model(model))
        counter += 1

    # Save all to database
    self.session.commit()
    print(f"{counter} records inserted.")
Returns the current db session
def session(self):
    """ Return the current db session, creating the default one on first access. """
    current = self.__session
    if not current:
        current = dal.get_default_session()
        self.__session = current
    return current
Fetches all the prices for the given arguments
def get_prices(self, date: str, currency: str) -> List[PriceModel]:
    """ Fetches all the prices matching the given (optional) filters,
    ordered by namespace and symbol, mapped to models. """
    from .repositories import PriceRepository

    query = PriceRepository(self.session).query
    if date:
        query = query.filter(dal.Price.date == date)
    if currency:
        query = query.filter(dal.Price.currency == currency)
    # Sort by symbol.
    query = query.order_by(dal.Price.namespace, dal.Price.symbol)

    mapper = mappers.PriceMapper()
    return [mapper.map_entity(entity) for entity in query.all()]
Returns the latest price on the date
def get_prices_on(self, on_date: str, namespace: str, symbol: str):
    """ Returns the latest price on the given date (latest time wins). """
    repo = self.get_price_repository()
    latest = (
        repo.query
        .filter(dal.Price.namespace == namespace)
        .filter(dal.Price.symbol == symbol)
        .filter(dal.Price.date == on_date)
        .order_by(dal.Price.time.desc())
        .first()
    )
    return latest
Price repository
def get_price_repository(self):
    """ Price repository, created lazily and cached on the instance. """
    from .repositories import PriceRepository

    repo = self.price_repo
    if not repo:
        repo = PriceRepository(self.session)
        self.price_repo = repo
    return repo
Security repository
def get_security_repository(self):
    """ Security repository, created lazily and cached on the instance. """
    from .repositories import SecurityRepository

    repo = self.security_repo
    if not repo:
        repo = SecurityRepository(self.session)
        self.security_repo = repo
    return repo
Prune historical prices for all symbols leaving only the latest. Returns the number of items removed.
def prune_all(self) -> int:
    """ Prune historical prices for all symbols, leaving only the latest.

    Returns the number of *symbols* for which at least one old price was
    removed (not the number of rows deleted — `prune()`'s result is only
    truth-tested here).
    """
    from .repositories import PriceRepository

    # get all symbols that have prices
    repo = PriceRepository()
    items = repo.query.distinct(dal.Price.namespace, dal.Price.symbol).all()
    # self.logger.debug(items)

    count = 0
    for item in items:
        symbol = SecuritySymbol(item.namespace, item.symbol)
        deleted = self.prune(symbol)
        if deleted:
            count += 1
    return count
Delete all but the latest available price for the given symbol. Returns the number of items removed.
def prune(self, symbol: SecuritySymbol):
    """ Delete all but the latest available price for the given symbol.

    Returns the number of price rows removed (0 when nothing was deleted).
    Backward compatible with the previous boolean return: callers that only
    truth-test the result (e.g. `prune_all`) behave identically.
    """
    from .repositories import PriceRepository

    assert isinstance(symbol, SecuritySymbol)

    self.logger.debug(f"pruning prices for {symbol}")

    repo = PriceRepository()
    query = (
        repo.query.filter(dal.Price.namespace == symbol.namespace)
        .filter(dal.Price.symbol == symbol.mnemonic)
        .order_by(dal.Price.date.desc())
        .order_by(dal.Price.time.desc())
    )
    all_prices = query.all()

    # Keep the first row (latest by date, then time); delete the rest.
    deleted_count = 0
    for single in all_prices[1:]:
        repo.query.filter(dal.Price.id == single.id).delete()
        deleted_count += 1
        self.logger.debug(f"deleting {single.id}")

    repo.save()
    return deleted_count
Save changes
def save(self):
    """ Commit pending changes when a session is open; warn otherwise. """
    if not self.__session:
        self.logger.warning("Save called but no session open.")
        return
    self.session.commit()
Downloads and parses the price
def __download_price(self, symbol: str, currency: str, agent: str):
    """ Download a quote via finance_quote_python and store it.

    Returns the parsed price model (also passed to `add_price`), or None
    when the symbol is empty. Raises ValueError when nothing was fetched.
    """
    from finance_quote_python import Quote

    assert isinstance(symbol, str)
    assert isinstance(currency, str)
    assert isinstance(agent, str)

    if not symbol:
        return None

    downloader = Quote()
    downloader.logger = self.logger
    downloader.set_source(agent)
    downloader.set_currency(currency)
    result = downloader.fetch(agent, [symbol])
    if not result:
        raise ValueError(f"Did not receive a response for {symbol}.")

    price = result[0]
    if not price:
        raise ValueError(f"Price not downloaded/parsed for {symbol}.")

    # Create price data entity, to be inserted.
    self.add_price(price)
    return price
Fetches the securities that match the given filters
def __get_securities(self, currency: str, agent: str, symbol: str,
                     namespace: str) -> List[dal.Security]:
    """ Fetches the securities that match the given filters.

    A filter is applied only when its value is not None; results are
    ordered by namespace, then symbol.
    """
    query = self.get_security_repository().query

    criteria = (
        (dal.Security.currency, currency),
        (dal.Security.updater, agent),
        (dal.Security.symbol, symbol),
        (dal.Security.namespace, namespace),
    )
    for column, value in criteria:
        if value is not None:
            query = query.filter(column == value)

    # Sorting
    query = query.order_by(dal.Security.namespace, dal.Security.symbol)
    return query.all()
Return partial of original function call
def partial(self):
    """Return partial of original function call"""
    bound = self.data["bound_args"]
    # The first positional argument is dropped — presumably the receiver/state
    # object that gets re-supplied later; confirm against state_partial usage.
    return state_partial(self.data["func"], *bound.args[1:], **bound.kwargs)
Replace child nodes on original function call with their partials
def update_child_calls(self):
    """Replace child nodes on original function call with their partials"""
    arguments = self.data["bound_args"].arguments
    for node in self.child_list:
        # only children bound to a named argument participate
        if len(node.arg_name):
            arguments[node.arg_name] = node.partial()
    self.updated = True
Descend depth first into all child nodes
def descend(self, include_me=True):
    """Descend depth first into all child nodes.

    Yields each node exactly once, in depth-first pre-order. When
    ``include_me`` is True (default) the node itself is yielded first.

    BUG FIX: the original yielded ``child`` and then delegated to
    ``child.descend()`` (whose default also yields the child), so every
    node below the root appeared twice in the traversal.
    """
    if include_me:
        yield self
    for child in self.child_list:
        # child.descend() yields the child itself, so no separate yield here.
        yield from child.descend()
Decorator for multi to remove nodes for original test functions from root node
def multi_dec(f):
    """Decorator for multi to remove nodes for original test functions from root node"""

    @wraps(f)
    def wrapper(*args, **kwargs):
        # Allow calling with a single list/tuple instead of varargs.
        args = (
            args[0] if len(args) == 1 and isinstance(args[0], (list, tuple)) else args
        )
        for arg in args:
            # BUG FIX: compare string content with `==`, not identity (`is`).
            # `name is "root"` only worked via CPython string interning and
            # emits SyntaxWarning on modern Python.
            if isinstance(arg, Node) and arg.parent.name == "root":
                arg.parent.remove_child(arg)
                arg.update_child_calls()
        return f(*args, **kwargs)

    return wrapper
Verify that a part that is zoomed in on has equal length.
def has_equal_part_len(state, name, unequal_msg):
    """Verify that a part that is zoomed in on has equal length.

    Typically used in the context of ``check_function_def()``

    Args:
        name (str): name of the part whose length is compared to the
            corresponding part in the solution.
        unequal_msg (str): message in case the lengths do not match.
        state (State): state as passed by the SCT chain. Don't specify
            this explicitly.

    :Example:

        Ex().check_function_def('shout').has_equal_part_len('args', 'not enough args!')
    """
    fmt = dict(
        stu_len=len(state.student_parts[name]),
        sol_len=len(state.solution_parts[name]),
    )
    if fmt["stu_len"] != fmt["sol_len"]:
        message = state.build_message(unequal_msg, fmt)
        state.report(Feedback(message, state))
    return state
Test whether abstract syntax trees match between the student and solution code.
def has_equal_ast(state, incorrect_msg=None, code=None, exact=True, append=None):
    """Test whether abstract syntax trees match between the student and solution code.

    ``has_equal_ast()`` can be used in two ways:

    * As a robust version of ``has_code()``. By setting ``code``, you can look
      for the AST representation of ``code`` in the student's submission.
      Be aware that ``a`` and ``a = 1`` won't match, as reading and assigning
      are different AST nodes.
    * As an expression-based check in a more advanced SCT chain, e.g. to
      compare the equality of expressions used to set function arguments.

    Args:
        incorrect_msg: message displayed when ASTs mismatch. When you specify
            ``code`` yourself, you have to specify this.
        code: optional code to use instead of the solution AST.
        exact: whether the representations must match exactly. If False, the
            solution AST only needs to be contained within the student AST.
            Defaults to ``True``.
        append: whether to append the message to preceding chain messages;
            defaults to True only when ``incorrect_msg`` was not customized.
    """
    if utils.v2_only():
        # these combinations are disallowed in SCT v2 semantics
        state.assert_is_not(["object_assignments"], "has_equal_ast", ["check_object"])
        state.assert_is_not(["function_calls"], "has_equal_ast", ["check_function"])

    if code and incorrect_msg is None:
        raise InstructorError(
            "If you manually specify the code to match inside has_equal_ast(), "
            "you have to explicitly set the `incorrect_msg` argument."
        )

    if (
        append is None
    ):  # if not specified, set to False if incorrect_msg was manually specified
        append = incorrect_msg is None

    if incorrect_msg is None:
        incorrect_msg = "Expected `{{sol_str}}`, but got `{{stu_str}}`."

    def parse_tree(tree):
        # get contents of module.body if only 1 element
        crnt = (
            tree.body[0]
            if isinstance(tree, ast.Module) and len(tree.body) == 1
            else tree
        )
        # remove Expr if it exists, so `x` and module-wrapped `x` compare equal
        return ast.dump(crnt.value if isinstance(crnt, ast.Expr) else crnt)

    stu_rep = parse_tree(state.student_ast)
    sol_rep = parse_tree(state.solution_ast if not code else ast.parse(code))

    fmt_kwargs = {
        "sol_str": state.solution_code if not code else code,
        "stu_str": state.student_code,
    }

    _msg = state.build_message(incorrect_msg, fmt_kwargs, append=append)

    if exact and not code:
        # exact match of dumped ASTs
        state.do_test(EqualTest(stu_rep, sol_rep, Feedback(_msg, state)))
    elif not sol_rep in stu_rep:
        # containment check on the dumped-string representation
        state.report(Feedback(_msg, state))

    return state
Test the student code.
def has_code(state, text, pattern=True, not_typed_msg=None):
    """Test the student code.

    Tests if the student typed a (pattern of) text. It is advised to use
    ``has_equal_ast()`` instead, as it is more robust to small syntactical
    differences that don't change the code's behavior.

    Args:
        text (str): the text that is searched for
        pattern (bool): if True (the default), the text is treated as a
            regex pattern; if False, as plain text.
        not_typed_msg (str): feedback message if the text was not found.
    """
    if not not_typed_msg:
        not_typed_msg = (
            "Could not find the correct pattern in your code."
            if pattern
            else "Could not find the following text in your code: %r" % text
        )

    message = state.build_message(not_typed_msg)
    state.do_test(
        StringContainsTest(state.student_code, text, pattern, Feedback(message, state))
    )
    return state
Checks whether student imported a package or function correctly.
def has_import(
    state,
    name,
    same_as=False,
    not_imported_msg="Did you import `{{pkg}}`?",
    incorrect_as_msg="Did you import `{{pkg}}` as `{{alias}}`?",
):
    """Checks whether student imported a package or function correctly.

    By default any alias is accepted; set ``same_as=True`` to require the
    exact alias used in the solution.

    Args:
        name (str): the name of the package that has to be checked.
        same_as (bool): if True, the alias of the package or function has to
            be the same. Defaults to False.
        not_imported_msg (str): feedback message when the package is not imported.
        incorrect_as_msg (str): feedback message if the alias is wrong.
    """
    student_imports = state.ast_dispatcher("imports", state.student_ast)
    solution_imports = state.ast_dispatcher("imports", state.solution_ast)

    if name not in solution_imports:
        raise InstructorError(
            "`has_import()` couldn't find an import of the package %s in your solution code."
            % name
        )

    sol_alias = solution_imports[name]
    fmt_kwargs = {"pkg": name, "alias": sol_alias}

    state.do_test(
        DefinedCollTest(
            name, student_imports, state.build_message(not_imported_msg, fmt_kwargs)
        )
    )

    if same_as:
        state.do_test(
            EqualTest(
                sol_alias,
                student_imports[name],
                state.build_message(incorrect_as_msg, fmt_kwargs),
            )
        )

    return state
Search student output for a pattern.
def has_output(state, text, pattern=True, no_output_msg=None):
    """Search student output for a pattern.

    Matches ``text`` (a regex by default, plain text when ``pattern=False``)
    against the raw output the student submission generated.

    Args:
        text (str): the text that is searched for
        pattern (bool): if True (default), treat ``text`` as a regex pattern.
        no_output_msg (str): feedback message if the output is not found.
    """
    msg = no_output_msg or "You did not output the correct things."
    built = state.build_message(msg)
    state.do_test(StringContainsTest(state.raw_student_output, text, pattern, built))
    return state
Check if the right printouts happened.
def has_printout(
    state, index, not_printed_msg=None, pre_code=None, name=None, copy=False
):
    """Check if the right printouts happened.

    ``has_printout()`` looks up the ``index``-th ``print()`` call in the
    solution, reruns it in the solution process, captures its output, and
    verifies that output is present in the student's output. This is more
    robust than a ``check_function('print')`` chain: students may print as
    much as they want, as long as the correct printout happens somewhere.

    Args:
        index (int): index of the ``print()`` call in the solution whose
            output you want to search for in the student output.
        not_printed_msg (str): overrides the default feedback message.
        pre_code (str): Python code executed before rerunning the targeted
            call — the place to set a random seed, for example.
        copy (bool): whether to deep-copy mutable objects in the environment.
            Disabled by default, which speeds up SCTs.

    Note: can only be used from the root state ``Ex()``; the solution call
    is rerun *after* the whole solution script executed, so later mutations
    of printed variables make it unusable.
    """
    extra_msg = "If you want to check printouts done in e.g. a for loop, you have to use a `check_function('print')` chain instead."
    state.assert_root("has_printout", extra_msg=extra_msg)

    if not_printed_msg is None:
        not_printed_msg = (
            "Have you used `{{sol_call}}` to do the appropriate printouts?"
        )

    try:
        # grab the AST node of the index-th print() call in the solution
        sol_call_ast = state.ast_dispatcher("function_calls", state.solution_ast)[
            "print"
        ][index]["node"]
    except (KeyError, IndexError):
        raise InstructorError(
            "`has_printout({})` couldn't find the {} print call in your solution.".format(
                index, utils.get_ord(index + 1)
            )
        )

    # rerun the solution's print() call in the solution process and capture stdout
    out_sol, str_sol = getOutputInProcess(
        tree=sol_call_ast,
        process=state.solution_process,
        context=state.solution_context,
        env=state.solution_env,
        pre_code=pre_code,
        copy=copy,
    )

    sol_call_str = state.solution_ast_tokens.get_text(sol_call_ast)

    if isinstance(str_sol, Exception):
        # NOTE(review): message interpolates type(out_sol) — looks like it was
        # meant to be the error's type; also missing a space between sentences.
        raise InstructorError(
            "Evaluating the solution expression {} raised error in solution process."
            "Error: {} - {}".format(sol_call_str, type(out_sol), str_sol)
        )

    _msg = state.build_message(not_printed_msg, {"sol_call": sol_call_str})

    # exact (non-regex) containment check against the student's output
    has_output(state, out_sol.strip(), pattern=False, no_output_msg=_msg)

    return state
Check whether the submission did not generate a runtime error.
def has_no_error(
    state,
    incorrect_msg="Have a look at the console: your code contains an error. Fix it and try again!",
):
    """Check whether the submission did not generate a runtime error.

    pythonwhat runs this check automatically before marking a submission
    correct, so calling it explicitly is only useful when you want the
    error check to happen earlier in the SCT — e.g. to verify a function
    runs at all before inspecting its arguments.

    Args:
        incorrect_msg: overrides the default message shown when the student
            code generated an error.
    """
    state.assert_root("has_no_error")

    errors = state.reporter.errors
    if errors:
        feedback_text = state.build_message(incorrect_msg, {"error": str(errors[0])})
        state.report(Feedback(feedback_text, state))

    return state
Test multiple choice exercise.
def has_chosen(state, correct, msgs):
    """Test multiple choice exercise.

    Test for a MultipleChoiceExercise. The correct answer (as an integer)
    and feedback messages are passed to this function.

    Args:
        correct (int): the index of the correct answer (should be an
            instruction). Starts at 1.
        msgs (list(str)): feedback messages for each choice; must have the
            same length as the number of options.

    Returns:
        The state, so the call composes with other SCT chain functions.
    """
    # isinstance is the idiomatic (and equivalent) form of
    # issubclass(type(x), int)
    if not isinstance(correct, int):
        raise InstructorError(
            "Inside `has_chosen()`, the argument `correct` should be an integer."
        )

    student_process = state.student_process
    if not isDefinedInProcess(MC_VAR_NAME, student_process):
        raise InstructorError("Option not available in the student process")

    selected_option = getOptionFromProcess(student_process, MC_VAR_NAME)
    if not isinstance(selected_option, int):
        raise InstructorError("selected_option should be an integer")

    if selected_option < 1 or correct < 1:
        raise InstructorError(
            "selected_option and correct should be greater than zero"
        )

    if selected_option > len(msgs) or correct > len(msgs):
        raise InstructorError("there are not enough feedback messages defined")

    feedback_msg = msgs[selected_option - 1]

    state.reporter.success_msg = msgs[correct - 1]

    state.do_test(EqualTest(selected_option, correct, feedback_msg))
    # CHANGE: return state for chain consistency with the other SCT functions
    # (previously returned None; backward compatible for callers ignoring it).
    return state
Check whether a particular function is called.
def check_function(
    state,
    name,
    index=0,
    missing_msg=None,
    params_not_matched_msg=None,
    expand_msg=None,
    signature=True,
):
    """Check whether a particular function is called.

    Typically followed by ``check_args()`` (then ``has_equal_value()`` /
    ``has_equal_ast()``) or by ``has_equal_value()`` directly.

    Args:
        name (str): name of the function to be tested; use the 'full path'
            for functions in packages.
        index (int): index of the function call to be checked. Defaults to 0.
        missing_msg (str): overrides the generated feedback when the student
            did not call the function correctly.
        params_not_matched_msg (str): overrides the generated feedback when
            the function parameters could not be matched to the signature.
        expand_msg (str): overrides messages prepended by previous SCT chains.
        signature (Signature): usually inferred automatically; pass a manual
            ``sig_from_params()`` signature when needed.
        state (State): passed from the SCT chain (don't specify this).
    """
    # remember whether messages were customized: generated ones get appended
    # to the chain message, custom ones replace it
    append_missing = missing_msg is None
    append_params_not_matched = params_not_matched_msg is None
    if missing_msg is None:
        missing_msg = MISSING_MSG
    if expand_msg is None:
        expand_msg = PREPEND_MSG
    if params_not_matched_msg is None:
        params_not_matched_msg = SIG_ISSUE_MSG

    stu_out = state.ast_dispatcher("function_calls", state.student_ast)
    sol_out = state.ast_dispatcher("function_calls", state.solution_ast)

    student_mappings = state.ast_dispatcher("mappings", state.student_ast)

    fmt_kwargs = {
        "times": get_times(index + 1),
        "ord": get_ord(index + 1),
        "index": index,
        "mapped_name": get_mapped_name(name, student_mappings),
    }

    # Get Parts ----
    # Copy, otherwise signature binding overwrites sol_out[name][index]['args']
    try:
        sol_parts = {**sol_out[name][index]}
    except KeyError:
        raise InstructorError(
            "`check_function()` couldn't find a call of `%s()` in the solution code. Make sure you get the mapping right!"
            % name
        )
    except IndexError:
        raise InstructorError(
            "`check_function()` couldn't find %s calls of `%s()` in your solution code."
            % (index + 1, name)
        )

    try:
        # Copy, otherwise signature binding overwrites stu_out[name][index]['args']
        stu_parts = {**stu_out[name][index]}
    except (KeyError, IndexError):
        # NOTE(review): execution continues past this point only if
        # state.report() raises — presumably it does (stu_parts would be
        # unbound otherwise); confirm against the State implementation.
        _msg = state.build_message(missing_msg, fmt_kwargs, append=append_missing)
        state.report(Feedback(_msg, state))

    # Signatures -----
    if signature:
        # signature=True means "infer it"; an explicit Signature is passed on
        signature = None if isinstance(signature, bool) else signature
        get_sig = partial(
            getSignatureInProcess,
            name=name,
            signature=signature,
            manual_sigs=state.get_manual_sigs(),
        )

        try:
            sol_sig = get_sig(
                mapped_name=sol_parts["name"], process=state.solution_process
            )
            sol_parts["args"] = bind_args(sol_sig, sol_parts["args"])
        except Exception as e:
            # failure on the solution side is an instructor error, not feedback
            raise InstructorError(
                "`check_function()` couldn't match the %s call of `%s` to its signature:\n%s "
                % (get_ord(index + 1), name, e)
            )

        try:
            stu_sig = get_sig(
                mapped_name=stu_parts["name"], process=state.student_process
            )
            stu_parts["args"] = bind_args(stu_sig, stu_parts["args"])
        except Exception:
            # failure on the student side becomes feedback
            _msg = state.build_message(
                params_not_matched_msg, fmt_kwargs, append=append_params_not_matched
            )
            state.report(
                Feedback(
                    _msg, StubState(stu_parts["node"], state.highlighting_disabled)
                )
            )

    # three types of parts: pos_args, keywords, args (e.g. these are bound to sig)
    append_message = {"msg": expand_msg, "kwargs": fmt_kwargs}
    child = part_to_child(
        stu_parts, sol_parts, append_message, state, node_name="function_calls"
    )
    return child
Decorator to (optionally) run a function in a process.
def process_task(f):
    """Decorator to (optionally) run function in a process."""
    sig = inspect.signature(f)

    @wraps(f)
    def wrapper(*args, **kwargs):
        # bind the call so the `process` argument can be found by name
        bound = sig.bind_partial(*args, **kwargs)
        process = bound.arguments.get("process")
        if not process:
            # no process given: run the original function directly
            return f(*bound.args, **bound.kwargs)
        # null out `process` and ship a partial of this wrapper to the
        # process; when it executes, it takes the direct branch above
        bound.arguments["process"] = None
        task = partial(wrapper, *bound.args, **bound.kwargs)
        return process.executeTask(task)

    return wrapper
Get a value from a process; return a tuple of (value, res) if successful.
def getResultFromProcess(res, tempname, process):
    """Get a value from process; returns (value, res) on success,
    (res, str(res)) when res signals failure (UndefinedValue/Exception)."""
    if isinstance(res, (UndefinedValue, Exception)):
        return res, str(res)
    return getRepresentation(tempname, process), res
Creates code to assign a name (or tuple of names) node from expr.
def assign_from_ast(node, expr):
    """Create compiled code that assigns ``expr`` to the name (or tuple of
    names) AST ``node``.

    This is useful for recreating destructuring assignment behavior, like
    ``a, *b = [1, 2, 3]``.

    Args:
        node: assignment target AST (e.g. ``ast.Name``/``ast.Tuple`` with
            Store context).
        expr: expression AST for the right-hand side, or a variable name
            as a string.

    Returns:
        A code object; ``exec`` it in a namespace to perform the assignment.
    """
    if isinstance(expr, str):
        expr = ast.Name(id=expr, ctx=ast.Load())
    # BUG FIX: ast.Module requires the `type_ignores` field on Python 3.8+;
    # constructing it with only `body` makes compile() raise
    # "required field 'type_ignores' missing from Module".
    mod = ast.Module(body=[ast.Assign(targets=[node], value=expr)], type_ignores=[])
    ast.fix_missing_locations(mod)
    return compile(mod, "<assignment_script>", "exec")
Override the solution code with something arbitrary.
def override(state, solution): """Override the solution code with something arbitrary. There might be cases in which you want to temporarily override the solution code so you can allow for alternative ways of solving an exercise. When you use ``override()`` in an SCT chain, the remainder of that SCT chain will run as if the solution code you specified is the only code that was in the solution. Check the glossary for an example (pandas plotting) Args: solution: solution code as a string that overrides the original solution code. state: State instance describing student and solution code. Can be omitted if used with Ex(). """ # the old ast may be a number of node types, but generally either a # (1) ast.Module, or for single expressions... # (2) whatever was grabbed using module.body[0] # (3) module.body[0].value, when module.body[0] is an Expr node old_ast = state.solution_ast new_ast = ast.parse(solution) if not isinstance(old_ast, ast.Module) and len(new_ast.body) == 1: expr = new_ast.body[0] candidates = [expr, expr.value] if isinstance(expr, ast.Expr) else [expr] for node in candidates: if isinstance(node, old_ast.__class__): new_ast = node break kwargs = state.messages[-1] if state.messages else {} child = state.to_child( solution_ast=new_ast, student_ast=state.student_ast, highlight=state.highlight, append_message={"msg": "", "kwargs": kwargs}, ) return child
Update context values for the student and solution environments. When has_equal_x() is used after this, the context values (in for loops and function definitions, for example) will have the values specified through this function. It is the function equivalent of the context_vals argument of the has_equal_x() functions.
def set_context(state, *args, **kwargs):
    """Update context values for student and solution environments.

    When ``has_equal_x()`` is used after this, the context values (in
    ``for`` loops and function definitions, for example) will have the
    values specified through this function. It is the function equivalent
    of the ``context_vals`` argument of the ``has_equal_x()`` functions.

    - Note 1: excess args and unmatched kwargs will be unused in the
      student environment.
    - Note 2: setting context values that don't match any target variables
      in the solution code raises an exception listing the ones available.
    - Note 3: positional arguments are more robust to the student using
      different names for context values.
    - Note 4: specify arguments either by position, either by name. A
      combination is not possible.

    :Example:

        Ex().check_for_loop().check_body().multi(
            set_context(1).has_equal_output(),
            set_context(2).has_equal_output(),
            set_context(3).has_equal_output()
        )
    """
    # combining positional and keyword specification is not supported
    if len(args) > 0 and len(kwargs) > 0:
        raise InstructorError(
            "In `set_context()`, specify arguments either by position, either by name."
        )

    sol_ctx = state.solution_context.context
    stu_ctx = state.student_context.context

    # set args specified by position ------------------------------------
    if args:
        # stop if too many positional args for the solution
        if len(args) > len(sol_ctx):
            raise InstructorError(
                "Too many positional args. There are {} context vals, but tried to set {}".format(
                    len(sol_ctx), len(args)
                )
            )
        sol_upd = sol_ctx.update(dict(zip(sol_ctx.keys(), args)))
        stu_upd = stu_ctx.update(dict(zip(stu_ctx.keys(), args)))
    else:
        sol_upd, stu_upd = sol_ctx, stu_ctx

    # set args specified by keyword -------------------------------------
    if kwargs:
        # stop if keywords don't match with the solution
        if set(kwargs) - set(sol_upd):
            raise InstructorError(
                "`set_context()` failed: context val names are {}, but you tried to set {}.".format(
                    sol_upd or "missing", sorted(list(kwargs.keys()))
                )
            )
        sol_out = sol_upd.update(kwargs)
        # match solution keys to the (possibly differently named) student
        # keys, e.g. when the student used another loop variable name
        key_map = dict(zip(sol_ctx.keys(), stu_ctx.keys()))
        stu_out = stu_upd.update(
            {key_map[k]: v for k, v in kwargs.items() if k in key_map}
        )
    else:
        sol_out, stu_out = sol_upd, stu_upd

    return state.to_child(
        student_context=stu_out, solution_context=sol_out, highlight=state.highlight
    )
Update/set environment variables for the student and solution environments.
def set_env(state, **kwargs):
    """Update/set environment variables for student and solution environments.

    When ``has_equal_x()`` is used after this, the variables specified
    through this function will be available in the student and solution
    process. Note that the values are only saved on the state here; they
    are added to the student and solution processes when a
    ``has_equal_x()`` function is called afterwards.

    :Example:

        Ex().check_if_else().check_test().multi(
            set_env(a = 3).has_equal_value(),
            set_env(a = 4).has_equal_value(),
            set_env(a = 5).has_equal_value()
        )
    """
    new_student_env = state.student_env.context.update(kwargs)
    new_solution_env = state.solution_env.context.update(kwargs)
    return state.to_child(
        student_env=new_student_env,
        solution_env=new_solution_env,
        highlight=state.highlight,
    )
Check object existence (and equality).
def check_object(state, index, missing_msg=None, expand_msg=None, typestr="variable"):
    """Check object existence (and equality).

    Check whether an object is defined in the student's process, and zoom
    in on its value in both student and solution process so that it can be
    inspected further, e.g. with ``has_equal_value()``.

    Args:
        index (str): the name of the object which value has to be checked.
        missing_msg (str): feedback message when the object is not defined
            in the student process.
        expand_msg (str): If specified, this overrides any messages that
            are prepended by previous SCT chains.
        typestr (str): how the object is referred to in feedback messages.

    :Example:

        Suppose you want the student to create a variable ``x``, equal
        to 15::

            x = 15

        The following SCT will verify this::

            Ex().check_object("x").has_equal_value()

    Note that ``has_equal_value()`` only looks at the **end result** of
    the variable in the student process; how the object came about in the
    student's submission does not matter (``x = 12 + 3`` also passes).
    """
    # in v2-only mode, check_object may only be chained from the root state
    if v2_only():
        extra_msg = "If you want to check the value of an object in e.g. a for loop, use `has_equal_value(name = 'my_obj')` instead."
        state.assert_root("check_object", extra_msg=extra_msg)

    if missing_msg is None:
        missing_msg = "Did you define the {{typestr}} `{{index}}` without errors?"
    if expand_msg is None:
        expand_msg = "Did you correctly define the {{typestr}} `{{index}}`? "

    # instructor error when the object cannot be found in the solution
    if (
        not isDefinedInProcess(index, state.solution_process)
        and state.has_different_processes()
    ):
        raise InstructorError(
            "`check_object()` couldn't find object `%s` in the solution process."
            % index
        )

    msg_kwargs = {"index": index, "typestr": typestr}
    append_message = {"msg": expand_msg, "kwargs": msg_kwargs}

    # create child state, using either parser output, or create part from name
    def fallback():
        return ObjectAssignmentParser.get_part(index)

    stu_part = state.ast_dispatcher("object_assignments", state.student_ast).get(
        index, fallback()
    )
    sol_part = state.ast_dispatcher("object_assignments", state.solution_ast).get(
        index, fallback()
    )

    # test that the object exists in the student process
    feedback_msg = state.build_message(missing_msg, msg_kwargs)
    state.do_test(
        DefinedProcessTest(index, state.student_process, Feedback(feedback_msg))
    )

    return part_to_child(
        stu_part, sol_part, append_message, state, node_name="object_assignments"
    )
Check whether an object is an instance of a certain class.
def is_instance(state, inst, not_instance_msg=None):
    """Check whether an object is an instance of a certain class.

    ``is_instance()`` can currently only be used when chained from
    ``check_object()``, the function that is used to 'zoom in' on the
    object of interest.

    Args:
        inst (class): The class that the object should have.
        not_instance_msg (str): When specified, this overrides the
            automatically generated message in case the object does not
            have the expected class.
        state (State): The state that is passed in through the SCT chain
            (don't specify this).

    :Example:

        Student code and solution code::

            import numpy as np
            arr = np.array([1, 2, 3, 4, 5])

        SCT::

            # Verify the class of arr
            import numpy
            Ex().check_object('arr').is_instance(numpy.ndarray)
    """
    state.assert_is(["object_assignments"], "is_instance", ["check_object"])

    if not_instance_msg is None:
        not_instance_msg = "Is it a {{inst.__name__}}?"

    solution_name = state.solution_parts.get("name")
    student_name = state.student_parts.get("name")

    # instructor error when the solution object itself has the wrong class
    if not isInstanceInProcess(solution_name, inst, state.solution_process):
        raise InstructorError(
            "`is_instance()` noticed that `%s` is not a `%s` in the solution process."
            % (solution_name, inst.__name__)
        )

    message = state.build_message(not_instance_msg, {"inst": inst})
    state.do_test(
        InstanceProcessTest(
            student_name, inst, state.student_process, Feedback(message, state)
        )
    )
    return state
Check whether a DataFrame was defined and is of the right type. check_df() is a combination of check_object() and is_instance() that checks whether the specified object exists and is a pandas DataFrame.
def check_df(state, index, missing_msg=None, not_instance_msg=None, expand_msg=None):
    """Check whether a DataFrame was defined and it is the right type.

    ``check_df()`` is a combo of ``check_object()`` and ``is_instance()``
    that checks whether the specified object exists and whether it is a
    pandas DataFrame. You can continue checking the data frame with the
    ``check_keys()`` function to 'zoom in' on a particular column.

    Args:
        index (str): Name of the data frame to zoom in on.
        missing_msg (str): See ``check_object()``.
        not_instance_msg (str): See ``is_instance()``.
        expand_msg (str): If specified, this overrides any messages that
            are prepended by previous SCT chains.

    :Example:

        Solution::

            import pandas as pd
            my_df = pd.DataFrame({"a": [1, 2, 3], "b": ["a", "n", "y"]})

        SCT::

            Ex().check_df("my_df").multi(
                check_keys("a").has_equal_value(),
                check_keys("b")
            )
    """
    # existence check (check_object), tagged as a DataFrame for messages
    df_state = check_object(
        state,
        index,
        missing_msg=missing_msg,
        expand_msg=expand_msg,
        typestr="pandas DataFrame",
    )
    # type check; run for its side effect (reports on failure)
    is_instance(df_state, pd.DataFrame, not_instance_msg=not_instance_msg)
    return df_state
Check whether an object ( dict DataFrame etc ) has a key.
def check_keys(state, key, missing_msg=None, expand_msg=None):
    """Check whether an object (dict, DataFrame, etc) has a key.

    ``check_keys()`` can currently only be used when chained from
    ``check_object()``, the function that is used to 'zoom in' on the
    object of interest.

    Args:
        key (str): Name of the key that the object should have.
        missing_msg (str): When specified, this overrides the automatically
            generated message in case the key does not exist.
        expand_msg (str): If specified, this overrides any messages that are
            prepended by previous SCT chains.
        state (State): The state that is passed in through the SCT chain
            (don't specify this).

    :Example:

        Student code and solution code::

            x = {'a': 2}

        SCT::

            # Verify that x contains a key a
            Ex().check_object('x').check_keys('a')

            # Verify that x contains a key a and a is correct.
            Ex().check_object('x').check_keys('a').has_equal_value()
    """
    # Bug fix: report this function's own name ("check_keys") in the
    # chaining error; previously this incorrectly said "is_instance".
    state.assert_is(["object_assignments"], "check_keys", ["check_object", "check_df"])

    if missing_msg is None:
        missing_msg = "There is no {{ 'column' if 'DataFrame' in parent.typestr else 'key' }} `'{{key}}'`."
    if expand_msg is None:
        expand_msg = "Did you correctly set the {{ 'column' if 'DataFrame' in parent.typestr else 'key' }} `'{{key}}'`? "

    sol_name = state.solution_parts.get("name")
    stu_name = state.student_parts.get("name")

    # instructor error when the key is missing in the solution process
    if not isDefinedCollInProcess(sol_name, key, state.solution_process):
        raise InstructorError(
            "`check_keys()` couldn't find key `%s` in object `%s` in the solution process."
            % (key, sol_name)
        )

    # check if key available in the student process
    _msg = state.build_message(missing_msg, {"key": key})
    state.do_test(
        DefinedCollProcessTest(
            stu_name, key, state.student_process, Feedback(_msg, state)
        )
    )

    def get_part(name, key, highlight):
        # build an ast expression `name[key]` to zoom in on
        if isinstance(key, str):
            slice_val = ast.Str(s=key)
        else:
            slice_val = ast.parse(str(key)).body[0].value
        expr = ast.Subscript(
            value=ast.Name(id=name, ctx=ast.Load()),
            slice=ast.Index(value=slice_val),
            ctx=ast.Load(),
        )
        ast.fix_missing_locations(expr)
        return {"node": expr, "highlight": highlight}

    stu_part = get_part(stu_name, key, state.student_parts.get("highlight"))
    sol_part = get_part(sol_name, key, state.solution_parts.get("highlight"))

    append_message = {"msg": expand_msg, "kwargs": {"key": key}}
    child = part_to_child(stu_part, sol_part, append_message, state)
    return child
Return a copy of the instance, omitting entries that are EMPTY.
def defined_items(self):
    """Return a copy of this instance without the entries that are EMPTY."""
    # identity comparison: EMPTY is a sentinel, not an equality match
    kept = [(key, val) for key, val in self.items() if val is not self.EMPTY]
    return self.__class__(kept, is_empty=False)
Dive into nested tree.
def to_child(self, append_message="", node_name="", **kwargs):
    """Dive into nested tree.

    Set the current state as a state with a subtree of this syntax tree as
    student tree and solution tree. This is necessary when testing if
    statements or for loops for example.
    """
    # copy all state parameters except highlight, which is part-specific
    base_kwargs = {
        attr: getattr(self, attr)
        for attr in self.params
        if attr not in ["highlight"]
    }

    # normalize a plain-string message into the {msg, kwargs} dict form
    if not isinstance(append_message, dict):
        append_message = {"msg": append_message, "kwargs": {}}
    kwargs["messages"] = [*self.messages, append_message]
    kwargs["parent_state"] = self

    # drop falsy contexts so they don't overwrite inherited ones
    for kwarg in ["solution_context", "student_context"]:
        if kwarg in kwargs and not kwargs[kwarg]:
            kwargs.pop(kwarg, None)

    def update_kwarg(name, func):
        kwargs[name] = func(kwargs[name])

    def update_context(name):
        update_kwarg(name, getattr(self, name).update_ctx)

    # a list of nodes is wrapped in a Module so downstream code sees one tree
    if isinstance(kwargs.get("student_ast", None), list):
        update_kwarg("student_ast", wrap_in_module)
    if isinstance(kwargs.get("solution_ast", None), list):
        update_kwarg("solution_ast", wrap_in_module)

    # keep the code strings in sync with the (possibly new) ast trees
    if "student_ast" in kwargs:
        kwargs["student_code"] = self.student_ast_tokens.get_text(
            kwargs["student_ast"]
        )
    if "solution_ast" in kwargs:
        kwargs["solution_code"] = self.solution_ast_tokens.get_text(
            kwargs["solution_ast"]
        )

    # get new contexts
    if "solution_context" in kwargs:
        update_context("solution_context")
    if "student_context" in kwargs:
        update_context("student_context")

    # get new envs
    if "solution_env" in kwargs:
        update_context("solution_env")
    if "student_env" in kwargs:
        update_context("student_env")

    # node-specific State subclass when a node_name is given, plain State otherwise
    klass = self.SUBCLASSES[node_name] if node_name else State
    child = klass(**{**base_kwargs, **kwargs})
    return child
getter for Parser outputs
def _getx(self, Parser, ext_attr, tree): """getter for Parser outputs""" # return cached output if possible cache_key = Parser.__name__ + str(hash(tree)) if self._parser_cache.get(cache_key): p = self._parser_cache[cache_key] else: # otherwise, run parser over tree p = Parser() # set mappings for parsers that inspect attribute access if ext_attr != "mappings" and Parser in [ FunctionParser, ObjectAccessParser, ]: p.mappings = self.context_mappings.copy() # run parser p.visit(tree) # cache self._parser_cache[cache_key] = p return getattr(p, ext_attr)
When dispatched on loops, has_context uses the target vars stored in the attribute _target_vars.
def has_context_loop(state, incorrect_msg, exact_names):
    """When dispatched on loops, has_context uses the vars in the attribute _target_vars.

    Note: This is to allow people to call has_context on a node
    (e.g. for_loop) rather than one of its attributes (e.g. body).
    Purely for convenience.
    """
    message = incorrect_msg or MSG_INCORRECT_LOOP
    return _test(
        state,
        message,
        exact_names,
        tv_name="_target_vars",
        highlight_name="target",
    )
When dispatched on with statements has_context loops over each context manager.
def has_context_with(state, incorrect_msg, exact_names):
    """When dispatched on with statements, has_context loops over each context manager.

    Note: This is to allow people to call has_context on the with
    statement, rather than having to manually loop over each context
    manager, e.g. Ex().check_with(0).has_context() vs
    Ex().check_with(0).check_context(0).has_context()
    """
    message = incorrect_msg or MSG_INCORRECT_WITH
    n_contexts = len(state.solution_parts["context"])
    for idx in range(n_contexts):
        # zoom in on each context manager and run the check there
        ctxt_state = check_part_index(state, "context", idx, "{{ordinal}} context")
        _has_context(ctxt_state, message, exact_names)
    return state
Return child state with name part as its ast tree
def check_part(state, name, part_msg, missing_msg=None, expand_msg=None):
    """Return child state with name part as its ast tree."""
    if missing_msg is None:
        missing_msg = "Are you sure you defined the {{part}}? "
    if expand_msg is None:
        expand_msg = "Did you correctly specify the {{part}}? "

    # fall back to the raw part name when no display label was given
    part_label = part_msg if part_msg else name
    append_message = {"msg": expand_msg, "kwargs": {"part": part_label}}

    # verify the part exists in the student submission
    has_part(state, name, missing_msg, append_message["kwargs"])

    stu_part = state.student_parts[name]
    sol_part = state.solution_parts[name]
    assert_ast(state, sol_part, append_message["kwargs"])

    return part_to_child(stu_part, sol_part, append_message, state)
Return child state with indexed name part as its ast tree.
def check_part_index(state, name, index, part_msg, missing_msg=None, expand_msg=None):
    """Return child state with indexed name part as its ast tree.

    ``index`` can be:

    - an integer, in which case the student/solution_parts are indexed by
      position.
    - a string, in which case the student/solution_parts are expected to
      be a dictionary.
    - a list of indices (which can be integer or string), in which case
      the student parts are indexed step by step.
    """
    if missing_msg is None:
        missing_msg = "Are you sure you defined the {{part}}? "
    if expand_msg is None:
        expand_msg = "Did you correctly specify the {{part}}? "

    # create message (an ordinal only makes sense for integer indices)
    ordinal = get_ord(index + 1) if isinstance(index, int) else ""
    fmt_kwargs = {"index": index, "ordinal": ordinal}
    fmt_kwargs.update(part=render(part_msg, fmt_kwargs))
    append_message = {"msg": expand_msg, "kwargs": fmt_kwargs}

    # check there are enough parts for index
    has_part(state, name, missing_msg, fmt_kwargs, index)

    def dig(part):
        # a list of indices is followed step by step; otherwise index directly
        if isinstance(index, list):
            for step in index:
                part = part[step]
            return part
        return part[index]

    stu_part = dig(state.student_parts[name])
    sol_part = dig(state.solution_parts[name])

    # return child state from part
    return part_to_child(stu_part, sol_part, append_message, state)
Check whether a function argument is specified.
def check_args(state, name, missing_msg=None):
    """Check whether a function argument is specified.

    This function can follow ``check_function()`` in an SCT chain and
    verifies whether an argument is specified. To go on and check whether
    the argument was correctly specified, continue chaining with
    ``has_equal_value()`` (value-based check) or ``has_equal_ast()``
    (AST-based check). It can also follow ``check_function_def()`` or
    ``check_lambda_function()`` to see if arguments have been specified.

    Args:
        name (str): the name of the argument for which you want to check
            that it is specified. This can also be a number, in which case
            it refers to the positional arguments. Named arguments take
            precedence.
        missing_msg (str): If specified, this overrides an automatically
            generated feedback message in case the student didn't specify
            the argument.
        state (State): State object that is passed from the SCT Chain
            (don't specify this).

    :Examples:

        Student and solution code::

            import numpy as np
            arr = np.array([1, 2, 3, 4, 5])
            np.mean(arr)

        SCT::

            # Verify whether arr was correctly set in np.mean
            Ex().check_function('numpy.mean').check_args('a').has_equal_value()

        Student and solution code::

            def my_power(x):
                print("calculating sqrt...")
                return(x * x)

        SCT::

            Ex().check_function_def('my_power').multi(
                check_args('x'),  # will fail if student used y as arg
                check_args(0)     # will still pass if student used y as arg
            )
    """
    if missing_msg is None:
        missing_msg = "Did you specify the {{part}}?"

    # *args / **kwargs of a function definition are stored as named parts
    if name in ["*args", "**kwargs"]:
        return check_part(state, name, name, missing_msg=missing_msg)

    # build a human-readable description of the argument
    if isinstance(name, list):
        # dealing with args or kwargs
        if name[0] == "args":
            arg_str = "{} argument passed as a variable length argument".format(
                get_ord(name[1] + 1)
            )
        else:
            arg_str = "argument `{}`".format(name[1])
    elif isinstance(name, int):
        arg_str = "{} argument".format(get_ord(name + 1))
    else:
        arg_str = "argument `{}`".format(name)

    return check_part_index(state, "args", name, arg_str, missing_msg=missing_msg)
When checking a function definition or lambda function, prepare has_equal_x for checking the call of a user-defined function.
def check_call(state, callstr, argstr=None, expand_msg=None):
    """When checking a function definition of lambda function, prepare has_equal_x
    for checking the call of a user-defined function.

    Args:
        callstr (str): call string that specifies how the function should
            be called, e.g. `f(1, a = 2)`. ``check_call()`` will replace
            ``f`` with the function/lambda you're targeting.
        argstr (str): If specified, this overrides the way the function
            call is referred to in the expand message.
        expand_msg (str): If specified, this overrides any messages that
            are prepended by previous SCT chains.
        state (State): state object that is chained from.

    :Example:

        Student and solution code::

            def my_power(x):
                print("calculating sqrt...")
                return(x * x)

        SCT::

            Ex().check_function_def('my_power').multi(
                check_call("f(3)").has_equal_value(),
                check_call("f(3)").has_equal_output()
            )
    """
    state.assert_is(
        ["function_defs", "lambda_functions"],
        "check_call",
        ["check_function_def", "check_lambda_function"],
    )

    if expand_msg is None:
        expand_msg = "To verify it, we reran {{argstr}}. "

    # graft the call string onto the student/solution definitions
    stu_part, auto_argstr = build_call(callstr, state.student_parts["node"])
    sol_part, _ = build_call(callstr, state.solution_parts["node"])

    append_message = {
        "msg": expand_msg,
        "kwargs": {"argstr": argstr or auto_argstr},
    }
    return part_to_child(stu_part, sol_part, append_message, state)
Does this compiler support OpenMP parallelization?
def detect_openmp():
    """Does this compiler support OpenMP parallelization?

    Returns:
        tuple(bool, bool): ``(hasopenmp, needs_gomp)`` — whether OpenMP is
        usable at all, and whether libgomp must be linked explicitly for
        it to work.
    """
    compiler = new_compiler()
    print("Checking for OpenMP support... ")
    hasopenmp = hasfunction(compiler, 'omp_get_num_threads()')
    # Fix: needs_gomp was previously True even when OpenMP worked without
    # explicitly linking libgomp; it should only be set when the gomp
    # fallback is what made the test program link.
    needs_gomp = False
    if not hasopenmp:
        # try again, explicitly linking against GNU libgomp
        compiler.add_library('gomp')
        hasopenmp = hasfunction(compiler, 'omp_get_num_threads()')
        needs_gomp = hasopenmp
    if hasopenmp:
        print("Compiler supports OpenMP")
    else:
        print("Did not detect OpenMP support.")
    return hasopenmp, needs_gomp
zs = np. linspace ( 0. 1. 1000 ) rp = 0. 1 wrapped = wrapper ( _quadratic_ld. _quadratic_ld zs rp 0. 1 0. 3 1 ) t = timeit. timeit ( wrapped number = 10000 ) print ( time: t )
def make_plots():
    """Benchmark the non-linear limb darkening model and plot error vs. run time.

    Sweeps the `fac` tolerance parameter of `_nonlinear_ld` and, for each
    value, measures the mean wall-clock time of one model evaluation and
    the maximum absolute deviation (in ppm) from a high-accuracy reference
    light curve (fac = 1e-4). Shows a log-log plot of max error against
    evaluation time.
    """
    # deferred import so the module can be used without a display backend
    import matplotlib.pyplot as plt

    # NOTE: two blocks of dead benchmark code (kept as no-op triple-quoted
    # string statements) were removed here; see version control history.

    zs = np.linspace(0., 1., 1000)
    rp = 0.1
    u = [0., 0.7, 0.0, -0.3]
    n = 20

    ts = []    # mean wall-clock time per call for each tolerance
    errs = []  # max abs deviation (flux) from the reference curve

    # high-accuracy reference light curve
    f_ref = _nonlinear_ld._nonlinear_ld(zs, rp, u[0], u[1], u[2], u[3], 1.0e-4, 4)

    fac = np.logspace(-3, -1, n)
    for i in range(n):
        wrapped = wrapper(_nonlinear_ld._nonlinear_ld, zs, rp, u[0], u[1], u[2], u[3], fac[i], 1)
        t = timeit.timeit(wrapped, number=10)/10.
        ts.append(t)
        print(t)
        f = _nonlinear_ld._nonlinear_ld(zs, rp, u[0], u[1], u[2], u[3], fac[i], 12)
        err = np.max(np.abs(f - f_ref))
        errs.append(err)

    plt.plot(np.array(ts), np.array(errs)*1.0e6, color='k')
    plt.xlim((1.0e-3, 1.0e-1))
    plt.yscale('log')
    plt.xscale('log')
    plt.xlabel("Time (s)")
    plt.ylabel("Max Err (ppm)")
    plt.show()
Calculate a model light curve.
def light_curve(self, params):
    """
    Calculate a model light curve.

    :param params: Transit parameters
    :type params: A `TransitParams` instance

    :return: Relative flux
    :rtype: ndarray

    :Example:

    >>> flux = m.light_curve(params)
    """
    #recalculates rsky and fac if necessary
    if params.t0 != self.t0 or params.per != self.per or params.a != self.a or params.inc != self.inc or params.ecc != self.ecc or params.w != self.w or params.t_secondary != self.t_secondary:
        # for secondary eclipses, t0 is derived from t_secondary
        if self.transittype == 2 and params.t_secondary != self.t_secondary:
            params.t0 = self.get_t_conjunction(params)
        self.ds= _rsky._rsky(self.t_supersample, params.t0, params.per, params.a, params.inc*pi/180., params.ecc, params.w*pi/180., self.transittype, self.nthreads)
    if params.limb_dark != self.limb_dark:
        self.fac = self._get_fac()
    #updates transit params
    self.t0 = params.t0
    self.per = params.per
    self.rp = params.rp
    self.a = params.a
    self.inc = params.inc
    self.ecc = params.ecc
    self.w = params.w
    self.u = params.u
    self.limb_dark = params.limb_dark
    self.fp = params.fp
    self.t_secondary = params.t_secondary
    self.inverse = False
    #handles the case of inverse transits (rp < 0)
    if self.rp < 0.:
        self.rp = -1.*self.rp
        params.rp = -1.*params.rp
        self.inverse = True
    if self.transittype == 1:
        # NOTE(review): this check appears unreachable because
        # self.limb_dark was just set to params.limb_dark above — confirm.
        if params.limb_dark != self.limb_dark:
            raise Exception("Need to reinitialize model in order to change limb darkening option")
        # dispatch on the limb darkening law; each branch calls the
        # corresponding C extension
        if self.limb_dark == "quadratic":
            lc = _quadratic_ld._quadratic_ld(self.ds, params.rp, params.u[0], params.u[1], self.nthreads)
        elif self.limb_dark == "linear":
            # linear law is quadratic with the second coefficient zeroed
            lc = _quadratic_ld._quadratic_ld(self.ds, params.rp, params.u[0], 0., self.nthreads)
        elif self.limb_dark == "nonlinear":
            lc = _nonlinear_ld._nonlinear_ld(self.ds, params.rp, params.u[0], params.u[1], params.u[2], params.u[3], self.fac, self.nthreads)
        elif self.limb_dark == "squareroot":
            # square-root law maps onto the nonlinear law with swapped coefficients
            lc = _nonlinear_ld._nonlinear_ld(self.ds, params.rp, params.u[1], params.u[0], 0., 0., self.fac, self.nthreads)
        elif self.limb_dark == "uniform":
            lc = _uniform_ld._uniform_ld(self.ds, params.rp, self.nthreads)
        elif self.limb_dark == "logarithmic":
            lc = _logarithmic_ld._logarithmic_ld(self.ds, params.rp, params.u[0], params.u[1], self.fac, self.nthreads)
        elif self.limb_dark == "exponential":
            lc = _exponential_ld._exponential_ld(self.ds, params.rp, params.u[0], params.u[1], self.fac, self.nthreads)
        elif self.limb_dark == "power2":
            lc = _power2_ld._power2_ld(self.ds, params.rp, params.u[0], params.u[1], self.fac, self.nthreads)
        elif self.limb_dark == "custom":
            lc = _custom_ld._custom_ld(self.ds, params.rp, params.u[0], params.u[1], params.u[2], params.u[3], params.u[4], params.u[5], self.fac, self.nthreads)
        else:
            raise Exception("Invalid limb darkening option")
        # inverse transit: flip the light curve around unity
        if self.inverse == True:
            lc = 2. - lc
    else:
        # secondary eclipse model
        lc = _eclipse._eclipse(self.ds, params.rp, params.fp, self.nthreads)
    if self.supersample_factor == 1:
        return lc
    else:
        # bin the supersampled light curve back down to the observed cadence
        return np.mean(lc.reshape(-1, self.supersample_factor), axis=1)
Return the time of periastron passage ( calculated using params. t0 ).
def get_t_periastron(self, params):
    """
    Return the time of periastron passage (calculated using ``params.t0``).
    """
    # Offset the time of inferior conjunction backwards by the orbital
    # phase of the primary transit.
    primary_phase = self._get_phase(params, "primary")
    return params.t0 - primary_phase * params.per
Return the time of secondary eclipse center (calculated using `params.t0`).
def get_t_secondary(self, params):
    """
    Return the time of secondary eclipse center (calculated using
    ``params.t0``).
    """
    # Shift forward from the primary transit by the phase difference
    # between secondary and primary.
    phase_primary = self._get_phase(params, "primary")
    phase_secondary = self._get_phase(params, "secondary")
    return params.t0 + (phase_secondary - phase_primary) * params.per
Return the time of primary transit center (calculated using `params.t_secondary`).
def get_t_conjunction(self, params):
    """
    Return the time of primary transit center (calculated using
    ``params.t_secondary``).
    """
    # Inverse of get_t_secondary: shift backwards from the secondary
    # eclipse by the phase difference between secondary and primary.
    phase_primary = self._get_phase(params, "primary")
    phase_secondary = self._get_phase(params, "secondary")
    return params.t_secondary + (phase_primary - phase_secondary) * params.per
Return the true anomaly at each time
def get_true_anomaly(self):
    """
    Return the true anomaly at each time.

    Side effect: the result is also cached on ``self.f``.
    """
    # _getf solves the orbit for the (super)sampled time array using the
    # parameters cached on the model; inclination and argument of
    # periastron are converted from degrees to radians for the extension.
    self.f = _rsky._getf(self.t_supersample, self.t0, self.per, self.a, self.inc*pi/180., self.ecc, self.w*pi/180., self.transittype, self.nthreads)
    return self.f
Does this compiler support OpenMP parallelization?
def detect():
    """Does this compiler support OpenMP parallelization?

    Tries to compile/link a probe calling ``omp_get_num_threads()``; if
    that fails, retries after adding libgomp (GCC's OpenMP runtime) to
    the link line.

    Returns:
        bool: True if an OpenMP probe compiles and links.
    """
    compiler = new_compiler()
    hasopenmp = hasfunction(compiler, 'omp_get_num_threads()')
    if not hasopenmp:
        # Some toolchains need an explicit -lgomp to resolve the
        # OpenMP runtime symbols.
        compiler.add_library('gomp')
        hasopenmp = hasfunction(compiler, 'omp_get_num_threads()')
    # Fix: the original also tracked a local `needs_gomp` that was never
    # read or returned — dead code, removed.
    return hasopenmp
Validate the username/password data against the LDAP directory.
def validate_ldap(self):
    '''Validate the username/password data against ldap directory'''
    # Fix: the docstring above was previously a bare string statement
    # placed AFTER the first call, so it was dead code and the function
    # had no __doc__. It is now in docstring position.
    logging.debug('Validating LDAPLoginForm against LDAP')
    ldap_mgr = current_app.ldap3_login_manager
    username = self.username.data
    password = self.password.data
    result = ldap_mgr.authenticate(username, password)
    if result.status == AuthenticationResponseStatus.success:
        # Persist the authenticated user so the view can log them in.
        self.user = ldap_mgr._save_user(
            result.user_dn,
            result.user_id,
            result.user_info,
            result.user_groups
        )
        return True
    else:
        # Deliberately vague error on both fields; do not reveal which
        # of username/password was wrong.
        self.user = None
        self.username.errors.append('Invalid Username/Password.')
        self.password.errors.append('Invalid Username/Password.')
        return False
Validates the form by calling `validate` on each field, passing any extra `Form.validate_<fieldname>` validators to the field validator.
def validate(self, *args, **kwargs):
    """
    Validates the form by calling `validate` on each field, passing any
    extra `Form.validate_<fieldname>` validators to the field validator.

    Also calls `validate_ldap`, but only after the base form-level
    validation has passed.
    """
    outcome = FlaskForm.validate(self, *args, **kwargs)
    if not outcome:
        # Base validation failed; no point hitting the directory.
        logging.debug("Form validation failed before we had a chance to "
                      "check ldap. Reasons: '{0}'".format(self.errors))
        return outcome
    return self.validate_ldap()
Configures this extension with the given app. This registers a `teardown_appcontext` call and attaches this `LDAP3LoginManager` to the app as `app.ldap3_login_manager`.
def init_app(self, app):
    '''
    Configures this extension with the given app. This registers a
    ``teardown_appcontext`` call, and attaches this ``LDAP3LoginManager``
    to it as ``app.ldap3_login_manager``.

    Args:
        app (flask.Flask): The flask app to initialise with
    '''
    app.ldap3_login_manager = self

    # Drain any servers registered by a previous initialisation so
    # init_config starts from a clean pool.
    for stale_server in list(self._server_pool):
        self._server_pool.remove(stale_server)

    self.init_config(app.config)

    if hasattr(app, 'teardown_appcontext'):
        app.teardown_appcontext(self.teardown)
    else:  # pragma: no cover
        # Older flask fallback.
        app.teardown_request(self.teardown)

    self.app = app
Configures this extension with a given configuration dictionary. This allows use of this extension without a flask app.
def init_config(self, config):
    '''
    Configures this extension with a given configuration dictionary.
    This allows use of this extension without a flask app.

    Existing keys in ``config`` win; every ``setdefault`` below only
    fills in a value when the caller did not provide one.

    Args:
        config (dict): A dictionary with configuration keys
    '''
    self.config.update(config)

    # --- Connection defaults ---
    self.config.setdefault('LDAP_PORT', 389)
    self.config.setdefault('LDAP_HOST', None)
    self.config.setdefault('LDAP_USE_SSL', False)
    self.config.setdefault('LDAP_READONLY', True)
    self.config.setdefault('LDAP_CHECK_NAMES', True)

    # --- Direct-credential bind (e.g. user@domain.com style) ---
    self.config.setdefault('LDAP_BIND_DIRECT_CREDENTIALS', False)
    self.config.setdefault('LDAP_BIND_DIRECT_PREFIX', '')
    self.config.setdefault('LDAP_BIND_DIRECT_SUFFIX', '')
    self.config.setdefault('LDAP_BIND_DIRECT_GET_USER_INFO', True)

    # --- Bind/search behaviour ---
    self.config.setdefault('LDAP_ALWAYS_SEARCH_BIND', False)
    self.config.setdefault('LDAP_BASE_DN', '')
    self.config.setdefault('LDAP_BIND_USER_DN', None)
    self.config.setdefault('LDAP_BIND_USER_PASSWORD', None)
    self.config.setdefault('LDAP_SEARCH_FOR_GROUPS', True)
    self.config.setdefault('LDAP_FAIL_AUTH_ON_MULTIPLE_FOUND', False)

    # Prepended to the Base DN to limit scope when searching for
    # Users/Groups.
    self.config.setdefault('LDAP_USER_DN', '')
    self.config.setdefault('LDAP_GROUP_DN', '')

    self.config.setdefault('LDAP_BIND_AUTHENTICATION_TYPE', 'SIMPLE')

    # Ldap Filters
    self.config.setdefault('LDAP_USER_SEARCH_SCOPE', 'LEVEL')
    self.config.setdefault('LDAP_USER_OBJECT_FILTER', '(objectclass=person)')
    self.config.setdefault('LDAP_USER_LOGIN_ATTR', 'uid')
    self.config.setdefault('LDAP_USER_RDN_ATTR', 'uid')
    self.config.setdefault(
        'LDAP_GET_USER_ATTRIBUTES', ldap3.ALL_ATTRIBUTES)

    self.config.setdefault('LDAP_GROUP_SEARCH_SCOPE', 'LEVEL')
    self.config.setdefault(
        'LDAP_GROUP_OBJECT_FILTER', '(objectclass=group)')
    self.config.setdefault('LDAP_GROUP_MEMBERS_ATTR', 'uniqueMember')
    self.config.setdefault(
        'LDAP_GET_GROUP_ATTRIBUTES', ldap3.ALL_ATTRIBUTES)

    # Register the configured host with the pool unless disabled.
    self.config.setdefault('LDAP_ADD_SERVER', True)
    if self.config['LDAP_ADD_SERVER']:
        self.add_server(
            hostname=self.config['LDAP_HOST'],
            port=self.config['LDAP_PORT'],
            use_ssl=self.config['LDAP_USE_SSL']
        )
Add an additional server to the server pool and return the freshly created server.
def add_server(self, hostname, port, use_ssl, tls_ctx=None):
    """
    Add an additional server to the server pool and return the
    freshly created server.

    Args:
        hostname (str): Hostname of the server
        port (int): Port of the server
        use_ssl (bool): True if SSL is to be used when connecting.
        tls_ctx (ldap3.Tls): An optional TLS context object to use
            when connecting.

    Returns:
        ldap3.Server: The freshly created server object.

    Raises:
        ValueError: if a TLS context is supplied without use_ssl.
    """
    # A TLS context only makes sense over SSL.
    if tls_ctx and not use_ssl:
        raise ValueError("Cannot specify a TLS context and not use SSL!")
    new_server = ldap3.Server(
        hostname,
        port=port,
        use_ssl=use_ssl,
        tls=tls_ctx
    )
    self._server_pool.add(new_server)
    return new_server
Add a connection to the appcontext so it can be freed/unbound at a later time if an exception occurred and it was not freed.
def _contextualise_connection(self, connection):
    """
    Add a connection to the appcontext so it can be freed/unbound at a
    later time if an exception occured and it was not freed.

    Args:
        connection (ldap3.Connection): Connection to add to the appcontext
    """
    ctx = stack.top
    if ctx is None:
        # Not inside an app context: nothing to track against.
        return
    try:
        ctx.ldap3_manager_connections.append(connection)
    except AttributeError:
        # First connection this request: create the tracking list.
        ctx.ldap3_manager_connections = [connection]
Remove a connection from the appcontext.
def _decontextualise_connection(self, connection):
    """
    Remove a connection from the appcontext.

    Args:
        connection (ldap3.Connection): connection to remove from the
            appcontext
    """
    ctx = stack.top
    if ctx is None:
        return
    # NOTE: mirrors the original — assumes the tracking list exists on
    # the context when a context is present.
    if connection in ctx.ldap3_manager_connections:
        ctx.ldap3_manager_connections.remove(connection)
Cleanup after a request. Close any open connections.
def teardown(self, exception):
    """
    Cleanup after a request. Close any open connections.
    """
    ctx = stack.top
    if ctx is None:
        return
    # Destroy every connection tracked during this request.
    for leftover in getattr(ctx, 'ldap3_manager_connections', []):
        self.destroy_connection(leftover)
    # The shared "main" connection is unbound separately.
    if hasattr(ctx, 'ldap3_manager_main_connection'):
        log.debug(
            "Unbinding a connection used within the request context.")
        ctx.ldap3_manager_main_connection.unbind()
        ctx.ldap3_manager_main_connection = None
An abstracted authentication method. Decides whether to perform a direct bind or a search bind based upon the login attribute configured in the config.
def authenticate(self, username, password):
    """
    An abstracted authentication method. Decides whether to perform a
    direct bind or a search bind based upon the login attribute
    configured in the config.

    Args:
        username (str): Username of the user to bind
        password (str): User's password to bind with.

    Returns:
        AuthenticationResponse
    """
    if self.config.get('LDAP_BIND_DIRECT_CREDENTIALS'):
        return self.authenticate_direct_credentials(username, password)

    rdn_attr = self.config.get('LDAP_USER_RDN_ATTR')
    login_attr = self.config.get('LDAP_USER_LOGIN_ATTR')
    if not self.config.get('LDAP_ALWAYS_SEARCH_BIND') \
            and rdn_attr == login_attr:
        # The RDN equals the login field, so the user's DN can be
        # assembled directly without searching first.
        return self.authenticate_direct_bind(username, password)

    # Otherwise search for the user's DN, then bind with their password.
    return self.authenticate_search_bind(username, password)
Performs a direct bind, but using direct credentials. Can be used when interfacing with an Active Directory domain controller which authenticates using `username@domain.com` directly.
def authenticate_direct_credentials(self, username, password):
    """
    Performs a direct bind, however using direct credentials. Can be used
    if interfacing with an Active Directory domain controller which
    authenticates using username@domain.com directly.

    Performing this kind of lookup limits the information we can get from
    ldap. Instead we can only deduce whether or not their bind was
    successful. Do not use this method if you require more user info.

    Args:
        username (str): Username for the user to bind with.
            LDAP_BIND_DIRECT_PREFIX will be prepended and
            LDAP_BIND_DIRECT_SUFFIX will be appended.
        password (str): User's password to bind with.

    Returns:
        AuthenticationResponse
    """
    # Decorate the raw username with the configured prefix/suffix
    # (e.g. suffix "@domain.com" for AD-style logins).
    bind_user = '{}{}{}'.format(
        self.config.get('LDAP_BIND_DIRECT_PREFIX'),
        username,
        self.config.get('LDAP_BIND_DIRECT_SUFFIX')
    )
    connection = self._make_connection(
        bind_user=bind_user,
        bind_password=password,
    )
    response = AuthenticationResponse()
    try:
        # raise_exceptions is enabled on connections, so a failed bind
        # raises rather than returning False.
        connection.bind()
        response.status = AuthenticationResponseStatus.success
        response.user_id = username
        log.debug(
            "Authentication was successful for user '{0}'".format(username))
        if self.config.get('LDAP_BIND_DIRECT_GET_USER_INFO'):
            # User wants extra info about the bind: search for the
            # user's entry with the (already bound) connection.
            user_filter = '({search_attr}={username})'.format(
                search_attr=self.config.get('LDAP_USER_LOGIN_ATTR'),
                username=username
            )
            search_filter = '(&{0}{1})'.format(
                self.config.get('LDAP_USER_OBJECT_FILTER'),
                user_filter,
            )
            connection.search(
                search_base=self.full_user_search_dn,
                search_filter=search_filter,
                search_scope=getattr(
                    ldap3, self.config.get('LDAP_USER_SEARCH_SCOPE')),
                attributes=self.config.get('LDAP_GET_USER_ATTRIBUTES'),
            )
            if len(connection.response) == 0 or \
                (self.config.get('LDAP_FAIL_AUTH_ON_MULTIPLE_FOUND') and
                 len(connection.response) > 1):
                # Don't allow them to log in.
                # NOTE(review): only logs here — the response status
                # remains "success" even when extra info is missing.
                log.error(
                    "Could not gather extra info for user '{0}'".format(username))
            else:
                user = connection.response[0]
                # Fold the DN into the attributes dict for convenience.
                user['attributes']['dn'] = user['dn']
                response.user_info = user['attributes']
                response.user_dn = user['dn']
    except ldap3.core.exceptions.LDAPInvalidCredentialsResult:
        log.debug(
            "Authentication was not successful for user '{0}'".format(username))
        response.status = AuthenticationResponseStatus.fail
    except Exception as e:
        # Any other directory/network error also counts as a failure.
        log.error(e)
        response.status = AuthenticationResponseStatus.fail
    self.destroy_connection(connection)
    return response
Performs a direct bind. We can do this since the RDN is the same as the login attribute. Hence we just string together a dn to find this user with.
def authenticate_direct_bind(self, username, password):
    """
    Performs a direct bind. We can do this since the RDN is the same
    as the login attribute. Hence we just string together a dn to find
    this user with.

    Args:
        username (str): Username of the user to bind (the field specified
            as LDAP_BIND_RDN_ATTR)
        password (str): User's password to bind with.

    Returns:
        AuthenticationResponse
    """
    # Build the user's DN directly: "<rdn>=<username>,<search base>".
    bind_user = '{rdn}={username},{user_search_dn}'.format(
        rdn=self.config.get('LDAP_USER_RDN_ATTR'),
        username=username,
        user_search_dn=self.full_user_search_dn,
    )
    connection = self._make_connection(
        bind_user=bind_user,
        bind_password=password,
    )
    response = AuthenticationResponse()
    try:
        # raise_exceptions is enabled, so a bad bind raises below.
        connection.bind()
        log.debug(
            "Authentication was successful for user '{0}'".format(username))
        response.status = AuthenticationResponseStatus.success
        # Get user info here, reusing the already-bound user connection.
        user_info = self.get_user_info(
            dn=bind_user, _connection=connection)
        response.user_dn = bind_user
        response.user_id = username
        response.user_info = user_info
        if self.config.get('LDAP_SEARCH_FOR_GROUPS'):
            response.user_groups = self.get_user_groups(
                dn=bind_user, _connection=connection)
    except ldap3.core.exceptions.LDAPInvalidCredentialsResult:
        log.debug(
            "Authentication was not successful for user '{0}'".format(username))
        response.status = AuthenticationResponseStatus.fail
    except Exception as e:
        # Any other directory/network error also counts as a failure.
        log.error(e)
        response.status = AuthenticationResponseStatus.fail
    self.destroy_connection(connection)
    return response
Performs a search bind to authenticate a user. This is required when the login attribute is not the same as the RDN: we cannot string together the user's DN on the fly, so instead we find it in the LDAP directory, then attempt to bind with the user's credentials.
def authenticate_search_bind(self, username, password):
    """
    Performs a search bind to authenticate a user. This is
    required when a the login attribute is not the same
    as the RDN, since we cannot string together their DN on
    the fly, instead we have to find it in the LDAP, then attempt
    to bind with their credentials.

    Args:
        username (str): Username of the user to bind (the field specified
            as LDAP_BIND_LOGIN_ATTR)
        password (str): User's password to bind with when we find their dn.

    Returns:
        AuthenticationResponse
    """
    # Bind as the configured service account (or anonymously) to
    # perform the user search.
    connection = self._make_connection(
        bind_user=self.config.get('LDAP_BIND_USER_DN'),
        bind_password=self.config.get('LDAP_BIND_USER_PASSWORD'),
    )
    try:
        connection.bind()
        log.debug("Successfully bound to LDAP as '{0}' for search_bind method".format(
            self.config.get('LDAP_BIND_USER_DN') or 'Anonymous'
        ))
    except Exception as e:
        # Service bind failed: return an (unset, i.e. failed) response.
        self.destroy_connection(connection)
        log.error(e)
        return AuthenticationResponse()
    # Find the user in the search path.
    user_filter = '({search_attr}={username})'.format(
        search_attr=self.config.get('LDAP_USER_LOGIN_ATTR'),
        username=username
    )
    search_filter = '(&{0}{1})'.format(
        self.config.get('LDAP_USER_OBJECT_FILTER'),
        user_filter,
    )
    log.debug(
        "Performing an LDAP Search using filter '{0}', base '{1}', "
        "and scope '{2}'".format(
            search_filter,
            self.full_user_search_dn,
            self.config.get('LDAP_USER_SEARCH_SCOPE')
        ))
    connection.search(
        search_base=self.full_user_search_dn,
        search_filter=search_filter,
        search_scope=getattr(
            ldap3, self.config.get('LDAP_USER_SEARCH_SCOPE')),
        attributes=self.config.get('LDAP_GET_USER_ATTRIBUTES')
    )
    response = AuthenticationResponse()
    if len(connection.response) == 0 or \
        (self.config.get('LDAP_FAIL_AUTH_ON_MULTIPLE_FOUND') and
         len(connection.response) > 1):
        # Don't allow them to log in.
        log.debug(
            "Authentication was not successful for user '{0}'".format(username))
    else:
        for user in connection.response:
            # Attempt to bind with each user we find until we can find
            # one that works.
            if 'type' not in user or user.get('type') != 'searchResEntry':
                # Issue #13 - Don't return non-entry results.
                continue
            user_connection = self._make_connection(
                bind_user=user['dn'],
                bind_password=password
            )
            log.debug(
                "Directly binding a connection to a server with "
                "user:'{0}'".format(user['dn']))
            try:
                user_connection.bind()
                log.debug(
                    "Authentication was successful for user '{0}'".format(username))
                response.status = AuthenticationResponseStatus.success
                # Populate User Data
                user['attributes']['dn'] = user['dn']
                response.user_info = user['attributes']
                response.user_id = username
                response.user_dn = user['dn']
                if self.config.get('LDAP_SEARCH_FOR_GROUPS'):
                    # Group search reuses the service connection, not
                    # the per-user one.
                    response.user_groups = self.get_user_groups(
                        dn=user['dn'], _connection=connection)
                # Success: close the per-user connection and stop; the
                # break also skips the destroy after the try block.
                self.destroy_connection(user_connection)
                break
            except ldap3.core.exceptions.LDAPInvalidCredentialsResult:
                log.debug(
                    "Authentication was not successful for "
                    "user '{0}'".format(username))
                response.status = AuthenticationResponseStatus.fail
            except Exception as e:  # pragma: no cover
                # This should never happen, however in case ldap3 does ever
                # throw an error here, we catch it and log it
                log.error(e)
                response.status = AuthenticationResponseStatus.fail
            # Failed attempt: free this user's connection and try next.
            self.destroy_connection(user_connection)
    self.destroy_connection(connection)
    return response
Gets a list of groups a user at dn is a member of
def get_user_groups(self, dn, group_search_dn=None, _connection=None):
    """
    Gets a list of groups a user at dn is a member of

    Args:
        dn (str): The dn of the user to find memberships for.
        _connection (ldap3.Connection): A connection object to use when
            searching. If not given, a temporary connection will be
            created, and destroyed after use.
        group_search_dn (str): The search dn for groups. Defaults to
            ``'{LDAP_GROUP_DN},{LDAP_BASE_DN}'``.

    Returns:
        list: A list of LDAP groups the user is a member of.
    """
    connection = _connection
    if not connection:
        # No connection supplied: open (and later destroy) our own,
        # bound as the configured service account.
        connection = self._make_connection(
            bind_user=self.config.get('LDAP_BIND_USER_DN'),
            bind_password=self.config.get('LDAP_BIND_USER_PASSWORD')
        )
        connection.bind()
    # Escape the DN before splicing it into the filter to avoid LDAP
    # filter injection via special characters.
    safe_dn = ldap3.utils.conv.escape_filter_chars(dn)
    search_filter = '(&{group_filter}({members_attr}={user_dn}))'.format(
        group_filter=self.config.get('LDAP_GROUP_OBJECT_FILTER'),
        members_attr=self.config.get('LDAP_GROUP_MEMBERS_ATTR'),
        user_dn=safe_dn
    )
    log.debug(
        "Searching for groups for specific user with filter '{0}' "
        ", base '{1}' and scope '{2}'".format(
            search_filter,
            group_search_dn or self.full_group_search_dn,
            self.config.get('LDAP_GROUP_SEARCH_SCOPE')
        ))
    connection.search(
        search_base=group_search_dn or self.full_group_search_dn,
        search_filter=search_filter,
        attributes=self.config.get('LDAP_GET_GROUP_ATTRIBUTES'),
        search_scope=getattr(
            ldap3, self.config.get('LDAP_GROUP_SEARCH_SCOPE'))
    )
    results = []
    for item in connection.response:
        if 'type' not in item or item.get('type') != 'searchResEntry':
            # Issue #13 - Don't return non-entry results.
            continue
        group_data = item['attributes']
        # Include the DN alongside the group's attributes.
        group_data['dn'] = item['dn']
        results.append(group_data)
    if not _connection:
        # We made a connection, so we need to kill it.
        self.destroy_connection(connection)
    return results
Gets info about a user specified at dn.
def get_user_info(self, dn, _connection=None):
    """
    Gets info about a user specified at dn.

    Args:
        dn (str): The dn of the user to find
        _connection (ldap3.Connection): A connection object to use when
            searching. If not given, a temporary connection will be
            created, and destroyed after use.

    Returns:
        dict: A dictionary of the user info from LDAP
    """
    # Thin wrapper around get_object using the user-object filter and
    # the configured user attribute list.
    user_filter = self.config.get('LDAP_USER_OBJECT_FILTER')
    user_attributes = self.config.get("LDAP_GET_USER_ATTRIBUTES")
    return self.get_object(
        dn=dn,
        filter=user_filter,
        attributes=user_attributes,
        _connection=_connection,
    )
Gets info about a user at a specified username by searching the Users DN. Username attribute is the same as specified as LDAP_USER_LOGIN_ATTR.
def get_user_info_for_username(self, username, _connection=None):
    """
    Gets info about a user at a specified username by searching the
    Users DN. Username attribute is the same as specified as
    LDAP_USER_LOGIN_ATTR.

    Args:
        username (str): Username of the user to search for.
        _connection (ldap3.Connection): A connection object to use when
            searching. If not given, a temporary connection will be
            created, and destroyed after use.

    Returns:
        dict: A dictionary of the user info from LDAP
    """
    # SECURITY FIX: escape the caller-supplied username before splicing
    # it into the LDAP filter, preventing LDAP filter injection. This
    # matches what get_user_groups already does for DNs.
    safe_username = ldap3.utils.conv.escape_filter_chars(username)
    ldap_filter = '(&({0}={1}){2})'.format(
        self.config.get('LDAP_USER_LOGIN_ATTR'),
        safe_username,
        self.config.get('LDAP_USER_OBJECT_FILTER')
    )
    return self.get_object(
        dn=self.full_user_search_dn,
        filter=ldap_filter,
        attributes=self.config.get("LDAP_GET_USER_ATTRIBUTES"),
        _connection=_connection,
    )
Gets an object at the specified dn and returns it.
def get_object(self, dn, filter, attributes, _connection=None):
    """
    Gets an object at the specified dn and returns it.

    Args:
        dn (str): The dn of the object to find.
        filter (str): The LDAP syntax search filter.
        attributes (list): A list of LDAP attributes to get when
            searching.
        _connection (ldap3.Connection): A connection object to use when
            searching. If not given, a temporary connection will be
            created, and destroyed after use.

    Returns:
        dict: A dictionary of the object info from LDAP
    """
    connection = _connection
    owns_connection = not connection
    if owns_connection:
        # No connection supplied: open our own with the service account.
        connection = self._make_connection(
            bind_user=self.config.get('LDAP_BIND_USER_DN'),
            bind_password=self.config.get('LDAP_BIND_USER_PASSWORD')
        )
        connection.bind()

    connection.search(
        search_base=dn,
        search_filter=filter,
        attributes=attributes,
    )

    data = None
    if connection.response:
        first_entry = connection.response[0]
        data = first_entry['attributes']
        # Include the DN alongside the attributes.
        data['dn'] = first_entry['dn']

    if owns_connection:
        # We made a connection, so we need to kill it.
        self.destroy_connection(connection)
    return data
Convenience property for externally accessing an authenticated connection to the server. This connection is automatically handled by the appcontext so you do not have to perform an unbind.
def connection(self):
    """
    Convenience property for externally accessing an authenticated
    connection to the server. This connection is automatically
    handled by the appcontext, so you do not have to perform an unbind.

    Returns:
        ldap3.Connection: A bound ldap3.Connection

    Raises:
        ldap3.core.exceptions.LDAPException: Since this method is
            performing a bind on behalf of the caller. You should handle
            this case occuring, such as invalid service credentials.
    """
    ctx = stack.top
    if ctx is None:
        raise Exception("Working outside of the Flask application "
                        "context. If you wish to make a connection outside of a flask"
                        " application context, please handle your connections "
                        "and use manager.make_connection()")

    # Reuse the request's cached main connection when present.
    try:
        return ctx.ldap3_manager_main_connection
    except AttributeError:
        pass

    # First use in this request: bind as the service account and cache
    # the connection on the context for reuse/teardown.
    main_connection = self._make_connection(
        bind_user=self.config.get('LDAP_BIND_USER_DN'),
        bind_password=self.config.get('LDAP_BIND_USER_PASSWORD'),
        contextualise=False
    )
    main_connection.bind()
    ctx.ldap3_manager_main_connection = main_connection
    return main_connection
Make a connection to the LDAP Directory.
def make_connection(self, bind_user=None, bind_password=None, **kwargs):
    """
    Make a connection to the LDAP Directory.

    Args:
        bind_user (str): User to bind with. If `None`, AUTH_ANONYMOUS is
            used, otherwise authentication specified with
            config['LDAP_BIND_AUTHENTICATION_TYPE'] is used.
        bind_password (str): Password to bind to the directory with
        **kwargs (dict): Additional arguments to pass to the
            ``ldap3.Connection``

    Returns:
        ldap3.Connection: An unbound ldap3.Connection. You should handle
            exceptions upon bind if you use this internal method.
    """
    # Public variant of _make_connection that never registers the
    # connection on the app context — the caller owns its lifecycle.
    return self._make_connection(
        bind_user,
        bind_password,
        contextualise=False,
        **kwargs
    )
Make a connection.
def _make_connection(self, bind_user=None, bind_password=None,
                     contextualise=True, **kwargs):
    """
    Make a connection.

    Args:
        bind_user (str): User to bind with. If `None`, AUTH_ANONYMOUS is
            used, otherwise authentication specified with
            config['LDAP_BIND_AUTHENTICATION_TYPE'] is used.
        bind_password (str): Password to bind to the directory with
        contextualise (bool): If true (default), will add this connection
            to the appcontext so it can be unbound upon app_teardown.

    Returns:
        ldap3.Connection: An unbound ldap3.Connection. You should handle
            exceptions upon bind if you use this internal method.
    """
    # Anonymous unless a bind user is given; then use the configured
    # mechanism (e.g. 'SIMPLE') resolved to the ldap3 constant.
    authentication = ldap3.ANONYMOUS
    if bind_user:
        authentication = getattr(ldap3, self.config.get(
            'LDAP_BIND_AUTHENTICATION_TYPE'))
    log.debug("Opening connection with bind user '{0}'".format(
        bind_user or 'Anonymous'))
    connection = ldap3.Connection(
        server=self._server_pool,
        read_only=self.config.get('LDAP_READONLY'),
        user=bind_user,
        password=bind_password,
        client_strategy=ldap3.SYNC,
        authentication=authentication,
        check_names=self.config['LDAP_CHECK_NAMES'],
        # Failed operations (including bind) raise instead of returning
        # False — callers rely on this.
        raise_exceptions=True,
        **kwargs
    )
    if contextualise:
        # Track on the app context so teardown() can unbind it.
        self._contextualise_connection(connection)
    return connection
Destroys a connection. Removes the connection from the appcontext and unbinds it.
def destroy_connection(self, connection):
    """
    Destroys a connection. Removes the connection from the appcontext,
    and unbinds it.

    Args:
        connection (ldap3.Connection): The connnection to destroy
    """
    log.debug("Destroying connection at <{0}>".format(hex(id(connection))))
    # De-register from the app context first so teardown() will not try
    # to destroy it a second time, then release the directory session.
    self._decontextualise_connection(connection)
    connection.unbind()
Returns: str: A DN with the DN Base appended to the end.
def compiled_sub_dn(self, prepend):
    """
    Returns:
        str: A DN with the DN Base appended to the end.

    Args:
        prepend (str): The dn to prepend to the base.
    """
    base_dn = self.config.get('LDAP_BASE_DN')
    sub_dn = prepend.strip()
    # An empty (or whitespace-only) prefix means "just the base".
    if not sub_dn:
        return base_dn
    return '{prepend},{base}'.format(prepend=sub_dn, base=base_dn)
query a s3 endpoint for an image based on a string
def search(self, query=None, args=None):
    '''query a s3 endpoint for an image based on a string

    EXAMPLE QUERIES:

    [empty]          list all container collections
    vsoch/dinosaur   look for containers with name vsoch/dinosaur
    '''
    if query is None:
        # No query given: list everything across all fields.
        return self._search_all()
    return self._container_search(query)
A "show all" search that doesn't require a query. Parameters: `quiet` — if True, the function is only used to return rows of results (no table is printed).
def search_all(self, quiet=False):
    '''a "show all" search that doesn't require a query

    Parameters
    ==========
    quiet: if quiet is True, we only are using the function to return
           rows of results.
    '''
    rows = []
    for obj in self.bucket.objects.all():
        subsrc = obj.Object()

        # Metadata bug will capitalize all fields, workaround is to lowercase
        # https://github.com/boto/boto3/issues/1709
        metadata = {key.lower(): val for key, val in subsrc.metadata.items()}

        # MM-DD-YYYY
        modified = obj.last_modified
        datestr = "%s-%s-%s" % (modified.month, modified.day, modified.year)

        size = ''
        if 'sizemb' in metadata:
            size = '%sMB' % metadata['sizemb']

        rows.append([obj.key, datestr, size])

    if not rows:
        bot.info("No container collections found.")
        sys.exit(1)

    if not quiet:
        bot.info("Containers")
        bot.table(rows)
    return rows
Search for a specific container. If `across_collections` is False, the query is parsed as a full container name and a specific container is returned. If `across_collections` is True, the container is searched for across collections and details are not shown.
def container_search(self, query, across_collections=False):
    '''search for a specific container.
    If across collections is False, the query is parsed as a full
    container name and a specific container is returned. If
    across_collections is True, the container is searched for across
    collections. If across collections is True, details are not shown

    Returns:
        list: matching [name, date, size] rows.
    '''
    results = self._search_all(quiet=True)

    matches = []
    for result in results:
        # result[0] is the container name (bucket key)
        if query in result[0]:
            matches.append(result)

    if len(matches) > 0:
        bot.info("Containers %s" % query)
        bot.table(matches)
    else:
        # BUG FIX: previously referenced undefined variable `name` here,
        # raising NameError whenever no match was found.
        bot.info('No matches for %s found.' % query)

    return matches
query a Singularity registry for a list of images. If query is None collections are listed.
def search(self, query=None, args=None):
    '''query a Singularity registry for a list of images.
     If query is None, collections are listed.

    EXAMPLE QUERIES:

    [empty]             list all collections in registry
    vsoch               do a general search for the expression "vsoch"
    vsoch/              list all containers in collection vsoch
    /dinosaur           list containers across collections called "dinosaur"
    vsoch/dinosaur      list details of container vsoch/dinosaur
                          tag "latest" is used by default, and then the
                          most recent
    vsoch/dinosaur:tag  list details for specific container
    '''
    if query is None:
        # No query: show everything.
        return self._search_all()

    if query.endswith('/'):
        # "collection/" --> containers inside one collection
        return self._collection_search(query)

    if query.startswith('/'):
        # "/name" --> container name searched across collections
        return self._container_search(query, across_collections=True)

    if '/' in query or ':' in query:
        # "collection/name[:tag]" --> specific container details
        return self._container_search(query)

    # General expression: search collections across all fields.
    return self._collection_search(query=query)
collection search will list all containers for a specific collection. We assume query is the name of a collection
def collection_search(self, query):
    '''collection search will list all containers for a specific
    collection. We assume query is the name of a collection'''
    collection_name = query.lower().strip('/')

    result = self._get('%s/collection/%s' % (self.base, collection_name))
    if len(result) == 0:
        bot.info("No collections found.")
        sys.exit(1)

    bot.custom(prefix="COLLECTION", message=collection_name)

    # One row per container: [uri, detail]
    rows = [[container['uri'], container['detail']]
            for container in result['containers']]
    bot.table(rows)
    return rows
search across labels
def label_search(self, key=None, value=None):
    '''search across labels

    Queries the registry's label-search endpoints. Which endpoint is
    used depends on which of key/value are provided; with neither, all
    labels are listed in summary form (counts only).

    Args:
        key (str): optional label key to filter on (lowercased).
        value (str): optional label value to filter on (lowercased).

    Returns:
        list: printed table rows; exits with status 0 if none found.
    '''
    if key is not None:
        key = key.lower()
    if value is not None:
        value = value.lower()
    # Pick the endpoint matching the provided filters; only the
    # unfiltered listing uses the summary (no-details) format.
    show_details = True
    if key is None and value is None:
        url = '%s/labels/search' % (self.base)
        show_details = False
    elif key is not None and value is not None:
        url = '%s/labels/search/%s/key/%s/value' % (self.base, key, value)
    elif key is None:
        url = '%s/labels/search/%s/value' % (self.base, value)
    else:
        url = '%s/labels/search/%s/key' % (self.base, key)
    result = self._get(url)
    if len(result) == 0:
        bot.info("No labels found.")
        sys.exit(0)
    bot.info("Labels\n")
    rows = []
    for l in result:
        if show_details is True:
            # Full view: "key:value" plus the containers carrying it.
            entry = ["%s:%s" % (l['key'], l['value']),
                     "\n%s\n\n" % "\n".join(l['containers'])]
        else:
            # Summary view: container count plus "key:value".
            entry = ["N=%s" % len(l['containers']),
                     "%s:%s" % (l['key'], l['value'])]
        rows.append(entry)
    bot.table(rows)
    return rows
Search for a specific container. If `across_collections` is False, the query is parsed as a full container name and a specific container is returned. If `across_collections` is True, the container is searched for across collections and details are not shown.
def container_search(self, query, across_collections=False):
    '''search for a specific container.
    If across collections is False, the query is parsed as a full
    container name and a specific container is returned. If
    across_collections is True, the container is searched for across
    collections. If across collections is True, details are not shown'''
    query = query.lower().strip('/')

    # Split into collection/image/tag; defaults=False leaves missing
    # parts (e.g. tag) as None instead of filling in defaults.
    q = parse_image_name(remove_uri(query), defaults=False)

    # Choose the endpoint based on whether a tag was given and whether
    # the search is scoped to one collection.
    if q['tag'] is not None:
        if across_collections is True:
            url = '%s/container/search/name/%s/tag/%s' % (self.base, q['image'], q['tag'])
        else:
            url = '%s/container/search/collection/%s/name/%s/tag/%s' % (self.base, q['collection'], q['image'], q['tag'])
    elif q['tag'] is None:
        if across_collections is True:
            url = '%s/container/search/name/%s' % (self.base, q['image'])
        else:
            url = '%s/container/search/collection/%s/name/%s' % (self.base, q['collection'], q['image'])

    result = self._get(url)
    # Some endpoints wrap the list in a "containers" key; unwrap it.
    if "containers" in result:
        result = result['containers']

    if len(result) == 0:
        bot.info("No containers found.")
        sys.exit(1)

    bot.info("Containers %s" % query)

    rows = []
    for c in result:
        # One row per container: ["collection/name", tag]
        rows.append(['%s/%s' % (c['collection'], c['name']), c['tag']])
    bot.table(rows)
    return rows
Query a GitLab artifacts folder for a list of images. A collection query is required for this backend.
def search(self, query=None, args=None):
    '''query a GitLab artifacts folder for a list of images.
     If query is None, collections are listed.
    '''
    if query is None:
        # A collection is mandatory for the GitLab backend; bot.exit
        # terminates with the message.
        bot.exit('You must include a collection query, <collection>/<repo>')

    # or default to listing (searching) all things.
    return self._search_all(query)
A "show all" search that doesn't require a query; the user is shown browser URLs to the artifacts.
def search_all(self, collection, job_id=None):
    '''A "show all" search that doesn't require a query: list GitLab CI
       jobs for a collection, and show the user artifact-browser URLs
       (a path and job id are needed later for pull).

       Parameters
       ==========
       collection: the GitLab <namespace>/<project> to list jobs for
       job_id: accepted but not used by this listing — TODO confirm
    '''
    # First row is the table header
    results = [['job_id', 'browser']]

    url = "%s/projects/%s/jobs" %(self.api_base, quote_plus(collection.strip('/')))
    response = requests.get(url, headers=self.headers)

    # Non-200 responses fall through to the "nothing found" exit below
    if response.status_code == 200:
        jobs = response.json()

        # We can't get a listing of artifacts
        # https://gitlab.com/gitlab-org/gitlab-ce/issues/51515
        # Parse through jobs (each can have different tags for a collection):
        for job in jobs:

            # Only show jobs that are successful
            if job['status'] == 'success':
                name = job['name']
                for artifact in job['artifacts']:
                    if artifact['filename'].endswith('zip'):

                        # The user must browse to see the names
                        artifact_url = ("%s/%s/-/jobs/%s/artifacts/browse/%s"
                                        %(self.base , collection, job['id'], name))
                        results.append([str(job['id']), artifact_url])

    # Only the header row means nothing was found
    if len(results) == 1:
        bot.info("No potential archives found in artifacts.")
        sys.exit(0)

    bot.info("Artifact Browsers (you will need path and job id for pull)")
    bot.table(results)
    return results
ensure that the client name is included in a list of tags. This is important for matching builders to the correct client. We exit on fail. Parameters ========== tags: a list of tags to look for client name in
def _client_tagged(self, tags):
    '''Ensure that the client name appears in a list of tags.

    This is important for matching builders to the correct client;
    we exit with an error when no tag matches.

    Parameters
    ==========
    tags: a list of tags to look for client name in
    '''
    # Comparison is case-insensitive on both sides
    name = self.client_name.lower()
    tags = [tag.lower() for tag in tags]
    if name not in tags:
        bot.error('%s not found in %s, must match!' %(name, tags))
        sys.exit(1)
a function for the client to announce him or herself depending on the level specified. If you want your client to have additional announced things here then implement the class _speak for your client.
def speak(self):
    '''Announce the client (name and database) unless quiet is set.

    Clients that want additional announcements implement `_speak`.
    '''
    if self.quiet is False:
        bot.info('[client|%s] [database|%s]' %(self.client_name, self.database))
        self._speak()
the client will announce itself given that a command is not in a particular predefined list.
def announce(self, command=None):
    '''Announce the client, given that a command is not in a
       predefined "silent" list (currently just "get").
    '''
    if command is None:
        return
    # Guard: skip silent commands, and respect the quiet flag
    if command not in ['get'] and self.quiet is False:
        self.speak()
The user is required to have an application secrets file in his or her environment. The client exits with an error if the variable isn't found.
def _update_secrets(self):
    '''Load Google Drive credentials and root folder from settings.

    The user is required to have an application secrets file in the
    environment; the client exits with an error if the variable isn't
    found. The root folder defaults to 'sregistry'.
    '''
    env = 'SREGISTRY_GOOGLE_DRIVE_CREDENTIALS'
    self._secrets = self._get_and_update_setting(env)

    base = self._get_and_update_setting('SREGISTRY_GOOGLE_DRIVE_ROOT')
    self._base = 'sregistry' if base is None else base

    if self._secrets is None:
        bot.error('You must export %s to use Google Drive client' %env)
        bot.info("https://singularityhub.github.io/sregistry-cli/client-google-drive")
        sys.exit(1)
get service client for the google drive API: param version: version to use ( default is v3 )
def _get_service(self, version='v3'):
    '''get service client for the google drive API

       :param version: version to use (default is v3)
    '''
    # Assume credentials need a refresh until proven otherwise
    invalid = True

    # The user hasn't disabled cache of credentials
    if self._credential_cache is not None:
        storage = Storage(self._credential_cache)

        # A cache file on disk means the store was used before
        if os.path.exists(self._credential_cache):
            credentials = storage.get()
            if not credentials.invalid:
                invalid = False

    # If credentials are allowed but invalid, refresh
    if invalid is True:

        # Minimal stand-in for the argparse flags run_flow expects
        class flags:
            auth_host_name='localhost'
            auth_host_port=[8080]
            noauth_local_webserver=False
            logging_level='INFO'

        flow = oclient.flow_from_clientsecrets(self._secrets, self._scope)
        # NOTE(review): if self._credential_cache is None, `storage` is
        # never bound and this raises NameError — confirm callers
        # always configure a credential cache, or guard this path.
        credentials = tools.run_flow(flow, storage, flags)

        # If the user is ok to cache them
        if self._credential_cache is not None:
            storage.put(credentials)

    # Either way, authenticate the user with credentials
    http = credentials.authorize(httplib2.Http())
    return build('drive', version, http=http)
dummy add simply returns an object that mimics a database entry so the calling function ( in push or pull ) can interact with it equally. Most variables ( other than image_path ) are not used.
def add(self, image_path=None, image_uri=None, image_name=None, url=None,
        metadata=None, save=True, copy=False):
    '''Dummy add: returns an object that mimics a database entry so the
    calling function (in push or pull) can interact with it equally.
    Most variables (other than image_path and image_uri) are not used.

    Parameters
    ==========
    image_path: local path to the image file; must exist if provided
    image_uri: required uri of form <collection>/<namespace>
    image_name, metadata, save, copy: accepted for interface
        compatibility with database-backed clients; unused here
    url: recorded on the returned container object

    Returns
    =======
    a DummyContainer with image, client, url, name, tag and uri fields
    '''
    # We can only save if the image is provided
    if image_path is not None:
        if not os.path.exists(image_path):
            bot.error('Cannot find %s' %image_path)
            sys.exit(1)

    if image_uri is None:
        bot.error('You must provide an image uri <collection>/<namespace>')
        sys.exit(1)

    names = parse_image_name( remove_uri(image_uri) )
    bot.debug('Added %s to filesystem' % names['uri'])

    # Create a dummy container on the fly
    class DummyContainer:
        def __init__(self, image_path, client_name, url, names):
            self.image = image_path
            self.client = client_name
            self.url = url
            self.name = names['image']
            self.tag = names['tag']
            self.uri = names['uri']

    container = DummyContainer(image_path, self.client_name, url, names)

    # BUGFIX: `action` was referenced here without ever being defined,
    # raising NameError on every call; this is always a "new" add.
    bot.info("[container][%s] %s" % ("new", names['uri']))
    return container
query a Singularity registry for a list of images. If query is None collections are listed.
def search(self, query=None, **kwargs):
    '''Query a Singularity registry for a list of images.
    If query is None, collections are listed.

    EXAMPLE QUERIES:

    [empty]             list all collections in singularity hub
    vsoch               do a general search for collection "vsoch"
    vsoch/dinosaur      list details of container vsoch/dinosaur
                        tag "latest" is used by default, and then the
                        most recent
    vsoch/dinosaur:tag  list details for specific container
    '''
    # No query: search collections across all fields
    if query is None:
        return self.list()
    return self._search_collection(query)
a show all search that doesn't require a query
def list_all(self, **kwargs):
    '''A "show all" search that doesn't require a query.

    Returns rows of [detail, name:tag] for containers across all
    collections; pass quiet=True to suppress the printed table.
    '''
    quiet = kwargs.get('quiet', False)

    bot.spinner.start()
    results = self._paginate_get('%s/collections/' % self.base)
    bot.spinner.stop()

    if not results:
        bot.info("No container collections found.")
        sys.exit(1)

    rows = []
    for collection in results:
        if "containers" not in collection:
            continue
        # A few collection ids are deliberately excluded from listing
        if collection['id'] in [37, 38, 39]:
            continue
        for container in collection['containers']:
            rows.append([container['detail'],
                         "%s:%s" % (container['name'], container['tag'])])

    if quiet is False:
        bot.info("Collections")
        bot.table(rows)
    return rows
collection search will list all containers for a specific collection. We assume query is the name of a collection
def search_collection(self, query):
    '''Collection search: list all containers for a specific
       collection, where query is assumed to be the collection name.
    '''
    query = query.lower().strip('/')

    # Parsed but not otherwise used: the Singularity Hub search
    # endpoint needs fixing, so we filter the full listing instead.
    q = parse_image_name(remove_uri(query), defaults=False)

    containers = self.list(quiet=True)
    rows = [row for row in containers if re.search(query, row[1])]

    if rows:
        bot.table(rows)
    else:
        bot.info('No containers found.')
    return rows
pull an image from gitlab. The image is found based on the uri that should correspond to a gitlab repository and then the branch job name artifact folder and tag of the container. The minimum that we need are the job id collection and job name. Eg:
def pull(self, images, file_name=None, save=True, **kwargs):
    '''pull an image from gitlab. The image is found based on the uri
       that should correspond to a gitlab repository, and then the
       branch, job name, artifact folder, and tag of the container.
       The minimum that we need are the job id, collection, and job
       name. Eg:

       job_id|collection|job_name (or) job_id|collection

       Parameters
       ==========
       images: refers to the uri given by the user to pull in the
               format specified above
       file_name: the user's requested name for the file. It can
                  optionally be None if the user wants a default.
       save: if True, you should save the container to the database
             using self.add()

       Returns
       =======
       finished: a single container path, or list of paths
    '''
    force = False
    if "force" in kwargs:
        force = kwargs['force']

    # Normalize to a list so single pulls and batches share one path
    if not isinstance(images, list):
        images = [images]

    bot.debug('Execution of PULL for %s images' %len(images))

    # If used internally we want to return a list to the user.
    finished = []
    for image in images:

        # Format job_id|collection|job_name
        # 122056733,singularityhub/gitlab-ci'
        # 122056733,singularityhub/gitlab-ci,build
        job_id, collection, job_name = self._parse_image_name(image)
        names = parse_image_name(remove_uri(collection))

        # If the user didn't provide a file, make one based on the names
        if file_name is None:
            file_name = self._get_storage_name(names)

        # If the file already exists and force is False
        if os.path.exists(file_name) and force is False:
            bot.error('Image exists! Remove first, or use --force to overwrite')
            sys.exit(1)

        # Put together the GitLab URI; the "latest" tag maps to the
        # unsuffixed artifact name
        image_name = "Singularity.%s.simg" %(names['tag'])
        if names['tag'] == 'latest':
            image_name = "Singularity.simg"

        # Assemble artifact path
        artifact_path = "%s/%s" %(self.artifacts, image_name)
        bot.info('Looking for artifact %s for job name %s, %s' %(artifact_path, job_name, job_id))

        # NOTE(review): `project` is computed but only used by the
        # commented-out API url below — currently dead
        project = quote_plus(collection.strip('/'))

        # This is supposed to work, but it doesn't
        # url = "%s/projects/%s/jobs/%s/artifacts/file/%s" %(self.api_base,
        #                                                    project, job_id,
        #                                                    artifact_path)

        # This does work :)
        url = "%s/%s/-/jobs/%s/artifacts/raw/%s/?inline=false" % (self.base, collection, job_id, artifact_path)
        bot.info(url)

        # stream the url content to the file name
        image_file = self.download(url=url,
                                   file_name=file_name,
                                   show_progress=True)

        # Record provenance of the pull alongside the image
        metadata = self._get_metadata()
        metadata['collection'] = collection
        metadata['job_id'] = job_id
        metadata['job_name'] = job_name
        metadata['artifact_path'] = artifact_path
        metadata['sregistry_pull'] = image

        # If we save to storage, the uri is the dropbox_path
        if save is True:
            container = self.add(image_path = image_file,
                                 image_uri = image,
                                 metadata = metadata,
                                 url = url)

            # When the container is created, this is the path to the image
            image_file = container.image

        if os.path.exists(image_file):
            bot.debug('Retrieved image file %s' %image_file)
            bot.custom(prefix="Success!", message=image_file)
            finished.append(image_file)

    # Unwrap a single result for convenience
    if len(finished) == 1:
        finished = finished[0]
    return finished
run will send a list of tasks a tuple with arguments through a function. the arguments should be ordered correctly.: param func: the function to run with multiprocessing. pool: param tasks: a list of tasks each a tuple of arguments to process: param func2: filter function to run result from func through ( optional )
def run(self, func, tasks, func2=None): '''run will send a list of tasks, a tuple with arguments, through a function. the arguments should be ordered correctly. :param func: the function to run with multiprocessing.pool :param tasks: a list of tasks, each a tuple of arguments to process :param func2: filter function to run result from func through (optional) ''' # Keep track of some progress for the user progress = 1 total = len(tasks) # if we don't have tasks, don't run if len(tasks) == 0: return # If two functions are run per task, double total jobs if func2 is not None: total = total * 2 finished = [] level1 = [] results = [] try: prefix = "[%s/%s]" % (progress, total) bot.show_progress(0, total, length=35, prefix=prefix) pool = multiprocessing.Pool(self.workers, init_worker) self.start() for task in tasks: result = pool.apply_async(multi_wrapper, multi_package(func, [task])) results.append(result) level1.append(result._job) while len(results) > 0: result = results.pop() result.wait() bot.show_progress(progress, total, length=35, prefix=prefix) progress += 1 prefix = "[%s/%s]" % (progress, total) # Pass the result through a second function? if func2 is not None and result._job in level1: result = pool.apply_async(multi_wrapper, multi_package(func2, [(result.get(),)])) results.append(result) else: finished.append(result.get()) self.end() pool.close() pool.join() except (KeyboardInterrupt, SystemExit): bot.error("Keyboard interrupt detected, terminating workers!") pool.terminate() sys.exit(1) except Exception as e: bot.error(e) return finished
get_cache will return the user s cache for singularity.: param subfolder: a subfolder in the cache base to retrieve specifically
def get_cache(subfolder=None, quiet=False):
    '''Return the user's cache folder for singularity, creating it
       if it doesn't exist.

       :param subfolder: a subfolder in the cache base to retrieve, specifically
    '''
    # A disabled cache means a throwaway temporary directory
    disabled = convert2boolean(getenv("SINGULARITY_DISABLE_CACHE",
                                      default=False))
    if disabled:
        cache_dir = tempfile.mkdtemp()
    else:
        # Default to ~/.singularity, overridable via SINGULARITY_CACHEDIR
        home = pwd.getpwuid(os.getuid())[5]
        default_cache = os.path.join(home, ".singularity")
        cache_dir = getenv("SINGULARITY_CACHEDIR", default=default_cache)

    cache_base = clean_path(cache_dir)

    # Does the user want to get a subfolder in cache base?
    if subfolder is not None:
        cache_base = "%s/%s" % (cache_base, subfolder)

    # Create the cache folder(s) if they don't exist
    mkdir_p(cache_base)
    if not quiet:
        bot.debug("Cache folder set to %s" % cache_base)
    return cache_base
push an image to Google Cloud Storage meaning uploading it path: should correspond to an absolute image path ( or derive it ) name: should be the complete uri that the user has requested to push. tag: should correspond with an image tag. This is provided to mirror Docker
def push(self, path, name, tag=None):
    '''Push (upload) an image to Google Cloud Storage.

    path: should correspond to an absolute image path (or derive it)
    name: should be the complete uri that the user has requested to push
    tag: should correspond with an image tag; provided to mirror Docker
    '''
    path = os.path.abspath(path)
    bot.debug("PUSH %s" % path)

    if not os.path.exists(path):
        bot.error('%s does not exist.' %path)
        sys.exit(1)

    # Parse the uri into collection/container names; when no version is
    # present, re-parse with the image hash so storage keys are unique.
    names = parse_image_name(remove_uri(name), tag=tag)
    if names['version'] is None:
        names = parse_image_name(remove_uri(name),
                                 tag=tag,
                                 version=get_image_hash(path))

    # Update metadata with names
    metadata = self.get_metadata(path, names=names)
    if "data" in metadata:
        metadata = metadata['data']
    metadata.update(names)

    manifest = self._upload(source=path,
                            destination=names['storage'],
                            metadata=metadata)
    print(manifest['mediaLink'])
upload a file from a source to a destination. The client is expected to have a bucket ( self. _bucket ) that is created when instantiated. This would be the method to do the same using the storage client but not easily done for resumable
def upload(self, source, destination, bucket, chunk_size = 2 * 1024 * 1024, metadata=None, keep_private=True):
    '''upload a file from a source to a destination. The client is
       expected to have a bucket (self._bucket) that is created when
       instantiated.

       This would be the method to do the same using the storage client,
       but not easily done for resumable

       blob = self._bucket.blob(destination)
       blob.upload_from_filename(filename=source,
                                 content_type="application/zip",
                                 client=self._service)

       url = blob.public_url
       if isinstance(url, six.binary_type):
           url = url.decode('utf-8')

       return url
    '''
    # An environment setting overrides the keep_private default
    env = 'SREGISTRY_GOOGLE_STORAGE_PRIVATE'
    keep_private = self._get_and_update_setting(env) or keep_private

    # Resumable upload in chunk_size pieces via the raw API service
    media = MediaFileUpload(source, chunksize=chunk_size, resumable=True)
    request = self._storage_service.objects().insert(bucket=bucket.name,
                                                     name=destination,
                                                     media_body=media)

    # Progress is reported in MB; note request.resumable._size is a
    # private attribute of the upload object
    response = None
    total = request.resumable._size / (1024*1024.0)
    bar = ProgressBar(expected_size=total, filled_char='=', hide=self.quiet)

    while response is None:
        # NOTE(review): `error` is assigned but never used, and the
        # bare `except: raise` is a no-op — candidates for cleanup
        error = None
        try:
            progress, response = request.next_chunk()
            if progress:
                bar.show(progress.resumable_progress / (1024*1024.0))
        except:
            raise

    # When we finish upload, get as blob
    blob = bucket.blob(destination)
    if blob.exists():
        if not keep_private:
            blob.make_public()

        # If the user has a dictionary of metadata to update
        if metadata is not None:
            # NOTE(review): `body` is computed but not used; the raw
            # metadata dict is patched onto the blob instead — confirm
            # prepare_metadata output was meant to be applied here
            body = prepare_metadata(metadata)
            blob.metadata = metadata
            blob._properties['metadata'] = metadata
            blob.patch()

    return response
update headers with a token & other fields
def update_headers(self, fields=None):
    '''Update headers with a token & other fields.

    Headers are (re)initialized when missing or None, then any
    provided fields are merged in.
    '''
    # Initialize headers if they were never set, or were cleared
    if getattr(self, 'headers', None) is None:
        self._reset_headers()

    if fields is not None:
        for name, value in fields.items():
            self.headers[name] = value

    header_names = ",".join(list(self.headers.keys()))
    bot.debug("Headers found: %s" %header_names)