partition: stringclasses (3 values)
func_name: stringlengths (1 to 134)
docstring: stringlengths (1 to 46.9k)
path: stringlengths (4 to 223)
original_string: stringlengths (75 to 104k)
code: stringlengths (75 to 104k)
docstring_tokens: listlengths (1 to 1.97k)
repo: stringlengths (7 to 55)
language: stringclasses (1 value)
url: stringlengths (87 to 315)
code_tokens: listlengths (19 to 28.4k)
sha: stringlengths (40 to 40)
valid
set_name_lists
Set three globally available lists of names.
pantheon/names.py
def set_name_lists(ethnicity=None):
    """Set three globally available lists of names."""
    if not ethnicity:
        ethnicity = random.choice(get_ethnicities())

    print("Loading names from: " + ethnicity)
    filename = names_dir + ethnicity + '.json'

    try:
        with open(filename, 'r') as injson:
            data = json.load(injson)
    except:
        return 'Unable to read from file: ' + filename
    else:
        names = [ tuple(name.split(',')) for name in data ]
        random.shuffle(names)

        global female_names
        female_names = [name for name,gender,*desc in names if gender == 'girl']
        global male_names
        male_names = [name for name,gender,*desc in names if gender == 'boy']
        global nb_names
        nb_names = [name for name,gender,*desc in names if gender == 'boygirl']
[ "Set", "three", "globally", "available", "lists", "of", "names", "." ]
carawarner/pantheon
python
https://github.com/carawarner/pantheon/blob/7e8718f4397eaa389fb3d5dc04fa01c7cb556512/pantheon/names.py#L14-L36
[ "def", "set_name_lists", "(", "ethnicity", "=", "None", ")", ":", "if", "not", "ethnicity", ":", "ethnicity", "=", "random", ".", "choice", "(", "get_ethnicities", "(", ")", ")", "print", "(", "\"Loading names from: \"", "+", "ethnicity", ")", "filename", "=", "names_dir", "+", "ethnicity", "+", "'.json'", "try", ":", "with", "open", "(", "filename", ",", "'r'", ")", "as", "injson", ":", "data", "=", "json", ".", "load", "(", "injson", ")", "except", ":", "return", "'Unable to read from file: '", "+", "filename", "else", ":", "names", "=", "[", "tuple", "(", "name", ".", "split", "(", "','", ")", ")", "for", "name", "in", "data", "]", "random", ".", "shuffle", "(", "names", ")", "global", "female_names", "female_names", "=", "[", "name", "for", "name", ",", "gender", ",", "", "*", "desc", "in", "names", "if", "gender", "==", "'girl'", "]", "global", "male_names", "male_names", "=", "[", "name", "for", "name", ",", "gender", ",", "", "*", "desc", "in", "names", "if", "gender", "==", "'boy'", "]", "global", "nb_names", "nb_names", "=", "[", "name", "for", "name", ",", "gender", ",", "", "*", "desc", "in", "names", "if", "gender", "==", "'boygirl'", "]" ]
7e8718f4397eaa389fb3d5dc04fa01c7cb556512
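A minimal usage sketch of the function above, not part of the dataset row; the import path for pantheon/names.py is an assumption, and the printed globals are the three lists the docstring refers to:

from pantheon import names   # assumed import path for pantheon/names.py

result = names.set_name_lists()   # picks a random ethnicity from get_ethnicities()
if isinstance(result, str):
    print(result)                 # the function returns an error string when the file can't be read
else:
    print(names.female_names[:3], names.male_names[:3], names.nb_names[:3])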
valid
God.set_chromosomes
This model uses the XY sex-determination system. Sex != gender. Assign either XX or XY randomly with a 50/50 chance of each, unless <chromosomes> are passed as an argument.
pantheon/gods.py
def set_chromosomes(self, chromosomes=None):
    """This model uses the XY sex-determination system. Sex != gender.
    Assign either XX or XY randomly with a 50/50 chance of each, unless
    <chromosomes> are passed as an argument.
    """
    if chromosomes and chromosomes in valid_chromosomes:
        self.chromosomes = chromosomes
    else:
        self.chromosomes = random.choice([XX, XY])
[ "This", "model", "uses", "the", "XY", "sex", "-", "determination", "system", ".", "Sex", "!", "=", "gender", ".", "Assign", "either", "XX", "or", "XY", "randomly", "with", "a", "50", "/", "50", "chance", "of", "each", "unless", "<chromosomes", ">", "are", "passed", "as", "an", "argument", "." ]
carawarner/pantheon
python
https://github.com/carawarner/pantheon/blob/7e8718f4397eaa389fb3d5dc04fa01c7cb556512/pantheon/gods.py#L49-L57
[ "def", "set_chromosomes", "(", "self", ",", "chromosomes", "=", "None", ")", ":", "if", "chromosomes", "and", "chromosomes", "in", "valid_chromosomes", ":", "self", ".", "chromosomes", "=", "chromosomes", "else", ":", "self", ".", "chromosomes", "=", "random", ".", "choice", "(", "[", "XX", ",", "XY", "]", ")" ]
7e8718f4397eaa389fb3d5dc04fa01c7cb556512
valid
God.set_gender
This model recognizes that sex chromosomes don't always line up with gender. Assign M, F, or NB according to the probabilities in p_gender.
pantheon/gods.py
def set_gender(self, gender=None):
    """This model recognizes that sex chromosomes don't always line up with
    gender. Assign M, F, or NB according to the probabilities in p_gender.
    """
    if gender and gender in genders:
        self.gender = gender
    else:
        if not self.chromosomes: self.set_chromosomes()
        self.gender = npchoice(genders, 1, p=p_gender[self.chromosomes])[0]
[ "This", "model", "recognizes", "that", "sex", "chromosomes", "don", "t", "always", "line", "up", "with", "gender", ".", "Assign", "M", "F", "or", "NB", "according", "to", "the", "probabilities", "in", "p_gender", "." ]
carawarner/pantheon
python
https://github.com/carawarner/pantheon/blob/7e8718f4397eaa389fb3d5dc04fa01c7cb556512/pantheon/gods.py#L60-L68
[ "def", "set_gender", "(", "self", ",", "gender", "=", "None", ")", ":", "if", "gender", "and", "gender", "in", "genders", ":", "self", ".", "gender", "=", "gender", "else", ":", "if", "not", "self", ".", "chromosomes", ":", "self", ".", "set_chromosomes", "(", ")", "self", ".", "gender", "=", "npchoice", "(", "genders", ",", "1", ",", "p", "=", "p_gender", "[", "self", ".", "chromosomes", "]", ")", "[", "0", "]" ]
7e8718f4397eaa389fb3d5dc04fa01c7cb556512
valid
God.set_inherited_traits
Accept either strings or Gods as inputs.
pantheon/gods.py
def set_inherited_traits(self, egg_donor, sperm_donor):
    """Accept either strings or Gods as inputs."""
    if type(egg_donor) == str:
        self.reproduce_asexually(egg_donor, sperm_donor)
    else:
        self.reproduce_sexually(egg_donor, sperm_donor)
[ "Accept", "either", "strings", "or", "Gods", "as", "inputs", "." ]
carawarner/pantheon
python
https://github.com/carawarner/pantheon/blob/7e8718f4397eaa389fb3d5dc04fa01c7cb556512/pantheon/gods.py#L71-L76
[ "def", "set_inherited_traits", "(", "self", ",", "egg_donor", ",", "sperm_donor", ")", ":", "if", "type", "(", "egg_donor", ")", "==", "str", ":", "self", ".", "reproduce_asexually", "(", "egg_donor", ",", "sperm_donor", ")", "else", ":", "self", ".", "reproduce_sexually", "(", "egg_donor", ",", "sperm_donor", ")" ]
7e8718f4397eaa389fb3d5dc04fa01c7cb556512
valid
God.reproduce_asexually
Produce two gametes, an egg and a sperm, from the input strings. Combine them to produce a genome a la sexual reproduction.
pantheon/gods.py
def reproduce_asexually(self, egg_word, sperm_word):
    """Produce two gametes, an egg and a sperm, from the input strings.
    Combine them to produce a genome a la sexual reproduction.
    """
    egg = self.generate_gamete(egg_word)
    sperm = self.generate_gamete(sperm_word)

    self.genome = list(set(egg + sperm)) # Eliminate duplicates
    self.generation = 1
    self.divinity = god
[ "Produce", "two", "gametes", "an", "egg", "and", "a", "sperm", "from", "the", "input", "strings", ".", "Combine", "them", "to", "produce", "a", "genome", "a", "la", "sexual", "reproduction", "." ]
carawarner/pantheon
python
https://github.com/carawarner/pantheon/blob/7e8718f4397eaa389fb3d5dc04fa01c7cb556512/pantheon/gods.py#L79-L88
[ "def", "reproduce_asexually", "(", "self", ",", "egg_word", ",", "sperm_word", ")", ":", "egg", "=", "self", ".", "generate_gamete", "(", "egg_word", ")", "sperm", "=", "self", ".", "generate_gamete", "(", "sperm_word", ")", "self", ".", "genome", "=", "list", "(", "set", "(", "egg", "+", "sperm", ")", ")", "# Eliminate duplicates", "self", ".", "generation", "=", "1", "self", ".", "divinity", "=", "god" ]
7e8718f4397eaa389fb3d5dc04fa01c7cb556512
valid
God.reproduce_sexually
Produce two gametes, an egg and a sperm, from input Gods. Combine them to produce a genome a la sexual reproduction. Assign divinity according to probabilities in p_divinity. The more divine the parents, the more divine their offspring.
pantheon/gods.py
def reproduce_sexually(self, egg_donor, sperm_donor):
    """Produce two gametes, an egg and a sperm, from input Gods. Combine
    them to produce a genome a la sexual reproduction. Assign divinity
    according to probabilities in p_divinity. The more divine the parents,
    the more divine their offspring.
    """
    egg_word = random.choice(egg_donor.genome)
    egg = self.generate_gamete(egg_word)
    sperm_word = random.choice(sperm_donor.genome)
    sperm = self.generate_gamete(sperm_word)

    self.genome = list(set(egg + sperm)) # Eliminate duplicates
    self.parents = [egg_donor.name, sperm_donor.name]
    self.generation = max(egg_donor.generation, sperm_donor.generation) + 1
    sum_ = egg_donor.divinity + sperm_donor.divinity
    self.divinity = int(npchoice(divinities, 1, p=p_divinity[sum_])[0])
[ "Produce", "two", "gametes", "an", "egg", "and", "a", "sperm", "from", "input", "Gods", ".", "Combine", "them", "to", "produce", "a", "genome", "a", "la", "sexual", "reproduction", ".", "Assign", "divinity", "according", "to", "probabilities", "in", "p_divinity", ".", "The", "more", "divine", "the", "parents", "the", "more", "divine", "their", "offspring", "." ]
carawarner/pantheon
python
https://github.com/carawarner/pantheon/blob/7e8718f4397eaa389fb3d5dc04fa01c7cb556512/pantheon/gods.py#L91-L106
[ "def", "reproduce_sexually", "(", "self", ",", "egg_donor", ",", "sperm_donor", ")", ":", "egg_word", "=", "random", ".", "choice", "(", "egg_donor", ".", "genome", ")", "egg", "=", "self", ".", "generate_gamete", "(", "egg_word", ")", "sperm_word", "=", "random", ".", "choice", "(", "sperm_donor", ".", "genome", ")", "sperm", "=", "self", ".", "generate_gamete", "(", "sperm_word", ")", "self", ".", "genome", "=", "list", "(", "set", "(", "egg", "+", "sperm", ")", ")", "# Eliminate duplicates", "self", ".", "parents", "=", "[", "egg_donor", ".", "name", ",", "sperm_donor", ".", "name", "]", "self", ".", "generation", "=", "max", "(", "egg_donor", ".", "generation", ",", "sperm_donor", ".", "generation", ")", "+", "1", "sum_", "=", "egg_donor", ".", "divinity", "+", "sperm_donor", ".", "divinity", "self", ".", "divinity", "=", "int", "(", "npchoice", "(", "divinities", ",", "1", ",", "p", "=", "p_divinity", "[", "sum_", "]", ")", "[", "0", "]", ")" ]
7e8718f4397eaa389fb3d5dc04fa01c7cb556512
valid
God.set_name
Pick a random name from the lists loaded with the model. For Gods that identify as neither M nor F, the model attempts to retrieve an androgynous name. Note: not all of the scraped name lists contain androgynous names.
pantheon/gods.py
def set_name(self):
    """Pick a random name from the lists loaded with the model.
    For Gods that identify as neither M nor F, the model attempts to
    retrieve an androgynous name. Note: not all of the scraped name lists
    contain androgynous names.
    """
    if not self.gender:
        self.set_gender()

    name = ''
    if self.gender == female:
        name = names.female_names.pop()
    elif self.gender == male:
        name = names.male_names.pop()
    else:
        try:
            name = names.nb_names.pop()
        except: # No androgynous names available
            name = names.male_names.pop()

    self.name = name
[ "Pick", "a", "random", "name", "from", "the", "lists", "loaded", "with", "the", "model", ".", "For", "Gods", "that", "identify", "as", "neither", "M", "nor", "F", "the", "model", "attempts", "to", "retrieve", "an", "androgynous", "name", ".", "Note", ":", "not", "all", "of", "the", "scraped", "name", "lists", "contain", "androgynous", "names", "." ]
carawarner/pantheon
python
https://github.com/carawarner/pantheon/blob/7e8718f4397eaa389fb3d5dc04fa01c7cb556512/pantheon/gods.py#L109-L128
[ "def", "set_name", "(", "self", ")", ":", "if", "not", "self", ".", "gender", ":", "self", ".", "set_gender", "(", ")", "name", "=", "''", "if", "self", ".", "gender", "==", "female", ":", "name", "=", "names", ".", "female_names", ".", "pop", "(", ")", "elif", "self", ".", "gender", "==", "male", ":", "name", "=", "names", ".", "male_names", ".", "pop", "(", ")", "else", ":", "try", ":", "name", "=", "names", ".", "nb_names", ".", "pop", "(", ")", "except", ":", "# No androgynous names available", "name", "=", "names", ".", "male_names", ".", "pop", "(", ")", "self", ".", "name", "=", "name" ]
7e8718f4397eaa389fb3d5dc04fa01c7cb556512
valid
God.set_epithet
Divine an appropriate epithet for this God. (See what I did there?)
pantheon/gods.py
def set_epithet(self):
    """Divine an appropriate epithet for this God. (See what I did there?)"""
    if self.divinity == human:
        obsession = random.choice(self.genome)

        if self.gender == female:
            self.epithet = 'ordinary woman'
        elif self.gender == male:
            self.epithet = 'ordinary man'
        else:
            self.epithet = 'ordinary human being'

        self.epithet += ' who loves ' + obsession
        return # Return early. The rest of the function deals with gods.

    if self.gender == female:
        title = 'Goddess'
    elif self.gender == male:
        title = 'God'
    else:
        title = 'Divine Being'

    if self.divinity == demi_god:
        title = 'Semi-' + title if self.gender == non_binary else 'Demi-' + title

    num_domains = npchoice([1,2,3,4], 1, p=[0.05, 0.35, 0.55, 0.05])[0]
    if num_domains == 1:
        template = '%s of %s'
    if num_domains == 2:
        template = '%s of %s and %s'
    elif num_domains == 3:
        template = '%s of %s, %s, and %s' # Oxford comma, the most divine punctuation.
    elif num_domains == 4:
        template = '%s of %s, %s, %s, and %s'

    self.domains = [d.title() for d in random.sample(self.genome, num_domains)]

    # Put it all together
    self.epithet = template % (title, *self.domains)
[ "Divine", "an", "appropriate", "epithet", "for", "this", "God", ".", "(", "See", "what", "I", "did", "there?", ")" ]
carawarner/pantheon
python
https://github.com/carawarner/pantheon/blob/7e8718f4397eaa389fb3d5dc04fa01c7cb556512/pantheon/gods.py#L131-L167
[ "def", "set_epithet", "(", "self", ")", ":", "if", "self", ".", "divinity", "==", "human", ":", "obsession", "=", "random", ".", "choice", "(", "self", ".", "genome", ")", "if", "self", ".", "gender", "==", "female", ":", "self", ".", "epithet", "=", "'ordinary woman'", "elif", "self", ".", "gender", "==", "male", ":", "self", ".", "epithet", "=", "'ordinary man'", "else", ":", "self", ".", "epithet", "=", "'ordinary human being'", "self", ".", "epithet", "+=", "' who loves '", "+", "obsession", "return", "# Return early. The rest of the function deals with gods.", "if", "self", ".", "gender", "==", "female", ":", "title", "=", "'Goddess'", "elif", "self", ".", "gender", "==", "male", ":", "title", "=", "'God'", "else", ":", "title", "=", "'Divine Being'", "if", "self", ".", "divinity", "==", "demi_god", ":", "title", "=", "'Semi-'", "+", "title", "if", "self", ".", "gender", "==", "non_binary", "else", "'Demi-'", "+", "title", "num_domains", "=", "npchoice", "(", "[", "1", ",", "2", ",", "3", ",", "4", "]", ",", "1", ",", "p", "=", "[", "0.05", ",", "0.35", ",", "0.55", ",", "0.05", "]", ")", "[", "0", "]", "if", "num_domains", "==", "1", ":", "template", "=", "'%s of %s'", "if", "num_domains", "==", "2", ":", "template", "=", "'%s of %s and %s'", "elif", "num_domains", "==", "3", ":", "template", "=", "'%s of %s, %s, and %s'", "# Oxford comma, the most divine punctuation.", "elif", "num_domains", "==", "4", ":", "template", "=", "'%s of %s, %s, %s, and %s'", "self", ".", "domains", "=", "[", "d", ".", "title", "(", ")", "for", "d", "in", "random", ".", "sample", "(", "self", ".", "genome", ",", "num_domains", ")", "]", "# Put it all together", "self", ".", "epithet", "=", "template", "%", "(", "title", ",", "*", "self", ".", "domains", ")" ]
7e8718f4397eaa389fb3d5dc04fa01c7cb556512
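The four setters above (set_chromosomes, set_gender, set_name, set_epithet) are written to be called in sequence, each one falling back to the previous setter when its prerequisite attribute is missing. A minimal sketch, assuming a God instance `g` whose genome and divinity were already populated via set_inherited_traits (the God constructor itself is not shown in these rows):

# Sketch only: how `g` is constructed is an assumption, not taken from the rows above.
g.set_chromosomes()   # XX or XY, 50/50
g.set_gender()        # M, F, or NB, weighted by p_gender[g.chromosomes]
g.set_name()          # pops a name from the globally loaded name lists
g.set_epithet()       # fills one of the '%s of %s ...' templates, e.g. 'Goddess of Storms and Wisdom'
print(g.name, '-', g.epithet)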
valid
God.generate_gamete
Extract 23 'chromosomes' aka words from 'gene pool' aka list of tokens by searching the list of tokens for words that are related to the given egg_or_sperm_word.
pantheon/gods.py
def generate_gamete(self, egg_or_sperm_word):
    """Extract 23 'chromosomes' aka words from 'gene pool' aka list of tokens
    by searching the list of tokens for words that are related to the given
    egg_or_sperm_word.
    """
    p_rate_of_mutation = [0.9, 0.1]
    should_use_mutant_pool = (npchoice([0,1], 1, p=p_rate_of_mutation)[0] == 1)

    if should_use_mutant_pool:
        pool = tokens.secondary_tokens
    else:
        pool = tokens.primary_tokens

    return get_matches(egg_or_sperm_word, pool, 23)
[ "Extract", "23", "chromosomes", "aka", "words", "from", "gene", "pool", "aka", "list", "of", "tokens", "by", "searching", "the", "list", "of", "tokens", "for", "words", "that", "are", "related", "to", "the", "given", "egg_or_sperm_word", "." ]
carawarner/pantheon
python
https://github.com/carawarner/pantheon/blob/7e8718f4397eaa389fb3d5dc04fa01c7cb556512/pantheon/gods.py#L170-L182
[ "def", "generate_gamete", "(", "self", ",", "egg_or_sperm_word", ")", ":", "p_rate_of_mutation", "=", "[", "0.9", ",", "0.1", "]", "should_use_mutant_pool", "=", "(", "npchoice", "(", "[", "0", ",", "1", "]", ",", "1", ",", "p", "=", "p_rate_of_mutation", ")", "[", "0", "]", "==", "1", ")", "if", "should_use_mutant_pool", ":", "pool", "=", "tokens", ".", "secondary_tokens", "else", ":", "pool", "=", "tokens", ".", "primary_tokens", "return", "get_matches", "(", "egg_or_sperm_word", ",", "pool", ",", "23", ")" ]
7e8718f4397eaa389fb3d5dc04fa01c7cb556512
valid
God.print_parents
Print parents' names and epithets.
pantheon/gods.py
def print_parents(self):
    """Print parents' names and epithets."""
    if self.gender == female:
        title = 'Daughter'
    elif self.gender == male:
        title = 'Son'
    else:
        title = 'Child'

    p1 = self.parents[0]
    p2 = self.parents[1]

    template = '%s of %s, the %s, and %s, the %s.'
    print(template % (title, p1.name, p1.epithet, p2.name, p2.epithet))
[ "Print", "parents", "names", "and", "epithets", "." ]
carawarner/pantheon
python
https://github.com/carawarner/pantheon/blob/7e8718f4397eaa389fb3d5dc04fa01c7cb556512/pantheon/gods.py#L185-L199
[ "def", "print_parents", "(", "self", ")", ":", "if", "self", ".", "gender", "==", "female", ":", "title", "=", "'Daughter'", "elif", "self", ".", "gender", "==", "male", ":", "title", "=", "'Son'", "else", ":", "title", "=", "'Child'", "p1", "=", "self", ".", "parents", "[", "0", "]", "p2", "=", "self", ".", "parents", "[", "1", "]", "template", "=", "'%s of %s, the %s, and %s, the %s.'", "print", "(", "template", "%", "(", "title", ",", "p1", ".", "name", ",", "p1", ".", "epithet", ",", "p2", ".", "name", ",", "p2", ".", "epithet", ")", ")" ]
7e8718f4397eaa389fb3d5dc04fa01c7cb556512
valid
Stage.instance
Returns all the information regarding a specific stage run See the `Go stage instance documentation`__ for examples. .. __: http://api.go.cd/current/#get-stage-instance Args: counter (int): The stage instance to fetch. If falsey returns the latest stage instance from :meth:`history`. pipeline_counter (int): The pipeline instance for which to fetch the stage. If falsey returns the latest pipeline instance. Returns: Response: :class:`gocd.api.response.Response` object
gocd/api/stage.py
def instance(self, counter=None, pipeline_counter=None):
    """Returns all the information regarding a specific stage run

    See the `Go stage instance documentation`__ for examples.

    .. __: http://api.go.cd/current/#get-stage-instance

    Args:
      counter (int): The stage instance to fetch.
        If falsey returns the latest stage instance from :meth:`history`.
      pipeline_counter (int): The pipeline instance for which to fetch
        the stage. If falsey returns the latest pipeline instance.

    Returns:
      Response: :class:`gocd.api.response.Response` object
    """
    pipeline_counter = pipeline_counter or self.pipeline_counter
    pipeline_instance = None

    if not pipeline_counter:
        pipeline_instance = self.server.pipeline(self.pipeline_name).instance()
        self.pipeline_counter = int(pipeline_instance['counter'])

    if not counter:
        if pipeline_instance is None:
            pipeline_instance = (
                self.server
                .pipeline(self.pipeline_name)
                .instance(pipeline_counter)
            )

        for stages in pipeline_instance['stages']:
            if stages['name'] == self.stage_name:
                return self.instance(
                    counter=int(stages['counter']),
                    pipeline_counter=pipeline_counter
                )

    return self._get('/instance/{pipeline_counter:d}/{counter:d}'
                     .format(pipeline_counter=pipeline_counter, counter=counter))
[ "Returns", "all", "the", "information", "regarding", "a", "specific", "stage", "run" ]
gaqzi/py-gocd
python
https://github.com/gaqzi/py-gocd/blob/6fe5b62dea51e665c11a343aba5fc98e130c5c63/gocd/api/stage.py#L52-L91
[ "def", "instance", "(", "self", ",", "counter", "=", "None", ",", "pipeline_counter", "=", "None", ")", ":", "pipeline_counter", "=", "pipeline_counter", "or", "self", ".", "pipeline_counter", "pipeline_instance", "=", "None", "if", "not", "pipeline_counter", ":", "pipeline_instance", "=", "self", ".", "server", ".", "pipeline", "(", "self", ".", "pipeline_name", ")", ".", "instance", "(", ")", "self", ".", "pipeline_counter", "=", "int", "(", "pipeline_instance", "[", "'counter'", "]", ")", "if", "not", "counter", ":", "if", "pipeline_instance", "is", "None", ":", "pipeline_instance", "=", "(", "self", ".", "server", ".", "pipeline", "(", "self", ".", "pipeline_name", ")", ".", "instance", "(", "pipeline_counter", ")", ")", "for", "stages", "in", "pipeline_instance", "[", "'stages'", "]", ":", "if", "stages", "[", "'name'", "]", "==", "self", ".", "stage_name", ":", "return", "self", ".", "instance", "(", "counter", "=", "int", "(", "stages", "[", "'counter'", "]", ")", ",", "pipeline_counter", "=", "pipeline_counter", ")", "return", "self", ".", "_get", "(", "'/instance/{pipeline_counter:d}/{counter:d}'", ".", "format", "(", "pipeline_counter", "=", "pipeline_counter", ",", "counter", "=", "counter", ")", ")" ]
6fe5b62dea51e665c11a343aba5fc98e130c5c63
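A short usage sketch of the fallback behaviour described above, assuming a configured `gocd.Server` instance; the host, credentials, pipeline name, stage name, and counters are placeholders, not values from this dataset:

from gocd import Server

go = Server('http://localhost:8153', user='admin', password='secret')  # placeholder host/credentials
stage = go.stage('my-pipeline', 'build')                               # placeholder names

latest = stage.instance()                                   # latest stage run of the latest pipeline run
specific = stage.instance(counter=2, pipeline_counter=17)   # a specific stage run of a specific pipeline run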
valid
Response.is_json
Returns: bool: True if `content_type` is `application/json`
gocd/api/response.py
def is_json(self):
    """
    Returns:
      bool: True if `content_type` is `application/json`
    """
    return (self.content_type.startswith('application/json')
            or re.match(r'application/vnd.go.cd.v(\d+)\+json', self.content_type))
[ "Returns", ":", "bool", ":", "True", "if", "content_type", "is", "application", "/", "json" ]
gaqzi/py-gocd
python
https://github.com/gaqzi/py-gocd/blob/6fe5b62dea51e665c11a343aba5fc98e130c5c63/gocd/api/response.py#L62-L68
[ "def", "is_json", "(", "self", ")", ":", "return", "(", "self", ".", "content_type", ".", "startswith", "(", "'application/json'", ")", "or", "re", ".", "match", "(", "r'application/vnd.go.cd.v(\\d+)\\+json'", ",", "self", ".", "content_type", ")", ")" ]
6fe5b62dea51e665c11a343aba5fc98e130c5c63
valid
Response.payload
Returns: `str` when not json. `dict` when json.
gocd/api/response.py
def payload(self):
    """
    Returns:
      `str` when not json.
      `dict` when json.
    """
    if self.is_json:
        if not self._body_parsed:
            if hasattr(self._body, 'decode'):
                body = self._body.decode('utf-8')
            else:
                body = self._body

            self._body_parsed = json.loads(body)

        return self._body_parsed
    else:
        return self._body
[ "Returns", ":", "str", "when", "not", "json", ".", "dict", "when", "json", "." ]
gaqzi/py-gocd
python
https://github.com/gaqzi/py-gocd/blob/6fe5b62dea51e665c11a343aba5fc98e130c5c63/gocd/api/response.py#L91-L108
[ "def", "payload", "(", "self", ")", ":", "if", "self", ".", "is_json", ":", "if", "not", "self", ".", "_body_parsed", ":", "if", "hasattr", "(", "self", ".", "_body", ",", "'decode'", ")", ":", "body", "=", "self", ".", "_body", ".", "decode", "(", "'utf-8'", ")", "else", ":", "body", "=", "self", ".", "_body", "self", ".", "_body_parsed", "=", "json", ".", "loads", "(", "body", ")", "return", "self", ".", "_body_parsed", "else", ":", "return", "self", ".", "_body" ]
6fe5b62dea51e665c11a343aba5fc98e130c5c63
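Taken together, is_json and payload let callers handle API results uniformly. A hedged sketch, assuming both are exposed as properties (their internal use of `self.is_json` without parentheses suggests this) and using placeholder connection details:

from gocd import Server

go = Server('http://localhost:8153', user='admin', password='secret')  # placeholder host/credentials
response = go.pipeline('my-pipeline').instance()                       # a gocd.api.response.Response

if response and response.is_json:
    print(response.payload['name'])   # parsed dict
else:
    print(response.payload)           # raw body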
valid
Server.request
Performs a HTTP request to the Go server Args: path (str): The full path on the Go server to request. This includes any query string attributes. data (str, dict, bool, optional): If any data is present this request will become a POST request. headers (dict, optional): Headers to set for this particular request Raises: HTTPError: when the HTTP request fails. Returns: file like object: The response from a :func:`urllib2.urlopen` call
gocd/server.py
def request(self, path, data=None, headers=None, method=None):
    """Performs a HTTP request to the Go server

    Args:
      path (str): The full path on the Go server to request.
        This includes any query string attributes.
      data (str, dict, bool, optional): If any data is present this
        request will become a POST request.
      headers (dict, optional): Headers to set for this particular request

    Raises:
      HTTPError: when the HTTP request fails.

    Returns:
      file like object: The response from a
        :func:`urllib2.urlopen` call
    """
    if isinstance(data, str):
        data = data.encode('utf-8')

    response = urlopen(self._request(path, data=data, headers=headers, method=method))
    self._set_session_cookie(response)

    return response
[ "Performs", "a", "HTTP", "request", "to", "the", "Go", "server" ]
gaqzi/py-gocd
python
https://github.com/gaqzi/py-gocd/blob/6fe5b62dea51e665c11a343aba5fc98e130c5c63/gocd/server.py#L135-L158
[ "def", "request", "(", "self", ",", "path", ",", "data", "=", "None", ",", "headers", "=", "None", ",", "method", "=", "None", ")", ":", "if", "isinstance", "(", "data", ",", "str", ")", ":", "data", "=", "data", ".", "encode", "(", "'utf-8'", ")", "response", "=", "urlopen", "(", "self", ".", "_request", "(", "path", ",", "data", "=", "data", ",", "headers", "=", "headers", ",", "method", "=", "method", ")", ")", "self", ".", "_set_session_cookie", "(", "response", ")", "return", "response" ]
6fe5b62dea51e665c11a343aba5fc98e130c5c63
valid
Server.add_logged_in_session
Make the request appear to be coming from a browser This is to interact with older parts of Go that doesn't have a proper API call to be made. What will be done: 1. If no response passed in a call to `go/api/pipelines.xml` is made to get a valid session 2. `JSESSIONID` will be populated from this request 3. A request to `go/pipelines` will be so the `authenticity_token` (CSRF) can be extracted. It will then silently be injected into `post_args` on any POST calls that doesn't start with `go/api` from this point. Args: response: a :class:`Response` object from a previously successful API call. So we won't have to query `go/api/pipelines.xml` unnecessarily. Raises: HTTPError: when the HTTP request fails. AuthenticationFailed: when failing to get the `session_id` or the `authenticity_token`.
gocd/server.py
def add_logged_in_session(self, response=None):
    """Make the request appear to be coming from a browser

    This is to interact with older parts of Go that doesn't have a
    proper API call to be made. What will be done:

    1. If no response passed in a call to `go/api/pipelines.xml` is
       made to get a valid session
    2. `JSESSIONID` will be populated from this request
    3. A request to `go/pipelines` will be so the `authenticity_token` (CSRF)
       can be extracted. It will then silently be injected into `post_args`
       on any POST calls that doesn't start with `go/api` from this point.

    Args:
      response: a :class:`Response` object from a previously successful
        API call. So we won't have to query `go/api/pipelines.xml`
        unnecessarily.

    Raises:
      HTTPError: when the HTTP request fails.
      AuthenticationFailed: when failing to get the `session_id`
        or the `authenticity_token`.
    """
    if not response:
        response = self.get('go/api/pipelines.xml')

    self._set_session_cookie(response)
    if not self._session_id:
        raise AuthenticationFailed('No session id extracted from request.')

    response = self.get('go/pipelines')
    match = re.search(
        r'name="authenticity_token".+?value="([^"]+)',
        response.read().decode('utf-8')
    )
    if match:
        self._authenticity_token = match.group(1)
    else:
        raise AuthenticationFailed('Authenticity token not found on page')
[ "Make", "the", "request", "appear", "to", "be", "coming", "from", "a", "browser" ]
gaqzi/py-gocd
python
https://github.com/gaqzi/py-gocd/blob/6fe5b62dea51e665c11a343aba5fc98e130c5c63/gocd/server.py#L160-L200
[ "def", "add_logged_in_session", "(", "self", ",", "response", "=", "None", ")", ":", "if", "not", "response", ":", "response", "=", "self", ".", "get", "(", "'go/api/pipelines.xml'", ")", "self", ".", "_set_session_cookie", "(", "response", ")", "if", "not", "self", ".", "_session_id", ":", "raise", "AuthenticationFailed", "(", "'No session id extracted from request.'", ")", "response", "=", "self", ".", "get", "(", "'go/pipelines'", ")", "match", "=", "re", ".", "search", "(", "r'name=\"authenticity_token\".+?value=\"([^\"]+)'", ",", "response", ".", "read", "(", ")", ".", "decode", "(", "'utf-8'", ")", ")", "if", "match", ":", "self", ".", "_authenticity_token", "=", "match", ".", "group", "(", "1", ")", "else", ":", "raise", "AuthenticationFailed", "(", "'Authenticity token not found on page'", ")" ]
6fe5b62dea51e665c11a343aba5fc98e130c5c63
valid
Server.stage
Returns an instance of :class:`Stage` Args: pipeline_name (str): Name of the pipeline the stage belongs to stage_name (str): Name of the stage to act on pipeline_counter (int): The pipeline instance the stage is for. Returns: Stage: an instantiated :class:`Stage`.
gocd/server.py
def stage(self, pipeline_name, stage_name, pipeline_counter=None):
    """Returns an instance of :class:`Stage`

    Args:
      pipeline_name (str): Name of the pipeline the stage belongs to
      stage_name (str): Name of the stage to act on
      pipeline_counter (int): The pipeline instance the stage is for.

    Returns:
      Stage: an instantiated :class:`Stage`.
    """
    return Stage(self, pipeline_name, stage_name, pipeline_counter=pipeline_counter)
[ "Returns", "an", "instance", "of", ":", "class", ":", "Stage" ]
gaqzi/py-gocd
python
https://github.com/gaqzi/py-gocd/blob/6fe5b62dea51e665c11a343aba5fc98e130c5c63/gocd/server.py#L229-L240
[ "def", "stage", "(", "self", ",", "pipeline_name", ",", "stage_name", ",", "pipeline_counter", "=", "None", ")", ":", "return", "Stage", "(", "self", ",", "pipeline_name", ",", "stage_name", ",", "pipeline_counter", "=", "pipeline_counter", ")" ]
6fe5b62dea51e665c11a343aba5fc98e130c5c63
valid
flatten
Return a dict as a list of lists. >>> flatten({"a": "b"}) [['a', 'b']] >>> flatten({"a": [1, 2, 3]}) [['a', [1, 2, 3]]] >>> flatten({"a": {"b": "c"}}) [['a', 'b', 'c']] >>> flatten({"a": {"b": {"c": "e"}}}) [['a', 'b', 'c', 'e']] >>> flatten({"a": {"b": "c", "d": "e"}}) [['a', 'b', 'c'], ['a', 'd', 'e']] >>> flatten({"a": {"b": "c", "d": "e"}, "b": {"c": "d"}}) [['a', 'b', 'c'], ['a', 'd', 'e'], ['b', 'c', 'd']]
gocd/vendor/multidimensional_urlencode/urlencoder.py
def flatten(d):
    """Return a dict as a list of lists.

    >>> flatten({"a": "b"})
    [['a', 'b']]
    >>> flatten({"a": [1, 2, 3]})
    [['a', [1, 2, 3]]]
    >>> flatten({"a": {"b": "c"}})
    [['a', 'b', 'c']]
    >>> flatten({"a": {"b": {"c": "e"}}})
    [['a', 'b', 'c', 'e']]
    >>> flatten({"a": {"b": "c", "d": "e"}})
    [['a', 'b', 'c'], ['a', 'd', 'e']]
    >>> flatten({"a": {"b": "c", "d": "e"}, "b": {"c": "d"}})
    [['a', 'b', 'c'], ['a', 'd', 'e'], ['b', 'c', 'd']]

    """
    if not isinstance(d, dict):
        return [[d]]

    returned = []
    for key, value in d.items():
        # Each key, value is treated as a row.
        nested = flatten(value)
        for nest in nested:
            current_row = [key]
            current_row.extend(nest)
            returned.append(current_row)

    return returned
[ "Return", "a", "dict", "as", "a", "list", "of", "lists", "." ]
gaqzi/py-gocd
python
https://github.com/gaqzi/py-gocd/blob/6fe5b62dea51e665c11a343aba5fc98e130c5c63/gocd/vendor/multidimensional_urlencode/urlencoder.py#L9-L39
[ "def", "flatten", "(", "d", ")", ":", "if", "not", "isinstance", "(", "d", ",", "dict", ")", ":", "return", "[", "[", "d", "]", "]", "returned", "=", "[", "]", "for", "key", ",", "value", "in", "d", ".", "items", "(", ")", ":", "# Each key, value is treated as a row.", "nested", "=", "flatten", "(", "value", ")", "for", "nest", "in", "nested", ":", "current_row", "=", "[", "key", "]", "current_row", ".", "extend", "(", "nest", ")", "returned", ".", "append", "(", "current_row", ")", "return", "returned" ]
6fe5b62dea51e665c11a343aba5fc98e130c5c63
valid
Pipeline.instance
Returns all the information regarding a specific pipeline run See the `Go pipeline instance documentation`__ for examples. .. __: http://api.go.cd/current/#get-pipeline-instance Args: counter (int): The pipeline instance to fetch. If falsey returns the latest pipeline instance from :meth:`history`. Returns: Response: :class:`gocd.api.response.Response` object
gocd/api/pipeline.py
def instance(self, counter=None):
    """Returns all the information regarding a specific pipeline run

    See the `Go pipeline instance documentation`__ for examples.

    .. __: http://api.go.cd/current/#get-pipeline-instance

    Args:
      counter (int): The pipeline instance to fetch.
        If falsey returns the latest pipeline instance from :meth:`history`.

    Returns:
      Response: :class:`gocd.api.response.Response` object
    """
    if not counter:
        history = self.history()
        if not history:
            return history
        else:
            return Response._from_json(history['pipelines'][0])

    return self._get('/instance/{counter:d}'.format(counter=counter))
[ "Returns", "all", "the", "information", "regarding", "a", "specific", "pipeline", "run" ]
gaqzi/py-gocd
python
https://github.com/gaqzi/py-gocd/blob/6fe5b62dea51e665c11a343aba5fc98e130c5c63/gocd/api/pipeline.py#L99-L120
[ "def", "instance", "(", "self", ",", "counter", "=", "None", ")", ":", "if", "not", "counter", ":", "history", "=", "self", ".", "history", "(", ")", "if", "not", "history", ":", "return", "history", "else", ":", "return", "Response", ".", "_from_json", "(", "history", "[", "'pipelines'", "]", "[", "0", "]", ")", "return", "self", ".", "_get", "(", "'/instance/{counter:d}'", ".", "format", "(", "counter", "=", "counter", ")", ")" ]
6fe5b62dea51e665c11a343aba5fc98e130c5c63
valid
Pipeline.schedule
Schedule a pipeline run Aliased as :meth:`run`, :meth:`schedule`, and :meth:`trigger`. Args: variables (dict, optional): Variables to set/override secure_variables (dict, optional): Secure variables to set/override materials (dict, optional): Material revisions to be used for this pipeline run. The exact format for this is a bit iffy, have a look at the official `Go pipeline scheduling documentation`__ or inspect a call from triggering manually in the UI. return_new_instance (bool): Returns a :meth:`history` compatible response for the newly scheduled instance. This is primarily so users easily can get the new instance number. **Note:** This is done in a very naive way, it just checks that the instance number is higher than before the pipeline was triggered. backoff_time (float): How long between each check for :arg:`return_new_instance`. .. __: http://api.go.cd/current/#scheduling-pipelines Returns: Response: :class:`gocd.api.response.Response` object
gocd/api/pipeline.py
def schedule(self, variables=None, secure_variables=None, materials=None,
             return_new_instance=False, backoff_time=1.0):
    """Schedule a pipeline run

    Aliased as :meth:`run`, :meth:`schedule`, and :meth:`trigger`.

    Args:
      variables (dict, optional): Variables to set/override
      secure_variables (dict, optional): Secure variables to set/override
      materials (dict, optional): Material revisions to be used for
        this pipeline run. The exact format for this is a bit iffy,
        have a look at the official
        `Go pipeline scheduling documentation`__ or inspect a call
        from triggering manually in the UI.
      return_new_instance (bool): Returns a :meth:`history` compatible
        response for the newly scheduled instance. This is primarily so
        users easily can get the new instance number. **Note:** This is
        done in a very naive way, it just checks that the instance number
        is higher than before the pipeline was triggered.
      backoff_time (float): How long between each check for
        :arg:`return_new_instance`.

    .. __: http://api.go.cd/current/#scheduling-pipelines

    Returns:
      Response: :class:`gocd.api.response.Response` object
    """
    scheduling_args = dict(
        variables=variables,
        secure_variables=secure_variables,
        material_fingerprint=materials,
        headers={"Confirm": True},
    )

    scheduling_args = dict((k, v) for k, v in scheduling_args.items() if v is not None)

    # TODO: Replace this with whatever is the official way as soon as gocd#990 is fixed.
    # https://github.com/gocd/gocd/issues/990
    if return_new_instance:
        pipelines = self.history()['pipelines']
        if len(pipelines) == 0:
            last_run = None
        else:
            last_run = pipelines[0]['counter']
        response = self._post('/schedule', ok_status=202, **scheduling_args)
        if not response:
            return response

        max_tries = 10
        while max_tries > 0:
            current = self.instance()
            if not last_run and current:
                return current
            elif last_run and current['counter'] > last_run:
                return current
            else:
                time.sleep(backoff_time)
                max_tries -= 1

        # I can't come up with a scenario in testing where this would happen, but it seems
        # better than returning None.
        return response
    else:
        return self._post('/schedule', ok_status=202, **scheduling_args)
[ "Schedule", "a", "pipeline", "run" ]
gaqzi/py-gocd
python
https://github.com/gaqzi/py-gocd/blob/6fe5b62dea51e665c11a343aba5fc98e130c5c63/gocd/api/pipeline.py#L122-L185
[ "def", "schedule", "(", "self", ",", "variables", "=", "None", ",", "secure_variables", "=", "None", ",", "materials", "=", "None", ",", "return_new_instance", "=", "False", ",", "backoff_time", "=", "1.0", ")", ":", "scheduling_args", "=", "dict", "(", "variables", "=", "variables", ",", "secure_variables", "=", "secure_variables", ",", "material_fingerprint", "=", "materials", ",", "headers", "=", "{", "\"Confirm\"", ":", "True", "}", ",", ")", "scheduling_args", "=", "dict", "(", "(", "k", ",", "v", ")", "for", "k", ",", "v", "in", "scheduling_args", ".", "items", "(", ")", "if", "v", "is", "not", "None", ")", "# TODO: Replace this with whatever is the official way as soon as gocd#990 is fixed.", "# https://github.com/gocd/gocd/issues/990", "if", "return_new_instance", ":", "pipelines", "=", "self", ".", "history", "(", ")", "[", "'pipelines'", "]", "if", "len", "(", "pipelines", ")", "==", "0", ":", "last_run", "=", "None", "else", ":", "last_run", "=", "pipelines", "[", "0", "]", "[", "'counter'", "]", "response", "=", "self", ".", "_post", "(", "'/schedule'", ",", "ok_status", "=", "202", ",", "*", "*", "scheduling_args", ")", "if", "not", "response", ":", "return", "response", "max_tries", "=", "10", "while", "max_tries", ">", "0", ":", "current", "=", "self", ".", "instance", "(", ")", "if", "not", "last_run", "and", "current", ":", "return", "current", "elif", "last_run", "and", "current", "[", "'counter'", "]", ">", "last_run", ":", "return", "current", "else", ":", "time", ".", "sleep", "(", "backoff_time", ")", "max_tries", "-=", "1", "# I can't come up with a scenario in testing where this would happen, but it seems", "# better than returning None.", "return", "response", "else", ":", "return", "self", ".", "_post", "(", "'/schedule'", ",", "ok_status", "=", "202", ",", "*", "*", "scheduling_args", ")" ]
6fe5b62dea51e665c11a343aba5fc98e130c5c63
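A sketch of the `return_new_instance` flow described in the docstring above. The host, credentials, pipeline name, and variables are placeholders; only calls that appear in this section (`Server.pipeline`-style helpers and `schedule`) are used:

from gocd import Server

go = Server('http://localhost:8153', user='admin', password='secret')  # placeholder host/credentials
pipeline = go.pipeline('my-pipeline')                                   # placeholder pipeline name

# With return_new_instance=True, schedule() polls until the counter advances past the last run.
new_instance = pipeline.schedule(
    variables={'DEPLOY_ENV': 'staging'},   # placeholder variables
    return_new_instance=True,
)
if new_instance:
    print('scheduled run #%s' % new_instance['counter'])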
valid
Pipeline.artifact
Helper to instantiate an :class:`gocd.api.artifact.Artifact` object Args: counter (int): The pipeline counter to get the artifact for stage: Stage name job: Job name stage_counter: Defaults to 1 Returns: Artifact: :class:`gocd.api.artifact.Artifact` object
gocd/api/pipeline.py
def artifact(self, counter, stage, job, stage_counter=1):
    """Helper to instantiate an :class:`gocd.api.artifact.Artifact` object

    Args:
      counter (int): The pipeline counter to get the artifact for
      stage: Stage name
      job: Job name
      stage_counter: Defaults to 1

    Returns:
      Artifact: :class:`gocd.api.artifact.Artifact` object
    """
    return Artifact(self.server, self.name, counter, stage, job, stage_counter)
[ "Helper", "to", "instantiate", "an", ":", "class", ":", "gocd", ".", "api", ".", "artifact", ".", "Artifact", "object" ]
gaqzi/py-gocd
python
https://github.com/gaqzi/py-gocd/blob/6fe5b62dea51e665c11a343aba5fc98e130c5c63/gocd/api/pipeline.py#L192-L204
[ "def", "artifact", "(", "self", ",", "counter", ",", "stage", ",", "job", ",", "stage_counter", "=", "1", ")", ":", "return", "Artifact", "(", "self", ".", "server", ",", "self", ".", "name", ",", "counter", ",", "stage", ",", "job", ",", "stage_counter", ")" ]
6fe5b62dea51e665c11a343aba5fc98e130c5c63
valid
Pipeline.console_output
Yields the output and metadata from all jobs in the pipeline Args: instance: The result of a :meth:`instance` call, if not supplied the latest of the pipeline will be used. Yields: tuple: (metadata (dict), output (str)). metadata contains: - pipeline - pipeline_counter - stage - stage_counter - job - job_result
gocd/api/pipeline.py
def console_output(self, instance=None):
    """Yields the output and metadata from all jobs in the pipeline

    Args:
      instance: The result of a :meth:`instance` call, if not supplied
        the latest of the pipeline will be used.

    Yields:
      tuple: (metadata (dict), output (str)).

      metadata contains:
        - pipeline
        - pipeline_counter
        - stage
        - stage_counter
        - job
        - job_result
    """
    if instance is None:
        instance = self.instance()

    for stage in instance['stages']:
        for job in stage['jobs']:
            if job['result'] not in self.final_results:
                continue

            artifact = self.artifact(
                instance['counter'],
                stage['name'],
                job['name'],
                stage['counter']
            )
            output = artifact.get('cruise-output/console.log')

            yield (
                {
                    'pipeline': self.name,
                    'pipeline_counter': instance['counter'],
                    'stage': stage['name'],
                    'stage_counter': stage['counter'],
                    'job': job['name'],
                    'job_result': job['result'],
                },
                output.body
            )
[ "Yields", "the", "output", "and", "metadata", "from", "all", "jobs", "in", "the", "pipeline" ]
gaqzi/py-gocd
python
https://github.com/gaqzi/py-gocd/blob/6fe5b62dea51e665c11a343aba5fc98e130c5c63/gocd/api/pipeline.py#L208-L252
[ "def", "console_output", "(", "self", ",", "instance", "=", "None", ")", ":", "if", "instance", "is", "None", ":", "instance", "=", "self", ".", "instance", "(", ")", "for", "stage", "in", "instance", "[", "'stages'", "]", ":", "for", "job", "in", "stage", "[", "'jobs'", "]", ":", "if", "job", "[", "'result'", "]", "not", "in", "self", ".", "final_results", ":", "continue", "artifact", "=", "self", ".", "artifact", "(", "instance", "[", "'counter'", "]", ",", "stage", "[", "'name'", "]", ",", "job", "[", "'name'", "]", ",", "stage", "[", "'counter'", "]", ")", "output", "=", "artifact", ".", "get", "(", "'cruise-output/console.log'", ")", "yield", "(", "{", "'pipeline'", ":", "self", ".", "name", ",", "'pipeline_counter'", ":", "instance", "[", "'counter'", "]", ",", "'stage'", ":", "stage", "[", "'name'", "]", ",", "'stage_counter'", ":", "stage", "[", "'counter'", "]", ",", "'job'", ":", "job", "[", "'name'", "]", ",", "'job_result'", ":", "job", "[", "'result'", "]", ",", "}", ",", "output", ".", "body", ")" ]
6fe5b62dea51e665c11a343aba5fc98e130c5c63
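Because console_output is a generator, callers typically just iterate it. A minimal sketch using the metadata keys listed in the docstring above; the connection details and pipeline name are placeholders:

from gocd import Server

go = Server('http://localhost:8153', user='admin', password='secret')  # placeholder host/credentials
pipeline = go.pipeline('my-pipeline')                                   # placeholder pipeline name

# Iterate the finished jobs of the latest run and print a one-line summary per job.
for meta, log in pipeline.console_output():
    print('%s/%s [%s]: %d bytes of console output'
          % (meta['stage'], meta['job'], meta['job_result'], len(log)))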
valid
Pipeline.stage
Helper to instantiate a :class:`gocd.api.stage.Stage` object Args: name: The name of the stage pipeline_counter: Returns:
gocd/api/pipeline.py
def stage(self, name, pipeline_counter=None):
    """Helper to instantiate a :class:`gocd.api.stage.Stage` object

    Args:
      name: The name of the stage
      pipeline_counter:

    Returns:

    """
    return Stage(
        self.server,
        pipeline_name=self.name,
        stage_name=name,
        pipeline_counter=pipeline_counter,
    )
[ "Helper", "to", "instantiate", "a", ":", "class", ":", "gocd", ".", "api", ".", "stage", ".", "Stage", "object" ]
gaqzi/py-gocd
python
https://github.com/gaqzi/py-gocd/blob/6fe5b62dea51e665c11a343aba5fc98e130c5c63/gocd/api/pipeline.py#L254-L269
[ "def", "stage", "(", "self", ",", "name", ",", "pipeline_counter", "=", "None", ")", ":", "return", "Stage", "(", "self", ".", "server", ",", "pipeline_name", "=", "self", ".", "name", ",", "stage_name", "=", "name", ",", "pipeline_counter", "=", "pipeline_counter", ",", ")" ]
6fe5b62dea51e665c11a343aba5fc98e130c5c63
valid
TemplateConfig.edit
Update template config for specified template name. .. __: https://api.go.cd/current/#edit-template-config Returns: Response: :class:`gocd.api.response.Response` object
gocd/api/template_config.py
def edit(self, config, etag): """Update template config for specified template name. .. __: https://api.go.cd/current/#edit-template-config Returns: Response: :class:`gocd.api.response.Response` object """ data = self._json_encode(config) headers = self._default_headers() if etag is not None: headers["If-Match"] = etag return self._request(self.name, ok_status=None, data=data, headers=headers, method="PUT")
def edit(self, config, etag): """Update template config for specified template name. .. __: https://api.go.cd/current/#edit-template-config Returns: Response: :class:`gocd.api.response.Response` object """ data = self._json_encode(config) headers = self._default_headers() if etag is not None: headers["If-Match"] = etag return self._request(self.name, ok_status=None, data=data, headers=headers, method="PUT")
[ "Update", "template", "config", "for", "specified", "template", "name", "." ]
gaqzi/py-gocd
python
https://github.com/gaqzi/py-gocd/blob/6fe5b62dea51e665c11a343aba5fc98e130c5c63/gocd/api/template_config.py#L37-L56
[ "def", "edit", "(", "self", ",", "config", ",", "etag", ")", ":", "data", "=", "self", ".", "_json_encode", "(", "config", ")", "headers", "=", "self", ".", "_default_headers", "(", ")", "if", "etag", "is", "not", "None", ":", "headers", "[", "\"If-Match\"", "]", "=", "etag", "return", "self", ".", "_request", "(", "self", ".", "name", ",", "ok_status", "=", "None", ",", "data", "=", "data", ",", "headers", "=", "headers", ",", "method", "=", "\"PUT\"", ")" ]
6fe5b62dea51e665c11a343aba5fc98e130c5c63
valid
TemplateConfig.create
Create template config for specified template name. .. __: https://api.go.cd/current/#create-template-config Returns: Response: :class:`gocd.api.response.Response` object
gocd/api/template_config.py
def create(self, config): """Create template config for specified template name. .. __: https://api.go.cd/current/#create-template-config Returns: Response: :class:`gocd.api.response.Response` object """ assert config["name"] == self.name, "Given config is not for this template" data = self._json_encode(config) headers = self._default_headers() return self._request("", ok_status=None, data=data, headers=headers)
def create(self, config): """Create template config for specified template name. .. __: https://api.go.cd/current/#create-template-config Returns: Response: :class:`gocd.api.response.Response` object """ assert config["name"] == self.name, "Given config is not for this template" data = self._json_encode(config) headers = self._default_headers() return self._request("", ok_status=None, data=data, headers=headers)
[ "Create", "template", "config", "for", "specified", "template", "name", "." ]
gaqzi/py-gocd
python
https://github.com/gaqzi/py-gocd/blob/6fe5b62dea51e665c11a343aba5fc98e130c5c63/gocd/api/template_config.py#L58-L75
[ "def", "create", "(", "self", ",", "config", ")", ":", "assert", "config", "[", "\"name\"", "]", "==", "self", ".", "name", ",", "\"Given config is not for this template\"", "data", "=", "self", ".", "_json_encode", "(", "config", ")", "headers", "=", "self", ".", "_default_headers", "(", ")", "return", "self", ".", "_request", "(", "\"\"", ",", "ok_status", "=", "None", ",", "data", "=", "data", ",", "headers", "=", "headers", ")" ]
6fe5b62dea51e665c11a343aba5fc98e130c5c63
valid
TemplateConfig.delete
Delete template config for specified template name. .. __: https://api.go.cd/current/#delete-a-template Returns: Response: :class:`gocd.api.response.Response` object
gocd/api/template_config.py
def delete(self): """Delete template config for specified template name. .. __: https://api.go.cd/current/#delete-a-template Returns: Response: :class:`gocd.api.response.Response` object """ headers = self._default_headers() return self._request(self.name, ok_status=None, data=None, headers=headers, method="DELETE")
def delete(self): """Delete template config for specified template name. .. __: https://api.go.cd/current/#delete-a-template Returns: Response: :class:`gocd.api.response.Response` object """ headers = self._default_headers() return self._request(self.name, ok_status=None, data=None, headers=headers, method="DELETE")
[ "Delete", "template", "config", "for", "specified", "template", "name", "." ]
gaqzi/py-gocd
python
https://github.com/gaqzi/py-gocd/blob/6fe5b62dea51e665c11a343aba5fc98e130c5c63/gocd/api/template_config.py#L77-L92
[ "def", "delete", "(", "self", ")", ":", "headers", "=", "self", ".", "_default_headers", "(", ")", "return", "self", ".", "_request", "(", "self", ".", "name", ",", "ok_status", "=", "None", ",", "data", "=", "None", ",", "headers", "=", "headers", ",", "method", "=", "\"DELETE\"", ")" ]
6fe5b62dea51e665c11a343aba5fc98e130c5c63
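A hypothetical end-to-end flow tying together the three TemplateConfig records above (edit, create, delete). The constructor signature, the config payload layout and the ETag value are assumptions; only create(), edit() and delete() themselves are taken from the records.

    tpl = TemplateConfig(go, 'deploy-template')          # assumed (server, name) constructor
    config = {
        'name': 'deploy-template',                       # must equal tpl.name, per the assert in create()
        'stages': [{'name': 'build', 'jobs': [{'name': 'compile'}]}],
    }
    tpl.create(config)
    config['stages'].append({'name': 'test', 'jobs': [{'name': 'unit'}]})
    tpl.edit(config, etag='"etag-from-a-prior-get"')     # placeholder ETag, normally read from a GET response
    tpl.delete()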
valid
PipelineGroups.pipelines
Returns a set of all pipelines from the last response Returns: set: Response success: all the pipelines available in the response Response failure: an empty set
gocd/api/pipeline_groups.py
def pipelines(self): """Returns a set of all pipelines from the last response Returns: set: Response success: all the pipelines available in the response Response failure: an empty set """ if not self.response: return set() elif self._pipelines is None and self.response: self._pipelines = set() for group in self.response.payload: for pipeline in group['pipelines']: self._pipelines.add(pipeline['name']) return self._pipelines
def pipelines(self): """Returns a set of all pipelines from the last response Returns: set: Response success: all the pipelines available in the response Response failure: an empty set """ if not self.response: return set() elif self._pipelines is None and self.response: self._pipelines = set() for group in self.response.payload: for pipeline in group['pipelines']: self._pipelines.add(pipeline['name']) return self._pipelines
[ "Returns", "a", "set", "of", "all", "pipelines", "from", "the", "last", "response" ]
gaqzi/py-gocd
python
https://github.com/gaqzi/py-gocd/blob/6fe5b62dea51e665c11a343aba5fc98e130c5c63/gocd/api/pipeline_groups.py#L40-L55
[ "def", "pipelines", "(", "self", ")", ":", "if", "not", "self", ".", "response", ":", "return", "set", "(", ")", "elif", "self", ".", "_pipelines", "is", "None", "and", "self", ".", "response", ":", "self", ".", "_pipelines", "=", "set", "(", ")", "for", "group", "in", "self", ".", "response", ".", "payload", ":", "for", "pipeline", "in", "group", "[", "'pipelines'", "]", ":", "self", ".", "_pipelines", ".", "add", "(", "pipeline", "[", "'name'", "]", ")", "return", "self", ".", "_pipelines" ]
6fe5b62dea51e665c11a343aba5fc98e130c5c63
valid
Artifact.get_directory
Gets an artifact directory by its path. See the `Go artifact directory documentation`__ for example responses. .. __: http://api.go.cd/current/#get-artifact-directory .. note:: Getting a directory relies on Go creating a zip file of the directory in question. Because of this Go will zip the file in the background and return a 202 Accepted response. It's then up to the client to check again later and get the final file. To work with normal assumptions this :meth:`get_directory` will retry itself up to ``timeout`` seconds to get a 200 response to return. At that point it will then return the response as is, no matter whether it's still 202 or 200. The retry is done with an exponential backoff with a max value between retries. See the ``backoff`` and ``max_wait`` variables. If you want to handle the retry logic yourself then use :meth:`get` and add '.zip' as a suffix on the directory. Args: path_to_directory (str): The path to the directory to get. It can be nested eg ``target/dist.zip`` timeout (int): How many seconds we will wait in total for a successful response from Go when we're receiving 202 backoff (float): The initial value used for backoff, raises exponentially until it reaches ``max_wait`` max_wait (int): The max time between retries Returns: Response: :class:`gocd.api.response.Response` object A successful response is a zip-file.
gocd/api/artifact.py
def get_directory(self, path_to_directory, timeout=30, backoff=0.4, max_wait=4): """Gets an artifact directory by its path. See the `Go artifact directory documentation`__ for example responses. .. __: http://api.go.cd/current/#get-artifact-directory .. note:: Getting a directory relies on Go creating a zip file of the directory in question. Because of this Go will zip the file in the background and return a 202 Accepted response. It's then up to the client to check again later and get the final file. To work with normal assumptions this :meth:`get_directory` will retry itself up to ``timeout`` seconds to get a 200 response to return. At that point it will then return the response as is, no matter whether it's still 202 or 200. The retry is done with an exponential backoff with a max value between retries. See the ``backoff`` and ``max_wait`` variables. If you want to handle the retry logic yourself then use :meth:`get` and add '.zip' as a suffix on the directory. Args: path_to_directory (str): The path to the directory to get. It can be nested eg ``target/dist.zip`` timeout (int): How many seconds we will wait in total for a successful response from Go when we're receiving 202 backoff (float): The initial value used for backoff, raises exponentially until it reaches ``max_wait`` max_wait (int): The max time between retries Returns: Response: :class:`gocd.api.response.Response` object A successful response is a zip-file. """ response = None started_at = None time_elapsed = 0 i = 0 while time_elapsed < timeout: response = self._get('{0}.zip'.format(path_to_directory)) if response: break else: if started_at is None: started_at = time.time() time.sleep(min(backoff * (2 ** i), max_wait)) i += 1 time_elapsed = time.time() - started_at return response
def get_directory(self, path_to_directory, timeout=30, backoff=0.4, max_wait=4): """Gets an artifact directory by its path. See the `Go artifact directory documentation`__ for example responses. .. __: http://api.go.cd/current/#get-artifact-directory .. note:: Getting a directory relies on Go creating a zip file of the directory in question. Because of this Go will zip the file in the background and return a 202 Accepted response. It's then up to the client to check again later and get the final file. To work with normal assumptions this :meth:`get_directory` will retry itself up to ``timeout`` seconds to get a 200 response to return. At that point it will then return the response as is, no matter whether it's still 202 or 200. The retry is done with an exponential backoff with a max value between retries. See the ``backoff`` and ``max_wait`` variables. If you want to handle the retry logic yourself then use :meth:`get` and add '.zip' as a suffix on the directory. Args: path_to_directory (str): The path to the directory to get. It can be nested eg ``target/dist.zip`` timeout (int): How many seconds we will wait in total for a successful response from Go when we're receiving 202 backoff (float): The initial value used for backoff, raises exponentially until it reaches ``max_wait`` max_wait (int): The max time between retries Returns: Response: :class:`gocd.api.response.Response` object A successful response is a zip-file. """ response = None started_at = None time_elapsed = 0 i = 0 while time_elapsed < timeout: response = self._get('{0}.zip'.format(path_to_directory)) if response: break else: if started_at is None: started_at = time.time() time.sleep(min(backoff * (2 ** i), max_wait)) i += 1 time_elapsed = time.time() - started_at return response
[ "Gets", "an", "artifact", "directory", "by", "its", "path", "." ]
gaqzi/py-gocd
python
https://github.com/gaqzi/py-gocd/blob/6fe5b62dea51e665c11a343aba5fc98e130c5c63/gocd/api/artifact.py#L65-L119
[ "def", "get_directory", "(", "self", ",", "path_to_directory", ",", "timeout", "=", "30", ",", "backoff", "=", "0.4", ",", "max_wait", "=", "4", ")", ":", "response", "=", "None", "started_at", "=", "None", "time_elapsed", "=", "0", "i", "=", "0", "while", "time_elapsed", "<", "timeout", ":", "response", "=", "self", ".", "_get", "(", "'{0}.zip'", ".", "format", "(", "path_to_directory", ")", ")", "if", "response", ":", "break", "else", ":", "if", "started_at", "is", "None", ":", "started_at", "=", "time", ".", "time", "(", ")", "time", ".", "sleep", "(", "min", "(", "backoff", "*", "(", "2", "**", "i", ")", ",", "max_wait", ")", ")", "i", "+=", "1", "time_elapsed", "=", "time", ".", "time", "(", ")", "-", "started_at", "return", "response" ]
6fe5b62dea51e665c11a343aba5fc98e130c5c63
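The retry loop in get_directory sleeps with an exponential backoff capped at max_wait; the delays it produces for the default arguments can be reproduced directly from the expression in the code:

    backoff, max_wait = 0.4, 4
    print([min(backoff * 2 ** i, max_wait) for i in range(6)])
    # [0.4, 0.8, 1.6, 3.2, 4, 4] -- capped at max_wait and repeated until the 30s timeout elapses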
valid
DelayQueue.ask
Return the wait time in seconds required to retrieve the item currently at the head of the queue. Note that there is no guarantee that a call to `get()` will succeed even if `ask()` returns 0. By the time the calling thread reacts, other threads may have caused a different item to be at the head of the queue.
delayqueue/core.py
def ask(self): """ Return the wait time in seconds required to retrieve the item currently at the head of the queue. Note that there is no guarantee that a call to `get()` will succeed even if `ask()` returns 0. By the time the calling thread reacts, other threads may have caused a different item to be at the head of the queue. """ with self.mutex: if not len(self.queue): raise Empty utcnow = dt.datetime.utcnow() if self.queue[0][0] <= utcnow: self.ready.notify() return 0 return (self.queue[0][0] - utcnow).total_seconds()
def ask(self): """ Return the wait time in seconds required to retrieve the item currently at the head of the queue. Note that there is no guarantee that a call to `get()` will succeed even if `ask()` returns 0. By the time the calling thread reacts, other threads may have caused a different item to be at the head of the queue. """ with self.mutex: if not len(self.queue): raise Empty utcnow = dt.datetime.utcnow() if self.queue[0][0] <= utcnow: self.ready.notify() return 0 return (self.queue[0][0] - utcnow).total_seconds()
[ "Return", "the", "wait", "time", "in", "seconds", "required", "to", "retrieve", "the", "item", "currently", "at", "the", "head", "of", "the", "queue", ".", "Note", "that", "there", "is", "no", "guarantee", "that", "a", "call", "to", "get", "()", "will", "succeed", "even", "if", "ask", "()", "returns", "0", ".", "By", "the", "time", "the", "calling", "thread", "reacts", "other", "threads", "may", "have", "caused", "a", "different", "item", "to", "be", "at", "the", "head", "of", "the", "queue", "." ]
aisthesis/delayqueue
python
https://github.com/aisthesis/delayqueue/blob/9f357d22e966a5cf252bae5446d92efa7b07e83d/delayqueue/core.py#L46-L63
[ "def", "ask", "(", "self", ")", ":", "with", "self", ".", "mutex", ":", "if", "not", "len", "(", "self", ".", "queue", ")", ":", "raise", "Empty", "utcnow", "=", "dt", ".", "datetime", ".", "utcnow", "(", ")", "if", "self", ".", "queue", "[", "0", "]", "[", "0", "]", "<=", "utcnow", ":", "self", ".", "ready", ".", "notify", "(", ")", "return", "0", "return", "(", "self", ".", "queue", "[", "0", "]", "[", "0", "]", "-", "utcnow", ")", ".", "total_seconds", "(", ")" ]
9f357d22e966a5cf252bae5446d92efa7b07e83d
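A consumer sketch built around ask(): sleep for the advertised wait, then attempt the retrieval. Everything except ask() itself is an assumption about the rest of the delayqueue package (the import path, the put() signature and the behaviour of get()), so treat it as illustrative only.

    import time

    from delayqueue.core import DelayQueue, Empty   # assumed import path

    q = DelayQueue()
    q.put('send-reminder', 5.0)                      # assumed put(item, delay_in_seconds) signature
    while True:
        try:
            wait = q.ask()
        except Empty:
            break
        if wait > 0:
            time.sleep(wait)
        print(q.get())                               # may still block briefly if another thread raced us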
valid
p_transition
transition : START_KWD KEY NULL_KWD FLOAT transition : KEY KEY NULL_KWD FLOAT transition : KEY END_KWD NULL_KWD FLOAT transition : START_KWD KEY KEY FLOAT transition : KEY KEY KEY FLOAT transition : KEY END_KWD KEY FLOAT transition : START_KWD KEY NULL_KWD INTEGER transition : KEY KEY NULL_KWD INTEGER transition : KEY END_KWD NULL_KWD INTEGER transition : START_KWD KEY KEY INTEGER transition : KEY KEY KEY INTEGER transition : KEY END_KWD KEY INTEGER transition : START_KWD KEY NULL_KWD KEY transition : KEY KEY NULL_KWD KEY transition : KEY END_KWD NULL_KWD KEY transition : START_KWD KEY KEY KEY transition : KEY KEY KEY KEY transition : KEY END_KWD KEY KEY
marionette_tg/dsl.py
def p_transition(p): """ transition : START_KWD KEY NULL_KWD FLOAT transition : KEY KEY NULL_KWD FLOAT transition : KEY END_KWD NULL_KWD FLOAT transition : START_KWD KEY KEY FLOAT transition : KEY KEY KEY FLOAT transition : KEY END_KWD KEY FLOAT transition : START_KWD KEY NULL_KWD INTEGER transition : KEY KEY NULL_KWD INTEGER transition : KEY END_KWD NULL_KWD INTEGER transition : START_KWD KEY KEY INTEGER transition : KEY KEY KEY INTEGER transition : KEY END_KWD KEY INTEGER transition : START_KWD KEY NULL_KWD KEY transition : KEY KEY NULL_KWD KEY transition : KEY END_KWD NULL_KWD KEY transition : START_KWD KEY KEY KEY transition : KEY KEY KEY KEY transition : KEY END_KWD KEY KEY """ p[3] = None if p[3] == 'NULL' else p[3] if p[4] == 'error': p[0] = MarionetteTransition(p[1], p[2], p[3], 0, True) else: p[0] = MarionetteTransition(p[1], p[2], p[3], p[4], False)
def p_transition(p): """ transition : START_KWD KEY NULL_KWD FLOAT transition : KEY KEY NULL_KWD FLOAT transition : KEY END_KWD NULL_KWD FLOAT transition : START_KWD KEY KEY FLOAT transition : KEY KEY KEY FLOAT transition : KEY END_KWD KEY FLOAT transition : START_KWD KEY NULL_KWD INTEGER transition : KEY KEY NULL_KWD INTEGER transition : KEY END_KWD NULL_KWD INTEGER transition : START_KWD KEY KEY INTEGER transition : KEY KEY KEY INTEGER transition : KEY END_KWD KEY INTEGER transition : START_KWD KEY NULL_KWD KEY transition : KEY KEY NULL_KWD KEY transition : KEY END_KWD NULL_KWD KEY transition : START_KWD KEY KEY KEY transition : KEY KEY KEY KEY transition : KEY END_KWD KEY KEY """ p[3] = None if p[3] == 'NULL' else p[3] if p[4] == 'error': p[0] = MarionetteTransition(p[1], p[2], p[3], 0, True) else: p[0] = MarionetteTransition(p[1], p[2], p[3], p[4], False)
[ "transition", ":", "START_KWD", "KEY", "NULL_KWD", "FLOAT", "transition", ":", "KEY", "KEY", "NULL_KWD", "FLOAT", "transition", ":", "KEY", "END_KWD", "NULL_KWD", "FLOAT", "transition", ":", "START_KWD", "KEY", "KEY", "FLOAT", "transition", ":", "KEY", "KEY", "KEY", "FLOAT", "transition", ":", "KEY", "END_KWD", "KEY", "FLOAT", "transition", ":", "START_KWD", "KEY", "NULL_KWD", "INTEGER", "transition", ":", "KEY", "KEY", "NULL_KWD", "INTEGER", "transition", ":", "KEY", "END_KWD", "NULL_KWD", "INTEGER", "transition", ":", "START_KWD", "KEY", "KEY", "INTEGER", "transition", ":", "KEY", "KEY", "KEY", "INTEGER", "transition", ":", "KEY", "END_KWD", "KEY", "INTEGER", "transition", ":", "START_KWD", "KEY", "NULL_KWD", "KEY", "transition", ":", "KEY", "KEY", "NULL_KWD", "KEY", "transition", ":", "KEY", "END_KWD", "NULL_KWD", "KEY", "transition", ":", "START_KWD", "KEY", "KEY", "KEY", "transition", ":", "KEY", "KEY", "KEY", "KEY", "transition", ":", "KEY", "END_KWD", "KEY", "KEY" ]
marionette-tg/marionette
python
https://github.com/marionette-tg/marionette/blob/bb40a334a18c82728eec01c9b56330bcb91a28da/marionette_tg/dsl.py#L174-L199
[ "def", "p_transition", "(", "p", ")", ":", "p", "[", "3", "]", "=", "None", "if", "p", "[", "3", "]", "==", "'NULL'", "else", "p", "[", "3", "]", "if", "p", "[", "4", "]", "==", "'error'", ":", "p", "[", "0", "]", "=", "MarionetteTransition", "(", "p", "[", "1", "]", ",", "p", "[", "2", "]", ",", "p", "[", "3", "]", ",", "0", ",", "True", ")", "else", ":", "p", "[", "0", "]", "=", "MarionetteTransition", "(", "p", "[", "1", "]", ",", "p", "[", "2", "]", ",", "p", "[", "3", "]", ",", "p", "[", "4", "]", ",", "False", ")" ]
bb40a334a18c82728eec01c9b56330bcb91a28da
valid
p_action_blocks
action_blocks : action_blocks action_block
marionette_tg/dsl.py
def p_action_blocks(p): """ action_blocks : action_blocks action_block """ if isinstance(p[1], list): if isinstance(p[1][0], list): p[0] = p[1][0] + [p[2]] else: p[0] = p[1] + p[2] else: p[0] = [p[1], p[2]]
def p_action_blocks(p): """ action_blocks : action_blocks action_block """ if isinstance(p[1], list): if isinstance(p[1][0], list): p[0] = p[1][0] + [p[2]] else: p[0] = p[1] + p[2] else: p[0] = [p[1], p[2]]
[ "action_blocks", ":", "action_blocks", "action_block" ]
marionette-tg/marionette
python
https://github.com/marionette-tg/marionette/blob/bb40a334a18c82728eec01c9b56330bcb91a28da/marionette_tg/dsl.py#L202-L212
[ "def", "p_action_blocks", "(", "p", ")", ":", "if", "isinstance", "(", "p", "[", "1", "]", ",", "list", ")", ":", "if", "isinstance", "(", "p", "[", "1", "]", "[", "0", "]", ",", "list", ")", ":", "p", "[", "0", "]", "=", "p", "[", "1", "]", "[", "0", "]", "+", "[", "p", "[", "2", "]", "]", "else", ":", "p", "[", "0", "]", "=", "p", "[", "1", "]", "+", "p", "[", "2", "]", "else", ":", "p", "[", "0", "]", "=", "[", "p", "[", "1", "]", ",", "p", "[", "2", "]", "]" ]
bb40a334a18c82728eec01c9b56330bcb91a28da
valid
p_action_block
action_block : ACTION_KWD KEY COLON actions
marionette_tg/dsl.py
def p_action_block(p): """ action_block : ACTION_KWD KEY COLON actions """ p[0] = [] for i in range(len(p[4])): p[0] += [marionette_tg.action.MarionetteAction(p[2], p[4][i][0], p[4][i][1], p[4][i][2], p[4][i][3], p[4][i][4])]
def p_action_block(p): """ action_block : ACTION_KWD KEY COLON actions """ p[0] = [] for i in range(len(p[4])): p[0] += [marionette_tg.action.MarionetteAction(p[2], p[4][i][0], p[4][i][1], p[4][i][2], p[4][i][3], p[4][i][4])]
[ "action_block", ":", "ACTION_KWD", "KEY", "COLON", "actions" ]
marionette-tg/marionette
python
https://github.com/marionette-tg/marionette/blob/bb40a334a18c82728eec01c9b56330bcb91a28da/marionette_tg/dsl.py#L222-L232
[ "def", "p_action_block", "(", "p", ")", ":", "p", "[", "0", "]", "=", "[", "]", "for", "i", "in", "range", "(", "len", "(", "p", "[", "4", "]", ")", ")", ":", "p", "[", "0", "]", "+=", "[", "marionette_tg", ".", "action", ".", "MarionetteAction", "(", "p", "[", "2", "]", ",", "p", "[", "4", "]", "[", "i", "]", "[", "0", "]", ",", "p", "[", "4", "]", "[", "i", "]", "[", "1", "]", ",", "p", "[", "4", "]", "[", "i", "]", "[", "2", "]", ",", "p", "[", "4", "]", "[", "i", "]", "[", "3", "]", ",", "p", "[", "4", "]", "[", "i", "]", "[", "4", "]", ")", "]" ]
bb40a334a18c82728eec01c9b56330bcb91a28da
valid
p_action
action : CLIENT_KWD KEY DOT KEY LPAREN args RPAREN action : SERVER_KWD KEY DOT KEY LPAREN args RPAREN action : CLIENT_KWD KEY DOT KEY LPAREN args RPAREN IF_KWD REGEX_MATCH_INCOMING_KWD LPAREN p_string_arg RPAREN action : SERVER_KWD KEY DOT KEY LPAREN args RPAREN IF_KWD REGEX_MATCH_INCOMING_KWD LPAREN p_string_arg RPAREN
marionette_tg/dsl.py
def p_action(p): """ action : CLIENT_KWD KEY DOT KEY LPAREN args RPAREN action : SERVER_KWD KEY DOT KEY LPAREN args RPAREN action : CLIENT_KWD KEY DOT KEY LPAREN args RPAREN IF_KWD REGEX_MATCH_INCOMING_KWD LPAREN p_string_arg RPAREN action : SERVER_KWD KEY DOT KEY LPAREN args RPAREN IF_KWD REGEX_MATCH_INCOMING_KWD LPAREN p_string_arg RPAREN """ if len(p)==8: p[0] = [p[1], p[2], p[4], p[6], None] elif len(p)==13: p[0] = [p[1], p[2], p[4], p[6], p[11]]
def p_action(p): """ action : CLIENT_KWD KEY DOT KEY LPAREN args RPAREN action : SERVER_KWD KEY DOT KEY LPAREN args RPAREN action : CLIENT_KWD KEY DOT KEY LPAREN args RPAREN IF_KWD REGEX_MATCH_INCOMING_KWD LPAREN p_string_arg RPAREN action : SERVER_KWD KEY DOT KEY LPAREN args RPAREN IF_KWD REGEX_MATCH_INCOMING_KWD LPAREN p_string_arg RPAREN """ if len(p)==8: p[0] = [p[1], p[2], p[4], p[6], None] elif len(p)==13: p[0] = [p[1], p[2], p[4], p[6], p[11]]
[ "action", ":", "CLIENT_KWD", "KEY", "DOT", "KEY", "LPAREN", "args", "RPAREN", "action", ":", "SERVER_KWD", "KEY", "DOT", "KEY", "LPAREN", "args", "RPAREN", "action", ":", "CLIENT_KWD", "KEY", "DOT", "KEY", "LPAREN", "args", "RPAREN", "IF_KWD", "REGEX_MATCH_INCOMING_KWD", "LPAREN", "p_string_arg", "RPAREN", "action", ":", "SERVER_KWD", "KEY", "DOT", "KEY", "LPAREN", "args", "RPAREN", "IF_KWD", "REGEX_MATCH_INCOMING_KWD", "LPAREN", "p_string_arg", "RPAREN" ]
marionette-tg/marionette
python
https://github.com/marionette-tg/marionette/blob/bb40a334a18c82728eec01c9b56330bcb91a28da/marionette_tg/dsl.py#L249-L259
[ "def", "p_action", "(", "p", ")", ":", "if", "len", "(", "p", ")", "==", "8", ":", "p", "[", "0", "]", "=", "[", "p", "[", "1", "]", ",", "p", "[", "2", "]", ",", "p", "[", "4", "]", ",", "p", "[", "6", "]", ",", "None", "]", "elif", "len", "(", "p", ")", "==", "13", ":", "p", "[", "0", "]", "=", "[", "p", "[", "1", "]", ",", "p", "[", "2", "]", ",", "p", "[", "4", "]", ",", "p", "[", "6", "]", ",", "p", "[", "11", "]", "]" ]
bb40a334a18c82728eec01c9b56330bcb91a28da
valid
config_loader
Configuration loader. Adds support for loading templates from the Flask application's instance folder (``<instance_folder>/templates``).
invenio_app/factory.py
def config_loader(app, **kwargs_config): """Configuration loader. Adds support for loading templates from the Flask application's instance folder (``<instance_folder>/templates``). """ # This is the only place customize the Flask application right after # it has been created, but before all extensions etc are loaded. local_templates_path = os.path.join(app.instance_path, 'templates') if os.path.exists(local_templates_path): # Let's customize the template loader to look into packages # and application templates folders. app.jinja_loader = ChoiceLoader([ FileSystemLoader(local_templates_path), app.jinja_loader, ]) app.jinja_options = dict( app.jinja_options, cache_size=1000, bytecode_cache=BytecodeCache(app) ) invenio_config_loader(app, **kwargs_config)
def config_loader(app, **kwargs_config): """Configuration loader. Adds support for loading templates from the Flask application's instance folder (``<instance_folder>/templates``). """ # This is the only place customize the Flask application right after # it has been created, but before all extensions etc are loaded. local_templates_path = os.path.join(app.instance_path, 'templates') if os.path.exists(local_templates_path): # Let's customize the template loader to look into packages # and application templates folders. app.jinja_loader = ChoiceLoader([ FileSystemLoader(local_templates_path), app.jinja_loader, ]) app.jinja_options = dict( app.jinja_options, cache_size=1000, bytecode_cache=BytecodeCache(app) ) invenio_config_loader(app, **kwargs_config)
[ "Configuration", "loader", "." ]
inveniosoftware/invenio-app
python
https://github.com/inveniosoftware/invenio-app/blob/6ef600f28913a501c05d75ffbe203e941e229f49/invenio_app/factory.py#L48-L71
[ "def", "config_loader", "(", "app", ",", "*", "*", "kwargs_config", ")", ":", "# This is the only place customize the Flask application right after", "# it has been created, but before all extensions etc are loaded.", "local_templates_path", "=", "os", ".", "path", ".", "join", "(", "app", ".", "instance_path", ",", "'templates'", ")", "if", "os", ".", "path", ".", "exists", "(", "local_templates_path", ")", ":", "# Let's customize the template loader to look into packages", "# and application templates folders.", "app", ".", "jinja_loader", "=", "ChoiceLoader", "(", "[", "FileSystemLoader", "(", "local_templates_path", ")", ",", "app", ".", "jinja_loader", ",", "]", ")", "app", ".", "jinja_options", "=", "dict", "(", "app", ".", "jinja_options", ",", "cache_size", "=", "1000", ",", "bytecode_cache", "=", "BytecodeCache", "(", "app", ")", ")", "invenio_config_loader", "(", "app", ",", "*", "*", "kwargs_config", ")" ]
6ef600f28913a501c05d75ffbe203e941e229f49
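The template override in config_loader reduces to Jinja2's ChoiceLoader trying loaders in order. A standalone sketch of that mechanism, independent of Invenio and with a hypothetical instance path:

    from jinja2 import ChoiceLoader, DictLoader, Environment, FileSystemLoader

    env = Environment(loader=ChoiceLoader([
        FileSystemLoader('/srv/instance/templates'),         # hypothetical instance folder
        DictLoader({'page.html': 'packaged default body'}),  # stands in for app.jinja_loader
    ]))
    # If /srv/instance/templates/page.html exists it shadows the packaged template,
    # otherwise the DictLoader fallback is rendered.
    print(env.get_template('page.html').render())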
valid
app_class
Create Flask application class. Invenio-Files-REST needs to patch the Werkzeug form parsing in order to support streaming large file uploads. This is done by subclassing the Flask application class.
invenio_app/factory.py
def app_class(): """Create Flask application class. Invenio-Files-REST needs to patch the Werkzeug form parsing in order to support streaming large file uploads. This is done by subclassing the Flask application class. """ try: pkg_resources.get_distribution('invenio-files-rest') from invenio_files_rest.app import Flask as FlaskBase except pkg_resources.DistributionNotFound: from flask import Flask as FlaskBase # Add Host header validation via APP_ALLOWED_HOSTS configuration variable. class Request(TrustedHostsMixin, FlaskBase.request_class): pass class Flask(FlaskBase): request_class = Request return Flask
def app_class(): """Create Flask application class. Invenio-Files-REST needs to patch the Werkzeug form parsing in order to support streaming large file uploads. This is done by subclassing the Flask application class. """ try: pkg_resources.get_distribution('invenio-files-rest') from invenio_files_rest.app import Flask as FlaskBase except pkg_resources.DistributionNotFound: from flask import Flask as FlaskBase # Add Host header validation via APP_ALLOWED_HOSTS configuration variable. class Request(TrustedHostsMixin, FlaskBase.request_class): pass class Flask(FlaskBase): request_class = Request return Flask
[ "Create", "Flask", "application", "class", "." ]
inveniosoftware/invenio-app
python
https://github.com/inveniosoftware/invenio-app/blob/6ef600f28913a501c05d75ffbe203e941e229f49/invenio_app/factory.py#L74-L94
[ "def", "app_class", "(", ")", ":", "try", ":", "pkg_resources", ".", "get_distribution", "(", "'invenio-files-rest'", ")", "from", "invenio_files_rest", ".", "app", "import", "Flask", "as", "FlaskBase", "except", "pkg_resources", ".", "DistributionNotFound", ":", "from", "flask", "import", "Flask", "as", "FlaskBase", "# Add Host header validation via APP_ALLOWED_HOSTS configuration variable.", "class", "Request", "(", "TrustedHostsMixin", ",", "FlaskBase", ".", "request_class", ")", ":", "pass", "class", "Flask", "(", "FlaskBase", ")", ":", "request_class", "=", "Request", "return", "Flask" ]
6ef600f28913a501c05d75ffbe203e941e229f49
valid
InvenioApp.init_app
Initialize application object. :param app: An instance of :class:`~flask.Flask`.
invenio_app/ext.py
def init_app(self, app, **kwargs): """Initialize application object. :param app: An instance of :class:`~flask.Flask`. """ # Init the configuration self.init_config(app) # Enable Rate limiter self.limiter = Limiter(app, key_func=get_ipaddr) # Enable secure HTTP headers if app.config['APP_ENABLE_SECURE_HEADERS']: self.talisman = Talisman( app, **app.config.get('APP_DEFAULT_SECURE_HEADERS', {}) ) # Enable PING view if app.config['APP_HEALTH_BLUEPRINT_ENABLED']: blueprint = Blueprint('invenio_app_ping', __name__) @blueprint.route('/ping') def ping(): """Load balancer ping view.""" return 'OK' ping.talisman_view_options = {'force_https': False} app.register_blueprint(blueprint) requestid_header = app.config.get('APP_REQUESTID_HEADER') if requestid_header: @app.before_request def set_request_id(): """Extracts a request id from an HTTP header.""" request_id = request.headers.get(requestid_header) if request_id: # Capped at 200 to protect against malicious clients # sending very large headers. g.request_id = request_id[:200] # If installed register the Flask-DebugToolbar extension try: from flask_debugtoolbar import DebugToolbarExtension app.extensions['flask-debugtoolbar'] = DebugToolbarExtension(app) except ImportError: app.logger.debug('Flask-DebugToolbar extension not installed.') # Register self app.extensions['invenio-app'] = self
def init_app(self, app, **kwargs): """Initialize application object. :param app: An instance of :class:`~flask.Flask`. """ # Init the configuration self.init_config(app) # Enable Rate limiter self.limiter = Limiter(app, key_func=get_ipaddr) # Enable secure HTTP headers if app.config['APP_ENABLE_SECURE_HEADERS']: self.talisman = Talisman( app, **app.config.get('APP_DEFAULT_SECURE_HEADERS', {}) ) # Enable PING view if app.config['APP_HEALTH_BLUEPRINT_ENABLED']: blueprint = Blueprint('invenio_app_ping', __name__) @blueprint.route('/ping') def ping(): """Load balancer ping view.""" return 'OK' ping.talisman_view_options = {'force_https': False} app.register_blueprint(blueprint) requestid_header = app.config.get('APP_REQUESTID_HEADER') if requestid_header: @app.before_request def set_request_id(): """Extracts a request id from an HTTP header.""" request_id = request.headers.get(requestid_header) if request_id: # Capped at 200 to protect against malicious clients # sending very large headers. g.request_id = request_id[:200] # If installed register the Flask-DebugToolbar extension try: from flask_debugtoolbar import DebugToolbarExtension app.extensions['flask-debugtoolbar'] = DebugToolbarExtension(app) except ImportError: app.logger.debug('Flask-DebugToolbar extension not installed.') # Register self app.extensions['invenio-app'] = self
[ "Initialize", "application", "object", "." ]
inveniosoftware/invenio-app
python
https://github.com/inveniosoftware/invenio-app/blob/6ef600f28913a501c05d75ffbe203e941e229f49/invenio_app/ext.py#L40-L86
[ "def", "init_app", "(", "self", ",", "app", ",", "*", "*", "kwargs", ")", ":", "# Init the configuration", "self", ".", "init_config", "(", "app", ")", "# Enable Rate limiter", "self", ".", "limiter", "=", "Limiter", "(", "app", ",", "key_func", "=", "get_ipaddr", ")", "# Enable secure HTTP headers", "if", "app", ".", "config", "[", "'APP_ENABLE_SECURE_HEADERS'", "]", ":", "self", ".", "talisman", "=", "Talisman", "(", "app", ",", "*", "*", "app", ".", "config", ".", "get", "(", "'APP_DEFAULT_SECURE_HEADERS'", ",", "{", "}", ")", ")", "# Enable PING view", "if", "app", ".", "config", "[", "'APP_HEALTH_BLUEPRINT_ENABLED'", "]", ":", "blueprint", "=", "Blueprint", "(", "'invenio_app_ping'", ",", "__name__", ")", "@", "blueprint", ".", "route", "(", "'/ping'", ")", "def", "ping", "(", ")", ":", "\"\"\"Load balancer ping view.\"\"\"", "return", "'OK'", "ping", ".", "talisman_view_options", "=", "{", "'force_https'", ":", "False", "}", "app", ".", "register_blueprint", "(", "blueprint", ")", "requestid_header", "=", "app", ".", "config", ".", "get", "(", "'APP_REQUESTID_HEADER'", ")", "if", "requestid_header", ":", "@", "app", ".", "before_request", "def", "set_request_id", "(", ")", ":", "\"\"\"Extracts a request id from an HTTP header.\"\"\"", "request_id", "=", "request", ".", "headers", ".", "get", "(", "requestid_header", ")", "if", "request_id", ":", "# Capped at 200 to protect against malicious clients", "# sending very large headers.", "g", ".", "request_id", "=", "request_id", "[", ":", "200", "]", "# If installed register the Flask-DebugToolbar extension", "try", ":", "from", "flask_debugtoolbar", "import", "DebugToolbarExtension", "app", ".", "extensions", "[", "'flask-debugtoolbar'", "]", "=", "DebugToolbarExtension", "(", "app", ")", "except", "ImportError", ":", "app", ".", "logger", ".", "debug", "(", "'Flask-DebugToolbar extension not installed.'", ")", "# Register self", "app", ".", "extensions", "[", "'invenio-app'", "]", "=", "self" ]
6ef600f28913a501c05d75ffbe203e941e229f49
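Conventional Flask-extension wiring for the init_app method above, as a sketch. The zero-argument constructor is the usual Flask extension pattern but is not part of this record, so treat it as an assumption.

    from flask import Flask

    app = Flask('example')
    ext = InvenioApp()        # assumed app=None style constructor
    ext.init_app(app)
    assert 'invenio-app' in app.extensions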
valid
InvenioApp.init_config
Initialize configuration. :param app: An instance of :class:`~flask.Flask`.
invenio_app/ext.py
def init_config(self, app): """Initialize configuration. :param app: An instance of :class:`~flask.Flask`. """ config_apps = ['APP_', 'RATELIMIT_'] flask_talisman_debug_mode = ["'unsafe-inline'"] for k in dir(config): if any([k.startswith(prefix) for prefix in config_apps]): app.config.setdefault(k, getattr(config, k)) if app.config['DEBUG']: app.config.setdefault('APP_DEFAULT_SECURE_HEADERS', {}) headers = app.config['APP_DEFAULT_SECURE_HEADERS'] # ensure `content_security_policy` is not set to {} if headers.get('content_security_policy') != {}: headers.setdefault('content_security_policy', {}) csp = headers['content_security_policy'] # ensure `default-src` is not set to [] if csp.get('default-src') != []: csp.setdefault('default-src', []) # add default `content_security_policy` value when debug csp['default-src'] += flask_talisman_debug_mode
def init_config(self, app): """Initialize configuration. :param app: An instance of :class:`~flask.Flask`. """ config_apps = ['APP_', 'RATELIMIT_'] flask_talisman_debug_mode = ["'unsafe-inline'"] for k in dir(config): if any([k.startswith(prefix) for prefix in config_apps]): app.config.setdefault(k, getattr(config, k)) if app.config['DEBUG']: app.config.setdefault('APP_DEFAULT_SECURE_HEADERS', {}) headers = app.config['APP_DEFAULT_SECURE_HEADERS'] # ensure `content_security_policy` is not set to {} if headers.get('content_security_policy') != {}: headers.setdefault('content_security_policy', {}) csp = headers['content_security_policy'] # ensure `default-src` is not set to [] if csp.get('default-src') != []: csp.setdefault('default-src', []) # add default `content_security_policy` value when debug csp['default-src'] += flask_talisman_debug_mode
[ "Initialize", "configuration", "." ]
inveniosoftware/invenio-app
python
https://github.com/inveniosoftware/invenio-app/blob/6ef600f28913a501c05d75ffbe203e941e229f49/invenio_app/ext.py#L88-L110
[ "def", "init_config", "(", "self", ",", "app", ")", ":", "config_apps", "=", "[", "'APP_'", ",", "'RATELIMIT_'", "]", "flask_talisman_debug_mode", "=", "[", "\"'unsafe-inline'\"", "]", "for", "k", "in", "dir", "(", "config", ")", ":", "if", "any", "(", "[", "k", ".", "startswith", "(", "prefix", ")", "for", "prefix", "in", "config_apps", "]", ")", ":", "app", ".", "config", ".", "setdefault", "(", "k", ",", "getattr", "(", "config", ",", "k", ")", ")", "if", "app", ".", "config", "[", "'DEBUG'", "]", ":", "app", ".", "config", ".", "setdefault", "(", "'APP_DEFAULT_SECURE_HEADERS'", ",", "{", "}", ")", "headers", "=", "app", ".", "config", "[", "'APP_DEFAULT_SECURE_HEADERS'", "]", "# ensure `content_security_policy` is not set to {}", "if", "headers", ".", "get", "(", "'content_security_policy'", ")", "!=", "{", "}", ":", "headers", ".", "setdefault", "(", "'content_security_policy'", ",", "{", "}", ")", "csp", "=", "headers", "[", "'content_security_policy'", "]", "# ensure `default-src` is not set to []", "if", "csp", ".", "get", "(", "'default-src'", ")", "!=", "[", "]", ":", "csp", ".", "setdefault", "(", "'default-src'", ",", "[", "]", ")", "# add default `content_security_policy` value when debug", "csp", "[", "'default-src'", "]", "+=", "flask_talisman_debug_mode" ]
6ef600f28913a501c05d75ffbe203e941e229f49
valid
remove_leading
Remove leading needle string (if exists). >>> remove_leading('Test', 'TestThisAndThat') 'ThisAndThat' >>> remove_leading('Test', 'ArbitraryName') 'ArbitraryName'
spec/plugin.py
def remove_leading(needle, haystack): """Remove leading needle string (if exists). >>> remove_leading('Test', 'TestThisAndThat') 'ThisAndThat' >>> remove_leading('Test', 'ArbitraryName') 'ArbitraryName' """ if haystack[:len(needle)] == needle: return haystack[len(needle):] return haystack
def remove_leading(needle, haystack): """Remove leading needle string (if exists). >>> remove_leading('Test', 'TestThisAndThat') 'ThisAndThat' >>> remove_leading('Test', 'ArbitraryName') 'ArbitraryName' """ if haystack[:len(needle)] == needle: return haystack[len(needle):] return haystack
[ "Remove", "leading", "needle", "string", "(", "if", "exists", ")", "." ]
bitprophet/spec
python
https://github.com/bitprophet/spec/blob/d9646c5daf8e479937f970d21ebe185ad936a35a/spec/plugin.py#L38-L48
[ "def", "remove_leading", "(", "needle", ",", "haystack", ")", ":", "if", "haystack", "[", ":", "len", "(", "needle", ")", "]", "==", "needle", ":", "return", "haystack", "[", "len", "(", "needle", ")", ":", "]", "return", "haystack" ]
d9646c5daf8e479937f970d21ebe185ad936a35a
valid
remove_trailing
Remove trailing needle string (if exists). >>> remove_trailing('Test', 'ThisAndThatTest') 'ThisAndThat' >>> remove_trailing('Test', 'ArbitraryName') 'ArbitraryName'
spec/plugin.py
def remove_trailing(needle, haystack): """Remove trailing needle string (if exists). >>> remove_trailing('Test', 'ThisAndThatTest') 'ThisAndThat' >>> remove_trailing('Test', 'ArbitraryName') 'ArbitraryName' """ if haystack[-len(needle):] == needle: return haystack[:-len(needle)] return haystack
def remove_trailing(needle, haystack): """Remove trailing needle string (if exists). >>> remove_trailing('Test', 'ThisAndThatTest') 'ThisAndThat' >>> remove_trailing('Test', 'ArbitraryName') 'ArbitraryName' """ if haystack[-len(needle):] == needle: return haystack[:-len(needle)] return haystack
[ "Remove", "trailing", "needle", "string", "(", "if", "exists", ")", "." ]
bitprophet/spec
python
https://github.com/bitprophet/spec/blob/d9646c5daf8e479937f970d21ebe185ad936a35a/spec/plugin.py#L51-L61
[ "def", "remove_trailing", "(", "needle", ",", "haystack", ")", ":", "if", "haystack", "[", "-", "len", "(", "needle", ")", ":", "]", "==", "needle", ":", "return", "haystack", "[", ":", "-", "len", "(", "needle", ")", "]", "return", "haystack" ]
d9646c5daf8e479937f970d21ebe185ad936a35a
valid
camel2word
Convert name from CamelCase to "Normal case". >>> camel2word('CamelCase') 'Camel case' >>> camel2word('CaseWithSpec') 'Case with spec'
spec/plugin.py
def camel2word(string): """Covert name from CamelCase to "Normal case". >>> camel2word('CamelCase') 'Camel case' >>> camel2word('CaseWithSpec') 'Case with spec' """ def wordize(match): return ' ' + match.group(1).lower() return string[0] + re.sub(r'([A-Z])', wordize, string[1:])
def camel2word(string): """Covert name from CamelCase to "Normal case". >>> camel2word('CamelCase') 'Camel case' >>> camel2word('CaseWithSpec') 'Case with spec' """ def wordize(match): return ' ' + match.group(1).lower() return string[0] + re.sub(r'([A-Z])', wordize, string[1:])
[ "Covert", "name", "from", "CamelCase", "to", "Normal", "case", "." ]
bitprophet/spec
python
https://github.com/bitprophet/spec/blob/d9646c5daf8e479937f970d21ebe185ad936a35a/spec/plugin.py#L68-L79
[ "def", "camel2word", "(", "string", ")", ":", "def", "wordize", "(", "match", ")", ":", "return", "' '", "+", "match", ".", "group", "(", "1", ")", ".", "lower", "(", ")", "return", "string", "[", "0", "]", "+", "re", ".", "sub", "(", "r'([A-Z])'", ",", "wordize", ",", "string", "[", "1", ":", "]", ")" ]
d9646c5daf8e479937f970d21ebe185ad936a35a
valid
complete_english
>>> complete_english('dont do this') "don't do this" >>> complete_english('doesnt is matched as well') "doesn't is matched as well"
spec/plugin.py
def complete_english(string): """ >>> complete_english('dont do this') "don't do this" >>> complete_english('doesnt is matched as well') "doesn't is matched as well" """ for x, y in [("dont", "don't"), ("doesnt", "doesn't"), ("wont", "won't"), ("wasnt", "wasn't")]: string = string.replace(x, y) return string
def complete_english(string): """ >>> complete_english('dont do this') "don't do this" >>> complete_english('doesnt is matched as well') "doesn't is matched as well" """ for x, y in [("dont", "don't"), ("doesnt", "doesn't"), ("wont", "won't"), ("wasnt", "wasn't")]: string = string.replace(x, y) return string
[ ">>>", "complete_english", "(", "dont", "do", "this", ")", "don", "t", "do", "this", ">>>", "complete_english", "(", "doesnt", "is", "matched", "as", "well", ")", "doesn", "t", "is", "matched", "as", "well" ]
bitprophet/spec
python
https://github.com/bitprophet/spec/blob/d9646c5daf8e479937f970d21ebe185ad936a35a/spec/plugin.py#L82-L94
[ "def", "complete_english", "(", "string", ")", ":", "for", "x", ",", "y", "in", "[", "(", "\"dont\"", ",", "\"don't\"", ")", ",", "(", "\"doesnt\"", ",", "\"doesn't\"", ")", ",", "(", "\"wont\"", ",", "\"won't\"", ")", ",", "(", "\"wasnt\"", ",", "\"wasn't\"", ")", "]", ":", "string", "=", "string", ".", "replace", "(", "x", ",", "y", ")", "return", "string" ]
d9646c5daf8e479937f970d21ebe185ad936a35a
valid
SpecPlugin.format_seconds
Format a time in seconds.
spec/plugin.py
def format_seconds(self, n_seconds): """Format a time in seconds.""" func = self.ok if n_seconds >= 60: n_minutes, n_seconds = divmod(n_seconds, 60) return "%s minutes %s seconds" % ( func("%d" % n_minutes), func("%.3f" % n_seconds)) else: return "%s seconds" % ( func("%.3f" % n_seconds))
def format_seconds(self, n_seconds): """Format a time in seconds.""" func = self.ok if n_seconds >= 60: n_minutes, n_seconds = divmod(n_seconds, 60) return "%s minutes %s seconds" % ( func("%d" % n_minutes), func("%.3f" % n_seconds)) else: return "%s seconds" % ( func("%.3f" % n_seconds))
[ "Format", "a", "time", "in", "seconds", "." ]
bitprophet/spec
python
https://github.com/bitprophet/spec/blob/d9646c5daf8e479937f970d21ebe185ad936a35a/spec/plugin.py#L503-L513
[ "def", "format_seconds", "(", "self", ",", "n_seconds", ")", ":", "func", "=", "self", ".", "ok", "if", "n_seconds", ">=", "60", ":", "n_minutes", ",", "n_seconds", "=", "divmod", "(", "n_seconds", ",", "60", ")", "return", "\"%s minutes %s seconds\"", "%", "(", "func", "(", "\"%d\"", "%", "n_minutes", ")", ",", "func", "(", "\"%.3f\"", "%", "n_seconds", ")", ")", "else", ":", "return", "\"%s seconds\"", "%", "(", "func", "(", "\"%.3f\"", "%", "n_seconds", ")", ")" ]
d9646c5daf8e479937f970d21ebe185ad936a35a
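Stripped of the colour-wrapping self.ok calls, the formatting above behaves like this for a duration over a minute (note that it always pluralises "minutes"):

    n_seconds = 75.25
    n_minutes, n_seconds = divmod(n_seconds, 60)
    print("%d minutes %.3f seconds" % (n_minutes, n_seconds))   # 1 minutes 15.250 seconds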
valid
ppdict
Indent representation of a dict
esiosdata/prettyprinting/__init__.py
def ppdict(dict_to_print, br='\n', html=False, key_align='l', sort_keys=True, key_preffix='', key_suffix='', value_prefix='', value_suffix='', left_margin=3, indent=2): """Indent representation of a dict""" if dict_to_print: if sort_keys: dic = dict_to_print.copy() keys = list(dic.keys()) keys.sort() dict_to_print = OrderedDict() for k in keys: dict_to_print[k] = dic[k] tmp = ['{'] ks = [type(x) == str and "'%s'" % x or x for x in dict_to_print.keys()] vs = [type(x) == str and "'%s'" % x or x for x in dict_to_print.values()] max_key_len = max([len(str(x)) for x in ks]) for i in range(len(ks)): k = {1: str(ks[i]).ljust(max_key_len), key_align == 'r': str(ks[i]).rjust(max_key_len)}[1] v = vs[i] tmp.append(' ' * indent + '{}{}{}:{}{}{},'.format(key_preffix, k, key_suffix, value_prefix, v, value_suffix)) tmp[-1] = tmp[-1][:-1] # remove the ',' in the last item tmp.append('}') if left_margin: tmp = [' ' * left_margin + x for x in tmp] if html: return '<code>{}</code>'.format(br.join(tmp).replace(' ', '&nbsp;')) else: return br.join(tmp) else: return '{}'
def ppdict(dict_to_print, br='\n', html=False, key_align='l', sort_keys=True, key_preffix='', key_suffix='', value_prefix='', value_suffix='', left_margin=3, indent=2): """Indent representation of a dict""" if dict_to_print: if sort_keys: dic = dict_to_print.copy() keys = list(dic.keys()) keys.sort() dict_to_print = OrderedDict() for k in keys: dict_to_print[k] = dic[k] tmp = ['{'] ks = [type(x) == str and "'%s'" % x or x for x in dict_to_print.keys()] vs = [type(x) == str and "'%s'" % x or x for x in dict_to_print.values()] max_key_len = max([len(str(x)) for x in ks]) for i in range(len(ks)): k = {1: str(ks[i]).ljust(max_key_len), key_align == 'r': str(ks[i]).rjust(max_key_len)}[1] v = vs[i] tmp.append(' ' * indent + '{}{}{}:{}{}{},'.format(key_preffix, k, key_suffix, value_prefix, v, value_suffix)) tmp[-1] = tmp[-1][:-1] # remove the ',' in the last item tmp.append('}') if left_margin: tmp = [' ' * left_margin + x for x in tmp] if html: return '<code>{}</code>'.format(br.join(tmp).replace(' ', '&nbsp;')) else: return br.join(tmp) else: return '{}'
[ "Indent", "representation", "of", "a", "dict" ]
azogue/esiosdata
python
https://github.com/azogue/esiosdata/blob/680c7918955bc6ceee5bded92b3a4485f5ea8151/esiosdata/prettyprinting/__init__.py#L106-L142
[ "def", "ppdict", "(", "dict_to_print", ",", "br", "=", "'\\n'", ",", "html", "=", "False", ",", "key_align", "=", "'l'", ",", "sort_keys", "=", "True", ",", "key_preffix", "=", "''", ",", "key_suffix", "=", "''", ",", "value_prefix", "=", "''", ",", "value_suffix", "=", "''", ",", "left_margin", "=", "3", ",", "indent", "=", "2", ")", ":", "if", "dict_to_print", ":", "if", "sort_keys", ":", "dic", "=", "dict_to_print", ".", "copy", "(", ")", "keys", "=", "list", "(", "dic", ".", "keys", "(", ")", ")", "keys", ".", "sort", "(", ")", "dict_to_print", "=", "OrderedDict", "(", ")", "for", "k", "in", "keys", ":", "dict_to_print", "[", "k", "]", "=", "dic", "[", "k", "]", "tmp", "=", "[", "'{'", "]", "ks", "=", "[", "type", "(", "x", ")", "==", "str", "and", "\"'%s'\"", "%", "x", "or", "x", "for", "x", "in", "dict_to_print", ".", "keys", "(", ")", "]", "vs", "=", "[", "type", "(", "x", ")", "==", "str", "and", "\"'%s'\"", "%", "x", "or", "x", "for", "x", "in", "dict_to_print", ".", "values", "(", ")", "]", "max_key_len", "=", "max", "(", "[", "len", "(", "str", "(", "x", ")", ")", "for", "x", "in", "ks", "]", ")", "for", "i", "in", "range", "(", "len", "(", "ks", ")", ")", ":", "k", "=", "{", "1", ":", "str", "(", "ks", "[", "i", "]", ")", ".", "ljust", "(", "max_key_len", ")", ",", "key_align", "==", "'r'", ":", "str", "(", "ks", "[", "i", "]", ")", ".", "rjust", "(", "max_key_len", ")", "}", "[", "1", "]", "v", "=", "vs", "[", "i", "]", "tmp", ".", "append", "(", "' '", "*", "indent", "+", "'{}{}{}:{}{}{},'", ".", "format", "(", "key_preffix", ",", "k", ",", "key_suffix", ",", "value_prefix", ",", "v", ",", "value_suffix", ")", ")", "tmp", "[", "-", "1", "]", "=", "tmp", "[", "-", "1", "]", "[", ":", "-", "1", "]", "# remove the ',' in the last item", "tmp", ".", "append", "(", "'}'", ")", "if", "left_margin", ":", "tmp", "=", "[", "' '", "*", "left_margin", "+", "x", "for", "x", "in", "tmp", "]", "if", "html", ":", "return", "'<code>{}</code>'", ".", "format", "(", "br", ".", "join", "(", "tmp", ")", ".", "replace", "(", "' '", ",", "'&nbsp;'", ")", ")", "else", ":", "return", "br", ".", "join", "(", "tmp", ")", "else", ":", "return", "'{}'" ]
680c7918955bc6ceee5bded92b3a4485f5ea8151
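A quick call to the ppdict helper above, with the import path inferred from the file path in this record. Keys are sorted by default and string keys/values are quoted:

    from esiosdata.prettyprinting import ppdict

    print(ppdict({'b': 2, 'a': 'x'}, left_margin=0))
    # {
    #   'a':'x',
    #   'b':2
    # }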
valid
eq_
Shadow of the Nose builtin which presents easier to read multiline output.
spec/__init__.py
def eq_(result, expected, msg=None): """ Shadow of the Nose builtin which presents easier to read multiline output. """ params = {'expected': expected, 'result': result} aka = """ --------------------------------- aka ----------------------------------------- Expected: %(expected)r Got: %(result)r """ % params default_msg = """ Expected: %(expected)s Got: %(result)s """ % params if ( (repr(result) != six.text_type(result)) or (repr(expected) != six.text_type(expected)) ): default_msg += aka assertion_msg = msg or default_msg # This assert will bubble up to Nose's failure handling, which at some # point calls explicit str() - which will UnicodeDecodeError on any non # ASCII text. # To work around this, we make sure Unicode strings become bytestrings # beforehand, with explicit encode. if isinstance(assertion_msg, six.text_type): assertion_msg = assertion_msg.encode('utf-8') assert result == expected, assertion_msg
def eq_(result, expected, msg=None): """ Shadow of the Nose builtin which presents easier to read multiline output. """ params = {'expected': expected, 'result': result} aka = """ --------------------------------- aka ----------------------------------------- Expected: %(expected)r Got: %(result)r """ % params default_msg = """ Expected: %(expected)s Got: %(result)s """ % params if ( (repr(result) != six.text_type(result)) or (repr(expected) != six.text_type(expected)) ): default_msg += aka assertion_msg = msg or default_msg # This assert will bubble up to Nose's failure handling, which at some # point calls explicit str() - which will UnicodeDecodeError on any non # ASCII text. # To work around this, we make sure Unicode strings become bytestrings # beforehand, with explicit encode. if isinstance(assertion_msg, six.text_type): assertion_msg = assertion_msg.encode('utf-8') assert result == expected, assertion_msg
[ "Shadow", "of", "the", "Nose", "builtin", "which", "presents", "easier", "to", "read", "multiline", "output", "." ]
bitprophet/spec
python
https://github.com/bitprophet/spec/blob/d9646c5daf8e479937f970d21ebe185ad936a35a/spec/__init__.py#L32-L67
[ "def", "eq_", "(", "result", ",", "expected", ",", "msg", "=", "None", ")", ":", "params", "=", "{", "'expected'", ":", "expected", ",", "'result'", ":", "result", "}", "aka", "=", "\"\"\"\n\n--------------------------------- aka -----------------------------------------\n\nExpected:\n%(expected)r\n\nGot:\n%(result)r\n\"\"\"", "%", "params", "default_msg", "=", "\"\"\"\nExpected:\n%(expected)s\n\nGot:\n%(result)s\n\"\"\"", "%", "params", "if", "(", "(", "repr", "(", "result", ")", "!=", "six", ".", "text_type", "(", "result", ")", ")", "or", "(", "repr", "(", "expected", ")", "!=", "six", ".", "text_type", "(", "expected", ")", ")", ")", ":", "default_msg", "+=", "aka", "assertion_msg", "=", "msg", "or", "default_msg", "# This assert will bubble up to Nose's failure handling, which at some", "# point calls explicit str() - which will UnicodeDecodeError on any non", "# ASCII text.", "# To work around this, we make sure Unicode strings become bytestrings", "# beforehand, with explicit encode.", "if", "isinstance", "(", "assertion_msg", ",", "six", ".", "text_type", ")", ":", "assertion_msg", "=", "assertion_msg", ".", "encode", "(", "'utf-8'", ")", "assert", "result", "==", "expected", ",", "assertion_msg" ]
d9646c5daf8e479937f970d21ebe185ad936a35a
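eq_ above is a drop-in for nose.tools.eq_; a small sketch of the pass and fail paths (on Python 3 the failure message ends up as bytes because of the explicit encode at the end of the function):

    eq_([1, 2], [1, 2])              # passes silently
    try:
        eq_('actual', 'expected')
    except AssertionError as exc:
        print(exc)                   # the Expected/Got message built above (bytes on Python 3)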
valid
_assert_contains
Test for existence of ``needle`` regex within ``haystack``. Say ``escape`` to escape the ``needle`` if you aren't really using the regex feature & have special characters in it.
spec/__init__.py
def _assert_contains(haystack, needle, invert, escape=False): """ Test for existence of ``needle`` regex within ``haystack``. Say ``escape`` to escape the ``needle`` if you aren't really using the regex feature & have special characters in it. """ myneedle = re.escape(needle) if escape else needle matched = re.search(myneedle, haystack, re.M) if (invert and matched) or (not invert and not matched): raise AssertionError("'%s' %sfound in '%s'" % ( needle, "" if invert else "not ", haystack ))
def _assert_contains(haystack, needle, invert, escape=False): """ Test for existence of ``needle`` regex within ``haystack``. Say ``escape`` to escape the ``needle`` if you aren't really using the regex feature & have special characters in it. """ myneedle = re.escape(needle) if escape else needle matched = re.search(myneedle, haystack, re.M) if (invert and matched) or (not invert and not matched): raise AssertionError("'%s' %sfound in '%s'" % ( needle, "" if invert else "not ", haystack ))
[ "Test", "for", "existence", "of", "needle", "regex", "within", "haystack", "." ]
bitprophet/spec
python
https://github.com/bitprophet/spec/blob/d9646c5daf8e479937f970d21ebe185ad936a35a/spec/__init__.py#L80-L94
[ "def", "_assert_contains", "(", "haystack", ",", "needle", ",", "invert", ",", "escape", "=", "False", ")", ":", "myneedle", "=", "re", ".", "escape", "(", "needle", ")", "if", "escape", "else", "needle", "matched", "=", "re", ".", "search", "(", "myneedle", ",", "haystack", ",", "re", ".", "M", ")", "if", "(", "invert", "and", "matched", ")", "or", "(", "not", "invert", "and", "not", "matched", ")", ":", "raise", "AssertionError", "(", "\"'%s' %sfound in '%s'\"", "%", "(", "needle", ",", "\"\"", "if", "invert", "else", "\"not \"", ",", "haystack", ")", ")" ]
d9646c5daf8e479937f970d21ebe185ad936a35a
valid
parse_config_file
Find the .splunk_logger config file in the current directory, or in the user's home and parse it. The one in the current directory has precedence. :return: A tuple with: - project_id - access_token
splunk_logger/utils.py
def parse_config_file(): """ Find the .splunk_logger config file in the current directory, or in the user's home and parse it. The one in the current directory has precedence. :return: A tuple with: - project_id - access_token """ for filename in ('.splunk_logger', os.path.expanduser('~/.splunk_logger')): project_id, access_token, api_domain = _parse_config_file_impl(filename) if project_id is not None\ and access_token is not None\ and api_domain is not None: return project_id, access_token, api_domain else: return None, None, None
def parse_config_file(): """ Find the .splunk_logger config file in the current directory, or in the user's home and parse it. The one in the current directory has precedence. :return: A tuple with: - project_id - access_token """ for filename in ('.splunk_logger', os.path.expanduser('~/.splunk_logger')): project_id, access_token, api_domain = _parse_config_file_impl(filename) if project_id is not None\ and access_token is not None\ and api_domain is not None: return project_id, access_token, api_domain else: return None, None, None
[ "Find", "the", ".", "splunk_logger", "config", "file", "in", "the", "current", "directory", "or", "in", "the", "user", "s", "home", "and", "parse", "it", ".", "The", "one", "in", "the", "current", "directory", "has", "precedence", ".", ":", "return", ":", "A", "tuple", "with", ":", "-", "project_id", "-", "access_token" ]
andresriancho/splunk-logger
python
https://github.com/andresriancho/splunk-logger/blob/448d5ba54464fc355786ffb64f11fd6367792381/splunk_logger/utils.py#L5-L24
[ "def", "parse_config_file", "(", ")", ":", "for", "filename", "in", "(", "'.splunk_logger'", ",", "os", ".", "path", ".", "expanduser", "(", "'~/.splunk_logger'", ")", ")", ":", "project_id", ",", "access_token", ",", "api_domain", "=", "_parse_config_file_impl", "(", "filename", ")", "if", "project_id", "is", "not", "None", "and", "access_token", "is", "not", "None", "and", "api_domain", "is", "not", "None", ":", "return", "project_id", ",", "access_token", ",", "api_domain", "else", ":", "return", "None", ",", "None", ",", "None" ]
448d5ba54464fc355786ffb64f11fd6367792381
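A hypothetical stand-alone sketch of the precedence rule above (current directory first, then the home directory), with the per-file parser stubbed out:

import os

def first_complete_config(parse_one):
    # parse_one(filename) should return (project_id, access_token, api_domain),
    # using None for anything it could not read.
    for filename in ('.splunk_logger', os.path.expanduser('~/.splunk_logger')):
        values = parse_one(filename)
        if all(v is not None for v in values):
            return values
    return None, None, None

# Stub parser for illustration: pretend neither file exists.
print(first_complete_config(lambda _name: (None, None, None)))  # (None, None, None)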
valid
_parse_config_file_impl
Format for the file is: credentials: project_id: ... access_token: ... api_domain: ... :param filename: The filename to parse :return: A tuple with: - project_id - access_token - api_domain
splunk_logger/utils.py
def _parse_config_file_impl(filename): """ Format for the file is: credentials: project_id: ... access_token: ... api_domain: ... :param filename: The filename to parse :return: A tuple with: - project_id - access_token - api_domain """ try: doc = yaml.load(file(filename).read()) project_id = doc["credentials"]["project_id"] access_token = doc["credentials"]["access_token"] api_domain = doc["credentials"]["api_domain"] return project_id, access_token, api_domain except: return None, None, None
def _parse_config_file_impl(filename): """ Format for the file is: credentials: project_id: ... access_token: ... api_domain: ... :param filename: The filename to parse :return: A tuple with: - project_id - access_token - api_domain """ try: doc = yaml.load(file(filename).read()) project_id = doc["credentials"]["project_id"] access_token = doc["credentials"]["access_token"] api_domain = doc["credentials"]["api_domain"] return project_id, access_token, api_domain except: return None, None, None
[ "Format", "for", "the", "file", "is", ":", "credentials", ":", "project_id", ":", "...", "access_token", ":", "...", "api_domain", ":", "...", ":", "param", "filename", ":", "The", "filename", "to", "parse", ":", "return", ":", "A", "tuple", "with", ":", "-", "project_id", "-", "access_token", "-", "api_domain" ]
andresriancho/splunk-logger
python
https://github.com/andresriancho/splunk-logger/blob/448d5ba54464fc355786ffb64f11fd6367792381/splunk_logger/utils.py#L27-L51
[ "def", "_parse_config_file_impl", "(", "filename", ")", ":", "try", ":", "doc", "=", "yaml", ".", "load", "(", "file", "(", "filename", ")", ".", "read", "(", ")", ")", "project_id", "=", "doc", "[", "\"credentials\"", "]", "[", "\"project_id\"", "]", "access_token", "=", "doc", "[", "\"credentials\"", "]", "[", "\"access_token\"", "]", "api_domain", "=", "doc", "[", "\"credentials\"", "]", "[", "\"api_domain\"", "]", "return", "project_id", ",", "access_token", ",", "api_domain", "except", ":", "return", "None", ",", "None", ",", "None" ]
448d5ba54464fc355786ffb64f11fd6367792381
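For reference, a config file matching the docstring above would look like the YAML below; the parse step is shown with Python 3 idioms (yaml.safe_load on a string) since the record itself relies on the Python 2 file() built-in, and all values are placeholders:

import yaml   # needs PyYAML installed

raw = """
credentials:
    project_id: abc123
    access_token: secret-token
    api_domain: api.example.com
"""

doc = yaml.safe_load(raw)
creds = doc["credentials"]
print(creds["project_id"], creds["access_token"], creds["api_domain"])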
valid
dem_url_dia
Obtiene las urls de descarga de los datos de demanda energética de un día concreto.
esiosdata/importdemdata.py
def dem_url_dia(dt_day='2015-06-22'): """Obtiene las urls de descarga de los datos de demanda energética de un día concreto.""" def _url_tipo_dato(str_dia, k): url = SERVER + '/archives/{}/download_json?locale=es'.format(D_TIPOS_REQ_DEM[k]) if type(str_dia) is str: return url + '&date=' + str_dia else: return url + '&date=' + str_dia.date().isoformat() urls = [_url_tipo_dato(dt_day, k) for k in D_TIPOS_REQ_DEM.keys()] return urls
def dem_url_dia(dt_day='2015-06-22'): """Obtiene las urls de descarga de los datos de demanda energética de un día concreto.""" def _url_tipo_dato(str_dia, k): url = SERVER + '/archives/{}/download_json?locale=es'.format(D_TIPOS_REQ_DEM[k]) if type(str_dia) is str: return url + '&date=' + str_dia else: return url + '&date=' + str_dia.date().isoformat() urls = [_url_tipo_dato(dt_day, k) for k in D_TIPOS_REQ_DEM.keys()] return urls
[ "Obtiene", "las", "urls", "de", "descarga", "de", "los", "datos", "de", "demanda", "energética", "de", "un", "día", "concreto", "." ]
azogue/esiosdata
python
https://github.com/azogue/esiosdata/blob/680c7918955bc6ceee5bded92b3a4485f5ea8151/esiosdata/importdemdata.py#L39-L50
[ "def", "dem_url_dia", "(", "dt_day", "=", "'2015-06-22'", ")", ":", "def", "_url_tipo_dato", "(", "str_dia", ",", "k", ")", ":", "url", "=", "SERVER", "+", "'/archives/{}/download_json?locale=es'", ".", "format", "(", "D_TIPOS_REQ_DEM", "[", "k", "]", ")", "if", "type", "(", "str_dia", ")", "is", "str", ":", "return", "url", "+", "'&date='", "+", "str_dia", "else", ":", "return", "url", "+", "'&date='", "+", "str_dia", ".", "date", "(", ")", ".", "isoformat", "(", ")", "urls", "=", "[", "_url_tipo_dato", "(", "dt_day", ",", "k", ")", "for", "k", "in", "D_TIPOS_REQ_DEM", ".", "keys", "(", ")", "]", "return", "urls" ]
680c7918955bc6ceee5bded92b3a4485f5ea8151
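The date handling above only distinguishes plain strings from datetime objects; a stubbed sketch (SERVER and the archive id below are placeholders, not the real endpoint constants):

import datetime

SERVER = 'https://example.invalid'   # placeholder; the real module defines SERVER

def url_for(day, archive_id=70):
    base = SERVER + '/archives/{}/download_json?locale=es'.format(archive_id)
    day_str = day if isinstance(day, str) else day.date().isoformat()
    return base + '&date=' + day_str

print(url_for('2015-06-22'))
print(url_for(datetime.datetime(2015, 6, 22, 12, 30)))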
valid
dem_procesa_datos_dia
Procesa los datos descargados en JSON.
esiosdata/importdemdata.py
def dem_procesa_datos_dia(key_day, response): """Procesa los datos descargados en JSON.""" dfs_import, df_import, dfs_maxmin, hay_errores = [], None, [], 0 for r in response: tipo_datos, data = _extract_func_json_data(r) if tipo_datos is not None: if ('IND_MaxMin' in tipo_datos) and data: df_import = _import_daily_max_min(data) dfs_maxmin.append(df_import) elif data: df_import = _import_json_ts_data(data) dfs_import.append(df_import) if tipo_datos is None or df_import is None: hay_errores += 1 if hay_errores == 4: # No hay nada, salida temprana sin retry: print_redb('** No hay datos para el día {}!'.format(key_day)) return None, -2 else: # if hay_errores < 3: # TODO formar datos incompletos!! (max-min con NaN's, etc.) data_import = {} if dfs_import: data_import[KEYS_DATA_DEM[0]] = dfs_import[0].join(dfs_import[1]) if len(dfs_maxmin) == 2: data_import[KEYS_DATA_DEM[1]] = dfs_maxmin[0].join(dfs_maxmin[1]) elif dfs_maxmin: data_import[KEYS_DATA_DEM[1]] = dfs_maxmin[0] if not data_import: print_err('DÍA: {} -> # ERRORES: {}'.format(key_day, hay_errores)) return None, -2 return data_import, 0
def dem_procesa_datos_dia(key_day, response): """Procesa los datos descargados en JSON.""" dfs_import, df_import, dfs_maxmin, hay_errores = [], None, [], 0 for r in response: tipo_datos, data = _extract_func_json_data(r) if tipo_datos is not None: if ('IND_MaxMin' in tipo_datos) and data: df_import = _import_daily_max_min(data) dfs_maxmin.append(df_import) elif data: df_import = _import_json_ts_data(data) dfs_import.append(df_import) if tipo_datos is None or df_import is None: hay_errores += 1 if hay_errores == 4: # No hay nada, salida temprana sin retry: print_redb('** No hay datos para el día {}!'.format(key_day)) return None, -2 else: # if hay_errores < 3: # TODO formar datos incompletos!! (max-min con NaN's, etc.) data_import = {} if dfs_import: data_import[KEYS_DATA_DEM[0]] = dfs_import[0].join(dfs_import[1]) if len(dfs_maxmin) == 2: data_import[KEYS_DATA_DEM[1]] = dfs_maxmin[0].join(dfs_maxmin[1]) elif dfs_maxmin: data_import[KEYS_DATA_DEM[1]] = dfs_maxmin[0] if not data_import: print_err('DÍA: {} -> # ERRORES: {}'.format(key_day, hay_errores)) return None, -2 return data_import, 0
[ "Procesa", "los", "datos", "descargados", "en", "JSON", "." ]
azogue/esiosdata
python
https://github.com/azogue/esiosdata/blob/680c7918955bc6ceee5bded92b3a4485f5ea8151/esiosdata/importdemdata.py#L98-L128
[ "def", "dem_procesa_datos_dia", "(", "key_day", ",", "response", ")", ":", "dfs_import", ",", "df_import", ",", "dfs_maxmin", ",", "hay_errores", "=", "[", "]", ",", "None", ",", "[", "]", ",", "0", "for", "r", "in", "response", ":", "tipo_datos", ",", "data", "=", "_extract_func_json_data", "(", "r", ")", "if", "tipo_datos", "is", "not", "None", ":", "if", "(", "'IND_MaxMin'", "in", "tipo_datos", ")", "and", "data", ":", "df_import", "=", "_import_daily_max_min", "(", "data", ")", "dfs_maxmin", ".", "append", "(", "df_import", ")", "elif", "data", ":", "df_import", "=", "_import_json_ts_data", "(", "data", ")", "dfs_import", ".", "append", "(", "df_import", ")", "if", "tipo_datos", "is", "None", "or", "df_import", "is", "None", ":", "hay_errores", "+=", "1", "if", "hay_errores", "==", "4", ":", "# No hay nada, salida temprana sin retry:", "print_redb", "(", "'** No hay datos para el día {}!'.", "f", "ormat(", "k", "ey_day)", ")", "", "return", "None", ",", "-", "2", "else", ":", "# if hay_errores < 3:", "# TODO formar datos incompletos!! (max-min con NaN's, etc.)", "data_import", "=", "{", "}", "if", "dfs_import", ":", "data_import", "[", "KEYS_DATA_DEM", "[", "0", "]", "]", "=", "dfs_import", "[", "0", "]", ".", "join", "(", "dfs_import", "[", "1", "]", ")", "if", "len", "(", "dfs_maxmin", ")", "==", "2", ":", "data_import", "[", "KEYS_DATA_DEM", "[", "1", "]", "]", "=", "dfs_maxmin", "[", "0", "]", ".", "join", "(", "dfs_maxmin", "[", "1", "]", ")", "elif", "dfs_maxmin", ":", "data_import", "[", "KEYS_DATA_DEM", "[", "1", "]", "]", "=", "dfs_maxmin", "[", "0", "]", "if", "not", "data_import", ":", "print_err", "(", "'DÍA: {} -> # ERRORES: {}'.", "f", "ormat(", "k", "ey_day,", " ", "ay_errores)", ")", "", "return", "None", ",", "-", "2", "return", "data_import", ",", "0" ]
680c7918955bc6ceee5bded92b3a4485f5ea8151
valid
dem_data_dia
Obtiene datos de demanda energética en un día concreto o un intervalo, accediendo directamente a la web.
esiosdata/importdemdata.py
def dem_data_dia(str_dia='2015-10-10', str_dia_fin=None): """Obtiene datos de demanda energética en un día concreto o un intervalo, accediendo directamente a la web.""" params = {'date_fmt': DATE_FMT, 'usar_multithread': False, 'num_retries': 1, "timeout": 10, 'func_procesa_data_dia': dem_procesa_datos_dia, 'func_url_data_dia': dem_url_dia, 'data_extra_request': {'json_req': False, 'headers': HEADERS}} if str_dia_fin is not None: params['usar_multithread'] = True data, hay_errores, str_import = get_data_en_intervalo(str_dia, str_dia_fin, **params) else: data, hay_errores, str_import = get_data_en_intervalo(str_dia, str_dia, **params) if not hay_errores: return data else: print_err(str_import) return None
def dem_data_dia(str_dia='2015-10-10', str_dia_fin=None): """Obtiene datos de demanda energética en un día concreto o un intervalo, accediendo directamente a la web.""" params = {'date_fmt': DATE_FMT, 'usar_multithread': False, 'num_retries': 1, "timeout": 10, 'func_procesa_data_dia': dem_procesa_datos_dia, 'func_url_data_dia': dem_url_dia, 'data_extra_request': {'json_req': False, 'headers': HEADERS}} if str_dia_fin is not None: params['usar_multithread'] = True data, hay_errores, str_import = get_data_en_intervalo(str_dia, str_dia_fin, **params) else: data, hay_errores, str_import = get_data_en_intervalo(str_dia, str_dia, **params) if not hay_errores: return data else: print_err(str_import) return None
[ "Obtiene", "datos", "de", "demanda", "energética", "en", "un", "día", "concreto", "o", "un", "intervalo", "accediendo", "directamente", "a", "la", "web", "." ]
azogue/esiosdata
python
https://github.com/azogue/esiosdata/blob/680c7918955bc6ceee5bded92b3a4485f5ea8151/esiosdata/importdemdata.py#L131-L145
[ "def", "dem_data_dia", "(", "str_dia", "=", "'2015-10-10'", ",", "str_dia_fin", "=", "None", ")", ":", "params", "=", "{", "'date_fmt'", ":", "DATE_FMT", ",", "'usar_multithread'", ":", "False", ",", "'num_retries'", ":", "1", ",", "\"timeout\"", ":", "10", ",", "'func_procesa_data_dia'", ":", "dem_procesa_datos_dia", ",", "'func_url_data_dia'", ":", "dem_url_dia", ",", "'data_extra_request'", ":", "{", "'json_req'", ":", "False", ",", "'headers'", ":", "HEADERS", "}", "}", "if", "str_dia_fin", "is", "not", "None", ":", "params", "[", "'usar_multithread'", "]", "=", "True", "data", ",", "hay_errores", ",", "str_import", "=", "get_data_en_intervalo", "(", "str_dia", ",", "str_dia_fin", ",", "*", "*", "params", ")", "else", ":", "data", ",", "hay_errores", ",", "str_import", "=", "get_data_en_intervalo", "(", "str_dia", ",", "str_dia", ",", "*", "*", "params", ")", "if", "not", "hay_errores", ":", "return", "data", "else", ":", "print_err", "(", "str_import", ")", "return", "None" ]
680c7918955bc6ceee5bded92b3a4485f5ea8151
valid
flag_inner_classes
Mutates any attributes on ``obj`` which are classes, with link to ``obj``. Adds a convenience accessor which instantiates ``obj`` and then calls its ``setup`` method. Recurses on those objects as well.
spec/utils.py
def flag_inner_classes(obj): """ Mutates any attributes on ``obj`` which are classes, with link to ``obj``. Adds a convenience accessor which instantiates ``obj`` and then calls its ``setup`` method. Recurses on those objects as well. """ for tup in class_members(obj): tup[1]._parent = obj tup[1]._parent_inst = None tup[1].__getattr__ = my_getattr flag_inner_classes(tup[1])
def flag_inner_classes(obj): """ Mutates any attributes on ``obj`` which are classes, with link to ``obj``. Adds a convenience accessor which instantiates ``obj`` and then calls its ``setup`` method. Recurses on those objects as well. """ for tup in class_members(obj): tup[1]._parent = obj tup[1]._parent_inst = None tup[1].__getattr__ = my_getattr flag_inner_classes(tup[1])
[ "Mutates", "any", "attributes", "on", "obj", "which", "are", "classes", "with", "link", "to", "obj", "." ]
bitprophet/spec
python
https://github.com/bitprophet/spec/blob/d9646c5daf8e479937f970d21ebe185ad936a35a/spec/utils.py#L30-L43
[ "def", "flag_inner_classes", "(", "obj", ")", ":", "for", "tup", "in", "class_members", "(", "obj", ")", ":", "tup", "[", "1", "]", ".", "_parent", "=", "obj", "tup", "[", "1", "]", ".", "_parent_inst", "=", "None", "tup", "[", "1", "]", ".", "__getattr__", "=", "my_getattr", "flag_inner_classes", "(", "tup", "[", "1", "]", ")" ]
d9646c5daf8e479937f970d21ebe185ad936a35a
valid
autohide
Automatically hide setup() and teardown() methods, recursively.
spec/utils.py
def autohide(obj): """ Automatically hide setup() and teardown() methods, recursively. """ # Members on obj for name, item in six.iteritems(vars(obj)): if callable(item) and name in ('setup', 'teardown'): item = hide(item) # Recurse into class members for name, subclass in class_members(obj): autohide(subclass)
def autohide(obj): """ Automatically hide setup() and teardown() methods, recursively. """ # Members on obj for name, item in six.iteritems(vars(obj)): if callable(item) and name in ('setup', 'teardown'): item = hide(item) # Recurse into class members for name, subclass in class_members(obj): autohide(subclass)
[ "Automatically", "hide", "setup", "()", "and", "teardown", "()", "methods", "recursively", "." ]
bitprophet/spec
python
https://github.com/bitprophet/spec/blob/d9646c5daf8e479937f970d21ebe185ad936a35a/spec/utils.py#L45-L55
[ "def", "autohide", "(", "obj", ")", ":", "# Members on obj", "for", "name", ",", "item", "in", "six", ".", "iteritems", "(", "vars", "(", "obj", ")", ")", ":", "if", "callable", "(", "item", ")", "and", "name", "in", "(", "'setup'", ",", "'teardown'", ")", ":", "item", "=", "hide", "(", "item", ")", "# Recurse into class members", "for", "name", ",", "subclass", "in", "class_members", "(", "obj", ")", ":", "autohide", "(", "subclass", ")" ]
d9646c5daf8e479937f970d21ebe185ad936a35a
valid
trap
Replace sys.std(out|err) with a wrapper during execution, restored after. In addition, a new combined-streams output (another wrapper) will appear at ``sys.stdall``. This stream will resemble what a user sees at a terminal, i.e. both out/err streams intermingled.
spec/trap.py
def trap(func): """ Replace sys.std(out|err) with a wrapper during execution, restored after. In addition, a new combined-streams output (another wrapper) will appear at ``sys.stdall``. This stream will resemble what a user sees at a terminal, i.e. both out/err streams intermingled. """ @wraps(func) def wrapper(*args, **kwargs): # Use another CarbonCopy even though we're not cc'ing; for our "write # bytes, return strings on py3" behavior. Meh. sys.stdall = CarbonCopy() my_stdout, sys.stdout = sys.stdout, CarbonCopy(cc=sys.stdall) my_stderr, sys.stderr = sys.stderr, CarbonCopy(cc=sys.stdall) try: return func(*args, **kwargs) finally: sys.stdout = my_stdout sys.stderr = my_stderr del sys.stdall return wrapper
def trap(func): """ Replace sys.std(out|err) with a wrapper during execution, restored after. In addition, a new combined-streams output (another wrapper) will appear at ``sys.stdall``. This stream will resemble what a user sees at a terminal, i.e. both out/err streams intermingled. """ @wraps(func) def wrapper(*args, **kwargs): # Use another CarbonCopy even though we're not cc'ing; for our "write # bytes, return strings on py3" behavior. Meh. sys.stdall = CarbonCopy() my_stdout, sys.stdout = sys.stdout, CarbonCopy(cc=sys.stdall) my_stderr, sys.stderr = sys.stderr, CarbonCopy(cc=sys.stdall) try: return func(*args, **kwargs) finally: sys.stdout = my_stdout sys.stderr = my_stderr del sys.stdall return wrapper
[ "Replace", "sys", ".", "std", "(", "out|err", ")", "with", "a", "wrapper", "during", "execution", "restored", "after", "." ]
bitprophet/spec
python
https://github.com/bitprophet/spec/blob/d9646c5daf8e479937f970d21ebe185ad936a35a/spec/trap.py#L60-L81
[ "def", "trap", "(", "func", ")", ":", "@", "wraps", "(", "func", ")", "def", "wrapper", "(", "*", "args", ",", "*", "*", "kwargs", ")", ":", "# Use another CarbonCopy even though we're not cc'ing; for our \"write", "# bytes, return strings on py3\" behavior. Meh.", "sys", ".", "stdall", "=", "CarbonCopy", "(", ")", "my_stdout", ",", "sys", ".", "stdout", "=", "sys", ".", "stdout", ",", "CarbonCopy", "(", "cc", "=", "sys", ".", "stdall", ")", "my_stderr", ",", "sys", ".", "stderr", "=", "sys", ".", "stderr", ",", "CarbonCopy", "(", "cc", "=", "sys", ".", "stdall", ")", "try", ":", "return", "func", "(", "*", "args", ",", "*", "*", "kwargs", ")", "finally", ":", "sys", ".", "stdout", "=", "my_stdout", "sys", ".", "stderr", "=", "my_stderr", "del", "sys", ".", "stdall", "return", "wrapper" ]
d9646c5daf8e479937f970d21ebe185ad936a35a
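A self-contained version of the same capture pattern, using io.StringIO in place of the package's CarbonCopy wrapper (so no combined sys.stdall stream here):

import io
import sys
from functools import wraps

def capture_stdout(func):
    @wraps(func)
    def wrapper(*args, **kwargs):
        real, sys.stdout = sys.stdout, io.StringIO()
        try:
            result = func(*args, **kwargs)
            return result, sys.stdout.getvalue()
        finally:
            sys.stdout = real          # always restore the real stream
    return wrapper

@capture_stdout
def greet():
    print("hello")

print(greet())   # (None, 'hello\n')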
valid
pvpc_url_dia
Obtiene la url de descarga de los datos de PVPC de un día concreto. Anteriormente era: 'http://www.esios.ree.es/Solicitar?fileName=pvpcdesglosehorario_' + str_dia + '&fileType=xml&idioma=es', pero ahora es en JSON y requiere token_auth en headers.
esiosdata/importpvpcdata.py
def pvpc_url_dia(dt_day): """Obtiene la url de descarga de los datos de PVPC de un día concreto. Anteriormente era: 'http://www.esios.ree.es/Solicitar?fileName=pvpcdesglosehorario_' + str_dia + '&fileType=xml&idioma=es', pero ahora es en JSON y requiere token_auth en headers. """ if type(dt_day) is str: return SERVER + '/archives/70/download_json?locale=es' + '&date=' + dt_day else: return SERVER + '/archives/70/download_json?locale=es' + '&date=' + dt_day.date().isoformat()
def pvpc_url_dia(dt_day): """Obtiene la url de descarga de los datos de PVPC de un día concreto. Anteriormente era: 'http://www.esios.ree.es/Solicitar?fileName=pvpcdesglosehorario_' + str_dia + '&fileType=xml&idioma=es', pero ahora es en JSON y requiere token_auth en headers. """ if type(dt_day) is str: return SERVER + '/archives/70/download_json?locale=es' + '&date=' + dt_day else: return SERVER + '/archives/70/download_json?locale=es' + '&date=' + dt_day.date().isoformat()
[ "Obtiene", "la", "url", "de", "descarga", "de", "los", "datos", "de", "PVPC", "de", "un", "día", "concreto", "." ]
azogue/esiosdata
python
https://github.com/azogue/esiosdata/blob/680c7918955bc6ceee5bded92b3a4485f5ea8151/esiosdata/importpvpcdata.py#L36-L45
[ "def", "pvpc_url_dia", "(", "dt_day", ")", ":", "if", "type", "(", "dt_day", ")", "is", "str", ":", "return", "SERVER", "+", "'/archives/70/download_json?locale=es'", "+", "'&date='", "+", "dt_day", "else", ":", "return", "SERVER", "+", "'/archives/70/download_json?locale=es'", "+", "'&date='", "+", "dt_day", ".", "date", "(", ")", ".", "isoformat", "(", ")" ]
680c7918955bc6ceee5bded92b3a4485f5ea8151
valid
pvpc_calc_tcu_cp_feu_d
Procesa TCU, CP, FEU diario. :param df: :param verbose: :param convert_kwh: :return:
esiosdata/importpvpcdata.py
def pvpc_calc_tcu_cp_feu_d(df, verbose=True, convert_kwh=True): """Procesa TCU, CP, FEU diario. :param df: :param verbose: :param convert_kwh: :return: """ if 'TCU' + TARIFAS[0] not in df.columns: # Pasa de €/MWh a €/kWh: if convert_kwh: cols_mwh = [c + t for c in COLS_PVPC for t in TARIFAS if c != 'COF'] df[cols_mwh] = df[cols_mwh].applymap(lambda x: x / 1000.) # Obtiene columnas TCU, CP, precio día gb_t = df.groupby(lambda x: TARIFAS[np.argmax([t in x for t in TARIFAS])], axis=1) for k, g in gb_t: if verbose: print('TARIFA {}'.format(k)) print(g.head()) # Cálculo de TCU df['TCU{}'.format(k)] = g[k] - g['TEU{}'.format(k)] # Cálculo de CP # cols_cp = [c + k for c in ['FOS', 'FOM', 'INT', 'PCAP', 'PMH', 'SAH']] cols_cp = [c + k for c in COLS_PVPC if c not in ['', 'COF', 'TEU']] df['CP{}'.format(k)] = g[cols_cp].sum(axis=1) # Cálculo de PERD --> No es posible así, ya que los valores base ya vienen con PERD # dfs_pvpc[k]['PERD{}'.format(k)] = dfs_pvpc[k]['TCU{}'.format(k)] / dfs_pvpc[k]['CP{}'.format(k)] # dfs_pvpc[k]['PERD{}'.format(k)] = dfs_pvpc[k]['INT{}'.format(k)] / 1.92 # Cálculo de FEU diario cols_k = ['TEU' + k, 'TCU' + k, 'COF' + k] g = df[cols_k].groupby('TEU' + k) pr = g.apply(lambda x: x['TCU' + k].dot(x['COF' + k]) / x['COF' + k].sum()) pr.name = 'PD_' + k df = df.join(pr, on='TEU' + k, rsuffix='_r') df['PD_' + k] += df['TEU' + k] return df
def pvpc_calc_tcu_cp_feu_d(df, verbose=True, convert_kwh=True): """Procesa TCU, CP, FEU diario. :param df: :param verbose: :param convert_kwh: :return: """ if 'TCU' + TARIFAS[0] not in df.columns: # Pasa de €/MWh a €/kWh: if convert_kwh: cols_mwh = [c + t for c in COLS_PVPC for t in TARIFAS if c != 'COF'] df[cols_mwh] = df[cols_mwh].applymap(lambda x: x / 1000.) # Obtiene columnas TCU, CP, precio día gb_t = df.groupby(lambda x: TARIFAS[np.argmax([t in x for t in TARIFAS])], axis=1) for k, g in gb_t: if verbose: print('TARIFA {}'.format(k)) print(g.head()) # Cálculo de TCU df['TCU{}'.format(k)] = g[k] - g['TEU{}'.format(k)] # Cálculo de CP # cols_cp = [c + k for c in ['FOS', 'FOM', 'INT', 'PCAP', 'PMH', 'SAH']] cols_cp = [c + k for c in COLS_PVPC if c not in ['', 'COF', 'TEU']] df['CP{}'.format(k)] = g[cols_cp].sum(axis=1) # Cálculo de PERD --> No es posible así, ya que los valores base ya vienen con PERD # dfs_pvpc[k]['PERD{}'.format(k)] = dfs_pvpc[k]['TCU{}'.format(k)] / dfs_pvpc[k]['CP{}'.format(k)] # dfs_pvpc[k]['PERD{}'.format(k)] = dfs_pvpc[k]['INT{}'.format(k)] / 1.92 # Cálculo de FEU diario cols_k = ['TEU' + k, 'TCU' + k, 'COF' + k] g = df[cols_k].groupby('TEU' + k) pr = g.apply(lambda x: x['TCU' + k].dot(x['COF' + k]) / x['COF' + k].sum()) pr.name = 'PD_' + k df = df.join(pr, on='TEU' + k, rsuffix='_r') df['PD_' + k] += df['TEU' + k] return df
[ "Procesa", "TCU", "CP", "FEU", "diario", "." ]
azogue/esiosdata
python
https://github.com/azogue/esiosdata/blob/680c7918955bc6ceee5bded92b3a4485f5ea8151/esiosdata/importpvpcdata.py#L48-L87
[ "def", "pvpc_calc_tcu_cp_feu_d", "(", "df", ",", "verbose", "=", "True", ",", "convert_kwh", "=", "True", ")", ":", "if", "'TCU'", "+", "TARIFAS", "[", "0", "]", "not", "in", "df", ".", "columns", ":", "# Pasa de €/MWh a €/kWh:", "if", "convert_kwh", ":", "cols_mwh", "=", "[", "c", "+", "t", "for", "c", "in", "COLS_PVPC", "for", "t", "in", "TARIFAS", "if", "c", "!=", "'COF'", "]", "df", "[", "cols_mwh", "]", "=", "df", "[", "cols_mwh", "]", ".", "applymap", "(", "lambda", "x", ":", "x", "/", "1000.", ")", "# Obtiene columnas TCU, CP, precio día", "gb_t", "=", "df", ".", "groupby", "(", "lambda", "x", ":", "TARIFAS", "[", "np", ".", "argmax", "(", "[", "t", "in", "x", "for", "t", "in", "TARIFAS", "]", ")", "]", ",", "axis", "=", "1", ")", "for", "k", ",", "g", "in", "gb_t", ":", "if", "verbose", ":", "print", "(", "'TARIFA {}'", ".", "format", "(", "k", ")", ")", "print", "(", "g", ".", "head", "(", ")", ")", "# Cálculo de TCU", "df", "[", "'TCU{}'", ".", "format", "(", "k", ")", "]", "=", "g", "[", "k", "]", "-", "g", "[", "'TEU{}'", ".", "format", "(", "k", ")", "]", "# Cálculo de CP", "# cols_cp = [c + k for c in ['FOS', 'FOM', 'INT', 'PCAP', 'PMH', 'SAH']]", "cols_cp", "=", "[", "c", "+", "k", "for", "c", "in", "COLS_PVPC", "if", "c", "not", "in", "[", "''", ",", "'COF'", ",", "'TEU'", "]", "]", "df", "[", "'CP{}'", ".", "format", "(", "k", ")", "]", "=", "g", "[", "cols_cp", "]", ".", "sum", "(", "axis", "=", "1", ")", "# Cálculo de PERD --> No es posible así, ya que los valores base ya vienen con PERD", "# dfs_pvpc[k]['PERD{}'.format(k)] = dfs_pvpc[k]['TCU{}'.format(k)] / dfs_pvpc[k]['CP{}'.format(k)]", "# dfs_pvpc[k]['PERD{}'.format(k)] = dfs_pvpc[k]['INT{}'.format(k)] / 1.92", "# Cálculo de FEU diario", "cols_k", "=", "[", "'TEU'", "+", "k", ",", "'TCU'", "+", "k", ",", "'COF'", "+", "k", "]", "g", "=", "df", "[", "cols_k", "]", ".", "groupby", "(", "'TEU'", "+", "k", ")", "pr", "=", "g", ".", "apply", "(", "lambda", "x", ":", "x", "[", "'TCU'", "+", "k", "]", ".", "dot", "(", "x", "[", "'COF'", "+", "k", "]", ")", "/", "x", "[", "'COF'", "+", "k", "]", ".", "sum", "(", ")", ")", "pr", ".", "name", "=", "'PD_'", "+", "k", "df", "=", "df", ".", "join", "(", "pr", ",", "on", "=", "'TEU'", "+", "k", ",", "rsuffix", "=", "'_r'", ")", "df", "[", "'PD_'", "+", "k", "]", "+=", "df", "[", "'TEU'", "+", "k", "]", "return", "df" ]
680c7918955bc6ceee5bded92b3a4485f5ea8151
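The daily FEU step above is a COF-weighted average of TCU within each TEU level; a toy frame with invented numbers makes the groupby pattern easier to see:

import pandas as pd

df = pd.DataFrame({
    'TEU': [0.044, 0.044, 0.062, 0.062],   # grouping key (access toll)
    'TCU': [0.05, 0.07, 0.06, 0.10],       # hourly energy cost term
    'COF': [1.0, 3.0, 2.0, 2.0],           # consumption profile weights
})

# Weighted mean of TCU (weights = COF) per TEU group, as in the record above.
feu = df.groupby('TEU').apply(lambda x: x['TCU'].dot(x['COF']) / x['COF'].sum())
print(feu)   # the real function then joins this back onto df and adds TEU on top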
valid
pvpc_procesa_datos_dia
Procesa la información JSON descargada y forma el dataframe de los datos de un día.
esiosdata/importpvpcdata.py
def pvpc_procesa_datos_dia(_, response, verbose=True): """Procesa la información JSON descargada y forma el dataframe de los datos de un día.""" try: d_data = response['PVPC'] df = _process_json_pvpc_hourly_data(pd.DataFrame(d_data)) return df, 0 except Exception as e: if verbose: print('ERROR leyendo información de web: {}'.format(e)) return None, -2
def pvpc_procesa_datos_dia(_, response, verbose=True): """Procesa la información JSON descargada y forma el dataframe de los datos de un día.""" try: d_data = response['PVPC'] df = _process_json_pvpc_hourly_data(pd.DataFrame(d_data)) return df, 0 except Exception as e: if verbose: print('ERROR leyendo información de web: {}'.format(e)) return None, -2
[ "Procesa", "la", "información", "JSON", "descargada", "y", "forma", "el", "dataframe", "de", "los", "datos", "de", "un", "día", "." ]
azogue/esiosdata
python
https://github.com/azogue/esiosdata/blob/680c7918955bc6ceee5bded92b3a4485f5ea8151/esiosdata/importpvpcdata.py#L106-L115
[ "def", "pvpc_procesa_datos_dia", "(", "_", ",", "response", ",", "verbose", "=", "True", ")", ":", "try", ":", "d_data", "=", "response", "[", "'PVPC'", "]", "df", "=", "_process_json_pvpc_hourly_data", "(", "pd", ".", "DataFrame", "(", "d_data", ")", ")", "return", "df", ",", "0", "except", "Exception", "as", "e", ":", "if", "verbose", ":", "print", "(", "'ERROR leyendo información de web: {}'.", "f", "ormat(", "e", ")", ")", "", "return", "None", ",", "-", "2" ]
680c7918955bc6ceee5bded92b3a4485f5ea8151
valid
pvpc_data_dia
Obtiene datos de PVPC en un día concreto o un intervalo, accediendo directamente a la web.
esiosdata/importpvpcdata.py
def pvpc_data_dia(str_dia, str_dia_fin=None): """Obtiene datos de PVPC en un día concreto o un intervalo, accediendo directamente a la web.""" params = {'date_fmt': DATE_FMT, 'usar_multithread': False, 'func_procesa_data_dia': pvpc_procesa_datos_dia, 'func_url_data_dia': pvpc_url_dia, 'data_extra_request': {'json_req': True, 'headers': HEADERS}} if str_dia_fin is not None: params['usar_multithread'] = True data, hay_errores, str_import = get_data_en_intervalo(str_dia, str_dia_fin, **params) else: data, hay_errores, str_import = get_data_en_intervalo(str_dia, str_dia, **params) if not hay_errores: return data else: return str_import
def pvpc_data_dia(str_dia, str_dia_fin=None): """Obtiene datos de PVPC en un día concreto o un intervalo, accediendo directamente a la web.""" params = {'date_fmt': DATE_FMT, 'usar_multithread': False, 'func_procesa_data_dia': pvpc_procesa_datos_dia, 'func_url_data_dia': pvpc_url_dia, 'data_extra_request': {'json_req': True, 'headers': HEADERS}} if str_dia_fin is not None: params['usar_multithread'] = True data, hay_errores, str_import = get_data_en_intervalo(str_dia, str_dia_fin, **params) else: data, hay_errores, str_import = get_data_en_intervalo(str_dia, str_dia, **params) if not hay_errores: return data else: return str_import
[ "Obtiene", "datos", "de", "PVPC", "en", "un", "día", "concreto", "o", "un", "intervalo", "accediendo", "directamente", "a", "la", "web", "." ]
azogue/esiosdata
python
https://github.com/azogue/esiosdata/blob/680c7918955bc6ceee5bded92b3a4485f5ea8151/esiosdata/importpvpcdata.py#L118-L131
[ "def", "pvpc_data_dia", "(", "str_dia", ",", "str_dia_fin", "=", "None", ")", ":", "params", "=", "{", "'date_fmt'", ":", "DATE_FMT", ",", "'usar_multithread'", ":", "False", ",", "'func_procesa_data_dia'", ":", "pvpc_procesa_datos_dia", ",", "'func_url_data_dia'", ":", "pvpc_url_dia", ",", "'data_extra_request'", ":", "{", "'json_req'", ":", "True", ",", "'headers'", ":", "HEADERS", "}", "}", "if", "str_dia_fin", "is", "not", "None", ":", "params", "[", "'usar_multithread'", "]", "=", "True", "data", ",", "hay_errores", ",", "str_import", "=", "get_data_en_intervalo", "(", "str_dia", ",", "str_dia_fin", ",", "*", "*", "params", ")", "else", ":", "data", ",", "hay_errores", ",", "str_import", "=", "get_data_en_intervalo", "(", "str_dia", ",", "str_dia", ",", "*", "*", "params", ")", "if", "not", "hay_errores", ":", "return", "data", "else", ":", "return", "str_import" ]
680c7918955bc6ceee5bded92b3a4485f5ea8151
valid
SplunkLogger._compress
Compress the log message in order to send less bytes to the wire.
splunk_logger/splunk_logger.py
def _compress(self, input_str): """ Compress the log message in order to send less bytes to the wire. """ compressed_bits = cStringIO.StringIO() f = gzip.GzipFile(fileobj=compressed_bits, mode='wb') f.write(input_str) f.close() return compressed_bits.getvalue()
def _compress(self, input_str): """ Compress the log message in order to send less bytes to the wire. """ compressed_bits = cStringIO.StringIO() f = gzip.GzipFile(fileobj=compressed_bits, mode='wb') f.write(input_str) f.close() return compressed_bits.getvalue()
[ "Compress", "the", "log", "message", "in", "order", "to", "send", "less", "bytes", "to", "the", "wire", "." ]
andresriancho/splunk-logger
python
https://github.com/andresriancho/splunk-logger/blob/448d5ba54464fc355786ffb64f11fd6367792381/splunk_logger/splunk_logger.py#L68-L78
[ "def", "_compress", "(", "self", ",", "input_str", ")", ":", "compressed_bits", "=", "cStringIO", ".", "StringIO", "(", ")", "f", "=", "gzip", ".", "GzipFile", "(", "fileobj", "=", "compressed_bits", ",", "mode", "=", "'wb'", ")", "f", ".", "write", "(", "input_str", ")", "f", ".", "close", "(", ")", "return", "compressed_bits", ".", "getvalue", "(", ")" ]
448d5ba54464fc355786ffb64f11fd6367792381
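A Python 3 rendering of the same in-memory gzip trick (the record above targets Python 2, hence cStringIO):

import gzip
import io

def compress(data: bytes) -> bytes:
    buf = io.BytesIO()
    with gzip.GzipFile(fileobj=buf, mode='wb') as f:
        f.write(data)
    return buf.getvalue()

blob = compress(b'{"message": "log line"}')
print(len(blob), gzip.decompress(blob))   # round-trips back to the original bytes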
valid
get_data_coeficientes_perfilado_2017
Extrae la información de las dos hojas del Excel proporcionado por REE con los perfiles iniciales para 2017. :param force_download: Descarga el fichero 'raw' del servidor, en vez de acudir a la copia local. :return: perfiles_2017, coefs_alpha_beta_gamma :rtype: tuple
esiosdata/perfilesconsumopvpc.py
def get_data_coeficientes_perfilado_2017(force_download=False): """Extrae la información de las dos hojas del Excel proporcionado por REE con los perfiles iniciales para 2017. :param force_download: Descarga el fichero 'raw' del servidor, en vez de acudir a la copia local. :return: perfiles_2017, coefs_alpha_beta_gamma :rtype: tuple """ path_perfs = os.path.join(STORAGE_DIR, 'perfiles_consumo_2017.h5') if force_download or not os.path.exists(path_perfs): # Coeficientes de perfilado y demanda de referencia (1ª hoja) cols_sheet1 = ['Mes', 'Día', 'Hora', 'Pa,0m,d,h', 'Pb,0m,d,h', 'Pc,0m,d,h', 'Pd,0m,d,h', 'Demanda de Referencia 2017 (MW)'] perfs_2017 = pd.read_excel(URL_PERFILES_2017, header=None, skiprows=[0, 1], names=cols_sheet1) perfs_2017['ts'] = pd.DatetimeIndex(start='2017-01-01', freq='H', tz=TZ, end='2017-12-31 23:59') perfs_2017 = perfs_2017.set_index('ts').drop(['Mes', 'Día', 'Hora'], axis=1) # Coefs Alfa, Beta, Gamma (2ª hoja): coefs_alpha_beta_gamma = pd.read_excel(URL_PERFILES_2017, sheetname=1) print('Escribiendo perfiles 2017 en disco, en {}'.format(path_perfs)) with pd.HDFStore(path_perfs, 'w') as st: st.put('coefs', coefs_alpha_beta_gamma) st.put('perfiles', perfs_2017) print('HDFStore de tamaño {:.3f} KB'.format(os.path.getsize(path_perfs) / 1000)) else: with pd.HDFStore(path_perfs, 'r') as st: coefs_alpha_beta_gamma = st['coefs'] perfs_2017 = st['perfiles'] return perfs_2017, coefs_alpha_beta_gamma
def get_data_coeficientes_perfilado_2017(force_download=False): """Extrae la información de las dos hojas del Excel proporcionado por REE con los perfiles iniciales para 2017. :param force_download: Descarga el fichero 'raw' del servidor, en vez de acudir a la copia local. :return: perfiles_2017, coefs_alpha_beta_gamma :rtype: tuple """ path_perfs = os.path.join(STORAGE_DIR, 'perfiles_consumo_2017.h5') if force_download or not os.path.exists(path_perfs): # Coeficientes de perfilado y demanda de referencia (1ª hoja) cols_sheet1 = ['Mes', 'Día', 'Hora', 'Pa,0m,d,h', 'Pb,0m,d,h', 'Pc,0m,d,h', 'Pd,0m,d,h', 'Demanda de Referencia 2017 (MW)'] perfs_2017 = pd.read_excel(URL_PERFILES_2017, header=None, skiprows=[0, 1], names=cols_sheet1) perfs_2017['ts'] = pd.DatetimeIndex(start='2017-01-01', freq='H', tz=TZ, end='2017-12-31 23:59') perfs_2017 = perfs_2017.set_index('ts').drop(['Mes', 'Día', 'Hora'], axis=1) # Coefs Alfa, Beta, Gamma (2ª hoja): coefs_alpha_beta_gamma = pd.read_excel(URL_PERFILES_2017, sheetname=1) print('Escribiendo perfiles 2017 en disco, en {}'.format(path_perfs)) with pd.HDFStore(path_perfs, 'w') as st: st.put('coefs', coefs_alpha_beta_gamma) st.put('perfiles', perfs_2017) print('HDFStore de tamaño {:.3f} KB'.format(os.path.getsize(path_perfs) / 1000)) else: with pd.HDFStore(path_perfs, 'r') as st: coefs_alpha_beta_gamma = st['coefs'] perfs_2017 = st['perfiles'] return perfs_2017, coefs_alpha_beta_gamma
[ "Extrae", "la", "información", "de", "las", "dos", "hojas", "del", "Excel", "proporcionado", "por", "REE", "con", "los", "perfiles", "iniciales", "para", "2017", ".", ":", "param", "force_download", ":", "Descarga", "el", "fichero", "raw", "del", "servidor", "en", "vez", "de", "acudir", "a", "la", "copia", "local", ".", ":", "return", ":", "perfiles_2017", "coefs_alpha_beta_gamma", ":", "rtype", ":", "tuple" ]
azogue/esiosdata
python
https://github.com/azogue/esiosdata/blob/680c7918955bc6ceee5bded92b3a4485f5ea8151/esiosdata/perfilesconsumopvpc.py#L44-L72
[ "def", "get_data_coeficientes_perfilado_2017", "(", "force_download", "=", "False", ")", ":", "path_perfs", "=", "os", ".", "path", ".", "join", "(", "STORAGE_DIR", ",", "'perfiles_consumo_2017.h5'", ")", "if", "force_download", "or", "not", "os", ".", "path", ".", "exists", "(", "path_perfs", ")", ":", "# Coeficientes de perfilado y demanda de referencia (1ª hoja)", "cols_sheet1", "=", "[", "'Mes'", ",", "'Día',", " ", "Hora',", "", "'Pa,0m,d,h'", ",", "'Pb,0m,d,h'", ",", "'Pc,0m,d,h'", ",", "'Pd,0m,d,h'", ",", "'Demanda de Referencia 2017 (MW)'", "]", "perfs_2017", "=", "pd", ".", "read_excel", "(", "URL_PERFILES_2017", ",", "header", "=", "None", ",", "skiprows", "=", "[", "0", ",", "1", "]", ",", "names", "=", "cols_sheet1", ")", "perfs_2017", "[", "'ts'", "]", "=", "pd", ".", "DatetimeIndex", "(", "start", "=", "'2017-01-01'", ",", "freq", "=", "'H'", ",", "tz", "=", "TZ", ",", "end", "=", "'2017-12-31 23:59'", ")", "perfs_2017", "=", "perfs_2017", ".", "set_index", "(", "'ts'", ")", ".", "drop", "(", "[", "'Mes'", ",", "'Día',", " ", "Hora']", ",", " ", "xis=", "1", ")", "", "# Coefs Alfa, Beta, Gamma (2ª hoja):", "coefs_alpha_beta_gamma", "=", "pd", ".", "read_excel", "(", "URL_PERFILES_2017", ",", "sheetname", "=", "1", ")", "print", "(", "'Escribiendo perfiles 2017 en disco, en {}'", ".", "format", "(", "path_perfs", ")", ")", "with", "pd", ".", "HDFStore", "(", "path_perfs", ",", "'w'", ")", "as", "st", ":", "st", ".", "put", "(", "'coefs'", ",", "coefs_alpha_beta_gamma", ")", "st", ".", "put", "(", "'perfiles'", ",", "perfs_2017", ")", "print", "(", "'HDFStore de tamaño {:.3f} KB'.", "f", "ormat(", "o", "s.", "p", "ath.", "g", "etsize(", "p", "ath_perfs)", " ", " ", "000)", ")", "", "else", ":", "with", "pd", ".", "HDFStore", "(", "path_perfs", ",", "'r'", ")", "as", "st", ":", "coefs_alpha_beta_gamma", "=", "st", "[", "'coefs'", "]", "perfs_2017", "=", "st", "[", "'perfiles'", "]", "return", "perfs_2017", ",", "coefs_alpha_beta_gamma" ]
680c7918955bc6ceee5bded92b3a4485f5ea8151
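Stripped of the Excel parsing, the function above is a download-once-then-cache pattern around pd.HDFStore; a minimal sketch with placeholder path and key (needs the tables package for HDF5 support):

import os
import pandas as pd

def cached_frame(path, build):
    if not os.path.exists(path):
        df = build()                          # the expensive step (download / parse)
        with pd.HDFStore(path, 'w') as st:
            st.put('perfiles', df)
        return df
    with pd.HDFStore(path, 'r') as st:        # later calls read the local copy
        return st['perfiles']

df = cached_frame('perfiles_demo.h5', lambda: pd.DataFrame({'coef': [0.1, 0.2, 0.3]}))
print(df)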
valid
get_data_perfiles_estimados_2017
Extrae perfiles estimados para 2017 con el formato de los CSV's mensuales con los perfiles definitivos. :param force_download: bool para forzar la descarga del excel de la web de REE. :return: perfiles_2017 :rtype: pd.Dataframe
esiosdata/perfilesconsumopvpc.py
def get_data_perfiles_estimados_2017(force_download=False): """Extrae perfiles estimados para 2017 con el formato de los CSV's mensuales con los perfiles definitivos. :param force_download: bool para forzar la descarga del excel de la web de REE. :return: perfiles_2017 :rtype: pd.Dataframe """ global DATA_PERFILES_2017 if (DATA_PERFILES_2017 is None) or force_download: perf_demref_2017, _ = get_data_coeficientes_perfilado_2017(force_download=force_download) # Conversión de formato de dataframe de perfiles 2017 a finales (para uniformizar): cols_usar = ['Pa,0m,d,h', 'Pb,0m,d,h', 'Pc,0m,d,h', 'Pd,0m,d,h'] perfs_2017 = perf_demref_2017[cols_usar].copy() perfs_2017.columns = ['COEF. PERFIL {}'.format(p) for p in 'ABCD'] DATA_PERFILES_2017 = perfs_2017 return perfs_2017 return DATA_PERFILES_2017
def get_data_perfiles_estimados_2017(force_download=False): """Extrae perfiles estimados para 2017 con el formato de los CSV's mensuales con los perfiles definitivos. :param force_download: bool para forzar la descarga del excel de la web de REE. :return: perfiles_2017 :rtype: pd.Dataframe """ global DATA_PERFILES_2017 if (DATA_PERFILES_2017 is None) or force_download: perf_demref_2017, _ = get_data_coeficientes_perfilado_2017(force_download=force_download) # Conversión de formato de dataframe de perfiles 2017 a finales (para uniformizar): cols_usar = ['Pa,0m,d,h', 'Pb,0m,d,h', 'Pc,0m,d,h', 'Pd,0m,d,h'] perfs_2017 = perf_demref_2017[cols_usar].copy() perfs_2017.columns = ['COEF. PERFIL {}'.format(p) for p in 'ABCD'] DATA_PERFILES_2017 = perfs_2017 return perfs_2017 return DATA_PERFILES_2017
[ "Extrae", "perfiles", "estimados", "para", "2017", "con", "el", "formato", "de", "los", "CSV", "s", "mensuales", "con", "los", "perfiles", "definitivos", ".", ":", "param", "force_download", ":", "bool", "para", "forzar", "la", "descarga", "del", "excel", "de", "la", "web", "de", "REE", ".", ":", "return", ":", "perfiles_2017", ":", "rtype", ":", "pd", ".", "Dataframe" ]
azogue/esiosdata
python
https://github.com/azogue/esiosdata/blob/680c7918955bc6ceee5bded92b3a4485f5ea8151/esiosdata/perfilesconsumopvpc.py#L75-L90
[ "def", "get_data_perfiles_estimados_2017", "(", "force_download", "=", "False", ")", ":", "global", "DATA_PERFILES_2017", "if", "(", "DATA_PERFILES_2017", "is", "None", ")", "or", "force_download", ":", "perf_demref_2017", ",", "_", "=", "get_data_coeficientes_perfilado_2017", "(", "force_download", "=", "force_download", ")", "# Conversión de formato de dataframe de perfiles 2017 a finales (para uniformizar):", "cols_usar", "=", "[", "'Pa,0m,d,h'", ",", "'Pb,0m,d,h'", ",", "'Pc,0m,d,h'", ",", "'Pd,0m,d,h'", "]", "perfs_2017", "=", "perf_demref_2017", "[", "cols_usar", "]", ".", "copy", "(", ")", "perfs_2017", ".", "columns", "=", "[", "'COEF. PERFIL {}'", ".", "format", "(", "p", ")", "for", "p", "in", "'ABCD'", "]", "DATA_PERFILES_2017", "=", "perfs_2017", "return", "perfs_2017", "return", "DATA_PERFILES_2017" ]
680c7918955bc6ceee5bded92b3a4485f5ea8151
valid
main_cli
Actualiza la base de datos de PVPC/DEMANDA almacenados como dataframe en local, creando una nueva si no existe o hubiere algún problema. Los datos registrados se guardan en HDF5
esiosdata/__main__.py
def main_cli(): """ Actualiza la base de datos de PVPC/DEMANDA almacenados como dataframe en local, creando una nueva si no existe o hubiere algún problema. Los datos registrados se guardan en HDF5 """ def _get_parser_args(): p = argparse.ArgumentParser(description='Gestor de DB de PVPC/DEMANDA (esios.ree.es)') p.add_argument('-d', '--dem', action='store_true', help='Selecciona BD de demanda (BD de PVPC por defecto)') p.add_argument('-i', '--info', action='store', nargs='*', help="Muestra información de la BD seleccionada. " "* Puede usar intervalos temporales y nombres de columnas, " "como '-i gen noc 2017-01-24 2017-01-26'") p.add_argument('-fu', '-FU', '--forceupdate', action='store_true', help="Fuerza la reconstrucción total de la BD seleccionada") p.add_argument('-u', '-U', '--update', action='store_true', help="Actualiza la información de la BD seleccionada hasta el instante actual") p.add_argument('-p', '--plot', action='store_true', help="Genera plots de la información filtrada de la BD") p.add_argument('-v', '--verbose', action='store_true', help='Muestra información extra') arguments = p.parse_args() return arguments, p def _parse_date(string, columns): try: ts = pd.Timestamp(string) print_cyan('{} es timestamp: {:%c} --> {}'.format(string, ts, ts.date())) columns.remove(string) return ts.date().isoformat() except ValueError: pass args, parser = _get_parser_args() print_secc('ESIOS PVPC/DEMANDA') if args.dem: db_web = DatosREE(update=args.update, force_update=args.forceupdate, verbose=args.verbose) else: db_web = PVPC(update=args.update, force_update=args.forceupdate, verbose=args.verbose) data = db_web.data['data'] if args.info is not None: if len(args.info) > 0: cols = args.info.copy() dates = [d for d in [_parse_date(s, cols) for s in args.info] if d] if len(dates) == 2: data = data.loc[dates[0]:dates[1]] elif len(dates) == 1: data = data.loc[dates[0]] if len(cols) > 0: try: data = data[[c.upper() for c in cols]] except KeyError as e: print_red('NO SE PUEDE FILTRAR LA COLUMNA (Exception: {})\nLAS COLUMNAS DISPONIBLES SON:\n{}' .format(e, data.columns)) print_info(data) else: print_secc('LAST 24h in DB:') print_info(data.iloc[-24:]) print_cyan(data.columns) if args.plot: if args.dem: from esiosdata.pvpcplot import pvpcplot_tarifas_hora, pvpcplot_grid_hora print_red('IMPLEMENTAR PLOTS DEM') else: from esiosdata.pvpcplot import pvpcplot_tarifas_hora, pvpcplot_grid_hora if len(data) < 750: pvpcplot_grid_hora(data) # pvpcplot_tarifas_hora(data) else: print_red('La selección para plot es excesiva: {} samples de {} a {}\nSe hace plot de las últimas 24h'. format(len(data), data.index[0], data.index[-1])) pvpcplot_grid_hora(db_web.data['data'].iloc[-24:]) pvpcplot_tarifas_hora(db_web.data['data'].iloc[-24:])
def main_cli(): """ Actualiza la base de datos de PVPC/DEMANDA almacenados como dataframe en local, creando una nueva si no existe o hubiere algún problema. Los datos registrados se guardan en HDF5 """ def _get_parser_args(): p = argparse.ArgumentParser(description='Gestor de DB de PVPC/DEMANDA (esios.ree.es)') p.add_argument('-d', '--dem', action='store_true', help='Selecciona BD de demanda (BD de PVPC por defecto)') p.add_argument('-i', '--info', action='store', nargs='*', help="Muestra información de la BD seleccionada. " "* Puede usar intervalos temporales y nombres de columnas, " "como '-i gen noc 2017-01-24 2017-01-26'") p.add_argument('-fu', '-FU', '--forceupdate', action='store_true', help="Fuerza la reconstrucción total de la BD seleccionada") p.add_argument('-u', '-U', '--update', action='store_true', help="Actualiza la información de la BD seleccionada hasta el instante actual") p.add_argument('-p', '--plot', action='store_true', help="Genera plots de la información filtrada de la BD") p.add_argument('-v', '--verbose', action='store_true', help='Muestra información extra') arguments = p.parse_args() return arguments, p def _parse_date(string, columns): try: ts = pd.Timestamp(string) print_cyan('{} es timestamp: {:%c} --> {}'.format(string, ts, ts.date())) columns.remove(string) return ts.date().isoformat() except ValueError: pass args, parser = _get_parser_args() print_secc('ESIOS PVPC/DEMANDA') if args.dem: db_web = DatosREE(update=args.update, force_update=args.forceupdate, verbose=args.verbose) else: db_web = PVPC(update=args.update, force_update=args.forceupdate, verbose=args.verbose) data = db_web.data['data'] if args.info is not None: if len(args.info) > 0: cols = args.info.copy() dates = [d for d in [_parse_date(s, cols) for s in args.info] if d] if len(dates) == 2: data = data.loc[dates[0]:dates[1]] elif len(dates) == 1: data = data.loc[dates[0]] if len(cols) > 0: try: data = data[[c.upper() for c in cols]] except KeyError as e: print_red('NO SE PUEDE FILTRAR LA COLUMNA (Exception: {})\nLAS COLUMNAS DISPONIBLES SON:\n{}' .format(e, data.columns)) print_info(data) else: print_secc('LAST 24h in DB:') print_info(data.iloc[-24:]) print_cyan(data.columns) if args.plot: if args.dem: from esiosdata.pvpcplot import pvpcplot_tarifas_hora, pvpcplot_grid_hora print_red('IMPLEMENTAR PLOTS DEM') else: from esiosdata.pvpcplot import pvpcplot_tarifas_hora, pvpcplot_grid_hora if len(data) < 750: pvpcplot_grid_hora(data) # pvpcplot_tarifas_hora(data) else: print_red('La selección para plot es excesiva: {} samples de {} a {}\nSe hace plot de las últimas 24h'. format(len(data), data.index[0], data.index[-1])) pvpcplot_grid_hora(db_web.data['data'].iloc[-24:]) pvpcplot_tarifas_hora(db_web.data['data'].iloc[-24:])
[ "Actualiza", "la", "base", "de", "datos", "de", "PVPC", "/", "DEMANDA", "almacenados", "como", "dataframe", "en", "local", "creando", "una", "nueva", "si", "no", "existe", "o", "hubiere", "algún", "problema", ".", "Los", "datos", "registrados", "se", "guardan", "en", "HDF5" ]
azogue/esiosdata
python
https://github.com/azogue/esiosdata/blob/680c7918955bc6ceee5bded92b3a4485f5ea8151/esiosdata/__main__.py#L26-L96
[ "def", "main_cli", "(", ")", ":", "def", "_get_parser_args", "(", ")", ":", "p", "=", "argparse", ".", "ArgumentParser", "(", "description", "=", "'Gestor de DB de PVPC/DEMANDA (esios.ree.es)'", ")", "p", ".", "add_argument", "(", "'-d'", ",", "'--dem'", ",", "action", "=", "'store_true'", ",", "help", "=", "'Selecciona BD de demanda (BD de PVPC por defecto)'", ")", "p", ".", "add_argument", "(", "'-i'", ",", "'--info'", ",", "action", "=", "'store'", ",", "nargs", "=", "'*'", ",", "help", "=", "\"Muestra información de la BD seleccionada. \"", "\"* Puede usar intervalos temporales y nombres de columnas, \"", "\"como '-i gen noc 2017-01-24 2017-01-26'\"", ")", "p", ".", "add_argument", "(", "'-fu'", ",", "'-FU'", ",", "'--forceupdate'", ",", "action", "=", "'store_true'", ",", "help", "=", "\"Fuerza la reconstrucción total de la BD seleccionada\")", "", "p", ".", "add_argument", "(", "'-u'", ",", "'-U'", ",", "'--update'", ",", "action", "=", "'store_true'", ",", "help", "=", "\"Actualiza la información de la BD seleccionada hasta el instante actual\")", "", "p", ".", "add_argument", "(", "'-p'", ",", "'--plot'", ",", "action", "=", "'store_true'", ",", "help", "=", "\"Genera plots de la información filtrada de la BD\")", "", "p", ".", "add_argument", "(", "'-v'", ",", "'--verbose'", ",", "action", "=", "'store_true'", ",", "help", "=", "'Muestra información extra')", "", "arguments", "=", "p", ".", "parse_args", "(", ")", "return", "arguments", ",", "p", "def", "_parse_date", "(", "string", ",", "columns", ")", ":", "try", ":", "ts", "=", "pd", ".", "Timestamp", "(", "string", ")", "print_cyan", "(", "'{} es timestamp: {:%c} --> {}'", ".", "format", "(", "string", ",", "ts", ",", "ts", ".", "date", "(", ")", ")", ")", "columns", ".", "remove", "(", "string", ")", "return", "ts", ".", "date", "(", ")", ".", "isoformat", "(", ")", "except", "ValueError", ":", "pass", "args", ",", "parser", "=", "_get_parser_args", "(", ")", "print_secc", "(", "'ESIOS PVPC/DEMANDA'", ")", "if", "args", ".", "dem", ":", "db_web", "=", "DatosREE", "(", "update", "=", "args", ".", "update", ",", "force_update", "=", "args", ".", "forceupdate", ",", "verbose", "=", "args", ".", "verbose", ")", "else", ":", "db_web", "=", "PVPC", "(", "update", "=", "args", ".", "update", ",", "force_update", "=", "args", ".", "forceupdate", ",", "verbose", "=", "args", ".", "verbose", ")", "data", "=", "db_web", ".", "data", "[", "'data'", "]", "if", "args", ".", "info", "is", "not", "None", ":", "if", "len", "(", "args", ".", "info", ")", ">", "0", ":", "cols", "=", "args", ".", "info", ".", "copy", "(", ")", "dates", "=", "[", "d", "for", "d", "in", "[", "_parse_date", "(", "s", ",", "cols", ")", "for", "s", "in", "args", ".", "info", "]", "if", "d", "]", "if", "len", "(", "dates", ")", "==", "2", ":", "data", "=", "data", ".", "loc", "[", "dates", "[", "0", "]", ":", "dates", "[", "1", "]", "]", "elif", "len", "(", "dates", ")", "==", "1", ":", "data", "=", "data", ".", "loc", "[", "dates", "[", "0", "]", "]", "if", "len", "(", "cols", ")", ">", "0", ":", "try", ":", "data", "=", "data", "[", "[", "c", ".", "upper", "(", ")", "for", "c", "in", "cols", "]", "]", "except", "KeyError", "as", "e", ":", "print_red", "(", "'NO SE PUEDE FILTRAR LA COLUMNA (Exception: {})\\nLAS COLUMNAS DISPONIBLES SON:\\n{}'", ".", "format", "(", "e", ",", "data", ".", "columns", ")", ")", "print_info", "(", "data", ")", "else", ":", "print_secc", "(", "'LAST 24h in DB:'", ")", "print_info", "(", "data", ".", "iloc", "[", "-", "24", ":", "]", ")", 
"print_cyan", "(", "data", ".", "columns", ")", "if", "args", ".", "plot", ":", "if", "args", ".", "dem", ":", "from", "esiosdata", ".", "pvpcplot", "import", "pvpcplot_tarifas_hora", ",", "pvpcplot_grid_hora", "print_red", "(", "'IMPLEMENTAR PLOTS DEM'", ")", "else", ":", "from", "esiosdata", ".", "pvpcplot", "import", "pvpcplot_tarifas_hora", ",", "pvpcplot_grid_hora", "if", "len", "(", "data", ")", "<", "750", ":", "pvpcplot_grid_hora", "(", "data", ")", "# pvpcplot_tarifas_hora(data)", "else", ":", "print_red", "(", "'La selección para plot es excesiva: {} samples de {} a {}\\nSe hace plot de las últimas 24h'.", "", "format", "(", "len", "(", "data", ")", ",", "data", ".", "index", "[", "0", "]", ",", "data", ".", "index", "[", "-", "1", "]", ")", ")", "pvpcplot_grid_hora", "(", "db_web", ".", "data", "[", "'data'", "]", ".", "iloc", "[", "-", "24", ":", "]", ")", "pvpcplot_tarifas_hora", "(", "db_web", ".", "data", "[", "'data'", "]", ".", "iloc", "[", "-", "24", ":", "]", ")" ]
680c7918955bc6ceee5bded92b3a4485f5ea8151
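The _parse_date trick inside main_cli — anything pd.Timestamp can parse is treated as a date filter, the rest stay as column names — can be tried on its own; 'gen' and 'noc' are the sample column arguments from the help text:

import pandas as pd

def split_dates_and_columns(tokens):
    dates, columns = [], []
    for tok in tokens:
        try:
            dates.append(pd.Timestamp(tok).date().isoformat())
        except ValueError:
            columns.append(tok)      # not a date, keep it as a column name
    return dates, columns

print(split_dates_and_columns(['gen', 'noc', '2017-01-24', '2017-01-26']))
# (['2017-01-24', '2017-01-26'], ['gen', 'noc'])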
valid
SpecSelector.registerGoodClass
Internal bookkeeping to handle nested classes
spec/cli.py
def registerGoodClass(self, class_): """ Internal bookkeeping to handle nested classes """ # Class itself added to "good" list self._valid_classes.append(class_) # Recurse into any inner classes for name, cls in class_members(class_): if self.isValidClass(cls): self.registerGoodClass(cls)
def registerGoodClass(self, class_): """ Internal bookkeeping to handle nested classes """ # Class itself added to "good" list self._valid_classes.append(class_) # Recurse into any inner classes for name, cls in class_members(class_): if self.isValidClass(cls): self.registerGoodClass(cls)
[ "Internal", "bookkeeping", "to", "handle", "nested", "classes" ]
bitprophet/spec
python
https://github.com/bitprophet/spec/blob/d9646c5daf8e479937f970d21ebe185ad936a35a/spec/cli.py#L56-L65
[ "def", "registerGoodClass", "(", "self", ",", "class_", ")", ":", "# Class itself added to \"good\" list", "self", ".", "_valid_classes", ".", "append", "(", "class_", ")", "# Recurse into any inner classes", "for", "name", ",", "cls", "in", "class_members", "(", "class_", ")", ":", "if", "self", ".", "isValidClass", "(", "cls", ")", ":", "self", ".", "registerGoodClass", "(", "cls", ")" ]
d9646c5daf8e479937f970d21ebe185ad936a35a
valid
SpecSelector.isValidClass
Needs to be its own method so it can be called from both wantClass and registerGoodClass.
spec/cli.py
def isValidClass(self, class_): """ Needs to be its own method so it can be called from both wantClass and registerGoodClass. """ module = inspect.getmodule(class_) valid = ( module in self._valid_modules or ( hasattr(module, '__file__') and module.__file__ in self._valid_named_modules ) ) return valid and not private(class_)
def isValidClass(self, class_): """ Needs to be its own method so it can be called from both wantClass and registerGoodClass. """ module = inspect.getmodule(class_) valid = ( module in self._valid_modules or ( hasattr(module, '__file__') and module.__file__ in self._valid_named_modules ) ) return valid and not private(class_)
[ "Needs", "to", "be", "its", "own", "method", "so", "it", "can", "be", "called", "from", "both", "wantClass", "and", "registerGoodClass", "." ]
bitprophet/spec
python
https://github.com/bitprophet/spec/blob/d9646c5daf8e479937f970d21ebe185ad936a35a/spec/cli.py#L67-L80
[ "def", "isValidClass", "(", "self", ",", "class_", ")", ":", "module", "=", "inspect", ".", "getmodule", "(", "class_", ")", "valid", "=", "(", "module", "in", "self", ".", "_valid_modules", "or", "(", "hasattr", "(", "module", ",", "'__file__'", ")", "and", "module", ".", "__file__", "in", "self", ".", "_valid_named_modules", ")", ")", "return", "valid", "and", "not", "private", "(", "class_", ")" ]
d9646c5daf8e479937f970d21ebe185ad936a35a
valid
PVPC.procesa_data_dia
Procesa los datos descargados correspondientes a un día `key_dia`.
esiosdata/classdataesios.py
def procesa_data_dia(self, key_dia, datos_para_procesar): """Procesa los datos descargados correspondientes a un día `key_dia`.""" return pvpc_procesa_datos_dia(key_dia, datos_para_procesar, verbose=self.verbose)
def procesa_data_dia(self, key_dia, datos_para_procesar): """Procesa los datos descargados correspondientes a un día `key_dia`.""" return pvpc_procesa_datos_dia(key_dia, datos_para_procesar, verbose=self.verbose)
[ "Procesa", "los", "datos", "descargados", "correspondientes", "a", "un", "día", "key_dia", "." ]
azogue/esiosdata
python
https://github.com/azogue/esiosdata/blob/680c7918955bc6ceee5bded92b3a4485f5ea8151/esiosdata/classdataesios.py#L59-L61
[ "def", "procesa_data_dia", "(", "self", ",", "key_dia", ",", "datos_para_procesar", ")", ":", "return", "pvpc_procesa_datos_dia", "(", "key_dia", ",", "datos_para_procesar", ",", "verbose", "=", "self", ".", "verbose", ")" ]
680c7918955bc6ceee5bded92b3a4485f5ea8151
valid
PVPC.get_resample_data
Obtiene los dataframes de los datos de PVPC con resampling diario y mensual.
esiosdata/classdataesios.py
def get_resample_data(self): """Obtiene los dataframes de los datos de PVPC con resampling diario y mensual.""" if self.data is not None: if self._pvpc_mean_daily is None: self._pvpc_mean_daily = self.data['data'].resample('D').mean() if self._pvpc_mean_monthly is None: self._pvpc_mean_monthly = self.data['data'].resample('MS').mean() return self._pvpc_mean_daily, self._pvpc_mean_monthly
def get_resample_data(self): """Obtiene los dataframes de los datos de PVPC con resampling diario y mensual.""" if self.data is not None: if self._pvpc_mean_daily is None: self._pvpc_mean_daily = self.data['data'].resample('D').mean() if self._pvpc_mean_monthly is None: self._pvpc_mean_monthly = self.data['data'].resample('MS').mean() return self._pvpc_mean_daily, self._pvpc_mean_monthly
[ "Obtiene", "los", "dataframes", "de", "los", "datos", "de", "PVPC", "con", "resampling", "diario", "y", "mensual", "." ]
azogue/esiosdata
python
https://github.com/azogue/esiosdata/blob/680c7918955bc6ceee5bded92b3a4485f5ea8151/esiosdata/classdataesios.py#L63-L70
[ "def", "get_resample_data", "(", "self", ")", ":", "if", "self", ".", "data", "is", "not", "None", ":", "if", "self", ".", "_pvpc_mean_daily", "is", "None", ":", "self", ".", "_pvpc_mean_daily", "=", "self", ".", "data", "[", "'data'", "]", ".", "resample", "(", "'D'", ")", ".", "mean", "(", ")", "if", "self", ".", "_pvpc_mean_monthly", "is", "None", ":", "self", ".", "_pvpc_mean_monthly", "=", "self", ".", "data", "[", "'data'", "]", ".", "resample", "(", "'MS'", ")", ".", "mean", "(", ")", "return", "self", ".", "_pvpc_mean_daily", ",", "self", ".", "_pvpc_mean_monthly" ]
680c7918955bc6ceee5bded92b3a4485f5ea8151
valid
DatosREE.last_entry
Definición específica para filtrar por datos de demanda energética (pues los datos se extienden más allá del tiempo presente debido a las columnas de potencia prevista y programada. :param data_revisar: (OPC) Se puede pasar un dataframe específico :param key_revisar: (OPC) Normalmente, para utilizar 'dem' :return: tmax, num_entradas
esiosdata/classdataesios.py
def last_entry(self, data_revisar=None, key_revisar=None): """ Definición específica para filtrar por datos de demanda energética (pues los datos se extienden más allá del tiempo presente debido a las columnas de potencia prevista y programada. :param data_revisar: (OPC) Se puede pasar un dataframe específico :param key_revisar: (OPC) Normalmente, para utilizar 'dem' :return: tmax, num_entradas """ if data_revisar is None and key_revisar is None: data_revisar = self.data[self.masterkey][pd.notnull(self.data[self.masterkey]['dem'])] super(DatosREE, self).printif('Últimos valores de generación y demanda:', 'info') super(DatosREE, self).printif(data_revisar.tail(), 'info') return super(DatosREE, self).last_entry(data_revisar, 'dem') else: return super(DatosREE, self).last_entry(data_revisar, key_revisar)
def last_entry(self, data_revisar=None, key_revisar=None): """ Definición específica para filtrar por datos de demanda energética (pues los datos se extienden más allá del tiempo presente debido a las columnas de potencia prevista y programada. :param data_revisar: (OPC) Se puede pasar un dataframe específico :param key_revisar: (OPC) Normalmente, para utilizar 'dem' :return: tmax, num_entradas """ if data_revisar is None and key_revisar is None: data_revisar = self.data[self.masterkey][pd.notnull(self.data[self.masterkey]['dem'])] super(DatosREE, self).printif('Últimos valores de generación y demanda:', 'info') super(DatosREE, self).printif(data_revisar.tail(), 'info') return super(DatosREE, self).last_entry(data_revisar, 'dem') else: return super(DatosREE, self).last_entry(data_revisar, key_revisar)
[ "Definición", "específica", "para", "filtrar", "por", "datos", "de", "demanda", "energética", "(", "pues", "los", "datos", "se", "extienden", "más", "allá", "del", "tiempo", "presente", "debido", "a", "las", "columnas", "de", "potencia", "prevista", "y", "programada", "." ]
azogue/esiosdata
python
https://github.com/azogue/esiosdata/blob/680c7918955bc6ceee5bded92b3a4485f5ea8151/esiosdata/classdataesios.py#L125-L140
[ "def", "last_entry", "(", "self", ",", "data_revisar", "=", "None", ",", "key_revisar", "=", "None", ")", ":", "if", "data_revisar", "is", "None", "and", "key_revisar", "is", "None", ":", "data_revisar", "=", "self", ".", "data", "[", "self", ".", "masterkey", "]", "[", "pd", ".", "notnull", "(", "self", ".", "data", "[", "self", ".", "masterkey", "]", "[", "'dem'", "]", ")", "]", "super", "(", "DatosREE", ",", "self", ")", ".", "printif", "(", "'Últimos valores de generación y demanda:', ", "'", "nfo')", "", "super", "(", "DatosREE", ",", "self", ")", ".", "printif", "(", "data_revisar", ".", "tail", "(", ")", ",", "'info'", ")", "return", "super", "(", "DatosREE", ",", "self", ")", ".", "last_entry", "(", "data_revisar", ",", "'dem'", ")", "else", ":", "return", "super", "(", "DatosREE", ",", "self", ")", ".", "last_entry", "(", "data_revisar", ",", "key_revisar", ")" ]
680c7918955bc6ceee5bded92b3a4485f5ea8151
valid
DatosREE.integridad_data
Specific definition that checks the timezone and frequency of the data, and also verifies that the index of every dataframe in the database is date-based, unique (no duplicates) and increasing :param data_integr: :param key:
esiosdata/classdataesios.py
def integridad_data(self, data_integr=None, key=None): """ Definición específica para comprobar timezone y frecuencia de los datos, además de comprobar que el index de cada dataframe de la base de datos sea de fechas, único (sin duplicados) y creciente :param data_integr: :param key: """ if data_integr is None and key is None and all(k in self.data.keys() for k in KEYS_DATA_DEM): assert(self.data[KEYS_DATA_DEM[0]].index.freq == FREQ_DAT_DEM and self.data[KEYS_DATA_DEM[0]].index.tz == self.TZ) if self.data[KEYS_DATA_DEM[1]] is not None: assert(self.data[KEYS_DATA_DEM[1]].index.freq == 'D') super(DatosREE, self).integridad_data(data_integr, key)
def integridad_data(self, data_integr=None, key=None): """ Definición específica para comprobar timezone y frecuencia de los datos, además de comprobar que el index de cada dataframe de la base de datos sea de fechas, único (sin duplicados) y creciente :param data_integr: :param key: """ if data_integr is None and key is None and all(k in self.data.keys() for k in KEYS_DATA_DEM): assert(self.data[KEYS_DATA_DEM[0]].index.freq == FREQ_DAT_DEM and self.data[KEYS_DATA_DEM[0]].index.tz == self.TZ) if self.data[KEYS_DATA_DEM[1]] is not None: assert(self.data[KEYS_DATA_DEM[1]].index.freq == 'D') super(DatosREE, self).integridad_data(data_integr, key)
[ "Definición", "específica", "para", "comprobar", "timezone", "y", "frecuencia", "de", "los", "datos", "además", "de", "comprobar", "que", "el", "index", "de", "cada", "dataframe", "de", "la", "base", "de", "datos", "sea", "de", "fechas", "único", "(", "sin", "duplicados", ")", "y", "creciente", ":", "param", "data_integr", ":", ":", "param", "key", ":" ]
azogue/esiosdata
python
https://github.com/azogue/esiosdata/blob/680c7918955bc6ceee5bded92b3a4485f5ea8151/esiosdata/classdataesios.py#L143-L155
[ "def", "integridad_data", "(", "self", ",", "data_integr", "=", "None", ",", "key", "=", "None", ")", ":", "if", "data_integr", "is", "None", "and", "key", "is", "None", "and", "all", "(", "k", "in", "self", ".", "data", ".", "keys", "(", ")", "for", "k", "in", "KEYS_DATA_DEM", ")", ":", "assert", "(", "self", ".", "data", "[", "KEYS_DATA_DEM", "[", "0", "]", "]", ".", "index", ".", "freq", "==", "FREQ_DAT_DEM", "and", "self", ".", "data", "[", "KEYS_DATA_DEM", "[", "0", "]", "]", ".", "index", ".", "tz", "==", "self", ".", "TZ", ")", "if", "self", ".", "data", "[", "KEYS_DATA_DEM", "[", "1", "]", "]", "is", "not", "None", ":", "assert", "(", "self", ".", "data", "[", "KEYS_DATA_DEM", "[", "1", "]", "]", ".", "index", ".", "freq", "==", "'D'", ")", "super", "(", "DatosREE", ",", "self", ")", ".", "integridad_data", "(", "data_integr", ",", "key", ")" ]
680c7918955bc6ceee5bded92b3a4485f5ea8151
valid
DatosREE.busca_errores_data
Searches for errors or inconsistencies in the acquired data :return: Dataframe of the errors found
esiosdata/classdataesios.py
def busca_errores_data(self): """ Busca errores o inconsistencias en los datos adquiridos :return: Dataframe de errores encontrados """ data_busqueda = self.append_delta_index(TS_DATA_DEM, data_delta=self.data[self.masterkey].copy()) idx_desconex = (((data_busqueda.index < 'now') & (data_busqueda.index >= self.DATE_INI)) & ((data_busqueda.delta_T > 1) | data_busqueda['dem'].isnull() | data_busqueda['pre'].isnull() | data_busqueda['pro'].isnull())) sosp = data_busqueda[idx_desconex].copy() assert len(sosp) == 0 # if len(sosp) > 0: # cols_show = ['bad_dem', 'bad_pre', 'bad_T', 'delta', 'delta_T', 'dem', 'pre', 'pro'] # cols_ss = cols_show[:3] # how_r = {k: pd.Series.sum if k == 'delta' else 'sum' for k in cols_show} # sosp[cols_show[0]] = sosp['dem'].isnull() # sosp[cols_show[1]] = sosp['pre'].isnull() # sosp[cols_show[2]] = sosp['delta_T'] > 1 # if verbose: # print(sosp[cols_show].tz_localize(None).resample('D', how=how_r).dropna(how='all', subset=cols_ss)) # print(sosp[cols_show].tz_localize(None).resample('MS', how=how_r).dropna(how='all', subset=cols_ss)) # return sosp return pd.DataFrame()
def busca_errores_data(self): """ Busca errores o inconsistencias en los datos adquiridos :return: Dataframe de errores encontrados """ data_busqueda = self.append_delta_index(TS_DATA_DEM, data_delta=self.data[self.masterkey].copy()) idx_desconex = (((data_busqueda.index < 'now') & (data_busqueda.index >= self.DATE_INI)) & ((data_busqueda.delta_T > 1) | data_busqueda['dem'].isnull() | data_busqueda['pre'].isnull() | data_busqueda['pro'].isnull())) sosp = data_busqueda[idx_desconex].copy() assert len(sosp) == 0 # if len(sosp) > 0: # cols_show = ['bad_dem', 'bad_pre', 'bad_T', 'delta', 'delta_T', 'dem', 'pre', 'pro'] # cols_ss = cols_show[:3] # how_r = {k: pd.Series.sum if k == 'delta' else 'sum' for k in cols_show} # sosp[cols_show[0]] = sosp['dem'].isnull() # sosp[cols_show[1]] = sosp['pre'].isnull() # sosp[cols_show[2]] = sosp['delta_T'] > 1 # if verbose: # print(sosp[cols_show].tz_localize(None).resample('D', how=how_r).dropna(how='all', subset=cols_ss)) # print(sosp[cols_show].tz_localize(None).resample('MS', how=how_r).dropna(how='all', subset=cols_ss)) # return sosp return pd.DataFrame()
[ "Busca", "errores", "o", "inconsistencias", "en", "los", "datos", "adquiridos", ":", "return", ":", "Dataframe", "de", "errores", "encontrados" ]
azogue/esiosdata
python
https://github.com/azogue/esiosdata/blob/680c7918955bc6ceee5bded92b3a4485f5ea8151/esiosdata/classdataesios.py#L157-L179
[ "def", "busca_errores_data", "(", "self", ")", ":", "data_busqueda", "=", "self", ".", "append_delta_index", "(", "TS_DATA_DEM", ",", "data_delta", "=", "self", ".", "data", "[", "self", ".", "masterkey", "]", ".", "copy", "(", ")", ")", "idx_desconex", "=", "(", "(", "(", "data_busqueda", ".", "index", "<", "'now'", ")", "&", "(", "data_busqueda", ".", "index", ">=", "self", ".", "DATE_INI", ")", ")", "&", "(", "(", "data_busqueda", ".", "delta_T", ">", "1", ")", "|", "data_busqueda", "[", "'dem'", "]", ".", "isnull", "(", ")", "|", "data_busqueda", "[", "'pre'", "]", ".", "isnull", "(", ")", "|", "data_busqueda", "[", "'pro'", "]", ".", "isnull", "(", ")", ")", ")", "sosp", "=", "data_busqueda", "[", "idx_desconex", "]", ".", "copy", "(", ")", "assert", "len", "(", "sosp", ")", "==", "0", "# if len(sosp) > 0:", "# cols_show = ['bad_dem', 'bad_pre', 'bad_T', 'delta', 'delta_T', 'dem', 'pre', 'pro']", "# cols_ss = cols_show[:3]", "# how_r = {k: pd.Series.sum if k == 'delta' else 'sum' for k in cols_show}", "# sosp[cols_show[0]] = sosp['dem'].isnull()", "# sosp[cols_show[1]] = sosp['pre'].isnull()", "# sosp[cols_show[2]] = sosp['delta_T'] > 1", "# if verbose:", "# print(sosp[cols_show].tz_localize(None).resample('D', how=how_r).dropna(how='all', subset=cols_ss))", "# print(sosp[cols_show].tz_localize(None).resample('MS', how=how_r).dropna(how='all', subset=cols_ss))", "# return sosp", "return", "pd", ".", "DataFrame", "(", ")" ]
680c7918955bc6ceee5bded92b3a4485f5ea8151
valid
sanitize_path
Performs sanitization of the path after validating it :param path: path to sanitize :return: path :raises: - InvalidPath if the path doesn't start with a slash
flask_journey/utils.py
def sanitize_path(path): """Performs sanitation of the path after validating :param path: path to sanitize :return: path :raises: - InvalidPath if the path doesn't start with a slash """ if path == '/': # Nothing to do, just return return path if path[:1] != '/': raise InvalidPath('The path must start with a slash') # Deduplicate slashes in path path = re.sub(r'/+', '/', path) # Strip trailing slashes and return return path.rstrip('/')
def sanitize_path(path): """Performs sanitation of the path after validating :param path: path to sanitize :return: path :raises: - InvalidPath if the path doesn't start with a slash """ if path == '/': # Nothing to do, just return return path if path[:1] != '/': raise InvalidPath('The path must start with a slash') # Deduplicate slashes in path path = re.sub(r'/+', '/', path) # Strip trailing slashes and return return path.rstrip('/')
[ "Performs", "sanitation", "of", "the", "path", "after", "validating" ]
rbw/flask-journey
python
https://github.com/rbw/flask-journey/blob/6181f59a7b5eef6a85b86ce6ed7d03c91f6bd285/flask_journey/utils.py#L13-L32
[ "def", "sanitize_path", "(", "path", ")", ":", "if", "path", "==", "'/'", ":", "# Nothing to do, just return", "return", "path", "if", "path", "[", ":", "1", "]", "!=", "'/'", ":", "raise", "InvalidPath", "(", "'The path must start with a slash'", ")", "# Deduplicate slashes in path", "path", "=", "re", ".", "sub", "(", "r'/+'", ",", "'/'", ",", "path", ")", "# Strip trailing slashes and return", "return", "path", ".", "rstrip", "(", "'/'", ")" ]
6181f59a7b5eef6a85b86ce6ed7d03c91f6bd285
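A self-contained sketch of the same sanitization rules, with InvalidPath swapped for a plain ValueError so the snippet runs on its own; the example inputs are invented.

import re

def sanitize_path(path):
    # Mirrors the record above: '/' passes through, a missing leading slash is
    # rejected, duplicate slashes are collapsed and trailing slashes stripped.
    if path == '/':
        return path
    if path[:1] != '/':
        raise ValueError('The path must start with a slash')
    return re.sub(r'/+', '/', path).rstrip('/')

print(sanitize_path('/'))             # '/'
print(sanitize_path('//api///v1/'))   # '/api/v1'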
valid
_validate_schema
Ensures the passed schema instance is compatible :param obj: object to validate :return: obj :raises: - IncompatibleSchema if the passed schema is of an incompatible type
flask_journey/utils.py
def _validate_schema(obj): """Ensures the passed schema instance is compatible :param obj: object to validate :return: obj :raises: - IncompatibleSchema if the passed schema is of an incompatible type """ if obj is not None and not isinstance(obj, Schema): raise IncompatibleSchema('Schema must be of type {0}'.format(Schema)) return obj
def _validate_schema(obj): """Ensures the passed schema instance is compatible :param obj: object to validate :return: obj :raises: - IncompatibleSchema if the passed schema is of an incompatible type """ if obj is not None and not isinstance(obj, Schema): raise IncompatibleSchema('Schema must be of type {0}'.format(Schema)) return obj
[ "Ensures", "the", "passed", "schema", "instance", "is", "compatible" ]
rbw/flask-journey
python
https://github.com/rbw/flask-journey/blob/6181f59a7b5eef6a85b86ce6ed7d03c91f6bd285/flask_journey/utils.py#L35-L47
[ "def", "_validate_schema", "(", "obj", ")", ":", "if", "obj", "is", "not", "None", "and", "not", "isinstance", "(", "obj", ",", "Schema", ")", ":", "raise", "IncompatibleSchema", "(", "'Schema must be of type {0}'", ".", "format", "(", "Schema", ")", ")", "return", "obj" ]
6181f59a7b5eef6a85b86ce6ed7d03c91f6bd285
valid
route
Journey route decorator Enables simple serialization, deserialization and validation of Flask routes with the help of Marshmallow. :param bp: :class:`flask.Blueprint` object :param args: args to pass along to `Blueprint.route` :param kwargs: - :strict_slashes: Enable / disable strict slashes (default False) - :validate: Enable / disable body/query validation (default True) - :_query: Unmarshal Query string into this schema - :_body: Unmarshal JSON body into this schema - :marshal_with: Serialize the output with this schema :raises: - ValidationError if the query parameters or JSON body fails validation
flask_journey/utils.py
def route(bp, *args, **kwargs): """Journey route decorator Enables simple serialization, deserialization and validation of Flask routes with the help of Marshmallow. :param bp: :class:`flask.Blueprint` object :param args: args to pass along to `Blueprint.route` :param kwargs: - :strict_slashes: Enable / disable strict slashes (default False) - :validate: Enable / disable body/query validation (default True) - :_query: Unmarshal Query string into this schema - :_body: Unmarshal JSON body into this schema - :marshal_with: Serialize the output with this schema :raises: - ValidationError if the query parameters or JSON body fails validation """ kwargs['strict_slashes'] = kwargs.pop('strict_slashes', False) body = _validate_schema(kwargs.pop('_body', None)) query = _validate_schema(kwargs.pop('_query', None)) output = _validate_schema(kwargs.pop('marshal_with', None)) validate = kwargs.pop('validate', True) def decorator(f): @bp.route(*args, **kwargs) @wraps(f) def wrapper(*inner_args, **inner_kwargs): """If a schema (_body and/or _query) was supplied to the route decorator, the deserialized :class`marshmallow.Schema` object is injected into the decorated function's kwargs.""" try: if query is not None: query.strict = validate url = furl(request.url) inner_kwargs['_query'] = query.load(data=url.args) if body is not None: body.strict = validate json_data = request.get_json() if json_data is None: # Set json_data to empty dict if body is empty, so it gets picked up by the validator json_data = {} inner_kwargs['_body'] = body.load(data=json_data) except ValidationError as err: return jsonify(err.messages), 422 if output: data = output.dump(f(*inner_args, **inner_kwargs)) return jsonify(data[0]) return f(*inner_args, **inner_kwargs) return f return decorator
def route(bp, *args, **kwargs): """Journey route decorator Enables simple serialization, deserialization and validation of Flask routes with the help of Marshmallow. :param bp: :class:`flask.Blueprint` object :param args: args to pass along to `Blueprint.route` :param kwargs: - :strict_slashes: Enable / disable strict slashes (default False) - :validate: Enable / disable body/query validation (default True) - :_query: Unmarshal Query string into this schema - :_body: Unmarshal JSON body into this schema - :marshal_with: Serialize the output with this schema :raises: - ValidationError if the query parameters or JSON body fails validation """ kwargs['strict_slashes'] = kwargs.pop('strict_slashes', False) body = _validate_schema(kwargs.pop('_body', None)) query = _validate_schema(kwargs.pop('_query', None)) output = _validate_schema(kwargs.pop('marshal_with', None)) validate = kwargs.pop('validate', True) def decorator(f): @bp.route(*args, **kwargs) @wraps(f) def wrapper(*inner_args, **inner_kwargs): """If a schema (_body and/or _query) was supplied to the route decorator, the deserialized :class`marshmallow.Schema` object is injected into the decorated function's kwargs.""" try: if query is not None: query.strict = validate url = furl(request.url) inner_kwargs['_query'] = query.load(data=url.args) if body is not None: body.strict = validate json_data = request.get_json() if json_data is None: # Set json_data to empty dict if body is empty, so it gets picked up by the validator json_data = {} inner_kwargs['_body'] = body.load(data=json_data) except ValidationError as err: return jsonify(err.messages), 422 if output: data = output.dump(f(*inner_args, **inner_kwargs)) return jsonify(data[0]) return f(*inner_args, **inner_kwargs) return f return decorator
[ "Journey", "route", "decorator" ]
rbw/flask-journey
python
https://github.com/rbw/flask-journey/blob/6181f59a7b5eef6a85b86ce6ed7d03c91f6bd285/flask_journey/utils.py#L50-L107
[ "def", "route", "(", "bp", ",", "*", "args", ",", "*", "*", "kwargs", ")", ":", "kwargs", "[", "'strict_slashes'", "]", "=", "kwargs", ".", "pop", "(", "'strict_slashes'", ",", "False", ")", "body", "=", "_validate_schema", "(", "kwargs", ".", "pop", "(", "'_body'", ",", "None", ")", ")", "query", "=", "_validate_schema", "(", "kwargs", ".", "pop", "(", "'_query'", ",", "None", ")", ")", "output", "=", "_validate_schema", "(", "kwargs", ".", "pop", "(", "'marshal_with'", ",", "None", ")", ")", "validate", "=", "kwargs", ".", "pop", "(", "'validate'", ",", "True", ")", "def", "decorator", "(", "f", ")", ":", "@", "bp", ".", "route", "(", "*", "args", ",", "*", "*", "kwargs", ")", "@", "wraps", "(", "f", ")", "def", "wrapper", "(", "*", "inner_args", ",", "*", "*", "inner_kwargs", ")", ":", "\"\"\"If a schema (_body and/or _query) was supplied to the route decorator, the deserialized\n :class`marshmallow.Schema` object is injected into the decorated function's kwargs.\"\"\"", "try", ":", "if", "query", "is", "not", "None", ":", "query", ".", "strict", "=", "validate", "url", "=", "furl", "(", "request", ".", "url", ")", "inner_kwargs", "[", "'_query'", "]", "=", "query", ".", "load", "(", "data", "=", "url", ".", "args", ")", "if", "body", "is", "not", "None", ":", "body", ".", "strict", "=", "validate", "json_data", "=", "request", ".", "get_json", "(", ")", "if", "json_data", "is", "None", ":", "# Set json_data to empty dict if body is empty, so it gets picked up by the validator", "json_data", "=", "{", "}", "inner_kwargs", "[", "'_body'", "]", "=", "body", ".", "load", "(", "data", "=", "json_data", ")", "except", "ValidationError", "as", "err", ":", "return", "jsonify", "(", "err", ".", "messages", ")", ",", "422", "if", "output", ":", "data", "=", "output", ".", "dump", "(", "f", "(", "*", "inner_args", ",", "*", "*", "inner_kwargs", ")", ")", "return", "jsonify", "(", "data", "[", "0", "]", ")", "return", "f", "(", "*", "inner_args", ",", "*", "*", "inner_kwargs", ")", "return", "f", "return", "decorator" ]
6181f59a7b5eef6a85b86ce6ed7d03c91f6bd285
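A hedged usage sketch for the route decorator documented above. It assumes Flask and marshmallow 2.x (where Schema.load returns an UnmarshalResult with a .data attribute, which matches the .strict usage in the record) and imports route from flask_journey.utils, the module path given in the record; the blueprint, schema and field names are invented.

from flask import Blueprint, jsonify
from marshmallow import Schema, fields
from flask_journey.utils import route   # module path taken from the record above

bp = Blueprint('users', __name__)

class UserQuerySchema(Schema):
    page = fields.Integer(missing=1)     # hypothetical query parameter

@route(bp, '/users', methods=['GET'], _query=UserQuerySchema())
def list_users(_query):
    # The decorator injects the deserialized query string as the _query kwarg.
    return jsonify({'page': _query.data['page']})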
valid
BlueprintBundle.attach_bp
Attaches a flask.Blueprint to the bundle :param bp: :class:`flask.Blueprint` object :param description: Optional description string :raises: - InvalidBlueprint if the Blueprint is not of type `flask.Blueprint`
flask_journey/blueprint_bundle.py
def attach_bp(self, bp, description=''): """Attaches a flask.Blueprint to the bundle :param bp: :class:`flask.Blueprint` object :param description: Optional description string :raises: - InvalidBlueprint if the Blueprint is not of type `flask.Blueprint` """ if not isinstance(bp, Blueprint): raise InvalidBlueprint('Blueprints attached to the bundle must be of type {0}'.format(Blueprint)) self.blueprints.append((bp, description))
def attach_bp(self, bp, description=''): """Attaches a flask.Blueprint to the bundle :param bp: :class:`flask.Blueprint` object :param description: Optional description string :raises: - InvalidBlueprint if the Blueprint is not of type `flask.Blueprint` """ if not isinstance(bp, Blueprint): raise InvalidBlueprint('Blueprints attached to the bundle must be of type {0}'.format(Blueprint)) self.blueprints.append((bp, description))
[ "Attaches", "a", "flask", ".", "Blueprint", "to", "the", "bundle" ]
rbw/flask-journey
python
https://github.com/rbw/flask-journey/blob/6181f59a7b5eef6a85b86ce6ed7d03c91f6bd285/flask_journey/blueprint_bundle.py#L20-L32
[ "def", "attach_bp", "(", "self", ",", "bp", ",", "description", "=", "''", ")", ":", "if", "not", "isinstance", "(", "bp", ",", "Blueprint", ")", ":", "raise", "InvalidBlueprint", "(", "'Blueprints attached to the bundle must be of type {0}'", ".", "format", "(", "Blueprint", ")", ")", "self", ".", "blueprints", ".", "append", "(", "(", "bp", ",", "description", ")", ")" ]
6181f59a7b5eef6a85b86ce6ed7d03c91f6bd285
valid
DottedRule.move_dot
Returns the DottedRule that results from moving the dot.
purplex/grammar.py
def move_dot(self): """Returns the DottedRule that results from moving the dot.""" return self.__class__(self.production, self.pos + 1, self.lookahead)
def move_dot(self): """Returns the DottedRule that results from moving the dot.""" return self.__class__(self.production, self.pos + 1, self.lookahead)
[ "Returns", "the", "DottedRule", "that", "results", "from", "moving", "the", "dot", "." ]
mtomwing/purplex
python
https://github.com/mtomwing/purplex/blob/4072109e1d4395826983cd9d95ead2c1dfc1184e/purplex/grammar.py#L69-L71
[ "def", "move_dot", "(", "self", ")", ":", "return", "self", ".", "__class__", "(", "self", ".", "production", ",", "self", ".", "pos", "+", "1", ",", "self", ".", "lookahead", ")" ]
4072109e1d4395826983cd9d95ead2c1dfc1184e
valid
Grammar.first
Computes the intermediate FIRST set using symbols.
purplex/grammar.py
def first(self, symbols): """Computes the intermediate FIRST set using symbols.""" ret = set() if EPSILON in symbols: return set([EPSILON]) for symbol in symbols: ret |= self._first[symbol] - set([EPSILON]) if EPSILON not in self._first[symbol]: break else: ret.add(EPSILON) return ret
def first(self, symbols): """Computes the intermediate FIRST set using symbols.""" ret = set() if EPSILON in symbols: return set([EPSILON]) for symbol in symbols: ret |= self._first[symbol] - set([EPSILON]) if EPSILON not in self._first[symbol]: break else: ret.add(EPSILON) return ret
[ "Computes", "the", "intermediate", "FIRST", "set", "using", "symbols", "." ]
mtomwing/purplex
python
https://github.com/mtomwing/purplex/blob/4072109e1d4395826983cd9d95ead2c1dfc1184e/purplex/grammar.py#L94-L108
[ "def", "first", "(", "self", ",", "symbols", ")", ":", "ret", "=", "set", "(", ")", "if", "EPSILON", "in", "symbols", ":", "return", "set", "(", "[", "EPSILON", "]", ")", "for", "symbol", "in", "symbols", ":", "ret", "|=", "self", ".", "_first", "[", "symbol", "]", "-", "set", "(", "[", "EPSILON", "]", ")", "if", "EPSILON", "not", "in", "self", ".", "_first", "[", "symbol", "]", ":", "break", "else", ":", "ret", ".", "add", "(", "EPSILON", ")", "return", "ret" ]
4072109e1d4395826983cd9d95ead2c1dfc1184e
valid
Grammar._compute_first
Computes the FIRST set for every symbol in the grammar. Tentatively based on _compute_first in PLY.
purplex/grammar.py
def _compute_first(self): """Computes the FIRST set for every symbol in the grammar. Tenatively based on _compute_first in PLY. """ for terminal in self.terminals: self._first[terminal].add(terminal) self._first[END_OF_INPUT].add(END_OF_INPUT) while True: changed = False for nonterminal, productions in self.nonterminals.items(): for production in productions: new_first = self.first(production.rhs) if new_first - self._first[nonterminal]: self._first[nonterminal] |= new_first changed = True if not changed: break
def _compute_first(self): """Computes the FIRST set for every symbol in the grammar. Tenatively based on _compute_first in PLY. """ for terminal in self.terminals: self._first[terminal].add(terminal) self._first[END_OF_INPUT].add(END_OF_INPUT) while True: changed = False for nonterminal, productions in self.nonterminals.items(): for production in productions: new_first = self.first(production.rhs) if new_first - self._first[nonterminal]: self._first[nonterminal] |= new_first changed = True if not changed: break
[ "Computes", "the", "FIRST", "set", "for", "every", "symbol", "in", "the", "grammar", "." ]
mtomwing/purplex
python
https://github.com/mtomwing/purplex/blob/4072109e1d4395826983cd9d95ead2c1dfc1184e/purplex/grammar.py#L110-L130
[ "def", "_compute_first", "(", "self", ")", ":", "for", "terminal", "in", "self", ".", "terminals", ":", "self", ".", "_first", "[", "terminal", "]", ".", "add", "(", "terminal", ")", "self", ".", "_first", "[", "END_OF_INPUT", "]", ".", "add", "(", "END_OF_INPUT", ")", "while", "True", ":", "changed", "=", "False", "for", "nonterminal", ",", "productions", "in", "self", ".", "nonterminals", ".", "items", "(", ")", ":", "for", "production", "in", "productions", ":", "new_first", "=", "self", ".", "first", "(", "production", ".", "rhs", ")", "if", "new_first", "-", "self", ".", "_first", "[", "nonterminal", "]", ":", "self", ".", "_first", "[", "nonterminal", "]", "|=", "new_first", "changed", "=", "True", "if", "not", "changed", ":", "break" ]
4072109e1d4395826983cd9d95ead2c1dfc1184e
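To make the fixed-point iteration in _compute_first concrete, here is a standalone FIRST-set computation for a toy grammar; the grammar, symbol names and the EPSILON sentinel are invented for the example and do not come from purplex.

import collections

EPSILON = 'EPSILON'
terminals = {'a', 'b'}
# Toy grammar:  S -> A b   ;   A -> a A | epsilon
productions = {'S': [['A', 'b']], 'A': [['a', 'A'], [EPSILON]]}

first = collections.defaultdict(set)
for t in terminals:
    first[t].add(t)

def first_of(symbols):
    # Same rule as Grammar.first: union FIRST of each symbol until one that
    # cannot derive epsilon; keep EPSILON only if every symbol can vanish.
    if EPSILON in symbols:
        return {EPSILON}
    ret = set()
    for sym in symbols:
        ret |= first[sym] - {EPSILON}
        if EPSILON not in first[sym]:
            break
    else:
        ret.add(EPSILON)
    return ret

changed = True
while changed:                     # iterate to a fixed point, as in the record
    changed = False
    for nt, rhss in productions.items():
        for rhs in rhss:
            new_first = first_of(rhs)
            if new_first - first[nt]:
                first[nt] |= new_first
                changed = True

print(dict(first))   # FIRST(A) = {'a', EPSILON}, FIRST(S) = {'a', 'b'}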
valid
Grammar._compute_follow
Computes the FOLLOW set for every non-terminal in the grammar. Tentatively based on _compute_follow in PLY.
purplex/grammar.py
def _compute_follow(self): """Computes the FOLLOW set for every non-terminal in the grammar. Tenatively based on _compute_follow in PLY. """ self._follow[self.start_symbol].add(END_OF_INPUT) while True: changed = False for nonterminal, productions in self.nonterminals.items(): for production in productions: for i, symbol in enumerate(production.rhs): if symbol not in self.nonterminals: continue first = self.first(production.rhs[i + 1:]) new_follow = first - set([EPSILON]) if EPSILON in first or i == (len(production.rhs) - 1): new_follow |= self._follow[nonterminal] if new_follow - self._follow[symbol]: self._follow[symbol] |= new_follow changed = True if not changed: break
def _compute_follow(self): """Computes the FOLLOW set for every non-terminal in the grammar. Tenatively based on _compute_follow in PLY. """ self._follow[self.start_symbol].add(END_OF_INPUT) while True: changed = False for nonterminal, productions in self.nonterminals.items(): for production in productions: for i, symbol in enumerate(production.rhs): if symbol not in self.nonterminals: continue first = self.first(production.rhs[i + 1:]) new_follow = first - set([EPSILON]) if EPSILON in first or i == (len(production.rhs) - 1): new_follow |= self._follow[nonterminal] if new_follow - self._follow[symbol]: self._follow[symbol] |= new_follow changed = True if not changed: break
[ "Computes", "the", "FOLLOW", "set", "for", "every", "non", "-", "terminal", "in", "the", "grammar", "." ]
mtomwing/purplex
python
https://github.com/mtomwing/purplex/blob/4072109e1d4395826983cd9d95ead2c1dfc1184e/purplex/grammar.py#L132-L158
[ "def", "_compute_follow", "(", "self", ")", ":", "self", ".", "_follow", "[", "self", ".", "start_symbol", "]", ".", "add", "(", "END_OF_INPUT", ")", "while", "True", ":", "changed", "=", "False", "for", "nonterminal", ",", "productions", "in", "self", ".", "nonterminals", ".", "items", "(", ")", ":", "for", "production", "in", "productions", ":", "for", "i", ",", "symbol", "in", "enumerate", "(", "production", ".", "rhs", ")", ":", "if", "symbol", "not", "in", "self", ".", "nonterminals", ":", "continue", "first", "=", "self", ".", "first", "(", "production", ".", "rhs", "[", "i", "+", "1", ":", "]", ")", "new_follow", "=", "first", "-", "set", "(", "[", "EPSILON", "]", ")", "if", "EPSILON", "in", "first", "or", "i", "==", "(", "len", "(", "production", ".", "rhs", ")", "-", "1", ")", ":", "new_follow", "|=", "self", ".", "_follow", "[", "nonterminal", "]", "if", "new_follow", "-", "self", ".", "_follow", "[", "symbol", "]", ":", "self", ".", "_follow", "[", "symbol", "]", "|=", "new_follow", "changed", "=", "True", "if", "not", "changed", ":", "break" ]
4072109e1d4395826983cd9d95ead2c1dfc1184e
valid
Grammar.initial_closure
Computes the initial closure using the START_foo production.
purplex/grammar.py
def initial_closure(self): """Computes the initial closure using the START_foo production.""" first_rule = DottedRule(self.start, 0, END_OF_INPUT) return self.closure([first_rule])
def initial_closure(self): """Computes the initial closure using the START_foo production.""" first_rule = DottedRule(self.start, 0, END_OF_INPUT) return self.closure([first_rule])
[ "Computes", "the", "initial", "closure", "using", "the", "START_foo", "production", "." ]
mtomwing/purplex
python
https://github.com/mtomwing/purplex/blob/4072109e1d4395826983cd9d95ead2c1dfc1184e/purplex/grammar.py#L160-L163
[ "def", "initial_closure", "(", "self", ")", ":", "first_rule", "=", "DottedRule", "(", "self", ".", "start", ",", "0", ",", "END_OF_INPUT", ")", "return", "self", ".", "closure", "(", "[", "first_rule", "]", ")" ]
4072109e1d4395826983cd9d95ead2c1dfc1184e
valid
Grammar.goto
Computes the next closure for rules based on the symbol we got. Args: rules - an iterable of DottedRules symbol - a string denoting the symbol we've just seen Returns: frozenset of DottedRules
purplex/grammar.py
def goto(self, rules, symbol): """Computes the next closure for rules based on the symbol we got. Args: rules - an iterable of DottedRules symbol - a string denoting the symbol we've just seen Returns: frozenset of DottedRules """ return self.closure( {rule.move_dot() for rule in rules if not rule.at_end and rule.rhs[rule.pos] == symbol}, )
def goto(self, rules, symbol): """Computes the next closure for rules based on the symbol we got. Args: rules - an iterable of DottedRules symbol - a string denoting the symbol we've just seen Returns: frozenset of DottedRules """ return self.closure( {rule.move_dot() for rule in rules if not rule.at_end and rule.rhs[rule.pos] == symbol}, )
[ "Computes", "the", "next", "closure", "for", "rules", "based", "on", "the", "symbol", "we", "got", "." ]
mtomwing/purplex
python
https://github.com/mtomwing/purplex/blob/4072109e1d4395826983cd9d95ead2c1dfc1184e/purplex/grammar.py#L165-L177
[ "def", "goto", "(", "self", ",", "rules", ",", "symbol", ")", ":", "return", "self", ".", "closure", "(", "{", "rule", ".", "move_dot", "(", ")", "for", "rule", "in", "rules", "if", "not", "rule", ".", "at_end", "and", "rule", ".", "rhs", "[", "rule", ".", "pos", "]", "==", "symbol", "}", ",", ")" ]
4072109e1d4395826983cd9d95ead2c1dfc1184e
valid
Grammar.closure
Fills out the entire closure based on some initial dotted rules. Args: rules - an iterable of DottedRules Returns: frozenset of DottedRules
purplex/grammar.py
def closure(self, rules): """Fills out the entire closure based on some initial dotted rules. Args: rules - an iterable of DottedRules Returns: frozenset of DottedRules """ closure = set() todo = set(rules) while todo: rule = todo.pop() closure.add(rule) # If the dot is at the end, there's no need to process it. if rule.at_end: continue symbol = rule.rhs[rule.pos] for production in self.nonterminals[symbol]: for first in self.first(rule.rest): if EPSILON in production.rhs: # Move immediately to the end if the production # goes to epsilon new_rule = DottedRule(production, 1, first) else: new_rule = DottedRule(production, 0, first) if new_rule not in closure: todo.add(new_rule) return frozenset(closure)
def closure(self, rules): """Fills out the entire closure based on some initial dotted rules. Args: rules - an iterable of DottedRules Returns: frozenset of DottedRules """ closure = set() todo = set(rules) while todo: rule = todo.pop() closure.add(rule) # If the dot is at the end, there's no need to process it. if rule.at_end: continue symbol = rule.rhs[rule.pos] for production in self.nonterminals[symbol]: for first in self.first(rule.rest): if EPSILON in production.rhs: # Move immediately to the end if the production # goes to epsilon new_rule = DottedRule(production, 1, first) else: new_rule = DottedRule(production, 0, first) if new_rule not in closure: todo.add(new_rule) return frozenset(closure)
[ "Fills", "out", "the", "entire", "closure", "based", "on", "some", "initial", "dotted", "rules", "." ]
mtomwing/purplex
python
https://github.com/mtomwing/purplex/blob/4072109e1d4395826983cd9d95ead2c1dfc1184e/purplex/grammar.py#L179-L212
[ "def", "closure", "(", "self", ",", "rules", ")", ":", "closure", "=", "set", "(", ")", "todo", "=", "set", "(", "rules", ")", "while", "todo", ":", "rule", "=", "todo", ".", "pop", "(", ")", "closure", ".", "add", "(", "rule", ")", "# If the dot is at the end, there's no need to process it.", "if", "rule", ".", "at_end", ":", "continue", "symbol", "=", "rule", ".", "rhs", "[", "rule", ".", "pos", "]", "for", "production", "in", "self", ".", "nonterminals", "[", "symbol", "]", ":", "for", "first", "in", "self", ".", "first", "(", "rule", ".", "rest", ")", ":", "if", "EPSILON", "in", "production", ".", "rhs", ":", "# Move immediately to the end if the production", "# goes to epsilon", "new_rule", "=", "DottedRule", "(", "production", ",", "1", ",", "first", ")", "else", ":", "new_rule", "=", "DottedRule", "(", "production", ",", "0", ",", "first", ")", "if", "new_rule", "not", "in", "closure", ":", "todo", ".", "add", "(", "new_rule", ")", "return", "frozenset", "(", "closure", ")" ]
4072109e1d4395826983cd9d95ead2c1dfc1184e
valid
Grammar.closures
Computes all LR(1) closure sets for the grammar.
purplex/grammar.py
def closures(self): """Computes all LR(1) closure sets for the grammar.""" initial = self.initial_closure() closures = collections.OrderedDict() goto = collections.defaultdict(dict) todo = set([initial]) while todo: closure = todo.pop() closures[closure] = closure symbols = {rule.rhs[rule.pos] for rule in closure if not rule.at_end} for symbol in symbols: next_closure = self.goto(closure, symbol) if next_closure in closures or next_closure in todo: next_closure = (closures.get(next_closure) or todo.get(next_closure)) else: closures[next_closure] = next_closure todo.add(next_closure) goto[closure][symbol] = next_closure return initial, closures, goto
def closures(self): """Computes all LR(1) closure sets for the grammar.""" initial = self.initial_closure() closures = collections.OrderedDict() goto = collections.defaultdict(dict) todo = set([initial]) while todo: closure = todo.pop() closures[closure] = closure symbols = {rule.rhs[rule.pos] for rule in closure if not rule.at_end} for symbol in symbols: next_closure = self.goto(closure, symbol) if next_closure in closures or next_closure in todo: next_closure = (closures.get(next_closure) or todo.get(next_closure)) else: closures[next_closure] = next_closure todo.add(next_closure) goto[closure][symbol] = next_closure return initial, closures, goto
[ "Computes", "all", "LR", "(", "1", ")", "closure", "sets", "for", "the", "grammar", "." ]
mtomwing/purplex
python
https://github.com/mtomwing/purplex/blob/4072109e1d4395826983cd9d95ead2c1dfc1184e/purplex/grammar.py#L214-L239
[ "def", "closures", "(", "self", ")", ":", "initial", "=", "self", ".", "initial_closure", "(", ")", "closures", "=", "collections", ".", "OrderedDict", "(", ")", "goto", "=", "collections", ".", "defaultdict", "(", "dict", ")", "todo", "=", "set", "(", "[", "initial", "]", ")", "while", "todo", ":", "closure", "=", "todo", ".", "pop", "(", ")", "closures", "[", "closure", "]", "=", "closure", "symbols", "=", "{", "rule", ".", "rhs", "[", "rule", ".", "pos", "]", "for", "rule", "in", "closure", "if", "not", "rule", ".", "at_end", "}", "for", "symbol", "in", "symbols", ":", "next_closure", "=", "self", ".", "goto", "(", "closure", ",", "symbol", ")", "if", "next_closure", "in", "closures", "or", "next_closure", "in", "todo", ":", "next_closure", "=", "(", "closures", ".", "get", "(", "next_closure", ")", "or", "todo", ".", "get", "(", "next_closure", ")", ")", "else", ":", "closures", "[", "next_closure", "]", "=", "next_closure", "todo", ".", "add", "(", "next_closure", ")", "goto", "[", "closure", "]", "[", "symbol", "]", "=", "next_closure", "return", "initial", ",", "closures", ",", "goto" ]
4072109e1d4395826983cd9d95ead2c1dfc1184e
valid
Journey.init_app
Initializes the Journey extension :param app: App passed from the constructor or directly to init_app :raises: - NoBundlesAttached if no bundles have been attached
flask_journey/journey.py
def init_app(self, app): """Initializes Journey extension :param app: App passed from constructor or directly to init_app :raises: - NoBundlesAttached if no bundles has been attached attached """ if len(self._attached_bundles) == 0: raise NoBundlesAttached("At least one bundle must be attached before initializing Journey") for bundle in self._attached_bundles: processed_bundle = { 'path': bundle.path, 'description': bundle.description, 'blueprints': [] } for (bp, description) in bundle.blueprints: # Register the BP blueprint = self._register_blueprint(app, bp, bundle.path, self.get_bp_path(bp), description) # Finally, attach the blueprints to its parent processed_bundle['blueprints'].append(blueprint) self._registered_bundles.append(processed_bundle)
def init_app(self, app): """Initializes Journey extension :param app: App passed from constructor or directly to init_app :raises: - NoBundlesAttached if no bundles has been attached attached """ if len(self._attached_bundles) == 0: raise NoBundlesAttached("At least one bundle must be attached before initializing Journey") for bundle in self._attached_bundles: processed_bundle = { 'path': bundle.path, 'description': bundle.description, 'blueprints': [] } for (bp, description) in bundle.blueprints: # Register the BP blueprint = self._register_blueprint(app, bp, bundle.path, self.get_bp_path(bp), description) # Finally, attach the blueprints to its parent processed_bundle['blueprints'].append(blueprint) self._registered_bundles.append(processed_bundle)
[ "Initializes", "Journey", "extension" ]
rbw/flask-journey
python
https://github.com/rbw/flask-journey/blob/6181f59a7b5eef6a85b86ce6ed7d03c91f6bd285/flask_journey/journey.py#L38-L65
[ "def", "init_app", "(", "self", ",", "app", ")", ":", "if", "len", "(", "self", ".", "_attached_bundles", ")", "==", "0", ":", "raise", "NoBundlesAttached", "(", "\"At least one bundle must be attached before initializing Journey\"", ")", "for", "bundle", "in", "self", ".", "_attached_bundles", ":", "processed_bundle", "=", "{", "'path'", ":", "bundle", ".", "path", ",", "'description'", ":", "bundle", ".", "description", ",", "'blueprints'", ":", "[", "]", "}", "for", "(", "bp", ",", "description", ")", "in", "bundle", ".", "blueprints", ":", "# Register the BP", "blueprint", "=", "self", ".", "_register_blueprint", "(", "app", ",", "bp", ",", "bundle", ".", "path", ",", "self", ".", "get_bp_path", "(", "bp", ")", ",", "description", ")", "# Finally, attach the blueprints to its parent", "processed_bundle", "[", "'blueprints'", "]", ".", "append", "(", "blueprint", ")", "self", ".", "_registered_bundles", ".", "append", "(", "processed_bundle", ")" ]
6181f59a7b5eef6a85b86ce6ed7d03c91f6bd285
valid
Journey.routes_simple
Returns simple info about registered blueprints :return: Tuple containing endpoint, path and allowed methods for each route
flask_journey/journey.py
def routes_simple(self): """Returns simple info about registered blueprints :return: Tuple containing endpoint, path and allowed methods for each route """ routes = [] for bundle in self._registered_bundles: bundle_path = bundle['path'] for blueprint in bundle['blueprints']: bp_path = blueprint['path'] for child in blueprint['routes']: routes.append( ( child['endpoint'], bundle_path + bp_path + child['path'], child['methods'] ) ) return routes
def routes_simple(self): """Returns simple info about registered blueprints :return: Tuple containing endpoint, path and allowed methods for each route """ routes = [] for bundle in self._registered_bundles: bundle_path = bundle['path'] for blueprint in bundle['blueprints']: bp_path = blueprint['path'] for child in blueprint['routes']: routes.append( ( child['endpoint'], bundle_path + bp_path + child['path'], child['methods'] ) ) return routes
[ "Returns", "simple", "info", "about", "registered", "blueprints" ]
rbw/flask-journey
python
https://github.com/rbw/flask-journey/blob/6181f59a7b5eef6a85b86ce6ed7d03c91f6bd285/flask_journey/journey.py#L77-L98
[ "def", "routes_simple", "(", "self", ")", ":", "routes", "=", "[", "]", "for", "bundle", "in", "self", ".", "_registered_bundles", ":", "bundle_path", "=", "bundle", "[", "'path'", "]", "for", "blueprint", "in", "bundle", "[", "'blueprints'", "]", ":", "bp_path", "=", "blueprint", "[", "'path'", "]", "for", "child", "in", "blueprint", "[", "'routes'", "]", ":", "routes", ".", "append", "(", "(", "child", "[", "'endpoint'", "]", ",", "bundle_path", "+", "bp_path", "+", "child", "[", "'path'", "]", ",", "child", "[", "'methods'", "]", ")", ")", "return", "routes" ]
6181f59a7b5eef6a85b86ce6ed7d03c91f6bd285
valid
Journey._bundle_exists
Checks if a bundle exists at the provided path :param path: Bundle path :return: bool
flask_journey/journey.py
def _bundle_exists(self, path): """Checks if a bundle exists at the provided path :param path: Bundle path :return: bool """ for attached_bundle in self._attached_bundles: if path == attached_bundle.path: return True return False
def _bundle_exists(self, path): """Checks if a bundle exists at the provided path :param path: Bundle path :return: bool """ for attached_bundle in self._attached_bundles: if path == attached_bundle.path: return True return False
[ "Checks", "if", "a", "bundle", "exists", "at", "the", "provided", "path" ]
rbw/flask-journey
python
https://github.com/rbw/flask-journey/blob/6181f59a7b5eef6a85b86ce6ed7d03c91f6bd285/flask_journey/journey.py#L100-L111
[ "def", "_bundle_exists", "(", "self", ",", "path", ")", ":", "for", "attached_bundle", "in", "self", ".", "_attached_bundles", ":", "if", "path", "==", "attached_bundle", ".", "path", ":", "return", "True", "return", "False" ]
6181f59a7b5eef6a85b86ce6ed7d03c91f6bd285
valid
Journey.attach_bundle
Attaches a bundle object :param bundle: :class:`flask_journey.BlueprintBundle` object :raises: - IncompatibleBundle if the bundle is not of type `BlueprintBundle` - ConflictingPath if a bundle already exists at bundle.path - MissingBlueprints if the bundle doesn't contain any blueprints
flask_journey/journey.py
def attach_bundle(self, bundle): """Attaches a bundle object :param bundle: :class:`flask_journey.BlueprintBundle` object :raises: - IncompatibleBundle if the bundle is not of type `BlueprintBundle` - ConflictingPath if a bundle already exists at bundle.path - MissingBlueprints if the bundle doesn't contain any blueprints """ if not isinstance(bundle, BlueprintBundle): raise IncompatibleBundle('BlueprintBundle object passed to attach_bundle must be of type {0}' .format(BlueprintBundle)) elif len(bundle.blueprints) == 0: raise MissingBlueprints("Bundles must contain at least one flask.Blueprint") elif self._bundle_exists(bundle.path): raise ConflictingPath("Duplicate bundle path {0}".format(bundle.path)) elif self._journey_path == bundle.path == '/': raise ConflictingPath("Bundle path and Journey path cannot both be {0}".format(bundle.path)) self._attached_bundles.append(bundle)
def attach_bundle(self, bundle): """Attaches a bundle object :param bundle: :class:`flask_journey.BlueprintBundle` object :raises: - IncompatibleBundle if the bundle is not of type `BlueprintBundle` - ConflictingPath if a bundle already exists at bundle.path - MissingBlueprints if the bundle doesn't contain any blueprints """ if not isinstance(bundle, BlueprintBundle): raise IncompatibleBundle('BlueprintBundle object passed to attach_bundle must be of type {0}' .format(BlueprintBundle)) elif len(bundle.blueprints) == 0: raise MissingBlueprints("Bundles must contain at least one flask.Blueprint") elif self._bundle_exists(bundle.path): raise ConflictingPath("Duplicate bundle path {0}".format(bundle.path)) elif self._journey_path == bundle.path == '/': raise ConflictingPath("Bundle path and Journey path cannot both be {0}".format(bundle.path)) self._attached_bundles.append(bundle)
[ "Attaches", "a", "bundle", "object" ]
rbw/flask-journey
python
https://github.com/rbw/flask-journey/blob/6181f59a7b5eef6a85b86ce6ed7d03c91f6bd285/flask_journey/journey.py#L113-L133
[ "def", "attach_bundle", "(", "self", ",", "bundle", ")", ":", "if", "not", "isinstance", "(", "bundle", ",", "BlueprintBundle", ")", ":", "raise", "IncompatibleBundle", "(", "'BlueprintBundle object passed to attach_bundle must be of type {0}'", ".", "format", "(", "BlueprintBundle", ")", ")", "elif", "len", "(", "bundle", ".", "blueprints", ")", "==", "0", ":", "raise", "MissingBlueprints", "(", "\"Bundles must contain at least one flask.Blueprint\"", ")", "elif", "self", ".", "_bundle_exists", "(", "bundle", ".", "path", ")", ":", "raise", "ConflictingPath", "(", "\"Duplicate bundle path {0}\"", ".", "format", "(", "bundle", ".", "path", ")", ")", "elif", "self", ".", "_journey_path", "==", "bundle", ".", "path", "==", "'/'", ":", "raise", "ConflictingPath", "(", "\"Bundle path and Journey path cannot both be {0}\"", ".", "format", "(", "bundle", ".", "path", ")", ")", "self", ".", "_attached_bundles", ".", "append", "(", "bundle", ")" ]
6181f59a7b5eef6a85b86ce6ed7d03c91f6bd285
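A rough end-to-end sketch of how the BlueprintBundle and Journey records above fit together. Only attach_bp, attach_bundle and init_app are confirmed by the records; the top-level imports, the BlueprintBundle constructor arguments and the example paths are assumptions.

from flask import Flask, Blueprint
from flask_journey import Journey, BlueprintBundle   # assumed package-level exports

health = Blueprint('health', __name__)

@health.route('/ping')
def ping():
    return 'pong'

bundle = BlueprintBundle(path='/api/v1')          # constructor signature assumed
bundle.attach_bp(health, description='Health checks')

app = Flask(__name__)
journey = Journey()
journey.attach_bundle(bundle)
journey.init_app(app)
# journey.routes_simple then lists (endpoint, full path, methods) tuples per the record.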
valid
Journey._register_blueprint
Register and return info about the registered blueprint :param bp: :class:`flask.Blueprint` object :param bundle_path: the URL prefix of the bundle :param child_path: blueprint relative to the bundle path :return: Dict with info about the blueprint
flask_journey/journey.py
def _register_blueprint(self, app, bp, bundle_path, child_path, description): """Register and return info about the registered blueprint :param bp: :class:`flask.Blueprint` object :param bundle_path: the URL prefix of the bundle :param child_path: blueprint relative to the bundle path :return: Dict with info about the blueprint """ base_path = sanitize_path(self._journey_path + bundle_path + child_path) app.register_blueprint(bp, url_prefix=base_path) return { 'name': bp.name, 'path': child_path, 'import_name': bp.import_name, 'description': description, 'routes': self.get_blueprint_routes(app, base_path) }
def _register_blueprint(self, app, bp, bundle_path, child_path, description): """Register and return info about the registered blueprint :param bp: :class:`flask.Blueprint` object :param bundle_path: the URL prefix of the bundle :param child_path: blueprint relative to the bundle path :return: Dict with info about the blueprint """ base_path = sanitize_path(self._journey_path + bundle_path + child_path) app.register_blueprint(bp, url_prefix=base_path) return { 'name': bp.name, 'path': child_path, 'import_name': bp.import_name, 'description': description, 'routes': self.get_blueprint_routes(app, base_path) }
[ "Register", "and", "return", "info", "about", "the", "registered", "blueprint" ]
rbw/flask-journey
python
https://github.com/rbw/flask-journey/blob/6181f59a7b5eef6a85b86ce6ed7d03c91f6bd285/flask_journey/journey.py#L135-L154
[ "def", "_register_blueprint", "(", "self", ",", "app", ",", "bp", ",", "bundle_path", ",", "child_path", ",", "description", ")", ":", "base_path", "=", "sanitize_path", "(", "self", ".", "_journey_path", "+", "bundle_path", "+", "child_path", ")", "app", ".", "register_blueprint", "(", "bp", ",", "url_prefix", "=", "base_path", ")", "return", "{", "'name'", ":", "bp", ".", "name", ",", "'path'", ":", "child_path", ",", "'import_name'", ":", "bp", ".", "import_name", ",", "'description'", ":", "description", ",", "'routes'", ":", "self", ".", "get_blueprint_routes", "(", "app", ",", "base_path", ")", "}" ]
6181f59a7b5eef6a85b86ce6ed7d03c91f6bd285
valid
Journey.get_blueprint_routes
Returns detailed information about registered blueprint routes matching the `BlueprintBundle` path :param app: App instance to obtain rules from :param base_path: Base path to return detailed route info for :return: List of route detail dicts
flask_journey/journey.py
def get_blueprint_routes(app, base_path): """Returns detailed information about registered blueprint routes matching the `BlueprintBundle` path :param app: App instance to obtain rules from :param base_path: Base path to return detailed route info for :return: List of route detail dicts """ routes = [] for child in app.url_map.iter_rules(): if child.rule.startswith(base_path): relative_path = child.rule[len(base_path):] routes.append({ 'path': relative_path, 'endpoint': child.endpoint, 'methods': list(child.methods) }) return routes
def get_blueprint_routes(app, base_path): """Returns detailed information about registered blueprint routes matching the `BlueprintBundle` path :param app: App instance to obtain rules from :param base_path: Base path to return detailed route info for :return: List of route detail dicts """ routes = [] for child in app.url_map.iter_rules(): if child.rule.startswith(base_path): relative_path = child.rule[len(base_path):] routes.append({ 'path': relative_path, 'endpoint': child.endpoint, 'methods': list(child.methods) }) return routes
[ "Returns", "detailed", "information", "about", "registered", "blueprint", "routes", "matching", "the", "BlueprintBundle", "path" ]
rbw/flask-journey
python
https://github.com/rbw/flask-journey/blob/6181f59a7b5eef6a85b86ce6ed7d03c91f6bd285/flask_journey/journey.py#L167-L186
[ "def", "get_blueprint_routes", "(", "app", ",", "base_path", ")", ":", "routes", "=", "[", "]", "for", "child", "in", "app", ".", "url_map", ".", "iter_rules", "(", ")", ":", "if", "child", ".", "rule", ".", "startswith", "(", "base_path", ")", ":", "relative_path", "=", "child", ".", "rule", "[", "len", "(", "base_path", ")", ":", "]", "routes", ".", "append", "(", "{", "'path'", ":", "relative_path", ",", "'endpoint'", ":", "child", ".", "endpoint", ",", "'methods'", ":", "list", "(", "child", ".", "methods", ")", "}", ")", "return", "routes" ]
6181f59a7b5eef6a85b86ce6ed7d03c91f6bd285
valid
ParserBase.compute_precedence
Computes the precedence of each terminal and production. The precedence of a terminal is its level in the PRECEDENCE tuple. For a production, the precedence is that of its right-most terminal (if one exists). The default precedence is DEFAULT_PREC - (LEFT, 0). Returns: precedence - dict[terminal | production] = (assoc, level)
purplex/parse.py
def compute_precedence(terminals, productions, precedence_levels): """Computes the precedence of terminal and production. The precedence of a terminal is it's level in the PRECEDENCE tuple. For a production, the precedence is the right-most terminal (if it exists). The default precedence is DEFAULT_PREC - (LEFT, 0). Returns: precedence - dict[terminal | production] = (assoc, level) """ precedence = collections.OrderedDict() for terminal in terminals: precedence[terminal] = DEFAULT_PREC level_precs = range(len(precedence_levels), 0, -1) for i, level in zip(level_precs, precedence_levels): assoc = level[0] for symbol in level[1:]: precedence[symbol] = (assoc, i) for production, prec_symbol in productions: if prec_symbol is None: prod_terminals = [symbol for symbol in production.rhs if symbol in terminals] or [None] precedence[production] = precedence.get(prod_terminals[-1], DEFAULT_PREC) else: precedence[production] = precedence.get(prec_symbol, DEFAULT_PREC) return precedence
def compute_precedence(terminals, productions, precedence_levels): """Computes the precedence of terminal and production. The precedence of a terminal is it's level in the PRECEDENCE tuple. For a production, the precedence is the right-most terminal (if it exists). The default precedence is DEFAULT_PREC - (LEFT, 0). Returns: precedence - dict[terminal | production] = (assoc, level) """ precedence = collections.OrderedDict() for terminal in terminals: precedence[terminal] = DEFAULT_PREC level_precs = range(len(precedence_levels), 0, -1) for i, level in zip(level_precs, precedence_levels): assoc = level[0] for symbol in level[1:]: precedence[symbol] = (assoc, i) for production, prec_symbol in productions: if prec_symbol is None: prod_terminals = [symbol for symbol in production.rhs if symbol in terminals] or [None] precedence[production] = precedence.get(prod_terminals[-1], DEFAULT_PREC) else: precedence[production] = precedence.get(prec_symbol, DEFAULT_PREC) return precedence
[ "Computes", "the", "precedence", "of", "terminal", "and", "production", "." ]
mtomwing/purplex
python
https://github.com/mtomwing/purplex/blob/4072109e1d4395826983cd9d95ead2c1dfc1184e/purplex/parse.py#L85-L117
[ "def", "compute_precedence", "(", "terminals", ",", "productions", ",", "precedence_levels", ")", ":", "precedence", "=", "collections", ".", "OrderedDict", "(", ")", "for", "terminal", "in", "terminals", ":", "precedence", "[", "terminal", "]", "=", "DEFAULT_PREC", "level_precs", "=", "range", "(", "len", "(", "precedence_levels", ")", ",", "0", ",", "-", "1", ")", "for", "i", ",", "level", "in", "zip", "(", "level_precs", ",", "precedence_levels", ")", ":", "assoc", "=", "level", "[", "0", "]", "for", "symbol", "in", "level", "[", "1", ":", "]", ":", "precedence", "[", "symbol", "]", "=", "(", "assoc", ",", "i", ")", "for", "production", ",", "prec_symbol", "in", "productions", ":", "if", "prec_symbol", "is", "None", ":", "prod_terminals", "=", "[", "symbol", "for", "symbol", "in", "production", ".", "rhs", "if", "symbol", "in", "terminals", "]", "or", "[", "None", "]", "precedence", "[", "production", "]", "=", "precedence", ".", "get", "(", "prod_terminals", "[", "-", "1", "]", ",", "DEFAULT_PREC", ")", "else", ":", "precedence", "[", "production", "]", "=", "precedence", ".", "get", "(", "prec_symbol", ",", "DEFAULT_PREC", ")", "return", "precedence" ]
4072109e1d4395826983cd9d95ead2c1dfc1184e
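A small standalone illustration of how compute_precedence turns a PRECEDENCE-style tuple into (associativity, level) pairs; the token names are invented, and note that with this numbering the first row ends up with the largest numeric level.

import collections

LEFT, RIGHT = 'left', 'right'

precedence_levels = (
    (LEFT, 'PLUS', 'MINUS'),
    (LEFT, 'TIMES', 'DIVIDE'),
    (RIGHT, 'POWER'),
)

precedence = collections.OrderedDict()
# Same loop as the record: levels are numbered len(levels) .. 1 from top to bottom.
level_precs = range(len(precedence_levels), 0, -1)
for i, level in zip(level_precs, precedence_levels):
    assoc = level[0]
    for symbol in level[1:]:
        precedence[symbol] = (assoc, i)

print(dict(precedence))
# {'PLUS': ('left', 3), 'MINUS': ('left', 3), 'TIMES': ('left', 2),
#  'DIVIDE': ('left', 2), 'POWER': ('right', 1)}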
valid
ParserBase.make_tables
Generates the ACTION and GOTO tables for the grammar. Returns: action - dict[state][lookahead] = (action, ...) goto - dict[state][just_reduced] = new_state
purplex/parse.py
def make_tables(grammar, precedence): """Generates the ACTION and GOTO tables for the grammar. Returns: action - dict[state][lookahead] = (action, ...) goto - dict[state][just_reduced] = new_state """ ACTION = {} GOTO = {} labels = {} def get_label(closure): if closure not in labels: labels[closure] = len(labels) return labels[closure] def resolve_shift_reduce(lookahead, s_action, r_action): s_assoc, s_level = precedence[lookahead] r_assoc, r_level = precedence[r_action[1]] if s_level < r_level: return r_action elif s_level == r_level and r_assoc == LEFT: return r_action else: return s_action initial, closures, goto = grammar.closures() for closure in closures: label = get_label(closure) for rule in closure: new_action, lookahead = None, rule.lookahead if not rule.at_end: symbol = rule.rhs[rule.pos] is_terminal = symbol in grammar.terminals has_goto = symbol in goto[closure] if is_terminal and has_goto: next_state = get_label(goto[closure][symbol]) new_action, lookahead = ('shift', next_state), symbol elif rule.production == grammar.start and rule.at_end: new_action = ('accept',) elif rule.at_end: new_action = ('reduce', rule.production) if new_action is None: continue prev_action = ACTION.get((label, lookahead)) if prev_action is None or prev_action == new_action: ACTION[label, lookahead] = new_action else: types = (prev_action[0], new_action[0]) if types == ('shift', 'reduce'): chosen = resolve_shift_reduce(lookahead, prev_action, new_action) elif types == ('reduce', 'shift'): chosen = resolve_shift_reduce(lookahead, new_action, prev_action) else: raise TableConflictError(prev_action, new_action) ACTION[label, lookahead] = chosen for symbol in grammar.nonterminals: if symbol in goto[closure]: GOTO[label, symbol] = get_label(goto[closure][symbol]) return get_label(initial), ACTION, GOTO
def make_tables(grammar, precedence): """Generates the ACTION and GOTO tables for the grammar. Returns: action - dict[state][lookahead] = (action, ...) goto - dict[state][just_reduced] = new_state """ ACTION = {} GOTO = {} labels = {} def get_label(closure): if closure not in labels: labels[closure] = len(labels) return labels[closure] def resolve_shift_reduce(lookahead, s_action, r_action): s_assoc, s_level = precedence[lookahead] r_assoc, r_level = precedence[r_action[1]] if s_level < r_level: return r_action elif s_level == r_level and r_assoc == LEFT: return r_action else: return s_action initial, closures, goto = grammar.closures() for closure in closures: label = get_label(closure) for rule in closure: new_action, lookahead = None, rule.lookahead if not rule.at_end: symbol = rule.rhs[rule.pos] is_terminal = symbol in grammar.terminals has_goto = symbol in goto[closure] if is_terminal and has_goto: next_state = get_label(goto[closure][symbol]) new_action, lookahead = ('shift', next_state), symbol elif rule.production == grammar.start and rule.at_end: new_action = ('accept',) elif rule.at_end: new_action = ('reduce', rule.production) if new_action is None: continue prev_action = ACTION.get((label, lookahead)) if prev_action is None or prev_action == new_action: ACTION[label, lookahead] = new_action else: types = (prev_action[0], new_action[0]) if types == ('shift', 'reduce'): chosen = resolve_shift_reduce(lookahead, prev_action, new_action) elif types == ('reduce', 'shift'): chosen = resolve_shift_reduce(lookahead, new_action, prev_action) else: raise TableConflictError(prev_action, new_action) ACTION[label, lookahead] = chosen for symbol in grammar.nonterminals: if symbol in goto[closure]: GOTO[label, symbol] = get_label(goto[closure][symbol]) return get_label(initial), ACTION, GOTO
[ "Generates", "the", "ACTION", "and", "GOTO", "tables", "for", "the", "grammar", "." ]
mtomwing/purplex
python
https://github.com/mtomwing/purplex/blob/4072109e1d4395826983cd9d95ead2c1dfc1184e/purplex/parse.py#L120-L193
[ "def", "make_tables", "(", "grammar", ",", "precedence", ")", ":", "ACTION", "=", "{", "}", "GOTO", "=", "{", "}", "labels", "=", "{", "}", "def", "get_label", "(", "closure", ")", ":", "if", "closure", "not", "in", "labels", ":", "labels", "[", "closure", "]", "=", "len", "(", "labels", ")", "return", "labels", "[", "closure", "]", "def", "resolve_shift_reduce", "(", "lookahead", ",", "s_action", ",", "r_action", ")", ":", "s_assoc", ",", "s_level", "=", "precedence", "[", "lookahead", "]", "r_assoc", ",", "r_level", "=", "precedence", "[", "r_action", "[", "1", "]", "]", "if", "s_level", "<", "r_level", ":", "return", "r_action", "elif", "s_level", "==", "r_level", "and", "r_assoc", "==", "LEFT", ":", "return", "r_action", "else", ":", "return", "s_action", "initial", ",", "closures", ",", "goto", "=", "grammar", ".", "closures", "(", ")", "for", "closure", "in", "closures", ":", "label", "=", "get_label", "(", "closure", ")", "for", "rule", "in", "closure", ":", "new_action", ",", "lookahead", "=", "None", ",", "rule", ".", "lookahead", "if", "not", "rule", ".", "at_end", ":", "symbol", "=", "rule", ".", "rhs", "[", "rule", ".", "pos", "]", "is_terminal", "=", "symbol", "in", "grammar", ".", "terminals", "has_goto", "=", "symbol", "in", "goto", "[", "closure", "]", "if", "is_terminal", "and", "has_goto", ":", "next_state", "=", "get_label", "(", "goto", "[", "closure", "]", "[", "symbol", "]", ")", "new_action", ",", "lookahead", "=", "(", "'shift'", ",", "next_state", ")", ",", "symbol", "elif", "rule", ".", "production", "==", "grammar", ".", "start", "and", "rule", ".", "at_end", ":", "new_action", "=", "(", "'accept'", ",", ")", "elif", "rule", ".", "at_end", ":", "new_action", "=", "(", "'reduce'", ",", "rule", ".", "production", ")", "if", "new_action", "is", "None", ":", "continue", "prev_action", "=", "ACTION", ".", "get", "(", "(", "label", ",", "lookahead", ")", ")", "if", "prev_action", "is", "None", "or", "prev_action", "==", "new_action", ":", "ACTION", "[", "label", ",", "lookahead", "]", "=", "new_action", "else", ":", "types", "=", "(", "prev_action", "[", "0", "]", ",", "new_action", "[", "0", "]", ")", "if", "types", "==", "(", "'shift'", ",", "'reduce'", ")", ":", "chosen", "=", "resolve_shift_reduce", "(", "lookahead", ",", "prev_action", ",", "new_action", ")", "elif", "types", "==", "(", "'reduce'", ",", "'shift'", ")", ":", "chosen", "=", "resolve_shift_reduce", "(", "lookahead", ",", "new_action", ",", "prev_action", ")", "else", ":", "raise", "TableConflictError", "(", "prev_action", ",", "new_action", ")", "ACTION", "[", "label", ",", "lookahead", "]", "=", "chosen", "for", "symbol", "in", "grammar", ".", "nonterminals", ":", "if", "symbol", "in", "goto", "[", "closure", "]", ":", "GOTO", "[", "label", ",", "symbol", "]", "=", "get_label", "(", "goto", "[", "closure", "]", "[", "symbol", "]", ")", "return", "get_label", "(", "initial", ")", ",", "ACTION", ",", "GOTO" ]
4072109e1d4395826983cd9d95ead2c1dfc1184e
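The shift/reduce resolution above prefers the reduce when the rule being reduced has higher precedence than the lookahead token, or equal precedence with left associativity. A minimal standalone sketch of that rule follows; keying the reduce side of the precedence table by an operator symbol (rather than by purplex's production object) is an assumption made for illustration, not the library's API.

LEFT, RIGHT = 'left', 'right'

def resolve(lookahead, shift_action, reduce_action, precedence):
    # Decide a shift/reduce conflict from (associativity, level) pairs.
    s_assoc, s_level = precedence[lookahead]
    r_assoc, r_level = precedence[reduce_action[1]]
    if s_level < r_level:
        return reduce_action                      # reduced rule binds tighter
    if s_level == r_level and r_assoc == LEFT:
        return reduce_action                      # same level, left-assoc: reduce
    return shift_action                           # otherwise keep shifting

precedence = {'+': (LEFT, 1), '*': (LEFT, 2)}
print(resolve('*', ('shift', 7), ('reduce', '+'), precedence))   # ('shift', 7)
print(resolve('+', ('shift', 7), ('reduce', '+'), precedence))   # ('reduce', '+')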
valid
KB_AgentProgram
A generic logical knowledge-based agent program. [Fig. 7.1]
aima/logic.py
def KB_AgentProgram(KB): """A generic logical knowledge-based agent program. [Fig. 7.1]""" steps = itertools.count() def program(percept): t = steps.next() KB.tell(make_percept_sentence(percept, t)) action = KB.ask(make_action_query(t)) KB.tell(make_action_sentence(action, t)) return action def make_percept_sentence(self, percept, t): return Expr("Percept")(percept, t) def make_action_query(self, t): return expr("ShouldDo(action, %d)" % t) def make_action_sentence(self, action, t): return Expr("Did")(action[expr('action')], t) return program
def KB_AgentProgram(KB): """A generic logical knowledge-based agent program. [Fig. 7.1]""" steps = itertools.count() def program(percept): t = steps.next() KB.tell(make_percept_sentence(percept, t)) action = KB.ask(make_action_query(t)) KB.tell(make_action_sentence(action, t)) return action def make_percept_sentence(self, percept, t): return Expr("Percept")(percept, t) def make_action_query(self, t): return expr("ShouldDo(action, %d)" % t) def make_action_sentence(self, action, t): return Expr("Did")(action[expr('action')], t) return program
[ "A", "generic", "logical", "knowledge", "-", "based", "agent", "program", ".", "[", "Fig", ".", "7", ".", "1", "]" ]
hobson/aima
python
https://github.com/hobson/aima/blob/3572b2fb92039b4a1abe384be8545560fbd3d470/aima/logic.py#L92-L112
[ "def", "KB_AgentProgram", "(", "KB", ")", ":", "steps", "=", "itertools", ".", "count", "(", ")", "def", "program", "(", "percept", ")", ":", "t", "=", "steps", ".", "next", "(", ")", "KB", ".", "tell", "(", "make_percept_sentence", "(", "percept", ",", "t", ")", ")", "action", "=", "KB", ".", "ask", "(", "make_action_query", "(", "t", ")", ")", "KB", ".", "tell", "(", "make_action_sentence", "(", "action", ",", "t", ")", ")", "return", "action", "def", "make_percept_sentence", "(", "self", ",", "percept", ",", "t", ")", ":", "return", "Expr", "(", "\"Percept\"", ")", "(", "percept", ",", "t", ")", "def", "make_action_query", "(", "self", ",", "t", ")", ":", "return", "expr", "(", "\"ShouldDo(action, %d)\"", "%", "t", ")", "def", "make_action_sentence", "(", "self", ",", "action", ",", "t", ")", ":", "return", "Expr", "(", "\"Did\"", ")", "(", "action", "[", "expr", "(", "'action'", ")", "]", ",", "t", ")", "return", "program" ]
3572b2fb92039b4a1abe384be8545560fbd3d470
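Note that the agent program above is Python 2 (steps.next()), and its nested helpers declare a self parameter even though they are plain closures called with two arguments. Below is a self-contained Python 3 sketch of the same tell/ask/tell loop; the EchoKB stub and the tuple-based sentences are assumptions standing in for the repo's KB and Expr classes.

import itertools

class EchoKB:
    # Stub knowledge base: remembers what it is told, answers a fixed action.
    def __init__(self):
        self.sentences = []
    def tell(self, sentence):
        self.sentences.append(sentence)
    def ask(self, query):
        return 'Forward'                          # a real KB would run inference here

def kb_agent_program(kb):
    steps = itertools.count()
    def program(percept):
        t = next(steps)                           # Python 3 spelling of steps.next()
        kb.tell(('Percept', percept, t))          # record the percept
        action = kb.ask(('ShouldDo', 'action', t))
        kb.tell(('Did', action, t))               # record the chosen action
        return action
    return program

agent = kb_agent_program(EchoKB())
print(agent('breeze'))                            # Forward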
valid
expr
Create an Expr representing a logic expression by parsing the input string. Symbols and numbers are automatically converted to Exprs. In addition you can use alternative spellings of these operators: 'x ==> y' parses as (x >> y) # Implication 'x <== y' parses as (x << y) # Reverse implication 'x <=> y' parses as (x % y) # Logical equivalence 'x =/= y' parses as (x ^ y) # Logical disequality (xor) But BE CAREFUL; precedence of implication is wrong. expr('P & Q ==> R & S') is ((P & (Q >> R)) & S); so you must use expr('(P & Q) ==> (R & S)'). >>> expr('P <=> Q(1)') (P <=> Q(1)) >>> expr('P & Q | ~R(x, F(x))') ((P & Q) | ~R(x, F(x)))
aima/logic.py
def expr(s): """Create an Expr representing a logic expression by parsing the input string. Symbols and numbers are automatically converted to Exprs. In addition you can use alternative spellings of these operators: 'x ==> y' parses as (x >> y) # Implication 'x <== y' parses as (x << y) # Reverse implication 'x <=> y' parses as (x % y) # Logical equivalence 'x =/= y' parses as (x ^ y) # Logical disequality (xor) But BE CAREFUL; precedence of implication is wrong. expr('P & Q ==> R & S') is ((P & (Q >> R)) & S); so you must use expr('(P & Q) ==> (R & S)'). >>> expr('P <=> Q(1)') (P <=> Q(1)) >>> expr('P & Q | ~R(x, F(x))') ((P & Q) | ~R(x, F(x))) """ if isinstance(s, Expr): return s if isnumber(s): return Expr(s) ## Replace the alternative spellings of operators with canonical spellings s = s.replace('==>', '>>').replace('<==', '<<') s = s.replace('<=>', '%').replace('=/=', '^') ## Replace a symbol or number, such as 'P' with 'Expr("P")' s = re.sub(r'([a-zA-Z0-9_.]+)', r'Expr("\1")', s) ## Now eval the string. (A security hole; do not use with an adversary.) return eval(s, {'Expr':Expr})
def expr(s): """Create an Expr representing a logic expression by parsing the input string. Symbols and numbers are automatically converted to Exprs. In addition you can use alternative spellings of these operators: 'x ==> y' parses as (x >> y) # Implication 'x <== y' parses as (x << y) # Reverse implication 'x <=> y' parses as (x % y) # Logical equivalence 'x =/= y' parses as (x ^ y) # Logical disequality (xor) But BE CAREFUL; precedence of implication is wrong. expr('P & Q ==> R & S') is ((P & (Q >> R)) & S); so you must use expr('(P & Q) ==> (R & S)'). >>> expr('P <=> Q(1)') (P <=> Q(1)) >>> expr('P & Q | ~R(x, F(x))') ((P & Q) | ~R(x, F(x))) """ if isinstance(s, Expr): return s if isnumber(s): return Expr(s) ## Replace the alternative spellings of operators with canonical spellings s = s.replace('==>', '>>').replace('<==', '<<') s = s.replace('<=>', '%').replace('=/=', '^') ## Replace a symbol or number, such as 'P' with 'Expr("P")' s = re.sub(r'([a-zA-Z0-9_.]+)', r'Expr("\1")', s) ## Now eval the string. (A security hole; do not use with an adversary.) return eval(s, {'Expr':Expr})
[ "Create", "an", "Expr", "representing", "a", "logic", "expression", "by", "parsing", "the", "input", "string", ".", "Symbols", "and", "numbers", "are", "automatically", "converted", "to", "Exprs", ".", "In", "addition", "you", "can", "use", "alternative", "spellings", "of", "these", "operators", ":", "x", "==", ">", "y", "parses", "as", "(", "x", ">>", "y", ")", "#", "Implication", "x", "<", "==", "y", "parses", "as", "(", "x", "<<", "y", ")", "#", "Reverse", "implication", "x", "<", "=", ">", "y", "parses", "as", "(", "x", "%", "y", ")", "#", "Logical", "equivalence", "x", "=", "/", "=", "y", "parses", "as", "(", "x", "^", "y", ")", "#", "Logical", "disequality", "(", "xor", ")", "But", "BE", "CAREFUL", ";", "precedence", "of", "implication", "is", "wrong", ".", "expr", "(", "P", "&", "Q", "==", ">", "R", "&", "S", ")", "is", "((", "P", "&", "(", "Q", ">>", "R", "))", "&", "S", ")", ";", "so", "you", "must", "use", "expr", "(", "(", "P", "&", "Q", ")", "==", ">", "(", "R", "&", "S", ")", ")", ".", ">>>", "expr", "(", "P", "<", "=", ">", "Q", "(", "1", ")", ")", "(", "P", "<", "=", ">", "Q", "(", "1", "))", ">>>", "expr", "(", "P", "&", "Q", "|", "~R", "(", "x", "F", "(", "x", "))", ")", "((", "P", "&", "Q", ")", "|", "~R", "(", "x", "F", "(", "x", ")))" ]
hobson/aima
python
https://github.com/hobson/aima/blob/3572b2fb92039b4a1abe384be8545560fbd3d470/aima/logic.py#L220-L243
[ "def", "expr", "(", "s", ")", ":", "if", "isinstance", "(", "s", ",", "Expr", ")", ":", "return", "s", "if", "isnumber", "(", "s", ")", ":", "return", "Expr", "(", "s", ")", "## Replace the alternative spellings of operators with canonical spellings", "s", "=", "s", ".", "replace", "(", "'==>'", ",", "'>>'", ")", ".", "replace", "(", "'<=='", ",", "'<<'", ")", "s", "=", "s", ".", "replace", "(", "'<=>'", ",", "'%'", ")", ".", "replace", "(", "'=/='", ",", "'^'", ")", "## Replace a symbol or number, such as 'P' with 'Expr(\"P\")'", "s", "=", "re", ".", "sub", "(", "r'([a-zA-Z0-9_.]+)'", ",", "r'Expr(\"\\1\")'", ",", "s", ")", "## Now eval the string. (A security hole; do not use with an adversary.)", "return", "eval", "(", "s", ",", "{", "'Expr'", ":", "Expr", "}", ")" ]
3572b2fb92039b4a1abe384be8545560fbd3d470
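The operator-respelling step in expr is plain string substitution followed by wrapping identifiers in Expr(...) and handing the result to eval. A small sketch of just the rewriting stage, runnable on its own, shows the string that expr() would evaluate without actually calling eval:

import re

def rewrite_logic(s):
    # Mirror the substitutions from expr(): respell operators, wrap symbols.
    s = s.replace('==>', '>>').replace('<==', '<<')
    s = s.replace('<=>', '%').replace('=/=', '^')
    return re.sub(r'([a-zA-Z0-9_.]+)', r'Expr("\1")', s)

print(rewrite_logic('(P & Q) ==> R'))
# (Expr("P") & Expr("Q")) >> Expr("R")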
valid
variables
Return a set of the variables in expression s. >>> ppset(variables(F(x, A, y))) set([x, y]) >>> ppset(variables(F(G(x), z))) set([x, z]) >>> ppset(variables(expr('F(x, x) & G(x, y) & H(y, z) & R(A, z, z)'))) set([x, y, z])
aima/logic.py
def variables(s): """Return a set of the variables in expression s. >>> ppset(variables(F(x, A, y))) set([x, y]) >>> ppset(variables(F(G(x), z))) set([x, z]) >>> ppset(variables(expr('F(x, x) & G(x, y) & H(y, z) & R(A, z, z)'))) set([x, y, z]) """ result = set([]) def walk(s): if is_variable(s): result.add(s) else: for arg in s.args: walk(arg) walk(s) return result
def variables(s): """Return a set of the variables in expression s. >>> ppset(variables(F(x, A, y))) set([x, y]) >>> ppset(variables(F(G(x), z))) set([x, z]) >>> ppset(variables(expr('F(x, x) & G(x, y) & H(y, z) & R(A, z, z)'))) set([x, y, z]) """ result = set([]) def walk(s): if is_variable(s): result.add(s) else: for arg in s.args: walk(arg) walk(s) return result
[ "Return", "a", "set", "of", "the", "variables", "in", "expression", "s", ".", ">>>", "ppset", "(", "variables", "(", "F", "(", "x", "A", "y", ")))", "set", "(", "[", "x", "y", "]", ")", ">>>", "ppset", "(", "variables", "(", "F", "(", "G", "(", "x", ")", "z", ")))", "set", "(", "[", "x", "z", "]", ")", ">>>", "ppset", "(", "variables", "(", "expr", "(", "F", "(", "x", "x", ")", "&", "G", "(", "x", "y", ")", "&", "H", "(", "y", "z", ")", "&", "R", "(", "A", "z", "z", ")", ")))", "set", "(", "[", "x", "y", "z", "]", ")" ]
hobson/aima
python
https://github.com/hobson/aima/blob/3572b2fb92039b4a1abe384be8545560fbd3d470/aima/logic.py#L258-L275
[ "def", "variables", "(", "s", ")", ":", "result", "=", "set", "(", "[", "]", ")", "def", "walk", "(", "s", ")", ":", "if", "is_variable", "(", "s", ")", ":", "result", ".", "add", "(", "s", ")", "else", ":", "for", "arg", "in", "s", ".", "args", ":", "walk", "(", "arg", ")", "walk", "(", "s", ")", "return", "result" ]
3572b2fb92039b4a1abe384be8545560fbd3d470
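The same recursive walk works over any nested term encoding. A standalone sketch that collects variables from tuples of the form ('Op', arg, ...), using the convention that lowercase-initial strings are variables; both the tuple encoding and that convention are assumptions for the sketch.

def collect_variables(term):
    found = set()
    def walk(t):
        if isinstance(t, str):
            if t[:1].islower():          # lowercase-initial strings act as variables
                found.add(t)
        else:
            _op, *args = t
            for arg in args:
                walk(arg)
    walk(term)
    return found

print(collect_variables(('F', 'x', ('G', 'A', 'y'))))   # {'x', 'y'} (set order may vary)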
valid
is_definite_clause
returns True for exprs s of the form A & B & ... & C ==> D, where all literals are positive. In clause form, this is ~A | ~B | ... | ~C | D, where exactly one clause is positive. >>> is_definite_clause(expr('Farmer(Mac)')) True >>> is_definite_clause(expr('~Farmer(Mac)')) False >>> is_definite_clause(expr('(Farmer(f) & Rabbit(r)) ==> Hates(f, r)')) True >>> is_definite_clause(expr('(Farmer(f) & ~Rabbit(r)) ==> Hates(f, r)')) False >>> is_definite_clause(expr('(Farmer(f) | Rabbit(r)) ==> Hates(f, r)')) False
aima/logic.py
def is_definite_clause(s): """returns True for exprs s of the form A & B & ... & C ==> D, where all literals are positive. In clause form, this is ~A | ~B | ... | ~C | D, where exactly one clause is positive. >>> is_definite_clause(expr('Farmer(Mac)')) True >>> is_definite_clause(expr('~Farmer(Mac)')) False >>> is_definite_clause(expr('(Farmer(f) & Rabbit(r)) ==> Hates(f, r)')) True >>> is_definite_clause(expr('(Farmer(f) & ~Rabbit(r)) ==> Hates(f, r)')) False >>> is_definite_clause(expr('(Farmer(f) | Rabbit(r)) ==> Hates(f, r)')) False """ if is_symbol(s.op): return True elif s.op == '>>': antecedent, consequent = s.args return (is_symbol(consequent.op) and every(lambda arg: is_symbol(arg.op), conjuncts(antecedent))) else: return False
def is_definite_clause(s): """returns True for exprs s of the form A & B & ... & C ==> D, where all literals are positive. In clause form, this is ~A | ~B | ... | ~C | D, where exactly one clause is positive. >>> is_definite_clause(expr('Farmer(Mac)')) True >>> is_definite_clause(expr('~Farmer(Mac)')) False >>> is_definite_clause(expr('(Farmer(f) & Rabbit(r)) ==> Hates(f, r)')) True >>> is_definite_clause(expr('(Farmer(f) & ~Rabbit(r)) ==> Hates(f, r)')) False >>> is_definite_clause(expr('(Farmer(f) | Rabbit(r)) ==> Hates(f, r)')) False """ if is_symbol(s.op): return True elif s.op == '>>': antecedent, consequent = s.args return (is_symbol(consequent.op) and every(lambda arg: is_symbol(arg.op), conjuncts(antecedent))) else: return False
[ "returns", "True", "for", "exprs", "s", "of", "the", "form", "A", "&", "B", "&", "...", "&", "C", "==", ">", "D", "where", "all", "literals", "are", "positive", ".", "In", "clause", "form", "this", "is", "~A", "|", "~B", "|", "...", "|", "~C", "|", "D", "where", "exactly", "one", "clause", "is", "positive", ".", ">>>", "is_definite_clause", "(", "expr", "(", "Farmer", "(", "Mac", ")", "))", "True", ">>>", "is_definite_clause", "(", "expr", "(", "~Farmer", "(", "Mac", ")", "))", "False", ">>>", "is_definite_clause", "(", "expr", "(", "(", "Farmer", "(", "f", ")", "&", "Rabbit", "(", "r", "))", "==", ">", "Hates", "(", "f", "r", ")", "))", "True", ">>>", "is_definite_clause", "(", "expr", "(", "(", "Farmer", "(", "f", ")", "&", "~Rabbit", "(", "r", "))", "==", ">", "Hates", "(", "f", "r", ")", "))", "False", ">>>", "is_definite_clause", "(", "expr", "(", "(", "Farmer", "(", "f", ")", "|", "Rabbit", "(", "r", "))", "==", ">", "Hates", "(", "f", "r", ")", "))", "False" ]
hobson/aima
python
https://github.com/hobson/aima/blob/3572b2fb92039b4a1abe384be8545560fbd3d470/aima/logic.py#L277-L299
[ "def", "is_definite_clause", "(", "s", ")", ":", "if", "is_symbol", "(", "s", ".", "op", ")", ":", "return", "True", "elif", "s", ".", "op", "==", "'>>'", ":", "antecedent", ",", "consequent", "=", "s", ".", "args", "return", "(", "is_symbol", "(", "consequent", ".", "op", ")", "and", "every", "(", "lambda", "arg", ":", "is_symbol", "(", "arg", ".", "op", ")", ",", "conjuncts", "(", "antecedent", ")", ")", ")", "else", ":", "return", "False" ]
3572b2fb92039b4a1abe384be8545560fbd3d470
valid
parse_definite_clause
Return the antecedents and the consequent of a definite clause.
aima/logic.py
def parse_definite_clause(s): "Return the antecedents and the consequent of a definite clause." assert is_definite_clause(s) if is_symbol(s.op): return [], s else: antecedent, consequent = s.args return conjuncts(antecedent), consequent
def parse_definite_clause(s): "Return the antecedents and the consequent of a definite clause." assert is_definite_clause(s) if is_symbol(s.op): return [], s else: antecedent, consequent = s.args return conjuncts(antecedent), consequent
[ "Return", "the", "antecedents", "and", "the", "consequent", "of", "a", "definite", "clause", "." ]
hobson/aima
python
https://github.com/hobson/aima/blob/3572b2fb92039b4a1abe384be8545560fbd3d470/aima/logic.py#L301-L308
[ "def", "parse_definite_clause", "(", "s", ")", ":", "assert", "is_definite_clause", "(", "s", ")", "if", "is_symbol", "(", "s", ".", "op", ")", ":", "return", "[", "]", ",", "s", "else", ":", "antecedent", ",", "consequent", "=", "s", ".", "args", "return", "conjuncts", "(", "antecedent", ")", ",", "consequent" ]
3572b2fb92039b4a1abe384be8545560fbd3d470
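Both the definite-clause test and the split above hinge on separating an implication into its conjuncts and its consequent. A standalone sketch over an assumed tuple encoding, with atoms as strings, an implication as ('==>', lhs, rhs) and a conjunction as ('&', p1, p2, ...):

def parse_definite(clause):
    # Return (premises, conclusion) for a bare atom or an implication.
    if isinstance(clause, str):
        return [], clause
    op, antecedent, consequent = clause
    assert op == '==>'
    if isinstance(antecedent, tuple) and antecedent[0] == '&':
        return list(antecedent[1:]), consequent
    return [antecedent], consequent

print(parse_definite(('==>', ('&', 'Farmer(f)', 'Rabbit(r)'), 'Hates(f, r)')))
# (['Farmer(f)', 'Rabbit(r)'], 'Hates(f, r)')
print(parse_definite('Farmer(Mac)'))
# ([], 'Farmer(Mac)')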
valid
tt_entails
Does kb entail the sentence alpha? Use truth tables. For propositional kb's and sentences. [Fig. 7.10] >>> tt_entails(expr('P & Q'), expr('Q')) True
aima/logic.py
def tt_entails(kb, alpha): """Does kb entail the sentence alpha? Use truth tables. For propositional kb's and sentences. [Fig. 7.10] >>> tt_entails(expr('P & Q'), expr('Q')) True """ assert not variables(alpha) return tt_check_all(kb, alpha, prop_symbols(kb & alpha), {})
def tt_entails(kb, alpha): """Does kb entail the sentence alpha? Use truth tables. For propositional kb's and sentences. [Fig. 7.10] >>> tt_entails(expr('P & Q'), expr('Q')) True """ assert not variables(alpha) return tt_check_all(kb, alpha, prop_symbols(kb & alpha), {})
[ "Does", "kb", "entail", "the", "sentence", "alpha?", "Use", "truth", "tables", ".", "For", "propositional", "kb", "s", "and", "sentences", ".", "[", "Fig", ".", "7", ".", "10", "]", ">>>", "tt_entails", "(", "expr", "(", "P", "&", "Q", ")", "expr", "(", "Q", "))", "True" ]
hobson/aima
python
https://github.com/hobson/aima/blob/3572b2fb92039b4a1abe384be8545560fbd3d470/aima/logic.py#L316-L323
[ "def", "tt_entails", "(", "kb", ",", "alpha", ")", ":", "assert", "not", "variables", "(", "alpha", ")", "return", "tt_check_all", "(", "kb", ",", "alpha", ",", "prop_symbols", "(", "kb", "&", "alpha", ")", ",", "{", "}", ")" ]
3572b2fb92039b4a1abe384be8545560fbd3d470
valid
tt_check_all
Auxiliary routine to implement tt_entails.
aima/logic.py
def tt_check_all(kb, alpha, symbols, model): "Auxiliary routine to implement tt_entails." if not symbols: if pl_true(kb, model): result = pl_true(alpha, model) assert result in (True, False) return result else: return True else: P, rest = symbols[0], symbols[1:] return (tt_check_all(kb, alpha, rest, extend(model, P, True)) and tt_check_all(kb, alpha, rest, extend(model, P, False)))
def tt_check_all(kb, alpha, symbols, model): "Auxiliary routine to implement tt_entails." if not symbols: if pl_true(kb, model): result = pl_true(alpha, model) assert result in (True, False) return result else: return True else: P, rest = symbols[0], symbols[1:] return (tt_check_all(kb, alpha, rest, extend(model, P, True)) and tt_check_all(kb, alpha, rest, extend(model, P, False)))
[ "Auxiliary", "routine", "to", "implement", "tt_entails", "." ]
hobson/aima
python
https://github.com/hobson/aima/blob/3572b2fb92039b4a1abe384be8545560fbd3d470/aima/logic.py#L325-L337
[ "def", "tt_check_all", "(", "kb", ",", "alpha", ",", "symbols", ",", "model", ")", ":", "if", "not", "symbols", ":", "if", "pl_true", "(", "kb", ",", "model", ")", ":", "result", "=", "pl_true", "(", "alpha", ",", "model", ")", "assert", "result", "in", "(", "True", ",", "False", ")", "return", "result", "else", ":", "return", "True", "else", ":", "P", ",", "rest", "=", "symbols", "[", "0", "]", ",", "symbols", "[", "1", ":", "]", "return", "(", "tt_check_all", "(", "kb", ",", "alpha", ",", "rest", ",", "extend", "(", "model", ",", "P", ",", "True", ")", ")", "and", "tt_check_all", "(", "kb", ",", "alpha", ",", "rest", ",", "extend", "(", "model", ",", "P", ",", "False", ")", ")", ")" ]
3572b2fb92039b4a1abe384be8545560fbd3d470
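tt_entails and tt_check_all together enumerate every truth assignment to the proposition symbols and require the query to hold wherever the knowledge base holds. The same idea in a self-contained sketch; representing sentences as Python functions over a model dict is an assumed encoding chosen so the sketch needs none of the Expr machinery.

from itertools import product

def tt_entails_sketch(symbols, kb, alpha):
    # alpha is entailed iff it is true in every model that satisfies kb.
    for values in product([True, False], repeat=len(symbols)):
        model = dict(zip(symbols, values))
        if kb(model) and not alpha(model):
            return False
    return True

kb    = lambda m: m['P'] and m['Q']     # P & Q
alpha = lambda m: m['Q']                # Q
print(tt_entails_sketch(['P', 'Q'], kb, alpha))   # True, matching the doctest above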
valid
prop_symbols
Return a list of all propositional symbols in x.
aima/logic.py
def prop_symbols(x): "Return a list of all propositional symbols in x." if not isinstance(x, Expr): return [] elif is_prop_symbol(x.op): return [x] else: return list(set(symbol for arg in x.args for symbol in prop_symbols(arg)))
def prop_symbols(x): "Return a list of all propositional symbols in x." if not isinstance(x, Expr): return [] elif is_prop_symbol(x.op): return [x] else: return list(set(symbol for arg in x.args for symbol in prop_symbols(arg)))
[ "Return", "a", "list", "of", "all", "propositional", "symbols", "in", "x", "." ]
hobson/aima
python
https://github.com/hobson/aima/blob/3572b2fb92039b4a1abe384be8545560fbd3d470/aima/logic.py#L339-L347
[ "def", "prop_symbols", "(", "x", ")", ":", "if", "not", "isinstance", "(", "x", ",", "Expr", ")", ":", "return", "[", "]", "elif", "is_prop_symbol", "(", "x", ".", "op", ")", ":", "return", "[", "x", "]", "else", ":", "return", "list", "(", "set", "(", "symbol", "for", "arg", "in", "x", ".", "args", "for", "symbol", "in", "prop_symbols", "(", "arg", ")", ")", ")" ]
3572b2fb92039b4a1abe384be8545560fbd3d470
valid
pl_true
Return True if the propositional logic expression is true in the model, and False if it is false. If the model does not specify the value for every proposition, this may return None to indicate 'not obvious'; this may happen even when the expression is tautological.
aima/logic.py
def pl_true(exp, model={}): """Return True if the propositional logic expression is true in the model, and False if it is false. If the model does not specify the value for every proposition, this may return None to indicate 'not obvious'; this may happen even when the expression is tautological.""" op, args = exp.op, exp.args if exp == TRUE: return True elif exp == FALSE: return False elif is_prop_symbol(op): return model.get(exp) elif op == '~': p = pl_true(args[0], model) if p is None: return None else: return not p elif op == '|': result = False for arg in args: p = pl_true(arg, model) if p is True: return True if p is None: result = None return result elif op == '&': result = True for arg in args: p = pl_true(arg, model) if p is False: return False if p is None: result = None return result p, q = args if op == '>>': return pl_true(~p | q, model) elif op == '<<': return pl_true(p | ~q, model) pt = pl_true(p, model) if pt is None: return None qt = pl_true(q, model) if qt is None: return None if op == '<=>': return pt == qt elif op == '^': return pt != qt else: raise ValueError, "illegal operator in logic expression" + str(exp)
def pl_true(exp, model={}): """Return True if the propositional logic expression is true in the model, and False if it is false. If the model does not specify the value for every proposition, this may return None to indicate 'not obvious'; this may happen even when the expression is tautological.""" op, args = exp.op, exp.args if exp == TRUE: return True elif exp == FALSE: return False elif is_prop_symbol(op): return model.get(exp) elif op == '~': p = pl_true(args[0], model) if p is None: return None else: return not p elif op == '|': result = False for arg in args: p = pl_true(arg, model) if p is True: return True if p is None: result = None return result elif op == '&': result = True for arg in args: p = pl_true(arg, model) if p is False: return False if p is None: result = None return result p, q = args if op == '>>': return pl_true(~p | q, model) elif op == '<<': return pl_true(p | ~q, model) pt = pl_true(p, model) if pt is None: return None qt = pl_true(q, model) if qt is None: return None if op == '<=>': return pt == qt elif op == '^': return pt != qt else: raise ValueError, "illegal operator in logic expression" + str(exp)
[ "Return", "True", "if", "the", "propositional", "logic", "expression", "is", "true", "in", "the", "model", "and", "False", "if", "it", "is", "false", ".", "If", "the", "model", "does", "not", "specify", "the", "value", "for", "every", "proposition", "this", "may", "return", "None", "to", "indicate", "not", "obvious", ";", "this", "may", "happen", "even", "when", "the", "expression", "is", "tautological", "." ]
hobson/aima
python
https://github.com/hobson/aima/blob/3572b2fb92039b4a1abe384be8545560fbd3d470/aima/logic.py#L357-L401
[ "def", "pl_true", "(", "exp", ",", "model", "=", "{", "}", ")", ":", "op", ",", "args", "=", "exp", ".", "op", ",", "exp", ".", "args", "if", "exp", "==", "TRUE", ":", "return", "True", "elif", "exp", "==", "FALSE", ":", "return", "False", "elif", "is_prop_symbol", "(", "op", ")", ":", "return", "model", ".", "get", "(", "exp", ")", "elif", "op", "==", "'~'", ":", "p", "=", "pl_true", "(", "args", "[", "0", "]", ",", "model", ")", "if", "p", "is", "None", ":", "return", "None", "else", ":", "return", "not", "p", "elif", "op", "==", "'|'", ":", "result", "=", "False", "for", "arg", "in", "args", ":", "p", "=", "pl_true", "(", "arg", ",", "model", ")", "if", "p", "is", "True", ":", "return", "True", "if", "p", "is", "None", ":", "result", "=", "None", "return", "result", "elif", "op", "==", "'&'", ":", "result", "=", "True", "for", "arg", "in", "args", ":", "p", "=", "pl_true", "(", "arg", ",", "model", ")", "if", "p", "is", "False", ":", "return", "False", "if", "p", "is", "None", ":", "result", "=", "None", "return", "result", "p", ",", "q", "=", "args", "if", "op", "==", "'>>'", ":", "return", "pl_true", "(", "~", "p", "|", "q", ",", "model", ")", "elif", "op", "==", "'<<'", ":", "return", "pl_true", "(", "p", "|", "~", "q", ",", "model", ")", "pt", "=", "pl_true", "(", "p", ",", "model", ")", "if", "pt", "is", "None", ":", "return", "None", "qt", "=", "pl_true", "(", "q", ",", "model", ")", "if", "qt", "is", "None", ":", "return", "None", "if", "op", "==", "'<=>'", ":", "return", "pt", "==", "qt", "elif", "op", "==", "'^'", ":", "return", "pt", "!=", "qt", "else", ":", "raise", "ValueError", ",", "\"illegal operator in logic expression\"", "+", "str", "(", "exp", ")" ]
3572b2fb92039b4a1abe384be8545560fbd3d470
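The raise statement in pl_true uses Python 2 syntax, but the interesting part is the three-valued treatment of partial models: a disjunction is True as soon as one disjunct is True, False only when all disjuncts are False, and None otherwise, with conjunction handled dually. A compact standalone sketch of that rule for flat sequences of truth values (negation and nesting omitted):

def or3(values):
    # Three-valued OR over True/False/None.
    result = False
    for v in values:
        if v is True:
            return True
        if v is None:
            result = None
    return result

def and3(values):
    # Three-valued AND over True/False/None.
    result = True
    for v in values:
        if v is False:
            return False
        if v is None:
            result = None
    return result

model = {'P': True, 'Q': None}
print(or3(model[s] for s in ('P', 'Q')))    # True:  P | Q is settled by P alone
print(and3(model[s] for s in ('P', 'Q')))   # None:  P & Q still depends on Q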
valid
to_cnf
Convert a propositional logical sentence s to conjunctive normal form. That is, to the form ((A | ~B | ...) & (B | C | ...) & ...) [p. 253] >>> to_cnf("~(B|C)") (~B & ~C) >>> to_cnf("B <=> (P1|P2)") ((~P1 | B) & (~P2 | B) & (P1 | P2 | ~B)) >>> to_cnf("a | (b & c) | d") ((b | a | d) & (c | a | d)) >>> to_cnf("A & (B | (D & E))") (A & (D | B) & (E | B)) >>> to_cnf("A | (B | (C | (D & E)))") ((D | A | B | C) & (E | A | B | C))
aima/logic.py
def to_cnf(s): """Convert a propositional logical sentence s to conjunctive normal form. That is, to the form ((A | ~B | ...) & (B | C | ...) & ...) [p. 253] >>> to_cnf("~(B|C)") (~B & ~C) >>> to_cnf("B <=> (P1|P2)") ((~P1 | B) & (~P2 | B) & (P1 | P2 | ~B)) >>> to_cnf("a | (b & c) | d") ((b | a | d) & (c | a | d)) >>> to_cnf("A & (B | (D & E))") (A & (D | B) & (E | B)) >>> to_cnf("A | (B | (C | (D & E)))") ((D | A | B | C) & (E | A | B | C)) """ if isinstance(s, str): s = expr(s) s = eliminate_implications(s) # Steps 1, 2 from p. 253 s = move_not_inwards(s) # Step 3 return distribute_and_over_or(s)
def to_cnf(s): """Convert a propositional logical sentence s to conjunctive normal form. That is, to the form ((A | ~B | ...) & (B | C | ...) & ...) [p. 253] >>> to_cnf("~(B|C)") (~B & ~C) >>> to_cnf("B <=> (P1|P2)") ((~P1 | B) & (~P2 | B) & (P1 | P2 | ~B)) >>> to_cnf("a | (b & c) | d") ((b | a | d) & (c | a | d)) >>> to_cnf("A & (B | (D & E))") (A & (D | B) & (E | B)) >>> to_cnf("A | (B | (C | (D & E)))") ((D | A | B | C) & (E | A | B | C)) """ if isinstance(s, str): s = expr(s) s = eliminate_implications(s) # Steps 1, 2 from p. 253 s = move_not_inwards(s) # Step 3 return distribute_and_over_or(s)
[ "Convert", "a", "propositional", "logical", "sentence", "s", "to", "conjunctive", "normal", "form", ".", "That", "is", "to", "the", "form", "((", "A", "|", "~B", "|", "...", ")", "&", "(", "B", "|", "C", "|", "...", ")", "&", "...", ")", "[", "p", ".", "253", "]", ">>>", "to_cnf", "(", "~", "(", "B|C", ")", ")", "(", "~B", "&", "~C", ")", ">>>", "to_cnf", "(", "B", "<", "=", ">", "(", "P1|P2", ")", ")", "((", "~P1", "|", "B", ")", "&", "(", "~P2", "|", "B", ")", "&", "(", "P1", "|", "P2", "|", "~B", "))", ">>>", "to_cnf", "(", "a", "|", "(", "b", "&", "c", ")", "|", "d", ")", "((", "b", "|", "a", "|", "d", ")", "&", "(", "c", "|", "a", "|", "d", "))", ">>>", "to_cnf", "(", "A", "&", "(", "B", "|", "(", "D", "&", "E", "))", ")", "(", "A", "&", "(", "D", "|", "B", ")", "&", "(", "E", "|", "B", "))", ">>>", "to_cnf", "(", "A", "|", "(", "B", "|", "(", "C", "|", "(", "D", "&", "E", ")))", ")", "((", "D", "|", "A", "|", "B", "|", "C", ")", "&", "(", "E", "|", "A", "|", "B", "|", "C", "))" ]
hobson/aima
python
https://github.com/hobson/aima/blob/3572b2fb92039b4a1abe384be8545560fbd3d470/aima/logic.py#L407-L424
[ "def", "to_cnf", "(", "s", ")", ":", "if", "isinstance", "(", "s", ",", "str", ")", ":", "s", "=", "expr", "(", "s", ")", "s", "=", "eliminate_implications", "(", "s", ")", "# Steps 1, 2 from p. 253", "s", "=", "move_not_inwards", "(", "s", ")", "# Step 3", "return", "distribute_and_over_or", "(", "s", ")" ]
3572b2fb92039b4a1abe384be8545560fbd3d470
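For cross-checking outputs like the doctests above, sympy ships an equivalent conversion; sympy is an external package not used by the repo, and the exact ordering of literals it prints may differ from the doctests.

from sympy import symbols
from sympy.logic.boolalg import to_cnf as sympy_to_cnf

A, B, C, D, E = symbols('A B C D E')
print(sympy_to_cnf(~(B | C)))            # ~B & ~C
print(sympy_to_cnf(A | (B & C) | D))     # (A | B | D) & (A | C | D)
print(sympy_to_cnf(A & (B | (D & E))))   # A & (B | D) & (B | E)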
valid
eliminate_implications
Change >>, <<, and <=> into &, |, and ~. That is, return an Expr that is equivalent to s, but has only &, |, and ~ as logical operators. >>> eliminate_implications(A >> (~B << C)) ((~B | ~C) | ~A) >>> eliminate_implications(A ^ B) ((A & ~B) | (~A & B))
aima/logic.py
def eliminate_implications(s): """Change >>, <<, and <=> into &, |, and ~. That is, return an Expr that is equivalent to s, but has only &, |, and ~ as logical operators. >>> eliminate_implications(A >> (~B << C)) ((~B | ~C) | ~A) >>> eliminate_implications(A ^ B) ((A & ~B) | (~A & B)) """ if not s.args or is_symbol(s.op): return s ## (Atoms are unchanged.) args = map(eliminate_implications, s.args) a, b = args[0], args[-1] if s.op == '>>': return (b | ~a) elif s.op == '<<': return (a | ~b) elif s.op == '<=>': return (a | ~b) & (b | ~a) elif s.op == '^': assert len(args) == 2 ## TODO: relax this restriction return (a & ~b) | (~a & b) else: assert s.op in ('&', '|', '~') return Expr(s.op, *args)
def eliminate_implications(s): """Change >>, <<, and <=> into &, |, and ~. That is, return an Expr that is equivalent to s, but has only &, |, and ~ as logical operators. >>> eliminate_implications(A >> (~B << C)) ((~B | ~C) | ~A) >>> eliminate_implications(A ^ B) ((A & ~B) | (~A & B)) """ if not s.args or is_symbol(s.op): return s ## (Atoms are unchanged.) args = map(eliminate_implications, s.args) a, b = args[0], args[-1] if s.op == '>>': return (b | ~a) elif s.op == '<<': return (a | ~b) elif s.op == '<=>': return (a | ~b) & (b | ~a) elif s.op == '^': assert len(args) == 2 ## TODO: relax this restriction return (a & ~b) | (~a & b) else: assert s.op in ('&', '|', '~') return Expr(s.op, *args)
[ "Change", ">>", "<<", "and", "<", "=", ">", "into", "&", "|", "and", "~", ".", "That", "is", "return", "an", "Expr", "that", "is", "equivalent", "to", "s", "but", "has", "only", "&", "|", "and", "~", "as", "logical", "operators", ".", ">>>", "eliminate_implications", "(", "A", ">>", "(", "~B", "<<", "C", "))", "((", "~B", "|", "~C", ")", "|", "~A", ")", ">>>", "eliminate_implications", "(", "A", "^", "B", ")", "((", "A", "&", "~B", ")", "|", "(", "~A", "&", "B", "))" ]
hobson/aima
python
https://github.com/hobson/aima/blob/3572b2fb92039b4a1abe384be8545560fbd3d470/aima/logic.py#L426-L448
[ "def", "eliminate_implications", "(", "s", ")", ":", "if", "not", "s", ".", "args", "or", "is_symbol", "(", "s", ".", "op", ")", ":", "return", "s", "## (Atoms are unchanged.)", "args", "=", "map", "(", "eliminate_implications", ",", "s", ".", "args", ")", "a", ",", "b", "=", "args", "[", "0", "]", ",", "args", "[", "-", "1", "]", "if", "s", ".", "op", "==", "'>>'", ":", "return", "(", "b", "|", "~", "a", ")", "elif", "s", ".", "op", "==", "'<<'", ":", "return", "(", "a", "|", "~", "b", ")", "elif", "s", ".", "op", "==", "'<=>'", ":", "return", "(", "a", "|", "~", "b", ")", "&", "(", "b", "|", "~", "a", ")", "elif", "s", ".", "op", "==", "'^'", ":", "assert", "len", "(", "args", ")", "==", "2", "## TODO: relax this restriction", "return", "(", "a", "&", "~", "b", ")", "|", "(", "~", "a", "&", "b", ")", "else", ":", "assert", "s", ".", "op", "in", "(", "'&'", ",", "'|'", ",", "'~'", ")", "return", "Expr", "(", "s", ".", "op", ",", "*", "args", ")" ]
3572b2fb92039b4a1abe384be8545560fbd3d470
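The rewrites applied here are ordinary propositional identities, and a brute-force truth-table check makes a quick sanity test for them, for example that A >> B matches B | ~A and that A ^ B matches (A & ~B) | (~A & B):

from itertools import product

def equivalent(f, g, arity):
    # Compare two boolean functions on every assignment.
    return all(f(*vals) == g(*vals)
               for vals in product([True, False], repeat=arity))

# implication rewrite: A >> B  versus  B | ~A
print(equivalent(lambda a, b: (not a) or b,
                 lambda a, b: b or (not a), 2))                       # True
# xor rewrite: A ^ B  versus  (A & ~B) | (~A & B)
print(equivalent(lambda a, b: a != b,
                 lambda a, b: (a and not b) or ((not a) and b), 2))   # True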
valid
move_not_inwards
Rewrite sentence s by moving negation sign inward. >>> move_not_inwards(~(A | B)) (~A & ~B) >>> move_not_inwards(~(A & B)) (~A | ~B) >>> move_not_inwards(~(~(A | ~B) | ~~C)) ((A | ~B) & ~C)
aima/logic.py
def move_not_inwards(s): """Rewrite sentence s by moving negation sign inward. >>> move_not_inwards(~(A | B)) (~A & ~B) >>> move_not_inwards(~(A & B)) (~A | ~B) >>> move_not_inwards(~(~(A | ~B) | ~~C)) ((A | ~B) & ~C) """ if s.op == '~': NOT = lambda b: move_not_inwards(~b) a = s.args[0] if a.op == '~': return move_not_inwards(a.args[0]) # ~~A ==> A if a.op =='&': return associate('|', map(NOT, a.args)) if a.op =='|': return associate('&', map(NOT, a.args)) return s elif is_symbol(s.op) or not s.args: return s else: return Expr(s.op, *map(move_not_inwards, s.args))
def move_not_inwards(s): """Rewrite sentence s by moving negation sign inward. >>> move_not_inwards(~(A | B)) (~A & ~B) >>> move_not_inwards(~(A & B)) (~A | ~B) >>> move_not_inwards(~(~(A | ~B) | ~~C)) ((A | ~B) & ~C) """ if s.op == '~': NOT = lambda b: move_not_inwards(~b) a = s.args[0] if a.op == '~': return move_not_inwards(a.args[0]) # ~~A ==> A if a.op =='&': return associate('|', map(NOT, a.args)) if a.op =='|': return associate('&', map(NOT, a.args)) return s elif is_symbol(s.op) or not s.args: return s else: return Expr(s.op, *map(move_not_inwards, s.args))
[ "Rewrite", "sentence", "s", "by", "moving", "negation", "sign", "inward", ".", ">>>", "move_not_inwards", "(", "~", "(", "A", "|", "B", "))", "(", "~A", "&", "~B", ")", ">>>", "move_not_inwards", "(", "~", "(", "A", "&", "B", "))", "(", "~A", "|", "~B", ")", ">>>", "move_not_inwards", "(", "~", "(", "~", "(", "A", "|", "~B", ")", "|", "~~C", "))", "((", "A", "|", "~B", ")", "&", "~C", ")" ]
hobson/aima
python
https://github.com/hobson/aima/blob/3572b2fb92039b4a1abe384be8545560fbd3d470/aima/logic.py#L450-L469
[ "def", "move_not_inwards", "(", "s", ")", ":", "if", "s", ".", "op", "==", "'~'", ":", "NOT", "=", "lambda", "b", ":", "move_not_inwards", "(", "~", "b", ")", "a", "=", "s", ".", "args", "[", "0", "]", "if", "a", ".", "op", "==", "'~'", ":", "return", "move_not_inwards", "(", "a", ".", "args", "[", "0", "]", ")", "# ~~A ==> A", "if", "a", ".", "op", "==", "'&'", ":", "return", "associate", "(", "'|'", ",", "map", "(", "NOT", ",", "a", ".", "args", ")", ")", "if", "a", ".", "op", "==", "'|'", ":", "return", "associate", "(", "'&'", ",", "map", "(", "NOT", ",", "a", ".", "args", ")", ")", "return", "s", "elif", "is_symbol", "(", "s", ".", "op", ")", "or", "not", "s", ".", "args", ":", "return", "s", "else", ":", "return", "Expr", "(", "s", ".", "op", ",", "*", "map", "(", "move_not_inwards", ",", "s", ".", "args", ")", ")" ]
3572b2fb92039b4a1abe384be8545560fbd3d470
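Pushing negation inward is the negation-normal-form step. The sketch below performs the same double-negation and De Morgan rewrites over an assumed tuple encoding with string atoms, ('~', x) for negation, and ('&', ...) / ('|', ...) for the connectives:

def to_nnf(t):
    # Drive '~' down to the atoms of a nested tuple term.
    if isinstance(t, str):
        return t
    op, *args = t
    if op == '~':
        inner = args[0]
        if isinstance(inner, str):
            return ('~', inner)                        # negation already at an atom
        inner_op, *inner_args = inner
        if inner_op == '~':
            return to_nnf(inner_args[0])               # ~~A  ->  A
        flipped = '|' if inner_op == '&' else '&'      # De Morgan
        return (flipped, *[to_nnf(('~', x)) for x in inner_args])
    return (op, *[to_nnf(x) for x in args])

print(to_nnf(('~', ('|', 'A', 'B'))))
# ('&', ('~', 'A'), ('~', 'B'))
print(to_nnf(('~', ('&', 'A', ('~', 'B')))))
# ('|', ('~', 'A'), 'B')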
valid
distribute_and_over_or
Given a sentence s consisting of conjunctions and disjunctions of literals, return an equivalent sentence in CNF. >>> distribute_and_over_or((A & B) | C) ((A | C) & (B | C))
aima/logic.py
def distribute_and_over_or(s): """Given a sentence s consisting of conjunctions and disjunctions of literals, return an equivalent sentence in CNF. >>> distribute_and_over_or((A & B) | C) ((A | C) & (B | C)) """ if s.op == '|': s = associate('|', s.args) if s.op != '|': return distribute_and_over_or(s) if len(s.args) == 0: return FALSE if len(s.args) == 1: return distribute_and_over_or(s.args[0]) conj = find_if((lambda d: d.op == '&'), s.args) if not conj: return s others = [a for a in s.args if a is not conj] rest = associate('|', others) return associate('&', [distribute_and_over_or(c|rest) for c in conj.args]) elif s.op == '&': return associate('&', map(distribute_and_over_or, s.args)) else: return s
def distribute_and_over_or(s): """Given a sentence s consisting of conjunctions and disjunctions of literals, return an equivalent sentence in CNF. >>> distribute_and_over_or((A & B) | C) ((A | C) & (B | C)) """ if s.op == '|': s = associate('|', s.args) if s.op != '|': return distribute_and_over_or(s) if len(s.args) == 0: return FALSE if len(s.args) == 1: return distribute_and_over_or(s.args[0]) conj = find_if((lambda d: d.op == '&'), s.args) if not conj: return s others = [a for a in s.args if a is not conj] rest = associate('|', others) return associate('&', [distribute_and_over_or(c|rest) for c in conj.args]) elif s.op == '&': return associate('&', map(distribute_and_over_or, s.args)) else: return s
[ "Given", "a", "sentence", "s", "consisting", "of", "conjunctions", "and", "disjunctions", "of", "literals", "return", "an", "equivalent", "sentence", "in", "CNF", ".", ">>>", "distribute_and_over_or", "((", "A", "&", "B", ")", "|", "C", ")", "((", "A", "|", "C", ")", "&", "(", "B", "|", "C", "))" ]
hobson/aima
python
https://github.com/hobson/aima/blob/3572b2fb92039b4a1abe384be8545560fbd3d470/aima/logic.py#L471-L495
[ "def", "distribute_and_over_or", "(", "s", ")", ":", "if", "s", ".", "op", "==", "'|'", ":", "s", "=", "associate", "(", "'|'", ",", "s", ".", "args", ")", "if", "s", ".", "op", "!=", "'|'", ":", "return", "distribute_and_over_or", "(", "s", ")", "if", "len", "(", "s", ".", "args", ")", "==", "0", ":", "return", "FALSE", "if", "len", "(", "s", ".", "args", ")", "==", "1", ":", "return", "distribute_and_over_or", "(", "s", ".", "args", "[", "0", "]", ")", "conj", "=", "find_if", "(", "(", "lambda", "d", ":", "d", ".", "op", "==", "'&'", ")", ",", "s", ".", "args", ")", "if", "not", "conj", ":", "return", "s", "others", "=", "[", "a", "for", "a", "in", "s", ".", "args", "if", "a", "is", "not", "conj", "]", "rest", "=", "associate", "(", "'|'", ",", "others", ")", "return", "associate", "(", "'&'", ",", "[", "distribute_and_over_or", "(", "c", "|", "rest", ")", "for", "c", "in", "conj", ".", "args", "]", ")", "elif", "s", ".", "op", "==", "'&'", ":", "return", "associate", "(", "'&'", ",", "map", "(", "distribute_and_over_or", ",", "s", ".", "args", ")", ")", "else", ":", "return", "s" ]
3572b2fb92039b4a1abe384be8545560fbd3d470