Parsing of some custom SV frequencies. These are very specific at the moment; this will hopefully improve over time as the field of structural variants develops.
|
def parse_sv_frequencies(variant):
"""Parsing of some custom sv frequencies
These are very specific at the moment, this will hopefully get better over time when the
field of structural variants is more developed.
Args:
variant(cyvcf2.Variant)
Returns:
sv_frequencies(dict)
"""
frequency_keys = [
'clingen_cgh_benignAF',
'clingen_cgh_benign',
'clingen_cgh_pathogenicAF',
'clingen_cgh_pathogenic',
'clingen_ngi',
'clingen_ngiAF',
'swegen',
'swegenAF',
'decipherAF',
'decipher'
]
sv_frequencies = {}
for key in frequency_keys:
value = variant.INFO.get(key, 0)
if 'AF' in key:
value = float(value)
else:
value = int(value)
if value > 0:
sv_frequencies[key] = value
return sv_frequencies
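A minimal usage sketch, assuming cyvcf2 is installed and the annotated SV VCF (the file path below is hypothetical) carries the INFO keys listed above:

from cyvcf2 import VCF

for variant in VCF("sv_variants.vcf.gz"):  # hypothetical path
    frequencies = parse_sv_frequencies(variant)
    if frequencies:
        print(variant.ID, frequencies)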
|
Show all users in the database
|
def users(context):
"""Show all users in the database"""
LOG.info("Running scout view users")
adapter = context.obj['adapter']
user_objs = adapter.users()
if user_objs.count() == 0:
LOG.info("No users found")
context.abort()
click.echo("#name\temail\troles\tinstitutes")
for user_obj in user_objs:
click.echo("{0}\t{1}\t{2}\t{3}\t".format(
user_obj['name'],
user_obj.get('mail', user_obj['_id']),
', '.join(user_obj.get('roles', [])),
', '.join(user_obj.get('institutes', [])),
)
)
|
Load a case into the database
|
def load_case(adapter, case_obj, update=False):
"""Load a case into the database
If the case already exists the function will exit.
If the user wants to load a case that is already in the database
'update' has to be 'True'
Args:
adapter (MongoAdapter): connection to the database
case_obj (dict): case object to persist to the database
update(bool): If existing case should be updated
Returns:
case_obj(dict): A dictionary with the built case
"""
logger.info('Loading case {} into database'.format(case_obj['display_name']))
# Check if case exists in database
existing_case = adapter.case(case_obj['_id'])
if existing_case:
if update:
adapter.update_case(case_obj)
else:
raise IntegrityError("Case {0} already exists in database".format(case_obj['_id']))
else:
adapter.add_case(case_obj)
return case_obj
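A usage sketch; 'adapter' is assumed to be a connected MongoAdapter, 'case_obj' a case dictionary built by the load pipeline, and IntegrityError the exception raised above:

try:
    load_case(adapter, case_obj)
except IntegrityError:
    # the case already exists; rerun with update=True to replace it
    load_case(adapter, case_obj, update=True)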
|
Build a hgnc_gene object
|
def build_hgnc_gene(gene_info, build='37'):
"""Build a hgnc_gene object
Args:
gene_info(dict): Gene information
Returns:
gene_obj(dict)
{
'_id': ObjectId(),
# This is the hgnc id, required:
'hgnc_id': int,
# The primary symbol, required
'hgnc_symbol': str,
'ensembl_id': str, # required
'build': str, # '37' or '38', defaults to '37', required
'chromosome': str, # required
'start': int, # required
'end': int, # required
'description': str, # Gene description
'aliases': list(), # Gene symbol aliases, includes hgnc_symbol, str
'entrez_id': int,
'omim_id': int,
'pli_score': float,
'primary_transcripts': list(), # List of refseq transcripts (str)
'ucsc_id': str,
'uniprot_ids': list(), # List of str
'vega_id': str,
'transcripts': list(), # List of hgnc_transcript
# Inheritance information
'inheritance_models': list(), # List of model names
'incomplete_penetrance': bool, # Acquired from HPO
# Phenotype information
'phenotypes': list(), # List of dictionaries with phenotype information
}
"""
try:
hgnc_id = int(gene_info['hgnc_id'])
except KeyError as err:
raise KeyError("Gene has to have a hgnc_id")
except ValueError as err:
raise ValueError("hgnc_id has to be an integer")
try:
hgnc_symbol = gene_info['hgnc_symbol']
except KeyError as err:
raise KeyError("Gene has to have a hgnc_symbol")
try:
ensembl_id = gene_info['ensembl_gene_id']
except KeyError as err:
raise KeyError("Gene has to have a ensembl_id")
try:
chromosome = gene_info['chromosome']
except KeyError as err:
raise KeyError("Gene has to have a chromosome")
try:
start = int(gene_info['start'])
except KeyError as err:
raise KeyError("Gene has to have a start position")
except (TypeError, ValueError) as err:
raise ValueError("Gene start has to be an integer")
try:
end = int(gene_info['end'])
except KeyError as err:
raise KeyError("Gene has to have a end position")
except TypeError as err:
raise TypeError("Gene end has to be a integer")
gene_obj = HgncGene(
hgnc_id=hgnc_id,
hgnc_symbol=hgnc_symbol,
ensembl_id=ensembl_id,
chrom=chromosome,
start=start,
end=end,
build=build,
)
if gene_info.get('description'):
gene_obj['description'] = gene_info['description']
# LOG.debug("Adding info %s", gene_info['description'])
if gene_info.get('previous_symbols'):
gene_obj['aliases'] = gene_info['previous_symbols']
if gene_info.get('entrez_id'):
gene_obj['entrez_id'] = int(gene_info['entrez_id'])
if gene_info.get('omim_id'):
gene_obj['omim_id'] = int(gene_info['omim_id'])
if gene_info.get('pli_score'):
gene_obj['pli_score'] = float(gene_info['pli_score'])
if gene_info.get('ref_seq'):
gene_obj['primary_transcripts'] = gene_info['ref_seq']
if gene_info.get('ucsc_id'):
gene_obj['ucsc_id'] = gene_info['ucsc_id']
if gene_info.get('uniprot_ids'):
gene_obj['uniprot_ids'] = gene_info['uniprot_ids']
if gene_info.get('vega_id'):
gene_obj['vega_id'] = gene_info['vega_id']
if gene_info.get('incomplete_penetrance'):
gene_obj['incomplete_penetrance'] = True
if gene_info.get('inheritance_models'):
gene_obj['inheritance_models'] = gene_info['inheritance_models']
phenotype_objs = []
for phenotype_info in gene_info.get('phenotypes', []):
phenotype_objs.append(build_phenotype(phenotype_info))
if phenotype_objs:
gene_obj['phenotypes'] = phenotype_objs
for key in list(gene_obj):
if gene_obj[key] is None:
gene_obj.pop(key)
return gene_obj
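A minimal sketch of the expected input; the values are illustrative only:

gene_info = {
    'hgnc_id': '1100',
    'hgnc_symbol': 'BRCA1',
    'ensembl_gene_id': 'ENSG00000012048',
    'chromosome': '17',
    'start': '41196312',
    'end': '41277500',
}
gene_obj = build_hgnc_gene(gene_info, build='37')
# gene_obj follows the schema described in the docstring above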
|
Load a gene panel based on the info sent. A panel object is built and integrity checks are made. The panel object is then loaded into the database.
|
def load_panel(self, parsed_panel):
"""Load a gene panel based on the info sent
A panel object is built and integrity checks are made.
The panel object is then loaded into the database.
Args:
parsed_panel(dict): Parsed panel information, e.g.
{
'file': <path to panel file>(str),
'institute': <institute>(str),
'type': <panel type>(str),
'date': <date of creation>(datetime.datetime),
'version': <version>(float),
'panel_name': <panel id>(str),
'full_name': <optional long name>(str),
}
"""
panel_obj = build_panel(parsed_panel, self)
self.add_gene_panel(panel_obj)
|
Create and load the OMIM-AUTO panel
|
def load_omim_panel(self, api_key, institute=None):
"""Create and load the OMIM-AUTO panel"""
existing_panel = self.gene_panel(panel_id='OMIM-AUTO')
if not existing_panel:
LOG.warning("OMIM-AUTO does not exists in database")
LOG.info('Creating a first version')
version = 1.0
if existing_panel:
version = float(math.floor(existing_panel['version']) + 1)
LOG.info("Setting version to %s", version)
try:
mim_files = fetch_mim_files(api_key=api_key, genemap2=True, mim2genes=True)
except Exception as err:
raise err
date_string = None
# Get the correct date when omim files were released
for line in mim_files['genemap2']:
if 'Generated' in line:
date_string = line.split(':')[-1].lstrip().rstrip()
date_obj = get_date(date_string)
if existing_panel:
if existing_panel['date'] == date_obj:
LOG.warning("There is no new version of OMIM")
return
panel_data = {}
panel_data['path'] = None
panel_data['type'] = 'clinical'
panel_data['date'] = date_obj
panel_data['panel_id'] = 'OMIM-AUTO'
panel_data['institute'] = institute or 'cust002'
panel_data['version'] = version
panel_data['display_name'] = 'OMIM-AUTO'
panel_data['genes'] = []
alias_genes = self.genes_by_alias()
genes = get_omim_panel_genes(
genemap2_lines = mim_files['genemap2'],
mim2gene_lines = mim_files['mim2genes'],
alias_genes = alias_genes,
)
for gene in genes:
panel_data['genes'].append(gene)
panel_obj = build_panel(panel_data, self)
if existing_panel:
new_genes = self.compare_mim_panels(existing_panel, panel_obj)
if new_genes:
self.update_mim_version(new_genes, panel_obj, old_version=existing_panel['version'])
else:
LOG.info("The new version of omim does not differ from the old one")
LOG.info("No update is added")
return
self.add_gene_panel(panel_obj)
|
Check if the latest version of OMIM differs from the most recent in the database. Return all genes that were not in the previous version.
|
def compare_mim_panels(self, existing_panel, new_panel):
"""Check if the latest version of OMIM differs from the most recent in database
Return all genes that were not in the previous version.
Args:
existing_panel(dict)
new_panel(dict)
Returns:
new_genes(set(str))
"""
existing_genes = set([gene['hgnc_id'] for gene in existing_panel['genes']])
new_genes = set([gene['hgnc_id'] for gene in new_panel['genes']])
return new_genes.difference(existing_genes)
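A small sketch with two panel dicts holding 'genes' lists of {'hgnc_id': ...} entries (ids made up):

old_panel = {'genes': [{'hgnc_id': 1}, {'hgnc_id': 2}]}
new_panel = {'genes': [{'hgnc_id': 2}, {'hgnc_id': 3}]}
adapter.compare_mim_panels(old_panel, new_panel)  # -> {3}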
|
Set the correct version for each gene. Loop over the genes in the new panel.
|
def update_mim_version(self, new_genes, new_panel, old_version):
"""Set the correct version for each gene
Loop over the genes in the new panel
Args:
new_genes(set(int)): Set with the hgnc ids of the new genes
new_panel(dict)
"""
LOG.info('Updating versions for new genes')
version = new_panel['version']
for gene in new_panel['genes']:
hgnc_id = gene['hgnc_id']
# If the gene is new we add the version
if hgnc_id in new_genes:
gene['database_entry_version'] = version
continue
# If the gene is old it will have the previous version
gene['database_entry_version'] = old_version
return
|
Add a gene panel to the database
|
def add_gene_panel(self, panel_obj):
"""Add a gene panel to the database
Args:
panel_obj(dict)
"""
panel_name = panel_obj['panel_name']
panel_version = panel_obj['version']
display_name = panel_obj.get('display_name', panel_name)
if self.gene_panel(panel_name, panel_version):
raise IntegrityError("Panel {0} with version {1} already"
" exist in database".format(panel_name, panel_version))
LOG.info("loading panel {0}, version {1} to database".format(
display_name, panel_version
))
result = self.panel_collection.insert_one(panel_obj)
LOG.debug("Panel saved")
return result.inserted_id
|
Fetch a gene panel by _id.
|
def panel(self, panel_id):
"""Fetch a gene panel by '_id'.
Args:
panel_id (str, ObjectId): str or ObjectId of document ObjectId
Returns:
dict: panel object or `None` if panel not found
"""
if not isinstance(panel_id, ObjectId):
panel_id = ObjectId(panel_id)
panel_obj = self.panel_collection.find_one({'_id': panel_id})
return panel_obj
|
Delete a panel by _id.
|
def delete_panel(self, panel_obj):
"""Delete a panel by '_id'.
Args:
panel_obj(dict)
Returns:
res(pymongo.DeleteResult)
"""
res = self.panel_collection.delete_one({'_id': panel_obj['_id']})
LOG.warning("Deleting panel %s, version %s" % (panel_obj['panel_name'], panel_obj['version']))
return res
|
Fetch a gene panel.
|
def gene_panel(self, panel_id, version=None):
"""Fetch a gene panel.
If no version is given, the latest version is returned
Args:
panel_id (str): unique id for the panel
version (str): version of the panel. If 'None' latest version will be returned
Returns:
gene_panel: gene panel object
"""
query = {'panel_name': panel_id}
if version:
LOG.info("Fetch gene panel {0}, version {1} from database".format(
panel_id, version
))
query['version'] = version
return self.panel_collection.find_one(query)
else:
LOG.info("Fetching gene panels %s from database", panel_id)
res = self.panel_collection.find(query).sort('version', -1)
if res.count() > 0:
return res[0]
else:
LOG.info("No gene panel found")
return None
|
Return all gene panels
|
def gene_panels(self, panel_id=None, institute_id=None, version=None):
"""Return all gene panels
If panel_id return all versions of panels by that panel name
Args:
panel_id(str)
Returns:
cursor(pymongo.cursor)
"""
query = {}
if panel_id:
query['panel_name'] = panel_id
if version:
query['version'] = version
if institute_id:
query['institute'] = institute_id
return self.panel_collection.find(query)
|
Fetch all gene panels and group them by gene
|
def gene_to_panels(self, case_obj):
"""Fetch all gene panels and group them by gene
Args:
case_obj(scout.models.Case)
Returns:
gene_dict(dict): A dictionary with gene as keys and a set of
panel names as value
"""
LOG.info("Building gene to panels")
gene_dict = {}
for panel_info in case_obj.get('panels', []):
panel_name = panel_info['panel_name']
panel_version = panel_info['version']
panel_obj = self.gene_panel(panel_name, version=panel_version)
if not panel_obj:
## Raise exception here???
LOG.warning("Panel: {0}, version {1} does not exist in database".format(panel_name, panel_version))
for gene in panel_obj['genes']:
hgnc_id = gene['hgnc_id']
if hgnc_id not in gene_dict:
gene_dict[hgnc_id] = set([panel_name])
continue
gene_dict[hgnc_id].add(panel_name)
LOG.info("Gene to panels done")
return gene_dict
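A sketch of consuming the result; the hgnc ids and panel names are made up:

gene_to_panel = adapter.gene_to_panels(case_obj)
# e.g. {3233: {'panel1'}, 7881: {'panel1', 'panel2'}}
for hgnc_id, panel_names in gene_to_panel.items():
    print(hgnc_id, ', '.join(sorted(panel_names)))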
|
Replace an existing gene panel with a new one
|
def update_panel(self, panel_obj, version=None, date_obj=None):
"""Replace a existing gene panel with a new one
Keeps the object id
Args:
panel_obj(dict)
version(float)
date_obj(datetime.datetime)
Returns:
updated_panel(dict)
"""
LOG.info("Updating panel %s", panel_obj['panel_name'])
# update date of panel to "today"
date = panel_obj['date']
if version:
LOG.info("Updating version from {0} to version {1}".format(
panel_obj['version'], version))
panel_obj['version'] = version
# Updating version should not update date
if date_obj:
date = date_obj
else:
date = date_obj or dt.datetime.now()
panel_obj['date'] = date
updated_panel = self.panel_collection.find_one_and_replace(
{'_id': panel_obj['_id']},
panel_obj,
return_document=pymongo.ReturnDocument.AFTER
)
return updated_panel
|
Add a pending action to a gene panel
|
def add_pending(self, panel_obj, hgnc_gene, action, info=None):
"""Add a pending action to a gene panel
Store the pending actions in panel.pending
Args:
panel_obj(dict): The panel that is about to be updated
hgnc_gene(dict)
action(str): choices=['add','delete','edit']
info(dict): additional gene info (disease_associated_transcripts,
reduced_penetrance, mosaicism, database_entry_version ,
inheritance_models, comment)
Returns:
updated_panel(dict):
"""
valid_actions = ['add', 'delete', 'edit']
if action not in valid_actions:
raise ValueError("Invalid action {0}".format(action))
info = info or {}
pending_action = {
'hgnc_id': hgnc_gene['hgnc_id'],
'action': action,
'info': info,
'symbol': hgnc_gene['hgnc_symbol'],
}
updated_panel = self.panel_collection.find_one_and_update(
{'_id': panel_obj['_id']},
{
'$addToSet': {
'pending': pending_action
}
},
return_document=pymongo.ReturnDocument.AFTER
)
return updated_panel
|
Apply the pending changes to an existing gene panel or create a new version of the same panel.
|
def apply_pending(self, panel_obj, version):
"""Apply the pending changes to an existing gene panel or create a new version of the same panel.
Args:
panel_obj(dict): panel in database to update
version(double): panel version to update
Returns:
inserted_id(str): id of updated panel or the new one
"""
updates = {}
new_panel = deepcopy(panel_obj)
new_panel['pending'] = []
new_panel['date'] = dt.datetime.now()
info_fields = ['disease_associated_transcripts', 'inheritance_models', 'reduced_penetrance',
'mosaicism', 'database_entry_version', 'comment']
new_genes = []
for update in panel_obj.get('pending', []):
hgnc_id = update['hgnc_id']
# If action is add we create a new gene object
if update['action'] != 'add':
updates[hgnc_id] = update
continue
info = update.get('info', {})
gene_obj = {
'hgnc_id': hgnc_id,
'symbol': update['symbol']
}
for field in info_fields:
if field in info:
gene_obj[field] = info[field]
new_genes.append(gene_obj)
for gene in panel_obj['genes']:
hgnc_id = gene['hgnc_id']
if hgnc_id not in updates:
new_genes.append(gene)
continue
current_update = updates[hgnc_id]
action = current_update['action']
info = current_update['info']
# If action is delete we do not add the gene to new genes
if action == 'delete':
continue
elif action == 'edit':
for field in info_fields:
if field in info:
gene[field] = info[field]
new_genes.append(gene)
new_panel['genes'] = new_genes
new_panel['version'] = float(version)
inserted_id = None
# if the same version of the panel should be updated
if new_panel['version'] == panel_obj['version']:
# replace panel_obj with new_panel
result = self.panel_collection.find_one_and_replace(
{'_id':panel_obj['_id']},
new_panel,
return_document=pymongo.ReturnDocument.AFTER
)
inserted_id = result['_id']
else: # create a new version of the same panel
new_panel.pop('_id')
# archive the old panel
panel_obj['is_archived'] = True
self.update_panel(panel_obj=panel_obj, date_obj=panel_obj['date'])
# insert the new panel
inserted_id = self.panel_collection.insert_one(new_panel).inserted_id
return inserted_id
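A hypothetical flow combining add_pending and apply_pending: queue an edit for a gene, then commit the pending changes as a new panel version.

updated_panel = adapter.add_pending(panel_obj, hgnc_gene, action='edit',
                                    info={'comment': 'requested by clinician'})
new_id = adapter.apply_pending(updated_panel, version=panel_obj['version'] + 1)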
|
Return all the clinical gene symbols for a case.
|
def clinical_symbols(self, case_obj):
"""Return all the clinical gene symbols for a case."""
panel_ids = [panel['panel_id'] for panel in case_obj['panels']]
query = self.panel_collection.aggregate([
{'$match': {'_id': {'$in': panel_ids}}},
{'$unwind': '$genes'},
{'$group': {'_id': '$genes.symbol'}}
])
return set(item['_id'] for item in query)
|
Interact with cases existing in the database.
|
def cases(context, case_id, institute, reruns, finished, causatives, research_requested,
is_research, status, json):
"""Interact with cases existing in the database."""
adapter = context.obj['adapter']
models = []
if case_id:
case_obj = adapter.case(case_id=case_id)
if case_obj:
models.append(case_obj)
else:
LOG.info("No case with id {}".format(case_id))
else:
models = adapter.cases(collaborator=institute, reruns=reruns,
finished=finished, has_causatives=causatives,
research_requested=research_requested,
is_research=is_research, status=status)
models = [case_obj for case_obj in models]
if len(models) == 0:
LOG.info("No cases could be found")
if json:
click.echo(dumps(models))
return
for model in models:
pp(model)
|
Emit a record. Format the record and send it to the specified addressees.
|
def emit(self, record):
"""Emit a record.
Format the record and send it to the specified addressees.
"""
try:
import smtplib
try:
from email.utils import formatdate
except ImportError:
formatdate = self.date_time
port = self.mailport
if not port:
port = smtplib.SMTP_PORT
smtp = smtplib.SMTP(self.mailhost, port)
msg = self.format(record)
msg = "From: %s\r\nTo: %s\r\nSubject: %s\r\nDate: %s\r\n\r\n%s" % (
self.fromaddr,
','.join(self.toaddrs),
self.getSubject(record),
formatdate(), msg
)
if self.username:
smtp.ehlo() # For 'tls', add this line
smtp.starttls() # For 'tls', add this line
smtp.ehlo() # For 'tls', add this line
smtp.login(self.username, self.password)
smtp.sendmail(self.fromaddr, self.toaddrs, msg)
smtp.quit()
except (KeyboardInterrupt, SystemExit):
raise
except:
self.handleError(record)
|
Return a list with the current indexes. Skip the mandatory _id_ indexes.
|
def indexes(self, collection=None):
"""Return a list with the current indexes
Skip the mandatory _id_ indexes
Args:
collection(str)
Returns:
indexes(list)
"""
indexes = []
for collection_name in self.collections():
if collection and collection != collection_name:
continue
for index_name in self.db[collection_name].index_information():
if index_name != '_id_':
indexes.append(index_name)
return indexes
|
Add the proper indexes to the scout instance.
|
def load_indexes(self):
"""Add the proper indexes to the scout instance.
All indexes are specified in scout/constants/indexes.py
If this method is utilised when new indexes are defined those should be added
"""
for collection_name in INDEXES:
existing_indexes = self.indexes(collection_name)
indexes = INDEXES[collection_name]
for index in indexes:
index_name = index.document.get('name')
if index_name in existing_indexes:
LOG.info("Deleting old index: %s" % index_name)
self.db[collection_name].drop_index(index_name)
LOG.info("creating indexes for {0} collection: {1}".format(
collection_name,
', '.join([index.document.get('name') for index in indexes])
))
self.db[collection_name].create_indexes(indexes)
|
Update the indexes. If there are any indexes that are not added to the database, add those.
|
def update_indexes(self):
"""Update the indexes
If there are any indexes that are not added to the database, add those.
"""
LOG.info("Updating indexes...")
nr_updated = 0
for collection_name in INDEXES:
existing_indexes = self.indexes(collection_name)
indexes = INDEXES[collection_name]
for index in indexes:
index_name = index.document.get('name')
if index_name not in existing_indexes:
nr_updated += 1
LOG.info("Adding index : %s" % index_name)
self.db[collection_name].create_indexes(indexes)
if nr_updated == 0:
LOG.info("All indexes in place")
|
Delete all indexes for the database
|
def drop_indexes(self):
"""Delete all indexes for the database"""
LOG.warning("Dropping all indexe")
for collection_name in INDEXES:
LOG.warning("Dropping all indexes for collection name %s", collection_name)
self.db[collection_name].drop_indexes()
|
Build a mongo query across multiple cases. Translate query options from a form into a complete mongo query dictionary.
|
def build_variant_query(self, query=None, category='snv', variant_type=['clinical']):
"""Build a mongo query across multiple cases.
Translate query options from a form into a complete mongo query dictionary.
Beware that unindexed queries against a large variant collection will
be extremely slow.
Currently indexed query options:
hgnc_symbols
rank_score
variant_type
category
Args:
query(dict): A query dictionary for the database, from a query form.
category(str): 'snv', 'sv', 'str' or 'cancer'
variant_type(str): 'clinical' or 'research'
Returns:
mongo_query : A dictionary in the mongo query format.
"""
query = query or {}
mongo_variant_query = {}
LOG.debug("Building a mongo query for %s" % query)
if query.get('hgnc_symbols'):
mongo_variant_query['hgnc_symbols'] = {'$in': query['hgnc_symbols']}
mongo_variant_query['variant_type'] = {'$in': variant_type}
mongo_variant_query['category'] = category
rank_score = query.get('rank_score') or 15
mongo_variant_query['rank_score'] = {'$gte': rank_score}
LOG.debug("Querying %s" % mongo_variant_query)
return mongo_variant_query
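A sketch of a form-derived query and the mongo query it produces, following the code above (example values):

query = {'hgnc_symbols': ['POT1'], 'rank_score': 17}
mongo_query = adapter.build_variant_query(query=query, category='snv',
                                          variant_type=['clinical'])
# mongo_query == {
#     'hgnc_symbols': {'$in': ['POT1']},
#     'variant_type': {'$in': ['clinical']},
#     'category': 'snv',
#     'rank_score': {'$gte': 17},
# }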
|
Build a mongo query
|
def build_query(self, case_id, query=None, variant_ids=None, category='snv'):
"""Build a mongo query
These are the different query options:
{
'genetic_models': list,
'chrom': str,
'thousand_genomes_frequency': float,
'exac_frequency': float,
'clingen_ngi': int,
'cadd_score': float,
'cadd_inclusive": boolean,
'genetic_models': list(str),
'hgnc_symbols': list,
'region_annotations': list,
'functional_annotations': list,
'clinsig': list,
'clinsig_confident_always_returned': boolean,
'variant_type': str(('research', 'clinical')),
'chrom': str,
'start': int,
'end': int,
'svtype': list,
'size': int,
'size_shorter': boolean,
'gene_panels': list(str),
'mvl_tag": boolean,
'decipher": boolean,
}
Arguments:
case_id(str)
query(dict): a dictionary of query filters specified by the users
variant_ids(list(str)): A list of md5 variant ids
Returns:
mongo_query : A dictionary in the mongo query format
"""
query = query or {}
mongo_query = {}
gene_query = None
##### Base query params
# set up the fundamental query params: case_id, category, type and
# restrict to list of variants (if var list is provided)
for criterion in FUNDAMENTAL_CRITERIA:
if criterion == 'case_id':
LOG.debug("Building a mongo query for %s" % case_id)
mongo_query['case_id'] = case_id
elif criterion == 'variant_ids' and variant_ids:
LOG.debug("Adding variant_ids %s to query" % ', '.join(variant_ids))
mongo_query['variant_id'] = {'$in': variant_ids}
elif criterion == 'category':
LOG.debug("Querying category %s" % category)
mongo_query['category'] = category
elif criterion == 'variant_type':
mongo_query['variant_type'] = query.get('variant_type', 'clinical')
LOG.debug("Set variant type to %s", mongo_query['variant_type'])
# Requests to filter based on gene panels, hgnc_symbols or
# coordinate ranges must always be honored. They are always added to
# query as top level, implicit '$and'. When both hgnc_symbols and a
# panel is used, addition of this is delayed until after the rest of
# the query content is clear.
elif criterion in ['hgnc_symbols', 'gene_panels'] and gene_query is None:
gene_query = self.gene_filter(query, mongo_query)
elif criterion == 'chrom' and query.get('chrom'): # filter by coordinates
self.coordinate_filter(query, mongo_query)
elif criterion == 'variant_ids' and variant_ids:
LOG.debug("Adding variant_ids %s to query" % ', '.join(variant_ids))
mongo_query['variant_id'] = {'$in': variant_ids}
##### end of fundamental query params
##### start of the custom query params
# there is only 'clinsig' criterion among the primary terms right now
primary_terms = False
# gnomad_frequency, local_obs, clingen_ngi, swegen, spidex_human, cadd_score, genetic_models, mvl_tag
# functional_annotations, region_annotations, size, svtype, decipher, depth, alt_count, control_frequency
secondary_terms = False
# check if any of the primary criteria was specified in the query
for term in PRIMARY_CRITERIA:
if query.get(term):
primary_terms = True
# check if any of the secondary criteria was specified in the query:
for term in SECONDARY_CRITERIA:
if query.get(term):
secondary_terms = True
if primary_terms is True:
clinsign_filter = self.clinsig_query(query, mongo_query)
# Secondary, excluding filter criteria will hide variants in general,
# but can be overridden by an including, major filter criteria
# such as a Pathogenic ClinSig.
if secondary_terms is True:
secondary_filter = self.secondary_query(query, mongo_query)
# If there are no primary criteria given, all secondary criteria are added as a
# top level '$and' to the query.
if primary_terms is False:
if gene_query:
mongo_query['$and'] = [ {'$or': gene_query}, {'$and': secondary_filter}]
else:
mongo_query['$and'] = secondary_filter
# If there is only one primary criterion given without any secondary, it will also be
# added as a top level '$and'.
# Otherwise, primary criteria are added as a high level '$or' and all secondary criteria
# are joined together with them as a single lower level '$and'.
if primary_terms is True: # clinsig is specified
# Given a request to always return confident clinical variants,
# add the clnsig query as a major criteria, but only
# trust clnsig entries with trusted revstat levels.
if query.get('clinsig_confident_always_returned') == True:
if gene_query:
mongo_query['$and'] = [
{'$or': gene_query},
{
'$or': [
{'$and': secondary_filter}, clinsign_filter
]
}
]
else:
mongo_query['$or'] = [ {'$and': secondary_filter}, clinsign_filter ]
else: # clinsig terms are provided but no need for trusted revstat levels
secondary_filter.append(clinsign_filter)
if gene_query:
mongo_query['$and'] = [ {'$or': gene_query}, {'$and': secondary_filter}]
else:
mongo_query['$and'] = secondary_filter
elif primary_terms is True: # clinsig is provided without secondary terms query
# use implicit and
mongo_query['clnsig'] = clinsign_filter['clnsig']
if gene_query:
mongo_query['$and'] = [{ '$or': gene_query }]
elif gene_query: # no primary or secondary filters provided
mongo_query['$and'] = [{ '$or': gene_query }]
LOG.info("mongo query: %s", mongo_query)
return mongo_query
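A hedged usage sketch: a filter form restricting a case's clinical SNVs to a coordinate range with a CADD cutoff (case id and values are made up):

query = {'chrom': '1', 'start': 1000000, 'end': 2000000, 'cadd_score': 20}
mongo_query = adapter.build_query('internal_case_1', query=query, category='snv')
# chrom/start/end are handled by coordinate_filter and cadd_score by secondary_query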
|
Add clinsig filter values to the mongo query object
|
def clinsig_query(self, query, mongo_query):
""" Add clinsig filter values to the mongo query object
Args:
query(dict): a dictionary of query filters specified by the users
mongo_query(dict): the query that is going to be submitted to the database
Returns:
clinsig_query(dict): a dictionary with clinsig key-values
"""
LOG.debug('clinsig is a query parameter')
trusted_revision_level = ['mult', 'single', 'exp', 'guideline']
rank = []
str_rank = []
clnsig_query = {}
for item in query['clinsig']:
rank.append(int(item))
# search for human readable clinsig values in newer cases
rank.append(CLINSIG_MAP[int(item)])
str_rank.append(CLINSIG_MAP[int(item)])
if query.get('clinsig_confident_always_returned') == True:
LOG.debug("add CLINSIG filter with trusted_revision_level")
clnsig_query = { "clnsig":
{
'$elemMatch': {
'$or' : [
{
'$and' : [
{'value' : { '$in': rank }},
{'revstat': { '$in': trusted_revision_level }}
]
},
{
'$and': [
{'value' : re.compile('|'.join(str_rank))},
{'revstat' : re.compile('|'.join(trusted_revision_level))}
]
}
]
}
}
}
else:
LOG.debug("add CLINSIG filter for rank: %s" %
', '.join(str(query['clinsig'])))
clnsig_query = {
"clnsig":
{
'$elemMatch': {
'$or' : [
{ 'value' : { '$in': rank }},
{ 'value' : re.compile('|'.join(str_rank)) }
]
}
}
}
return clnsig_query
|
Adds genomic coordinate-related filters to the query object
|
def coordinate_filter(self, query, mongo_query):
""" Adds genomic coordinated-related filters to the query object
Args:
query(dict): a dictionary of query filters specified by the users
mongo_query(dict): the query that is going to be submitted to the database
Returns:
mongo_query(dict): returned object contains coordinate filters
"""
LOG.debug('Adding genomic coordinates to the query')
chromosome = query['chrom']
mongo_query['chromosome'] = chromosome
if (query.get('start') and query.get('end')):
mongo_query['position'] = {'$lte': int(query['end'])}
mongo_query['end'] = {'$gte': int(query['start'])}
return mongo_query
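A small sketch of the overlap semantics: a variant is kept if it starts before the region end and ends after the region start.

query = {'chrom': '1', 'start': 10000, 'end': 20000}
mongo_query = {}
adapter.coordinate_filter(query, mongo_query)
# mongo_query == {'chromosome': '1', 'position': {'$lte': 20000}, 'end': {'$gte': 10000}}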
|
Adds gene-related filters to the query object
|
def gene_filter(self, query, mongo_query):
""" Adds gene-related filters to the query object
Args:
query(dict): a dictionary of query filters specified by the users
mongo_query(dict): the query that is going to be submitted to the database
Returns:
mongo_query(dict): returned object contains gene and panel-related filters
"""
LOG.debug('Adding panel and genes-related parameters to the query')
gene_query = []
if query.get('hgnc_symbols') and query.get('gene_panels'):
gene_query.append({'hgnc_symbols': {'$in': query['hgnc_symbols']}})
gene_query.append({'panels': {'$in': query['gene_panels']}})
mongo_query['$or']=gene_query
else:
if query.get('hgnc_symbols'):
hgnc_symbols = query['hgnc_symbols']
mongo_query['hgnc_symbols'] = {'$in': hgnc_symbols}
LOG.debug("Adding hgnc_symbols: %s to query" %
', '.join(hgnc_symbols))
if query.get('gene_panels'):
gene_panels = query['gene_panels']
mongo_query['panels'] = {'$in': gene_panels}
return gene_query
|
Creates a secondary query object based on secondary parameters specified by user
|
def secondary_query(self, query, mongo_query, secondary_filter=None):
"""Creates a secondary query object based on secondary parameters specified by user
Args:
query(dict): a dictionary of query filters specified by the users
mongo_query(dict): the query that is going to be submitted to the database
Returns:
mongo_secondary_query(list): a dictionary with secondary query parameters
"""
LOG.debug('Creating a query object with secondary parameters')
mongo_secondary_query = []
# loop over secondary query criteria
for criterion in SECONDARY_CRITERIA:
if not query.get(criterion):
continue
if criterion == 'gnomad_frequency':
gnomad = query.get('gnomad_frequency')
if gnomad == '-1':
# -1 means to exclude all variants that exists in gnomad
mongo_query['gnomad_frequency'] = {'$exists': False}
else:
# Replace comma with dot
mongo_secondary_query.append(
{
'$or': [
{
'gnomad_frequency': {'$lt': float(gnomad)}
},
{
'gnomad_frequency': {'$exists': False}
}
]
}
)
LOG.debug("Adding gnomad_frequency to query")
if criterion == 'local_obs':
local_obs = query.get('local_obs')
mongo_secondary_query.append({
'$or': [
{'local_obs_old': None},
{'local_obs_old': {'$lt': local_obs + 1}},
]
})
if criterion in ['clingen_ngi', 'swegen']:
mongo_secondary_query.append({
'$or': [
{ criterion : {'$exists': False}},
{ criterion : {'$lt': query[criterion] + 1}},
]
})
if criterion == 'spidex_human':
# construct spidex query. Build the or part starting with empty SPIDEX values
spidex_human = query['spidex_human']
spidex_query_or_part = []
if ( 'not_reported' in spidex_human):
spidex_query_or_part.append({'spidex': {'$exists': False}})
for spidex_level in SPIDEX_HUMAN:
if ( spidex_level in spidex_human ):
spidex_query_or_part.append({'$or': [
{'$and': [{'spidex': {'$gt': SPIDEX_HUMAN[spidex_level]['neg'][0]}},
{'spidex': {'$lt': SPIDEX_HUMAN[spidex_level]['neg'][1]}}]},
{'$and': [{'spidex': {'$gt': SPIDEX_HUMAN[spidex_level]['pos'][0]}},
{'spidex': {'$lt': SPIDEX_HUMAN[spidex_level]['pos'][1]}} ]} ]})
mongo_secondary_query.append({'$or': spidex_query_or_part })
if criterion == 'cadd_score':
cadd = query['cadd_score']
cadd_query = {'cadd_score': {'$gt': float(cadd)}}
LOG.debug("Adding cadd_score: %s to query", cadd)
if query.get('cadd_inclusive') is True:
cadd_query = {
'$or': [
cadd_query,
{'cadd_score': {'$exists': False}}
]}
LOG.debug("Adding cadd inclusive to query")
mongo_secondary_query.append(cadd_query)
if criterion in ['genetic_models', 'functional_annotations', 'region_annotations']:
criterion_values = query[criterion]
if criterion == 'genetic_models':
mongo_secondary_query.append({criterion: {'$in': criterion_values}})
else:
# filter key will be genes.[criterion (minus final char)]
mongo_secondary_query.append({ '.'.join(['genes', criterion[:-1]]) : {'$in': criterion_values}})
LOG.debug("Adding {0}: {1} to query".format(criterion, ', '.join(criterion_values)))
if criterion == 'size':
size = query['size']
size_query = {'length': {'$gt': int(size)}}
LOG.debug("Adding length: %s to query" % size)
if query.get('size_shorter'):
size_query = {
'$or': [
{'length': {'$lt': int(size)}},
{'length': {'$exists': False}}
]}
LOG.debug("Adding size less than, undef inclusive to query.")
mongo_secondary_query.append(size_query)
if criterion == 'svtype':
svtype = query['svtype']
mongo_secondary_query.append({'sub_category': {'$in': svtype}})
LOG.debug("Adding SV_type %s to query" %
', '.join(svtype))
if criterion == 'decipher':
mongo_query['decipher'] = {'$exists': True}
LOG.debug("Adding decipher to query")
if criterion == 'depth':
LOG.debug("add depth filter")
mongo_secondary_query.append({
'tumor.read_depth': {
'$gt': query.get('depth'),
}
})
if criterion == 'alt_count':
LOG.debug("add min alt count filter")
mongo_secondary_query.append({
'tumor.alt_depth': {
'$gt': query.get('alt_count'),
}
})
if criterion == 'control_frequency':
LOG.debug("add minimum control frequency filter")
mongo_secondary_query.append({
'normal.alt_freq': {
'$lt': float(query.get('control_frequency')),
}
})
if criterion == 'mvl_tag':
LOG.debug("add managed variant list filter")
mongo_secondary_query.append({
'mvl_tag': {
'$exists': True,
}
})
return mongo_secondary_query
|
Drop the mongo database given.
|
def wipe(ctx):
"""Drop the mongo database given."""
LOG.info("Running scout wipe")
db_name = ctx.obj['mongodb']
LOG.info("Dropping database %s", db_name)
try:
ctx.obj['client'].drop_database(db_name)
except Exception as err:
LOG.warning(err)
ctx.abort()
LOG.info("Dropped whole database")
|
Parse user submitted panel.
|
def parse_panel(csv_stream):
"""Parse user submitted panel."""
reader = csv.DictReader(csv_stream, delimiter=';', quoting=csv.QUOTE_NONE)
genes = []
for gene_row in reader:
if not gene_row['HGNC_IDnumber'].strip().isdigit():
continue
transcripts_raw = gene_row.get('Disease_associated_transcript')
if transcripts_raw:
transcripts_list = [tx.split(':', 1)[-1].strip() for tx in transcripts_raw.split(',')]
else:
transcripts_list = []
models_raw = gene_row.get('Genetic_disease_model')
models_list = [model.strip() for model in models_raw.split(',')] if models_raw else []
panel_gene = dict(
symbol=gene_row['HGNC_symbol'].strip() if gene_row.get('HGNC_symbol') else None,
hgnc_id=int(gene_row['HGNC_IDnumber'].strip()),
disease_associated_transcripts=transcripts_list,
reduced_penetrance=True if gene_row.get('Reduced_penetrance') else None,
mosaicism=True if gene_row.get('Mosaicism') else None,
inheritance_models=models_list,
database_entry_version=gene_row.get('Database_entry_version'),
)
genes.append(panel_gene)
return genes
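A minimal sketch with an in-memory CSV stream; the column names match those read above and the row values are illustrative:

import io

csv_stream = io.StringIO(
    "HGNC_IDnumber;HGNC_symbol;Disease_associated_transcript;Genetic_disease_model\n"
    "1100;BRCA1;NM_007294;AD\n"
)
genes = parse_panel(csv_stream)
# genes[0]['hgnc_id'] == 1100, genes[0]['symbol'] == 'BRCA1'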
|
Build a clnsig object from parsed clinical significance information.
|
def build_clnsig(clnsig_info):
"""docstring for build_clnsig"""
clnsig_obj = dict(
value = clnsig_info['value'],
accession = clnsig_info.get('accession'),
revstat = clnsig_info.get('revstat')
)
return clnsig_obj
|
Load a bulk of hgnc gene objects. Raises IntegrityError if there are any write concerns.
|
def load_hgnc_bulk(self, gene_objs):
"""Load a bulk of hgnc gene objects
Raises IntegrityError if there are any write concerns
Args:
gene_objs(iterable(scout.models.hgnc_gene))
Returns:
result (pymongo.results.InsertManyResult)
"""
LOG.info("Loading gene bulk with length %s", len(gene_objs))
try:
result = self.hgnc_collection.insert_many(gene_objs)
except (DuplicateKeyError, BulkWriteError) as err:
raise IntegrityError(err)
return result
|
Load a bulk of transcript objects to the database
|
def load_transcript_bulk(self, transcript_objs):
"""Load a bulk of transcript objects to the database
Arguments:
transcript_objs(iterable(scout.models.hgnc_transcript))
"""
LOG.info("Loading transcript bulk")
try:
result = self.transcript_collection.insert_many(transcript_objs)
except (DuplicateKeyError, BulkWriteError) as err:
raise IntegrityError(err)
return result
|
Load a bulk of exon objects to the database
|
def load_exon_bulk(self, exon_objs):
"""Load a bulk of exon objects to the database
Arguments:
exon_objs(iterable(scout.models.hgnc_exon))
"""
try:
result = self.exon_collection.insert_many(exon_objs)
except (DuplicateKeyError, BulkWriteError) as err:
raise IntegrityError(err)
return result
|
Fetch a hgnc gene
|
def hgnc_gene(self, hgnc_identifier, build='37'):
"""Fetch a hgnc gene
Args:
hgnc_identifier(int)
Returns:
gene_obj(HgncGene)
"""
if build not in ['37', '38']:
build = '37'
query = {}
try:
# If the identifier is an integer we search for hgnc_id
hgnc_identifier = int(hgnc_identifier)
query['hgnc_id'] = hgnc_identifier
except ValueError:
# Else we search for a hgnc_symbol
query['hgnc_symbol'] = hgnc_identifier
query['build'] = build
LOG.debug("Fetching gene %s" % hgnc_identifier)
gene_obj = self.hgnc_collection.find_one(query)
if not gene_obj:
return None
# Add the transcripts:
transcripts = []
tx_objs = self.transcripts(build=build, hgnc_id=gene_obj['hgnc_id'])
if tx_objs.count() > 0:
for tx in tx_objs:
transcripts.append(tx)
gene_obj['transcripts'] = transcripts
return gene_obj
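Either an integer hgnc id or a gene symbol can be passed, as the try/except above shows; a sketch:

gene_obj = adapter.hgnc_gene(1100, build='37')       # lookup by hgnc_id
same_gene = adapter.hgnc_gene('BRCA1', build='37')   # lookup by hgnc_symbol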
|
Query the genes with a hgnc symbol and return the hgnc id
|
def hgnc_id(self, hgnc_symbol, build='37'):
"""Query the genes with a hgnc symbol and return the hgnc id
Args:
hgnc_symbol(str)
build(str)
Returns:
hgnc_id(int)
"""
#LOG.debug("Fetching gene %s", hgnc_symbol)
query = {'hgnc_symbol':hgnc_symbol, 'build':build}
projection = {'hgnc_id':1, '_id':0}
res = self.hgnc_collection.find(query, projection)
if res.count() > 0:
return res[0]['hgnc_id']
else:
return None
|
Fetch all hgnc genes that match a hgnc symbol
|
def hgnc_genes(self, hgnc_symbol, build='37', search=False):
"""Fetch all hgnc genes that match a hgnc symbol
Check both hgnc_symbol and aliases
Args:
hgnc_symbol(str)
build(str): The build in which to search
search(bool): if partial searching should be used
Returns:
result()
"""
LOG.debug("Fetching genes with symbol %s" % hgnc_symbol)
if search:
# first search for a full match
full_query = self.hgnc_collection.find({
'$or': [
{'aliases': hgnc_symbol},
{'hgnc_id': int(hgnc_symbol) if hgnc_symbol.isdigit() else None},
],
'build': build
})
if full_query.count() != 0:
return full_query
return self.hgnc_collection.find({
'aliases': {'$regex': hgnc_symbol, '$options': 'i'},
'build': build
})
return self.hgnc_collection.find({'build': build, 'aliases': hgnc_symbol})
|
Fetch all hgnc genes
|
def all_genes(self, build='37'):
"""Fetch all hgnc genes
Returns:
result()
"""
LOG.info("Fetching all genes")
return self.hgnc_collection.find({'build': build}).sort('chromosome', 1)
|
Return the number of hgnc genes in collection
|
def nr_genes(self, build=None):
"""Return the number of hgnc genes in collection
If build is used, return the number of genes of a certain build
Returns:
result()
"""
if build:
LOG.info("Fetching all genes from build %s", build)
else:
LOG.info("Fetching all genes")
return self.hgnc_collection.find({'build': build} if build else {}).count()
|
Delete the genes collection
|
def drop_genes(self, build=None):
"""Delete the genes collection"""
if build:
LOG.info("Dropping the hgnc_gene collection, build %s", build)
self.hgnc_collection.delete_many({'build': build})
else:
LOG.info("Dropping the hgnc_gene collection")
self.hgnc_collection.drop()
|
Delete the transcripts collection
|
def drop_transcripts(self, build=None):
"""Delete the transcripts collection"""
if build:
LOG.info("Dropping the transcripts collection, build %s", build)
self.transcript_collection.delete_many({'build': build})
else:
LOG.info("Dropping the transcripts collection")
self.transcript_collection.drop()
|
Delete the exons collection
|
def drop_exons(self, build=None):
"""Delete the exons collection"""
if build:
LOG.info("Dropping the exons collection, build %s", build)
self.exon_collection.delete_many({'build': build})
else:
LOG.info("Dropping the exons collection")
self.exon_collection.drop()
|
Return a dictionary with ensembl ids as keys and transcripts as value.
|
def ensembl_transcripts(self, build='37'):
"""Return a dictionary with ensembl ids as keys and transcripts as value.
Args:
build(str)
Returns:
ensembl_transcripts(dict): {<enst_id>: transcripts_obj, ...}
"""
ensembl_transcripts = {}
LOG.info("Fetching all transcripts")
for transcript_obj in self.transcript_collection.find({'build':build}):
enst_id = transcript_obj['transcript_id']
ensembl_transcripts[enst_id] = transcript_obj
LOG.info("Ensembl transcripts fetched")
return ensembl_transcripts
|
Return a dictionary with hgnc_symbol as key and gene_obj as value
|
def hgncsymbol_to_gene(self, build='37', genes=None):
"""Return a dictionary with hgnc_symbol as key and gene_obj as value
The result will have ONE entry for each gene in the database.
(For a specific build)
Args:
build(str)
genes(iterable(scout.models.HgncGene)):
Returns:
hgnc_dict(dict): {<hgnc_symbol(str)>: <gene(dict)>}
"""
hgnc_dict = {}
LOG.info("Building hgncsymbol_to_gene")
if not genes:
genes = self.hgnc_collection.find({'build':build})
for gene_obj in genes:
hgnc_dict[gene_obj['hgnc_symbol']] = gene_obj
LOG.info("All genes fetched")
return hgnc_dict
|
Return an iterable with hgnc_genes.
|
def gene_by_alias(self, symbol, build='37'):
"""Return a iterable with hgnc_genes.
If the gene symbol is listed as primary the iterable will only have
one result. If not the iterable will include all hgnc genes that have
the symbol as an alias.
Args:
symbol(str)
build(str)
Returns:
res(pymongo.Cursor(dict))
"""
res = self.hgnc_collection.find({'hgnc_symbol': symbol, 'build':build})
if res.count() == 0:
res = self.hgnc_collection.find({'aliases': symbol, 'build':build})
return res
|
Return a dictionary with hgnc symbols as keys and a list of hgnc ids as value.
|
def genes_by_alias(self, build='37', genes=None):
"""Return a dictionary with hgnc symbols as keys and a list of hgnc ids
as value.
If a gene symbol is listed as primary, the set of ids will only consist
of that entry. If not, the true gene cannot be determined, so the result
is the set of all matching hgnc_ids.
Args:
build(str)
genes(iterable(scout.models.HgncGene)):
Returns:
alias_genes(dict): {<hgnc_alias>: {'true': <hgnc_id>, 'ids': {<hgnc_id_1>, <hgnc_id_2>, ...}}}
"""
LOG.info("Fetching all genes by alias")
# Collect one entry for each alias symbol that exists
alias_genes = {}
# Loop over all genes
if not genes:
genes = self.hgnc_collection.find({'build':build})
for gene in genes:
# Collect the hgnc_id
hgnc_id = gene['hgnc_id']
# Collect the true symbol given by hgnc
hgnc_symbol = gene['hgnc_symbol']
# Loop over all aliases
for alias in gene['aliases']:
true_id = None
# If the alias is the same as hgnc symbol we know the true id
if alias == hgnc_symbol:
true_id = hgnc_id
# If the alias is already in the list we add the id
if alias in alias_genes:
alias_genes[alias]['ids'].add(hgnc_id)
if true_id:
alias_genes[alias]['true'] = hgnc_id
else:
alias_genes[alias] = {
'true': true_id,
'ids': set([hgnc_id])
}
return alias_genes
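A sketch of the returned mapping (ids made up); an alias that is not itself a primary symbol keeps 'true': None:

alias_map = adapter.genes_by_alias(build='37')
# {'BRCA1': {'true': 1100, 'ids': {1100}},
#  'RNF53': {'true': None, 'ids': {1100}}}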
|
Return a set with identifier transcript(s)
|
def get_id_transcripts(self, hgnc_id, build='37'):
"""Return a set with identifier transcript(s)
Choose all refseq transcripts with NM symbols; if none were found choose ONE with NR,
if no NR choose ONE with XM. If there are no RefSeq transcript identifiers choose the
longest ensembl transcript.
Args:
hgnc_id(int)
build(str)
Returns:
identifier_transcripts(set)
"""
transcripts = self.transcripts(build=build, hgnc_id=hgnc_id)
identifier_transcripts = set()
longest = None
nr = []
xm = []
for tx in transcripts:
enst_id = tx['transcript_id']
# Should we not check if it is longest?
if not longest:
longest = enst_id
refseq_id = tx.get('refseq_id')
if not refseq_id:
continue
if 'NM' in refseq_id:
identifier_transcripts.add(enst_id)
elif 'NR' in refseq_id:
nr.append(enst_id)
elif 'XM' in refseq_id:
xm.append(enst_id)
if identifier_transcripts:
return identifier_transcripts
if nr:
return set([nr[0]])
if xm:
return set([xm[0]])
return set([longest])
|
Return a dictionary with hgnc_id as keys and a list of transcripts as value.
|
def transcripts_by_gene(self, build='37'):
"""Return a dictionary with hgnc_id as keys and a list of transcripts as value
Args:
build(str)
Returns:
hgnc_transcripts(dict)
"""
hgnc_transcripts = {}
LOG.info("Fetching all transcripts")
for transcript in self.transcript_collection.find({'build':build}):
hgnc_id = transcript['hgnc_id']
if not hgnc_id in hgnc_transcripts:
hgnc_transcripts[hgnc_id] = []
hgnc_transcripts[hgnc_id].append(transcript)
return hgnc_transcripts
|
Return a dictionary with hgnc_id as keys and a set of id transcripts as value.
|
def id_transcripts_by_gene(self, build='37'):
"""Return a dictionary with hgnc_id as keys and a set of id transcripts as value
Args:
build(str)
Returns:
hgnc_id_transcripts(dict)
"""
hgnc_id_transcripts = {}
LOG.info("Fetching all id transcripts")
for gene_obj in self.hgnc_collection.find({'build': build}):
hgnc_id = gene_obj['hgnc_id']
id_transcripts = self.get_id_transcripts(hgnc_id=hgnc_id, build=build)
hgnc_id_transcripts[hgnc_id] = id_transcripts
return hgnc_id_transcripts
|
Return a dictionary with ensembl ids as keys and gene objects as value.
|
def ensembl_genes(self, build='37'):
"""Return a dictionary with ensembl ids as keys and gene objects as value.
Args:
build(str)
Returns:
genes(dict): {<ensg_id>: gene_obj, ...}
"""
genes = {}
LOG.info("Fetching all genes")
for gene_obj in self.hgnc_collection.find({'build':build}):
ensg_id = gene_obj['ensembl_id']
hgnc_id = gene_obj['hgnc_id']
genes[ensg_id] = gene_obj
LOG.info("Ensembl genes fetched")
return genes
|
Return all transcripts. If a gene is specified return all transcripts for the gene.
|
def transcripts(self, build='37', hgnc_id=None):
"""Return all transcripts.
If a gene is specified return all transcripts for the gene
Args:
build(str)
hgnc_id(int)
Returns:
iterable(transcript)
"""
query = {'build': build}
if hgnc_id:
query['hgnc_id'] = hgnc_id
return self.transcript_collection.find(query)
|
Check if a hgnc symbol is an alias
|
def to_hgnc(self, hgnc_alias, build='37'):
"""Check if a hgnc symbol is an alias
Return the correct hgnc symbol, if not existing return None
Args:
hgnc_alias(str)
Returns:
hgnc_symbol(str)
"""
result = self.hgnc_genes(hgnc_symbol=hgnc_alias, build=build)
if result:
for gene in result:
return gene['hgnc_symbol']
else:
return None
|
Add the correct hgnc id to a set of genes with hgnc symbols
|
def add_hgnc_id(self, genes):
"""Add the correct hgnc id to a set of genes with hgnc symbols
Args:
genes(list(dict)): A set of genes with hgnc symbols only
"""
genes_by_alias = self.genes_by_alias()
for gene in genes:
id_info = genes_by_alias.get(gene['hgnc_symbol'])
if not id_info:
LOG.warning("Gene %s does not exist in scout", gene['hgnc_symbol'])
continue
gene['hgnc_id'] = id_info['true']
if not id_info['true']:
if len(id_info['ids']) > 1:
LOG.warning("Gene %s has ambiguous value, please choose one hgnc id in result", gene['hgnc_symbol'])
gene['hgnc_id'] = ','.join([str(hgnc_id) for hgnc_id in id_info['ids']])
|
Return a dictionary with chromosomes as keys and interval trees as values
|
def get_coding_intervals(self, build='37', genes=None):
"""Return a dictionary with chromosomes as keys and interval trees as values
Each interval represents a coding region of overlapping genes.
Args:
build(str): The genome build
genes(iterable(scout.models.HgncGene)):
Returns:
intervals(dict): A dictionary with chromosomes as keys and overlapping genomic intervals as values
"""
intervals = {}
if not genes:
genes = self.all_genes(build=build)
LOG.info("Building interval trees...")
for i,hgnc_obj in enumerate(genes):
chrom = hgnc_obj['chromosome']
start = max((hgnc_obj['start'] - 5000), 1)
end = hgnc_obj['end'] + 5000
# If this is the first time a chromosome is seen we create a new
# interval tree with current interval
if chrom not in intervals:
intervals[chrom] = intervaltree.IntervalTree()
intervals[chrom].addi(start, end, i)
continue
res = intervals[chrom].search(start, end)
# If the interval did not overlap any other intervals we insert it and continue
if not res:
intervals[chrom].addi(start, end, i)
continue
# Loop over the overlapping intervals
for interval in res:
# Update the positions to new max and mins
if interval.begin < start:
start = interval.begin
if interval.end > end:
end = interval.end
# Delete the old interval
intervals[chrom].remove(interval)
# Add the new interval consisting of the overlapping ones
intervals[chrom].addi(start, end, i)
return intervals
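A sketch of querying the returned trees, assuming the intervaltree package API used above (addi/search):

intervals = adapter.get_coding_intervals(build='37')
hits = intervals.get('17', intervaltree.IntervalTree()).search(41200000, 41200001)
for interval in hits:
    print(interval.begin, interval.end)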
|
Create exon objects and insert them into the database.
|
def load_exons(self, exons, genes=None, build='37'):
"""Create exon objects and insert them into the database
Args:
exons(iterable(dict))
"""
genes = genes or self.ensembl_genes(build)
for exon in exons:
exon_obj = build_exon(exon, genes)
if not exon_obj:
continue
res = self.exon_collection.insert_one(exon_obj)
|
Return all exons.
|
def exons(self, hgnc_id=None, transcript_id=None, build=None):
"""Return all exons
Args:
hgnc_id(int)
transcript_id(str)
build(str)
Returns:
exons(iterable(dict))
"""
query = {}
if build:
query['build'] = build
if hgnc_id:
query['hgnc_id'] = hgnc_id
if transcript_id:
query['transcript_id'] = transcript_id
return self.exon_collection.find(query)
|
Update the automatically generated OMIM gene panel in the database.
|
def omim(context, api_key, institute):
"""
Update the automatically generated OMIM gene panel in the database.
"""
LOG.info("Running scout update omim")
adapter = context.obj['adapter']
api_key = api_key or context.obj.get('omim_api_key')
if not api_key:
LOG.warning("Please provide a omim api key to load the omim gene panel")
context.abort()
institute_obj = adapter.institute(institute)
if not institute_obj:
LOG.info("Institute %s could not be found in database", institute)
LOG.warning("Please specify an existing institute")
context.abort()
try:
adapter.load_omim_panel(api_key, institute=institute)
except Exception as err:
LOG.error(err)
context.abort()
|
Display a list of all user institutes.
|
def index():
"""Display a list of all user institutes."""
institute_objs = user_institutes(store, current_user)
institutes_count = ((institute_obj, store.cases(collaborator=institute_obj['_id']).count())
for institute_obj in institute_objs if institute_obj)
return dict(institutes=institutes_count)
|
Display a list of cases for an institute.
|
def cases(institute_id):
"""Display a list of cases for an institute."""
institute_obj = institute_and_case(store, institute_id)
query = request.args.get('query')
limit = 100
if request.args.get('limit'):
limit = int(request.args.get('limit'))
skip_assigned = request.args.get('skip_assigned')
is_research = request.args.get('is_research')
all_cases = store.cases(collaborator=institute_id, name_query=query,
skip_assigned=skip_assigned, is_research=is_research)
data = controllers.cases(store, all_cases, limit)
sanger_unevaluated = controllers.get_sanger_unevaluated(store, institute_id, current_user.email)
if len(sanger_unevaluated)> 0:
data['sanger_unevaluated'] = sanger_unevaluated
return dict(institute=institute_obj, skip_assigned=skip_assigned,
is_research=is_research, query=query, **data)
|
Display one case.
|
def case(institute_id, case_name):
"""Display one case."""
institute_obj, case_obj = institute_and_case(store, institute_id, case_name)
data = controllers.case(store, institute_obj, case_obj)
return dict(institute=institute_obj, case=case_obj, **data)
|
Show all MatchMaker matches for a given case
|
def matchmaker_matches(institute_id, case_name):
"""Show all MatchMaker matches for a given case"""
# check that only authorized users can access MME patients matches
user_obj = store.user(current_user.email)
if 'mme_submitter' not in user_obj['roles']:
flash('unauthorized request', 'warning')
return redirect(request.referrer)
# Required params for getting matches from MME server:
mme_base_url = current_app.config.get('MME_URL')
mme_token = current_app.config.get('MME_TOKEN')
if not mme_base_url or not mme_token:
flash('An error occurred reading matchmaker connection parameters. Please check config file!', 'danger')
return redirect(request.referrer)
institute_obj, case_obj = institute_and_case(store, institute_id, case_name)
data = controllers.mme_matches(case_obj, institute_obj, mme_base_url, mme_token)
if data and data.get('server_errors'):
flash('MatchMaker server returned error:{}'.format(data['server_errors']), 'danger')
return redirect(request.referrer)
elif not data:
data = {
'institute' : institute_obj,
'case' : case_obj
}
return data
|
Starts an internal match or a match against one or all MME external nodes
|
def matchmaker_match(institute_id, case_name, target):
"""Starts an internal match or a match against one or all MME external nodes"""
institute_obj, case_obj = institute_and_case(store, institute_id, case_name)
# check that only authorized users can run matches
user_obj = store.user(current_user.email)
if 'mme_submitter' not in user_obj['roles']:
flash('unauthorized request', 'warning')
return redirect(request.referrer)
# Required params for sending an add request to MME:
mme_base_url = current_app.config.get('MME_URL')
mme_accepts = current_app.config.get('MME_ACCEPTS')
mme_token = current_app.config.get('MME_TOKEN')
nodes = current_app.mme_nodes
if not mme_base_url or not mme_token or not mme_accepts:
flash('An error occurred reading matchmaker connection parameters. Please check config file!', 'danger')
return redirect(request.referrer)
match_results = controllers.mme_match(case_obj, target, mme_base_url, mme_token, nodes, mme_accepts)
ok_responses = 0
for match_result in match_results:
if match_result['status_code'] == 200:
ok_responses += 1
if ok_responses:
flash("Match request sent. Look for eventual matches in 'Matches' page.", 'info')
else:
flash('An error occurred while sending match request.', 'danger')
return redirect(request.referrer)
|
Add or update a case in MatchMaker
|
def matchmaker_add(institute_id, case_name):
"""Add or update a case in MatchMaker"""
# check that only authorized users can add patients to MME
user_obj = store.user(current_user.email)
if 'mme_submitter' not in user_obj['roles']:
flash('unauthorized request', 'warning')
return redirect(request.referrer)
institute_obj, case_obj = institute_and_case(store, institute_id, case_name)
causatives = False
features = False
if case_obj.get('suspects') and len(case_obj.get('suspects'))>3:
flash('At the moment it is not possible to save to MatchMaker more than 3 pinned variants', 'warning')
return redirect(request.referrer)
elif case_obj.get('suspects'):
causatives = True
if case_obj.get('phenotype_terms'):
features = True
mme_save_options = ['sex', 'features', 'disorders']
for index, item in enumerate(mme_save_options):
if item in request.form:
log.info('item {} is in request form'.format(item))
mme_save_options[index] = True
else:
mme_save_options[index] = False
genomic_features = request.form.get('genomicfeatures')
genes_only = True # upload to matchmaker only gene names
if genomic_features == 'variants':
genes_only = False # upload to matchmaker both variants and gene names
# If there are no genomic features nor HPO terms to share for this case, abort
if (not case_obj.get('suspects') and not mme_save_options[1]) or (causatives is False and features is False):
flash('In order to upload a case to MatchMaker you need to pin a variant or at least assign a phenotype (HPO term)', 'danger')
return redirect(request.referrer)
user_obj = store.user(current_user.email)
# Required params for sending an add request to MME:
mme_base_url = current_app.config.get('MME_URL')
mme_accepts = current_app.config.get('MME_ACCEPTS')
mme_token = current_app.config.get('MME_TOKEN')
if not mme_base_url or not mme_accepts or not mme_token:
flash('An error occurred reading matchmaker connection parameters. Please check config file!', 'danger')
return redirect(request.referrer)
add_result = controllers.mme_add(store=store, user_obj=user_obj, case_obj=case_obj,
add_gender=mme_save_options[0], add_features=mme_save_options[1],
add_disorders=mme_save_options[2], genes_only=genes_only,
mme_base_url = mme_base_url, mme_accepts=mme_accepts, mme_token=mme_token)
# flash MME responses (one for each patient posted)
n_success_response = 0
n_inserted = 0
n_updated = 0
category = 'warning'
for resp in add_result['server_responses']:
message = resp.get('message')
if resp.get('status_code') == 200:
n_success_response += 1
else:
flash('An error occurred while adding patient to MatchMaker: {}'.format(message), 'warning')
if message == 'Patient was successfully updated.':
n_updated +=1
elif message == 'Patient was successfully inserted into database.':
n_inserted +=1
# if at least one patient was inserted or updated into matchmaker, save submission at the case level:
if n_inserted or n_updated:
category = 'success'
store.case_mme_update(case_obj=case_obj, user_obj=user_obj, mme_subm_obj=add_result)
flash('Number of new patients in MatchMaker: {0}, number of updated records: {1}, number of failed requests: {2}'.format(
n_inserted, n_updated, len(add_result.get('server_responses')) - n_success_response), category)
return redirect(request.referrer)
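# A hypothetical, clearer alternative to the index-overwrite handling of
# mme_save_options above: build a dict of booleans keyed by option name
# straight from the submitted form, then read flags by name instead of by
# position. This is a sketch of the idea, not the implementation used in the view.
def collect_mme_save_options(form):
    """Return which optional fields the user chose to share with MatchMaker."""
    return {option: option in form for option in ('sex', 'features', 'disorders')}

# Example: collect_mme_save_options(request.form)['features'] would replace
# the positional lookup mme_save_options[1] used above.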
|
Remove a case from MatchMaker
|
def matchmaker_delete(institute_id, case_name):
"""Remove a case from MatchMaker"""
# check that only authorized users can delete patients from MME
user_obj = store.user(current_user.email)
if 'mme_submitter' not in user_obj['roles']:
flash('unauthorized request', 'warning')
return redirect(request.referrer)
institute_obj, case_obj = institute_and_case(store, institute_id, case_name)
# Required params for sending a delete request to MME:
mme_base_url = current_app.config.get('MME_URL')
mme_token = current_app.config.get('MME_TOKEN')
if not mme_base_url or not mme_token:
flash('An error occurred reading matchmaker connection parameters. Please check config file!', 'danger')
return redirect(request.referrer)
delete_result = controllers.mme_delete(case_obj, mme_base_url, mme_token)
n_deleted = 0
category = 'warning'
for resp in delete_result:
if resp['status_code'] == 200:
n_deleted += 1
else:
flash(resp['message'], category)
if n_deleted:
category = 'success'
# update case by removing mme submission
# and create events for patients deletion from MME
user_obj = store.user(current_user.email)
store.case_mme_delete(case_obj=case_obj, user_obj=user_obj)
flash('Number of patients deleted from Matchmaker: {} out of {}'.format(n_deleted, len(delete_result)), category)
return redirect(request.referrer)
|
Display a list of SNV variants.
|
def gene_variants(institute_id):
"""Display a list of SNV variants."""
page = int(request.form.get('page', 1))
institute_obj = institute_and_case(store, institute_id)
# populate form, conditional on request method
if(request.method == "POST"):
form = GeneVariantFiltersForm(request.form)
else:
form = GeneVariantFiltersForm(request.args)
variant_type = form.data.get('variant_type', 'clinical')
# check if supplied gene symbols exist
hgnc_symbols = []
non_clinical_symbols = []
not_found_symbols = []
not_found_ids = []
data = {}
if (form.hgnc_symbols.data) and len(form.hgnc_symbols.data) > 0:
is_clinical = form.data.get('variant_type', 'clinical') == 'clinical'
clinical_symbols = store.clinical_symbols(case_obj) if is_clinical else None
for hgnc_symbol in form.hgnc_symbols.data:
if hgnc_symbol.isdigit():
hgnc_gene = store.hgnc_gene(int(hgnc_symbol))
if hgnc_gene is None:
not_found_ids.append(hgnc_symbol)
else:
hgnc_symbols.append(hgnc_gene['hgnc_symbol'])
elif store.hgnc_genes(hgnc_symbol).count() == 0:
not_found_symbols.append(hgnc_symbol)
elif is_clinical and (hgnc_symbol not in clinical_symbols):
non_clinical_symbols.append(hgnc_symbol)
else:
hgnc_symbols.append(hgnc_symbol)
if (not_found_ids):
flash("HGNC id not found: {}".format(", ".join(not_found_ids)), 'warning')
if (not_found_symbols):
flash("HGNC symbol not found: {}".format(", ".join(not_found_symbols)), 'warning')
if (non_clinical_symbols):
flash("Gene not included in clinical list: {}".format(", ".join(non_clinical_symbols)), 'warning')
form.hgnc_symbols.data = hgnc_symbols
log.debug("query {}".format(form.data))
variants_query = store.gene_variants(query=form.data, category='snv',
variant_type=variant_type)
data = controllers.gene_variants(store, variants_query, page)
return dict(institute=institute_obj, form=form, page=page, **data)
|
Update (PUT) synopsis of a specific case.
|
def case_synopsis(institute_id, case_name):
"""Update (PUT) synopsis of a specific case."""
institute_obj, case_obj = institute_and_case(store, institute_id, case_name)
user_obj = store.user(current_user.email)
new_synopsis = request.form.get('synopsis')
controllers.update_synopsis(store, institute_obj, case_obj, user_obj, new_synopsis)
return redirect(request.referrer)
|
Visualize case report
|
def case_report(institute_id, case_name):
"""Visualize case report"""
institute_obj, case_obj = institute_and_case(store, institute_id, case_name)
data = controllers.case_report_content(store, institute_obj, case_obj)
return dict(institute=institute_obj, case=case_obj, format='html', **data)
|
Download a pdf report for a case
|
def pdf_case_report(institute_id, case_name):
"""Download a pdf report for a case"""
institute_obj, case_obj = institute_and_case(store, institute_id, case_name)
data = controllers.case_report_content(store, institute_obj, case_obj)
# add coverage report on the bottom of this report
if current_app.config.get('SQLALCHEMY_DATABASE_URI'):
data['coverage_report'] = controllers.coverage_report_contents(store, institute_obj, case_obj, request.url_root)
# workaround to be able to print the case pedigree to pdf
if case_obj.get('madeline_info') is not None:
with open(os.path.join(cases_bp.static_folder, 'madeline.svg'), 'w') as temp_madeline:
temp_madeline.write(case_obj['madeline_info'])
html_report = render_template('cases/case_report.html', institute=institute_obj, case=case_obj, format='pdf', **data)
return render_pdf(HTML(string=html_report), download_filename=case_obj['display_name']+'_'+datetime.datetime.now().strftime("%Y-%m-%d")+'_scout.pdf')
|
Add or remove a diagnosis for a case.
|
def case_diagnosis(institute_id, case_name):
"""Add or remove a diagnosis for a case."""
institute_obj, case_obj = institute_and_case(store, institute_id, case_name)
user_obj = store.user(current_user.email)
link = url_for('.case', institute_id=institute_id, case_name=case_name)
level = 'phenotype' if 'phenotype' in request.form else 'gene'
omim_id = request.form['omim_id']
remove = True if request.args.get('remove') == 'yes' else False
store.diagnose(institute_obj, case_obj, user_obj, link, level=level,
omim_id=omim_id, remove=remove)
return redirect(request.referrer)
|
Handle phenotypes.
|
def phenotypes(institute_id, case_name, phenotype_id=None):
"""Handle phenotypes."""
institute_obj, case_obj = institute_and_case(store, institute_id, case_name)
case_url = url_for('.case', institute_id=institute_id, case_name=case_name)
is_group = request.args.get('is_group') == 'yes'
user_obj = store.user(current_user.email)
if phenotype_id:
# DELETE a phenotype item/group from case
store.remove_phenotype(institute_obj, case_obj, user_obj, case_url,
phenotype_id, is_group=is_group)
else:
try:
# add a new phenotype item/group to the case
phenotype_term = request.form['hpo_term']
if phenotype_term.startswith('HP:') or len(phenotype_term) == 7:
hpo_term = phenotype_term.split(' | ', 1)[0]
store.add_phenotype(institute_obj, case_obj, user_obj, case_url,
hpo_term=hpo_term, is_group=is_group)
else:
# assume omim id
store.add_phenotype(institute_obj, case_obj, user_obj, case_url,
omim_term=phenotype_term)
except ValueError:
return abort(400, ("unable to add phenotype: {}".format(phenotype_term)))
return redirect(case_url)
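# The HPO autocomplete (presumably backed by the hpoterms endpoint further
# down) submits entries on the form 'HP:0001250 | Seizure'; the view keeps
# only the identifier before the separator. A minimal illustration with a
# hypothetical form value:
phenotype_term = 'HP:0001250 | Seizure'
hpo_term = phenotype_term.split(' | ', 1)[0]   # -> 'HP:0001250'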
|
Perform actions on multiple phenotypes.
|
def phenotypes_actions(institute_id, case_name):
"""Perform actions on multiple phenotypes."""
institute_obj, case_obj = institute_and_case(store, institute_id, case_name)
case_url = url_for('.case', institute_id=institute_id, case_name=case_name)
action = request.form['action']
hpo_ids = request.form.getlist('hpo_id')
user_obj = store.user(current_user.email)
if action == 'DELETE':
for hpo_id in hpo_ids:
# DELETE a phenotype from the list
store.remove_phenotype(institute_obj, case_obj, user_obj, case_url, hpo_id)
elif action == 'PHENOMIZER':
if len(hpo_ids) == 0:
hpo_ids = [term['phenotype_id'] for term in case_obj.get('phenotype_terms', [])]
username = current_app.config['PHENOMIZER_USERNAME']
password = current_app.config['PHENOMIZER_PASSWORD']
diseases = controllers.hpo_diseases(username, password, hpo_ids)
return render_template('cases/diseases.html', diseases=diseases,
institute=institute_obj, case=case_obj)
elif action == 'GENES':
hgnc_symbols = set()
for raw_symbols in request.form.getlist('genes'):
# avoid empty lists
if raw_symbols:
hgnc_symbols.update(raw_symbol.split(' ', 1)[0] for raw_symbol in
raw_symbols.split('|'))
store.update_dynamic_gene_list(case_obj, hgnc_symbols=hgnc_symbols)
elif action == 'GENERATE':
if len(hpo_ids) == 0:
hpo_ids = [term['phenotype_id'] for term in case_obj.get('phenotype_terms', [])]
results = store.generate_hpo_gene_list(*hpo_ids)
# determine how many HPO terms each gene must match
hpo_count = int(request.form.get('min_match') or 1)
hgnc_ids = [result[0] for result in results if result[1] >= hpo_count]
store.update_dynamic_gene_list(case_obj, hgnc_ids=hgnc_ids, phenotype_ids=hpo_ids)
return redirect(case_url)
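# Minimal sketch of the 'GENERATE' branch above: each result pairs an HGNC id
# with the number of submitted HPO terms it matched, and only genes matching
# at least `min_match` terms make it into the dynamic gene list. The tuples
# below are hypothetical.
results = [(7881, 3), (10896, 1), (4296, 2)]   # (hgnc_id, matched_hpo_terms)
min_match = 2
hgnc_ids = [hgnc_id for hgnc_id, count in results if count >= min_match]
# hgnc_ids == [7881, 4296]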
|
Handle events.
|
def events(institute_id, case_name, event_id=None):
"""Handle events."""
institute_obj, case_obj = institute_and_case(store, institute_id, case_name)
link = request.form.get('link')
content = request.form.get('content')
variant_id = request.args.get('variant_id')
user_obj = store.user(current_user.email)
if event_id:
# delete the event
store.delete_event(event_id)
else:
if variant_id:
# create a variant comment
variant_obj = store.variant(variant_id)
level = request.form.get('level', 'specific')
store.comment(institute_obj, case_obj, user_obj, link,
variant=variant_obj, content=content, comment_level=level)
else:
# create a case comment
store.comment(institute_obj, case_obj, user_obj, link, content=content)
return redirect(request.referrer)
|
Update status of a specific case.
|
def status(institute_id, case_name):
"""Update status of a specific case."""
institute_obj, case_obj = institute_and_case(store, institute_id, case_name)
user_obj = store.user(current_user.email)
status = request.form.get('status', case_obj['status'])
link = url_for('.case', institute_id=institute_id, case_name=case_name)
if status == 'archive':
store.archive_case(institute_obj, case_obj, user_obj, status, link)
else:
store.update_status(institute_obj, case_obj, user_obj, status, link)
return redirect(request.referrer)
|
Assign and unassign a user from a case.
|
def assign(institute_id, case_name, user_id=None):
"""Assign and unassign a user from a case."""
institute_obj, case_obj = institute_and_case(store, institute_id, case_name)
link = url_for('.case', institute_id=institute_id, case_name=case_name)
if user_id:
user_obj = store.user(user_id)
else:
user_obj = store.user(current_user.email)
if request.form.get('action') == 'DELETE':
store.unassign(institute_obj, case_obj, user_obj, link)
else:
store.assign(institute_obj, case_obj, user_obj, link)
return redirect(request.referrer)
|
Search for HPO terms.
|
def hpoterms():
"""Search for HPO terms."""
query = request.args.get('query')
if query is None:
return abort(500)
terms = sorted(store.hpo_terms(query=query), key=itemgetter('hpo_number'))
json_terms = [
{'name': '{} | {}'.format(term['_id'], term['description']),
'id': term['_id']
} for term in terms[:7]]
return jsonify(json_terms)
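# For a query such as 'seizure', the endpoint above returns JSON shaped
# roughly like this (hypothetical example, at most seven terms):
# [
#     {"name": "HP:0001250 | Seizure", "id": "HP:0001250"},
#     ...
# ]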
|
Pin and unpin variants to/from the list of suspects.
|
def pin_variant(institute_id, case_name, variant_id):
"""Pin and unpin variants to/from the list of suspects."""
institute_obj, case_obj = institute_and_case(store, institute_id, case_name)
variant_obj = store.variant(variant_id)
user_obj = store.user(current_user.email)
link = url_for('variants.variant', institute_id=institute_id, case_name=case_name,
variant_id=variant_id)
if request.form['action'] == 'ADD':
store.pin_variant(institute_obj, case_obj, user_obj, link, variant_obj)
elif request.form['action'] == 'DELETE':
store.unpin_variant(institute_obj, case_obj, user_obj, link, variant_obj)
return redirect(request.referrer or link)
|
Mark a variant as sanger validated.
|
def mark_validation(institute_id, case_name, variant_id):
"""Mark a variant as sanger validated."""
institute_obj, case_obj = institute_and_case(store, institute_id, case_name)
variant_obj = store.variant(variant_id)
user_obj = store.user(current_user.email)
validate_type = request.form['type'] or None
link = url_for('variants.variant', institute_id=institute_id, case_name=case_name,
variant_id=variant_id)
store.validate(institute_obj, case_obj, user_obj, link, variant_obj, validate_type)
return redirect(request.referrer or link)
|
Mark a variant as confirmed causative.
|
def mark_causative(institute_id, case_name, variant_id):
"""Mark a variant as confirmed causative."""
institute_obj, case_obj = institute_and_case(store, institute_id, case_name)
variant_obj = store.variant(variant_id)
user_obj = store.user(current_user.email)
link = url_for('variants.variant', institute_id=institute_id, case_name=case_name,
variant_id=variant_id)
if request.form['action'] == 'ADD':
store.mark_causative(institute_obj, case_obj, user_obj, link, variant_obj)
elif request.form['action'] == 'DELETE':
store.unmark_causative(institute_obj, case_obj, user_obj, link, variant_obj)
# send the user back to the case that was marked as solved
case_url = url_for('.case', institute_id=institute_id, case_name=case_name)
return redirect(case_url)
|
Mark a case that has been checked. This means to set case['needs_check'] to False
|
def check_case(institute_id, case_name):
"""Mark a case that is has been checked.
This means to set case['needs_check'] to False
"""
institute_obj, case_obj = institute_and_case(store, institute_id, case_name)
store.case_collection.find_one_and_update({'_id':case_obj['_id']}, {'$set': {'needs_check': False}})
return redirect(request.referrer)
|
Display delivery report.
|
def delivery_report(institute_id, case_name):
"""Display delivery report."""
institute_obj, case_obj = institute_and_case(store, institute_id, case_name)
if case_obj.get('delivery_report') is None:
return abort(404)
date_str = request.args.get('date')
if date_str:
delivery_report = None
analysis_date = parse_date(date_str)
for analysis_data in case_obj['analyses']:
if analysis_data['date'] == analysis_date:
delivery_report = analysis_data['delivery_report']
if delivery_report is None:
return abort(404)
else:
delivery_report = case_obj['delivery_report']
out_dir = os.path.dirname(delivery_report)
filename = os.path.basename(delivery_report)
return send_from_directory(out_dir, filename)
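# Sketch of the date-based lookup above, with hypothetical analyses: the view
# returns the delivery report of the analysis whose 'date' matches the parsed
# query parameter, and aborts with 404 when nothing matches.
import datetime

analyses = [
    {'date': datetime.datetime(2018, 5, 2), 'delivery_report': '/path/to/a.html'},
    {'date': datetime.datetime(2018, 9, 14), 'delivery_report': '/path/to/b.html'},
]
wanted = datetime.datetime(2018, 9, 14)
delivery_report = next((analysis['delivery_report'] for analysis in analyses
                        if analysis['date'] == wanted), None)
# delivery_report == '/path/to/b.html'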
|
Share a case with a different institute.
|
def share(institute_id, case_name):
"""Share a case with a different institute."""
institute_obj, case_obj = institute_and_case(store, institute_id, case_name)
user_obj = store.user(current_user.email)
collaborator_id = request.form['collaborator']
revoke_access = 'revoke' in request.form
link = url_for('.case', institute_id=institute_id, case_name=case_name)
if revoke_access:
store.unshare(institute_obj, case_obj, collaborator_id, user_obj, link)
else:
store.share(institute_obj, case_obj, collaborator_id, user_obj, link)
return redirect(request.referrer)
|
Request a case to be rerun.
|
def rerun(institute_id, case_name):
"""Request a case to be rerun."""
sender = current_app.config['MAIL_USERNAME']
recipient = current_app.config['TICKET_SYSTEM_EMAIL']
controllers.rerun(store, mail, current_user, institute_id, case_name, sender,
recipient)
return redirect(request.referrer)
|
Open the research list for a case.
|
def research(institute_id, case_name):
"""Open the research list for a case."""
institute_obj, case_obj = institute_and_case(store, institute_id, case_name)
user_obj = store.user(current_user.email)
link = url_for('.case', institute_id=institute_id, case_name=case_name)
store.open_research(institute_obj, case_obj, user_obj, link)
return redirect(request.referrer)
|
Add/remove institute tags.
|
def cohorts(institute_id, case_name):
"""Add/remove institute tags."""
institute_obj, case_obj = institute_and_case(store, institute_id, case_name)
user_obj = store.user(current_user.email)
link = url_for('.case', institute_id=institute_id, case_name=case_name)
cohort_tag = request.form['cohort_tag']
if request.args.get('remove') == 'yes':
store.remove_cohort(institute_obj, case_obj, user_obj, link, cohort_tag)
else:
store.add_cohort(institute_obj, case_obj, user_obj, link, cohort_tag)
return redirect(request.referrer)
|
Update default panels for a case.
|
def default_panels(institute_id, case_name):
"""Update default panels for a case."""
panel_ids = request.form.getlist('panel_ids')
controllers.update_default_panels(store, current_user, institute_id, case_name, panel_ids)
return redirect(request.referrer)
|
Download vcf2cytosure file for individual.
|
def vcf2cytosure(institute_id, case_name, individual_id):
"""Download vcf2cytosure file for individual."""
(display_name, vcf2cytosure) = controllers.vcf2cytosure(store,
institute_id, case_name, individual_id)
outdir = os.path.abspath(os.path.dirname(vcf2cytosure))
filename = os.path.basename(vcf2cytosure)
log.debug("Attempt to deliver file {0} from dir {1}".format(filename, outdir))
attachment_filename = display_name + ".vcf2cytosure.cgh"
return send_from_directory(outdir, filename,
attachment_filename=attachment_filename,
as_attachment=True)
|
Load multiqc report for the case.
|
def multiqc(institute_id, case_name):
"""Load multiqc report for the case."""
data = controllers.multiqc(store, institute_id, case_name)
if data['case'].get('multiqc') is None:
return abort(404)
out_dir = os.path.abspath(os.path.dirname(data['case']['multiqc']))
filename = os.path.basename(data['case']['multiqc'])
return send_from_directory(out_dir, filename)
|
scout: manage interactions with a scout instance.
|
def update_panels(context, mongodb, username, password, authdb, host, port, loglevel, config):
"""scout: manage interactions with a scout instance."""
coloredlogs.install(level=loglevel)
LOG.info("Running scout version %s", __version__)
LOG.debug("Debug logging enabled.")
mongo_config = {}
cli_config = {}
if config:
LOG.debug("Use config file %s", config)
with open(config, 'r') as in_handle:
cli_config = yaml.safe_load(in_handle)
mongo_config['mongodb'] = (mongodb or cli_config.get('mongodb') or 'scout')
mongo_config['host'] = (host or cli_config.get('host') or 'localhost')
mongo_config['port'] = (port or cli_config.get('port') or 27017)
mongo_config['username'] = username or cli_config.get('username')
mongo_config['password'] = password or cli_config.get('password')
mongo_config['authdb'] = authdb or cli_config.get('authdb') or mongo_config['mongodb']
mongo_config['omim_api_key'] = cli_config.get('omim_api_key')
LOG.info("Setting database name to %s", mongo_config['mongodb'])
LOG.debug("Setting host to %s", mongo_config['host'])
LOG.debug("Setting port to %s", mongo_config['port'])
valid_connection = check_connection(
host=mongo_config['host'],
port=mongo_config['port'],
username=mongo_config['username'],
password=mongo_config['password'],
authdb=mongo_config['authdb'],
)
LOG.info("Test if mongod is running")
if not valid_connection:
LOG.warning("Connection could not be established")
context.abort()
try:
client = get_connection(**mongo_config)
except ConnectionFailure:
context.abort()
database = client[mongo_config['mongodb']]
LOG.info("Setting up a mongo adapter")
mongo_config['client'] = client
adapter = MongoAdapter(database)
requests = []
for case_obj in adapter.case_collection.find():
# pp(case_obj)
gene_to_panels = adapter.gene_to_panels(case_obj)
variants = adapter.variant_collection.find({
'case_id': case_obj['_id'],
'category': 'snv',
'variant_type': 'clinical',
})
for variant_obj in variants:
panel_names = set()
for hgnc_id in variant_obj['hgnc_ids']:
gene_panels = gene_to_panels.get(hgnc_id, set())
panel_names = panel_names.union(gene_panels)
if panel_names:
operation = pymongo.UpdateOne(
{'_id': variant_obj['_id']},
{
'$set': {
'panels': list(panel_names)
}
})
requests.append(operation)
if len(requests) > 5000:
adapter.variant_collection.bulk_write(requests, ordered=False)
requests = []
if requests:
adapter.variant_collection.bulk_write(requests, ordered=False)
requests = []
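# Self-contained sketch of the batching pattern used above: queue pymongo
# UpdateOne operations and flush them in unordered bulk writes of bounded
# size. The collection, documents and updated field are hypothetical.
import pymongo

def batched_updates(collection, documents, batch_size=5000):
    """Apply an unordered bulk update in fixed-size batches."""
    requests = []
    for doc in documents:
        requests.append(pymongo.UpdateOne({'_id': doc['_id']},
                                          {'$set': {'processed': True}}))
        if len(requests) >= batch_size:
            collection.bulk_write(requests, ordered=False)
            requests = []
    if requests:   # flush the last, possibly partial, batch
        collection.bulk_write(requests, ordered=False)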
|
Preprocess case objects.
|
def cases(store, case_query, limit=100):
"""Preprocess case objects.
Add the necessary information to display the 'cases' view
Args:
store(adapter.MongoAdapter)
case_query(pymongo.Cursor)
limit(int): Maximum number of cases to display
Returns:
data(dict): includes the cases, how many there are and the limit.
"""
case_groups = {status: [] for status in CASE_STATUSES}
for case_obj in case_query.limit(limit):
analysis_types = set(ind['analysis_type'] for ind in case_obj['individuals'])
case_obj['analysis_types'] = list(analysis_types)
case_obj['assignees'] = [store.user(user_email) for user_email in
case_obj.get('assignees', [])]
case_groups[case_obj['status']].append(case_obj)
case_obj['is_rerun'] = len(case_obj.get('analyses', [])) > 0
case_obj['clinvar_variants'] = store.case_to_clinVars(case_obj['_id'])
case_obj['display_track'] = TRACKS[case_obj.get('track', 'rare')]
data = {
'cases': [(status, case_groups[status]) for status in CASE_STATUSES],
'found_cases': case_query.count(),
'limit': limit,
}
return data
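# Hedged sketch of the grouping step above, stripped of database calls: cases
# are bucketed per status and returned as (status, cases) pairs so templates
# can render the groups in a fixed order. The statuses below are illustrative,
# not necessarily the exact CASE_STATUSES constant used by Scout.
CASE_STATUSES = ('prioritized', 'active', 'inactive', 'archived', 'solved')

def group_by_status(case_objs):
    case_groups = {status: [] for status in CASE_STATUSES}
    for case_obj in case_objs:
        case_groups[case_obj['status']].append(case_obj)
    return [(status, case_groups[status]) for status in CASE_STATUSES]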
|
Preprocess a single case.
|
def case(store, institute_obj, case_obj):
"""Preprocess a single case.
Prepare the case to be displayed in the case view.
Args:
store(adapter.MongoAdapter)
institute_obj(models.Institute)
case_obj(models.Case)
Returns:
data(dict): the context needed to display a single case.
"""
# Convert individual information to more readable format
case_obj['individual_ids'] = []
for individual in case_obj['individuals']:
try:
sex = int(individual.get('sex', 0))
except ValueError as err:
sex = 0
individual['sex_human'] = SEX_MAP[sex]
pheno_map = PHENOTYPE_MAP
if case_obj.get('track', 'rare') == 'cancer':
pheno_map = CANCER_PHENOTYPE_MAP
individual['phenotype_human'] = pheno_map.get(individual['phenotype'])
case_obj['individual_ids'].append(individual['individual_id'])
case_obj['assignees'] = [store.user(user_email) for user_email in
case_obj.get('assignees', [])]
# Fetch the variant objects for suspects and causatives
suspects = [store.variant(variant_id) or variant_id for variant_id in
case_obj.get('suspects', [])]
causatives = [store.variant(variant_id) or variant_id for variant_id in
case_obj.get('causatives', [])]
# Set of all unique genes in the default gene panels
distinct_genes = set()
case_obj['panel_names'] = []
for panel_info in case_obj.get('panels', []):
if not panel_info.get('is_default'):
continue
panel_obj = store.gene_panel(panel_info['panel_name'], version=panel_info.get('version'))
distinct_genes.update([gene['hgnc_id'] for gene in panel_obj.get('genes', [])])
full_name = "{} ({})".format(panel_obj['display_name'], panel_obj['version'])
case_obj['panel_names'].append(full_name)
case_obj['default_genes'] = list(distinct_genes)
for hpo_term in itertools.chain(case_obj.get('phenotype_groups', []),
case_obj.get('phenotype_terms', [])):
hpo_term['hpo_link'] = ("http://compbio.charite.de/hpoweb/showterm?id={}"
.format(hpo_term['phenotype_id']))
# other collaborators than the owner of the case
o_collaborators = []
for collab_id in case_obj['collaborators']:
if collab_id != case_obj['owner'] and store.institute(collab_id):
o_collaborators.append(store.institute(collab_id))
case_obj['o_collaborators'] = [(collab_obj['_id'], collab_obj['display_name']) for
collab_obj in o_collaborators]
irrelevant_ids = ('cust000', institute_obj['_id'])
collab_ids = [(collab['_id'], collab['display_name']) for collab in store.institutes() if
(collab['_id'] not in irrelevant_ids) and
(collab['_id'] not in case_obj['collaborators'])]
events = list(store.events(institute_obj, case=case_obj))
for event in events:
event['verb'] = VERBS_MAP[event['verb']]
case_obj['clinvar_variants'] = store.case_to_clinVars(case_obj['_id'])
# Phenotype groups can be specific for an institute, there are some default groups
pheno_groups = institute_obj.get('phenotype_groups') or PHENOTYPE_GROUPS
data = {
'status_class': STATUS_MAP.get(case_obj['status']),
'other_causatives': store.check_causatives(case_obj=case_obj),
'comments': store.events(institute_obj, case=case_obj, comments=True),
'hpo_groups': pheno_groups,
'events': events,
'suspects': suspects,
'causatives': causatives,
'collaborators': collab_ids,
'cohort_tags': COHORT_TAGS,
'mme_nodes': current_app.mme_nodes, # Get available MatchMaker nodes for matching case
}
return data
|
Gather contents to be visualized in a case report
|
def case_report_content(store, institute_obj, case_obj):
"""Gather contents to be visualized in a case report
Args:
store(adapter.MongoAdapter)
institute_obj(models.Institute)
case_obj(models.Case)
Returns:
data(dict)
"""
variant_types = {
'causatives_detailed': 'causatives',
'suspects_detailed': 'suspects',
'classified_detailed': 'acmg_classification',
'tagged_detailed': 'manual_rank',
'dismissed_detailed': 'dismiss_variant',
'commented_detailed': 'is_commented',
}
data = case_obj
for individual in data['individuals']:
try:
sex = int(individual.get('sex', 0))
except ValueError as err:
sex = 0
individual['sex_human'] = SEX_MAP[sex]
individual['phenotype_human'] = PHENOTYPE_MAP.get(individual['phenotype'])
# Add the case comments
data['comments'] = store.events(institute_obj, case=case_obj, comments=True)
data['manual_rank_options'] = MANUAL_RANK_OPTIONS
data['dismissed_options'] = DISMISS_VARIANT_OPTIONS
data['genetic_models'] = dict(GENETIC_MODELS)
data['report_created_at'] = datetime.datetime.now().strftime("%Y-%m-%d %H:%M")
evaluated_variants = {}
for vt in variant_types:
evaluated_variants[vt] = []
# We collect all causatives and suspected variants
# These are handled separately since they are on case level
for var_type in ['causatives', 'suspects']:
#These include references to variants
vt = '_'.join([var_type, 'detailed'])
for var_id in case_obj.get(var_type,[]):
variant_obj = store.variant(var_id)
if not variant_obj:
continue
# If the variant exists we add it to the evaluated variants
evaluated_variants[vt].append(variant_obj)
## get variants for this case that are either classified, commented, tagged or dismissed.
for var_obj in store.evaluated_variants(case_id=case_obj['_id']):
# Check which category it belongs to
for vt in variant_types:
keyword = variant_types[vt]
# When found we add it to the category
# Each variant can belong to multiple categories
if keyword in var_obj:
evaluated_variants[vt].append(var_obj)
for var_type in evaluated_variants:
decorated_variants = []
for var_obj in evaluated_variants[var_type]:
# We decorate the variant with some extra information
if var_obj['category'] == 'snv':
decorated_info = variant_decorator(
store=store,
institute_obj=institute_obj,
case_obj=case_obj,
variant_id=None,
variant_obj=var_obj,
add_case=False,
add_other=False,
get_overlapping=False
)
else:
decorated_info = sv_variant(
store=store,
institute_id=institute_obj['_id'],
case_name=case_obj['display_name'],
variant_obj=var_obj,
add_case=False,
get_overlapping=False
)
decorated_variants.append(decorated_info['variant'])
# Add the decorated variants to the case
data[var_type] = decorated_variants
return data
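# Minimal sketch of the categorisation step above: a variant is appended to
# every category whose keyword is present on the variant document, so a single
# variant can end up in several buckets. The variant dict is hypothetical.
variant_types = {
    'classified_detailed': 'acmg_classification',
    'tagged_detailed': 'manual_rank',
    'dismissed_detailed': 'dismiss_variant',
    'commented_detailed': 'is_commented',
}
var_obj = {'_id': 'abc', 'manual_rank': 4, 'is_commented': True}
matching = [vt for vt, keyword in variant_types.items() if keyword in var_obj]
# matching == ['tagged_detailed', 'commented_detailed']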
|
Posts a request to chanjo-report and captures the body of the returned response to include it in the case report
|
def coverage_report_contents(store, institute_obj, case_obj, base_url):
"""Posts a request to chanjo-report and capture the body of the returned response to include it in case report
Args:
store(adapter.MongoAdapter)
institute_obj(models.Institute)
case_obj(models.Case)
base_url(str): base url of server
Returns:
coverage_data(str): string rendering of the content between the <body> and </body> tags of a coverage report
"""
request_data = {}
# extract sample ids from case_obj and add them to the post request object:
request_data['sample_id'] = [ ind['individual_id'] for ind in case_obj['individuals'] ]
# extract default panel names and default genes from case_obj and add them to the post request object
distinct_genes = set()
panel_names = []
for panel_info in case_obj.get('panels', []):
if not panel_info.get('is_default'):
continue
panel_obj = store.gene_panel(panel_info['panel_name'], version=panel_info.get('version'))
full_name = "{} ({})".format(panel_obj['display_name'], panel_obj['version'])
panel_names.append(full_name)
panel_names = ', '.join(panel_names)
request_data['panel_name'] = panel_names
# add institute-specific cutoff level to the post request object
request_data['level'] = institute_obj.get('coverage_cutoff', 15)
#send get request to chanjo report
resp = requests.get(base_url+'reports/report', params=request_data)
#read response content
soup = BeautifulSoup(resp.text, 'html.parser')
# remove links in the printed version of coverage report
for tag in soup.find_all('a'):
tag.replaceWith('')
#extract body content using BeautifulSoup
coverage_data = ''.join(['%s' % x for x in soup.body.contents])
return coverage_data
|
Get all Clinvar submissions for a user and an institute
|
def clinvar_submissions(store, user_id, institute_id):
"""Get all Clinvar submissions for a user and an institute"""
submissions = list(store.clinvar_submissions(user_id, institute_id))
return submissions
|
Collect MT variants and format the lines of an MT variant report to be exported in Excel format
|
def mt_excel_files(store, case_obj, temp_excel_dir):
"""Collect MT variants and format line of a MT variant report
to be exported in excel format
Args:
store(adapter.MongoAdapter)
case_obj(models.Case)
temp_excel_dir(os.Path): folder where the temp excel files are written to
Returns:
written_files(int): the number of files written to temp_excel_dir
"""
today = datetime.datetime.now().strftime('%Y-%m-%d')
samples = case_obj.get('individuals')
query = {'chrom':'MT'}
mt_variants = list(store.variants(case_id=case_obj['_id'], query=query, nr_of_variants= -1, sort_key='position'))
written_files = 0
for sample in samples:
sample_id = sample['individual_id']
sample_lines = export_mt_variants(variants=mt_variants, sample_id=sample_id)
# set up document name
document_name = '.'.join([case_obj['display_name'], sample_id, today]) + '.xlsx'
workbook = Workbook(os.path.join(temp_excel_dir,document_name))
Report_Sheet = workbook.add_worksheet()
# Write the column header
row = 0
for col,field in enumerate(MT_EXPORT_HEADER):
Report_Sheet.write(row,col,field)
# Write variant lines, after header (start at line 1)
for row, line in enumerate(sample_lines,1): # each line becomes a row in the document
for col, field in enumerate(line): # each field in line becomes a cell
Report_Sheet.write(row,col,field)
workbook.close()
if os.path.exists(os.path.join(temp_excel_dir,document_name)):
written_files += 1
return written_files
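# Hedged usage sketch: write the per-sample MT reports into a temporary
# directory and report how many files were produced. 'store' and 'case_obj'
# are assumed to be a MongoAdapter and a case dict, as in the function above.
import tempfile

with tempfile.TemporaryDirectory() as temp_excel_dir:
    n_files = mt_excel_files(store, case_obj, temp_excel_dir)
    if n_files == 0:
        print('No MT variant reports were written for this case')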
|
Update synopsis.
|
def update_synopsis(store, institute_obj, case_obj, user_obj, new_synopsis):
"""Update synopsis."""
# create event only if synopsis was actually changed
if case_obj['synopsis'] != new_synopsis:
link = url_for('cases.case', institute_id=institute_obj['_id'],
case_name=case_obj['display_name'])
store.update_synopsis(institute_obj, case_obj, user_obj, link,
content=new_synopsis)
|
Return the list of HGNC symbols that match annotated HPO terms.
|
def hpo_diseases(username, password, hpo_ids, p_value_treshold=1):
"""Return the list of HGNC symbols that match annotated HPO terms.
Args:
username (str): username to use for phenomizer connection
password (str): password to use for phenomizer connection
Returns:
query_result: a generator of dictionaries on the form
{
'p_value': float,
'disease_source': str,
'disease_nr': int,
'gene_symbols': list(str),
'description': str,
'raw_line': str
}
"""
# skip querying Phenomizer unless at least one HPO term exists
try:
results = query_phenomizer.query(username, password, *hpo_ids)
diseases = [result for result in results
if result['p_value'] <= p_value_treshold]
return diseases
except SystemExit:
return None
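# Hypothetical usage: query Phenomizer with two HPO terms and keep only
# strongly associated diseases. Credentials and the p-value cutoff are
# placeholders; HP:0001250 is 'Seizure'.
diseases = hpo_diseases('phenomizer_user', 'phenomizer_password',
                        ['HP:0001250', 'HP:0001263'], p_value_treshold=0.05)
if diseases is None:
    print('Phenomizer query failed')
else:
    for disease in diseases:
        print(disease['disease_source'], disease['disease_nr'], disease['p_value'])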
|
Request a rerun by email.
|
def rerun(store, mail, current_user, institute_id, case_name, sender, recipient):
"""Request a rerun by email."""
institute_obj, case_obj = institute_and_case(store, institute_id, case_name)
user_obj = store.user(current_user.email)
link = url_for('cases.case', institute_id=institute_id, case_name=case_name)
store.request_rerun(institute_obj, case_obj, user_obj, link)
# this should send a JSON document to the SuSy API in the future
html = """
<p>{institute}: {case} ({case_id})</p>
<p>Re-run requested by: {name}</p>
""".format(institute=institute_obj['display_name'],
case=case_obj['display_name'], case_id=case_obj['_id'],
name=user_obj['name'])
# compose and send the email message
msg = Message(subject=("SCOUT: request RERUN for {}"
.format(case_obj['display_name'])),
html=html, sender=sender, recipients=[recipient],
# cc the sender of the email for confirmation
cc=[user_obj['email']])
mail.send(msg)
|
Update default panels for a case.
|
def update_default_panels(store, current_user, institute_id, case_name, panel_ids):
"""Update default panels for a case."""
institute_obj, case_obj = institute_and_case(store, institute_id, case_name)
user_obj = store.user(current_user.email)
link = url_for('cases.case', institute_id=institute_id, case_name=case_name)
panel_objs = [store.panel(panel_id) for panel_id in panel_ids]
store.update_default_panels(institute_obj, case_obj, user_obj, link, panel_objs)
|