
Commit

Merge branch 'dev-integrate' into welling/soft_assay_types_start
gesinaphillips authored Dec 6, 2023
2 parents e4cdb98 + fd086c0 commit eaeba4b
Showing 1 changed file with 9 additions and 8 deletions.
17 changes: 9 additions & 8 deletions src/app.py
@@ -2398,15 +2398,15 @@ def update_datasets_datastatus():
     organ_types_url = app.config['UBKG_WEBSERVICE_URL'] + 'organs/by-code?application_context=HUBMAP'
     organ_types_dict = requests.get(organ_types_url).json()
     all_datasets_query = (
-        "MATCH (ds:Dataset)<-[:ACTIVITY_OUTPUT]-(:Activity)<-[:ACTIVITY_INPUT]-(ancestor) "
+        "MATCH (ds:Dataset)<-[:ACTIVITY_OUTPUT]-(a:Activity)<-[:ACTIVITY_INPUT]-(ancestor) "
         "RETURN ds.uuid AS uuid, ds.group_name AS group_name, ds.data_types AS data_types, "
         "ds.hubmap_id AS hubmap_id, ds.lab_dataset_id AS provider_experiment_id, ds.status AS status, "
-        "ds.status_history AS status_history, "
+        "ds.status_history AS status_history, ds.assigned_to_group_name AS assigned_to_group_name, "
         "ds.last_modified_timestamp AS last_touch, ds.published_timestamp AS published_timestamp, "
-        "ds.data_access_level AS data_access_level, "
+        "ds.data_access_level AS data_access_level, ds.ingest_task AS ingest_task, "
         "COALESCE(ds.contributors IS NOT NULL) AS has_contributors, "
         "COALESCE(ds.contacts IS NOT NULL) AS has_contacts, "
-        "ancestor.entity_type AS ancestor_entity_type"
+        "a.creation_action AS activity_creation_action"
     )

     organ_query = (
@@ -2497,10 +2497,10 @@ def update_datasets_datastatus():
         dataset['globus_url'] = globus_url
         last_touch = dataset['last_touch'] if dataset['published_timestamp'] is None else dataset['published_timestamp']
         dataset['last_touch'] = str(datetime.datetime.utcfromtimestamp(last_touch/1000))
-        if dataset.get('ancestor_entity_type').lower() != "dataset":
-            dataset['is_primary'] = "true"
+        if dataset.get('activity_creation_action').lower().endswith("process"):
+            dataset['is_primary'] = "False"
         else:
-            dataset['is_primary'] = "false"
+            dataset['is_primary'] = "True"
         has_data = files_exist(dataset.get('uuid'), dataset.get('data_access_level'), dataset.get('group_name'))
         has_dataset_metadata = files_exist(dataset.get('uuid'), dataset.get('data_access_level'), dataset.get('group_name'), metadata=True)
         dataset['has_data'] = has_data
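The second hunk replaces the ancestor-entity-type test with a check on the Activity's creation_action, which the first hunk now returns as activity_creation_action. A minimal sketch of that classification rule, written only as an illustration under the assumption that the field names match the query above (the helper name is hypothetical, not part of this commit):

    def classify_is_primary(dataset: dict) -> str:
        # Datasets produced by an activity whose creation_action ends in "process"
        # are treated as derived; everything else is flagged as primary.
        # The string values "True"/"False" mirror the diff above.
        action = (dataset.get('activity_creation_action') or '').lower()
        return "False" if action.endswith("process") else "True"

    # e.g. dataset['is_primary'] = classify_is_primary(dataset)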
@@ -2548,7 +2548,8 @@ def update_uploads_datastatus():
         "MATCH (up:Upload) "
         "OPTIONAL MATCH (up)<-[:IN_UPLOAD]-(ds:Dataset) "
         "RETURN up.uuid AS uuid, up.group_name AS group_name, up.hubmap_id AS hubmap_id, up.status AS status, "
-        "up.title AS title, COLLECT(DISTINCT ds.uuid) AS datasets "
+        "up.title AS title, up.ingest_task AS ingest_task, up.assigned_to_group_name AS assigned_to_group_name, "
+        "COLLECT(DISTINCT ds.uuid) AS datasets "
     )

     displayed_fields = [
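The uploads query now also returns up.ingest_task and up.assigned_to_group_name for each Upload. The diff does not show how app.py opens its Neo4j session, so the following standalone sketch uses the standard neo4j Python driver with hypothetical connection settings, purely to illustrate reading the extra fields from each record:

    from neo4j import GraphDatabase

    # Hypothetical connection details for illustration only.
    driver = GraphDatabase.driver("bolt://localhost:7687", auth=("neo4j", "password"))

    all_uploads_query = (
        "MATCH (up:Upload) "
        "OPTIONAL MATCH (up)<-[:IN_UPLOAD]-(ds:Dataset) "
        "RETURN up.uuid AS uuid, up.group_name AS group_name, up.hubmap_id AS hubmap_id, up.status AS status, "
        "up.title AS title, up.ingest_task AS ingest_task, up.assigned_to_group_name AS assigned_to_group_name, "
        "COLLECT(DISTINCT ds.uuid) AS datasets "
    )

    with driver.session() as session:
        for record in session.run(all_uploads_query):
            # The new fields come back as plain values, or None when unset on the node.
            print(record["uuid"], record["ingest_task"], record["assigned_to_group_name"])

    driver.close()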
