Skip to content
2 changes: 1 addition & 1 deletion VERSION
Original file line number Diff line number Diff line change
@@ -1 +1 @@
2.5.10
2.5.11
259 changes: 87 additions & 172 deletions src/app.py

Large diffs are not rendered by default.

45 changes: 23 additions & 22 deletions src/app_neo4j_queries.py
Original file line number Diff line number Diff line change
Expand Up @@ -44,7 +44,7 @@ def check_connection(neo4j_driver):
logger.info("Neo4j is connected :)")
return True

logger.info("Neo4j is NOT connected :(")
logger.error("Neo4j is NOT connected :(")

return False

Expand Down Expand Up @@ -83,7 +83,7 @@ def get_entities_by_type(neo4j_driver, entity_type, property_key = None):
f"RETURN apoc.coll.toSet(COLLECT(e)) AS {record_field_name}")

logger.info("======get_entities_by_type() query======")
logger.info(query)
logger.debug(query)

with neo4j_driver.session() as session:
record = session.read_transaction(schema_neo4j_queries.execute_readonly_tx, query)
Expand Down Expand Up @@ -148,7 +148,7 @@ def get_ancestor_organs(neo4j_driver, entity_uuid):
f"RETURN apoc.coll.toSet(COLLECT(organ)) AS {record_field_name}")

logger.info("======get_ancestor_organs() query======")
logger.info(query)
logger.debug(query)

with neo4j_driver.session() as session:
record = session.read_transaction(schema_neo4j_queries.execute_readonly_tx, query)
Expand Down Expand Up @@ -199,7 +199,7 @@ def create_multiple_samples(neo4j_driver, samples_dict_list, activity_data_dict,
f"CREATE (a)-[:ACTIVITY_OUTPUT]->(e)")

logger.info("======create_multiple_samples() individual query======")
logger.info(query)
logger.debug(query)

result = tx.run(query)

Expand All @@ -211,7 +211,7 @@ def create_multiple_samples(neo4j_driver, samples_dict_list, activity_data_dict,
logger.exception(msg)

if tx.closed() == False:
logger.info("Failed to commit create_multiple_samples() transaction, rollback")
logger.error("Failed to commit create_multiple_samples() transaction, rollback")

tx.rollback()

Expand Down Expand Up @@ -262,7 +262,7 @@ def create_multiple_datasets(neo4j_driver, datasets_dict_list, activity_data_dic
f"RETURN e AS {record_field_name}")

logger.info("======create_multiple_datasets() individual query======")
logger.info(query)
logger.debug(query)

result = tx.run(query)
record = result.single()
Expand All @@ -279,7 +279,7 @@ def create_multiple_datasets(neo4j_driver, datasets_dict_list, activity_data_dic
logger.exception(msg)

if tx.closed() == False:
logger.info("Failed to commit create_multiple_samples() transaction, rollback")
logger.error("Failed to commit create_multiple_datasets() transaction, rollback")

tx.rollback()

Expand Down Expand Up @@ -314,7 +314,7 @@ def get_sorted_revisions(neo4j_driver, uuid):
f"RETURN COLLECT(node) AS {record_field_name}")

logger.info("======get_sorted_revisions() query======")
logger.info(query)
logger.debug(query)

with neo4j_driver.session() as session:
record = session.read_transaction(schema_neo4j_queries.execute_readonly_tx, query)
Expand Down Expand Up @@ -364,7 +364,7 @@ def get_sorted_multi_revisions(neo4j_driver, uuid, fetch_all=True, property_key=
)

logger.info("======get_sorted_multi_revisions() query======")
logger.info(query)
logger.debug(query)

with neo4j_driver.session() as session:
record = session.read_transaction(schema_neo4j_queries.execute_readonly_tx, query)
Expand Down Expand Up @@ -420,7 +420,7 @@ def get_previous_revisions(neo4j_driver, uuid, property_key = None):
f"RETURN apoc.coll.toSet(COLLECT(prev)) AS {record_field_name}")

logger.info("======get_previous_revisions() query======")
logger.info(query)
logger.debug(query)

with neo4j_driver.session() as session:
record = session.read_transaction(schema_neo4j_queries.execute_readonly_tx, query)
Expand Down Expand Up @@ -470,7 +470,7 @@ def get_next_revisions(neo4j_driver, uuid, property_key = None):
f"RETURN apoc.coll.toSet(COLLECT(next)) AS {record_field_name}")

logger.info("======get_next_revisions() query======")
logger.info(query)
logger.debug(query)

with neo4j_driver.session() as session:
record = session.read_transaction(schema_neo4j_queries.execute_readonly_tx, query)
Expand Down Expand Up @@ -511,7 +511,7 @@ def is_next_revision_latest(neo4j_driver, uuid):
f"RETURN apoc.coll.toSet(COLLECT(next.uuid)) AS {record_field_name}")

logger.info("======is_next_revision_latest() query======")
logger.info(query)
logger.debug(query)

with neo4j_driver.session() as session:
record = session.read_transaction(schema_neo4j_queries.execute_readonly_tx, query)
Expand Down Expand Up @@ -547,7 +547,7 @@ def nested_previous_revisions(neo4j_driver, previous_revision_list):
"RETURN connectedUUID1, connectedUUID2 ")

logger.info("======nested_previous_revisions() query======")
logger.info(query)
logger.debug(query)

with neo4j_driver.session() as session:
record = session.read_transaction(schema_neo4j_queries.execute_readonly_tx, query)
Expand Down Expand Up @@ -598,7 +598,7 @@ def get_provenance(neo4j_driver, uuid, depth):
f"RETURN json")

logger.info("======get_provenance() query======")
logger.info(query)
logger.debug(query)

with neo4j_driver.session() as session:
return session.read_transaction(schema_neo4j_queries.execute_readonly_tx, query)
Expand Down Expand Up @@ -634,7 +634,7 @@ def get_dataset_latest_revision(neo4j_driver, uuid, public = False):
f"RETURN latest AS {record_field_name}")

logger.info("======get_dataset_latest_revision() query======")
logger.info(query)
logger.debug(query)

with neo4j_driver.session() as session:
record = session.read_transaction(schema_neo4j_queries.execute_readonly_tx, query)
Expand Down Expand Up @@ -667,7 +667,7 @@ def get_dataset_revision_number(neo4j_driver, uuid):
f"RETURN COUNT(prev) AS {record_field_name}")

logger.info("======get_dataset_revision_number() query======")
logger.info(query)
logger.debug(query)

with neo4j_driver.session() as session:
record = session.read_transaction(schema_neo4j_queries.execute_readonly_tx, query)
Expand Down Expand Up @@ -698,7 +698,7 @@ def get_associated_organs_from_dataset(neo4j_driver, dataset_uuid):
f"RETURN apoc.coll.toSet(COLLECT(organ)) AS {record_field_name}")

logger.info("======get_associated_organs_from_dataset() query======")
logger.info(query)
logger.debug(query)

with neo4j_driver.session() as session:
record = session.read_transaction(schema_neo4j_queries.execute_readonly_tx, query)
Expand All @@ -717,7 +717,7 @@ def get_associated_samples_from_dataset(neo4j_driver, dataset_uuid):
f"RETURN apoc.coll.toSet(COLLECT(sample)) AS {record_field_name}")

logger.info("======get_associated_samples_from_dataset() query======")
logger.info(query)
logger.debug(query)

with neo4j_driver.session() as session:
record = session.read_transaction(schema_neo4j_queries.execute_readonly_tx, query)
Expand All @@ -736,7 +736,7 @@ def get_associated_donors_from_dataset(neo4j_driver, dataset_uuid):
f"RETURN apoc.coll.toSet(COLLECT(donor)) AS {record_field_name}")

logger.info("======get_associated_donors_from_dataset() query======")
logger.info(query)
logger.debug(query)

with neo4j_driver.session() as session:
record = session.read_transaction(schema_neo4j_queries.execute_readonly_tx, query)
Expand Down Expand Up @@ -779,7 +779,7 @@ def get_individual_prov_info(neo4j_driver, dataset_uuid):
f" ds.group_uuid, ds.created_timestamp, ds.created_by_user_email, ds.last_modified_timestamp, "
f" ds.last_modified_user_email, ds.lab_dataset_id, ds.dataset_type, METASAMPLE, PROCESSED_DATASET")
logger.info("======get_individual_prov_info() query======")
logger.info(query)
logger.debug(query)

record_contents = []
record_dict = {}
Expand Down Expand Up @@ -849,7 +849,7 @@ def get_individual_prov_info(neo4j_driver, dataset_uuid):
def get_all_dataset_samples(neo4j_driver, dataset_uuid):
query = f"MATCH p = (ds:Dataset {{uuid: '{dataset_uuid}'}})<-[*]-(dn:Donor) return p"
logger.info("======get_all_dataset_samples() query======")
logger.info(query)
logger.debug(query)

# Dictionary of Dictionaries, keyed by UUID, containing each Sample returned in the Neo4j Path
dataset_sample_list = {}
Expand Down Expand Up @@ -890,7 +890,8 @@ def get_sankey_info(neo4j_driver, public_only):
f"ORDER BY ds.group_name")

logger.info("======get_sankey_info() query======")
logger.info(query)
logger.debug(query)

with neo4j_driver.session() as session:
# Because we're returning multiple things, we use session.run rather than session.read_transaction
result = session.run(query)
Expand Down
12 changes: 8 additions & 4 deletions src/instance/app.cfg.example
Original file line number Diff line number Diff line change
@@ -1,4 +1,8 @@
# Set to True to disable the PUT and POST calls, used on STAGE to make entity-api READ-ONLY
# Set to False for PROD deployment to use INFO logging level
# Default to DEBUG for DEV/TEST
DEBUG_MODE = True

# Set to True to disable the PUT and POST calls to make entity-api READ-ONLY
READ_ONLY_MODE = False

# File path of schema yaml file, DO NOT MODIFY
Expand Down Expand Up @@ -37,7 +41,7 @@ MEMCACHED_SERVER = 'host:11211'
# Change prefix based on deployment environment, default for DEV
MEMCACHED_PREFIX = 'hm_entity_dev_'

# URL for talking to UUID API (default value used for docker deployment, no token needed)
# URL for talking to UUID API (default value used for docker deployment)
# Works regardless of the trailing slash /
UUID_API_URL = 'http://uuid-api:8080'

Expand All @@ -54,9 +58,9 @@ ONTOLOGY_API_URL = 'https://ontology-api.dev.hubmapconsortium.org'
# necessitates subsequent calls for other entities.
ENTITY_API_URL = 'http://localhost:5002'

# A list of URLs for talking to multiple Search API instances (default value used for docker deployment, no token needed)
# URL for talking to Search API (default value used for docker deployment)
# Works regardless of the trailing slash /
SEARCH_API_URL_LIST = ['http://search-api:8080']
SEARCH_API_URL = 'http://search-api:8080'

#The Base URL to the Globus transfer application site
GLOBUS_APP_BASE_URL = 'https://app.globus.org'
Expand Down
8 changes: 4 additions & 4 deletions src/schema/schema_manager.py
Original file line number Diff line number Diff line change
Expand Up @@ -1524,7 +1524,7 @@ def get_user_info(request):
user_info = _auth_helper.getUserInfoUsingRequest(request, True)

logger.info("======get_user_info()======")
logger.info(user_info)
logger.debug(user_info)

# For debugging purposes
try:
Expand All @@ -1533,7 +1533,7 @@ def get_user_info(request):
groups_list = auth_helper_instance.get_user_groups_deprecated(token)

logger.info("======Groups using get_user_groups_deprecated()======")
logger.info(groups_list)
logger.debug(groups_list)
except Exception:
msg = "For debugging purposes, failed to parse the Authorization token by calling commons.auth_helper.getAuthorizationTokens()"
# Log the full stack trace, prepend a line with our message
Expand Down Expand Up @@ -1819,7 +1819,7 @@ def create_hubmap_ids(normalized_class, json_data_dict, user_token, user_info_di
query_parms = {'entity_count': count}

logger.info("======create_hubmap_ids() json_to_post to uuid-api======")
logger.info(json_to_post)
logger.debug(json_to_post)

# Disable ssl certificate verification
target_url = _uuid_api_url + SchemaConstants.UUID_API_ID_ENDPOINT
Expand Down Expand Up @@ -1857,7 +1857,7 @@ def create_hubmap_ids(normalized_class, json_data_dict, user_token, user_info_di
d.pop('hubmap_base_id', None)

logger.info("======create_hubmap_ids() generated ids from uuid-api======")
logger.info(ids_list)
logger.debug(ids_list)

return ids_list
else:
Expand Down
Loading