From c8b5c268eecedab3f7133abb6057085625fe32e9 Mon Sep 17 00:00:00 2001 From: DerekFurstPitt Date: Fri, 13 Feb 2026 09:35:24 -0500 Subject: [PATCH 1/3] updated main to accommodate changes in search-adaptor to handle the case where there's no redis config info --- src/main.py | 10 ++++++---- src/search-adaptor | 2 +- 2 files changed, 7 insertions(+), 5 deletions(-) diff --git a/src/main.py b/src/main.py index fa86a43d..31178900 100644 --- a/src/main.py +++ b/src/main.py @@ -33,10 +33,12 @@ config['ONTOLOGY_API_BASE_URL'] = app.config['ONTOLOGY_API_BASE_URL'].strip('/') config['DEBUG_MODE'] = app.config['DEBUG_MODE'] config['JOB_QUEUE_MODE'] = app.config['JOB_QUEUE_MODE'] -config['REDIS_HOST'] = app.config['REDIS_HOST'] -config['REDIS_PORT'] = app.config['REDIS_PORT'] -config['REDIS_DB'] = app.config['REDIS_DB'] -config['REDIS_PASSWORD'] = app.config['REDIS_PASSWORD'] +if config.get('JOB_QUEUE_MODE') == True: + config['REDIS_HOST'] = app.config.get('REDIS_HOST') + config['REDIS_PORT'] = app.config.get('REDIS_PORT') + config['REDIS_DB'] = app.config.get('REDIS_DB') + config['REDIS_PASSWORD'] = app.config.get('REDIS_PASSWORD') + if not config['ONTOLOGY_API_BASE_URL']: raise Exception(f"Unable retrieve ontology information using" diff --git a/src/search-adaptor b/src/search-adaptor index eca2c5f5..89a0af68 160000 --- a/src/search-adaptor +++ b/src/search-adaptor @@ -1 +1 @@ -Subproject commit eca2c5f50c7789ee899ff1b821bb84c2d10f316e +Subproject commit 89a0af684aed0c6745010ecbce8066b830a5e8fc From 83b6376edd0997e1e72752e276c23a2bae6b20dd Mon Sep 17 00:00:00 2001 From: DerekFurstPitt Date: Wed, 18 Feb 2026 11:48:13 -0500 Subject: [PATCH 2/3] fixed a potential bug in hubmap_translator that causes target entities to not enqueue properly if they're collections/epicollections/uploads --- src/hubmap_translator.py | 52 +++++++++++++++++++--------------------- 1 file changed, 25 insertions(+), 27 deletions(-) diff --git a/src/hubmap_translator.py b/src/hubmap_translator.py index 
1170a11d..e0a6af29 100644 --- a/src/hubmap_translator.py +++ b/src/hubmap_translator.py @@ -712,6 +712,13 @@ def enqueue_reindex(self, entity_id, reindex_queue, priority): ) collection_associations = [] upload_associations = [] + previous_revision_ids = [] + next_revision_ids = [] + neo4j_collection_ids = [] + neo4j_upload_ids = [] + neo4j_ancestor_ids = [] + neo4j_descendant_ids = [] + if entity['entity_type'] in ['Collection', 'Epicollection']: collection = self.get_collection_doc(entity_id=entity_id) if 'datasets' in collection: @@ -722,38 +729,29 @@ def enqueue_reindex(self, entity_id, reindex_queue, priority): if 'associated_publication' in collection and collection['associated_publication']: logger.info(f"Enqueueing associated_publication for {entity['entity_type']} {entity_id}") collection_associations.append(collection['associated_publication']) - - logger.info(f"Finished executing enqueue_reindex() for {entity['entity_type']} of uuid: {entity_id}") - return job_id - if entity['entity_type'] == 'Upload': + elif entity['entity_type'] == 'Upload': if 'datasets' in entity: logger.info(f"Enqueueing {len(entity['datasets'])} datasets for Upload {entity_id}") for dataset in entity['datasets']: upload_associations.append(dataset['uuid']) - logger.info(f"Finished executing enqueue_reindex() for Upload of uuid: {entity_id}") - return job_id - - logger.info(f"Calculating related entities for {entity_id}") - - neo4j_ancestor_ids = self.call_entity_api( - entity_id=entity_id, - endpoint_base='ancestors', - endpoint_suffix=None, - url_property='uuid' - ) - - neo4j_descendant_ids = self.call_entity_api( - entity_id=entity_id, - endpoint_base='descendants', - endpoint_suffix=None, - url_property='uuid' - ) - - previous_revision_ids = [] - next_revision_ids = [] - neo4j_collection_ids = [] - neo4j_upload_ids = [] + + else: + logger.info(f"Calculating related entities for {entity_id}") + + neo4j_ancestor_ids = self.call_entity_api( + entity_id=entity_id, + 
endpoint_base='ancestors', + endpoint_suffix=None, + url_property='uuid' + ) + + neo4j_descendant_ids = self.call_entity_api( + entity_id=entity_id, + endpoint_base='descendants', + endpoint_suffix=None, + url_property='uuid' + ) if entity['entity_type'] in ['Dataset', 'Publication']: previous_revision_ids = self.call_entity_api( From c3c2381360b73b12e381600c6fbb4dabc112839e Mon Sep 17 00:00:00 2001 From: DerekFurstPitt Date: Thu, 19 Feb 2026 12:12:19 -0500 Subject: [PATCH 3/3] updated search-adaptor to latest hash --- src/search-adaptor | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/search-adaptor b/src/search-adaptor index 89a0af68..6687ae73 160000 --- a/src/search-adaptor +++ b/src/search-adaptor @@ -1 +1 @@ -Subproject commit 89a0af684aed0c6745010ecbce8066b830a5e8fc +Subproject commit 6687ae735f610b20e3ea86d0d268d879a73a9ec9