@@ -158,18 +158,18 @@ def convert_nm_to_ellipse_units(distance: float, units: str) -> float:
     # NB. assume "majmin_m" if any others
     return distance * 1852
 
-def get_field_type(elastic_hosts: str,headers: Optional[str],params: Dict[str, Any],field:str,idx: str) -> str:
+def get_field_type(elastic_hosts: str, headers: Optional[str], params: Dict[str, Any], field: str, idx: str) -> str:
     user = params.get("user")
     x_opaque_id = params.get("x-opaque-id")
     es = Elasticsearch(
         elastic_hosts.split(","),
         verify_certs=False,
         timeout=900,
-        headers=get_es_headers(headers, user,x_opaque_id),
+        headers=get_es_headers(headers, user, x_opaque_id),
     )
-    mappings = es.indices.get_field_mapping(fields=field,index=idx)
-    #{'foot_prints': {'mappings': {'foot_print': {'full_name': 'foot_print', 'mapping': {'foot_print': {'type': 'geo_shape'}}}}}}
-    index = list(mappings.keys())[0] #if index is my_index* it comes back as my_index
+    mappings = es.indices.get_field_mapping(fields=field, index=idx)
+    # {'foot_prints': {'mappings': {'foot_print': {'full_name': 'foot_print', 'mapping': {'foot_print': {'type': 'geo_shape'}}}}}}
+    index = list(mappings.keys())[0]  # if index is my_index* it comes back as my_index
     return mappings[index]['mappings'][field]['mapping'][field]['type']
 
 def get_search_base(
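Review note: the dict literal in the comment above is the full shape of the `get_field_mapping` response, and the final `return` is just a traversal of it. A standalone sketch using that sample payload verbatim:

```python
# Sample payload copied from the comment in the diff above; the lookup
# mirrors the function's return expression.
sample = {
    "foot_prints": {
        "mappings": {
            "foot_print": {
                "full_name": "foot_print",
                "mapping": {"foot_print": {"type": "geo_shape"}},
            }
        }
    }
}

index = list(sample.keys())[0]  # concrete index name, even if idx was "my_index*"
field = "foot_print"
assert sample[index]["mappings"][field]["mapping"][field]["type"] == "geo_shape"
```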
@@ -199,7 +199,7 @@ def get_search_base(
         elastic_hosts.split(","),
         verify_certs=False,
         timeout=900,
-        headers=get_es_headers(headers, user,x_opaque_id),
+        headers=get_es_headers(headers, user, x_opaque_id),
     )
 
     # Create base search
@@ -219,13 +219,13 @@ def get_search_base(
     if time_range and time_range[timestamp_field]:
         base_s = base_s.filter("range", **time_range)
 
-    #filter the ellipse search range in the data base query so the legen matches the tiles
-    if params.get('render_mode',"") == "ellipses":
-        units = convert_nm_to_ellipse_units(params['search_nautical_miles'],params['ellipse_units'])
-        search_range = {params["ellipse_major"]:{"lte":units}}
-        base_s = base_s.filter("range",**search_range)
-        search_range = {params["ellipse_minor"]:{"lte":units}}
-        base_s = base_s.filter("range",**search_range)
+    # filter the ellipse search range in the base query so the legend matches the tiles
+    if params.get('render_mode', "") == "ellipses":
+        units = convert_nm_to_ellipse_units(params['search_nautical_miles'], params['ellipse_units'])
+        search_range = {params["ellipse_major"]: {"lte": units}}
+        base_s = base_s.filter("range", **search_range)
+        search_range = {params["ellipse_minor"]: {"lte": units}}
+        base_s = base_s.filter("range", **search_range)
 
     # Add lucene query
     if lucene_query:
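Review note: the ellipse branch keeps the legend consistent with the tiles by excluding documents whose axes exceed the search radius. A sketch of the two range clauses it adds, with hypothetical field names `major_axis_m` and `minor_axis_m` and metre-based units:

```python
# 50 NM at 1852 m per NM; per the comment at the top of this diff,
# convert_nm_to_ellipse_units falls back to metres ("majmin_m") for
# unrecognized unit strings.
units = 50 * 1852  # 92600

ellipse_clauses = [
    {"range": {"major_axis_m": {"lte": units}}},
    {"range": {"minor_axis_m": {"lte": units}}},
]
```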
@@ -355,14 +355,14 @@ def build_dsl_filter(filter_inputs) -> Optional[Dict[str, Any]]:
             filter_key = f.get("meta", {}).get("key")
             if f.get("meta", {}).get("negate"):
                 if filter_key == "query":
-                    filter_dict["must_not"].append( { "bool": f.get(filter_key).get("bool") } )
+                    filter_dict["must_not"].append({"bool": f.get(filter_key).get("bool")})
                 else:
-                    filter_dict["must_not"].append( { filter_key: f.get(filter_key) } )
+                    filter_dict["must_not"].append({filter_key: f.get(filter_key)})
             else:
                 if filter_key == "query":
-                    filter_dict["filter"].append( { "bool": f.get(filter_key).get("bool") } )
+                    filter_dict["filter"].append({"bool": f.get(filter_key).get("bool")})
                 else:
-                    filter_dict["filter"].append( { filter_key: f.get(filter_key) } )
+                    filter_dict["filter"].append({filter_key: f.get(filter_key)})
 
         else:
             raise ValueError("unsupported filter type {}".format(f.get("meta").get("type")))  # pylint: disable=C0209
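Review note: negated clauses land in `must_not`, the rest in `filter`, and filter-bar queries keep only their `bool` body. A hypothetical shape of the resulting dict, with the clause contents invented for illustration:

```python
# Both lists hold ready-to-use Elasticsearch query clauses.
dsl_filter = {
    "filter": [
        {"bool": {"must": [{"match_phrase": {"kind": "ais"}}]}},  # filter_key == "query"
    ],
    "must_not": [
        {"term": {"status": "inactive"}},  # a negated filter-bar entry
    ],
}
```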
@@ -389,7 +389,7 @@ def load_datashader_headers(header_file_path_str: Optional[str]) -> Dict[Any, Any]:
 
     return loaded_yaml
 
-def get_es_headers(request_headers=None, user=None,x_opaque_id=None):
+def get_es_headers(request_headers=None, user=None, x_opaque_id=None):
     """
 
     :param request_headers:
@@ -420,15 +420,17 @@ def get_es_headers(request_headers=None, user=None,x_opaque_id=None):
     return result
 
 def parse_duration_interval(interval):
-    durations = {"days":"d",
-                 "minutes":"m",
-                 "hours":"h",
-                 "weeks":"w",
-                 "months":"M",
-                 #"quarter":"q", dateutil.relativedelta doesn't handle quarters
-                 "years":"y"}
+    durations = {
+        "days": "d",
+        "minutes": "m",
+        "hours": "h",
+        "weeks": "w",
+        "months": "M",
+        # "quarter": "q", dateutil.relativedelta doesn't handle quarters
+        "years": "y",
+    }
     kwargs = {}
-    for key,value in durations.items():
+    for key, value in durations.items():
         if interval[len(interval)-1] == value:
             kwargs[key] = int(interval[0:len(interval)-1])
     return relativedelta(**kwargs)
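Review note: behavior is unchanged by the reflow. The trailing character of the interval string selects the unit (capital `M` is months, lowercase `m` is minutes) and the leading digits become the count:

```python
from dateutil.relativedelta import relativedelta

# relativedelta implements __eq__, so these comparisons hold.
assert parse_duration_interval("30d") == relativedelta(days=30)
assert parse_duration_interval("2w") == relativedelta(weeks=2)
assert parse_duration_interval("6M") == relativedelta(months=6)
assert parse_duration_interval("15m") == relativedelta(minutes=15)
```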
@@ -524,11 +526,11 @@ def geotile_bucket_to_lonlat(bucket):
     if hasattr(bucket, "centroid"):
         lon = bucket.centroid.location.lon
         lat = bucket.centroid.location.lat
-    elif hasattr(bucket.key,'grids'):
-        z, x, y = [ int(x) for x in bucket.key.grids.split("/") ]
+    elif hasattr(bucket.key, 'grids'):
+        z, x, y = [int(x) for x in bucket.key.grids.split("/")]
         lon, lat = mu.center(x, y, z)
     else:
-        z, x, y = [ int(x) for x in bucket.key.split("/") ]
+        z, x, y = [int(x) for x in bucket.key.split("/")]
         lon, lat = mu.center(x, y, z)
     return lon, lat
 
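Review note: the non-centroid branches parse a geotile key of the form "zoom/x/y". Assuming `mu.center` is the project's mercantile-style tile utility returning a tile's center, the equivalent standard Web Mercator math is:

```python
import math

def tile_center(x: int, y: int, z: int):
    """Center of slippy-map tile (x, y) at zoom z; assumed equivalent to mu.center(x, y, z)."""
    n = 2 ** z
    lon = (x + 0.5) / n * 360.0 - 180.0
    lat = math.degrees(math.atan(math.sinh(math.pi * (1.0 - 2.0 * (y + 0.5) / n))))
    return lon, lat

z, x, y = [int(p) for p in "3/2/5".split("/")]  # e.g. a geotile_grid bucket key
print(tile_center(x, y, z))  # approx (-67.5, -55.8)
```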
@@ -571,7 +573,7 @@ def get_nested_field_from_hit(hit, field_parts: List[str], default=None):
     raise ValueError("field must be provided")
 
 def chunk_iter(iterable, chunk_size):
-    chunks = [ None ] * chunk_size
+    chunks = [None] * chunk_size
     i = -1
     for i, v in enumerate(iterable):
         idx = (i % chunk_size)
@@ -581,14 +583,14 @@ def chunk_iter(iterable, chunk_size):
         chunks[idx] = v
 
     if i >= 0:
-        last_written_idx = ( i % chunk_size )
+        last_written_idx = (i % chunk_size)
         yield (False, chunks[0:last_written_idx + 1])
 
-def bucket_noop(bucket,search):
+def bucket_noop(bucket, search):
     # pylint: disable=unused-argument
     return bucket
 class Scan:
-    def __init__(self, searches, inner_aggs=None,field=None,precision=None, size=10, timeout=None,bucket_callback=bucket_noop):
+    def __init__(self, searches, inner_aggs=None, field=None, precision=None, size=10, timeout=None, bucket_callback=bucket_noop):
         self.field = field
         self.precision = precision
         self.searches = searches
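Review note: `chunk_iter` buffers items into a fixed-size list, and only its tail is visible in this diff. The final partial buffer is yielded with a `False` flag; the elided middle of the loop presumably yields `(True, chunks)` each time the buffer fills. A hedged usage sketch under that assumption:

```python
# Hypothetical output for chunk_iter(range(5), 2), if full buffers are
# flagged True as assumed above:
#   (True,  [0, 1])
#   (True,  [2, 3])
#   (False, [4])   # partial tail from the visible `if i >= 0` block
for is_full, chunk in chunk_iter(range(5), 2):
    print(is_full, chunk)
```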
@@ -616,29 +618,29 @@ def execute(self):
         self.total_took = 0
         self.aborted = False
 
-        def run_search(s,**kwargs):
+        def run_search(s, **kwargs):
             _timeout_at = kwargs.pop("timeout_at", None)
             if _timeout_at:
                 _time_remaining = _timeout_at - int(time.time())
                 s = s.params(timeout=f"{_time_remaining}s")
             if self.field and self.precision:
-                s.aggs.bucket("comp", "geotile_grid", field=self.field,precision=self.precision,size=self.size)
-            #logger.info(json.dumps(s.to_dict(),indent=2,default=str))
+                s.aggs.bucket("comp", "geotile_grid", field=self.field, precision=self.precision, size=self.size)
+            # logger.info(json.dumps(s.to_dict(), indent=2, default=str))
             return s.execute()
 
         timeout_at = None
         if self.timeout:
             timeout_at = int(time.time()) + self.timeout
         for search in self.searches:
-            response = run_search(search,timeout_at=timeout_at)
+            response = run_search(search, timeout_at=timeout_at)
             self.num_searches += 1
             self.total_took += response.took
             self.total_shards += response._shards.total  # pylint: disable=W0212
             self.total_skipped += response._shards.skipped  # pylint: disable=W0212
             self.total_successful += response._shards.successful  # pylint: disable=W0212
             self.total_failed += response._shards.failed  # pylint: disable=W0212
             for b in response.aggregations.comp.buckets:
-                b = self.bucket_callback(b,self)
+                b = self.bucket_callback(b, self)
                 yield b
 
 
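Review note: `Scan.execute` is a generator that re-issues each search with the remaining wall-clock budget, attaches the geotile grid when field and precision are set, and tallies shard stats as it streams buckets. A hypothetical driver (index and field names invented; assumes `elasticsearch_dsl.Search` and a client configured elsewhere):

```python
from elasticsearch_dsl import Search

# es: an Elasticsearch client configured elsewhere (hypothetical)
searches = [Search(using=es, index="tracks-*").filter("term", kind="ais")]
scan = Scan(searches, field="location", precision=9, size=10000, timeout=60)
for bucket in scan.execute():
    lon, lat = geotile_bucket_to_lonlat(bucket)
    print(bucket.key, bucket.doc_count, lon, lat)
print(scan.num_searches, scan.total_took, scan.total_failed)
```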
@@ -735,19 +737,19 @@ def get_tile_categories(base_s, x, y, z, geopoint_field, category_field, size):
     cat_s = cat_s.params(size=0)
     cat_s = cat_s.filter("geo_bounding_box", **{geopoint_field: bb_dict})
     cat_s.aggs.bucket("categories", "terms", field=category_field, size=size)
-    cat_s.aggs.bucket("missing", "filter", bool={ "must_not": { "exists": { "field": category_field } } })
+    cat_s.aggs.bucket("missing", "filter", bool={"must_not": {"exists": {"field": category_field}}})
     response = cat_s.execute()
     if hasattr(response.aggregations, "categories"):
         for category in response.aggregations.categories:
             # this if prevents bools from using 0/1 instead of true/false
             if hasattr(category, "key_as_string"):
-                category_filters[str(category.key)] = { "term": {category_field: category.key_as_string} }
+                category_filters[str(category.key)] = {"term": {category_field: category.key_as_string}}
             else:
-                category_filters[str(category.key)] = { "term": {category_field: category.key} }
+                category_filters[str(category.key)] = {"term": {category_field: category.key}}
             category_legend[str(category.key)] = category.doc_count
         category_legend["Other"] = response.aggregations.categories.sum_other_doc_count
     if hasattr(response.aggregations, "missing") and response.aggregations.missing.doc_count > 0:
-        category_filters["N/A"] = { "bool": { "must_not": { "exists": { "field": category_field } } } }
+        category_filters["N/A"] = {"bool": {"must_not": {"exists": {"field": category_field}}}}
         category_legend["N/A"] = response.aggregations.missing.doc_count
 
     return category_filters, category_legend
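Review note: the two return values share keys, so a caller can render the legend and reuse the matching filter clause directly. Illustrative shapes with an invented `vessel_type` category field and invented counts:

```python
# Each filter is a ready-to-use Elasticsearch clause; the legend maps the
# same keys to document counts, plus the terms agg's "Other" remainder.
category_filters = {
    "cargo": {"term": {"vessel_type": "cargo"}},
    "tanker": {"term": {"vessel_type": "tanker"}},
    "N/A": {"bool": {"must_not": {"exists": {"field": "vessel_type"}}}},
}
category_legend = {"cargo": 1204, "tanker": 371, "Other": 88, "N/A": 12}
```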