@@ -158,26 +158,6 @@ def convert_nm_to_ellipse_units(distance: float, units: str) -> float:
     # NB. assume "majmin_m" if any others
     return distance * 1852
 
-def get_field_type(elastic_hosts: str, headers: Optional[str], params: Dict[str, Any], field: str, idx: str) -> str:
-    user = params.get("user")
-    x_opaque_id = params.get("x-opaque-id")
-    es = Elasticsearch(
-        elastic_hosts.split(","),
-        verify_certs=False,
-        timeout=900,
-        headers=get_es_headers(headers, user, x_opaque_id),
-    )
-    if idx.find("*:") != -1:
-        idx = idx[idx.find("*:")+2:]  # cross-cluster mapping queries return no mapping, so strip the cluster prefix
-    mappings = es.indices.get_field_mapping(fields=field, index=idx)
-    # {'foot_prints': {'mappings': {'foot_print': {'full_name': 'foot_print', 'mapping': {'foot_print': {'type': 'geo_shape'}}}}}}
-    index = list(mappings.keys())[0]  # if index is my_index* it comes back as my_index
-    field_parts = field.split(".")
-    try:
-        return mappings[index]['mappings'][field]['mapping'][field_parts[-1]]['type']  # handles 'geo_center' or a nested object {signal: {geo: {location: {}}}}
-    except AttributeError:
-        return mappings[index]['mappings'][field]['mapping'][field]['type']  # handles a literal field name with periods, e.g. 'signal.geo.location'
-
 def get_search_base(
     elastic_hosts: str,
     headers: Optional[str],
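
For context on what the removed `get_field_type` did: it read the `get_field_mapping` response to decide whether a field is, say, a `geo_point` or a `geo_shape`. A minimal sketch of that lookup, using the sample response preserved in the deleted inline comment (the index and field names are illustrative):

    # Sample get_field_mapping response, copied from the deleted inline comment.
    mappings = {'foot_prints': {'mappings': {'foot_print': {
        'full_name': 'foot_print',
        'mapping': {'foot_print': {'type': 'geo_shape'}}}}}}

    field = "foot_print"
    index = list(mappings.keys())[0]  # 'foot_prints'
    leaf = field.split(".")[-1]       # last part of a dotted field name
    print(mappings[index]['mappings'][field]['mapping'][leaf]['type'])  # geo_shape
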
@@ -271,21 +251,6 @@ def get_search_base(
 
     return base_s
 
-def handle_range_or_exists_filters(filter_input: Dict[Any, Any]) -> Dict[str, Any]:
-    """
-    `range` and `exists` filters can appear either directly under
-    `filter[]` or under `filter[].query` depending on the version
-    of Kibana, the former being the old way, so they need special
-    handling for backward compatibility.
-    """
-    filter_type = filter_input.get("meta").get("type")  # "range" or "exists"
-
-    # Handle old query structure for backward compatibility
-    if filter_input.get(filter_type) is not None:
-        return {filter_type: filter_input.get(filter_type)}
-
-    return filter_input.get("query")
-
 def build_dsl_filter(filter_inputs) -> Optional[Dict[str, Any]]:
     """
 
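
The docstring of the removed `handle_range_or_exists_filters` describes two payload shapes from different Kibana versions, and the code checks for the clause directly on the filter before falling back to `query`. A sketch of what those payloads look like (the `bytes` field and its bounds are made up for illustration):

    # Old-style Kibana filter: the clause sits directly on the filter object.
    old_style = {"meta": {"type": "range"},
                 "range": {"bytes": {"gte": 0, "lt": 1000}}}

    # Newer Kibana filter: the clause is nested under "query".
    new_style = {"meta": {"type": "range"},
                 "query": {"range": {"bytes": {"gte": 0, "lt": 1000}}}}

    # In both cases the function returned the bare clause:
    # {"range": {"bytes": {"gte": 0, "lt": 1000}}}
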
@@ -309,7 +274,7 @@ def build_dsl_filter(filter_inputs) -> Optional[Dict[str, Any]]:
             f.get("geo_shape") or
             f.get("geo_distance")
         )
-        if f.get("query",None):
+        if f.get("query", None):
             if f.get("meta").get("negate"):
                 filter_dict["must_not"].append(f.get("query"))
             else:
@@ -318,16 +283,16 @@ def build_dsl_filter(filter_inputs) -> Optional[Dict[str, Any]]:
             if not is_spatial_filter:
                 filt_type = f.get("meta").get("type")
                 if f.get("meta").get("negate"):
-                    filter_dict["must_not"].append({filt_type:f.get(filt_type)})
+                    filter_dict["must_not"].append({filt_type: f.get(filt_type)})
                 else:
-                    filter_dict["filter"].append({filt_type:f.get(filt_type)})
+                    filter_dict["filter"].append({filt_type: f.get(filt_type)})
             else:
-                for geo_type in ["geo_polygon","geo_bounding_box","geo_shape","geo_distance"]:
-                    if f.get(geo_type,None):
+                for geo_type in ["geo_polygon", "geo_bounding_box", "geo_shape", "geo_distance"]:
+                    if f.get(geo_type, None):
                         if f.get("meta").get("negate"):
-                            filter_dict["must_not"].append({geo_type:f.get(geo_type)})
+                            filter_dict["must_not"].append({geo_type: f.get(geo_type)})
                         else:
-                            filter_dict["filter"].append({geo_type:f.get(geo_type)})
+                            filter_dict["filter"].append({geo_type: f.get(geo_type)})
     logger.info("Filter output %s", filter_dict)
     return filter_dict
 
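
`build_dsl_filter` accumulates clauses into the `filter` and `must_not` lists of `filter_dict`, which mirror the occurrence types of an Elasticsearch `bool` query. A sketch of how such output would typically be spliced into a search body, assuming the dict holds only bool-compatible keys (the clause contents are illustrative, not from this diff):

    filter_dict = {
        "filter": [{"term": {"status": "active"}}],
        "must_not": [{"exists": {"field": "deleted_at"}}],
    }
    body = {"query": {"bool": filter_dict}}  # bool accepts "filter" and "must_not" directly
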
@@ -396,32 +361,6 @@ def parse_duration_interval(interval):
     kwargs[key] = int(interval[0:len(interval)-1])
     return relativedelta(**kwargs)
 
-def convert(response, category_formatter=str):
-    """
-
-    :param response:
-    :return:
-    """
-    if hasattr(response.aggregations, "categories"):
-        for category in response.aggregations.categories:
-            for bucket in category.grids:
-                x, y = lnglat_to_meters(
-                    bucket.centroid.location.lon, bucket.centroid.location.lat
-                )
-                yield {
-                    "lon": bucket.centroid.location.lon,
-                    "lat": bucket.centroid.location.lat,
-                    "x": x,
-                    "y": y,
-                    "c": bucket.centroid.count,
-                    "t": category_formatter(category.key),
-                }
-    else:
-        for bucket in response.aggregations.grids:
-            lon = bucket.centroid.location.lon
-            lat = bucket.centroid.location.lat
-            x, y = lnglat_to_meters(lon, lat)
-            yield {"lon": lon, "lat": lat, "x": x, "y": y, "c": bucket.centroid.count}
 
 def convert_composite(response, categorical, filter_buckets, histogram_interval, category_type, category_format):
     if categorical and filter_buckets is False:
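
The removed `convert` generator projected each bucket centroid from WGS84 lon/lat into Web Mercator meters via `lnglat_to_meters` (imported elsewhere in this module; not shown in the diff). For reference, a minimal sketch of that standard projection, assuming the usual spherical radius of 6378137 m:

    import math

    def lnglat_to_meters_sketch(lon: float, lat: float):
        # Web Mercator (EPSG:3857): x is linear in longitude,
        # y stretches toward the poles.
        origin_shift = math.pi * 6378137  # half of the earth's circumference
        x = lon * origin_shift / 180.0
        y = math.log(math.tan((90.0 + lat) * math.pi / 360.0)) * origin_shift / math.pi
        return x, y

    print(lnglat_to_meters_sketch(0.0, 0.0))  # (0.0, 0.0)
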
@@ -533,20 +472,6 @@ def get_nested_field_from_hit(hit, field_parts: List[str], default=None):
 
     raise ValueError("field must be provided")
 
-def chunk_iter(iterable, chunk_size):
-    chunks = [None] * chunk_size
-    i = -1
-    for i, v in enumerate(iterable):
-        idx = i % chunk_size
-        if idx == 0 and i > 0:
-            i = -1
-            yield (True, chunks)
-        chunks[idx] = v
-
-    if i >= 0:
-        last_written_idx = i % chunk_size
-        yield (False, chunks[0:last_written_idx+1])
-
 def bucket_noop(bucket, search):
     # pylint: disable=unused-argument
     return bucket
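
For reference, the removed `chunk_iter` yielded `(is_full_chunk, chunk)` tuples: `True` with the reused list buffer for each complete chunk, and `False` with a trimmed copy for a trailing partial chunk. A small usage sketch:

    for is_full, chunk in chunk_iter(range(4), 2):
        print(is_full, list(chunk))  # copy: the buffer is reused between yields
    # True [0, 1]
    # False [2, 3]

Note that resetting `i = -1` inside the loop means an iterable whose length is one past a chunk multiple (e.g. five items with `chunk_size=2`) appears to end with `i == -1`, so its final element is never yielded.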