From 69f19071ff42806b25604af299c063153bfcadb9 Mon Sep 17 00:00:00 2001 From: Jack Griffin <106121980+BluIsBest@users.noreply.github.com> Date: Thu, 1 May 2025 01:23:39 -0400 Subject: [PATCH 01/47] Replacing functions with BB Class This class will allow one location to be stored as a single object. Further improvements to call on data from a singular buoy will be added. SSI calculations can now be done by calling "BB-object".get_SSI_(WVHT / PRES / WSPD) class function. --- gatheringInfo.py | 275 ++++++++++++++++++++++++++++++----------------- 1 file changed, 174 insertions(+), 101 deletions(-) diff --git a/gatheringInfo.py b/gatheringInfo.py index 385167d..88a33ee 100644 --- a/gatheringInfo.py +++ b/gatheringInfo.py @@ -1,106 +1,179 @@ from ndbc_api import NdbcApi import datetime +import pandas as pd +import math +# Initialize the NDBC API so we can gather information from buoys api = NdbcApi() - -def gatherWindSpeed(BuoyList): - utc_now = datetime.datetime.now(tz=datetime.UTC) - formatted_endTime = utc_now.strftime("%Y-%m-%d") - unformatted_startTime = utc_now - datetime.timedelta(days=1) - formatted_startTime = unformatted_startTime.strftime("%Y-%m-%d") - List = [] - avgWindSpeed = 0.0 - for buoy in BuoyList: - buoyWind = api.get_data( - station_id=buoy, - mode='stdmet', - start_time=formatted_startTime, - end_time=formatted_endTime, - as_df=True - ) - - try: - subsetWindOnly = buoyWind.iloc[0:3, 1] - avgWindSpeed += buoyWind.iloc[0:3, 1].sum() - List.append(subsetWindOnly) - - except KeyError as e: - print(f"Warning. {e}") - continue - except AttributeError as a: - print(f"Warning. {a}") - continue - - print(f"Average Wind speed for stations {BuoyList}: {avgWindSpeed / 15}") - return avgWindSpeed / 15 - - -def gatherPres(BuoyList): - utc_now = datetime.datetime.now(tz=datetime.UTC) - formatted_endTime = utc_now.strftime("%Y-%m-%d") - unformatted_startTime = utc_now - datetime.timedelta(days=1) - formatted_startTime = unformatted_startTime.strftime("%Y-%m-%d") - avgPres = 0.0 - List = [] - for buoy in BuoyList: - buoyPres = api.get_data( - station_id=buoy, - mode='stdmet', - start_time=formatted_startTime, - end_time=formatted_endTime, - as_df=True - ) - - try: - subsetPresOnly = buoyPres.iloc[0:3, 7] - avgPres += buoyPres.iloc[0:3, 7].sum() - List.append(subsetPresOnly) - - except KeyError as e: - print(f"Warning. {e}") - continue - except AttributeError as a: - print(f"Warning. {a}") - continue - - print(f"Average Pressure for stations {BuoyList}: {avgPres / 15}") - return avgPres / 15 - - -def gatherWaveHeight(BuoyList): - utc_now = datetime.datetime.now(tz=datetime.UTC) - formatted_endTime = utc_now.strftime("%Y-%m-%d") - unformatted_startTime = utc_now - datetime.timedelta(days=1) - formatted_startTime = unformatted_startTime.strftime("%Y-%m-%d") - avgWave = 0.0 - List = [] - print(BuoyList) - for buoy in BuoyList: - buoyWave = api.get_data( - station_id=buoy, - mode='stdmet', - start_time=formatted_startTime, - end_time=formatted_endTime, - as_df=True - ) - - try: - subsetWaveOnly = buoyWave.iloc[0:3, 3] - avgWave += buoyWave.iloc[0:3, 3].sum() - List.append(subsetWaveOnly) - - except KeyError as e: - print(f"Warning. {e}") - continue - except AttributeError as a: - print(f"Warning. {a}") - continue - - return avgWave / 2 - - -nyStationList = ['44025', '44065', 'SDNH4', 'KPTN6', 'MHRN6', 'ROBN4', 'BATN6'] - - -print(gatherWaveHeight(nyStationList)) +# The NDBC uses the UTC time as its standard, so we use this to be able to grab the most recent data. 
+# Initially, this is set to gather readings from within the last hour
+utc_now = datetime.datetime.now(tz=datetime.UTC)
+given_endTime = utc_now.strftime("%Y-%m-%d %H:%M")
+start = utc_now - datetime.timedelta(hours=1)
+given_startTime = start.strftime("%Y-%m-%d %H:%M")
+
+
+# Formatting for debugging & testing
+pd.set_option('display.max_rows', None)
+pd.set_option('display.max_columns', None)
+pd.set_option('display.width', None)
+pd.set_option('display.max_colwidth', None)
+# ______________________________________________________________________________________
+
+'''
+    print(lastFive.iloc[0:3, 3])
+    ^ Prints the first 3 values for WSPD. As long as it appears below lastFive.reset_index(), it is sorted by most recent
+
+    print(lastFive)
+    ^ Prints the whole dataframe, sorted as above, but includes everything
+
+    NaN/nan is what appears in empty value spaces. Use math.isnan(value) to check for NaN. Note that NaN is a float!
+
+    WSPD == 3
+    WVHT == 5
+    PRES == 10
+'''
+
+
+# This class holds one location per object and, within it, that location's closest buoys.
+class BB:
+    # The constructor stores the list of station IDs; callers are expected to pass a non-empty list
+    def __init__(self, id_list):
+        self.__id_list = id_list
+
+    def get_station_ID_list(self):
+        return self.__id_list
+
+    # This function gets the average wind speed across the location's buoy list and returns it to the user
+    def get_SSI_WSPD(self):
+        # Counter tracking how many buoys actually supplied the reading we are searching for
+        WSPD_counter = 0
+        sum_of_WSPD = 0.0
+        # Iterate through the list of buoys provided
+        for buoy in self.__id_list:
+            # For tracking purposes
+            print(buoy)
+            # API call for {buoy} in the station list
+            BuoyWspd = api.get_data(
+                station_id=buoy,
+                mode='stdmet',
+                start_time=given_startTime,
+                end_time=given_endTime,
+                as_df=True
+            )
+            # For tracking progress
+            print(f"Finished API call for buoy: {buoy}")
+
+            # Take the last five rows of the resulting DataFrame (the most recent reports)
+            lastFive = BuoyWspd.tail()
+
+            # Remove the MultiIndex so we can sort by timestamp
+            lastFive = lastFive.reset_index()
+            lastFive = lastFive.sort_values(by='timestamp', ascending=False)
+
+            # Check whether the station's most recent reading has been submitted; otherwise fall back to the
+            # next most recent, and if none of the last 3 entries is valid, skip this buoy's reading.
+            # Whenever a reading is used, WSPD_counter is incremented by 1.
+            lastFive = lastFive.iloc[0:3, 3]
+            WSPD_values = lastFive.tolist()
+            # Take the most recent non-NaN value from the WSPD_values list
+            # (NaN is a float and would break the calculations; NaN entries are simply skipped)
+            for value in WSPD_values:
+                if not math.isnan(value):
+                    sum_of_WSPD += value
+                    WSPD_counter += 1
+                    break
+
+        # Guard against the case where no buoy reported a valid value
+        if WSPD_counter == 0:
+            return float('nan')
+        return sum_of_WSPD / WSPD_counter
+
+    # This function gets the average wave height across the location's buoy list
+    def get_SSI_WVHT(self):
+        # Counter tracking how many buoys actually supplied the reading we are searching for
+        WVHT_counter = 0
+        sum_of_WVHT = 0.0
+        # Iterate through the list of buoys provided
+        for buoy in self.__id_list:
+            # For tracking purposes
+            print(buoy)
+            # API call for {buoy} in the station list
+            BuoyWvht = api.get_data(
+                station_id=buoy,
+                mode='stdmet',
+                start_time=given_startTime,
+                end_time=given_endTime,
+                as_df=True
+            )
+            # For tracking progress
+            print(f"Finished API call for buoy: {buoy}")
+
+            # Take the last five rows of the resulting DataFrame (the most recent reports)
+            lastFive = BuoyWvht.tail()
+
+            # Remove the MultiIndex so we can sort by timestamp
+            lastFive = lastFive.reset_index()
+            lastFive = lastFive.sort_values(by='timestamp', ascending=False)
+
+            # Check whether the station's most recent reading has been submitted; otherwise fall back to the
+            # next most recent, and if none of the last 3 entries is valid, skip this buoy's reading.
+            # Whenever a reading is used, WVHT_counter is incremented by 1.
+            lastFive = lastFive.iloc[0:3, 5]
+            WVHT_values = lastFive.tolist()
+            # Take the most recent non-NaN value from the WVHT_values list
+            # (NaN is a float and would break the calculations; NaN entries are simply skipped)
+            for value in WVHT_values:
+                if not math.isnan(value):
+                    sum_of_WVHT += value
+                    WVHT_counter += 1
+                    break
+
+        # Guard against the case where no buoy reported a valid value
+        if WVHT_counter == 0:
+            return float('nan')
+        return sum_of_WVHT / WVHT_counter
+
+    # This function gets the average pressure across the location's buoy list
+    def get_SSI_PRES(self):
+        # Counter tracking how many buoys actually supplied the reading we are searching for
+        PRES_counter = 0
+        sum_of_PRES = 0.0
+        # Iterate through the list of buoys provided
+        for buoy in self.__id_list:
+            # For tracking purposes
+            print(buoy)
+            # API call for {buoy} in the station list
+            BuoyPres = api.get_data(
+                station_id=buoy,
+                mode='stdmet',
+                start_time=given_startTime,
+                end_time=given_endTime,
+                as_df=True
+            )
+            # For tracking progress
+            print(f"Finished API call for buoy: {buoy}")
+
+            # Take the last five rows of the resulting DataFrame (the most recent reports)
+            lastFive = BuoyPres.tail()
+
+            # Remove the MultiIndex so we can sort by timestamp
+            lastFive = lastFive.reset_index()
+            lastFive = lastFive.sort_values(by='timestamp', ascending=False)
+
+            # Check whether the station's most recent reading has been submitted; otherwise fall back to the
+            # next most recent, and if none of the last 3 entries is valid, skip this buoy's reading.
+            # Whenever a reading is used, PRES_counter is incremented by 1.
+            lastFive = lastFive.iloc[0:3, 9]
+
+            PRES_values = lastFive.tolist()
+            # Take the most recent non-NaN value from the PRES_values list
+            # (NaN is a float and would break the calculations; NaN entries are simply skipped)
+            for value in PRES_values:
+                if not math.isnan(value):
+                    sum_of_PRES += value
+                    PRES_counter += 1
+                    break
+
+        # Guard against the case where no buoy reported a valid value
+        if PRES_counter == 0:
+            return float('nan')
+        # Now we gather the final PRES average and return it
+        return sum_of_PRES / PRES_counter
+
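A minimal sketch of how the BB class is intended to be called, assuming gatheringInfo.py is on the import path and the NDBC service is reachable (the station list is the New York list used later in this series):

```python
import gatheringInfo

# One location, represented by its closest NDBC stations
newYork = gatheringInfo.BB(['44025', '44065', 'SDHN4', 'KPTN6', 'MHRN6', 'ROBN4', 'BATN6'])

# Each call averages the most recent valid reading across the station list
print(newYork.get_SSI_WSPD())  # average wind speed (m/s)
print(newYork.get_SSI_WVHT())  # average wave height (m)
print(newYork.get_SSI_PRES())  # average pressure (millibars)
```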
From 4f79ea2e79956f4a072e35c4404627b7b68af7ee Mon Sep 17 00:00:00 2001
From: Jack Griffin <106121980+BluIsBest@users.noreply.github.com>
Date: Thu, 1 May 2025 23:06:50 -0400
Subject: [PATCH 02/47] Update main.py

Corrected main.py to use the new BB class from gatheringInfo.py

---
 main.py | 16 +++++-----------
 1 file changed, 5 insertions(+), 11 deletions(-)

diff --git a/main.py b/main.py
index d2ea84f..32b9511 100644
--- a/main.py
+++ b/main.py
@@ -1,27 +1,21 @@
 import gatheringInfo
 
-nyStationList = ['44025', '44065', 'SDNH4', 'KPTN6', 'MHRN6', 'ROBN4', 'BATN6']
+nyStationList = ['44025', '44065', 'SDHN4', 'KPTN6', 'MHRN6', 'ROBN4', 'BATN6']
 
-print(gatheringInfo.gatherWindSpeed(nyStationList))
-print(gatheringInfo.gatherWindSpeed(nyStationList) ** 2)
+newYork = gatheringInfo.BB(nyStationList)
 
-SSI = (0.5 * ((gatheringInfo.gatherWindSpeed(nyStationList) / 60)**2) +
-      0.3 * (930 / gatheringInfo.gatherPres(nyStationList)) +
-      0.2 * (gatheringInfo.gatherWaveHeight(nyStationList)) / 12)
+SSI = (0.5 * ((newYork.get_SSI_WSPD() / 60) ** 2) +
+      0.3 * (930 / newYork.get_SSI_PRES()) +
+      0.2 * (newYork.get_SSI_WVHT()) / 12)
 
 if SSI < 0.2:
-    print("\nNote that some values are hard coded. Further research must be done to validate our prediction values")
     print("The expected storm should be a minimal storm")
 if 0.21 < SSI < 0.4:
-    print("\nNote that some values are hard coded. Further research must be done to validate our prediction values")
     print("The expected storm should be a moderate storm")
 if 0.41 < SSI < 0.6:
-    print("\nNote that some values are hard coded. Further research must be done to validate our prediction values")
     print("The expected storm should be a strong storm")
 if 0.61 < SSI < 0.8:
-    print("\nNote that some values are hard coded. Further research must be done to validate our prediction values")
     print("The expected storm should be a severe storm")
 if 0.81 < SSI:
-    print("\nNote that some values are hard coded. Further research must be done to validate our prediction values")
     print("The expected storm should be an extreme storm")
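The SSI formula above can be sanity-checked by hand. A worked example with illustrative readings (made-up values, not taken from any buoy):

```python
# Worked example of the SSI weighting with illustrative readings
wspd, pres, wvht = 30.0, 1000.0, 3.0   # m/s, millibars, m (sample values)

ssi = (0.5 * ((wspd / 60) ** 2) +      # 0.5 * 0.25 = 0.125
       0.3 * (930 / pres) +            # 0.3 * 0.93 = 0.279
       0.2 * wvht / 12)                # 0.2 * 0.25 = 0.05

print(ssi)                             # 0.454, which falls in the "strong storm" band
```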
From c0ba64ec16e4bc432435b9eef8b05b5d133d9171 Mon Sep 17 00:00:00 2001
From: Jack Griffin <106121980+BluIsBest@users.noreply.github.com>
Date: Sat, 3 May 2025 05:14:34 -0400
Subject: [PATCH 03/47] Single Buoy Information

Added functionality to get information from individual buoys, such as
wind speed, wave height, and pressure, along with the longitude &
latitude and the name of the station

---
 gatheringInfo.py | 67 +++++++++++++++++++++++++++++++++++++++++++++++-
 1 file changed, 66 insertions(+), 1 deletion(-)

diff --git a/gatheringInfo.py b/gatheringInfo.py
index 88a33ee..4516a76 100644
--- a/gatheringInfo.py
+++ b/gatheringInfo.py
@@ -176,4 +176,69 @@ def get_SSI_PRES(self):
         # Now we gather the final PRES average and return it
         return sum_of_PRES / PRES_counter
-
+
+
+# To pull information about each individual buoy, we define another class that uses the API to gather the
+# individual information about the buoy, such as WSPD, WVHT, and PRES, but also the location (longitude, latitude)
+# and the nearest town to said buoy ==> EX: station_id 44025 is 30 NM south of Islip, NY
+class Buoy:
+    def __init__(self, station_id):
+        # First we store the ID in the member variable and request the station's data from the NDBC API
+        self.__id = station_id
+        buoy = api.get_data(station_id=station_id,
+                            mode='stdmet',
+                            start_time=given_startTime,
+                            end_time=given_endTime,
+                            as_df=True)
+        # Similar to the BB class, we take the most recent entries and sort them
+        lastFive = buoy.tail().reset_index()
+        lastFive = lastFive.sort_values(by='timestamp', ascending=False)
+        # We build individual lists of the 3 most recent entries (~30 minutes of reporting) so we can still give
+        # a value in case a buoy misses a report (some reports are hourly, some on a 30-minute mark, etc.)
+        WSPD_val = lastFive.iloc[0:3, 3].tolist()
+        WVHT_val = lastFive.iloc[0:3, 5].tolist()
+        PRES_val = lastFive.iloc[0:3, 9].tolist()
+
+        # Default to NaN so the attributes exist even if every recent reading is missing
+        self.__WSPD = float('nan')
+        self.__WVHT = float('nan')
+        self.__PRES = float('nan')
+
+        # We then check each of these lists for the most recent non-NaN reading and save it
+        for value in WSPD_val:
+            if not math.isnan(value):
+                self.__WSPD = value
+                break
+
+        for value in WVHT_val:
+            if not math.isnan(value):
+                self.__WVHT = value
+                break
+
+        for value in PRES_val:
+            if not math.isnan(value):
+                self.__PRES = value
+                break
+
+        # We then request the station metadata to gather the latitude, longitude, and name
+        station_info = api.station(station_id=station_id, as_df=True)
+        self.__NAME = str(station_info.loc['Name'].tolist())
+        self.__LOC = str(station_info.loc['Location'].tolist())
+
+    # Getter functions for the information we need from individual buoys
+    def getWSPD(self):
+        return self.__WSPD
+
+    def getWVHT(self):
+        return self.__WVHT
+
+    def getPRES(self):
+        return self.__PRES
+
+    def getNAME(self):
+        return self.__NAME
+
+    def getLOC(self):
+        return self.__LOC
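A minimal sketch of the new single-buoy path, under the same assumptions as the earlier sketch (station 44025 is the Islip, NY example mentioned in the comments):

```python
import gatheringInfo

islip = gatheringInfo.Buoy('44025')

print(islip.getNAME())   # station name from the NDBC metadata
print(islip.getLOC())    # latitude / longitude string
print(islip.getWSPD())   # most recent non-NaN wind speed, NaN if none was reported
```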
From 60cef664b8cb79d0d852240bf566df8a81e782e2 Mon Sep 17 00:00:00 2001
From: Joshua Bauer
Date: Sat, 3 May 2025 21:30:48 -0400
Subject: [PATCH 04/47] Pushing gui to main

Finished the callback to display data for the specific buoy and for the
region

---
 app_welcome.py          |  39 +++++++++++++
 pages/florida.py        |  78 ++++++++++++++++++++++++++
 pages/georgia.py        |  77 ++++++++++++++++++++++++++
 pages/maine.py          |  77 ++++++++++++++++++++++++++
 pages/maryland.py       |  76 +++++++++++++++++++++++++
 pages/massachusetts.py  |  76 +++++++++++++++++++++++++
 pages/new_york.py       | 119 ++++++++++++++++++++++++++++++++++++++++
 pages/north_carolina.py |  76 +++++++++++++++++++++++++
 pages/south_carolina.py |  31 +++++++++++
 pages/virginia.py       |  77 ++++++++++++++++++++++++++
 10 files changed, 726 insertions(+)
 create mode 100644 app_welcome.py
 create mode 100644 pages/florida.py
 create mode 100644 pages/georgia.py
 create mode 100644 pages/maine.py
 create mode 100644 pages/maryland.py
 create mode 100644 pages/massachusetts.py
 create mode 100644 pages/new_york.py
 create mode 100644 pages/north_carolina.py
 create mode 100644 pages/south_carolina.py
 create mode 100644 pages/virginia.py

diff --git a/app_welcome.py b/app_welcome.py
new file mode 100644
index 0000000..0c73b94
--- /dev/null
+++ b/app_welcome.py
@@ -0,0 +1,39 @@
+# Author: Joshua Bauer
+# Date: May 1, 2025
+# Description: updating the multipage GUI with a home page
+
+# Import packages
+import dash
+from dash import Dash, html, dcc
+
+# Initialize the app and css
+external_stylesheets = ['https://codepen.io/chriddyp/pen/bWLwgP.css']
+app = dash.Dash(__name__, use_pages=True, external_stylesheets=external_stylesheets)
+
+# App layout: a title for the app, a secondary title prompting the user to choose a region, and buttons that take
+# the user to a page with a dropdown menu
+app.layout = html.Div([
+    html.H1(className='row', children='Welcome to Weather Risk Assessment!', style={'textAlign': 'center', 'color': 'blue', 'fontSize': 30}),
+    html.Div([
+        html.Div([
+            html.H3(className='row', children='How To Use:', style={'textAlign': 'left', 'color': 'black', 'fontSize': 24}),
+            html.Div([
+                html.H4(className='row', children='1. Select a region of the eastern United States using the buttons', style={'textAlign': 'left', 'color': 'black', 'fontSize': 20}),
+                html.H4(className='row', children='2. Select a buoy in your selected region', style={'textAlign': 'left', 'color': 'black', 'fontSize': 20}),
+                html.H4(className='row', children='3. Average data from the buoy and the selected region will be displayed with a predicted storm strength', style={'textAlign': 'left', 'color': 'black', 'fontSize': 20})])
+        ]),
+
+        html.H2(className='row', children='Choose a Region in the Eastern United States:', style={'textAlign': 'left', 'color': 'black', 'fontSize': 24}),
+        html.Div([
+            dcc.Link(html.Button(page['name']), href=page['path'], style={'width': '5px'}) for page in dash.page_registry.values()
+        ])
+    ]),
+
+    html.Br(),
+    dash.page_container
+])
+
+# print(dash.page_registry.values())
+
+if __name__ == '__main__':
+    app.run(debug=True)
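The buttons above are generated from dash.page_registry, which is populated when each module under pages/ calls dash.register_page. A minimal page module for reference (a hypothetical pages/example.py, following the same pattern as the pages added below):

```python
# pages/example.py -- minimal Dash page module (hypothetical, for illustration)
import dash
from dash import html

# Registers this module with dash.page_registry; the path and display name
# default to values derived from the module name ("/example", "Example")
dash.register_page(__name__)

layout = html.Div('Example page content')
```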
diff --git a/pages/florida.py b/pages/florida.py
new file mode 100644
index 0000000..ce6f2bd
--- /dev/null
+++ b/pages/florida.py
@@ -0,0 +1,78 @@
+import dash, gatheringInfo
+from dash import html, dcc, callback, Input, Output
+
+
+dash.register_page(__name__)
+
+options = ['none', 'Port Everglades', 'Virginia Key', 'Little Madeira', 'Murray Key', 'Watson Place', 'Fort Myers']
+
+# dropdown menu of buoys in the selected region
+layout = html.Div([
+    html.Div(className='row', children='Southern Florida Buoy Selection',
+             style={'textAlign': 'left', 'color': 'black', 'fontSize': 24}),
+    dcc.Dropdown(options, 'none', id='fl_dropdown'),
+    html.Br(),
+    html.Div(id='fl_output')
+])
+
+
+@callback(
+    Output(component_id='fl_output', component_property='children'),
+    Input(component_id='fl_dropdown', component_property='value')
+)
+def display_single_buoy_data(selected_string):
+    """
+    :param selected_string: the selection from the dropdown menu
+    :return: an empty string if the selection is 'none', otherwise the weather data for the selection
+    """
+    id_list = ['PEGF1', 'VAKF1', 'LMDF1', 'MUKF1', 'WPLF1', 'FMRF1']
+
+    # display individual buoy data
+    if selected_string == 'none':
+        # display nothing
+        return ''
+    else:
+        # convert selected_string to a station ID using id_list ('none' sits at index 0 of options)
+        selection_index = options.index(selected_string)
+        selected_station_id = id_list[selection_index - 1]
+        single_buoy = gatheringInfo.Buoy(selected_station_id)
+
+        buoy_name = single_buoy.getNAME()
+        wind_speed = single_buoy.getWSPD()
+        wave_height = single_buoy.getWVHT()
+        pressure = single_buoy.getPRES()
+
+        region = gatheringInfo.BB(id_list)
+
+        avg_wind_speed = region.get_SSI_WSPD()
+        avg_wave_height = region.get_SSI_WVHT()
+        avg_pressure = region.get_SSI_PRES()
+
+        # determines storm strength
+        SSI = (0.5 * ((avg_wind_speed / 60) ** 2) +
+               0.3 * (930 / avg_pressure) +
+               0.2 * avg_wave_height / 12)
+
+        if SSI < 0.2:
+            storm_strength = "The expected storm should be a minimal storm"
+        elif SSI < 0.4:
+            storm_strength = "The expected storm should be a moderate storm"
+        elif SSI < 0.6:
+            storm_strength = "The expected storm should be a strong storm"
+        elif SSI < 0.8:
+            storm_strength = "The expected storm should be a severe storm"
+        else:
+            storm_strength = "The expected storm should be an extreme storm"
+
+        return (f'Weather conditions at {buoy_name} buoy:\n'
+                f'Wind Speed: {wind_speed} m/s\n'
+                f'Wave Height: {wave_height} m\n'
+                f'Pressure: {pressure} millibars\n'
+                f'Southern Florida Region Weather Data:\n'
+                f'Average Wind Speed: {avg_wind_speed} m/s\n'
+                f'Average Wave Height: {avg_wave_height} m\n'
+                f'Average Pressure: {avg_pressure} millibars\n'
+                f'{storm_strength}'
+                )
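A stripped-down sketch of the wiring these page modules rely on: the callback's Output and Input must name component ids defined in the same page's layout (the Florida ids are assumed here, purely for illustration):

```python
from dash import html, dcc, callback, Input, Output

layout = html.Div([
    dcc.Dropdown(['none', 'Port Everglades'], 'none', id='fl_dropdown'),
    html.Div(id='fl_output'),
])

# The decorator ties the dropdown's value to the output Div's children;
# ids pointing at another page's components would never fire here.
@callback(
    Output('fl_output', 'children'),
    Input('fl_dropdown', 'value'),
)
def show_selection(value):
    return '' if value == 'none' else f'You picked {value}'
```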
diff --git a/pages/georgia.py b/pages/georgia.py
new file mode 100644
index 0000000..8462369
--- /dev/null
+++ b/pages/georgia.py
@@ -0,0 +1,77 @@
+import dash, gatheringInfo
+from dash import html, dcc, callback, Input, Output
+
+dash.register_page(__name__)
+
+options = ['none', 'Fort Pulaski', 'Grays Reef', 'Kings Bay MSF Pier', 'Sapelo Island Reserve']
+
+# dropdown menu of buoys in the selected region
+layout = html.Div([
+    html.Div(className='row', children='Georgia Region Buoy Selection',
+             style={'textAlign': 'left', 'color': 'black', 'fontSize': 24}),
+    dcc.Dropdown(options, 'none', id='ga_dropdown'),
+    html.Br(),
+    html.Div(id='ga_output')
+])
+
+
+@callback(
+    Output(component_id='ga_output', component_property='children'),
+    Input(component_id='ga_dropdown', component_property='value')
+)
+def display_single_buoy_data(selected_string):
+    """
+    :param selected_string: the selection from the dropdown menu
+    :return: an empty string if the selection is 'none', otherwise the weather data for the selection
+    """
+    id_list = ['FPKG1', '41008', 'KBMG1', 'SAQG1']
+
+    # display individual buoy data
+    if selected_string == 'none':
+        # display nothing
+        return ''
+    else:
+        # convert selected_string to a station ID using id_list ('none' sits at index 0 of options)
+        selection_index = options.index(selected_string)
+        selected_station_id = id_list[selection_index - 1]
+        single_buoy = gatheringInfo.Buoy(selected_station_id)
+
+        buoy_name = single_buoy.getNAME()
+        wind_speed = single_buoy.getWSPD()
+        wave_height = single_buoy.getWVHT()
+        pressure = single_buoy.getPRES()
+
+        region = gatheringInfo.BB(id_list)
+
+        avg_wind_speed = region.get_SSI_WSPD()
+        avg_wave_height = region.get_SSI_WVHT()
+        avg_pressure = region.get_SSI_PRES()
+
+        # determines storm strength
+        SSI = (0.5 * ((avg_wind_speed / 60) ** 2) +
+               0.3 * (930 / avg_pressure) +
+               0.2 * avg_wave_height / 12)
+
+        if SSI < 0.2:
+            storm_strength = "The expected storm should be a minimal storm"
+        elif SSI < 0.4:
+            storm_strength = "The expected storm should be a moderate storm"
+        elif SSI < 0.6:
+            storm_strength = "The expected storm should be a strong storm"
+        elif SSI < 0.8:
+            storm_strength = "The expected storm should be a severe storm"
+        else:
+            storm_strength = "The expected storm should be an extreme storm"
+
+        return (f'Weather conditions at {buoy_name} buoy:\n'
+                f'Wind Speed: {wind_speed} m/s\n'
+                f'Wave Height: {wave_height} m\n'
+                f'Pressure: {pressure} millibars\n'
+                f'Georgia Region Weather Data:\n'
+                f'Average Wind Speed: {avg_wind_speed} m/s\n'
+                f'Average Wave Height: {avg_wave_height} m\n'
+                f'Average Pressure: {avg_pressure} millibars\n'
+                f'{storm_strength}'
+                )
\ No newline at end of file
diff --git a/pages/maine.py b/pages/maine.py
new file mode 100644
index 0000000..bec353c
--- /dev/null
+++ b/pages/maine.py
@@ -0,0 +1,77 @@
+import dash, gatheringInfo
+from dash import html, dcc, callback, Input, Output
+
+dash.register_page(__name__)
+
+options = ['none', 'Eastport', 'Bar Harbor', 'Portland', 'Western Maine Shelf', 'Matinicus Rock', 'Jonesport']
+
+# dropdown menu of buoys in the selected region
+layout = html.Div([
+    html.Div(className='row', children='Maine Region Buoy Selection',
+             style={'textAlign': 'left', 'color': 'black', 'fontSize': 24}),
+    dcc.Dropdown(options, 'none', id='me_dropdown'),
+    html.Br(),
+    html.Div(id='me_output')
+])
+
+
+@callback(
+    Output(component_id='me_output', component_property='children'),
+    Input(component_id='me_dropdown', component_property='value')
+)
+def display_single_buoy_data(selected_string):
+    """
+    :param selected_string: the selection from the dropdown menu
+    :return: an empty string if the selection is 'none', otherwise the weather data for the selection
+    """
+    id_list = ['PSBM1', 'ATGM1', 'CASM1', '44030', 'MISM1', '44027']
+
+    # display individual buoy data
+    if selected_string == 'none':
+        # display nothing
+        return ''
+    else:
+        # convert selected_string to a station ID using id_list ('none' sits at index 0 of options)
+        selection_index = options.index(selected_string)
+        selected_station_id = id_list[selection_index - 1]
+        single_buoy = gatheringInfo.Buoy(selected_station_id)
+
+        buoy_name = single_buoy.getNAME()
+        wind_speed = single_buoy.getWSPD()
+        wave_height = single_buoy.getWVHT()
+        pressure = single_buoy.getPRES()
+
+        region = gatheringInfo.BB(id_list)
+
+        avg_wind_speed = region.get_SSI_WSPD()
+        avg_wave_height = region.get_SSI_WVHT()
+        avg_pressure = region.get_SSI_PRES()
+
+        # determines storm strength
+        SSI = (0.5 * ((avg_wind_speed / 60) ** 2) +
+               0.3 * (930 / avg_pressure) +
+               0.2 * avg_wave_height / 12)
+
+        if SSI < 0.2:
+            storm_strength = "The expected storm should be a minimal storm"
+        elif SSI < 0.4:
+            storm_strength = "The expected storm should be a moderate storm"
+        elif SSI < 0.6:
+            storm_strength = "The expected storm should be a strong storm"
+        elif SSI < 0.8:
+            storm_strength = "The expected storm should be a severe storm"
+        else:
+            storm_strength = "The expected storm should be an extreme storm"
+
+        return (f'Weather conditions at {buoy_name} buoy:\n'
+                f'Wind Speed: {wind_speed} m/s\n'
+                f'Wave Height: {wave_height} m\n'
+                f'Pressure: {pressure} millibars\n'
+                f'Maine Region Weather Data:\n'
+                f'Average Wind Speed: {avg_wind_speed} m/s\n'
+                f'Average Wave Height: {avg_wave_height} m\n'
+                f'Average Pressure: {avg_pressure} millibars\n'
+                f'{storm_strength}'
+                )
\ No newline at end of file
diff --git a/pages/maryland.py b/pages/maryland.py
new file mode 100644
index 0000000..da7cec7
--- /dev/null
+++ b/pages/maryland.py
@@ -0,0 +1,76 @@
+import dash, gatheringInfo
+from dash import html, dcc, callback, Input, Output
+
+dash.register_page(__name__)
+
+options = ['none', 'Baltimore', 'Chesapeake Bay', 'Annapolis', 'Washington D.C.', 'Cambridge', 'Cooperative Oxford Laboratory']
+
+# dropdown menu of buoys in the selected region
+layout = html.Div([
+    html.Div(className='row', children='Maryland Region Buoy Selection',
+             style={'textAlign': 'left', 'color': 'black', 'fontSize': 24}),
+    dcc.Dropdown(options, 'none', id='md_dropdown'),
+    html.Br(),
+    html.Div(id='md_output')
+])
+
+
+@callback(
+    Output(component_id='md_output', component_property='children'),
+    Input(component_id='md_dropdown', component_property='value')
+)
+def display_single_buoy_data(selected_string):
+    """
+    :param selected_string: the selection from the dropdown menu
+    :return: an empty string if the selection is 'none', otherwise the weather data for the selection
+    """
+    id_list = ['BLTM2', 'CPVM2', 'APAM2', 'WASD2', 'CAMM2', 'CXLM2']
+
+    # display individual buoy data
+    if selected_string == 'none':
+        # display nothing
+        return ''
+    else:
+        # convert selected_string to a station ID using id_list ('none' sits at index 0 of options)
+        selection_index = options.index(selected_string)
+        selected_station_id = id_list[selection_index - 1]
+        single_buoy = gatheringInfo.Buoy(selected_station_id)
+
+        buoy_name = single_buoy.getNAME()
+        wind_speed = single_buoy.getWSPD()
+        wave_height = single_buoy.getWVHT()
+        pressure = single_buoy.getPRES()
+
+        region = gatheringInfo.BB(id_list)
+
+        avg_wind_speed = region.get_SSI_WSPD()
+        avg_wave_height = region.get_SSI_WVHT()
+        avg_pressure = region.get_SSI_PRES()
+
+        # determines storm strength
+        SSI = (0.5 * ((avg_wind_speed / 60) ** 2) +
+               0.3 * (930 / avg_pressure) +
+               0.2 * avg_wave_height / 12)
+
+        if SSI < 0.2:
+            storm_strength = "The expected storm should be a minimal storm"
+        elif SSI < 0.4:
+            storm_strength = "The expected storm should be a moderate storm"
+        elif SSI < 0.6:
+            storm_strength = "The expected storm should be a strong storm"
+        elif SSI < 0.8:
+            storm_strength = "The expected storm should be a severe storm"
+        else:
+            storm_strength = "The expected storm should be an extreme storm"
+
+        return (f'Weather conditions at {buoy_name} buoy:\n'
+                f'Wind Speed: {wind_speed} m/s\n'
+                f'Wave Height: {wave_height} m\n'
+                f'Pressure: {pressure} millibars\n'
+                f'Maryland Region Weather Data:\n'
+                f'Average Wind Speed: {avg_wind_speed} m/s\n'
+                f'Average Wave Height: {avg_wave_height} m\n'
+                f'Average Pressure: {avg_pressure} millibars\n'
+                f'{storm_strength}'
+                )
\ No newline at end of file
diff --git a/pages/massachusetts.py b/pages/massachusetts.py
new file mode 100644
index 0000000..9d1adfe
--- /dev/null
+++ b/pages/massachusetts.py
@@ -0,0 +1,76 @@
+import dash, gatheringInfo
+from dash import html, dcc, callback, Input, Output
+
+dash.register_page(__name__)
+
+options = ['none', 'Boston', 'Cape Cod Bay', 'Nantucket Sound', 'Massachusetts Bay', 'New Bedford', 'Buzzards Bay']
+
+# dropdown menu of buoys in the selected region
+layout = html.Div([
+    html.Div('Massachusetts Region Buoy Selection',
+             style={'textAlign': 'left', 'color': 'black', 'fontSize': 24}),
+    dcc.Dropdown(options, 'none', id='mass_dropdown'),
+    html.Br(),
+    html.Div(id='mass_output')
+])
+
+
+@callback(
+    Output(component_id='mass_output', component_property='children'),
+    Input(component_id='mass_dropdown', component_property='value')
+)
+def display_single_buoy_data(selected_string):
+    """
+    :param selected_string: the selection from the dropdown menu
+    :return: an empty string if the selection is 'none', otherwise the weather data for the selection
+    """
+    id_list = ['BHBM3', '44090', '44020', '44029', 'NBGM3', 'BUZM3']
+
+    # display individual buoy data
+    if selected_string == 'none':
+        # display nothing
+        return ''
+    else:
+        # convert selected_string to a station ID using id_list ('none' sits at index 0 of options)
+        selection_index = options.index(selected_string)
+        selected_station_id = id_list[selection_index - 1]
+        single_buoy = gatheringInfo.Buoy(selected_station_id)
+
+        buoy_name = single_buoy.getNAME()
+        wind_speed = single_buoy.getWSPD()
+        wave_height = single_buoy.getWVHT()
+        pressure = single_buoy.getPRES()
+
+        region = gatheringInfo.BB(id_list)
+
+        avg_wind_speed = region.get_SSI_WSPD()
+        avg_wave_height = region.get_SSI_WVHT()
+        avg_pressure = region.get_SSI_PRES()
+
+        # determines storm strength
+        SSI = (0.5 * ((avg_wind_speed / 60) ** 2) +
+               0.3 * (930 / avg_pressure) +
+               0.2 * avg_wave_height / 12)
+
+        if SSI < 0.2:
+            storm_strength = "The expected storm should be a minimal storm"
+        elif SSI < 0.4:
+            storm_strength = "The expected storm should be a moderate storm"
+        elif SSI < 0.6:
+            storm_strength = "The expected storm should be a strong storm"
+        elif SSI < 0.8:
+            storm_strength = "The expected storm should be a severe storm"
+        else:
+            storm_strength = "The expected storm should be an extreme storm"
+
+        return (f'Weather conditions at {buoy_name} buoy:\n'
+                f'Wind Speed: {wind_speed} m/s\n'
+                f'Wave Height: {wave_height} m\n'
+                f'Pressure: {pressure} millibars\n'
+                f'Massachusetts Region Weather Data:\n'
+                f'Average Wind Speed: {avg_wind_speed} m/s\n'
+                f'Average Wave Height: {avg_wave_height} m\n'
+                f'Average Pressure: {avg_pressure} millibars\n'
+                f'{storm_strength}'
+                )
\ No newline at end of file
diff --git a/pages/new_york.py b/pages/new_york.py
new file mode 100644
index 0000000..09f176e
--- /dev/null
+++ b/pages/new_york.py
@@ -0,0 +1,119 @@
+import dash, gatheringInfo
+from dash import html, dcc, callback, Input, Output
+
+dash.register_page(__name__)
+
+options = ['none', 'Islip, NY', 'Breezy Point, NY', 'Sandy Hook, NJ', 'Kings Point, NY', 'Mariners Harbor, NY', 'Robbins Reef, NJ', 'The Battery, NY']
+
+# dropdown menu of buoys in the selected region
+layout = html.Div([
+    html.Div('New York Region Buoy Selection',
+             style={'textAlign': 'left', 'color': 'black', 'fontSize': 24}),
+
+    dcc.Dropdown(
+        options, 'none', id='ny_dropdown'),
+    html.Br(),
+    html.Div(id='ny_output')
+])
+
+
+@callback(
+    Output(component_id='ny_output', component_property='children'),
+    Input(component_id='ny_dropdown', component_property='value')
+)
+def display_single_buoy_data(selected_string):
+    """
+    :param selected_string: the selection from the dropdown menu
+    :return: an empty string if the selection is 'none', otherwise the weather data for the selection
+    """
+    id_list = ['44025', '44065', 'SDHN4', 'KPTN6', 'MHRN6', 'ROBN4', 'BATN6']
+
+    # display individual buoy data
+    if selected_string == 'none':
+        # display nothing
+        return ''
+    else:
+        # convert selected_string to a station ID using id_list ('none' sits at index 0 of options)
+        selection_index = options.index(selected_string)
+        selected_station_id = id_list[selection_index - 1]
+        single_buoy = gatheringInfo.Buoy(selected_station_id)
+
+        buoy_name = single_buoy.getNAME()
+        wind_speed = single_buoy.getWSPD()
+        wave_height = single_buoy.getWVHT()
+        pressure = single_buoy.getPRES()
+
+        region = gatheringInfo.BB(id_list)
+
+        avg_wind_speed = region.get_SSI_WSPD()
+        avg_wave_height = region.get_SSI_WVHT()
+        avg_pressure = region.get_SSI_PRES()
+
+        # determines storm strength
+        SSI = (0.5 * ((avg_wind_speed / 60) ** 2) +
+               0.3 * (930 / avg_pressure) +
+               0.2 * avg_wave_height / 12)
+
+        if SSI < 0.2:
+            storm_strength = "The expected storm should be a minimal storm"
+        elif SSI < 0.4:
+            storm_strength = "The expected storm should be a moderate storm"
+        elif SSI < 0.6:
+            storm_strength = "The expected storm should be a strong storm"
+        elif SSI < 0.8:
+            storm_strength = "The expected storm should be a severe storm"
+        else:
+            storm_strength = "The expected storm should be an extreme storm"
+
+        return (f'Weather conditions at {buoy_name} buoy:\n'
+                f'Wind Speed: {wind_speed} m/s\n'
+                f'Wave Height: {wave_height} m\n'
+                f'Pressure: {pressure} millibars\n'
+                f'New York Metropolitan Region Weather Data:\n'
+                f'Average Wind Speed: {avg_wind_speed} m/s\n'
+                f'Average Wave Height: {avg_wave_height} m\n'
+                f'Average Pressure: {avg_pressure} millibars\n'
+                f'{storm_strength}'
+                )
+
+# @callback(
+#     Output(component_id='ny_output', component_property='children', allow_duplicate=True),
+#     Input(component_id='ny_dropdown', component_property='value'),
+#     prevent_initial_call='initial_duplicate'
+# )
+# def display_region_data():
+#     """
+#     :return: display averaged weather data over the selected region and determine the safety of the storm
+#     """
+#     id_list = ['44025', '44065', 'SDHN4', 'KPTN6', 'MHRN6', 'ROBN4', 'BATN6']
+#     region = gatheringInfo.BB(id_list)
+#
+#     avg_wind_speed = region.get_SSI_WSPD()
+#     avg_wave_height = region.get_SSI_WVHT()
+#     avg_pressure = region.get_SSI_PRES()
+#
+#     # determines storm strength
+#     SSI = (0.5 * ((avg_wind_speed / 60) ** 2) +
+#            0.3 * (930 / avg_pressure) +
+#            0.2 * avg_wave_height / 12)
+#
+#     if SSI < 0.2:
+#         storm_strength = "The expected storm should be a minimal storm"
+#     elif SSI < 0.4:
+#         storm_strength = "The expected storm should be a moderate storm"
+#     elif SSI < 0.6:
+#         storm_strength = "The expected storm should be a strong storm"
+#     elif SSI < 0.8:
+#         storm_strength = "The expected storm should be a severe storm"
+#     else:
+#         storm_strength = "The expected storm should be an extreme storm"
+#
+#     return (f"New York Metropolitan Region Weather Data:\n"
+#             f"Wind Speed: {avg_wind_speed} \n"
+#             f"Wave Height: {avg_wave_height} \n"
+#             f"Pressure: {avg_pressure} \n"
+#             f"{storm_strength}")
\ No newline at end of file
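Every page module repeats the same report-building logic. A shared helper along these lines could replace the duplicated bodies; a sketch only, where build_report is a hypothetical name and not part of this patch:

```python
# gatheringInfo.py (sketch): shared report builder for the page modules,
# assuming the Buoy and BB classes defined earlier in this file
def build_report(station_id, id_list, region_name):
    single_buoy = Buoy(station_id)
    region = BB(id_list)

    avg_wspd = region.get_SSI_WSPD()
    avg_wvht = region.get_SSI_WVHT()
    avg_pres = region.get_SSI_PRES()

    # Same SSI weighting used throughout this series
    ssi = (0.5 * ((avg_wspd / 60) ** 2) +
           0.3 * (930 / avg_pres) +
           0.2 * avg_wvht / 12)

    return (f'Weather conditions at {single_buoy.getNAME()} buoy:\n'
            f'Wind Speed: {single_buoy.getWSPD()} m/s\n'
            f'Wave Height: {single_buoy.getWVHT()} m\n'
            f'Pressure: {single_buoy.getPRES()} millibars\n'
            f'{region_name} Weather Data:\n'
            f'Average Wind Speed: {avg_wspd} m/s\n'
            f'Average Wave Height: {avg_wvht} m\n'
            f'Average Pressure: {avg_pres} millibars\n'
            f'SSI: {ssi:.2f}')
```

Each page callback would then reduce to a single call such as build_report(selected_station_id, id_list, 'Maine Region').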
diff --git a/pages/north_carolina.py b/pages/north_carolina.py
new file mode 100644
index 0000000..0680465
--- /dev/null
+++ b/pages/north_carolina.py
@@ -0,0 +1,76 @@
+import dash, gatheringInfo
+from dash import html, dcc, callback, Input, Output, dash_table
+
+dash.register_page(__name__)
+
+options = ['none', 'Frying Pan Shoals', 'Diamond Shoals', 'Beaufort', 'Hatteras', 'Nags Head', 'Wilmington Harbor']
+
+# dropdown menu of buoys in the selected region
+layout = html.Div([
+    html.Div(className='row', children='North Carolina Region Buoy Selection',
+             style={'textAlign': 'left', 'color': 'black', 'fontSize': 24}),
+    dcc.Dropdown(options, 'none', id='nc_dropdown'),
+    html.Br(),
+    html.Div(id='nc_output')
+])
+
+
+@callback(
+    Output(component_id='nc_output', component_property='children'),
+    Input(component_id='nc_dropdown', component_property='value')
+)
+def display_single_buoy_data(selected_string):
+    """
+    :param selected_string: the selection from the dropdown menu
+    :return: an empty string if the selection is 'none', otherwise the weather data for the selection
+    """
+    id_list = ['41013', '41025', 'BFTN7', 'HCGN7', '44086', 'WLON7']
+
+    # display individual buoy data
+    if selected_string == 'none':
+        # display nothing
+        return ''
+    else:
+        # convert selected_string to a station ID using id_list ('none' sits at index 0 of options)
+        selection_index = options.index(selected_string)
+        selected_station_id = id_list[selection_index - 1]
+        single_buoy = gatheringInfo.Buoy(selected_station_id)
+
+        buoy_name = single_buoy.getNAME()
+        wind_speed = single_buoy.getWSPD()
+        wave_height = single_buoy.getWVHT()
+        pressure = single_buoy.getPRES()
+
+        region = gatheringInfo.BB(id_list)
+
+        avg_wind_speed = region.get_SSI_WSPD()
+        avg_wave_height = region.get_SSI_WVHT()
+        avg_pressure = region.get_SSI_PRES()
+
+        # determines storm strength
+        SSI = (0.5 * ((avg_wind_speed / 60) ** 2) +
+               0.3 * (930 / avg_pressure) +
+               0.2 * avg_wave_height / 12)
+
+        if SSI < 0.2:
+            storm_strength = "The expected storm should be a minimal storm"
+        elif SSI < 0.4:
+            storm_strength = "The expected storm should be a moderate storm"
+        elif SSI < 0.6:
+            storm_strength = "The expected storm should be a strong storm"
+        elif SSI < 0.8:
+            storm_strength = "The expected storm should be a severe storm"
+        else:
+            storm_strength = "The expected storm should be an extreme storm"
+
+        return (f'Weather conditions at {buoy_name} buoy:\n'
+                f'Wind Speed: {wind_speed} m/s\n'
+                f'Wave Height: {wave_height} m\n'
+                f'Pressure: {pressure} millibars\n'
+                f'North Carolina Region Weather Data:\n'
+                f'Average Wind Speed: {avg_wind_speed} m/s\n'
+                f'Average Wave Height: {avg_wave_height} m\n'
+                f'Average Pressure: {avg_pressure} millibars\n'
+                f'{storm_strength}'
+                )
\ No newline at end of file
diff --git a/pages/south_carolina.py b/pages/south_carolina.py
new file mode 100644
index 0000000..ae304c5
--- /dev/null
+++ b/pages/south_carolina.py
@@ -0,0 +1,31 @@
+import dash, gatheringInfo
+from dash import html, dcc, callback, Input, Output, dash_table
+
+dash.register_page(__name__)
+
+options = ['none', 'Charleston', 'Winyah Bay Reserve', 'Springmaid Pier', 'Bennett\'s Point', 'Capers Nearshore', 'Fort Johnson']
+
+# dropdown menu of buoys in the selected region
+layout = html.Div([
+    html.Div(className='row', children='South Carolina Region Buoy Selection',
+             style={'textAlign': 'left', 'color': 'black', 'fontSize': 24}),
+    dcc.Dropdown(options, 'none', id='sc_dropdown'),
+    html.Br(),
+    html.Div(id='sc_output')
+])
+
+
+@callback(
+    Output(component_id='sc_output', component_property='children'),
+    Input(component_id='sc_dropdown', component_property='value')
+)
+def display_data(selected_string):
+    """Display wind speed for the selected buoy, or nothing when 'none' is selected."""
+    id_list = ['CHTS1', 'WYSS1', 'MROS1', 'ACXS1', '41029', 'FMNS1']
+    if selected_string == 'none':
+        # display nothing
+        return ''
+    else:
+        # convert selected_string to a station ID using id_list ('none' sits at index 0 of options)
+        # (gatherWindSpeed was removed in an earlier patch, so we read the value from a Buoy object instead)
+        selected_station_id = id_list[options.index(selected_string) - 1]
+        single_buoy = gatheringInfo.Buoy(selected_station_id)
+        # dash_table is set up through a list of row dictionaries
+        table = dash_table.DataTable([{'Data Type': 'Wind Speed (m/s)', 'Value': single_buoy.getWSPD()}])
+        return table
\ No newline at end of file
diff --git a/pages/virginia.py b/pages/virginia.py
new file mode 100644
index 0000000..288c296
--- /dev/null
+++ b/pages/virginia.py
@@ -0,0 +1,77 @@
+import dash, gatheringInfo
+from dash import html, dcc, callback, Input, Output, dash_table
+
+dash.register_page(__name__)
+
+options = ['none', 'Virginia Beach', 'Cape Henry', 'York Spit', 'Rappahannock Light', 'Dahlgren', 'South Craney Island']
+
+# dropdown menu of buoys in the selected region
+layout = html.Div([
+    html.Div(className='row', children='Virginia Region Buoy Selection',
+             style={'textAlign': 'left', 'color': 'black', 'fontSize': 24}),
+    dcc.Dropdown(options, 'none', id='va_dropdown'),
+    html.Br(),
+    html.Div(id='va_output')
+])
+
+
+@callback(
+    Output(component_id='va_output', component_property='children'),
+    Input(component_id='va_dropdown', component_property='value')
+)
+def display_single_buoy_data(selected_string):
+    """
+    :param selected_string: the selection from the dropdown menu
+    :return: an empty string if the selection is 'none', otherwise the weather data for the selection
+    """
+    id_list = ['44088', '44099', '44072', 'RPLV2', 'NCDV2', 'CRYV2']
+
+    # display individual buoy data
+    if selected_string == 'none':
+        # display nothing
+        return ''
+    else:
+        # convert selected_string to a station ID using id_list ('none' sits at index 0 of options)
+        selection_index = options.index(selected_string)
+        selected_station_id = id_list[selection_index - 1]
+        single_buoy = gatheringInfo.Buoy(selected_station_id)
+
+        buoy_name = single_buoy.getNAME()
+        wind_speed = single_buoy.getWSPD()
+        wave_height = single_buoy.getWVHT()
+        pressure = single_buoy.getPRES()
+
+        region = gatheringInfo.BB(id_list)
+
+        avg_wind_speed = region.get_SSI_WSPD()
+        avg_wave_height = region.get_SSI_WVHT()
+        avg_pressure = region.get_SSI_PRES()
+
+        # determines storm strength
+        SSI = (0.5 * ((avg_wind_speed / 60) ** 2) +
+               0.3 * (930 / avg_pressure) +
+               0.2 * avg_wave_height / 12)
+
+        if SSI < 0.2:
+            storm_strength = "The expected storm should be a minimal storm"
+        elif SSI < 0.4:
+            storm_strength = "The expected storm should be a moderate storm"
+        elif SSI < 0.6:
+            storm_strength = "The expected storm should be a strong storm"
+        elif SSI < 0.8:
+            storm_strength = "The expected storm should be a severe storm"
+        else:
+            storm_strength = "The expected storm should be an extreme storm"
+
+        return (f'Weather conditions at {buoy_name} buoy:\n'
+                f'Wind Speed: {wind_speed} m/s\n'
+                f'Wave Height: {wave_height} m\n'
+                f'Pressure: {pressure} millibars\n'
+                f'Virginia Region Weather Data:\n'
+                f'Average Wind Speed: {avg_wind_speed} m/s\n'
+                f'Average Wave Height: {avg_wave_height} m\n'
+                f'Average Pressure: {avg_pressure} millibars\n'
+                f'{storm_strength}'
+                )
\ No newline at end of file
From 5c631f528cc5cb08670e3d4cca5b3bcc765a3243 Mon Sep 17 00:00:00 2001
From: Jack Griffin <106121980+BluIsBest@users.noreply.github.com>
Date: Sat, 3 May 2025 22:08:50 -0400
Subject: [PATCH 05/47] Adding Edited NDBC_API for functionality

Because our project needs to grab the most recent information from the
buoys, we sort by the most recent hour and minutes, which the ndbc_api
library does not natively support. We edited the API so that we can
sort by hours & minutes, and we are uploading the edited API to our
project so someone could seamlessly install our project and have it
work for them. All credit to the creators of the ndbc_api; I take no
credit for any of the code that makes this API work.
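For reference, a sketch of the call shape this edit is meant to support: minute-resolution bounds passed straight to get_data, as gatheringInfo.py does (assumes the vendored copy of ndbc_api is the one installed):

```python
import datetime
from ndbc_api import NdbcApi

api = NdbcApi()

# Bounds with hour/minute resolution, matching the format used in gatheringInfo.py
now = datetime.datetime.now(tz=datetime.UTC)
start = (now - datetime.timedelta(hours=1)).strftime("%Y-%m-%d %H:%M")
end = now.strftime("%Y-%m-%d %H:%M")

# With the vendored edit, results are filterable down to the minute
df = api.get_data(station_id='44025', mode='stdmet',
                  start_time=start, end_time=end, as_df=True)
```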
--- ndbc_api-0.24.12.20.1.dist-info/INSTALLER | 1 + ndbc_api-0.24.12.20.1.dist-info/LICENSE | 21 ++ ndbc_api-0.24.12.20.1.dist-info/METADATA | 293 ++++++++++++++++++++++ ndbc_api-0.24.12.20.1.dist-info/RECORD | 160 ++++++++++++ ndbc_api-0.24.12.20.1.dist-info/REQUESTED | 0 ndbc_api-0.24.12.20.1.dist-info/WHEEL | 4 + 6 files changed, 479 insertions(+) create mode 100644 ndbc_api-0.24.12.20.1.dist-info/INSTALLER create mode 100644 ndbc_api-0.24.12.20.1.dist-info/LICENSE create mode 100644 ndbc_api-0.24.12.20.1.dist-info/METADATA create mode 100644 ndbc_api-0.24.12.20.1.dist-info/RECORD create mode 100644 ndbc_api-0.24.12.20.1.dist-info/REQUESTED create mode 100644 ndbc_api-0.24.12.20.1.dist-info/WHEEL diff --git a/ndbc_api-0.24.12.20.1.dist-info/INSTALLER b/ndbc_api-0.24.12.20.1.dist-info/INSTALLER new file mode 100644 index 0000000..a1b589e --- /dev/null +++ b/ndbc_api-0.24.12.20.1.dist-info/INSTALLER @@ -0,0 +1 @@ +pip diff --git a/ndbc_api-0.24.12.20.1.dist-info/LICENSE b/ndbc_api-0.24.12.20.1.dist-info/LICENSE new file mode 100644 index 0000000..24ef9ba --- /dev/null +++ b/ndbc_api-0.24.12.20.1.dist-info/LICENSE @@ -0,0 +1,21 @@ +MIT License + +Copyright (c) 2024 Christopher Jellen + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE. diff --git a/ndbc_api-0.24.12.20.1.dist-info/METADATA b/ndbc_api-0.24.12.20.1.dist-info/METADATA new file mode 100644 index 0000000..546824c --- /dev/null +++ b/ndbc_api-0.24.12.20.1.dist-info/METADATA @@ -0,0 +1,293 @@ +Metadata-Version: 2.1 +Name: ndbc-api +Version: 0.24.12.20.1 +Summary: A Python API for the National Data Buoy Center. +License: MIT +Author: cdjellen +Author-email: cdjellen@gmail.com +Requires-Python: >=3.9,<3.13 +Classifier: License :: OSI Approved :: MIT License +Classifier: Programming Language :: Python :: 3 +Classifier: Programming Language :: Python :: 3.9 +Classifier: Programming Language :: Python :: 3.10 +Classifier: Programming Language :: Python :: 3.11 +Classifier: Programming Language :: Python :: 3.12 +Requires-Dist: beautifulsoup4 (>=4,<5) +Requires-Dist: html5lib (>=1.1,<2.0) +Requires-Dist: numpy (>=1.26.3) +Requires-Dist: pandas (>=2.0.0) +Requires-Dist: requests (>=2.10.0) +Requires-Dist: scipy (>=1.7.3) +Requires-Dist: xarray (>=2022.6.0) +Description-Content-Type: text/markdown + +
+# NDBC API
+
+[![Coverage Status](https://coveralls.io/repos/github/CDJellen/ndbc-api/badge.svg?branch=main)](https://coveralls.io/github/CDJellen/ndbc-api?branch=main)
+[![PyPI](https://img.shields.io/pypi/v/ndbc-api)](https://pypi.org/project/ndbc-api/#history)
+[![PyPI - Status](https://img.shields.io/pypi/status/ndbc-api)](https://pypi.org/project/ndbc-api/)
+[![PyPI - Python Version](https://img.shields.io/pypi/pyversions/ndbc-api)](https://pypi.org/project/ndbc-api/)
+[![LinkedIn](https://img.shields.io/badge/LinkedIn-0077B5?style=for-the-badge&logo=linkedin&logoColor=white&style=flat-square)](https://www.linkedin.com/in/cdjellen/)
+[![GitHub](https://img.shields.io/github/license/cdjellen/ndbc-api)](https://github.com/cdjellen/ndbc-api/blob/main/LICENSE)
+[![PyPI - Downloads](https://img.shields.io/pypi/dm/ndbc-api)](https://pypi.org/project/ndbc-api/)
+
+### A Python API for the National Data Buoy Center
+
+ + +The National Oceanic and Atmospheric Association's National Data Buoy Center maintains marine monitoring and observation stations around the world[^1]. These stations report atmospheric, oceanographic, and other meterological data at regular intervals to the NDBC. Measurements are made available over HTTP through the NDBC's data service. + +The ndbc-api is a python library that makes this data more widely accessible. + +The ndbc-api is primarily built to parse whitespace-delimited oceanographic and atmospheric data distributed as text files for available time ranges, on a station-by-station basis[^2]. Measurements are typically distributed as `utf-8` encoded, station-by-station, fixed-period text files. More information on the measurements and methodology are available [on the NDBC website](https://www.ndbc.noaa.gov/docs/ndbc_web_data_guide.pdf)[^3]. + +Please see [the included example notebook](/notebooks/overview.ipynb) for a more detailed walkthrough of the API's capabilities. + +[^1]: https://www.ndbc.noaa.gov/ +[^2]: https://www.ndbc.noaa.gov/obs.shtml +[^3]: https://www.ndbc.noaa.gov/docs/ndbc_web_data_guide.pdf + + + +#### Installation +The `ndbc-api` can be installed via PIP: + +```sh +pip install ndbc-api +``` + +Conda users can install the `ndbc-api` via the `conda-forge` channel: + +```sh +conda install -c conda-forge ndbc-api +``` + +Finally, to install the `ndbc-api` from source, clone the repository and run the following command: + +```sh +python setup.py install +``` + +#### Requirements +The `ndbc-api` has been tested on Python 3.6, 3.7, 3.8, 3.9, and 3.10. Python 2 support is not currently planned, but could be implemented based on the needs of the atmospheric research community. + +The API uses synchronous HTTP requests to compile data matching the user-supplied parameters. The `ndbc-api` package depends on: +* requests>=2.10.0 +* pandas +* bs4 +* html5lib>=1.1 +* xarray +* scipy + +##### Development +If you would like to contribute to the growth and maintenance of the `ndbc-api`, please feel free to open a PR with tests covering your changes. The tests leverage `pytest` and depend on the above requirements, as well as: +* coveralls +* httpretty +* pytest +* pytest-cov +* pyyaml +* pyarrow + +Breaking changes will be considered, especially in the current `alpha` state of the package on `PyPi`. As the API further matures, breaking changes will only be considered with new major versions (e.g. `N.0.0`). + +#### Example + +The `ndbc-api` exposes public methods through the `NdbcApi` class. + +```python3 +from ndbc_api import NdbcApi + +api = NdbcApi() +``` + +The `NdbcApi` provides a unified access point for NDBC data. All methods for obtaining data, metadata, and locating stations are available using the `api` object. The `get_data` method is the primary method for accessing NDBC data, and is used to retrieve measurements from a given station over a specified time range. This method can request data from the NDBC HTTP Data Service or the THREDDS data service, and return the data as a `pandas.DataFrame`, `xarray.Dataset` or python `dict` object. + +Data made available by the NDBC falls into two broad categories. + +1. Station metadata +2. Station measurements + +The `api` supports a range of public methods for accessing data from the above categories. + +##### Station metadata + +The `api` has five key public methods for accessing NDBC metadata. + +1. The `stations` method, which returns all NDBC stations. +2. 
The `nearest_station` method, which returns the station ID of the nearest station. +3. The `station` method, which returns station metadata from a given station ID. +4. The `available_realtime` method, which returns hyperlinks and measurement names for realtime measurements captured by a given station. +5. The `available_historical` method, which returns hyperlinks and measurement names for historical measurements captured by a given station. + +###### `stations` + +```python3 +# get all stations and some metadata as a Pandas DataFrame +stations_df = api.stations() +# parse the response as a dictionary +stations_dict = api.stations(as_df=False) +``` + +###### `nearest_station` + +```python3 +# specify desired latitude and longitude +lat = '38.88N' +lon = '76.43W' + +# find the station ID of the nearest NDBC station +nearest = api.nearest_station(lat=lat, lon=lon) +print(nearest_station) +``` + +```python3 +'tplm2' +``` + +###### `radial_search` + +```python3 +# specify desired latitude, longitude, radius, and units +lat = '38.88N' +lon = '76.43W' +radius = 100 +units = 'km' + +# find the station IDs of all NDBC stations within the radius +nearby_stations_df = api.radial_search(lat=lat, lon=lon, radius=radius, units=units) +``` + +```python3 +'tplm2' +``` + +###### `station` + +```python3 +# get station metadata +tplm2_meta = api.station(station_id='tplm2') +# parse the response as a Pandas DataFrame +tplm2_df = api.station(station_id='tplm2', as_df=True) +``` + +###### `available_realtime` + +```python3 +# get all available realtime measurements, periods, and hyperlinks +tplm2_realtime = api.available_realtime(station_id='tplm2') +# parse the response as a Pandas DataFrame +tplm2_realtime_df = api.available_realtime(station_id='tplm2', as_df=True) +``` + +###### `available_historical` + +```python3 +# get all available historical measurements, periods, and hyperlinks +tplm2_historical = api.available_historical(station_id='tplm2') +# parse the response as a Pandas DataFrame +tplm2_historical_df = api.available_historical(station_id='tplm2', as_df=True) +``` + +##### Station measurements + +The `api` has two public methods which support accessing supported NDBC station measurements. + +1. The `get_modes` method, which returns a list of supported `mode`s, corresponding to the data formats provided by the NDBC data service. For example, the `adcp` mode represents "Acoustic Doppler Current Profiler" measurements, providing information about ocean currents at different depths, while `cwind` represents "Continuous winds" data, offering high-frequency wind speed and direction measurements. + +Note that not all stations provide the same set of measurements. The `available_realtime` and `available_historical` methods can be called on a station-by station basis to ensure a station has the desired data available, before building and executing requests with `get_data`. + +2. The `get_data` method, which returns measurements of a given type for a given station. + +###### `get_modes` + +```python3 +# get the list of supported meterological measurement modes +modes = api.get_modes() +print(modes) +``` + +```python3 +[ + 'adcp', + 'cwind', + 'ocean', + 'spec', + 'stdmet', + 'supl', + 'swden', + 'swdir', + 'swdir2', + 'swr1', + 'swr2' +] +``` + +The mode values above map directly to the identifiers used buy the NDBC. Desriptions for each mode are presented below: +* `adcp`: Acoustic Doppler Current Profiler measurements, providing information about ocean currents at different depths. 
+* `cwind`: Continuous winds data, offering high-frequency wind speed and direction measurements. +* `ocean`: Oceanographic data, including water temperature, salinity, and wave measurements. +* `spec`: Spectral wave data, providing detailed information about wave energy and direction. +* `stdmet`: Standard meteorological data, including air temperature, pressure, wind speed, and visibility. +* `supl`: Supplemental measurements, which can vary depending on the specific buoy and its sensors. +* `swden`: Spectral wave density data, providing information about the distribution of wave energy across different frequencies. +* `swdir`: Spectral wave direction data, indicating the primary direction of wave energy. +* `swdir2`: Secondary spectral wave direction data, capturing additional wave direction information. +* `swr1`: First-order spectral wave data, providing basic wave height and period information. +* `swr2`: Second-order spectral wave data, offering more detailed wave measurements. + +###### `get_data` + +```python3 +# get all continuous wind (`cwind`) measurements for station tplm2 +cwind_df = api.get_data( + station_id='tplm2', + mode='cwind', + start_time='2020-01-01', + end_time='2022-09-15', +) +# return data as a dictionary +cwind_dict = api.get_data( + station_id='tplm2', + mode='cwind', + start_time='2020-01-01', + end_time='2022-09-15', + as_df=False +) +# get only the wind speed measurements +wspd_df = api.get_data( + station_id='tplm2', + mode='cwind', + start_time='2020-01-01', + end_time='2022-09-15', + as_df=True, + cols=['WSPD'] +) +# get all standard meterological (`stdmet`) measurements for stations tplm2 and apam2 +stdmet_df = api.get_data( + station_ids=['tplm2', 'apam2'], + mode='stdmet', + start_time='2022-01-01', + end_time='2023-01-01', +) +# get all (available) continuous wind and standard meterological measurements for stations tplm2 and apam2 +# for station apam2, this is unavailable and will log an error but not affect the rest of the results. +stdmet_df = api.get_data( + station_ids=['tplm2', 'apam2'], + modes=['stdmet', 'cwind'], + start_time='2022-01-01', + end_time='2023-01-01', +) +``` + +#### More Information +Please see [the included example notebook](/notebooks/overview.ipynb) for a more detailed walkthrough of the API's capabilities. + +#### Questions +If you have questions regarding the library please post them into +the [GitHub discussion forum](https://github.com/cdjellen/ndbc-api/discussions). 
+ diff --git a/ndbc_api-0.24.12.20.1.dist-info/RECORD b/ndbc_api-0.24.12.20.1.dist-info/RECORD new file mode 100644 index 0000000..38e1689 --- /dev/null +++ b/ndbc_api-0.24.12.20.1.dist-info/RECORD @@ -0,0 +1,160 @@ +ndbc_api-0.24.12.20.1.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4 +ndbc_api-0.24.12.20.1.dist-info/LICENSE,sha256=TiryywJpeKrL8U5y3TzkgssiBcol3m0ggialHRSsFmo,1075 +ndbc_api-0.24.12.20.1.dist-info/METADATA,sha256=F2Pm0j6ZPP3IwQdPk7yADX_L3xwpTKyB2vLVw_hnP20,11143 +ndbc_api-0.24.12.20.1.dist-info/RECORD,, +ndbc_api-0.24.12.20.1.dist-info/REQUESTED,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 +ndbc_api-0.24.12.20.1.dist-info/WHEEL,sha256=Nq82e9rUAnEjt98J6MlVmMCZb-t9cYE2Ir1kpBmnWfs,88 +ndbc_api/__init__.py,sha256=TsGy0a1wrd6bDGUms3Agl-gBbzam9Gxye7swSPxoPGc,112 +ndbc_api/__pycache__/__init__.cpython-311.pyc,, +ndbc_api/__pycache__/exceptions.cpython-311.pyc,, +ndbc_api/__pycache__/ndbc_api.cpython-311.pyc,, +ndbc_api/api/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 +ndbc_api/api/__pycache__/__init__.cpython-311.pyc,, +ndbc_api/api/handlers/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 +ndbc_api/api/handlers/__pycache__/__init__.cpython-311.pyc,, +ndbc_api/api/handlers/__pycache__/_base.cpython-311.pyc,, +ndbc_api/api/handlers/_base.py,sha256=j_I6tu8hvHAQ-zYCLJJghqTKRER2xGIafmDdCUeiPxo,30 +ndbc_api/api/handlers/http/__pycache__/data.cpython-311.pyc,, +ndbc_api/api/handlers/http/__pycache__/stations.cpython-311.pyc,, +ndbc_api/api/handlers/http/data.py,sha256=u1s5d0IMJZNVWSPSbELv_v8VZF-5rZjCzUILDs1inu4,12020 +ndbc_api/api/handlers/http/stations.py,sha256=cP8VLaDdPKGic8lGq6ia6P4uY6M4yYkJpdFO9jfvaO4,7893 +ndbc_api/api/handlers/opendap/__pycache__/data.cpython-311.pyc,, +ndbc_api/api/handlers/opendap/data.py,sha256=dzIiKkKX-ECgcxA3qhu-osJSgDpbKSuBC8_efpaoZJ4,7829 +ndbc_api/api/parsers/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 +ndbc_api/api/parsers/__pycache__/__init__.cpython-311.pyc,, +ndbc_api/api/parsers/http/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 +ndbc_api/api/parsers/http/__pycache__/__init__.cpython-311.pyc,, +ndbc_api/api/parsers/http/__pycache__/_base.cpython-311.pyc,, +ndbc_api/api/parsers/http/__pycache__/_html.cpython-311.pyc,, +ndbc_api/api/parsers/http/__pycache__/_station.cpython-311.pyc,, +ndbc_api/api/parsers/http/__pycache__/_xml.cpython-311.pyc,, +ndbc_api/api/parsers/http/__pycache__/active_stations.cpython-311.pyc,, +ndbc_api/api/parsers/http/__pycache__/adcp.cpython-311.pyc,, +ndbc_api/api/parsers/http/__pycache__/cwind.cpython-311.pyc,, +ndbc_api/api/parsers/http/__pycache__/historical_stations.cpython-311.pyc,, +ndbc_api/api/parsers/http/__pycache__/ocean.cpython-311.pyc,, +ndbc_api/api/parsers/http/__pycache__/spec.cpython-311.pyc,, +ndbc_api/api/parsers/http/__pycache__/station_historical.cpython-311.pyc,, +ndbc_api/api/parsers/http/__pycache__/station_metadata.cpython-311.pyc,, +ndbc_api/api/parsers/http/__pycache__/station_realtime.cpython-311.pyc,, +ndbc_api/api/parsers/http/__pycache__/stdmet.cpython-311.pyc,, +ndbc_api/api/parsers/http/__pycache__/supl.cpython-311.pyc,, +ndbc_api/api/parsers/http/__pycache__/swden.cpython-311.pyc,, +ndbc_api/api/parsers/http/__pycache__/swdir.cpython-311.pyc,, +ndbc_api/api/parsers/http/__pycache__/swdir2.cpython-311.pyc,, +ndbc_api/api/parsers/http/__pycache__/swr1.cpython-311.pyc,, +ndbc_api/api/parsers/http/__pycache__/swr2.cpython-311.pyc,, 
+ndbc_api/api/parsers/http/_base.py,sha256=Z8xBZYG15eCLNAy4t65tYsViM8Ta1J7NBMhdSS3qrHA,3517 +ndbc_api/api/parsers/http/_html.py,sha256=MTdBg_3VwIc0lU6c_FQSgV2lqms_H1XfSKimzLfk_Yw,630 +ndbc_api/api/parsers/http/_station.py,sha256=6dHJC0sFNP-i3fl7ivjV7zTTL8DX1ks_eCXSNxVahTU,1822 +ndbc_api/api/parsers/http/_xml.py,sha256=Xa41QRdGL4yybnrbp-MtjOI3FvTyqe2JEfmIhvbf6dc,727 +ndbc_api/api/parsers/http/active_stations.py,sha256=_pe77hDNH5ubmZz5x8yZ3TSOh6EArTEAutmD2y6EH2o,2456 +ndbc_api/api/parsers/http/adcp.py,sha256=U-ylFIe9Ssf65AnnwZI1yl_uD7mHq9eHMkoTfUi0H6E,2625 +ndbc_api/api/parsers/http/cwind.py,sha256=_mLfSkbZNo6eZLHIHHo-OvLLb6FtoVeGwF9WOLegtRM,459 +ndbc_api/api/parsers/http/historical_stations.py,sha256=hiWDLM_rWu9A9_o_RvQfcvHwZccrzSTV5GI2-QcMii8,2859 +ndbc_api/api/parsers/http/ocean.py,sha256=kXQfDxynyMfxM7wDaZAI1JfEVRqnUk4CrXVr8okNW8s,409 +ndbc_api/api/parsers/http/spec.py,sha256=IoVgf8k2Uhg6A3P9sWrpIT2iHLnfSn26OVrUSFl1_fc,468 +ndbc_api/api/parsers/http/station_historical.py,sha256=rzLCOH4_RVNGszW-kde5sA9etgLFcVIAwsKp6mZhQX4,1157 +ndbc_api/api/parsers/http/station_metadata.py,sha256=aAUrwUYwQAlbjrqDsOldz72dUouw0zj0yD-HiR0oFB4,1691 +ndbc_api/api/parsers/http/station_realtime.py,sha256=Br63W9w--vx6KJ1DxS6KkHygYRGuRLSCYAlfVVTMSfw,1005 +ndbc_api/api/parsers/http/stdmet.py,sha256=lcH3uSTYP7x0asX0KSlpoAwgI4q4m16aorc35-cUV5s,461 +ndbc_api/api/parsers/http/supl.py,sha256=JW_WPpqhFpCugNWen8ujNDH-gfSE9nXpRRZ0kKMcM4c,499 +ndbc_api/api/parsers/http/swden.py,sha256=vY1G7qr_JMUfzM4D01NeO0veL-CgTYWdDzILI6H7sg0,1420 +ndbc_api/api/parsers/http/swdir.py,sha256=prKFsbHWFRq4FzUCbzu31Qk5duzHF4DrjveVNLSSWtQ,1415 +ndbc_api/api/parsers/http/swdir2.py,sha256=kSRxn5dm-LB5bXIuqGUCoj3GoY9X7OAAot0IlNR55ug,1474 +ndbc_api/api/parsers/http/swr1.py,sha256=Cit-Qj7Jwf1psxkqReENgAxE_euR_DQlqDdBKkdRNiY,1413 +ndbc_api/api/parsers/http/swr2.py,sha256=-7X0UfpW19ZmEctJ1-P7mJ9P6WakZUtmE01LgddQ2NM,1413 +ndbc_api/api/parsers/opendap/__pycache__/_base.cpython-311.pyc,, +ndbc_api/api/parsers/opendap/__pycache__/adcp.cpython-311.pyc,, +ndbc_api/api/parsers/opendap/__pycache__/cwind.cpython-311.pyc,, +ndbc_api/api/parsers/opendap/__pycache__/ocean.cpython-311.pyc,, +ndbc_api/api/parsers/opendap/__pycache__/pwind.cpython-311.pyc,, +ndbc_api/api/parsers/opendap/__pycache__/stdmet.cpython-311.pyc,, +ndbc_api/api/parsers/opendap/__pycache__/swden.cpython-311.pyc,, +ndbc_api/api/parsers/opendap/__pycache__/wlevel.cpython-311.pyc,, +ndbc_api/api/parsers/opendap/_base.py,sha256=sHaTOjGjFD--IrzgT5kCIAENZ-0U0xK_yxBzkgaLA6w,2344 +ndbc_api/api/parsers/opendap/adcp.py,sha256=7h01yEDmVXq6LSGWHOS9SaCrZ7Ppdc-bNsb9NyR3E2w,471 +ndbc_api/api/parsers/opendap/cwind.py,sha256=2a19vj3XTYXRUmuqc25vNH8Q859bjJmESdSG7mz9Hlk,493 +ndbc_api/api/parsers/opendap/ocean.py,sha256=wrTcOns1AVBLbau2gWY5m5R5ZoekdOWj76ARcckqBpY,493 +ndbc_api/api/parsers/opendap/pwind.py,sha256=OC9EigROQbVnEibiyswWyJYpOSyFqCV8cLtGgyaXOG4,489 +ndbc_api/api/parsers/opendap/stdmet.py,sha256=IjiV0ghkZ4oxW0V5ST3NKh3L0VUb4BEL-3ehvJzU0vs,491 +ndbc_api/api/parsers/opendap/swden.py,sha256=0jthaqlYbabo0dogmgcJfUKbKRnf1okTGSJ_GLpqb20,477 +ndbc_api/api/parsers/opendap/wlevel.py,sha256=-VDOWhuhxSfqd5amh00o9o4WzDJIYlwkcwALAB-MkRY,479 +ndbc_api/api/requests/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 +ndbc_api/api/requests/__pycache__/__init__.cpython-311.pyc,, +ndbc_api/api/requests/http/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 +ndbc_api/api/requests/http/__pycache__/__init__.cpython-311.pyc,, +ndbc_api/api/requests/http/__pycache__/_base.cpython-311.pyc,, 
+ndbc_api/api/requests/http/__pycache__/_core.cpython-311.pyc,, +ndbc_api/api/requests/http/__pycache__/active_stations.cpython-311.pyc,, +ndbc_api/api/requests/http/__pycache__/adcp.cpython-311.pyc,, +ndbc_api/api/requests/http/__pycache__/cwind.cpython-311.pyc,, +ndbc_api/api/requests/http/__pycache__/historical_stations.cpython-311.pyc,, +ndbc_api/api/requests/http/__pycache__/ocean.cpython-311.pyc,, +ndbc_api/api/requests/http/__pycache__/spec.cpython-311.pyc,, +ndbc_api/api/requests/http/__pycache__/station_historical.cpython-311.pyc,, +ndbc_api/api/requests/http/__pycache__/station_metadata.cpython-311.pyc,, +ndbc_api/api/requests/http/__pycache__/station_realtime.cpython-311.pyc,, +ndbc_api/api/requests/http/__pycache__/stdmet.cpython-311.pyc,, +ndbc_api/api/requests/http/__pycache__/supl.cpython-311.pyc,, +ndbc_api/api/requests/http/__pycache__/swden.cpython-311.pyc,, +ndbc_api/api/requests/http/__pycache__/swdir.cpython-311.pyc,, +ndbc_api/api/requests/http/__pycache__/swdir2.cpython-311.pyc,, +ndbc_api/api/requests/http/__pycache__/swr1.cpython-311.pyc,, +ndbc_api/api/requests/http/__pycache__/swr2.cpython-311.pyc,, +ndbc_api/api/requests/http/_base.py,sha256=KaTUW5bfUTdwVP9EC_zsjbgtCyNiqf4c9aOBAC-Yr_c,4139 +ndbc_api/api/requests/http/_core.py,sha256=_4BB7cFffjHvxLLnESFnxelZy-hP_A5mbL5Q3EwiQk0,152 +ndbc_api/api/requests/http/active_stations.py,sha256=HADq5sZ7B6FQmnGrzI8xLPq9UhBnJ1Q87MXC7y_Cukk,256 +ndbc_api/api/requests/http/adcp.py,sha256=mzsGcvHdPOzyaE6HAl7kDiHJYw1zPqIJonD6R_PyB6Q,522 +ndbc_api/api/requests/http/cwind.py,sha256=xlZWwHINLxK33s1U0F4JHqjfVycWHIfatO-pTG4p-fk,527 +ndbc_api/api/requests/http/historical_stations.py,sha256=qIYJfT6QBf7_s4Xxo8L5iSnxvGy5pY1hO4j3pnxk-1I,270 +ndbc_api/api/requests/http/ocean.py,sha256=Gh8g4sQcwo1IeBZCVCfBaysbrU7i1kFDf3fJVfM9hg8,527 +ndbc_api/api/requests/http/spec.py,sha256=MvDkj6v6rpwNdahANNwvvYhoRnsQOrH_tSTEjQd10F8,489 +ndbc_api/api/requests/http/station_historical.py,sha256=QiKg87g4xU6JEJmh_f0duD44eWWuhhQ2gPHJFYppmUs,311 +ndbc_api/api/requests/http/station_metadata.py,sha256=2lAOeH65TR3Skdmfoj7D2skzJuCeeS7OXegbKNfheCg,290 +ndbc_api/api/requests/http/station_realtime.py,sha256=8c_86VmLIQlxsh0qB0-qtb7OtTuOpwZ3IIoU95UeC9w,312 +ndbc_api/api/requests/http/stdmet.py,sha256=6Fzi-dKJd5HAjPTbXHAKPwA6JAQVPLaOpRWVlZyQHTc,496 +ndbc_api/api/requests/http/supl.py,sha256=xw_E8xmQ7lhYyL3j_NV77dGlHOz-I8_1ZVMrNNIMp3s,522 +ndbc_api/api/requests/http/swden.py,sha256=RdB_U1L1jbPRCKRiunWFtckcK4Ee-XE8nP0ln6evseE,527 +ndbc_api/api/requests/http/swdir.py,sha256=X5qn5YVyae4SRXUPOYcKofrd8M1EXunZyszKYkCsFrw,527 +ndbc_api/api/requests/http/swdir2.py,sha256=SvjdxyPCDUdDfU5Bln48SqitHpt5X-iqenMIEBPaoww,532 +ndbc_api/api/requests/http/swr1.py,sha256=yLsyVmja3xaD_08OfznudPtctm2TGT6dfdaPQxppY1Y,522 +ndbc_api/api/requests/http/swr2.py,sha256=ijx5_oY1TgUpEPPWG3NiAEwPAhvlAdUpkckxJ0-U6RM,522 +ndbc_api/api/requests/opendap/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 +ndbc_api/api/requests/opendap/__pycache__/__init__.cpython-311.pyc,, +ndbc_api/api/requests/opendap/__pycache__/_base.cpython-311.pyc,, +ndbc_api/api/requests/opendap/__pycache__/_core.cpython-311.pyc,, +ndbc_api/api/requests/opendap/__pycache__/adcp.cpython-311.pyc,, +ndbc_api/api/requests/opendap/__pycache__/cwind.cpython-311.pyc,, +ndbc_api/api/requests/opendap/__pycache__/ocean.cpython-311.pyc,, +ndbc_api/api/requests/opendap/__pycache__/pwind.cpython-311.pyc,, +ndbc_api/api/requests/opendap/__pycache__/stdmet.cpython-311.pyc,, 
+ndbc_api/api/requests/opendap/__pycache__/swden.cpython-311.pyc,, +ndbc_api/api/requests/opendap/__pycache__/wlevel.cpython-311.pyc,, +ndbc_api/api/requests/opendap/_base.py,sha256=TG38-3r2RYepaxMbjCfBSBxZ2VjpSzs_S5DIFBIC8ME,3082 +ndbc_api/api/requests/opendap/_core.py,sha256=w2bQ1slnkrhR9gRHrQ09WOm4lnx1BPac7bWma7rx1wE,161 +ndbc_api/api/requests/opendap/adcp.py,sha256=rj60TbOdHL-KwNaDQXNecyT63MpDpq42zzQIspj5v0E,498 +ndbc_api/api/requests/opendap/cwind.py,sha256=lVasJezPLRkefioRaPa-DRJknSXB_ft4AAewHMSIabM,502 +ndbc_api/api/requests/opendap/ocean.py,sha256=vUrpA3iwRnFeseaF_6SFSEKKu53k7T2ZbO74k4QV88o,502 +ndbc_api/api/requests/opendap/pwind.py,sha256=qVJtC9InphYZ2-ia18pTW3jd1kOtwxdBHppFnxTyuIE,502 +ndbc_api/api/requests/opendap/stdmet.py,sha256=M-QJwEw030t78wpqBHv3JNTLVCzHy2GkYuX2DAvQNI8,506 +ndbc_api/api/requests/opendap/swden.py,sha256=tIyINwMDGfClpMHqr4WWDSnuw5AQnX2Gg5i8PPTEUP8,502 +ndbc_api/api/requests/opendap/wlevel.py,sha256=m8LfXRwo7Cs_W2rSScthdRpiZzv2PGD5AtmjzBmI2So,506 +ndbc_api/config/__init__.py,sha256=DcBR3iarhVpyd_-CRzcVFQfGm8SRlLq74IwYpkpeEzI,988 +ndbc_api/config/__pycache__/__init__.cpython-311.pyc,, +ndbc_api/exceptions.py,sha256=-Rmb_iG2Utzn_LEX0pWfitOz5STqx8uzhzHATpJjBEI,810 +ndbc_api/ndbc_api.py,sha256=otqSljNY8GeRk67LIcDQwQY7A7sN981YQgjtOIeiDio,36090 +ndbc_api/utilities/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 +ndbc_api/utilities/__pycache__/__init__.cpython-311.pyc,, +ndbc_api/utilities/__pycache__/log_formatter.cpython-311.pyc,, +ndbc_api/utilities/__pycache__/req_cache.cpython-311.pyc,, +ndbc_api/utilities/__pycache__/req_handler.cpython-311.pyc,, +ndbc_api/utilities/__pycache__/singleton.cpython-311.pyc,, +ndbc_api/utilities/log_formatter.py,sha256=DkgI_E4_ZmNyszFlPUn0kepCEVswdwbjuFt6Wb6TUf0,411 +ndbc_api/utilities/opendap/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 +ndbc_api/utilities/opendap/__pycache__/__init__.cpython-311.pyc,, +ndbc_api/utilities/opendap/__pycache__/dataset.cpython-311.pyc,, +ndbc_api/utilities/opendap/dataset.py,sha256=p_mlrQPVBwFxNkrknBuvi5-AgpHTygR0-GZIPwhhZbM,2837 +ndbc_api/utilities/req_cache.py,sha256=Gy2omhvFZ3VFgXcZ1ra33OLt4iT3RMniI_cAphbVecM,1538 +ndbc_api/utilities/req_handler.py,sha256=QUYiKy7kOr5n8pB3aaH1_bwi7Kqf87iQ5WakFH68KIo,9752 +ndbc_api/utilities/singleton.py,sha256=zg1Q1vENorS43FeOUovd05opjTU2J-uxuFSpK6uPU-4,337 diff --git a/ndbc_api-0.24.12.20.1.dist-info/REQUESTED b/ndbc_api-0.24.12.20.1.dist-info/REQUESTED new file mode 100644 index 0000000..e69de29 diff --git a/ndbc_api-0.24.12.20.1.dist-info/WHEEL b/ndbc_api-0.24.12.20.1.dist-info/WHEEL new file mode 100644 index 0000000..8b9b3a1 --- /dev/null +++ b/ndbc_api-0.24.12.20.1.dist-info/WHEEL @@ -0,0 +1,4 @@ +Wheel-Version: 1.0 +Generator: poetry-core 1.9.1 +Root-Is-Purelib: true +Tag: py3-none-any From b255e65942d6ec7053520d150bfb617693195ba3 Mon Sep 17 00:00:00 2001 From: Jack Griffin <106121980+BluIsBest@users.noreply.github.com> Date: Sat, 3 May 2025 22:11:55 -0400 Subject: [PATCH 06/47] Hoping the API upload works here We take no credit from the original creators of the NDBC-api for python. 
We only needed to change a single line to keep it functional for our purposes, and we are uploading it here to make the transition from downloading this code from GitHub to using it as easy as possible.
---
 __init__.py | 0
 config/__init__.py | 24 +
 config/__pycache__/__init__.cpython-311.pyc | Bin 0 -> 1224 bytes
 exceptions.py | 29 +
 ndbc_api.py | 832 ++++++++++++++++++
 parsers/http/_base.py | 107 +++
 parsers/http/_html.py | 21 +
 parsers/http/_station.py | 51 ++
 parsers/http/_xml.py | 28 +
 parsers/http/active_stations.py | 66 ++
 parsers/http/adcp.py | 138 +++
 parsers/http/cwind.py | 17 +
 parsers/http/historical_stations.py | 75 ++
 parsers/http/ocean.py | 16 +
 parsers/http/spec.py | 17 +
 parsers/http/station_historical.py | 34 +
 parsers/http/station_metadata.py | 49 ++
 parsers/http/station_realtime.py | 29 +
 parsers/http/stdmet.py | 17 +
 parsers/http/supl.py | 17 +
 parsers/http/swden.py | 71 ++
 parsers/http/swdir.py | 71 ++
 parsers/http/swdir2.py | 72 ++
 parsers/http/swr1.py | 71 ++
 parsers/http/swr2.py | 71 ++
 requests/__init__.py | 0
 requests/__pycache__/__init__.cpython-311.pyc | Bin 0 -> 194 bytes
 requests/http/__init__.py | 0
 .../http/__pycache__/__init__.cpython-311.pyc | Bin 0 -> 199 bytes
 .../http/__pycache__/_base.cpython-311.pyc | Bin 0 -> 6498 bytes
 .../http/__pycache__/_core.cpython-311.pyc | Bin 0 -> 701 bytes
 .../active_stations.cpython-311.pyc | Bin 0 -> 862 bytes
 .../http/__pycache__/adcp.cpython-311.pyc | Bin 0 -> 1225 bytes
 .../http/__pycache__/cwind.cpython-311.pyc | Bin 0 -> 1257 bytes
 .../historical_stations.cpython-311.pyc | Bin 0 -> 884 bytes
 .../http/__pycache__/ocean.cpython-311.pyc | Bin 0 -> 1257 bytes
 .../http/__pycache__/spec.cpython-311.pyc | Bin 0 -> 1189 bytes
 .../station_historical.cpython-311.pyc | Bin 0 -> 934 bytes
 .../station_metadata.cpython-311.pyc | Bin 0 -> 916 bytes
 .../station_realtime.cpython-311.pyc | Bin 0 -> 930 bytes
 .../http/__pycache__/stdmet.cpython-311.pyc | Bin 0 -> 1223 bytes
 .../http/__pycache__/supl.cpython-311.pyc | Bin 0 -> 1225 bytes
 .../http/__pycache__/swden.cpython-311.pyc | Bin 0 -> 1257 bytes
 .../http/__pycache__/swdir.cpython-311.pyc | Bin 0 -> 1257 bytes
 .../http/__pycache__/swdir2.cpython-311.pyc | Bin 0 -> 1262 bytes
 .../http/__pycache__/swr1.cpython-311.pyc | Bin 0 -> 1225 bytes
 .../http/__pycache__/swr2.cpython-311.pyc | Bin 0 -> 1225 bytes
 requests/http/_base.py | 105 +++
 requests/http/_core.py | 7 +
 requests/http/active_stations.py | 10 +
 requests/http/adcp.py | 17 +
 requests/http/cwind.py | 17 +
 requests/http/historical_stations.py | 10 +
 requests/http/ocean.py | 17 +
 requests/http/spec.py | 16 +
 requests/http/station_historical.py | 10 +
 requests/http/station_metadata.py | 10 +
 requests/http/station_realtime.py | 10 +
 requests/http/stdmet.py | 16 +
 requests/http/supl.py | 17 +
 requests/http/swden.py | 17 +
 requests/http/swdir.py | 17 +
 requests/http/swdir2.py | 17 +
 requests/http/swr1.py | 17 +
 requests/http/swr2.py | 17 +
 requests/opendap/__init__.py | 0
 .../__pycache__/__init__.cpython-311.pyc | Bin 0 -> 202 bytes
 .../opendap/__pycache__/_base.cpython-311.pyc | Bin 0 -> 4449 bytes
 .../opendap/__pycache__/_core.cpython-311.pyc | Bin 0 -> 713 bytes
 .../opendap/__pycache__/adcp.cpython-311.pyc | Bin 0 -> 1202 bytes
 .../opendap/__pycache__/cwind.cpython-311.pyc | Bin 0 -> 1233 bytes
 .../opendap/__pycache__/ocean.cpython-311.pyc | Bin 0 -> 1233 bytes
 .../opendap/__pycache__/pwind.cpython-311.pyc | Bin 0 -> 1233 bytes
 .../__pycache__/stdmet.cpython-311.pyc | Bin 0 -> 1237 bytes
.../opendap/__pycache__/swden.cpython-311.pyc | Bin 0 -> 1233 bytes .../__pycache__/wlevel.cpython-311.pyc | Bin 0 -> 1237 bytes requests/opendap/_base.py | 82 ++ requests/opendap/_core.py | 7 + requests/opendap/adcp.py | 16 + requests/opendap/cwind.py | 16 + requests/opendap/ocean.py | 16 + requests/opendap/pwind.py | 16 + requests/opendap/stdmet.py | 16 + requests/opendap/swden.py | 16 + requests/opendap/wlevel.py | 16 + utilities/__init__.py | 0 .../__pycache__/__init__.cpython-311.pyc | Bin 0 -> 191 bytes .../__pycache__/log_formatter.cpython-311.pyc | Bin 0 -> 1178 bytes .../__pycache__/req_cache.cpython-311.pyc | Bin 0 -> 3410 bytes .../__pycache__/req_handler.cpython-311.pyc | Bin 0 -> 13291 bytes .../__pycache__/singleton.cpython-311.pyc | Bin 0 -> 937 bytes utilities/log_formatter.py | 16 + utilities/opendap/__init__.py | 0 .../__pycache__/__init__.cpython-311.pyc | Bin 0 -> 199 bytes .../__pycache__/dataset.cpython-311.pyc | Bin 0 -> 3713 bytes utilities/opendap/dataset.py | 88 ++ utilities/req_cache.py | 48 + utilities/req_handler.py | 229 +++++ utilities/singleton.py | 14 + 99 files changed, 2866 insertions(+) create mode 100644 __init__.py create mode 100644 config/__init__.py create mode 100644 config/__pycache__/__init__.cpython-311.pyc create mode 100644 exceptions.py create mode 100644 ndbc_api.py create mode 100644 parsers/http/_base.py create mode 100644 parsers/http/_html.py create mode 100644 parsers/http/_station.py create mode 100644 parsers/http/_xml.py create mode 100644 parsers/http/active_stations.py create mode 100644 parsers/http/adcp.py create mode 100644 parsers/http/cwind.py create mode 100644 parsers/http/historical_stations.py create mode 100644 parsers/http/ocean.py create mode 100644 parsers/http/spec.py create mode 100644 parsers/http/station_historical.py create mode 100644 parsers/http/station_metadata.py create mode 100644 parsers/http/station_realtime.py create mode 100644 parsers/http/stdmet.py create mode 100644 parsers/http/supl.py create mode 100644 parsers/http/swden.py create mode 100644 parsers/http/swdir.py create mode 100644 parsers/http/swdir2.py create mode 100644 parsers/http/swr1.py create mode 100644 parsers/http/swr2.py create mode 100644 requests/__init__.py create mode 100644 requests/__pycache__/__init__.cpython-311.pyc create mode 100644 requests/http/__init__.py create mode 100644 requests/http/__pycache__/__init__.cpython-311.pyc create mode 100644 requests/http/__pycache__/_base.cpython-311.pyc create mode 100644 requests/http/__pycache__/_core.cpython-311.pyc create mode 100644 requests/http/__pycache__/active_stations.cpython-311.pyc create mode 100644 requests/http/__pycache__/adcp.cpython-311.pyc create mode 100644 requests/http/__pycache__/cwind.cpython-311.pyc create mode 100644 requests/http/__pycache__/historical_stations.cpython-311.pyc create mode 100644 requests/http/__pycache__/ocean.cpython-311.pyc create mode 100644 requests/http/__pycache__/spec.cpython-311.pyc create mode 100644 requests/http/__pycache__/station_historical.cpython-311.pyc create mode 100644 requests/http/__pycache__/station_metadata.cpython-311.pyc create mode 100644 requests/http/__pycache__/station_realtime.cpython-311.pyc create mode 100644 requests/http/__pycache__/stdmet.cpython-311.pyc create mode 100644 requests/http/__pycache__/supl.cpython-311.pyc create mode 100644 requests/http/__pycache__/swden.cpython-311.pyc create mode 100644 requests/http/__pycache__/swdir.cpython-311.pyc create mode 100644 
requests/http/__pycache__/swdir2.cpython-311.pyc create mode 100644 requests/http/__pycache__/swr1.cpython-311.pyc create mode 100644 requests/http/__pycache__/swr2.cpython-311.pyc create mode 100644 requests/http/_base.py create mode 100644 requests/http/_core.py create mode 100644 requests/http/active_stations.py create mode 100644 requests/http/adcp.py create mode 100644 requests/http/cwind.py create mode 100644 requests/http/historical_stations.py create mode 100644 requests/http/ocean.py create mode 100644 requests/http/spec.py create mode 100644 requests/http/station_historical.py create mode 100644 requests/http/station_metadata.py create mode 100644 requests/http/station_realtime.py create mode 100644 requests/http/stdmet.py create mode 100644 requests/http/supl.py create mode 100644 requests/http/swden.py create mode 100644 requests/http/swdir.py create mode 100644 requests/http/swdir2.py create mode 100644 requests/http/swr1.py create mode 100644 requests/http/swr2.py create mode 100644 requests/opendap/__init__.py create mode 100644 requests/opendap/__pycache__/__init__.cpython-311.pyc create mode 100644 requests/opendap/__pycache__/_base.cpython-311.pyc create mode 100644 requests/opendap/__pycache__/_core.cpython-311.pyc create mode 100644 requests/opendap/__pycache__/adcp.cpython-311.pyc create mode 100644 requests/opendap/__pycache__/cwind.cpython-311.pyc create mode 100644 requests/opendap/__pycache__/ocean.cpython-311.pyc create mode 100644 requests/opendap/__pycache__/pwind.cpython-311.pyc create mode 100644 requests/opendap/__pycache__/stdmet.cpython-311.pyc create mode 100644 requests/opendap/__pycache__/swden.cpython-311.pyc create mode 100644 requests/opendap/__pycache__/wlevel.cpython-311.pyc create mode 100644 requests/opendap/_base.py create mode 100644 requests/opendap/_core.py create mode 100644 requests/opendap/adcp.py create mode 100644 requests/opendap/cwind.py create mode 100644 requests/opendap/ocean.py create mode 100644 requests/opendap/pwind.py create mode 100644 requests/opendap/stdmet.py create mode 100644 requests/opendap/swden.py create mode 100644 requests/opendap/wlevel.py create mode 100644 utilities/__init__.py create mode 100644 utilities/__pycache__/__init__.cpython-311.pyc create mode 100644 utilities/__pycache__/log_formatter.cpython-311.pyc create mode 100644 utilities/__pycache__/req_cache.cpython-311.pyc create mode 100644 utilities/__pycache__/req_handler.cpython-311.pyc create mode 100644 utilities/__pycache__/singleton.cpython-311.pyc create mode 100644 utilities/log_formatter.py create mode 100644 utilities/opendap/__init__.py create mode 100644 utilities/opendap/__pycache__/__init__.cpython-311.pyc create mode 100644 utilities/opendap/__pycache__/dataset.cpython-311.pyc create mode 100644 utilities/opendap/dataset.py create mode 100644 utilities/req_cache.py create mode 100644 utilities/req_handler.py create mode 100644 utilities/singleton.py diff --git a/__init__.py b/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/config/__init__.py b/config/__init__.py new file mode 100644 index 0000000..f84c255 --- /dev/null +++ b/config/__init__.py @@ -0,0 +1,24 @@ +"""Stores the configuration information for the NDBC API. + +Attributes: + LOGGER_NAME (:str:): The name for the `logging.Logger` in the api instance. + DEFAULT_CACHE_LIMIT (:int:): The station level limit for caching NDBC data + service requests. + VERIFY_HTTPS (:bool:): Whether to execute requests using HTTPS rather than + HTTP. 
+ HTTP_RETRY (:int:): The number of times to retry requests to the NDBC data + service. + HTTP_BACKOFF_FACTOR (:float:): The backoff factor used when executing retry + requests to the NDBC data service. + HTTP_DELAY (:int:) The delay between requests submitted to the NDBC data + service, in milliseconds. + HTTP_DEBUG (:bool:): Whether to log requests and responses to the NDBC API's + log (a `logging.Logger`) as debug messages. +""" +LOGGER_NAME = 'NDBC-API' +DEFAULT_CACHE_LIMIT = 36 +VERIFY_HTTPS = True +HTTP_RETRY = 5 +HTTP_BACKOFF_FACTOR = 0.8 +HTTP_DELAY = 2000 +HTTP_DEBUG = False diff --git a/config/__pycache__/__init__.cpython-311.pyc b/config/__pycache__/__init__.cpython-311.pyc new file mode 100644 index 0000000000000000000000000000000000000000..4e271f334741a975f84941070c5a779dfcdba0b9 GIT binary patch literal 1224 zcmZ`(U2hUW6kSRyNQjeVOKqa{&(05O^^EEJ`M(5zEf_&FAWcYB?iVv3_>ojZ_#p$SJUe1eoHs1YIR#PTFsrN4!F!0 zb6nUQ=jqa8>LQnUOvE%6wg$j};#>(9IEIrcavB~pi=vQziYOO+O7u?aHtPq*wyt;f zfWZs`S5mwmP|yU*4iNoJEhscOhaxTsrYAtHc-sK`36cCV1(%W0)%5Pc_2m7?GXXrX zkzk&bKLB(Q4zD=ijr8QdRV?6dsnxgbdflk2RlVH>BHImcqLYcOkAZC?8(UyCYCP4lhR_l33c%hnN*xm;7G7|WP(8gxNo8+yfb^A z%bxI<>oQKEBgxFBSjn2UwcohiBxtNFHtZ8v@T0)zQ*o0zpn0G1gi;>b#OP*MmJsHM zP&0B6n8UF{A&D

BS2GIC}z@Og~$J+4=rWj-BNavF$@w|hA~-PxDu1% zE&V)MTkO3_?y6ZP#W=oh>}*3Uc@l#jKa~r4`!CzoN`D_j%LD+%YfV0pI-jHVKAQw!zdpOFS9SbfJ82R6&3(r{>AVLOy}uT zDs{eeKec?W$my|C*ch#j*B_lK#c`oHS{vu@jdJ7l4Y|%gfH41Xl#@@GhvWYMQ&o6j literal 0 HcmV?d00001 diff --git a/exceptions.py b/exceptions.py new file mode 100644 index 0000000..2c6caaa --- /dev/null +++ b/exceptions.py @@ -0,0 +1,29 @@ +class NdbcException(Exception): + """Base exception that all other NDBC exceptions subclass from.""" + + def __init__(self, message: str = ''): # pragma: no cover + self.message = message + super().__init__(self.message) + + def __str__(self): # pragma: no cover + return f"NDBC API: {self.message or 'unspecified error'}" + + +class TimestampException(NdbcException): + """Unable to handle given timestamp.""" + + +class RequestException(NdbcException): + """Unable to build the given request.""" + + +class ResponseException(NdbcException): + """Unable to handle the given response.""" + + +class ParserException(NdbcException): + """Unable to parse the given response.""" + + +class HandlerException(NdbcException): + """Error when handling this API call.""" diff --git a/ndbc_api.py b/ndbc_api.py new file mode 100644 index 0000000..7da62ee --- /dev/null +++ b/ndbc_api.py @@ -0,0 +1,832 @@ +"""An API for retrieving data from the NDBC. + +This module defines the `NdbcApi`, the top-level object which creates, handles, +caches, parses, and returns NDBC data. + +Example: + ```python3 + from ndbc_api import NdbcApi + api = NdbcApi() + available_stations = api.stations() + modes = api.get_modes() + df_stdmet_tplm2 = api.get_data( + 'tplm2', + 'stdmet', + '2020-01-01', + '2022-01-01', + as_df=True + ) + ``` + +Attributes: + log (:obj:`logging.Logger`): The logger at which to register HTTP + request and response status codes and headers used for debug + purposes. + headers(:dict:): The request headers for use in the NDBC API's request + handler. +""" +import logging +import pickle +import warnings +from concurrent.futures import ThreadPoolExecutor, as_completed +from datetime import datetime, timedelta +from typing import Any, List, Sequence, Tuple, Union, Dict, Optional + +import xarray +import pandas as pd + +from .api.handlers.http.data import DataHandler +from .api.handlers.http.stations import StationsHandler +from .config import (DEFAULT_CACHE_LIMIT, HTTP_BACKOFF_FACTOR, HTTP_DEBUG, + HTTP_DELAY, HTTP_RETRY, LOGGER_NAME, VERIFY_HTTPS) +from .exceptions import (HandlerException, ParserException, RequestException, + ResponseException, TimestampException) +from .utilities.req_handler import RequestHandler +from .utilities.singleton import Singleton +from .utilities.log_formatter import LogFormatter +from .api.handlers.opendap.data import OpenDapDataHandler +from .utilities.opendap.dataset import concat_datasets, merge_datasets, filter_dataset_by_variable, filter_dataset_by_time_range + + +class NdbcApi(metaclass=Singleton): + """An API for querying the National Data Buoy Center. + + The `NdbcApi` is metaclassed as a singleton to conserve NDBC resources. It + uses two private handlers to build requests and parse responses to the NDBC + over HTTP(s). It also uses a LRU-cached request handler to execute requests + against the NDBC, logging response statuses as they are executed. + + Attributes: + logging_level: The `logging.Logger`s log level, 1 if the `debug` + flag is set in the `__init__` method, and 0 otherwise. + cache_limit: The handler's global limit for caching + `NdbcApi` responses. 
This is implemented as a least-recently
+            used cache, designed to conserve NDBC resources when querying
+            measurements for a given station over similar time ranges.
+        delay: The HTTP(s) request delay parameter, in seconds.
+        retries: The number of times to retry a request to the NDBC data
+            service.
+        backoff_factor: The back-off parameter, used in conjunction with
+            `retries` to re-attempt requests to the NDBC data service.
+        verify_https: A flag which indicates whether to attempt requests to the
+            NDBC data service over HTTP or HTTPS.
+        debug: A flag for verbose logging and response-level status reporting.
+            Affects the instance's `logging.Logger` and the behavior of its
+            private `RequestHandler` instance.
+    """
+
+    logger = logging.getLogger(LOGGER_NAME)
+    warnings.simplefilter(action='ignore', category=FutureWarning)
+
+    def __init__(
+        self,
+        logging_level: int = logging.WARNING if HTTP_DEBUG else logging.ERROR,
+        filename: Any = None,
+        cache_limit: int = DEFAULT_CACHE_LIMIT,
+        headers: dict = {},
+        delay: int = HTTP_DELAY,
+        retries: int = HTTP_RETRY,
+        backoff_factor: float = HTTP_BACKOFF_FACTOR,
+        verify_https: bool = VERIFY_HTTPS,
+        debug: bool = HTTP_DEBUG,
+    ):
+        """Initializes the singleton `NdbcApi`, sets associated handlers."""
+        self.cache_limit = cache_limit
+        self.headers = headers
+        self._handler = self._get_request_handler(
+            cache_limit=self.cache_limit,
+            delay=delay,
+            retries=retries,
+            backoff_factor=backoff_factor,
+            headers=self.headers,
+            debug=debug,
+            verify_https=verify_https,
+        )
+        self._stations_api = StationsHandler
+        self._data_api = DataHandler
+        self._opendap_data_api = OpenDapDataHandler
+        self.configure_logging(level=logging_level, filename=filename)
+
+    def dump_cache(self, dest_fp: Union[str, None] = None) -> Union[dict, None]:
+        """Dump the request cache to dict or the specified filepath.
+
+        Dump the request, response pairs stored in the `NdbcApi`'s
+        `RequestHandler` as a `dict`, either returning the object, if no
+        `dest_fp` is specified, or serializing (pickling) the object and writing
+        it to the specified `dest_fp`.
+
+        Args:
+            dest_fp: The destination filepath for the serialized `RequestsCache`
+                contents.
+
+        Returns:
+            The cached request, response pairs as a `dict`, or `None` if a
+            `dest_fp` is specified when calling the method.
+ """ + data = dict() + ids = [r.id_ for r in self._handler.stations] + caches = [r.reqs.cache for r in self._handler.stations] + if ids: + for station_id, cache in zip(ids, caches): + data[station_id] = dict() + reqs = cache.keys() + for req in reqs: + resp = cache[req].v + data[station_id][req] = resp + if dest_fp: + with open(dest_fp, 'wb') as f: + pickle.dump(data, f) + else: + return data + + def clear_cache(self) -> None: + """Clear the request cache and create a new handler.""" + del self._handler + self._handler = self._get_request_handler( + cache_limit=self.cache_limit, + delay=HTTP_DELAY, + retries=HTTP_RETRY, + backoff_factor=HTTP_BACKOFF_FACTOR, + headers=self.headers, + debug=HTTP_DEBUG, + verify_https=VERIFY_HTTPS, + ) + + def set_cache_limit(self, new_limit: int) -> None: + """Set the cache limit for the API's request cache.""" + self._handler.set_cache_limit(cache_limit=new_limit) + + def get_cache_limit(self) -> int: + """Get the cache limit for the API's request cache.""" + return self._handler.get_cache_limit() + + def get_headers(self) -> dict: + """Return the current headers used by the request handler.""" + return self._handler.get_headers() + + def update_headers(self, new: dict) -> None: + """Add new headers to the request handler.""" + self._handler.update_headers(new) + + def set_headers(self, request_headers: dict) -> None: + """Reset the request headers using the new supplied headers.""" + self._handler.set_headers(request_headers) + + def configure_logging(self, level=logging.WARNING, filename=None) -> None: + """Configures logging for the NdbcApi. + + Args: + level (int, optional): The logging level. Defaults to logging.WARNING. + filename (str, optional): If provided, logs to the specified file. + """ + self.logger.setLevel(level) + + handler: logging.Handler + formatter: logging.Formatter + + for handler in self.logger.handlers[:]: + self.logger.removeHandler(handler) + + if filename: + handler = logging.FileHandler(filename) + formatter = logging.Formatter( + '[%(asctime)s][%(levelname)s]: %(message)s') + else: + handler = logging.StreamHandler() + formatter = LogFormatter('[%(levelname)s]: %(message)s') + + handler.setFormatter(formatter) + self.logger.addHandler(handler) + + def log(self, + level: int, + station_id: Union[int, str, None] = None, + mode: Union[str, None] = None, + message: Union[str, None] = None, + **extra_data) -> None: + """Logs a structured message with metadata. + + Args: + level (int): The logging level. + station_id (str, optional): The NDBC station ID. + mode (str, optional): The data mode. + message (str, optional): The log message. + **extra_data: Additional key-value pairs to include in the log. + """ + log_data = {} + if station_id: + log_data['station_id'] = station_id + if mode: + log_data['mode'] = mode + if message: + log_data['message'] = message + for k, v in extra_data.items(): + log_data[k] = v + self.logger.log(level, log_data) + + def stations(self, as_df: bool = True) -> Union[pd.DataFrame, dict]: + """Get all stations and station metadata from the NDBC. + + Query the NDBC data service for the current available data buoys + (stations), both those maintained by the NDBC and those whose + measurements are managed by the NDBC. Stations are returned by default + as rows of a `pandas.DataFrame`, alongside their realtime data coverage + for some common measurements, their latitude and longitude, and current + station status notes maintained by the NDBC. 
+
+        Args:
+            as_df: Flag indicating whether to return current station data as a
+                `pandas.DataFrame` if set to `True` or as a `dict` if `False`.
+
+        Returns:
+            The current station data from the NDBC data service, either as a
+            `pandas.DataFrame` or as a `dict` depending on the value of `as_df`.
+
+        Raises:
+            ResponseException: An error occurred while retrieving and parsing
+                responses from the NDBC data service.
+        """
+        try:
+            data = self._stations_api.stations(handler=self._handler)
+            return self._handle_data(data, as_df, cols=None)
+        except (ResponseException, ValueError, KeyError) as e:
+            raise ResponseException('Failed to handle returned data.') from e
+
+    def historical_stations(self,
+                            as_df: bool = True) -> Union[pd.DataFrame, dict]:
+        """Get historical stations and station metadata from the NDBC.
+
+        Query the NDBC data service for the historical data buoys
+        (stations), both those maintained by the NDBC and those which are not.
+        Stations are returned by default as rows of a `pandas.DataFrame`,
+        alongside their historical data coverage, with one row per tuple of
+        (station, historical deployment).
+
+        Args:
+            as_df: Flag indicating whether to return historical station data
+                as a `pandas.DataFrame` if set to `True` or as a `dict` if `False`.
+
+        Returns:
+            The historical station data from the NDBC data service, either as a
+            `pandas.DataFrame` or as a `dict` depending on the value of `as_df`.
+
+        Raises:
+            ResponseException: An error occurred while retrieving and parsing
+                responses from the NDBC data service.
+        """
+        try:
+            data = self._stations_api.historical_stations(handler=self._handler)
+            return self._handle_data(data, as_df, cols=None)
+        except (ResponseException, ValueError, KeyError) as e:
+            raise ResponseException('Failed to handle returned data.') from e
+
+    def nearest_station(
+        self,
+        lat: Union[str, float, None] = None,
+        lon: Union[str, float, None] = None,
+    ) -> str:
+        """Get nearest station to the specified lat/lon.
+
+        Use the NDBC data service's current station data to determine the
+        nearest station to the specified latitude and longitude (either as
+        `float` or as DD.dd[E/W] strings).
+
+        Args:
+            lat: The latitude of interest, used to determine the closest
+                maintained station to the given position.
+            lon: The longitude of interest, used to determine the closest
+                maintained station to the given position.
+
+        Returns:
+            The station id (e.g. `'tplm2'` or `'41001'`) of the nearest station
+            with active measurements to the specified lat/lon pair.
+
+        Raises:
+            ValueError: The latitude and longitude were not both specified when
+                querying for the closest station.
+        """
+        if not (lat and lon):
+            raise ValueError('lat and lon must be specified.')
+        nearest_station = self._stations_api.nearest_station(
+            handler=self._handler, lat=lat, lon=lon)
+        return nearest_station
+
+    def radial_search(
+        self,
+        lat: Union[str, float, None] = None,
+        lon: Union[str, float, None] = None,
+        radius: float = -1,
+        units: str = 'km',
+    ) -> pd.DataFrame:
+        """Get all stations within radius units of the specified lat/lon.
+
+        Use the NDBC data service's current station data to determine the
+        stations within radius of the specified latitude and longitude
+        (passed either as `float` or as DD.dd[E/W] strings).
+
+        Args:
+            lat (float): The latitude of interest, used to determine the maintained
+                stations within radius units of the given position.
+            lon (float): The longitude of interest, used to determine the maintained
+                stations within radius units of the given position.
+            radius (float): The radius in the specified units to search for stations
+                within.
+            units (str): The units of the radius, either 'nm', 'km', or 'mi'.
+
+        Returns:
+            A `pandas.DataFrame` of the stations within the specified radius of
+            the given lat/lon pair.
+
+        Raises:
+            ValueError: The latitude and longitude were not both specified when
+                searching for stations within the radius, or the radius or units
+                are invalid.
+        """
+        if not (lat and lon):
+            raise ValueError('lat and lon must be specified.')
+        stations_in_radius = self._stations_api.radial_search(
+            handler=self._handler, lat=lat, lon=lon, radius=radius, units=units)
+        return stations_in_radius
+
+    def station(self,
+                station_id: Union[str, int],
+                as_df: bool = False) -> Union[pd.DataFrame, dict]:
+        """Get metadata for the given station from the NDBC.
+
+        The NDBC maintains some station-level metadata including status notes,
+        location information, inclement weather warnings, and measurement notes.
+        This method is used to request, handle, and parse the metadata for the
+        given station from the station's NDBC webpage.
+
+        Args:
+            station_id: The NDBC station ID (e.g. `'tplm2'` or `41001`) for the
+                station of interest.
+            as_df: Whether to return station-level data as a `pandas.DataFrame`,
+                defaults to `False`, and a `dict` is returned.
+
+        Returns:
+            The station metadata for the given station, either as a `dict` or as
+            a `pandas.DataFrame` if the `as_df` flag is set to `True`.
+
+        Raises:
+            ResponseException: An error occurred when requesting and parsing
+                responses for the specified station.
+        """
+        station_id = self._parse_station_id(station_id)
+        try:
+            data = self._stations_api.metadata(handler=self._handler,
+                                               station_id=station_id)
+            return self._handle_data(data, as_df, cols=None)
+        except (ResponseException, ValueError, KeyError) as e:
+            raise ResponseException('Failed to handle returned data.') from e
+
+    def available_realtime(
+        self,
+        station_id: Union[str, int],
+        full_response: bool = False,
+        as_df: Optional[bool] = None,
+    ) -> Union[List[str], pd.DataFrame, dict]:
+        """Get the available realtime modalities for a station.
+
+        While most data buoy (station) measurements are available over
+        multi-year time ranges, some measurements become deprecated or
+        unavailable for substantial periods of time. This method queries the
+        NDBC station webpage for those measurements, and their links, which are
+        available or were available over the last 45 days.
+
+        Args:
+            station_id: The NDBC station ID (e.g. `'tplm2'` or `41001`) for the
+                station of interest.
+            full_response: Whether to return the full response from the NDBC
+                API, defaults to `False` and a list of modes from `get_modes()`
+                is returned. If `True`, the full URL for each data mode is
+                included in the returned `dict` or `pandas.DataFrame`.
+            as_df: Whether to return station-level data as a `pandas.DataFrame`,
+                defaults to `False`, and a `dict` is returned.
+
+        Returns:
+            The available realtime measurements for the specified station,
+            alongside their NDBC data links, either as a `dict` or as a
+            `pandas.DataFrame` if the `as_df` flag is set to `True`.
+
+        Raises:
+            ResponseException: An error occurred when requesting and parsing
+                responses for the specified station.
+        """
+        station_id = self._parse_station_id(station_id)
+        try:
+            station_realtime = self._stations_api.realtime(
+                handler=self._handler, station_id=station_id)
+            full_data = {}
+            if full_response:
+                if as_df is None:
+                    as_df = False
+                full_data = self._handle_data(station_realtime,
+                                              as_df,
+                                              cols=None)
+                return full_data
+            else:
+                full_data = self._handle_data(station_realtime,
+                                              as_df=False,
+                                              cols=None)
+
+            # Parse the modes from the full response
+            _modes = self.get_modes()
+            station_modes = set()
+            for k in full_data:
+                for m in _modes:
+                    if m in full_data[k]['description']:
+                        station_modes.add(m)
+            return list(station_modes)
+        except (ResponseException, ValueError, KeyError) as e:
+            raise ResponseException('Failed to handle returned data.') from e
+
+    def available_historical(self,
+                             station_id: Union[str, int],
+                             as_df: bool = False) -> Union[pd.DataFrame, dict]:
+        """Get the available historical measurements for a station.
+
+        This method queries the NDBC station webpage for historical, quality
+        controlled measurements and their associated availability time ranges.
+
+        Args:
+            station_id: The NDBC station ID (e.g. `'tplm2'` or `41001`) for the
+                station of interest.
+            as_df: Whether to return station-level data as a `pandas.DataFrame`,
+                defaults to `False`, and a `dict` is returned.
+
+        Returns:
+            The available historical measurements for the specified station,
+            alongside their NDBC data links, either as a `dict` or as a
+            `pandas.DataFrame` if the `as_df` flag is set to `True`.
+
+        Raises:
+            ResponseException: An error occurred when requesting and parsing
+                responses for the specified station.
+        """
+        station_id = self._parse_station_id(station_id)
+        try:
+            data = self._stations_api.historical(handler=self._handler,
+                                                 station_id=station_id)
+            return self._handle_data(data, as_df, cols=None)
+        except (ResponseException, ValueError, KeyError) as e:
+            raise ResponseException('Failed to handle returned data.') from e
+
+    def get_data(
+        self,
+        station_id: Union[int, str, None] = None,
+        mode: Union[str, None] = None,
+        start_time: Union[str, datetime] = datetime.now() - timedelta(days=30),
+        end_time: Union[str, datetime] = datetime.now(),
+        use_timestamp: bool = True,
+        as_df: bool = True,
+        cols: List[str] = None,
+        station_ids: Union[Sequence[Union[int, str]], None] = None,
+        modes: Union[List[str], None] = None,
+        as_xarray_dataset: bool = False,
+        use_opendap: Optional[bool] = None,
+    ) -> Union[pd.DataFrame, xarray.Dataset, dict]:
+        """Execute data query against the specified NDBC station(s).
+
+        Query the NDBC data service for station-level measurements, using the
+        `mode` parameter to determine the measurement type (e.g. `'stdmet'` for
+        standard meteorological data or `'cwind'` for continuous winds data). The
+        time range and data columns of interest may also be specified, such that
+        a tailored set of requests are executed against the NDBC data service to
+        generate a single `pandas.DataFrame` or `dict` matching the conditions
+        specified in the method call. When calling `get_data` with `station_ids`
+        the station identifier is added as a column to the returned data.
+
+        Args:
+            station_id: The NDBC station ID (e.g. `'tplm2'` or `41001`) for the
+                station of interest.
+            station_ids: A list of NDBC station IDs (e.g. `['tplm2', '41001']`)
+                for the stations of interest.
+            mode: The data measurement type to query for the station (e.g.
+                `'stdmet'` for standard meteorological data or `'cwind'` for
+                continuous winds data).
+            modes: A list of data measurement types to query for the stations
+                (e.g. `['stdmet', 'cwind']`).
+            start_time: The first timestamp of interest (in UTC) for the data
+                query, defaulting to 30 days before the current system time.
+            end_time: The last timestamp of interest (in UTC) for the data
+                query, defaulting to the current system time.
+            use_timestamp: A flag indicating whether to parse the NDBC data
+                service column headers as a timestamp, and to use this timestamp
+                as the index.
+            as_df: Whether to return station-level data as a `pandas.DataFrame`,
+                defaults to `True`, if `False` a `dict` is returned unless
+                `as_xarray_dataset` is set to `True`.
+            as_xarray_dataset: Whether to return the data as an `xarray.Dataset`,
+                defaults to `False`.
+            cols: A list of columns of interest which are selected from the
+                available data columns, such that only the desired columns are
+                returned. All columns are returned if `None` is specified.
+            use_opendap: An alias for `as_xarray_dataset`.
+
+        Returns:
+            The available station(s) measurements for the specified modes, time
+            range, and columns, either as a `dict` or as a `pandas.DataFrame`
+            if the `as_df` flag is set to `True`.
+
+        Raises:
+            ValueError: Both `station_id` and `station_ids` are `None`, or both
+                are not `None`. This is also raised if `mode` and `modes` are
+                `None`, or both are not `None`.
+            RequestException: The specified mode is not available.
+            ResponseException: There was an error in executing and parsing the
+                required requests against the NDBC data service.
+            HandlerException: There was an error in handling the returned data
+                as a `dict` or `pandas.DataFrame`.
+        """
+        if use_opendap is not None:
+            as_xarray_dataset = use_opendap
+
+        as_df = as_df and not as_xarray_dataset
+
+        self.log(logging.DEBUG,
+                 message=f"`get_data` called with arguments: {locals()}")
+        if station_id is None and station_ids is None:
+            raise ValueError('Both `station_id` and `station_ids` are `None`.')
+        if station_id is not None and station_ids is not None:
+            raise ValueError('`station_id` and `station_ids` cannot both be '
+                             'specified.')
+        if mode is None and modes is None:
+            raise ValueError('Both `mode` and `modes` are `None`.')
+        if mode is not None and modes is not None:
+            raise ValueError('`mode` and `modes` cannot both be specified.')
+
+        handle_station_ids: List[Union[int, str]] = []
+        handle_modes: List[str] = []
+
+        if station_id is not None:
+            handle_station_ids.append(station_id)
+        if station_ids is not None:
+            handle_station_ids.extend(station_ids)
+        if mode is not None:
+            handle_modes.append(mode)
+        if modes is not None:
+            handle_modes.extend(modes)
+
+        for mode in handle_modes:
+            if mode not in self.get_modes(use_opendap=as_xarray_dataset):
+                raise RequestException(f"Mode {mode} is not available.")
+
+        self.log(logging.INFO,
+                 message=(f"Processing request for station_ids "
+                          f"{handle_station_ids} and modes "
+                          f"{handle_modes}"))
+
+        # accumulated_data records the handled response and parsed station_id
+        # as a tuple, with the data as the first value and the id as the second.
+        accumulated_data: Dict[str, List[Union[pd.DataFrame, dict]]] = {}
+        for mode in handle_modes:
+            accumulated_data[mode] = []
+
+            with ThreadPoolExecutor(
+                    max_workers=len(handle_station_ids)) as station_executor:
+                station_futures = {}
+                for station_id in handle_station_ids:
+                    station_futures[station_id] = station_executor.submit(
+                        self._handle_get_data,
+                        mode=mode,
+                        station_id=station_id,
+                        start_time=start_time,
+                        end_time=end_time,
+                        use_timestamp=use_timestamp,
+                        as_df=as_df,
+                        cols=cols,
+                        use_opendap=as_xarray_dataset,
+                    )
+
+                for future in as_completed(station_futures.values()):
+                    try:
+                        station_data, station_id = future.result()
+                        self.log(
+                            level=logging.DEBUG,
+                            station_id=station_id,
+                            message=
+                            f"Successfully processed request for station_id {station_id}"
+                        )
+                        if as_df:
+                            station_data['station_id'] = station_id
+                        accumulated_data[mode].append(station_data)
+                    except (RequestException, ResponseException,
+                            HandlerException) as e:
+                        self.log(
+                            level=logging.WARNING,
+                            station_id=station_id,
+                            message=(
+                                f"Failed to process request for station_id "
+                                f"{station_id} with error: {e}"))
+        self.log(logging.INFO, message="Finished processing request.")
+        return self._handle_accumulate_data(accumulated_data)
+
+    def get_modes(self,
+                  use_opendap: bool = False,
+                  as_xarray_dataset: Optional[bool] = None) -> List[str]:
+        """Get the list of supported modes for `get_data(...)`.
+
+        Args:
+            use_opendap (bool): Whether to return the available
+                modes for opendap `xarray.Dataset` data.
+            as_xarray_dataset (bool): An alias for `use_opendap`.
+
+        Returns:
+            (List[str]) the available modalities.
+        """
+        if as_xarray_dataset is not None:
+            use_opendap = as_xarray_dataset
+
+        if use_opendap:
+            return [
+                v for v in vars(self._opendap_data_api) if not v.startswith('_')
+            ]
+        return [v for v in vars(self._data_api) if not v.startswith('_')]
+
+    @staticmethod
+    def save_xarray_dataset(dataset: xarray.Dataset, output_filepath: str,
+                            **kwargs) -> None:
+        """
+        Saves an `xarray.Dataset` to netCDF at a user-specified file path.
+
+        Args:
+            dataset: The xarray dataset to save.
+            output_filepath: The path to save the dataset to.
+            **kwargs: Additional keyword arguments to pass to `dataset.to_netcdf`.
+
+        Returns:
+            None: The dataset is written to disk.
+        """
+        dataset.to_netcdf(output_filepath, **kwargs)
+
+    """ PRIVATE """
+
+    def _get_request_handler(
+        self,
+        cache_limit: int,
+        delay: int,
+        retries: int,
+        backoff_factor: float,
+        headers: dict,
+        debug: bool,
+        verify_https: bool,
+    ) -> Any:
+        """Build a new `RequestHandler` for the `NdbcApi`."""
+        return RequestHandler(
+            cache_limit=cache_limit or self.cache_limit,
+            log=self.log,
+            delay=delay,
+            retries=retries,
+            backoff_factor=backoff_factor,
+            headers=headers,
+            debug=debug,
+            verify_https=verify_https,
+        )
+
+    @staticmethod
+    def _parse_station_id(station_id: Union[str, int]) -> str:
+        """Parse station id."""
+        station_id = str(station_id)  # expect string-valued station id
+        station_id = station_id.lower()  # expect lowercased station id
+        return station_id
+
+    @staticmethod
+    def _handle_timestamp(timestamp: Union[datetime, str]) -> datetime:
+        """Convert the specified timestamp to `datetime.datetime`."""
+        if isinstance(timestamp, datetime):
+            return timestamp
+        else:
+            try:
+                return datetime.strptime(timestamp, '%Y-%m-%d %H:%M')
+            except ValueError as e:
+                raise TimestampException from e
+
+    @staticmethod
+    def _enforce_timerange(df: pd.DataFrame, start_time: datetime,
+                           end_time: datetime) -> pd.DataFrame:
+        """Down-select to the data within the specified `datetime` range."""
+        try:
+            df = df.loc[(df.index.values >= pd.Timestamp(start_time)) &
+                        (df.index.values <= pd.Timestamp(end_time))]
+        except ValueError as e:
+            raise TimestampException(
+                'Failed to enforce `start_time` to `end_time` range.') from e
+        return df
+
+    @staticmethod
+    def _handle_data(data: pd.DataFrame,
+                     as_df: bool = True,
+                     cols: List[str] = None) -> Union[pd.DataFrame, dict]:
+        """Apply column down selection and return format handling."""
+        if cols:
+            try:
+                data = data[[*cols]]
+            except (KeyError, ValueError) as e:
+                raise ParserException(
+                    'Failed to parse column selection.') from e
+        if as_df and isinstance(data, pd.DataFrame):
+            return data
+        elif isinstance(data, pd.DataFrame) and not as_df:
+            return data.to_dict()
+        elif as_df:
+            try:
+                return pd.DataFrame().from_dict(data, orient='index')
+            except (NotImplementedError, ValueError, TypeError) as e:
+                raise HandlerException(
+                    'Failed to convert `dict` to `pd.DataFrame`.') from e
+        else:
+            return data
+
+    def _handle_accumulate_data(
+        self,
+        accumulated_data: Dict[str, List[Union[pd.DataFrame, dict,
+                                               xarray.Dataset]]],
+    ) -> Union[pd.DataFrame, dict]:
+        """Accumulate the data from multiple stations and modes."""
+        for k in list(accumulated_data.keys()):
+            if not accumulated_data[k]:
+                del accumulated_data[k]
+
+        if not accumulated_data:
+            return {}
+
+        return_as_df = isinstance(
+            accumulated_data[list(accumulated_data.keys())[-1]][0],
+            pd.DataFrame)
+        use_opendap = isinstance(
+            accumulated_data[list(accumulated_data.keys())[-1]][0],
+            xarray.Dataset)
+
+        data: Union[List[pd.DataFrame], List[xarray.Dataset],
+                    dict] = [] if return_as_df or use_opendap else {}
+
+        for mode, station_data in accumulated_data.items():
+            if return_as_df:
+                data.extend(station_data)
+            elif use_opendap:
+                data.extend(station_data)
+            else:
+                data[mode] = station_data
+
+        if return_as_df:
+            df = pd.concat(data, axis=0)
+            df.reset_index(inplace=True, drop=False)
+            df.set_index(['timestamp', 'station_id'], inplace=True)
+            return df
+        elif use_opendap:
+            return merge_datasets(data)
+        return data
+
+    def _handle_get_data(
+        self,
+        mode: str,
+        station_id: str,
+        start_time: datetime,
+        end_time: datetime,
+
use_timestamp: bool, + as_df: bool = True, + cols: List[str] = None, + use_opendap: bool = False, + ) -> Tuple[Union[pd.DataFrame, xarray.Dataset, dict], str]: + start_time = self._handle_timestamp(start_time) + end_time = self._handle_timestamp(end_time) + station_id = self._parse_station_id(station_id) + if use_opendap: + data_api_call = getattr(self._opendap_data_api, mode, None) + else: + data_api_call = getattr(self._data_api, mode, None) + if not data_api_call: + raise RequestException( + 'Please supply a supported mode from `get_modes()`.') + try: + data = data_api_call( + self._handler, + station_id, + start_time, + end_time, + use_timestamp, + ) + except (ResponseException, ValueError, TypeError, KeyError) as e: + raise ResponseException( + f'Failed to handle API call.\nRaised from {e}') from e + if use_timestamp: + if use_opendap: + data = filter_dataset_by_time_range(data, start_time, end_time) + else: + data = self._enforce_timerange(df=data, + start_time=start_time, + end_time=end_time) + try: + if use_opendap: + if cols: + handled_data = filter_dataset_by_variable(data, cols) + else: + handled_data = data + else: + handled_data = self._handle_data(data, as_df, cols) + except (ValueError, KeyError, AttributeError) as e: + raise ParserException( + f'Failed to handle returned data.\nRaised from {e}') from e + + return (handled_data, station_id) diff --git a/parsers/http/_base.py b/parsers/http/_base.py new file mode 100644 index 0000000..6465bf9 --- /dev/null +++ b/parsers/http/_base.py @@ -0,0 +1,107 @@ +from io import StringIO +from typing import List, Tuple + +import pandas as pd + +from ndbc_api.exceptions import ParserException + + +class BaseParser: + + HEADER_PREFIX = '#' + NAN_VALUES = ['MM'] + DATE_PARSER = '%Y %m %d %H %M' + PARSE_DATES = [0, 1, 2, 3, 4] + INDEX_COL = False + REVERT_COL_NAMES = [] + + @classmethod + def df_from_responses(cls, + responses: List[dict], + use_timestamp: bool = True) -> pd.DataFrame: + components = [] + for response in responses: + if response.get('status') == 200: + components.append( + cls._read_response(response, use_timestamp=use_timestamp)) + df = pd.concat(components) + if use_timestamp: + try: + df = df.reset_index().drop_duplicates(subset='timestamp', + keep='first') + df = df.set_index('timestamp').sort_index() + except KeyError as e: + raise ParserException from e + return df + + @classmethod + def _read_response(cls, response: dict, + use_timestamp: bool) -> pd.DataFrame: + body = response.get('body') + header, data = cls._parse_body(body) + names = cls._parse_header(header) + if not data: + return pd.DataFrame() + # check that parsed names match parsed values or revert + if len([v.strip() for v in data[0].strip('\n').split(' ') if v + ]) != len(names): + names = cls.REVERT_COL_NAMES + if '(' in data[0]: + data = cls._clean_data(data) + + try: + parse_dates = False + date_format = None + if use_timestamp: + parse_dates = [cls.PARSE_DATES] + date_format = cls.DATE_PARSER + df = pd.read_csv( + StringIO('\n'.join(data)), + names=names, + delim_whitespace=True, + na_values=cls.NAN_VALUES, + index_col=cls.INDEX_COL, + parse_dates=parse_dates, + date_format=date_format, + ) + if use_timestamp: + df.index.name = 'timestamp' + + except (NotImplementedError, TypeError, ValueError) as e: + print(e) + return pd.DataFrame() + + # check whether to parse dates + return df + + @staticmethod + def _parse_body(body: str) -> Tuple[List[str], List[str]]: + buf = StringIO(body) + data = [] + header = [] + + line = buf.readline() + while line: + if 
line.startswith('#'): + header.append(line) + else: + data.append(line) + line = buf.readline() + + return header, data + + @staticmethod + def _parse_header(header: List[str]) -> List[str]: + names = ([n for n in header[0].strip('#').strip('\n').split(' ') if n] + if isinstance(header, list) and len(header) > 0 else None) + return names # pass 'None' to pd.read_csv on error + + @staticmethod + def _clean_data(data: List[str]) -> List[str]: + vals = [ + ' '.join([v + for v in r.split(' ') + if v and '(' not in v]) + for r in data + ] + return vals or None # pass 'None' to pd.read_csv on error diff --git a/parsers/http/_html.py b/parsers/http/_html.py new file mode 100644 index 0000000..2159af0 --- /dev/null +++ b/parsers/http/_html.py @@ -0,0 +1,21 @@ +from typing import List + +import pandas as pd +from bs4 import BeautifulSoup + +from ndbc_api.api.parsers.http._base import BaseParser + + +class HtmlParser(BaseParser): + + INDEX_COL = None + + @classmethod + def dfs_from_responses(cls, responses: List[dict]) -> List[pd.DataFrame]: + components = [] + for response in responses: + if response.get('status') == 200: + soup = BeautifulSoup(response.get('body'), 'html.parser') + tables = soup.find_all('table') + components.extend(pd.read_html(str(tables), flavor='bs4')) + return components diff --git a/parsers/http/_station.py b/parsers/http/_station.py new file mode 100644 index 0000000..2d2634a --- /dev/null +++ b/parsers/http/_station.py @@ -0,0 +1,51 @@ +import os +from calendar import month_abbr +from collections import defaultdict +from datetime import datetime +from typing import List, Tuple + +import bs4 + + +class StationParser: + + BASE_URL = 'https://www.ndbc.noaa.gov' + + @classmethod + def _parse_li_urls(cls, + urls: List[bs4.element.Tag]) -> List[Tuple[str, str]]: + parsed = [] + if 'MOCKDATE' in os.environ: + now = datetime.strptime(os.getenv('MOCKDATE'), '%Y-%m-%d').date() + else: + now = datetime.now() + current_year = now.year + for raw_url in urls: + name = raw_url.text.strip() + name = f'{name} {current_year}' if name in month_abbr else name + url = f'{cls.BASE_URL}{raw_url.get("href")}' + parsed.append((name, url)) + return parsed + + @classmethod + def _build_available_measurements( + cls, line_items: List[bs4.element.Tag]) -> dict: + # unpack nested lists + nested = [li for li in line_items for li in li.find_all('li')] + nested = [ + li for li in nested + if li.get('href') is not None and 'plot' not in li.get('href') + ] + line_items = [li for li in line_items if len(li.find_all('li')) == 0] + line_items.extend(nested) + available_measurements = defaultdict(dict) + for li in line_items: + if 'Search' in li.text: + break # end of available measurements + new_measurement = cls._parse_list_item(li) + if new_measurement: + k = list(new_measurement.keys())[0] # guaranteed one key + else: + continue + available_measurements[k].update(new_measurement[k]) + return dict(available_measurements) diff --git a/parsers/http/_xml.py b/parsers/http/_xml.py new file mode 100644 index 0000000..486b306 --- /dev/null +++ b/parsers/http/_xml.py @@ -0,0 +1,28 @@ +import re +import xml.etree.ElementTree as ET + +from ndbc_api.api.parsers.http._base import BaseParser +from ndbc_api.exceptions import ParserException + + +class XMLParser(BaseParser): + """ + Parser for XML data. 
+ """ + + @classmethod + def root_from_response(cls, response: dict) -> ET.ElementTree: + """Parse the response body (string-valued XML) to ET + + Args: + response (dict): The successful HTTP response + """ + + body = response.get('body') + + try: + root = ET.fromstring(body) + return ET.ElementTree(root) + except Exception as e: + raise ParserException( + "failed to obtain XML root from response body") from e diff --git a/parsers/http/active_stations.py b/parsers/http/active_stations.py new file mode 100644 index 0000000..c2ea1ef --- /dev/null +++ b/parsers/http/active_stations.py @@ -0,0 +1,66 @@ +import xml.etree.ElementTree as ET +import pandas as pd + +from ndbc_api.exceptions import ParserException +from ndbc_api.api.parsers.http._xml import XMLParser + + +class ActiveStationsParser(XMLParser): + """ + Parser for active station information from XML data. + """ + + @classmethod + def df_from_response(cls, + response: dict, + use_timestamp: bool = False) -> pd.DataFrame: + """ + Reads the response body and parses it into a DataFrame. + + Args: + response (dict): The response dictionary containing the 'body' key. + use_timestamp (bool): Flag to indicate if the timestamp should be used as an index (not applicable here). + + Returns: + pd.DataFrame: The parsed DataFrame containing station information. + """ + root = super(ActiveStationsParser, cls).root_from_response(response) + try: + station_data = [] + for station in root.findall('station'): + station_info = { + 'Station': + station.get('id'), + 'Lat': + float(station.get('lat')), + 'Lon': + float(station.get('lon')), + 'Elevation': + float(station.get('elev')) + if station.get('elev') else pd.NA, + 'Name': + station.get('name'), + 'Owner': + station.get('owner'), + 'Program': + station.get('pgm'), + 'Type': + station.get('type'), + 'Includes Meteorology': + station.get('met') == 'y', + 'Includes Currents': + station.get('currents') == 'y', + 'Includes Water Quality': + station.get('waterquality') == 'y', + 'DART Program': + station.get('dart') == 'y' + } + station_data.append(station_info) + + df = pd.DataFrame( + station_data) # Create DataFrame from the extracted data + + except ET.ParseError as e: + raise ParserException(f"Error parsing XML data: {e}") from e + + return df diff --git a/parsers/http/adcp.py b/parsers/http/adcp.py new file mode 100644 index 0000000..478319e --- /dev/null +++ b/parsers/http/adcp.py @@ -0,0 +1,138 @@ +from typing import List + +import pandas as pd + +from ndbc_api.api.parsers.http._base import BaseParser + + +class AdcpParser(BaseParser): + + INDEX_COL = 0 + NAN_VALUES = None + REVERT_COL_NAMES = [ + 'YY', + 'MM', + 'DD', + 'hh', + 'mm', + 'DEP01', + 'DIR01', + 'SPD01', + 'DEP02', + 'DIR02', + 'SPD02', + 'DEP03', + 'DIR03', + 'SPD03', + 'DEP04', + 'DIR04', + 'SPD04', + 'DEP05', + 'DIR05', + 'SPD05', + 'DEP06', + 'DIR06', + 'SPD06', + 'DEP07', + 'DIR07', + 'SPD07', + 'DEP08', + 'DIR08', + 'SPD08', + 'DEP09', + 'DIR09', + 'SPD09', + 'DEP10', + 'DIR10', + 'SPD10', + 'DEP11', + 'DIR11', + 'SPD11', + 'DEP12', + 'DIR12', + 'SPD12', + 'DEP13', + 'DIR13', + 'SPD13', + 'DEP14', + 'DIR14', + 'SPD14', + 'DEP15', + 'DIR15', + 'SPD15', + 'DEP16', + 'DIR16', + 'SPD16', + 'DEP17', + 'DIR17', + 'SPD17', + 'DEP18', + 'DIR18', + 'SPD18', + 'DEP19', + 'DIR19', + 'SPD19', + 'DEP20', + 'DIR20', + 'SPD20', + 'DEP21', + 'DIR21', + 'SPD21', + 'DEP22', + 'DIR22', + 'SPD22', + 'DEP23', + 'DIR23', + 'SPD23', + 'DEP24', + 'DIR24', + 'SPD24', + 'DEP25', + 'DIR25', + 'SPD25', + 'DEP26', + 'DIR26', + 'SPD26', + 'DEP27', + 'DIR27', + 
'SPD27',
+        'DEP28',
+        'DIR28',
+        'SPD28',
+        'DEP29',
+        'DIR29',
+        'SPD29',
+        'DEP30',
+        'DIR30',
+        'SPD30',
+        'DEP31',
+        'DIR31',
+        'SPD31',
+        'DEP32',
+        'DIR32',
+        'SPD32',
+        'DEP33',
+        'DIR33',
+        'SPD33',
+        'DEP34',
+        'DIR34',
+        'SPD34',
+        'DEP35',
+        'DIR35',
+        'SPD35',
+        'DEP36',
+        'DIR36',
+        'SPD36',
+        'DEP37',
+        'DIR37',
+        'SPD37',
+        'DEP38',
+        'DIR38',
+        'SPD38',
+    ]
+
+    @classmethod
+    def df_from_responses(cls, responses: List[dict],
+                          use_timestamp: bool) -> pd.DataFrame:
+        return super(AdcpParser, cls).df_from_responses(responses,
+                                                        use_timestamp)
diff --git a/parsers/http/cwind.py b/parsers/http/cwind.py
new file mode 100644
index 0000000..48616a9
--- /dev/null
+++ b/parsers/http/cwind.py
@@ -0,0 +1,17 @@
+from typing import List
+
+import pandas as pd
+
+from ndbc_api.api.parsers.http._base import BaseParser
+
+
+class CwindParser(BaseParser):
+
+    INDEX_COL = 0
+    NAN_VALUES = [99.0, 999, 9999, 9999.0, 'MM']
+
+    @classmethod
+    def df_from_responses(cls, responses: List[dict],
+                          use_timestamp: bool) -> pd.DataFrame:
+        return super(CwindParser,
+                     cls).df_from_responses(responses, use_timestamp)
diff --git a/parsers/http/historical_stations.py b/parsers/http/historical_stations.py
new file mode 100644
index 0000000..3333e43
--- /dev/null
+++ b/parsers/http/historical_stations.py
@@ -0,0 +1,75 @@
+import xml.etree.ElementTree as ET
+import pandas as pd
+
+from ndbc_api.exceptions import ParserException
+from ndbc_api.api.parsers.http._xml import XMLParser
+
+
+class HistoricalStationsParser(XMLParser):
+    """
+    Parser for historical station information from XML data.
+    """
+
+    @classmethod
+    def df_from_response(cls,
+                         response: dict,
+                         use_timestamp: bool = False) -> pd.DataFrame:
+        """
+        Reads the response body and parses it into a DataFrame.
+
+        Args:
+            response (dict): The response dictionary containing the 'body' key.
+            use_timestamp (bool): Flag to indicate if the timestamp should be used as an index (not applicable here).
+
+        Returns:
+            pd.DataFrame: The parsed DataFrame containing station information.
+ """ + root = super(HistoricalStationsParser, cls).root_from_response(response) + try: + station_data = [] + for station in root.findall('station'): + station_id = station.get('id') + station_name = station.get('name') + station_owner = station.get('owner') + station_program = station.get('pgm') + station_type = station.get('type') + + for history in station.findall('history'): + station_info = { + 'Station': + station_id, + 'Lat': + float(history.get('lat')), + 'Lon': + float(history.get('lng')), + 'Elevation': + float(history.get('elev')) + if history.get('elev') else pd.NA, + 'Name': + station_name, + 'Owner': + station_owner, + 'Program': + station_program, + 'Type': + station_type, + 'Includes Meteorology': + history.get('met') == 'y', + 'Hull Type': + history.get('hull'), + 'Anemometer Height': + float(history.get('anemom_height')) + if history.get('anemom_height') else pd.NA, + 'Start Date': + history.get('start'), + 'End Date': + history.get('stop'), + } + station_data.append(station_info) + + df = pd.DataFrame(station_data) + + except ET.ParseError as e: + raise ParserException(f"Error parsing XML data: {e}") from e + + return df diff --git a/parsers/http/ocean.py b/parsers/http/ocean.py new file mode 100644 index 0000000..568812f --- /dev/null +++ b/parsers/http/ocean.py @@ -0,0 +1,16 @@ +from typing import List + +import pandas as pd + +from ndbc_api.api.parsers.http._base import BaseParser + + +class OceanParser(BaseParser): + + INDEX_COL = 0 + + @classmethod + def df_from_responses(cls, responses: List[dict], + use_timestamp: bool) -> pd.DataFrame: + return super(OceanParser, + cls).df_from_responses(responses, use_timestamp) diff --git a/parsers/http/spec.py b/parsers/http/spec.py new file mode 100644 index 0000000..7266c79 --- /dev/null +++ b/parsers/http/spec.py @@ -0,0 +1,17 @@ +from typing import List + +import pandas as pd + +from ndbc_api.api.parsers.http._base import BaseParser + + +class SpecParser(BaseParser): + + INDEX_COL = 0 + NAN_VALUES = ['N/A'] + + @classmethod + def df_from_responses(cls, responses: List[dict], + use_timestamp: bool) -> pd.DataFrame: + return super(SpecParser, cls).df_from_responses(responses, + use_timestamp) diff --git a/parsers/http/station_historical.py b/parsers/http/station_historical.py new file mode 100644 index 0000000..8c0fb29 --- /dev/null +++ b/parsers/http/station_historical.py @@ -0,0 +1,34 @@ +import re + +import bs4 + +from ndbc_api.api.parsers.http._station import StationParser + + +class HistoricalParser(StationParser): + + LIST_IDENTIFIER = re.compile( + 'Available historical data for station .{5} include:') + + @classmethod + def available_measurements(cls, response: dict) -> dict: + if response.get('status') == 200: + soup = bs4.BeautifulSoup(response.get('body'), 'html.parser') + p_tag = soup.find('p', text=cls.LIST_IDENTIFIER) + line_items = p_tag.find_next_siblings('ul')[0].find_all('li') + return cls._build_available_measurements(line_items=line_items) + else: + return dict() + + @classmethod + def _parse_list_item(cls, li: bs4.element.Tag) -> dict: + measurement_item = dict() + try: + title = li.find('b').text.strip(': ') + parsed = cls._parse_li_urls(li.find_all('a')) + except AttributeError: + return measurement_item + measurement_item[title] = dict() + for name, url in parsed: + measurement_item[title][name] = url + return measurement_item diff --git a/parsers/http/station_metadata.py b/parsers/http/station_metadata.py new file mode 100644 index 0000000..19096f8 --- /dev/null +++ b/parsers/http/station_metadata.py 
@@ -0,0 +1,50 @@
+from collections import ChainMap
+from typing import List
+
+import bs4
+
+from ndbc_api.api.parsers.http._station import StationParser
+
+
+class MetadataParser(StationParser):
+
+    @classmethod
+    def metadata(cls, response: dict) -> dict:
+        if response.get('status') == 200:
+            soup = bs4.BeautifulSoup(response.get('body'), 'html.parser')
+            metadata = cls._meta_from_response(soup=soup)
+            return dict(ChainMap(*metadata))
+        else:
+            return dict()
+
+    @classmethod
+    def _meta_from_response(cls, soup: bs4.BeautifulSoup):
+        metadata = []
+        try:
+            metadata.append({'Name': soup.find('h1').text.strip()})
+            items = soup.find('div', id='stn_metadata').find_all('p')[0].text
+            items = items.split('\n\n')
+            assert len(items) == 2
+        except (AssertionError, AttributeError):
+            return metadata
+        metadata.extend(cls._parse_headers(items[0]))
+        metadata.extend(cls._parse_attrs(items[1]))
+        return metadata
+
+    @classmethod
+    def _parse_headers(cls, line_meta):
+        station_headers = []
+        headers = [i.strip() for i in line_meta.split('\n') if i]
+        station_headers.append({'Station Type': ', '.join(headers[0:-1])})
+        station_headers.append({'Location': headers[-1]})
+        return station_headers
+
+    @classmethod
+    def _parse_attrs(cls, line_attr: str) -> List[dict]:
+        station_attrs = []
+        attrs = [i for i in line_attr.split('\n') if i]
+        for attr in attrs:
+            # split on the first ': ' only, in case the value contains one
+            k, v = attr.split(': ', 1)
+            station_attrs.append({k: v})
+        return station_attrs
diff --git a/parsers/http/station_realtime.py b/parsers/http/station_realtime.py
new file mode 100644
index 0000000..46654d0
--- /dev/null
+++ b/parsers/http/station_realtime.py
@@ -0,0 +1,29 @@
+import bs4
+
+from ndbc_api.api.parsers.http._station import StationParser
+
+
+class RealtimeParser(StationParser):
+
+    @classmethod
+    def available_measurements(cls, response: dict) -> dict:
+        if response.get('status') == 200:
+            soup = bs4.BeautifulSoup(response.get('body'), 'html.parser')
+            items = soup.find('section', {"class": "data"})
+            line_items = items.find_all('li')
+            return cls._build_available_measurements(line_items=line_items)
+        else:
+            return dict()
+
+    @classmethod
+    def _parse_list_item(cls, li: bs4.element.Tag) -> dict:
+        measurement_item = dict()
+        try:
+            title = li.text.split('\n')[0]
+            parsed = cls._parse_li_urls(li.find_all('a'))
+        except AttributeError:
+            return measurement_item
+        measurement_item[title] = dict()
+        for name, url in parsed:
+            measurement_item[title][name] = url
+        return measurement_item
diff --git a/parsers/http/stdmet.py b/parsers/http/stdmet.py
new file mode 100644
index 0000000..f81abf4
--- /dev/null
+++ b/parsers/http/stdmet.py
@@ -0,0 +1,17 @@
+from typing import List
+
+import pandas as pd
+
+from ndbc_api.api.parsers.http._base import BaseParser
+
+
+class StdmetParser(BaseParser):
+
+    INDEX_COL = 0
+    NAN_VALUES = ['MM', 99.0, 999, 9999, 9999.0]
+
+    @classmethod
+    def df_from_responses(cls, responses: List[dict],
+                          use_timestamp: bool) -> pd.DataFrame:
+        return super(StdmetParser,
+                     cls).df_from_responses(responses, use_timestamp)
diff --git a/parsers/http/supl.py b/parsers/http/supl.py
new file mode 100644
index 0000000..096c93a
--- /dev/null
+++ b/parsers/http/supl.py
@@ -0,0 +1,17 @@
+from typing import List
+
+import pandas as pd
+
+from ndbc_api.api.parsers.http._base import BaseParser
+
+
+class SuplParser(BaseParser):
+
+    INDEX_COL = 0
+    NAN_VALUES = [99.0, 999, 999.0, 9999, 9999.0, 'MM']
+
+    @classmethod
+    def df_from_responses(cls, responses: List[dict],
+                          use_timestamp: bool) -> pd.DataFrame:
+        return super(SuplParser,
cls).df_from_responses(responses, + use_timestamp) diff --git a/parsers/http/swden.py b/parsers/http/swden.py new file mode 100644 index 0000000..0514aa3 --- /dev/null +++ b/parsers/http/swden.py @@ -0,0 +1,71 @@ +from typing import List + +import pandas as pd + +from ndbc_api.api.parsers.http._base import BaseParser + + +class SwdenParser(BaseParser): + + INDEX_COL = 0 + NAN_VALUES = [99.0, 999, 999.0, 9999, 9999.0, 'MM'] + REVERT_COL_NAMES = [ + 'YY', + 'MM', + 'DD', + 'hh', + 'mm', + '.0200', + '.0325', + '.0375', + '.0425', + '.0475', + '.0525', + '.0575', + '.0625', + '.0675', + '.0725', + '.0775', + '.0825', + '.0875', + '.0925', + '.1000', + '.1100', + '.1200', + '.1300', + '.1400', + '.1500', + '.1600', + '.1700', + '.1800', + '.1900', + '.2000', + '.2100', + '.2200', + '.2300', + '.2400', + '.2500', + '.2600', + '.2700', + '.2800', + '.2900', + '.3000', + '.3100', + '.3200', + '.3300', + '.3400', + '.3500', + '.3650', + '.3850', + '.4050', + '.4250', + '.4450', + '.4650', + '.4850', + ] + + @classmethod + def df_from_responses(cls, responses: List[dict], + use_timestamp: bool) -> pd.DataFrame: + return super(SwdenParser, + cls).df_from_responses(responses, use_timestamp) diff --git a/parsers/http/swdir.py b/parsers/http/swdir.py new file mode 100644 index 0000000..fcb54f0 --- /dev/null +++ b/parsers/http/swdir.py @@ -0,0 +1,71 @@ +from typing import List + +import pandas as pd + +from ndbc_api.api.parsers.http._base import BaseParser + + +class SwdirParser(BaseParser): + + INDEX_COL = 0 + NAN_VALUES = [99.0, 999, 999.0, 9999, 9999.0, 'MM'] + REVERT_COL_NAMES = [ + 'YY', + 'MM', + 'DD', + 'hh', + 'mm', + '.0200', + '.0325', + '.0375', + '.0425', + '.0475', + '.0525', + '.0575', + '.0625', + '.0675', + '.0725', + '.0775', + '.0825', + '.0875', + '.0925', + '.1000', + '.1100', + '.1200', + '.1300', + '.1400', + '.1500', + '.1600', + '.1700', + '.1800', + '.1900', + '.2000', + '.2100', + '.2200', + '.2300', + '.2400', + '.2500', + '.2600', + '.2700', + '.2800', + '.2900', + '.3000', + '.3100', + '.3200', + '.3300', + '.3400', + '.3500', + '.3650', + '.3850', + '.4050', + '.4250', + '.4450', + '.4650', + '.4850', + ] + + @classmethod + def df_from_responses(cls, responses: List[dict], + use_timestamp: bool) -> pd.DataFrame: + df = super(SwdirParser, cls).df_from_responses(responses, use_timestamp) + return df diff --git a/parsers/http/swdir2.py b/parsers/http/swdir2.py new file mode 100644 index 0000000..85a0c99 --- /dev/null +++ b/parsers/http/swdir2.py @@ -0,0 +1,72 @@ +from typing import List + +import pandas as pd + +from ndbc_api.api.parsers.http._base import BaseParser + + +class Swdir2Parser(BaseParser): + + INDEX_COL = 0 + NAN_VALUES = [99.0, 999, 999.0, 9999, 9999.0, 'MM'] + REVERT_COL_NAMES = [ + 'YY', + 'MM', + 'DD', + 'hh', + 'mm', + '.0200', + '.0325', + '.0375', + '.0425', + '.0475', + '.0525', + '.0575', + '.0625', + '.0675', + '.0725', + '.0775', + '.0825', + '.0875', + '.0925', + '.1000', + '.1100', + '.1200', + '.1300', + '.1400', + '.1500', + '.1600', + '.1700', + '.1800', + '.1900', + '.2000', + '.2100', + '.2200', + '.2300', + '.2400', + '.2500', + '.2600', + '.2700', + '.2800', + '.2900', + '.3000', + '.3100', + '.3200', + '.3300', + '.3400', + '.3500', + '.3650', + '.3850', + '.4050', + '.4250', + '.4450', + '.4650', + '.4850', + ] + + @classmethod + def df_from_responses(cls, responses: List[dict], + use_timestamp: bool) -> pd.DataFrame: + df = super(Swdir2Parser, cls).df_from_responses(responses, + use_timestamp) + return df diff --git a/parsers/http/swr1.py 
b/parsers/http/swr1.py
new file mode 100644
index 0000000..3bd4e12
--- /dev/null
+++ b/parsers/http/swr1.py
@@ -0,0 +1,71 @@
+from typing import List
+
+import pandas as pd
+
+from ndbc_api.api.parsers.http._base import BaseParser
+
+
+class Swr1Parser(BaseParser):
+
+    INDEX_COL = 0
+    NAN_VALUES = [99.0, 999, 999.0, 9999, 9999.0, 'MM']
+    REVERT_COL_NAMES = [
+        'YY',
+        'MM',
+        'DD',
+        'hh',
+        'mm',
+        '.0200',
+        '.0325',
+        '.0375',
+        '.0425',
+        '.0475',
+        '.0525',
+        '.0575',
+        '.0625',
+        '.0675',
+        '.0725',
+        '.0775',
+        '.0825',
+        '.0875',
+        '.0925',
+        '.1000',
+        '.1100',
+        '.1200',
+        '.1300',
+        '.1400',
+        '.1500',
+        '.1600',
+        '.1700',
+        '.1800',
+        '.1900',
+        '.2000',
+        '.2100',
+        '.2200',
+        '.2300',
+        '.2400',
+        '.2500',
+        '.2600',
+        '.2700',
+        '.2800',
+        '.2900',
+        '.3000',
+        '.3100',
+        '.3200',
+        '.3300',
+        '.3400',
+        '.3500',
+        '.3650',
+        '.3850',
+        '.4050',
+        '.4250',
+        '.4450',
+        '.4650',
+        '.4850',
+    ]
+
+    @classmethod
+    def df_from_responses(cls, responses: List[dict],
+                          use_timestamp: bool) -> pd.DataFrame:
+        df = super(Swr1Parser, cls).df_from_responses(responses, use_timestamp)
+        return df
diff --git a/parsers/http/swr2.py b/parsers/http/swr2.py
new file mode 100644
index 0000000..0b8d56b
--- /dev/null
+++ b/parsers/http/swr2.py
@@ -0,0 +1,71 @@
+from typing import List
+
+import pandas as pd
+
+from ndbc_api.api.parsers.http._base import BaseParser
+
+
+class Swr2Parser(BaseParser):
+
+    INDEX_COL = 0
+    NAN_VALUES = [99.0, 999, 999.0, 9999, 9999.0, 'MM']
+    REVERT_COL_NAMES = [
+        'YY',
+        'MM',
+        'DD',
+        'hh',
+        'mm',
+        '.0200',
+        '.0325',
+        '.0375',
+        '.0425',
+        '.0475',
+        '.0525',
+        '.0575',
+        '.0625',
+        '.0675',
+        '.0725',
+        '.0775',
+        '.0825',
+        '.0875',
+        '.0925',
+        '.1000',
+        '.1100',
+        '.1200',
+        '.1300',
+        '.1400',
+        '.1500',
+        '.1600',
+        '.1700',
+        '.1800',
+        '.1900',
+        '.2000',
+        '.2100',
+        '.2200',
+        '.2300',
+        '.2400',
+        '.2500',
+        '.2600',
+        '.2700',
+        '.2800',
+        '.2900',
+        '.3000',
+        '.3100',
+        '.3200',
+        '.3300',
+        '.3400',
+        '.3500',
+        '.3650',
+        '.3850',
+        '.4050',
+        '.4250',
+        '.4450',
+        '.4650',
+        '.4850',
+    ]
+
+    @classmethod
+    def df_from_responses(cls, responses: List[dict],
+                          use_timestamp: bool) -> pd.DataFrame:
+        df = super(Swr2Parser, cls).df_from_responses(responses,
+                                                      use_timestamp)
+        return df
diff --git a/requests/__init__.py b/requests/__init__.py
new file mode 100644
index 0000000..e69de29
[GIT binary patch hunks adding compiled bytecode under requests/__pycache__/ and requests/http/__pycache__/ (*.cpython-311.pyc) omitted: the base85 blobs carry no human-readable content]
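A quick way to sanity-check the parser stack above is to hand StdmetParser a response dict of the shape df_from_responses expects. The sketch below is illustrative, not part of the patch: the station text and ID are fabricated (two '#'-prefixed header rows, then whitespace-delimited data rows in which 'MM' marks a missing value, modeled on NDBC's realtime stdmet layout), and the import path assumes the modules land under ndbc_api.api.parsers.http as the patch's own import lines suggest.

# Illustrative only: exercise StdmetParser on a hand-built response dict.
from ndbc_api.api.parsers.http.stdmet import StdmetParser  # assumed install path

SAMPLE_BODY = (
    '#YY  MM DD hh mm WDIR WSPD GST  WVHT\n'   # names row ('#' marks header lines)
    '#yr  mo dy hr mn degT m/s  m/s  m\n'      # units row, also '#'-prefixed
    '2025 01 01 01 00 230  12.0 14.1 MM\n'     # 'MM' becomes NaN via NAN_VALUES
    '2025 01 01 00 00 225  11.5 13.8 2.2\n'
)

# use_timestamp=True folds the five leading date columns into a single
# parsed, de-duplicated, sorted 'timestamp' index.
df = StdmetParser.df_from_responses([{'status': 200, 'body': SAMPLE_BODY}],
                                    use_timestamp=True)
print(df)        # WVHT shows NaN where the buoy reported 'MM'
print(df.index)  # DatetimeIndex named 'timestamp'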
ztEBELoW^*e?)a^YS-d@QYj`S16-#3yxD_0{UYeY`JyE(kJcdixM#iT~#nQ+`kP6i) zQ!!U1fzAbEs*_B$H>@Vev;=gi?_-=ttXHPEYVZ)-uXd?kghm1WR2o@a`?sAruu^RF z9$C5BIC^}`IJ_OpH8njpgw}Wylz*Fue53DlQ&$U(v;CVV-#mzdD7V+{=wR+!eHb0J zHyy3818rvGc{404FvU{MtIUSw1%2&~7Gy^Y?T!{yM+7VBqC3hnAlRaNWg@Bmjp$?JNv!w`T6YkzIXR3lSzV-?1!K8 zhYf(Q+UZEF4fcu(yaWOS1yBJ8kj6%UDu!c(*bK~yoL`D4V+Gaymty)F~HU`@PVFIV!YwPpNg43k3%ea14SR zlNgRgOeaQdV$ry|IYDeS)k9JR)OKiX#B8j#1mRj5Ht~<#Mc!r#m%B@dkrk@g@ zoo%NeZ63PI@q7d%@@0|>SNMxn?2`~Pfprx!L8~M}Hlw1>s-D%o13*)$sR#>rfnI_@ zf1*MW-;ovAQr>oVWwVPqJG5OvE5?f1ORy>cjps}m`LVE){kUoB5Ac?&M9OtQsC z)Dyh?l)7SpvbJBYl35SCHNUK9`%+$B5TaI&2+Y@(Siibs`B)Fp`JOa0Y6Blu>c=X4 zx1oRF#ly{!kJ+Km*`Y7lbDy&3-Y&nt*gSt_*F^mVv=3Uh0QD8no_Yy#X_>&d>Q*Sm zGKq0zk<w^<1?jVX>>}a zLVhY#EG!afa>1DHCDRQK%aNHjg&qw8jB|(`R3WZge3c#2yL2}~r2$`8N)^|6b}|F2 z#pc0dtGAoSPd2QhJF)(jVa5t*lea+IcZtY02ZvgwUTB^fetY`O{m6-8f9qWn>;r2L zqNDx}M=R`bC$sUK9hN0+u~hS`^I>^%u+!7L>S?~y)4c9!9?z=ZmFaILEbTQ!hv|ZU zB`!t0UuLJ2tC`?SO0`UcP!moxdPftEG7fK6n?XRcI;SAp$MfaYAPgzg|LJ~rK$qLPh)CG14>c^i>1+KXF?X%Kg^5; zK{@0QsT{c=t~sTu!hvItJ#kK}O10(`i4!+R0SP$u&G?6!c0BXm_vX#ce(!thUkZgB z;3&R)e<$YvyfVRH_SV6uYG1ZnT!;)F(}ksWk}@iKVwQaE z#;FuWN0B#X+#DLCG)^NaDV2ui6pr-|H!giw`$=*kYgeN9-er+2FDH%K_0@PK67}n% zd6&mZ)*kRkt#HwgYYny()2Nly%<8{X)>f2i)r@@Q)~cAmH$0mBm%!x_iUmx|GMabm z@HT*nLr-sS9{rrK{94IZcTemhG0nV@oQ{>KZ8I$x>I&d`2v=!fdU;anPJHZLr1wxrzoW8~k<) z2iGf|1C#4lJ70da<4x}R6J5vkr#EF6(EXORt}}JK>tfhBdFsW{=eI1jiBHU)3&Cd_ zx9ts|q--cYA8?r~1-1|!sMS`|xMK^_Q-k)Ev3=!1`^u($Wx9m_Nnt(@Tg7fK6n?XRcI;SAp$Mg-Y7i2tg=(o4l`7N{{$lDj4Jb(wES5%_oe5c3|1dKa z1m%!Jq~_R5#5JcN6b>9Y_SAEzD%F}(q@H?n6e_ixcr*S1(~f7}``*0S+3$UC{iIOH z0gmFY|K7ZnF4j~YOos`s_(6QJ}-MSZg1RS^u%-aCwb8p85 z*u~Wwq0fQ^C}sXKVTtH0ixgK$og?;U6RFVWFGZ5i^ZRWsRos8hI?EB!;Y~C@Ar}rI zVGufugf4SJkGY}G1LpAzzB$VRTp2#bGn2zxmb06%Ojvefd6}!NN}3ItFpGtvhQH482J4|faMc!#*c;vsU_>+`Sn6Bk zF(~pjt5EX6Sc4smc85FLLo_&H{2Ezv*4$BPkvf?xx!RZQ78fFe$8=$-ourJ4o|q+H zyKyRo(NW}$88?TxdN=l`nIfY~W^^MEtYClRYWbL~szIRn5%gaflc6~KoiA4Rn zXx`9 zGxIucg{gD3(q!6|N(j?LVLEFo(S^QtF;7!UOGF&Olr+7)AdZ?-rVZ8@AXm{~WrN>- z;plp$b7XS;`_73^_Poh`f1>NS{`9u&0=gGj>pD}Pc3liRpMC!P)U#U_+r)e3z=hz0 zjobEy_fj?#9}c+8l>%Fc4%BKZY22{|>8U~c%Gkd0pnYZ2zA|0H|D-UVhb`tQ^e!_c zzRrH#Q~JC(iJ_@oHqmrlLP!Tb-W#1AIKDSJ_u;F((b)}h1RMKb^YfVlv-|3QQkKvg D!lE|= literal 0 HcmV?d00001 diff --git a/requests/http/_base.py b/requests/http/_base.py new file mode 100644 index 0000000..517653c --- /dev/null +++ b/requests/http/_base.py @@ -0,0 +1,105 @@ +import os +from calendar import month_abbr +from datetime import datetime, timedelta +from typing import List + +from ndbc_api.api.requests.http._core import CoreRequest + + +class BaseRequest(CoreRequest): + + REAL_TIME_URL_PREFIX = 'data/realtime2/' + HISTORICAL_FILE_EXTENSION_SUFFIX = '.txt.gz' + HISTORICAL_DATA_PREFIX = '&dir=data/' + HISTORICAL_URL_PREFIX = 'view_text_file.php?filename=' + HISTORICAL_SUFFIX = 'historical/' + HISTORICAL_IDENTIFIER = 'h' + FORMAT = '' + FILE_FORMAT = '' + + @classmethod + def build_request(cls, station_id: str, start_time: datetime, + end_time: datetime) -> List[str]: + + if 'MOCKDATE' in os.environ: + now = datetime.strptime(os.getenv('MOCKDATE'), '%Y-%m-%d') + else: + now = datetime.now() + is_historical = (now - start_time) >= timedelta(days=44) + if is_historical: + return cls._build_request_historical( + station_id=station_id, + start_time=start_time, + end_time=end_time, + now=now, + ) + return cls._build_request_realtime(station_id=station_id) + + @classmethod + def _build_request_historical( + cls, + station_id: str, + start_time: datetime, + end_time: datetime, + now: datetime, + ) -> 
List[str]: + + def req_hist_helper_year(req_year: int) -> str: + return f'{cls.BASE_URL}{cls.HISTORICAL_URL_PREFIX}{station_id}{cls.HISTORICAL_IDENTIFIER}{req_year}{cls.HISTORICAL_FILE_EXTENSION_SUFFIX}{cls.HISTORICAL_DATA_PREFIX}{cls.HISTORICAL_SUFFIX}{cls.FORMAT}/' + + def req_hist_helper_month(req_year: int, req_month: int) -> str: + month = month_abbr[req_month] + month = month.capitalize() + return f'{cls.BASE_URL}{cls.HISTORICAL_URL_PREFIX}{station_id}{req_month}{req_year}{cls.HISTORICAL_FILE_EXTENSION_SUFFIX}{cls.HISTORICAL_DATA_PREFIX}{cls.FORMAT}/{month}/' + + def req_hist_helper_month_current(current_month: int) -> str: + month = month_abbr[current_month] + month = month.capitalize() + return f'{cls.BASE_URL}data/{cls.FORMAT}/{month}/{station_id.lower()}.txt' + + if not cls.FORMAT: # pragma: no cover + raise ValueError( + 'Please provide a format for this historical data request, or call a formatted child class\'s method.' + ) + # store request urls + reqs = [] + + current_year = now.year + has_realtime = (now - end_time) < timedelta(days=44) + months_req_year = (now - timedelta(days=44)).year + last_avail_month = (now - timedelta(days=44)).month + + # handle year requests + for hist_year in range(int(start_time.year), + min(int(current_year), + int(end_time.year) + 1)): + reqs.append(req_hist_helper_year(hist_year)) + + # handle month requests + if end_time.year == months_req_year: + for hist_month in range( + int(start_time.month), + min(int(end_time.month), int(last_avail_month)) + 1): + reqs.append(req_hist_helper_month(months_req_year, hist_month)) + if int(last_avail_month) <= (end_time.month): + reqs.append(req_hist_helper_month_current( + int(last_avail_month))) + + if has_realtime: + reqs.append( + cls._build_request_realtime( + station_id=station_id)[0] # only one URL + ) + return reqs + + @classmethod + def _build_request_realtime(cls, station_id: str) -> List[str]: + if not cls.FILE_FORMAT: + raise ValueError( + 'Please provide a file format for this historical data request, or call a formatted child class\'s method.' 
+ ) + + station_id = station_id.upper() + return [ + f'{cls.BASE_URL}{cls.REAL_TIME_URL_PREFIX}{station_id}{cls.FILE_FORMAT}' + ] diff --git a/requests/http/_core.py b/requests/http/_core.py new file mode 100644 index 0000000..8924923 --- /dev/null +++ b/requests/http/_core.py @@ -0,0 +1,7 @@ +class CoreRequest: + + BASE_URL = 'https://www.ndbc.noaa.gov/' + + @classmethod + def build_request(cls) -> str: + return cls.BASE_URL diff --git a/requests/http/active_stations.py b/requests/http/active_stations.py new file mode 100644 index 0000000..0f87aa6 --- /dev/null +++ b/requests/http/active_stations.py @@ -0,0 +1,10 @@ +from ndbc_api.api.requests.http._core import CoreRequest + + +class ActiveStationsRequest(CoreRequest): + + STATIONS_URL = 'activestations.xml' + + @classmethod + def build_request(cls) -> str: + return f'{cls.BASE_URL}{cls.STATIONS_URL}' diff --git a/requests/http/adcp.py b/requests/http/adcp.py new file mode 100644 index 0000000..b9d062a --- /dev/null +++ b/requests/http/adcp.py @@ -0,0 +1,17 @@ +from datetime import datetime +from typing import List + +from ndbc_api.api.requests.http._base import BaseRequest + + +class AdcpRequest(BaseRequest): + + FORMAT = 'adcp' + FILE_FORMAT = '.adcp' + HISTORICAL_IDENTIFIER = 'a' + + @classmethod + def build_request(cls, station_id: str, start_time: datetime, + end_time: datetime) -> List[str]: + return super(AdcpRequest, cls).build_request(station_id, start_time, + end_time) diff --git a/requests/http/cwind.py b/requests/http/cwind.py new file mode 100644 index 0000000..dabc359 --- /dev/null +++ b/requests/http/cwind.py @@ -0,0 +1,17 @@ +from datetime import datetime +from typing import List + +from ndbc_api.api.requests.http._base import BaseRequest + + +class CwindRequest(BaseRequest): + + FORMAT = 'cwind' + FILE_FORMAT = '.cwind' + HISTORICAL_IDENTIFIER = 'c' + + @classmethod + def build_request(cls, station_id: str, start_time: datetime, + end_time: datetime) -> List[str]: + return super(CwindRequest, cls).build_request(station_id, start_time, + end_time) diff --git a/requests/http/historical_stations.py b/requests/http/historical_stations.py new file mode 100644 index 0000000..022ac3b --- /dev/null +++ b/requests/http/historical_stations.py @@ -0,0 +1,10 @@ +from ndbc_api.api.requests.http._core import CoreRequest + + +class HistoricalStationsRequest(CoreRequest): + + STATIONS_URL = 'metadata/stationmetadata.xml' + + @classmethod + def build_request(cls) -> str: + return f'{cls.BASE_URL}{cls.STATIONS_URL}' diff --git a/requests/http/ocean.py b/requests/http/ocean.py new file mode 100644 index 0000000..da485ee --- /dev/null +++ b/requests/http/ocean.py @@ -0,0 +1,17 @@ +from datetime import datetime +from typing import List + +from ndbc_api.api.requests.http._base import BaseRequest + + +class OceanRequest(BaseRequest): + + FORMAT = 'ocean' + FILE_FORMAT = '.ocean' + HISTORICAL_IDENTIFIER = 'o' + + @classmethod + def build_request(cls, station_id: str, start_time: datetime, + end_time: datetime) -> List[str]: + return super(OceanRequest, cls).build_request(station_id, start_time, + end_time) diff --git a/requests/http/spec.py b/requests/http/spec.py new file mode 100644 index 0000000..e3eccb5 --- /dev/null +++ b/requests/http/spec.py @@ -0,0 +1,16 @@ +from datetime import datetime +from typing import List + +from ndbc_api.api.requests.http._base import BaseRequest + + +class SpecRequest(BaseRequest): + + FORMAT = 'spec' + FILE_FORMAT = '.spec' + + @classmethod + def build_request(cls, station_id: str, start_time: datetime, + 
end_time: datetime) -> List[str]: + return super(SpecRequest, cls).build_request(station_id, start_time, + end_time) diff --git a/requests/http/station_historical.py b/requests/http/station_historical.py new file mode 100644 index 0000000..c15eac0 --- /dev/null +++ b/requests/http/station_historical.py @@ -0,0 +1,10 @@ +from ndbc_api.api.requests.http._core import CoreRequest + + +class HistoricalRequest(CoreRequest): + + STATION_HISTORY_PREFIX = 'station_history.php?station=' + + @classmethod + def build_request(cls, station_id: str) -> str: + return f'{cls.BASE_URL}{cls.STATION_HISTORY_PREFIX}{station_id}' diff --git a/requests/http/station_metadata.py b/requests/http/station_metadata.py new file mode 100644 index 0000000..a754f92 --- /dev/null +++ b/requests/http/station_metadata.py @@ -0,0 +1,10 @@ +from ndbc_api.api.requests.http._core import CoreRequest + + +class MetadataRequest(CoreRequest): + + STATION_PREFIX = 'station_page.php?station=' + + @classmethod + def build_request(cls, station_id: str) -> str: + return f'{cls.BASE_URL}{cls.STATION_PREFIX}{station_id}' diff --git a/requests/http/station_realtime.py b/requests/http/station_realtime.py new file mode 100644 index 0000000..a483309 --- /dev/null +++ b/requests/http/station_realtime.py @@ -0,0 +1,10 @@ +from ndbc_api.api.requests.http._core import CoreRequest + + +class RealtimeRequest(CoreRequest): + + STATION_REALTIME_PREFIX = 'station_realtime.php?station=' + + @classmethod + def build_request(cls, station_id: str) -> str: + return f'{cls.BASE_URL}{cls.STATION_REALTIME_PREFIX}{station_id}' diff --git a/requests/http/stdmet.py b/requests/http/stdmet.py new file mode 100644 index 0000000..a44df9e --- /dev/null +++ b/requests/http/stdmet.py @@ -0,0 +1,16 @@ +from datetime import datetime +from typing import List + +from ndbc_api.api.requests.http._base import BaseRequest + + +class StdmetRequest(BaseRequest): + + FORMAT = 'stdmet' + FILE_FORMAT = '.txt' + + @classmethod + def build_request(cls, station_id: str, start_time: datetime, + end_time: datetime) -> List[str]: + return super(StdmetRequest, cls).build_request(station_id, start_time, + end_time) diff --git a/requests/http/supl.py b/requests/http/supl.py new file mode 100644 index 0000000..86d1074 --- /dev/null +++ b/requests/http/supl.py @@ -0,0 +1,17 @@ +from datetime import datetime +from typing import List + +from ndbc_api.api.requests.http._base import BaseRequest + + +class SuplRequest(BaseRequest): + + FORMAT = 'supl' + FILE_FORMAT = '.supl' + HISTORICAL_IDENTIFIER = 's' + + @classmethod + def build_request(cls, station_id: str, start_time: datetime, + end_time: datetime) -> List[str]: + return super(SuplRequest, cls).build_request(station_id, start_time, + end_time) diff --git a/requests/http/swden.py b/requests/http/swden.py new file mode 100644 index 0000000..0d1d2c5 --- /dev/null +++ b/requests/http/swden.py @@ -0,0 +1,17 @@ +from datetime import datetime +from typing import List + +from ndbc_api.api.requests.http._base import BaseRequest + + +class SwdenRequest(BaseRequest): + + FORMAT = 'swden' + FILE_FORMAT = '.swden' + HISTORICAL_IDENTIFIER = 'w' + + @classmethod + def build_request(cls, station_id: str, start_time: datetime, + end_time: datetime) -> List[str]: + return super(SwdenRequest, cls).build_request(station_id, start_time, + end_time) diff --git a/requests/http/swdir.py b/requests/http/swdir.py new file mode 100644 index 0000000..720d6e6 --- /dev/null +++ b/requests/http/swdir.py @@ -0,0 +1,17 @@ +from datetime import datetime +from typing import 
List
+
+from ndbc_api.api.requests.http._base import BaseRequest
+
+
+class SwdirRequest(BaseRequest):
+
+    FORMAT = 'swdir'
+    FILE_FORMAT = '.swdir'
+    HISTORICAL_IDENTIFIER = 'd'
+
+    @classmethod
+    def build_request(cls, station_id: str, start_time: datetime,
+                      end_time: datetime) -> List[str]:
+        return super(SwdirRequest, cls).build_request(station_id, start_time,
+                                                      end_time)
diff --git a/requests/http/swdir2.py b/requests/http/swdir2.py
new file mode 100644
index 0000000..6b6fafd
--- /dev/null
+++ b/requests/http/swdir2.py
@@ -0,0 +1,17 @@
+from datetime import datetime
+from typing import List
+
+from ndbc_api.api.requests.http._base import BaseRequest
+
+
+class Swdir2Request(BaseRequest):
+
+    FORMAT = 'swdir2'
+    FILE_FORMAT = '.swdir2'
+    HISTORICAL_IDENTIFIER = 'i'
+
+    @classmethod
+    def build_request(cls, station_id: str, start_time: datetime,
+                      end_time: datetime) -> List[str]:
+        return super(Swdir2Request, cls).build_request(station_id, start_time,
+                                                       end_time)
diff --git a/requests/http/swr1.py b/requests/http/swr1.py
new file mode 100644
index 0000000..6494f6b
--- /dev/null
+++ b/requests/http/swr1.py
@@ -0,0 +1,17 @@
+from datetime import datetime
+from typing import List
+
+from ndbc_api.api.requests.http._base import BaseRequest
+
+
+class Swr1Request(BaseRequest):
+
+    FORMAT = 'swr1'
+    FILE_FORMAT = '.swr1'
+    HISTORICAL_IDENTIFIER = 'j'
+
+    @classmethod
+    def build_request(cls, station_id: str, start_time: datetime,
+                      end_time: datetime) -> List[str]:
+        return super(Swr1Request, cls).build_request(station_id, start_time,
+                                                     end_time)
diff --git a/requests/http/swr2.py b/requests/http/swr2.py
new file mode 100644
index 0000000..725a902
--- /dev/null
+++ b/requests/http/swr2.py
@@ -0,0 +1,17 @@
+from datetime import datetime
+from typing import List
+
+from ndbc_api.api.requests.http._base import BaseRequest
+
+
+class Swr2Request(BaseRequest):
+
+    FORMAT = 'swr2'
+    FILE_FORMAT = '.swr2'
+    HISTORICAL_IDENTIFIER = 'k'
+
+    @classmethod
+    def build_request(cls, station_id: str, start_time: datetime,
+                      end_time: datetime) -> List[str]:
+        return super(Swr2Request, cls).build_request(station_id, start_time,
+                                                     end_time)
diff --git a/requests/opendap/__init__.py b/requests/opendap/__init__.py
new file mode 100644
index 0000000..e69de29
[GIT binary patch hunks adding compiled bytecode under requests/opendap/__pycache__/ (*.cpython-311.pyc) omitted: the base85 blobs carry no human-readable content]
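Since the request builders and parsers are meant to work in tandem, here is a hedged sketch of the URL side, calling the http StdmetRequest defined above. The station ID and date windows are illustrative, the import path is assumed from the patch's own imports, and the exact URL lists depend on the current date (and on the MOCKDATE environment variable, which the builders read to pin 'now' in tests).

# Illustrative only: build stdmet request URLs for one buoy over two windows.
from datetime import datetime, timedelta

from ndbc_api.api.requests.http.stdmet import StdmetRequest  # assumed install path

now = datetime.now()

# A window starting within the last 44 days resolves to the single
# realtime file, e.g. https://www.ndbc.noaa.gov/data/realtime2/41001.txt
print(StdmetRequest.build_request('41001', now - timedelta(days=2), now))

# A window reaching further back walks the archive instead: one
# view_text_file.php?filename=41001h<year>.txt.gz URL per requested year,
# then monthly files and the realtime file if the window touches the present.
print(StdmetRequest.build_request('41001', datetime(now.year - 1, 1, 1), now))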
zxpVzPvhP!}?{jkCV{%}B>D|@pjiDnIrLyQ7NLvHcmPNFw)-(Fp)Xe?iIlt>}Zgy^JCO0xXfpd38C+BiwxzQQFJTJnD2hjIhoIsmZh+ROIX4 zYS}3+1eN){jTUA^3p0%tX6h}>;CbYsRLgXrlul< ls&MI`byng0LF+t(!GqRW)7y~j?7!{H!J{Da?LYZ|+HW=_G${Z8 literal 0 HcmV?d00001 diff --git a/requests/opendap/__pycache__/wlevel.cpython-311.pyc b/requests/opendap/__pycache__/wlevel.cpython-311.pyc new file mode 100644 index 0000000000000000000000000000000000000000..8d0ab762ea2be83a2a6120c57a50437935f86923 GIT binary patch literal 1237 zcmZ`&O-vI(6n?Y2-IkxG^@kXuMiMUcU=uEwpiwIVRuKqd)TL&!>14oaZJXd34Gba*H-b%nkIXSc2)->qr?3?fX?96-L+rEv*+d)d=)9?9X z1n^xM4T;pj;fMrVK!6|*@?Zf{So2U`v$O!~o}M=>1AzvkK#X-D5o+w{0K4)D#flK~ z0i>col$k;zMr@z@Zk|ePjJe#Gd&e!C(qeO?(QoWY7P_PgIOC!kA ziDnrzLUd{pgGS}eQDV{f-%U{aS;cCM3Hy zpKcF*PV{|E^nFVVd`S%Kt$ZA;Ub%6gqhtmhfzmZVZ5ebZUxHMJh+$l?^Auyzj&Z(7 z%AUfV7%!G>uO2Z+CZ_IZrbXAC?1Slvsq9c@3}bCI`U~jTBDof2EPzvx@QptWAzx{aE+DP@nrHawLA4%3U zJ(5A&yasCj5DWKePhU+}j_T$9z25gvLL-cw)CW44r`Bh}qfUlH4?Ei^YCL5I9SPH4 zF1dyIpfl0i7-CuuG2Iwqx<14-o|XSfroNM)v3C%jr`kP^T?>i6&U&P&c;L%Y)pUeV l6)sj List[str]: + + if 'MOCKDATE' in os.environ: + now = datetime.strptime(os.getenv('MOCKDATE'), '%Y-%m-%d') + else: + now = datetime.now() + is_historical = (now - start_time) >= timedelta( + days=45) # we use 45 rather than 44 for opendap data + if is_historical: + return cls._build_request_historical( + station_id=station_id, + start_time=start_time, + end_time=end_time, + now=now, + ) + return cls._build_request_realtime(station_id=station_id) + + @classmethod + def _build_request_historical( + cls, + station_id: str, + start_time: datetime, + end_time: datetime, + now: datetime, + ) -> List[str]: + + def req_hist_helper_year(req_year: int) -> str: + return f'{cls.BASE_URL}{cls.URL_PREFIX}{cls.FORMAT}/{station_id.lower()}/{station_id.lower()}{cls.HISTORICAL_IDENTIFIER}{req_year}.{cls.FILE_FORMAT}' + + if not cls.FORMAT: # pragma: no cover + raise ValueError( + 'Please provide a format for this historical data request, or call a formatted child class\'s method.' + ) + # store request urls + reqs = [] + + current_year = now.year + has_realtime = (now - end_time) <= timedelta(days=45) + + # handle year requests + for hist_year in range(int(start_time.year), + min(int(current_year), + int(end_time.year) + 1)): + reqs.append(req_hist_helper_year(hist_year)) + + if has_realtime: + reqs.append( + cls._build_request_realtime( + station_id=station_id)[0] # only one URL + ) + return reqs + + @classmethod + def _build_request_realtime(cls, station_id: str) -> List[str]: + if not cls.FILE_FORMAT: + raise ValueError( + 'Please provide a file format for this historical data request, or call a formatted child class\'s method.' 
+ ) + + station_id = station_id.upper() + # realtime data uses 9999 as the year part + return [ + f'{cls.BASE_URL}{cls.URL_PREFIX}{cls.FORMAT}/{station_id.lower()}/{station_id.lower()}{cls.HISTORICAL_IDENTIFIER}9999.{cls.FILE_FORMAT}' + ] diff --git a/requests/opendap/_core.py b/requests/opendap/_core.py new file mode 100644 index 0000000..0ff02ed --- /dev/null +++ b/requests/opendap/_core.py @@ -0,0 +1,7 @@ +class CoreRequest: + + BASE_URL = 'https://dods.ndbc.noaa.gov/thredds/' + + @classmethod + def build_request(cls) -> str: + return cls.BASE_URL diff --git a/requests/opendap/adcp.py b/requests/opendap/adcp.py new file mode 100644 index 0000000..fe4d749 --- /dev/null +++ b/requests/opendap/adcp.py @@ -0,0 +1,16 @@ +from datetime import datetime +from typing import List + +from ndbc_api.api.requests.opendap._base import BaseRequest + + +class AdcpRequest(BaseRequest): + + FORMAT = 'adcp' + HISTORICAL_IDENTIFIER = 'a' + + @classmethod + def build_request(cls, station_id: str, start_time: datetime, + end_time: datetime) -> List[str]: + return super(AdcpRequest, cls).build_request(station_id, start_time, + end_time) diff --git a/requests/opendap/cwind.py b/requests/opendap/cwind.py new file mode 100644 index 0000000..469706c --- /dev/null +++ b/requests/opendap/cwind.py @@ -0,0 +1,16 @@ +from datetime import datetime +from typing import List + +from ndbc_api.api.requests.opendap._base import BaseRequest + + +class CwindRequest(BaseRequest): + + FORMAT = 'cwind' + HISTORICAL_IDENTIFIER = 'c' + + @classmethod + def build_request(cls, station_id: str, start_time: datetime, + end_time: datetime) -> List[str]: + return super(CwindRequest, cls).build_request(station_id, start_time, + end_time) diff --git a/requests/opendap/ocean.py b/requests/opendap/ocean.py new file mode 100644 index 0000000..dde9ea1 --- /dev/null +++ b/requests/opendap/ocean.py @@ -0,0 +1,16 @@ +from datetime import datetime +from typing import List + +from ndbc_api.api.requests.opendap._base import BaseRequest + + +class OceanRequest(BaseRequest): + + FORMAT = 'ocean' + HISTORICAL_IDENTIFIER = 'o' + + @classmethod + def build_request(cls, station_id: str, start_time: datetime, + end_time: datetime) -> List[str]: + return super(OceanRequest, cls).build_request(station_id, start_time, + end_time) diff --git a/requests/opendap/pwind.py b/requests/opendap/pwind.py new file mode 100644 index 0000000..0b5be60 --- /dev/null +++ b/requests/opendap/pwind.py @@ -0,0 +1,16 @@ +from datetime import datetime +from typing import List + +from ndbc_api.api.requests.opendap._base import BaseRequest + + +class PwindRequest(BaseRequest): + + FORMAT = 'pwind' + HISTORICAL_IDENTIFIER = 'p' + + @classmethod + def build_request(cls, station_id: str, start_time: datetime, + end_time: datetime) -> List[str]: + return super(PwindRequest, cls).build_request(station_id, start_time, + end_time) diff --git a/requests/opendap/stdmet.py b/requests/opendap/stdmet.py new file mode 100644 index 0000000..da1dddc --- /dev/null +++ b/requests/opendap/stdmet.py @@ -0,0 +1,16 @@ +from datetime import datetime +from typing import List + +from ndbc_api.api.requests.opendap._base import BaseRequest + + +class StdmetRequest(BaseRequest): + + FORMAT = 'stdmet' + HISTORICAL_IDENTIFIER = 'h' + + @classmethod + def build_request(cls, station_id: str, start_time: datetime, + end_time: datetime) -> List[str]: + return super(StdmetRequest, cls).build_request(station_id, start_time, + end_time) diff --git a/requests/opendap/swden.py b/requests/opendap/swden.py new file mode 
100644 index 0000000..e1bc55a --- /dev/null +++ b/requests/opendap/swden.py @@ -0,0 +1,16 @@ +from datetime import datetime +from typing import List + +from ndbc_api.api.requests.opendap._base import BaseRequest + + +class SwdenRequest(BaseRequest): + + FORMAT = 'swden' + HISTORICAL_IDENTIFIER = 'w' + + @classmethod + def build_request(cls, station_id: str, start_time: datetime, + end_time: datetime) -> List[str]: + return super(SwdenRequest, cls).build_request(station_id, start_time, + end_time) diff --git a/requests/opendap/wlevel.py b/requests/opendap/wlevel.py new file mode 100644 index 0000000..11bd1db --- /dev/null +++ b/requests/opendap/wlevel.py @@ -0,0 +1,16 @@ +from datetime import datetime +from typing import List + +from ndbc_api.api.requests.opendap._base import BaseRequest + + +class WlevelRequest(BaseRequest): + + FORMAT = 'wlevel' + HISTORICAL_IDENTIFIER = 'l' + + @classmethod + def build_request(cls, station_id: str, start_time: datetime, + end_time: datetime) -> List[str]: + return super(WlevelRequest, cls).build_request(station_id, start_time, + end_time) diff --git a/utilities/__init__.py b/utilities/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/utilities/__pycache__/__init__.cpython-311.pyc b/utilities/__pycache__/__init__.cpython-311.pyc new file mode 100644 index 0000000000000000000000000000000000000000..b0cf4806c8e1e7ad142347ffe922f0fc090cc48e GIT binary patch literal 191 zcmZ3^%ge<81pLQ-ri19mAOZ#$p^VRLK*n^26oz01O-8?!3`I;p{%4TnuOMfun9$1)WnjE)FLQ9CNCu^IXP9j(X zUw;=~VuZdaBV3WLaxe|b7IIJ)IoQ!WoW=0gJ*}u`b&NE$fE;5TIT5Pw=m_n?(^*-= z(Qctkv^ti&W)`WiY|rA{%m>Wmu3zw|2z*miN|a9~zpGCcGgv0B2drobL7CkR(|&1P z<6sPwCjf^cfPu1_gR}Yu$^)?Vc-GiJk8w5vNp+dJ85DZ4s?$By6w8({ONziIs0KlAE1;A#9xfKLOiYoBn3y5cU3kjetTsxrbh6dU`=IL#s z8s35Z-JWk%6Cu%XsD@X1^ojVFMEP=|DxlL9(+4mQio6oy`ZmD7~TdcUmf)#2D zo_jIp4uCps>jdm_nq0yKV5lqq0u+K*EilmI5CHtsgdoWkI@hPpB{d$ zHYR5GCT1I>vyIejJvGuCFza#i0Aj*)%QzvvRiuQ-Bq7DXDSJv!5%QpHd0kJ85GSw+ zX~UH1E-^OsJVG*w{!g%>FkmLa z&MF<|0Ld7zSb#r~shMl%dyhIt6^g_7l}zl%4>ousNQKo^*s@$ysBU0n`05iJ{(Vb)Bx~) zHK>WIpanoT1mhqZOIm>G`_wLwg;X!BsoRw9Y7ZAQ-MErW&1$X{+TIPiR;Y4Qyy}6q z@EWc~VBbbtzMZJk*6HS8Q!u#)Cbwt`Ic>=>g?Xwintp11)fB0w&*yWxmf|fu7)2hR z?MpyjkQHu)C~N}BiU&qqYp9YXoVCFQ-nOKYNwUdJP{dm1O+nYP(=-GFDgn_^FCLHY zOkO#k_(9jGp174vJ-ANO)6?l(;?ASgY?991q4|uKGW5jHTGE)+Xmgy%sXwKZP))`pVjWeBb`+3duNm+pdC9WeqfJsZ(1KTN;?8vS(t48W3JR|r5sEC1n5G_^~4LpYS+71 z@ALb``@ja@MdNRyBU{mtjmSo-8Xc=e$3Pr-7d!ekCU3>$jbj^nHFmZZJB!-&-oxvC zIKSFm?;BW`QMS6T06&%ou;=rt=88J5kwyI$Fc|d=^i~7j-qw!o?vOgQoh4wB&~C5+ zVA#NHHi5yd?e5buyb3;%woh%3 zR=D+HDIjW-S%ve(7P+7Y)2bZ?B+yZ zh3mEDFG2gY{RRgpmu=i^C15`P{q}mC-^tjOXObA0PR-`7LM^nt^a#w;VI(Jz;J#)% zxV1C!ym7-nqZu9WgJb~Jrhowap#y7bF;fY};a``+&u%}x4QD{GJYMTLRh33-(r85* z#fckFZ>-%bPL=(&*hp29Ym!`%_@qB5f7CByv;1TjZa`5@N;<>_iR%yoF?>cPl zNG$=nyF@Dfe;oX{h=Zk@XXg23N?!1B-yP`i} z>!x3V%&zAdyKeu&?}-cVwu!mw$TdD=XVo`wUz}u*wReM#Y(k(|wls9S?Dw zV08{xB@5O8U|(y@fRR_Ma;BpLaKo{V1e2d%=uAA290wi!6_C$Oy!Qhq-wupz4UD}R z|LsO~;A(B)DprHF@v3;JE=teDr{Y?)*jKt<+dotlhil?+MI3$~>Mj_o%fDV;yU6-qXJxSnn>}Y$bW1Dh<}8!HP6k4@Zl^(%0)3D&Z5>zu_m+DR_xxu`JOD zESX}#rZFV!jO5cf>yG2tkOy%ViyM`Ze1QbB#M%vb39=4Z#2d?t{uhwP_OBuMy>f$m zNmu*Z9>G~OW4l?~Q)5>fCTDft@&x=tebynUs!XtJ*i zS1{;0NjT^oK@I3OhFFJpH5lzlEC|r?37NA{a3ixLUbL>4mnfbL44!dd8a&5w4bK@4 QY=25VpzPx=6YP8c0-G3qB>(^b literal 0 HcmV?d00001 diff --git a/utilities/__pycache__/req_handler.cpython-311.pyc b/utilities/__pycache__/req_handler.cpython-311.pyc new file mode 100644 index 0000000000000000000000000000000000000000..712a37c1fb8e3d64225865719fd22388988c988a GIT binary patch literal 13291 
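Taken together, the classes above reduce URL construction to string templating over the THREDDS directory layout. The sketch below mirrors build_request for the stdmet format; the URL_PREFIX ('fileServer/data/') and FILE_FORMAT ('nc') values are the ones assumed in the reconstruction of _base.py above, and station 41001 is used purely for illustration.

from datetime import datetime, timedelta

BASE_URL = 'https://dods.ndbc.noaa.gov/thredds/'  # CoreRequest.BASE_URL above


def stdmet_urls(station_id, start, end, now):
    # Mirrors BaseRequest.build_request for FORMAT='stdmet',
    # HISTORICAL_IDENTIFIER='h'; URL_PREFIX='fileServer/data/' is assumed.
    station = station_id.lower()
    # Historical data is one netCDF file per year, e.g. 41001h2022.nc.
    urls = [
        f'{BASE_URL}fileServer/data/stdmet/{station}/{station}h{year}.nc'
        for year in range(start.year, min(now.year, end.year + 1))
    ]
    # Data newer than ~45 days lives in the "realtime" file, year 9999.
    if (now - end) <= timedelta(days=45):
        urls.append(
            f'{BASE_URL}fileServer/data/stdmet/{station}/{station}h9999.nc')
    return urls


now = datetime.now()
print(stdmet_urls('41001', datetime(2022, 6, 1), now, now))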
diff --git a/utilities/opendap/dataset.py b/utilities/opendap/dataset.py
new file mode 100644
index 0000000..68d1b36
--- /dev/null
+++ b/utilities/opendap/dataset.py
@@ -0,0 +1,88 @@
+from datetime import datetime
+from typing import List, Union
+
+import xarray
+import numpy as np
+
+
+def concat_datasets(
+    datasets: List[xarray.Dataset],
+    temporal_dim_name: str = 'time',
+) -> xarray.Dataset:
+    """Joins multiple xarray datasets along their shared temporal dimension.
+
+    Handles cases where datasets might not have the same variables,
+    but requires that all datasets share the same dimensions. For
+    data stored on the THREDDS server, all datasets are expected to
+    have `time`, `latitude`, and `longitude` dimensions.
+
+    Args:
+        datasets (List[xarray.Dataset]): A list of xarray datasets
+            to join.
+        temporal_dim_name (str): The name of the temporal dimension
+            to concatenate along. Defaults to `'time'`.
+
+    Returns:
+        A xarray.Dataset object containing the joined data.
+    """
+    result = xarray.concat(datasets, dim=temporal_dim_name)
+    return result
+
+
+def merge_datasets(datasets: List[xarray.Dataset]) -> xarray.Dataset:
+    """Merges multiple xarray datasets using their shared dimensions.
+
+    Handles cases where datasets might not have the same variables,
+    but requires that all datasets share the same dimensions. For
+    data stored on the THREDDS server, all datasets are expected to
+    have `time`, `latitude`, and `longitude` dimensions.
+
+    Args:
+        datasets (List[xarray.Dataset]): A list of xarray datasets
+            to join.
+
+    Returns:
+        A xarray.Dataset object containing the merged data.
+    """
+    result = xarray.merge(datasets, compat='override')
+    return result
+
+
+def filter_dataset_by_time_range(
+    dataset: xarray.Dataset,
+    start_time: datetime,
+    end_time: datetime,
+    temporal_dim_name: str = 'time',
+) -> xarray.Dataset:
+    """
+    Filters an xarray Dataset to keep only data within a specified time range.
+
+    Args:
+        dataset: The xarray Dataset object.
+        start_time: The start of the time range (inclusive) as a datetime.
+        end_time: The end of the time range (inclusive) as a datetime.
+
+    Returns:
+        The filtered xarray Dataset with data outside the time range removed.
+    """
+    filtered_ds = dataset.sel({temporal_dim_name: slice(start_time, end_time)})
+    return filtered_ds
+
+
+def filter_dataset_by_variable(
+    dataset: xarray.Dataset,
+    cols: Union[List[str], None] = None,
+) -> xarray.Dataset:
+    """
+    Filters an xarray Dataset to keep only the variables whose names are in cols.
+
+    Args:
+        dataset: The xarray Dataset object.
+        cols: A list of variable names to keep.
+
+    Returns:
+        The filtered xarray Dataset with variables not in cols removed.
+    """
+    if cols is None:
+        return dataset
+    return dataset[cols]
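The helpers above are thin wrappers over xarray, so their behavior is easiest to see on a toy dataset. In the sketch below the variable names are invented for illustration, and each xarray call mirrors one of merge_datasets, filter_dataset_by_time_range, and filter_dataset_by_variable.

import numpy as np
import pandas as pd
import xarray as xr

# Two single-variable datasets sharing a time dimension, standing in for
# two netCDF files pulled from the THREDDS server.
times = pd.date_range('2023-01-01', periods=48, freq='h')
wspd = xr.Dataset({'wind_spd': ('time', np.random.rand(48))},
                  coords={'time': times})
pres = xr.Dataset({'air_pressure': ('time', np.random.rand(48))},
                  coords={'time': times})

merged = xr.merge([wspd, pres], compat='override')  # as in merge_datasets
# Keep the first day only, as in filter_dataset_by_time_range.
day_one = merged.sel(time=slice('2023-01-01', '2023-01-01T23:00'))
print(day_one[['wind_spd']])  # as in filter_dataset_by_variable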
+ """ + if cols is None: + return dataset + return dataset[cols] diff --git a/utilities/req_cache.py b/utilities/req_cache.py new file mode 100644 index 0000000..457a9ba --- /dev/null +++ b/utilities/req_cache.py @@ -0,0 +1,48 @@ +class RequestCache: + + class Request: + + __slots__ = 'k', 'v', 'next', 'prev' + + def __init__(self, request: str, response: dict): + self.k = request + self.v = response + self.next = self.prev = None + + def __init__(self, capacity: int) -> None: + self.capacity = capacity + self.cache = dict() + self.left = RequestCache.Request('$', '$') + self.right = RequestCache.Request('$', '$') + self.left.next = self.right + self.right.prev = self.left + + def remove(self, node: Request) -> None: + node.prev.next = node.next + node.next.prev = node.prev + + def add(self, node: Request): + node.prev = self.right.prev + node.next = self.right + self.right.prev.next = node + self.right.prev = node + + def get(self, request: str) -> dict: + if request in self.cache: + self.remove(self.cache[request]) + self.add(self.cache[request]) + return self.cache[request].v + else: # request not made before + return dict() + + def put(self, request: str, response: dict) -> None: + if request in self.cache: + self.remove(self.cache[request]) + + self.cache[request] = RequestCache.Request(request, response) + self.add(self.cache[request]) + + if len(self.cache) > self.capacity: + to_remove = self.left.next + self.remove(to_remove) + del self.cache[to_remove.k] diff --git a/utilities/req_handler.py b/utilities/req_handler.py new file mode 100644 index 0000000..6e12889 --- /dev/null +++ b/utilities/req_handler.py @@ -0,0 +1,229 @@ +"""Handles requests to the NDBC, caching responses for each station. + +This module defines the `RequestHandler`, a singleton HTTP cache which serves +to handle requests to the NDBC over HTTP and store requests and responses in a +cache. The cache is segregated by station, such that a cache limit can be +enforced on a station level. + +Example: + ```python3 + handler = RequestHandler( + cache_limit=1, + delay=2, + retries=3, + backoff_factor=0.8, + debug=True, + verify_https=True, + ) + response = handler.execute_request( + url='foo.bar' + ) + ``` + +Attributes: + stations (:obj:`list`): A list of `Station`s to which requests have + been made. +""" +import logging +from typing import List, Union, Callable + +import requests +from urllib3.util import Retry + +from .req_cache import RequestCache +from .singleton import Singleton + + +class RequestHandler(metaclass=Singleton): + """The summary line for a class docstring should fit on one line. + + If the class has public attributes, they may be documented here + in an ``Attributes`` section and follow the same formatting as a + function's ``Args`` section. Alternatively, attributes may be documented + inline with the attribute's declaration (see __init__ method below). + + Properties created with the ``@property`` decorator should be documented + in the property's getter method. + + Attributes: + cache_limit (:int:): The handler's global limit for caching + `NdbcApi` responses. This is implemented as a least-recently + used cache, designed to conserve NDBC resources when querying + measurements for a given station over similar time ranges. + logger (:obj:`logging.Logger`): The logger at which to register HTTP + request and response status codes and headers used for debug + purposes. + delay (:int:): The HTTP(s) request delay parameter, in seconds. 
diff --git a/utilities/req_handler.py b/utilities/req_handler.py
new file mode 100644
index 0000000..6e12889
--- /dev/null
+++ b/utilities/req_handler.py
@@ -0,0 +1,229 @@
+"""Handles requests to the NDBC, caching responses for each station.
+
+This module defines the `RequestHandler`, a singleton HTTP cache which serves
+to handle requests to the NDBC over HTTP and store requests and responses in a
+cache. The cache is segregated by station, such that a cache limit can be
+enforced on a station level.
+
+Example:
+    ```python3
+    handler = RequestHandler(
+        cache_limit=1,
+        log=lambda level, **kwargs: None,
+        delay=2,
+        retries=3,
+        backoff_factor=0.8,
+        debug=True,
+        verify_https=True,
+    )
+    response = handler.execute_request(
+        station_id='foo',
+        url='foo.bar',
+        headers={},
+    )
+    ```
+
+Attributes:
+    stations (:obj:`list`): A list of `Station`s to which requests have
+        been made.
+"""
+import logging
+from typing import List, Union, Callable
+
+import requests
+from urllib3.util import Retry
+
+from .req_cache import RequestCache
+from .singleton import Singleton
+
+
+class RequestHandler(metaclass=Singleton):
+    """A singleton handler for NDBC requests with per-station caching.
+
+    Attributes:
+        cache_limit (:int:): The handler's global limit for caching
+            `NdbcApi` responses. This is implemented as a least-recently
+            used cache, designed to conserve NDBC resources when querying
+            measurements for a given station over similar time ranges.
+        logger (:obj:`logging.Logger`): The logger at which to register HTTP
+            request and response status codes and headers used for debug
+            purposes.
+        delay (:int:): The HTTP(s) request delay parameter, in seconds.
+        retries (:int:): The number of times to retry a request to the NDBC data
+            service.
+        backoff_factor (:float:): The back-off parameter, used in conjunction with
+            `retries` to re-attempt requests to the NDBC data service.
+        headers (:dict:): The headers with which to execute the requests to the NDBC data
+            service.
+        debug (:bool:): A flag for verbose logging and response-level status reporting.
+            Affects the instance's `logging.Logger` and the behavior of its
+            private `RequestHandler` instance.
+        verify_https (:bool:): A flag which indicates whether to attempt requests to the
+            NDBC data service over HTTP or HTTPS.
+    """
+
+    class Station:
+        """Pairs a station identifier with the cache of its past requests.
+
+        Attributes:
+            id_ (:str:): The key for the `Station` object.
+            reqs (:obj:`ndbc_api.utilities.RequestCache`): The `RequestCache`
+                for the Station with the given `id_`, uses the cache limit of
+                its parent `RequestHandler`.
+        """
+        __slots__ = 'id_', 'reqs'
+
+        def __init__(self, station_id: str, cache_limit: int) -> None:
+            self.id_ = station_id
+            self.reqs = RequestCache(cache_limit)
+
+    def __init__(
+        self,
+        cache_limit: int,
+        log: Callable[[Union[str, int, dict]], None],
+        delay: int,
+        retries: int,
+        backoff_factor: float,
+        headers: dict = None,
+        debug: bool = True,
+        verify_https: bool = True,
+    ) -> None:
+        self._cache_limit = cache_limit
+        self._request_headers = headers or {}
+        self.log = log
+        self.stations = []
+        self._delay = delay
+        self._retries = retries
+        self._backoff_factor = backoff_factor
+        self._debug = debug
+        self._verify_https = verify_https
+        self._session = self._create_session()
+
+    def get_cache_limit(self) -> int:
+        """Return the current station-level cache limit for NDBC requests."""
+        return self._cache_limit
+
+    def set_cache_limit(self, cache_limit: int) -> None:
+        """Set a new station-level cache limit for NDBC requests."""
+        self._cache_limit = cache_limit
+
+    def get_headers(self) -> dict:
+        """Return the current headers used in NDBC data service requests."""
+        return self._request_headers
+
+    def update_headers(self, new: dict) -> None:
+        """Add new headers to future NDBC data service requests."""
+        self._request_headers.update(new)
+
+    def set_headers(self, request_headers: dict) -> None:
+        """Reset the request headers using the new supplied headers."""
+        self._request_headers = request_headers
+
+    def has_station(self, station_id: Union[str, int]) -> bool:
+        """Determine if the NDBC API already made a request to this station."""
+        for s in self.stations:
+            if s.id_ == station_id:
+                return True
+        return False
+
+    def get_station(self, station_id: Union[str, int]) -> Station:
+        """Get `RequestCache` with `id_` matching the supplied `station_id`."""
+        if isinstance(station_id, int):
+            station_id = str(station_id)
+        if not self.has_station(station_id):
+            self.log(logging.DEBUG,
+                     station_id=station_id,
+                     message=f'Adding station {station_id} to cache.')
+            self.add_station(station_id=station_id)
+        for s in self.stations:
+            if s.id_ == station_id:
+                self.log(logging.DEBUG,
+                         station_id=station_id,
+                         message=f'Found station {station_id} in cache.')
+                return s
+
+    def add_station(self, station_id: Union[str, int]) -> None:
+        """Add a new `RequestCache` for the supplied `station_id`."""
+        self.stations.append(
+            RequestHandler.Station(station_id=station_id,
+                                   cache_limit=self._cache_limit))
+
+    def handle_requests(self, station_id: Union[str, int],
+                        reqs: List[str]) -> List[dict]:  # pragma: no cover
+        """Handle many string-valued requests against a supplied station."""
+        responses = []
+        self.log(
+            logging.INFO,
+            message=f'Handling {len(reqs)} requests for station {station_id}.')
+        for req in reqs:
+            responses.append(self.handle_request(station_id=station_id,
+                                                 req=req))
+        return responses
+
+    def handle_request(self, station_id: Union[str, int], req: str) -> dict:
+        """Handle a string-valued request against a supplied station."""
+        stn = self.get_station(station_id=station_id)
+        self.log(logging.DEBUG, message=f'Handling request {req}.')
+        if req not in stn.reqs.cache:
+            self.log(logging.DEBUG, message=f'Adding request {req} to cache.')
+            resp = self.execute_request(url=req,
+                                        station_id=station_id,
+                                        headers=self._request_headers)
+            stn.reqs.put(request=req, response=resp)
+        else:
+            self.log(logging.DEBUG, message=f'Request {req} already in cache.')
+        return stn.reqs.get(request=req)
+
+    def execute_request(self, station_id: Union[str, int], url: str,
+                        headers: dict) -> dict:  # pragma: no cover
+        """Execute a request with the current headers to NDBC data service."""
+        self.log(logging.DEBUG,
+                 station_id=station_id,
+                 message=f'GET: {url}',
+                 extra_data={'headers': headers})
+        response = self._session.get(
+            url=url,
+            headers=headers,
+            allow_redirects=True,
+            verify=self._verify_https,
+        )
+        self.log(logging.DEBUG,
+                 station_id=station_id,
+                 message=f'Response status: {response.status_code}')
+        if response.status_code != 200:  # web request did not succeed
+            return dict(status=response.status_code, body='')
+        elif any([
+                'netcdf' in response.headers.get('Content-Type', '').lower(),
+                'octet' in response.headers.get('Content-Type', '').lower()
+        ]):
+            return dict(status=response.status_code, body=response.content)
+        return dict(status=response.status_code, body=response.text)
+
+    """ PRIVATE """
+
+    def _create_session(self) -> requests.Session:
+        """Create a new `Session` using `RequestHandler` configuration."""
+        self.log(logging.DEBUG, message='Creating new session.')
+        session = requests.Session()
+        retry = Retry(
+            backoff_factor=self._backoff_factor,
+            total=self._retries,
+        )
+        http_adapter = requests.adapters.HTTPAdapter(max_retries=retry)
+        session.mount('https://', http_adapter)
+        session.mount('http://', http_adapter)
+        self.log(logging.INFO, message='Created session.')
+        return session
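Because RequestHandler uses the Singleton metaclass (added just below), repeated construction returns the first instance. A minimal wiring sketch, assuming the module is importable as utilities.req_handler and substituting a bare logging callable for the one NdbcApi normally supplies:

import logging

from utilities.req_handler import RequestHandler  # import path assumed


def log(level, message=None, station_id=None, extra_data=None):
    # Stand-in with the keyword signature RequestHandler's call sites expect.
    logging.getLogger('ndbc').log(level, '%s %s', station_id or '', message or '')


handler = RequestHandler(cache_limit=2, log=log, delay=1, retries=2,
                         backoff_factor=0.5)
again = RequestHandler(cache_limit=99, log=log, delay=9, retries=9,
                       backoff_factor=9.9)
assert handler is again        # Singleton: the second call returns the first instance
handler.get_station('41001')   # lazily registers a per-station RequestCache
print(handler.has_station('41001'))  # True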
diff --git a/utilities/singleton.py b/utilities/singleton.py
new file mode 100644
index 0000000..372a6e7
--- /dev/null
+++ b/utilities/singleton.py
@@ -0,0 +1,14 @@
+"""
+A metaclass for singleton types.
+""" + + +class Singleton(type): + + _instances = {} + + def __call__(cls, *args, **kwargs): + if cls not in cls._instances: + cls._instances[cls] = super(Singleton, + cls).__call__(*args, **kwargs) + return cls._instances[cls] From 63267158154491bce95bf3bced225d953b76bb43 Mon Sep 17 00:00:00 2001 From: Jack Griffin <106121980+BluIsBest@users.noreply.github.com> Date: Sat, 3 May 2025 22:13:57 -0400 Subject: [PATCH 07/47] Create IGNORE This is painful to try and get the api to upload properly :( --- ndbc_api/IGNORE | 1 + 1 file changed, 1 insertion(+) create mode 100644 ndbc_api/IGNORE diff --git a/ndbc_api/IGNORE b/ndbc_api/IGNORE new file mode 100644 index 0000000..8b13789 --- /dev/null +++ b/ndbc_api/IGNORE @@ -0,0 +1 @@ + From d6a999c9e1123359d97d149996433d0f5d4b84a9 Mon Sep 17 00:00:00 2001 From: Jack Griffin <106121980+BluIsBest@users.noreply.github.com> Date: Sat, 3 May 2025 22:15:07 -0400 Subject: [PATCH 08/47] Delete parsers directory --- parsers/http/_base.py | 107 --------------------- parsers/http/_html.py | 21 ----- parsers/http/_station.py | 51 ---------- parsers/http/_xml.py | 28 ------ parsers/http/active_stations.py | 66 ------------- parsers/http/adcp.py | 138 ---------------------------- parsers/http/cwind.py | 17 ---- parsers/http/historical_stations.py | 75 --------------- parsers/http/ocean.py | 16 ---- parsers/http/spec.py | 17 ---- parsers/http/station_historical.py | 34 ------- parsers/http/station_metadata.py | 49 ---------- parsers/http/station_realtime.py | 29 ------ parsers/http/stdmet.py | 17 ---- parsers/http/supl.py | 17 ---- parsers/http/swden.py | 71 -------------- parsers/http/swdir.py | 71 -------------- parsers/http/swdir2.py | 72 --------------- parsers/http/swr1.py | 71 -------------- parsers/http/swr2.py | 71 -------------- 20 files changed, 1038 deletions(-) delete mode 100644 parsers/http/_base.py delete mode 100644 parsers/http/_html.py delete mode 100644 parsers/http/_station.py delete mode 100644 parsers/http/_xml.py delete mode 100644 parsers/http/active_stations.py delete mode 100644 parsers/http/adcp.py delete mode 100644 parsers/http/cwind.py delete mode 100644 parsers/http/historical_stations.py delete mode 100644 parsers/http/ocean.py delete mode 100644 parsers/http/spec.py delete mode 100644 parsers/http/station_historical.py delete mode 100644 parsers/http/station_metadata.py delete mode 100644 parsers/http/station_realtime.py delete mode 100644 parsers/http/stdmet.py delete mode 100644 parsers/http/supl.py delete mode 100644 parsers/http/swden.py delete mode 100644 parsers/http/swdir.py delete mode 100644 parsers/http/swdir2.py delete mode 100644 parsers/http/swr1.py delete mode 100644 parsers/http/swr2.py diff --git a/parsers/http/_base.py b/parsers/http/_base.py deleted file mode 100644 index 6465bf9..0000000 --- a/parsers/http/_base.py +++ /dev/null @@ -1,107 +0,0 @@ -from io import StringIO -from typing import List, Tuple - -import pandas as pd - -from ndbc_api.exceptions import ParserException - - -class BaseParser: - - HEADER_PREFIX = '#' - NAN_VALUES = ['MM'] - DATE_PARSER = '%Y %m %d %H %M' - PARSE_DATES = [0, 1, 2, 3, 4] - INDEX_COL = False - REVERT_COL_NAMES = [] - - @classmethod - def df_from_responses(cls, - responses: List[dict], - use_timestamp: bool = True) -> pd.DataFrame: - components = [] - for response in responses: - if response.get('status') == 200: - components.append( - cls._read_response(response, use_timestamp=use_timestamp)) - df = pd.concat(components) - if use_timestamp: - try: - df = 
df.reset_index().drop_duplicates(subset='timestamp', - keep='first') - df = df.set_index('timestamp').sort_index() - except KeyError as e: - raise ParserException from e - return df - - @classmethod - def _read_response(cls, response: dict, - use_timestamp: bool) -> pd.DataFrame: - body = response.get('body') - header, data = cls._parse_body(body) - names = cls._parse_header(header) - if not data: - return pd.DataFrame() - # check that parsed names match parsed values or revert - if len([v.strip() for v in data[0].strip('\n').split(' ') if v - ]) != len(names): - names = cls.REVERT_COL_NAMES - if '(' in data[0]: - data = cls._clean_data(data) - - try: - parse_dates = False - date_format = None - if use_timestamp: - parse_dates = [cls.PARSE_DATES] - date_format = cls.DATE_PARSER - df = pd.read_csv( - StringIO('\n'.join(data)), - names=names, - delim_whitespace=True, - na_values=cls.NAN_VALUES, - index_col=cls.INDEX_COL, - parse_dates=parse_dates, - date_format=date_format, - ) - if use_timestamp: - df.index.name = 'timestamp' - - except (NotImplementedError, TypeError, ValueError) as e: - print(e) - return pd.DataFrame() - - # check whether to parse dates - return df - - @staticmethod - def _parse_body(body: str) -> Tuple[List[str], List[str]]: - buf = StringIO(body) - data = [] - header = [] - - line = buf.readline() - while line: - if line.startswith('#'): - header.append(line) - else: - data.append(line) - line = buf.readline() - - return header, data - - @staticmethod - def _parse_header(header: List[str]) -> List[str]: - names = ([n for n in header[0].strip('#').strip('\n').split(' ') if n] - if isinstance(header, list) and len(header) > 0 else None) - return names # pass 'None' to pd.read_csv on error - - @staticmethod - def _clean_data(data: List[str]) -> List[str]: - vals = [ - ' '.join([v - for v in r.split(' ') - if v and '(' not in v]) - for r in data - ] - return vals or None # pass 'None' to pd.read_csv on error diff --git a/parsers/http/_html.py b/parsers/http/_html.py deleted file mode 100644 index 2159af0..0000000 --- a/parsers/http/_html.py +++ /dev/null @@ -1,21 +0,0 @@ -from typing import List - -import pandas as pd -from bs4 import BeautifulSoup - -from ndbc_api.api.parsers.http._base import BaseParser - - -class HtmlParser(BaseParser): - - INDEX_COL = None - - @classmethod - def dfs_from_responses(cls, responses: List[dict]) -> List[pd.DataFrame]: - components = [] - for response in responses: - if response.get('status') == 200: - soup = BeautifulSoup(response.get('body'), 'html.parser') - tables = soup.find_all('table') - components.extend(pd.read_html(str(tables), flavor='bs4')) - return components diff --git a/parsers/http/_station.py b/parsers/http/_station.py deleted file mode 100644 index 2d2634a..0000000 --- a/parsers/http/_station.py +++ /dev/null @@ -1,51 +0,0 @@ -import os -from calendar import month_abbr -from collections import defaultdict -from datetime import datetime -from typing import List, Tuple - -import bs4 - - -class StationParser: - - BASE_URL = 'https://www.ndbc.noaa.gov' - - @classmethod - def _parse_li_urls(cls, - urls: List[bs4.element.Tag]) -> List[Tuple[str, str]]: - parsed = [] - if 'MOCKDATE' in os.environ: - now = datetime.strptime(os.getenv('MOCKDATE'), '%Y-%m-%d').date() - else: - now = datetime.now() - current_year = now.year - for raw_url in urls: - name = raw_url.text.strip() - name = f'{name} {current_year}' if name in month_abbr else name - url = f'{cls.BASE_URL}{raw_url.get("href")}' - parsed.append((name, url)) - return parsed - - 
@classmethod - def _build_available_measurements( - cls, line_items: List[bs4.element.Tag]) -> dict: - # unpack nested lists - nested = [li for li in line_items for li in li.find_all('li')] - nested = [ - li for li in nested - if li.get('href') is not None and 'plot' not in li.get('href') - ] - line_items = [li for li in line_items if len(li.find_all('li')) == 0] - line_items.extend(nested) - available_measurements = defaultdict(dict) - for li in line_items: - if 'Search' in li.text: - break # end of available measurements - new_measurement = cls._parse_list_item(li) - if new_measurement: - k = list(new_measurement.keys())[0] # guaranteed one key - else: - continue - available_measurements[k].update(new_measurement[k]) - return dict(available_measurements) diff --git a/parsers/http/_xml.py b/parsers/http/_xml.py deleted file mode 100644 index 486b306..0000000 --- a/parsers/http/_xml.py +++ /dev/null @@ -1,28 +0,0 @@ -import re -import xml.etree.ElementTree as ET - -from ndbc_api.api.parsers.http._base import BaseParser -from ndbc_api.exceptions import ParserException - - -class XMLParser(BaseParser): - """ - Parser for XML data. - """ - - @classmethod - def root_from_response(cls, response: dict) -> ET.ElementTree: - """Parse the response body (string-valued XML) to ET - - Args: - response (dict): The successful HTTP response - """ - - body = response.get('body') - - try: - root = ET.fromstring(body) - return ET.ElementTree(root) - except Exception as e: - raise ParserException( - "failed to obtain XML root from response body") from e diff --git a/parsers/http/active_stations.py b/parsers/http/active_stations.py deleted file mode 100644 index c2ea1ef..0000000 --- a/parsers/http/active_stations.py +++ /dev/null @@ -1,66 +0,0 @@ -import xml.etree.ElementTree as ET -import pandas as pd - -from ndbc_api.exceptions import ParserException -from ndbc_api.api.parsers.http._xml import XMLParser - - -class ActiveStationsParser(XMLParser): - """ - Parser for active station information from XML data. - """ - - @classmethod - def df_from_response(cls, - response: dict, - use_timestamp: bool = False) -> pd.DataFrame: - """ - Reads the response body and parses it into a DataFrame. - - Args: - response (dict): The response dictionary containing the 'body' key. - use_timestamp (bool): Flag to indicate if the timestamp should be used as an index (not applicable here). - - Returns: - pd.DataFrame: The parsed DataFrame containing station information. 
- """ - root = super(ActiveStationsParser, cls).root_from_response(response) - try: - station_data = [] - for station in root.findall('station'): - station_info = { - 'Station': - station.get('id'), - 'Lat': - float(station.get('lat')), - 'Lon': - float(station.get('lon')), - 'Elevation': - float(station.get('elev')) - if station.get('elev') else pd.NA, - 'Name': - station.get('name'), - 'Owner': - station.get('owner'), - 'Program': - station.get('pgm'), - 'Type': - station.get('type'), - 'Includes Meteorology': - station.get('met') == 'y', - 'Includes Currents': - station.get('currents') == 'y', - 'Includes Water Quality': - station.get('waterquality') == 'y', - 'DART Program': - station.get('dart') == 'y' - } - station_data.append(station_info) - - df = pd.DataFrame( - station_data) # Create DataFrame from the extracted data - - except ET.ParseError as e: - raise ParserException(f"Error parsing XML data: {e}") from e - - return df diff --git a/parsers/http/adcp.py b/parsers/http/adcp.py deleted file mode 100644 index 478319e..0000000 --- a/parsers/http/adcp.py +++ /dev/null @@ -1,138 +0,0 @@ -from typing import List - -import pandas as pd - -from ndbc_api.api.parsers.http._base import BaseParser - - -class AdcpParser(BaseParser): - - INDEX_COL = 0 - NAN_VALUES = None - REVERT_COL_NAMES = [ - 'YY', - 'MM', - 'DD', - 'hh', - 'mm', - 'DEP01', - 'DIR01', - 'SPD01', - 'DEP02', - 'DIR02', - 'SPD02', - 'DEP03', - 'DIR03', - 'SPD03', - 'DEP04', - 'DIR04', - 'SPD04', - 'DEP05', - 'DIR05', - 'SPD05', - 'DEP06', - 'DIR06', - 'SPD06', - 'DEP07', - 'DIR07', - 'SPD07', - 'DEP08', - 'DIR08', - 'SPD08', - 'DEP09', - 'DIR09', - 'SPD09', - 'DEP10', - 'DIR10', - 'SPD10', - 'DEP11', - 'DIR11', - 'SPD11', - 'DEP12', - 'DIR12', - 'SPD12', - 'DEP13', - 'DIR13', - 'SPD13', - 'DEP14', - 'DIR14', - 'SPD14', - 'DEP15', - 'DIR15', - 'SPD15', - 'DEP16', - 'DIR16', - 'SPD16', - 'DEP17', - 'DIR17', - 'SPD17', - 'DEP18', - 'DIR18', - 'SPD18', - 'DEP19', - 'DIR19', - 'SPD19', - 'DEP20', - 'DIR20', - 'SPD20', - 'DEP21', - 'DIR21', - 'SPD21', - 'DEP22', - 'DIR22', - 'SPD22', - 'DEP23', - 'DIR23', - 'SPD23', - 'DEP24', - 'DIR24', - 'SPD24', - 'DEP25', - 'DIR25', - 'SPD25', - 'DEP26', - 'DIR26', - 'SPD26', - 'DEP27', - 'DIR27', - 'SPD27', - 'DEP28', - 'DIR28', - 'SPD28', - 'DEP29', - 'DIR29', - 'SPD29', - 'DEP30', - 'DIR30', - 'SPD30', - 'DEP31', - 'DIR31', - 'SPD31', - 'DEP32', - 'DIR32', - 'SPD32', - 'DEP33', - 'DIR33', - 'SPD33', - 'DEP34', - 'DIR34', - 'SPD34', - 'DEP35', - 'DIR35', - 'SPD35', - 'DEP36', - 'DIR36', - 'SPD36', - 'DEP37', - 'DIR37', - 'SPD37', - 'DEP38', - 'DIR38', - 'SPD38', - ] - - @classmethod - def df_from_responses(cls, responses: List[dict], - use_timestamp: bool) -> pd.DataFrame: - return super(AdcpParser, cls).df_from_responses(responses, - use_timestamp) diff --git a/parsers/http/cwind.py b/parsers/http/cwind.py deleted file mode 100644 index 48616a9..0000000 --- a/parsers/http/cwind.py +++ /dev/null @@ -1,17 +0,0 @@ -from typing import List - -import pandas as pd - -from ndbc_api.api.parsers.http._base import BaseParser - - -class CwindParser(BaseParser): - - INDEX_COL = 0 - NAN_VALUES = [99.0, 999, 9999, 9999.0, 'MM'] - - @classmethod - def df_from_responses(cls, responses: List[dict], - use_timestamp: bool) -> pd.DataFrame: - return super(CwindParser, - cls).df_from_responses(responses, use_timestamp) diff --git a/parsers/http/historical_stations.py b/parsers/http/historical_stations.py deleted file mode 100644 index 3333e43..0000000 --- a/parsers/http/historical_stations.py +++ /dev/null @@ 
-1,75 +0,0 @@ -import xml.etree.ElementTree as ET -import pandas as pd - -from ndbc_api.exceptions import ParserException -from ndbc_api.api.parsers.http._xml import XMLParser - - -class HistoricalStationsParser(XMLParser): - """ - Parser for active station information from XML data. - """ - - @classmethod - def df_from_response(cls, - response: dict, - use_timestamp: bool = False) -> pd.DataFrame: - """ - Reads the response body and parses it into a DataFrame. - - Args: - response (dict): The response dictionary containing the 'body' key. - use_timestamp (bool): Flag to indicate if the timestamp should be used as an index (not applicable here). - - Returns: - pd.DataFrame: The parsed DataFrame containing station information. - """ - root = super(HistoricalStationsParser, cls).root_from_response(response) - try: - station_data = [] - for station in root.findall('station'): - station_id = station.get('id') - station_name = station.get('name') - station_owner = station.get('owner') - station_program = station.get('pgm') - station_type = station.get('type') - - for history in station.findall('history'): - station_info = { - 'Station': - station_id, - 'Lat': - float(history.get('lat')), - 'Lon': - float(history.get('lng')), - 'Elevation': - float(history.get('elev')) - if history.get('elev') else pd.NA, - 'Name': - station_name, - 'Owner': - station_owner, - 'Program': - station_program, - 'Type': - station_type, - 'Includes Meteorology': - history.get('met') == 'y', - 'Hull Type': - history.get('hull'), - 'Anemometer Height': - float(history.get('anemom_height')) - if history.get('anemom_height') else pd.NA, - 'Start Date': - history.get('start'), - 'End Date': - history.get('stop'), - } - station_data.append(station_info) - - df = pd.DataFrame(station_data) - - except ET.ParseError as e: - raise ParserException(f"Error parsing XML data: {e}") from e - - return df diff --git a/parsers/http/ocean.py b/parsers/http/ocean.py deleted file mode 100644 index 568812f..0000000 --- a/parsers/http/ocean.py +++ /dev/null @@ -1,16 +0,0 @@ -from typing import List - -import pandas as pd - -from ndbc_api.api.parsers.http._base import BaseParser - - -class OceanParser(BaseParser): - - INDEX_COL = 0 - - @classmethod - def df_from_responses(cls, responses: List[dict], - use_timestamp: bool) -> pd.DataFrame: - return super(OceanParser, - cls).df_from_responses(responses, use_timestamp) diff --git a/parsers/http/spec.py b/parsers/http/spec.py deleted file mode 100644 index 7266c79..0000000 --- a/parsers/http/spec.py +++ /dev/null @@ -1,17 +0,0 @@ -from typing import List - -import pandas as pd - -from ndbc_api.api.parsers.http._base import BaseParser - - -class SpecParser(BaseParser): - - INDEX_COL = 0 - NAN_VALUES = ['N/A'] - - @classmethod - def df_from_responses(cls, responses: List[dict], - use_timestamp: bool) -> pd.DataFrame: - return super(SpecParser, cls).df_from_responses(responses, - use_timestamp) diff --git a/parsers/http/station_historical.py b/parsers/http/station_historical.py deleted file mode 100644 index 8c0fb29..0000000 --- a/parsers/http/station_historical.py +++ /dev/null @@ -1,34 +0,0 @@ -import re - -import bs4 - -from ndbc_api.api.parsers.http._station import StationParser - - -class HistoricalParser(StationParser): - - LIST_IDENTIFIER = re.compile( - 'Available historical data for station .{5} include:') - - @classmethod - def available_measurements(cls, response: dict) -> dict: - if response.get('status') == 200: - soup = bs4.BeautifulSoup(response.get('body'), 'html.parser') - p_tag = 
soup.find('p', text=cls.LIST_IDENTIFIER) - line_items = p_tag.find_next_siblings('ul')[0].find_all('li') - return cls._build_available_measurements(line_items=line_items) - else: - return dict() - - @classmethod - def _parse_list_item(cls, li: bs4.element.Tag) -> dict: - measurement_item = dict() - try: - title = li.find('b').text.strip(': ') - parsed = cls._parse_li_urls(li.find_all('a')) - except AttributeError: - return measurement_item - measurement_item[title] = dict() - for name, url in parsed: - measurement_item[title][name] = url - return measurement_item diff --git a/parsers/http/station_metadata.py b/parsers/http/station_metadata.py deleted file mode 100644 index 19096f8..0000000 --- a/parsers/http/station_metadata.py +++ /dev/null @@ -1,49 +0,0 @@ -from collections import ChainMap -from typing import List - -import bs4 - -from ndbc_api.api.parsers.http._station import StationParser - - -class MetadataParser(StationParser): - - @classmethod - def metadata(cls, response: dict) -> dict: - if response.get('status') == 200: - soup = bs4.BeautifulSoup(response.get('body'), 'html.parser') - metadata = cls._meta_from_respose(soup=soup) - return dict(ChainMap(*metadata)) - else: - return dict() - - @classmethod - def _meta_from_respose(cls, soup: bs4.BeautifulSoup): - metadata = [] - try: - metadata.append({'Name': soup.find('h1').text.strip()}) - items = soup.find('div', id='stn_metadata').find_all('p')[0].text - items = items.split('\n\n') - assert len(items) == 2 - except (AssertionError, AttributeError): - return metadata - metadata.extend(cls._parse_headers(items[0])) - metadata.extend(cls._parse_attrs(items[1])) - return metadata - - @classmethod - def _parse_headers(cls, line_meta): - station_headers = [] - headers = [i.strip() for i in line_meta.split('\n') if i] - station_headers.append({'Statation Type': ', '.join(headers[0:-1])}) - station_headers.append({'Location': headers[-1]}) - return station_headers - - @classmethod - def _parse_attrs(cls, line_attr: str) -> List[dict]: - station_attrs = [] - attrs = [i for i in line_attr.split('\n') if i] - for attr in attrs: - k, v = attr.split(': ') - station_attrs.append({k: v}) - return station_attrs diff --git a/parsers/http/station_realtime.py b/parsers/http/station_realtime.py deleted file mode 100644 index 46654d0..0000000 --- a/parsers/http/station_realtime.py +++ /dev/null @@ -1,29 +0,0 @@ -import bs4 - -from ndbc_api.api.parsers.http._station import StationParser - - -class RealtimeParser(StationParser): - - @classmethod - def available_measurements(cls, response: dict) -> dict: - if response.get('status') == 200: - soup = bs4.BeautifulSoup(response.get('body'), 'html.parser') - items = soup.find('section', {"class": "data"}) - line_items = items.find_all('li') - return cls._build_available_measurements(line_items=line_items) - else: - return dict() - - @classmethod - def _parse_list_item(cls, li: bs4.element.Tag) -> dict: - measurement_item = dict() - try: - title = li.text.split('\n')[0] - parsed = cls._parse_li_urls(li.find_all('a')) - except AttributeError: - return measurement_item - measurement_item[title] = dict() - for name, url in parsed: - measurement_item[title][name] = url - return measurement_item diff --git a/parsers/http/stdmet.py b/parsers/http/stdmet.py deleted file mode 100644 index f81abf4..0000000 --- a/parsers/http/stdmet.py +++ /dev/null @@ -1,17 +0,0 @@ -from typing import List - -import pandas as pd - -from ndbc_api.api.parsers.http._base import BaseParser - - -class StdmetParser(BaseParser): - - 
INDEX_COL = 0 - NAN_VALUES = ['MM', 99.0, 999, 9999, 9999.0] - - @classmethod - def df_from_responses(cls, responses: List[dict], - use_timestamp: bool) -> pd.DataFrame: - return super(StdmetParser, - cls).df_from_responses(responses, use_timestamp) diff --git a/parsers/http/supl.py b/parsers/http/supl.py deleted file mode 100644 index 096c93a..0000000 --- a/parsers/http/supl.py +++ /dev/null @@ -1,17 +0,0 @@ -from typing import List - -import pandas as pd - -from ndbc_api.api.parsers.http._base import BaseParser - - -class SuplParser(BaseParser): - - INDEX_COL = 0 - NAN_VALUES = [99.0, 999, 999.0, 9999, 9999.0, 'MM'] - - @classmethod - def df_from_responses(cls, responses: List[dict], - use_timestamp: bool) -> pd.DataFrame: - return super(SuplParser, cls).df_from_responses(responses, - use_timestamp) diff --git a/parsers/http/swden.py b/parsers/http/swden.py deleted file mode 100644 index 0514aa3..0000000 --- a/parsers/http/swden.py +++ /dev/null @@ -1,71 +0,0 @@ -from typing import List - -import pandas as pd - -from ndbc_api.api.parsers.http._base import BaseParser - - -class SwdenParser(BaseParser): - - INDEX_COL = 0 - NAN_VALUES = [99.0, 999, 999.0, 9999, 9999.0, 'MM'] - REVERT_COL_NAMES = [ - 'YY', - 'MM', - 'DD', - 'hh', - 'mm', - '.0200', - '.0325', - '.0375', - '.0425', - '.0475', - '.0525', - '.0575', - '.0625', - '.0675', - '.0725', - '.0775', - '.0825', - '.0875', - '.0925', - '.1000', - '.1100', - '.1200', - '.1300', - '.1400', - '.1500', - '.1600', - '.1700', - '.1800', - '.1900', - '.2000', - '.2100', - '.2200', - '.2300', - '.2400', - '.2500', - '.2600', - '.2700', - '.2800', - '.2900', - '.3000', - '.3100', - '.3200', - '.3300', - '.3400', - '.3500', - '.3650', - '.3850', - '.4050', - '.4250', - '.4450', - '.4650', - '.4850', - ] - - @classmethod - def df_from_responses(cls, responses: List[dict], - use_timestamp: bool) -> pd.DataFrame: - return super(SwdenParser, - cls).df_from_responses(responses, use_timestamp) diff --git a/parsers/http/swdir.py b/parsers/http/swdir.py deleted file mode 100644 index fcb54f0..0000000 --- a/parsers/http/swdir.py +++ /dev/null @@ -1,71 +0,0 @@ -from typing import List - -import pandas as pd - -from ndbc_api.api.parsers.http._base import BaseParser - - -class SwdirParser(BaseParser): - - INDEX_COL = 0 - NAN_VALUES = [99.0, 999, 999.0, 9999, 9999.0, 'MM'] - REVERT_COL_NAMES = [ - 'YY', - 'MM', - 'DD', - 'hh', - 'mm', - '.0200', - '.0325', - '.0375', - '.0425', - '.0475', - '.0525', - '.0575', - '.0625', - '.0675', - '.0725', - '.0775', - '.0825', - '.0875', - '.0925', - '.1000', - '.1100', - '.1200', - '.1300', - '.1400', - '.1500', - '.1600', - '.1700', - '.1800', - '.1900', - '.2000', - '.2100', - '.2200', - '.2300', - '.2400', - '.2500', - '.2600', - '.2700', - '.2800', - '.2900', - '.3000', - '.3100', - '.3200', - '.3300', - '.3400', - '.3500', - '.3650', - '.3850', - '.4050', - '.4250', - '.4450', - '.4650', - '.4850', - ] - - @classmethod - def df_from_responses(cls, responses: List[dict], - use_timestamp: bool) -> pd.DataFrame: - df = super(SwdirParser, cls).df_from_responses(responses, use_timestamp) - return df diff --git a/parsers/http/swdir2.py b/parsers/http/swdir2.py deleted file mode 100644 index 85a0c99..0000000 --- a/parsers/http/swdir2.py +++ /dev/null @@ -1,72 +0,0 @@ -from typing import List - -import pandas as pd - -from ndbc_api.api.parsers.http._base import BaseParser - - -class Swdir2Parser(BaseParser): - - INDEX_COL = 0 - NAN_VALUES = [99.0, 999, 999.0, 9999, 9999.0, 'MM'] - REVERT_COL_NAMES = [ - 'YY', - 'MM', - 
'DD', - 'hh', - 'mm', - '.0200', - '.0325', - '.0375', - '.0425', - '.0475', - '.0525', - '.0575', - '.0625', - '.0675', - '.0725', - '.0775', - '.0825', - '.0875', - '.0925', - '.1000', - '.1100', - '.1200', - '.1300', - '.1400', - '.1500', - '.1600', - '.1700', - '.1800', - '.1900', - '.2000', - '.2100', - '.2200', - '.2300', - '.2400', - '.2500', - '.2600', - '.2700', - '.2800', - '.2900', - '.3000', - '.3100', - '.3200', - '.3300', - '.3400', - '.3500', - '.3650', - '.3850', - '.4050', - '.4250', - '.4450', - '.4650', - '.4850', - ] - - @classmethod - def df_from_responses(cls, responses: List[dict], - use_timestamp: bool) -> pd.DataFrame: - df = super(Swdir2Parser, cls).df_from_responses(responses, - use_timestamp) - return df diff --git a/parsers/http/swr1.py b/parsers/http/swr1.py deleted file mode 100644 index 3bd4e12..0000000 --- a/parsers/http/swr1.py +++ /dev/null @@ -1,71 +0,0 @@ -from typing import List - -import pandas as pd - -from ndbc_api.api.parsers.http._base import BaseParser - - -class Swr1Parser(BaseParser): - - INDEX_COL = 0 - NAN_VALUES = [99.0, 999, 999.0, 9999, 9999.0, 'MM'] - REVERT_COL_NAMES = [ - 'YY', - 'MM', - 'DD', - 'hh', - 'mm', - '.0200', - '.0325', - '.0375', - '.0425', - '.0475', - '.0525', - '.0575', - '.0625', - '.0675', - '.0725', - '.0775', - '.0825', - '.0875', - '.0925', - '.1000', - '.1100', - '.1200', - '.1300', - '.1400', - '.1500', - '.1600', - '.1700', - '.1800', - '.1900', - '.2000', - '.2100', - '.2200', - '.2300', - '.2400', - '.2500', - '.2600', - '.2700', - '.2800', - '.2900', - '.3000', - '.3100', - '.3200', - '.3300', - '.3400', - '.3500', - '.3650', - '.3850', - '.4050', - '.4250', - '.4450', - '.4650', - '.4850', - ] - - @classmethod - def df_from_responses(cls, responses: List[dict], - use_timestamp: bool) -> pd.DataFrame: - df = super(Swr1Parser, cls).df_from_responses(responses, use_timestamp) - return df diff --git a/parsers/http/swr2.py b/parsers/http/swr2.py deleted file mode 100644 index 0b8d56b..0000000 --- a/parsers/http/swr2.py +++ /dev/null @@ -1,71 +0,0 @@ -from typing import List - -import pandas as pd - -from ndbc_api.api.parsers.http._base import BaseParser - - -class Swr2Parser(BaseParser): - - INDEX_COL = 0 - NAN_VALUES = [99.0, 999, 999.0, 9999, 9999.0, 'MM'] - REVERT_COL_NAMES = [ - 'YY', - 'MM', - 'DD', - 'hh', - 'mm', - '.0200', - '.0325', - '.0375', - '.0425', - '.0475', - '.0525', - '.0575', - '.0625', - '.0675', - '.0725', - '.0775', - '.0825', - '.0875', - '.0925', - '.1000', - '.1100', - '.1200', - '.1300', - '.1400', - '.1500', - '.1600', - '.1700', - '.1800', - '.1900', - '.2000', - '.2100', - '.2200', - '.2300', - '.2400', - '.2500', - '.2600', - '.2700', - '.2800', - '.2900', - '.3000', - '.3100', - '.3200', - '.3300', - '.3400', - '.3500', - '.3650', - '.3850', - '.4050', - '.4250', - '.4450', - '.4650', - '.4850', - ] - - @classmethod - def df_from_responses(cls, responses: List[dict], - use_timestamp: bool) -> pd.DataFrame: - df = super(Swr2Parser, cls).df_from_responses(responses, use_timestamp) - return df From fc6efd12e006910833c8582de964e3c5da8490e2 Mon Sep 17 00:00:00 2001 From: Jack Griffin <106121980+BluIsBest@users.noreply.github.com> Date: Sat, 3 May 2025 22:15:17 -0400 Subject: [PATCH 09/47] Delete utilities directory --- utilities/__init__.py | 0 .../__pycache__/__init__.cpython-311.pyc | Bin 191 -> 0 bytes .../__pycache__/log_formatter.cpython-311.pyc | Bin 1178 -> 0 bytes .../__pycache__/req_cache.cpython-311.pyc | Bin 3410 -> 0 bytes .../__pycache__/req_handler.cpython-311.pyc | Bin 
13291 -> 0 bytes
 .../__pycache__/singleton.cpython-311.pyc     | Bin  937 -> 0 bytes
 utilities/log_formatter.py                    |  16 --
 utilities/opendap/__init__.py                 |   0
 .../__pycache__/__init__.cpython-311.pyc      | Bin  199 -> 0 bytes
 .../__pycache__/dataset.cpython-311.pyc       | Bin 3713 -> 0 bytes
 utilities/opendap/dataset.py                  |  88 ------
 utilities/req_cache.py                        |  48 ----
 utilities/req_handler.py                      | 229 ------------------
 utilities/singleton.py                        |  14 --
 14 files changed, 395 deletions(-)
 delete mode 100644 utilities/__init__.py
 delete mode 100644 utilities/__pycache__/__init__.cpython-311.pyc
 delete mode 100644 utilities/__pycache__/log_formatter.cpython-311.pyc
 delete mode 100644 utilities/__pycache__/req_cache.cpython-311.pyc
 delete mode 100644 utilities/__pycache__/req_handler.cpython-311.pyc
 delete mode 100644 utilities/__pycache__/singleton.cpython-311.pyc
 delete mode 100644 utilities/log_formatter.py
 delete mode 100644 utilities/opendap/__init__.py
 delete mode 100644 utilities/opendap/__pycache__/__init__.cpython-311.pyc
 delete mode 100644 utilities/opendap/__pycache__/dataset.cpython-311.pyc
 delete mode 100644 utilities/opendap/dataset.py
 delete mode 100644 utilities/req_cache.py
 delete mode 100644 utilities/req_handler.py
 delete mode 100644 utilities/singleton.py

diff --git a/utilities/__init__.py b/utilities/__init__.py
deleted file mode 100644
index e69de29..0000000
[GIT binary patch blobs omitted: the deleted utilities/__pycache__ and utilities/opendap/__pycache__ *.cpython-311.pyc files carry no readable content.]
b/utilities/__pycache__/req_handler.cpython-311.pyc deleted file mode 100644 index 712a37c1fb8e3d64225865719fd22388988c988a..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 13291 zcmcIrU2Gdyb{_s}_#;vh^=nbm*p_X{ktru~b`vGqSdnZiiC1xBrMuO7OHrJWM1>-` zGeg^Anbtr9R0xd5D7=V@ZVDFIO%=osdgz1E0!5yReb@&|ARu-C0|tsdEZPDzl0Gy~ z{mz{qawrLnfpkQlnfrI{x%Zy)opbMu|Gu@=&*2LH%m1wQp5?fIql9^RjEAptc=(hP zxkXNNh|aWQ(Sf@w?c8uJx;EU4?hVhP$Dx;b7riKRr&~7oMILz`(VO;d_!s@`xg{Og z2rdR4oRho8iTp>L=#zqv{GYox?o0gBufuz6adU}`n3fblmVS|!6jc$_ zte~z*!u;%;Gh;$Bkz7k{D(+H%PAS^&2_AEeHMOu}m)r2aE!t$m$k}*NaLkx8-p+Zo- zZaTG*Qt_M-mL$YHgaic(l6jzT3X6&s%zQI1WQtu zJ}6n367175V%4BptlK#2cIw43TahTG6Pr^n*)wHHl~a;3HDS+QP9#6buB^mY5=qR^ z)Wz5<_Ht2L&aX}_$a%@0kI7A~Y{u7AHK#Nck6OZ*GmN1agNjLarDR@};)WL4$CQ`T zQ|DH)+1PSIK4)>!`C+2`{%IA1SVntApFLx06=7sDySzTRlujw?(&(fxEzpyYT@jXU z>a)MZ^i|g%OM}-EwLC?07HWo62x;FuC`+V9y@WFZsoQU3;5jTt&|WX=M7mAvveMm1 zw~IZxM7I7zQUJVqQQmP(IrY_t^2pKUOqLTU?>w@+U+R+hhVh2%=@qXSMceKp+YU%P z8=qGkM4g@^>kOd|%^S@FFAAvFi!}&Ied{O8Nyq%0M1H^M211_JLUvnC{MO5Hgd|FS zV+#;C$3G>bvT zlGtE!ZlD7a4m~EAAS?L{Yj92>PV%Z!=MfX8)2bwA0P{e&O`x|W++nd8vs8x7$DNc) zFe=p>fV(JRurhNQ!iXYCLOhr!#GNWgs%lb6xAk>7$z@k(}odgQe`Wv`M ztkT)#L>g;Lj)I&cAvmU(^-MhITACM^lhe7>Qr$bnzz3d)e&jZc#iezDM@cB^1zAdB z!PA>IMtKDqz`Dd>-!$gE0A%n~S_U0ua2njno7I*DCpxdhiVQAQoOR2`%t zD?BgY*YM&NGnys?!I|<}PKKO89%_)qcv;Ds1T>cwfz1WYICEw;M!CG4%OYB-%gUUm zeGMr98Bs>9UiB(DC@C~jlE(xX7|bb~5gYgz{xNMaWHS~^ zV!dR@zeGwSOBZ0$(nd}-g|tO~Ov=NOnbSWqDW;N^L}o6tQfsM=uz;DLF%O5kunBl* zBG}?${L9&FnyhbHSV<>VnWZ6ES;i_e9b=%@5dB?XVHs0qD32`VvOp7Pr>(Q;l@-7x zB!Z%*GElTkQX(KErEO4UrgctR)m6KDqfm4TqUW;+P>&77K@W}MZ zOJy*-;KxiDeZvH^gr`TXmQ4>^D7GL5evMsUpU2G}#E8yCx9CDB z;g-C#-GCczfs1Sry>OBIx7~IZ88afYM8Ng^xJpvn@55aZpYO$G5~J_cZR(PJU#^q5 zeXs6Q7w@~cP^x*98)Y2CICLGck?BG|Ky*XsO@?KK^8(_1m}K2ns40-c+6rXgNaKlI zDwbDM=@bPBF+=}Yly15mbL**R+6;s^n*AEO%5ahiqPxJQlo-cu6^N>119P~Bi_fkc z;sz)nAcnU-Wc~Glj5;+J*lKPXjpiq>hBz&r614zhukZOZF913(XXcZz9_ylK`uFwg zAfIwYN0EzLg=$01qVsd?LB6E@3%gLy+H;89P3{W^wm)(RWin4B3_|-MMM|&8ZIsgv zq7X;S(zox=Ox}7Qh^*Xt8xH3hwuac;-@37xTuaCs&@oyn<<=d-Thi zt4?-1mgwD`1mF1uHvO6l{zvn$a71e{9JUs+M+91n;c=-V_VQY=(W}TEV3+iPm=~!5 zQ=xyvL$eAMy*WpUgR;~W$JwYlt5jyGSdo#hLI_nH?Gen zkBaQPL<`58=98+>JhsLgCldAa{YM;Fps?ct^J&y4ccB-yWUN2=G$n_LoFQ_S$T=b- zAPS8ZMx|d&y_migdYWv&)cv1Rqh*kLT&`+T+c z1a7U>?ta_?)!reN8oEGrqGs;EC?!u+d!q-vWBa{hd%dwrZ;Wbg;g6|c(pr4`u62% zQDnn0gL8zCD|xsVW1C335Hr5Fb^x?AFQ9w*2P8rCu!8Rys5lLa}i#W^uyzN70(YbCdX3_BhQvlheFWvoi_A0_vn4YCQ z_XZuOrS!8g1{l*f-VzdNoB)Y*6ei+G$y8MRJq4ZR>ycD!;VOnc1{qj<^My;dM;eIc zP|T1*HP1bs2Va<%fWKodfj#74OG$bH=OAF%=O4+e}CkFKPXvly^&|p@yKnLkm{~m zP9F;KL#7ax>$eo*{Bz6aVe)y8l+T%TIuI`0e?@GeO={uIYp^N%ra%w+1s3Z=w^aQ_ z7g?8LTGo=GBZ$(MVudi^lnl_w6u$MdlQ;H$=?f<_46Ox+KT2X%0*I346+&0DdG?x0 zKlm*8X$#6+V{CrZulZAW9|nFncaRBU#6M z*Bsw43cJB`&C9j69|WWO!RU^^9E|P-Cn~{-a&V%m8<@w}SK~mM4Xmy6Q~s~`&w73} zP#um{PtNV0FQ1&#f9R>b4?VTNO+A64C!$`ix9ETMvgR^*A2v=>duE(>@nFZW zqaBgjU`odsAQa!K^~hm2>t-V{Q1DgLV-vtPsjInXJz6_~eWY+K8l4~YFy+v^iCj*~ zh%$L&`6Lkvx=as2^&9=7{0GQ51cwb8_qWvG#~{oT-funNqx*dHN$|u$@YH_r)XsW! 
ze4;Xb87SC#`WfeFeG}_J^qI@)o1wJ4xXWdG~6900M5$T6V*#V+9{;Pl#0L zuD2jHu@K5Z(cE>&XQ+qf0_RaL0~h&Sz?8%(u8ye7|0AfSnd`w?$97zdX*^PJNAnDqW5rEiqM3OnpFJPO#2 z6y_N$yvEFnBF^WxO_@-#(9SrJftr!L{D;Vwe?o<3G+`CJja;0?x$w1w4&`Sl!$~JV zY}2gqrsWm*_w+2G&|yW#R$MXake8^XUw#j_x~a=ID9LQt!y*tZ-1OCkzI$@z@~Y-S zj9{~UG5|U$%g_O)l4~-1`DZ9DH1DJAroM|R%71~tdxSa;LWBFE!JYHv(BNL^Es=4Yyb(1YZ|wTGELuK0Hi$jgiveoe6+cleSzR9WW zW=$Lk22k5F>Z3<02JOaOYS{18Iem`DFRkQlPHg@E;=I~)MlbgtH+o1wksG^?n1^HL z46TC4w;BxogM-DhKI`l#@R`6WAvM?#I15Uh7V6g!O8Jb_Z ztICNuohvPjx@FROR!Ixv6RYeU1QlWmpEamBkkt=a3+*`VU>_E;m?f`(>pN)m``;2f zLO(CXGI>RFFK5L~&6|dU1Df zFM7EWy?hY8x*xr|7e&cjxoxi6*0cRerT>M4{_*|(@!il~{~MM5HxBx*?Dt>U>%Ug% zzgBL$R`a+z{LeU7D1d>60<^EZ?)YySsowKQW%BKEg<={I{!VzLRzPt7h z8|CoqUU;?=o-OmU`lhZiup&GpeEIrMk^8(DD>@!I?a1Tc8ut_Rz{ZXoA39XaW)UJa zo~Sg2N=@Gbe&|#iLah3E$C#@g1K3Fe}?eXIke!R?&n|Y~HbdQf!_*j{b)kC*v`P{1{$Yh0|Ec27q zaQ9Y-ndYc#eqmuWD%0nkGJS3-_Y?fUX0OcT zA5w)^iA)ihCNfKeot&|-m(DU|vVNJ~qp|o@CXANp^@Lm`LX#~EM0$zP+Zz3(gw&Z- zO8Ff^k9*c%%{l1dYaEfwjxG-{`7n7#dEe9vUg!D-s@*5511D>dPS1Fa19{rjTk}xD z%Z0ih&((rw*>CU+=1u^M`6(lt-O5kTIv{WQpEQdrmeJ-su_HnV{T}-AnA& zOi<&@_5$UeX(lK-UmB*|^E<5ZdG_2uP~)Orba8i<@-H?M-s9;iNjo#Uk)63;XK@nf z>3R&7xHXed$kW$Y6UoQtY8-COL~wg%9F0{eOil3L8jo&@;NzwWI9L?A(1cRO66*jE@0u`$}cEW)ki3TyZpZ zM(M9yHIDAplV@r!_HaI?Kj`^3`V!oI%^mdglzJ;CUnKrL#}Zx9NxkW7c6{ZSU>_il zhIBK07X_ME-OK@Jb=zT<$ZV3#MhjwZnoccGu#ayTBGPftFZ9>7?28i3XZ;5cnc{NY zpagodjO<(fb^Ys%AITr1fLylnParjy!{Mko`yB2XN8}Rc2;U3QUzHmvAOCyGh3~Py pDmPfR|DJNE%l2QD6Uz2q&3oD9z?>aTw>R~YXRQ32V`{O%{V$!e{?GZGvxzesYfF$E z1bZmbgVKHgtw<646rL-1D2s=JUtmjm(4Ks=$p%FazS;TBd-LAB_uDt~K9|b^O8)cD z<^uxoT^Wg_M#|s{Dm&mn9WiQ-$2-eHgjZJt)>M5+Cg_E@; zAw$YzQFQY-2YrM$WCwaUR!|?xQzB+H2*!i~0weJOM(!bz{e%pxjN^L5g`W17yduA6 z>%lx8{&VNTI33a?)shirsq4$Y_8FI9N_IP32$eU?P|9r2qZDIw=E=~oMN@|9)wOt? zM7Qc5rBYEeO|ySmTexXGlw3$_(Ppa^;WiqsZ!N8}Wm~kCM0YyV*C5`K|JiZ|X} z{y2F!d-Yf+#kmte_%?U_QxKt9A%}PF50wZV6|%XoRo%VzW%}Y#erD_WF8i7<9ApXu z%nYTpYJ+dLIHlMq+G;ypPwCT?KIz)t&?D5X#yrE^s4I-eog-ojT~fV(yJA{Jsn*7j zR*qW;NIw}zXV3K)Bg04)BltfMxg=k;q=yrfI&FsZ4E2cPG9O?VqKcnp=7&FUTbxHz hv62f2*bG71)WnjE)FLQ9CNCu^IX7fK6rT0kj$_AhVoDNH2vbCxSOhBs6rs=}A<{IZO+X|ZY7EZCJ9aj#ciowB zAd;zk;Lrn=q6#TFa;Ru4IJ8nL_1II-j+BG7r${~ZW{LJtPJJ`$^-qGT3M%!|$$BzB zZ{F|s-ptq0Xc$3@{qkr2a0H=W#l&kcm+kq0g3uPyk*=7ifKo^iW1p!Md?{Z+O{sFO zn*Nj@)>Si*3U(vCPY)0u@vn7x?2dQ?qz6ez?`q%61@+K_K&tz1&UDiI_3j7$R9MOh zw{y}X1nE8SJ18TpM?ppe&h+b@XL^YWx`x^`RCB(zS*Prv9s@o-y1IxG@!y113B?KN z8YkQ+kc7|ie{3-B1a4S{Z7n!{u`2g9MmTf&IVluvs+k$xC}b=RHiMM#61B3vri&2r z!&rsi^GWEo5Qi9eZLK*jqq6c$c>?F2HqYMom3^R@Px}IvuGogfaG_*!qi7O*Poq@Z zz@V$t1(%p%BNaV(jvlsKm3(~?1f2Vd1V)=aa-M#^rTZqotJ+Ej87I%CFd zp`&d{+NHVc@6XIEVn*mXq2rw_!5B>L6-k!E0d7kkQpaTxkL7W^Y--%#C0&fcM3~!F z-d#B8giAEfrX>&IlBWO0M5J%JUQN!pUcHr=#+R@O8`#e6_jr?qV8d;^2JuNY7n^oV zSTO9eXPkEOY?5Jt9`L8jr^!b0hDbEkw5=qbAvq0TAgPycjtVD?j^k0!33BW?VD#3q ztCVDUU4paC{7Vh1Jd3ftvIc&}S=-{8VKpEnZ4-b3x47IOO;pewG9>7_%n8Dgti}^6 z6=i~kpcBc!grWlcV9AS5E>EX#K!{oTik7|e0X1?t!%AP<$O2vq*QhN-v-Dk}0r+TR zp0@OrY(^^@=@K_ggBygU?IN*st(b0jC0X2XVy^g%=P23-GKCUs1iA|PV+{4a@$lC6 z`D$Xi8ojU+y|5d-P?@bo`YZF#fdXZlnFy1An6bL@%(pLNm9hdEt7^XkyLm$M5YzuM zWo6X=&seucc4n2Av9FaF1rg%*#Bf436fkP&1X2WYqELjK@X;Osl@pNEFq0Y53rkFt zdW>mIZ1>9T7J>)F9{kub=m5bZ0>SC+v2OstQ#;YA-RM*Uz_cGIoUR7;oiO7X?&cL@cF)=N_)M%n5(Q~~k4`#0-tzr0q$}LPT*t+Ydm)kBq zDvLI@OPm2F_uEQI0pO}PYP!!+88MvzGmR`s1n3|Ho(_plJfPf?9?8jw5mGoQx_cgiGb>dRBr;}bu={Zshoxt+=R-O2gt z$ihzi>TdjM<)d0;pz0rR!R^o5ChK@riPVu89)t#4TdfCyNRxjD1tP=RY!Yw1h8^&tYeT}f@aA-* ze_RBty+{oojIpONJpsgX;()*mMEEEQ^;F8W*udw#wMcyP+{5>({vjF8ghGQrlTa2C zQ7VGy1o=i0a*K*8MFp-JCDSSu;CA;}MW?G#IRK=b%Ilqq3Mp|VMbo#$RFpzlPhW)I zh5RU)`WcO?7Q_JK9hs} 
diff --git a/utilities/opendap/dataset.py b/utilities/opendap/dataset.py
deleted file mode 100644
index 68d1b36..0000000
--- a/utilities/opendap/dataset.py
+++ /dev/null
@@ -1,88 +0,0 @@
-from datetime import datetime
-from typing import List, Union
-
-import xarray
-import numpy as np
-
-
-def concat_datasets(
-    datasets: List[xarray.Dataset],
-    temporal_dim_name: str = 'time',
-) -> xarray.Dataset:
-    """Joins multiple xarray datasets using their shared dimensions.
-
-    Handles cases where datasets might not have the same variables,
-    but requires that all datasets share the same dimensions. For
-    data stored on the THREDDS server, all datasets are expected to
-    have `time`, `latitude`, and `longitude` dimensions.
-
-    Args:
-        datasets (List[xarray.Dataset]): A list of xarray datasets
-            to join.
-        dimension_names (List[str]): A list of dimension names to join
-            the datasets on. Defaults to `['time', 'latitude', 'longitude']`.
-
-    Returns:
-        A xarray.Dataset object containing the joined data.
-    """
-    result = xarray.concat(datasets, dim=temporal_dim_name)
-    return result
-
-
-def merge_datasets(datasets: List[xarray.Dataset],) -> xarray.Dataset:
-    """Merges multiple xarray datasets using their shared dimensions.
-
-    Handles cases where datasets might not have the same variables,
-    but requires that all datasets share the same dimensions. For
-    data stored on the THREDDS server, all datasets are expected to
-    have `time`, `latitude`, and `longitude` dimensions.
-
-    Args:
-        datasets (List[xarray.Dataset]): A list of xarray datasets
-            to join.
-
-    Returns:
-        A xarray.Dataset object containing the merged data.
-    """
-    result = xarray.merge(datasets, compat='override')
-    return result
-
-
-def filter_dataset_by_time_range(
-    dataset: xarray.Dataset,
-    start_time: datetime,
-    end_time: datetime,
-    temporal_dim_name: str = 'time',
-) -> xarray.Dataset:
-    """
-    Filters a netCDF4 Dataset to keep only data within a specified time range.
-
-    Args:
-        dataset: The netCDF4 Dataset object.
-        start_time: The start of the time range (inclusive) as an ISO 8601 string (e.g., '2023-01-01T00:00:00Z').
-        end_time: The end of the time range (inclusive) as an ISO 8601 string.
-
-    Returns:
-        The modified netCDF4 Dataset object with data outside the time range removed.
-    """
-    filtered_ds = dataset.sel({temporal_dim_name: slice(start_time, end_time)})
-    return filtered_ds
-
-
-def filter_dataset_by_variable(
-    dataset: xarray.Dataset,
-    cols: Union[List[str], None] = None,
-) -> xarray.Dataset:
-    """
-    Filters a netCDF4 Dataset to keep only data with variables whose names are in cols.
-
-    Args:
-        dataset: The netCDF4 Dataset object.
-        cols: A list of variable names to keep.
-
-    Returns:
-        The modified netCDF4 Dataset object with data with variables not in cols removed.
- """ - if cols is None: - return dataset - return dataset[cols] diff --git a/utilities/req_cache.py b/utilities/req_cache.py deleted file mode 100644 index 457a9ba..0000000 --- a/utilities/req_cache.py +++ /dev/null @@ -1,48 +0,0 @@ -class RequestCache: - - class Request: - - __slots__ = 'k', 'v', 'next', 'prev' - - def __init__(self, request: str, response: dict): - self.k = request - self.v = response - self.next = self.prev = None - - def __init__(self, capacity: int) -> None: - self.capacity = capacity - self.cache = dict() - self.left = RequestCache.Request('$', '$') - self.right = RequestCache.Request('$', '$') - self.left.next = self.right - self.right.prev = self.left - - def remove(self, node: Request) -> None: - node.prev.next = node.next - node.next.prev = node.prev - - def add(self, node: Request): - node.prev = self.right.prev - node.next = self.right - self.right.prev.next = node - self.right.prev = node - - def get(self, request: str) -> dict: - if request in self.cache: - self.remove(self.cache[request]) - self.add(self.cache[request]) - return self.cache[request].v - else: # request not made before - return dict() - - def put(self, request: str, response: dict) -> None: - if request in self.cache: - self.remove(self.cache[request]) - - self.cache[request] = RequestCache.Request(request, response) - self.add(self.cache[request]) - - if len(self.cache) > self.capacity: - to_remove = self.left.next - self.remove(to_remove) - del self.cache[to_remove.k] diff --git a/utilities/req_handler.py b/utilities/req_handler.py deleted file mode 100644 index 6e12889..0000000 --- a/utilities/req_handler.py +++ /dev/null @@ -1,229 +0,0 @@ -"""Handles requests to the NDBC, caching responses for each station. - -This module defines the `RequestHandler`, a singleton HTTP cache which serves -to handle requests to the NDBC over HTTP and store requests and responses in a -cache. The cache is segregated by station, such that a cache limit can be -enforced on a station level. - -Example: - ```python3 - handler = RequestHandler( - cache_limit=1, - delay=2, - retries=3, - backoff_factor=0.8, - debug=True, - verify_https=True, - ) - response = handler.execute_request( - url='foo.bar' - ) - ``` - -Attributes: - stations (:obj:`list`): A list of `Station`s to which requests have - been made. -""" -import logging -from typing import List, Union, Callable - -import requests -from urllib3.util import Retry - -from .req_cache import RequestCache -from .singleton import Singleton - - -class RequestHandler(metaclass=Singleton): - """The summary line for a class docstring should fit on one line. - - If the class has public attributes, they may be documented here - in an ``Attributes`` section and follow the same formatting as a - function's ``Args`` section. Alternatively, attributes may be documented - inline with the attribute's declaration (see __init__ method below). - - Properties created with the ``@property`` decorator should be documented - in the property's getter method. - - Attributes: - cache_limit (:int:): The handler's global limit for caching - `NdbcApi` responses. This is implemented as a least-recently - used cache, designed to conserve NDBC resources when querying - measurements for a given station over similar time ranges. - logger (:obj:`logging.Logger`): The logger at which to register HTTP - request and response status codes and headers used for debug - purposes. - delay (:int:): The HTTP(s) request delay parameter, in seconds. 
- retries (:int:): = The number of times to retry a request to the NDBC data - service. - backoff_factor (:float:): The back-off parameter, used in conjunction with - `retries` to re-attempt requests to the NDBC data service. - headers (:dict:): The headers with which to execute the requests to the NDBC data - service. - debug (:bool:): A flag for verbose logging and response-level status reporting. - Affects the instance's `logging.Logger` and the behavior of its - private `RequestHandler` instance. - verify_https (:bool:): A flag which indicates whether to attempt requests to the - NDBC data service over HTTP or HTTPS. - """ - - class Station: - """The summary line for a class docstring should fit on one line. - - If the class has public attributes, they may be documented here - in an ``Attributes`` section and follow the same formatting as a - function's ``Args`` section. Alternatively, attributes may be documented - inline with the attribute's declaration (see __init__ method below). - - Properties created with the ``@property`` decorator should be documented - in the property's getter method. - - Attributes: - id_ (:str:): The key for the `Station` object. - reqs (:obj:`ndbc_api.utilities.RequestCache`): The `RequestCache` - for the Station with the given `id_`, uses the cache limit of - its parent `RequestHandler`. - """ - __slots__ = 'id_', 'reqs' - - def __init__(self, station_id: str, cache_limit: int) -> None: - self.id_ = station_id - self.reqs = RequestCache(cache_limit) - - def __init__( - self, - cache_limit: int, - log: Callable[[Union[str, int, dict]], None], - delay: int, - retries: int, - backoff_factor: float, - headers: dict = None, - debug: bool = True, - verify_https: bool = True, - ) -> None: - self._cache_limit = cache_limit - self._request_headers = headers or {} - self.log = log - self.stations = [] - self._delay = delay - self._retries = retries - self._backoff_factor = backoff_factor - self._debug = debug - self._verify_https = verify_https - self._session = self._create_session() - - def get_cache_limit(self) -> int: - """Return the current station-level cache limit for NDBC requests.""" - return self._cache_limit - - def set_cache_limit(self, cache_limit: int) -> None: - """Set a new station-level cache limit for NDBC requests.""" - self._cache_limit = cache_limit - - def get_headers(self) -> dict: - """Add new headers to future NDBC data service requests.""" - return self._request_headers - - def update_headers(self, new: dict) -> None: - """Add new headers to future NDBC data service requests.""" - self._request_headers.update(new) - - def set_headers(self, request_headers: dict) -> None: - """Reset the request headers using the new supplied headers.""" - self._request_headers = request_headers - - def has_station(self, station_id: Union[str, int]) -> bool: - """Determine if the NDBC API already made a request to this station.""" - for s in self.stations: - if s.id_ == station_id: - return True - return False - - def get_station(self, station_id: Union[str, int]) -> Station: - """Get `RequestCache` with `id_` matching the supplied `station_id`.""" - if isinstance(station_id, int): - station_id = str(station_id) - if not self.has_station(station_id): - self.log(logging.DEBUG, - station_id=station_id, - message=f'Adding station {station_id} to cache.') - self.add_station(station_id=station_id) - for s in self.stations: - if s.id_ == station_id: - self.log(logging.DEBUG, - station_id=station_id, - message=f'Found station {station_id} in cache.') - return s - - 
def add_station(self, station_id: Union[str, int]) -> None: - """Add new new `RequestCache` for the supplied `station_id`.""" - self.stations.append( - RequestHandler.Station(station_id=station_id, - cache_limit=self._cache_limit)) - - def handle_requests(self, station_id: Union[str, int], - reqs: List[str]) -> List[str]: # pragma: no cover - """Handle many string-valued requests against a supplied station.""" - responses = [] - self.log( - logging.INFO, - message=f'Handling {len(reqs)} requests for station {station_id}.') - for req in reqs: - responses.append(self.handle_request(station_id=station_id, - req=req)) - return responses - - def handle_request(self, station_id: Union[str, int], req: str) -> dict: - """Handle a string-valued requests against a supplied station.""" - stn = self.get_station(station_id=station_id) - self.log(logging.DEBUG, message=f'Handling request {req}.') - if req not in stn.reqs.cache: - self.log(logging.DEBUG, message=f'Adding request {req} to cache.') - resp = self.execute_request(url=req, - station_id=station_id, - headers=self._request_headers) - stn.reqs.put(request=req, response=resp) - else: - self.log(logging.DEBUG, message=f'Request {req} already in cache.') - return stn.reqs.get(request=req) - - def execute_request(self, station_id: Union[str, int], url: str, - headers: dict) -> dict: # pragma: no cover - """Execute a request with the current headers to NDBC data service.""" - self.log(logging.DEBUG, - station_id=station_id, - message=f'GET: {url}', - extra_data={'headers': headers}) - response = self._session.get( - url=url, - headers=headers, - allow_redirects=True, - verify=self._verify_https, - ) - self.log(logging.DEBUG, - station_id=station_id, - message=f'Response status: {response.status_code}') - if response.status_code != 200: # web request did not succeed - return dict(status=response.status_code, body='') - elif any([ - 'netcdf' in response.headers.get('Content-Type').lower(), - 'octet' in response.headers.get('Content-Type').lower() - ]): - return dict(status=response.status_code, body=response.content) - return dict(status=response.status_code, body=response.text) - - """ PRIVATE """ - - def _create_session(self) -> requests.Session: - """create a new `Session` using `RequestHandler` configuration.""" - self.log(logging.DEBUG, message='Creating new session.') - session = requests.Session() - retry = Retry( - backoff_factor=self._backoff_factor, - total=self._retries, - ) - http_adapter = requests.adapters.HTTPAdapter(max_retries=retry) - session.mount('https://', http_adapter) - session.mount('http://', http_adapter) - self.log(logging.INFO, message='Created session.') - return session diff --git a/utilities/singleton.py b/utilities/singleton.py deleted file mode 100644 index 372a6e7..0000000 --- a/utilities/singleton.py +++ /dev/null @@ -1,14 +0,0 @@ -""" -A metaclass for singleton types. 
-""" - - -class Singleton(type): - - _instances = {} - - def __call__(cls, *args, **kwargs): - if cls not in cls._instances: - cls._instances[cls] = super(Singleton, - cls).__call__(*args, **kwargs) - return cls._instances[cls] From d640312dbe182e30352492e63e4d9ad6ceaf4d9e Mon Sep 17 00:00:00 2001 From: Jack Griffin <106121980+BluIsBest@users.noreply.github.com> Date: Sat, 3 May 2025 22:15:25 -0400 Subject: [PATCH 10/47] Delete requests directory --- requests/__init__.py | 0 requests/__pycache__/__init__.cpython-311.pyc | Bin 194 -> 0 bytes requests/http/__init__.py | 0 .../http/__pycache__/__init__.cpython-311.pyc | Bin 199 -> 0 bytes .../http/__pycache__/_base.cpython-311.pyc | Bin 6498 -> 0 bytes .../http/__pycache__/_core.cpython-311.pyc | Bin 701 -> 0 bytes .../active_stations.cpython-311.pyc | Bin 862 -> 0 bytes .../http/__pycache__/adcp.cpython-311.pyc | Bin 1225 -> 0 bytes .../http/__pycache__/cwind.cpython-311.pyc | Bin 1257 -> 0 bytes .../historical_stations.cpython-311.pyc | Bin 884 -> 0 bytes .../http/__pycache__/ocean.cpython-311.pyc | Bin 1257 -> 0 bytes .../http/__pycache__/spec.cpython-311.pyc | Bin 1189 -> 0 bytes .../station_historical.cpython-311.pyc | Bin 934 -> 0 bytes .../station_metadata.cpython-311.pyc | Bin 916 -> 0 bytes .../station_realtime.cpython-311.pyc | Bin 930 -> 0 bytes .../http/__pycache__/stdmet.cpython-311.pyc | Bin 1223 -> 0 bytes .../http/__pycache__/supl.cpython-311.pyc | Bin 1225 -> 0 bytes .../http/__pycache__/swden.cpython-311.pyc | Bin 1257 -> 0 bytes .../http/__pycache__/swdir.cpython-311.pyc | Bin 1257 -> 0 bytes .../http/__pycache__/swdir2.cpython-311.pyc | Bin 1262 -> 0 bytes .../http/__pycache__/swr1.cpython-311.pyc | Bin 1225 -> 0 bytes .../http/__pycache__/swr2.cpython-311.pyc | Bin 1225 -> 0 bytes requests/http/_base.py | 105 ------------------ requests/http/_core.py | 7 -- requests/http/active_stations.py | 10 -- requests/http/adcp.py | 17 --- requests/http/cwind.py | 17 --- requests/http/historical_stations.py | 10 -- requests/http/ocean.py | 17 --- requests/http/spec.py | 16 --- requests/http/station_historical.py | 10 -- requests/http/station_metadata.py | 10 -- requests/http/station_realtime.py | 10 -- requests/http/stdmet.py | 16 --- requests/http/supl.py | 17 --- requests/http/swden.py | 17 --- requests/http/swdir.py | 17 --- requests/http/swdir2.py | 17 --- requests/http/swr1.py | 17 --- requests/http/swr2.py | 17 --- requests/opendap/__init__.py | 0 .../__pycache__/__init__.cpython-311.pyc | Bin 202 -> 0 bytes .../opendap/__pycache__/_base.cpython-311.pyc | Bin 4449 -> 0 bytes .../opendap/__pycache__/_core.cpython-311.pyc | Bin 713 -> 0 bytes .../opendap/__pycache__/adcp.cpython-311.pyc | Bin 1202 -> 0 bytes .../opendap/__pycache__/cwind.cpython-311.pyc | Bin 1233 -> 0 bytes .../opendap/__pycache__/ocean.cpython-311.pyc | Bin 1233 -> 0 bytes .../opendap/__pycache__/pwind.cpython-311.pyc | Bin 1233 -> 0 bytes .../__pycache__/stdmet.cpython-311.pyc | Bin 1237 -> 0 bytes .../opendap/__pycache__/swden.cpython-311.pyc | Bin 1233 -> 0 bytes .../__pycache__/wlevel.cpython-311.pyc | Bin 1237 -> 0 bytes requests/opendap/_base.py | 82 -------------- requests/opendap/_core.py | 7 -- requests/opendap/adcp.py | 16 --- requests/opendap/cwind.py | 16 --- requests/opendap/ocean.py | 16 --- requests/opendap/pwind.py | 16 --- requests/opendap/stdmet.py | 16 --- requests/opendap/swden.py | 16 --- requests/opendap/wlevel.py | 16 --- 60 files changed, 548 deletions(-) delete mode 100644 requests/__init__.py delete mode 100644 
requests/__pycache__/__init__.cpython-311.pyc delete mode 100644 requests/http/__init__.py delete mode 100644 requests/http/__pycache__/__init__.cpython-311.pyc delete mode 100644 requests/http/__pycache__/_base.cpython-311.pyc delete mode 100644 requests/http/__pycache__/_core.cpython-311.pyc delete mode 100644 requests/http/__pycache__/active_stations.cpython-311.pyc delete mode 100644 requests/http/__pycache__/adcp.cpython-311.pyc delete mode 100644 requests/http/__pycache__/cwind.cpython-311.pyc delete mode 100644 requests/http/__pycache__/historical_stations.cpython-311.pyc delete mode 100644 requests/http/__pycache__/ocean.cpython-311.pyc delete mode 100644 requests/http/__pycache__/spec.cpython-311.pyc delete mode 100644 requests/http/__pycache__/station_historical.cpython-311.pyc delete mode 100644 requests/http/__pycache__/station_metadata.cpython-311.pyc delete mode 100644 requests/http/__pycache__/station_realtime.cpython-311.pyc delete mode 100644 requests/http/__pycache__/stdmet.cpython-311.pyc delete mode 100644 requests/http/__pycache__/supl.cpython-311.pyc delete mode 100644 requests/http/__pycache__/swden.cpython-311.pyc delete mode 100644 requests/http/__pycache__/swdir.cpython-311.pyc delete mode 100644 requests/http/__pycache__/swdir2.cpython-311.pyc delete mode 100644 requests/http/__pycache__/swr1.cpython-311.pyc delete mode 100644 requests/http/__pycache__/swr2.cpython-311.pyc delete mode 100644 requests/http/_base.py delete mode 100644 requests/http/_core.py delete mode 100644 requests/http/active_stations.py delete mode 100644 requests/http/adcp.py delete mode 100644 requests/http/cwind.py delete mode 100644 requests/http/historical_stations.py delete mode 100644 requests/http/ocean.py delete mode 100644 requests/http/spec.py delete mode 100644 requests/http/station_historical.py delete mode 100644 requests/http/station_metadata.py delete mode 100644 requests/http/station_realtime.py delete mode 100644 requests/http/stdmet.py delete mode 100644 requests/http/supl.py delete mode 100644 requests/http/swden.py delete mode 100644 requests/http/swdir.py delete mode 100644 requests/http/swdir2.py delete mode 100644 requests/http/swr1.py delete mode 100644 requests/http/swr2.py delete mode 100644 requests/opendap/__init__.py delete mode 100644 requests/opendap/__pycache__/__init__.cpython-311.pyc delete mode 100644 requests/opendap/__pycache__/_base.cpython-311.pyc delete mode 100644 requests/opendap/__pycache__/_core.cpython-311.pyc delete mode 100644 requests/opendap/__pycache__/adcp.cpython-311.pyc delete mode 100644 requests/opendap/__pycache__/cwind.cpython-311.pyc delete mode 100644 requests/opendap/__pycache__/ocean.cpython-311.pyc delete mode 100644 requests/opendap/__pycache__/pwind.cpython-311.pyc delete mode 100644 requests/opendap/__pycache__/stdmet.cpython-311.pyc delete mode 100644 requests/opendap/__pycache__/swden.cpython-311.pyc delete mode 100644 requests/opendap/__pycache__/wlevel.cpython-311.pyc delete mode 100644 requests/opendap/_base.py delete mode 100644 requests/opendap/_core.py delete mode 100644 requests/opendap/adcp.py delete mode 100644 requests/opendap/cwind.py delete mode 100644 requests/opendap/ocean.py delete mode 100644 requests/opendap/pwind.py delete mode 100644 requests/opendap/stdmet.py delete mode 100644 requests/opendap/swden.py delete mode 100644 requests/opendap/wlevel.py diff --git a/requests/__init__.py b/requests/__init__.py deleted file mode 100644 index e69de29..0000000 diff --git 
a/requests/__pycache__/__init__.cpython-311.pyc b/requests/__pycache__/__init__.cpython-311.pyc
deleted file mode 100644
index b82388adf88e773b7aeecf9bb85c77a4a2d2cab5..0000000000000000000000000000000000000000
[GIT binary patch omitted: compiled bytecode]
diff --git a/requests/http/__init__.py b/requests/http/__init__.py
deleted file mode 100644
index e69de29..0000000
[GIT binary patches omitted: the requests/http/__pycache__/*.cpython-311.pyc files listed above]
diff --git a/requests/http/_base.py b/requests/http/_base.py
deleted file mode 100644
index 517653c..0000000
--- a/requests/http/_base.py
+++ /dev/null
@@ -1,105 +0,0 @@
-import os
-from calendar import month_abbr
-from datetime import datetime, timedelta
-from typing import List
-
-from ndbc_api.api.requests.http._core import CoreRequest
-
-
-class BaseRequest(CoreRequest):
-
-    REAL_TIME_URL_PREFIX = 'data/realtime2/'
-    HISTORICAL_FILE_EXTENSION_SUFFIX = '.txt.gz'
-    HISTORICAL_DATA_PREFIX = '&dir=data/'
-    HISTORICAL_URL_PREFIX = 'view_text_file.php?filename='
-    HISTORICAL_SUFFIX = 'historical/'
-    HISTORICAL_IDENTIFIER = 'h'
-    FORMAT = ''
-    FILE_FORMAT = ''
-
-    @classmethod
-    def build_request(cls, station_id: str, start_time: datetime,
-                      end_time: datetime) -> List[str]:
-
-        if 'MOCKDATE' in os.environ:
-            now = datetime.strptime(os.getenv('MOCKDATE'), '%Y-%m-%d')
-        else:
-            now = datetime.now()
-        is_historical = (now - start_time) >= timedelta(days=44)
-        if is_historical:
-            return cls._build_request_historical(
-                station_id=station_id,
-                start_time=start_time,
-                end_time=end_time,
-                now=now,
-            )
-        return cls._build_request_realtime(station_id=station_id)
-
-    @classmethod
-    def _build_request_historical(
-        cls,
-        station_id: str,
-        start_time: datetime,
-        end_time: datetime,
-        now: datetime,
-    ) -> List[str]:
-
-        def
req_hist_helper_year(req_year: int) -> str: - return f'{cls.BASE_URL}{cls.HISTORICAL_URL_PREFIX}{station_id}{cls.HISTORICAL_IDENTIFIER}{req_year}{cls.HISTORICAL_FILE_EXTENSION_SUFFIX}{cls.HISTORICAL_DATA_PREFIX}{cls.HISTORICAL_SUFFIX}{cls.FORMAT}/' - - def req_hist_helper_month(req_year: int, req_month: int) -> str: - month = month_abbr[req_month] - month = month.capitalize() - return f'{cls.BASE_URL}{cls.HISTORICAL_URL_PREFIX}{station_id}{req_month}{req_year}{cls.HISTORICAL_FILE_EXTENSION_SUFFIX}{cls.HISTORICAL_DATA_PREFIX}{cls.FORMAT}/{month}/' - - def req_hist_helper_month_current(current_month: int) -> str: - month = month_abbr[current_month] - month = month.capitalize() - return f'{cls.BASE_URL}data/{cls.FORMAT}/{month}/{station_id.lower()}.txt' - - if not cls.FORMAT: # pragma: no cover - raise ValueError( - 'Please provide a format for this historical data request, or call a formatted child class\'s method.' - ) - # store request urls - reqs = [] - - current_year = now.year - has_realtime = (now - end_time) < timedelta(days=44) - months_req_year = (now - timedelta(days=44)).year - last_avail_month = (now - timedelta(days=44)).month - - # handle year requests - for hist_year in range(int(start_time.year), - min(int(current_year), - int(end_time.year) + 1)): - reqs.append(req_hist_helper_year(hist_year)) - - # handle month requests - if end_time.year == months_req_year: - for hist_month in range( - int(start_time.month), - min(int(end_time.month), int(last_avail_month)) + 1): - reqs.append(req_hist_helper_month(months_req_year, hist_month)) - if int(last_avail_month) <= (end_time.month): - reqs.append(req_hist_helper_month_current( - int(last_avail_month))) - - if has_realtime: - reqs.append( - cls._build_request_realtime( - station_id=station_id)[0] # only one URL - ) - return reqs - - @classmethod - def _build_request_realtime(cls, station_id: str) -> List[str]: - if not cls.FILE_FORMAT: - raise ValueError( - 'Please provide a file format for this historical data request, or call a formatted child class\'s method.' 
- ) - - station_id = station_id.upper() - return [ - f'{cls.BASE_URL}{cls.REAL_TIME_URL_PREFIX}{station_id}{cls.FILE_FORMAT}' - ] diff --git a/requests/http/_core.py b/requests/http/_core.py deleted file mode 100644 index 8924923..0000000 --- a/requests/http/_core.py +++ /dev/null @@ -1,7 +0,0 @@ -class CoreRequest: - - BASE_URL = 'https://www.ndbc.noaa.gov/' - - @classmethod - def build_request(cls) -> str: - return cls.BASE_URL diff --git a/requests/http/active_stations.py b/requests/http/active_stations.py deleted file mode 100644 index 0f87aa6..0000000 --- a/requests/http/active_stations.py +++ /dev/null @@ -1,10 +0,0 @@ -from ndbc_api.api.requests.http._core import CoreRequest - - -class ActiveStationsRequest(CoreRequest): - - STATIONS_URL = 'activestations.xml' - - @classmethod - def build_request(cls) -> str: - return f'{cls.BASE_URL}{cls.STATIONS_URL}' diff --git a/requests/http/adcp.py b/requests/http/adcp.py deleted file mode 100644 index b9d062a..0000000 --- a/requests/http/adcp.py +++ /dev/null @@ -1,17 +0,0 @@ -from datetime import datetime -from typing import List - -from ndbc_api.api.requests.http._base import BaseRequest - - -class AdcpRequest(BaseRequest): - - FORMAT = 'adcp' - FILE_FORMAT = '.adcp' - HISTORICAL_IDENTIFIER = 'a' - - @classmethod - def build_request(cls, station_id: str, start_time: datetime, - end_time: datetime) -> List[str]: - return super(AdcpRequest, cls).build_request(station_id, start_time, - end_time) diff --git a/requests/http/cwind.py b/requests/http/cwind.py deleted file mode 100644 index dabc359..0000000 --- a/requests/http/cwind.py +++ /dev/null @@ -1,17 +0,0 @@ -from datetime import datetime -from typing import List - -from ndbc_api.api.requests.http._base import BaseRequest - - -class CwindRequest(BaseRequest): - - FORMAT = 'cwind' - FILE_FORMAT = '.cwind' - HISTORICAL_IDENTIFIER = 'c' - - @classmethod - def build_request(cls, station_id: str, start_time: datetime, - end_time: datetime) -> List[str]: - return super(CwindRequest, cls).build_request(station_id, start_time, - end_time) diff --git a/requests/http/historical_stations.py b/requests/http/historical_stations.py deleted file mode 100644 index 022ac3b..0000000 --- a/requests/http/historical_stations.py +++ /dev/null @@ -1,10 +0,0 @@ -from ndbc_api.api.requests.http._core import CoreRequest - - -class HistoricalStationsRequest(CoreRequest): - - STATIONS_URL = 'metadata/stationmetadata.xml' - - @classmethod - def build_request(cls) -> str: - return f'{cls.BASE_URL}{cls.STATIONS_URL}' diff --git a/requests/http/ocean.py b/requests/http/ocean.py deleted file mode 100644 index da485ee..0000000 --- a/requests/http/ocean.py +++ /dev/null @@ -1,17 +0,0 @@ -from datetime import datetime -from typing import List - -from ndbc_api.api.requests.http._base import BaseRequest - - -class OceanRequest(BaseRequest): - - FORMAT = 'ocean' - FILE_FORMAT = '.ocean' - HISTORICAL_IDENTIFIER = 'o' - - @classmethod - def build_request(cls, station_id: str, start_time: datetime, - end_time: datetime) -> List[str]: - return super(OceanRequest, cls).build_request(station_id, start_time, - end_time) diff --git a/requests/http/spec.py b/requests/http/spec.py deleted file mode 100644 index e3eccb5..0000000 --- a/requests/http/spec.py +++ /dev/null @@ -1,16 +0,0 @@ -from datetime import datetime -from typing import List - -from ndbc_api.api.requests.http._base import BaseRequest - - -class SpecRequest(BaseRequest): - - FORMAT = 'spec' - FILE_FORMAT = '.spec' - - @classmethod - def build_request(cls, station_id: str, 
start_time: datetime, - end_time: datetime) -> List[str]: - return super(SpecRequest, cls).build_request(station_id, start_time, - end_time) diff --git a/requests/http/station_historical.py b/requests/http/station_historical.py deleted file mode 100644 index c15eac0..0000000 --- a/requests/http/station_historical.py +++ /dev/null @@ -1,10 +0,0 @@ -from ndbc_api.api.requests.http._core import CoreRequest - - -class HistoricalRequest(CoreRequest): - - STATION_HISTORY_PREFIX = 'station_history.php?station=' - - @classmethod - def build_request(cls, station_id: str) -> str: - return f'{cls.BASE_URL}{cls.STATION_HISTORY_PREFIX}{station_id}' diff --git a/requests/http/station_metadata.py b/requests/http/station_metadata.py deleted file mode 100644 index a754f92..0000000 --- a/requests/http/station_metadata.py +++ /dev/null @@ -1,10 +0,0 @@ -from ndbc_api.api.requests.http._core import CoreRequest - - -class MetadataRequest(CoreRequest): - - STATION_PREFIX = 'station_page.php?station=' - - @classmethod - def build_request(cls, station_id: str) -> str: - return f'{cls.BASE_URL}{cls.STATION_PREFIX}{station_id}' diff --git a/requests/http/station_realtime.py b/requests/http/station_realtime.py deleted file mode 100644 index a483309..0000000 --- a/requests/http/station_realtime.py +++ /dev/null @@ -1,10 +0,0 @@ -from ndbc_api.api.requests.http._core import CoreRequest - - -class RealtimeRequest(CoreRequest): - - STATION_REALTIME_PREFIX = 'station_realtime.php?station=' - - @classmethod - def build_request(cls, station_id: str) -> str: - return f'{cls.BASE_URL}{cls.STATION_REALTIME_PREFIX}{station_id}' diff --git a/requests/http/stdmet.py b/requests/http/stdmet.py deleted file mode 100644 index a44df9e..0000000 --- a/requests/http/stdmet.py +++ /dev/null @@ -1,16 +0,0 @@ -from datetime import datetime -from typing import List - -from ndbc_api.api.requests.http._base import BaseRequest - - -class StdmetRequest(BaseRequest): - - FORMAT = 'stdmet' - FILE_FORMAT = '.txt' - - @classmethod - def build_request(cls, station_id: str, start_time: datetime, - end_time: datetime) -> List[str]: - return super(StdmetRequest, cls).build_request(station_id, start_time, - end_time) diff --git a/requests/http/supl.py b/requests/http/supl.py deleted file mode 100644 index 86d1074..0000000 --- a/requests/http/supl.py +++ /dev/null @@ -1,17 +0,0 @@ -from datetime import datetime -from typing import List - -from ndbc_api.api.requests.http._base import BaseRequest - - -class SuplRequest(BaseRequest): - - FORMAT = 'supl' - FILE_FORMAT = '.supl' - HISTORICAL_IDENTIFIER = 's' - - @classmethod - def build_request(cls, station_id: str, start_time: datetime, - end_time: datetime) -> List[str]: - return super(SuplRequest, cls).build_request(station_id, start_time, - end_time) diff --git a/requests/http/swden.py b/requests/http/swden.py deleted file mode 100644 index 0d1d2c5..0000000 --- a/requests/http/swden.py +++ /dev/null @@ -1,17 +0,0 @@ -from datetime import datetime -from typing import List - -from ndbc_api.api.requests.http._base import BaseRequest - - -class SwdenRequest(BaseRequest): - - FORMAT = 'swden' - FILE_FORMAT = '.swden' - HISTORICAL_IDENTIFIER = 'w' - - @classmethod - def build_request(cls, station_id: str, start_time: datetime, - end_time: datetime) -> List[str]: - return super(SwdenRequest, cls).build_request(station_id, start_time, - end_time) diff --git a/requests/http/swdir.py b/requests/http/swdir.py deleted file mode 100644 index 720d6e6..0000000 --- a/requests/http/swdir.py +++ /dev/null @@ -1,17 +0,0 @@ 
-from datetime import datetime -from typing import List - -from ndbc_api.api.requests.http._base import BaseRequest - - -class SwdirRequest(BaseRequest): - - FORMAT = 'swdir' - FILE_FORMAT = '.swdir' - HISTORICAL_IDENTIFIER = 'd' - - @classmethod - def build_request(cls, station_id: str, start_time: datetime, - end_time: datetime) -> List[str]: - return super(SwdirRequest, cls).build_request(station_id, start_time, - end_time) diff --git a/requests/http/swdir2.py b/requests/http/swdir2.py deleted file mode 100644 index 6b6fafd..0000000 --- a/requests/http/swdir2.py +++ /dev/null @@ -1,17 +0,0 @@ -from datetime import datetime -from typing import List - -from ndbc_api.api.requests.http._base import BaseRequest - - -class Swdir2Request(BaseRequest): - - FORMAT = 'swdir2' - FILE_FORMAT = '.swdir2' - HISTORICAL_IDENTIFIER = 'i' - - @classmethod - def build_request(cls, station_id: str, start_time: datetime, - end_time: datetime) -> List[str]: - return super(Swdir2Request, cls).build_request(station_id, start_time, - end_time) diff --git a/requests/http/swr1.py b/requests/http/swr1.py deleted file mode 100644 index 6494f6b..0000000 --- a/requests/http/swr1.py +++ /dev/null @@ -1,17 +0,0 @@ -from datetime import datetime -from typing import List - -from ndbc_api.api.requests.http._base import BaseRequest - - -class Swr1Request(BaseRequest): - - FORMAT = 'swr1' - FILE_FORMAT = '.swr1' - HISTORICAL_IDENTIFIER = 'j' - - @classmethod - def build_request(cls, station_id: str, start_time: datetime, - end_time: datetime) -> List[str]: - return super(Swr1Request, cls).build_request(station_id, start_time, - end_time) diff --git a/requests/http/swr2.py b/requests/http/swr2.py deleted file mode 100644 index 725a902..0000000 --- a/requests/http/swr2.py +++ /dev/null @@ -1,17 +0,0 @@ -from datetime import datetime -from typing import List - -from ndbc_api.api.requests.http._base import BaseRequest - - -class Swr2Request(BaseRequest): - - FORMAT = 'swr2' - FILE_FORMAT = '.swr2' - HISTORICAL_IDENTIFIER = 'k' - - @classmethod - def build_request(cls, station_id: str, start_time: datetime, - end_time: datetime) -> List[str]: - return super(Swr2Request, cls).build_request(station_id, start_time, - end_time) diff --git a/requests/opendap/__init__.py b/requests/opendap/__init__.py deleted file mode 100644 index e69de29..0000000 diff --git a/requests/opendap/__pycache__/__init__.cpython-311.pyc b/requests/opendap/__pycache__/__init__.cpython-311.pyc deleted file mode 100644 index cde1875501ec6ae97cc3cb5fa3128207eabc0cd0..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 202 zcmZ3^%ge<81pLQ-ri19mAOZ#$p^VRLK*n^26oz01O-8?!3`I;p{%4TnuUKcRn9$1)WnjE)FLQ9CNCu^IX2YQ*_{IB4ib?_#ZZXUbc~8q zAu3LXXo|F%7!zkhtj@DBOWYc=Qiw)pkZ8GqM5|t2xNgr8Pz zqM%4hG%l%jLPaU22!2Xs$D^_WwR13ur77w2Wl2`TMICwtJJ!qY@1eSbBov}V6rx2c z#E5i=g~uW?A*)m;vXV`-NOstZU96KFlFeJ3iqO1GwB2AILi>r?7jo9jvA!d&as&7g)iqI!_C7>3FHe`Djdx_kmS}J{75SBCI-OMNp#21RoW3 z9xJ?VFq@PR4bF-sWf>>Jta%EAtiThq?{-o86;euta1r{9auwmjO6j_xYtg(a#T)~@ zre=|P2>nk?nMgmRp$(5wR@g*yl|EB?|g=aV=Z7Gi39jR;FZjA-#&RVEyhYj&aQ zRNYazLWN8mfYZzC(BQ!AMOnh~?3fU~at24|=c9?)bJxQW0mskbFjo*JsEua-^nk`4( zN*zQDqel$!!D10DwVr%MkIfZZWn5Lpa+)sdtd0Stp`;a}(u{C~(secCDrL;uw25^o zP*;r()vY1)BW}f>w!S$=X_U64?bRz+w@X@B&=s*;61=Vm`g*S>BaZZeJ7lWnRre0y`}0$oj)VS&Z z!k?QO9vS_CY8{!J`Yd=swZxLwB&@c6e{}l7A)^NO%8(d18A}2(%AW+@ zq=bO9{2L&zR#BnNdpo8f)G|k}4L+DKxMzdE9WywC{;6Xtcz!c@J{O$M2d66~*_q2* zGxM7>^SPNwekNjwvWxMT2wkCuXmjWYJyCAJS|B)2U!Y;wdHQm##63r24z`yHpNmO= 
zoZJ#lUX6+pCvfvg92XQ)aS9xlD<7F7Zvk8pdV4t-00xMeQxr+$!VyRyTsS7k@*$av zOG+du1~7qI>;dwE_#JK~g!r7_p)%2gqB4+_@L}js9Wz2~SsKPTiB(p2Zap(uu#i}g zR3;uxs8(T#U_RcjD=1%WISCpmt(WLyVYjmC3NK?UB^3RBRaZoiA;%dBOttIxPnMF& z`{(7$Mvme*-9>N*^rgDLlSg#HP&o)Ed@SEdn>74I_yzhhOzW3I?c(9W5eVF%sxIyK7 z{W}cBoq*KEoq+W^PQrQ}C&?!Ib&?s?tjOK;^RquYo88w7|3{Z{`##F=`zYu7IPdy6 ztN*v1?HXbs*|{3Yzyc()#-7c_opp_&3a4C98Q0fAMF zqqSt5x@(3_R06w_t0AGg`wGY%z>qSYLEa*gVyS%}{E|vjYn7Tx18eYXg#w+X9)eQU zm{o~ZA`Lpp>&%|A2P41pNLbIzzJQNR8;NHOX&I^0`SNKPecJY>R7?uusTvW@J^QX7CddMXSXel z8r^6)O|8iqBK3yNs0-X-jUgR{qwhcStzR-4hRmQl46-)e_@Oaf>w<f1CY;atd3p9$acYHrd(C1NTi9Yd6^)M0TvYt|4{8!a2* zKX`SvOw1`TpxkSig%|>SxD6`9ge-8XRk^+tO)Tj4RDGq-ivWCHkd#io@Z~`9^CG~9 z;kzG`nAa_fKP4PT2zilsZ98dP>nHd$G)a<^GeF>zfT9Y>zN-HU=y3K;{{m{yR{yur biEQ;>vyM>|oa@c>7d<<=ljA6nkaKL4}G?RaLFJ7)z(?-K9=(Z1>Ivwd#-| zBYV2Cw5s|)n3+9A;t!+>3!Qiuy9o%Mb??6SzVr9qySr*OmjJEx?bp#)+<%zh-WoNS zt|1tK1zR9s2!dQ%05`ZQ3qV-=(6;$~!t8b(Onn3+kPr|7fh9;_%Z9L|)3t;>Y+NFH zQy&EgT+u)!r(E7lmE4mjh1B`k^C-_Vz251FRA@gDhulw6#{8r7Qzws<6he1OSIN9k z2`6(g0E0~*(-m|hIEUjoiB-r>o0Q(0*ZXhj!QM7vo5xz4f7b4T&Gqm=OQpm2jDOrx zy>7RcguACaVyeHZ(qqYU9e$Q9kEE*O&~!#w)(g#2Rr5MbGnojMg_Pq+es)?m4~t$b zXno-9$seQoe^Cy$qRs0JVBC8A<@~$%bZQ3JX+J1EN)y(Xl$J}B_ES;B20xPI`%RFXU_hlZXqO^6c)I&^PgEGR(GtOMm%$eEs2D-ug8@FBz-i@7=iA9`e7kmP^ zE!?dQ?&|Qvj9g?ZX`_laT&bG}SX8|8UR5n`s8u9pWnUtg*o2UY$WdLT$9&b$*1_E3YVeRcrRKaj)?ZA(L6&g92~;HF&J@7 zW;hm)F_YWO;&Hq=&TO9Gc5my1`NSH`6Bb|X+Q7Zd*1prR22Y67iM$djImEnj<8o!W z=qTcQBhYg6H}%aNTJ0ONo~4x zQU|48;7UoU)HM5WtQ^b?U(DZ=T*&+_*Lyf7{P}smn4eto7F|)86r~5;3uXQ>cf&<4 z8h*aW<~-__{k(n}m&$ypjMBK}yjCzU|vtQXpzp``=|9nD!5>Y9yp?;n&@E~!yF6~v(hpw(%meABpLP!-(Znw`W f9NTW6J8)^ceb(#_f}Q-=JU_duclZAjiU|Dzb8azC diff --git a/requests/opendap/__pycache__/cwind.cpython-311.pyc b/requests/opendap/__pycache__/cwind.cpython-311.pyc deleted file mode 100644 index ceed0276fda689e691bc3e7dd7843922019b320e..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 1233 zcmZ`&O=uHA6n?XRNz;Z@ZIOy%!GlIHAW{!jq_ws+Qme!Wx-JacovGcr`RUAt8tI`( zL62Uf*Pbd;4?TMHJVbU%)|C`}6U%;?v~Cxs zr!0Xyk>a5n`js;FNrYL5RoNm*%Opk?qhXDeJ)?6lfZ|T|zJEum=V%>5$!=Rht&IRy zU{hLK9nVGw{mzkDXhmDmyBU>LquH!3OkS%}#u74Lq~~gWKro9r%$h>?0?rhDtXHW% zmDGc=7r2~b%vG7gQdWQ36BnG@oHFiQbG--C%%7k4%g)WZx8SnUO;%Z?UdWwg>V^xH zwQQ$M<~;0HeMdd5OSx02N^0Dy6H%C})>*&2<Bjj>dphbLLtjAYCZL`%v@c(RY_CXSTy{$oW0AtRR3WuM z;XaI)YHrYqSW`C&*T;)u@M`{6@kSv(F+PLyS0-nR`KkP5LG(tPlxVn6AwuUNV`_d( z4K->&q+7IkGzc)xB6d)QxEkviJEXGI1R|vXe=L>kr~TVa53Ei#jvn7JhIY;Vrly-? 
zXoELF?XQ#Z-WWdB)Rm)g=Ir)~S9fD0j{U9obg&Mr-HVU{a_6T#W5$5om{6{kNl|+rbg!nww?$6}KnCMGvSel9lUYDw=BZL}o jY^QrQ;P6iO+J(`b?$xwvwk3_A>%y?znYOE&pU!O1N)JT} zdh{Z__EeF2@aWN#=O}_mLuNbz0K!b50#tIOV8tXd1hJ0eNO=3+$ zF7Z{FIV6(A4Qc3CsKmxipNDcEyzFvXq6-bmL$C8*v_&;av|X0!sjP*9jR@EpLAFjb z+n^@VsYMK$kT)lYMU&Jz)H`7|`3zw^PsH5wZtlT}hFi^r&0y+%C+T zvIMG$0Uo-cU#nuDM3{wGl`WFAN@8R&8a7zfGkOODDDG76+jpdTfmR`u>~1hbgKoGElK;7rlS29-*y zq!h+p;Bt;JS7i=MS^i~TIq%%&lyT>V>pd7}{_Lz@b*2`*d6!kDSnWRbLhd}IZa7a_ z$9Ae@#=~yichu9llsmP$q{gi~5rz5sA{&yoJU;w4zxlp8RaJaMsvpbn&4Qr=FPybA zAF?MtWlwz0p8c3T`{w?;Q>}BOTRIvlpdC=U4JcJW+wvvI4T>bjRkuPh76TYpYNQb; zoW^*e;Rc&WID8YGzc)xA+}eBxEgDL?NeE50+CXHKaooI)BSB`_AigMjvQ+mhc?Zjwx*i} zw8q<@_P5D+Zw;Sp>&nqOJ@V%G>$|ZL$9C&29jrYo_u`{=hhq=h-z{n)XGI+eD_pGm z)w!rMalAXiyc}V^JHmWtgn2w8|B+06B~fFqAU;pE`zv`dCVG?&OH=W{7o}?J2%#1n jZT7Dg9BlTlO}NnPUu`Re)MWpypI_LDB0K+yN2UD+%m*{I diff --git a/requests/opendap/__pycache__/pwind.cpython-311.pyc b/requests/opendap/__pycache__/pwind.cpython-311.pyc deleted file mode 100644 index 190bd1d7462d08e6179d1a1bf0207ec2be844747..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 1233 zcmZ`&O=uHA6n?XRNt1@uKSWV1^iU%h5UB?hsXwhov@K}_T^EM!&eU$*{5Z2gBRv!; z*rS)yYflxa2ag^-d5$6&=2YS90u8PLF;;+>)Y#Mkw&W9wZ4zr1 za*5B%%ps8^Zb(DFOeHpM_&k(*`jX3Okv^zX9(tYkqAjXXqTP~IPh~9>Y(&7;2(ope z*#^D^s5!@lL)gAtFlFsR!EF2M#DO*ct-DF0L7i^efPFhYiJ!p$!=Fdt&0Gb zU{hLKJ0?rhDEUna_ zN*ch}3tY}I=BmtLDa$|Y%V(WioHFiQcfI>pnLj`8SDcxKx8Sn!4680uFXYZc>V^xH zb!?|X<~;1yd`CT zewQ8nkRAP)9ebZ0d$suHMC;6j9UTpgqc5Oz8_>Ww+LbRsE-jK6SKKniSfns6S4llk zco5?UbvNiltjX!(&5I>*;96m}G+iuQzIX!{u1rjo3X_G2qDV)alxet7B|_&RV`_d( z4K->&WID8YGzc)xA+}eBxEkv?+o!VB1R|vXe|dU29XZxC4sDx5ZA~}F z(FSjW+FvK*y)}Hitt&_Ct#cR2R2{oSIba#qxlu);>o zuiT3|6C>Rb=H&?U-4W(HBh2GD`Hy7kD~TF=0r7dN-Ji+xG0_*;urw79ydhOvM+mjx jXtRH{;9#?VZNs@{|7u$UNKN+N`q{aiDDve$@u;+4D77>u diff --git a/requests/opendap/__pycache__/stdmet.cpython-311.pyc b/requests/opendap/__pycache__/stdmet.cpython-311.pyc deleted file mode 100644 index ed34afb00dbdb1a1347cf080131434e0a9a27365..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 1237 zcmZ`&O-vI(6n?Y2-IhYr`a=v+BMBFJun8AT(5Mvws|W-z(WPdx>BeqX{w?HK}#$E2qz4MOE=`>xgQ0_a8_kt~`QKF++sb0%kC|HPqr4eN5 zM6(PUAv!gQL8J2KC^2b_nmw%(7GfK)fJk(`Ye)KaTYjr!170F4F6)Yk#Eef0)Nfob zOdgm5Er~eyZQm^wu}cEXe5?u=FW}#JWRd451){Q38x`^;fnKBlY`64l2aXo@r$YMGobjRaN5y(1~ z>QYG^7(1TLImTR-*(+u3WNvsU|A_J2+Ge@hO2Ne=FpD>yLG6eR2ChB(se)`S#%^{f^?^dVO+Ee6k`#`xKJV$ zPvI_%mn*i{h?t|3(+{(=qWfNMW_EHqH=G^Exw|71v$@gS$h7DTSdm}BQi%wi`;4gx zGBwtq1CeMD=TOhXIE~m@8RBZNx7j(BrDhN)1^6qeWIxT{L85zYw07yrzR`0KN!2wy zl11CR4r>1p3-?-Ye_dCO+SP%*zV}Z;BaEHahdP*N*5|^bPKQGeJKrp7B5ej83Dd8Z z-Qr@d7-9y`%YP+P-$~HeI|$EH?ViMLghbzDz0y=X@TydG9U)YM k%lqxC1{e0**8yDLZ(nt@1F6aWTfcd57(|Z$6Aw!J1DPc@MF0Q* diff --git a/requests/opendap/__pycache__/swden.cpython-311.pyc b/requests/opendap/__pycache__/swden.cpython-311.pyc deleted file mode 100644 index eac5181690c54f82738bd350f5d36527ca980744..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 1233 zcmZ`&O-vI(6n?Y2-IhYrst|+GNWuYYViRKUAchnXum*tuCc17iS$3wdxcy;f8^mxR zhD45D2v<%Rg9nZtJ$XiBVzVa_Pu@yUqnw=CZL1ABoqhAY_hx6l_r2|_L?RAiB;Ws9 z_<;bvNT)F(b#Oc;z%~#d$b|wJfFxF2R8R~hz^bbjG($t6z&H?X14x8wyDGq*c*0>s zh&~JH=vV2cksl+LM?I%N1=c1U?uoViw#DfTU9M2>+4c8=CEzH~@tiO(1Q!YhB48*4 z87ff>jYfz{b)wOzxH(F68l!r5^MrhC6Y_{eH#&AjY_AzN+BV@SGU9?)%ZufmG9);t(MYA0(yekjZq+6GUk+ZdkG zKmcp7E4;mCWT%P#>=C=rnzE+0Dk@l`$+Rk{R?3vIsMzl(<|~d%Fbg$IM|{%D8#YvY(7IC!cqU=Jcw)XtBaHD=ksm= 
zxpVzPvhP!}?{jkCV{%}B>D|@pjiDnIrLyQ7NLvHcmPNFw)-(Fp)Xe?iIlt>}Zgy^JCO0xXfpd38C+BiwxzQQFJTJnD2hjIhoIsmZh+ROIX4 zYS}3+1eN){jTUA^3p0%tX6h}>;CbYsRLgXrlul< ls&MI`byng0LF+t(!GqRW)7y~j?7!{H!J{Da?LYZ|+HW=_G${Z8 diff --git a/requests/opendap/__pycache__/wlevel.cpython-311.pyc b/requests/opendap/__pycache__/wlevel.cpython-311.pyc deleted file mode 100644 index 8d0ab762ea2be83a2a6120c57a50437935f86923..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 1237 zcmZ`&O-vI(6n?Y2-IkxG^@kXuMiMUcU=uEwpiwIVRuKqd)TL&!>14oaZJXd34Gba*H-b%nkIXSc2)->qr?3?fX?96-L+rEv*+d)d=)9?9X z1n^xM4T;pj;fMrVK!6|*@?Zf{So2U`v$O!~o}M=>1AzvkK#X-D5o+w{0K4)D#flK~ z0i>col$k;zMr@z@Zk|ePjJe#Gd&e!C(qeO?(QoWY7P_PgIOC!kA ziDnrzLUd{pgGS}eQDV{f-%U{aS;cCM3Hy zpKcF*PV{|E^nFVVd`S%Kt$ZA;Ub%6gqhtmhfzmZVZ5ebZUxHMJh+$l?^Auyzj&Z(7 z%AUfV7%!G>uO2Z+CZ_IZrbXAC?1Slvsq9c@3}bCI`U~jTBDof2EPzvx@QptWAzx{aE+DP@nrHawLA4%3U zJ(5A&yasCj5DWKePhU+}j_T$9z25gvLL-cw)CW44r`Bh}qfUlH4?Ei^YCL5I9SPH4 zF1dyIpfl0i7-CuuG2Iwqx<14-o|XSfroNM)v3C%jr`kP^T?>i6&U&P&c;L%Y)pUeV l6)sj List[str]: - - if 'MOCKDATE' in os.environ: - now = datetime.strptime(os.getenv('MOCKDATE'), '%Y-%m-%d') - else: - now = datetime.now() - is_historical = (now - start_time) >= timedelta( - days=45) # we use 45 rather than 44 for opendap data - if is_historical: - return cls._build_request_historical( - station_id=station_id, - start_time=start_time, - end_time=end_time, - now=now, - ) - return cls._build_request_realtime(station_id=station_id) - - @classmethod - def _build_request_historical( - cls, - station_id: str, - start_time: datetime, - end_time: datetime, - now: datetime, - ) -> List[str]: - - def req_hist_helper_year(req_year: int) -> str: - return f'{cls.BASE_URL}{cls.URL_PREFIX}{cls.FORMAT}/{station_id.lower()}/{station_id.lower()}{cls.HISTORICAL_IDENTIFIER}{req_year}.{cls.FILE_FORMAT}' - - if not cls.FORMAT: # pragma: no cover - raise ValueError( - 'Please provide a format for this historical data request, or call a formatted child class\'s method.' - ) - # store request urls - reqs = [] - - current_year = now.year - has_realtime = (now - end_time) <= timedelta(days=45) - - # handle year requests - for hist_year in range(int(start_time.year), - min(int(current_year), - int(end_time.year) + 1)): - reqs.append(req_hist_helper_year(hist_year)) - - if has_realtime: - reqs.append( - cls._build_request_realtime( - station_id=station_id)[0] # only one URL - ) - return reqs - - @classmethod - def _build_request_realtime(cls, station_id: str) -> List[str]: - if not cls.FILE_FORMAT: - raise ValueError( - 'Please provide a file format for this historical data request, or call a formatted child class\'s method.' 
-            )
-
-        station_id = station_id.upper()
-        # realtime data uses 9999 as the year part
-        return [
-            f'{cls.BASE_URL}{cls.URL_PREFIX}{cls.FORMAT}/{station_id.lower()}/{station_id.lower()}{cls.HISTORICAL_IDENTIFIER}9999.{cls.FILE_FORMAT}'
-        ]
diff --git a/requests/opendap/_core.py b/requests/opendap/_core.py
deleted file mode 100644
index 0ff02ed..0000000
--- a/requests/opendap/_core.py
+++ /dev/null
@@ -1,7 +0,0 @@
-class CoreRequest:
-
-    BASE_URL = 'https://dods.ndbc.noaa.gov/thredds/'
-
-    @classmethod
-    def build_request(cls) -> str:
-        return cls.BASE_URL
diff --git a/requests/opendap/adcp.py b/requests/opendap/adcp.py
deleted file mode 100644
index fe4d749..0000000
--- a/requests/opendap/adcp.py
+++ /dev/null
@@ -1,16 +0,0 @@
-from datetime import datetime
-from typing import List
-
-from ndbc_api.api.requests.opendap._base import BaseRequest
-
-
-class AdcpRequest(BaseRequest):
-
-    FORMAT = 'adcp'
-    HISTORICAL_IDENTIFIER = 'a'
-
-    @classmethod
-    def build_request(cls, station_id: str, start_time: datetime,
-                      end_time: datetime) -> List[str]:
-        return super(AdcpRequest, cls).build_request(station_id, start_time,
-                                                     end_time)
diff --git a/requests/opendap/cwind.py b/requests/opendap/cwind.py
deleted file mode 100644
index 469706c..0000000
--- a/requests/opendap/cwind.py
+++ /dev/null
@@ -1,16 +0,0 @@
-from datetime import datetime
-from typing import List
-
-from ndbc_api.api.requests.opendap._base import BaseRequest
-
-
-class CwindRequest(BaseRequest):
-
-    FORMAT = 'cwind'
-    HISTORICAL_IDENTIFIER = 'c'
-
-    @classmethod
-    def build_request(cls, station_id: str, start_time: datetime,
-                      end_time: datetime) -> List[str]:
-        return super(CwindRequest, cls).build_request(station_id, start_time,
-                                                      end_time)
diff --git a/requests/opendap/ocean.py b/requests/opendap/ocean.py
deleted file mode 100644
index dde9ea1..0000000
--- a/requests/opendap/ocean.py
+++ /dev/null
@@ -1,16 +0,0 @@
-from datetime import datetime
-from typing import List
-
-from ndbc_api.api.requests.opendap._base import BaseRequest
-
-
-class OceanRequest(BaseRequest):
-
-    FORMAT = 'ocean'
-    HISTORICAL_IDENTIFIER = 'o'
-
-    @classmethod
-    def build_request(cls, station_id: str, start_time: datetime,
-                      end_time: datetime) -> List[str]:
-        return super(OceanRequest, cls).build_request(station_id, start_time,
-                                                      end_time)
diff --git a/requests/opendap/pwind.py b/requests/opendap/pwind.py
deleted file mode 100644
index 0b5be60..0000000
--- a/requests/opendap/pwind.py
+++ /dev/null
@@ -1,16 +0,0 @@
-from datetime import datetime
-from typing import List
-
-from ndbc_api.api.requests.opendap._base import BaseRequest
-
-
-class PwindRequest(BaseRequest):
-
-    FORMAT = 'pwind'
-    HISTORICAL_IDENTIFIER = 'p'
-
-    @classmethod
-    def build_request(cls, station_id: str, start_time: datetime,
-                      end_time: datetime) -> List[str]:
-        return super(PwindRequest, cls).build_request(station_id, start_time,
-                                                      end_time)
diff --git a/requests/opendap/stdmet.py b/requests/opendap/stdmet.py
deleted file mode 100644
index da1dddc..0000000
--- a/requests/opendap/stdmet.py
+++ /dev/null
@@ -1,16 +0,0 @@
-from datetime import datetime
-from typing import List
-
-from ndbc_api.api.requests.opendap._base import BaseRequest
-
-
-class StdmetRequest(BaseRequest):
-
-    FORMAT = 'stdmet'
-    HISTORICAL_IDENTIFIER = 'h'
-
-    @classmethod
-    def build_request(cls, station_id: str, start_time: datetime,
-                      end_time: datetime) -> List[str]:
-        return super(StdmetRequest, cls).build_request(station_id, start_time,
-                                                       end_time)
diff --git a/requests/opendap/swden.py
b/requests/opendap/swden.py
deleted file mode 100644
index e1bc55a..0000000
--- a/requests/opendap/swden.py
+++ /dev/null
@@ -1,16 +0,0 @@
-from datetime import datetime
-from typing import List
-
-from ndbc_api.api.requests.opendap._base import BaseRequest
-
-
-class SwdenRequest(BaseRequest):
-
-    FORMAT = 'swden'
-    HISTORICAL_IDENTIFIER = 'w'
-
-    @classmethod
-    def build_request(cls, station_id: str, start_time: datetime,
-                      end_time: datetime) -> List[str]:
-        return super(SwdenRequest, cls).build_request(station_id, start_time,
-                                                      end_time)
diff --git a/requests/opendap/wlevel.py b/requests/opendap/wlevel.py
deleted file mode 100644
index 11bd1db..0000000
--- a/requests/opendap/wlevel.py
+++ /dev/null
@@ -1,16 +0,0 @@
-from datetime import datetime
-from typing import List
-
-from ndbc_api.api.requests.opendap._base import BaseRequest
-
-
-class WlevelRequest(BaseRequest):
-
-    FORMAT = 'wlevel'
-    HISTORICAL_IDENTIFIER = 'l'
-
-    @classmethod
-    def build_request(cls, station_id: str, start_time: datetime,
-                      end_time: datetime) -> List[str]:
-        return super(WlevelRequest, cls).build_request(station_id, start_time,
-                                                       end_time)

From cb56d46e54dc9cd8d5da1571dea43fe3699e4b74 Mon Sep 17 00:00:00 2001
From: Jack Griffin <106121980+BluIsBest@users.noreply.github.com>
Date: Sat, 3 May 2025 22:15:33 -0400
Subject: [PATCH 11/47] Delete config directory

---
 config/__init__.py                          |  24 --------------------
 config/__pycache__/__init__.cpython-311.pyc | Bin 1224 -> 0 bytes
 2 files changed, 24 deletions(-)
 delete mode 100644 config/__init__.py
 delete mode 100644 config/__pycache__/__init__.cpython-311.pyc

diff --git a/config/__init__.py b/config/__init__.py
deleted file mode 100644
index f84c255..0000000
--- a/config/__init__.py
+++ /dev/null
@@ -1,24 +0,0 @@
-"""Stores the configuration information for the NDBC API.
-
-Attributes:
-    LOGGER_NAME (:str:): The name for the `logging.Logger` in the api instance.
-    DEFAULT_CACHE_LIMIT (:int:): The station level limit for caching NDBC data
-        service requests.
-    VERIFY_HTTPS (:bool:): Whether to execute requests using HTTPS rather than
-        HTTP.
-    HTTP_RETRY (:int:): The number of times to retry requests to the NDBC data
-        service.
-    HTTP_BACKOFF_FACTOR (:float:): The backoff factor used when executing retry
-        requests to the NDBC data service.
-    HTTP_DELAY (:int:) The delay between requests submitted to the NDBC data
-        service, in milliseconds.
-    HTTP_DEBUG (:bool:): Whether to log requests and responses to the NDBC API's
-        log (a `logging.Logger`) as debug messages.
-"""
-LOGGER_NAME = 'NDBC-API'
-DEFAULT_CACHE_LIMIT = 36
-VERIFY_HTTPS = True
-HTTP_RETRY = 5
-HTTP_BACKOFF_FACTOR = 0.8
-HTTP_DELAY = 2000
-HTTP_DEBUG = False
diff --git a/config/__pycache__/__init__.cpython-311.pyc b/config/__pycache__/__init__.cpython-311.pyc
deleted file mode 100644
index 4e271f334741a975f84941070c5a779dfcdba0b9..0000000000000000000000000000000000000000
Binary files a/config/__pycache__/__init__.cpython-311.pyc and /dev/null differ
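A minimal sketch of the URL scheme the `requests/opendap` builders deleted above implement: `build_request` routes to year-suffixed historical files, or to a realtime file whose year part is `9999`, around a 45-day cutoff. `BASE_URL` comes from the deleted `_core.py`; the `URL_PREFIX` and `FILE_FORMAT` values below are hypothetical placeholders, since those class constants fall outside the hunks shown.

```python3
from datetime import datetime, timedelta

BASE_URL = 'https://dods.ndbc.noaa.gov/thredds/'  # from the deleted _core.py
URL_PREFIX = 'dodsC/data/'  # hypothetical; the real constant is not in these hunks
FILE_FORMAT = 'nc'          # hypothetical; the real constant is not in these hunks


def sketch_build_request(station_id: str, start_time: datetime,
                         end_time: datetime, fmt: str = 'stdmet',
                         hist_id: str = 'h') -> list:
    """Mirrors the routing logic of the deleted BaseRequest.build_request."""
    station_id = station_id.lower()
    now = datetime.now()
    if (now - start_time) >= timedelta(days=45):  # historical cutoff
        reqs = [
            f'{BASE_URL}{URL_PREFIX}{fmt}/{station_id}/{station_id}{hist_id}{year}.{FILE_FORMAT}'
            for year in range(start_time.year, min(now.year, end_time.year + 1))
        ]
        if (now - end_time) <= timedelta(days=45):
            # the range also touches realtime data; 9999 is the realtime year part
            reqs.append(f'{BASE_URL}{URL_PREFIX}{fmt}/{station_id}/'
                        f'{station_id}{hist_id}9999.{FILE_FORMAT}')
        return reqs
    return [f'{BASE_URL}{URL_PREFIX}{fmt}/{station_id}/'
            f'{station_id}{hist_id}9999.{FILE_FORMAT}']
```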
From ac6898bc1d36b2de890fdfa0dc3194884e109854 Mon Sep 17 00:00:00 2001
From: Jack Griffin <106121980+BluIsBest@users.noreply.github.com>
Date: Sat, 3 May 2025 22:16:25 -0400
Subject: [PATCH 12/47] Retrying to upload the NDBC_api correctly

---
 ndbc_api/__init__.py                            |   4 +
 ndbc_api/__pycache__/__init__.cpython-311.pyc   | Bin 0 -> 334 bytes
 .../__pycache__/exceptions.cpython-311.pyc      | Bin 0 -> 2358 bytes
 ndbc_api/__pycache__/ndbc_api.cpython-311.pyc   | Bin 0 -> 41595 bytes
 ndbc_api/config/__init__.py                     |  24 +
 .../__pycache__/__init__.cpython-311.pyc        | Bin 0 -> 1224 bytes
 ndbc_api/exceptions.py                          |  29 +
 ndbc_api/ndbc_api.py                            | 832 ++++++++++++++++++
 ndbc_api/utilities/__init__.py                  |   0
 .../__pycache__/__init__.cpython-311.pyc        | Bin 0 -> 191 bytes
 .../__pycache__/log_formatter.cpython-311.pyc   | Bin 0 -> 1178 bytes
 .../__pycache__/req_cache.cpython-311.pyc       | Bin 0 -> 3410 bytes
 .../__pycache__/req_handler.cpython-311.pyc     | Bin 0 -> 13291 bytes
 .../__pycache__/singleton.cpython-311.pyc       | Bin 0 -> 937 bytes
 ndbc_api/utilities/log_formatter.py             |  16 +
 ndbc_api/utilities/opendap/__init__.py          |   0
 .../__pycache__/__init__.cpython-311.pyc        | Bin 0 -> 199 bytes
 .../__pycache__/dataset.cpython-311.pyc         | Bin 0 -> 3713 bytes
 ndbc_api/utilities/opendap/dataset.py           |  88 ++
 ndbc_api/utilities/req_cache.py                 |  48 +
 ndbc_api/utilities/req_handler.py               | 229 +++++
 ndbc_api/utilities/singleton.py                 |  14 +
 22 files changed, 1284 insertions(+)
 create mode 100644 ndbc_api/__init__.py
 create mode 100644 ndbc_api/__pycache__/__init__.cpython-311.pyc
 create mode 100644 ndbc_api/__pycache__/exceptions.cpython-311.pyc
 create mode 100644 ndbc_api/__pycache__/ndbc_api.cpython-311.pyc
 create mode 100644 ndbc_api/config/__init__.py
 create mode 100644 ndbc_api/config/__pycache__/__init__.cpython-311.pyc
 create mode 100644 ndbc_api/exceptions.py
 create mode 100644 ndbc_api/ndbc_api.py
 create mode 100644 ndbc_api/utilities/__init__.py
 create mode 100644 ndbc_api/utilities/__pycache__/__init__.cpython-311.pyc
 create mode 100644 ndbc_api/utilities/__pycache__/log_formatter.cpython-311.pyc
 create mode 100644 ndbc_api/utilities/__pycache__/req_cache.cpython-311.pyc
 create mode 100644 ndbc_api/utilities/__pycache__/req_handler.cpython-311.pyc
 create mode 100644 ndbc_api/utilities/__pycache__/singleton.cpython-311.pyc
 create mode 100644 ndbc_api/utilities/log_formatter.py
 create mode 100644 ndbc_api/utilities/opendap/__init__.py
 create mode 100644 ndbc_api/utilities/opendap/__pycache__/__init__.cpython-311.pyc
 create mode 100644 ndbc_api/utilities/opendap/__pycache__/dataset.cpython-311.pyc
 create mode 100644 ndbc_api/utilities/opendap/dataset.py
 create mode 100644 ndbc_api/utilities/req_cache.py
 create mode 100644 ndbc_api/utilities/req_handler.py
 create mode 100644 ndbc_api/utilities/singleton.py

diff --git a/ndbc_api/__init__.py b/ndbc_api/__init__.py
new file mode 100644
index 0000000..de30e98
--- /dev/null
+++ b/ndbc_api/__init__.py
@@ -0,0 +1,4 @@
+""" ..
include:: ../README.md """
+__docformat__ = "restructuredtext"
+
+from ndbc_api.ndbc_api import NdbcApi
diff --git a/ndbc_api/__pycache__/__init__.cpython-311.pyc b/ndbc_api/__pycache__/__init__.cpython-311.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..8c8eb455ca6f87ab3fd9e6dea77889a7ebf1980c
Binary files /dev/null and b/ndbc_api/__pycache__/__init__.cpython-311.pyc differ
diff --git a/ndbc_api/__pycache__/exceptions.cpython-311.pyc b/ndbc_api/__pycache__/exceptions.cpython-311.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..3167b4bd1661b0b003db9b2f99f4011f510fbab3
Binary files /dev/null and b/ndbc_api/__pycache__/exceptions.cpython-311.pyc differ
diff --git a/ndbc_api/__pycache__/ndbc_api.cpython-311.pyc b/ndbc_api/__pycache__/ndbc_api.cpython-311.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..bb1edc0e66ca896bd345ace8479590cefffe968b
Binary files /dev/null and b/ndbc_api/__pycache__/ndbc_api.cpython-311.pyc differ
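The four-line `ndbc_api/__init__.py` above re-exports `NdbcApi`, which is what enables the short import path used in the docstrings later in this patch; a minimal usage sketch (the printed modes are illustrative):

```python3
from ndbc_api import NdbcApi  # re-exported by ndbc_api/__init__.py

api = NdbcApi()
print(api.get_modes())  # e.g. ['adcp', 'cwind', ..., 'stdmet']
```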
diff --git a/ndbc_api/config/__init__.py b/ndbc_api/config/__init__.py
new file mode 100644
index 0000000..f84c255
--- /dev/null
+++ b/ndbc_api/config/__init__.py
@@ -0,0 +1,24 @@
+"""Stores the configuration information for the NDBC API.
+
+Attributes:
+    LOGGER_NAME (:str:): The name for the `logging.Logger` in the api instance.
+    DEFAULT_CACHE_LIMIT (:int:): The station-level limit for caching NDBC data
+        service requests.
+    VERIFY_HTTPS (:bool:): Whether to execute requests using HTTPS rather than
+        HTTP.
+    HTTP_RETRY (:int:): The number of times to retry requests to the NDBC data
+        service.
+    HTTP_BACKOFF_FACTOR (:float:): The backoff factor used when executing retry
+        requests to the NDBC data service.
+    HTTP_DELAY (:int:): The delay between requests submitted to the NDBC data
+        service, in milliseconds.
+    HTTP_DEBUG (:bool:): Whether to log requests and responses to the NDBC API's
+        log (a `logging.Logger`) as debug messages.
+"""
+LOGGER_NAME = 'NDBC-API'
+DEFAULT_CACHE_LIMIT = 36
+VERIFY_HTTPS = True
+HTTP_RETRY = 5
+HTTP_BACKOFF_FACTOR = 0.8
+HTTP_DELAY = 2000
+HTTP_DEBUG = False
diff --git a/ndbc_api/config/__pycache__/__init__.cpython-311.pyc b/ndbc_api/config/__pycache__/__init__.cpython-311.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..4e271f334741a975f84941070c5a779dfcdba0b9
Binary files /dev/null and b/ndbc_api/config/__pycache__/__init__.cpython-311.pyc differ
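These constants pace the request handler added later in this patch. A small sketch of the retry schedule they imply, assuming the standard urllib3 backoff formula `backoff_factor * 2 ** (retry - 1)` (the formula is an assumption; the handler's exact behavior is defined in `req_handler.py`):

```python3
HTTP_RETRY = 5
HTTP_BACKOFF_FACTOR = 0.8
HTTP_DELAY = 2000  # milliseconds between successive NDBC requests

for retry_number in range(1, HTTP_RETRY + 1):
    sleep_s = HTTP_BACKOFF_FACTOR * (2 ** (retry_number - 1))
    print(f"retry {retry_number}: back off {sleep_s:.1f}s")
# retry 1: 0.8s, retry 2: 1.6s, ..., retry 5: 12.8s
```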
diff --git a/ndbc_api/exceptions.py b/ndbc_api/exceptions.py
new file mode 100644
index 0000000..2c6caaa
--- /dev/null
+++ b/ndbc_api/exceptions.py
@@ -0,0 +1,29 @@
+class NdbcException(Exception):
+    """Base exception that all other NDBC exceptions subclass from."""
+
+    def __init__(self, message: str = ''):  # pragma: no cover
+        self.message = message
+        super().__init__(self.message)
+
+    def __str__(self):  # pragma: no cover
+        return f"NDBC API: {self.message or 'unspecified error'}"
+
+
+class TimestampException(NdbcException):
+    """Unable to handle given timestamp."""
+
+
+class RequestException(NdbcException):
+    """Unable to build the given request."""
+
+
+class ResponseException(NdbcException):
+    """Unable to handle the given response."""
+
+
+class ParserException(NdbcException):
+    """Unable to parse the given response."""
+
+
+class HandlerException(NdbcException):
+    """Error when handling this API call."""
diff --git a/ndbc_api/ndbc_api.py b/ndbc_api/ndbc_api.py
new file mode 100644
index 0000000..7da62ee
--- /dev/null
+++ b/ndbc_api/ndbc_api.py
@@ -0,0 +1,832 @@
+"""An API for retrieving data from the NDBC.
+
+This module defines the `NdbcApi`, the top-level object which creates, handles,
+caches, parses, and returns NDBC data.
+
+Example:
+    ```python3
+    from ndbc_api import NdbcApi
+    api = NdbcApi()
+    available_stations = api.stations()
+    modes = api.get_modes()
+    df_stdmet_tplm2 = api.get_data(
+        'tplm2',
+        'stdmet',
+        '2020-01-01 00:00',
+        '2022-01-01 00:00',
+        as_df=True
+    )
+    ```
+
+Attributes:
+    log (:obj:`logging.Logger`): The logger at which to register HTTP
+        request and response status codes and headers, used for debug
+        purposes.
+    headers (:dict:): The request headers for use in the NDBC API's request
+        handler.
+"""
+import logging
+import pickle
+import warnings
+from concurrent.futures import ThreadPoolExecutor, as_completed
+from datetime import datetime, timedelta
+from typing import Any, List, Sequence, Tuple, Union, Dict, Optional
+
+import xarray
+import pandas as pd
+
+from .api.handlers.http.data import DataHandler
+from .api.handlers.http.stations import StationsHandler
+from .config import (DEFAULT_CACHE_LIMIT, HTTP_BACKOFF_FACTOR, HTTP_DEBUG,
+                     HTTP_DELAY, HTTP_RETRY, LOGGER_NAME, VERIFY_HTTPS)
+from .exceptions import (HandlerException, ParserException, RequestException,
+                         ResponseException, TimestampException)
+from .utilities.req_handler import RequestHandler
+from .utilities.singleton import Singleton
+from .utilities.log_formatter import LogFormatter
+from .api.handlers.opendap.data import OpenDapDataHandler
+from .utilities.opendap.dataset import concat_datasets, merge_datasets, filter_dataset_by_variable, filter_dataset_by_time_range
+
+
+class NdbcApi(metaclass=Singleton):
+    """An API for querying the National Data Buoy Center.
+
+    The `NdbcApi` is metaclassed as a singleton to conserve NDBC resources. It
+    uses two private handlers to build requests and parse responses to the NDBC
+    over HTTP(s). It also uses an LRU-cached request handler to execute requests
+    against the NDBC, logging response statuses as they are executed.
+
+    Attributes:
+        logging_level: The `logging.Logger`'s log level, 1 if the `debug`
+            flag is set in the `__init__` method, and 0 otherwise.
+        cache_limit: The handler's global limit for caching
+            `NdbcApi` responses.
This is implemented as a least-recently-used
+            cache, designed to conserve NDBC resources when querying
+            measurements for a given station over similar time ranges.
+        delay: The HTTP(s) request delay parameter, in milliseconds.
+        retries: The number of times to retry a request to the NDBC data
+            service.
+        backoff_factor: The back-off parameter, used in conjunction with
+            `retries` to re-attempt requests to the NDBC data service.
+        verify_https: A flag which indicates whether to attempt requests to the
+            NDBC data service over HTTP or HTTPS.
+        debug: A flag for verbose logging and response-level status reporting.
+            Affects the instance's `logging.Logger` and the behavior of its
+            private `RequestHandler` instance.
+    """
+
+    logger = logging.getLogger(LOGGER_NAME)
+    warnings.simplefilter(action='ignore', category=FutureWarning)
+
+    def __init__(
+        self,
+        logging_level: int = logging.WARNING if HTTP_DEBUG else logging.ERROR,
+        filename: Any = None,
+        cache_limit: int = DEFAULT_CACHE_LIMIT,
+        headers: dict = {},
+        delay: int = HTTP_DELAY,
+        retries: int = HTTP_RETRY,
+        backoff_factor: float = HTTP_BACKOFF_FACTOR,
+        verify_https: bool = VERIFY_HTTPS,
+        debug: bool = HTTP_DEBUG,
+    ):
+        """Initializes the singleton `NdbcApi`, sets associated handlers."""
+        self.cache_limit = cache_limit
+        self.headers = headers
+        self._handler = self._get_request_handler(
+            cache_limit=self.cache_limit,
+            delay=delay,
+            retries=retries,
+            backoff_factor=backoff_factor,
+            headers=self.headers,
+            debug=debug,
+            verify_https=verify_https,
+        )
+        self._stations_api = StationsHandler
+        self._data_api = DataHandler
+        self._opendap_data_api = OpenDapDataHandler
+        self.configure_logging(level=logging_level, filename=filename)
+
+    def dump_cache(self, dest_fp: Union[str, None] = None) -> Union[dict, None]:
+        """Dump the request cache to dict or the specified filepath.
+
+        Dump the request, response pairs stored in the `NdbcApi`'s
+        `Request_handler` as a `dict`, either returning the object, if no
+        `dest_fp` is specified, or serializing (pickling) the object and writing
+        it to the specified `dest_fp`.
+
+        Args:
+            dest_fp: The destination filepath for the serialized `RequestsCache`
+                contents.
+
+        Returns:
+            The cached request, response pairs as a `dict`, or `None` if a
+            `dest_fp` is specified when calling the method.
+        """
+        data = dict()
+        ids = [r.id_ for r in self._handler.stations]
+        caches = [r.reqs.cache for r in self._handler.stations]
+        if ids:
+            for station_id, cache in zip(ids, caches):
+                data[station_id] = dict()
+                reqs = cache.keys()
+                for req in reqs:
+                    resp = cache[req].v
+                    data[station_id][req] = resp
+        if dest_fp:
+            with open(dest_fp, 'wb') as f:
+                pickle.dump(data, f)
+        else:
+            return data
+
+    def clear_cache(self) -> None:
+        """Clear the request cache and create a new handler."""
+        del self._handler
+        self._handler = self._get_request_handler(
+            cache_limit=self.cache_limit,
+            delay=HTTP_DELAY,
+            retries=HTTP_RETRY,
+            backoff_factor=HTTP_BACKOFF_FACTOR,
+            headers=self.headers,
+            debug=HTTP_DEBUG,
+            verify_https=VERIFY_HTTPS,
+        )
+
+    def set_cache_limit(self, new_limit: int) -> None:
+        """Set the cache limit for the API's request cache."""
+        self._handler.set_cache_limit(cache_limit=new_limit)
+
+    def get_cache_limit(self) -> int:
+        """Get the cache limit for the API's request cache."""
+        return self._handler.get_cache_limit()
+
+    def get_headers(self) -> dict:
+        """Return the current headers used by the request handler."""
+        return self._handler.get_headers()
+
+    def update_headers(self, new: dict) -> None:
+        """Add new headers to the request handler."""
+        self._handler.update_headers(new)
+
+    def set_headers(self, request_headers: dict) -> None:
+        """Reset the request headers using the newly supplied headers."""
+        self._handler.set_headers(request_headers)
+
+    def configure_logging(self, level=logging.WARNING, filename=None) -> None:
+        """Configures logging for the NdbcApi.
+
+        Args:
+            level (int, optional): The logging level. Defaults to logging.WARNING.
+            filename (str, optional): If provided, logs to the specified file.
+        """
+        self.logger.setLevel(level)
+
+        handler: logging.Handler
+        formatter: logging.Formatter
+
+        for handler in self.logger.handlers[:]:
+            self.logger.removeHandler(handler)
+
+        if filename:
+            handler = logging.FileHandler(filename)
+            formatter = logging.Formatter(
+                '[%(asctime)s][%(levelname)s]: %(message)s')
+        else:
+            handler = logging.StreamHandler()
+            formatter = LogFormatter('[%(levelname)s]: %(message)s')
+
+        handler.setFormatter(formatter)
+        self.logger.addHandler(handler)
+
+    def log(self,
+            level: int,
+            station_id: Union[int, str, None] = None,
+            mode: Union[str, None] = None,
+            message: Union[str, None] = None,
+            **extra_data) -> None:
+        """Logs a structured message with metadata.
+
+        Args:
+            level (int): The logging level.
+            station_id (str, optional): The NDBC station ID.
+            mode (str, optional): The data mode.
+            message (str, optional): The log message.
+            **extra_data: Additional key-value pairs to include in the log.
+        """
+        log_data = {}
+        if station_id:
+            log_data['station_id'] = station_id
+        if mode:
+            log_data['mode'] = mode
+        if message:
+            log_data['message'] = message
+        for k, v in extra_data.items():
+            log_data[k] = v
+        self.logger.log(level, log_data)
+
+    def stations(self, as_df: bool = True) -> Union[pd.DataFrame, dict]:
+        """Get all stations and station metadata from the NDBC.
+
+        Query the NDBC data service for the current available data buoys
+        (stations), both those maintained by the NDBC and those whose
+        measurements are managed by the NDBC. Stations are returned by default
+        as rows of a `pandas.DataFrame`, alongside their realtime data coverage
+        for some common measurements, their latitude and longitude, and current
+        station status notes maintained by the NDBC.
+
+        Args:
+            as_df: Flag indicating whether to return current station data as a
+                `pandas.DataFrame` if set to `True` or as a `dict` if `False`.
+
+        Returns:
+            The current station data from the NDBC data service, either as a
+            `pandas.DataFrame` or as a `dict` depending on the value of `as_df`.
+
+        Raises:
+            ResponseException: An error occurred while retrieving and parsing
+                responses from the NDBC data service.
+        """
+        try:
+            data = self._stations_api.stations(handler=self._handler)
+            return self._handle_data(data, as_df, cols=None)
+        except (ResponseException, ValueError, KeyError) as e:
+            raise ResponseException('Failed to handle returned data.') from e
+
+    def historical_stations(self,
+                            as_df: bool = True) -> Union[pd.DataFrame, dict]:
+        """Get historical stations and station metadata from the NDBC.
+
+        Query the NDBC data service for the historical data buoys
+        (stations), both those maintained by the NDBC and those which are not.
+        Stations are returned by default as rows of a `pandas.DataFrame`,
+        alongside their historical data coverage, with one row per tuple of
+        (station, historical deployment).
+
+        Args:
+            as_df: Flag indicating whether to return current station data as a
+                `pandas.DataFrame` if set to `True` or as a `dict` if `False`.
+
+        Returns:
+            The current station data from the NDBC data service, either as a
+            `pandas.DataFrame` or as a `dict` depending on the value of `as_df`.
+
+        Raises:
+            ResponseException: An error occurred while retrieving and parsing
+                responses from the NDBC data service.
+        """
+        try:
+            data = self._stations_api.historical_stations(handler=self._handler)
+            return self._handle_data(data, as_df, cols=None)
+        except (ResponseException, ValueError, KeyError) as e:
+            raise ResponseException('Failed to handle returned data.') from e
+
+    def nearest_station(
+        self,
+        lat: Union[str, float, None] = None,
+        lon: Union[str, float, None] = None,
+    ) -> str:
+        """Get the nearest station to the specified lat/lon.
+
+        Use the NDBC data service's current station data to determine the
+        nearest station to the specified latitude and longitude (either as
+        `float` or as DD.dd[E/W] strings).
+
+        Args:
+            lat: The latitude of interest, used to determine the closest
+                maintained station to the given position.
+            lon: The longitude of interest, used to determine the closest
+                maintained station to the given position.
+
+        Returns:
+            The station id (e.g. `'tplm2'` or `'41001'`) of the nearest station
+            with active measurements to the specified lat/lon pair.
+
+        Raises:
+            ValueError: The latitude and longitude were not both specified when
+                querying for the closest station.
+        """
+        if not (lat and lon):
+            raise ValueError('lat and lon must be specified.')
+        nearest_station = self._stations_api.nearest_station(
+            handler=self._handler, lat=lat, lon=lon)
+        return nearest_station
+
+    def radial_search(
+        self,
+        lat: Union[str, float, None] = None,
+        lon: Union[str, float, None] = None,
+        radius: float = -1,
+        units: str = 'km',
+    ) -> pd.DataFrame:
+        """Get all stations within radius units of the specified lat/lon.
+
+        Use the NDBC data service's current station data to determine the
+        stations within radius of the specified latitude and longitude
+        (passed either as `float` or as DD.dd[E/W] strings).
+
+        Args:
+            lat (float): The latitude of interest, used to determine the maintained
+                stations within radius units of the given position.
+            lon (float): The longitude of interest, used to determine the maintained
+                stations within radius units of the given position.
+            radius (float): The radius in the specified units to search for stations
+                within.
+            units (str): The units of the radius, either 'nm', 'km', or 'mi'.
+
+        Returns:
+            A `pandas.DataFrame` of the stations within the specified radius of
+            the given lat/lon pair.
+
+        Raises:
+            ValueError: The latitude and longitude were not both specified when
+                querying for the closest station, or the radius or units are
+                invalid.
+        """
+        if not (lat and lon):
+            raise ValueError('lat and lon must be specified.')
+        stations_in_radius = self._stations_api.radial_search(
+            handler=self._handler, lat=lat, lon=lon, radius=radius, units=units)
+        return stations_in_radius
+
+    def station(self,
+                station_id: Union[str, int],
+                as_df: bool = False) -> Union[pd.DataFrame, dict]:
+        """Get metadata for the given station from the NDBC.
+
+        The NDBC maintains some station-level metadata including status notes,
+        location information, inclement weather warnings, and measurement notes.
+        This method is used to request, handle, and parse the metadata for the
+        given station from the station's NDBC webpage.
+
+        Args:
+            station_id: The NDBC station ID (e.g. `'tplm2'` or `41001`) for the
+                station of interest.
+            as_df: Whether to return station-level data as a `pandas.DataFrame`,
+                defaults to `False`, and a `dict` is returned.
+
+        Returns:
+            The station metadata for the given station, either as a `dict` or as
+            a `pandas.DataFrame` if the `as_df` flag is set to `True`.
+
+        Raises:
+            ResponseException: An error occurred when requesting and parsing
+                responses for the specified station.
+        """
+        station_id = self._parse_station_id(station_id)
+        try:
+            data = self._stations_api.metadata(handler=self._handler,
+                                               station_id=station_id)
+            return self._handle_data(data, as_df, cols=None)
+        except (ResponseException, ValueError, KeyError) as e:
+            raise ResponseException('Failed to handle returned data.') from e
+
+    def available_realtime(
+        self,
+        station_id: Union[str, int],
+        full_response: bool = False,
+        as_df: Optional[bool] = None,
+    ) -> Union[List[str], pd.DataFrame, dict]:
+        """Get the available realtime modalities for a station.
+
+        While most data buoy (station) measurements are available over
+        multi-year time ranges, some measurements become deprecated or
+        unavailable for substantial periods of time. This method queries the
+        NDBC station webpage for those measurements, and their links, which are
+        available or were available over the last 45 days.
+
+        Args:
+            station_id: The NDBC station ID (e.g. `'tplm2'` or `41001`) for the
+                station of interest.
+            full_response: Whether to return the full response from the NDBC
+                API, defaults to `False` and a list of modes from `get_modes()`
+                is returned. If `True`, the full URL for each data mode is
+                included in the returned `dict` or `pandas.DataFrame`.
+            as_df: Whether to return station-level data as a `pandas.DataFrame`,
+                defaults to `False`, and a `dict` is returned.
+
+        Returns:
+            The available realtime measurements for the specified station,
+            alongside their NDBC data links, either as a `dict` or as a
+            `pandas.DataFrame` if the `as_df` flag is set to `True`.
+
+        Raises:
+            ResponseException: An error occurred when requesting and parsing
+                responses for the specified station.
+        """
+        station_id = self._parse_station_id(station_id)
+        try:
+            station_realtime = self._stations_api.realtime(
+                handler=self._handler, station_id=station_id)
+            full_data = {}
+            if full_response:
+                if as_df is None:
+                    as_df = False
+                full_data = self._handle_data(station_realtime,
+                                              as_df,
+                                              cols=None)
+                return full_data
+            else:
+                full_data = self._handle_data(station_realtime,
+                                              as_df=False,
+                                              cols=None)
+
+            # Parse the modes from the full response
+            _modes = self.get_modes()
+            station_modes = set()
+            for k in full_data:
+                for m in _modes:
+                    if m in full_data[k]['description']:
+                        station_modes.add(m)
+            return list(station_modes)
+        except (ResponseException, ValueError, KeyError) as e:
+            raise ResponseException('Failed to handle returned data.') from e
+
+    def available_historical(self,
+                             station_id: Union[str, int],
+                             as_df: bool = False) -> Union[pd.DataFrame, dict]:
+        """Get the available historical measurements for a station.
+
+        This method queries the NDBC station webpage for historical, quality
+        controlled measurements and their associated availability time ranges.
+
+        Args:
+            station_id: The NDBC station ID (e.g. `'tplm2'` or `41001`) for the
+                station of interest.
+            as_df: Whether to return station-level data as a `pandas.DataFrame`,
+                defaults to `False`, and a `dict` is returned.
+
+        Returns:
+            The available historical measurements for the specified station,
+            alongside their NDBC data links, either as a `dict` or as a
+            `pandas.DataFrame` if the `as_df` flag is set to `True`.
+
+        Raises:
+            ResponseException: An error occurred when requesting and parsing
+                responses for the specified station.
+        """
+        station_id = self._parse_station_id(station_id)
+        try:
+            data = self._stations_api.historical(handler=self._handler,
+                                                 station_id=station_id)
+            return self._handle_data(data, as_df, cols=None)
+        except (ResponseException, ValueError, KeyError) as e:
+            raise ResponseException('Failed to handle returned data.') from e
+
+    def get_data(
+        self,
+        station_id: Union[int, str, None] = None,
+        mode: Union[str, None] = None,
+        start_time: Union[str, datetime] = datetime.now() - timedelta(days=30),
+        end_time: Union[str, datetime] = datetime.now(),
+        use_timestamp: bool = True,
+        as_df: bool = True,
+        cols: List[str] = None,
+        station_ids: Union[Sequence[Union[int, str]], None] = None,
+        modes: Union[List[str], None] = None,
+        as_xarray_dataset: bool = False,
+        use_opendap: Optional[bool] = None,
+    ) -> Union[pd.DataFrame, xarray.Dataset, dict]:
+        """Execute a data query against the specified NDBC station(s).
+
+        Query the NDBC data service for station-level measurements, using the
+        `mode` parameter to determine the measurement type (e.g. `'stdmet'` for
+        standard meteorological data or `'cwind'` for continuous winds data). The
+        time range and data columns of interest may also be specified, such that
+        a tailored set of requests is executed against the NDBC data service to
+        generate a single `pandas.DataFrame` or `dict` matching the conditions
+        specified in the method call. When calling `get_data` with `station_ids`
+        the station identifier is added as a column to the returned data.
+
+        Args:
+            station_id: The NDBC station ID (e.g. `'tplm2'` or `41001`) for the
+                station of interest.
+            station_ids: A list of NDBC station IDs (e.g. `['tplm2', '41001']`)
+                for the stations of interest.
+            mode: The data measurement type to query for the station (e.g.
+                `'stdmet'` for standard meteorological data or `'cwind'` for
+                continuous winds data).
+            modes: A list of data measurement types to query for the stations
+                (e.g. `['stdmet', 'cwind']`).
+            start_time: The first timestamp of interest (in UTC) for the data
+                query, defaulting to 30 days before the current system time.
+            end_time: The last timestamp of interest (in UTC) for the data
+                query, defaulting to the current system time.
+            use_timestamp: A flag indicating whether to parse the NDBC data
+                service column headers as a timestamp, and to use this timestamp
+                as the index.
+            as_df: Whether to return station-level data as a `pandas.DataFrame`,
+                defaults to `True`, if `False` a `dict` is returned unless
+                `as_xarray_dataset` is set to `True`.
+            as_xarray_dataset: Whether to return the data as an `xarray.Dataset`,
+                defaults to `False`.
+            cols: A list of columns of interest which are selected from the
+                available data columns, such that only the desired columns are
+                returned. All columns are returned if `None` is specified.
+            use_opendap: An alias for `as_xarray_dataset`.
+
+        Returns:
+            The available station(s) measurements for the specified modes, time
+            range, and columns, either as a `dict` or as a `pandas.DataFrame`
+            if the `as_df` flag is set to `True`.
+
+        Raises:
+            ValueError: Both `station_id` and `station_ids` are `None`, or both
+                are not `None`. This is also raised if `mode` and `modes` are
+                `None`, or both are not `None`.
+            RequestException: The specified mode is not available.
+            ResponseException: There was an error in executing and parsing the
+                required requests against the NDBC data service.
+            HandlerException: There was an error in handling the returned data
+                as a `dict` or `pandas.DataFrame`.
+        """
+        if use_opendap is not None:
+            as_xarray_dataset = use_opendap
+
+        as_df = as_df and not as_xarray_dataset
+
+        self.log(logging.DEBUG,
+                 message=f"`get_data` called with arguments: {locals()}")
+        if station_id is None and station_ids is None:
+            raise ValueError('Both `station_id` and `station_ids` are `None`.')
+        if station_id is not None and station_ids is not None:
+            raise ValueError('`station_id` and `station_ids` cannot both be '
+                             'specified.')
+        if mode is None and modes is None:
+            raise ValueError('Both `mode` and `modes` are `None`.')
+        if mode is not None and modes is not None:
+            raise ValueError('`mode` and `modes` cannot both be specified.')
+
+        handle_station_ids: List[Union[int, str]] = []
+        handle_modes: List[str] = []
+
+        if station_id is not None:
+            handle_station_ids.append(station_id)
+        if station_ids is not None:
+            handle_station_ids.extend(station_ids)
+        if mode is not None:
+            handle_modes.append(mode)
+        if modes is not None:
+            handle_modes.extend(modes)
+
+        for mode in handle_modes:
+            if mode not in self.get_modes(use_opendap=as_xarray_dataset):
+                raise RequestException(f"Mode {mode} is not available.")
+
+        self.log(logging.INFO,
+                 message=(f"Processing request for station_ids "
+                          f"{handle_station_ids} and modes "
+                          f"{handle_modes}"))
+
+        # accumulated_data records the handled response and parsed station_id
+        # as a tuple, with the data as the first value and the id as the second.
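+        # A worked example of the shape built here (illustrative values):
+        #   {'stdmet': [df_tplm2, df_41001], 'cwind': [...]}
+        # i.e. each mode maps to the list of per-station results gathered by
+        # the thread pool below, which is why the annotation uses a List.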
+        accumulated_data: Dict[str, List[Union[pd.DataFrame, xarray.Dataset,
+                                               dict]]] = {}
+        for mode in handle_modes:
+            accumulated_data[mode] = []
+
+            with ThreadPoolExecutor(
+                    max_workers=len(handle_station_ids)) as station_executor:
+                station_futures = {}
+                for station_id in handle_station_ids:
+                    station_futures[station_id] = station_executor.submit(
+                        self._handle_get_data,
+                        mode=mode,
+                        station_id=station_id,
+                        start_time=start_time,
+                        end_time=end_time,
+                        use_timestamp=use_timestamp,
+                        as_df=as_df,
+                        cols=cols,
+                        use_opendap=as_xarray_dataset,
+                    )
+
+                for future in as_completed(station_futures.values()):
+                    try:
+                        station_data, station_id = future.result()
+                        self.log(
+                            level=logging.DEBUG,
+                            station_id=station_id,
+                            message=
+                            f"Successfully processed request for station_id {station_id}"
+                        )
+                        if as_df:
+                            station_data['station_id'] = station_id
+                        accumulated_data[mode].append(station_data)
+                    except (RequestException, ResponseException,
+                            HandlerException) as e:
+                        self.log(
+                            level=logging.WARN,
+                            station_id=station_id,
+                            message=(
+                                f"Failed to process request for station_id "
+                                f"{station_id} with error: {e}"))
+        self.log(logging.INFO, message="Finished processing request.")
+        return self._handle_accumulate_data(accumulated_data)
+
+    def get_modes(self,
+                  use_opendap: bool = False,
+                  as_xarray_dataset: Optional[bool] = None) -> List[str]:
+        """Get the list of supported modes for `get_data(...)`.
+
+        Args:
+            use_opendap (bool): Whether to return the available
+                modes for opendap `xarray.Dataset` data.
+            as_xarray_dataset (bool): An alias for `use_opendap`.
+
+        Returns:
+            (List[str]) the available modalities.
+        """
+        if as_xarray_dataset is not None:
+            use_opendap = as_xarray_dataset
+
+        if use_opendap:
+            return [
+                v for v in vars(self._opendap_data_api) if not v.startswith('_')
+            ]
+        return [v for v in vars(self._data_api) if not v.startswith('_')]
+
+    @staticmethod
+    def save_xarray_dataset(dataset: xarray.Dataset, output_filepath: str,
+                            **kwargs) -> None:
+        """
+        Saves an `xarray.Dataset` to a user-specified netCDF file path.
+
+        Args:
+            dataset: The xarray dataset to save.
+            output_filepath: The path to save the dataset to.
+            **kwargs: Additional keyword arguments to pass to `dataset.to_netcdf`.
+
+        Returns:
+            None: The dataset is written to disk.
+        """
+        dataset.to_netcdf(output_filepath, **kwargs)
+
+    """ PRIVATE """
+
+    def _get_request_handler(
+        self,
+        cache_limit: int,
+        delay: int,
+        retries: int,
+        backoff_factor: float,
+        headers: dict,
+        debug: bool,
+        verify_https: bool,
+    ) -> Any:
+        """Build a new `RequestHandler` for the `NdbcApi`."""
+        return RequestHandler(
+            cache_limit=cache_limit or self.cache_limit,
+            log=self.log,
+            delay=delay,
+            retries=retries,
+            backoff_factor=backoff_factor,
+            headers=headers,
+            debug=debug,
+            verify_https=verify_https,
+        )
+
+    @staticmethod
+    def _parse_station_id(station_id: Union[str, int]) -> str:
+        """Parse the station id."""
+        station_id = str(station_id)  # expect string-valued station id
+        station_id = station_id.lower()  # expect lowercased station id
+        return station_id
+
+    @staticmethod
+    def _handle_timestamp(timestamp: Union[datetime, str]) -> datetime:
+        """Convert the specified timestamp to `datetime.datetime`."""
+        if isinstance(timestamp, datetime):
+            return timestamp
+        else:
+            try:
+                return datetime.strptime(timestamp, '%Y-%m-%d %H:%M')
+            except ValueError as e:
+                raise TimestampException from e
+
+    @staticmethod
+    def _enforce_timerange(df: pd.DataFrame, start_time: datetime,
+                           end_time: datetime) -> pd.DataFrame:
+        """Down-select to the data within the specified `datetime` range."""
+        try:
+            df = df.loc[(df.index.values >= pd.Timestamp(start_time)) &
+                        (df.index.values <= pd.Timestamp(end_time))]
+        except ValueError as e:
+            raise TimestampException(
+                'Failed to enforce `start_time` to `end_time` range.') from e
+        return df
+
+    @staticmethod
+    def _handle_data(data: pd.DataFrame,
+                     as_df: bool = True,
+                     cols: List[str] = None) -> Union[pd.DataFrame, dict]:
+        """Apply column down-selection and return format handling."""
+        if cols:
+            try:
+                data = data[[*cols]]
+            except (KeyError, ValueError) as e:
+                raise ParserException(
+                    'Failed to parse column selection.') from e
+        if as_df and isinstance(data, pd.DataFrame):
+            return data
+        elif isinstance(data, pd.DataFrame) and not as_df:
+            return data.to_dict()
+        elif as_df:
+            try:
+                return pd.DataFrame().from_dict(data, orient='index')
+            except (NotImplementedError, ValueError, TypeError) as e:
+                raise HandlerException(
+                    'Failed to convert `dict` to `pd.DataFrame`.') from e
+        else:
+            return data
+
+    def _handle_accumulate_data(
+        self,
+        accumulated_data: Dict[str, List[Union[pd.DataFrame, dict,
+                                               xarray.Dataset]]],
+    ) -> Union[pd.DataFrame, dict]:
+        """Accumulate the data from multiple stations and modes."""
+        for k in list(accumulated_data.keys()):
+            if not accumulated_data[k]:
+                del accumulated_data[k]
+
+        if not accumulated_data:
+            return {}
+
+        return_as_df = isinstance(
+            accumulated_data[list(accumulated_data.keys())[-1]][0],
+            pd.DataFrame)
+        use_opendap = isinstance(
+            accumulated_data[list(accumulated_data.keys())[-1]][0],
+            xarray.Dataset)
+
+        data: Union[List[pd.DataFrame], List[xarray.Dataset],
+                    dict] = [] if return_as_df or use_opendap else {}
+
+        for mode, station_data in accumulated_data.items():
+            if return_as_df:
+                data.extend(station_data)
+            elif use_opendap:
+                data.extend(station_data)
+            else:
+                data[mode] = station_data
+
+        if return_as_df:
+            df = pd.concat(data, axis=0)
+            df.reset_index(inplace=True, drop=False)
+            df.set_index(['timestamp', 'station_id'], inplace=True)
+            return df
+        elif use_opendap:
+            return merge_datasets(data)
+        return data
+
+    def _handle_get_data(
+        self,
+        mode: str,
+        station_id: str,
+        start_time: datetime,
+        end_time: datetime,
+    def _handle_get_data(
+        self,
+        mode: str,
+        station_id: str,
+        start_time: datetime,
+        end_time: datetime,
+        use_timestamp: bool,
+        as_df: bool = True,
+        cols: List[str] = None,
+        use_opendap: bool = False,
+    ) -> Tuple[Union[pd.DataFrame, xarray.Dataset, dict], str]:
+        start_time = self._handle_timestamp(start_time)
+        end_time = self._handle_timestamp(end_time)
+        station_id = self._parse_station_id(station_id)
+        if use_opendap:
+            data_api_call = getattr(self._opendap_data_api, mode, None)
+        else:
+            data_api_call = getattr(self._data_api, mode, None)
+        if not data_api_call:
+            raise RequestException(
+                'Please supply a supported mode from `get_modes()`.')
+        try:
+            data = data_api_call(
+                self._handler,
+                station_id,
+                start_time,
+                end_time,
+                use_timestamp,
+            )
+        except (ResponseException, ValueError, TypeError, KeyError) as e:
+            raise ResponseException(
+                f'Failed to handle API call.\nRaised from {e}') from e
+        if use_timestamp:
+            if use_opendap:
+                data = filter_dataset_by_time_range(data, start_time, end_time)
+            else:
+                data = self._enforce_timerange(df=data,
+                                               start_time=start_time,
+                                               end_time=end_time)
+        try:
+            if use_opendap:
+                if cols:
+                    handled_data = filter_dataset_by_variable(data, cols)
+                else:
+                    handled_data = data
+            else:
+                handled_data = self._handle_data(data, as_df, cols)
+        except (ValueError, KeyError, AttributeError) as e:
+            raise ParserException(
+                f'Failed to handle returned data.\nRaised from {e}') from e
+
+        return (handled_data, station_id)
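The `getattr` dispatch above is the only coupling between `get_data` and the per-mode handler methods. A stand-in class (hypothetical, for illustration only) shows the pattern:

```python3
# Stand-in for the data handler: each public method corresponds to one mode.
class FakeDataApi:
    @staticmethod
    def stdmet(handler, station_id, start_time, end_time, use_timestamp):
        return {'station_id': station_id, 'mode': 'stdmet'}

mode = 'stdmet'
data_api_call = getattr(FakeDataApi, mode, None)  # None for unsupported modes
if data_api_call is None:
    raise ValueError(f'unsupported mode: {mode}')
print(data_api_call(None, 'tplm2', None, None, True))
```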
diff --git a/ndbc_api/utilities/__init__.py b/ndbc_api/utilities/__init__.py
new file mode 100644
index 0000000..e69de29
[GIT binary patch data for the committed ndbc_api/utilities/__pycache__/*.cpython-311.pyc files omitted]
diff --git a/ndbc_api/utilities/opendap/dataset.py b/ndbc_api/utilities/opendap/dataset.py
new file mode 100644
index 0000000..68d1b36
--- /dev/null
+++ b/ndbc_api/utilities/opendap/dataset.py
@@ -0,0 +1,88 @@
+from datetime import datetime
+from typing import List, Union
+
+import xarray
+import numpy as np
+
+
+def concat_datasets(
+    datasets: List[xarray.Dataset],
+    temporal_dim_name: str = 'time',
+) -> xarray.Dataset:
+    """Joins multiple xarray datasets along their shared temporal dimension.
+
+    Handles cases where datasets might not have the same variables,
+    but requires that all datasets share the same dimensions. For
+    data stored on the THREDDS server, all datasets are expected to
+    have `time`, `latitude`, and `longitude` dimensions.
+
+    Args:
+        datasets (List[xarray.Dataset]): A list of xarray datasets
+            to join.
+        temporal_dim_name (str): The name of the temporal dimension
+            along which to concatenate. Defaults to `'time'`.
+
+    Returns:
+        A xarray.Dataset object containing the joined data.
+    """
+    result = xarray.concat(datasets, dim=temporal_dim_name)
+    return result
+
+
+def merge_datasets(datasets: List[xarray.Dataset],) -> xarray.Dataset:
+    """Merges multiple xarray datasets using their shared dimensions.
+
+    Handles cases where datasets might not have the same variables,
+    but requires that all datasets share the same dimensions. For
+    data stored on the THREDDS server, all datasets are expected to
+    have `time`, `latitude`, and `longitude` dimensions.
+
+    Args:
+        datasets (List[xarray.Dataset]): A list of xarray datasets
+            to merge.
+
+    Returns:
+        A xarray.Dataset object containing the merged data.
+    """
+    result = xarray.merge(datasets, compat='override')
+    return result
+
+
+def filter_dataset_by_time_range(
+    dataset: xarray.Dataset,
+    start_time: datetime,
+    end_time: datetime,
+    temporal_dim_name: str = 'time',
+) -> xarray.Dataset:
+    """
+    Filters an `xarray.Dataset` to keep only data within a specified time range.
+
+    Args:
+        dataset: The `xarray.Dataset` object.
+        start_time: The start of the time range (inclusive) as a
+            `datetime.datetime`.
+        end_time: The end of the time range (inclusive) as a
+            `datetime.datetime`.
+        temporal_dim_name: The name of the temporal dimension to slice on.
+
+    Returns:
+        A new `xarray.Dataset` containing only data within the time range.
+    """
+    filtered_ds = dataset.sel({temporal_dim_name: slice(start_time, end_time)})
+    return filtered_ds
+
+
+def filter_dataset_by_variable(
+    dataset: xarray.Dataset,
+    cols: Union[List[str], None] = None,
+) -> xarray.Dataset:
+    """
+    Filters an `xarray.Dataset` to keep only the variables whose names are in `cols`.
+
+    Args:
+        dataset: The `xarray.Dataset` object.
+        cols: A list of variable names to keep; all variables are kept
+            when `cols` is `None`.
+
+    Returns:
+        A new `xarray.Dataset` containing only the variables in `cols`.
+    """
+    if cols is None:
+        return dataset
+    return dataset[cols]
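The four helpers compose naturally: slice a dataset down to a time window, then down to the variables of interest. A toy dataset (assumed shapes and variable names, not real NDBC output) demonstrates:

```python3
import numpy as np
import pandas as pd
import xarray

times = pd.date_range('2023-01-01', periods=4, freq='h')
ds = xarray.Dataset(
    {'wspd': ('time', np.arange(4.0)), 'pres': ('time', np.full(4, 1013.0))},
    coords={'time': times},
)

subset = filter_dataset_by_time_range(ds, times[1], times[2])
subset = filter_dataset_by_variable(subset, cols=['wspd'])
print(subset)  # time has length 2; only 'wspd' remains
```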
diff --git a/ndbc_api/utilities/req_cache.py b/ndbc_api/utilities/req_cache.py
new file mode 100644
index 0000000..457a9ba
--- /dev/null
+++ b/ndbc_api/utilities/req_cache.py
@@ -0,0 +1,48 @@
+class RequestCache:
+    """A least-recently-used cache mapping request URLs to responses."""
+
+    class Request:
+        """A doubly-linked-list node pairing a request with its response."""
+
+        __slots__ = 'k', 'v', 'next', 'prev'
+
+        def __init__(self, request: str, response: dict):
+            self.k = request
+            self.v = response
+            self.next = self.prev = None
+
+    def __init__(self, capacity: int) -> None:
+        self.capacity = capacity
+        self.cache = dict()
+        # Sentinel nodes: `left.next` is the least-recently used entry,
+        # `right.prev` is the most-recently used entry.
+        self.left = RequestCache.Request('$', '$')
+        self.right = RequestCache.Request('$', '$')
+        self.left.next = self.right
+        self.right.prev = self.left
+
+    def remove(self, node: Request) -> None:
+        """Unlink a node from the recency list."""
+        node.prev.next = node.next
+        node.next.prev = node.prev
+
+    def add(self, node: Request):
+        """Link a node at the most-recently used end of the recency list."""
+        node.prev = self.right.prev
+        node.next = self.right
+        self.right.prev.next = node
+        self.right.prev = node
+
+    def get(self, request: str) -> dict:
+        if request in self.cache:
+            # Move the entry to the most-recently used position.
+            self.remove(self.cache[request])
+            self.add(self.cache[request])
+            return self.cache[request].v
+        else:  # request not made before
+            return dict()
+
+    def put(self, request: str, response: dict) -> None:
+        if request in self.cache:
+            self.remove(self.cache[request])
+
+        self.cache[request] = RequestCache.Request(request, response)
+        self.add(self.cache[request])
+
+        if len(self.cache) > self.capacity:
+            # Evict the least-recently used entry.
+            to_remove = self.left.next
+            self.remove(to_remove)
+            del self.cache[to_remove.k]
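`RequestCache` is a textbook LRU: a dict for O(1) lookup plus a doubly-linked recency list between two sentinels. A capacity-2 walk-through (toy keys and responses) shows the eviction order:

```python3
cache = RequestCache(capacity=2)
cache.put('req-a', {'status': 200, 'body': 'a'})
cache.put('req-b', {'status': 200, 'body': 'b'})

cache.get('req-a')  # touching 'req-a' makes 'req-b' the LRU entry
cache.put('req-c', {'status': 200, 'body': 'c'})  # over capacity: evicts 'req-b'

print('req-b' in cache.cache)      # False
print(cache.get('req-b'))          # {} -- a miss returns an empty dict
print(cache.get('req-a')['body'])  # 'a'
```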
diff --git a/ndbc_api/utilities/req_handler.py b/ndbc_api/utilities/req_handler.py
new file mode 100644
index 0000000..6e12889
--- /dev/null
+++ b/ndbc_api/utilities/req_handler.py
@@ -0,0 +1,229 @@
+"""Handles requests to the NDBC, caching responses for each station.
+
+This module defines the `RequestHandler`, a singleton HTTP cache which serves
+to handle requests to the NDBC over HTTP and store requests and responses in a
+cache. The cache is segregated by station, such that a cache limit can be
+enforced on a station level.
+
+Example:
+    ```python3
+    handler = RequestHandler(
+        cache_limit=1,
+        log=lambda *args, **kwargs: None,  # any callable taking (level, **kwargs)
+        delay=2,
+        retries=3,
+        backoff_factor=0.8,
+        debug=True,
+        verify_https=True,
+    )
+    response = handler.execute_request(
+        url='foo.bar',
+        station_id='foo',
+        headers={},
+    )
+    ```
+
+Attributes:
+    stations (:obj:`list`): A list of `Station`s to which requests have
+        been made.
+"""
+import logging
+from typing import List, Union, Callable
+
+import requests
+from urllib3.util import Retry
+
+from .req_cache import RequestCache
+from .singleton import Singleton
+
+
+class RequestHandler(metaclass=Singleton):
+    """A singleton HTTP request handler with per-station response caching.
+
+    Executes requests to the NDBC data service over a shared
+    `requests.Session`, and stores request, response pairs in one
+    `RequestCache` per station so that the cache limit can be enforced
+    at the station level.
+
+    Attributes:
+        cache_limit (:int:): The handler's global limit for caching
+            `NdbcApi` responses. This is implemented as a least-recently
+            used cache, designed to conserve NDBC resources when querying
+            measurements for a given station over similar time ranges.
+        logger (:obj:`logging.Logger`): The logger at which to register HTTP
+            request and response status codes and headers used for debug
+            purposes.
+        delay (:int:): The HTTP(s) request delay parameter, in seconds.
+        retries (:int:): The number of times to retry a request to the NDBC
+            data service.
+        backoff_factor (:float:): The back-off parameter, used in conjunction
+            with `retries` to re-attempt requests to the NDBC data service.
+        headers (:dict:): The headers with which to execute the requests to
+            the NDBC data service.
+        debug (:bool:): A flag for verbose logging and response-level status
+            reporting. Affects the instance's `logging.Logger` and the
+            behavior of its private `RequestHandler` instance.
+        verify_https (:bool:): A flag which indicates whether to attempt
+            requests to the NDBC data service over HTTP or HTTPS.
+    """
+
+    class Station:
+        """A single NDBC station and the cache of requests made against it.
+
+        Attributes:
+            id_ (:str:): The key for the `Station` object.
+            reqs (:obj:`ndbc_api.utilities.RequestCache`): The `RequestCache`
+                for the Station with the given `id_`, uses the cache limit of
+                its parent `RequestHandler`.
+        """
+        __slots__ = 'id_', 'reqs'
+
+        def __init__(self, station_id: str, cache_limit: int) -> None:
+            self.id_ = station_id
+            self.reqs = RequestCache(cache_limit)
+
+    def __init__(
+        self,
+        cache_limit: int,
+        log: Callable[[Union[str, int, dict]], None],
+        delay: int,
+        retries: int,
+        backoff_factor: float,
+        headers: dict = None,
+        debug: bool = True,
+        verify_https: bool = True,
+    ) -> None:
+        self._cache_limit = cache_limit
+        self._request_headers = headers or {}
+        self.log = log
+        self.stations = []
+        self._delay = delay
+        self._retries = retries
+        self._backoff_factor = backoff_factor
+        self._debug = debug
+        self._verify_https = verify_https
+        self._session = self._create_session()
+
+    def get_cache_limit(self) -> int:
+        """Return the current station-level cache limit for NDBC requests."""
+        return self._cache_limit
+
+    def set_cache_limit(self, cache_limit: int) -> None:
+        """Set a new station-level cache limit for NDBC requests."""
+        self._cache_limit = cache_limit
+
+    def get_headers(self) -> dict:
+        """Return the current headers used for NDBC data service requests."""
+        return self._request_headers
+
+    def update_headers(self, new: dict) -> None:
+        """Add new headers to future NDBC data service requests."""
+        self._request_headers.update(new)
+
+    def set_headers(self, request_headers: dict) -> None:
+        """Reset the request headers using the new supplied headers."""
+        self._request_headers = request_headers
+
+    def has_station(self, station_id: Union[str, int]) -> bool:
+        """Determine if the NDBC API already made a request to this station."""
+        for s in self.stations:
+            if s.id_ == station_id:
+                return True
+        return False
+
+    def get_station(self, station_id: Union[str, int]) -> Station:
+        """Get `RequestCache` with `id_` matching the supplied `station_id`."""
+        if isinstance(station_id, int):
+            station_id = str(station_id)
+        if not self.has_station(station_id):
+            self.log(logging.DEBUG,
+                     station_id=station_id,
+                     message=f'Adding station {station_id} to cache.')
+            self.add_station(station_id=station_id)
+        for s in self.stations:
+            if s.id_ == station_id:
+                self.log(logging.DEBUG,
+                         station_id=station_id,
+                         message=f'Found station {station_id} in cache.')
+                return s
+
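`get_station` lazily creates one `Station` (and thus one `RequestCache`) per id, so every later lookup returns the same object. A minimal sketch with a no-op logger and a hypothetical station id:

```python3
handler = RequestHandler(
    cache_limit=2,
    log=lambda *args, **kwargs: None,  # no-op stand-in for NdbcApi.log
    delay=1,
    retries=3,
    backoff_factor=0.8,
)

first = handler.get_station('tplm2')   # not yet cached: added on first access
second = handler.get_station('tplm2')  # found in handler.stations
print(first is second)                 # True -- one RequestCache per station
```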
+    def add_station(self, station_id: Union[str, int]) -> None:
+        """Add a new `RequestCache` for the supplied `station_id`."""
+        self.stations.append(
+            RequestHandler.Station(station_id=station_id,
+                                   cache_limit=self._cache_limit))
+
+    def handle_requests(self, station_id: Union[str, int],
+                        reqs: List[str]) -> List[str]:  # pragma: no cover
+        """Handle many string-valued requests against a supplied station."""
+        responses = []
+        self.log(
+            logging.INFO,
+            message=f'Handling {len(reqs)} requests for station {station_id}.')
+        for req in reqs:
+            responses.append(self.handle_request(station_id=station_id,
+                                                 req=req))
+        return responses
+
+    def handle_request(self, station_id: Union[str, int], req: str) -> dict:
+        """Handle a single string-valued request against a supplied station."""
+        stn = self.get_station(station_id=station_id)
+        self.log(logging.DEBUG, message=f'Handling request {req}.')
+        if req not in stn.reqs.cache:
+            self.log(logging.DEBUG, message=f'Adding request {req} to cache.')
+            resp = self.execute_request(url=req,
+                                        station_id=station_id,
+                                        headers=self._request_headers)
+            stn.reqs.put(request=req, response=resp)
+        else:
+            self.log(logging.DEBUG, message=f'Request {req} already in cache.')
+        return stn.reqs.get(request=req)
+
+    def execute_request(self, station_id: Union[str, int], url: str,
+                        headers: dict) -> dict:  # pragma: no cover
+        """Execute a request with the current headers to NDBC data service."""
+        self.log(logging.DEBUG,
+                 station_id=station_id,
+                 message=f'GET: {url}',
+                 extra_data={'headers': headers})
+        response = self._session.get(
+            url=url,
+            headers=headers,
+            allow_redirects=True,
+            verify=self._verify_https,
+        )
+        self.log(logging.DEBUG,
+                 station_id=station_id,
+                 message=f'Response status: {response.status_code}')
+        if response.status_code != 200:  # web request did not succeed
+            return dict(status=response.status_code, body='')
+        elif any([
+                'netcdf' in response.headers.get('Content-Type', '').lower(),
+                'octet' in response.headers.get('Content-Type', '').lower()
+        ]):
+            return dict(status=response.status_code, body=response.content)
+        return dict(status=response.status_code, body=response.text)
+
+    """ PRIVATE """
+
+    def _create_session(self) -> requests.Session:
+        """Create a new `Session` using the `RequestHandler` configuration."""
+        self.log(logging.DEBUG, message='Creating new session.')
+        session = requests.Session()
+        retry = Retry(
+            backoff_factor=self._backoff_factor,
+            total=self._retries,
+        )
+        http_adapter = requests.adapters.HTTPAdapter(max_retries=retry)
+        session.mount('https://', http_adapter)
+        session.mount('http://', http_adapter)
+        self.log(logging.INFO, message='Created session.')
+        return session
diff --git a/ndbc_api/utilities/singleton.py b/ndbc_api/utilities/singleton.py
new file mode 100644
index 0000000..372a6e7
--- /dev/null
+++ b/ndbc_api/utilities/singleton.py
@@ -0,0 +1,14 @@
+"""
+A metaclass for singleton types.
+""" + + +class Singleton(type): + + _instances = {} + + def __call__(cls, *args, **kwargs): + if cls not in cls._instances: + cls._instances[cls] = super(Singleton, + cls).__call__(*args, **kwargs) + return cls._instances[cls] From eb29f81ac8a95f6e7865ab52e319cd614b77e33a Mon Sep 17 00:00:00 2001 From: Jack Griffin <106121980+BluIsBest@users.noreply.github.com> Date: Sat, 3 May 2025 22:16:53 -0400 Subject: [PATCH 13/47] Delete ndbc_api.py --- ndbc_api.py | 832 ---------------------------------------------------- 1 file changed, 832 deletions(-) delete mode 100644 ndbc_api.py diff --git a/ndbc_api.py b/ndbc_api.py deleted file mode 100644 index 7da62ee..0000000 --- a/ndbc_api.py +++ /dev/null @@ -1,832 +0,0 @@ -"""An API for retrieving data from the NDBC. - -This module defines the `NdbcApi`, the top-level object which creates, handles, -caches, parses, and returns NDBC data. - -Example: - ```python3 - from ndbc_api import NdbcApi - api = NdbcApi() - available_stations = api.stations() - modes = api.get_modes() - df_stdmet_tplm2 = api.get_data( - 'tplm2', - 'stdmet', - '2020-01-01', - '2022-01-01', - as_df=True - ) - ``` - -Attributes: - log (:obj:`logging.Logger`): The logger at which to register HTTP - request and response status codes and headers used for debug - purposes. - headers(:dict:): The request headers for use in the NDBC API's request - handler. -""" -import logging -import pickle -import warnings -from concurrent.futures import ThreadPoolExecutor, as_completed -from datetime import datetime, timedelta -from typing import Any, List, Sequence, Tuple, Union, Dict, Optional - -import xarray -import pandas as pd - -from .api.handlers.http.data import DataHandler -from .api.handlers.http.stations import StationsHandler -from .config import (DEFAULT_CACHE_LIMIT, HTTP_BACKOFF_FACTOR, HTTP_DEBUG, - HTTP_DELAY, HTTP_RETRY, LOGGER_NAME, VERIFY_HTTPS) -from .exceptions import (HandlerException, ParserException, RequestException, - ResponseException, TimestampException) -from .utilities.req_handler import RequestHandler -from .utilities.singleton import Singleton -from .utilities.log_formatter import LogFormatter -from .api.handlers.opendap.data import OpenDapDataHandler -from .utilities.opendap.dataset import concat_datasets, merge_datasets, filter_dataset_by_variable, filter_dataset_by_time_range - - -class NdbcApi(metaclass=Singleton): - """An API for querying the National Data Buoy Center. - - The `NdbcApi` is metaclassed as a singleton to conserve NDBC resources. It - uses two private handlers to build requests and parse responses to the NDBC - over HTTP(s). It also uses a LRU-cached request handler to execute requests - against the NDBC, logging response statuses as they are executed. - - Attributes: - logging_level: The `logging.Logger`s log level, 1 if the `debug` - flag is set in the `__init__` method, and 0 otherwise. - cache_limit: The handler's global limit for caching - `NdbcApi` responses. This is implemented as a least-recently - used cache, designed to conserve NDBC resources when querying - measurements for a given station over similar time ranges. - delay: The HTTP(s) request delay parameter, in seconds. - retries: = The number of times to retry a request to the NDBC data - service. - backoff_factor: The back-off parameter, used in conjunction with - `retries` to re-attempt requests to the NDBC data service. - verify_https: A flag which indicates whether to attempt requests to the - NDBC data service over HTTP or HTTPS. 
- debug: A flag for verbose logging and response-level status reporting. - Affects the instance's `logging.Logger` and the behavior of its - private `RequestHandler` instance. - """ - - logger = logging.getLogger(LOGGER_NAME) - warnings.simplefilter(action='ignore', category=FutureWarning) - - def __init__( - self, - logging_level: int = logging.WARNING if HTTP_DEBUG else logging.ERROR, - filename: Any = None, - cache_limit: int = DEFAULT_CACHE_LIMIT, - headers: dict = {}, - delay: int = HTTP_DELAY, - retries: int = HTTP_RETRY, - backoff_factor: float = HTTP_BACKOFF_FACTOR, - verify_https: bool = VERIFY_HTTPS, - debug: bool = HTTP_DEBUG, - ): - """Initializes the singleton `NdbcApi`, sets associated handlers.""" - self.cache_limit = cache_limit - self.headers = headers - self._handler = self._get_request_handler( - cache_limit=self.cache_limit, - delay=delay, - retries=retries, - backoff_factor=backoff_factor, - headers=self.headers, - debug=debug, - verify_https=verify_https, - ) - self._stations_api = StationsHandler - self._data_api = DataHandler - self._opendap_data_api = OpenDapDataHandler - self.configure_logging(level=logging_level, filename=filename) - - def dump_cache(self, dest_fp: Union[str, None] = None) -> Union[dict, None]: - """Dump the request cache to dict or the specified filepath. - - Dump the request, response pairs stored in the `NdbcApi`'s - `Request_handler` as a `dict`, either returning the object, if no - `dest_fp` is specified, or serializing (pickling) the object and writing - it to the specified `dest_fp`. - - Args: - dest_fp: The destination filepath for the serialized `RequestsCache` - contents. - - Returns: - The cached request, response pairs as a `dict`, or `None` if a - `dest_fp` is specified when calling the method. - """ - data = dict() - ids = [r.id_ for r in self._handler.stations] - caches = [r.reqs.cache for r in self._handler.stations] - if ids: - for station_id, cache in zip(ids, caches): - data[station_id] = dict() - reqs = cache.keys() - for req in reqs: - resp = cache[req].v - data[station_id][req] = resp - if dest_fp: - with open(dest_fp, 'wb') as f: - pickle.dump(data, f) - else: - return data - - def clear_cache(self) -> None: - """Clear the request cache and create a new handler.""" - del self._handler - self._handler = self._get_request_handler( - cache_limit=self.cache_limit, - delay=HTTP_DELAY, - retries=HTTP_RETRY, - backoff_factor=HTTP_BACKOFF_FACTOR, - headers=self.headers, - debug=HTTP_DEBUG, - verify_https=VERIFY_HTTPS, - ) - - def set_cache_limit(self, new_limit: int) -> None: - """Set the cache limit for the API's request cache.""" - self._handler.set_cache_limit(cache_limit=new_limit) - - def get_cache_limit(self) -> int: - """Get the cache limit for the API's request cache.""" - return self._handler.get_cache_limit() - - def get_headers(self) -> dict: - """Return the current headers used by the request handler.""" - return self._handler.get_headers() - - def update_headers(self, new: dict) -> None: - """Add new headers to the request handler.""" - self._handler.update_headers(new) - - def set_headers(self, request_headers: dict) -> None: - """Reset the request headers using the new supplied headers.""" - self._handler.set_headers(request_headers) - - def configure_logging(self, level=logging.WARNING, filename=None) -> None: - """Configures logging for the NdbcApi. - - Args: - level (int, optional): The logging level. Defaults to logging.WARNING. - filename (str, optional): If provided, logs to the specified file. 
- """ - self.logger.setLevel(level) - - handler: logging.Handler - formatter: logging.Formatter - - for handler in self.logger.handlers[:]: - self.logger.removeHandler(handler) - - if filename: - handler = logging.FileHandler(filename) - formatter = logging.Formatter( - '[%(asctime)s][%(levelname)s]: %(message)s') - else: - handler = logging.StreamHandler() - formatter = LogFormatter('[%(levelname)s]: %(message)s') - - handler.setFormatter(formatter) - self.logger.addHandler(handler) - - def log(self, - level: int, - station_id: Union[int, str, None] = None, - mode: Union[str, None] = None, - message: Union[str, None] = None, - **extra_data) -> None: - """Logs a structured message with metadata. - - Args: - level (int): The logging level. - station_id (str, optional): The NDBC station ID. - mode (str, optional): The data mode. - message (str, optional): The log message. - **extra_data: Additional key-value pairs to include in the log. - """ - log_data = {} - if station_id: - log_data['station_id'] = station_id - if mode: - log_data['mode'] = mode - if message: - log_data['message'] = message - for k, v in extra_data.items(): - log_data[k] = v - self.logger.log(level, log_data) - - def stations(self, as_df: bool = True) -> Union[pd.DataFrame, dict]: - """Get all stations and station metadata from the NDBC. - - Query the NDBC data service for the current available data buoys - (stations), both those maintained by the NDBC and those whose - measurements are managed by the NDBC. Stations are returned by default - as rows of a `pandas.DataFrame`, alongside their realtime data coverage - for some common measurements, their latitude and longitude, and current - station status notes maintained by the NDBC. - - Args: - as_df: Flag indicating whether to return current station data as a - `pandas.DataFrame` if set to `True` or as a `dict` if `False`. - - Returns: - The current station data from the NDBC data service, either as a - `pandas.DataFrame` or as a `dict` depending on the value of `as_df`. - - Raises: - ResponseException: An error occurred while retrieving and parsing - responses from the NDBC data service. - """ - try: - data = self._stations_api.stations(handler=self._handler) - return self._handle_data(data, as_df, cols=None) - except (ResponseException, ValueError, KeyError) as e: - raise ResponseException('Failed to handle returned data.') from e - - def historical_stations(self, - as_df: bool = True) -> Union[pd.DataFrame, dict]: - """Get historical stations and station metadata from the NDBC. - - Query the NDBC data service for the historical data buoys - (stations), both those maintained by the NDBC and those which are not. - Stations are returned by default as rows of a `pandas.DataFrame`, - alongside their historical data coverage, with one row per tuple of - (station, historical deployment). - - Args: - as_df: Flag indicating whether to return current station data as a - `pandas.DataFrame` if set to `True` or as a `dict` if `False`. - - Returns: - The current station data from the NDBC data service, either as a - `pandas.DataFrame` or as a `dict` depending on the value of `as_df`. - - Raises: - ResponseException: An error occurred while retrieving and parsing - responses from the NDBC data service. 
- """ - try: - data = self._stations_api.historical_stations(handler=self._handler) - return self._handle_data(data, as_df, cols=None) - except (ResponseException, ValueError, KeyError) as e: - raise ResponseException('Failed to handle returned data.') from e - - def nearest_station( - self, - lat: Union[str, float, None] = None, - lon: Union[str, float, None] = None, - ) -> str: - """Get nearest station to the specified lat/lon. - - Use the NDBC data service's current station data to determine the - nearest station to the specified latitude and longitude (either as - `float` or as DD.dd[E/W] strings). - - Args: - lat: The latitude of interest, used to determine the closest - maintained station to the given position. - lon: The longitude of interest, used to determine the closest - maintained station to the given position. - - Returns: - The station id (e.g. `'tplm2'` or `'41001'`) of the nearest station - with active measurements to the specified lat/lon pair. - - Raises: - ValueError: The latitude and longitude were not both specified when - querying for the closest station. - """ - if not (lat and lon): - raise ValueError('lat and lon must be specified.') - nearest_station = self._stations_api.nearest_station( - handler=self._handler, lat=lat, lon=lon) - return nearest_station - - def radial_search( - self, - lat: Union[str, float, None] = None, - lon: Union[str, float, None] = None, - radius: float = -1, - units: str = 'km', - ) -> pd.DataFrame: - """Get all stations within radius units of the specified lat/lon. - - Use the NDBC data service's current station data to determine the - stations within radius of the specified latitude and longitude - (passed either as `float` or as DD.dd[E/W] strings). - - Args: - lat (float): The latitude of interest, used to determine the maintained - stations within radius units of the given position. - lon (float): The longitude of interest, used to determine the maintained - stations within radius units of the given position. - radius (float): The radius in the specified units to search for stations - within. - units (str: 'nm', 'km', or 'mi'): The units of the radius, either 'nm', 'km', or 'mi'. - - Returns: - A `pandas.DataFrame` of the stations within the specified radius of - the given lat/lon pair. - - Raises: - ValueError: The latitude and longitude were not both specified when - querying for the closest station, or the radius or units are - invalid. - """ - if not (lat and lon): - raise ValueError('lat and lon must be specified.') - stations_in_radius = self._stations_api.radial_search( - handler=self._handler, lat=lat, lon=lon, radius=radius, units=units) - return stations_in_radius - - def station(self, - station_id: Union[str, int], - as_df: bool = False) -> Union[pd.DataFrame, dict]: - """Get metadata for the given station from the NDBC. - - The NDBC maintains some station-level metadata including status notes, - location information, inclement weather warnings, and measurement notes. - This method is used to request, handle, and parse the metadata for the - given station from the station's NDBC webpage. - - Args: - station_id: The NDBC station ID (e.g. `'tplm2'` or `41001`) for the - station of interest. - as_df: Whether to return station-level data as a `pandas.DataFrame`, - defaults to `False`, and a `dict` is returned. - - Returns: - The station metadata for the given station, either as a `dict` or as - a `pandas.DataFrame` if the `as_df` flag is set to `True`. 
- - Raises: - ResponseException: An error occurred when requesting and parsing - responses for the specified station. - """ - station_id = self._parse_station_id(station_id) - try: - data = self._stations_api.metadata(handler=self._handler, - station_id=station_id) - return self._handle_data(data, as_df, cols=None) - except (ResponseException, ValueError, KeyError) as e: - raise ResponseException('Failed to handle returned data.') from e - - def available_realtime( - self, - station_id: Union[str, int], - full_response: bool = False, - as_df: Optional[bool] = None, - ) -> Union[List[str], pd.DataFrame, dict]: - """Get the available realtime modalities for a station. - - While most data buoy (station) measurements are available over - multi-year time ranges, some measurements depreciate or become - unavailable for substantial periods of time. This method queries the - NDBC station webpage for those measurements, and their links, which are - available or were available over the last 45 days. - - Args: - station_id: The NDBC station ID (e.g. `'tplm2'` or `41001`) for the - station of interest. - full_response: Whether to return the full response from the NDBC - API, defaults to `False` and a list of modes from `get_modes()` - is returned. If `True`, the full URL for each data mode is - included in the returned `dict` or `pandas.DataFrame`. - as_df: Whether to return station-level data as a `pandas.DataFrame`, - defaults to `False`, and a `dict` is returned. - - Returns: - The available realtime measurements for the specified station, - alongside their NDBC data links, either as a `dict` or as a - `pandas.DataFrame` if the `as_df` flag is set to `True`. - - Raises: - ResponseException: An error occurred when requesting and parsing - responses for the specified station. - """ - station_id = self._parse_station_id(station_id) - try: - station_realtime = self._stations_api.realtime( - handler=self._handler, station_id=station_id) - full_data = {} - if full_response: - if as_df is None: - as_df = False - full_data = self._handle_data(station_realtime, - as_df, - cols=None) - return full_data - else: - full_data = self._handle_data(station_realtime, - as_df=False, - cols=None) - - # Parse the modes from the full response - _modes = self.get_modes() - station_modes = set() - for k in full_data: - for m in _modes: - if m in full_data[k]['description']: - station_modes.add(m) - return list(station_modes) - except (ResponseException, ValueError, KeyError) as e: - raise ResponseException('Failed to handle returned data.') from e - - def available_historical(self, - station_id: Union[str, int], - as_df: bool = False) -> Union[pd.DataFrame, dict]: - """Get the available historical measurements for a station. - - This method queries the NDBC station webpage for historical, quality - controlled measurements and their associated availability time ranges. - - Args: - station_id: The NDBC station ID (e.g. `'tplm2'` or `41001`) for the - station of interest. - as_df: Whether to return station-level data as a `pandas.DataFrame`, - defaults to `False`, and a `dict` is returned. - - Returns: - The available historical measurements for the specified station, - alongside their NDBC data links, either as a `dict` or as a - `pandas.DataFrame` if the `as_df` flag is set to `True`. - - Raises: - ResponseException: An error occurred when requesting and parsing - responses for the specified station. 
- """ - station_id = self._parse_station_id(station_id) - try: - data = self._stations_api.historical(handler=self._handler, - station_id=station_id) - return self._handle_data(data, as_df, cols=None) - except (ResponseException, ValueError, KeyError) as e: - raise ResponseException('Failed to handle returned data.') from e - - def get_data( - self, - station_id: Union[int, str, None] = None, - mode: Union[str, None] = None, - start_time: Union[str, datetime] = datetime.now() - timedelta(days=30), - end_time: Union[str, datetime] = datetime.now(), - use_timestamp: bool = True, - as_df: bool = True, - cols: List[str] = None, - station_ids: Union[Sequence[Union[int, str]], None] = None, - modes: Union[List[str], None] = None, - as_xarray_dataset: bool = False, - use_opendap: Optional[bool] = None, - ) -> Union[pd.DataFrame, xarray.Dataset, dict]: - """Execute data query against the specified NDBC station(s). - - Query the NDBC data service for station-level measurements, using the - `mode` parameter to determine the measurement type (e.g. `'stdmet'` for - standard meterological data or `'cwind'` for continuous winds data). The - time range and data columns of interest may also be specified, such that - a tailored set of requests are executed against the NDBC data service to - generate a single `pandas.DataFrame` or `dict` matching the conditions - specified in the method call. When calling `get_data` with `station_ids` - the station identifier is added as a column to the returned data. - - Args: - station_id: The NDBC station ID (e.g. `'tplm2'` or `41001`) for the - station of interest. - station_ids: A list of NDBC station IDs (e.g. `['tplm2', '41001']`) - for the stations of interest. - mode: The data measurement type to query for the station (e.g. - `'stdmet'` for standard meterological data or `'cwind'` for - continuous winds data). - modes: A list of data measurement types to query for the stations - (e.g. `['stdmet', 'cwind']`). - start_time: The first timestamp of interest (in UTC) for the data - query, defaulting to 30 days before the current system time. - end_time: The last timestamp of interest (in UTC) for the data - query, defaulting to the current system time. - use_timestamp: A flag indicating whether to parse the NDBC data - service column headers as a timestamp, and to use this timestamp - as the index. - as_df: Whether to return station-level data as a `pandas.DataFrame`, - defaults to `True`, if `False` a `dict` is returned unless - `as_xarray_dataset` is set to `True`. - as_xarray_dataset: Whether to return tbe data as an `xarray.Dataset`, - defaults to `False`. - cols: A list of columns of interest which are selected from the - available data columns, such that only the desired columns are - returned. All columns are returned if `None` is specified. - use_opendap: An alias for `as_xarray_dataset`. - - Returns: - The available station(s) measurements for the specified modes, time - range, and columns, either as a `dict` or as a `pandas.DataFrame` - if the `as_df` flag is set to `True`. - - Raises: - ValueError: Both `station_id` and `station_ids` are `None`, or both - are not `None`. This is also raised if `mode` and `modes` are - `None`, or both are not `None` - RequestException: The specified mode is not available. - ResponseException: There was an error in executing and parsing the - required requests against the NDBC data service. - HandlerException: There was an error in handling the returned data - as a `dict` or `pandas.DataFrame`. 
- """ - if use_opendap is not None: - as_xarray_dataset = use_opendap - - as_df = as_df and not as_xarray_dataset - - self.log(logging.DEBUG, - message=f"`get_data` called with arguments: {locals()}") - if station_id is None and station_ids is None: - raise ValueError('Both `station_id` and `station_ids` are `None`.') - if station_id is not None and station_ids is not None: - raise ValueError('`station_id` and `station_ids` cannot both be ' - 'specified.') - if mode is None and modes is None: - raise ValueError('Both `mode` and `modes` are `None`.') - if mode is not None and modes is not None: - raise ValueError('`mode` and `modes` cannot both be specified.') - - handle_station_ids: List[Union[int, str]] = [] - handle_modes: List[str] = [] - - if station_id is not None: - handle_station_ids.append(station_id) - if station_ids is not None: - handle_station_ids.extend(station_ids) - if mode is not None: - handle_modes.append(mode) - if modes is not None: - handle_modes.extend(modes) - - for mode in handle_modes: - if mode not in self.get_modes(use_opendap=as_xarray_dataset): - raise RequestException(f"Mode {mode} is not available.") - - self.log(logging.INFO, - message=(f"Processing request for station_ids " - f"{handle_station_ids} and modes " - f"{handle_modes}")) - - # accumulated_data records the handled response and parsed station_id - # as a tuple, with the data as the first value and the id as the second. - accumulated_data: Dict[str, Dict[str, Union[pd.DataFrame, dict]]] = {} - for mode in handle_modes: - accumulated_data[mode] = [] - - with ThreadPoolExecutor( - max_workers=len(handle_station_ids)) as station_executor: - station_futures = {} - for station_id in handle_station_ids: - station_futures[station_id] = station_executor.submit( - self._handle_get_data, - mode=mode, - station_id=station_id, - start_time=start_time, - end_time=end_time, - use_timestamp=use_timestamp, - as_df=as_df, - cols=cols, - use_opendap=as_xarray_dataset, - ) - - for future in as_completed(station_futures.values()): - try: - station_data, station_id = future.result() - self.log( - level=logging.DEBUG, - station_id=station_id, - message= - f"Successfully processed request for station_id {station_id}" - ) - if as_df: - station_data['station_id'] = station_id - accumulated_data[mode].append(station_data) - except (RequestException, ResponseException, - HandlerException) as e: - self.log( - level=logging.WARN, - station_id=station_id, - message=( - f"Failed to process request for station_id " - f"{station_id} with error: {e}")) - self.log(logging.INFO, message="Finished processing request.") - return self._handle_accumulate_data(accumulated_data) - - def get_modes(self, - use_opendap: bool = False, - as_xarray_dataset: Optional[bool] = None) -> List[str]: - """Get the list of supported modes for `get_data(...)`. - - Args: - use_opendap (bool): Whether to return the available - modes for opendap `xarray.Dataset` data. - as_xarray_dataset (bool): An alias for `use_opendap`. - - Returns: - (List[str]) the available modalities. - """ - if as_xarray_dataset is not None: - use_opendap = as_xarray_dataset - - if use_opendap: - return [ - v for v in vars(self._opendap_data_api) if not v.startswith('_') - ] - return [v for v in vars(self._data_api) if not v.startswith('_')] - - @staticmethod - def save_xarray_dataset(dataset: xarray.Dataset, output_filepath: str, - **kwargs) -> None: - """ - Saves an `xarray.Dataset` to netCDF a user-specified file path. - - Args: - dataset: The xarray dataset to save. 
- output_filepath: The path to save the dataset to. - **kwargs: Additional keyword arguments to pass to `dataset.to_netcdf`. - - Returns: - None: The dataset is written to disk - """ - dataset.to_netcdf(output_filepath, **kwargs) - - """ PRIVATE """ - - def _get_request_handler( - self, - cache_limit: int, - delay: int, - retries: int, - backoff_factor: float, - headers: dict, - debug: bool, - verify_https: bool, - ) -> Any: - """Build a new `RequestHandler` for the `NdbcApi`.""" - return RequestHandler( - cache_limit=cache_limit or self.cache_limit, - log=self.log, - delay=delay, - retries=retries, - backoff_factor=backoff_factor, - headers=headers, - debug=debug, - verify_https=verify_https, - ) - - @staticmethod - def _parse_station_id(station_id: Union[str, int]) -> str: - """Parse station id.""" - station_id = str(station_id) # expect string-valued station id - station_id = station_id.lower() # expect lowercased station id - return station_id - - @staticmethod - def _handle_timestamp(timestamp: Union[datetime, str]) -> datetime: - """Convert the specified timestamp to `datetime.datetime`.""" - if isinstance(timestamp, datetime): - return timestamp - else: - try: - return datetime.strptime(timestamp, '%Y-%m-%d %H:%M') - except ValueError as e: - raise TimestampException from e - - @staticmethod - def _enforce_timerange(df: pd.DataFrame, start_time: datetime, - end_time: datetime) -> pd.DataFrame: - """Down-select to the data within the specified `datetime` range.""" - try: - df = df.loc[(df.index.values >= pd.Timestamp(start_time)) & - (df.index.values <= pd.Timestamp(end_time))] - except ValueError as e: - raise TimestampException( - 'Failed to enforce `start_time` to `end_time` range.') from e - return df - - @staticmethod - def _handle_data(data: pd.DataFrame, - as_df: bool = True, - cols: List[str] = None) -> Union[pd.DataFrame, dict]: - """Apply column down selection and return format handling.""" - if cols: - try: - data = data[[*cols]] - except (KeyError, ValueError) as e: - raise ParserException( - 'Failed to parse column selection.') from e - if as_df and isinstance(data, pd.DataFrame): - return data - elif isinstance(data, pd.DataFrame) and not as_df: - return data.to_dict() - elif as_df: - try: - return pd.DataFrame().from_dict(data, orient='index') - except (NotImplementedError, ValueError, TypeError) as e: - raise HandlerException( - 'Failed to convert `pd.DataFrame` to `dict`.') from e - else: - return data - - def _handle_accumulate_data( - self, - accumulated_data: Dict[str, List[Union[pd.DataFrame, dict, - xarray.Dataset]]], - ) -> Union[pd.DataFrame, dict]: - """Accumulate the data from multiple stations and modes.""" - for k in list(accumulated_data.keys()): - if not accumulated_data[k]: - del accumulated_data[k] - - if not accumulated_data: - return {} - - return_as_df = isinstance( - accumulated_data[list(accumulated_data.keys())[-1]][0], - pd.DataFrame) - use_opendap = isinstance( - accumulated_data[list(accumulated_data.keys())[-1]][0], - xarray.Dataset) - - data: Union[List[pd.DataFrame], List[xarray.Dataset], - dict] = [] if return_as_df or use_opendap else {} - - for mode, station_data in accumulated_data.items(): - if return_as_df: - data.extend(station_data) - elif use_opendap: - data.extend(station_data) - else: - data[mode] = station_data - - if return_as_df: - df = pd.concat(data, axis=0) - df.reset_index(inplace=True, drop=False) - df.set_index(['timestamp', 'station_id'], inplace=True) - return df - elif use_opendap: - return merge_datasets(data) - 
return data - - def _handle_get_data( - self, - mode: str, - station_id: str, - start_time: datetime, - end_time: datetime, - use_timestamp: bool, - as_df: bool = True, - cols: List[str] = None, - use_opendap: bool = False, - ) -> Tuple[Union[pd.DataFrame, xarray.Dataset, dict], str]: - start_time = self._handle_timestamp(start_time) - end_time = self._handle_timestamp(end_time) - station_id = self._parse_station_id(station_id) - if use_opendap: - data_api_call = getattr(self._opendap_data_api, mode, None) - else: - data_api_call = getattr(self._data_api, mode, None) - if not data_api_call: - raise RequestException( - 'Please supply a supported mode from `get_modes()`.') - try: - data = data_api_call( - self._handler, - station_id, - start_time, - end_time, - use_timestamp, - ) - except (ResponseException, ValueError, TypeError, KeyError) as e: - raise ResponseException( - f'Failed to handle API call.\nRaised from {e}') from e - if use_timestamp: - if use_opendap: - data = filter_dataset_by_time_range(data, start_time, end_time) - else: - data = self._enforce_timerange(df=data, - start_time=start_time, - end_time=end_time) - try: - if use_opendap: - if cols: - handled_data = filter_dataset_by_variable(data, cols) - else: - handled_data = data - else: - handled_data = self._handle_data(data, as_df, cols) - except (ValueError, KeyError, AttributeError) as e: - raise ParserException( - f'Failed to handle returned data.\nRaised from {e}') from e - - return (handled_data, station_id) From 0602663b807749cae87a53a60a4325db8fa1018c Mon Sep 17 00:00:00 2001 From: Jack Griffin <106121980+BluIsBest@users.noreply.github.com> Date: Sat, 3 May 2025 22:17:02 -0400 Subject: [PATCH 14/47] Delete exceptions.py --- exceptions.py | 29 ----------------------------- 1 file changed, 29 deletions(-) delete mode 100644 exceptions.py diff --git a/exceptions.py b/exceptions.py deleted file mode 100644 index 2c6caaa..0000000 --- a/exceptions.py +++ /dev/null @@ -1,29 +0,0 @@ -class NdbcException(Exception): - """Base exception that all other NDBC exceptions subclass from.""" - - def __init__(self, message: str = ''): # pragma: no cover - self.message = message - super().__init__(self.message) - - def __str__(self): # pragma: no cover - return f"NDBC API: {self.message or 'unspecified error'}" - - -class TimestampException(NdbcException): - """Unable to handle given timestamp.""" - - -class RequestException(NdbcException): - """Unable to build the given request.""" - - -class ResponseException(NdbcException): - """Unable to handle the given response.""" - - -class ParserException(NdbcException): - """Unable to parse the given response.""" - - -class HandlerException(NdbcException): - """Error when handling this API call.""" From 283a9133c284477d6cd6e53b8da68b0d542c0ad8 Mon Sep 17 00:00:00 2001 From: Jack Griffin <106121980+BluIsBest@users.noreply.github.com> Date: Sat, 3 May 2025 22:17:10 -0400 Subject: [PATCH 15/47] Delete __init__.py --- __init__.py | 0 1 file changed, 0 insertions(+), 0 deletions(-) delete mode 100644 __init__.py diff --git a/__init__.py b/__init__.py deleted file mode 100644 index e69de29..0000000 From 20a7a52ce62f06e65981f26ff4ae7d6181151f8e Mon Sep 17 00:00:00 2001 From: Jack Griffin <106121980+BluIsBest@users.noreply.github.com> Date: Sat, 3 May 2025 22:17:56 -0400 Subject: [PATCH 16/47] Create IGNORE --- ndbc_api/api/IGNORE | 1 + 1 file changed, 1 insertion(+) create mode 100644 ndbc_api/api/IGNORE diff --git a/ndbc_api/api/IGNORE b/ndbc_api/api/IGNORE new file mode 100644 index 
From 283a9133c284477d6cd6e53b8da68b0d542c0ad8 Mon Sep 17 00:00:00 2001
From: Jack Griffin <106121980+BluIsBest@users.noreply.github.com>
Date: Sat, 3 May 2025 22:17:10 -0400
Subject: [PATCH 15/47] Delete __init__.py

---
 __init__.py | 0
 1 file changed, 0 insertions(+), 0 deletions(-)
 delete mode 100644 __init__.py

diff --git a/__init__.py b/__init__.py
deleted file mode 100644
index e69de29..0000000

From 20a7a52ce62f06e65981f26ff4ae7d6181151f8e Mon Sep 17 00:00:00 2001
From: Jack Griffin <106121980+BluIsBest@users.noreply.github.com>
Date: Sat, 3 May 2025 22:17:56 -0400
Subject: [PATCH 16/47] Create IGNORE

---
 ndbc_api/api/IGNORE | 1 +
 1 file changed, 1 insertion(+)
 create mode 100644 ndbc_api/api/IGNORE

diff --git a/ndbc_api/api/IGNORE b/ndbc_api/api/IGNORE
new file mode 100644
index 0000000..8b13789
--- /dev/null
+++ b/ndbc_api/api/IGNORE
@@ -0,0 +1 @@
+

From 91d41addac2765d36b984f3538f21f6272af5f38 Mon Sep 17 00:00:00 2001
From: Jack Griffin <106121980+BluIsBest@users.noreply.github.com>
Date: Sat, 3 May 2025 22:20:18 -0400
Subject: [PATCH 17/47] part 1

---
 ndbc_api/api/__init__.py | 0
 .../api/__pycache__/__init__.cpython-311.pyc | Bin 0 -> 185 bytes
 ndbc_api/api/requests/__init__.py | 0
 .../__pycache__/__init__.cpython-311.pyc | Bin 0 -> 194 bytes
 ndbc_api/api/requests/http/__init__.py | 0
 .../http/__pycache__/__init__.cpython-311.pyc | Bin 0 -> 199 bytes
 .../http/__pycache__/_base.cpython-311.pyc | Bin 0 -> 6498 bytes
 .../http/__pycache__/_core.cpython-311.pyc | Bin 0 -> 701 bytes
 .../active_stations.cpython-311.pyc | Bin 0 -> 862 bytes
 .../http/__pycache__/adcp.cpython-311.pyc | Bin 0 -> 1225 bytes
 .../http/__pycache__/cwind.cpython-311.pyc | Bin 0 -> 1257 bytes
 .../historical_stations.cpython-311.pyc | Bin 0 -> 884 bytes
 .../http/__pycache__/ocean.cpython-311.pyc | Bin 0 -> 1257 bytes
 .../http/__pycache__/spec.cpython-311.pyc | Bin 0 -> 1189 bytes
 .../station_historical.cpython-311.pyc | Bin 0 -> 934 bytes
 .../station_metadata.cpython-311.pyc | Bin 0 -> 916 bytes
 .../station_realtime.cpython-311.pyc | Bin 0 -> 930 bytes
 .../http/__pycache__/stdmet.cpython-311.pyc | Bin 0 -> 1223 bytes
 .../http/__pycache__/supl.cpython-311.pyc | Bin 0 -> 1225 bytes
 .../http/__pycache__/swden.cpython-311.pyc | Bin 0 -> 1257 bytes
 .../http/__pycache__/swdir.cpython-311.pyc | Bin 0 -> 1257 bytes
 .../http/__pycache__/swdir2.cpython-311.pyc | Bin 0 -> 1262 bytes
 .../http/__pycache__/swr1.cpython-311.pyc | Bin 0 -> 1225 bytes
 .../http/__pycache__/swr2.cpython-311.pyc | Bin 0 -> 1225 bytes
 ndbc_api/api/requests/http/_base.py | 105 ++++++++++++++++++
 ndbc_api/api/requests/http/_core.py | 7 ++
 ndbc_api/api/requests/http/active_stations.py | 10 ++
 ndbc_api/api/requests/http/adcp.py | 17 +++
 ndbc_api/api/requests/http/cwind.py | 17 +++
 .../api/requests/http/historical_stations.py | 10 ++
 ndbc_api/api/requests/http/ocean.py | 17 +++
 ndbc_api/api/requests/http/spec.py | 16 +++
 .../api/requests/http/station_historical.py | 10 ++
 .../api/requests/http/station_metadata.py | 10 ++
 .../api/requests/http/station_realtime.py | 10 ++
 ndbc_api/api/requests/http/stdmet.py | 16 +++
 ndbc_api/api/requests/http/supl.py | 17 +++
 ndbc_api/api/requests/http/swden.py | 17 +++
 ndbc_api/api/requests/http/swdir.py | 17 +++
 ndbc_api/api/requests/http/swdir2.py | 17 +++
 ndbc_api/api/requests/http/swr1.py | 17 +++
 ndbc_api/api/requests/http/swr2.py | 17 +++
 ndbc_api/api/requests/opendap/__init__.py | 0
 .../__pycache__/__init__.cpython-311.pyc | Bin 0 -> 202 bytes
 .../opendap/__pycache__/_base.cpython-311.pyc | Bin 0 -> 4449 bytes
 .../opendap/__pycache__/_core.cpython-311.pyc | Bin 0 -> 713 bytes
 .../opendap/__pycache__/adcp.cpython-311.pyc | Bin 0 -> 1202 bytes
 .../opendap/__pycache__/cwind.cpython-311.pyc | Bin 0 -> 1233 bytes
 .../opendap/__pycache__/ocean.cpython-311.pyc | Bin 0 -> 1233 bytes
 .../opendap/__pycache__/pwind.cpython-311.pyc | Bin 0 -> 1233 bytes
 .../__pycache__/stdmet.cpython-311.pyc | Bin 0 -> 1237 bytes
 .../opendap/__pycache__/swden.cpython-311.pyc | Bin 0 -> 1233 bytes
 .../__pycache__/wlevel.cpython-311.pyc | Bin 0 -> 1237 bytes
 ndbc_api/api/requests/opendap/_base.py | 82 ++++++++++++++
 ndbc_api/api/requests/opendap/_core.py | 7 ++
 ndbc_api/api/requests/opendap/adcp.py | 16 +++
 ndbc_api/api/requests/opendap/cwind.py | 16 +++
 ndbc_api/api/requests/opendap/ocean.py | 16 +++
 ndbc_api/api/requests/opendap/pwind.py | 16 +++
 ndbc_api/api/requests/opendap/stdmet.py | 16 +++
 ndbc_api/api/requests/opendap/swden.py | 16 +++
 ndbc_api/api/requests/opendap/wlevel.py | 16 +++
 62 files changed, 548 insertions(+)
 create mode 100644 ndbc_api/api/__init__.py
 create mode 100644 ndbc_api/api/__pycache__/__init__.cpython-311.pyc
 create mode 100644 ndbc_api/api/requests/__init__.py
 create mode 100644 ndbc_api/api/requests/__pycache__/__init__.cpython-311.pyc
 create mode 100644 ndbc_api/api/requests/http/__init__.py
 create mode 100644 ndbc_api/api/requests/http/__pycache__/__init__.cpython-311.pyc
 create mode 100644 ndbc_api/api/requests/http/__pycache__/_base.cpython-311.pyc
 create mode 100644 ndbc_api/api/requests/http/__pycache__/_core.cpython-311.pyc
 create mode 100644 ndbc_api/api/requests/http/__pycache__/active_stations.cpython-311.pyc
 create mode 100644 ndbc_api/api/requests/http/__pycache__/adcp.cpython-311.pyc
 create mode 100644 ndbc_api/api/requests/http/__pycache__/cwind.cpython-311.pyc
 create mode 100644 ndbc_api/api/requests/http/__pycache__/historical_stations.cpython-311.pyc
 create mode 100644 ndbc_api/api/requests/http/__pycache__/ocean.cpython-311.pyc
 create mode 100644 ndbc_api/api/requests/http/__pycache__/spec.cpython-311.pyc
 create mode 100644 ndbc_api/api/requests/http/__pycache__/station_historical.cpython-311.pyc
 create mode 100644 ndbc_api/api/requests/http/__pycache__/station_metadata.cpython-311.pyc
 create mode 100644 ndbc_api/api/requests/http/__pycache__/station_realtime.cpython-311.pyc
 create mode 100644 ndbc_api/api/requests/http/__pycache__/stdmet.cpython-311.pyc
 create mode 100644 ndbc_api/api/requests/http/__pycache__/supl.cpython-311.pyc
 create mode 100644 ndbc_api/api/requests/http/__pycache__/swden.cpython-311.pyc
 create mode 100644 ndbc_api/api/requests/http/__pycache__/swdir.cpython-311.pyc
 create mode 100644 ndbc_api/api/requests/http/__pycache__/swdir2.cpython-311.pyc
 create mode 100644 ndbc_api/api/requests/http/__pycache__/swr1.cpython-311.pyc
 create mode 100644 ndbc_api/api/requests/http/__pycache__/swr2.cpython-311.pyc
 create mode 100644 ndbc_api/api/requests/http/_base.py
 create mode 100644 ndbc_api/api/requests/http/_core.py
 create mode 100644 ndbc_api/api/requests/http/active_stations.py
 create mode 100644 ndbc_api/api/requests/http/adcp.py
 create mode 100644 ndbc_api/api/requests/http/cwind.py
 create mode 100644 ndbc_api/api/requests/http/historical_stations.py
 create mode 100644 ndbc_api/api/requests/http/ocean.py
 create mode 100644 ndbc_api/api/requests/http/spec.py
 create mode 100644 ndbc_api/api/requests/http/station_historical.py
 create mode 100644 ndbc_api/api/requests/http/station_metadata.py
 create mode 100644 ndbc_api/api/requests/http/station_realtime.py
 create mode 100644 ndbc_api/api/requests/http/stdmet.py
 create mode 100644 ndbc_api/api/requests/http/supl.py
 create mode 100644 ndbc_api/api/requests/http/swden.py
 create mode 100644 ndbc_api/api/requests/http/swdir.py
 create mode 100644 ndbc_api/api/requests/http/swdir2.py
 create mode 100644 ndbc_api/api/requests/http/swr1.py
 create mode 100644 ndbc_api/api/requests/http/swr2.py
 create mode 100644 ndbc_api/api/requests/opendap/__init__.py
 create mode 100644 ndbc_api/api/requests/opendap/__pycache__/__init__.cpython-311.pyc
 create mode 100644 ndbc_api/api/requests/opendap/__pycache__/_base.cpython-311.pyc
 create mode 100644 ndbc_api/api/requests/opendap/__pycache__/_core.cpython-311.pyc
 create mode 100644 ndbc_api/api/requests/opendap/__pycache__/adcp.cpython-311.pyc
 create mode 100644 ndbc_api/api/requests/opendap/__pycache__/cwind.cpython-311.pyc
 create mode 100644 ndbc_api/api/requests/opendap/__pycache__/ocean.cpython-311.pyc
 create mode 100644 ndbc_api/api/requests/opendap/__pycache__/pwind.cpython-311.pyc
 create mode 100644 ndbc_api/api/requests/opendap/__pycache__/stdmet.cpython-311.pyc
 create mode 100644 ndbc_api/api/requests/opendap/__pycache__/swden.cpython-311.pyc
 create mode 100644 ndbc_api/api/requests/opendap/__pycache__/wlevel.cpython-311.pyc
 create mode 100644 ndbc_api/api/requests/opendap/_base.py
 create mode 100644 ndbc_api/api/requests/opendap/_core.py
 create mode 100644 ndbc_api/api/requests/opendap/adcp.py
 create mode 100644 ndbc_api/api/requests/opendap/cwind.py
 create mode 100644 ndbc_api/api/requests/opendap/ocean.py
 create mode 100644 ndbc_api/api/requests/opendap/pwind.py
 create mode 100644 ndbc_api/api/requests/opendap/stdmet.py
 create mode 100644 ndbc_api/api/requests/opendap/swden.py
 create mode 100644 ndbc_api/api/requests/opendap/wlevel.py

diff --git a/ndbc_api/api/__init__.py b/ndbc_api/api/__init__.py
new file mode 100644
index 0000000..e69de29
diff --git a/ndbc_api/api/requests/__init__.py b/ndbc_api/api/requests/__init__.py
new file mode 100644
index 0000000..e69de29
diff --git a/ndbc_api/api/requests/http/__init__.py b/ndbc_api/api/requests/http/__init__.py
new file mode 100644
index 0000000..e69de29
[GIT binary patches for the committed requests/http __pycache__/*.cpython-311.pyc files
omitted: compiled bytecode, nothing human-reviewable.]
zAysdWWmuQdMrUQMgW5ZkUq;K?vfiz(WRJ%4x};pMQO4qOTacNqdp^M|(lZ|mbl2xh zNy$>mq}8S#j9uU19AmC%_Q|pG>)zFi_FYaHw{JM^qfzF~%y6{k!w4w4v` ztEBELoW^*e?)a^YS-d@QYj`S16-#3yxD_0{UYeY`JyE(kJcdixM#iT~#nQ+`kP6i) zQ!!U1fzAbEs*_B$H>@Vev;=gi?_-=ttXHPEYVZ)-uXd?kghm1WR2o@a`?sAruu^RF z9$C5BIC^}`IJ_OpH8njpgw}Wylz*Fue53DlQ&$U(v;CVV-#mzdD7V+{=wR+!eHb0J zHyy3818rvGc{404FvU{MtIUSw1%2&~7Gy^Y?T!{yM+7VBqC3hnAlRaNWg@Bmjp$?JNv!w`T6YkzIXR3lSzV-?1!K8 zhYf(Q+UZEF4fcu(yaWOS1yBJ8kj6%UDu!c(*bK~yoL`D4V+Gaymty)F~HU`@PVFIV!YwPpNg43k3%ea14SR zlNgRgOeaQdV$ry|IYDeS)k9JR)OKiX#B8j#1mRj5Ht~<#Mc!r#m%B@dkrk@g@ zoo%NeZ63PI@q7d%@@0|>SNMxn?2`~Pfprx!L8~M}Hlw1>s-D%o13*)$sR#>rfnI_@ zf1*MW-;ovAQr>oVWwVPqJG5OvE5?f1ORy>cjps}m`LVE){kUoB5Ac?&M9OtQsC z)Dyh?l)7SpvbJBYl35SCHNUK9`%+$B5TaI&2+Y@(Siibs`B)Fp`JOa0Y6Blu>c=X4 zx1oRF#ly{!kJ+Km*`Y7lbDy&3-Y&nt*gSt_*F^mVv=3Uh0QD8no_Yy#X_>&d>Q*Sm zGKq0zk<w^<1?jVX>>}a zLVhY#EG!afa>1DHCDRQK%aNHjg&qw8jB|(`R3WZge3c#2yL2}~r2$`8N)^|6b}|F2 z#pc0dtGAoSPd2QhJF)(jVa5t*lea+IcZtY02ZvgwUTB^fetY`O{m6-8f9qWn>;r2L zqNDx}M=R`bC$sUK9hN0+u~hS`^I>^%u+!7L>S?~y)4c9!9?z=ZmFaILEbTQ!hv|ZU zB`!t0UuLJ2tC`?SO0`UcP!moxdPftEG7fK6n?XRcI;SAp$MfaYAPgzg|LJ~rK$qLPh)CG14>c^i>1+KXF?X%Kg^5; zK{@0QsT{c=t~sTu!hvItJ#kK}O10(`i4!+R0SP$u&G?6!c0BXm_vX#ce(!thUkZgB z;3&R)e<$YvyfVRH_SV6uYG1ZnT!;)F(}ksWk}@iKVwQaE z#;FuWN0B#X+#DLCG)^NaDV2ui6pr-|H!giw`$=*kYgeN9-er+2FDH%K_0@PK67}n% zd6&mZ)*kRkt#HwgYYny()2Nly%<8{X)>f2i)r@@Q)~cAmH$0mBm%!x_iUmx|GMabm z@HT*nLr-sS9{rrK{94IZcTemhG0nV@oQ{>KZ8I$x>I&d`2v=!fdU;anPJHZLr1wxrzoW8~k<) z2iGf|1C#4lJ70da<4x}R6J5vkr#EF6(EXORt}}JK>tfhBdFsW{=eI1jiBHU)3&Cd_ zx9ts|q--cYA8?r~1-1|!sMS`|xMK^_Q-k)Ev3=!1`^u($Wx9m_Nnt(@Tg7fK6n?XRcI;SAp$Mg-Y7i2tg=(o4l`7N{{$lDj4Jb(wES5%_oe5c3|1dKa z1m%!Jq~_R5#5JcN6b>9Y_SAEzD%F}(q@H?n6e_ixcr*S1(~f7}``*0S+3$UC{iIOH z0gmFY|K7ZnF4j~YOos`s_(6QJ}-MSZg1RS^u%-aCwb8p85 z*u~Wwq0fQ^C}sXKVTtH0ixgK$og?;U6RFVWFGZ5i^ZRWsRos8hI?EB!;Y~C@Ar}rI zVGufugf4SJkGY}G1LpAzzB$VRTp2#bGn2zxmb06%Ojvefd6}!NN}3ItFpGtvhQH482J4|faMc!#*c;vsU_>+`Sn6Bk zF(~pjt5EX6Sc4smc85FLLo_&H{2Ezv*4$BPkvf?xx!RZQ78fFe$8=$-ourJ4o|q+H zyKyRo(NW}$88?TxdN=l`nIfY~W^^MEtYClRYWbL~szIRn5%gaflc6~KoiA4Rn zXx`9 zGxIucg{gD3(q!6|N(j?LVLEFo(S^QtF;7!UOGF&Olr+7)AdZ?-rVZ8@AXm{~WrN>- z;plp$b7XS;`_73^_Poh`f1>NS{`9u&0=gGj>pD}Pc3liRpMC!P)U#U_+r)e3z=hz0 zjobEy_fj?#9}c+8l>%Fc4%BKZY22{|>8U~c%Gkd0pnYZ2zA|0H|D-UVhb`tQ^e!_c zzRrH#Q~JC(iJ_@oHqmrlLP!Tb-W#1AIKDSJ_u;F((b)}h1RMKb^YfVlv-|3QQkKvg D!lE|= literal 0 HcmV?d00001 diff --git a/ndbc_api/api/requests/http/_base.py b/ndbc_api/api/requests/http/_base.py new file mode 100644 index 0000000..517653c --- /dev/null +++ b/ndbc_api/api/requests/http/_base.py @@ -0,0 +1,105 @@ +import os +from calendar import month_abbr +from datetime import datetime, timedelta +from typing import List + +from ndbc_api.api.requests.http._core import CoreRequest + + +class BaseRequest(CoreRequest): + + REAL_TIME_URL_PREFIX = 'data/realtime2/' + HISTORICAL_FILE_EXTENSION_SUFFIX = '.txt.gz' + HISTORICAL_DATA_PREFIX = '&dir=data/' + HISTORICAL_URL_PREFIX = 'view_text_file.php?filename=' + HISTORICAL_SUFFIX = 'historical/' + HISTORICAL_IDENTIFIER = 'h' + FORMAT = '' + FILE_FORMAT = '' + + @classmethod + def build_request(cls, station_id: str, start_time: datetime, + end_time: datetime) -> List[str]: + + if 'MOCKDATE' in os.environ: + now = datetime.strptime(os.getenv('MOCKDATE'), '%Y-%m-%d') + else: + now = datetime.now() + is_historical = (now - start_time) >= timedelta(days=44) + if is_historical: + return cls._build_request_historical( + station_id=station_id, + start_time=start_time, + end_time=end_time, + now=now, + ) + return 
cls._build_request_realtime(station_id=station_id) + + @classmethod + def _build_request_historical( + cls, + station_id: str, + start_time: datetime, + end_time: datetime, + now: datetime, + ) -> List[str]: + + def req_hist_helper_year(req_year: int) -> str: + return f'{cls.BASE_URL}{cls.HISTORICAL_URL_PREFIX}{station_id}{cls.HISTORICAL_IDENTIFIER}{req_year}{cls.HISTORICAL_FILE_EXTENSION_SUFFIX}{cls.HISTORICAL_DATA_PREFIX}{cls.HISTORICAL_SUFFIX}{cls.FORMAT}/' + + def req_hist_helper_month(req_year: int, req_month: int) -> str: + month = month_abbr[req_month] + month = month.capitalize() + return f'{cls.BASE_URL}{cls.HISTORICAL_URL_PREFIX}{station_id}{req_month}{req_year}{cls.HISTORICAL_FILE_EXTENSION_SUFFIX}{cls.HISTORICAL_DATA_PREFIX}{cls.FORMAT}/{month}/' + + def req_hist_helper_month_current(current_month: int) -> str: + month = month_abbr[current_month] + month = month.capitalize() + return f'{cls.BASE_URL}data/{cls.FORMAT}/{month}/{station_id.lower()}.txt' + + if not cls.FORMAT: # pragma: no cover + raise ValueError( + 'Please provide a format for this historical data request, or call a formatted child class\'s method.' + ) + # store request urls + reqs = [] + + current_year = now.year + has_realtime = (now - end_time) < timedelta(days=44) + months_req_year = (now - timedelta(days=44)).year + last_avail_month = (now - timedelta(days=44)).month + + # handle year requests + for hist_year in range(int(start_time.year), + min(int(current_year), + int(end_time.year) + 1)): + reqs.append(req_hist_helper_year(hist_year)) + + # handle month requests + if end_time.year == months_req_year: + for hist_month in range( + int(start_time.month), + min(int(end_time.month), int(last_avail_month)) + 1): + reqs.append(req_hist_helper_month(months_req_year, hist_month)) + if int(last_avail_month) <= (end_time.month): + reqs.append(req_hist_helper_month_current( + int(last_avail_month))) + + if has_realtime: + reqs.append( + cls._build_request_realtime( + station_id=station_id)[0] # only one URL + ) + return reqs + + @classmethod + def _build_request_realtime(cls, station_id: str) -> List[str]: + if not cls.FILE_FORMAT: + raise ValueError( + 'Please provide a file format for this historical data request, or call a formatted child class\'s method.' 
+ ) + + station_id = station_id.upper() + return [ + f'{cls.BASE_URL}{cls.REAL_TIME_URL_PREFIX}{station_id}{cls.FILE_FORMAT}' + ] diff --git a/ndbc_api/api/requests/http/_core.py b/ndbc_api/api/requests/http/_core.py new file mode 100644 index 0000000..8924923 --- /dev/null +++ b/ndbc_api/api/requests/http/_core.py @@ -0,0 +1,7 @@ +class CoreRequest: + + BASE_URL = 'https://www.ndbc.noaa.gov/' + + @classmethod + def build_request(cls) -> str: + return cls.BASE_URL diff --git a/ndbc_api/api/requests/http/active_stations.py b/ndbc_api/api/requests/http/active_stations.py new file mode 100644 index 0000000..0f87aa6 --- /dev/null +++ b/ndbc_api/api/requests/http/active_stations.py @@ -0,0 +1,10 @@ +from ndbc_api.api.requests.http._core import CoreRequest + + +class ActiveStationsRequest(CoreRequest): + + STATIONS_URL = 'activestations.xml' + + @classmethod + def build_request(cls) -> str: + return f'{cls.BASE_URL}{cls.STATIONS_URL}' diff --git a/ndbc_api/api/requests/http/adcp.py b/ndbc_api/api/requests/http/adcp.py new file mode 100644 index 0000000..b9d062a --- /dev/null +++ b/ndbc_api/api/requests/http/adcp.py @@ -0,0 +1,17 @@ +from datetime import datetime +from typing import List + +from ndbc_api.api.requests.http._base import BaseRequest + + +class AdcpRequest(BaseRequest): + + FORMAT = 'adcp' + FILE_FORMAT = '.adcp' + HISTORICAL_IDENTIFIER = 'a' + + @classmethod + def build_request(cls, station_id: str, start_time: datetime, + end_time: datetime) -> List[str]: + return super(AdcpRequest, cls).build_request(station_id, start_time, + end_time) diff --git a/ndbc_api/api/requests/http/cwind.py b/ndbc_api/api/requests/http/cwind.py new file mode 100644 index 0000000..dabc359 --- /dev/null +++ b/ndbc_api/api/requests/http/cwind.py @@ -0,0 +1,17 @@ +from datetime import datetime +from typing import List + +from ndbc_api.api.requests.http._base import BaseRequest + + +class CwindRequest(BaseRequest): + + FORMAT = 'cwind' + FILE_FORMAT = '.cwind' + HISTORICAL_IDENTIFIER = 'c' + + @classmethod + def build_request(cls, station_id: str, start_time: datetime, + end_time: datetime) -> List[str]: + return super(CwindRequest, cls).build_request(station_id, start_time, + end_time) diff --git a/ndbc_api/api/requests/http/historical_stations.py b/ndbc_api/api/requests/http/historical_stations.py new file mode 100644 index 0000000..022ac3b --- /dev/null +++ b/ndbc_api/api/requests/http/historical_stations.py @@ -0,0 +1,10 @@ +from ndbc_api.api.requests.http._core import CoreRequest + + +class HistoricalStationsRequest(CoreRequest): + + STATIONS_URL = 'metadata/stationmetadata.xml' + + @classmethod + def build_request(cls) -> str: + return f'{cls.BASE_URL}{cls.STATIONS_URL}' diff --git a/ndbc_api/api/requests/http/ocean.py b/ndbc_api/api/requests/http/ocean.py new file mode 100644 index 0000000..da485ee --- /dev/null +++ b/ndbc_api/api/requests/http/ocean.py @@ -0,0 +1,17 @@ +from datetime import datetime +from typing import List + +from ndbc_api.api.requests.http._base import BaseRequest + + +class OceanRequest(BaseRequest): + + FORMAT = 'ocean' + FILE_FORMAT = '.ocean' + HISTORICAL_IDENTIFIER = 'o' + + @classmethod + def build_request(cls, station_id: str, start_time: datetime, + end_time: datetime) -> List[str]: + return super(OceanRequest, cls).build_request(station_id, start_time, + end_time) diff --git a/ndbc_api/api/requests/http/spec.py b/ndbc_api/api/requests/http/spec.py new file mode 100644 index 0000000..e3eccb5 --- /dev/null +++ b/ndbc_api/api/requests/http/spec.py @@ -0,0 +1,16 @@ +from 
diff --git a/ndbc_api/api/requests/http/spec.py b/ndbc_api/api/requests/http/spec.py
new file mode 100644
index 0000000..e3eccb5
--- /dev/null
+++ b/ndbc_api/api/requests/http/spec.py
@@ -0,0 +1,16 @@
+from datetime import datetime
+from typing import List
+
+from ndbc_api.api.requests.http._base import BaseRequest
+
+
+class SpecRequest(BaseRequest):
+
+    FORMAT = 'spec'
+    FILE_FORMAT = '.spec'
+
+    @classmethod
+    def build_request(cls, station_id: str, start_time: datetime,
+                      end_time: datetime) -> List[str]:
+        return super(SpecRequest, cls).build_request(station_id, start_time,
+                                                     end_time)
diff --git a/ndbc_api/api/requests/http/station_historical.py b/ndbc_api/api/requests/http/station_historical.py
new file mode 100644
index 0000000..c15eac0
--- /dev/null
+++ b/ndbc_api/api/requests/http/station_historical.py
@@ -0,0 +1,10 @@
+from ndbc_api.api.requests.http._core import CoreRequest
+
+
+class HistoricalRequest(CoreRequest):
+
+    STATION_HISTORY_PREFIX = 'station_history.php?station='
+
+    @classmethod
+    def build_request(cls, station_id: str) -> str:
+        return f'{cls.BASE_URL}{cls.STATION_HISTORY_PREFIX}{station_id}'
diff --git a/ndbc_api/api/requests/http/station_metadata.py b/ndbc_api/api/requests/http/station_metadata.py
new file mode 100644
index 0000000..a754f92
--- /dev/null
+++ b/ndbc_api/api/requests/http/station_metadata.py
@@ -0,0 +1,10 @@
+from ndbc_api.api.requests.http._core import CoreRequest
+
+
+class MetadataRequest(CoreRequest):
+
+    STATION_PREFIX = 'station_page.php?station='
+
+    @classmethod
+    def build_request(cls, station_id: str) -> str:
+        return f'{cls.BASE_URL}{cls.STATION_PREFIX}{station_id}'
diff --git a/ndbc_api/api/requests/http/station_realtime.py b/ndbc_api/api/requests/http/station_realtime.py
new file mode 100644
index 0000000..a483309
--- /dev/null
+++ b/ndbc_api/api/requests/http/station_realtime.py
@@ -0,0 +1,10 @@
+from ndbc_api.api.requests.http._core import CoreRequest
+
+
+class RealtimeRequest(CoreRequest):
+
+    STATION_REALTIME_PREFIX = 'station_realtime.php?station='
+
+    @classmethod
+    def build_request(cls, station_id: str) -> str:
+        return f'{cls.BASE_URL}{cls.STATION_REALTIME_PREFIX}{station_id}'
diff --git a/ndbc_api/api/requests/http/stdmet.py b/ndbc_api/api/requests/http/stdmet.py
new file mode 100644
index 0000000..a44df9e
--- /dev/null
+++ b/ndbc_api/api/requests/http/stdmet.py
@@ -0,0 +1,16 @@
+from datetime import datetime
+from typing import List
+
+from ndbc_api.api.requests.http._base import BaseRequest
+
+
+class StdmetRequest(BaseRequest):
+
+    FORMAT = 'stdmet'
+    FILE_FORMAT = '.txt'
+
+    @classmethod
+    def build_request(cls, station_id: str, start_time: datetime,
+                      end_time: datetime) -> List[str]:
+        return super(StdmetRequest, cls).build_request(station_id, start_time,
+                                                       end_time)
diff --git a/ndbc_api/api/requests/http/supl.py b/ndbc_api/api/requests/http/supl.py
new file mode 100644
index 0000000..86d1074
--- /dev/null
+++ b/ndbc_api/api/requests/http/supl.py
@@ -0,0 +1,17 @@
+from datetime import datetime
+from typing import List
+
+from ndbc_api.api.requests.http._base import BaseRequest
+
+
+class SuplRequest(BaseRequest):
+
+    FORMAT = 'supl'
+    FILE_FORMAT = '.supl'
+    HISTORICAL_IDENTIFIER = 's'
+
+    @classmethod
+    def build_request(cls, station_id: str, start_time: datetime,
+                      end_time: datetime) -> List[str]:
+        return super(SuplRequest, cls).build_request(station_id, start_time,
+                                                     end_time)
diff --git a/ndbc_api/api/requests/http/swden.py b/ndbc_api/api/requests/http/swden.py
new file mode 100644
index 0000000..0d1d2c5
--- /dev/null
+++ b/ndbc_api/api/requests/http/swden.py
@@ -0,0 +1,17 @@
+from datetime import datetime
+from typing import List
+
+from ndbc_api.api.requests.http._base import BaseRequest
+
+
+class SwdenRequest(BaseRequest):
+
+    FORMAT = 'swden'
+    FILE_FORMAT = '.swden'
+    HISTORICAL_IDENTIFIER = 'w'
+
+    @classmethod
+    def build_request(cls, station_id: str, start_time: datetime,
+                      end_time: datetime) -> List[str]:
+        return super(SwdenRequest, cls).build_request(station_id, start_time,
+                                                      end_time)
diff --git a/ndbc_api/api/requests/http/swdir.py b/ndbc_api/api/requests/http/swdir.py
new file mode 100644
index 0000000..720d6e6
--- /dev/null
+++ b/ndbc_api/api/requests/http/swdir.py
@@ -0,0 +1,17 @@
+from datetime import datetime
+from typing import List
+
+from ndbc_api.api.requests.http._base import BaseRequest
+
+
+class SwdirRequest(BaseRequest):
+
+    FORMAT = 'swdir'
+    FILE_FORMAT = '.swdir'
+    HISTORICAL_IDENTIFIER = 'd'
+
+    @classmethod
+    def build_request(cls, station_id: str, start_time: datetime,
+                      end_time: datetime) -> List[str]:
+        return super(SwdirRequest, cls).build_request(station_id, start_time,
+                                                      end_time)
diff --git a/ndbc_api/api/requests/http/swdir2.py b/ndbc_api/api/requests/http/swdir2.py
new file mode 100644
index 0000000..6b6fafd
--- /dev/null
+++ b/ndbc_api/api/requests/http/swdir2.py
@@ -0,0 +1,17 @@
+from datetime import datetime
+from typing import List
+
+from ndbc_api.api.requests.http._base import BaseRequest
+
+
+class Swdir2Request(BaseRequest):
+
+    FORMAT = 'swdir2'
+    FILE_FORMAT = '.swdir2'
+    HISTORICAL_IDENTIFIER = 'i'
+
+    @classmethod
+    def build_request(cls, station_id: str, start_time: datetime,
+                      end_time: datetime) -> List[str]:
+        return super(Swdir2Request, cls).build_request(station_id, start_time,
+                                                       end_time)
diff --git a/ndbc_api/api/requests/http/swr1.py b/ndbc_api/api/requests/http/swr1.py
new file mode 100644
index 0000000..6494f6b
--- /dev/null
+++ b/ndbc_api/api/requests/http/swr1.py
@@ -0,0 +1,17 @@
+from datetime import datetime
+from typing import List
+
+from ndbc_api.api.requests.http._base import BaseRequest
+
+
+class Swr1Request(BaseRequest):
+
+    FORMAT = 'swr1'
+    FILE_FORMAT = '.swr1'
+    HISTORICAL_IDENTIFIER = 'j'
+
+    @classmethod
+    def build_request(cls, station_id: str, start_time: datetime,
+                      end_time: datetime) -> List[str]:
+        return super(Swr1Request, cls).build_request(station_id, start_time,
+                                                     end_time)
diff --git a/ndbc_api/api/requests/http/swr2.py b/ndbc_api/api/requests/http/swr2.py
new file mode 100644
index 0000000..725a902
--- /dev/null
+++ b/ndbc_api/api/requests/http/swr2.py
@@ -0,0 +1,17 @@
+from datetime import datetime
+from typing import List
+
+from ndbc_api.api.requests.http._base import BaseRequest
+
+
+class Swr2Request(BaseRequest):
+
+    FORMAT = 'swr2'
+    FILE_FORMAT = '.swr2'
+    HISTORICAL_IDENTIFIER = 'k'
+
+    @classmethod
+    def build_request(cls, station_id: str, start_time: datetime,
+                      end_time: datetime) -> List[str]:
+        return super(Swr2Request, cls).build_request(station_id, start_time,
+                                                     end_time)
diff --git a/ndbc_api/api/requests/opendap/__init__.py b/ndbc_api/api/requests/opendap/__init__.py
new file mode 100644
index 0000000..e69de29
[GIT binary patches for the committed requests/opendap __pycache__/*.cpython-311.pyc files
omitted: compiled bytecode, nothing human-reviewable.]
z#E5i=g~uW?A*)m;vXV`-NOstZU96KFlFeJ3iqO1GwB2AILi>r?7jo9jvA!d&as&7g)iqI!_C7>3FHe`Djdx_kmS}J{75SBCI-OMNp#21RoW3 z9xJ?VFq@PR4bF-sWf>>Jta%EAtiThq?{-o86;euta1r{9auwmjO6j_xYtg(a#T)~@ zre=|P2>nk?nMgmRp$(5wR@g*yl|EB?|g=aV=Z7Gi39jR;FZjA-#&RVEyhYj&aQ zRNYazLWN8mfYZzC(BQ!AMOnh~?3fU~at24|=c9?)bJxQW0mskbFjo*JsEua-^nk`4( zN*zQDqel$!!D10DwVr%MkIfZZWn5Lpa+)sdtd0Stp`;a}(u{C~(secCDrL;uw25^o zP*;r()vY1)BW}f>w!S$=X_U64?bRz+w@X@B&=s*;61=Vm`g*S>BaZZeJ7lWnRre0y`}0$oj)VS&Z z!k?QO9vS_CY8{!J`Yd=swZxLwB&@c6e{}l7A)^NO%8(d18A}2(%AW+@ zq=bO9{2L&zR#BnNdpo8f)G|k}4L+DKxMzdE9WywC{;6Xtcz!c@J{O$M2d66~*_q2* zGxM7>^SPNwekNjwvWxMT2wkCuXmjWYJyCAJS|B)2U!Y;wdHQm##63r24z`yHpNmO= zoZJ#lUX6+pCvfvg92XQ)aS9xlD<7F7Zvk8pdV4t-00xMeQxr+$!VyRyTsS7k@*$av zOG+du1~7qI>;dwE_#JK~g!r7_p)%2gqB4+_@L}js9Wz2~SsKPTiB(p2Zap(uu#i}g zR3;uxs8(T#U_RcjD=1%WISCpmt(WLyVYjmC3NK?UB^3RBRaZoiA;%dBOttIxPnMF& z`{(7$Mvme*-9>N*^rgDLlSg#HP&o)Ed@SEdn>74I_yzhhOzW3I?c(9W5eVF%sxIyK7 z{W}cBoq*KEoq+W^PQrQ}C&?!Ib&?s?tjOK;^RquYo88w7|3{Z{`##F=`zYu7IPdy6 ztN*v1?HXbs*|{3Yzyc()#-7c_opp_&3a4C98Q0fAMF zqqSt5x@(3_R06w_t0AGg`wGY%z>qSYLEa*gVyS%}{E|vjYn7Tx18eYXg#w+X9)eQU zm{o~ZA`Lpp>&%|A2P41pNLbIzzJQNR8;NHOX&I^0`SNKPecJY>R7?uusTvW@J^QX7CddMXSXel z8r^6)O|8iqBK3yNs0-X-jUgR{qwhcStzR-4hRmQl46-)e_@Oaf>w<f1CY;atd3p9$acYHrd(C1NTi9Yd6^)M0TvYt|4{8!a2* zKX`SvOw1`TpxkSig%|>SxD6`9ge-8XRk^+tO)Tj4RDGq-ivWCHkd#io@Z~`9^CG~9 z;kzG`nAa_fKP4PT2zilsZ98dP>nHd$G)a<^GeF>zfT9Y>zN-HU=y3K;{{m{yR{yur biEQ;>vyM>|oa@c>7d<<=ljA6nkaKL4}G?RaLFJ7)z(?-K9=(Z1>Ivwd#-| zBYV2Cw5s|)n3+9A;t!+>3!Qiuy9o%Mb??6SzVr9qySr*OmjJEx?bp#)+<%zh-WoNS zt|1tK1zR9s2!dQ%05`ZQ3qV-=(6;$~!t8b(Onn3+kPr|7fh9;_%Z9L|)3t;>Y+NFH zQy&EgT+u)!r(E7lmE4mjh1B`k^C-_Vz251FRA@gDhulw6#{8r7Qzws<6he1OSIN9k z2`6(g0E0~*(-m|hIEUjoiB-r>o0Q(0*ZXhj!QM7vo5xz4f7b4T&Gqm=OQpm2jDOrx zy>7RcguACaVyeHZ(qqYU9e$Q9kEE*O&~!#w)(g#2Rr5MbGnojMg_Pq+es)?m4~t$b zXno-9$seQoe^Cy$qRs0JVBC8A<@~$%bZQ3JX+J1EN)y(Xl$J}B_ES;B20xPI`%RFXU_hlZXqO^6c)I&^PgEGR(GtOMm%$eEs2D-ug8@FBz-i@7=iA9`e7kmP^ zE!?dQ?&|Qvj9g?ZX`_laT&bG}SX8|8UR5n`s8u9pWnUtg*o2UY$WdLT$9&b$*1_E3YVeRcrRKaj)?ZA(L6&g92~;HF&J@7 zW;hm)F_YWO;&Hq=&TO9Gc5my1`NSH`6Bb|X+Q7Zd*1prR22Y67iM$djImEnj<8o!W z=qTcQBhYg6H}%aNTJ0ONo~4x zQU|48;7UoU)HM5WtQ^b?U(DZ=T*&+_*Lyf7{P}smn4eto7F|)86r~5;3uXQ>cf&<4 z8h*aW<~-__{k(n}m&$ypjMBK}yjCzU|vtQXpzp``=|9nD!5>Y9yp?;n&@E~!yF6~v(hpw(%meABpLP!-(Znw`W f9NTW6J8)^ceb(#_f}Q-=JU_duclZAjiU|Dzb8azC literal 0 HcmV?d00001 diff --git a/ndbc_api/api/requests/opendap/__pycache__/cwind.cpython-311.pyc b/ndbc_api/api/requests/opendap/__pycache__/cwind.cpython-311.pyc new file mode 100644 index 0000000000000000000000000000000000000000..ceed0276fda689e691bc3e7dd7843922019b320e GIT binary patch literal 1233 zcmZ`&O=uHA6n?XRNz;Z@ZIOy%!GlIHAW{!jq_ws+Qme!Wx-JacovGcr`RUAt8tI`( zL62Uf*Pbd;4?TMHJVbU%)|C`}6U%;?v~Cxs zr!0Xyk>a5n`js;FNrYL5RoNm*%Opk?qhXDeJ)?6lfZ|T|zJEum=V%>5$!=Rht&IRy zU{hLK9nVGw{mzkDXhmDmyBU>LquH!3OkS%}#u74Lq~~gWKro9r%$h>?0?rhDtXHW% zmDGc=7r2~b%vG7gQdWQ36BnG@oHFiQbG--C%%7k4%g)WZx8SnUO;%Z?UdWwg>V^xH zwQQ$M<~;0HeMdd5OSx02N^0Dy6H%C})>*&2<Bjj>dphbLLtjAYCZL`%v@c(RY_CXSTy{$oW0AtRR3WuM z;XaI)YHrYqSW`C&*T;)u@M`{6@kSv(F+PLyS0-nR`KkP5LG(tPlxVn6AwuUNV`_d( z4K->&q+7IkGzc)xB6d)QxEkviJEXGI1R|vXe=L>kr~TVa53Ei#jvn7JhIY;Vrly-? 
zXoELF?XQ#Z-WWdB)Rm)g=Ir)~S9fD0j{U9obg&Mr-HVU{a_6T#W5$5om{6{kNl|+rbg!nww?$6}KnCMGvSel9lUYDw=BZL}o jY^QrQ;P6iO+J(`b?$xwvwk3_A>%y?znYOE&pU!O1N)JT} zdh{Z__EeF2@aWN#=O}_mLuNbz0K!b50#tIOV8tXd1hJ0eNO=3+$ zF7Z{FIV6(A4Qc3CsKmxipNDcEyzFvXq6-bmL$C8*v_&;av|X0!sjP*9jR@EpLAFjb z+n^@VsYMK$kT)lYMU&Jz)H`7|`3zw^PsH5wZtlT}hFi^r&0y+%C+T zvIMG$0Uo-cU#nuDM3{wGl`WFAN@8R&8a7zfGkOODDDG76+jpdTfmR`u>~1hbgKoGElK;7rlS29-*y zq!h+p;Bt;JS7i=MS^i~TIq%%&lyT>V>pd7}{_Lz@b*2`*d6!kDSnWRbLhd}IZa7a_ z$9Ae@#=~yichu9llsmP$q{gi~5rz5sA{&yoJU;w4zxlp8RaJaMsvpbn&4Qr=FPybA zAF?MtWlwz0p8c3T`{w?;Q>}BOTRIvlpdC=U4JcJW+wvvI4T>bjRkuPh76TYpYNQb; zoW^*e;Rc&WID8YGzc)xA+}eBxEgDL?NeE50+CXHKaooI)BSB`_AigMjvQ+mhc?Zjwx*i} zw8q<@_P5D+Zw;Sp>&nqOJ@V%G>$|ZL$9C&29jrYo_u`{=hhq=h-z{n)XGI+eD_pGm z)w!rMalAXiyc}V^JHmWtgn2w8|B+06B~fFqAU;pE`zv`dCVG?&OH=W{7o}?J2%#1n jZT7Dg9BlTlO}NnPUu`Re)MWpypI_LDB0K+yN2UD+%m*{I literal 0 HcmV?d00001 diff --git a/ndbc_api/api/requests/opendap/__pycache__/pwind.cpython-311.pyc b/ndbc_api/api/requests/opendap/__pycache__/pwind.cpython-311.pyc new file mode 100644 index 0000000000000000000000000000000000000000..190bd1d7462d08e6179d1a1bf0207ec2be844747 GIT binary patch literal 1233 zcmZ`&O=uHA6n?XRNt1@uKSWV1^iU%h5UB?hsXwhov@K}_T^EM!&eU$*{5Z2gBRv!; z*rS)yYflxa2ag^-d5$6&=2YS90u8PLF;;+>)Y#Mkw&W9wZ4zr1 za*5B%%ps8^Zb(DFOeHpM_&k(*`jX3Okv^zX9(tYkqAjXXqTP~IPh~9>Y(&7;2(ope z*#^D^s5!@lL)gAtFlFsR!EF2M#DO*ct-DF0L7i^efPFhYiJ!p$!=Fdt&0Gb zU{hLKJ0?rhDEUna_ zN*ch}3tY}I=BmtLDa$|Y%V(WioHFiQcfI>pnLj`8SDcxKx8Sn!4680uFXYZc>V^xH zb!?|X<~;1yd`CT zewQ8nkRAP)9ebZ0d$suHMC;6j9UTpgqc5Oz8_>Ww+LbRsE-jK6SKKniSfns6S4llk zco5?UbvNiltjX!(&5I>*;96m}G+iuQzIX!{u1rjo3X_G2qDV)alxet7B|_&RV`_d( z4K->&WID8YGzc)xA+}eBxEkv?+o!VB1R|vXe|dU29XZxC4sDx5ZA~}F z(FSjW+FvK*y)}Hitt&_Ct#cR2R2{oSIba#qxlu);>o zuiT3|6C>Rb=H&?U-4W(HBh2GD`Hy7kD~TF=0r7dN-Ji+xG0_*;urw79ydhOvM+mjx jXtRH{;9#?VZNs@{|7u$UNKN+N`q{aiDDve$@u;+4D77>u literal 0 HcmV?d00001 diff --git a/ndbc_api/api/requests/opendap/__pycache__/stdmet.cpython-311.pyc b/ndbc_api/api/requests/opendap/__pycache__/stdmet.cpython-311.pyc new file mode 100644 index 0000000000000000000000000000000000000000..ed34afb00dbdb1a1347cf080131434e0a9a27365 GIT binary patch literal 1237 zcmZ`&O-vI(6n?Y2-IhYr`a=v+BMBFJun8AT(5Mvws|W-z(WPdx>BeqX{w?HK}#$E2qz4MOE=`>xgQ0_a8_kt~`QKF++sb0%kC|HPqr4eN5 zM6(PUAv!gQL8J2KC^2b_nmw%(7GfK)fJk(`Ye)KaTYjr!170F4F6)Yk#Eef0)Nfob zOdgm5Er~eyZQm^wu}cEXe5?u=FW}#JWRd451){Q38x`^;fnKBlY`64l2aXo@r$YMGobjRaN5y(1~ z>QYG^7(1TLImTR-*(+u3WNvsU|A_J2+Ge@hO2Ne=FpD>yLG6eR2ChB(se)`S#%^{f^?^dVO+Ee6k`#`xKJV$ zPvI_%mn*i{h?t|3(+{(=qWfNMW_EHqH=G^Exw|71v$@gS$h7DTSdm}BQi%wi`;4gx zGBwtq1CeMD=TOhXIE~m@8RBZNx7j(BrDhN)1^6qeWIxT{L85zYw07yrzR`0KN!2wy zl11CR4r>1p3-?-Ye_dCO+SP%*zV}Z;BaEHahdP*N*5|^bPKQGeJKrp7B5ej83Dd8Z z-Qr@d7-9y`%YP+P-$~HeI|$EH?ViMLghbzDz0y=X@TydG9U)YM k%lqxC1{e0**8yDLZ(nt@1F6aWTfcd57(|Z$6Aw!J1DPc@MF0Q* literal 0 HcmV?d00001 diff --git a/ndbc_api/api/requests/opendap/__pycache__/swden.cpython-311.pyc b/ndbc_api/api/requests/opendap/__pycache__/swden.cpython-311.pyc new file mode 100644 index 0000000000000000000000000000000000000000..eac5181690c54f82738bd350f5d36527ca980744 GIT binary patch literal 1233 zcmZ`&O-vI(6n?Y2-IhYrst|+GNWuYYViRKUAchnXum*tuCc17iS$3wdxcy;f8^mxR zhD45D2v<%Rg9nZtJ$XiBVzVa_Pu@yUqnw=CZL1ABoqhAY_hx6l_r2|_L?RAiB;Ws9 z_<;bvNT)F(b#Oc;z%~#d$b|wJfFxF2R8R~hz^bbjG($t6z&H?X14x8wyDGq*c*0>s zh&~JH=vV2cksl+LM?I%N1=c1U?uoViw#DfTU9M2>+4c8=CEzH~@tiO(1Q!YhB48*4 z87ff>jYfz{b)wOzxH(F68l!r5^MrhC6Y_{eH#&AjY_AzN+BV@SGU9?)%ZufmG9);t(MYA0(yekjZq+6GUk+ZdkG 
zKmcp7E4;mCWT%P#>=C=rnzE+0Dk@l`$+Rk{R?3vIsMzl(<|~d%Fbg$IM|{%D8#YvY(7IC!cqU=Jcw)XtBaHD=ksm= zxpVzPvhP!}?{jkCV{%}B>D|@pjiDnIrLyQ7NLvHcmPNFw)-(Fp)Xe?iIlt>}Zgy^JCO0xXfpd38C+BiwxzQQFJTJnD2hjIhoIsmZh+ROIX4 zYS}3+1eN){jTUA^3p0%tX6h}>;CbYsRLgXrlul< ls&MI`byng0LF+t(!GqRW)7y~j?7!{H!J{Da?LYZ|+HW=_G${Z8 literal 0 HcmV?d00001 diff --git a/ndbc_api/api/requests/opendap/__pycache__/wlevel.cpython-311.pyc b/ndbc_api/api/requests/opendap/__pycache__/wlevel.cpython-311.pyc new file mode 100644 index 0000000000000000000000000000000000000000..8d0ab762ea2be83a2a6120c57a50437935f86923 GIT binary patch literal 1237 zcmZ`&O-vI(6n?Y2-IkxG^@kXuMiMUcU=uEwpiwIVRuKqd)TL&!>14oaZJXd34Gba*H-b%nkIXSc2)->qr?3?fX?96-L+rEv*+d)d=)9?9X z1n^xM4T;pj;fMrVK!6|*@?Zf{So2U`v$O!~o}M=>1AzvkK#X-D5o+w{0K4)D#flK~ z0i>col$k;zMr@z@Zk|ePjJe#Gd&e!C(qeO?(QoWY7P_PgIOC!kA ziDnrzLUd{pgGS}eQDV{f-%U{aS;cCM3Hy zpKcF*PV{|E^nFVVd`S%Kt$ZA;Ub%6gqhtmhfzmZVZ5ebZUxHMJh+$l?^Auyzj&Z(7 z%AUfV7%!G>uO2Z+CZ_IZrbXAC?1Slvsq9c@3}bCI`U~jTBDof2EPzvx@QptWAzx{aE+DP@nrHawLA4%3U zJ(5A&yasCj5DWKePhU+}j_T$9z25gvLL-cw)CW44r`Bh}qfUlH4?Ei^YCL5I9SPH4 zF1dyIpfl0i7-CuuG2Iwqx<14-o|XSfroNM)v3C%jr`kP^T?>i6&U&P&c;L%Y)pUeV l6)sj List[str]: + + if 'MOCKDATE' in os.environ: + now = datetime.strptime(os.getenv('MOCKDATE'), '%Y-%m-%d') + else: + now = datetime.now() + is_historical = (now - start_time) >= timedelta( + days=45) # we use 45 rather than 44 for opendap data + if is_historical: + return cls._build_request_historical( + station_id=station_id, + start_time=start_time, + end_time=end_time, + now=now, + ) + return cls._build_request_realtime(station_id=station_id) + + @classmethod + def _build_request_historical( + cls, + station_id: str, + start_time: datetime, + end_time: datetime, + now: datetime, + ) -> List[str]: + + def req_hist_helper_year(req_year: int) -> str: + return f'{cls.BASE_URL}{cls.URL_PREFIX}{cls.FORMAT}/{station_id.lower()}/{station_id.lower()}{cls.HISTORICAL_IDENTIFIER}{req_year}.{cls.FILE_FORMAT}' + + if not cls.FORMAT: # pragma: no cover + raise ValueError( + 'Please provide a format for this historical data request, or call a formatted child class\'s method.' + ) + # store request urls + reqs = [] + + current_year = now.year + has_realtime = (now - end_time) <= timedelta(days=45) + + # handle year requests + for hist_year in range(int(start_time.year), + min(int(current_year), + int(end_time.year) + 1)): + reqs.append(req_hist_helper_year(hist_year)) + + if has_realtime: + reqs.append( + cls._build_request_realtime( + station_id=station_id)[0] # only one URL + ) + return reqs + + @classmethod + def _build_request_realtime(cls, station_id: str) -> List[str]: + if not cls.FILE_FORMAT: + raise ValueError( + 'Please provide a file format for this historical data request, or call a formatted child class\'s method.' 
+            )
+
+        station_id = station_id.upper()
+        # realtime data uses 9999 as the year part
+        return [
+            f'{cls.BASE_URL}{cls.URL_PREFIX}{cls.FORMAT}/{station_id.lower()}/{station_id.lower()}{cls.HISTORICAL_IDENTIFIER}9999.{cls.FILE_FORMAT}'
+        ]
diff --git a/ndbc_api/api/requests/opendap/_core.py b/ndbc_api/api/requests/opendap/_core.py
new file mode 100644
index 0000000..0ff02ed
--- /dev/null
+++ b/ndbc_api/api/requests/opendap/_core.py
@@ -0,0 +1,7 @@
+class CoreRequest:
+
+    BASE_URL = 'https://dods.ndbc.noaa.gov/thredds/'
+
+    @classmethod
+    def build_request(cls) -> str:
+        return cls.BASE_URL
diff --git a/ndbc_api/api/requests/opendap/adcp.py b/ndbc_api/api/requests/opendap/adcp.py
new file mode 100644
index 0000000..fe4d749
--- /dev/null
+++ b/ndbc_api/api/requests/opendap/adcp.py
@@ -0,0 +1,16 @@
+from datetime import datetime
+from typing import List
+
+from ndbc_api.api.requests.opendap._base import BaseRequest
+
+
+class AdcpRequest(BaseRequest):
+
+    FORMAT = 'adcp'
+    HISTORICAL_IDENTIFIER = 'a'
+
+    @classmethod
+    def build_request(cls, station_id: str, start_time: datetime,
+                      end_time: datetime) -> List[str]:
+        return super(AdcpRequest, cls).build_request(station_id, start_time,
+                                                     end_time)
diff --git a/ndbc_api/api/requests/opendap/cwind.py b/ndbc_api/api/requests/opendap/cwind.py
new file mode 100644
index 0000000..469706c
--- /dev/null
+++ b/ndbc_api/api/requests/opendap/cwind.py
@@ -0,0 +1,16 @@
+from datetime import datetime
+from typing import List
+
+from ndbc_api.api.requests.opendap._base import BaseRequest
+
+
+class CwindRequest(BaseRequest):
+
+    FORMAT = 'cwind'
+    HISTORICAL_IDENTIFIER = 'c'
+
+    @classmethod
+    def build_request(cls, station_id: str, start_time: datetime,
+                      end_time: datetime) -> List[str]:
+        return super(CwindRequest, cls).build_request(station_id, start_time,
+                                                      end_time)
diff --git a/ndbc_api/api/requests/opendap/ocean.py b/ndbc_api/api/requests/opendap/ocean.py
new file mode 100644
index 0000000..dde9ea1
--- /dev/null
+++ b/ndbc_api/api/requests/opendap/ocean.py
@@ -0,0 +1,16 @@
+from datetime import datetime
+from typing import List
+
+from ndbc_api.api.requests.opendap._base import BaseRequest
+
+
+class OceanRequest(BaseRequest):
+
+    FORMAT = 'ocean'
+    HISTORICAL_IDENTIFIER = 'o'
+
+    @classmethod
+    def build_request(cls, station_id: str, start_time: datetime,
+                      end_time: datetime) -> List[str]:
+        return super(OceanRequest, cls).build_request(station_id, start_time,
+                                                      end_time)
diff --git a/ndbc_api/api/requests/opendap/pwind.py b/ndbc_api/api/requests/opendap/pwind.py
new file mode 100644
index 0000000..0b5be60
--- /dev/null
+++ b/ndbc_api/api/requests/opendap/pwind.py
@@ -0,0 +1,16 @@
+from datetime import datetime
+from typing import List
+
+from ndbc_api.api.requests.opendap._base import BaseRequest
+
+
+class PwindRequest(BaseRequest):
+
+    FORMAT = 'pwind'
+    HISTORICAL_IDENTIFIER = 'p'
+
+    @classmethod
+    def build_request(cls, station_id: str, start_time: datetime,
+                      end_time: datetime) -> List[str]:
+        return super(PwindRequest, cls).build_request(station_id, start_time,
+                                                      end_time)
diff --git a/ndbc_api/api/requests/opendap/stdmet.py b/ndbc_api/api/requests/opendap/stdmet.py
new file mode 100644
index 0000000..da1dddc
--- /dev/null
+++ b/ndbc_api/api/requests/opendap/stdmet.py
@@ -0,0 +1,16 @@
+from datetime import datetime
+from typing import List
+
+from ndbc_api.api.requests.opendap._base import BaseRequest
+
+
+class StdmetRequest(BaseRequest):
+
+    FORMAT = 'stdmet'
+    HISTORICAL_IDENTIFIER = 'h'
+
+    @classmethod
+    def build_request(cls, station_id: str, start_time: datetime,
+                      end_time: datetime) -> List[str]:
+        return super(StdmetRequest, cls).build_request(station_id, start_time,
+                                                       end_time)
diff --git a/ndbc_api/api/requests/opendap/swden.py b/ndbc_api/api/requests/opendap/swden.py
new file mode 100644
index 0000000..e1bc55a
--- /dev/null
+++ b/ndbc_api/api/requests/opendap/swden.py
@@ -0,0 +1,16 @@
+from datetime import datetime
+from typing import List
+
+from ndbc_api.api.requests.opendap._base import BaseRequest
+
+
+class SwdenRequest(BaseRequest):
+
+    FORMAT = 'swden'
+    HISTORICAL_IDENTIFIER = 'w'
+
+    @classmethod
+    def build_request(cls, station_id: str, start_time: datetime,
+                      end_time: datetime) -> List[str]:
+        return super(SwdenRequest, cls).build_request(station_id, start_time,
+                                                      end_time)
diff --git a/ndbc_api/api/requests/opendap/wlevel.py b/ndbc_api/api/requests/opendap/wlevel.py
new file mode 100644
index 0000000..11bd1db
--- /dev/null
+++ b/ndbc_api/api/requests/opendap/wlevel.py
@@ -0,0 +1,16 @@
+from datetime import datetime
+from typing import List
+
+from ndbc_api.api.requests.opendap._base import BaseRequest
+
+
+class WlevelRequest(BaseRequest):
+
+    FORMAT = 'wlevel'
+    HISTORICAL_IDENTIFIER = 'l'
+
+    @classmethod
+    def build_request(cls, station_id: str, start_time: datetime,
+                      end_time: datetime) -> List[str]:
+        return super(WlevelRequest, cls).build_request(station_id, start_time,
+                                                       end_time)
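Note on patch 17: the concrete request classes are deliberately thin; each one only pins
FORMAT, FILE_FORMAT, and (where the NDBC archive uses a different letter)
HISTORICAL_IDENTIFIER, while all URL assembly happens once in the two _base.py modules.
A short sketch of calling the HTTP builder added above, assuming the patched package is
importable (the station and dates are arbitrary examples, and MOCKDATE pins "now" exactly
the way build_request itself does):

import os
from datetime import datetime

from ndbc_api.api.requests.http.stdmet import StdmetRequest

os.environ['MOCKDATE'] = '2025-05-03'
urls = StdmetRequest.build_request(
    station_id='44025',
    start_time=datetime(2024, 1, 1),
    end_time=datetime(2024, 12, 31),
)
# Expect one view_text_file.php?filename=44025h2024.txt.gz&dir=... URL per
# requested year; month and realtime URLs are appended only when the window
# reaches within 44 days of "now".
for url in urls:
    print(url)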
From 8f1da41fed760f4f82a3240df0838d5d845e356f Mon Sep 17 00:00:00 2001
From: Jack Griffin <106121980+BluIsBest@users.noreply.github.com>
Date: Sat, 3 May 2025 22:20:52 -0400
Subject: [PATCH 18/47] part 2

---
 ndbc_api/api/parsers/__init__.py | 0
 .../__pycache__/__init__.cpython-311.pyc | Bin 0 -> 193 bytes
 ndbc_api/api/parsers/http/__init__.py | 0
 .../http/__pycache__/__init__.cpython-311.pyc | Bin 0 -> 198 bytes
 .../http/__pycache__/_base.cpython-311.pyc | Bin 0 -> 6499 bytes
 .../http/__pycache__/_station.cpython-311.pyc | Bin 0 -> 3881 bytes
 .../http/__pycache__/_xml.cpython-311.pyc | Bin 0 -> 1619 bytes
 .../active_stations.cpython-311.pyc | Bin 0 -> 3459 bytes
 .../http/__pycache__/adcp.cpython-311.pyc | Bin 0 -> 2079 bytes
 .../http/__pycache__/cwind.cpython-311.pyc | Bin 0 -> 1243 bytes
 .../historical_stations.cpython-311.pyc | Bin 0 -> 3843 bytes
 .../http/__pycache__/ocean.cpython-311.pyc | Bin 0 -> 1178 bytes
 .../http/__pycache__/spec.cpython-311.pyc | Bin 0 -> 1207 bytes
 .../station_historical.cpython-311.pyc | Bin 0 -> 2550 bytes
 .../station_metadata.cpython-311.pyc | Bin 0 -> 4159 bytes
 .../station_realtime.cpython-311.pyc | Bin 0 -> 2242 bytes
 .../http/__pycache__/stdmet.cpython-311.pyc | Bin 0 -> 1246 bytes
 .../http/__pycache__/supl.cpython-311.pyc | Bin 0 -> 1251 bytes
 .../http/__pycache__/swden.cpython-311.pyc | Bin 0 -> 1651 bytes
 .../http/__pycache__/swdir.cpython-311.pyc | Bin 0 -> 1633 bytes
 .../http/__pycache__/swdir2.cpython-311.pyc | Bin 0 -> 1653 bytes
 .../http/__pycache__/swr1.cpython-311.pyc | Bin 0 -> 1630 bytes
 .../http/__pycache__/swr2.cpython-311.pyc | Bin 0 -> 1630 bytes
 ndbc_api/api/parsers/http/_base.py | 107 ++++++++++++++
 ndbc_api/api/parsers/http/_html.py | 21 +++
 ndbc_api/api/parsers/http/_station.py | 51 +++++++
 ndbc_api/api/parsers/http/_xml.py | 28 ++++
 ndbc_api/api/parsers/http/active_stations.py | 66 +++++++++
 ndbc_api/api/parsers/http/adcp.py | 138 ++++++++++++++++++
 ndbc_api/api/parsers/http/cwind.py | 17 +++
 .../api/parsers/http/historical_stations.py | 75 ++++++++++
 ndbc_api/api/parsers/http/ocean.py | 16 ++
 ndbc_api/api/parsers/http/spec.py | 17 +++
 .../api/parsers/http/station_historical.py | 34 +++++
 ndbc_api/api/parsers/http/station_metadata.py | 49 +++++++
 ndbc_api/api/parsers/http/station_realtime.py | 29 ++++
 ndbc_api/api/parsers/http/stdmet.py | 17 +++
 ndbc_api/api/parsers/http/supl.py | 17 +++
 ndbc_api/api/parsers/http/swden.py | 71 +++++++++
 ndbc_api/api/parsers/http/swdir.py | 71 +++++++++
 ndbc_api/api/parsers/http/swdir2.py | 72 +++++++++
 ndbc_api/api/parsers/http/swr1.py | 71 +++++++++
 ndbc_api/api/parsers/http/swr2.py | 71 +++++++++
 ndbc_api/api/parsers/opendap/__init__.py | 0
 .../__pycache__/__init__.cpython-311.pyc | Bin 0 -> 201 bytes
 .../opendap/__pycache__/_base.cpython-311.pyc | Bin 0 -> 3288 bytes
 .../opendap/__pycache__/adcp.cpython-311.pyc | Bin 0 -> 1256 bytes
 .../opendap/__pycache__/cwind.cpython-311.pyc | Bin 0 -> 1277 bytes
 .../opendap/__pycache__/ocean.cpython-311.pyc | Bin 0 -> 1277 bytes
 .../opendap/__pycache__/pwind.cpython-311.pyc | Bin 0 -> 1268 bytes
 .../__pycache__/stdmet.cpython-311.pyc | Bin 0 -> 1271 bytes
 .../opendap/__pycache__/swden.cpython-311.pyc | Bin 0 -> 1263 bytes
 .../__pycache__/wlevel.cpython-311.pyc | Bin 0 -> 1266 bytes
 ndbc_api/api/parsers/opendap/_base.py | 73 +++++++++
 ndbc_api/api/parsers/opendap/adcp.py | 17 +++
 ndbc_api/api/parsers/opendap/cwind.py | 17 +++
 ndbc_api/api/parsers/opendap/ocean.py | 17 +++
 ndbc_api/api/parsers/opendap/pwind.py | 17 +++
 ndbc_api/api/parsers/opendap/stdmet.py | 17 +++
 ndbc_api/api/parsers/opendap/swden.py | 17 +++
 ndbc_api/api/parsers/opendap/wlevel.py | 17 +++
 61 files changed, 1230 insertions(+)
 create mode 100644 ndbc_api/api/parsers/__init__.py
 create mode 100644 ndbc_api/api/parsers/__pycache__/__init__.cpython-311.pyc
 create mode 100644 ndbc_api/api/parsers/http/__init__.py
 create mode 100644 ndbc_api/api/parsers/http/__pycache__/__init__.cpython-311.pyc
 create mode 100644 ndbc_api/api/parsers/http/__pycache__/_base.cpython-311.pyc
 create mode 100644 ndbc_api/api/parsers/http/__pycache__/_station.cpython-311.pyc
 create mode 100644 ndbc_api/api/parsers/http/__pycache__/_xml.cpython-311.pyc
 create mode 100644 ndbc_api/api/parsers/http/__pycache__/active_stations.cpython-311.pyc
 create mode 100644 ndbc_api/api/parsers/http/__pycache__/adcp.cpython-311.pyc
 create mode 100644 ndbc_api/api/parsers/http/__pycache__/cwind.cpython-311.pyc
 create mode 100644 ndbc_api/api/parsers/http/__pycache__/historical_stations.cpython-311.pyc
 create mode 100644 ndbc_api/api/parsers/http/__pycache__/ocean.cpython-311.pyc
 create mode 100644 ndbc_api/api/parsers/http/__pycache__/spec.cpython-311.pyc
 create mode 100644 ndbc_api/api/parsers/http/__pycache__/station_historical.cpython-311.pyc
 create mode 100644 ndbc_api/api/parsers/http/__pycache__/station_metadata.cpython-311.pyc
 create mode 100644 ndbc_api/api/parsers/http/__pycache__/station_realtime.cpython-311.pyc
 create mode 100644 ndbc_api/api/parsers/http/__pycache__/stdmet.cpython-311.pyc
 create mode 100644 ndbc_api/api/parsers/http/__pycache__/supl.cpython-311.pyc
 create mode 100644 ndbc_api/api/parsers/http/__pycache__/swden.cpython-311.pyc
 create mode 100644 ndbc_api/api/parsers/http/__pycache__/swdir.cpython-311.pyc
 create mode 100644 ndbc_api/api/parsers/http/__pycache__/swdir2.cpython-311.pyc
 create mode 100644 ndbc_api/api/parsers/http/__pycache__/swr1.cpython-311.pyc
 create mode 100644 ndbc_api/api/parsers/http/__pycache__/swr2.cpython-311.pyc
 create mode 100644 ndbc_api/api/parsers/http/_base.py
 create mode 100644 ndbc_api/api/parsers/http/_html.py
 create mode 100644 ndbc_api/api/parsers/http/_station.py
 create mode 100644 ndbc_api/api/parsers/http/_xml.py
 create mode 100644 ndbc_api/api/parsers/http/active_stations.py
 create mode 100644 ndbc_api/api/parsers/http/adcp.py
 create mode 100644 ndbc_api/api/parsers/http/cwind.py
 create mode 100644 ndbc_api/api/parsers/http/historical_stations.py
 create mode 100644 ndbc_api/api/parsers/http/ocean.py
 create mode 100644 ndbc_api/api/parsers/http/spec.py
 create mode 100644 ndbc_api/api/parsers/http/station_historical.py
 create mode 100644 ndbc_api/api/parsers/http/station_metadata.py
 create mode 100644 ndbc_api/api/parsers/http/station_realtime.py
 create mode 100644 ndbc_api/api/parsers/http/stdmet.py
 create mode 100644 ndbc_api/api/parsers/http/supl.py
 create mode 100644 ndbc_api/api/parsers/http/swden.py
 create mode 100644 ndbc_api/api/parsers/http/swdir.py
 create mode 100644 ndbc_api/api/parsers/http/swdir2.py
 create mode 100644 ndbc_api/api/parsers/http/swr1.py
 create mode 100644 ndbc_api/api/parsers/http/swr2.py
 create mode 100644 ndbc_api/api/parsers/opendap/__init__.py
 create mode 100644 ndbc_api/api/parsers/opendap/__pycache__/__init__.cpython-311.pyc
 create mode 100644 ndbc_api/api/parsers/opendap/__pycache__/_base.cpython-311.pyc
 create mode 100644 ndbc_api/api/parsers/opendap/__pycache__/adcp.cpython-311.pyc
 create mode 100644 ndbc_api/api/parsers/opendap/__pycache__/cwind.cpython-311.pyc
 create mode 100644 ndbc_api/api/parsers/opendap/__pycache__/ocean.cpython-311.pyc
 create mode 100644 ndbc_api/api/parsers/opendap/__pycache__/pwind.cpython-311.pyc
 create mode 100644 ndbc_api/api/parsers/opendap/__pycache__/stdmet.cpython-311.pyc
 create mode 100644 ndbc_api/api/parsers/opendap/__pycache__/swden.cpython-311.pyc
 create mode 100644 ndbc_api/api/parsers/opendap/__pycache__/wlevel.cpython-311.pyc
 create mode 100644 ndbc_api/api/parsers/opendap/_base.py
 create mode 100644 ndbc_api/api/parsers/opendap/adcp.py
 create mode 100644 ndbc_api/api/parsers/opendap/cwind.py
 create mode 100644 ndbc_api/api/parsers/opendap/ocean.py
 create mode 100644 ndbc_api/api/parsers/opendap/pwind.py
 create mode 100644 ndbc_api/api/parsers/opendap/stdmet.py
 create mode 100644 ndbc_api/api/parsers/opendap/swden.py
 create mode 100644 ndbc_api/api/parsers/opendap/wlevel.py

diff --git a/ndbc_api/api/parsers/__init__.py b/ndbc_api/api/parsers/__init__.py
new file mode 100644
index 0000000..e69de29
diff --git a/ndbc_api/api/parsers/http/__init__.py b/ndbc_api/api/parsers/http/__init__.py
new file mode 100644
index 0000000..e69de29
[GIT binary patches for the committed parser __pycache__/*.cpython-311.pyc files omitted:
compiled bytecode, nothing human-reviewable. The source text breaks off inside this binary
section, before the parser .py diffs listed above.]
z8emBI#h>YEPNEd2GK1*jp_E0nWQ$U!x5(TD*?VMm`iSf~YGrt{%4JKn#a8lGQMP1< zrh1FY-sjAAY95R z7rB$@0;BT8cSm(AoTM@>Kl>HL0}K}6rQ6FV%uhtItbj*b<>}M1r0AA&l9UWMb#4+0 z)^IW@#YMd?gr%?u#IO^cO^UiT0&X5wbSKnGN+=o^rHi^-#EE1`1Xzed8yJ+m+~$xI zxF}~kBwZT9IDs+Q)Ph$N=KB&E0_>$dW=;)KpfGTGZU<9U5z;H~iw z0D0u`cf*l$@8IafL^M8r^it$Z7*8F=iL+8fk;gAcVdad3m&)UD@pL2snVL|%j?Q+*0ZdYoR9n)kQr34p@yckLu+a;AkKb- zF=%>3b2ombURUQva_+9YyKCP2$lbl*?$(6n$3pufps2nK!HOIyPqSbVUUakG{Y`}==iPi?3SFrC-3ih?BDyyzc=UKm-p|> zuo?E%6YpA>(6zNpo-}mLZ(k6)U%^x0pb6fK?)tP{Cdl~Dj(*|54)oIw$AE+TX}@iN z=YHmS;BgnUnlQu;6%<2ifzBe8$wI~Qry4f^Y0xToQRUm?fF3Cec;c!!V)n$8Rpn*x z%D7?n$1$Wh%bJSpRlpCCD@TUaaKb#_D)_*gr3S08z}#A?hiWyqoui12YgBZ$gfS_r z2&=5r^*q9U*dcNn%O15#mlO~2xF%HtIAa6cbi>FrBOx6pnG~6+v@a=}X(wxogDY!` z9KphDDTbG80r{)uHLpvxiZ+;u00n)3Cx~Y7HLC5t{Vt%xy%PGS?4WC{=uquv>x*)s z5|+k?0a-T|>k(2mRq&h3`~(M>+nsVKTg!E-!>k#DOlvMh9kcaB*prZxr5v9+#+Kd{ zh;0YNc8=PvXcEHX$4x0lWxltp<~rtSZ?ZSmjG^yH7RSqw z#U;OEb++eCojEU)Fx%FK$;w|ldSj5*5$Nr^ytM-D%;?|K8%spOF}V+zqIeHELU z7M%)RI1`0pG z|K0~6Z9h0VIP@VISm%vMY9XuFy`u@`;1nd;QxFOzk%mm&e*98WGFZn6vJVE&Cm}so z0#4isb5P8su@rA1Q(BER>K39Y-a{nz5z}5dp|<_0v#x9_*`4#u*L%L*|JD8nzFg<-eCO`RoxP7bdmoPF zI{WjT{ZtYnoKOz|PN)a2)&pFv=ZU{*&VMs_JNRFYsci$X+O|bsW8SwZJ)}A7G8;0V z+tX0eqASU{aA815*s#23X@puyUL>!xFhM;1h6bSTWbHj{dj-ZVN<{Wsh2w zS2HN?SJ~ey3lLhdrt;v+b$xI`!-a*~XaJ2nUyxdUfXl1ZE3~lmtAT#J`T|-8>7QYcs za~fn{7Omc!wl2Co#PP0udgD{5sq+*N560{Ees<;hl{wk)M#w~69h%TE>$>Hd6LUgm zUg%t)zeT|}>%8Th8_Wsqd7*s)vQhAc-dofY!4sT_xF?KQs6ztGo>muZYcTrT|Q}!6+3+ZNA3gQ%c1(tlG3h<{TLp;X^d#f=?V3Dc6cdAy%b?E1SczD;b@I z%vAY^RY^&8Vit!F8YCWrVOU3LC^PK{(T~RO24I%rx3w6*AvV{HAxu&WN&+l?6L_78 zSH@=i2dKN;z1%hoTwf_BhoFbt0%Qg))Q=j!Ip)`X;4;e+tbw^Z= z#=(^15O2X&A!+y{X(iDR_Y)D89z2aCCO!M0@p98@dVUP8@_8UL=r=3!x6YmaM#=fQ z^S*AlPk0V7Pqy!TaN;j(e^~qQlfSk;XoXvg=LiG3&Yj5i-#PhZ=!?+&SgvJzzGeI4 zmOYPJ_B^!YTK4B#_CIbJc+@iR<3O%uB;PUu9dh-fdAewA_+1#qUbpD>rE87F5-&8J z53xyVu^qs`2Q&tfdQ^SJ?LU#u51|h=$w*hB5PHJ|$ zDvz_A@T_EX6*=Jq8EZQbk`uBGaE~k!t$D4srn$gFd9k%)zI{%eQwuz5Y+7vHLU?*i zu&j58UQ5E?K8Knrv@G*+KG?fsiV}!2_ zg+7@M$Cg@LBSZazL&ri#j|~kU{1A`8gm{RMK|&4^asY_#jKspSJS8b-5+Wvs1xaKy z0$++wClWE5G~>64aDWh!E9$l6n^QDmbkR8g4@{sPllX=SKwz?b!{TTRA-4}q+=vna zMzRDkd8YIS3ERwJ&C`%+n0Z(8bj%#mJnflnnQg!b{+WYXTVqhz6Slqks@9WAURPH&4to2tvDzK#E2H*>97w^f61q3f`xMb`PN0B*HRh#k znUs2gK)KeiR<1h_Lw&ZI-0P-0|LyZgOpN7^itn^z%-i6|C5p7#A{|eUA3YHLK)CxAm zEBsylG4MYr%S7uzx92D6_Jh3entX%erFI|Et}&=dflF0juU5)?;Oh!je4jaqzubWvB(FQ5tq{me-2VEYOQ(9^tbl z<=-l^RL(7CnFshw9s!>480b&RV+vb)p4lp0B~1v`+A7WIp_(~`66bb4i>sVenzxus z=!40$$MRH8En8QTt!}sJ5iG%Mu8hN;y_WC*&e`H0pwBp+v4pf>q1pNp&jVz6vi?%9 zx>fkv{ewK4_F8PVp_;3NTY}}CVrwNBv>dZoIBU;15y!4xynJc!=J2A}`TmK{#EDLY zFb4@ggVm|7U4%^<#H;G_ah%jKH7pSg6Q^hZ5Mf$1VTkZFBnYP`7YKhxl`-K>^+SjY zV2r0QJxwAG4c-`*-n)KQ*X$GcgfCwq-ZVGOp9e->NV^udLSDzQl z3lg{*;dL42<*3)upgFizet7@l*@^eSSB;5x<=E{}9G{ws>l4@R#AalixQ3InYRohy z7F5}sQSnxPg1WLKr{WXzb0-IR>gyAdQ4D}c>JHf@xkHi`rzeojq;$KFs)?JwGcTzV4_CGNVqY?cVv#Q0GRdGaou? zhmK}0IlZU8SC+F6U7$bx(OUDd+$x;fBC|8*^AqKteDg=e1-^){F~)loU{fMjwDBlxrF(*45zCp>HpLeR-YFpB}bP z4;QIxlPibHEP&v{AXQ@!dWx<=W&znU$jP5>kT3Tr?2mcvcH_-6Uq!%l>~bF%j>7?% zaGdZdEeY=o+y*^-96+_za1RjvXLEH>ts?*+OCc`ks{kj|;slyLng4ixW$HH{Eq{~? 
zbgfN28~XFepGMXP{y6^KcrNlT)B=YZ0*;G9?{ynezjbZ+D?qkvcs$KiU*}+>YBvPF zV%ct^TK`p;+ti29SI1ntb%Whl*R?Pu!;6g5R2_E#{(myvQBZ9R002*G87tP+>)(W) zUjO#i*SFT2^KXpUZ;a&oM(w`QzxHkPUCH%bS(yY-tgfkua04#M*i3QFA`#%Sgv$`~ zpcd{@cxK`~6m2ZuEr#i)T*6b2C;V;o4m`T%QuNyG^5T8Kj611>psR+dDrEQ0q2pSL z1ax(wI-&@3+n@)wb(gx)N%l+sDFzq2|BHYPgKpzr0KnTs6dYwB>{b^6qsM(0NI{@_PzkR7AVS8L)KrEP#B=nB6j#@i}5}Q*7N3TUU?d#dce4&x!5N z+k5_SGvD5CxA*77uoK)@Kw{k>Q-qlzC$ML=|KXWj;1E=&v1P^Mgxfd6{Tt!_r~Ucx zfE^ylTy+9rh=zvN=goUp4{Wxc*l0bGZ|${Pd-KgF?dFqDo9yOOE6fV>qH(X|Ka}mW z{hgcso(+G`n)K|K_KCOh{bNyH~f ziKH?I8BQX8N&5AitQAKvy#z>oOp^^Gp_(&E1=GAlxJlzQJjXOQ6%y{IJdNqm5e}YO zn1(ezMZtLhUm$lv_c$?qp}K&+w2a>ZSStTs1dcEL;)SQNz&3bxJNw(S9gn&n_1gRU z3mhe!IrnUMy<^>2Kla^K`^*)WSU@o1f|nc;n(p3mWeZ`z3v9Eer&bFkoH*h zC<~Sd?oe^bbhTaexDgs6LavA4ka6)HSszPk@L!AmJ2BjVDZWVs^G+(RPrF8PErJI@ zxVY_MiXEoFy?4a(MU!8^?*NVFAmcuO0>?0ngX)&tpMwtMYO8}fa<$b#`*XF`LE+p^ a^#$@Rxxa$Y#efZ72QMk@`yB?{h5s9NC79d* literal 0 HcmV?d00001 diff --git a/ndbc_api/api/parsers/http/__pycache__/_xml.cpython-311.pyc b/ndbc_api/api/parsers/http/__pycache__/_xml.cpython-311.pyc new file mode 100644 index 0000000000000000000000000000000000000000..6de24b1a521d04ab9fdb4b8f9b7685be7a3a5ec4 GIT binary patch literal 1619 zcmZ`(O>7%Q6n?Wmj_o8(s1vCbRau&-sUv9?cE*XruGh@0 z$&VZqp&lxFfCExE<`h7^aNv&0fpa27sx?w1Bu?BMrH9fJZ+5*)($cp(-@G^P&Ad17 zz1<&2NArL*_QyZVk2HY4l#q})U{0#Y>;MHyJgCJp@ikW?;OjMms1?mK{Y)(r@wzwU zXKPsk8dQK9cY&HLv!??*z!6g|N6AenX8u;4i}80G!}E^b(ginX3%m6 z+oCBA>pF@%z@SDb)HF(JIy0!oOsccY>QJ(!VPFWx9*Pf=C8%Yosocxl?N2_AeQjRO zTS&2BH3Dv-%A!u_lp=-9awghAOE8(?EbQ=>i}^~)Ku+E69 zrY$W8VVV_y)%47+_Kc-|#xC8{)z8}9geeHK$@u`&z$dqiaQMF!`pn5)<7zW98v$g#CmyXwARHdnCg@IMBMtM?6OqqGU zEM`+lsnVma(WV;~XRRupFFLNvglKd;>%(ewF*QneN<(?xe68U$J#>lQf~C-DwxSyG zAPB7n5Bzjr6z2k#S6^nK)XG(vSNb?^Ib8Ob&st%XGbVFsk8xGPqA7LP6I^u!n@=#DD0JQ^mQ^?Ay1I{y4rw@m_+A0Gw;A}`T5KG!`%4%Mg8GL z6ZwT=PG)Vp<@n6DW!|>^fOb5^kJ$DX9mgAJhHRS#E-nwdo+E_M!j*tZL-ht9!Nd7E z1rrEA!wDkrf`Uu}s?o)#2;1q?*EBPWw?W84=B2)N&a9B1ru(4gU*7z6F6K_-v5tm! zKZ{LIEBK|vQ86zy&g7)36$`O*N~gQXjF#eHRoL>Cq}(MecWF@Wk}7w}Ucx^eSKqHJ zq*g5X(}}1J;|sp5oJSF;OFzwCkCQmZub`-YA>uv))`gHB zt{J546s!`kBENlWro}bdTJBG{O+1VS9B60-b5SB;)%OnVXLrb1K z$^|!3PQbxiiTj|r(>8-pHnNGIp-4ctSY|;=oE2I!{K`OB zNq9N^s0Q$_!Y9ZP{9utIcJL~?57ds&+48ZD3_?}mKIrfGHa4UP%A*arDsAoe>Z{-b z@>kJq;Sa_}pc>eceW85_7Q5{`vCib7I#RbzH+3Wna<@)5b*SG!w;VlvBwq)#x^0wV zPP~)xQpm#(jUm{rf2dsOh;rd0%0-SS7wwkAv1NB(9U5o+7s@Aoq5M-%mdDA@;v2!L zyyc&8uk!r2dZ5Yz?Bb7!ljz7SLBDH_>Vdtq5h|S-PIPRm&LUC07NIU! zxT5MMtd@CCwAF$G4aTulUA+n|Ws2%Wl8O2TCTQLs^C@o9H*^*k3@1IN&h59(dC=N) zT2T$FY!#pXkbxyh+4?GR{Y+k z+KbkTiPZ(dEnro*At^3S*3`2l%Te`m*#y-Elc@e9yO$}JG? zlx2TLPw35^v{NPpY~br@d(s+S|O~*uTET_Q`<(@ zl}SqBw&G>ti|pM>W9n==!2ZvWjPwZx-@V1uuu-xN;gd=(+2j#Hj*UFcdnEIU)KuoTX9S7iOUCw&}b z4vO&>WB&`z_~Q@JPyHx$;@cDH?a_L2sF57{E_r1)dF89$)RQxf? 
z+ML6y>Fz^H_}A?iIImK+zvH^=%jitb~mn`zI%D=X1y=d=*u)E;pEuofwqa~>jT4$f#JF`(ojZf z%EJrmM2Y#G3gxtkig$)bg!Yd}Y4GH#U6p{RwPi16Kh2?LmWy- z2HIW`I^`VTEa0+OY#)!F12q;b!)>SAZuab$cStkD`)9y*dkGuxXW;l|+Q&D;k8ef; zkCcC%y2bbc(U&>SOZ0PiytNvy&`BV9?d)3sV2K4`51pt<8 literal 0 HcmV?d00001 diff --git a/ndbc_api/api/parsers/http/__pycache__/adcp.cpython-311.pyc b/ndbc_api/api/parsers/http/__pycache__/adcp.cpython-311.pyc new file mode 100644 index 0000000000000000000000000000000000000000..e138336c98a0b5cb1f59199e275836fce99fd024 GIT binary patch literal 2079 zcmaKt&u`mg7{}k&N!%uDX{#~uLrC<%4nYtWXRHMwM7xBlQyZd|R?I}Q+}LR{^25A# zwG|bpL}5FE&@_${hcZ--9QX@%E(nS0JyqhwEmemEJMnz|Bt=yPuJeB1=i}G*`|+E& z-;IuD1Z`sv|6ZS=-tQ;|TdLnVo~6z`;R#Q+L|fWYqHnYeJ3*OfCEF=GC50j8gqPeA zUdm7HCWP3d4{x^V8OtJT{(&+pb#kE@MrG<`X5G-QxGMBjtv@!7p?-0^MxEPqZ(rEb z6Sm<=x~E|$*Xh3BN;~D7UeX`(ho3n)o9>?WOh0qu+yK?hJEAVVp`DRkLVE)&?2$X- zmb6D{X2Uu&GB4h7^FP2)@dY0uU9!D-0%?y>2dVSmgQ_hr^1cESIVS8dcvzab=7S< zCsWNPb)wb^)hBdVeTp$@$Emg4Fm#*{bDW%<+T9 zCy(hAk$plBJ5I-K`;MbCj??aX8!hxl9OuIgx79zAo-Y?m?>SfBS+z-1YH8_RaHl-8NaS4nT-tur=z0okr;}c@7U|?#x7*T2;OTVHjoh~t-7$94qy1-D z^II*)$x8J(ofTrC-%?PES0!$Sao}reNObUbI6kpGe=u=j`&!SqC7(8X^u`{v`^hJJ zQ9_s#JtL8tlMh~a5dH9CPoRI0eYuy2JJ#hNU*awwIolrL57xz>+Ej^ZpU*&^%V`!&kq(nM+=_oFL(|Mo^#gdU!B6&DVF_S@FnpG9uL34>++`h zhDtn}@MCIv2`S})cxwOjekjiG5B5WGW&iZvGiMEn@4$b@m(L#Y(MSI?D4yOwnI=1V literal 0 HcmV?d00001 diff --git a/ndbc_api/api/parsers/http/__pycache__/cwind.cpython-311.pyc b/ndbc_api/api/parsers/http/__pycache__/cwind.cpython-311.pyc new file mode 100644 index 0000000000000000000000000000000000000000..4de69f31dbfbc98d7870e05cb569c73b3bb333d2 GIT binary patch literal 1243 zcmZ`&&1(}u6rb6zHf=)_MAU#-L6G`oVx*z#h#HHCn#^3)5n6|t zh;32rCdymim6^xV81qC_1kiQO5p2TYf^oMQ8!sJAW9K%2C3u@58&hO!6x;eNy!{;8 z1~aM7EY@+PeUcii6JAf4|tN7~)X=mEBSAgd19J#gQv&?F76 zK}_D9jU(Sq^?To-3a|GhLh)98>0Q6n#>ZufGZ9uRf(eU?OsYP+FWtHC}Ps z)x%W=Ol9v(f?B~(Q3SniNu-`^$^u#iYrVbmtc_X`YF)q!+TJ**0~hnUG(|0BoLdki zv$Ragyc&>ps(C}|t}l2exOqy&qzG|+M+icM%CWDnuf97uaH(_)7!jpW$Gv-4~oM?{mrd_Jg8J9SrS5h;K85O0uC<;riYHlH%=V^H0;nBZ( zUD%hP3K3^P{i>s%7Rnr4nch5jcw5Jr+z$lmOYZci-08KE*N;BthCb(p8ahr5;~hAF z@@d&gNW}>lA+n2*ph|1L!aaoCt2uttlNu=w4o#7P8)LGoc(q6-ua4asx+%NkE(9!^ zt5U9*k_PqMNE$QMsxP&W%G96}Il~<8j|-9MX6Iby`-J2%?}bxB(b~^5pkoeVrMfL1 zgMy-3zpYHRKC+oTw-Jx}=$4sjXu3I!*G{ZOuTC|P!khWC4Lt_uFT6REz{Im(Z|lg) zE>0(x>~|+SJljePzIrQ~553B4Qmqv_73v6{g8+G~MP>oWEHue1C^8FV2L7;IeJXKj zFHv%(DtNc^a?jjlH8`rSLFrpnvNpr?z9y KkN^1Nr2YV%moFN@&B81WQ(=$hJOd6jhB?prutTR@~voUUHe) zRU|4@9TWztLs00@B7xe#LJqbHU2@DJMRO<$AGO97AqFrYpva*&8g2kLr@mQoNs%!D zk9Hs5ym`NQZ|1$#!*Do=U_}1;zw~ejp?|Z9Gdwlq*$N<^A_Xa&igH}i#i5)l!E;rL zJK=#9m+H;=5b=D(HCdCgfBdz{vn4=&4}M{k;1rZ_DO`fb?f_DJ3XeUC8+*a7 z@3zB_Qr;bu;uP<_@B@%Mtf@gL5!gYSTp|em`01I2tZrx|o0Qd?hRi(Ct3GT^1w|Ou ziI~!eC?}2Vhgj6BY+^PK#GEZ6rihjk!F^GY4LNQL&QU+XdI9ud8eZ42WMp#~WOD`T zCD>Re`D8sh;957lZ2s(?8=*gOpCW^+JB$oh123TmFgvVM_Ul-K2cZ)80OSwrI=1-$ zN~3LeiQhkIwJxHx@ZUhY+;9H2?J0To!Ji)w#nqzU+m@4W;q!ZyH`vTaqAz8uMXFgx zK4f$_`$g!s&%sN+lD`xv1>5=(YSyE4+_BGQTmEn}e_OuJX1=Ck>xjvJRVK?08cg%4>!w7YZtfuBlkeV1i>O zHub(;$y@eSP~!CfR5xV@NNI%SGBR_l#{A7KylE8{Z0oHDa)c;4S|woFN-KGfN0`S{5#rY^S46h)dw0M_(^s zLcDOIX%EpfLt<@La!wKTr66ZA?2}K!u01JLgMGLP3Uw{bP%k7A=M@^LXNJ0FS7^}g z@3vLK+Cr?0@<~-EtmqMTXQ`W6q8+s~ND7>6!Y&yNIwVL;ec4V>*jcG2D&kBFwW!s( zS@H<${0#-{tAe_e6y-1tNK#UjbzPG5E?~0HcmsU=gK~Xh^_TDwrLX=>PTrX(*%U-z zb-9?#$Rr0Tx{Z^DzPf>BxV5A|1s*1+Z6j& zblly$dr&k7#;t+z3L5AJdjNLlD_+z;_?>WWfAFLZQy8^`(Kcid5eHBDRN=Y)sn2I? 
zDpF>2)QXOp!k8tD9SLJku1tLGITnVFh1jt$Y$NCIzq|MDfyWf$mJt8){9|GCSg6vx zrf|^`E;dX|n*&#^fve2KJXg1&nb9dLI%NvemN0!JOh1{q{*OLKhtCu)S;8e}>Zly= zzi_&O0PHSSCQ;wHZ+izH_YNL(n7yM`@90tQC?HR#At>Kp<4{lUABE3UvwPU;9^SdO zbM5(ek?v2nD~N*>o`g?&&K*VH{^}P1;4ynBz!c{G(S+EnMLrUGV@OX#&T#UzOaB`K#V>ng)LB ztlPV%r<^(v$IicCx?KV3PwOjCA>y?P5oZ-5E`deD{vWV|@mEA{Fr08&hNv zrYv^)9{@m#Ij)SNM{S-mI(6hc$LQiw^HWA6N6k;gd!6GTvM+|86`$HG|828w`}=>Y CVaeJ6 literal 0 HcmV?d00001 diff --git a/ndbc_api/api/parsers/http/__pycache__/ocean.cpython-311.pyc b/ndbc_api/api/parsers/http/__pycache__/ocean.cpython-311.pyc new file mode 100644 index 0000000000000000000000000000000000000000..cfbc505b62ee88623d8db377b8468c08b59c4d24 GIT binary patch literal 1178 zcmZ`&%TE(Q7@ye}ZE2;%5V47YgO>&A!59z57{j9kLx99c%+h4C>`ZBKx4X`4RSYH^ zNaW~+5O1Ckg9i`(4W7|>Aak*aCvPQSqMV%BM!&C@L%0tHM1tZf3U6Ma_&U|;N*ECVa| zfoc4Zq6xV++hF0G0J(9Skp-JFLY>4nI(ib}cu|0t;%x#fh=HYGXsJZQ3ekyixqAbv zqz7vxvzkp+HDe3ZAl5hYyF#^}s)3c;0?(k;E3@If^A53_@$*y>K0Hea3;ZS{j2Bvr zps?W)7TR9G4NAfmZ93hWgu5oTl%p)S;SLBz2U7}Vz=YPpu8{V-nm1jPddPDft}AE# zM3!*QRJqPt0io0oPP`x2P>uQ?>ZWRHTy zf#@@<-m{%Y)3i~mHJa6hHK%StPsF zec8HlutxLZf#+BL>SXy`g3^VJ3HECP{4_vuV0-CsVDLzV#nK57>|1H{OKEg(=EIB6 zrKzu_sg4S>6?iNz05i`s2sLewAjESBc|LBr66XJU>!_(Z9JA2RUz! zHME#!WA<<(Tnie_mAFJBu$$OsbWk|(p>%3xF|~3swX&RA87+(d8GQDY5JlbKsOLOU=MvMs93 zp}h57sd+4onW~6NAfzW;!Dd}982389@zU2(4jd3(!rctnm?B%F81B@Tux&7t>daz& zr+eq9!TRCu)Z@4!wOzK>Q5jQfZEy>;ot~#Xu#TQ%dk}K!lj*sTc}Wgj1D6%OG&U|% zoQber6HLfVLogCm112Ij2&KhY)ZjI*R~?)&U@HA!0oW#fg(ASROOcA&p;fd6+D>oh zbx-PgsC5;uYDfJbQ(VgH(iDx5ac+T=9HM1X=JkN|Qq3Du_k6(>e7s-zqzLhRR|rCc z%5ijbwE25}@}@HnhzsYw>pdy*YPnpkIkU@N#pS^)uRms9B%Ei=jVg?Hbf-oaJ>rH{ zN6j$qm~bjl6gmRnD}>8@5aO?#{R?K{Sb7=o5M#i8Hqj3Yf_?o@` zIeYy>s&!*}U&py3K17PQ5l$8H0USa3jO-_*<_3%qnId~7-`<$GjnZCH;Z`t{C4#2 zVjC&BmA~B9W8uox_m>heu^i|7Im~(n+KQILYHca0)C%1ibp=;{ ziaghCWC0pk=rppR8d)HV@H2AtOyaWMq9mj$_ILlS1n=8?6jX(R5Ww0x#<+z>c8`ud fG`!m#d+6Hk(b2Y2She=weB;c1Y&raoJx=NmjZ`Z$ literal 0 HcmV?d00001 diff --git a/ndbc_api/api/parsers/http/__pycache__/station_historical.cpython-311.pyc b/ndbc_api/api/parsers/http/__pycache__/station_historical.cpython-311.pyc new file mode 100644 index 0000000000000000000000000000000000000000..5f535a39e75ad72876c4d634e82da0953e300b59 GIT binary patch literal 2550 zcmZ`4TTB#J^v+`+u!|tOi>|F@6+|{PSW-lxs|^LDtCpIgNu_J1vojYMoPG4pY*o6O zkj5s|G$|SzXwr{uV~|#p^3e}J^{46QE}4YQO-x7{Km1#xAJnhjJG(mz3*MPI_i^6m zo^$4YLqk0vqw(+mroQ6=_?K=f!&Y9rJWUphKn60Sf{D`1OfY26s%(s#;26MyQ6O_S zfy_g0nFHVf`ATDgmzi-8v^}TWu^@vR#&uCw67dTn(jbz`WaeZk{FZDN0R&XZ1j_=* z%@E^CLuQs4qJVs*F=2ytnS~CSgN{jF=5O-L1o5DP1`|$#x52s@(@}anDTQPym)NLBCmtPX67D1ZHlFiOh>$uEbHKm|=-HJAG&XW8_ zIo$RD%KS3nd0_En*fg8nBC%?TY)gacMLe|E@NsD_Te-q26IscQ8jViBh0ofp<(7q9 zx1A%yr$N@Sg}_>-`G@t}X~%)S*FuBs$v`gEHHPi~8Ap(b}r%-kq?4+WZp%b6~^e`7v}WwB*dY4jHaP+3_`3XU^66)YD>k+OwbEkNg%{ z?Z_YLH;(i_I+gc?4NrJ}xZvHt7`_u;l2$mQ^Mi$O-WxK!q4|-5|DAPz&ziqywLR}Y zVfarxVhsPlx_@ZRKlFGw?++XP@ch|=r)AyKyXNVAT%Ysw<~`>O&$*oY+%wO<+kJQY z=0AB!38^sU0921JbO-UIydf8o16~2G^K3dz)s{T!`xDhryMm2&tKj zEn`pH>3Ox<(>6-;!`?JYDv=+ z{CyO+XHX}|XiA482qh34aRm(s#%)qH3@Ms%A68+(pu{y2#P* z0JTyy4L+iIYXT=(XtxNMv!0EXz(VxyOu>2T1;_0B@D(6n?$e?j*!LD$hwZ?MTm)oV z)y=62UT}os<`*VSvDu`qw*|6o_qy&KD+01yC5&XM0*hnYX~|4#VbF$-QA16S4p&-c zH))yW--p&sTV}UV?wN@GN=v@=(v{`!YiFnw)zhetAZVY^t`IeKg bY=BU1`&+ahV2I&6!9wN*J$2ujso|9$5-3_^d$m3(pJl-*HKmJpA4B8K9`1VLb( zjFE9_f--d~Mo-YtM#mg+W`ZFQiLN5vv4D6+U|v!Py@tP4Cdznf90kj^aWzO7Z2vSD zO^k3U(COi*B#(kpF)nj*G?}=;iIN~jazo7Y2j$&&iy%1A`R>DV0n}g29M_JS!$5WWf`ny9v2HZ)ZYh2{sRWUj>6-|V0Jd8|pV*G}fyemXxDRf`p zG7{!e(GdPy%auaYvYZOxJpq=@b>WFxQxC->FxQ;V4wBaVN^lURD2PXB%jJ2}^SEc3 
z)m-(ut6mx3a5by0=C_`sx~D<;>8sF-&{~t$*rhjiz4?*m8PYvNnSreD_@?jdhVSg! z1=V*}^Ig<^7c-x~_4t0vD$T0@W6jf|ds8u8bVWK z0}>suLo!0%LgVN@dO+Pm_X&vk9+nmDfv0BrF1A=&8ZuUKNA|VaYk00heeYp&aR%`1 z{NH^8uU(@CJ<7!egPd+NC_eh7;g;kg9e~tilsAZ!h!fBp>2z*mWg9=@pxa=$R7yzj z1`QkpDGLu|!y(CHG-a^3Da^%U(2$BnAs@tqgi+Zm0e3Ou21PL`LR|*y$yr&j>I`_m z4lBbZOT*Iw#{<_5cdpKXxfG;DFU)VI2$QUaBPt7VsgNYZqqw^k#9Vy|jrS)Y>`H=@ z5);~UB*X5IOnF__?R_%#cx?G6%FS1|Uff!de;@us7%F``L!~vvrT=9n`&p>zwZWT=$M-2D9E{OV^)W zR~XIvk?#E{GYI)&arSBV^KPZ)*_FlI@7>$#lYgvKt3KI5M0s6NWIZ2b#_(^4g7$as z-fgkfPpTeQEmNERGaLRhD;;Y!s~0u@1>Ju^V>@)VLuEU%MVsagfAh+bm6=t)=5N>i z?HYStXV0tb`D~RhQzhY;E%XKY8qs%+p1w=ecijX?Ti(Ih*~~lt1jWibJh5>2xmc24 z21-tF;4yi4#!xWY_7y<6gJ*t09)|P-veIF5lz}g%=;xr3z!G*evNx&WwB>>SZZ`-V zf;77ApNACtC;G5s-JUj|l3yJgN8i9Jme>F;z99{=^_)ROCA{50z{Kx+XHN{?xFX4T z>hfG$DU5COXzht5BV0_n3`%}TBY2Xqc!bp2Uh7h7-pqiu*tXR9tn+Et^R7aV=mSfG zaL!fYmpk)F4BUK}66VO3fJy!B|3ER=VVDQAH%N(1FhZ=y!|1!oXd*{SqdXQ(2qw#n zs{G-aXK`6hDIw~|;AHN^5AN_oFp=(q0FoZB%T#5}Eh*7g$rs5rUTeCfH(i3wXtlk1 zZEvP5%lbCilN;<$FUA_9YD)`8Fi zeMUtdb>?)2!@AM2+vOH)Cc;)l>mb#yHFY}t`wC*|m%YjU9W+v-C zw&@ORxC2VP@`Z9#b2sbmX4Sm1Rdp&`XKr^iSYfc?Z~{)~aM*B$!|^0P8^gLg9G;ow zV!0kyB*sZnT#%=eyoev4c`l0h?M1A?0w;D66V0IDd=_b3#oM+XBWAx93LF~|3;aqW zeGB4I;kV_ehQrZuT%mUm+*Z8itth^y5L2`8Gt*|L8uT5~?jK$!?05X?k zergzu{4f#jg*A{Xe1Oz z^FRV0a&SwbX$aUYfvP1qZXKwPJvNupqZ9-Y1A#!vA%}uOdT?{|e+RfZv6b-a+Pw0R+q&RNFb4gb)Wm420X04xKJx$Hov^AYfr4L(&nifUi(vj>r% zBkTQzgQLZRqrZPtP$!D&#Nx?PpmS;B&cw30Inl_fp{xYR-L@v6%C^V;rYdVjcUDg9>P6IPj+G#N zh$sTeh%xaJnnu^qb?FkiCcOPj88IV`foVoQi7vNC;6zy^>&m$Jek1RU!gapcnGvC8 zTo%xIe8Q-mZ**iAZN z+j$Pw{s#V?a$*WKsRez`abz$#p~o?kxHsNN9pra+lReyVdp(*>9pjr_n{c^VVHcR$ z4}mP8=X<)oAGtS@_jHy;xuvfZ=(u}kY4XlwuCEXnCrg<{|kU@h4BBslOmIIt@I z=KIyRexfjVv^aRQ5F9NAM;A{op5A!cwy%tYmc9llsl9oB^t9 zZaG-+bd^F~T>pKCd<9s)-CKI^NZ!A9`C`%En>$(Xhx7h$sjVyT>9Tp&x5qu>A@nHZ zA0Lz+4G2J|!d~XlG=4vt#=M#qPa0|HC>rx?+81fvs!DuL_ljdWmo%IwBYVLg;ZB(w zTTbg?(gpnrIRsn3AhM5(`2`c+YaGIRkxhUs)PK*E_Oj?!-dQ!u2&Q%FObzl+ zK6iA=yMQl?UZp$Nd;i`0N6HA;bufgfLFyJwyOnf!CSkdfbWg2^4M7j9Hfwu`_pl+Y z+QbMiwMzOQaISu&*PgM;HGNDz1OcyT`vMSnCP646?}GDthQ{*E|FUbh0I6;zw=cis LyZ>$@a-#kZY(yE8 literal 0 HcmV?d00001 diff --git a/ndbc_api/api/parsers/http/__pycache__/stdmet.cpython-311.pyc b/ndbc_api/api/parsers/http/__pycache__/stdmet.cpython-311.pyc new file mode 100644 index 0000000000000000000000000000000000000000..4ec8cb589f1ec24b4c05eef85da081e803a6e671 GIT binary patch literal 1246 zcmZ`&&1(}u6rb6zHnpJ%B5Dp=L6G znN(*M>)PKrNe$Kw(KEM_n$&j1dW%Ba zPDN8{mA}vekI>c2H(G6H%=yn2_1JU?lcKCSo^?q{UfW=T)y$ zK3rqKRQlFcV2gMO#n9|F#VTZr=Fu`}tDT*v9jWc1)_FXyZTEvhaJi^UQ`95Ixdl!# zPb;LtYa!{RS~R5Y1%h{jnP-$wh7d1sg&;(z9Q*tG8^5nz7;vTl65(8Sz1x?#U#a+2 zXJXEqb$K|!Yqyve3+FC#<5|XAx>Kdo9&sbzQ8P*)70zrNM~+CkTZ-m*79!j~@OQUM zJ5p3W;smgt4fMl8xjl={`ksB8I?ffoBVeBk$37O0t&F~U@S!mLsW9Bsab^T>!2wjv z%5FlcZpa9cJ%ofcS`U=mOURwN8?-E$(elON>*T_fG1*fdDwC^2V^hPEa#zv^cn7mJ z$`w}9puQJNW4cxgq!v+`x#-632#5QVN@TXxJC6kcAw|sl;FM6n4)PrE1cijDev5~| zps4n@k;^wm*YhXWlF_)lVdk2eZjRuU!z=O2qfMmb_2P-9o(QK-zdoLdsbweM)RC26 zxRGA6)12<`d^<6C_*Og@`PG@US}Ss^)D^q{4)S0d&Jw^`YQb4jaF)n4{Igs=mZY>7 zD7{h@{JZ;X+VgY#5U8pXLI4AdFkVM{*S3!hw0o^RHqe>1?W1XB@Ca_AfAc2;n~CN7 Jf9y$8e*ig$HyZ!| literal 0 HcmV?d00001 diff --git a/ndbc_api/api/parsers/http/__pycache__/supl.cpython-311.pyc b/ndbc_api/api/parsers/http/__pycache__/supl.cpython-311.pyc new file mode 100644 index 0000000000000000000000000000000000000000..6aed97bc4aa966db3e35475574828b06d77d3959 GIT binary patch literal 1251 zcmZ`&PiPZO6rb6D?Z$>Ah=?LsL6L|?5V0a6^-m2-%~vQDS%brNXVR|jChN=wt@IF~ zpf|mUSMgAldi3B?5WaI2!7`_UhYxQlQBY65+1*4+KVN3vyx+Xv%)CFF7x{b+ffWAz 
zIoHDoeNmt|m^HEzletRy{C6(=jp9P!&;Q2~m@o%Q`|UFjH|X zs!gM^^;N+#md2zn;u;vabFN@hE*Fe@88=ymGSIsL#shepAqP|BXcRm89K8J;I|ehU z&MY>xYjBbpY#3g*ZzegZ?S}ObH88c7MwS7s44BTghv+VLMj)yonVW7$K^g_ufKs;R z65_jSNC(8QNQeZx2oK4H2h|L zYQdX#c{s&eH<=fU`W@!R^NeS>-lVf0aU;L3W|VYR)aTIVj2zZEY;eseC(){5LFbp_uI1i820%nCHKk~OoUnpq*U@WL zh0-fk!oP-3r!_yr_W@Lu5CT+J#~81novWM2I@-S4AM5DE>gLh4Y^>V*|N8Onjl}cq IFa9K`-#kb)^8f$< literal 0 HcmV?d00001 diff --git a/ndbc_api/api/parsers/http/__pycache__/swden.cpython-311.pyc b/ndbc_api/api/parsers/http/__pycache__/swden.cpython-311.pyc new file mode 100644 index 0000000000000000000000000000000000000000..0874ec08d915724010a3a9231e9930b05f54e66d GIT binary patch literal 1651 zcmZ`(&u<$=6rS0&H+B-!L~TT|gqi~axuuq~{*@vGNfW0^m6)iZQR`Y7ZM@^y#cP|H zwMwcW^Ln2 zl&DKehD%H42CV%xErkG0z%aMm5Fg@^*G?25gQ#oq;rkc6a+4iHKW|XnKoX9BS z?p>i1SH}FjGHNwtc;3TsGG(Vy8q#()fP4TMWEo^xWLac6WI1GcWO-!M$fl893)oaD zSYZkfR+z%|rqakV2w8+2LLOln;aUJNO;5K`#YPnyRcutTQN>0T8&zynu~7x$wMrUQ z(x{R~l{BiPQ6-(r29eVNWKsyQ1WeCl5HKVIAu1}kY`2<{OBL%#&iuN^rSEucWeM(g zM6+^o-Jk{`cr`y>1M`GFB|ZhG-+W!$e%m6u!1hmizB=&+5gNDXmT@``E(|RuOcj=$ zHW$KzAT{n*Syi+=c9Lo$q|8b~icuIAF&z_StkQ6#WK8NDCnqPjU#`u)Q@#zKfGpp1 zDj(et^=h@=EU#=BXCycmkNdTjD|GEt$gNj=6$9Dmw4@&Vqmy zgRQAp|12xK(O@h=#d(+|b)jd(1+YO?0u#{#UxI;c4t|GY z3`{q4q1U9u8t)lqc!BP{vFCqxxkt2rkht121INz%@HVrE9|a>X{%1T$>L0`E Bl5PM1 literal 0 HcmV?d00001 diff --git a/ndbc_api/api/parsers/http/__pycache__/swdir.cpython-311.pyc b/ndbc_api/api/parsers/http/__pycache__/swdir.cpython-311.pyc new file mode 100644 index 0000000000000000000000000000000000000000..3ce8df60f6599638bcb5ea003048415759a25387 GIT binary patch literal 1633 zcmZux&u<$=6rS0&H@1^)>bCqkAbN;|wxX7^{#ihfGznF!#8?fD+Qrgn<6XyFytbKH ztE38&50Mg1aH)E&IE12b;J_bH35j!BRU(>GBo2t1tAGSO@n*)hNXvNkee=C}^SzmU zv;NLBQwZAhFaK=p#1Z;UkWm9J+wo<9`^ZBcZlMr67>lvi(i|O7Z^gp66URtHSCALm zMqb>H@97Bb!=svGc-ktmj6VdmFpn(|z+eY*`^do_ zax@P+`Ud3w7CSLO9Wd@2Ud*5H6K{+aUU4l=p-thvewnH_Dv5XBF`RL|c@H?DFvGUh*!Svfh*5CPgj%zC`R-S9OZc(vu zt}QNdt=Z&S7~YL%vUWBrAZHgOlqBR8%PUq;te{v?v7%xn#Y&2m6)P)tNwV3j%rFZG z8D>?!*_>i|1qB5~1tkS#1(zhiGO^rNDz;Lwm5QxYY^7o=6@lB2LFlu{4ZGZ+e$=Dw z`uMs*4Px*Je(Zp`hwq~Z!(iAVQQKkLLVLjW$DRk{(I`UT7T(fM#(k-8>2PUq#nO44 z^}0T#1_bi7*B}kr3CVc7R-Ehg7Nb)zO-&J_c@3T^!vo^R<(luM@W$)Wp-3-x8}eqo`YB z;yP!#n=}m>?!Nh-+nsr7mkSWWEHHof(O(8Sb@twS5A?_8%u{pbSF`xUEdIFp=;C9u z`qZrMYKPP5-Q+O@BkL4T5Yl!-pAeoRB9WJXM*m zkhS^c8%wMF^!25+rR$#vKdH>GfM-&^l+cfw9gm6v@|YLYBOY7tbXr{NdOW%4M(#%x zGRk9dbNC|beyc@@h3Pq1#YDB|=qq4lS)@sbiLb!G5l6qH)9L=z!*uCD_WrejR(RbQ zppiLOFzQFBcyO{0j1Tdd0eabdbK#lr{`o&sax4D= DAk>g> literal 0 HcmV?d00001 diff --git a/ndbc_api/api/parsers/http/__pycache__/swdir2.cpython-311.pyc b/ndbc_api/api/parsers/http/__pycache__/swdir2.cpython-311.pyc new file mode 100644 index 0000000000000000000000000000000000000000..f6c381e36ece9930728a2e055e156fb60bc3d150 GIT binary patch literal 1653 zcmZ`(O>7%Q6rR~#Z(`R@>Zbf%pa&{s1eUY@ZB-$siHTYzCTeJD7fYj!cOARz+Gb|0 zk}60(L~1y}rRuff5Q@T)14jfBXM>Q4<`k)iikqVp33}qqjBSwu834uK6Hb{eZ0y0TA72$f_XEYAeG$CR%x51-?)P09BLYVl@&Oz%x`(W{6?K*r6 ztajC@f3!?njYg|oTidEP9U83B&TX$Avf3Tb37Z}r@mky6s1v8#s)?s7KPFadhGDnH z%YgZ_kNz;x%-sD;59fYPoc}d({k7lA*s!NrdWbwTf z9;+@?$&H1T>y>qW`dZ~i<=XpVoKzQ9VdRv2JAoHAJ1!NK zLg!ryJIdqn#ORIIJ>MrJh3Q#X#YEZ5^aZf8KGIB!iO;~mNyop#)5-pogJf|ad;jWC z%fD<6(bx=(v%@w6?a`AIh zlF}E)38tpF8E$o3?M-!Rv+J~7htYEoz+W5JIt{f>k7}J3wN8@__$#I2+RGz;jg+J~ z__OItsv7T5_}4<>jW7mgsAG%|&`X1p{SeI!#{Cc#1}FQ_EMt6Ti2mDtaq&pbJpG?} GnbhBiwvpNZ literal 0 HcmV?d00001 diff --git a/ndbc_api/api/parsers/http/__pycache__/swr1.cpython-311.pyc b/ndbc_api/api/parsers/http/__pycache__/swr1.cpython-311.pyc new file 
diff --git a/ndbc_api/api/parsers/http/_base.py b/ndbc_api/api/parsers/http/_base.py
new file mode 100644
index 0000000..6465bf9
--- /dev/null
+++ b/ndbc_api/api/parsers/http/_base.py
@@ -0,0 +1,107 @@
+from io import StringIO
+from typing import List, Tuple
+
+import pandas as pd
+
+from ndbc_api.exceptions import ParserException
+
+
+class BaseParser:
+
+    HEADER_PREFIX = '#'
+    NAN_VALUES = ['MM']
+    DATE_PARSER = '%Y %m %d %H %M'
+    PARSE_DATES = [0, 1, 2, 3, 4]
+    INDEX_COL = False
+    REVERT_COL_NAMES = []
+
+    @classmethod
+    def df_from_responses(cls,
+                          responses: List[dict],
+                          use_timestamp: bool = True) -> pd.DataFrame:
+        components = []
+        for response in responses:
+            if response.get('status') == 200:
+                components.append(
+                    cls._read_response(response, use_timestamp=use_timestamp))
+        df = pd.concat(components)
+        if use_timestamp:
+            try:
+                df = df.reset_index().drop_duplicates(subset='timestamp',
+                                                      keep='first')
+                df = df.set_index('timestamp').sort_index()
+            except KeyError as e:
+                raise ParserException from e
+        return df
+
+    @classmethod
+    def _read_response(cls, response: dict,
+                       use_timestamp: bool) -> pd.DataFrame:
+        body = response.get('body')
+        header, data = cls._parse_body(body)
+        names = cls._parse_header(header)
+        if not data:
+            return pd.DataFrame()
+        # check that parsed names match parsed values or revert
+        if len([v.strip() for v in data[0].strip('\n').split(' ') if v
+               ]) != len(names):
+            names = cls.REVERT_COL_NAMES
+        if '(' in data[0]:
+            data = cls._clean_data(data)
+
+        try:
+            parse_dates = False
+            date_format = None
+            if use_timestamp:
+                parse_dates = [cls.PARSE_DATES]
+                date_format = cls.DATE_PARSER
+            df = pd.read_csv(
+                StringIO('\n'.join(data)),
+                names=names,
+                delim_whitespace=True,
+                na_values=cls.NAN_VALUES,
+                index_col=cls.INDEX_COL,
+                parse_dates=parse_dates,
+                date_format=date_format,
+            )
+            if use_timestamp:
+                df.index.name = 'timestamp'
+
+        except (NotImplementedError, TypeError, ValueError) as e:
+            print(e)
+            return pd.DataFrame()
+
+        # check whether to parse dates
+        return df
+
+    @staticmethod
+    def _parse_body(body: str) -> Tuple[List[str], List[str]]:
+        buf = StringIO(body)
+        data = []
+        header = []
+
+        line = buf.readline()
+        while line:
+            if line.startswith('#'):
+                header.append(line)
+            else:
+                data.append(line)
+            line = buf.readline()
+
+        return header, data
+
+    @staticmethod
+    def _parse_header(header: List[str]) -> List[str]:
+        names = ([n for n in header[0].strip('#').strip('\n').split(' ') if n]
+                 if isinstance(header, list) and len(header) > 0 else None)
+        return names  # pass 'None' to pd.read_csv on error
+
+    @staticmethod
+    def _clean_data(data: List[str]) -> List[str]:
+        vals = [
+            ' '.join([v for v in r.split(' ') if v and '(' not in v])
+            for r in data
+        ]
+        return vals or None  # pass 'None' to pd.read_csv on error
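Not part of the patch: a minimal sketch of how these HTTP parsers consume handler responses. The DemoParser subclass and station text are fabricated; the {'status': ..., 'body': ...} shape is the one df_from_responses expects, and the 'MM' value exercises the NAN_VALUES sentinel.

    from ndbc_api.api.parsers.http._base import BaseParser

    class DemoParser(BaseParser):
        # like StdmetParser below: the combined date columns become the index
        INDEX_COL = 0

    BODY = ('#YY  MM DD hh mm WDIR WSPD\n'
            '2025 05 01 10 50 140  6.0\n'
            '2025 05 01 10 40 MM   5.0\n')

    df = DemoParser.df_from_responses([{'status': 200, 'body': BODY}],
                                      use_timestamp=True)
    print(df['WSPD'])  # indexed by 'timestamp'; the WDIR value 'MM' became NaN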
diff --git a/ndbc_api/api/parsers/http/_html.py b/ndbc_api/api/parsers/http/_html.py
new file mode 100644
index 0000000..2159af0
--- /dev/null
+++ b/ndbc_api/api/parsers/http/_html.py
@@ -0,0 +1,21 @@
+from typing import List
+
+import pandas as pd
+from bs4 import BeautifulSoup
+
+from ndbc_api.api.parsers.http._base import BaseParser
+
+
+class HtmlParser(BaseParser):
+
+    INDEX_COL = None
+
+    @classmethod
+    def dfs_from_responses(cls, responses: List[dict]) -> List[pd.DataFrame]:
+        components = []
+        for response in responses:
+            if response.get('status') == 200:
+                soup = BeautifulSoup(response.get('body'), 'html.parser')
+                tables = soup.find_all('table')
+                components.extend(pd.read_html(str(tables), flavor='bs4'))
+        return components
diff --git a/ndbc_api/api/parsers/http/_station.py b/ndbc_api/api/parsers/http/_station.py
new file mode 100644
index 0000000..2d2634a
--- /dev/null
+++ b/ndbc_api/api/parsers/http/_station.py
@@ -0,0 +1,51 @@
+import os
+from calendar import month_abbr
+from collections import defaultdict
+from datetime import datetime
+from typing import List, Tuple
+
+import bs4
+
+
+class StationParser:
+
+    BASE_URL = 'https://www.ndbc.noaa.gov'
+
+    @classmethod
+    def _parse_li_urls(cls,
+                       urls: List[bs4.element.Tag]) -> List[Tuple[str, str]]:
+        parsed = []
+        if 'MOCKDATE' in os.environ:
+            now = datetime.strptime(os.getenv('MOCKDATE'), '%Y-%m-%d').date()
+        else:
+            now = datetime.now()
+        current_year = now.year
+        for raw_url in urls:
+            name = raw_url.text.strip()
+            name = f'{name} {current_year}' if name in month_abbr else name
+            url = f'{cls.BASE_URL}{raw_url.get("href")}'
+            parsed.append((name, url))
+        return parsed
+
+    @classmethod
+    def _build_available_measurements(
+            cls, line_items: List[bs4.element.Tag]) -> dict:
+        # unpack nested lists
+        nested = [li for li in line_items for li in li.find_all('li')]
+        nested = [
+            li for li in nested
+            if li.get('href') is not None and 'plot' not in li.get('href')
+        ]
+        line_items = [li for li in line_items if len(li.find_all('li')) == 0]
+        line_items.extend(nested)
+        available_measurements = defaultdict(dict)
+        for li in line_items:
+            if 'Search' in li.text:
+                break  # end of available measurements
+            new_measurement = cls._parse_list_item(li)
+            if new_measurement:
+                k = list(new_measurement.keys())[0]  # guaranteed one key
+            else:
+                continue
+            available_measurements[k].update(new_measurement[k])
+        return dict(available_measurements)
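Not part of the patch: a sketch of _parse_li_urls on fabricated markup, showing how month links get stamped with the current year and resolved against BASE_URL.

    import bs4
    from ndbc_api.api.parsers.http._station import StationParser

    soup = bs4.BeautifulSoup(
        '<li><a href="/data/stdmet/Jan/44025.txt">Jan</a>'
        '<a href="/historical_data.shtml">Historical data</a></li>',
        'html.parser')
    print(StationParser._parse_li_urls(soup.find_all('a')))
    # [('Jan 2025', 'https://www.ndbc.noaa.gov/data/stdmet/Jan/44025.txt'),
    #  ('Historical data', 'https://www.ndbc.noaa.gov/historical_data.shtml')]
    # (the year suffix tracks the run date, or MOCKDATE when it is set)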
diff --git a/ndbc_api/api/parsers/http/_xml.py b/ndbc_api/api/parsers/http/_xml.py
new file mode 100644
index 0000000..486b306
--- /dev/null
+++ b/ndbc_api/api/parsers/http/_xml.py
@@ -0,0 +1,28 @@
+import re
+import xml.etree.ElementTree as ET
+
+from ndbc_api.api.parsers.http._base import BaseParser
+from ndbc_api.exceptions import ParserException
+
+
+class XMLParser(BaseParser):
+    """
+    Parser for XML data.
+    """
+
+    @classmethod
+    def root_from_response(cls, response: dict) -> ET.ElementTree:
+        """Parse the response body (string-valued XML) to ET
+
+        Args:
+            response (dict): The successful HTTP response
+        """
+
+        body = response.get('body')
+
+        try:
+            root = ET.fromstring(body)
+            return ET.ElementTree(root)
+        except Exception as e:
+            raise ParserException(
+                "failed to obtain XML root from response body") from e
diff --git a/ndbc_api/api/parsers/http/active_stations.py b/ndbc_api/api/parsers/http/active_stations.py
new file mode 100644
index 0000000..c2ea1ef
--- /dev/null
+++ b/ndbc_api/api/parsers/http/active_stations.py
@@ -0,0 +1,66 @@
+import xml.etree.ElementTree as ET
+import pandas as pd
+
+from ndbc_api.exceptions import ParserException
+from ndbc_api.api.parsers.http._xml import XMLParser
+
+
+class ActiveStationsParser(XMLParser):
+    """
+    Parser for active station information from XML data.
+    """
+
+    @classmethod
+    def df_from_response(cls,
+                         response: dict,
+                         use_timestamp: bool = False) -> pd.DataFrame:
+        """
+        Reads the response body and parses it into a DataFrame.
+
+        Args:
+            response (dict): The response dictionary containing the 'body' key.
+            use_timestamp (bool): Flag to indicate if the timestamp should be
+                used as an index (not applicable here).
+
+        Returns:
+            pd.DataFrame: The parsed DataFrame containing station information.
+        """
+        root = super(ActiveStationsParser, cls).root_from_response(response)
+        try:
+            station_data = []
+            for station in root.findall('station'):
+                station_info = {
+                    'Station': station.get('id'),
+                    'Lat': float(station.get('lat')),
+                    'Lon': float(station.get('lon')),
+                    'Elevation': (float(station.get('elev'))
+                                  if station.get('elev') else pd.NA),
+                    'Name': station.get('name'),
+                    'Owner': station.get('owner'),
+                    'Program': station.get('pgm'),
+                    'Type': station.get('type'),
+                    'Includes Meteorology': station.get('met') == 'y',
+                    'Includes Currents': station.get('currents') == 'y',
+                    'Includes Water Quality': station.get('waterquality') == 'y',
+                    'DART Program': station.get('dart') == 'y',
+                }
+                station_data.append(station_info)
+
+            df = pd.DataFrame(
+                station_data)  # Create DataFrame from the extracted data
+
+        except ET.ParseError as e:
+            raise ParserException(f"Error parsing XML data: {e}") from e
+
+        return df
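Not part of the patch: a sketch feeding ActiveStationsParser a fabricated one-station document in the shape of activestations.xml; attributes the station omits, such as elev, fall back to pd.NA.

    from ndbc_api.api.parsers.http.active_stations import ActiveStationsParser

    XML = ('<stations>'
           '<station id="44025" lat="40.251" lon="-73.164"'
           ' name="LONG ISLAND 33" owner="NDBC" pgm="IOOS Partners"'
           ' type="buoy" met="y"/>'
           '</stations>')
    df = ActiveStationsParser.df_from_response({'status': 200, 'body': XML})
    print(df[['Station', 'Lat', 'Lon', 'Includes Meteorology', 'Elevation']])
    # one row; Elevation is <NA> because the fabricated station omits elev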
+ """ + root = super(ActiveStationsParser, cls).root_from_response(response) + try: + station_data = [] + for station in root.findall('station'): + station_info = { + 'Station': + station.get('id'), + 'Lat': + float(station.get('lat')), + 'Lon': + float(station.get('lon')), + 'Elevation': + float(station.get('elev')) + if station.get('elev') else pd.NA, + 'Name': + station.get('name'), + 'Owner': + station.get('owner'), + 'Program': + station.get('pgm'), + 'Type': + station.get('type'), + 'Includes Meteorology': + station.get('met') == 'y', + 'Includes Currents': + station.get('currents') == 'y', + 'Includes Water Quality': + station.get('waterquality') == 'y', + 'DART Program': + station.get('dart') == 'y' + } + station_data.append(station_info) + + df = pd.DataFrame( + station_data) # Create DataFrame from the extracted data + + except ET.ParseError as e: + raise ParserException(f"Error parsing XML data: {e}") from e + + return df diff --git a/ndbc_api/api/parsers/http/adcp.py b/ndbc_api/api/parsers/http/adcp.py new file mode 100644 index 0000000..478319e --- /dev/null +++ b/ndbc_api/api/parsers/http/adcp.py @@ -0,0 +1,138 @@ +from typing import List + +import pandas as pd + +from ndbc_api.api.parsers.http._base import BaseParser + + +class AdcpParser(BaseParser): + + INDEX_COL = 0 + NAN_VALUES = None + REVERT_COL_NAMES = [ + 'YY', + 'MM', + 'DD', + 'hh', + 'mm', + 'DEP01', + 'DIR01', + 'SPD01', + 'DEP02', + 'DIR02', + 'SPD02', + 'DEP03', + 'DIR03', + 'SPD03', + 'DEP04', + 'DIR04', + 'SPD04', + 'DEP05', + 'DIR05', + 'SPD05', + 'DEP06', + 'DIR06', + 'SPD06', + 'DEP07', + 'DIR07', + 'SPD07', + 'DEP08', + 'DIR08', + 'SPD08', + 'DEP09', + 'DIR09', + 'SPD09', + 'DEP10', + 'DIR10', + 'SPD10', + 'DEP11', + 'DIR11', + 'SPD11', + 'DEP12', + 'DIR12', + 'SPD12', + 'DEP13', + 'DIR13', + 'SPD13', + 'DEP14', + 'DIR14', + 'SPD14', + 'DEP15', + 'DIR15', + 'SPD15', + 'DEP16', + 'DIR16', + 'SPD16', + 'DEP17', + 'DIR17', + 'SPD17', + 'DEP18', + 'DIR18', + 'SPD18', + 'DEP19', + 'DIR19', + 'SPD19', + 'DEP20', + 'DIR20', + 'SPD20', + 'DEP21', + 'DIR21', + 'SPD21', + 'DEP22', + 'DIR22', + 'SPD22', + 'DEP23', + 'DIR23', + 'SPD23', + 'DEP24', + 'DIR24', + 'SPD24', + 'DEP25', + 'DIR25', + 'SPD25', + 'DEP26', + 'DIR26', + 'SPD26', + 'DEP27', + 'DIR27', + 'SPD27', + 'DEP28', + 'DIR28', + 'SPD28', + 'DEP29', + 'DIR29', + 'SPD29', + 'DEP30', + 'DIR30', + 'SPD30', + 'DEP31', + 'DIR31', + 'SPD31', + 'DEP32', + 'DIR32', + 'SPD32', + 'DEP33', + 'DIR33', + 'SPD33', + 'DEP34', + 'DIR34', + 'SPD34', + 'DEP35', + 'DIR35', + 'SPD35', + 'DEP36', + 'DIR36', + 'SPD36', + 'DEP37', + 'DIR37', + 'SPD37', + 'DEP38', + 'DIR38', + 'SPD38', + ] + + @classmethod + def df_from_responses(cls, responses: List[dict], + use_timestamp: bool) -> pd.DataFrame: + return super(AdcpParser, cls).df_from_responses(responses, + use_timestamp) diff --git a/ndbc_api/api/parsers/http/cwind.py b/ndbc_api/api/parsers/http/cwind.py new file mode 100644 index 0000000..48616a9 --- /dev/null +++ b/ndbc_api/api/parsers/http/cwind.py @@ -0,0 +1,17 @@ +from typing import List + +import pandas as pd + +from ndbc_api.api.parsers.http._base import BaseParser + + +class CwindParser(BaseParser): + + INDEX_COL = 0 + NAN_VALUES = [99.0, 999, 9999, 9999.0, 'MM'] + + @classmethod + def df_from_responses(cls, responses: List[dict], + use_timestamp: bool) -> pd.DataFrame: + return super(CwindParser, + cls).df_from_responses(responses, use_timestamp) diff --git a/ndbc_api/api/parsers/http/historical_stations.py b/ndbc_api/api/parsers/http/historical_stations.py new file 
diff --git a/ndbc_api/api/parsers/http/historical_stations.py b/ndbc_api/api/parsers/http/historical_stations.py
new file mode 100644
index 0000000..3333e43
--- /dev/null
+++ b/ndbc_api/api/parsers/http/historical_stations.py
@@ -0,0 +1,75 @@
+import xml.etree.ElementTree as ET
+import pandas as pd
+
+from ndbc_api.exceptions import ParserException
+from ndbc_api.api.parsers.http._xml import XMLParser
+
+
+class HistoricalStationsParser(XMLParser):
+    """
+    Parser for historical station information from XML data.
+    """
+
+    @classmethod
+    def df_from_response(cls,
+                         response: dict,
+                         use_timestamp: bool = False) -> pd.DataFrame:
+        """
+        Reads the response body and parses it into a DataFrame.
+
+        Args:
+            response (dict): The response dictionary containing the 'body' key.
+            use_timestamp (bool): Flag to indicate if the timestamp should be
+                used as an index (not applicable here).
+
+        Returns:
+            pd.DataFrame: The parsed DataFrame containing one row per
+                station history entry.
+        """
+        root = super(HistoricalStationsParser, cls).root_from_response(response)
+        try:
+            station_data = []
+            for station in root.findall('station'):
+                station_id = station.get('id')
+                station_name = station.get('name')
+                station_owner = station.get('owner')
+                station_program = station.get('pgm')
+                station_type = station.get('type')
+
+                for history in station.findall('history'):
+                    station_info = {
+                        'Station': station_id,
+                        'Lat': float(history.get('lat')),
+                        'Lon': float(history.get('lng')),
+                        'Elevation': (float(history.get('elev'))
+                                      if history.get('elev') else pd.NA),
+                        'Name': station_name,
+                        'Owner': station_owner,
+                        'Program': station_program,
+                        'Type': station_type,
+                        'Includes Meteorology': history.get('met') == 'y',
+                        'Hull Type': history.get('hull'),
+                        'Anemometer Height': (float(history.get('anemom_height'))
+                                              if history.get('anemom_height') else pd.NA),
+                        'Start Date': history.get('start'),
+                        'End Date': history.get('stop'),
+                    }
+                    station_data.append(station_info)
+
+            df = pd.DataFrame(station_data)
+
+        except ET.ParseError as e:
+            raise ParserException(f"Error parsing XML data: {e}") from e
+
+        return df
diff --git a/ndbc_api/api/parsers/http/ocean.py b/ndbc_api/api/parsers/http/ocean.py
new file mode 100644
index 0000000..568812f
--- /dev/null
+++ b/ndbc_api/api/parsers/http/ocean.py
@@ -0,0 +1,16 @@
+from typing import List
+
+import pandas as pd
+
+from ndbc_api.api.parsers.http._base import BaseParser
+
+
+class OceanParser(BaseParser):
+
+    INDEX_COL = 0
+
+    @classmethod
+    def df_from_responses(cls, responses: List[dict],
+                          use_timestamp: bool) -> pd.DataFrame:
+        return super(OceanParser,
+                     cls).df_from_responses(responses, use_timestamp)
diff --git a/ndbc_api/api/parsers/http/spec.py b/ndbc_api/api/parsers/http/spec.py
new file mode 100644
index 0000000..7266c79
--- /dev/null
+++ b/ndbc_api/api/parsers/http/spec.py
@@ -0,0 +1,17 @@
+from typing import List
+
+import pandas as pd
+
+from ndbc_api.api.parsers.http._base import BaseParser
+
+
+class SpecParser(BaseParser):
+
+    INDEX_COL = 0
+    NAN_VALUES = ['N/A']
+
+    @classmethod
+    def df_from_responses(cls, responses: List[dict],
+                          use_timestamp: bool) -> pd.DataFrame:
+        return super(SpecParser, cls).df_from_responses(responses,
+                                                        use_timestamp)
diff --git a/ndbc_api/api/parsers/http/station_historical.py b/ndbc_api/api/parsers/http/station_historical.py
new file mode 100644
index 0000000..8c0fb29
--- /dev/null
+++ b/ndbc_api/api/parsers/http/station_historical.py
@@ -0,0 +1,34 @@
+import re
+
+import bs4
+
+from ndbc_api.api.parsers.http._station import StationParser
+
+
+class HistoricalParser(StationParser):
+
+    LIST_IDENTIFIER = re.compile(
+        'Available historical data for station .{5} include:')
+
+    @classmethod
+    def available_measurements(cls, response: dict) -> dict:
+        if response.get('status') == 200:
+            soup = bs4.BeautifulSoup(response.get('body'), 'html.parser')
+            p_tag = soup.find('p', text=cls.LIST_IDENTIFIER)
+            line_items = p_tag.find_next_siblings('ul')[0].find_all('li')
+            return cls._build_available_measurements(line_items=line_items)
+        else:
+            return dict()
+
+    @classmethod
+    def _parse_list_item(cls, li: bs4.element.Tag) -> dict:
+        measurement_item = dict()
+        try:
+            title = li.find('b').text.strip(': ')
+            parsed = cls._parse_li_urls(li.find_all('a'))
+        except AttributeError:
+            return measurement_item
+        measurement_item[title] = dict()
+        for name, url in parsed:
+            measurement_item[title][name] = url
+        return measurement_item
diff --git a/ndbc_api/api/parsers/http/station_metadata.py b/ndbc_api/api/parsers/http/station_metadata.py
new file mode 100644
index 0000000..19096f8
--- /dev/null
+++ b/ndbc_api/api/parsers/http/station_metadata.py
@@ -0,0 +1,49 @@
+from collections import ChainMap
+from typing import List
+
+import bs4
+
+from ndbc_api.api.parsers.http._station import StationParser
+
+
+class MetadataParser(StationParser):
+
+    @classmethod
+    def metadata(cls, response: dict) -> dict:
+        if response.get('status') == 200:
+            soup = bs4.BeautifulSoup(response.get('body'), 'html.parser')
+            metadata = cls._meta_from_respose(soup=soup)
+            return dict(ChainMap(*metadata))
+        else:
+            return dict()
+
+    @classmethod
+    def _meta_from_respose(cls, soup: bs4.BeautifulSoup):
+        metadata = []
+        try:
+            metadata.append({'Name': soup.find('h1').text.strip()})
+            items = soup.find('div', id='stn_metadata').find_all('p')[0].text
+            items = items.split('\n\n')
+            assert len(items) == 2
+        except (AssertionError, AttributeError):
+            return metadata
+        metadata.extend(cls._parse_headers(items[0]))
+        metadata.extend(cls._parse_attrs(items[1]))
+        return metadata
+
+    @classmethod
+    def _parse_headers(cls, line_meta):
+        station_headers = []
+        headers = [i.strip() for i in line_meta.split('\n') if i]
+        station_headers.append({'Statation Type': ', '.join(headers[0:-1])})
+        station_headers.append({'Location': headers[-1]})
+        return station_headers
+
+    @classmethod
+    def _parse_attrs(cls, line_attr: str) -> List[dict]:
+        station_attrs = []
+        attrs = [i for i in line_attr.split('\n') if i]
+        for attr in attrs:
+            k, v = attr.split(': ')
+            station_attrs.append({k: v})
+        return station_attrs
diff --git a/ndbc_api/api/parsers/http/station_realtime.py b/ndbc_api/api/parsers/http/station_realtime.py
new file mode 100644
index 0000000..46654d0
--- /dev/null
+++ b/ndbc_api/api/parsers/http/station_realtime.py
@@ -0,0 +1,29 @@
+import bs4
+
+from ndbc_api.api.parsers.http._station import StationParser
+
+
+class RealtimeParser(StationParser):
+
+    @classmethod
+    def available_measurements(cls, response: dict) -> dict:
+        if response.get('status') == 200:
+            soup = bs4.BeautifulSoup(response.get('body'), 'html.parser')
+            items = soup.find('section', {"class": "data"})
+            line_items = items.find_all('li')
+            return cls._build_available_measurements(line_items=line_items)
+        else:
+            return dict()
+
+    @classmethod
+    def _parse_list_item(cls, li: bs4.element.Tag) -> dict:
+        measurement_item = dict()
+        try:
+            title = li.text.split('\n')[0]
+            parsed = cls._parse_li_urls(li.find_all('a'))
+        except AttributeError:
+            return measurement_item
+        measurement_item[title] = dict()
+        for name, url in parsed:
+            measurement_item[title][name] = url
+        return measurement_item
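Not part of the patch: a toy page in the shape of the NDBC station history pages, showing the nested dict that HistoricalParser.available_measurements returns. The filename and measurement title are fabricated.

    from ndbc_api.api.parsers.http.station_historical import HistoricalParser

    HTML = ('<p>Available historical data for station 44025 include:</p>'
            '<ul><li><b>Standard meteorological data:</b> '
            '<a href="/download_data.php?filename=44025h2024.txt.gz">2024</a>'
            '</li></ul>')
    print(HistoricalParser.available_measurements({'status': 200, 'body': HTML}))
    # {'Standard meteorological data':
    #   {'2024': 'https://www.ndbc.noaa.gov/download_data.php?filename=44025h2024.txt.gz'}}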
diff --git a/ndbc_api/api/parsers/http/stdmet.py b/ndbc_api/api/parsers/http/stdmet.py
new file mode 100644
index 0000000..f81abf4
--- /dev/null
+++ b/ndbc_api/api/parsers/http/stdmet.py
@@ -0,0 +1,17 @@
+from typing import List
+
+import pandas as pd
+
+from ndbc_api.api.parsers.http._base import BaseParser
+
+
+class StdmetParser(BaseParser):
+
+    INDEX_COL = 0
+    NAN_VALUES = ['MM', 99.0, 999, 9999, 9999.0]
+
+    @classmethod
+    def df_from_responses(cls, responses: List[dict],
+                          use_timestamp: bool) -> pd.DataFrame:
+        return super(StdmetParser,
+                     cls).df_from_responses(responses, use_timestamp)
diff --git a/ndbc_api/api/parsers/http/supl.py b/ndbc_api/api/parsers/http/supl.py
new file mode 100644
index 0000000..096c93a
--- /dev/null
+++ b/ndbc_api/api/parsers/http/supl.py
@@ -0,0 +1,17 @@
+from typing import List
+
+import pandas as pd
+
+from ndbc_api.api.parsers.http._base import BaseParser
+
+
+class SuplParser(BaseParser):
+
+    INDEX_COL = 0
+    NAN_VALUES = [99.0, 999, 999.0, 9999, 9999.0, 'MM']
+
+    @classmethod
+    def df_from_responses(cls, responses: List[dict],
+                          use_timestamp: bool) -> pd.DataFrame:
+        return super(SuplParser, cls).df_from_responses(responses,
+                                                        use_timestamp)
diff --git a/ndbc_api/api/parsers/http/swden.py b/ndbc_api/api/parsers/http/swden.py
new file mode 100644
index 0000000..0514aa3
--- /dev/null
+++ b/ndbc_api/api/parsers/http/swden.py
@@ -0,0 +1,71 @@
+from typing import List
+
+import pandas as pd
+
+from ndbc_api.api.parsers.http._base import BaseParser
+
+
+class SwdenParser(BaseParser):
+
+    INDEX_COL = 0
+    NAN_VALUES = [99.0, 999, 999.0, 9999, 9999.0, 'MM']
+    REVERT_COL_NAMES = [
+        'YY', 'MM', 'DD', 'hh', 'mm',
+        '.0200', '.0325', '.0375', '.0425', '.0475', '.0525', '.0575',
+        '.0625', '.0675', '.0725', '.0775', '.0825', '.0875', '.0925',
+        '.1000', '.1100', '.1200', '.1300', '.1400', '.1500', '.1600',
+        '.1700', '.1800', '.1900', '.2000', '.2100', '.2200', '.2300',
+        '.2400', '.2500', '.2600', '.2700', '.2800', '.2900', '.3000',
+        '.3100', '.3200', '.3300', '.3400', '.3500', '.3650', '.3850',
+        '.4050', '.4250', '.4450', '.4650', '.4850',
+    ]
+
+    @classmethod
+    def df_from_responses(cls, responses: List[dict],
+                          use_timestamp: bool) -> pd.DataFrame:
+        return super(SwdenParser,
+                     cls).df_from_responses(responses, use_timestamp)
diff --git a/ndbc_api/api/parsers/http/swdir.py b/ndbc_api/api/parsers/http/swdir.py
new file mode 100644
index 0000000..fcb54f0
--- /dev/null
+++ b/ndbc_api/api/parsers/http/swdir.py
@@ -0,0 +1,71 @@
+from typing import List
+
+import pandas as pd
+
+from ndbc_api.api.parsers.http._base import BaseParser
+
+
+class SwdirParser(BaseParser):
+
+    INDEX_COL = 0
+    NAN_VALUES = [99.0, 999, 999.0, 9999, 9999.0, 'MM']
+    REVERT_COL_NAMES = [
+        'YY', 'MM', 'DD', 'hh', 'mm',
+        '.0200', '.0325', '.0375', '.0425', '.0475', '.0525', '.0575',
+        '.0625', '.0675', '.0725', '.0775', '.0825', '.0875', '.0925',
+        '.1000', '.1100', '.1200', '.1300', '.1400', '.1500', '.1600',
+        '.1700', '.1800', '.1900', '.2000', '.2100', '.2200', '.2300',
+        '.2400', '.2500', '.2600', '.2700', '.2800', '.2900', '.3000',
+        '.3100', '.3200', '.3300', '.3400', '.3500', '.3650', '.3850',
+        '.4050', '.4250', '.4450', '.4650', '.4850',
+    ]
+
+    @classmethod
+    def df_from_responses(cls, responses: List[dict],
+                          use_timestamp: bool) -> pd.DataFrame:
+        df = super(SwdirParser, cls).df_from_responses(responses,
+                                                       use_timestamp)
+        return df
diff --git a/ndbc_api/api/parsers/http/swdir2.py b/ndbc_api/api/parsers/http/swdir2.py
new file mode 100644
index 0000000..85a0c99
--- /dev/null
+++ b/ndbc_api/api/parsers/http/swdir2.py
@@ -0,0 +1,72 @@
+from typing import List
+
+import pandas as pd
+
+from ndbc_api.api.parsers.http._base import BaseParser
+
+
+class Swdir2Parser(BaseParser):
+
+    INDEX_COL = 0
+    NAN_VALUES = [99.0, 999, 999.0, 9999, 9999.0, 'MM']
+    REVERT_COL_NAMES = [
+        'YY', 'MM', 'DD', 'hh', 'mm',
+        '.0200', '.0325', '.0375', '.0425', '.0475', '.0525', '.0575',
+        '.0625', '.0675', '.0725', '.0775', '.0825', '.0875', '.0925',
+        '.1000', '.1100', '.1200', '.1300', '.1400', '.1500', '.1600',
+        '.1700', '.1800', '.1900', '.2000', '.2100', '.2200', '.2300',
+        '.2400', '.2500', '.2600', '.2700', '.2800', '.2900', '.3000',
+        '.3100', '.3200', '.3300', '.3400', '.3500', '.3650', '.3850',
+        '.4050', '.4250', '.4450', '.4650', '.4850',
+    ]
+
+    @classmethod
+    def df_from_responses(cls, responses: List[dict],
+                          use_timestamp: bool) -> pd.DataFrame:
+        df = super(Swdir2Parser, cls).df_from_responses(responses,
+                                                        use_timestamp)
+        return df
diff --git a/ndbc_api/api/parsers/http/swr1.py b/ndbc_api/api/parsers/http/swr1.py
new file mode 100644
index 0000000..3bd4e12
--- /dev/null
+++ b/ndbc_api/api/parsers/http/swr1.py
@@ -0,0 +1,71 @@
+from typing import List
+
+import pandas as pd
+
+from ndbc_api.api.parsers.http._base import BaseParser
+
+
+class Swr1Parser(BaseParser):
+
+    INDEX_COL = 0
+    NAN_VALUES = [99.0, 999, 999.0, 9999, 9999.0, 'MM']
+    REVERT_COL_NAMES = [
+        'YY', 'MM', 'DD', 'hh', 'mm',
+        '.0200', '.0325', '.0375', '.0425', '.0475', '.0525', '.0575',
+        '.0625', '.0675', '.0725', '.0775', '.0825', '.0875', '.0925',
+        '.1000', '.1100', '.1200', '.1300', '.1400', '.1500', '.1600',
+        '.1700', '.1800', '.1900', '.2000', '.2100', '.2200', '.2300',
+        '.2400', '.2500', '.2600', '.2700', '.2800', '.2900', '.3000',
+        '.3100', '.3200', '.3300', '.3400', '.3500', '.3650', '.3850',
+        '.4050', '.4250', '.4450', '.4650', '.4850',
+    ]
+
+    @classmethod
+    def df_from_responses(cls, responses: List[dict],
+                          use_timestamp: bool) -> pd.DataFrame:
+        df = super(Swr1Parser, cls).df_from_responses(responses, use_timestamp)
+        return df
diff --git a/ndbc_api/api/parsers/http/swr2.py b/ndbc_api/api/parsers/http/swr2.py
new file mode 100644
index 0000000..0b8d56b
--- /dev/null
+++ b/ndbc_api/api/parsers/http/swr2.py
@@ -0,0 +1,71 @@
+from typing import List
+
+import pandas as pd
+
+from ndbc_api.api.parsers.http._base import BaseParser
+
+
+class Swr2Parser(BaseParser):
+
+    INDEX_COL = 0
+    NAN_VALUES = [99.0, 999, 999.0, 9999, 9999.0, 'MM']
+    REVERT_COL_NAMES = [
+        'YY', 'MM', 'DD', 'hh', 'mm',
+        '.0200', '.0325', '.0375', '.0425', '.0475', '.0525', '.0575',
+        '.0625', '.0675', '.0725', '.0775', '.0825', '.0875', '.0925',
+        '.1000', '.1100', '.1200', '.1300', '.1400', '.1500', '.1600',
+        '.1700', '.1800', '.1900', '.2000', '.2100', '.2200', '.2300',
+        '.2400', '.2500', '.2600', '.2700', '.2800', '.2900', '.3000',
+        '.3100', '.3200', '.3300', '.3400', '.3500', '.3650', '.3850',
+        '.4050', '.4250', '.4450', '.4650', '.4850',
+    ]
+
+    @classmethod
+    def df_from_responses(cls, responses: List[dict],
+                          use_timestamp: bool) -> pd.DataFrame:
+        df = super(Swr2Parser, cls).df_from_responses(responses, use_timestamp)
+        return df
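Not part of the patch: a sketch of the REVERT_COL_NAMES fallback shared by the five spectral parsers above. The fabricated header names fewer columns than the rows carry, so BaseParser reverts to the hard-coded frequency bins.

    from ndbc_api.api.parsers.http.swden import SwdenParser

    header = '#YY MM DD hh mm\n'  # fabricated and deliberately too short
    row = '2025 05 01 10 00 ' + ' '.join(['0.0'] * 47) + '\n'
    df = SwdenParser.df_from_responses([{'status': 200, 'body': header + row}],
                                       use_timestamp=True)
    print(df.columns[:3])  # Index(['.0200', '.0325', '.0375'], dtype='object')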
diff --git a/ndbc_api/api/parsers/opendap/__init__.py b/ndbc_api/api/parsers/opendap/__init__.py
new file mode 100644
index 0000000..e69de29
[GIT binary patches adding the compiled opendap __pycache__/*.cpython-311.pyc files omitted]
diff --git a/ndbc_api/api/parsers/opendap/_base.py b/ndbc_api/api/parsers/opendap/_base.py
new file mode 100644
index 0000000..027e225
--- /dev/null
+++ b/ndbc_api/api/parsers/opendap/_base.py
@@ -0,0 +1,73 @@
+import os
+import tempfile
+from typing import List, Optional
+
+import xarray
+
+from ndbc_api.exceptions import ParserException
+from ndbc_api.utilities.opendap.dataset import concat_datasets
+
+
+class BaseParser:
+
+    TEMPORAL_DIM = 'time'
+    SPATIAL_DIMS = ['latitude', 'longitude']
+
+    @classmethod
+    def nc_from_responses(
+        cls,
+        responses: List[dict],
+        use_timestamp: bool = False,
+    ) -> xarray.Dataset:
+        """Build the netCDF dataset from the responses.
+
+        Args:
+            responses (List[dict]): All responses from the THREDDS
+                server regardless of content or HTTP code.
+
+        Returns:
+            xarray.Dataset: The merged netCDF dataset.
+        """
+        datasets = []
+        for r in responses:
+            if isinstance(r, dict):
+                if 'status' in r and r.get("status") != 200:
+                    continue
+                content = r['body']
+            else:
+                content = r
+            try:
+                xrds = xarray.open_dataset(content)
+                datasets.append(xrds)
+            except Exception as e:
+                raise ParserException from e
+
+        return cls._merge_datasets(datasets)
+
+    @classmethod
+    def _merge_datasets(
+        cls,
+        datasets: List[xarray.Dataset],
+        temporal_dim_name: Optional[str] = None,
+    ) -> xarray.Dataset:
+        """Joins multiple xarray datasets using their shared dimensions.
+
+        Handles cases where datasets might not have the same variables,
+        but requires that all datasets share the same dimensions. For
+        data stored on the THREDDS server, all datasets are expected to
+        have `time`, `latitude`, and `longitude` dimensions.
+
+        Args:
+            datasets (List[xarray.Dataset]): A list of datasets to join.
+            temporal_dim_name (Optional[str]): The temporal dimension to
+                concatenate on. Defaults to `cls.TEMPORAL_DIM` (`'time'`).
+
+        Returns:
+            An xarray.Dataset containing the joined data.
+        """
+        return concat_datasets(
+            datasets,
+            temporal_dim_name if temporal_dim_name else cls.TEMPORAL_DIM,
+        )
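Not part of the patch: a sketch of _merge_datasets on two fabricated in-memory datasets standing in for successive THREDDS responses, assuming concat_datasets concatenates along the named temporal dimension as the docstring above states.

    import numpy as np
    import pandas as pd
    import xarray

    from ndbc_api.api.parsers.opendap._base import BaseParser

    def toy(day):
        # two hourly wind-speed samples for the given (fabricated) day
        return xarray.Dataset(
            {'wind_spd': (('time',), np.array([5.0, 6.0]))},
            coords={'time': pd.date_range(day, periods=2, freq='h')})

    merged = BaseParser._merge_datasets([toy('2025-05-01'), toy('2025-05-02')])
    print(merged.sizes['time'])  # expected: 4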
+ """ + return concat_datasets( + datasets, + temporal_dim_name if temporal_dim_name else cls.TEMPORAL_DIM, + ) diff --git a/ndbc_api/api/parsers/opendap/adcp.py b/ndbc_api/api/parsers/opendap/adcp.py new file mode 100644 index 0000000..851bc40 --- /dev/null +++ b/ndbc_api/api/parsers/opendap/adcp.py @@ -0,0 +1,17 @@ +from typing import List + +import pandas as pd + +from ndbc_api.api.parsers.opendap._base import BaseParser + + +class AdcpParser(BaseParser): + + TEMPORAL_DIM = 'time' + SPATIAL_DIMS = ['latitude', 'longitude', 'depth'] + + @classmethod + def nc_from_responses(cls, + responses: List[dict], + use_timestamp: bool = False) -> pd.DataFrame: + return super(AdcpParser, cls).nc_from_responses(responses) diff --git a/ndbc_api/api/parsers/opendap/cwind.py b/ndbc_api/api/parsers/opendap/cwind.py new file mode 100644 index 0000000..d32b355 --- /dev/null +++ b/ndbc_api/api/parsers/opendap/cwind.py @@ -0,0 +1,17 @@ +from typing import List + +import pandas as pd + +from ndbc_api.api.parsers.opendap._base import BaseParser + + +class CwindParser(BaseParser): + + TEMPORAL_DIM = 'time' + SPATIAL_DIMS = ['latitude', 'longitude', 'instrument', 'water_depth'] + + @classmethod + def nc_from_responses(cls, + responses: List[dict], + use_timestamp: bool = False) -> pd.DataFrame: + return super(CwindParser, cls).nc_from_responses(responses) diff --git a/ndbc_api/api/parsers/opendap/ocean.py b/ndbc_api/api/parsers/opendap/ocean.py new file mode 100644 index 0000000..df5661e --- /dev/null +++ b/ndbc_api/api/parsers/opendap/ocean.py @@ -0,0 +1,17 @@ +from typing import List + +import pandas as pd + +from ndbc_api.api.parsers.opendap._base import BaseParser + + +class OceanParser(BaseParser): + + TEMPORAL_DIM = 'time' + SPATIAL_DIMS = ['latitude', 'longitude', 'instrument', 'water_depth'] + + @classmethod + def nc_from_responses(cls, + responses: List[dict], + use_timestamp: bool = False) -> pd.DataFrame: + return super(OceanParser, cls).nc_from_responses(responses) diff --git a/ndbc_api/api/parsers/opendap/pwind.py b/ndbc_api/api/parsers/opendap/pwind.py new file mode 100644 index 0000000..455b641 --- /dev/null +++ b/ndbc_api/api/parsers/opendap/pwind.py @@ -0,0 +1,17 @@ +from typing import List + +import xarray + +from ndbc_api.api.parsers.opendap._base import BaseParser + + +class PwindParser(BaseParser): + + TEMPORAL_DIM = 'time' + SPATIAL_DIMS = ['latitude', 'longitude', 'instrument', 'water_depth'] + + @classmethod + def nc_from_responses(cls, + responses: List[dict], + use_timestamp: bool = False) -> xarray.Dataset: + return super(PwindParser, cls).nc_from_responses(responses) diff --git a/ndbc_api/api/parsers/opendap/stdmet.py b/ndbc_api/api/parsers/opendap/stdmet.py new file mode 100644 index 0000000..0544912 --- /dev/null +++ b/ndbc_api/api/parsers/opendap/stdmet.py @@ -0,0 +1,17 @@ +from typing import List + +import xarray + +from ndbc_api.api.parsers.opendap._base import BaseParser + + +class StdmetParser(BaseParser): + + TEMPORAL_DIM = 'time' + SPATIAL_DIMS = ['latitude', 'longitude', 'instrument', 'water_depth'] + + @classmethod + def nc_from_responses(cls, + responses: List[dict], + use_timestamp: bool = False) -> xarray.Dataset: + return super(StdmetParser, cls).nc_from_responses(responses) diff --git a/ndbc_api/api/parsers/opendap/swden.py b/ndbc_api/api/parsers/opendap/swden.py new file mode 100644 index 0000000..8956d5e --- /dev/null +++ b/ndbc_api/api/parsers/opendap/swden.py @@ -0,0 +1,17 @@ +from typing import List + +import pandas as pd + +from ndbc_api.api.parsers.opendap._base 
diff --git a/ndbc_api/api/parsers/opendap/pwind.py b/ndbc_api/api/parsers/opendap/pwind.py
new file mode 100644
index 0000000..455b641
--- /dev/null
+++ b/ndbc_api/api/parsers/opendap/pwind.py
@@ -0,0 +1,17 @@
+from typing import List
+
+import xarray
+
+from ndbc_api.api.parsers.opendap._base import BaseParser
+
+
+class PwindParser(BaseParser):
+
+    TEMPORAL_DIM = 'time'
+    SPATIAL_DIMS = ['latitude', 'longitude', 'instrument', 'water_depth']
+
+    @classmethod
+    def nc_from_responses(cls,
+                          responses: List[dict],
+                          use_timestamp: bool = False) -> xarray.Dataset:
+        return super(PwindParser, cls).nc_from_responses(responses)
diff --git a/ndbc_api/api/parsers/opendap/stdmet.py b/ndbc_api/api/parsers/opendap/stdmet.py
new file mode 100644
index 0000000..0544912
--- /dev/null
+++ b/ndbc_api/api/parsers/opendap/stdmet.py
@@ -0,0 +1,17 @@
+from typing import List
+
+import xarray
+
+from ndbc_api.api.parsers.opendap._base import BaseParser
+
+
+class StdmetParser(BaseParser):
+
+    TEMPORAL_DIM = 'time'
+    SPATIAL_DIMS = ['latitude', 'longitude', 'instrument', 'water_depth']
+
+    @classmethod
+    def nc_from_responses(cls,
+                          responses: List[dict],
+                          use_timestamp: bool = False) -> xarray.Dataset:
+        return super(StdmetParser, cls).nc_from_responses(responses)
diff --git a/ndbc_api/api/parsers/opendap/swden.py b/ndbc_api/api/parsers/opendap/swden.py
new file mode 100644
index 0000000..8956d5e
--- /dev/null
+++ b/ndbc_api/api/parsers/opendap/swden.py
@@ -0,0 +1,17 @@
+from typing import List
+
+import xarray
+
+from ndbc_api.api.parsers.opendap._base import BaseParser
+
+
+class SwdenParser(BaseParser):
+
+    TEMPORAL_DIM = 'time'
+    SPATIAL_DIMS = ['latitude', 'longitude', 'frequency']
+
+    @classmethod
+    def nc_from_responses(cls,
+                          responses: List[dict],
+                          use_timestamp: bool = False) -> xarray.Dataset:
+        return super(SwdenParser, cls).nc_from_responses(responses)
diff --git a/ndbc_api/api/parsers/opendap/wlevel.py b/ndbc_api/api/parsers/opendap/wlevel.py
new file mode 100644
index 0000000..44709c9
--- /dev/null
+++ b/ndbc_api/api/parsers/opendap/wlevel.py
@@ -0,0 +1,17 @@
+from typing import List
+
+import xarray
+
+from ndbc_api.api.parsers.opendap._base import BaseParser
+
+
+class WlevelParser(BaseParser):
+
+    TEMPORAL_DIM = 'time'
+    SPATIAL_DIMS = ['latitude', 'longitude', 'frequency']
+
+    @classmethod
+    def nc_from_responses(cls,
+                          responses: List[dict],
+                          use_timestamp: bool = False) -> xarray.Dataset:
+        return super(WlevelParser, cls).nc_from_responses(responses)

From f1c3826c200129790c3236a8aab8274b8db7655a Mon Sep 17 00:00:00 2001
From: Jack Griffin <106121980+BluIsBest@users.noreply.github.com>
Date: Sat, 3 May 2025 22:21:35 -0400
Subject: [PATCH 19/47] part 3

---
 ndbc_api/api/handlers/__init__.py | 0
 .../__pycache__/__init__.cpython-311.pyc | Bin 0 -> 194 bytes
 .../__pycache__/_base.cpython-311.pyc | Bin 0 -> 412 bytes
 ndbc_api/api/handlers/_base.py | 2 +
 ndbc_api/api/handlers/http/__init__.py | 0
 .../http/__pycache__/__init__.cpython-311.pyc | Bin 0 -> 199 bytes
 .../http/__pycache__/data.cpython-311.pyc | Bin 0 -> 11855 bytes
 .../http/__pycache__/stations.cpython-311.pyc | Bin 0 -> 11985 bytes
 ndbc_api/api/handlers/http/data.py | 285 ++++++++++++++++++
 ndbc_api/api/handlers/http/stations.py | 186 ++++++++++++
 ndbc_api/api/handlers/opendap/__init__.py | 0
 .../__pycache__/__init__.cpython-311.pyc | Bin 0 -> 202 bytes
 .../opendap/__pycache__/data.cpython-311.pyc | Bin 0 -> 8303 bytes
 ndbc_api/api/handlers/opendap/data.py | 185 ++++++++++++
 14 files changed, 658 insertions(+)
 create mode 100644 ndbc_api/api/handlers/__init__.py
 create mode 100644 ndbc_api/api/handlers/__pycache__/__init__.cpython-311.pyc
 create mode 100644 ndbc_api/api/handlers/__pycache__/_base.cpython-311.pyc
 create mode 100644 ndbc_api/api/handlers/_base.py
 create mode 100644 ndbc_api/api/handlers/http/__init__.py
 create mode 100644 ndbc_api/api/handlers/http/__pycache__/__init__.cpython-311.pyc
 create mode 100644 ndbc_api/api/handlers/http/__pycache__/data.cpython-311.pyc
 create mode 100644 ndbc_api/api/handlers/http/__pycache__/stations.cpython-311.pyc
 create mode 100644 ndbc_api/api/handlers/http/data.py
 create mode 100644 ndbc_api/api/handlers/http/stations.py
 create mode 100644 ndbc_api/api/handlers/opendap/__init__.py
 create mode 100644 ndbc_api/api/handlers/opendap/__pycache__/__init__.cpython-311.pyc
 create mode 100644 ndbc_api/api/handlers/opendap/__pycache__/data.cpython-311.pyc
 create mode 100644 ndbc_api/api/handlers/opendap/data.py

diff --git a/ndbc_api/api/handlers/__init__.py b/ndbc_api/api/handlers/__init__.py
new file mode 100644
index 0000000..e69de29
diff --git a/ndbc_api/api/handlers/__pycache__/__init__.cpython-311.pyc b/ndbc_api/api/handlers/__pycache__/__init__.cpython-311.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..bf1edb4fa5213b63ba57f537fbffbc3ff4a176ec
GIT binary patch
literal 194
zcmZ3^%ge<81pLQ-ri19mAOZ#$p^VRLK*n^26oz01O-8?!3`I;p{%4TnuTW>Jn9$1)WnjE)FLQ9CNCu^IX3vV12F_dF{CgB
zGiWj=!vsJyBb4)*56GC#kjfCnn8Fanl)?y9$#jd`DX}=!BQY-}C$-2=llc}$e0*MF
zZfbn|Ew1?Z-29Z%91xo)KEALtF$W^ElHoJR$X`LuRxzQ)sYS&xUWv)s?nRktX_C%v WKQJ*ea(-aIBtC+~zF?668w>y=i(dZ# literal 0 HcmV?d00001 diff --git a/ndbc_api/api/handlers/_base.py b/ndbc_api/api/handlers/_base.py new file mode 100644 index 0000000..51592f6 --- /dev/null +++ b/ndbc_api/api/handlers/_base.py @@ -0,0 +1,2 @@ +class BaseHandler: + pass diff --git a/ndbc_api/api/handlers/http/__init__.py b/ndbc_api/api/handlers/http/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/ndbc_api/api/handlers/http/__pycache__/__init__.cpython-311.pyc b/ndbc_api/api/handlers/http/__pycache__/__init__.cpython-311.pyc new file mode 100644 index 0000000000000000000000000000000000000000..f17d81b4aa83f6d0513e522da901e870c3c30f0f GIT binary patch literal 199 zcmZ3^%ge<81YxJeGC=fW5CH>>P{wCAAY(d13PUi1CZpd7j|5q>>A|3998|KcAoENg5AFuw-k-7)_rOAtWFiqxsd_y<_)Tj_;IH1JT zxDaPJs3g>+kYqTdq|~&KW;m?0s~tiI!x5!Z?Gm~eZd1C|9-)WfsIo=v6?z$tDSc|c z(9dvO8Bn(hTNzF$+tfi}km01VUELwaZ}(aECIY z?iO}4+^J;LJ;EM_yOh1^K4Blj-O8xCU)axZk8(ggC>&G|35Pi1B4>!SQ;{yB-O^BL&RxC=2Oo0t% zrF{9KNHxZVPRtdGQj`5jUKWcDHaJz5^9?RMrAw-;H~8SxY*}e=p{Y4ZZZ<`>KpRiP zj2&(8!KpdA&&D;Mg;2sv@+Y&hrh~v?CgKvF$=KvWC}fk3m{K+iLMUdFMM>nMoiETP zA2B6u7Qi>5`|Rwf#k!1G9iF?vd+0EU}01F|V7jD4kkK{0uL-WuN4c{gPL9OFsDc z<$x5BgV#bT)67I_jIDo?ycp5%tglW37AwW0DPTjWV^s643_dD^8kgaGYE=%lENb?5$FAH34R zVY(&phW1m#ch&UyMo`m5^h~*e#4y!!EL!l(Mae{wSxsgLYN}_ERmKuw|U=Z zE{VV8yX~9f=E!yamt>9$ka_NpOL}RxhH<@*-1lAJ9^qqNxBRP&+Hie@NFKXOU1?N3 zV)Y2$v?1}lf8O)RYabC*;xpVLgf9$$=Vx`=M{|(VjcUpU}I9)0T6?e4G%Wav;eRJ z*7fG+Cyq}441!B-dQ!~)<_s;&%oK{#7jNXRid4NwOV{MQu1(L$qJCATb$+@iUC!sk za$y=jb-UH3uj+bv8iHFfQ@%lW!6-LlOUylk^Slj87ykj|Q&LNL0;jp>$-af;$dh38 z$VBzfc+KNVeaO8eNMA8}_vGD^um9~Ov9227p#D<&Jh81Z_@ifu@rA_rH`z+~)N8nE zJXS;C<^z*lHP!R?)bQU@!;f!%>#n4RpQS!pNPTqo)ZJ6BpGP~fw@KEgMV}`IE8DZr z5)%uFiEp_|_?*?}B=-5QXxBn?__16)dIrXcodZ#kzG4(RJI5`enm_C4CVvQy@qdaP z;Q#CyqKl_)_a-V~#=bGV|?UNbY{pT^&17 zJ^TThZ>D9#*$r*>TAb2UR8YaLo`vY{C!Xr@bLbV$Gp}%-d4=;VHagF3TIP~n&AMMJkk53q z%MM4o46kXIXVETgp|%D?{tnx1zE+lPN!O`7kA~T2N9}bwY7h6K&To}T)SfNWWMe+U zhMO+^`>U@&9o5(6s6Nd8JFD-;(?65DP1zVG+)MCCcJ zy)BN)i?6A?ODidF)DPL|rt&Tfbr}9!t|+Q(nN21dTPesBKE+xKoi z4m=J#p_RQ8&-P9%0PUZs_6~jVi+j^wethraYTve6B;`2=fj80@@l?&jST9L;)_e^5 zNxHWdU@%CMJ+%;nVbZy!-n5Mb!@u29i?VXokJ;Nxo_OPTepPd^2P43&TQJNMr96|C zV8B{}0ki}IXbA?;5)5EdFko0eCS4*JJox_-?44C5ZthZ^^Oh=swNwevQYAo3l>jYO z0&J=h7}k$Tm#7jRyp<~7*%`Bpw!SKXwPp;UHDds+83Sm|7{KO?0rLYfe#T8XOk$Tti2g+hiWo@ai|ph2j;{b9&2g zGA(LUfT~81b@rEmv%idW_Ls?BhF`5wJb^a)Y&FX4r)ydU4+q&1pm~hhzj`I}bO+C9 z42N1~t{pSxP6M)(E+%G;fz=1Brw%az=-3zLaWEU>B$k;kz+L|uwP0I{n@dNz=Gg8B z_b`rfVT_HD@*0&!DMn<=#Hyb_FV#Jm?3bM&D0a_M4e?rMd5~Lty-fLBWoksm=D?d zwW4MoWwKMu0jNW#k+3y<6wn+0aWgy~r`LgnK$>V{@B<6T>vCL`ykA-Au9EhO{eD4` z73+RM_E+q8l^m?t?J=;2gK3D3eb@W!5Tl;A(X>E0^LTvX+ s-@Ub;))HGM(C141w2pqFLRPqI-dj9}@A%dKeLnoseEQ0v5;Lv;0*{E)XaE2J literal 0 HcmV?d00001 diff --git a/ndbc_api/api/handlers/http/__pycache__/stations.cpython-311.pyc b/ndbc_api/api/handlers/http/__pycache__/stations.cpython-311.pyc new file mode 100644 index 0000000000000000000000000000000000000000..4085caedbc213f9bc0418c11356ce1346d11bdca GIT binary patch literal 11985 zcmeHNYit`=cD^&i;ZuBzdRdle%Zg3Ml4Hwpys|7uvSj%o%Z4AjD5Hd?I3t@jAIi+g zj`6!1p{B8kx}-Dg zRKwwmg8m>#U zBsYXNs9|TKHMudoQ4PBiZOQg z9UOCpgnXZ4G-OgtQ8AuUm{?j==&8735?>Xh5Unr+scQ;*E)`Fw#zK_h7>tVik!Xrb z@B&0VN8+NC7UHpJ;#5=+)il=$UW#&2DOyfB%|{bbJjs_*>IY&{{0e_Yib}v(ETuP< z`PJlk%Xz2ySEqSVf)2R##wwEZ*2+l%V{_$<<}y#cJw20F+*-GWUyJcm*iFTInir>_ zyL>4rRvb+A;VMQR_tPjoJPA`A3DZ2oQ8Nxg!X}OmvmA4sgw31@sD)$0R^G;$c{^v} z9hd5KW|Jn)dfjvzqIdNq5_Yar(pFQ_RV-=e-MokQPSVXKE}UbHGz;&Uv~qPde0;pk 
zMz}V$e^1NH#d|quwd#H9Km^&U_wg5d>8&Yk=6p4MseeLge@*Ftp|rblZ25-D@t8Do z^)8w$FzT%}H8yY? zK`q-Aw=R%HZTtK~_=<6P7a=jK^gRhNLl|}X@l7CrSyYBxAwnldOBJOuWU(SvuFX-m zA^mPC&lsBlJWOqRDn5*{rZEPuV;WPaGoSxATnaUIhgR6hL^>)dtSAZbsXLTHy(VHO zf?tIX^^Ttd91+KlMPrvog!trSJT-pmTI^y}NS+eXm-v_@j$h@Y(nVe<#>Z3KL@W}W zijU*}q6SXl_(e&Y8W)QQ(>-FDvdz0520Is{?_c=&g{9a#iFXsZV2>Q^ zfjSiR7zg_Q=*Rk=umL)+P{YDTcnU!v+7}WwW4r^%`lHx_MYbU61QKEtI+axD%kWRe z6-)60DQ1C}riD}tz_N7V46I7?uOV=QRLb6^vXnwz85n128_7_Mm0U{fKpW`WkaM?` zJIkbYb`S}*rg%wKJw%A4+6G#DlG#Fpz^qAfmh(B11$hXz)1$R_xpV~TTPI-cMn z#gd9k`x@o=fCMTz0c?i_AuTBO??w~T#jx${wJ9wq1b~%*1EYAj$q058IB0X@9Ttp; zgedI5Tn5@vOqiumyog^nh?govidBEtT1nD#z(OP9bs%%(H)i7ABD;3wTs^X@XPzn0 zCfmWhzvY2Hbl)FZ+Ofjq{C%>&Z+>Wg=*x$mw)gBG9L>7-76@%SSSAmB9eK~*M-1iP z_XPoxKhR%bDCfa1;VLjt6rRwMXN&CKvwZfmK-PUS=RPUBPiCzrALCp9v|$Ii*ErC` ze9U+T{LIIG7Q+88!&2~K$r4;shFvJbKG<4h6#i&JR#8ihj0>}?>Lgl&^u&*xk($gUTvB`mwW zcjZvd|Dx=F@pmI)+hq5?mHPb1C{aT#fJ~v{b_6VnsQNQj5UO9gx9 ztXZnpQ{dgq0Pggx`E_#{+%xPVSn{=((Y9SIS@M-7IhX=CQ=Br6@+>RiOk2Ewe?@Qg zR6>mWjbqNSW&2*8Ql7;rCB;Vpb|qaGwdq8h;$!j2I7}ZPR69YYy6;dbP>(5=qOK^+ zxv}E{&X*y#&<_vA9*2b?=u|4kD-0NHLL;VHMhDJDP7Iuaz{xQ+V2u>pRFzh-9_O#A zbBLzMIIxOEN=LYO4CMI&FDa%3K!#$zf-{U5vIxCUMD2}m2tz|ygXSg`ODvHV;a!9S zkRn=usFR33jkt@=>UuxZUaj_31rS}t{{S*a3LfI}{@M6jCu zh~4VRIof4MJF4Q&hrUe@e4+ck&{9Xv*Dd?H=MOzqp#sg=LivW~8;Q3Qi@O)ar9-)f z=j4Xx=11m7V2*0OweP)f*8Lo|5h@dCZSmD?-8OAbf-DHE`kHU-e0yik*CG2l9{9H2 z_ibIWEf23ezucem4a&a3dCNn$e_`at*xO^d`Ym$(mYlm&c7tguySHVn+eEZ%Ki=Co z=pvuE8V7eXpLALWyO>Y9SO^Q-ArtJeC>Ng=6?Qrmm&915mf@JIW?7SlHf)Amf+X#_ zLJ1oxwH}Nv_OHN2QdtJ#%q`X^w{K7x(1*(^6Txrbz-CNP!+fhs|DguUSuU%_oM499 zR$QXYno5u*GO1A_B6;E^3y^7-l21feo_;d8D|^wxp-1Np#)oBU5+ znLj*#IPdf=aBsal|1zds$OSs*53f2s-!`fSJ01jg-w*Cy-n}fY9Qvps7d#>dk7VmM z!u<`g)g8=QgDOIPYHvE!K|bwh9I`N<_F9K%=2tWeVO)Z#pDq&TViooiDcNDahEK5G zf;B_eSYc?%tg~oU?HB{*0K$EmtHPO5fQHa;B z9d&kb$gNmAw#lw%muIrB!JKPQb`4gW_ey4X?C<^ESodwR`4=iVW^cVw+Q z)V1S{hHmmVjRP&r$E;@{zlT)Ktx@O4X~=Eca9myc+YD-ESaB9**_3CI+yq)V~t{iqc&Jx9WN*MVq@ z$KSt*fsZ2R?v>rWS!=HlgO2=xOVd6CC!;i#*ZfZ}Pw2w?8hJt^P~efQDLnRavokH# zb9XuK2f#oVAS-bPWmnfqOV%}%a}CL^p>G?bg0g$>GGJ7n?C#52`&0o^SXdfHqVPzA z&ryp_Xt4={2conlcm?bZ+<2&T*1{Y{W?+aaZOKb8r)d{j=f;_Zqj85!tpVKhEA!O# zjcq?|tJn{YG1O%;)HmrjmQ}fE{fCZ&f2H)Egj_1w3KH6Q#Y@6@uXurN4sTrcpcrGw z8)x0I3tI8jgP$Y!&vB4KW&tgFZ8>fq8Xk$9JsCMYFr+w#jt-oNoID&E9yopWh=5%X z5ZpqnfODd73CU$72_TB?V)P1ZxWrR64q6*5*1_(9h5t-*wG5ZC{uFqKe+OiatOlBH z^`JF)?#-8PrRBDLxw?H|Ww!Q$k=fdt?>m5zesFX(^#5C+>1N=)c6rnOT=V{1pid6; zJ*-WF@(l;_FAn^}p?pjGV&`9WeQ;2Ib~x9Jw(BsZ8n5^Ojsy5$=!PTI>apXGSh``9 zdc>N5&f9)tC!Thgg+Gw~c4(#f7bibFncF!e?;OgxhqBfob&Eo^&JECsBZ~v~SO)SV z>PPhYJ~ocx=nM3{npIhbS*(y{U{k#2sd;*mh8x zQ0}hI6yz?$l%>sBKV#C?#$}EmQ`VV&9W1}bS9D(8tUKSOI%>+UBZV17&*W%21)B|K zae6}JCBtqPA_O{iALA@V#L{xwv7yaULWbL5u$ zLDR1LO}mymR{|d~9}WM?{@J0tC-4PBouC>)6q}ns+?$)`wB0WujlH4Zc*SdRQL*X@ z1qUw7oRFSM0mMe9rV`f_CW*%{pkYu`Tr8Hmt~hXyC7zIYf#+ZinS|{kaEFyNQ1gYK zKVMur3MZg;t<9o)=l%k6MO4}^;av(=(%5_>{&swcU7B8eeIcG}*ef^eop%(>q@(-6 z=AQeTdkQq=Y{CU%HCgrg3Kr6_=MiB#>c2oT?^rmNv$hxN$i{6;cDc3ZLF)_mTVKev zen)QoPS)BCTs<3p{DYg{U+R_H_hx&K-)}!IyGOIuQB@5M^GitsKZCyhuQbqJkEM=|jIcuM6MPm(AX>-@o zODipM*U)F?`(0!5CfK8FTXlD4t)1%l!Er6su^VbttdU46n&cx9#U6MGCC8DATOY4hiPC#IXLt>m_8-inAhXr7*ZWRcF zSRNgbng^{7L+6mZj3kO=5{P2LO#tB{#u2QQI`y-}itG`;)Gu(SCfm42&U|3d61O>@Wc_04l90I)oJ0I)oJ=0*ypa(K0_ZEkp#^%Q6;dy2X#76?8U zg?BSrsFFTnh}nDnC9Np<)Xd%jZDYr%rH%r@=kn$ccW9wXTB?Ob3Jq#4FB@9*))!cK z!Z&mI7kX~83%#*6glCYii!`(poEzCq1p-8E0V7o{ zU{X~Jm}Dp3hJph_b;RS>*tm$j?#HbKH>M&>m6Y1WgnspXtZbJybY05~j#$?U{mVKv zP$s2nkX;-+E4yE-wqL8ZU$3?wt7)X7+Wyj*A6WF`5Fg05Z!R$S+`41w^u6(wJ=v~< z^41sCG_`(Px&S4m?(P^^tGi 
zQeYseMpRO&Z%EZJ5fvb`MKLAe0F-9g<2XAJOwABqN0GC z+{y<1tW-CgmFm{dN_9sj;8>@COB7{?p~RGWs#EO7!&Kcx@T}a%$7Qgx{p+jfPB{UW zK2K9Ycd-bn<0iCz3);CDee+M$PNj6MU%tHfF4w_5SH)PTctB?TH%A|V@~okqwd<5# z9UY=iRipIo=+~+mEMpthFOOR~O7{~wwbt9Cjjs4hm3aNUtEld5Xs0h!^hMKA6GKE~ zI8UcdCH(>}MbO?6<3Qm!14ZS@mh9Kvd9pFP*1bynS^ZumJz3+OCr7i!J5PqQ#(S-l zV_9Oj^W=2ac<0IStntp1k*x7vD`hlG40oQK%^L4K8O*MA7tE711)9G$UAX*6&G}-T IeDzcO4`%i2TL1t6 literal 0 HcmV?d00001 diff --git a/ndbc_api/api/handlers/http/data.py b/ndbc_api/api/handlers/http/data.py new file mode 100644 index 0000000..e0952ec --- /dev/null +++ b/ndbc_api/api/handlers/http/data.py @@ -0,0 +1,285 @@ +from datetime import datetime, timedelta +from typing import Any + +import pandas as pd + +from ndbc_api.api.handlers._base import BaseHandler +from ndbc_api.api.parsers.http.adcp import AdcpParser +from ndbc_api.api.parsers.http.cwind import CwindParser +from ndbc_api.api.parsers.http.ocean import OceanParser +from ndbc_api.api.parsers.http.spec import SpecParser +from ndbc_api.api.parsers.http.stdmet import StdmetParser +from ndbc_api.api.parsers.http.supl import SuplParser +from ndbc_api.api.parsers.http.swden import SwdenParser +from ndbc_api.api.parsers.http.swdir import SwdirParser +from ndbc_api.api.parsers.http.swdir2 import Swdir2Parser +from ndbc_api.api.parsers.http.swr1 import Swr1Parser +from ndbc_api.api.parsers.http.swr2 import Swr2Parser +from ndbc_api.api.requests.http.adcp import AdcpRequest +from ndbc_api.api.requests.http.cwind import CwindRequest +from ndbc_api.api.requests.http.ocean import OceanRequest +from ndbc_api.api.requests.http.spec import SpecRequest +from ndbc_api.api.requests.http.stdmet import StdmetRequest +from ndbc_api.api.requests.http.supl import SuplRequest +from ndbc_api.api.requests.http.swden import SwdenRequest +from ndbc_api.api.requests.http.swdir import SwdirRequest +from ndbc_api.api.requests.http.swdir2 import Swdir2Request +from ndbc_api.api.requests.http.swr1 import Swr1Request +from ndbc_api.api.requests.http.swr2 import Swr2Request +from ndbc_api.exceptions import RequestException, ResponseException + + +class DataHandler(BaseHandler): + + @classmethod + def adcp( + cls, + handler: Any, + station_id: str, + start_time: datetime = datetime.now() - timedelta(days=30), + end_time: datetime = datetime.now(), + use_timestamp: bool = True, + ) -> pd.DataFrame: + """adcp""" + try: + reqs = AdcpRequest.build_request(station_id=station_id, + start_time=start_time, + end_time=end_time) + except Exception as e: + raise RequestException('Failed to build request.') from e + try: + resps = handler.handle_requests(station_id=station_id, reqs=reqs) + except Exception as e: + raise ResponseException('Failed to execute requests.') from e + return AdcpParser.df_from_responses(responses=resps, + use_timestamp=use_timestamp) + + @classmethod + def cwind( + cls, + handler: Any, + station_id: str, + start_time: datetime = datetime.now() - timedelta(days=30), + end_time: datetime = datetime.now(), + use_timestamp: bool = True, + ) -> pd.DataFrame: + """cwind""" + try: + reqs = CwindRequest.build_request(station_id=station_id, + start_time=start_time, + end_time=end_time) + except Exception as e: + raise RequestException('Failed to build request.') from e + try: + resps = handler.handle_requests(station_id=station_id, reqs=reqs) + except Exception as e: + raise ResponseException('Failed to execute requests.') from e + return CwindParser.df_from_responses(responses=resps, + use_timestamp=use_timestamp) + + @classmethod + def ocean( + cls, + handler: 
Any, + station_id: str, + start_time: datetime = datetime.now() - timedelta(days=30), + end_time: datetime = datetime.now(), + use_timestamp: bool = True, + ) -> pd.DataFrame: + """ocean""" + try: + reqs = OceanRequest.build_request(station_id=station_id, + start_time=start_time, + end_time=end_time) + except Exception as e: + raise RequestException('Failed to build request.') from e + try: + resps = handler.handle_requests(station_id=station_id, reqs=reqs) + except Exception as e: + raise ResponseException('Failed to execute requests.') from e + return OceanParser.df_from_responses(responses=resps, + use_timestamp=use_timestamp) + + @classmethod + def spec( + cls, + handler: Any, + station_id: str, + start_time: datetime = datetime.now() - timedelta(days=30), + end_time: datetime = datetime.now(), + use_timestamp: bool = True, + ) -> pd.DataFrame: + """spec""" + try: + reqs = SpecRequest.build_request(station_id=station_id, + start_time=start_time, + end_time=end_time) + except Exception as e: + raise RequestException('Failed to build request.') from e + try: + resps = handler.handle_requests(station_id=station_id, reqs=reqs) + except Exception as e: + raise ResponseException('Failed to execute requests.') from e + return SpecParser.df_from_responses(responses=resps, + use_timestamp=use_timestamp) + + @classmethod + def stdmet( + cls, + handler: Any, + station_id: str, + start_time: datetime = datetime.now() - timedelta(days=30), + end_time: datetime = datetime.now(), + use_timestamp: bool = True, + ) -> pd.DataFrame: + """stdmet""" + try: + reqs = StdmetRequest.build_request(station_id=station_id, + start_time=start_time, + end_time=end_time) + except Exception as e: + raise RequestException('Failed to build request.') from e + try: + resps = handler.handle_requests(station_id=station_id, reqs=reqs) + except Exception as e: + raise ResponseException('Failed to execute requests.') from e + return StdmetParser.df_from_responses(responses=resps, + use_timestamp=use_timestamp) + + @classmethod + def supl( + cls, + handler: Any, + station_id: str, + start_time: datetime = datetime.now() - timedelta(days=30), + end_time: datetime = datetime.now(), + use_timestamp: bool = True, + ) -> pd.DataFrame: + """supl""" + try: + reqs = SuplRequest.build_request(station_id=station_id, + start_time=start_time, + end_time=end_time) + except Exception as e: + raise RequestException('Failed to build request.') from e + try: + resps = handler.handle_requests(station_id=station_id, reqs=reqs) + except Exception as e: + raise ResponseException('Failed to execute requests.') from e + return SuplParser.df_from_responses(responses=resps, + use_timestamp=use_timestamp) + + @classmethod + def swden( + cls, + handler: Any, + station_id: str, + start_time: datetime = datetime.now() - timedelta(days=30), + end_time: datetime = datetime.now(), + use_timestamp: bool = True, + ) -> pd.DataFrame: + """swden""" + try: + reqs = SwdenRequest.build_request(station_id=station_id, + start_time=start_time, + end_time=end_time) + except Exception as e: + raise RequestException('Failed to build request.') from e + try: + resps = handler.handle_requests(station_id=station_id, reqs=reqs) + except Exception as e: + raise ResponseException('Failed to execute requests.') from e + return SwdenParser.df_from_responses(responses=resps, + use_timestamp=use_timestamp) + + @classmethod + def swdir( + cls, + handler: Any, + station_id: str, + start_time: datetime = datetime.now() - timedelta(days=30), + end_time: datetime = datetime.now(), + 
use_timestamp: bool = True,
+    ) -> pd.DataFrame:
+        """swdir"""
+        try:
+            reqs = SwdirRequest.build_request(station_id=station_id,
+                                              start_time=start_time,
+                                              end_time=end_time)
+        except Exception as e:
+            raise RequestException('Failed to build request.') from e
+        try:
+            resps = handler.handle_requests(station_id=station_id, reqs=reqs)
+        except Exception as e:
+            raise ResponseException('Failed to execute requests.') from e
+        return SwdirParser.df_from_responses(responses=resps,
+                                             use_timestamp=use_timestamp)
+
+    @classmethod
+    def swdir2(
+        cls,
+        handler: Any,
+        station_id: str,
+        start_time: datetime = datetime.now() - timedelta(days=30),
+        end_time: datetime = datetime.now(),
+        use_timestamp: bool = True,
+    ) -> pd.DataFrame:
+        """swdir2"""
+        try:
+            reqs = Swdir2Request.build_request(station_id=station_id,
+                                               start_time=start_time,
+                                               end_time=end_time)
+        except Exception as e:
+            raise RequestException('Failed to build request.') from e
+        try:
+            resps = handler.handle_requests(station_id=station_id, reqs=reqs)
+        except Exception as e:
+            raise ResponseException('Failed to execute requests.') from e
+        return Swdir2Parser.df_from_responses(responses=resps,
+                                              use_timestamp=use_timestamp)
+
+    @classmethod
+    def swr1(
+        cls,
+        handler: Any,
+        station_id: str,
+        start_time: datetime = datetime.now() - timedelta(days=30),
+        end_time: datetime = datetime.now(),
+        use_timestamp: bool = True,
+    ) -> pd.DataFrame:
+        """swr1"""
+        try:
+            reqs = Swr1Request.build_request(station_id=station_id,
+                                             start_time=start_time,
+                                             end_time=end_time)
+        except Exception as e:
+            raise RequestException('Failed to build request.') from e
+        try:
+            resps = handler.handle_requests(station_id=station_id, reqs=reqs)
+        except Exception as e:
+            raise ResponseException('Failed to execute requests.') from e
+        return Swr1Parser.df_from_responses(responses=resps,
+                                            use_timestamp=use_timestamp)
+
+    @classmethod
+    def swr2(
+        cls,
+        handler: Any,
+        station_id: str,
+        start_time: datetime = datetime.now() - timedelta(days=30),
+        end_time: datetime = datetime.now(),
+        use_timestamp: bool = True,
+    ) -> pd.DataFrame:
+        """swr2"""
+        try:
+            reqs = Swr2Request.build_request(station_id=station_id,
+                                             start_time=start_time,
+                                             end_time=end_time)
+        except Exception as e:
+            raise RequestException('Failed to build request.') from e
+        try:
+            resps = handler.handle_requests(station_id=station_id, reqs=reqs)
+        except Exception as e:
+            raise ResponseException('Failed to execute requests.') from e
+        return Swr2Parser.df_from_responses(responses=resps,
+                                            use_timestamp=use_timestamp)
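Every mode method in DataHandler above follows the same three-step pipeline: build the request, hand it to the injected handler, and parse the responses into a DataFrame. A hedged usage sketch follows; the `http_handler` argument stands in for whatever request handler the package wires up elsewhere (that wiring is not part of this patch), and only the DataHandler call itself comes from the code above:

# Hedged usage sketch for the DataHandler pattern above. `http_handler`
# is assumed to be any object exposing handle_requests(station_id, reqs).
from datetime import datetime, timedelta

from ndbc_api.api.handlers.http.data import DataHandler


def fetch_recent_stdmet(http_handler, station_id: str):
    # One month of standard meteorological data, parsed to a DataFrame.
    return DataHandler.stdmet(
        handler=http_handler,
        station_id=station_id,
        start_time=datetime.now() - timedelta(days=30),
        end_time=datetime.now(),
        use_timestamp=True,
    )

One caveat worth noting: the `datetime.now() - timedelta(days=30)` defaults in these signatures are evaluated once, at import time, so long-running callers should pass start_time and end_time explicitly, as above.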
diff --git a/ndbc_api/api/handlers/http/stations.py b/ndbc_api/api/handlers/http/stations.py
new file mode 100644
index 0000000..7071f6b
--- /dev/null
+++ b/ndbc_api/api/handlers/http/stations.py
@@ -0,0 +1,186 @@
+from math import asin, cos, pi, sqrt
+from typing import Any, Union
+
+import pandas as pd
+
+from ndbc_api.api.handlers._base import BaseHandler
+from ndbc_api.api.parsers.http.station_historical import HistoricalParser
+from ndbc_api.api.parsers.http.station_metadata import MetadataParser
+from ndbc_api.api.parsers.http.station_realtime import RealtimeParser
+from ndbc_api.api.parsers.http.active_stations import ActiveStationsParser
+from ndbc_api.api.parsers.http.historical_stations import HistoricalStationsParser
+from ndbc_api.api.requests.http.station_historical import HistoricalRequest
+from ndbc_api.api.requests.http.station_metadata import MetadataRequest
+from ndbc_api.api.requests.http.station_realtime import RealtimeRequest
+from ndbc_api.api.requests.http.active_stations import ActiveStationsRequest
+from ndbc_api.api.requests.http.historical_stations import HistoricalStationsRequest
+from ndbc_api.exceptions import ParserException, ResponseException
+
+
+class StationsHandler(BaseHandler):
+
+    DEG_TO_RAD = pi / 180
+    DIAM_OF_EARTH = 12756  # km
+    LAT_MAP = (lambda x: -1 * float(x.strip('S'))
+               if 'S' in x else float(x.strip('N')))
+    LON_MAP = (lambda x: -1 * float(x.strip('W'))
+               if 'W' in x else float(x.strip('E')))
+    UNITS = ('nm', 'km', 'mi')
+
+    @classmethod
+    def stations(cls, handler: Any) -> pd.DataFrame:
+        """Get all active stations from NDBC."""
+        req = ActiveStationsRequest.build_request()
+        try:
+            resp = handler.handle_request('stn_active', req)
+        except (AttributeError, ValueError, TypeError) as e:
+            raise ResponseException(
+                'Failed to execute `station` request.') from e
+        return ActiveStationsParser.df_from_response(resp, use_timestamp=False)
+
+    @classmethod
+    def historical_stations(cls, handler: Any) -> pd.DataFrame:
+        """Get historical stations from NDBC."""
+        req = HistoricalStationsRequest.build_request()
+        try:
+            resp = handler.handle_request('stn_historical', req)
+        except (AttributeError, ValueError, TypeError) as e:
+            raise ResponseException(
+                'Failed to execute `station` request.') from e
+        return HistoricalStationsParser.df_from_response(resp,
+                                                         use_timestamp=False)
+
+    @classmethod
+    def nearest_station(
+        cls,
+        handler: Any,
+        lat: Union[str, float],
+        lon: Union[str, float],
+    ) -> str:
+        """Get nearest station from specified lat/lon."""
+        df = cls.stations(handler=handler)
+        if isinstance(lat, str):
+            lat = StationsHandler.LAT_MAP(lat)
+        if isinstance(lon, str):
+            lon = StationsHandler.LON_MAP(lon)
+        try:
+            closest = cls._nearest(df, lat, lon)
+        except (TypeError, KeyError, ValueError) as e:
+            raise ParserException from e
+        closest = closest.to_dict().get('Station', {'UNK': 'UNK'})
+        return list(closest.values())[0]
+
+    @classmethod
+    def radial_search(
+        cls,
+        handler: Any,
+        lat: Union[str, float],
+        lon: Union[str, float],
+        radius: float,
+        units: str = 'km',
+    ) -> pd.DataFrame:
+        """Get stations within `radius` of the specified lat/lon."""
+        if units not in cls.UNITS:
+            raise ValueError(
+                f'Invalid unit: {units}, must be one of {cls.UNITS}.')
+        if radius < 0:
+            raise ValueError(f'Invalid radius: {radius}, must be non-negative.')
+        # pass the radius in km
+        if units == 'nm':
+            radius = radius * 1.852
+        elif units == 'mi':
+            radius = radius * 1.60934
+
+        df = cls.stations(handler=handler)
+        if isinstance(lat, str):
+            lat = StationsHandler.LAT_MAP(lat)
+        if isinstance(lon, str):
+            lon = StationsHandler.LON_MAP(lon)
+        try:
+            stations_in_radius = cls._radial_search(df, lat, lon, radius)
+        except (TypeError, KeyError, ValueError) as e:
+            raise ParserException from e
+        return stations_in_radius
+
+    @classmethod
+    def metadata(cls, handler: Any, station_id: str) -> pd.DataFrame:
+        """Get station description."""
+        req = MetadataRequest.build_request(station_id=station_id)
+        try:
+            resp = handler.handle_request(station_id, req)
+        except (AttributeError, ValueError, TypeError) as e:
+            raise ResponseException(
+                'Failed to execute `station` request.') from e
+        return MetadataParser.metadata(resp)
+
+    @classmethod
+    def realtime(cls, handler: Any, station_id: str) -> pd.DataFrame:
+        """Get the available realtime measurements for a station."""
+        req = RealtimeRequest.build_request(station_id=station_id)
+        try:
+            resp = handler.handle_request(station_id, req)
+        except (AttributeError, ValueError,
TypeError) as e: + raise ResponseException( + 'Failed to execute `station` request.') from e + return RealtimeParser.available_measurements(resp) + + @classmethod + def historical(cls, handler: Any, + station_id: str) -> Union[pd.DataFrame, dict]: + """Get the available historical measurements for a station.""" + req = HistoricalRequest.build_request(station_id=station_id) + try: + resp = handler.handle_request(station_id, req) + except (AttributeError, ValueError, TypeError) as e: + raise ResponseException( + 'Failed to execute `station` request.') from e + return HistoricalParser.available_measurements(resp) + + """ PRIVATE """ + + def _distance(lat_a: float, lon_a: float, lat_b: float, + lon_b: float) -> float: + haversine = (0.5 - cos( + (lat_b - lat_a) * StationsHandler.DEG_TO_RAD) / 2 + + cos(lat_a * StationsHandler.DEG_TO_RAD) * + cos(lat_b * StationsHandler.DEG_TO_RAD) * (1 - cos( + (lon_b - lon_a) * StationsHandler.DEG_TO_RAD)) / 2) + return StationsHandler.DIAM_OF_EARTH * asin(sqrt(haversine)) + + @staticmethod + def _nearest(df: pd.DataFrame, lat_a: float, lon_a: float): + """Get the nearest station from specified `float`-valued lat/lon.""" + # Drop rows with missing latitude or longitude + df_filtered = df.dropna(subset=['Lat', 'Lon']) + + # Calculate distances using Haversine formula + df_filtered['distance'] = df_filtered.apply( + lambda row: StationsHandler._distance(lat_a, lon_a, row['Lat'], row[ + 'Lon']), + axis=1) + + # Find the index of the closest row + smallest_distance = df_filtered['distance'].min() + + # Return the row corresponding to the nearest station + return df_filtered.loc[df_filtered['distance'] == smallest_distance] + + @staticmethod + def _radial_search(df: pd.DataFrame, lat_a: float, lon_a: float, + radius: float): + """Get the stations within radius km from specified `float`-valued lat/lon.""" + # Drop rows with missing latitude or longitude + df_filtered = df.dropna(subset=['Lat', 'Lon']) + + # Calculate distances using Haversine formula + df_filtered['distance'] = df_filtered.apply( + lambda row: StationsHandler._distance(lat_a, lon_a, row['Lat'], row[ + 'Lon']), + axis=1) + + df_filtered.sort_values(by='distance', inplace=True) + + # Filter rows within the radius + stations_within_radius = df_filtered[df_filtered['distance'] <= radius] + + return stations_within_radius diff --git a/ndbc_api/api/handlers/opendap/__init__.py b/ndbc_api/api/handlers/opendap/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/ndbc_api/api/handlers/opendap/__pycache__/__init__.cpython-311.pyc b/ndbc_api/api/handlers/opendap/__pycache__/__init__.cpython-311.pyc new file mode 100644 index 0000000000000000000000000000000000000000..25ae91256ef1a2e3d3f3cb081d988fc38726386f GIT binary patch literal 202 zcmZ3^%ge<81YxJeGC=fW5CH>>P{wCAAY(d13PUi1CZpdx%axu=*PNPJ*sWMurn K03(W+fnopc0aZ&TiUg+~`lJ1jzyid~ndO!|v+@}TgkPNJV#;>*MVL8n4fELl) z<#vt-wWuDGV;m1@9eP}jbG%JU=t()r@vzpZcgbBGk7x(M|?b<=TNABTxRO{6b z$%i-|(+=x>av#S#w0?a+9^iOfdqE$R2RWY5hV)^1SU(~k5r~JJCTj8qQ9G&cj!4K| zc-4L!A#qeXMRkheNMphmHfH|SJlJqK8oLWykTz9U!~AISYL7%wV0pT=m#osT|{G?KxR7|B7FTa6gL?Cu~Rb|=uiRHg! 
zhfynJmC^$o}t5PgEg z+>9oKT;9Z2JS*ag_nzQR0#|+c!69OL>->fZ>%8hW{q-JoTZ~%qf9g@itNv^LMPZRF zi64?hAxKt)-)-SipAB+-_q=yJ5ar;pud9LQNZpKJgs74`GB%THMjVdtrkfhC1XiSb zKDR`$jNkGqkV#fWb_iy%!wAqW&${1L<~6Dgn1z9h3-g*f!0cp`I=kxOw3ZiojAy-d zI=V!23nq0ajnt~g3bI;~FjzmZ9SJbZ3s!{l%+`2X!OaxHI&nB^`+_rWurBCj_%2pQ zK9`+i1s%rh>|?A3EitDVtQTqAZ^31<7jd3IEeWR>Mi80<&h_lWiSgN=LcTU;rCx%vF;x#ip?h3V&5;WEvc#_S?h%uAHj+Ov7}VlJx`=V$S$`K>Wq02f!4;w)?# zlvHt<4T7L62O1qeg)80zgT`L~Tqo6}6ns~BnCL4f#@0iXW0}gEC##Yt`Ht|IAbi5% z&B>dSPyg;Cv4d420R2|@Fg{osdh0>_WI2BFt8^)R>M2}R5y=qoiot22lI;C+a`aEh z(X}gIdrQgD2g&!!$@gxax_Rp9!)P~ho8~+n_%J?H8csimXUg%+*Fq^gA)p^t`*_*wu2SNfugAux#%p;-?!qn&E+nw=reeI zq2`!2LTe27^CsOOLRK_PO$KJtYk>SZA!;?kkJC`ha z%h99jQsu-HCW{%KEM|DJnBhC88DT%nY&rex(m^Ey3tlb#GttsN!#n!t+vuOo_A*@b zuW|l5>_Ca%_}<1X01>* zs~6M-4e@X``|*OJ)kfNKnqnAGfxT2v8CDJ~F<)3@czS18Ot3`5WR~}0p`cm5C516% z+45sG!=NTV>G7R1!~F=ubjC0kGW0lx`mT}) z>GsO!*KS{13$6v%S?RURgV!=;fc=@u;gMTEzdif;h1(Y@eS_6VQksJBgYZ#DvMTYW zk92ld{TvOD&coFpM?)mhTW#ZLm~HNk*suDkQQpt_*xVkA#Mg1-7gY}*7=y;H z6~j$*Qkr%YgQlYxppIgII*I}6C#}Vc`GEl+K&hzDj8R_g8GoQnDXAQDx*M9>93)vV7)p zaX$YMk6~;INUUC`dX<9eRm!PerLq?xlrgNKS}(XLiv0gsMygic;?*m=7B|?E&aO>h zg~4h~XrB`CthWQ9eGtLpgf-MsbZxJTJJuao+M5pCI_$u*me(*lZDUz0Vs2X?UgmUb z*+6Ko5LRfm32&gZmk1BbJ5kzegaDzzI7&pJW%HiDh~N3*NBb7K93Uxzy~gkgk&ZencYI z`QIb*ddYoP$gz_9u8{GP`~G%4CrZS1H=E@m0Qzr7(>SZK=VXbv?uuI(F3Uv#^c6Bu aYIawBibud`_57b(y^rnDCoPD$9sdR4t2KfE literal 0 HcmV?d00001 diff --git a/ndbc_api/api/handlers/opendap/data.py b/ndbc_api/api/handlers/opendap/data.py new file mode 100644 index 0000000..08a2468 --- /dev/null +++ b/ndbc_api/api/handlers/opendap/data.py @@ -0,0 +1,185 @@ +from datetime import datetime, timedelta +from typing import Any + +import xarray + +from ndbc_api.api.handlers._base import BaseHandler +from ndbc_api.api.parsers.opendap.adcp import AdcpParser +from ndbc_api.api.parsers.opendap.cwind import CwindParser +from ndbc_api.api.parsers.opendap.ocean import OceanParser +from ndbc_api.api.parsers.opendap.pwind import PwindParser +from ndbc_api.api.parsers.opendap.stdmet import StdmetParser +from ndbc_api.api.parsers.opendap.swden import SwdenParser +from ndbc_api.api.parsers.opendap.wlevel import WlevelParser +from ndbc_api.api.requests.opendap.adcp import AdcpRequest +from ndbc_api.api.requests.opendap.cwind import CwindRequest +from ndbc_api.api.requests.opendap.ocean import OceanRequest +from ndbc_api.api.requests.opendap.pwind import PwindRequest +from ndbc_api.api.requests.opendap.stdmet import StdmetRequest +from ndbc_api.api.requests.opendap.swden import SwdenRequest +from ndbc_api.api.requests.opendap.wlevel import WlevelRequest +from ndbc_api.exceptions import RequestException, ResponseException + + +class OpenDapDataHandler(BaseHandler): + + @classmethod + def adcp( + cls, + handler: Any, + station_id: str, + start_time: datetime = datetime.now() - timedelta(days=30), + end_time: datetime = datetime.now(), + use_timestamp: bool = True, + ) -> xarray.Dataset: + """adcp""" + try: + reqs = AdcpRequest.build_request(station_id=station_id, + start_time=start_time, + end_time=end_time) + except Exception as e: + raise RequestException('Failed to build request.') from e + try: + resps = handler.handle_requests(station_id=station_id, reqs=reqs) + except Exception as e: + raise ResponseException('Failed to execute requests.') from e + return AdcpParser.nc_from_responses(responses=resps, + use_timestamp=use_timestamp) + + @classmethod + def cwind( + cls, + handler: Any, + station_id: str, + start_time: datetime = datetime.now() - timedelta(days=30), + end_time: 
datetime = datetime.now(), + use_timestamp: bool = True, + ) -> xarray.Dataset: + """cwind""" + try: + reqs = CwindRequest.build_request(station_id=station_id, + start_time=start_time, + end_time=end_time) + except Exception as e: + raise RequestException('Failed to build request.') from e + try: + resps = handler.handle_requests(station_id=station_id, reqs=reqs) + except Exception as e: + raise ResponseException('Failed to execute requests.') from e + return CwindParser.nc_from_responses(responses=resps, + use_timestamp=use_timestamp) + + @classmethod + def ocean( + cls, + handler: Any, + station_id: str, + start_time: datetime = datetime.now() - timedelta(days=30), + end_time: datetime = datetime.now(), + use_timestamp: bool = True, + ) -> xarray.Dataset: + """ocean""" + try: + reqs = OceanRequest.build_request(station_id=station_id, + start_time=start_time, + end_time=end_time) + except Exception as e: + raise RequestException('Failed to build request.') from e + try: + resps = handler.handle_requests(station_id=station_id, reqs=reqs) + except Exception as e: + raise ResponseException('Failed to execute requests.') from e + return OceanParser.nc_from_responses(responses=resps, + use_timestamp=use_timestamp) + + @classmethod + def pwind( + cls, + handler: Any, + station_id: str, + start_time: datetime = datetime.now() - timedelta(days=30), + end_time: datetime = datetime.now(), + use_timestamp: bool = True, + ) -> xarray.Dataset: + """pwind""" + try: + reqs = PwindRequest.build_request(station_id=station_id, + start_time=start_time, + end_time=end_time) + except Exception as e: + raise RequestException('Failed to build request.') from e + try: + resps = handler.handle_requests(station_id=station_id, reqs=reqs) + except Exception as e: + raise ResponseException('Failed to execute requests.') from e + return PwindParser.nc_from_responses(responses=resps, + use_timestamp=use_timestamp) + + @classmethod + def stdmet( + cls, + handler: Any, + station_id: str, + start_time: datetime = datetime.now() - timedelta(days=30), + end_time: datetime = datetime.now(), + use_timestamp: bool = True, + ) -> xarray.Dataset: + """stdmet""" + try: + reqs = StdmetRequest.build_request(station_id=station_id, + start_time=start_time, + end_time=end_time) + except Exception as e: + raise RequestException('Failed to build request.') from e + try: + resps = handler.handle_requests(station_id=station_id, reqs=reqs) + except Exception as e: + raise ResponseException('Failed to execute requests.') from e + return StdmetParser.nc_from_responses(responses=resps, + use_timestamp=use_timestamp) + + @classmethod + def swden( + cls, + handler: Any, + station_id: str, + start_time: datetime = datetime.now() - timedelta(days=30), + end_time: datetime = datetime.now(), + use_timestamp: bool = True, + ) -> xarray.Dataset: + """swden""" + try: + reqs = SwdenRequest.build_request(station_id=station_id, + start_time=start_time, + end_time=end_time) + except Exception as e: + raise RequestException('Failed to build request.') from e + try: + resps = handler.handle_requests(station_id=station_id, reqs=reqs) + except Exception as e: + raise ResponseException('Failed to execute requests.') from e + return SwdenParser.nc_from_responses(responses=resps, + use_timestamp=use_timestamp) + + @classmethod + def wlevel( + cls, + handler: Any, + station_id: str, + start_time: datetime = datetime.now() - timedelta(days=30), + end_time: datetime = datetime.now(), + use_timestamp: bool = True, + ) -> xarray.Dataset: + """wlevel""" + try: + reqs 
= WlevelRequest.build_request(station_id=station_id,
+                                               start_time=start_time,
+                                               end_time=end_time)
+        except Exception as e:
+            raise RequestException('Failed to build request.') from e
+        try:
+            resps = handler.handle_requests(station_id=station_id, reqs=reqs)
+        except Exception as e:
+            raise ResponseException('Failed to execute requests.') from e
+        return WlevelParser.nc_from_responses(responses=resps,
+                                              use_timestamp=use_timestamp)

From d3f609c00fdc32393d7f6a915235f2bdd7ba6fa4 Mon Sep 17 00:00:00 2001
From: Jack Griffin <106121980+BluIsBest@users.noreply.github.com>
Date: Sat, 3 May 2025 22:22:35 -0400
Subject: [PATCH 20/47] Delete ndbc_api/IGNORE

---
 ndbc_api/IGNORE | 1 -
 1 file changed, 1 deletion(-)
 delete mode 100644 ndbc_api/IGNORE

diff --git a/ndbc_api/IGNORE b/ndbc_api/IGNORE
deleted file mode 100644
index 8b13789..0000000
--- a/ndbc_api/IGNORE
+++ /dev/null
@@ -1 +0,0 @@
-

From e046ff6f794de9f97895ceaa6a4a6e33740f0053 Mon Sep 17 00:00:00 2001
From: Jack Griffin <106121980+BluIsBest@users.noreply.github.com>
Date: Sat, 3 May 2025 22:22:45 -0400
Subject: [PATCH 21/47] Delete ndbc_api/api/IGNORE

---
 ndbc_api/api/IGNORE | 1 -
 1 file changed, 1 deletion(-)
 delete mode 100644 ndbc_api/api/IGNORE

diff --git a/ndbc_api/api/IGNORE b/ndbc_api/api/IGNORE
deleted file mode 100644
index 8b13789..0000000
--- a/ndbc_api/api/IGNORE
+++ /dev/null
@@ -1 +0,0 @@
-

From e828a46cb86ecc64eee1e7f8a2b40674dcf4bccf Mon Sep 17 00:00:00 2001
From: Jack Griffin <106121980+BluIsBest@users.noreply.github.com>
Date: Sun, 4 May 2025 02:45:24 -0400
Subject: [PATCH 22/47] User-defined locations

Allows the user to give a latitude and longitude for any location; the
nearest stations are pulled to provide storm data.
---
 gatheringInfo.py | 29 +++++++++++++++++++++++++++++
 1 file changed, 29 insertions(+)

diff --git a/gatheringInfo.py b/gatheringInfo.py
index 4516a76..b57d239 100644
--- a/gatheringInfo.py
+++ b/gatheringInfo.py
@@ -239,6 +239,35 @@ def getPRES(self):
     def getNAME(self):
         return self.__NAME
 
+
+# We now define the radial search function to search for stations in a new location, defined by the user
+def Add_New_Location(lat, long):
+    # First we define an int counter to track how many stations we need (seven is a good number)
+    num = 0
+    # Then an empty list to store the station_ids for our eventual BB object definition
+    location_station_list = []
+
+    # Use the NdbcApi radial search feature to grab the nearest stations and allow us to quickly find the station name
+    station_list = api.radial_search(lat=lat, lon=long, radius=350, units='km').iloc[0:10]
+    station_list = station_list.reset_index()
+
+    for _, row in station_list.iterrows():
+        if row['Includes Meteorology'] is True and num < 7:
+            # If the station gives meteorology data then we add it to the list of stations to use, otherwise move on to the next
+            location_station_list.append(row['Station'])
+            num += 1
+        else:
+            pass
+
+# Finally we check whether the station id list is empty; if it is not, we return the new object to be saved.
+# Otherwise we fall through and the function returns None.
+    if location_station_list:
+        newLoc = BB(location_station_list)
+        return newLoc
+    else:
+        pass
+
+
 def getLOC(self):
     return self.__LOC
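The commit message of patch 22 describes the feature; a usage sketch may help. This is a minimal, hypothetical example (the coordinates are illustrative, not from the patch), and the None check covers the fall-through case where no meteorology-reporting stations are found:

# Hedged usage sketch for Add_New_Location; the coordinates below
# (roughly Cape Hatteras, NC) are illustrative only.
new_loc = Add_New_Location(35.2, -75.5)
if new_loc is not None:
    # The returned BB object exposes the SSI helpers defined in
    # gatheringInfo.py, so regional averages come straight off it.
    print(new_loc.get_SSI_WSPD())
    print(new_loc.get_SSI_PRES())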
From d53fe652385e2bde972284993281a055d91a8319 Mon Sep 17 00:00:00 2001
From: Joshua Bauer
Date: Sun, 4 May 2025 15:03:58 -0400
Subject: [PATCH 23/47] Update florida.py

removed prevent_initial_call from callback to allow button functionality
---
 pages/florida.py | 7 +++----
 1 file changed, 3 insertions(+), 4 deletions(-)

diff --git a/pages/florida.py b/pages/florida.py
index ce6f2bd..4f13f6f 100644
--- a/pages/florida.py
+++ b/pages/florida.py
@@ -15,11 +15,10 @@
     html.Div(id='fl_output')
 ])
 
-
+# callback decorator identifying the input and output, and a function that takes the input and outputs data to the GUI
 @callback(
-    Output(component_id='ny_output', component_property='children', allow_duplicate=True),
-    Input(component_id='ny_dropdown', component_property='value'),
-    prevent_initial_call='initial_duplicate'
+    Output(component_id='fl_output', component_property='children'),
+    Input(component_id='fl_dropdown', component_property='value')
 )
 def display_single_buoy_data(selected_string):

From 25d6fe511653bffcf3b67d3d8f89a11278c5d172 Mon Sep 17 00:00:00 2001
From: Joshua Bauer
Date: Sun, 4 May 2025 15:04:58 -0400
Subject: [PATCH 24/47] Update georgia.py

---
 pages/georgia.py | 8 +++-----
 1 file changed, 3 insertions(+), 5 deletions(-)

diff --git a/pages/georgia.py b/pages/georgia.py
index 8462369..8d390a9 100644
--- a/pages/georgia.py
+++ b/pages/georgia.py
@@ -16,10 +16,8 @@
 
 @callback(
-    Output(component_id='ga_output', component_property='children', allow_duplicate=True),
-    Input(component_id='ga_dropdown', component_property='value'),
-    prevent_initial_call='initial_duplicate'
-
+    Output(component_id='ga_output', component_property='children'),
+    Input(component_id='ga_dropdown', component_property='value')
 )
 def display_single_buoy_data(selected_string):
     """
@@ -74,4 +72,4 @@ def display_single_buoy_data(selected_string):
             f'Average Wave Height: {avg_wave_height} m\n'
             f'Average Pressure: {avg_pressure} millibars\n'
             f'{storm_strength}'
-            )
\ No newline at end of file
+            )

From 032b2fd33b82e61dc9b40d76a36986fbd61bc183 Mon Sep 17 00:00:00 2001
From: Joshua Bauer
Date: Sun, 4 May 2025 15:08:01 -0400
Subject: [PATCH 25/47] Update maine.py

removed prevent_initial_callback for button functionality
---
 pages/maine.py | 8 +++-----
 1 file changed, 3 insertions(+), 5 deletions(-)

diff --git a/pages/maine.py b/pages/maine.py
index bec353c..e70c834 100644
--- a/pages/maine.py
+++ b/pages/maine.py
@@ -16,10 +16,8 @@
 
 @callback(
-    Output(component_id='ga_output', component_property='children', allow_duplicate=True),
-    Input(component_id='ga_dropdown', component_property='value'),
-    prevent_initial_call='initial_duplicate'
-
+    Output(component_id='me_output', component_property='children'),
+    Input(component_id='me_dropdown', component_property='value')
 )
 def display_single_buoy_data(selected_string):
     """
@@ -74,4 +72,4 @@ def display_single_buoy_data(selected_string):
             f'Average Wave Height: {avg_wave_height} m\n'
             f'Average Pressure: {avg_pressure} millibars\n'
             f'{storm_strength}'
-            )
\ No newline at end of file
+            )

From 64e74d4f03b9852ac5a4f3b175280881a944f226 Mon Sep 17 00:00:00 2001
From: Joshua Bauer
Date: Sun, 4 May 2025 15:08:43 -0400
Subject: [PATCH 26/47] Update maryland.py

removed prevent_initial_callback for button functionality
---
 pages/maryland.py | 8 +++-----
 1 file changed, 3 insertions(+), 5 deletions(-)

diff --git a/pages/maryland.py b/pages/maryland.py
index da7cec7..6c29f87 100644
--- a/pages/maryland.py
+++ b/pages/maryland.py
@@ -15,10 +15,8 @@
 ])
 
 @callback(
-    Output(component_id='ga_output', component_property='children', allow_duplicate=True),
-    Input(component_id='ga_dropdown', component_property='value'),
-    prevent_initial_call='initial_duplicate'
-
+    Output(component_id='md_output', component_property='children'),
+    Input(component_id='md_dropdown', component_property='value')
 )
 def display_single_buoy_data(selected_string):
     """
@@ -73,4 
+71,4 @@ def display_single_buoy_data(selected_string): f'Average Wave Height: {avg_wave_height} m\n' f'Average Pressure: {avg_pressure} millibars\n' f'{storm_strength}' - ) \ No newline at end of file + ) From afd5f8e092f98c3c8f2b9289118710462451bf17 Mon Sep 17 00:00:00 2001 From: Joshua Bauer Date: Sun, 4 May 2025 15:09:33 -0400 Subject: [PATCH 27/47] Update massachusetts.py removed prevent_initial_callback for button functionality --- pages/massachusetts.py | 8 +++----- 1 file changed, 3 insertions(+), 5 deletions(-) diff --git a/pages/massachusetts.py b/pages/massachusetts.py index 9d1adfe..f37a77a 100644 --- a/pages/massachusetts.py +++ b/pages/massachusetts.py @@ -15,10 +15,8 @@ ]) @callback( - Output(component_id='ga_output', component_property='children', allow_duplicate=True), - Input(component_id='ga_dropdown', component_property='value'), - prevent_initial_call='initial_duplicate' - + Output(component_id='mass_output', component_property='children'), + Input(component_id='mass_dropdown', component_property='value') ) def display_single_buoy_data(selected_string): """ @@ -73,4 +71,4 @@ def display_single_buoy_data(selected_string): f'Average Wave Height: {avg_wave_height} m\n' f'Average Pressure: {avg_pressure} millibars\n' f'{storm_strength}' - ) \ No newline at end of file + ) From a0d2029097156d12cb27bd1d4a803f24ebdda28c Mon Sep 17 00:00:00 2001 From: Joshua Bauer Date: Sun, 4 May 2025 15:13:40 -0400 Subject: [PATCH 28/47] Update new_york.py removed prevent_initial_callback for button functionality --- pages/new_york.py | 48 +++-------------------------------------------- 1 file changed, 3 insertions(+), 45 deletions(-) diff --git a/pages/new_york.py b/pages/new_york.py index 09f176e..d8f5f7d 100644 --- a/pages/new_york.py +++ b/pages/new_york.py @@ -16,12 +16,10 @@ html.Div(id='ny_output') ]) - +# callback decorator identifying input and output and a function that takes the input and returns an output, to be displayed on the GUI @callback( - Output(component_id='ny_output', component_property='children', allow_duplicate=True), - Input(component_id='ny_dropdown', component_property='value'), - prevent_initial_call='initial_duplicate' - + Output(component_id='ny_output', component_property='children'), + Input(component_id='ny_dropdown', component_property='value') ) def display_single_buoy_data(selected_string): """ @@ -77,43 +75,3 @@ def display_single_buoy_data(selected_string): f'Average Pressure: {avg_pressure} millibars\n' f'{storm_strength}' ) - -#@callback( -#Output(component_id='ny_output', component_property='children', allow_duplicate=True), -# Input(component_id='ny_dropdown', component_property='value'), -# prevent_initial_call='initial_duplicate' -#) -#def display_region_data(): -# """ -# :return: display averaged weather data over the selected region and determine safety of storm -# """ -# -# id_list = ['44025', '44065', 'SDNH4', 'KPTN6', 'MHRN6', 'ROBN4', 'BATN6'] -# region = gatheringInfo.BB(id_list) -# -# avg_wind_speed = region.get_SSI_WSPD() -# avg_wave_height = region.get_SSI_WVHT() -# avg_pressure = region.get_SSI_PRES() -# -# # determines storm strength -# SSI = (0.5 * ((avg_wind_speed / 60) ** 2) + -# 0.3 * (930 / avg_pressure) + -# 0.2 * avg_wave_height / 12) -# -# if SSI < 0.2: -# storm_strength = f"The expected storm should be a minimal storm" -# if 0.21 < SSI < 0.4: -# storm_strength = f"The expected storm should be a moderate storm" -# if 0.41 < SSI < 0.6: -# storm_strength = f"The expected storm should be a strong storm" -# if 0.61 < SSI < 0.8: -# 
storm_strength = f"The expected storm should be a severe storm" -# if 0.81 < SSI: -# storm_strength = f"The expected storm should be an extreme storm" -# -# return (f"New York Metropolitan Region Weather Data:\n" -# f"Wind Speed: {avg_wind_speed} \n" -# f"Wave Height: {avg_wave_height} \n" -# f"Pressure: {avg_pressure} \n" -# f"{storm_strength}") -# \ No newline at end of file From 0ad1406c88ec8c557647e68dd6ec0a87f75d88bd Mon Sep 17 00:00:00 2001 From: Joshua Bauer Date: Sun, 4 May 2025 15:15:44 -0400 Subject: [PATCH 29/47] Update north_carolina.py removed prevent_initial_callback for button functionality --- pages/north_carolina.py | 10 ++++------ 1 file changed, 4 insertions(+), 6 deletions(-) diff --git a/pages/north_carolina.py b/pages/north_carolina.py index 0680465..0e6adee 100644 --- a/pages/north_carolina.py +++ b/pages/north_carolina.py @@ -15,11 +15,9 @@ ]) @callback( - Output(component_id='ga_output', component_property='children', allow_duplicate=True), - Input(component_id='ga_dropdown', component_property='value'), - prevent_initial_call='initial_duplicate' - -) + Output(component_id='nc_output', component_property='children'), + Input(component_id='nc_dropdown', component_property='value') + ) def display_single_buoy_data(selected_string): """ :param selected_string: this is the selection from the dropdown menu @@ -73,4 +71,4 @@ def display_single_buoy_data(selected_string): f'Average Wave Height: {avg_wave_height} m\n' f'Average Pressure: {avg_pressure} millibars\n' f'{storm_strength}' - ) \ No newline at end of file + ) From f1d8104dd8ebe037a170ed6cc7af2bb7ba0ed191 Mon Sep 17 00:00:00 2001 From: Joshua Bauer Date: Sun, 4 May 2025 15:17:18 -0400 Subject: [PATCH 30/47] Update virginia.py removed prevent_initial_callback for button functionality --- pages/virginia.py | 10 ++++------ 1 file changed, 4 insertions(+), 6 deletions(-) diff --git a/pages/virginia.py b/pages/virginia.py index 288c296..6b0cd1c 100644 --- a/pages/virginia.py +++ b/pages/virginia.py @@ -14,12 +14,10 @@ html.Div(id='va_output') ]) - +# callback decorator identifying input and output, and a function that takes in the input, returns an output to be displayed on the GUI @callback( - Output(component_id='ga_output', component_property='children', allow_duplicate=True), - Input(component_id='ga_dropdown', component_property='value'), - prevent_initial_call='initial_duplicate' - + Output(component_id='va_output', component_property='children'), + Input(component_id='va_dropdown', component_property='value') ) def display_single_buoy_data(selected_string): """ @@ -74,4 +72,4 @@ def display_single_buoy_data(selected_string): f'Average Wave Height: {avg_wave_height} m\n' f'Average Pressure: {avg_pressure} millibars\n' f'{storm_strength}' - ) \ No newline at end of file + ) From 26a4421231386f58a33bc3f2c284a497856edadb Mon Sep 17 00:00:00 2001 From: Jack Griffin <106121980+BluIsBest@users.noreply.github.com> Date: Sun, 4 May 2025 17:36:03 -0400 Subject: [PATCH 31/47] Update gatheringInfo.py Debugging how this works with NDBC_api --- gatheringInfo.py | 52 ++++++++++++++++++++++++++++++++++++++++++++++-- 1 file changed, 50 insertions(+), 2 deletions(-) diff --git a/gatheringInfo.py b/gatheringInfo.py index b57d239..fdf5241 100644 --- a/gatheringInfo.py +++ b/gatheringInfo.py @@ -66,7 +66,8 @@ def get_SSI_WSPD(self): print(f"Finished API call for buoy: {buoy}") # Flip resulting Dataframe (using pandas) to see 5 most recent results - lastFive = BuoyWspd.tail() + lastFive = pd.DataFrame(BuoyWspd) + lastFive = 
lastFive.tail()
 
         # Remove Multi Indexing so we can sort by timestamp
         lastFive = lastFive.reset_index()
@@ -109,7 +110,8 @@ def get_SSI_WVHT(self):
         print(f"Finished API call for buoy: {buoy}")
 
         # Flip resulting Dataframe (using pandas) to see 5 most recent results
-        lastFive = BuoyWvht.tail()
+        lastFive = pd.DataFrame(BuoyWvht)
+        lastFive = lastFive.tail()
 
         # Remove Multi Indexing so we can sort by timestamp
         lastFive = lastFive.reset_index()
@@ -132,6 +134,52 @@ def get_SSI_WVHT(self):
 
         return sum_of_WVHT / WVHT_counter
 
+    def get_SSI_PRES(self):
+        # Set initial counting variable to determine which Buoys are giving the information we are searching for
+        PRES_counter = 0
+        sum_of_PRES = 0.0
+        # Iterate through the list of buoys provided
+        for buoy in self.__id_list:
+            # For tracking purposes
+            print(buoy)
+            # API Call for {buoy} in station list
+            BuoyPres = api.get_data(
+                station_id=buoy,
+                mode='stdmet',
+                start_time=given_startTime,
+                end_time=given_endTime,
+                as_df=True
+            )
+            # For tracking progress
+            print(f"Finished API call for buoy: {buoy}")
+
+            # Flip resulting Dataframe (using pandas) to see 5 most recent results
+            lastFive = pd.DataFrame(BuoyPres)
+            lastFive = lastFive.tail()
+
+            # Remove Multi Indexing so we can sort by timestamp
+            lastFive = lastFive.reset_index()
+            lastFive = lastFive.sort_values(by='timestamp', ascending=False)
+
+            # Check if the station's most recent readings have been submitted, else grab the next furthest back, or if
+            # not within 3 spaces, skip this buoy's reading. If read properly, increment the PRES_counter += 1
+            lastFive = lastFive.iloc[0:3, 9]
+
+            PRES_values = lastFive.tolist()
+            # Iterate through the PRES_values list
+            for value in PRES_values:
+                # If the value is not a nan value (it's usually regarded as a float and will break calculations)
+                if not math.isnan(value):
+                    sum_of_PRES += value
+                    PRES_counter += 1
+                    break
+                else:
+                    # Pass if it is a nan
+                    pass
+
+        # Now we gather the final PRES average and return it
+        return sum_of_PRES / PRES_counter
+
     def get_SSI_PRES(self):
         # Set initial counting variable to determine which Buoys are giving the information we are searching for
         PRES_counter = 0
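get_SSI_WSPD, get_SSI_WVHT, and the get_SSI_PRES added above differ only in which stdmet column they slice (column 9 here for pressure), and Python will in any case keep only the later of the two identical get_SSI_PRES definitions, since the second binding of the name wins. A shared helper is one way to cut the repetition; a hedged sketch, with the helper name being an assumption rather than part of the patch:

# Hedged refactor sketch: the per-buoy "first non-NaN of the three most
# recent readings" step that all three get_SSI_* methods repeat.
import math


def first_valid_reading(lastFive, column_index):
    # Mirrors the inner loop above: scan the three most recent rows and
    # return the first reading that is not NaN, else None.
    for value in lastFive.iloc[0:3, column_index].tolist():
        if not math.isnan(value):
            return value
    return None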
From be728ab712d46e4a8b952914210840f59c6e4d79 Mon Sep 17 00:00:00 2001
From: Joshua Bauer
Date: Sun, 4 May 2025 22:50:31 -0400
Subject: [PATCH 32/47] Update florida.py

changed return statement such that line breaks occur between each string
---
 pages/florida.py | 27 +++++++++++++++++----------
 1 file changed, 17 insertions(+), 10 deletions(-)

diff --git a/pages/florida.py b/pages/florida.py
index 4f13f6f..7600fbc 100644
--- a/pages/florida.py
+++ b/pages/florida.py
@@ -65,13 +65,20 @@ def display_single_buoy_data(selected_string):
         if 0.81 < SSI:
             storm_strength = f"The expected storm should be an extreme storm"
 
-        return (f'Weather conditions at {buoy_name} buoy:\n'
-                f'Wind Speed: {wind_speed} m/s\n'
-                f'Wave Height: {wave_height} m\n'
-                f'Pressure: {pressure} millibars\n'
-                f'New York Metropolitan Region Weather Data:\n'
-                f'Average Wind Speed: {avg_wind_speed} m/s\n'
-                f'Average Wave Height: {avg_wave_height} m\n'
-                f'Average Pressure: {avg_pressure} millibars\n'
-                f'{storm_strength}'
-                )
+        return [f'Weather conditions at {buoy_name} buoy:',
+                html.Br(),
+                f'Wind Speed: {wind_speed} m/s',
+                html.Br(),
+                f'Wave Height: {wave_height} m',
+                html.Br(),
+                f'Pressure: {pressure} millibars',
+                html.Br(),
+                f'Florida Region Weather Data:',
+                html.Br(),
+                f'Average Wind Speed: {avg_wind_speed} m/s',
+                html.Br(),
+                f'Average Wave Height: {avg_wave_height} m',
+                html.Br(),
+                f'Average Pressure: {avg_pressure} millibars',
+                storm_strength
+                ]

From 8247e0df736ad904470fc30e1559be1e5c053d0a Mon Sep 17 00:00:00 2001
From: Joshua Bauer
Date: Sun, 4 May 2025 22:54:10 -0400
Subject: [PATCH 33/47] Update georgia.py

update return statement with line breaks
---
 pages/georgia.py | 27 +++++++++++++++++----------
 1 file changed, 17 insertions(+), 10 deletions(-)

diff --git a/pages/georgia.py b/pages/georgia.py
index 8d390a9..d0213c6 100644
--- a/pages/georgia.py
+++ b/pages/georgia.py
@@ -63,13 +63,20 @@ def display_single_buoy_data(selected_string):
         if 0.81 < SSI:
             storm_strength = f"The expected storm should be an extreme storm"
 
-        return (f'Weather conditions at {buoy_name} buoy:\n'
-                f'Wind Speed: {wind_speed} m/s\n'
-                f'Wave Height: {wave_height} m\n'
-                f'Pressure: {pressure} millibars\n'
-                f'New York Metropolitan Region Weather Data:\n'
-                f'Average Wind Speed: {avg_wind_speed} m/s\n'
-                f'Average Wave Height: {avg_wave_height} m\n'
-                f'Average Pressure: {avg_pressure} millibars\n'
-                f'{storm_strength}'
-                )
+        return [f'Weather conditions at {buoy_name} buoy:',
+                html.Br(),
+                f'Wind Speed: {wind_speed} m/s',
+                html.Br(),
+                f'Wave Height: {wave_height} m',
+                html.Br(),
+                f'Pressure: {pressure} millibars',
+                html.Br(),
+                f'Georgia Region Weather Data:',
+                html.Br(),
+                f'Average Wind Speed: {avg_wind_speed} m/s',
+                html.Br(),
+                f'Average Wave Height: {avg_wave_height} m',
+                html.Br(),
+                f'Average Pressure: {avg_pressure} millibars',
+                storm_strength
+                ]

From dfd7c55ba12294091b32722a9cfd310df2bdf59b Mon Sep 17 00:00:00 2001
From: Joshua Bauer
Date: Sun, 4 May 2025 22:54:41 -0400
Subject: [PATCH 34/47] Update maine.py

update return statement with line breaks
---
 pages/maine.py | 27 +++++++++++++++++----------
 1 file changed, 17 insertions(+), 10 deletions(-)

diff --git a/pages/maine.py b/pages/maine.py
index e70c834..b51b641 100644
--- a/pages/maine.py
+++ b/pages/maine.py
@@ -63,13 +63,20 @@ def display_single_buoy_data(selected_string):
         if 0.81 < SSI:
             storm_strength = f"The expected storm should be an extreme storm"
 
-        return (f'Weather conditions at {buoy_name} buoy:\n'
-                f'Wind Speed: {wind_speed} m/s\n'
-                f'Wave Height: {wave_height} m\n'
-                f'Pressure: {pressure} millibars\n'
-                f'New York Metropolitan Region Weather Data:\n'
-                f'Average Wind Speed: {avg_wind_speed} m/s\n'
-                f'Average Wave Height: {avg_wave_height} m\n'
-                f'Average Pressure: {avg_pressure} millibars\n'
-                f'{storm_strength}'
-                )
+        return [f'Weather conditions at {buoy_name} buoy:',
+                html.Br(),
+                f'Wind Speed: {wind_speed} m/s',
+                html.Br(),
+                f'Wave Height: {wave_height} m',
+                html.Br(),
+                f'Pressure: {pressure} millibars',
+                html.Br(),
+                f'Maine Region Weather Data:',
+                html.Br(),
+                f'Average Wind Speed: {avg_wind_speed} m/s',
+                html.Br(),
+                f'Average Wave Height: {avg_wave_height} m',
+                html.Br(),
+                f'Average Pressure: {avg_pressure} millibars',
+                storm_strength
+                ]
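All of these page callbacks share the same SSI expression, so a quick worked example may help; the readings below are illustrative, not from any buoy:

# Worked SSI example with illustrative mid-storm readings.
avg_wind_speed = 20.0   # m/s
avg_pressure = 990.0    # millibars
avg_wave_height = 4.0   # m

SSI = (0.5 * ((avg_wind_speed / 60) ** 2) +
       0.3 * (930 / avg_pressure) +
       0.2 * avg_wave_height / 12)
print(round(SSI, 3))    # 0.5*0.111 + 0.3*0.939 + 0.067, about 0.404

Note that the banding in these callbacks leaves small gaps (0.2 to 0.21, 0.4 to 0.41, 0.6 to 0.61, 0.8 to 0.81): the 0.404 above matches no branch, which would leave storm_strength unassigned and raise a NameError at the return. A chained if/elif with closed boundaries (for example `elif SSI <= 0.4:`) would close those gaps.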
From d639167a8753b3cc0326c6503cbb8f3567dba39c Mon Sep 17 00:00:00 2001
From: Joshua Bauer
Date: Sun, 4 May 2025 23:01:11 -0400
Subject: [PATCH 35/47] Update maryland.py

update return statement with correct line breaks
---
 pages/maryland.py | 27 +++++++++++++++++----------
 1 file changed, 17 insertions(+), 10 deletions(-)

diff --git a/pages/maryland.py b/pages/maryland.py
index 6c29f87..8a09c15 100644
--- a/pages/maryland.py
+++ b/pages/maryland.py
@@ -62,13 +62,20 @@ def display_single_buoy_data(selected_string):
         if 0.81 < SSI:
             storm_strength = f"The expected storm should be an extreme storm"
 
-        return (f'Weather conditions at {buoy_name} buoy:\n'
-                f'Wind Speed: {wind_speed} m/s\n'
-                f'Wave Height: {wave_height} m\n'
-                f'Pressure: {pressure} millibars\n'
-                f'New York Metropolitan Region Weather Data:\n'
-                f'Average Wind Speed: {avg_wind_speed} m/s\n'
-                f'Average Wave Height: {avg_wave_height} m\n'
-                f'Average Pressure: {avg_pressure} millibars\n'
-                f'{storm_strength}'
-                )
+        return [f'Weather conditions at {buoy_name} buoy:',
+                html.Br(),
+                f'Wind Speed: {wind_speed} m/s',
+                html.Br(),
+                f'Wave Height: {wave_height} m',
+                html.Br(),
+                f'Pressure: {pressure} millibars',
+                html.Br(),
+                f'Maryland Region Weather Data:',
+                html.Br(),
+                f'Average Wind Speed: {avg_wind_speed} m/s',
+                html.Br(),
+                f'Average Wave Height: {avg_wave_height} m',
+                html.Br(),
+                f'Average Pressure: {avg_pressure} millibars',
+                storm_strength
+                ]

From 596f86d7ea907c1012864959f2c3e82d6ace2370 Mon Sep 17 00:00:00 2001
From: Joshua Bauer
Date: Sun, 4 May 2025 23:01:46 -0400
Subject: [PATCH 36/47] Update massachusetts.py

update return statement with correct line breaks for html
---
 pages/massachusetts.py | 27 +++++++++++++++++----------
 1 file changed, 17 insertions(+), 10 deletions(-)

diff --git a/pages/massachusetts.py b/pages/massachusetts.py
index f37a77a..5a8894d 100644
--- a/pages/massachusetts.py
+++ b/pages/massachusetts.py
@@ -62,13 +62,20 @@ def display_single_buoy_data(selected_string):
         if 0.81 < SSI:
             storm_strength = f"The expected storm should be an extreme storm"
 
-        return (f'Weather conditions at {buoy_name} buoy:\n'
-                f'Wind Speed: {wind_speed} m/s\n'
-                f'Wave Height: {wave_height} m\n'
-                f'Pressure: {pressure} millibars\n'
-                f'New York Metropolitan Region Weather Data:\n'
-                f'Average Wind Speed: {avg_wind_speed} m/s\n'
-                f'Average Wave Height: {avg_wave_height} m\n'
-                f'Average Pressure: {avg_pressure} millibars\n'
-                f'{storm_strength}'
-                )
+        return [f'Weather conditions at {buoy_name} buoy:',
+                html.Br(),
+                f'Wind Speed: {wind_speed} m/s',
+                html.Br(),
+                f'Wave Height: {wave_height} m',
+                html.Br(),
+                f'Pressure: {pressure} millibars',
+                html.Br(),
+                f'Massachusetts Region Weather Data:',
+                html.Br(),
+                f'Average Wind Speed: {avg_wind_speed} m/s',
+                html.Br(),
+                f'Average Wave Height: {avg_wave_height} m',
+                html.Br(),
+                f'Average Pressure: {avg_pressure} millibars',
+                storm_strength
+                ]
From 609b5b68afb24f1f0722f6cabd199a7bca0c94f0 Mon Sep 17 00:00:00 2001
From: Joshua Bauer
Date: Sun, 4 May 2025 23:03:00 -0400
Subject: [PATCH 38/47] Update north_carolina.py

update return statement with correct line breaks for html

---
 pages/north_carolina.py | 27 +++++++++++++++++----------
 1 file changed, 17 insertions(+), 10 deletions(-)

diff --git a/pages/north_carolina.py b/pages/north_carolina.py
index 0e6adee..2bf282f 100644
--- a/pages/north_carolina.py
+++ b/pages/north_carolina.py
@@ -62,13 +62,20 @@ def display_single_buoy_data(selected_string):
         if 0.81 < SSI:
             storm_strength = f"The expected storm should be an extreme storm"

-        return (f'Weather conditions at {buoy_name} buoy:\n'
-                f'Wind Speed: {wind_speed} m/s\n'
-                f'Wave Height: {wave_height} m\n'
-                f'Pressure: {pressure} millibars\n'
-                f'New York Metropolitan Region Weather Data:\n'
-                f'Average Wind Speed: {avg_wind_speed} m/s\n'
-                f'Average Wave Height: {avg_wave_height} m\n'
-                f'Average Pressure: {avg_pressure} millibars\n'
-                f'{storm_strength}'
-                )
+        return [f'Weather conditions at {buoy_name} buoy:',
+                html.Br(),
+                f'Wind Speed: {wind_speed} m/s',
+                html.Br(),
+                f'Wave Height: {wave_height} m',
+                html.Br(),
+                f'Pressure: {pressure} millibars',
+                html.Br(),
+                f'New York Metropolitan Region Weather Data:',
+                html.Br(),
+                f'Average Wind Speed: {avg_wind_speed} m/s',
+                html.Br(),
+                f'Average Wave Height: {avg_wave_height} m',
+                html.Br(),
+                f'Average Pressure: {avg_pressure} millibars',
+                storm_strength
+                ]

From 845e6e66a7d5edc5041b7276ed3b55e54b6c7f14 Mon Sep 17 00:00:00 2001
From: Joshua Bauer
Date: Sun, 4 May 2025 23:06:04 -0400
Subject: [PATCH 39/47] Update virginia.py

update return statement with correct line breaks for html

---
 pages/virginia.py | 27 +++++++++++++++++----------
 1 file changed, 17 insertions(+), 10 deletions(-)

diff --git a/pages/virginia.py b/pages/virginia.py
index 6b0cd1c..899c379 100644
--- a/pages/virginia.py
+++ b/pages/virginia.py
@@ -63,13 +63,20 @@ def display_single_buoy_data(selected_string):
         if 0.81 < SSI:
             storm_strength = f"The expected storm should be an extreme storm"

-        return (f'Weather conditions at {buoy_name} buoy:\n'
-                f'Wind Speed: {wind_speed} m/s\n'
-                f'Wave Height: {wave_height} m\n'
-                f'Pressure: {pressure} millibars\n'
-                f'New York Metropolitan Region Weather Data:\n'
-                f'Average Wind Speed: {avg_wind_speed} m/s\n'
-                f'Average Wave Height: {avg_wave_height} m\n'
-                f'Average Pressure: {avg_pressure} millibars\n'
-                f'{storm_strength}'
-                )
+        return [f'Weather conditions at {buoy_name} buoy:',
+                html.Br(),
+                f'Wind Speed: {wind_speed} m/s',
+                html.Br(),
+                f'Wave Height: {wave_height} m',
+                html.Br(),
+                f'Pressure: {pressure} millibars',
+                html.Br(),
+                f'New York Metropolitan Region Weather Data:',
+                html.Br(),
+                f'Average Wind Speed: {avg_wind_speed} m/s',
+                html.Br(),
+                f'Average Wave Height: {avg_wave_height} m',
+                html.Br(),
+                f'Average Pressure: {avg_pressure} millibars',
+                storm_strength
+                ]
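For reference, every page above computes the same Storm Strength Index: wind contributes 0.5 of the score (average speed normalized by 60 m/s, then squared), pressure contributes 0.3 (a 930 mb reference divided by the observed average, so lower pressure raises the score), and wave height contributes 0.2 (normalized by 12 m). A worked example with assumed readings (the numbers are illustrative, not real buoy data):

# Worked example of the SSI formula the state pages share.
# The inputs are assumptions for illustration, not buoy readings.
avg_wind_speed = 20.0    # m/s
avg_pressure = 1000.0    # millibars
avg_wave_height = 3.0    # m

SSI = (0.5 * ((avg_wind_speed / 60) ** 2) +   # 0.5 * 0.1111 ~= 0.0556
       0.3 * (930 / avg_pressure) +           # 0.3 * 0.93    = 0.2790
       0.2 * avg_wave_height / 12)            # 0.2 * 0.25    = 0.0500

print(round(SSI, 4))  # 0.3846 -> a "moderate storm" under the page thresholds

Note that the if-chain in these pages leaves storm_strength unassigned when SSI lands exactly on a boundary or in the 0.20-0.21 gap; an if/elif/else chain would close those gaps.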
From e95be54857894f879bafe3a68b7707b9a8253f49 Mon Sep 17 00:00:00 2001
From: Joshua Bauer
Date: Sun, 4 May 2025 23:13:56 -0400
Subject: [PATCH 40/47] Update south_carolina.py

finish callback function for south carolina and update return statement
with correct line breaks for html

---
 pages/south_carolina.py | 75 ++++++++++++++++++++++++++++++++++-------
 1 file changed, 63 insertions(+), 12 deletions(-)

diff --git a/pages/south_carolina.py b/pages/south_carolina.py
index ae304c5..ff91191 100644
--- a/pages/south_carolina.py
+++ b/pages/south_carolina.py
@@ -1,31 +1,82 @@
 import dash, gatheringInfo
-from dash import html, dcc, callback, Input, Output, dash_table
+from dash import html, dcc, callback, Input, Output

 dash.register_page(__name__)

+options = ['none','Charleston','Winyah Bay Reserve','Springmaid Pier','Bennett\'s Point','Capers Nearshore','Fort Johnson']
+
 # dropdown menu of buoys in selected region
 layout = html.Div([
     html.Div(className='row', children='South Carolina Region Buoy Selection',
              style={'textAlign': 'left', 'color': 'black', 'fontSize': 24}),
-    dcc.Dropdown(['none','Charleston','Winyah Bay Reserve','Springmaid Pier','Bennett\'s Point','Capers Nearshore','Fort Johnson'],
-                 'none', id='sc_dropdown'),
+    dcc.Dropdown(options,'none', id='sc_dropdown'),
     html.Br(),
     html.Div(id='sc_output')
 ])


 @callback(
-    Output(component_id='sc_output', component_property='children'),
-    Input(component_id='sc_dropdown', component_property='value')
+    Output(component_id='mass_output', component_property='children'),
+    Input(component_id='mass_dropdown', component_property='value')
 )
-def display_data(selected_string):
-    """"""
+def display_single_buoy_data(selected_string):
+    """
+    :param selected_string: this is the selection from the dropdown menu
+    :return: nothing is displayed if the selection is 'none' or displays the weather data from the selection
+    """
     id_list = ['CHTS1','WYSS1','MROS1','ACXS1','41029','FMNS1']
-    if selected_string == "none":
+
+    # display individual buoy data
+    if selected_string == 'none':
         # display nothing
-        return ""
+        return ''
     else:
-        # convert selected_string to station ID by selecting from a dictionary
+        # convert selected_string to station ID using id_list
+        selection_index = options.index(selected_string)
+        selected_station_id = id_list[selection_index - 1]
+        single_buoy = gatheringInfo.Buoy(selected_station_id)
+
+        buoy_name = single_buoy.getNAME()
+        wind_speed = single_buoy.getWSPD()
+        wave_height = single_buoy.getWVHT()
+        pressure = single_buoy.getPRES()
+
+        region = gatheringInfo.BB(id_list)
+
+        avg_wind_speed = region.get_SSI_WSPD()
+        avg_wave_height = region.get_SSI_WVHT()
+        avg_pressure = region.get_SSI_PRES()
+
+        # determines storm strength
+        SSI = (0.5 * ((avg_wind_speed / 60) ** 2) +
+               0.3 * (930 / avg_pressure) +
+               0.2 * avg_wave_height / 12)
+
+        if SSI < 0.2:
+            storm_strength = f"The expected storm should be a minimal storm"
+        if 0.21 < SSI < 0.4:
+            storm_strength = f"The expected storm should be a moderate storm"
+        if 0.41 < SSI < 0.6:
+            storm_strength = f"The expected storm should be a strong storm"
+        if 0.61 < SSI < 0.8:
+            storm_strength = f"The expected storm should be a severe storm"
+        if 0.81 < SSI:
+            storm_strength = f"The expected storm should be an extreme storm"

-        # dash_table is set up through a dictionary
-        table = dash_table.DataTable({'Data Type': 'Wind Speed (m/s)', 'Value': gatheringInfo.gatherWindSpeed(layout[1])})
\ No newline at end of file
+        return [f'Weather conditions at {buoy_name} buoy:',
+                html.Br(),
+                f'Wind Speed: {wind_speed} m/s',
+                html.Br(),
+                f'Wave Height: {wave_height} m',
+                html.Br(),
+                f'Pressure: {pressure} millibars',
+                html.Br(),
+                f'New York Metropolitan Region Weather Data:',
+                html.Br(),
+                f'Average Wind Speed: {avg_wind_speed} m/s',
+                html.Br(),
+                f'Average Wave Height: {avg_wave_height} m',
+                html.Br(),
+                f'Average Pressure: {avg_pressure} millibars',
+                storm_strength
+                ]
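The callback above recovers a station id from the dropdown label with options.index(selected_string) - 1, which works because options carries a leading 'none' entry that id_list does not. The mapping in isolation, with the lists copied from the patch:

# The label-to-station-id mapping used in south_carolina.py above.
options = ['none', 'Charleston', 'Winyah Bay Reserve', 'Springmaid Pier',
           "Bennett's Point", 'Capers Nearshore', 'Fort Johnson']
id_list = ['CHTS1', 'WYSS1', 'MROS1', 'ACXS1', '41029', 'FMNS1']

selected_string = 'Springmaid Pier'               # example dropdown value
selection_index = options.index(selected_string)  # 3
selected_station_id = id_list[selection_index - 1]
print(selected_station_id)                        # MROS1

A dict built once with dict(zip(options[1:], id_list)) would give the same lookup without the off-by-one bookkeeping.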
From eb5941b3785a67430818edbbeaa1bb0c69910d91 Mon Sep 17 00:00:00 2001
From: Joshua Bauer
Date: Mon, 5 May 2025 01:35:05 -0400
Subject: [PATCH 41/47] Update south_carolina.py

fixed the duplicate mass_output and mass_dropdown ids by renaming them
to sc_output and sc_dropdown

---
 pages/south_carolina.py | 4 ++--
 1 file changed, 2 insertions(+), 2 deletions(-)

diff --git a/pages/south_carolina.py b/pages/south_carolina.py
index ff91191..b247a0b 100644
--- a/pages/south_carolina.py
+++ b/pages/south_carolina.py
@@ -16,8 +16,8 @@


 @callback(
-    Output(component_id='mass_output', component_property='children'),
-    Input(component_id='mass_dropdown', component_property='value')
+    Output(component_id='sc_output', component_property='children'),
+    Input(component_id='sc_dropdown', component_property='value')
 )
 def display_single_buoy_data(selected_string):
     """

From 131148e44b0569342e69dae01308c45634db580c Mon Sep 17 00:00:00 2001
From: Joshua Bauer
Date: Mon, 5 May 2025 03:05:20 -0400
Subject: [PATCH 42/47] adding another option to /pages

Adds a page for a user-supplied custom location. The callback still
needs a full definition using a radial search for nearby buoys.

---
 pages/custom.py | 32 ++++++++++++++++++++++++++++++++
 1 file changed, 32 insertions(+)
 create mode 100644 pages/custom.py

diff --git a/pages/custom.py b/pages/custom.py
new file mode 100644
index 0000000..6dfac77
--- /dev/null
+++ b/pages/custom.py
@@ -0,0 +1,32 @@
+import dash
+from dash import html, dcc, callback, Input, Output
+
+dash.register_page(__name__)
+
+# layout for the user inserting a custom
+layout = html.Div([
+    html.H5("Input a custom location to access weather data and SSI"),
+    html.Div([
+        dcc.Input(id='custom-input', value='none', type='text')
+    ]),
+    html.Br(),
+    html.Div(id='custom-output')
+])
+
+# callback decorator to display weather data based on input, using radial search
+@callback(
+    Output(component_id='custom-output', component_property='children'),
+    Input(component_id='custom-input', component_property='value')
+)
+def custom_data_display(custom_string):
+    """
+
+    :param custom_string: custom string user inputs in GUI
+    :return: strings of data separated by html line breaks
+    """
+
+    return [
+        f'',
+        html.Br(),
+        f''
+    ]
\ No newline at end of file

From 28b8532d9baa3b216fe9ac3eb856644e1e18877c Mon Sep 17 00:00:00 2001
From: Joshua Bauer
Date: Mon, 5 May 2025 10:29:00 -0400
Subject: [PATCH 43/47] Update custom.py

latitude and longitude input gui

---
 pages/custom.py | 10 +++++++---
 1 file changed, 7 insertions(+), 3 deletions(-)

diff --git a/pages/custom.py b/pages/custom.py
index 6dfac77..12f96bc 100644
--- a/pages/custom.py
+++ b/pages/custom.py
@@ -7,10 +7,14 @@
 layout = html.Div([
     html.H5("Input a custom location to access weather data and SSI"),
     html.Div([
-        dcc.Input(id='custom-input', value='none', type='text')
+        html.H6('Latitude: ')
+        dcc.Input(id='lat-custom-input', value='none', type='text'),
+        html.H6('Longitude: '),
+        dcc.Input(id='long-custom-input', value='none', type='text'),
+        id='custom-input'=['lat-custom-input','long-custom-input']
     ]),
     html.Br(),
-    html.Div(id='custom-output')
+    html.Div(id='lat-custom-output')
 ])

 # callback decorator to display weather data based on input, using radial search
 @callback(
@@ -29,4 +33,4 @@ def custom_data_display(custom_string):
         f'',
         html.Br(),
         f''
-    ]
\ No newline at end of file
+    ]
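Two cautions about custom.py at this point in the series. The patch 43 layout as posted does not parse: html.H6('Latitude: ') is missing its trailing comma, and id='custom-input'=['lat-custom-input','long-custom-input'] is not valid Python (patch 44 below removes both). Separately, the output Div is renamed to 'lat-custom-output' while the callback keeps writing to 'custom-output', a mismatch no later patch reverts, so the returned children would never render. A minimal sketch of a consistent pairing, with assumed ids and a trivial callback body (Dash 2.x):

# Sketch of a layout/callback id pairing that renders: the Div id in
# the layout must match the Output id in the callback.
from dash import html, dcc, callback, Input, Output

layout = html.Div([
    html.H6('Latitude: '),
    dcc.Input(id='lat-custom-input', type='text'),
    html.H6('Longitude: '),
    dcc.Input(id='long-custom-input', type='text'),
    html.Br(),
    html.Div(id='custom-output'),   # matches the Output below
])

@callback(
    Output('custom-output', 'children'),
    Input('lat-custom-input', 'value'),
    Input('long-custom-input', 'value'),
)
def custom_data_display(cust_lat, cust_long):
    # values arrive positionally, in the order the Inputs are declared
    return f'lat={cust_lat}, long={cust_long}'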
From cccf3723d04d51a9130dde9358dd857c14310819 Mon Sep 17 00:00:00 2001
From: Joshua Bauer
Date: Mon, 5 May 2025 11:04:51 -0400
Subject: [PATCH 44/47] finished custom location page

Double-check the correct usage of the
gatheringInfo.Add_New_Location(lat, long) function.

---
 pages/custom.py | 54 +++++++++++++++++++++++++++++++++++++-------
 1 file changed, 41 insertions(+), 13 deletions(-)

diff --git a/pages/custom.py b/pages/custom.py
index 12f96bc..88f8e0e 100644
--- a/pages/custom.py
+++ b/pages/custom.py
@@ -1,4 +1,4 @@
-import dash
+import dash, gatheringInfo
 from dash import html, dcc, callback, Input, Output

 dash.register_page(__name__)
@@ -7,11 +7,10 @@
 layout = html.Div([
     html.H5("Input a custom location to access weather data and SSI"),
     html.Div([
-        html.H6('Latitude: ')
+        html.H6('Latitude: '),
         dcc.Input(id='lat-custom-input', value='none', type='text'),
         html.H6('Longitude: '),
-        dcc.Input(id='long-custom-input', value='none', type='text'),
-        id='custom-input'=['lat-custom-input','long-custom-input']
+        dcc.Input(id='long-custom-input', value='none', type='text')
     ]),
     html.Br(),
     html.Div(id='lat-custom-output')
@@ -20,17 +19,46 @@
 # callback decorator to display weather data based on input, using radial search
 @callback(
     Output(component_id='custom-output', component_property='children'),
-    Input(component_id='custom-input', component_property='value')
+    Input(component_id='lat-custom-input', component_property='value'),
+    Input(component_id='long-custom-input',component_property='value')
 )
-def custom_data_display(custom_string):
+def custom_data_display(cust_lat, cust_long):
     """
-
-    :param custom_string: custom string user inputs in GUI
+    :param cust_lat: string from first input GUI
+    :param cust_long: string from second input GUI
     :return: strings of data separated by html line breaks
     """
-
-    return [
-        f'',
-        html.Br(),
-        f''
-    ]
+
+    # define new object through Add_New_Location(lat, long) function from gatheringInfo class file
+    new_loc_buoy = gatheringInfo.Add_New_Location(float(cust_lat), float(cust_long))
+
+    # obtain average weather data of the new, user-defined location
+    avg_wind_speed = new_loc_buoy.get_SSI_WSPD()
+    avg_wave_height = new_loc_buoy.get_SSI_WVHT()
+    avg_pressure = new_loc_buoy.get_SSI_PRES()
+
+    # determines storm strength
+    SSI = (0.5 * ((avg_wind_speed / 60) ** 2) +
+           0.3 * (930 / avg_pressure) +
+           0.2 * avg_wave_height / 12)
+
+    if SSI < 0.2:
+        storm_strength = f"The expected storm should be a minimal storm"
+    if 0.21 < SSI < 0.4:
+        storm_strength = f"The expected storm should be a moderate storm"
+    if 0.41 < SSI < 0.6:
+        storm_strength = f"The expected storm should be a strong storm"
+    if 0.61 < SSI < 0.8:
+        storm_strength = f"The expected storm should be a severe storm"
+    if 0.81 < SSI:
+        storm_strength = f"The expected storm should be an extreme storm"
+
+    return [f'Weather conditions at location: ',
+            html.Br(),
+            f'Average Wind Speed: {avg_wind_speed} m/s',
+            html.Br(),
+            f'Average Wave Height: {avg_wave_height} m',
+            html.Br(),
+            f'Average Pressure: {avg_pressure} millibars',
+            storm_strength
+            ]
\ No newline at end of file
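One fragility in the callback above: float(cust_lat) raises ValueError whenever a field is empty or mid-edit, and the callback fails (the debounce added in patch 47 reduces those firings but does not remove them). A hedged sketch of a guard, assuming the repository's gatheringInfo module and the Add_New_Location(lat, long) interface from patch 44:

# Sketch of validating both inputs before Add_New_Location runs.
# Assumes gatheringInfo.Add_New_Location(lat, long) as used in patch 44.
import gatheringInfo

def parse_coordinate(text, low, high):
    """Return the field as a float, or None when empty, non-numeric, or out of range."""
    try:
        value = float(text)
    except (TypeError, ValueError):  # covers None, '', and stray characters
        return None
    return value if low <= value <= high else None

def custom_data_display(cust_lat, cust_long):
    lat = parse_coordinate(cust_lat, -90.0, 90.0)
    long = parse_coordinate(cust_long, -180.0, 180.0)
    if lat is None or long is None:
        # shown until both fields hold plausible coordinates
        return 'Enter a latitude (-90 to 90) and a longitude (-180 to 180).'
    new_loc_buoy = gatheringInfo.Add_New_Location(lat, long)
    ...  # SSI computation and return list as in patch 44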
From c4f0e85745e61383b1fa7ff183e53eb25f1cdfdb Mon Sep 17 00:00:00 2001
From: Joshua Bauer
Date: Mon, 5 May 2025 12:57:12 -0400
Subject: [PATCH 45/47] Update custom.py

---
 pages/custom.py | 6 +++---
 1 file changed, 3 insertions(+), 3 deletions(-)

diff --git a/pages/custom.py b/pages/custom.py
index 88f8e0e..5cc610d 100644
--- a/pages/custom.py
+++ b/pages/custom.py
@@ -7,9 +7,9 @@
 layout = html.Div([
     html.H5("Input a custom location to access weather data and SSI"),
     html.Div([
-        html.H6('Latitude: '),
+        html.H6('Latitude (in ° ' " format): '),
         dcc.Input(id='lat-custom-input', value='none', type='text'),
-        html.H6('Longitude: '),
+        html.H6('Longitude (in ° ' " format): '),
         dcc.Input(id='long-custom-input', value='none', type='text')
     ]),
     html.Br(),
@@ -61,4 +61,4 @@ def custom_data_display(cust_lat, cust_long):
             html.Br(),
             f'Average Pressure: {avg_pressure} millibars',
             storm_strength
-            ]
\ No newline at end of file
+            ]

From 9deb10eae6bde369932ee6a96234d983e50d3d15 Mon Sep 17 00:00:00 2001
From: Joshua Bauer
Date: Mon, 5 May 2025 12:59:07 -0400
Subject: [PATCH 46/47] Update custom.py

---
 pages/custom.py | 4 ++--
 1 file changed, 2 insertions(+), 2 deletions(-)

diff --git a/pages/custom.py b/pages/custom.py
index 5cc610d..5f2292d 100644
--- a/pages/custom.py
+++ b/pages/custom.py
@@ -7,9 +7,9 @@
 layout = html.Div([
     html.H5("Input a custom location to access weather data and SSI"),
     html.Div([
-        html.H6('Latitude (in ° ' " format): '),
+        html.H6('Latitude (in °): '),
         dcc.Input(id='lat-custom-input', value='none', type='text'),
-        html.H6('Longitude (in ° ' " format): '),
+        html.H6('Longitude (in °): '),
         dcc.Input(id='long-custom-input', value='none', type='text')
     ]),
     html.Br(),

From 066729fda0bb39c0cf8ab78585566b3214d5e1c2 Mon Sep 17 00:00:00 2001
From: Joshua Bauer
Date: Mon, 5 May 2025 19:43:00 -0400
Subject: [PATCH 47/47] Update custom.py

---
 pages/custom.py | 4 ++--
 1 file changed, 2 insertions(+), 2 deletions(-)

diff --git a/pages/custom.py b/pages/custom.py
index 5f2292d..f4a45df 100644
--- a/pages/custom.py
+++ b/pages/custom.py
@@ -8,9 +8,9 @@
     html.H5("Input a custom location to access weather data and SSI"),
     html.Div([
         html.H6('Latitude (in °): '),
-        dcc.Input(id='lat-custom-input', value='none', type='text'),
+        dcc.Input(id='lat-custom-input', type='text', placeholder=''),
         html.H6('Longitude (in °): '),
-        dcc.Input(id='long-custom-input', value='none', type='text')
+        dcc.Input(id='long-custom-input', type='text', placeholder='', debounce=True)
     ]),
     html.Br(),
     html.Div(id='lat-custom-output')