diff --git a/app_welcome.py b/app_welcome.py
new file mode 100644
index 0000000..0c73b94
--- /dev/null
+++ b/app_welcome.py
@@ -0,0 +1,39 @@
+# Author: Joshua Bauer
+# Date: May 1, 2025
+# Description: updating multipage GUI with a home page
+
+# Import packages
+import dash
+from dash import Dash, html, dcc
+
+# Initialize the app and css
+external_stylesheets = ['https://codepen.io/chriddyp/pen/bWLwgP.css']
+app = dash.Dash(__name__, use_pages=True, external_stylesheets=external_stylesheets)
+
+# App layout: the app title, a secondary title prompting the user to choose a region, and buttons that route to each region's dropdown page
+app.layout = html.Div([
+ html.H1(className='row', children='Welcome to Weather Risk Assessment!', style={'textAlign': 'center', 'color': 'blue', 'fontSize': 30}),
+ html.Div([
+ html.Div([
+ html.H3(className='row', children='How To Use:',style={'textAlign': 'left', 'color': 'black', 'fontSize': 24}),
+ html.Div([
+ html.H4(className='row',children='1. Select a region of the eastern United States using the buttons',style={'textAlign': 'left', 'color': 'black', 'fontSize': 20}),
+ html.H4(className='row',children='2. Select a buoy in your selected region',style={'textAlign': 'left', 'color': 'black', 'fontSize': 20}),
+ html.H4(className='row',children='3. Average data from the buoy and the selected region will be displayed with a predicted storm strength',style={'textAlign': 'left', 'color': 'black', 'fontSize': 20})])
+ ]),
+
+ html.H2(className='row', children='Choose a Region in the Eastern United States:', style={'textAlign': 'left', 'color': 'black', 'fontSize': 24}),
+ html.Div([
+ dcc.Link(html.Button(page['name']), href=page['path'], style={'width': '5px'}) for page in dash.page_registry.values()
+
+ ])
+ ]),
+
+ html.Br(),
+ dash.page_container
+])
+
+#print(dash.page_registry.values())
+
+if __name__ == '__main__':
+ app.run(debug=True)
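+
+# With use_pages=True above, Dash discovers page modules in a pages/ directory;
+# each page registers itself via dash.register_page and then appears in
+# dash.page_registry, which the button loop above iterates. A minimal sketch
+# (hypothetical pages/new_york.py; the module name, page name, and path are
+# assumptions, not files from this project):
+#
+#     import dash
+#     from dash import html
+#
+#     dash.register_page(__name__, name='New York', path='/new-york')
+#     layout = html.Div([html.H2('New York Buoys')])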
diff --git a/gatheringInfo.py b/gatheringInfo.py
index 385167d..fdf5241 100644
--- a/gatheringInfo.py
+++ b/gatheringInfo.py
@@ -1,106 +1,321 @@
from ndbc_api import NdbcApi
import datetime
+import pandas as pd
+import math
+# Initialize the NDBC API so we can gather information from buoys
api = NdbcApi()
+# The NDBC reports in UTC, so the request window is built in UTC to capture the most recent data.
+# Initially the window covers readings from the last hour.
+utc_now = datetime.datetime.now(tz=datetime.UTC)
+given_endTime = utc_now.strftime("%Y-%m-%d %H:%M")
+start = utc_now - datetime.timedelta(hours=1)
+given_startTime = start.strftime("%Y-%m-%d %H:%M")
+
+
+# Formatting for debugging & testing
+pd.set_option('display.max_rows', None)
+pd.set_option('display.max_columns', None)
+pd.set_option('display.width', None)
+pd.set_option('display.max_colwidth', None)
+# ______________________________________________________________________________________
+
+'''
+    Debugging notes:
+
+    print(lastFive.iloc[0:3, 3])
+    ^ Prints the first 3 values for WSPD. As long as it runs after lastFive.reset_index(), the frame is sorted most-recent-first.
+
+    print(lastFive)
+    ^ Prints the whole dataframe, sorted the same way, but includes every column.
+
+    NaN appears wherever a value is missing. Use math.isnan(value) to check for it. NaN is a float!
+
+    Column positions after reset_index(), as used in the code below: WSPD == 3, WVHT == 5, PRES == 9
+'''
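+
+
+# Sketch of a name-based alternative to the positional iloc lookups used below:
+# selecting stdmet columns by name ('WSPD', 'WVHT', 'PRES') keeps the math
+# correct even if the column order ever changes. Illustrative only; the classes
+# below still use positional indexing.
+def recent_values(df, col, n=3):
+    # Most-recent-first, mirroring the sorting done in the class methods
+    recent = df.reset_index().sort_values(by='timestamp', ascending=False)
+    return recent[col].head(n).tolist()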
+
+
+# This class holds one location per object, along with that location's nearest buoys
+class BB:
+    # __init__ requires a station ID list; constructing a BB with no list raises a TypeError
+ def __init__(self, id_list):
+ self.__id_list = id_list
+
+ def get_station_ID_list(self):
+ return self.__id_list
+
+    # This function returns the average wind speed (WSPD) across the location's buoy list
+    def get_SSI_WSPD(self):
+        # Counters for how many buoys contributed a valid reading, plus the running sum
+        WSPD_counter = 0
+        sum_of_WSPD = 0.0
+ # Iterate through the list of buoys provided
+ for buoy in self.__id_list:
+ # For tracking purposes
+ print(buoy)
+ # API Call for {buoy} in station list
+ BuoyWspd = api.get_data(
+ station_id=buoy,
+ mode='stdmet',
+ start_time=given_startTime,
+ end_time=given_endTime,
+ as_df=True
+ )
+ # For tracking progress
+ print(f"Finished API call for buoy: {buoy}")
+
+            # Keep the last 5 rows of the resulting DataFrame (the most recent readings)
+            lastFive = BuoyWspd.tail()
+
+ # Remove Multi Indexing so we can sort by timestamp
+ lastFive = lastFive.reset_index()
+ lastFive = lastFive.sort_values(by='timestamp', ascending=False)
+
+            # Take the station's three most recent readings; if none is usable,
+            # this buoy's reading is skipped below
+            lastFive = lastFive.iloc[0:3, 3]
+ WSPD_values = lastFive.tolist()
+            # Use the first non-NaN reading of the three most recent
+            for value in WSPD_values:
+                # Skip NaN readings (NaN is a float and would break the average)
+                if not math.isnan(value):
+                    sum_of_WSPD += value
+                    WSPD_counter += 1
+                    break
+
+        # Guard against the case where no buoy reported a usable value
+        if WSPD_counter == 0:
+            return float('nan')
+        return sum_of_WSPD / WSPD_counter
+
+    # This function returns the average wave height (WVHT) across the location's buoy list
+    def get_SSI_WVHT(self):
+        # Counters for how many buoys contributed a valid reading, plus the running sum
+        WVHT_counter = 0
+        sum_of_WVHT = 0.0
+ # Iterate through the list of buoys provided
+ for buoy in self.__id_list:
+ # For tracking purposes
+ print(buoy)
+ # API Call for {buoy} in station list
+ BuoyWvht = api.get_data(
+ station_id=buoy,
+ mode='stdmet',
+ start_time=given_startTime,
+ end_time=given_endTime,
+ as_df=True
+ )
+ # For tracking progress
+ print(f"Finished API call for buoy: {buoy}")
+
+            # Keep the last 5 rows of the resulting DataFrame (the most recent readings)
+            lastFive = BuoyWvht.tail()
+
+ # Remove Multi Indexing so we can sort by timestamp
+ lastFive = lastFive.reset_index()
+ lastFive = lastFive.sort_values(by='timestamp', ascending=False)
+
+            # Take the station's three most recent readings; if none is usable,
+            # this buoy's reading is skipped below
+            lastFive = lastFive.iloc[0:3, 5]
+ WVHT_values = lastFive.tolist()
+            # Use the first non-NaN reading of the three most recent
+            for value in WVHT_values:
+                # Skip NaN readings (NaN is a float and would break the average)
+                if not math.isnan(value):
+                    sum_of_WVHT += value
+                    WVHT_counter += 1
+                    break
+
+        # Guard against the case where no buoy reported a usable value
+        if WVHT_counter == 0:
+            return float('nan')
+        return sum_of_WVHT / WVHT_counter
+
+    # This function returns the average barometric pressure (PRES) across the location's buoy list
+    def get_SSI_PRES(self):
+        # Counters for how many buoys contributed a valid reading, plus the running sum
+        PRES_counter = 0
+        sum_of_PRES = 0.0
+ # Iterate through the list of buoys provided
+ for buoy in self.__id_list:
+ # For tracking purposes
+ print(buoy)
+ # API Call for {buoy} in station list
+ BuoyPres = api.get_data(
+ station_id=buoy,
+ mode='stdmet',
+ start_time=given_startTime,
+ end_time=given_endTime,
+ as_df=True
+ )
+ # For tracking progress
+ print(f"Finished API call for buoy: {buoy}")
+
+            # Keep the last 5 rows of the resulting DataFrame (the most recent readings)
+            lastFive = BuoyPres.tail()
+
+ # Remove Multi Indexing so we can sort by timestamp
+ lastFive = lastFive.reset_index()
+ lastFive = lastFive.sort_values(by='timestamp', ascending=False)
+
+            # Take the station's three most recent readings; if none is usable,
+            # this buoy's reading is skipped below
+            lastFive = lastFive.iloc[0:3, 9]
+
+            PRES_values = lastFive.tolist()
+            # Use the first non-NaN reading of the three most recent
+            for value in PRES_values:
+                # Skip NaN readings (NaN is a float and would break the average)
+                if not math.isnan(value):
+                    sum_of_PRES += value
+                    PRES_counter += 1
+                    break
+
+        # Return the final PRES average, guarding against the case of no usable readings
+        if PRES_counter == 0:
+            return float('nan')
+        return sum_of_PRES / PRES_counter
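+
+    # The three get_SSI_* methods above differ only in the stdmet column they
+    # read. A possible consolidation (a sketch; nothing calls it yet): one
+    # generic averager keyed by column name, e.g. self._get_SSI_mean('WSPD').
+    def _get_SSI_mean(self, col):
+        total, count = 0.0, 0
+        for buoy in self.__id_list:
+            df = api.get_data(station_id=buoy, mode='stdmet',
+                              start_time=given_startTime,
+                              end_time=given_endTime, as_df=True)
+            recent = df.tail().reset_index()
+            recent = recent.sort_values(by='timestamp', ascending=False)
+            # Use the first non-NaN of the three most recent readings
+            for value in recent[col].head(3):
+                if not math.isnan(value):
+                    total += value
+                    count += 1
+                    break
+        return total / count if count else float('nan')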
+
+
+# In order to pull information about each individual buoy, we define another class that uses the API to gather the
+# buoy's individual readings such as WSPD, WVHT, and PRES, plus its location (longitude, latitude)
+# and the nearest town to said buoy ==> EX: station_id 44025 is 30 NM south of Islip, NY
+class Buoy:
+ def __init__(self, id):
+ # First we set the ID to the member variable, and call for the information from the NDBC_API
+ self.__id = id
+ buoy = api.get_data(station_id=id,
+ mode='stdmet',
+ start_time=given_startTime,
+ end_time=given_endTime,
+ as_df=True)
+ # Similar to the BB class, we call the most recent entries and sort them
+ lastFive = buoy.tail().reset_index()
+ lastFive = lastFive.sort_values(by='timestamp', ascending=False)
+ # We set individual lists of the 3 most recent entries (~30 minutes of reporting) in order to still give a value
+ # in case a buoy does not report (some reports are hourly, on a 30-minute mark, etc.)
+ WSPD_val = lastFive.iloc[0:3, 3].tolist()
+ WVHT_val = lastFive.iloc[0:3, 5].tolist()
+ PRES_val = lastFive.iloc[0:3, 9].tolist()
+
+        # Default each attribute to NaN so it exists even if every reading is missing
+        self.__WSPD = float('nan')
+        self.__WVHT = float('nan')
+        self.__PRES = float('nan')
+
+        # Check each list for the first non-NaN reading and save it
+        for value in WSPD_val:
+            if not math.isnan(value):
+                self.__WSPD = value
+                break
+
+        for value in WVHT_val:
+            if not math.isnan(value):
+                self.__WVHT = value
+                break
+
+        for value in PRES_val:
+            if not math.isnan(value):
+                self.__PRES = value
+                break
+
+        # Then call the station metadata endpoint for the name and location (latitude/longitude)
+        lastFive = api.station(station_id=id, as_df=True)
+        self.__NAME = str(lastFive.loc['Name'].tolist())
+        self.__LOC = str(lastFive.loc['Location'].tolist())
+
+ # Define the getter functions to get the needed information from individual buoys
+ def getWSPD(self):
+ return self.__WSPD
+
+ def getWVHT(self):
+ return self.__WVHT
+
+ def getPRES(self):
+ return self.__PRES
+
+    def getNAME(self):
+        return self.__NAME
+
+    def getLOC(self):
+        return self.__LOC
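+
+    # Usage sketch (assumes station '44025' is currently reporting stdmet data):
+    #   islip = Buoy('44025')
+    #   print(islip.getNAME(), islip.getWSPD(), islip.getLOC())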
+
+
+# We now define the radial search function to find stations near a new, user-defined location
+def Add_New_Location(lat, long):
+    # First we define an int counter for how many stations we have collected (capped at 8 below)
+    num = 0
+    # Then an empty list to store the station_ids for our eventual BB object
+    location_station_list = []
+
+    # Use the NDBC API radial search to grab the nearest stations so we can quickly find candidate station IDs
+    station_list = api.radial_search(lat=lat, lon=long, radius=350, units='km').iloc[0:10]
+ station_list = station_list.reset_index()
+
+    for _, row in station_list.iterrows():
+        # Keep stations that report meteorology data, up to the cap of 8
+        if row['Includes Meteorology'] is True and num < 8:
+            location_station_list.append(row['Station'])
+            num += 1
+
+    # Finally, check whether any stations were found; an empty list means no
+    # suitable stations were within range, so return None instead of a BB object
+    if location_station_list:
+        return BB(location_station_list)
+    return None
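+
+
+# Example usage (a sketch): build a BB object for an arbitrary point and query
+# its regional wind average. The coordinates below are illustrative assumptions.
+if __name__ == '__main__':
+    boston_area = Add_New_Location(42.36, -71.06)
+    if boston_area is not None:
+        print(boston_area.get_SSI_WSPD())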
+
+
-def gatherWindSpeed(BuoyList):
- utc_now = datetime.datetime.now(tz=datetime.UTC)
- formatted_endTime = utc_now.strftime("%Y-%m-%d")
- unformatted_startTime = utc_now - datetime.timedelta(days=1)
- formatted_startTime = unformatted_startTime.strftime("%Y-%m-%d")
- List = []
- avgWindSpeed = 0.0
- for buoy in BuoyList:
- buoyWind = api.get_data(
- station_id=buoy,
- mode='stdmet',
- start_time=formatted_startTime,
- end_time=formatted_endTime,
- as_df=True
- )
-
- try:
- subsetWindOnly = buoyWind.iloc[0:3, 1]
- avgWindSpeed += buoyWind.iloc[0:3, 1].sum()
- List.append(subsetWindOnly)
-
- except KeyError as e:
- print(f"Warning. {e}")
- continue
- except AttributeError as a:
- print(f"Warning. {a}")
- continue
-
- print(f"Average Wind speed for stations {BuoyList}: {avgWindSpeed / 15}")
- return avgWindSpeed / 15
-
-
-def gatherPres(BuoyList):
- utc_now = datetime.datetime.now(tz=datetime.UTC)
- formatted_endTime = utc_now.strftime("%Y-%m-%d")
- unformatted_startTime = utc_now - datetime.timedelta(days=1)
- formatted_startTime = unformatted_startTime.strftime("%Y-%m-%d")
- avgPres = 0.0
- List = []
- for buoy in BuoyList:
- buoyPres = api.get_data(
- station_id=buoy,
- mode='stdmet',
- start_time=formatted_startTime,
- end_time=formatted_endTime,
- as_df=True
- )
-
- try:
- subsetPresOnly = buoyPres.iloc[0:3, 7]
- avgPres += buoyPres.iloc[0:3, 7].sum()
- List.append(subsetPresOnly)
-
- except KeyError as e:
- print(f"Warning. {e}")
- continue
- except AttributeError as a:
- print(f"Warning. {a}")
- continue
-
- print(f"Average Pressure for stations {BuoyList}: {avgPres / 15}")
- return avgPres / 15
-
-
-def gatherWaveHeight(BuoyList):
- utc_now = datetime.datetime.now(tz=datetime.UTC)
- formatted_endTime = utc_now.strftime("%Y-%m-%d")
- unformatted_startTime = utc_now - datetime.timedelta(days=1)
- formatted_startTime = unformatted_startTime.strftime("%Y-%m-%d")
- avgWave = 0.0
- List = []
- print(BuoyList)
- for buoy in BuoyList:
- buoyWave = api.get_data(
- station_id=buoy,
- mode='stdmet',
- start_time=formatted_startTime,
- end_time=formatted_endTime,
- as_df=True
- )
-
- try:
- subsetWaveOnly = buoyWave.iloc[0:3, 3]
- avgWave += buoyWave.iloc[0:3, 3].sum()
- List.append(subsetWaveOnly)
-
- except KeyError as e:
- print(f"Warning. {e}")
- continue
- except AttributeError as a:
- print(f"Warning. {a}")
- continue
-
- return avgWave / 2
-
-
-nyStationList = ['44025', '44065', 'SDNH4', 'KPTN6', 'MHRN6', 'ROBN4', 'BATN6']
-
-
-print(gatherWaveHeight(nyStationList))
diff --git a/main.py b/main.py
index d2ea84f..32b9511 100644
--- a/main.py
+++ b/main.py
@@ -1,27 +1,21 @@
import gatheringInfo
-nyStationList = ['44025', '44065', 'SDNH4', 'KPTN6', 'MHRN6', 'ROBN4', 'BATN6']
+nyStationList = ['44025', '44065', 'SDHN4', 'KPTN6', 'MHRN6', 'ROBN4', 'BATN6']
-print(gatheringInfo.gatherWindSpeed(nyStationList))
-print(gatheringInfo.gatherWindSpeed(nyStationList) ** 2)
+newYork = gatheringInfo.BB(nyStationList)
-SSI = (0.5 * ((gatheringInfo.gatherWindSpeed(nyStationList) / 60)**2) +
- 0.3 * (930 / gatheringInfo.gatherPres(nyStationList)) +
- 0.2 * (gatheringInfo.gatherWaveHeight(nyStationList)) / 12)
+SSI = (0.5 * ((newYork.get_SSI_WSPD() / 60) ** 2) +
+ 0.3 * (930 / newYork.get_SSI_PRES()) +
+ 0.2 * (newYork.get_SSI_WVHT()) / 12)
if SSI < 0.2:
-    print("\nNote that some values are hard coded. Further research must be done to validate our prediction values")
    print("The expected storm should be a minimal storm")
-if 0.21 < SSI < 0.4:
-    print("\nNote that some values are hard coded. Further research must be done to validate our prediction values")
+elif SSI < 0.4:
    print("The expected storm should be a moderate storm")
-if 0.41 < SSI < 0.6:
-    print("\nNote that some values are hard coded. Further research must be done to validate our prediction values")
+elif SSI < 0.6:
    print("The expected storm should be a strong storm")
-if 0.61 < SSI < 0.8:
-    print("\nNote that some values are hard coded. Further research must be done to validate our prediction values")
+elif SSI < 0.8:
    print("The expected storm should be a severe storm")
-if 0.81 < SSI:
-    print("\nNote that some values are hard coded. Further research must be done to validate our prediction values")
+else:
    print("The expected storm should be an extreme storm")
diff --git a/ndbc_api-0.24.12.20.1.dist-info/INSTALLER b/ndbc_api-0.24.12.20.1.dist-info/INSTALLER
new file mode 100644
index 0000000..a1b589e
--- /dev/null
+++ b/ndbc_api-0.24.12.20.1.dist-info/INSTALLER
@@ -0,0 +1 @@
+pip
diff --git a/ndbc_api-0.24.12.20.1.dist-info/LICENSE b/ndbc_api-0.24.12.20.1.dist-info/LICENSE
new file mode 100644
index 0000000..24ef9ba
--- /dev/null
+++ b/ndbc_api-0.24.12.20.1.dist-info/LICENSE
@@ -0,0 +1,21 @@
+MIT License
+
+Copyright (c) 2024 Christopher Jellen
+
+Permission is hereby granted, free of charge, to any person obtaining a copy
+of this software and associated documentation files (the "Software"), to deal
+in the Software without restriction, including without limitation the rights
+to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
+copies of the Software, and to permit persons to whom the Software is
+furnished to do so, subject to the following conditions:
+
+The above copyright notice and this permission notice shall be included in all
+copies or substantial portions of the Software.
+
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
+SOFTWARE.
diff --git a/ndbc_api-0.24.12.20.1.dist-info/METADATA b/ndbc_api-0.24.12.20.1.dist-info/METADATA
new file mode 100644
index 0000000..546824c
--- /dev/null
+++ b/ndbc_api-0.24.12.20.1.dist-info/METADATA
@@ -0,0 +1,293 @@
+Metadata-Version: 2.1
+Name: ndbc-api
+Version: 0.24.12.20.1
+Summary: A Python API for the National Data Buoy Center.
+License: MIT
+Author: cdjellen
+Author-email: cdjellen@gmail.com
+Requires-Python: >=3.9,<3.13
+Classifier: License :: OSI Approved :: MIT License
+Classifier: Programming Language :: Python :: 3
+Classifier: Programming Language :: Python :: 3.9
+Classifier: Programming Language :: Python :: 3.10
+Classifier: Programming Language :: Python :: 3.11
+Classifier: Programming Language :: Python :: 3.12
+Requires-Dist: beautifulsoup4 (>=4,<5)
+Requires-Dist: html5lib (>=1.1,<2.0)
+Requires-Dist: numpy (>=1.26.3)
+Requires-Dist: pandas (>=2.0.0)
+Requires-Dist: requests (>=2.10.0)
+Requires-Dist: scipy (>=1.7.3)
+Requires-Dist: xarray (>=2022.6.0)
+Description-Content-Type: text/markdown
+
+
+# NDBC API
+
+A Python API for the National Data Buoy Center
+
+The National Oceanic and Atmospheric Administration's National Data Buoy Center maintains marine monitoring and observation stations around the world[^1]. These stations report atmospheric, oceanographic, and other meteorological data at regular intervals to the NDBC. Measurements are made available over HTTP through the NDBC's data service.
+
+The ndbc-api is a python library that makes this data more widely accessible.
+
+The ndbc-api is primarily built to parse whitespace-delimited oceanographic and atmospheric data distributed as text files for available time ranges, on a station-by-station basis[^2]. Measurements are typically distributed as `utf-8` encoded, station-by-station, fixed-period text files. More information on the measurements and methodology is available [on the NDBC website](https://www.ndbc.noaa.gov/docs/ndbc_web_data_guide.pdf)[^3].
+
+Please see [the included example notebook](/notebooks/overview.ipynb) for a more detailed walkthrough of the API's capabilities.
+
+[^1]: https://www.ndbc.noaa.gov/
+[^2]: https://www.ndbc.noaa.gov/obs.shtml
+[^3]: https://www.ndbc.noaa.gov/docs/ndbc_web_data_guide.pdf
+
+
+
+#### Installation
+The `ndbc-api` can be installed via PIP:
+
+```sh
+pip install ndbc-api
+```
+
+Conda users can install the `ndbc-api` via the `conda-forge` channel:
+
+```sh
+conda install -c conda-forge ndbc-api
+```
+
+Finally, to install the `ndbc-api` from source, clone the repository and run the following command:
+
+```sh
+python setup.py install
+```
+
+#### Requirements
+The `ndbc-api` supports Python 3.9 through 3.12, matching the classifiers above. Python 2 support is not currently planned, but could be implemented based on the needs of the atmospheric research community.
+
+The API uses synchronous HTTP requests to compile data matching the user-supplied parameters. The `ndbc-api` package depends on:
+* requests>=2.10.0
+* pandas
+* bs4
+* html5lib>=1.1
+* xarray
+* scipy
+
+##### Development
+If you would like to contribute to the growth and maintenance of the `ndbc-api`, please feel free to open a PR with tests covering your changes. The tests leverage `pytest` and depend on the above requirements, as well as:
+* coveralls
+* httpretty
+* pytest
+* pytest-cov
+* pyyaml
+* pyarrow
+
+Breaking changes will be considered, especially in the current `alpha` state of the package on `PyPi`. As the API further matures, breaking changes will only be considered with new major versions (e.g. `N.0.0`).
+
+#### Example
+
+The `ndbc-api` exposes public methods through the `NdbcApi` class.
+
+```python3
+from ndbc_api import NdbcApi
+
+api = NdbcApi()
+```
+
+The `NdbcApi` provides a unified access point for NDBC data. All methods for obtaining data, metadata, and locating stations are available using the `api` object. The `get_data` method is the primary method for accessing NDBC data, and is used to retrieve measurements from a given station over a specified time range. This method can request data from the NDBC HTTP Data Service or the THREDDS data service, and return the data as a `pandas.DataFrame`, `xarray.Dataset` or python `dict` object.
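+
+A minimal sketch of that call (the station, mode, and dates here are illustrative):
+
+```python3
+df = api.get_data(
+    station_id='tplm2',
+    mode='stdmet',
+    start_time='2022-01-01',
+    end_time='2022-02-01',
+)
+```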
+
+Data made available by the NDBC falls into two broad categories.
+
+1. Station metadata
+2. Station measurements
+
+The `api` supports a range of public methods for accessing data from the above categories.
+
+##### Station metadata
+
+The `api` has five key public methods for accessing NDBC metadata.
+
+1. The `stations` method, which returns all NDBC stations.
+2. The `nearest_station` method, which returns the station ID of the nearest station.
+3. The `station` method, which returns station metadata from a given station ID.
+4. The `available_realtime` method, which returns hyperlinks and measurement names for realtime measurements captured by a given station.
+5. The `available_historical` method, which returns hyperlinks and measurement names for historical measurements captured by a given station.
+
+###### `stations`
+
+```python3
+# get all stations and some metadata as a Pandas DataFrame
+stations_df = api.stations()
+# parse the response as a dictionary
+stations_dict = api.stations(as_df=False)
+```
+
+###### `nearest_station`
+
+```python3
+# specify desired latitude and longitude
+lat = '38.88N'
+lon = '76.43W'
+
+# find the station ID of the nearest NDBC station
+nearest = api.nearest_station(lat=lat, lon=lon)
+print(nearest)
+```
+
+```python3
+'tplm2'
+```
+
+###### `radial_search`
+
+```python3
+# specify desired latitude, longitude, radius, and units
+lat = '38.88N'
+lon = '76.43W'
+radius = 100
+units = 'km'
+
+# find the station IDs of all NDBC stations within the radius
+nearby_stations_df = api.radial_search(lat=lat, lon=lon, radius=radius, units=units)
+```
+
+###### `station`
+
+```python3
+# get station metadata
+tplm2_meta = api.station(station_id='tplm2')
+# parse the response as a Pandas DataFrame
+tplm2_df = api.station(station_id='tplm2', as_df=True)
+```
+
+###### `available_realtime`
+
+```python3
+# get all available realtime measurements, periods, and hyperlinks
+tplm2_realtime = api.available_realtime(station_id='tplm2')
+# parse the response as a Pandas DataFrame
+tplm2_realtime_df = api.available_realtime(station_id='tplm2', as_df=True)
+```
+
+###### `available_historical`
+
+```python3
+# get all available historical measurements, periods, and hyperlinks
+tplm2_historical = api.available_historical(station_id='tplm2')
+# parse the response as a Pandas DataFrame
+tplm2_historical_df = api.available_historical(station_id='tplm2', as_df=True)
+```
+
+##### Station measurements
+
+The `api` has two public methods which support accessing supported NDBC station measurements.
+
+1. The `get_modes` method, which returns a list of supported `mode`s, corresponding to the data formats provided by the NDBC data service. For example, the `adcp` mode represents "Acoustic Doppler Current Profiler" measurements, providing information about ocean currents at different depths, while `cwind` represents "Continuous winds" data, offering high-frequency wind speed and direction measurements.
+
+Note that not all stations provide the same set of measurements. The `available_realtime` and `available_historical` methods can be called on a station-by-station basis to ensure a station has the desired data available, before building and executing requests with `get_data`.
+
+2. The `get_data` method, which returns measurements of a given type for a given station.
+
+###### `get_modes`
+
+```python3
+# get the list of supported meteorological measurement modes
+modes = api.get_modes()
+print(modes)
+```
+
+```python3
+[
+ 'adcp',
+ 'cwind',
+ 'ocean',
+ 'spec',
+ 'stdmet',
+ 'supl',
+ 'swden',
+ 'swdir',
+ 'swdir2',
+ 'swr1',
+ 'swr2'
+]
+```
+
+The mode values above map directly to the identifiers used by the NDBC. Descriptions for each mode are presented below:
+* `adcp`: Acoustic Doppler Current Profiler measurements, providing information about ocean currents at different depths.
+* `cwind`: Continuous winds data, offering high-frequency wind speed and direction measurements.
+* `ocean`: Oceanographic data, including water temperature, salinity, and wave measurements.
+* `spec`: Spectral wave data, providing detailed information about wave energy and direction.
+* `stdmet`: Standard meteorological data, including air temperature, pressure, wind speed, and visibility.
+* `supl`: Supplemental measurements, which can vary depending on the specific buoy and its sensors.
+* `swden`: Spectral wave density data, providing information about the distribution of wave energy across different frequencies.
+* `swdir`: Spectral wave direction data, indicating the primary direction of wave energy.
+* `swdir2`: Secondary spectral wave direction data, capturing additional wave direction information.
+* `swr1`: First-order spectral wave data, providing basic wave height and period information.
+* `swr2`: Second-order spectral wave data, offering more detailed wave measurements.
+
+###### `get_data`
+
+```python3
+# get all continuous wind (`cwind`) measurements for station tplm2
+cwind_df = api.get_data(
+ station_id='tplm2',
+ mode='cwind',
+ start_time='2020-01-01',
+ end_time='2022-09-15',
+)
+# return data as a dictionary
+cwind_dict = api.get_data(
+ station_id='tplm2',
+ mode='cwind',
+ start_time='2020-01-01',
+ end_time='2022-09-15',
+ as_df=False
+)
+# get only the wind speed measurements
+wspd_df = api.get_data(
+ station_id='tplm2',
+ mode='cwind',
+ start_time='2020-01-01',
+ end_time='2022-09-15',
+ as_df=True,
+ cols=['WSPD']
+)
+# get all standard meteorological (`stdmet`) measurements for stations tplm2 and apam2
+stdmet_df = api.get_data(
+ station_ids=['tplm2', 'apam2'],
+ mode='stdmet',
+ start_time='2022-01-01',
+ end_time='2023-01-01',
+)
+# get all (available) continuous wind and standard meteorological measurements for stations tplm2 and apam2
+# for station apam2, this is unavailable and will log an error but not affect the rest of the results.
+stdmet_df = api.get_data(
+ station_ids=['tplm2', 'apam2'],
+ modes=['stdmet', 'cwind'],
+ start_time='2022-01-01',
+ end_time='2023-01-01',
+)
+```
+
+#### More Information
+Please see [the included example notebook](/notebooks/overview.ipynb) for a more detailed walkthrough of the API's capabilities.
+
+#### Questions
+If you have questions regarding the library please post them into
+the [GitHub discussion forum](https://github.com/cdjellen/ndbc-api/discussions).
+
diff --git a/ndbc_api-0.24.12.20.1.dist-info/RECORD b/ndbc_api-0.24.12.20.1.dist-info/RECORD
new file mode 100644
index 0000000..38e1689
--- /dev/null
+++ b/ndbc_api-0.24.12.20.1.dist-info/RECORD
@@ -0,0 +1,160 @@
+ndbc_api-0.24.12.20.1.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4
+ndbc_api-0.24.12.20.1.dist-info/LICENSE,sha256=TiryywJpeKrL8U5y3TzkgssiBcol3m0ggialHRSsFmo,1075
+ndbc_api-0.24.12.20.1.dist-info/METADATA,sha256=F2Pm0j6ZPP3IwQdPk7yADX_L3xwpTKyB2vLVw_hnP20,11143
+ndbc_api-0.24.12.20.1.dist-info/RECORD,,
+ndbc_api-0.24.12.20.1.dist-info/REQUESTED,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
+ndbc_api-0.24.12.20.1.dist-info/WHEEL,sha256=Nq82e9rUAnEjt98J6MlVmMCZb-t9cYE2Ir1kpBmnWfs,88
+ndbc_api/__init__.py,sha256=TsGy0a1wrd6bDGUms3Agl-gBbzam9Gxye7swSPxoPGc,112
+ndbc_api/__pycache__/__init__.cpython-311.pyc,,
+ndbc_api/__pycache__/exceptions.cpython-311.pyc,,
+ndbc_api/__pycache__/ndbc_api.cpython-311.pyc,,
+ndbc_api/api/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
+ndbc_api/api/__pycache__/__init__.cpython-311.pyc,,
+ndbc_api/api/handlers/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
+ndbc_api/api/handlers/__pycache__/__init__.cpython-311.pyc,,
+ndbc_api/api/handlers/__pycache__/_base.cpython-311.pyc,,
+ndbc_api/api/handlers/_base.py,sha256=j_I6tu8hvHAQ-zYCLJJghqTKRER2xGIafmDdCUeiPxo,30
+ndbc_api/api/handlers/http/__pycache__/data.cpython-311.pyc,,
+ndbc_api/api/handlers/http/__pycache__/stations.cpython-311.pyc,,
+ndbc_api/api/handlers/http/data.py,sha256=u1s5d0IMJZNVWSPSbELv_v8VZF-5rZjCzUILDs1inu4,12020
+ndbc_api/api/handlers/http/stations.py,sha256=cP8VLaDdPKGic8lGq6ia6P4uY6M4yYkJpdFO9jfvaO4,7893
+ndbc_api/api/handlers/opendap/__pycache__/data.cpython-311.pyc,,
+ndbc_api/api/handlers/opendap/data.py,sha256=dzIiKkKX-ECgcxA3qhu-osJSgDpbKSuBC8_efpaoZJ4,7829
+ndbc_api/api/parsers/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
+ndbc_api/api/parsers/__pycache__/__init__.cpython-311.pyc,,
+ndbc_api/api/parsers/http/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
+ndbc_api/api/parsers/http/__pycache__/__init__.cpython-311.pyc,,
+ndbc_api/api/parsers/http/__pycache__/_base.cpython-311.pyc,,
+ndbc_api/api/parsers/http/__pycache__/_html.cpython-311.pyc,,
+ndbc_api/api/parsers/http/__pycache__/_station.cpython-311.pyc,,
+ndbc_api/api/parsers/http/__pycache__/_xml.cpython-311.pyc,,
+ndbc_api/api/parsers/http/__pycache__/active_stations.cpython-311.pyc,,
+ndbc_api/api/parsers/http/__pycache__/adcp.cpython-311.pyc,,
+ndbc_api/api/parsers/http/__pycache__/cwind.cpython-311.pyc,,
+ndbc_api/api/parsers/http/__pycache__/historical_stations.cpython-311.pyc,,
+ndbc_api/api/parsers/http/__pycache__/ocean.cpython-311.pyc,,
+ndbc_api/api/parsers/http/__pycache__/spec.cpython-311.pyc,,
+ndbc_api/api/parsers/http/__pycache__/station_historical.cpython-311.pyc,,
+ndbc_api/api/parsers/http/__pycache__/station_metadata.cpython-311.pyc,,
+ndbc_api/api/parsers/http/__pycache__/station_realtime.cpython-311.pyc,,
+ndbc_api/api/parsers/http/__pycache__/stdmet.cpython-311.pyc,,
+ndbc_api/api/parsers/http/__pycache__/supl.cpython-311.pyc,,
+ndbc_api/api/parsers/http/__pycache__/swden.cpython-311.pyc,,
+ndbc_api/api/parsers/http/__pycache__/swdir.cpython-311.pyc,,
+ndbc_api/api/parsers/http/__pycache__/swdir2.cpython-311.pyc,,
+ndbc_api/api/parsers/http/__pycache__/swr1.cpython-311.pyc,,
+ndbc_api/api/parsers/http/__pycache__/swr2.cpython-311.pyc,,
+ndbc_api/api/parsers/http/_base.py,sha256=Z8xBZYG15eCLNAy4t65tYsViM8Ta1J7NBMhdSS3qrHA,3517
+ndbc_api/api/parsers/http/_html.py,sha256=MTdBg_3VwIc0lU6c_FQSgV2lqms_H1XfSKimzLfk_Yw,630
+ndbc_api/api/parsers/http/_station.py,sha256=6dHJC0sFNP-i3fl7ivjV7zTTL8DX1ks_eCXSNxVahTU,1822
+ndbc_api/api/parsers/http/_xml.py,sha256=Xa41QRdGL4yybnrbp-MtjOI3FvTyqe2JEfmIhvbf6dc,727
+ndbc_api/api/parsers/http/active_stations.py,sha256=_pe77hDNH5ubmZz5x8yZ3TSOh6EArTEAutmD2y6EH2o,2456
+ndbc_api/api/parsers/http/adcp.py,sha256=U-ylFIe9Ssf65AnnwZI1yl_uD7mHq9eHMkoTfUi0H6E,2625
+ndbc_api/api/parsers/http/cwind.py,sha256=_mLfSkbZNo6eZLHIHHo-OvLLb6FtoVeGwF9WOLegtRM,459
+ndbc_api/api/parsers/http/historical_stations.py,sha256=hiWDLM_rWu9A9_o_RvQfcvHwZccrzSTV5GI2-QcMii8,2859
+ndbc_api/api/parsers/http/ocean.py,sha256=kXQfDxynyMfxM7wDaZAI1JfEVRqnUk4CrXVr8okNW8s,409
+ndbc_api/api/parsers/http/spec.py,sha256=IoVgf8k2Uhg6A3P9sWrpIT2iHLnfSn26OVrUSFl1_fc,468
+ndbc_api/api/parsers/http/station_historical.py,sha256=rzLCOH4_RVNGszW-kde5sA9etgLFcVIAwsKp6mZhQX4,1157
+ndbc_api/api/parsers/http/station_metadata.py,sha256=aAUrwUYwQAlbjrqDsOldz72dUouw0zj0yD-HiR0oFB4,1691
+ndbc_api/api/parsers/http/station_realtime.py,sha256=Br63W9w--vx6KJ1DxS6KkHygYRGuRLSCYAlfVVTMSfw,1005
+ndbc_api/api/parsers/http/stdmet.py,sha256=lcH3uSTYP7x0asX0KSlpoAwgI4q4m16aorc35-cUV5s,461
+ndbc_api/api/parsers/http/supl.py,sha256=JW_WPpqhFpCugNWen8ujNDH-gfSE9nXpRRZ0kKMcM4c,499
+ndbc_api/api/parsers/http/swden.py,sha256=vY1G7qr_JMUfzM4D01NeO0veL-CgTYWdDzILI6H7sg0,1420
+ndbc_api/api/parsers/http/swdir.py,sha256=prKFsbHWFRq4FzUCbzu31Qk5duzHF4DrjveVNLSSWtQ,1415
+ndbc_api/api/parsers/http/swdir2.py,sha256=kSRxn5dm-LB5bXIuqGUCoj3GoY9X7OAAot0IlNR55ug,1474
+ndbc_api/api/parsers/http/swr1.py,sha256=Cit-Qj7Jwf1psxkqReENgAxE_euR_DQlqDdBKkdRNiY,1413
+ndbc_api/api/parsers/http/swr2.py,sha256=-7X0UfpW19ZmEctJ1-P7mJ9P6WakZUtmE01LgddQ2NM,1413
+ndbc_api/api/parsers/opendap/__pycache__/_base.cpython-311.pyc,,
+ndbc_api/api/parsers/opendap/__pycache__/adcp.cpython-311.pyc,,
+ndbc_api/api/parsers/opendap/__pycache__/cwind.cpython-311.pyc,,
+ndbc_api/api/parsers/opendap/__pycache__/ocean.cpython-311.pyc,,
+ndbc_api/api/parsers/opendap/__pycache__/pwind.cpython-311.pyc,,
+ndbc_api/api/parsers/opendap/__pycache__/stdmet.cpython-311.pyc,,
+ndbc_api/api/parsers/opendap/__pycache__/swden.cpython-311.pyc,,
+ndbc_api/api/parsers/opendap/__pycache__/wlevel.cpython-311.pyc,,
+ndbc_api/api/parsers/opendap/_base.py,sha256=sHaTOjGjFD--IrzgT5kCIAENZ-0U0xK_yxBzkgaLA6w,2344
+ndbc_api/api/parsers/opendap/adcp.py,sha256=7h01yEDmVXq6LSGWHOS9SaCrZ7Ppdc-bNsb9NyR3E2w,471
+ndbc_api/api/parsers/opendap/cwind.py,sha256=2a19vj3XTYXRUmuqc25vNH8Q859bjJmESdSG7mz9Hlk,493
+ndbc_api/api/parsers/opendap/ocean.py,sha256=wrTcOns1AVBLbau2gWY5m5R5ZoekdOWj76ARcckqBpY,493
+ndbc_api/api/parsers/opendap/pwind.py,sha256=OC9EigROQbVnEibiyswWyJYpOSyFqCV8cLtGgyaXOG4,489
+ndbc_api/api/parsers/opendap/stdmet.py,sha256=IjiV0ghkZ4oxW0V5ST3NKh3L0VUb4BEL-3ehvJzU0vs,491
+ndbc_api/api/parsers/opendap/swden.py,sha256=0jthaqlYbabo0dogmgcJfUKbKRnf1okTGSJ_GLpqb20,477
+ndbc_api/api/parsers/opendap/wlevel.py,sha256=-VDOWhuhxSfqd5amh00o9o4WzDJIYlwkcwALAB-MkRY,479
+ndbc_api/api/requests/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
+ndbc_api/api/requests/__pycache__/__init__.cpython-311.pyc,,
+ndbc_api/api/requests/http/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
+ndbc_api/api/requests/http/__pycache__/__init__.cpython-311.pyc,,
+ndbc_api/api/requests/http/__pycache__/_base.cpython-311.pyc,,
+ndbc_api/api/requests/http/__pycache__/_core.cpython-311.pyc,,
+ndbc_api/api/requests/http/__pycache__/active_stations.cpython-311.pyc,,
+ndbc_api/api/requests/http/__pycache__/adcp.cpython-311.pyc,,
+ndbc_api/api/requests/http/__pycache__/cwind.cpython-311.pyc,,
+ndbc_api/api/requests/http/__pycache__/historical_stations.cpython-311.pyc,,
+ndbc_api/api/requests/http/__pycache__/ocean.cpython-311.pyc,,
+ndbc_api/api/requests/http/__pycache__/spec.cpython-311.pyc,,
+ndbc_api/api/requests/http/__pycache__/station_historical.cpython-311.pyc,,
+ndbc_api/api/requests/http/__pycache__/station_metadata.cpython-311.pyc,,
+ndbc_api/api/requests/http/__pycache__/station_realtime.cpython-311.pyc,,
+ndbc_api/api/requests/http/__pycache__/stdmet.cpython-311.pyc,,
+ndbc_api/api/requests/http/__pycache__/supl.cpython-311.pyc,,
+ndbc_api/api/requests/http/__pycache__/swden.cpython-311.pyc,,
+ndbc_api/api/requests/http/__pycache__/swdir.cpython-311.pyc,,
+ndbc_api/api/requests/http/__pycache__/swdir2.cpython-311.pyc,,
+ndbc_api/api/requests/http/__pycache__/swr1.cpython-311.pyc,,
+ndbc_api/api/requests/http/__pycache__/swr2.cpython-311.pyc,,
+ndbc_api/api/requests/http/_base.py,sha256=KaTUW5bfUTdwVP9EC_zsjbgtCyNiqf4c9aOBAC-Yr_c,4139
+ndbc_api/api/requests/http/_core.py,sha256=_4BB7cFffjHvxLLnESFnxelZy-hP_A5mbL5Q3EwiQk0,152
+ndbc_api/api/requests/http/active_stations.py,sha256=HADq5sZ7B6FQmnGrzI8xLPq9UhBnJ1Q87MXC7y_Cukk,256
+ndbc_api/api/requests/http/adcp.py,sha256=mzsGcvHdPOzyaE6HAl7kDiHJYw1zPqIJonD6R_PyB6Q,522
+ndbc_api/api/requests/http/cwind.py,sha256=xlZWwHINLxK33s1U0F4JHqjfVycWHIfatO-pTG4p-fk,527
+ndbc_api/api/requests/http/historical_stations.py,sha256=qIYJfT6QBf7_s4Xxo8L5iSnxvGy5pY1hO4j3pnxk-1I,270
+ndbc_api/api/requests/http/ocean.py,sha256=Gh8g4sQcwo1IeBZCVCfBaysbrU7i1kFDf3fJVfM9hg8,527
+ndbc_api/api/requests/http/spec.py,sha256=MvDkj6v6rpwNdahANNwvvYhoRnsQOrH_tSTEjQd10F8,489
+ndbc_api/api/requests/http/station_historical.py,sha256=QiKg87g4xU6JEJmh_f0duD44eWWuhhQ2gPHJFYppmUs,311
+ndbc_api/api/requests/http/station_metadata.py,sha256=2lAOeH65TR3Skdmfoj7D2skzJuCeeS7OXegbKNfheCg,290
+ndbc_api/api/requests/http/station_realtime.py,sha256=8c_86VmLIQlxsh0qB0-qtb7OtTuOpwZ3IIoU95UeC9w,312
+ndbc_api/api/requests/http/stdmet.py,sha256=6Fzi-dKJd5HAjPTbXHAKPwA6JAQVPLaOpRWVlZyQHTc,496
+ndbc_api/api/requests/http/supl.py,sha256=xw_E8xmQ7lhYyL3j_NV77dGlHOz-I8_1ZVMrNNIMp3s,522
+ndbc_api/api/requests/http/swden.py,sha256=RdB_U1L1jbPRCKRiunWFtckcK4Ee-XE8nP0ln6evseE,527
+ndbc_api/api/requests/http/swdir.py,sha256=X5qn5YVyae4SRXUPOYcKofrd8M1EXunZyszKYkCsFrw,527
+ndbc_api/api/requests/http/swdir2.py,sha256=SvjdxyPCDUdDfU5Bln48SqitHpt5X-iqenMIEBPaoww,532
+ndbc_api/api/requests/http/swr1.py,sha256=yLsyVmja3xaD_08OfznudPtctm2TGT6dfdaPQxppY1Y,522
+ndbc_api/api/requests/http/swr2.py,sha256=ijx5_oY1TgUpEPPWG3NiAEwPAhvlAdUpkckxJ0-U6RM,522
+ndbc_api/api/requests/opendap/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
+ndbc_api/api/requests/opendap/__pycache__/__init__.cpython-311.pyc,,
+ndbc_api/api/requests/opendap/__pycache__/_base.cpython-311.pyc,,
+ndbc_api/api/requests/opendap/__pycache__/_core.cpython-311.pyc,,
+ndbc_api/api/requests/opendap/__pycache__/adcp.cpython-311.pyc,,
+ndbc_api/api/requests/opendap/__pycache__/cwind.cpython-311.pyc,,
+ndbc_api/api/requests/opendap/__pycache__/ocean.cpython-311.pyc,,
+ndbc_api/api/requests/opendap/__pycache__/pwind.cpython-311.pyc,,
+ndbc_api/api/requests/opendap/__pycache__/stdmet.cpython-311.pyc,,
+ndbc_api/api/requests/opendap/__pycache__/swden.cpython-311.pyc,,
+ndbc_api/api/requests/opendap/__pycache__/wlevel.cpython-311.pyc,,
+ndbc_api/api/requests/opendap/_base.py,sha256=TG38-3r2RYepaxMbjCfBSBxZ2VjpSzs_S5DIFBIC8ME,3082
+ndbc_api/api/requests/opendap/_core.py,sha256=w2bQ1slnkrhR9gRHrQ09WOm4lnx1BPac7bWma7rx1wE,161
+ndbc_api/api/requests/opendap/adcp.py,sha256=rj60TbOdHL-KwNaDQXNecyT63MpDpq42zzQIspj5v0E,498
+ndbc_api/api/requests/opendap/cwind.py,sha256=lVasJezPLRkefioRaPa-DRJknSXB_ft4AAewHMSIabM,502
+ndbc_api/api/requests/opendap/ocean.py,sha256=vUrpA3iwRnFeseaF_6SFSEKKu53k7T2ZbO74k4QV88o,502
+ndbc_api/api/requests/opendap/pwind.py,sha256=qVJtC9InphYZ2-ia18pTW3jd1kOtwxdBHppFnxTyuIE,502
+ndbc_api/api/requests/opendap/stdmet.py,sha256=M-QJwEw030t78wpqBHv3JNTLVCzHy2GkYuX2DAvQNI8,506
+ndbc_api/api/requests/opendap/swden.py,sha256=tIyINwMDGfClpMHqr4WWDSnuw5AQnX2Gg5i8PPTEUP8,502
+ndbc_api/api/requests/opendap/wlevel.py,sha256=m8LfXRwo7Cs_W2rSScthdRpiZzv2PGD5AtmjzBmI2So,506
+ndbc_api/config/__init__.py,sha256=DcBR3iarhVpyd_-CRzcVFQfGm8SRlLq74IwYpkpeEzI,988
+ndbc_api/config/__pycache__/__init__.cpython-311.pyc,,
+ndbc_api/exceptions.py,sha256=-Rmb_iG2Utzn_LEX0pWfitOz5STqx8uzhzHATpJjBEI,810
+ndbc_api/ndbc_api.py,sha256=otqSljNY8GeRk67LIcDQwQY7A7sN981YQgjtOIeiDio,36090
+ndbc_api/utilities/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
+ndbc_api/utilities/__pycache__/__init__.cpython-311.pyc,,
+ndbc_api/utilities/__pycache__/log_formatter.cpython-311.pyc,,
+ndbc_api/utilities/__pycache__/req_cache.cpython-311.pyc,,
+ndbc_api/utilities/__pycache__/req_handler.cpython-311.pyc,,
+ndbc_api/utilities/__pycache__/singleton.cpython-311.pyc,,
+ndbc_api/utilities/log_formatter.py,sha256=DkgI_E4_ZmNyszFlPUn0kepCEVswdwbjuFt6Wb6TUf0,411
+ndbc_api/utilities/opendap/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
+ndbc_api/utilities/opendap/__pycache__/__init__.cpython-311.pyc,,
+ndbc_api/utilities/opendap/__pycache__/dataset.cpython-311.pyc,,
+ndbc_api/utilities/opendap/dataset.py,sha256=p_mlrQPVBwFxNkrknBuvi5-AgpHTygR0-GZIPwhhZbM,2837
+ndbc_api/utilities/req_cache.py,sha256=Gy2omhvFZ3VFgXcZ1ra33OLt4iT3RMniI_cAphbVecM,1538
+ndbc_api/utilities/req_handler.py,sha256=QUYiKy7kOr5n8pB3aaH1_bwi7Kqf87iQ5WakFH68KIo,9752
+ndbc_api/utilities/singleton.py,sha256=zg1Q1vENorS43FeOUovd05opjTU2J-uxuFSpK6uPU-4,337
diff --git a/ndbc_api-0.24.12.20.1.dist-info/REQUESTED b/ndbc_api-0.24.12.20.1.dist-info/REQUESTED
new file mode 100644
index 0000000..e69de29
diff --git a/ndbc_api-0.24.12.20.1.dist-info/WHEEL b/ndbc_api-0.24.12.20.1.dist-info/WHEEL
new file mode 100644
index 0000000..8b9b3a1
--- /dev/null
+++ b/ndbc_api-0.24.12.20.1.dist-info/WHEEL
@@ -0,0 +1,4 @@
+Wheel-Version: 1.0
+Generator: poetry-core 1.9.1
+Root-Is-Purelib: true
+Tag: py3-none-any
diff --git a/ndbc_api/__init__.py b/ndbc_api/__init__.py
new file mode 100644
index 0000000..de30e98
--- /dev/null
+++ b/ndbc_api/__init__.py
@@ -0,0 +1,4 @@
+""" .. include:: ../README.md """
+__docformat__ = "restructuredtext"
+
+from ndbc_api.ndbc_api import NdbcApi
diff --git a/ndbc_api/__pycache__/__init__.cpython-311.pyc b/ndbc_api/__pycache__/__init__.cpython-311.pyc
new file mode 100644
index 0000000..8c8eb45
Binary files /dev/null and b/ndbc_api/__pycache__/__init__.cpython-311.pyc differ
diff --git a/ndbc_api/__pycache__/exceptions.cpython-311.pyc b/ndbc_api/__pycache__/exceptions.cpython-311.pyc
new file mode 100644
index 0000000..3167b4b
Binary files /dev/null and b/ndbc_api/__pycache__/exceptions.cpython-311.pyc differ
diff --git a/ndbc_api/__pycache__/ndbc_api.cpython-311.pyc b/ndbc_api/__pycache__/ndbc_api.cpython-311.pyc
new file mode 100644
index 0000000..bb1edc0
Binary files /dev/null and b/ndbc_api/__pycache__/ndbc_api.cpython-311.pyc differ
diff --git a/ndbc_api/api/__init__.py b/ndbc_api/api/__init__.py
new file mode 100644
index 0000000..e69de29
diff --git a/ndbc_api/api/__pycache__/__init__.cpython-311.pyc b/ndbc_api/api/__pycache__/__init__.cpython-311.pyc
new file mode 100644
index 0000000..6726545
Binary files /dev/null and b/ndbc_api/api/__pycache__/__init__.cpython-311.pyc differ
diff --git a/ndbc_api/api/handlers/__init__.py b/ndbc_api/api/handlers/__init__.py
new file mode 100644
index 0000000..e69de29
diff --git a/ndbc_api/api/handlers/__pycache__/__init__.cpython-311.pyc b/ndbc_api/api/handlers/__pycache__/__init__.cpython-311.pyc
new file mode 100644
index 0000000..bf1edb4
Binary files /dev/null and b/ndbc_api/api/handlers/__pycache__/__init__.cpython-311.pyc differ
diff --git a/ndbc_api/api/handlers/__pycache__/_base.cpython-311.pyc b/ndbc_api/api/handlers/__pycache__/_base.cpython-311.pyc
new file mode 100644
index 0000000..0777c92
Binary files /dev/null and b/ndbc_api/api/handlers/__pycache__/_base.cpython-311.pyc differ
diff --git a/ndbc_api/api/handlers/_base.py b/ndbc_api/api/handlers/_base.py
new file mode 100644
index 0000000..51592f6
--- /dev/null
+++ b/ndbc_api/api/handlers/_base.py
@@ -0,0 +1,2 @@
+class BaseHandler:
+ pass
diff --git a/ndbc_api/api/handlers/http/__init__.py b/ndbc_api/api/handlers/http/__init__.py
new file mode 100644
index 0000000..e69de29
diff --git a/ndbc_api/api/handlers/http/__pycache__/__init__.cpython-311.pyc b/ndbc_api/api/handlers/http/__pycache__/__init__.cpython-311.pyc
new file mode 100644
index 0000000..f17d81b
Binary files /dev/null and b/ndbc_api/api/handlers/http/__pycache__/__init__.cpython-311.pyc differ
diff --git a/ndbc_api/api/handlers/http/__pycache__/data.cpython-311.pyc b/ndbc_api/api/handlers/http/__pycache__/data.cpython-311.pyc
new file mode 100644
index 0000000..7a6f285
Binary files /dev/null and b/ndbc_api/api/handlers/http/__pycache__/data.cpython-311.pyc differ
diff --git a/ndbc_api/api/handlers/http/__pycache__/stations.cpython-311.pyc b/ndbc_api/api/handlers/http/__pycache__/stations.cpython-311.pyc
new file mode 100644
index 0000000..4085cae
Binary files /dev/null and b/ndbc_api/api/handlers/http/__pycache__/stations.cpython-311.pyc differ
diff --git a/ndbc_api/api/handlers/http/data.py b/ndbc_api/api/handlers/http/data.py
new file mode 100644
index 0000000..e0952ec
--- /dev/null
+++ b/ndbc_api/api/handlers/http/data.py
@@ -0,0 +1,285 @@
+from datetime import datetime, timedelta
+from typing import Any
+
+import pandas as pd
+
+from ndbc_api.api.handlers._base import BaseHandler
+from ndbc_api.api.parsers.http.adcp import AdcpParser
+from ndbc_api.api.parsers.http.cwind import CwindParser
+from ndbc_api.api.parsers.http.ocean import OceanParser
+from ndbc_api.api.parsers.http.spec import SpecParser
+from ndbc_api.api.parsers.http.stdmet import StdmetParser
+from ndbc_api.api.parsers.http.supl import SuplParser
+from ndbc_api.api.parsers.http.swden import SwdenParser
+from ndbc_api.api.parsers.http.swdir import SwdirParser
+from ndbc_api.api.parsers.http.swdir2 import Swdir2Parser
+from ndbc_api.api.parsers.http.swr1 import Swr1Parser
+from ndbc_api.api.parsers.http.swr2 import Swr2Parser
+from ndbc_api.api.requests.http.adcp import AdcpRequest
+from ndbc_api.api.requests.http.cwind import CwindRequest
+from ndbc_api.api.requests.http.ocean import OceanRequest
+from ndbc_api.api.requests.http.spec import SpecRequest
+from ndbc_api.api.requests.http.stdmet import StdmetRequest
+from ndbc_api.api.requests.http.supl import SuplRequest
+from ndbc_api.api.requests.http.swden import SwdenRequest
+from ndbc_api.api.requests.http.swdir import SwdirRequest
+from ndbc_api.api.requests.http.swdir2 import Swdir2Request
+from ndbc_api.api.requests.http.swr1 import Swr1Request
+from ndbc_api.api.requests.http.swr2 import Swr2Request
+from ndbc_api.exceptions import RequestException, ResponseException
+
+
+class DataHandler(BaseHandler):
+
+ @classmethod
+ def adcp(
+ cls,
+ handler: Any,
+ station_id: str,
+ start_time: datetime = datetime.now() - timedelta(days=30),
+ end_time: datetime = datetime.now(),
+ use_timestamp: bool = True,
+ ) -> pd.DataFrame:
+ """adcp"""
+ try:
+ reqs = AdcpRequest.build_request(station_id=station_id,
+ start_time=start_time,
+ end_time=end_time)
+ except Exception as e:
+ raise RequestException('Failed to build request.') from e
+ try:
+ resps = handler.handle_requests(station_id=station_id, reqs=reqs)
+ except Exception as e:
+ raise ResponseException('Failed to execute requests.') from e
+ return AdcpParser.df_from_responses(responses=resps,
+ use_timestamp=use_timestamp)
+
+ @classmethod
+ def cwind(
+ cls,
+ handler: Any,
+ station_id: str,
+ start_time: datetime = datetime.now() - timedelta(days=30),
+ end_time: datetime = datetime.now(),
+ use_timestamp: bool = True,
+ ) -> pd.DataFrame:
+ """cwind"""
+ try:
+ reqs = CwindRequest.build_request(station_id=station_id,
+ start_time=start_time,
+ end_time=end_time)
+ except Exception as e:
+ raise RequestException('Failed to build request.') from e
+ try:
+ resps = handler.handle_requests(station_id=station_id, reqs=reqs)
+ except Exception as e:
+ raise ResponseException('Failed to execute requests.') from e
+ return CwindParser.df_from_responses(responses=resps,
+ use_timestamp=use_timestamp)
+
+ @classmethod
+ def ocean(
+ cls,
+ handler: Any,
+ station_id: str,
+ start_time: datetime = datetime.now() - timedelta(days=30),
+ end_time: datetime = datetime.now(),
+ use_timestamp: bool = True,
+ ) -> pd.DataFrame:
+ """ocean"""
+ try:
+ reqs = OceanRequest.build_request(station_id=station_id,
+ start_time=start_time,
+ end_time=end_time)
+ except Exception as e:
+ raise RequestException('Failed to build request.') from e
+ try:
+ resps = handler.handle_requests(station_id=station_id, reqs=reqs)
+ except Exception as e:
+ raise ResponseException('Failed to execute requests.') from e
+ return OceanParser.df_from_responses(responses=resps,
+ use_timestamp=use_timestamp)
+
+ @classmethod
+ def spec(
+ cls,
+ handler: Any,
+ station_id: str,
+ start_time: datetime = datetime.now() - timedelta(days=30),
+ end_time: datetime = datetime.now(),
+ use_timestamp: bool = True,
+ ) -> pd.DataFrame:
+ """spec"""
+ try:
+ reqs = SpecRequest.build_request(station_id=station_id,
+ start_time=start_time,
+ end_time=end_time)
+ except Exception as e:
+ raise RequestException('Failed to build request.') from e
+ try:
+ resps = handler.handle_requests(station_id=station_id, reqs=reqs)
+ except Exception as e:
+ raise ResponseException('Failed to execute requests.') from e
+ return SpecParser.df_from_responses(responses=resps,
+ use_timestamp=use_timestamp)
+
+ @classmethod
+ def stdmet(
+ cls,
+ handler: Any,
+ station_id: str,
+ start_time: datetime = datetime.now() - timedelta(days=30),
+ end_time: datetime = datetime.now(),
+ use_timestamp: bool = True,
+ ) -> pd.DataFrame:
+ """stdmet"""
+ try:
+ reqs = StdmetRequest.build_request(station_id=station_id,
+ start_time=start_time,
+ end_time=end_time)
+ except Exception as e:
+ raise RequestException('Failed to build request.') from e
+ try:
+ resps = handler.handle_requests(station_id=station_id, reqs=reqs)
+ except Exception as e:
+ raise ResponseException('Failed to execute requests.') from e
+ return StdmetParser.df_from_responses(responses=resps,
+ use_timestamp=use_timestamp)
+
+ @classmethod
+ def supl(
+ cls,
+ handler: Any,
+ station_id: str,
+ start_time: datetime = datetime.now() - timedelta(days=30),
+ end_time: datetime = datetime.now(),
+ use_timestamp: bool = True,
+ ) -> pd.DataFrame:
+ """supl"""
+ try:
+ reqs = SuplRequest.build_request(station_id=station_id,
+ start_time=start_time,
+ end_time=end_time)
+ except Exception as e:
+ raise RequestException('Failed to build request.') from e
+ try:
+ resps = handler.handle_requests(station_id=station_id, reqs=reqs)
+ except Exception as e:
+ raise ResponseException('Failed to execute requests.') from e
+ return SuplParser.df_from_responses(responses=resps,
+ use_timestamp=use_timestamp)
+
+ @classmethod
+ def swden(
+ cls,
+ handler: Any,
+ station_id: str,
+ start_time: datetime = datetime.now() - timedelta(days=30),
+ end_time: datetime = datetime.now(),
+ use_timestamp: bool = True,
+ ) -> pd.DataFrame:
+ """swden"""
+ try:
+ reqs = SwdenRequest.build_request(station_id=station_id,
+ start_time=start_time,
+ end_time=end_time)
+ except Exception as e:
+ raise RequestException('Failed to build request.') from e
+ try:
+ resps = handler.handle_requests(station_id=station_id, reqs=reqs)
+ except Exception as e:
+ raise ResponseException('Failed to execute requests.') from e
+ return SwdenParser.df_from_responses(responses=resps,
+ use_timestamp=use_timestamp)
+
+ @classmethod
+ def swdir(
+ cls,
+ handler: Any,
+ station_id: str,
+ start_time: datetime = datetime.now() - timedelta(days=30),
+ end_time: datetime = datetime.now(),
+ use_timestamp: bool = True,
+ ) -> pd.DataFrame:
+ """swdir"""
+ try:
+ reqs = SwdirRequest.build_request(station_id=station_id,
+ start_time=start_time,
+ end_time=end_time)
+ except Exception as e:
+ raise RequestException('Failed to build request.') from e
+ try:
+ resps = handler.handle_requests(station_id=station_id, reqs=reqs)
+ except Exception as e:
+ raise ResponseException('Failed to execute requests.') from e
+ return SwdirParser.df_from_responses(responses=resps,
+ use_timestamp=use_timestamp)
+
+ @classmethod
+ def swdir2(
+ cls,
+ handler: Any,
+ station_id: str,
+ start_time: datetime = datetime.now() - timedelta(days=30),
+ end_time: datetime = datetime.now(),
+ use_timestamp: bool = True,
+ ) -> pd.DataFrame:
+ """swdir2"""
+ try:
+ reqs = Swdir2Request.build_request(station_id=station_id,
+ start_time=start_time,
+ end_time=end_time)
+ except Exception as e:
+ raise RequestException('Failed to build request.') from e
+ try:
+ resps = handler.handle_requests(station_id=station_id, reqs=reqs)
+ except Exception as e:
+ raise ResponseException('Failed to execute requests.') from e
+ return Swdir2Parser.df_from_responses(responses=resps,
+ use_timestamp=use_timestamp)
+
+ @classmethod
+ def swr1(
+ cls,
+ handler: Any,
+ station_id: str,
+ start_time: datetime = datetime.now() - timedelta(days=30),
+ end_time: datetime = datetime.now(),
+ use_timestamp: bool = True,
+ ) -> pd.DataFrame:
+ """swr1"""
+ try:
+ reqs = Swr1Request.build_request(station_id=station_id,
+ start_time=start_time,
+ end_time=end_time)
+ except Exception as e:
+ raise RequestException('Failed to build request.') from e
+ try:
+ resps = handler.handle_requests(station_id=station_id, reqs=reqs)
+ except Exception as e:
+ raise ResponseException('Failed to execute requests.') from e
+ return Swr1Parser.df_from_responses(responses=resps,
+ use_timestamp=use_timestamp)
+
+ @classmethod
+ def swr2(
+ cls,
+ handler: Any,
+ station_id: str,
+ start_time: datetime = datetime.now() - timedelta(days=30),
+ end_time: datetime = datetime.now(),
+ use_timestamp: bool = True,
+ ) -> pd.DataFrame:
+ """swr2"""
+ try:
+ reqs = Swr2Request.build_request(station_id=station_id,
+ start_time=start_time,
+ end_time=end_time)
+ except Exception as e:
+ raise RequestException('Failed to build request.') from e
+ try:
+ resps = handler.handle_requests(station_id=station_id, reqs=reqs)
+ except Exception as e:
+ raise ResponseException('Failed to execute requests.') from e
+ return Swr2Parser.df_from_responses(responses=resps,
+ use_timestamp=use_timestamp)
diff --git a/ndbc_api/api/handlers/http/stations.py b/ndbc_api/api/handlers/http/stations.py
new file mode 100644
index 0000000..7071f6b
--- /dev/null
+++ b/ndbc_api/api/handlers/http/stations.py
@@ -0,0 +1,186 @@
+from math import asin, cos, pi, sqrt
+from typing import Any, Union
+
+import pandas as pd
+
+from ndbc_api.api.handlers._base import BaseHandler
+from ndbc_api.api.parsers.http.station_historical import HistoricalParser
+from ndbc_api.api.parsers.http.station_metadata import MetadataParser
+from ndbc_api.api.parsers.http.station_realtime import RealtimeParser
+from ndbc_api.api.parsers.http.active_stations import ActiveStationsParser
+from ndbc_api.api.parsers.http.historical_stations import HistoricalStationsParser
+from ndbc_api.api.requests.http.station_historical import HistoricalRequest
+from ndbc_api.api.requests.http.station_metadata import MetadataRequest
+from ndbc_api.api.requests.http.station_realtime import RealtimeRequest
+from ndbc_api.api.requests.http.active_stations import ActiveStationsRequest
+from ndbc_api.api.requests.http.historical_stations import HistoricalStationsRequest
+from ndbc_api.exceptions import ParserException, ResponseException
+
+
+class StationsHandler(BaseHandler):
+
+ DEG_TO_RAD = pi / 180
+ DIAM_OF_EARTH = 12756 # km
+ LAT_MAP = (lambda x: -1 * float(x.strip('S'))
+ if 'S' in x else float(x.strip('N')))
+ LON_MAP = (lambda x: -1 * float(x.strip('W'))
+ if 'W' in x else float(x.strip('E')))
+ UNITS = ('nm', 'km', 'mi')
+
+ @classmethod
+ def stations(cls, handler: Any) -> pd.DataFrame:
+ """Get all active stations from NDBC."""
+ req = ActiveStationsRequest.build_request()
+ try:
+ resp = handler.handle_request('stn_active', req)
+ except (AttributeError, ValueError, TypeError) as e:
+ raise ResponseException(
+ 'Failed to execute `station` request.') from e
+ return ActiveStationsParser.df_from_response(resp, use_timestamp=False)
+
+ @classmethod
+ def historical_stations(cls, handler: Any) -> pd.DataFrame:
+ """Get historical stations from NDBC."""
+ req = HistoricalStationsRequest.build_request()
+ try:
+ resp = handler.handle_request('stn_historical', req)
+ except (AttributeError, ValueError, TypeError) as e:
+ raise ResponseException(
+ 'Failed to execute `station` request.') from e
+ return HistoricalStationsParser.df_from_response(resp,
+ use_timestamp=False)
+
+ @classmethod
+ def nearest_station(
+ cls,
+ handler: Any,
+ lat: Union[str, float],
+ lon: Union[str, float],
+ ) -> str:
+ """Get nearest station from specified lat/lon."""
+ df = cls.stations(handler=handler)
+ if isinstance(lat, str):
+ lat = StationsHandler.LAT_MAP(lat)
+ if isinstance(lon, str):
+ lon = StationsHandler.LON_MAP(lon)
+ try:
+ closest = cls._nearest(df, lat, lon)
+ except (TypeError, KeyError, ValueError) as e:
+ raise ParserException from e
+ closest = closest.to_dict().get('Station', {'UNK': 'UNK'})
+ return list(closest.values())[0]
+
+ @classmethod
+ def radial_search(
+ cls,
+ handler: Any,
+ lat: Union[str, float],
+ lon: Union[str, float],
+ radius: float,
+ units: str = 'km',
+ ) -> pd.DataFrame:
+ """Get stations within of the specified lat/lon."""
+ if units not in cls.UNITS:
+ raise ValueError(
+ f'Invalid unit: {units}, must be one of {cls.UNITS}.')
+ if radius < 0:
+ raise ValueError(f'Invalid radius: {radius}, must be non-negative.')
+ # convert the search radius to km before filtering
+ if units == 'nm':
+ radius = radius * 1.852
+ elif units == 'mi':
+ radius = radius * 1.60934
+
+ df = cls.stations(handler=handler)
+ if isinstance(lat, str):
+ lat = StationsHandler.LAT_MAP(lat)
+ if isinstance(lon, str):
+ lon = StationsHandler.LON_MAP(lon)
+ try:
+ stations_in_radius = cls._radial_search(df, lat, lon, radius)
+ except (TypeError, KeyError, ValueError) as e:
+ raise ParserException from e
+ return stations_in_radius
+
+ @classmethod
+ def metadata(cls, handler: Any, station_id: str) -> pd.DataFrame:
+ """Get station description."""
+ req = MetadataRequest.build_request(station_id=station_id)
+ try:
+ resp = handler.handle_request(station_id, req)
+ except (AttributeError, ValueError, TypeError) as e:
+ raise ResponseException(
+ 'Failed to execute `station` request.') from e
+ return MetadataParser.metadata(resp)
+
+ @classmethod
+ def realtime(cls, handler: Any, station_id: str) -> pd.DataFrame:
+ """Get the available realtime measurements for a station."""
+ req = RealtimeRequest.build_request(station_id=station_id)
+ try:
+ resp = handler.handle_request(station_id, req)
+ except (AttributeError, ValueError, TypeError) as e:
+ raise ResponseException(
+ 'Failed to execute `station` request.') from e
+ return RealtimeParser.available_measurements(resp)
+
+ @classmethod
+ def historical(cls, handler: Any,
+ station_id: str) -> Union[pd.DataFrame, dict]:
+ """Get the available historical measurements for a station."""
+ req = HistoricalRequest.build_request(station_id=station_id)
+ try:
+ resp = handler.handle_request(station_id, req)
+ except (AttributeError, ValueError, TypeError) as e:
+ raise ResponseException(
+ 'Failed to execute `station` request.') from e
+ return HistoricalParser.available_measurements(resp)
+
+ """ PRIVATE """
+
+ @staticmethod
+ def _distance(lat_a: float, lon_a: float, lat_b: float,
+ lon_b: float) -> float:
+ """Compute the haversine distance in km between two lat/lon points."""
+ haversine = (0.5 - cos(
+ (lat_b - lat_a) * StationsHandler.DEG_TO_RAD) / 2 +
+ cos(lat_a * StationsHandler.DEG_TO_RAD) *
+ cos(lat_b * StationsHandler.DEG_TO_RAD) * (1 - cos(
+ (lon_b - lon_a) * StationsHandler.DEG_TO_RAD)) / 2)
+ return StationsHandler.DIAM_OF_EARTH * asin(sqrt(haversine))
+
+ @staticmethod
+ def _nearest(df: pd.DataFrame, lat_a: float, lon_a: float):
+ """Get the nearest station from specified `float`-valued lat/lon."""
+ # Drop rows with missing latitude or longitude
+ df_filtered = df.dropna(subset=['Lat', 'Lon'])
+
+ # Calculate distances using Haversine formula
+ df_filtered['distance'] = df_filtered.apply(
+ lambda row: StationsHandler._distance(lat_a, lon_a, row['Lat'], row[
+ 'Lon']),
+ axis=1)
+
+ # Find the index of the closest row
+ smallest_distance = df_filtered['distance'].min()
+
+ # Return the row corresponding to the nearest station
+ return df_filtered.loc[df_filtered['distance'] == smallest_distance]
+
+ @staticmethod
+ def _radial_search(df: pd.DataFrame, lat_a: float, lon_a: float,
+ radius: float):
+ """Get the stations within radius km from specified `float`-valued lat/lon."""
+ # Drop rows with missing latitude or longitude
+ df_filtered = df.dropna(subset=['Lat', 'Lon'])
+
+ # Calculate distances using Haversine formula
+ df_filtered['distance'] = df_filtered.apply(
+ lambda row: StationsHandler._distance(lat_a, lon_a, row['Lat'], row[
+ 'Lon']),
+ axis=1)
+
+ df_filtered.sort_values(by='distance', inplace=True)
+
+ # Filter rows within the radius
+ stations_within_radius = df_filtered[df_filtered['distance'] <= radius]
+
+ return stations_within_radius
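
`_distance` is the haversine formula with each sin²(x/2) term rewritten as (1 − cos x)/2; multiplying asin(√h) by the diameter supplies the usual 2R factor. A self-contained check using the same constants:

```python
# Standalone haversine check mirroring StationsHandler._distance.
from math import asin, cos, pi, sqrt

DEG_TO_RAD = pi / 180
DIAM_OF_EARTH = 12756  # km, i.e. 2R


def haversine_km(lat_a, lon_a, lat_b, lon_b):
    h = (0.5 - cos((lat_b - lat_a) * DEG_TO_RAD) / 2 +
         cos(lat_a * DEG_TO_RAD) * cos(lat_b * DEG_TO_RAD) *
         (1 - cos((lon_b - lon_a) * DEG_TO_RAD)) / 2)
    return DIAM_OF_EARTH * asin(sqrt(h))


# One degree of longitude at ~40.5 N spans roughly 85 km.
print(round(haversine_km(40.47, -74.01, 40.47, -73.01), 1))  # ~84.7
```
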
diff --git a/ndbc_api/api/handlers/opendap/__init__.py b/ndbc_api/api/handlers/opendap/__init__.py
new file mode 100644
index 0000000..e69de29
diff --git a/ndbc_api/api/handlers/opendap/data.py b/ndbc_api/api/handlers/opendap/data.py
new file mode 100644
index 0000000..08a2468
--- /dev/null
+++ b/ndbc_api/api/handlers/opendap/data.py
@@ -0,0 +1,185 @@
+from datetime import datetime, timedelta
+from typing import Any
+
+import xarray
+
+from ndbc_api.api.handlers._base import BaseHandler
+from ndbc_api.api.parsers.opendap.adcp import AdcpParser
+from ndbc_api.api.parsers.opendap.cwind import CwindParser
+from ndbc_api.api.parsers.opendap.ocean import OceanParser
+from ndbc_api.api.parsers.opendap.pwind import PwindParser
+from ndbc_api.api.parsers.opendap.stdmet import StdmetParser
+from ndbc_api.api.parsers.opendap.swden import SwdenParser
+from ndbc_api.api.parsers.opendap.wlevel import WlevelParser
+from ndbc_api.api.requests.opendap.adcp import AdcpRequest
+from ndbc_api.api.requests.opendap.cwind import CwindRequest
+from ndbc_api.api.requests.opendap.ocean import OceanRequest
+from ndbc_api.api.requests.opendap.pwind import PwindRequest
+from ndbc_api.api.requests.opendap.stdmet import StdmetRequest
+from ndbc_api.api.requests.opendap.swden import SwdenRequest
+from ndbc_api.api.requests.opendap.wlevel import WlevelRequest
+from ndbc_api.exceptions import RequestException, ResponseException
+
+
+class OpenDapDataHandler(BaseHandler):
+
+ @classmethod
+ def adcp(
+ cls,
+ handler: Any,
+ station_id: str,
+ start_time: datetime = datetime.now() - timedelta(days=30),
+ end_time: datetime = datetime.now(),
+ use_timestamp: bool = True,
+ ) -> xarray.Dataset:
+ """adcp"""
+ try:
+ reqs = AdcpRequest.build_request(station_id=station_id,
+ start_time=start_time,
+ end_time=end_time)
+ except Exception as e:
+ raise RequestException('Failed to build request.') from e
+ try:
+ resps = handler.handle_requests(station_id=station_id, reqs=reqs)
+ except Exception as e:
+ raise ResponseException('Failed to execute requests.') from e
+ return AdcpParser.nc_from_responses(responses=resps,
+ use_timestamp=use_timestamp)
+
+ @classmethod
+ def cwind(
+ cls,
+ handler: Any,
+ station_id: str,
+ start_time: datetime = datetime.now() - timedelta(days=30),
+ end_time: datetime = datetime.now(),
+ use_timestamp: bool = True,
+ ) -> xarray.Dataset:
+ """cwind"""
+ try:
+ reqs = CwindRequest.build_request(station_id=station_id,
+ start_time=start_time,
+ end_time=end_time)
+ except Exception as e:
+ raise RequestException('Failed to build request.') from e
+ try:
+ resps = handler.handle_requests(station_id=station_id, reqs=reqs)
+ except Exception as e:
+ raise ResponseException('Failed to execute requests.') from e
+ return CwindParser.nc_from_responses(responses=resps,
+ use_timestamp=use_timestamp)
+
+ @classmethod
+ def ocean(
+ cls,
+ handler: Any,
+ station_id: str,
+ start_time: datetime = datetime.now() - timedelta(days=30),
+ end_time: datetime = datetime.now(),
+ use_timestamp: bool = True,
+ ) -> xarray.Dataset:
+ """ocean"""
+ try:
+ reqs = OceanRequest.build_request(station_id=station_id,
+ start_time=start_time,
+ end_time=end_time)
+ except Exception as e:
+ raise RequestException('Failed to build request.') from e
+ try:
+ resps = handler.handle_requests(station_id=station_id, reqs=reqs)
+ except Exception as e:
+ raise ResponseException('Failed to execute requests.') from e
+ return OceanParser.nc_from_responses(responses=resps,
+ use_timestamp=use_timestamp)
+
+ @classmethod
+ def pwind(
+ cls,
+ handler: Any,
+ station_id: str,
+ start_time: datetime = datetime.now() - timedelta(days=30),
+ end_time: datetime = datetime.now(),
+ use_timestamp: bool = True,
+ ) -> xarray.Dataset:
+ """pwind"""
+ try:
+ reqs = PwindRequest.build_request(station_id=station_id,
+ start_time=start_time,
+ end_time=end_time)
+ except Exception as e:
+ raise RequestException('Failed to build request.') from e
+ try:
+ resps = handler.handle_requests(station_id=station_id, reqs=reqs)
+ except Exception as e:
+ raise ResponseException('Failed to execute requests.') from e
+ return PwindParser.nc_from_responses(responses=resps,
+ use_timestamp=use_timestamp)
+
+ @classmethod
+ def stdmet(
+ cls,
+ handler: Any,
+ station_id: str,
+ start_time: datetime = datetime.now() - timedelta(days=30),
+ end_time: datetime = datetime.now(),
+ use_timestamp: bool = True,
+ ) -> xarray.Dataset:
+ """stdmet"""
+ try:
+ reqs = StdmetRequest.build_request(station_id=station_id,
+ start_time=start_time,
+ end_time=end_time)
+ except Exception as e:
+ raise RequestException('Failed to build request.') from e
+ try:
+ resps = handler.handle_requests(station_id=station_id, reqs=reqs)
+ except Exception as e:
+ raise ResponseException('Failed to execute requests.') from e
+ return StdmetParser.nc_from_responses(responses=resps,
+ use_timestamp=use_timestamp)
+
+ @classmethod
+ def swden(
+ cls,
+ handler: Any,
+ station_id: str,
+ start_time: datetime = datetime.now() - timedelta(days=30),
+ end_time: datetime = datetime.now(),
+ use_timestamp: bool = True,
+ ) -> xarray.Dataset:
+ """swden"""
+ try:
+ reqs = SwdenRequest.build_request(station_id=station_id,
+ start_time=start_time,
+ end_time=end_time)
+ except Exception as e:
+ raise RequestException('Failed to build request.') from e
+ try:
+ resps = handler.handle_requests(station_id=station_id, reqs=reqs)
+ except Exception as e:
+ raise ResponseException('Failed to execute requests.') from e
+ return SwdenParser.nc_from_responses(responses=resps,
+ use_timestamp=use_timestamp)
+
+ @classmethod
+ def wlevel(
+ cls,
+ handler: Any,
+ station_id: str,
+ start_time: datetime = datetime.now() - timedelta(days=30),
+ end_time: datetime = datetime.now(),
+ use_timestamp: bool = True,
+ ) -> xarray.Dataset:
+ """wlevel"""
+ try:
+ reqs = WlevelRequest.build_request(station_id=station_id,
+ start_time=start_time,
+ end_time=end_time)
+ except Exception as e:
+ raise RequestException('Failed to build request.') from e
+ try:
+ resps = handler.handle_requests(station_id=station_id, reqs=reqs)
+ except Exception as e:
+ raise ResponseException('Failed to execute requests.') from e
+ return WlevelParser.nc_from_responses(responses=resps,
+ use_timestamp=use_timestamp)
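
Each method above ultimately hands the response body to `xarray.open_dataset`. A small round-trip sketch of that step with synthetic data; it assumes a netCDF-capable backend (e.g. scipy) is installed:

```python
# In-memory netCDF round trip standing in for a THREDDS response body.
import io

import numpy as np
import pandas as pd
import xarray

times = pd.date_range('2024-01-01', periods=3, freq='h')
ds = xarray.Dataset({'wind_spd': ('time', np.array([5.1, 6.3, 4.8]))},
                    coords={'time': times})
body = ds.to_netcdf()  # bytes, as returned in a response 'body'
reopened = xarray.open_dataset(io.BytesIO(body))
print(reopened['wind_spd'].values)  # [5.1 6.3 4.8]
```
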
diff --git a/ndbc_api/api/parsers/__init__.py b/ndbc_api/api/parsers/__init__.py
new file mode 100644
index 0000000..e69de29
diff --git a/ndbc_api/api/parsers/http/__init__.py b/ndbc_api/api/parsers/http/__init__.py
new file mode 100644
index 0000000..e69de29
diff --git a/ndbc_api/api/parsers/http/_base.py b/ndbc_api/api/parsers/http/_base.py
new file mode 100644
index 0000000..6465bf9
--- /dev/null
+++ b/ndbc_api/api/parsers/http/_base.py
@@ -0,0 +1,107 @@
+from io import StringIO
+from typing import List, Tuple
+
+import pandas as pd
+
+from ndbc_api.exceptions import ParserException
+
+
+class BaseParser:
+
+ HEADER_PREFIX = '#'
+ NAN_VALUES = ['MM']
+ DATE_PARSER = '%Y %m %d %H %M'
+ PARSE_DATES = [0, 1, 2, 3, 4]
+ INDEX_COL = False
+ REVERT_COL_NAMES = []
+
+ @classmethod
+ def df_from_responses(cls,
+ responses: List[dict],
+ use_timestamp: bool = True) -> pd.DataFrame:
+ components = []
+ for response in responses:
+ if response.get('status') == 200:
+ components.append(
+ cls._read_response(response, use_timestamp=use_timestamp))
+ df = pd.concat(components)
+ if use_timestamp:
+ try:
+ df = df.reset_index().drop_duplicates(subset='timestamp',
+ keep='first')
+ df = df.set_index('timestamp').sort_index()
+ except KeyError as e:
+ raise ParserException from e
+ return df
+
+ @classmethod
+ def _read_response(cls, response: dict,
+ use_timestamp: bool) -> pd.DataFrame:
+ body = response.get('body')
+ header, data = cls._parse_body(body)
+ names = cls._parse_header(header)
+ if not data:
+ return pd.DataFrame()
+ # check that parsed names match parsed values or revert
+ if len([v.strip() for v in data[0].strip('\n').split(' ') if v
+ ]) != len(names):
+ names = cls.REVERT_COL_NAMES
+ if '(' in data[0]:
+ data = cls._clean_data(data)
+
+ try:
+ parse_dates = False
+ date_format = None
+ if use_timestamp:
+ parse_dates = [cls.PARSE_DATES]
+ date_format = cls.DATE_PARSER
+ df = pd.read_csv(
+ StringIO('\n'.join(data)),
+ names=names,
+ sep=r'\s+',
+ na_values=cls.NAN_VALUES,
+ index_col=cls.INDEX_COL,
+ parse_dates=parse_dates,
+ date_format=date_format,
+ )
+ if use_timestamp:
+ df.index.name = 'timestamp'
+
+ except (NotImplementedError, TypeError, ValueError) as e:
+ print(e)
+ return pd.DataFrame()
+
+ return df
+
+ @staticmethod
+ def _parse_body(body: str) -> Tuple[List[str], List[str]]:
+ buf = StringIO(body)
+ data = []
+ header = []
+
+ line = buf.readline()
+ while line:
+ if line.startswith('#'):
+ header.append(line)
+ else:
+ data.append(line)
+ line = buf.readline()
+
+ return header, data
+
+ @staticmethod
+ def _parse_header(header: List[str]) -> List[str]:
+ names = ([n for n in header[0].strip('#').strip('\n').split(' ') if n]
+ if isinstance(header, list) and len(header) > 0 else None)
+ return names # pass 'None' to pd.read_csv on error
+
+ @staticmethod
+ def _clean_data(data: List[str]) -> List[str]:
+ vals = [
+ ' '.join([v
+ for v in r.split(' ')
+ if v and '(' not in v])
+ for r in data
+ ]
+ return vals or None # pass 'None' to pd.read_csv on error
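
The parse path above reduces to: split the body on the `#` header prefix, take column names from the first header line, and feed the remaining whitespace-delimited rows to `pandas.read_csv` with `MM` mapped to NaN (the real parser additionally combines the first five columns into a `timestamp` index). A compact illustration on a synthetic NDBC-style body:

```python
# Core of the BaseParser parse on a made-up realtime-format body.
from io import StringIO

import pandas as pd

body = ('#YY  MM DD hh mm WVHT\n'
        '#yr  mo dy hr mn m\n'
        '2024 01 01 00 00 1.5\n'
        '2024 01 01 01 00 MM\n')
lines = body.splitlines()
header = [ln for ln in lines if ln.startswith('#')]
data = [ln for ln in lines if not ln.startswith('#')]
names = [n for n in header[0].strip('#').split(' ') if n]
df = pd.read_csv(StringIO('\n'.join(data)), names=names, sep=r'\s+',
                 na_values=['MM'], index_col=False)
print(df)  # WVHT is 1.5, then NaN
```
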
diff --git a/ndbc_api/api/parsers/http/_html.py b/ndbc_api/api/parsers/http/_html.py
new file mode 100644
index 0000000..2159af0
--- /dev/null
+++ b/ndbc_api/api/parsers/http/_html.py
@@ -0,0 +1,21 @@
+from typing import List
+
+import pandas as pd
+from bs4 import BeautifulSoup
+
+from ndbc_api.api.parsers.http._base import BaseParser
+
+
+class HtmlParser(BaseParser):
+
+ INDEX_COL = None
+
+ @classmethod
+ def dfs_from_responses(cls, responses: List[dict]) -> List[pd.DataFrame]:
+ components = []
+ for response in responses:
+ if response.get('status') == 200:
+ soup = BeautifulSoup(response.get('body'), 'html.parser')
+ tables = soup.find_all('table')
+ components.extend(pd.read_html(str(tables), flavor='bs4'))
+ return components
diff --git a/ndbc_api/api/parsers/http/_station.py b/ndbc_api/api/parsers/http/_station.py
new file mode 100644
index 0000000..2d2634a
--- /dev/null
+++ b/ndbc_api/api/parsers/http/_station.py
@@ -0,0 +1,51 @@
+import os
+from calendar import month_abbr
+from collections import defaultdict
+from datetime import datetime
+from typing import List, Tuple
+
+import bs4
+
+
+class StationParser:
+
+ BASE_URL = 'https://www.ndbc.noaa.gov'
+
+ @classmethod
+ def _parse_li_urls(cls,
+ urls: List[bs4.element.Tag]) -> List[Tuple[str, str]]:
+ parsed = []
+ if 'MOCKDATE' in os.environ:
+ now = datetime.strptime(os.getenv('MOCKDATE'), '%Y-%m-%d').date()
+ else:
+ now = datetime.now()
+ current_year = now.year
+ for raw_url in urls:
+ name = raw_url.text.strip()
+ name = f'{name} {current_year}' if name in month_abbr else name
+ url = f'{cls.BASE_URL}{raw_url.get("href")}'
+ parsed.append((name, url))
+ return parsed
+
+ @classmethod
+ def _build_available_measurements(
+ cls, line_items: List[bs4.element.Tag]) -> dict:
+ # unpack nested lists
+ nested = [sub_li for li in line_items for sub_li in li.find_all('li')]
+ nested = [
+ li for li in nested
+ if li.get('href') is not None and 'plot' not in li.get('href')
+ ]
+ line_items = [li for li in line_items if len(li.find_all('li')) == 0]
+ line_items.extend(nested)
+ available_measurements = defaultdict(dict)
+ for li in line_items:
+ if 'Search' in li.text:
+ break # end of available measurements
+ new_measurement = cls._parse_list_item(li)
+ if new_measurement:
+ k = list(new_measurement.keys())[0] # guaranteed one key
+ else:
+ continue
+ available_measurements[k].update(new_measurement[k])
+ return dict(available_measurements)
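
`_parse_li_urls` pairs each anchor's text with an absolute URL (and appends the current year to bare month names). A tiny bs4 sketch on illustrative markup; the href is hypothetical:

```python
# What _parse_li_urls produces for a single month link.
import bs4

BASE_URL = 'https://www.ndbc.noaa.gov'
html = '<li><a href="/data/stdmet/Jan/41001.txt">Jan</a></li>'
soup = bs4.BeautifulSoup(html, 'html.parser')
parsed = [(a.text.strip(), f'{BASE_URL}{a.get("href")}')
          for a in soup.find_all('a')]
print(parsed)
# [('Jan', 'https://www.ndbc.noaa.gov/data/stdmet/Jan/41001.txt')]
```
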
diff --git a/ndbc_api/api/parsers/http/_xml.py b/ndbc_api/api/parsers/http/_xml.py
new file mode 100644
index 0000000..486b306
--- /dev/null
+++ b/ndbc_api/api/parsers/http/_xml.py
@@ -0,0 +1,28 @@
+import xml.etree.ElementTree as ET
+
+from ndbc_api.api.parsers.http._base import BaseParser
+from ndbc_api.exceptions import ParserException
+
+
+class XMLParser(BaseParser):
+ """
+ Parser for XML data.
+ """
+
+ @classmethod
+ def root_from_response(cls, response: dict) -> ET.ElementTree:
+ """Parse the response body (string-valued XML) to ET
+
+ Args:
+ response (dict): The successful HTTP response
+ """
+
+ body = response.get('body')
+
+ try:
+ root = ET.fromstring(body)
+ return ET.ElementTree(root)
+ except Exception as e:
+ raise ParserException(
+ "failed to obtain XML root from response body") from e
diff --git a/ndbc_api/api/parsers/http/active_stations.py b/ndbc_api/api/parsers/http/active_stations.py
new file mode 100644
index 0000000..c2ea1ef
--- /dev/null
+++ b/ndbc_api/api/parsers/http/active_stations.py
@@ -0,0 +1,66 @@
+import xml.etree.ElementTree as ET
+import pandas as pd
+
+from ndbc_api.exceptions import ParserException
+from ndbc_api.api.parsers.http._xml import XMLParser
+
+
+class ActiveStationsParser(XMLParser):
+ """
+ Parser for active station information from XML data.
+ """
+
+ @classmethod
+ def df_from_response(cls,
+ response: dict,
+ use_timestamp: bool = False) -> pd.DataFrame:
+ """
+ Reads the response body and parses it into a DataFrame.
+
+ Args:
+ response (dict): The response dictionary containing the 'body' key.
+ use_timestamp (bool): Flag to indicate if the timestamp should be used as an index (not applicable here).
+
+ Returns:
+ pd.DataFrame: The parsed DataFrame containing station information.
+ """
+ root = super(ActiveStationsParser, cls).root_from_response(response)
+ try:
+ station_data = []
+ for station in root.findall('station'):
+ station_info = {
+ 'Station':
+ station.get('id'),
+ 'Lat':
+ float(station.get('lat')),
+ 'Lon':
+ float(station.get('lon')),
+ 'Elevation':
+ float(station.get('elev'))
+ if station.get('elev') else pd.NA,
+ 'Name':
+ station.get('name'),
+ 'Owner':
+ station.get('owner'),
+ 'Program':
+ station.get('pgm'),
+ 'Type':
+ station.get('type'),
+ 'Includes Meteorology':
+ station.get('met') == 'y',
+ 'Includes Currents':
+ station.get('currents') == 'y',
+ 'Includes Water Quality':
+ station.get('waterquality') == 'y',
+ 'DART Program':
+ station.get('dart') == 'y'
+ }
+ station_data.append(station_info)
+
+ df = pd.DataFrame(
+ station_data) # Create DataFrame from the extracted data
+
+ except ET.ParseError as e:
+ raise ParserException(f"Error parsing XML data: {e}") from e
+
+ return df
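
The parser is a straight attribute-to-column mapping over `<station>` elements. A sketch on a two-station synthetic snippet of the `activestations.xml` schema (coordinates and names are made up):

```python
# Attribute extraction of the kind ActiveStationsParser performs.
import xml.etree.ElementTree as ET

import pandas as pd

xml = ('<stations>'
       '<station id="41001" lat="34.7" lon="-72.7" name="EAST HATTERAS" '
       'owner="NDBC" pgm="IOOS" type="buoy" met="y"/>'
       '<station id="41004" lat="32.5" lon="-79.1" name="EDISTO" '
       'owner="NDBC" pgm="IOOS" type="buoy" met="n"/>'
       '</stations>')
root = ET.fromstring(xml)
rows = [{'Station': s.get('id'),
         'Lat': float(s.get('lat')),
         'Lon': float(s.get('lon')),
         'Includes Meteorology': s.get('met') == 'y'}
        for s in root.findall('station')]
print(pd.DataFrame(rows))
```
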
diff --git a/ndbc_api/api/parsers/http/adcp.py b/ndbc_api/api/parsers/http/adcp.py
new file mode 100644
index 0000000..478319e
--- /dev/null
+++ b/ndbc_api/api/parsers/http/adcp.py
@@ -0,0 +1,138 @@
+from typing import List
+
+import pandas as pd
+
+from ndbc_api.api.parsers.http._base import BaseParser
+
+
+class AdcpParser(BaseParser):
+
+ INDEX_COL = 0
+ NAN_VALUES = None
+ REVERT_COL_NAMES = [
+ 'YY',
+ 'MM',
+ 'DD',
+ 'hh',
+ 'mm',
+ 'DEP01',
+ 'DIR01',
+ 'SPD01',
+ 'DEP02',
+ 'DIR02',
+ 'SPD02',
+ 'DEP03',
+ 'DIR03',
+ 'SPD03',
+ 'DEP04',
+ 'DIR04',
+ 'SPD04',
+ 'DEP05',
+ 'DIR05',
+ 'SPD05',
+ 'DEP06',
+ 'DIR06',
+ 'SPD06',
+ 'DEP07',
+ 'DIR07',
+ 'SPD07',
+ 'DEP08',
+ 'DIR08',
+ 'SPD08',
+ 'DEP09',
+ 'DIR09',
+ 'SPD09',
+ 'DEP10',
+ 'DIR10',
+ 'SPD10',
+ 'DEP11',
+ 'DIR11',
+ 'SPD11',
+ 'DEP12',
+ 'DIR12',
+ 'SPD12',
+ 'DEP13',
+ 'DIR13',
+ 'SPD13',
+ 'DEP14',
+ 'DIR14',
+ 'SPD14',
+ 'DEP15',
+ 'DIR15',
+ 'SPD15',
+ 'DEP16',
+ 'DIR16',
+ 'SPD16',
+ 'DEP17',
+ 'DIR17',
+ 'SPD17',
+ 'DEP18',
+ 'DIR18',
+ 'SPD18',
+ 'DEP19',
+ 'DIR19',
+ 'SPD19',
+ 'DEP20',
+ 'DIR20',
+ 'SPD20',
+ 'DEP21',
+ 'DIR21',
+ 'SPD21',
+ 'DEP22',
+ 'DIR22',
+ 'SPD22',
+ 'DEP23',
+ 'DIR23',
+ 'SPD23',
+ 'DEP24',
+ 'DIR24',
+ 'SPD24',
+ 'DEP25',
+ 'DIR25',
+ 'SPD25',
+ 'DEP26',
+ 'DIR26',
+ 'SPD26',
+ 'DEP27',
+ 'DIR27',
+ 'SPD27',
+ 'DEP28',
+ 'DIR28',
+ 'SPD28',
+ 'DEP29',
+ 'DIR29',
+ 'SPD29',
+ 'DEP30',
+ 'DIR30',
+ 'SPD30',
+ 'DEP31',
+ 'DIR31',
+ 'SPD31',
+ 'DEP32',
+ 'DIR32',
+ 'SPD32',
+ 'DEP33',
+ 'DIR33',
+ 'SPD33',
+ 'DEP34',
+ 'DIR34',
+ 'SPD34',
+ 'DEP35',
+ 'DIR35',
+ 'SPD35',
+ 'DEP36',
+ 'DIR36',
+ 'SPD36',
+ 'DEP37',
+ 'DIR37',
+ 'SPD37',
+ 'DEP38',
+ 'DIR38',
+ 'SPD38',
+ ]
+
+ @classmethod
+ def df_from_responses(cls, responses: List[dict],
+ use_timestamp: bool) -> pd.DataFrame:
+ return super(AdcpParser, cls).df_from_responses(responses,
+ use_timestamp)
diff --git a/ndbc_api/api/parsers/http/cwind.py b/ndbc_api/api/parsers/http/cwind.py
new file mode 100644
index 0000000..48616a9
--- /dev/null
+++ b/ndbc_api/api/parsers/http/cwind.py
@@ -0,0 +1,17 @@
+from typing import List
+
+import pandas as pd
+
+from ndbc_api.api.parsers.http._base import BaseParser
+
+
+class CwindParser(BaseParser):
+
+ INDEX_COL = 0
+ NAN_VALUES = [99.0, 999, 9999, 9999.0, 'MM']
+
+ @classmethod
+ def df_from_responses(cls, responses: List[dict],
+ use_timestamp: bool) -> pd.DataFrame:
+ return super(CwindParser,
+ cls).df_from_responses(responses, use_timestamp)
diff --git a/ndbc_api/api/parsers/http/historical_stations.py b/ndbc_api/api/parsers/http/historical_stations.py
new file mode 100644
index 0000000..3333e43
--- /dev/null
+++ b/ndbc_api/api/parsers/http/historical_stations.py
@@ -0,0 +1,75 @@
+import xml.etree.ElementTree as ET
+import pandas as pd
+
+from ndbc_api.exceptions import ParserException
+from ndbc_api.api.parsers.http._xml import XMLParser
+
+
+class HistoricalStationsParser(XMLParser):
+ """
+ Parser for historical station information from XML data.
+ """
+
+ @classmethod
+ def df_from_response(cls,
+ response: dict,
+ use_timestamp: bool = False) -> pd.DataFrame:
+ """
+ Reads the response body and parses it into a DataFrame.
+
+ Args:
+ response (dict): The response dictionary containing the 'body' key.
+ use_timestamp (bool): Flag to indicate if the timestamp should be used as an index (not applicable here).
+
+ Returns:
+ pd.DataFrame: The parsed DataFrame containing station information.
+ """
+ root = super(HistoricalStationsParser, cls).root_from_response(response)
+ try:
+ station_data = []
+ for station in root.findall('station'):
+ station_id = station.get('id')
+ station_name = station.get('name')
+ station_owner = station.get('owner')
+ station_program = station.get('pgm')
+ station_type = station.get('type')
+
+ for history in station.findall('history'):
+ station_info = {
+ 'Station':
+ station_id,
+ 'Lat':
+ float(history.get('lat')),
+ 'Lon':
+ float(history.get('lng')),
+ 'Elevation':
+ float(history.get('elev'))
+ if history.get('elev') else pd.NA,
+ 'Name':
+ station_name,
+ 'Owner':
+ station_owner,
+ 'Program':
+ station_program,
+ 'Type':
+ station_type,
+ 'Includes Meteorology':
+ history.get('met') == 'y',
+ 'Hull Type':
+ history.get('hull'),
+ 'Anemometer Height':
+ float(history.get('anemom_height'))
+ if history.get('anemom_height') else pd.NA,
+ 'Start Date':
+ history.get('start'),
+ 'End Date':
+ history.get('stop'),
+ }
+ station_data.append(station_info)
+
+ df = pd.DataFrame(station_data)
+
+ except ET.ParseError as e:
+ raise ParserException(f"Error parsing XML data: {e}") from e
+
+ return df
diff --git a/ndbc_api/api/parsers/http/ocean.py b/ndbc_api/api/parsers/http/ocean.py
new file mode 100644
index 0000000..568812f
--- /dev/null
+++ b/ndbc_api/api/parsers/http/ocean.py
@@ -0,0 +1,16 @@
+from typing import List
+
+import pandas as pd
+
+from ndbc_api.api.parsers.http._base import BaseParser
+
+
+class OceanParser(BaseParser):
+
+ INDEX_COL = 0
+
+ @classmethod
+ def df_from_responses(cls, responses: List[dict],
+ use_timestamp: bool) -> pd.DataFrame:
+ return super(OceanParser,
+ cls).df_from_responses(responses, use_timestamp)
diff --git a/ndbc_api/api/parsers/http/spec.py b/ndbc_api/api/parsers/http/spec.py
new file mode 100644
index 0000000..7266c79
--- /dev/null
+++ b/ndbc_api/api/parsers/http/spec.py
@@ -0,0 +1,17 @@
+from typing import List
+
+import pandas as pd
+
+from ndbc_api.api.parsers.http._base import BaseParser
+
+
+class SpecParser(BaseParser):
+
+ INDEX_COL = 0
+ NAN_VALUES = ['N/A']
+
+ @classmethod
+ def df_from_responses(cls, responses: List[dict],
+ use_timestamp: bool) -> pd.DataFrame:
+ return super(SpecParser, cls).df_from_responses(responses,
+ use_timestamp)
diff --git a/ndbc_api/api/parsers/http/station_historical.py b/ndbc_api/api/parsers/http/station_historical.py
new file mode 100644
index 0000000..8c0fb29
--- /dev/null
+++ b/ndbc_api/api/parsers/http/station_historical.py
@@ -0,0 +1,34 @@
+import re
+
+import bs4
+
+from ndbc_api.api.parsers.http._station import StationParser
+
+
+class HistoricalParser(StationParser):
+
+ LIST_IDENTIFIER = re.compile(
+ 'Available historical data for station .{5} include:')
+
+ @classmethod
+ def available_measurements(cls, response: dict) -> dict:
+ if response.get('status') == 200:
+ soup = bs4.BeautifulSoup(response.get('body'), 'html.parser')
+ p_tag = soup.find('p', text=cls.LIST_IDENTIFIER)
+ line_items = p_tag.find_next_siblings('ul')[0].find_all('li')
+ return cls._build_available_measurements(line_items=line_items)
+ else:
+ return dict()
+
+ @classmethod
+ def _parse_list_item(cls, li: bs4.element.Tag) -> dict:
+ measurement_item = dict()
+ try:
+ title = li.find('b').text.strip(': ')
+ parsed = cls._parse_li_urls(li.find_all('a'))
+ except AttributeError:
+ return measurement_item
+ measurement_item[title] = dict()
+ for name, url in parsed:
+ measurement_item[title][name] = url
+ return measurement_item
diff --git a/ndbc_api/api/parsers/http/station_metadata.py b/ndbc_api/api/parsers/http/station_metadata.py
new file mode 100644
index 0000000..19096f8
--- /dev/null
+++ b/ndbc_api/api/parsers/http/station_metadata.py
@@ -0,0 +1,49 @@
+from collections import ChainMap
+from typing import List
+
+import bs4
+
+from ndbc_api.api.parsers.http._station import StationParser
+
+
+class MetadataParser(StationParser):
+
+ @classmethod
+ def metadata(cls, response: dict) -> dict:
+ if response.get('status') == 200:
+ soup = bs4.BeautifulSoup(response.get('body'), 'html.parser')
+ metadata = cls._meta_from_response(soup=soup)
+ return dict(ChainMap(*metadata))
+ else:
+ return dict()
+
+ @classmethod
+ def _meta_from_response(cls, soup: bs4.BeautifulSoup):
+ metadata = []
+ try:
+ metadata.append({'Name': soup.find('h1').text.strip()})
+ items = soup.find('div', id='stn_metadata').find_all('p')[0].text
+ items = items.split('\n\n')
+ assert len(items) == 2
+ except (AssertionError, AttributeError):
+ return metadata
+ metadata.extend(cls._parse_headers(items[0]))
+ metadata.extend(cls._parse_attrs(items[1]))
+ return metadata
+
+ @classmethod
+ def _parse_headers(cls, line_meta):
+ station_headers = []
+ headers = [i.strip() for i in line_meta.split('\n') if i]
+ station_headers.append({'Station Type': ', '.join(headers[0:-1])})
+ station_headers.append({'Location': headers[-1]})
+ return station_headers
+
+ @classmethod
+ def _parse_attrs(cls, line_attr: str) -> List[dict]:
+ station_attrs = []
+ attrs = [i for i in line_attr.split('\n') if i]
+ for attr in attrs:
+ k, v = attr.split(': ')
+ station_attrs.append({k: v})
+ return station_attrs
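
`_parse_attrs` splits each remaining metadata line on `': '` into single-entry dicts, which `metadata` later merges through `ChainMap`. A short illustration on representative lines (not captured from a live station page):

```python
# The key/value split applied to the free-text metadata block.
line_attr = ('Water depth: 30 m\n'
             'Watch circle radius: 45 yards\n')
station_attrs = []
for attr in (i for i in line_attr.split('\n') if i):
    k, v = attr.split(': ')
    station_attrs.append({k: v})
print(station_attrs)
# [{'Water depth': '30 m'}, {'Watch circle radius': '45 yards'}]
```
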
diff --git a/ndbc_api/api/parsers/http/station_realtime.py b/ndbc_api/api/parsers/http/station_realtime.py
new file mode 100644
index 0000000..46654d0
--- /dev/null
+++ b/ndbc_api/api/parsers/http/station_realtime.py
@@ -0,0 +1,29 @@
+import bs4
+
+from ndbc_api.api.parsers.http._station import StationParser
+
+
+class RealtimeParser(StationParser):
+
+ @classmethod
+ def available_measurements(cls, response: dict) -> dict:
+ if response.get('status') == 200:
+ soup = bs4.BeautifulSoup(response.get('body'), 'html.parser')
+ items = soup.find('section', {"class": "data"})
+ line_items = items.find_all('li')
+ return cls._build_available_measurements(line_items=line_items)
+ else:
+ return dict()
+
+ @classmethod
+ def _parse_list_item(cls, li: bs4.element.Tag) -> dict:
+ measurement_item = dict()
+ try:
+ title = li.text.split('\n')[0]
+ parsed = cls._parse_li_urls(li.find_all('a'))
+ except AttributeError:
+ return measurement_item
+ measurement_item[title] = dict()
+ for name, url in parsed:
+ measurement_item[title][name] = url
+ return measurement_item
diff --git a/ndbc_api/api/parsers/http/stdmet.py b/ndbc_api/api/parsers/http/stdmet.py
new file mode 100644
index 0000000..f81abf4
--- /dev/null
+++ b/ndbc_api/api/parsers/http/stdmet.py
@@ -0,0 +1,17 @@
+from typing import List
+
+import pandas as pd
+
+from ndbc_api.api.parsers.http._base import BaseParser
+
+
+class StdmetParser(BaseParser):
+
+ INDEX_COL = 0
+ NAN_VALUES = ['MM', 99.0, 999, 9999, 9999.0]
+
+ @classmethod
+ def df_from_responses(cls, responses: List[dict],
+ use_timestamp: bool) -> pd.DataFrame:
+ return super(StdmetParser,
+ cls).df_from_responses(responses, use_timestamp)
diff --git a/ndbc_api/api/parsers/http/supl.py b/ndbc_api/api/parsers/http/supl.py
new file mode 100644
index 0000000..096c93a
--- /dev/null
+++ b/ndbc_api/api/parsers/http/supl.py
@@ -0,0 +1,17 @@
+from typing import List
+
+import pandas as pd
+
+from ndbc_api.api.parsers.http._base import BaseParser
+
+
+class SuplParser(BaseParser):
+
+ INDEX_COL = 0
+ NAN_VALUES = [99.0, 999, 999.0, 9999, 9999.0, 'MM']
+
+ @classmethod
+ def df_from_responses(cls, responses: List[dict],
+ use_timestamp: bool) -> pd.DataFrame:
+ return super(SuplParser, cls).df_from_responses(responses,
+ use_timestamp)
diff --git a/ndbc_api/api/parsers/http/swden.py b/ndbc_api/api/parsers/http/swden.py
new file mode 100644
index 0000000..0514aa3
--- /dev/null
+++ b/ndbc_api/api/parsers/http/swden.py
@@ -0,0 +1,71 @@
+from typing import List
+
+import pandas as pd
+
+from ndbc_api.api.parsers.http._base import BaseParser
+
+
+class SwdenParser(BaseParser):
+
+ INDEX_COL = 0
+ NAN_VALUES = [99.0, 999, 999.0, 9999, 9999.0, 'MM']
+ REVERT_COL_NAMES = [
+ 'YY',
+ 'MM',
+ 'DD',
+ 'hh',
+ 'mm',
+ '.0200',
+ '.0325',
+ '.0375',
+ '.0425',
+ '.0475',
+ '.0525',
+ '.0575',
+ '.0625',
+ '.0675',
+ '.0725',
+ '.0775',
+ '.0825',
+ '.0875',
+ '.0925',
+ '.1000',
+ '.1100',
+ '.1200',
+ '.1300',
+ '.1400',
+ '.1500',
+ '.1600',
+ '.1700',
+ '.1800',
+ '.1900',
+ '.2000',
+ '.2100',
+ '.2200',
+ '.2300',
+ '.2400',
+ '.2500',
+ '.2600',
+ '.2700',
+ '.2800',
+ '.2900',
+ '.3000',
+ '.3100',
+ '.3200',
+ '.3300',
+ '.3400',
+ '.3500',
+ '.3650',
+ '.3850',
+ '.4050',
+ '.4250',
+ '.4450',
+ '.4650',
+ '.4850',
+ ]
+
+ @classmethod
+ def df_from_responses(cls, responses: List[dict],
+ use_timestamp: bool) -> pd.DataFrame:
+ return super(SwdenParser,
+ cls).df_from_responses(responses, use_timestamp)
diff --git a/ndbc_api/api/parsers/http/swdir.py b/ndbc_api/api/parsers/http/swdir.py
new file mode 100644
index 0000000..fcb54f0
--- /dev/null
+++ b/ndbc_api/api/parsers/http/swdir.py
@@ -0,0 +1,71 @@
+from typing import List
+
+import pandas as pd
+
+from ndbc_api.api.parsers.http._base import BaseParser
+
+
+class SwdirParser(BaseParser):
+
+ INDEX_COL = 0
+ NAN_VALUES = [99.0, 999, 999.0, 9999, 9999.0, 'MM']
+ REVERT_COL_NAMES = [
+ 'YY',
+ 'MM',
+ 'DD',
+ 'hh',
+ 'mm',
+ '.0200',
+ '.0325',
+ '.0375',
+ '.0425',
+ '.0475',
+ '.0525',
+ '.0575',
+ '.0625',
+ '.0675',
+ '.0725',
+ '.0775',
+ '.0825',
+ '.0875',
+ '.0925',
+ '.1000',
+ '.1100',
+ '.1200',
+ '.1300',
+ '.1400',
+ '.1500',
+ '.1600',
+ '.1700',
+ '.1800',
+ '.1900',
+ '.2000',
+ '.2100',
+ '.2200',
+ '.2300',
+ '.2400',
+ '.2500',
+ '.2600',
+ '.2700',
+ '.2800',
+ '.2900',
+ '.3000',
+ '.3100',
+ '.3200',
+ '.3300',
+ '.3400',
+ '.3500',
+ '.3650',
+ '.3850',
+ '.4050',
+ '.4250',
+ '.4450',
+ '.4650',
+ '.4850',
+ ]
+
+ @classmethod
+ def df_from_responses(cls, responses: List[dict],
+ use_timestamp: bool) -> pd.DataFrame:
+ df = super(SwdirParser, cls).df_from_responses(responses, use_timestamp)
+ return df
diff --git a/ndbc_api/api/parsers/http/swdir2.py b/ndbc_api/api/parsers/http/swdir2.py
new file mode 100644
index 0000000..85a0c99
--- /dev/null
+++ b/ndbc_api/api/parsers/http/swdir2.py
@@ -0,0 +1,72 @@
+from typing import List
+
+import pandas as pd
+
+from ndbc_api.api.parsers.http._base import BaseParser
+
+
+class Swdir2Parser(BaseParser):
+
+ INDEX_COL = 0
+ NAN_VALUES = [99.0, 999, 999.0, 9999, 9999.0, 'MM']
+ REVERT_COL_NAMES = [
+ 'YY',
+ 'MM',
+ 'DD',
+ 'hh',
+ 'mm',
+ '.0200',
+ '.0325',
+ '.0375',
+ '.0425',
+ '.0475',
+ '.0525',
+ '.0575',
+ '.0625',
+ '.0675',
+ '.0725',
+ '.0775',
+ '.0825',
+ '.0875',
+ '.0925',
+ '.1000',
+ '.1100',
+ '.1200',
+ '.1300',
+ '.1400',
+ '.1500',
+ '.1600',
+ '.1700',
+ '.1800',
+ '.1900',
+ '.2000',
+ '.2100',
+ '.2200',
+ '.2300',
+ '.2400',
+ '.2500',
+ '.2600',
+ '.2700',
+ '.2800',
+ '.2900',
+ '.3000',
+ '.3100',
+ '.3200',
+ '.3300',
+ '.3400',
+ '.3500',
+ '.3650',
+ '.3850',
+ '.4050',
+ '.4250',
+ '.4450',
+ '.4650',
+ '.4850',
+ ]
+
+ @classmethod
+ def df_from_responses(cls, responses: List[dict],
+ use_timestamp: bool) -> pd.DataFrame:
+ df = super(Swdir2Parser, cls).df_from_responses(responses,
+ use_timestamp)
+ return df
diff --git a/ndbc_api/api/parsers/http/swr1.py b/ndbc_api/api/parsers/http/swr1.py
new file mode 100644
index 0000000..3bd4e12
--- /dev/null
+++ b/ndbc_api/api/parsers/http/swr1.py
@@ -0,0 +1,71 @@
+from typing import List
+
+import pandas as pd
+
+from ndbc_api.api.parsers.http._base import BaseParser
+
+
+class Swr1Parser(BaseParser):
+
+ INDEX_COL = 0
+ NAN_VALUES = [99.0, 999, 999.0, 9999, 9999.0, 'MM']
+ REVERT_COL_NAMES = [
+ 'YY',
+ 'MM',
+ 'DD',
+ 'hh',
+ 'mm',
+ '.0200',
+ '.0325',
+ '.0375',
+ '.0425',
+ '.0475',
+ '.0525',
+ '.0575',
+ '.0625',
+ '.0675',
+ '.0725',
+ '.0775',
+ '.0825',
+ '.0875',
+ '.0925',
+ '.1000',
+ '.1100',
+ '.1200',
+ '.1300',
+ '.1400',
+ '.1500',
+ '.1600',
+ '.1700',
+ '.1800',
+ '.1900',
+ '.2000',
+ '.2100',
+ '.2200',
+ '.2300',
+ '.2400',
+ '.2500',
+ '.2600',
+ '.2700',
+ '.2800',
+ '.2900',
+ '.3000',
+ '.3100',
+ '.3200',
+ '.3300',
+ '.3400',
+ '.3500',
+ '.3650',
+ '.3850',
+ '.4050',
+ '.4250',
+ '.4450',
+ '.4650',
+ '.4850',
+ ]
+
+ @classmethod
+ def df_from_responses(cls, responses: List[dict],
+ use_timestamp: bool) -> pd.DataFrame:
+ df = super(Swr1Parser, cls).df_from_responses(responses, use_timestamp)
+ return df
diff --git a/ndbc_api/api/parsers/http/swr2.py b/ndbc_api/api/parsers/http/swr2.py
new file mode 100644
index 0000000..0b8d56b
--- /dev/null
+++ b/ndbc_api/api/parsers/http/swr2.py
@@ -0,0 +1,71 @@
+from typing import List
+
+import pandas as pd
+
+from ndbc_api.api.parsers.http._base import BaseParser
+
+
+class Swr2Parser(BaseParser):
+
+ INDEX_COL = 0
+ NAN_VALUES = [99.0, 999, 999.0, 9999, 9999.0, 'MM']
+ REVERT_COL_NAMES = [
+ 'YY',
+ 'MM',
+ 'DD',
+ 'hh',
+ 'mm',
+ '.0200',
+ '.0325',
+ '.0375',
+ '.0425',
+ '.0475',
+ '.0525',
+ '.0575',
+ '.0625',
+ '.0675',
+ '.0725',
+ '.0775',
+ '.0825',
+ '.0875',
+ '.0925',
+ '.1000',
+ '.1100',
+ '.1200',
+ '.1300',
+ '.1400',
+ '.1500',
+ '.1600',
+ '.1700',
+ '.1800',
+ '.1900',
+ '.2000',
+ '.2100',
+ '.2200',
+ '.2300',
+ '.2400',
+ '.2500',
+ '.2600',
+ '.2700',
+ '.2800',
+ '.2900',
+ '.3000',
+ '.3100',
+ '.3200',
+ '.3300',
+ '.3400',
+ '.3500',
+ '.3650',
+ '.3850',
+ '.4050',
+ '.4250',
+ '.4450',
+ '.4650',
+ '.4850',
+ ]
+
+ @classmethod
+ def df_from_responses(cls, responses: List[dict],
+ use_timestamp: bool) -> pd.DataFrame:
+ df = super(Swr2Parser, cls).df_from_responses(responses, use_timestamp)
+ return df
diff --git a/ndbc_api/api/parsers/opendap/__init__.py b/ndbc_api/api/parsers/opendap/__init__.py
new file mode 100644
index 0000000..e69de29
diff --git a/ndbc_api/api/parsers/opendap/_base.py b/ndbc_api/api/parsers/opendap/_base.py
new file mode 100644
index 0000000..027e225
--- /dev/null
+++ b/ndbc_api/api/parsers/opendap/_base.py
@@ -0,0 +1,73 @@
+from typing import List, Optional
+
+import xarray
+
+from ndbc_api.exceptions import ParserException
+from ndbc_api.utilities.opendap.dataset import concat_datasets
+
+
+class BaseParser:
+
+ TEMPORAL_DIM = 'time'
+ SPATIAL_DIMS = ['latitude', 'longitude']
+
+ @classmethod
+ def nc_from_responses(
+ cls,
+ responses: List[dict],
+ use_timestamp: bool = False,
+ ) -> xarray.Dataset:
+ """Build the netCDF dataset from the responses.
+
+ Args:
+ responses (List[dict]): All responses from the THREDDS
+ server regardless of content or HTTP code.
+
+ Returns:
+ xarray.open_dataset: The netCDF dataset.
+ """
+ datasets = []
+ for r in responses:
+ if isinstance(r, dict):
+ if 'status' in r and r.get("status") != 200:
+ continue
+ content = r['body']
+ else:
+ content = r
+ try:
+ xrds = xarray.open_dataset(content)
+ datasets.append(xrds)
+ except Exception as e:
+ raise ParserException from e
+
+ return cls._merge_datasets(datasets)
+
+ @classmethod
+ def _merge_datasets(
+ cls,
+ datasets: List[xarray.Dataset],
+ temporal_dim_name: Optional[str] = None,
+ ) -> xarray.Dataset:
+ """Joins multiple xarray datasets using their shared dimensions.
+
+ Handles cases where datasets might not have the same variables,
+ but requires that all datasets share the same dimensions. For
+ data stored on the THREDDS server, all datasets are expected to
+ have `time`, `latitude`, and `longitude` dimensions.
+
+ Args:
+ datasets (List[xarray.Dataset]): A list of xarray datasets
+ to join.
+ temporal_dim_name (Optional[str]): The name of the temporal
+ dimension to join on. Defaults to `time`.
+
+ Returns:
+ An xarray.Dataset containing the joined data.
+ """
+ return concat_datasets(
+ datasets,
+ temporal_dim_name if temporal_dim_name else cls.TEMPORAL_DIM,
+ )
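
`_merge_datasets` delegates to `concat_datasets` in `ndbc_api.utilities.opendap.dataset`; the expected behavior is concatenation along the shared temporal dimension, as plain `xarray.concat` would give. A synthetic sketch:

```python
# Concatenating two time-contiguous datasets along 'time'.
import numpy as np
import pandas as pd
import xarray


def make_ds(start):
    t = pd.date_range(start, periods=2, freq='h')
    return xarray.Dataset({'wvht': ('time', np.array([1.0, 1.2]))},
                          coords={'time': t})


merged = xarray.concat(
    [make_ds('2024-01-01'), make_ds('2024-01-01 02:00')], dim='time')
print(merged.sizes['time'])  # 4
```
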
diff --git a/ndbc_api/api/parsers/opendap/adcp.py b/ndbc_api/api/parsers/opendap/adcp.py
new file mode 100644
index 0000000..851bc40
--- /dev/null
+++ b/ndbc_api/api/parsers/opendap/adcp.py
@@ -0,0 +1,17 @@
+from typing import List
+
+import xarray
+
+from ndbc_api.api.parsers.opendap._base import BaseParser
+
+
+class AdcpParser(BaseParser):
+
+ TEMPORAL_DIM = 'time'
+ SPATIAL_DIMS = ['latitude', 'longitude', 'depth']
+
+ @classmethod
+ def nc_from_responses(cls,
+ responses: List[dict],
+ use_timestamp: bool = False) -> xarray.Dataset:
+ return super(AdcpParser, cls).nc_from_responses(responses)
diff --git a/ndbc_api/api/parsers/opendap/cwind.py b/ndbc_api/api/parsers/opendap/cwind.py
new file mode 100644
index 0000000..d32b355
--- /dev/null
+++ b/ndbc_api/api/parsers/opendap/cwind.py
@@ -0,0 +1,17 @@
+from typing import List
+
+import xarray
+
+from ndbc_api.api.parsers.opendap._base import BaseParser
+
+
+class CwindParser(BaseParser):
+
+ TEMPORAL_DIM = 'time'
+ SPATIAL_DIMS = ['latitude', 'longitude', 'instrument', 'water_depth']
+
+ @classmethod
+ def nc_from_responses(cls,
+ responses: List[dict],
+ use_timestamp: bool = False) -> xarray.Dataset:
+ return super(CwindParser, cls).nc_from_responses(responses)
diff --git a/ndbc_api/api/parsers/opendap/ocean.py b/ndbc_api/api/parsers/opendap/ocean.py
new file mode 100644
index 0000000..df5661e
--- /dev/null
+++ b/ndbc_api/api/parsers/opendap/ocean.py
@@ -0,0 +1,17 @@
+from typing import List
+
+import xarray
+
+from ndbc_api.api.parsers.opendap._base import BaseParser
+
+
+class OceanParser(BaseParser):
+
+ TEMPORAL_DIM = 'time'
+ SPATIAL_DIMS = ['latitude', 'longitude', 'instrument', 'water_depth']
+
+ @classmethod
+ def nc_from_responses(cls,
+ responses: List[dict],
+ use_timestamp: bool = False) -> xarray.Dataset:
+ return super(OceanParser, cls).nc_from_responses(responses)
diff --git a/ndbc_api/api/parsers/opendap/pwind.py b/ndbc_api/api/parsers/opendap/pwind.py
new file mode 100644
index 0000000..455b641
--- /dev/null
+++ b/ndbc_api/api/parsers/opendap/pwind.py
@@ -0,0 +1,17 @@
+from typing import List
+
+import xarray
+
+from ndbc_api.api.parsers.opendap._base import BaseParser
+
+
+class PwindParser(BaseParser):
+
+ TEMPORAL_DIM = 'time'
+ SPATIAL_DIMS = ['latitude', 'longitude', 'instrument', 'water_depth']
+
+ @classmethod
+ def nc_from_responses(cls,
+ responses: List[dict],
+ use_timestamp: bool = False) -> xarray.Dataset:
+ return super(PwindParser, cls).nc_from_responses(responses)
diff --git a/ndbc_api/api/parsers/opendap/stdmet.py b/ndbc_api/api/parsers/opendap/stdmet.py
new file mode 100644
index 0000000..0544912
--- /dev/null
+++ b/ndbc_api/api/parsers/opendap/stdmet.py
@@ -0,0 +1,17 @@
+from typing import List
+
+import xarray
+
+from ndbc_api.api.parsers.opendap._base import BaseParser
+
+
+class StdmetParser(BaseParser):
+
+ TEMPORAL_DIM = 'time'
+ SPATIAL_DIMS = ['latitude', 'longitude', 'instrument', 'water_depth']
+
+ @classmethod
+ def nc_from_responses(cls,
+ responses: List[dict],
+ use_timestamp: bool = False) -> xarray.Dataset:
+ return super(StdmetParser, cls).nc_from_responses(responses)
diff --git a/ndbc_api/api/parsers/opendap/swden.py b/ndbc_api/api/parsers/opendap/swden.py
new file mode 100644
index 0000000..8956d5e
--- /dev/null
+++ b/ndbc_api/api/parsers/opendap/swden.py
@@ -0,0 +1,17 @@
+from typing import List
+
+import xarray
+
+from ndbc_api.api.parsers.opendap._base import BaseParser
+
+
+class SwdenParser(BaseParser):
+
+ TEMPORAL_DIM = 'time'
+ SPATIAL_DIMS = ['latitude', 'longitude', 'frequency']
+
+ @classmethod
+ def nc_from_responses(cls,
+ responses: List[dict],
+ use_timestamp: bool = False) -> xarray.Dataset:
+ return super(SwdenParser, cls).nc_from_responses(responses)
diff --git a/ndbc_api/api/parsers/opendap/wlevel.py b/ndbc_api/api/parsers/opendap/wlevel.py
new file mode 100644
index 0000000..44709c9
--- /dev/null
+++ b/ndbc_api/api/parsers/opendap/wlevel.py
@@ -0,0 +1,17 @@
+from typing import List
+
+import xarray
+
+from ndbc_api.api.parsers.opendap._base import BaseParser
+
+
+class WlevelParser(BaseParser):
+
+ TEMPORAL_DIM = 'time'
+ SPATIAL_DIMS = ['latitude', 'longitude', 'frequency']
+
+ @classmethod
+ def nc_from_responses(cls,
+ responses: List[dict],
+ use_timestamp: bool = False) -> xarray.Dataset:
+ return super(WlevelParser, cls).nc_from_responses(responses)
diff --git a/ndbc_api/api/requests/__init__.py b/ndbc_api/api/requests/__init__.py
new file mode 100644
index 0000000..e69de29
diff --git a/ndbc_api/api/requests/http/__init__.py b/ndbc_api/api/requests/http/__init__.py
new file mode 100644
index 0000000..e69de29
diff --git a/ndbc_api/api/requests/http/_base.py b/ndbc_api/api/requests/http/_base.py
new file mode 100644
index 0000000..517653c
--- /dev/null
+++ b/ndbc_api/api/requests/http/_base.py
@@ -0,0 +1,105 @@
+import os
+from calendar import month_abbr
+from datetime import datetime, timedelta
+from typing import List
+
+from ndbc_api.api.requests.http._core import CoreRequest
+
+
+class BaseRequest(CoreRequest):
+
+ REAL_TIME_URL_PREFIX = 'data/realtime2/'
+ HISTORICAL_FILE_EXTENSION_SUFFIX = '.txt.gz'
+ HISTORICAL_DATA_PREFIX = '&dir=data/'
+ HISTORICAL_URL_PREFIX = 'view_text_file.php?filename='
+ HISTORICAL_SUFFIX = 'historical/'
+ HISTORICAL_IDENTIFIER = 'h'
+ FORMAT = ''
+ FILE_FORMAT = ''
+
+ @classmethod
+ def build_request(cls, station_id: str, start_time: datetime,
+ end_time: datetime) -> List[str]:
+
+ if 'MOCKDATE' in os.environ:
+ now = datetime.strptime(os.getenv('MOCKDATE'), '%Y-%m-%d')
+ else:
+ now = datetime.now()
+ is_historical = (now - start_time) >= timedelta(days=44)
+ if is_historical:
+ return cls._build_request_historical(
+ station_id=station_id,
+ start_time=start_time,
+ end_time=end_time,
+ now=now,
+ )
+ return cls._build_request_realtime(station_id=station_id)
+
+ @classmethod
+ def _build_request_historical(
+ cls,
+ station_id: str,
+ start_time: datetime,
+ end_time: datetime,
+ now: datetime,
+ ) -> List[str]:
+
+ def req_hist_helper_year(req_year: int) -> str:
+ return f'{cls.BASE_URL}{cls.HISTORICAL_URL_PREFIX}{station_id}{cls.HISTORICAL_IDENTIFIER}{req_year}{cls.HISTORICAL_FILE_EXTENSION_SUFFIX}{cls.HISTORICAL_DATA_PREFIX}{cls.HISTORICAL_SUFFIX}{cls.FORMAT}/'
+
+ def req_hist_helper_month(req_year: int, req_month: int) -> str:
+ month = month_abbr[req_month]
+ month = month.capitalize()
+ return f'{cls.BASE_URL}{cls.HISTORICAL_URL_PREFIX}{station_id}{req_month}{req_year}{cls.HISTORICAL_FILE_EXTENSION_SUFFIX}{cls.HISTORICAL_DATA_PREFIX}{cls.FORMAT}/{month}/'
+
+ def req_hist_helper_month_current(current_month: int) -> str:
+ month = month_abbr[current_month]
+ month = month.capitalize()
+ return f'{cls.BASE_URL}data/{cls.FORMAT}/{month}/{station_id.lower()}.txt'
+
+ if not cls.FORMAT: # pragma: no cover
+ raise ValueError(
+ 'Please provide a format for this historical data request, or call a formatted child class\'s method.'
+ )
+ # store request urls
+ reqs = []
+
+ current_year = now.year
+ has_realtime = (now - end_time) < timedelta(days=44)
+ months_req_year = (now - timedelta(days=44)).year
+ last_avail_month = (now - timedelta(days=44)).month
+
+ # handle year requests
+ for hist_year in range(int(start_time.year),
+ min(int(current_year),
+ int(end_time.year) + 1)):
+ reqs.append(req_hist_helper_year(hist_year))
+
+ # handle month requests
+ if end_time.year == months_req_year:
+ for hist_month in range(
+ int(start_time.month),
+ min(int(end_time.month), int(last_avail_month)) + 1):
+ reqs.append(req_hist_helper_month(months_req_year, hist_month))
+ if int(last_avail_month) <= (end_time.month):
+ reqs.append(req_hist_helper_month_current(
+ int(last_avail_month)))
+
+ if has_realtime:
+ reqs.append(
+ cls._build_request_realtime(
+ station_id=station_id)[0] # only one URL
+ )
+ return reqs
+
+ @classmethod
+ def _build_request_realtime(cls, station_id: str) -> List[str]:
+ if not cls.FILE_FORMAT:
+ raise ValueError(
+                'Please provide a file format for this realtime data request, or call a formatted child class\'s method.'
+ )
+
+ station_id = station_id.upper()
+ return [
+ f'{cls.BASE_URL}{cls.REAL_TIME_URL_PREFIX}{station_id}{cls.FILE_FORMAT}'
+ ]
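
The 44-day cutoff in `BaseRequest.build_request` decides whether a query resolves to archived files or to the single realtime feed. A minimal sketch of that routing, using a hypothetical `DemoRequest` subclass (illustrative only, not part of this diff):

```python
# Hypothetical subclass used only to demonstrate BaseRequest's URL routing.
from datetime import datetime, timedelta

from ndbc_api.api.requests.http._base import BaseRequest


class DemoRequest(BaseRequest):
    FORMAT = 'stdmet'
    FILE_FORMAT = '.txt'


# A window entirely inside the last 44 days yields one realtime URL.
recent = DemoRequest.build_request(
    station_id='tplm2',
    start_time=datetime.now() - timedelta(days=7),
    end_time=datetime.now(),
)
# ['https://www.ndbc.noaa.gov/data/realtime2/TPLM2.txt']

# A window reaching further back yields yearly (and possibly monthly)
# archive URLs, plus the realtime URL when the window touches the
# last 44 days.
archive = DemoRequest.build_request(
    station_id='tplm2',
    start_time=datetime(2021, 1, 1),
    end_time=datetime.now(),
)
```

The concrete subclasses added below (`AdcpRequest`, `CwindRequest`, and so on) follow exactly this pattern, differing only in their class-level constants.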
diff --git a/ndbc_api/api/requests/http/_core.py b/ndbc_api/api/requests/http/_core.py
new file mode 100644
index 0000000..8924923
--- /dev/null
+++ b/ndbc_api/api/requests/http/_core.py
@@ -0,0 +1,7 @@
+class CoreRequest:
+
+ BASE_URL = 'https://www.ndbc.noaa.gov/'
+
+ @classmethod
+ def build_request(cls) -> str:
+ return cls.BASE_URL
diff --git a/ndbc_api/api/requests/http/active_stations.py b/ndbc_api/api/requests/http/active_stations.py
new file mode 100644
index 0000000..0f87aa6
--- /dev/null
+++ b/ndbc_api/api/requests/http/active_stations.py
@@ -0,0 +1,10 @@
+from ndbc_api.api.requests.http._core import CoreRequest
+
+
+class ActiveStationsRequest(CoreRequest):
+
+ STATIONS_URL = 'activestations.xml'
+
+ @classmethod
+ def build_request(cls) -> str:
+ return f'{cls.BASE_URL}{cls.STATIONS_URL}'
diff --git a/ndbc_api/api/requests/http/adcp.py b/ndbc_api/api/requests/http/adcp.py
new file mode 100644
index 0000000..b9d062a
--- /dev/null
+++ b/ndbc_api/api/requests/http/adcp.py
@@ -0,0 +1,17 @@
+from datetime import datetime
+from typing import List
+
+from ndbc_api.api.requests.http._base import BaseRequest
+
+
+class AdcpRequest(BaseRequest):
+
+ FORMAT = 'adcp'
+ FILE_FORMAT = '.adcp'
+ HISTORICAL_IDENTIFIER = 'a'
+
+ @classmethod
+ def build_request(cls, station_id: str, start_time: datetime,
+ end_time: datetime) -> List[str]:
+ return super(AdcpRequest, cls).build_request(station_id, start_time,
+ end_time)
diff --git a/ndbc_api/api/requests/http/cwind.py b/ndbc_api/api/requests/http/cwind.py
new file mode 100644
index 0000000..dabc359
--- /dev/null
+++ b/ndbc_api/api/requests/http/cwind.py
@@ -0,0 +1,17 @@
+from datetime import datetime
+from typing import List
+
+from ndbc_api.api.requests.http._base import BaseRequest
+
+
+class CwindRequest(BaseRequest):
+
+ FORMAT = 'cwind'
+ FILE_FORMAT = '.cwind'
+ HISTORICAL_IDENTIFIER = 'c'
+
+ @classmethod
+ def build_request(cls, station_id: str, start_time: datetime,
+ end_time: datetime) -> List[str]:
+ return super(CwindRequest, cls).build_request(station_id, start_time,
+ end_time)
diff --git a/ndbc_api/api/requests/http/historical_stations.py b/ndbc_api/api/requests/http/historical_stations.py
new file mode 100644
index 0000000..022ac3b
--- /dev/null
+++ b/ndbc_api/api/requests/http/historical_stations.py
@@ -0,0 +1,10 @@
+from ndbc_api.api.requests.http._core import CoreRequest
+
+
+class HistoricalStationsRequest(CoreRequest):
+
+ STATIONS_URL = 'metadata/stationmetadata.xml'
+
+ @classmethod
+ def build_request(cls) -> str:
+ return f'{cls.BASE_URL}{cls.STATIONS_URL}'
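
Both station-list request classes compose a fixed path onto `CoreRequest.BASE_URL`, so their output is deterministic. A quick illustrative check:

```python
# Illustrative only: the station-list requests take no arguments.
from ndbc_api.api.requests.http.active_stations import ActiveStationsRequest
from ndbc_api.api.requests.http.historical_stations import HistoricalStationsRequest

print(ActiveStationsRequest.build_request())
# https://www.ndbc.noaa.gov/activestations.xml
print(HistoricalStationsRequest.build_request())
# https://www.ndbc.noaa.gov/metadata/stationmetadata.xml
```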
diff --git a/ndbc_api/api/requests/http/ocean.py b/ndbc_api/api/requests/http/ocean.py
new file mode 100644
index 0000000..da485ee
--- /dev/null
+++ b/ndbc_api/api/requests/http/ocean.py
@@ -0,0 +1,17 @@
+from datetime import datetime
+from typing import List
+
+from ndbc_api.api.requests.http._base import BaseRequest
+
+
+class OceanRequest(BaseRequest):
+
+ FORMAT = 'ocean'
+ FILE_FORMAT = '.ocean'
+ HISTORICAL_IDENTIFIER = 'o'
+
+ @classmethod
+ def build_request(cls, station_id: str, start_time: datetime,
+ end_time: datetime) -> List[str]:
+ return super(OceanRequest, cls).build_request(station_id, start_time,
+ end_time)
diff --git a/ndbc_api/api/requests/http/spec.py b/ndbc_api/api/requests/http/spec.py
new file mode 100644
index 0000000..e3eccb5
--- /dev/null
+++ b/ndbc_api/api/requests/http/spec.py
@@ -0,0 +1,16 @@
+from datetime import datetime
+from typing import List
+
+from ndbc_api.api.requests.http._base import BaseRequest
+
+
+class SpecRequest(BaseRequest):
+
+ FORMAT = 'spec'
+ FILE_FORMAT = '.spec'
+
+ @classmethod
+ def build_request(cls, station_id: str, start_time: datetime,
+ end_time: datetime) -> List[str]:
+ return super(SpecRequest, cls).build_request(station_id, start_time,
+ end_time)
diff --git a/ndbc_api/api/requests/http/station_historical.py b/ndbc_api/api/requests/http/station_historical.py
new file mode 100644
index 0000000..c15eac0
--- /dev/null
+++ b/ndbc_api/api/requests/http/station_historical.py
@@ -0,0 +1,10 @@
+from ndbc_api.api.requests.http._core import CoreRequest
+
+
+class HistoricalRequest(CoreRequest):
+
+ STATION_HISTORY_PREFIX = 'station_history.php?station='
+
+ @classmethod
+ def build_request(cls, station_id: str) -> str:
+ return f'{cls.BASE_URL}{cls.STATION_HISTORY_PREFIX}{station_id}'
diff --git a/ndbc_api/api/requests/http/station_metadata.py b/ndbc_api/api/requests/http/station_metadata.py
new file mode 100644
index 0000000..a754f92
--- /dev/null
+++ b/ndbc_api/api/requests/http/station_metadata.py
@@ -0,0 +1,10 @@
+from ndbc_api.api.requests.http._core import CoreRequest
+
+
+class MetadataRequest(CoreRequest):
+
+ STATION_PREFIX = 'station_page.php?station='
+
+ @classmethod
+ def build_request(cls, station_id: str) -> str:
+ return f'{cls.BASE_URL}{cls.STATION_PREFIX}{station_id}'
diff --git a/ndbc_api/api/requests/http/station_realtime.py b/ndbc_api/api/requests/http/station_realtime.py
new file mode 100644
index 0000000..a483309
--- /dev/null
+++ b/ndbc_api/api/requests/http/station_realtime.py
@@ -0,0 +1,10 @@
+from ndbc_api.api.requests.http._core import CoreRequest
+
+
+class RealtimeRequest(CoreRequest):
+
+ STATION_REALTIME_PREFIX = 'station_realtime.php?station='
+
+ @classmethod
+ def build_request(cls, station_id: str) -> str:
+ return f'{cls.BASE_URL}{cls.STATION_REALTIME_PREFIX}{station_id}'
diff --git a/ndbc_api/api/requests/http/stdmet.py b/ndbc_api/api/requests/http/stdmet.py
new file mode 100644
index 0000000..a44df9e
--- /dev/null
+++ b/ndbc_api/api/requests/http/stdmet.py
@@ -0,0 +1,16 @@
+from datetime import datetime
+from typing import List
+
+from ndbc_api.api.requests.http._base import BaseRequest
+
+
+class StdmetRequest(BaseRequest):
+
+ FORMAT = 'stdmet'
+ FILE_FORMAT = '.txt'
+
+ @classmethod
+ def build_request(cls, station_id: str, start_time: datetime,
+ end_time: datetime) -> List[str]:
+ return super(StdmetRequest, cls).build_request(station_id, start_time,
+ end_time)
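
Because every measurement-type class only overrides class constants, realtime URLs have a uniform shape across the family. A sketch that calls the private `_build_request_realtime` helper directly, for demonstration only:

```python
# Illustration: each subclass differs only in FORMAT / FILE_FORMAT /
# HISTORICAL_IDENTIFIER, so the realtime URL shape is uniform.
from ndbc_api.api.requests.http.adcp import AdcpRequest
from ndbc_api.api.requests.http.cwind import CwindRequest
from ndbc_api.api.requests.http.stdmet import StdmetRequest

for req_cls in (AdcpRequest, CwindRequest, StdmetRequest):
    # _build_request_realtime is private; called here for demonstration.
    print(req_cls.__name__, req_cls._build_request_realtime('41001')[0])
# AdcpRequest https://www.ndbc.noaa.gov/data/realtime2/41001.adcp
# CwindRequest https://www.ndbc.noaa.gov/data/realtime2/41001.cwind
# StdmetRequest https://www.ndbc.noaa.gov/data/realtime2/41001.txt
```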
diff --git a/ndbc_api/api/requests/http/supl.py b/ndbc_api/api/requests/http/supl.py
new file mode 100644
index 0000000..86d1074
--- /dev/null
+++ b/ndbc_api/api/requests/http/supl.py
@@ -0,0 +1,17 @@
+from datetime import datetime
+from typing import List
+
+from ndbc_api.api.requests.http._base import BaseRequest
+
+
+class SuplRequest(BaseRequest):
+
+ FORMAT = 'supl'
+ FILE_FORMAT = '.supl'
+ HISTORICAL_IDENTIFIER = 's'
+
+ @classmethod
+ def build_request(cls, station_id: str, start_time: datetime,
+ end_time: datetime) -> List[str]:
+ return super(SuplRequest, cls).build_request(station_id, start_time,
+ end_time)
diff --git a/ndbc_api/api/requests/http/swden.py b/ndbc_api/api/requests/http/swden.py
new file mode 100644
index 0000000..0d1d2c5
--- /dev/null
+++ b/ndbc_api/api/requests/http/swden.py
@@ -0,0 +1,17 @@
+from datetime import datetime
+from typing import List
+
+from ndbc_api.api.requests.http._base import BaseRequest
+
+
+class SwdenRequest(BaseRequest):
+
+ FORMAT = 'swden'
+ FILE_FORMAT = '.swden'
+ HISTORICAL_IDENTIFIER = 'w'
+
+ @classmethod
+ def build_request(cls, station_id: str, start_time: datetime,
+ end_time: datetime) -> List[str]:
+ return super(SwdenRequest, cls).build_request(station_id, start_time,
+ end_time)
diff --git a/ndbc_api/api/requests/http/swdir.py b/ndbc_api/api/requests/http/swdir.py
new file mode 100644
index 0000000..720d6e6
--- /dev/null
+++ b/ndbc_api/api/requests/http/swdir.py
@@ -0,0 +1,17 @@
+from datetime import datetime
+from typing import List
+
+from ndbc_api.api.requests.http._base import BaseRequest
+
+
+class SwdirRequest(BaseRequest):
+
+ FORMAT = 'swdir'
+ FILE_FORMAT = '.swdir'
+ HISTORICAL_IDENTIFIER = 'd'
+
+ @classmethod
+ def build_request(cls, station_id: str, start_time: datetime,
+ end_time: datetime) -> List[str]:
+ return super(SwdirRequest, cls).build_request(station_id, start_time,
+ end_time)
diff --git a/ndbc_api/api/requests/http/swdir2.py b/ndbc_api/api/requests/http/swdir2.py
new file mode 100644
index 0000000..6b6fafd
--- /dev/null
+++ b/ndbc_api/api/requests/http/swdir2.py
@@ -0,0 +1,17 @@
+from datetime import datetime
+from typing import List
+
+from ndbc_api.api.requests.http._base import BaseRequest
+
+
+class Swdir2Request(BaseRequest):
+
+ FORMAT = 'swdir2'
+ FILE_FORMAT = '.swdir2'
+ HISTORICAL_IDENTIFIER = 'i'
+
+ @classmethod
+ def build_request(cls, station_id: str, start_time: datetime,
+ end_time: datetime) -> List[str]:
+ return super(Swdir2Request, cls).build_request(station_id, start_time,
+ end_time)
diff --git a/ndbc_api/api/requests/http/swr1.py b/ndbc_api/api/requests/http/swr1.py
new file mode 100644
index 0000000..6494f6b
--- /dev/null
+++ b/ndbc_api/api/requests/http/swr1.py
@@ -0,0 +1,17 @@
+from datetime import datetime
+from typing import List
+
+from ndbc_api.api.requests.http._base import BaseRequest
+
+
+class Swr1Request(BaseRequest):
+
+ FORMAT = 'swr1'
+ FILE_FORMAT = '.swr1'
+ HISTORICAL_IDENTIFIER = 'j'
+
+ @classmethod
+ def build_request(cls, station_id: str, start_time: datetime,
+ end_time: datetime) -> List[str]:
+ return super(Swr1Request, cls).build_request(station_id, start_time,
+ end_time)
diff --git a/ndbc_api/api/requests/http/swr2.py b/ndbc_api/api/requests/http/swr2.py
new file mode 100644
index 0000000..725a902
--- /dev/null
+++ b/ndbc_api/api/requests/http/swr2.py
@@ -0,0 +1,17 @@
+from datetime import datetime
+from typing import List
+
+from ndbc_api.api.requests.http._base import BaseRequest
+
+
+class Swr2Request(BaseRequest):
+
+ FORMAT = 'swr2'
+ FILE_FORMAT = '.swr2'
+ HISTORICAL_IDENTIFIER = 'k'
+
+ @classmethod
+ def build_request(cls, station_id: str, start_time: datetime,
+ end_time: datetime) -> List[str]:
+ return super(Swr2Request, cls).build_request(station_id, start_time,
+ end_time)
diff --git a/ndbc_api/api/requests/opendap/__init__.py b/ndbc_api/api/requests/opendap/__init__.py
new file mode 100644
index 0000000..e69de29
diff --git a/ndbc_api/api/requests/opendap/__pycache__/__init__.cpython-311.pyc b/ndbc_api/api/requests/opendap/__pycache__/__init__.cpython-311.pyc
new file mode 100644
index 0000000..cde1875
Binary files /dev/null and b/ndbc_api/api/requests/opendap/__pycache__/__init__.cpython-311.pyc differ
diff --git a/ndbc_api/api/requests/opendap/__pycache__/_base.cpython-311.pyc b/ndbc_api/api/requests/opendap/__pycache__/_base.cpython-311.pyc
new file mode 100644
index 0000000..dc3dc2b
Binary files /dev/null and b/ndbc_api/api/requests/opendap/__pycache__/_base.cpython-311.pyc differ
diff --git a/ndbc_api/api/requests/opendap/__pycache__/_core.cpython-311.pyc b/ndbc_api/api/requests/opendap/__pycache__/_core.cpython-311.pyc
new file mode 100644
index 0000000..6be1442
Binary files /dev/null and b/ndbc_api/api/requests/opendap/__pycache__/_core.cpython-311.pyc differ
diff --git a/ndbc_api/api/requests/opendap/__pycache__/adcp.cpython-311.pyc b/ndbc_api/api/requests/opendap/__pycache__/adcp.cpython-311.pyc
new file mode 100644
index 0000000..884e7a1
Binary files /dev/null and b/ndbc_api/api/requests/opendap/__pycache__/adcp.cpython-311.pyc differ
diff --git a/ndbc_api/api/requests/opendap/__pycache__/cwind.cpython-311.pyc b/ndbc_api/api/requests/opendap/__pycache__/cwind.cpython-311.pyc
new file mode 100644
index 0000000..ceed027
Binary files /dev/null and b/ndbc_api/api/requests/opendap/__pycache__/cwind.cpython-311.pyc differ
diff --git a/ndbc_api/api/requests/opendap/__pycache__/ocean.cpython-311.pyc b/ndbc_api/api/requests/opendap/__pycache__/ocean.cpython-311.pyc
new file mode 100644
index 0000000..1e57c6c
Binary files /dev/null and b/ndbc_api/api/requests/opendap/__pycache__/ocean.cpython-311.pyc differ
diff --git a/ndbc_api/api/requests/opendap/__pycache__/pwind.cpython-311.pyc b/ndbc_api/api/requests/opendap/__pycache__/pwind.cpython-311.pyc
new file mode 100644
index 0000000..190bd1d
Binary files /dev/null and b/ndbc_api/api/requests/opendap/__pycache__/pwind.cpython-311.pyc differ
diff --git a/ndbc_api/api/requests/opendap/__pycache__/stdmet.cpython-311.pyc b/ndbc_api/api/requests/opendap/__pycache__/stdmet.cpython-311.pyc
new file mode 100644
index 0000000..ed34afb
Binary files /dev/null and b/ndbc_api/api/requests/opendap/__pycache__/stdmet.cpython-311.pyc differ
diff --git a/ndbc_api/api/requests/opendap/__pycache__/swden.cpython-311.pyc b/ndbc_api/api/requests/opendap/__pycache__/swden.cpython-311.pyc
new file mode 100644
index 0000000..eac5181
Binary files /dev/null and b/ndbc_api/api/requests/opendap/__pycache__/swden.cpython-311.pyc differ
diff --git a/ndbc_api/api/requests/opendap/__pycache__/wlevel.cpython-311.pyc b/ndbc_api/api/requests/opendap/__pycache__/wlevel.cpython-311.pyc
new file mode 100644
index 0000000..8d0ab76
Binary files /dev/null and b/ndbc_api/api/requests/opendap/__pycache__/wlevel.cpython-311.pyc differ
diff --git a/ndbc_api/api/requests/opendap/_base.py b/ndbc_api/api/requests/opendap/_base.py
new file mode 100644
index 0000000..d3e9da4
--- /dev/null
+++ b/ndbc_api/api/requests/opendap/_base.py
@@ -0,0 +1,82 @@
+import os
+from datetime import datetime, timedelta
+from typing import List
+
+from ndbc_api.api.requests.opendap._core import CoreRequest
+
+
+class BaseRequest(CoreRequest):
+
+ # example url: https://dods.ndbc.noaa.gov/thredds/fileServer/data/adcp/41001/41001a2010.nc
+ # example url: https://dods.ndbc.noaa.gov/thredds/fileServer/data/stdmet/tplm2/tplm2h2021.nc
+ URL_PREFIX = 'fileServer/data/'
+ FORMAT = ''
+ HISTORICAL_IDENTIFIER = '' # we keep the same structure as the http requests
+ FILE_FORMAT = 'nc'
+
+ @classmethod
+ def build_request(cls, station_id: str, start_time: datetime,
+ end_time: datetime) -> List[str]:
+
+ if 'MOCKDATE' in os.environ:
+ now = datetime.strptime(os.getenv('MOCKDATE'), '%Y-%m-%d')
+ else:
+ now = datetime.now()
+ is_historical = (now - start_time) >= timedelta(
+ days=45) # we use 45 rather than 44 for opendap data
+ if is_historical:
+ return cls._build_request_historical(
+ station_id=station_id,
+ start_time=start_time,
+ end_time=end_time,
+ now=now,
+ )
+ return cls._build_request_realtime(station_id=station_id)
+
+ @classmethod
+ def _build_request_historical(
+ cls,
+ station_id: str,
+ start_time: datetime,
+ end_time: datetime,
+ now: datetime,
+ ) -> List[str]:
+
+ def req_hist_helper_year(req_year: int) -> str:
+ return f'{cls.BASE_URL}{cls.URL_PREFIX}{cls.FORMAT}/{station_id.lower()}/{station_id.lower()}{cls.HISTORICAL_IDENTIFIER}{req_year}.{cls.FILE_FORMAT}'
+
+ if not cls.FORMAT: # pragma: no cover
+ raise ValueError(
+ 'Please provide a format for this historical data request, or call a formatted child class\'s method.'
+ )
+ # store request urls
+ reqs = []
+
+ current_year = now.year
+ has_realtime = (now - end_time) <= timedelta(days=45)
+
+ # handle year requests
+ for hist_year in range(int(start_time.year),
+ min(int(current_year),
+ int(end_time.year) + 1)):
+ reqs.append(req_hist_helper_year(hist_year))
+
+ if has_realtime:
+ reqs.append(
+ cls._build_request_realtime(
+ station_id=station_id)[0] # only one URL
+ )
+ return reqs
+
+ @classmethod
+ def _build_request_realtime(cls, station_id: str) -> List[str]:
+ if not cls.FILE_FORMAT:
+ raise ValueError(
+                'Please provide a file format for this realtime data request, or call a formatted child class\'s method.'
+ )
+
+ station_id = station_id.upper()
+ # realtime data uses 9999 as the year part
+ return [
+ f'{cls.BASE_URL}{cls.URL_PREFIX}{cls.FORMAT}/{station_id.lower()}/{station_id.lower()}{cls.HISTORICAL_IDENTIFIER}9999.{cls.FILE_FORMAT}'
+ ]
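
The OPeNDAP variant builds THREDDS `fileServer` URLs for netCDF files, matching the example URLs in the comments above. A hedged sketch using the opendap `StdmetRequest` defined later in this diff:

```python
# Sketch: historical OPeNDAP requests resolve to one .nc file per year.
from datetime import datetime

from ndbc_api.api.requests.opendap.stdmet import StdmetRequest

urls = StdmetRequest.build_request(
    station_id='tplm2',
    start_time=datetime(2021, 1, 1),
    end_time=datetime(2021, 12, 31),
)
# ['https://dods.ndbc.noaa.gov/thredds/fileServer/data/stdmet/tplm2/tplm2h2021.nc']
```

Realtime OPeNDAP data reuses the same URL shape with `9999` in place of the year.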
diff --git a/ndbc_api/api/requests/opendap/_core.py b/ndbc_api/api/requests/opendap/_core.py
new file mode 100644
index 0000000..0ff02ed
--- /dev/null
+++ b/ndbc_api/api/requests/opendap/_core.py
@@ -0,0 +1,7 @@
+class CoreRequest:
+
+ BASE_URL = 'https://dods.ndbc.noaa.gov/thredds/'
+
+ @classmethod
+ def build_request(cls) -> str:
+ return cls.BASE_URL
diff --git a/ndbc_api/api/requests/opendap/adcp.py b/ndbc_api/api/requests/opendap/adcp.py
new file mode 100644
index 0000000..fe4d749
--- /dev/null
+++ b/ndbc_api/api/requests/opendap/adcp.py
@@ -0,0 +1,16 @@
+from datetime import datetime
+from typing import List
+
+from ndbc_api.api.requests.opendap._base import BaseRequest
+
+
+class AdcpRequest(BaseRequest):
+
+ FORMAT = 'adcp'
+ HISTORICAL_IDENTIFIER = 'a'
+
+ @classmethod
+ def build_request(cls, station_id: str, start_time: datetime,
+ end_time: datetime) -> List[str]:
+ return super(AdcpRequest, cls).build_request(station_id, start_time,
+ end_time)
diff --git a/ndbc_api/api/requests/opendap/cwind.py b/ndbc_api/api/requests/opendap/cwind.py
new file mode 100644
index 0000000..469706c
--- /dev/null
+++ b/ndbc_api/api/requests/opendap/cwind.py
@@ -0,0 +1,16 @@
+from datetime import datetime
+from typing import List
+
+from ndbc_api.api.requests.opendap._base import BaseRequest
+
+
+class CwindRequest(BaseRequest):
+
+ FORMAT = 'cwind'
+ HISTORICAL_IDENTIFIER = 'c'
+
+ @classmethod
+ def build_request(cls, station_id: str, start_time: datetime,
+ end_time: datetime) -> List[str]:
+ return super(CwindRequest, cls).build_request(station_id, start_time,
+ end_time)
diff --git a/ndbc_api/api/requests/opendap/ocean.py b/ndbc_api/api/requests/opendap/ocean.py
new file mode 100644
index 0000000..dde9ea1
--- /dev/null
+++ b/ndbc_api/api/requests/opendap/ocean.py
@@ -0,0 +1,16 @@
+from datetime import datetime
+from typing import List
+
+from ndbc_api.api.requests.opendap._base import BaseRequest
+
+
+class OceanRequest(BaseRequest):
+
+ FORMAT = 'ocean'
+ HISTORICAL_IDENTIFIER = 'o'
+
+ @classmethod
+ def build_request(cls, station_id: str, start_time: datetime,
+ end_time: datetime) -> List[str]:
+ return super(OceanRequest, cls).build_request(station_id, start_time,
+ end_time)
diff --git a/ndbc_api/api/requests/opendap/pwind.py b/ndbc_api/api/requests/opendap/pwind.py
new file mode 100644
index 0000000..0b5be60
--- /dev/null
+++ b/ndbc_api/api/requests/opendap/pwind.py
@@ -0,0 +1,16 @@
+from datetime import datetime
+from typing import List
+
+from ndbc_api.api.requests.opendap._base import BaseRequest
+
+
+class PwindRequest(BaseRequest):
+
+ FORMAT = 'pwind'
+ HISTORICAL_IDENTIFIER = 'p'
+
+ @classmethod
+ def build_request(cls, station_id: str, start_time: datetime,
+ end_time: datetime) -> List[str]:
+ return super(PwindRequest, cls).build_request(station_id, start_time,
+ end_time)
diff --git a/ndbc_api/api/requests/opendap/stdmet.py b/ndbc_api/api/requests/opendap/stdmet.py
new file mode 100644
index 0000000..da1dddc
--- /dev/null
+++ b/ndbc_api/api/requests/opendap/stdmet.py
@@ -0,0 +1,16 @@
+from datetime import datetime
+from typing import List
+
+from ndbc_api.api.requests.opendap._base import BaseRequest
+
+
+class StdmetRequest(BaseRequest):
+
+ FORMAT = 'stdmet'
+ HISTORICAL_IDENTIFIER = 'h'
+
+ @classmethod
+ def build_request(cls, station_id: str, start_time: datetime,
+ end_time: datetime) -> List[str]:
+ return super(StdmetRequest, cls).build_request(station_id, start_time,
+ end_time)
diff --git a/ndbc_api/api/requests/opendap/swden.py b/ndbc_api/api/requests/opendap/swden.py
new file mode 100644
index 0000000..e1bc55a
--- /dev/null
+++ b/ndbc_api/api/requests/opendap/swden.py
@@ -0,0 +1,16 @@
+from datetime import datetime
+from typing import List
+
+from ndbc_api.api.requests.opendap._base import BaseRequest
+
+
+class SwdenRequest(BaseRequest):
+
+ FORMAT = 'swden'
+ HISTORICAL_IDENTIFIER = 'w'
+
+ @classmethod
+ def build_request(cls, station_id: str, start_time: datetime,
+ end_time: datetime) -> List[str]:
+ return super(SwdenRequest, cls).build_request(station_id, start_time,
+ end_time)
diff --git a/ndbc_api/api/requests/opendap/wlevel.py b/ndbc_api/api/requests/opendap/wlevel.py
new file mode 100644
index 0000000..11bd1db
--- /dev/null
+++ b/ndbc_api/api/requests/opendap/wlevel.py
@@ -0,0 +1,16 @@
+from datetime import datetime
+from typing import List
+
+from ndbc_api.api.requests.opendap._base import BaseRequest
+
+
+class WlevelRequest(BaseRequest):
+
+ FORMAT = 'wlevel'
+ HISTORICAL_IDENTIFIER = 'l'
+
+ @classmethod
+ def build_request(cls, station_id: str, start_time: datetime,
+ end_time: datetime) -> List[str]:
+ return super(WlevelRequest, cls).build_request(station_id, start_time,
+ end_time)
diff --git a/ndbc_api/config/__init__.py b/ndbc_api/config/__init__.py
new file mode 100644
index 0000000..f84c255
--- /dev/null
+++ b/ndbc_api/config/__init__.py
@@ -0,0 +1,24 @@
+"""Stores the configuration information for the NDBC API.
+
+Attributes:
+ LOGGER_NAME (:str:): The name for the `logging.Logger` in the api instance.
+ DEFAULT_CACHE_LIMIT (:int:): The station level limit for caching NDBC data
+ service requests.
+ VERIFY_HTTPS (:bool:): Whether to execute requests using HTTPS rather than
+ HTTP.
+ HTTP_RETRY (:int:): The number of times to retry requests to the NDBC data
+ service.
+ HTTP_BACKOFF_FACTOR (:float:): The backoff factor used when executing retry
+ requests to the NDBC data service.
+    HTTP_DELAY (:int:): The delay between requests submitted to the NDBC data
+ service, in milliseconds.
+ HTTP_DEBUG (:bool:): Whether to log requests and responses to the NDBC API's
+ log (a `logging.Logger`) as debug messages.
+"""
+LOGGER_NAME = 'NDBC-API'
+DEFAULT_CACHE_LIMIT = 36
+VERIFY_HTTPS = True
+HTTP_RETRY = 5
+HTTP_BACKOFF_FACTOR = 0.8
+HTTP_DELAY = 2000
+HTTP_DEBUG = False
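
These constants are read once at import time and serve as the defaults for `NdbcApi.__init__` (added later in this diff), so callers can override them per instance instead of editing this module. A brief sketch:

```python
# Sketch: overriding config defaults at construction time.
from ndbc_api import NdbcApi

api = NdbcApi(
    cache_limit=64,   # overrides DEFAULT_CACHE_LIMIT (36)
    retries=3,        # overrides HTTP_RETRY (5)
    delay=1000,       # overrides HTTP_DELAY (2000 ms)
)
```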
diff --git a/ndbc_api/config/__pycache__/__init__.cpython-311.pyc b/ndbc_api/config/__pycache__/__init__.cpython-311.pyc
new file mode 100644
index 0000000..4e271f3
Binary files /dev/null and b/ndbc_api/config/__pycache__/__init__.cpython-311.pyc differ
diff --git a/ndbc_api/exceptions.py b/ndbc_api/exceptions.py
new file mode 100644
index 0000000..2c6caaa
--- /dev/null
+++ b/ndbc_api/exceptions.py
@@ -0,0 +1,29 @@
+class NdbcException(Exception):
+ """Base exception that all other NDBC exceptions subclass from."""
+
+ def __init__(self, message: str = ''): # pragma: no cover
+ self.message = message
+ super().__init__(self.message)
+
+ def __str__(self): # pragma: no cover
+ return f"NDBC API: {self.message or 'unspecified error'}"
+
+
+class TimestampException(NdbcException):
+ """Unable to handle given timestamp."""
+
+
+class RequestException(NdbcException):
+ """Unable to build the given request."""
+
+
+class ResponseException(NdbcException):
+ """Unable to handle the given response."""
+
+
+class ParserException(NdbcException):
+ """Unable to parse the given response."""
+
+
+class HandlerException(NdbcException):
+ """Error when handling this API call."""
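
Since every error type subclasses `NdbcException`, callers can catch the base class as a catch-all while still handling specific failures first. A minimal sketch (station, mode, and dates are arbitrary):

```python
# Sketch: layered exception handling against the NDBC API.
from datetime import datetime

from ndbc_api import NdbcApi
from ndbc_api.exceptions import NdbcException, ResponseException

api = NdbcApi()
try:
    df = api.get_data('tplm2', 'stdmet',
                      datetime(2022, 1, 1), datetime(2022, 2, 1))
except ResponseException as e:
    print(f'request/parse failure: {e}')
except NdbcException as e:
    print(f'other NDBC API error: {e}')
```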
diff --git a/ndbc_api/ndbc_api.py b/ndbc_api/ndbc_api.py
new file mode 100644
index 0000000..7da62ee
--- /dev/null
+++ b/ndbc_api/ndbc_api.py
@@ -0,0 +1,836 @@
+"""An API for retrieving data from the NDBC.
+
+This module defines the `NdbcApi`, the top-level object which creates, handles,
+caches, parses, and returns NDBC data.
+
+Example:
+ ```python3
+ from ndbc_api import NdbcApi
+ api = NdbcApi()
+ available_stations = api.stations()
+ modes = api.get_modes()
+ df_stdmet_tplm2 = api.get_data(
+ 'tplm2',
+ 'stdmet',
+ '2020-01-01',
+ '2022-01-01',
+ as_df=True
+ )
+ ```
+
+Attributes:
+ log (:obj:`logging.Logger`): The logger at which to register HTTP
+ request and response status codes and headers used for debug
+ purposes.
+ headers(:dict:): The request headers for use in the NDBC API's request
+ handler.
+"""
+import logging
+import pickle
+import warnings
+from concurrent.futures import ThreadPoolExecutor, as_completed
+from datetime import datetime, timedelta
+from typing import Any, List, Sequence, Tuple, Union, Dict, Optional
+
+import xarray
+import pandas as pd
+
+from .api.handlers.http.data import DataHandler
+from .api.handlers.http.stations import StationsHandler
+from .config import (DEFAULT_CACHE_LIMIT, HTTP_BACKOFF_FACTOR, HTTP_DEBUG,
+ HTTP_DELAY, HTTP_RETRY, LOGGER_NAME, VERIFY_HTTPS)
+from .exceptions import (HandlerException, ParserException, RequestException,
+ ResponseException, TimestampException)
+from .utilities.req_handler import RequestHandler
+from .utilities.singleton import Singleton
+from .utilities.log_formatter import LogFormatter
+from .api.handlers.opendap.data import OpenDapDataHandler
+from .utilities.opendap.dataset import concat_datasets, merge_datasets, filter_dataset_by_variable, filter_dataset_by_time_range
+
+
+class NdbcApi(metaclass=Singleton):
+ """An API for querying the National Data Buoy Center.
+
+ The `NdbcApi` is metaclassed as a singleton to conserve NDBC resources. It
+ uses two private handlers to build requests and parse responses to the NDBC
+ over HTTP(s). It also uses a LRU-cached request handler to execute requests
+    over HTTP(s). It also uses an LRU-cached request handler to execute requests
+
+ Attributes:
+ logging_level: The `logging.Logger`s log level, 1 if the `debug`
+ flag is set in the `__init__` method, and 0 otherwise.
+ cache_limit: The handler's global limit for caching
+ `NdbcApi` responses. This is implemented as a least-recently
+ used cache, designed to conserve NDBC resources when querying
+ measurements for a given station over similar time ranges.
+        delay: The HTTP(s) request delay parameter, in milliseconds.
+        retries: The number of times to retry a request to the NDBC data
+            service.
+ backoff_factor: The back-off parameter, used in conjunction with
+ `retries` to re-attempt requests to the NDBC data service.
+ verify_https: A flag which indicates whether to attempt requests to the
+ NDBC data service over HTTP or HTTPS.
+ debug: A flag for verbose logging and response-level status reporting.
+ Affects the instance's `logging.Logger` and the behavior of its
+ private `RequestHandler` instance.
+ """
+
+ logger = logging.getLogger(LOGGER_NAME)
+ warnings.simplefilter(action='ignore', category=FutureWarning)
+
+ def __init__(
+ self,
+ logging_level: int = logging.WARNING if HTTP_DEBUG else logging.ERROR,
+ filename: Any = None,
+ cache_limit: int = DEFAULT_CACHE_LIMIT,
+ headers: dict = {},
+ delay: int = HTTP_DELAY,
+ retries: int = HTTP_RETRY,
+ backoff_factor: float = HTTP_BACKOFF_FACTOR,
+ verify_https: bool = VERIFY_HTTPS,
+ debug: bool = HTTP_DEBUG,
+ ):
+ """Initializes the singleton `NdbcApi`, sets associated handlers."""
+ self.cache_limit = cache_limit
+ self.headers = headers
+ self._handler = self._get_request_handler(
+ cache_limit=self.cache_limit,
+ delay=delay,
+ retries=retries,
+ backoff_factor=backoff_factor,
+ headers=self.headers,
+ debug=debug,
+ verify_https=verify_https,
+ )
+ self._stations_api = StationsHandler
+ self._data_api = DataHandler
+ self._opendap_data_api = OpenDapDataHandler
+ self.configure_logging(level=logging_level, filename=filename)
+
+ def dump_cache(self, dest_fp: Union[str, None] = None) -> Union[dict, None]:
+ """Dump the request cache to dict or the specified filepath.
+
+        Dump the request/response pairs stored in the `NdbcApi`'s
+        `RequestHandler` as a `dict`, either returning the object, if no
+        `dest_fp` is specified, or serializing (pickling) the object and
+        writing it to the specified `dest_fp`.
+
+ Args:
+ dest_fp: The destination filepath for the serialized `RequestsCache`
+ contents.
+
+ Returns:
+            The cached request/response pairs as a `dict`, or `None` if a
+ `dest_fp` is specified when calling the method.
+ """
+ data = dict()
+ ids = [r.id_ for r in self._handler.stations]
+ caches = [r.reqs.cache for r in self._handler.stations]
+ if ids:
+ for station_id, cache in zip(ids, caches):
+ data[station_id] = dict()
+ reqs = cache.keys()
+ for req in reqs:
+ resp = cache[req].v
+ data[station_id][req] = resp
+ if dest_fp:
+ with open(dest_fp, 'wb') as f:
+ pickle.dump(data, f)
+ else:
+ return data
+
+ def clear_cache(self) -> None:
+ """Clear the request cache and create a new handler."""
+ del self._handler
+ self._handler = self._get_request_handler(
+ cache_limit=self.cache_limit,
+ delay=HTTP_DELAY,
+ retries=HTTP_RETRY,
+ backoff_factor=HTTP_BACKOFF_FACTOR,
+ headers=self.headers,
+ debug=HTTP_DEBUG,
+ verify_https=VERIFY_HTTPS,
+ )
+
+ def set_cache_limit(self, new_limit: int) -> None:
+ """Set the cache limit for the API's request cache."""
+ self._handler.set_cache_limit(cache_limit=new_limit)
+
+ def get_cache_limit(self) -> int:
+ """Get the cache limit for the API's request cache."""
+ return self._handler.get_cache_limit()
+
+ def get_headers(self) -> dict:
+ """Return the current headers used by the request handler."""
+ return self._handler.get_headers()
+
+ def update_headers(self, new: dict) -> None:
+ """Add new headers to the request handler."""
+ self._handler.update_headers(new)
+
+ def set_headers(self, request_headers: dict) -> None:
+ """Reset the request headers using the new supplied headers."""
+ self._handler.set_headers(request_headers)
+
+ def configure_logging(self, level=logging.WARNING, filename=None) -> None:
+ """Configures logging for the NdbcApi.
+
+ Args:
+ level (int, optional): The logging level. Defaults to logging.WARNING.
+ filename (str, optional): If provided, logs to the specified file.
+ """
+ self.logger.setLevel(level)
+
+ handler: logging.Handler
+ formatter: logging.Formatter
+
+ for handler in self.logger.handlers[:]:
+ self.logger.removeHandler(handler)
+
+ if filename:
+ handler = logging.FileHandler(filename)
+ formatter = logging.Formatter(
+ '[%(asctime)s][%(levelname)s]: %(message)s')
+ else:
+ handler = logging.StreamHandler()
+ formatter = LogFormatter('[%(levelname)s]: %(message)s')
+
+ handler.setFormatter(formatter)
+ self.logger.addHandler(handler)
+
+ def log(self,
+ level: int,
+ station_id: Union[int, str, None] = None,
+ mode: Union[str, None] = None,
+ message: Union[str, None] = None,
+ **extra_data) -> None:
+ """Logs a structured message with metadata.
+
+ Args:
+ level (int): The logging level.
+ station_id (str, optional): The NDBC station ID.
+ mode (str, optional): The data mode.
+ message (str, optional): The log message.
+ **extra_data: Additional key-value pairs to include in the log.
+ """
+ log_data = {}
+ if station_id:
+ log_data['station_id'] = station_id
+ if mode:
+ log_data['mode'] = mode
+ if message:
+ log_data['message'] = message
+ for k, v in extra_data.items():
+ log_data[k] = v
+ self.logger.log(level, log_data)
+
+ def stations(self, as_df: bool = True) -> Union[pd.DataFrame, dict]:
+ """Get all stations and station metadata from the NDBC.
+
+        Query the NDBC data service for the currently available data buoys
+        (stations), both those maintained by the NDBC and those whose
+        measurements the NDBC merely hosts. Stations are returned by default
+ as rows of a `pandas.DataFrame`, alongside their realtime data coverage
+ for some common measurements, their latitude and longitude, and current
+ station status notes maintained by the NDBC.
+
+ Args:
+ as_df: Flag indicating whether to return current station data as a
+ `pandas.DataFrame` if set to `True` or as a `dict` if `False`.
+
+ Returns:
+ The current station data from the NDBC data service, either as a
+ `pandas.DataFrame` or as a `dict` depending on the value of `as_df`.
+
+ Raises:
+ ResponseException: An error occurred while retrieving and parsing
+ responses from the NDBC data service.
+ """
+ try:
+ data = self._stations_api.stations(handler=self._handler)
+ return self._handle_data(data, as_df, cols=None)
+ except (ResponseException, ValueError, KeyError) as e:
+ raise ResponseException('Failed to handle returned data.') from e
+
+ def historical_stations(self,
+ as_df: bool = True) -> Union[pd.DataFrame, dict]:
+ """Get historical stations and station metadata from the NDBC.
+
+ Query the NDBC data service for the historical data buoys
+ (stations), both those maintained by the NDBC and those which are not.
+ Stations are returned by default as rows of a `pandas.DataFrame`,
+ alongside their historical data coverage, with one row per tuple of
+ (station, historical deployment).
+
+ Args:
+ as_df: Flag indicating whether to return current station data as a
+ `pandas.DataFrame` if set to `True` or as a `dict` if `False`.
+
+ Returns:
+ The current station data from the NDBC data service, either as a
+ `pandas.DataFrame` or as a `dict` depending on the value of `as_df`.
+
+ Raises:
+ ResponseException: An error occurred while retrieving and parsing
+ responses from the NDBC data service.
+ """
+ try:
+ data = self._stations_api.historical_stations(handler=self._handler)
+ return self._handle_data(data, as_df, cols=None)
+ except (ResponseException, ValueError, KeyError) as e:
+ raise ResponseException('Failed to handle returned data.') from e
+
+ def nearest_station(
+ self,
+ lat: Union[str, float, None] = None,
+ lon: Union[str, float, None] = None,
+ ) -> str:
+ """Get nearest station to the specified lat/lon.
+
+ Use the NDBC data service's current station data to determine the
+ nearest station to the specified latitude and longitude (either as
+ `float` or as DD.dd[E/W] strings).
+
+ Args:
+ lat: The latitude of interest, used to determine the closest
+ maintained station to the given position.
+ lon: The longitude of interest, used to determine the closest
+ maintained station to the given position.
+
+ Returns:
+ The station id (e.g. `'tplm2'` or `'41001'`) of the nearest station
+ with active measurements to the specified lat/lon pair.
+
+ Raises:
+ ValueError: The latitude and longitude were not both specified when
+ querying for the closest station.
+ """
+ if not (lat and lon):
+ raise ValueError('lat and lon must be specified.')
+ nearest_station = self._stations_api.nearest_station(
+ handler=self._handler, lat=lat, lon=lon)
+ return nearest_station
+
+ def radial_search(
+ self,
+ lat: Union[str, float, None] = None,
+ lon: Union[str, float, None] = None,
+ radius: float = -1,
+ units: str = 'km',
+ ) -> pd.DataFrame:
+ """Get all stations within radius units of the specified lat/lon.
+
+ Use the NDBC data service's current station data to determine the
+ stations within radius of the specified latitude and longitude
+ (passed either as `float` or as DD.dd[E/W] strings).
+
+ Args:
+ lat (float): The latitude of interest, used to determine the maintained
+ stations within radius units of the given position.
+ lon (float): The longitude of interest, used to determine the maintained
+ stations within radius units of the given position.
+ radius (float): The radius in the specified units to search for stations
+ within.
+            units (str): The units of the radius: 'nm', 'km', or 'mi'.
+
+ Returns:
+ A `pandas.DataFrame` of the stations within the specified radius of
+ the given lat/lon pair.
+
+ Raises:
+ ValueError: The latitude and longitude were not both specified when
+ querying for the closest station, or the radius or units are
+ invalid.
+ """
+ if not (lat and lon):
+ raise ValueError('lat and lon must be specified.')
+ stations_in_radius = self._stations_api.radial_search(
+ handler=self._handler, lat=lat, lon=lon, radius=radius, units=units)
+ return stations_in_radius
+
+ def station(self,
+ station_id: Union[str, int],
+ as_df: bool = False) -> Union[pd.DataFrame, dict]:
+ """Get metadata for the given station from the NDBC.
+
+ The NDBC maintains some station-level metadata including status notes,
+ location information, inclement weather warnings, and measurement notes.
+ This method is used to request, handle, and parse the metadata for the
+ given station from the station's NDBC webpage.
+
+ Args:
+ station_id: The NDBC station ID (e.g. `'tplm2'` or `41001`) for the
+ station of interest.
+ as_df: Whether to return station-level data as a `pandas.DataFrame`,
+ defaults to `False`, and a `dict` is returned.
+
+ Returns:
+ The station metadata for the given station, either as a `dict` or as
+ a `pandas.DataFrame` if the `as_df` flag is set to `True`.
+
+ Raises:
+ ResponseException: An error occurred when requesting and parsing
+ responses for the specified station.
+ """
+ station_id = self._parse_station_id(station_id)
+ try:
+ data = self._stations_api.metadata(handler=self._handler,
+ station_id=station_id)
+ return self._handle_data(data, as_df, cols=None)
+ except (ResponseException, ValueError, KeyError) as e:
+ raise ResponseException('Failed to handle returned data.') from e
+
+ def available_realtime(
+ self,
+ station_id: Union[str, int],
+ full_response: bool = False,
+ as_df: Optional[bool] = None,
+ ) -> Union[List[str], pd.DataFrame, dict]:
+ """Get the available realtime modalities for a station.
+
+ While most data buoy (station) measurements are available over
+        multi-year time ranges, some measurements are deprecated or become
+ unavailable for substantial periods of time. This method queries the
+ NDBC station webpage for those measurements, and their links, which are
+ available or were available over the last 45 days.
+
+ Args:
+ station_id: The NDBC station ID (e.g. `'tplm2'` or `41001`) for the
+ station of interest.
+ full_response: Whether to return the full response from the NDBC
+ API, defaults to `False` and a list of modes from `get_modes()`
+ is returned. If `True`, the full URL for each data mode is
+ included in the returned `dict` or `pandas.DataFrame`.
+ as_df: Whether to return station-level data as a `pandas.DataFrame`,
+ defaults to `False`, and a `dict` is returned.
+
+ Returns:
+ The available realtime measurements for the specified station,
+ alongside their NDBC data links, either as a `dict` or as a
+ `pandas.DataFrame` if the `as_df` flag is set to `True`.
+
+ Raises:
+ ResponseException: An error occurred when requesting and parsing
+ responses for the specified station.
+ """
+ station_id = self._parse_station_id(station_id)
+ try:
+ station_realtime = self._stations_api.realtime(
+ handler=self._handler, station_id=station_id)
+ full_data = {}
+ if full_response:
+ if as_df is None:
+ as_df = False
+ full_data = self._handle_data(station_realtime,
+ as_df,
+ cols=None)
+ return full_data
+ else:
+ full_data = self._handle_data(station_realtime,
+ as_df=False,
+ cols=None)
+
+ # Parse the modes from the full response
+ _modes = self.get_modes()
+ station_modes = set()
+ for k in full_data:
+ for m in _modes:
+ if m in full_data[k]['description']:
+ station_modes.add(m)
+ return list(station_modes)
+ except (ResponseException, ValueError, KeyError) as e:
+ raise ResponseException('Failed to handle returned data.') from e
+
+ def available_historical(self,
+ station_id: Union[str, int],
+ as_df: bool = False) -> Union[pd.DataFrame, dict]:
+ """Get the available historical measurements for a station.
+
+ This method queries the NDBC station webpage for historical, quality
+ controlled measurements and their associated availability time ranges.
+
+ Args:
+ station_id: The NDBC station ID (e.g. `'tplm2'` or `41001`) for the
+ station of interest.
+ as_df: Whether to return station-level data as a `pandas.DataFrame`,
+ defaults to `False`, and a `dict` is returned.
+
+ Returns:
+ The available historical measurements for the specified station,
+ alongside their NDBC data links, either as a `dict` or as a
+ `pandas.DataFrame` if the `as_df` flag is set to `True`.
+
+ Raises:
+ ResponseException: An error occurred when requesting and parsing
+ responses for the specified station.
+ """
+ station_id = self._parse_station_id(station_id)
+ try:
+ data = self._stations_api.historical(handler=self._handler,
+ station_id=station_id)
+ return self._handle_data(data, as_df, cols=None)
+ except (ResponseException, ValueError, KeyError) as e:
+ raise ResponseException('Failed to handle returned data.') from e
+
+ def get_data(
+ self,
+ station_id: Union[int, str, None] = None,
+ mode: Union[str, None] = None,
+ start_time: Union[str, datetime] = datetime.now() - timedelta(days=30),
+ end_time: Union[str, datetime] = datetime.now(),
+ use_timestamp: bool = True,
+ as_df: bool = True,
+ cols: List[str] = None,
+ station_ids: Union[Sequence[Union[int, str]], None] = None,
+ modes: Union[List[str], None] = None,
+ as_xarray_dataset: bool = False,
+ use_opendap: Optional[bool] = None,
+ ) -> Union[pd.DataFrame, xarray.Dataset, dict]:
+ """Execute data query against the specified NDBC station(s).
+
+ Query the NDBC data service for station-level measurements, using the
+ `mode` parameter to determine the measurement type (e.g. `'stdmet'` for
+        standard meteorological data or `'cwind'` for continuous winds data). The
+        time range and data columns of interest may also be specified, such that
+        a tailored set of requests is executed against the NDBC data service to
+ generate a single `pandas.DataFrame` or `dict` matching the conditions
+ specified in the method call. When calling `get_data` with `station_ids`
+ the station identifier is added as a column to the returned data.
+
+ Args:
+ station_id: The NDBC station ID (e.g. `'tplm2'` or `41001`) for the
+ station of interest.
+ station_ids: A list of NDBC station IDs (e.g. `['tplm2', '41001']`)
+ for the stations of interest.
+ mode: The data measurement type to query for the station (e.g.
+                `'stdmet'` for standard meteorological data or `'cwind'` for
+ continuous winds data).
+ modes: A list of data measurement types to query for the stations
+ (e.g. `['stdmet', 'cwind']`).
+ start_time: The first timestamp of interest (in UTC) for the data
+ query, defaulting to 30 days before the current system time.
+ end_time: The last timestamp of interest (in UTC) for the data
+ query, defaulting to the current system time.
+ use_timestamp: A flag indicating whether to parse the NDBC data
+ service column headers as a timestamp, and to use this timestamp
+ as the index.
+ as_df: Whether to return station-level data as a `pandas.DataFrame`,
+ defaults to `True`, if `False` a `dict` is returned unless
+ `as_xarray_dataset` is set to `True`.
+            as_xarray_dataset: Whether to return the data as an `xarray.Dataset`,
+ defaults to `False`.
+ cols: A list of columns of interest which are selected from the
+ available data columns, such that only the desired columns are
+ returned. All columns are returned if `None` is specified.
+ use_opendap: An alias for `as_xarray_dataset`.
+
+ Returns:
+ The available station(s) measurements for the specified modes, time
+ range, and columns, either as a `dict` or as a `pandas.DataFrame`
+ if the `as_df` flag is set to `True`.
+
+ Raises:
+ ValueError: Both `station_id` and `station_ids` are `None`, or both
+ are not `None`. This is also raised if `mode` and `modes` are
+                `None`, or both are not `None`.
+ RequestException: The specified mode is not available.
+ ResponseException: There was an error in executing and parsing the
+ required requests against the NDBC data service.
+ HandlerException: There was an error in handling the returned data
+ as a `dict` or `pandas.DataFrame`.
+ """
+ if use_opendap is not None:
+ as_xarray_dataset = use_opendap
+
+ as_df = as_df and not as_xarray_dataset
+
+ self.log(logging.DEBUG,
+ message=f"`get_data` called with arguments: {locals()}")
+ if station_id is None and station_ids is None:
+ raise ValueError('Both `station_id` and `station_ids` are `None`.')
+ if station_id is not None and station_ids is not None:
+ raise ValueError('`station_id` and `station_ids` cannot both be '
+ 'specified.')
+ if mode is None and modes is None:
+ raise ValueError('Both `mode` and `modes` are `None`.')
+ if mode is not None and modes is not None:
+ raise ValueError('`mode` and `modes` cannot both be specified.')
+
+ handle_station_ids: List[Union[int, str]] = []
+ handle_modes: List[str] = []
+
+ if station_id is not None:
+ handle_station_ids.append(station_id)
+ if station_ids is not None:
+ handle_station_ids.extend(station_ids)
+ if mode is not None:
+ handle_modes.append(mode)
+ if modes is not None:
+ handle_modes.extend(modes)
+
+ for mode in handle_modes:
+ if mode not in self.get_modes(use_opendap=as_xarray_dataset):
+ raise RequestException(f"Mode {mode} is not available.")
+
+ self.log(logging.INFO,
+ message=(f"Processing request for station_ids "
+ f"{handle_station_ids} and modes "
+ f"{handle_modes}"))
+
+        # `_handle_get_data` returns a (data, station_id) tuple; the handled
+        # data are accumulated per mode in `accumulated_data`.
+        accumulated_data: Dict[str, List[Union[pd.DataFrame, xarray.Dataset,
+                                               dict]]] = {}
+ for mode in handle_modes:
+ accumulated_data[mode] = []
+
+        with ThreadPoolExecutor(
+                max_workers=len(handle_station_ids)) as station_executor:
+            # submit one request per (station_id, mode) pair, and remember
+            # each future's pair so results accumulate under the right mode
+            station_futures = {}
+            for station_id in handle_station_ids:
+                for mode in handle_modes:
+                    future = station_executor.submit(
+                        self._handle_get_data,
+                        mode=mode,
+                        station_id=station_id,
+                        start_time=start_time,
+                        end_time=end_time,
+                        use_timestamp=use_timestamp,
+                        as_df=as_df,
+                        cols=cols,
+                        use_opendap=as_xarray_dataset,
+                    )
+                    station_futures[future] = (station_id, mode)
+
+            for future in as_completed(station_futures):
+                station_id, mode = station_futures[future]
+                try:
+                    station_data, station_id = future.result()
+                    self.log(
+                        level=logging.DEBUG,
+                        station_id=station_id,
+                        message=(f"Successfully processed request for "
+                                 f"station_id {station_id}"))
+                    if as_df:
+                        station_data['station_id'] = station_id
+                    accumulated_data[mode].append(station_data)
+                except (RequestException, ResponseException,
+                        HandlerException) as e:
+                    self.log(
+                        level=logging.WARN,
+                        station_id=station_id,
+                        message=(
+                            f"Failed to process request for station_id "
+                            f"{station_id} with error: {e}"))
+ self.log(logging.INFO, message="Finished processing request.")
+ return self._handle_accumulate_data(accumulated_data)
+
+ def get_modes(self,
+ use_opendap: bool = False,
+ as_xarray_dataset: Optional[bool] = None) -> List[str]:
+ """Get the list of supported modes for `get_data(...)`.
+
+ Args:
+ use_opendap (bool): Whether to return the available
+ modes for opendap `xarray.Dataset` data.
+ as_xarray_dataset (bool): An alias for `use_opendap`.
+
+ Returns:
+ (List[str]) the available modalities.
+ """
+ if as_xarray_dataset is not None:
+ use_opendap = as_xarray_dataset
+
+ if use_opendap:
+ return [
+ v for v in vars(self._opendap_data_api) if not v.startswith('_')
+ ]
+ return [v for v in vars(self._data_api) if not v.startswith('_')]
+
+ @staticmethod
+ def save_xarray_dataset(dataset: xarray.Dataset, output_filepath: str,
+ **kwargs) -> None:
+ """
+        Saves an `xarray.Dataset` as netCDF to a user-specified file path.
+
+ Args:
+ dataset: The xarray dataset to save.
+ output_filepath: The path to save the dataset to.
+ **kwargs: Additional keyword arguments to pass to `dataset.to_netcdf`.
+
+ Returns:
+            None: The dataset is written to disk.
+ """
+ dataset.to_netcdf(output_filepath, **kwargs)
+
+ """ PRIVATE """
+
+ def _get_request_handler(
+ self,
+ cache_limit: int,
+ delay: int,
+ retries: int,
+ backoff_factor: float,
+ headers: dict,
+ debug: bool,
+ verify_https: bool,
+ ) -> Any:
+ """Build a new `RequestHandler` for the `NdbcApi`."""
+ return RequestHandler(
+ cache_limit=cache_limit or self.cache_limit,
+ log=self.log,
+ delay=delay,
+ retries=retries,
+ backoff_factor=backoff_factor,
+ headers=headers,
+ debug=debug,
+ verify_https=verify_https,
+ )
+
+ @staticmethod
+ def _parse_station_id(station_id: Union[str, int]) -> str:
+ """Parse station id."""
+ station_id = str(station_id) # expect string-valued station id
+ station_id = station_id.lower() # expect lowercased station id
+ return station_id
+
+ @staticmethod
+ def _handle_timestamp(timestamp: Union[datetime, str]) -> datetime:
+ """Convert the specified timestamp to `datetime.datetime`."""
+ if isinstance(timestamp, datetime):
+ return timestamp
+ else:
+ try:
+ return datetime.strptime(timestamp, '%Y-%m-%d %H:%M')
+ except ValueError as e:
+ raise TimestampException from e
+
+ @staticmethod
+ def _enforce_timerange(df: pd.DataFrame, start_time: datetime,
+ end_time: datetime) -> pd.DataFrame:
+ """Down-select to the data within the specified `datetime` range."""
+ try:
+ df = df.loc[(df.index.values >= pd.Timestamp(start_time)) &
+ (df.index.values <= pd.Timestamp(end_time))]
+ except ValueError as e:
+ raise TimestampException(
+ 'Failed to enforce `start_time` to `end_time` range.') from e
+ return df
+
+ @staticmethod
+ def _handle_data(data: pd.DataFrame,
+ as_df: bool = True,
+ cols: List[str] = None) -> Union[pd.DataFrame, dict]:
+ """Apply column down selection and return format handling."""
+ if cols:
+ try:
+ data = data[[*cols]]
+ except (KeyError, ValueError) as e:
+ raise ParserException(
+ 'Failed to parse column selection.') from e
+ if as_df and isinstance(data, pd.DataFrame):
+ return data
+ elif isinstance(data, pd.DataFrame) and not as_df:
+ return data.to_dict()
+ elif as_df:
+ try:
+                return pd.DataFrame.from_dict(data, orient='index')
+            except (NotImplementedError, ValueError, TypeError) as e:
+                raise HandlerException(
+                    'Failed to convert `dict` to `pd.DataFrame`.') from e
+ else:
+ return data
+
+ def _handle_accumulate_data(
+ self,
+ accumulated_data: Dict[str, List[Union[pd.DataFrame, dict,
+ xarray.Dataset]]],
+ ) -> Union[pd.DataFrame, dict]:
+ """Accumulate the data from multiple stations and modes."""
+ for k in list(accumulated_data.keys()):
+ if not accumulated_data[k]:
+ del accumulated_data[k]
+
+ if not accumulated_data:
+ return {}
+
+ return_as_df = isinstance(
+ accumulated_data[list(accumulated_data.keys())[-1]][0],
+ pd.DataFrame)
+ use_opendap = isinstance(
+ accumulated_data[list(accumulated_data.keys())[-1]][0],
+ xarray.Dataset)
+
+ data: Union[List[pd.DataFrame], List[xarray.Dataset],
+ dict] = [] if return_as_df or use_opendap else {}
+
+ for mode, station_data in accumulated_data.items():
+ if return_as_df:
+ data.extend(station_data)
+ elif use_opendap:
+ data.extend(station_data)
+ else:
+ data[mode] = station_data
+
+ if return_as_df:
+ df = pd.concat(data, axis=0)
+ df.reset_index(inplace=True, drop=False)
+ df.set_index(['timestamp', 'station_id'], inplace=True)
+ return df
+ elif use_opendap:
+ return merge_datasets(data)
+ return data
+
+ def _handle_get_data(
+ self,
+ mode: str,
+ station_id: str,
+ start_time: datetime,
+ end_time: datetime,
+ use_timestamp: bool,
+ as_df: bool = True,
+ cols: List[str] = None,
+ use_opendap: bool = False,
+ ) -> Tuple[Union[pd.DataFrame, xarray.Dataset, dict], str]:
+ start_time = self._handle_timestamp(start_time)
+ end_time = self._handle_timestamp(end_time)
+ station_id = self._parse_station_id(station_id)
+ if use_opendap:
+ data_api_call = getattr(self._opendap_data_api, mode, None)
+ else:
+ data_api_call = getattr(self._data_api, mode, None)
+ if not data_api_call:
+ raise RequestException(
+ 'Please supply a supported mode from `get_modes()`.')
+ try:
+ data = data_api_call(
+ self._handler,
+ station_id,
+ start_time,
+ end_time,
+ use_timestamp,
+ )
+ except (ResponseException, ValueError, TypeError, KeyError) as e:
+ raise ResponseException(
+ f'Failed to handle API call.\nRaised from {e}') from e
+ if use_timestamp:
+ if use_opendap:
+ data = filter_dataset_by_time_range(data, start_time, end_time)
+ else:
+ data = self._enforce_timerange(df=data,
+ start_time=start_time,
+ end_time=end_time)
+ try:
+ if use_opendap:
+ if cols:
+ handled_data = filter_dataset_by_variable(data, cols)
+ else:
+ handled_data = data
+ else:
+ handled_data = self._handle_data(data, as_df, cols)
+ except (ValueError, KeyError, AttributeError) as e:
+ raise ParserException(
+ f'Failed to handle returned data.\nRaised from {e}') from e
+
+ return (handled_data, station_id)
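
Taken together, the handlers above support the end-to-end flow the module docstring advertises. A hedged usage sketch (station IDs and returned columns depend on live NDBC data):

```python
# Sketch: locate a nearby station, then pull a month of stdmet data
# for it and one other buoy; rows are indexed by (timestamp, station_id).
from datetime import datetime, timedelta

from ndbc_api import NdbcApi

api = NdbcApi()
station = api.nearest_station(lat=38.88, lon=-76.43)  # e.g. 'tplm2'
df = api.get_data(
    station_ids=[station, '41001'],
    modes=['stdmet'],
    start_time=datetime.now() - timedelta(days=30),
    end_time=datetime.now(),
    as_df=True,
)
```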
diff --git a/ndbc_api/utilities/__init__.py b/ndbc_api/utilities/__init__.py
new file mode 100644
index 0000000..e69de29
diff --git a/ndbc_api/utilities/log_formatter.py b/ndbc_api/utilities/log_formatter.py
new file mode 100644
index 0000000..f1be1a1
--- /dev/null
+++ b/ndbc_api/utilities/log_formatter.py
@@ -0,0 +1,16 @@
+"""
+A metaclass for singleton types.
+"""
+
+import pprint
+from logging import Formatter
+
+
+class LogFormatter(Formatter):
+ """Formatter that pretty-prints dictionaries in log messages."""
+
+ def format(self, record):
+ formatted_message = super().format(record)
+ if isinstance(record.msg, dict):
+ formatted_message += "\n" + pprint.pformat(record.msg)
+ return formatted_message
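
A minimal sketch of wiring `LogFormatter` into a logger (the logger name and format string are arbitrary choices for illustration):

```python3
import logging

from ndbc_api.utilities.log_formatter import LogFormatter

logger = logging.getLogger('ndbc_demo')
handler = logging.StreamHandler()
handler.setFormatter(LogFormatter('%(levelname)s %(name)s'))
logger.addHandler(handler)
logger.setLevel(logging.DEBUG)

# A dict message is pretty-printed on its own lines after the prefix.
logger.debug({'station_id': '44025', 'status': 200})
```
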
diff --git a/ndbc_api/utilities/opendap/__init__.py b/ndbc_api/utilities/opendap/__init__.py
new file mode 100644
index 0000000..e69de29
diff --git a/ndbc_api/utilities/opendap/dataset.py b/ndbc_api/utilities/opendap/dataset.py
new file mode 100644
index 0000000..68d1b36
--- /dev/null
+++ b/ndbc_api/utilities/opendap/dataset.py
@@ -0,0 +1,88 @@
+from datetime import datetime
+from typing import List, Union
+
+import xarray
+
+
+def concat_datasets(
+ datasets: List[xarray.Dataset],
+ temporal_dim_name: str = 'time',
+) -> xarray.Dataset:
+    """Concatenates multiple xarray datasets along a shared temporal dimension.
+
+    Handles cases where datasets might not have the same variables,
+    but requires that all datasets share the same dimensions. For
+    data stored on the THREDDS server, all datasets are expected to
+    have `time`, `latitude`, and `longitude` dimensions.
+
+    Args:
+        datasets (List[xarray.Dataset]): A list of xarray datasets
+            to concatenate.
+        temporal_dim_name (str): The name of the temporal dimension
+            along which to concatenate. Defaults to `'time'`.
+
+    Returns:
+        A xarray.Dataset object containing the joined data.
+    """
+ result = xarray.concat(datasets, dim=temporal_dim_name)
+ return result
+
+
+def merge_datasets(datasets: List[xarray.Dataset]) -> xarray.Dataset:
+ """Merges multiple xarray datasets using their shared dimensions.
+
+ Handles cases where datasets might not have the same variables,
+ but requires that all datasets share the same dimensions. For
+ data stored on the THREDDS server, all datasets are expected to
+ have `time`, `latitude`, and `longitude` dimensions.
+
+ Args:
+ datasets (List[xarray.Dataset]): A list of xarray datasets
+ to join.
+
+ Returns:
+ A xarray.Dataset object containing the merged data.
+ """
+ result = xarray.merge(datasets, compat='override')
+ return result
+
+
+def filter_dataset_by_time_range(
+ dataset: xarray.Dataset,
+ start_time: datetime,
+ end_time: datetime,
+ temporal_dim_name: str = 'time',
+) -> xarray.Dataset:
+    """
+    Filters an xarray Dataset to keep only data within a specified time range.
+
+    Args:
+        dataset: The xarray Dataset object.
+        start_time: The start of the time range (inclusive), as a `datetime`.
+        end_time: The end of the time range (inclusive), as a `datetime`.
+        temporal_dim_name: The name of the temporal dimension to slice on.
+            Defaults to `'time'`.
+
+    Returns:
+        A new xarray Dataset containing only data within the time range.
+    """
+ filtered_ds = dataset.sel({temporal_dim_name: slice(start_time, end_time)})
+ return filtered_ds
+
+
+def filter_dataset_by_variable(
+ dataset: xarray.Dataset,
+ cols: Union[List[str], None] = None,
+) -> xarray.Dataset:
+    """
+    Filters an xarray Dataset to keep only the variables named in cols.
+
+    Args:
+        dataset: The xarray Dataset object.
+        cols: A list of variable names to keep; if None, the dataset is
+            returned unchanged.
+
+    Returns:
+        A new xarray Dataset containing only the variables in cols.
+    """
+ if cols is None:
+ return dataset
+ return dataset[cols]
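
The two filters above compose naturally. A small self-contained example on a synthetic dataset (the variable names are illustrative, not NDBC's):

```python3
from datetime import datetime

import numpy as np
import pandas as pd
import xarray

from ndbc_api.utilities.opendap.dataset import (
    filter_dataset_by_time_range, filter_dataset_by_variable)

times = pd.date_range('2023-01-01', periods=6, freq='h')
ds = xarray.Dataset(
    {'wind_speed': ('time', np.arange(6.0)),
     'wave_height': ('time', np.arange(6.0) / 2)},
    coords={'time': times},
)

# Keep three hours of data, then drop all but one variable.
subset = filter_dataset_by_time_range(ds, datetime(2023, 1, 1, 1),
                                      datetime(2023, 1, 1, 3))
subset = filter_dataset_by_variable(subset, cols=['wind_speed'])
print(subset)  # 3 time steps, one data variable
```
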
diff --git a/ndbc_api/utilities/req_cache.py b/ndbc_api/utilities/req_cache.py
new file mode 100644
index 0000000..457a9ba
--- /dev/null
+++ b/ndbc_api/utilities/req_cache.py
@@ -0,0 +1,48 @@
+class RequestCache:
+    """A least-recently-used (LRU) cache mapping requests to responses.
+
+    Entries are kept in a doubly-linked list ordered from least- to
+    most-recently used; `left` and `right` are sentinel nodes marking
+    the two ends of the list.
+    """
+
+    class Request:
+        """A single cached request/response pair (a linked-list node)."""
+
+        __slots__ = 'k', 'v', 'next', 'prev'
+
+        def __init__(self, request: str, response: dict):
+            self.k = request
+            self.v = response
+            self.next = self.prev = None
+
+    def __init__(self, capacity: int) -> None:
+        self.capacity = capacity
+        self.cache = dict()
+        # Sentinel nodes: the least-recently-used entry sits next to
+        # `left`, the most-recently-used next to `right`.
+        self.left = RequestCache.Request('$', '$')
+        self.right = RequestCache.Request('$', '$')
+        self.left.next = self.right
+        self.right.prev = self.left
+
+ def remove(self, node: Request) -> None:
+ node.prev.next = node.next
+ node.next.prev = node.prev
+
+ def add(self, node: Request):
+ node.prev = self.right.prev
+ node.next = self.right
+ self.right.prev.next = node
+ self.right.prev = node
+
+ def get(self, request: str) -> dict:
+ if request in self.cache:
+ self.remove(self.cache[request])
+ self.add(self.cache[request])
+ return self.cache[request].v
+ else: # request not made before
+ return dict()
+
+ def put(self, request: str, response: dict) -> None:
+ if request in self.cache:
+ self.remove(self.cache[request])
+
+ self.cache[request] = RequestCache.Request(request, response)
+ self.add(self.cache[request])
+
+ if len(self.cache) > self.capacity:
+ to_remove = self.left.next
+ self.remove(to_remove)
+ del self.cache[to_remove.k]
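
A quick sketch of the eviction behavior: with `capacity=2`, touching an entry via `get` refreshes it, so the least-recently-used entry is evicted first (the URLs are placeholders):

```python3
cache = RequestCache(capacity=2)
cache.put('https://example.org/a', {'status': 200, 'body': 'a'})
cache.put('https://example.org/b', {'status': 200, 'body': 'b'})

cache.get('https://example.org/a')  # refresh 'a'; 'b' is now least recent
cache.put('https://example.org/c', {'status': 200, 'body': 'c'})  # evicts 'b'

print(cache.get('https://example.org/b'))  # {} -- evicted
print(cache.get('https://example.org/a'))  # {'status': 200, 'body': 'a'}
```
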
diff --git a/ndbc_api/utilities/req_handler.py b/ndbc_api/utilities/req_handler.py
new file mode 100644
index 0000000..6e12889
--- /dev/null
+++ b/ndbc_api/utilities/req_handler.py
@@ -0,0 +1,229 @@
+"""Handles requests to the NDBC, caching responses for each station.
+
+This module defines the `RequestHandler`, a singleton HTTP cache which serves
+to handle requests to the NDBC over HTTP and store requests and responses in a
+cache. The cache is segregated by station, such that a cache limit can be
+enforced on a station level.
+
+Example:
+ ```python3
+ handler = RequestHandler(
+ cache_limit=1,
+ delay=2,
+ retries=3,
+ backoff_factor=0.8,
+ debug=True,
+ verify_https=True,
+ )
+ response = handler.execute_request(
+ url='foo.bar'
+ )
+ ```
+
+Attributes:
+ stations (:obj:`list`): A list of `Station`s to which requests have
+ been made.
+"""
+import logging
+from typing import List, Union, Callable
+
+import requests
+from urllib3.util import Retry
+
+from .req_cache import RequestCache
+from .singleton import Singleton
+
+
+class RequestHandler(metaclass=Singleton):
+    """A singleton handler for HTTP(S) requests to the NDBC data service.
+
+    Executes requests against the NDBC, retrying failed requests with
+    exponential back-off, and caches responses in a per-station LRU cache
+    so that repeated queries over similar time ranges are served locally.
+
+ Attributes:
+ cache_limit (:int:): The handler's global limit for caching
+ `NdbcApi` responses. This is implemented as a least-recently
+ used cache, designed to conserve NDBC resources when querying
+ measurements for a given station over similar time ranges.
+ logger (:obj:`logging.Logger`): The logger at which to register HTTP
+ request and response status codes and headers used for debug
+ purposes.
+ delay (:int:): The HTTP(s) request delay parameter, in seconds.
+        retries (:int:): The number of times to retry a request to the NDBC
+            data service.
+ backoff_factor (:float:): The back-off parameter, used in conjunction with
+ `retries` to re-attempt requests to the NDBC data service.
+ headers (:dict:): The headers with which to execute the requests to the NDBC data
+ service.
+ debug (:bool:): A flag for verbose logging and response-level status reporting.
+ Affects the instance's `logging.Logger` and the behavior of its
+ private `RequestHandler` instance.
+ verify_https (:bool:): A flag which indicates whether to attempt requests to the
+ NDBC data service over HTTP or HTTPS.
+ """
+
+ class Station:
+        """Holds the cached requests and responses for a single NDBC station.
+
+ Attributes:
+ id_ (:str:): The key for the `Station` object.
+ reqs (:obj:`ndbc_api.utilities.RequestCache`): The `RequestCache`
+ for the Station with the given `id_`, uses the cache limit of
+ its parent `RequestHandler`.
+ """
+ __slots__ = 'id_', 'reqs'
+
+ def __init__(self, station_id: str, cache_limit: int) -> None:
+ self.id_ = station_id
+ self.reqs = RequestCache(cache_limit)
+
+ def __init__(
+ self,
+ cache_limit: int,
+ log: Callable[[Union[str, int, dict]], None],
+ delay: int,
+ retries: int,
+ backoff_factor: float,
+ headers: dict = None,
+ debug: bool = True,
+ verify_https: bool = True,
+ ) -> None:
+ self._cache_limit = cache_limit
+ self._request_headers = headers or {}
+ self.log = log
+ self.stations = []
+ self._delay = delay
+ self._retries = retries
+ self._backoff_factor = backoff_factor
+ self._debug = debug
+ self._verify_https = verify_https
+ self._session = self._create_session()
+
+ def get_cache_limit(self) -> int:
+ """Return the current station-level cache limit for NDBC requests."""
+ return self._cache_limit
+
+ def set_cache_limit(self, cache_limit: int) -> None:
+ """Set a new station-level cache limit for NDBC requests."""
+ self._cache_limit = cache_limit
+
+ def get_headers(self) -> dict:
+        """Return the headers currently used for NDBC data service requests."""
+ return self._request_headers
+
+ def update_headers(self, new: dict) -> None:
+ """Add new headers to future NDBC data service requests."""
+ self._request_headers.update(new)
+
+ def set_headers(self, request_headers: dict) -> None:
+ """Reset the request headers using the new supplied headers."""
+ self._request_headers = request_headers
+
+ def has_station(self, station_id: Union[str, int]) -> bool:
+ """Determine if the NDBC API already made a request to this station."""
+ for s in self.stations:
+ if s.id_ == station_id:
+ return True
+ return False
+
+ def get_station(self, station_id: Union[str, int]) -> Station:
+ """Get `RequestCache` with `id_` matching the supplied `station_id`."""
+ if isinstance(station_id, int):
+ station_id = str(station_id)
+ if not self.has_station(station_id):
+ self.log(logging.DEBUG,
+ station_id=station_id,
+ message=f'Adding station {station_id} to cache.')
+ self.add_station(station_id=station_id)
+ for s in self.stations:
+ if s.id_ == station_id:
+ self.log(logging.DEBUG,
+ station_id=station_id,
+ message=f'Found station {station_id} in cache.')
+ return s
+
+ def add_station(self, station_id: Union[str, int]) -> None:
+        """Add a new `RequestCache` for the supplied `station_id`."""
+ self.stations.append(
+ RequestHandler.Station(station_id=station_id,
+ cache_limit=self._cache_limit))
+
+ def handle_requests(self, station_id: Union[str, int],
+ reqs: List[str]) -> List[str]: # pragma: no cover
+ """Handle many string-valued requests against a supplied station."""
+ responses = []
+ self.log(
+ logging.INFO,
+ message=f'Handling {len(reqs)} requests for station {station_id}.')
+ for req in reqs:
+ responses.append(self.handle_request(station_id=station_id,
+ req=req))
+ return responses
+
+ def handle_request(self, station_id: Union[str, int], req: str) -> dict:
+        """Handle a single string-valued request against a supplied station."""
+ stn = self.get_station(station_id=station_id)
+ self.log(logging.DEBUG, message=f'Handling request {req}.')
+ if req not in stn.reqs.cache:
+ self.log(logging.DEBUG, message=f'Adding request {req} to cache.')
+ resp = self.execute_request(url=req,
+ station_id=station_id,
+ headers=self._request_headers)
+ stn.reqs.put(request=req, response=resp)
+ else:
+ self.log(logging.DEBUG, message=f'Request {req} already in cache.')
+ return stn.reqs.get(request=req)
+
+ def execute_request(self, station_id: Union[str, int], url: str,
+ headers: dict) -> dict: # pragma: no cover
+ """Execute a request with the current headers to NDBC data service."""
+ self.log(logging.DEBUG,
+ station_id=station_id,
+ message=f'GET: {url}',
+ extra_data={'headers': headers})
+ response = self._session.get(
+ url=url,
+ headers=headers,
+ allow_redirects=True,
+ verify=self._verify_https,
+ )
+ self.log(logging.DEBUG,
+ station_id=station_id,
+ message=f'Response status: {response.status_code}')
+ if response.status_code != 200: # web request did not succeed
+ return dict(status=response.status_code, body='')
+        elif any([
+                'netcdf' in response.headers.get('Content-Type', '').lower(),
+                'octet' in response.headers.get('Content-Type', '').lower()
+        ]):
+ return dict(status=response.status_code, body=response.content)
+ return dict(status=response.status_code, body=response.text)
+
+ """ PRIVATE """
+
+ def _create_session(self) -> requests.Session:
+        """Create a new `Session` using the `RequestHandler` configuration."""
+ self.log(logging.DEBUG, message='Creating new session.')
+ session = requests.Session()
+ retry = Retry(
+ backoff_factor=self._backoff_factor,
+ total=self._retries,
+ )
+ http_adapter = requests.adapters.HTTPAdapter(max_retries=retry)
+ session.mount('https://', http_adapter)
+ session.mount('http://', http_adapter)
+ self.log(logging.INFO, message='Created session.')
+ return session
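
The `_create_session` helper above follows the standard `requests` + `urllib3` retry pattern. A standalone sketch with the same parameter values as the module docstring's example (the URL is only for illustration):

```python3
import requests
from urllib3.util import Retry

retry = Retry(total=3, backoff_factor=0.8)
adapter = requests.adapters.HTTPAdapter(max_retries=retry)

session = requests.Session()
session.mount('https://', adapter)
session.mount('http://', adapter)

# Failed requests are retried up to `total` times, sleeping roughly
# backoff_factor * 2**(n - 1) seconds before the n-th retry (the exact
# schedule varies slightly across urllib3 versions).
response = session.get('https://www.ndbc.noaa.gov/')
print(response.status_code)
```
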
diff --git a/ndbc_api/utilities/singleton.py b/ndbc_api/utilities/singleton.py
new file mode 100644
index 0000000..372a6e7
--- /dev/null
+++ b/ndbc_api/utilities/singleton.py
@@ -0,0 +1,14 @@
+"""
+A metaclass for singleton types.
+"""
+
+
+class Singleton(type):
+
+ _instances = {}
+
+ def __call__(cls, *args, **kwargs):
+ if cls not in cls._instances:
+ cls._instances[cls] = super(Singleton,
+ cls).__call__(*args, **kwargs)
+ return cls._instances[cls]
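
A minimal illustration of the metaclass; the `Config` class here is hypothetical. Note that constructor arguments passed after the first instantiation are ignored:

```python3
class Config(metaclass=Singleton):

    def __init__(self, value: int) -> None:
        self.value = value


a = Config(1)
b = Config(2)  # returns the cached instance; __init__ is not re-run
assert a is b
assert b.value == 1
```
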
diff --git a/pages/custom.py b/pages/custom.py
new file mode 100644
index 0000000..f4a45df
--- /dev/null
+++ b/pages/custom.py
@@ -0,0 +1,64 @@
+import dash, gatheringInfo
+from dash import html, dcc, callback, Input, Output
+
+dash.register_page(__name__)
+
+# layout for the user to enter a custom location (latitude and longitude)
+layout = html.Div([
+ html.H5("Input a custom location to access weather data and SSI"),
+ html.Div([
+ html.H6('Latitude (in °): '),
+        dcc.Input(id='lat-custom-input', type='text', placeholder='', debounce=True),
+ html.H6('Longitude (in °): '),
+ dcc.Input(id='long-custom-input', type='text', placeholder='', debounce=True)
+ ]),
+ html.Br(),
+    html.Div(id='custom-output')
+])
+
+# callback decorator to display weather data based on input, using radial search
+@callback(
+ Output(component_id='custom-output', component_property='children'),
+ Input(component_id='lat-custom-input', component_property='value'),
+ Input(component_id='long-custom-input',component_property='value')
+)
+def custom_data_display(cust_lat, cust_long):
+    """
+    :param cust_lat: latitude string from the first input on the GUI
+    :param cust_long: longitude string from the second input on the GUI
+    :return: strings of data separated by html line breaks
+    """
+
+    # display nothing until both coordinates have been entered
+    if not cust_lat or not cust_long:
+        return ''
+
+    # define a new object through the Add_New_Location(lat, long) class from the gatheringInfo file
+    new_loc_buoy = gatheringInfo.Add_New_Location(float(cust_lat), float(cust_long))
+
+ # obtain average weather data of the new, user-defined location
+ avg_wind_speed = new_loc_buoy.get_SSI_WSPD()
+ avg_wave_height = new_loc_buoy.get_SSI_WVHT()
+ avg_pressure = new_loc_buoy.get_SSI_PRES()
+
+    # determine storm strength from the storm strength index (SSI)
+    SSI = (0.5 * ((avg_wind_speed / 60) ** 2) +
+           0.3 * (930 / avg_pressure) +
+           0.2 * avg_wave_height / 12)
+
+    if SSI < 0.2:
+        storm_strength = "The expected storm should be a minimal storm"
+    elif SSI < 0.4:
+        storm_strength = "The expected storm should be a moderate storm"
+    elif SSI < 0.6:
+        storm_strength = "The expected storm should be a strong storm"
+    elif SSI < 0.8:
+        storm_strength = "The expected storm should be a severe storm"
+    else:
+        storm_strength = "The expected storm should be an extreme storm"
+
+    return ['Weather conditions at location: ',
+            html.Br(),
+            f'Average Wind Speed: {avg_wind_speed} m/s',
+            html.Br(),
+            f'Average Wave Height: {avg_wave_height} m',
+            html.Br(),
+            f'Average Pressure: {avg_pressure} millibars',
+            html.Br(),
+            storm_strength
+            ]
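
As a sanity check on the SSI banding used in each page, plugging a plausible set of regional averages (hypothetical numbers) into the same formula:

```python3
# Hypothetical regional averages, to sanity-check the SSI banding above.
avg_wind_speed = 30.0   # m/s
avg_pressure = 1000.0   # millibars
avg_wave_height = 3.0   # m

SSI = (0.5 * ((avg_wind_speed / 60) ** 2) +
       0.3 * (930 / avg_pressure) +
       0.2 * avg_wave_height / 12)
print(round(SSI, 3))  # 0.454 -> falls in the 0.4-0.6 "strong storm" band
```
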
diff --git a/pages/florida.py b/pages/florida.py
new file mode 100644
index 0000000..7600fbc
--- /dev/null
+++ b/pages/florida.py
@@ -0,0 +1,84 @@
+import dash, gatheringInfo
+from dash import html, dcc, callback, Input, Output
+
+
+dash.register_page(__name__)
+
+options = ['none','Port Everglades','Virginia Key','Little Madeira', 'Murray Key', 'Watson Place', 'Fort Myers']
+
+# dropdown menu of buoys in selected region
+layout = html.Div([
+ html.Div(className='row', children='Southern Florida Buoy Selection',
+ style={'textAlign': 'left', 'color': 'black', 'fontSize': 24}),
+ dcc.Dropdown(options,'none', id='fl_dropdown'),
+ html.Br(),
+ html.Div(id='fl_output')
+ ])
+
+# callback decorator identifying the input and output, and a function that takes the input and outputs data to the GUI
+@callback(
+ Output(component_id='fl_output', component_property='children'),
+ Input(component_id='fl_dropdown', component_property='value')
+
+)
+def display_single_buoy_data(selected_string):
+ """
+ :param selected_string: this is the selection from the dropdown menu
+ :return: nothing is displayed if the selection is 'none' or displays the weather data from the selection
+ """
+ id_list = ['PEGF1', 'VAKF1', 'LMDF1', 'MUKF1', 'WPLF1', 'FMRF1']
+
+ # display individual buoy data
+ if selected_string == 'none':
+ # display nothing
+ return ''
+ else:
+ # convert selected_string to station ID using id_list
+ selection_index = options.index(selected_string)
+ selected_station_id = id_list[selection_index - 1]
+ single_buoy = gatheringInfo.Buoy(selected_station_id)
+
+ buoy_name = single_buoy.getNAME()
+ wind_speed = single_buoy.getWSPD()
+ wave_height = single_buoy.getWVHT()
+ pressure = single_buoy.getPRES()
+
+ region = gatheringInfo.BB(id_list)
+
+ avg_wind_speed = region.get_SSI_WSPD()
+ avg_wave_height = region.get_SSI_WVHT()
+ avg_pressure = region.get_SSI_PRES()
+
+        # determine storm strength from the storm strength index (SSI)
+        SSI = (0.5 * ((avg_wind_speed / 60) ** 2) +
+               0.3 * (930 / avg_pressure) +
+               0.2 * avg_wave_height / 12)
+
+        if SSI < 0.2:
+            storm_strength = "The expected storm should be a minimal storm"
+        elif SSI < 0.4:
+            storm_strength = "The expected storm should be a moderate storm"
+        elif SSI < 0.6:
+            storm_strength = "The expected storm should be a strong storm"
+        elif SSI < 0.8:
+            storm_strength = "The expected storm should be a severe storm"
+        else:
+            storm_strength = "The expected storm should be an extreme storm"
+
+        return [f'Weather conditions at {buoy_name} buoy:',
+                html.Br(),
+                f'Wind Speed: {wind_speed} m/s',
+                html.Br(),
+                f'Wave Height: {wave_height} m',
+                html.Br(),
+                f'Pressure: {pressure} millibars',
+                html.Br(),
+                'Southern Florida Region Weather Data:',
+                html.Br(),
+                f'Average Wind Speed: {avg_wind_speed} m/s',
+                html.Br(),
+                f'Average Wave Height: {avg_wave_height} m',
+                html.Br(),
+                f'Average Pressure: {avg_pressure} millibars',
+                html.Br(),
+                storm_strength
+                ]
diff --git a/pages/georgia.py b/pages/georgia.py
new file mode 100644
index 0000000..d0213c6
--- /dev/null
+++ b/pages/georgia.py
@@ -0,0 +1,82 @@
+import dash, gatheringInfo
+from dash import html, dcc, callback, Input, Output
+
+dash.register_page(__name__)
+
+options = ['none','Fort Pulaski','Grays Reef','Kings Bay MSF Pier', 'Sapelo Island Reserve']
+
+# dropdown menu of buoys in selected region
+layout = html.Div([
+ html.Div(className='row', children='Georgia Region Buoy Selection',
+ style={'textAlign': 'left', 'color': 'black', 'fontSize': 24}),
+ dcc.Dropdown(options,'none', id='ga_dropdown'),
+ html.Br(),
+ html.Div(id='ga_output')
+])
+
+
+@callback(
+ Output(component_id='ga_output', component_property='children'),
+ Input(component_id='ga_dropdown', component_property='value')
+)
+def display_single_buoy_data(selected_string):
+ """
+ :param selected_string: this is the selection from the dropdown menu
+ :return: nothing is displayed if the selection is 'none' or displays the weather data from the selection
+ """
+ id_list = ['FPKG1', '41008', 'KBMG1', 'SAQG1']
+
+ # display individual buoy data
+ if selected_string == 'none':
+ # display nothing
+ return ''
+ else:
+ # convert selected_string to station ID using id_list
+ selection_index = options.index(selected_string)
+ selected_station_id = id_list[selection_index - 1]
+ single_buoy = gatheringInfo.Buoy(selected_station_id)
+
+ buoy_name = single_buoy.getNAME()
+ wind_speed = single_buoy.getWSPD()
+ wave_height = single_buoy.getWVHT()
+ pressure = single_buoy.getPRES()
+
+ region = gatheringInfo.BB(id_list)
+
+ avg_wind_speed = region.get_SSI_WSPD()
+ avg_wave_height = region.get_SSI_WVHT()
+ avg_pressure = region.get_SSI_PRES()
+
+        # determine storm strength from the storm strength index (SSI)
+        SSI = (0.5 * ((avg_wind_speed / 60) ** 2) +
+               0.3 * (930 / avg_pressure) +
+               0.2 * avg_wave_height / 12)
+
+        if SSI < 0.2:
+            storm_strength = "The expected storm should be a minimal storm"
+        elif SSI < 0.4:
+            storm_strength = "The expected storm should be a moderate storm"
+        elif SSI < 0.6:
+            storm_strength = "The expected storm should be a strong storm"
+        elif SSI < 0.8:
+            storm_strength = "The expected storm should be a severe storm"
+        else:
+            storm_strength = "The expected storm should be an extreme storm"
+
+        return [f'Weather conditions at {buoy_name} buoy:',
+                html.Br(),
+                f'Wind Speed: {wind_speed} m/s',
+                html.Br(),
+                f'Wave Height: {wave_height} m',
+                html.Br(),
+                f'Pressure: {pressure} millibars',
+                html.Br(),
+                'Georgia Region Weather Data:',
+                html.Br(),
+                f'Average Wind Speed: {avg_wind_speed} m/s',
+                html.Br(),
+                f'Average Wave Height: {avg_wave_height} m',
+                html.Br(),
+                f'Average Pressure: {avg_pressure} millibars',
+                html.Br(),
+                storm_strength
+                ]
diff --git a/pages/maine.py b/pages/maine.py
new file mode 100644
index 0000000..b51b641
--- /dev/null
+++ b/pages/maine.py
@@ -0,0 +1,82 @@
+import dash, gatheringInfo
+from dash import html, dcc, callback, Input, Output
+
+dash.register_page(__name__)
+
+options = ['none','Eastport','Bar Harbor','Portland','Western Maine Shelf','Matinicus Rock','Jonesport']
+
+# dropdown menu of buoys in selected region
+layout = html.Div([
+ html.Div(className='row', children='Maine Region Buoy Selection',
+ style={'textAlign': 'left', 'color': 'black', 'fontSize': 24}),
+ dcc.Dropdown(options,'none', id='me_dropdown'),
+ html.Br(),
+ html.Div(id='me_output')
+])
+
+
+@callback(
+ Output(component_id='me_output', component_property='children'),
+ Input(component_id='me_dropdown', component_property='value')
+)
+def display_single_buoy_data(selected_string):
+ """
+ :param selected_string: this is the selection from the dropdown menu
+ :return: nothing is displayed if the selection is 'none' or displays the weather data from the selection
+ """
+ id_list = ['PSBM1','ATGM1','CASM1','44030','MISM1','44027']
+
+ # display individual buoy data
+ if selected_string == 'none':
+ # display nothing
+ return ''
+ else:
+ # convert selected_string to station ID using id_list
+ selection_index = options.index(selected_string)
+ selected_station_id = id_list[selection_index - 1]
+ single_buoy = gatheringInfo.Buoy(selected_station_id)
+
+ buoy_name = single_buoy.getNAME()
+ wind_speed = single_buoy.getWSPD()
+ wave_height = single_buoy.getWVHT()
+ pressure = single_buoy.getPRES()
+
+ region = gatheringInfo.BB(id_list)
+
+ avg_wind_speed = region.get_SSI_WSPD()
+ avg_wave_height = region.get_SSI_WVHT()
+ avg_pressure = region.get_SSI_PRES()
+
+        # determine storm strength from the storm strength index (SSI)
+        SSI = (0.5 * ((avg_wind_speed / 60) ** 2) +
+               0.3 * (930 / avg_pressure) +
+               0.2 * avg_wave_height / 12)
+
+        if SSI < 0.2:
+            storm_strength = "The expected storm should be a minimal storm"
+        elif SSI < 0.4:
+            storm_strength = "The expected storm should be a moderate storm"
+        elif SSI < 0.6:
+            storm_strength = "The expected storm should be a strong storm"
+        elif SSI < 0.8:
+            storm_strength = "The expected storm should be a severe storm"
+        else:
+            storm_strength = "The expected storm should be an extreme storm"
+
+        return [f'Weather conditions at {buoy_name} buoy:',
+                html.Br(),
+                f'Wind Speed: {wind_speed} m/s',
+                html.Br(),
+                f'Wave Height: {wave_height} m',
+                html.Br(),
+                f'Pressure: {pressure} millibars',
+                html.Br(),
+                'Maine Region Weather Data:',
+                html.Br(),
+                f'Average Wind Speed: {avg_wind_speed} m/s',
+                html.Br(),
+                f'Average Wave Height: {avg_wave_height} m',
+                html.Br(),
+                f'Average Pressure: {avg_pressure} millibars',
+                html.Br(),
+                storm_strength
+                ]
diff --git a/pages/maryland.py b/pages/maryland.py
new file mode 100644
index 0000000..8a09c15
--- /dev/null
+++ b/pages/maryland.py
@@ -0,0 +1,81 @@
+import dash, gatheringInfo
+from dash import html, dcc, callback, Input, Output
+
+dash.register_page(__name__)
+
+options = ['none','Baltimore','Chesapeake Bay','Annapolis','Washington D.C.','Cambridge','Cooperative Oxford Laboratory']
+
+# dropdown menu of buoys in selected region
+layout = html.Div([
+ html.Div(className='row', children='Maryland Region Buoy Selection',
+ style={'textAlign': 'left', 'color': 'black', 'fontSize': 24}),
+ dcc.Dropdown(options,'none', id='md_dropdown'),
+ html.Br(),
+ html.Div(id='md_output')
+ ])
+
+@callback(
+ Output(component_id='md_output', component_property='children'),
+ Input(component_id='md_dropdown', component_property='value')
+)
+def display_single_buoy_data(selected_string):
+ """
+ :param selected_string: this is the selection from the dropdown menu
+ :return: nothing is displayed if the selection is 'none' or displays the weather data from the selection
+ """
+ id_list = ['BLTM2', 'CPVM2', 'APAM2', 'WASD2', 'CAMM2', 'CXLM2']
+
+ # display individual buoy data
+ if selected_string == 'none':
+ # display nothing
+ return ''
+ else:
+ # convert selected_string to station ID using id_list
+ selection_index = options.index(selected_string)
+ selected_station_id = id_list[selection_index - 1]
+ single_buoy = gatheringInfo.Buoy(selected_station_id)
+
+ buoy_name = single_buoy.getNAME()
+ wind_speed = single_buoy.getWSPD()
+ wave_height = single_buoy.getWVHT()
+ pressure = single_buoy.getPRES()
+
+ region = gatheringInfo.BB(id_list)
+
+ avg_wind_speed = region.get_SSI_WSPD()
+ avg_wave_height = region.get_SSI_WVHT()
+ avg_pressure = region.get_SSI_PRES()
+
+        # determine storm strength from the storm strength index (SSI)
+        SSI = (0.5 * ((avg_wind_speed / 60) ** 2) +
+               0.3 * (930 / avg_pressure) +
+               0.2 * avg_wave_height / 12)
+
+        if SSI < 0.2:
+            storm_strength = "The expected storm should be a minimal storm"
+        elif SSI < 0.4:
+            storm_strength = "The expected storm should be a moderate storm"
+        elif SSI < 0.6:
+            storm_strength = "The expected storm should be a strong storm"
+        elif SSI < 0.8:
+            storm_strength = "The expected storm should be a severe storm"
+        else:
+            storm_strength = "The expected storm should be an extreme storm"
+
+        return [f'Weather conditions at {buoy_name} buoy:',
+                html.Br(),
+                f'Wind Speed: {wind_speed} m/s',
+                html.Br(),
+                f'Wave Height: {wave_height} m',
+                html.Br(),
+                f'Pressure: {pressure} millibars',
+                html.Br(),
+                'Maryland Region Weather Data:',
+                html.Br(),
+                f'Average Wind Speed: {avg_wind_speed} m/s',
+                html.Br(),
+                f'Average Wave Height: {avg_wave_height} m',
+                html.Br(),
+                f'Average Pressure: {avg_pressure} millibars',
+                html.Br(),
+                storm_strength
+                ]
diff --git a/pages/massachusetts.py b/pages/massachusetts.py
new file mode 100644
index 0000000..5a8894d
--- /dev/null
+++ b/pages/massachusetts.py
@@ -0,0 +1,81 @@
+import dash, gatheringInfo
+from dash import html, dcc, callback, Input, Output
+
+dash.register_page(__name__)
+
+options = ['none','Boston','Cape Cod Bay','Nantucket Sound','Massachusetts Bay','New Bedford','Buzzards Bay']
+
+# dropdown menu of buoys in selected region
+layout = html.Div([
+ html.Div('Massachusetts Region Buoy Selection',
+ style={'textAlign': 'left', 'color': 'black', 'fontSize': 24}),
+ dcc.Dropdown(options,'none', id='mass_dropdown'),
+ html.Br(),
+ html.Div(id='mass_output')
+ ])
+
+@callback(
+ Output(component_id='mass_output', component_property='children'),
+ Input(component_id='mass_dropdown', component_property='value')
+)
+def display_single_buoy_data(selected_string):
+ """
+ :param selected_string: this is the selection from the dropdown menu
+ :return: nothing is displayed if the selection is 'none' or displays the weather data from the selection
+ """
+ id_list = ['BHBM3','44090','44020','44029','NBGM3','BUZM3']
+
+ # display individual buoy data
+ if selected_string == 'none':
+ # display nothing
+ return ''
+ else:
+ # convert selected_string to station ID using id_list
+ selection_index = options.index(selected_string)
+ selected_station_id = id_list[selection_index - 1]
+ single_buoy = gatheringInfo.Buoy(selected_station_id)
+
+ buoy_name = single_buoy.getNAME()
+ wind_speed = single_buoy.getWSPD()
+ wave_height = single_buoy.getWVHT()
+ pressure = single_buoy.getPRES()
+
+ region = gatheringInfo.BB(id_list)
+
+ avg_wind_speed = region.get_SSI_WSPD()
+ avg_wave_height = region.get_SSI_WVHT()
+ avg_pressure = region.get_SSI_PRES()
+
+        # determine storm strength from the storm strength index (SSI)
+        SSI = (0.5 * ((avg_wind_speed / 60) ** 2) +
+               0.3 * (930 / avg_pressure) +
+               0.2 * avg_wave_height / 12)
+
+        if SSI < 0.2:
+            storm_strength = "The expected storm should be a minimal storm"
+        elif SSI < 0.4:
+            storm_strength = "The expected storm should be a moderate storm"
+        elif SSI < 0.6:
+            storm_strength = "The expected storm should be a strong storm"
+        elif SSI < 0.8:
+            storm_strength = "The expected storm should be a severe storm"
+        else:
+            storm_strength = "The expected storm should be an extreme storm"
+
+        return [f'Weather conditions at {buoy_name} buoy:',
+                html.Br(),
+                f'Wind Speed: {wind_speed} m/s',
+                html.Br(),
+                f'Wave Height: {wave_height} m',
+                html.Br(),
+                f'Pressure: {pressure} millibars',
+                html.Br(),
+                'Massachusetts Region Weather Data:',
+                html.Br(),
+                f'Average Wind Speed: {avg_wind_speed} m/s',
+                html.Br(),
+                f'Average Wave Height: {avg_wave_height} m',
+                html.Br(),
+                f'Average Pressure: {avg_pressure} millibars',
+                html.Br(),
+                storm_strength
+                ]
diff --git a/pages/new_york.py b/pages/new_york.py
new file mode 100644
index 0000000..38c913e
--- /dev/null
+++ b/pages/new_york.py
@@ -0,0 +1,84 @@
+import dash, gatheringInfo
+from dash import html, dcc, callback, Input, Output
+
+dash.register_page(__name__)
+
+options = ['none','Islip, NY','Breezy Point, NY','Sandy Hook, NJ','Kings Point, NY','Mariners Harbor, NY','Robbins Reef, NJ','The Battery, NY']
+
+# dropdown menu of buoys in selected region
+layout = html.Div([
+ html.Div('New York Region Buoy Selection',
+ style={'textAlign': 'left', 'color': 'black', 'fontSize': 24}),
+
+ dcc.Dropdown(
+ options,'none', id='ny_dropdown'),
+ html.Br(),
+ html.Div(id='ny_output')
+])
+
+# callback decorator identifying input and output and a function that takes the input and returns an output, to be displayed on the GUI
+@callback(
+ Output(component_id='ny_output', component_property='children'),
+ Input(component_id='ny_dropdown', component_property='value')
+)
+def display_single_buoy_data(selected_string):
+ """
+ :param selected_string: this is the selection from the dropdown menu
+ :return: nothing is displayed if the selection is 'none' or displays the weather data from the selection
+ """
+ id_list = ['44025', '44065', 'SDNH4', 'KPTN6', 'MHRN6', 'ROBN4', 'BATN6']
+
+ # display individual buoy data
+ if selected_string == 'none':
+ # display nothing
+ return ''
+ else:
+ # convert selected_string to station ID using id_list
+ selection_index = options.index(selected_string)
+ selected_station_id = id_list[selection_index - 1]
+ single_buoy = gatheringInfo.Buoy(selected_station_id)
+
+ buoy_name = single_buoy.getNAME()
+ wind_speed = single_buoy.getWSPD()
+ wave_height = single_buoy.getWVHT()
+ pressure = single_buoy.getPRES()
+
+ region = gatheringInfo.BB(id_list)
+
+ avg_wind_speed = region.get_SSI_WSPD()
+ avg_wave_height = region.get_SSI_WVHT()
+ avg_pressure = region.get_SSI_PRES()
+
+        # determine storm strength from the storm strength index (SSI)
+        SSI = (0.5 * ((avg_wind_speed / 60) ** 2) +
+               0.3 * (930 / avg_pressure) +
+               0.2 * avg_wave_height / 12)
+
+        if SSI < 0.2:
+            storm_strength = "The expected storm should be a minimal storm"
+        elif SSI < 0.4:
+            storm_strength = "The expected storm should be a moderate storm"
+        elif SSI < 0.6:
+            storm_strength = "The expected storm should be a strong storm"
+        elif SSI < 0.8:
+            storm_strength = "The expected storm should be a severe storm"
+        else:
+            storm_strength = "The expected storm should be an extreme storm"
+
+        return [f'Weather conditions at {buoy_name} buoy:',
+                html.Br(),
+                f'Wind Speed: {wind_speed} m/s',
+                html.Br(),
+                f'Wave Height: {wave_height} m',
+                html.Br(),
+                f'Pressure: {pressure} millibars',
+                html.Br(),
+                'New York Metropolitan Region Weather Data:',
+                html.Br(),
+                f'Average Wind Speed: {avg_wind_speed} m/s',
+                html.Br(),
+                f'Average Wave Height: {avg_wave_height} m',
+                html.Br(),
+                f'Average Pressure: {avg_pressure} millibars',
+                html.Br(),
+                storm_strength
+                ]
diff --git a/pages/north_carolina.py b/pages/north_carolina.py
new file mode 100644
index 0000000..2bf282f
--- /dev/null
+++ b/pages/north_carolina.py
@@ -0,0 +1,81 @@
+import dash, gatheringInfo
+from dash import html, dcc, callback, Input, Output
+
+dash.register_page(__name__)
+
+options = ['none','Frying Pan Shoals','Diamond Shoals','Beaufort','Hatteras','Nags Head','Wilmington Harbor']
+
+# dropdown menu of buoys in selected region
+layout = html.Div([
+ html.Div(className='row', children='North Carolina Region Buoy Selection',
+ style={'textAlign': 'left', 'color': 'black', 'fontSize': 24}),
+ dcc.Dropdown(options,'none', id='nc_dropdown'),
+ html.Br(),
+ html.Div(id='nc_output')
+ ])
+
+@callback(
+ Output(component_id='nc_output', component_property='children'),
+ Input(component_id='nc_dropdown', component_property='value')
+ )
+def display_single_buoy_data(selected_string):
+ """
+ :param selected_string: this is the selection from the dropdown menu
+ :return: nothing is displayed if the selection is 'none' or displays the weather data from the selection
+ """
+ id_list = ['41013','41025','BFTN7','HCGN7','44086','WLON7']
+
+ # display individual buoy data
+ if selected_string == 'none':
+ # display nothing
+ return ''
+ else:
+ # convert selected_string to station ID using id_list
+ selection_index = options.index(selected_string)
+ selected_station_id = id_list[selection_index - 1]
+ single_buoy = gatheringInfo.Buoy(selected_station_id)
+
+ buoy_name = single_buoy.getNAME()
+ wind_speed = single_buoy.getWSPD()
+ wave_height = single_buoy.getWVHT()
+ pressure = single_buoy.getPRES()
+
+ region = gatheringInfo.BB(id_list)
+
+ avg_wind_speed = region.get_SSI_WSPD()
+ avg_wave_height = region.get_SSI_WVHT()
+ avg_pressure = region.get_SSI_PRES()
+
+        # determine storm strength from the storm strength index (SSI)
+        SSI = (0.5 * ((avg_wind_speed / 60) ** 2) +
+               0.3 * (930 / avg_pressure) +
+               0.2 * avg_wave_height / 12)
+
+        if SSI < 0.2:
+            storm_strength = "The expected storm should be a minimal storm"
+        elif SSI < 0.4:
+            storm_strength = "The expected storm should be a moderate storm"
+        elif SSI < 0.6:
+            storm_strength = "The expected storm should be a strong storm"
+        elif SSI < 0.8:
+            storm_strength = "The expected storm should be a severe storm"
+        else:
+            storm_strength = "The expected storm should be an extreme storm"
+
+        return [f'Weather conditions at {buoy_name} buoy:',
+                html.Br(),
+                f'Wind Speed: {wind_speed} m/s',
+                html.Br(),
+                f'Wave Height: {wave_height} m',
+                html.Br(),
+                f'Pressure: {pressure} millibars',
+                html.Br(),
+                'North Carolina Region Weather Data:',
+                html.Br(),
+                f'Average Wind Speed: {avg_wind_speed} m/s',
+                html.Br(),
+                f'Average Wave Height: {avg_wave_height} m',
+                html.Br(),
+                f'Average Pressure: {avg_pressure} millibars',
+                html.Br(),
+                storm_strength
+                ]
diff --git a/pages/south_carolina.py b/pages/south_carolina.py
new file mode 100644
index 0000000..b247a0b
--- /dev/null
+++ b/pages/south_carolina.py
@@ -0,0 +1,82 @@
+import dash, gatheringInfo
+from dash import html, dcc, callback, Input, Output
+
+dash.register_page(__name__)
+
+options = ['none','Charleston','Winyah Bay Reserve','Springmaid Pier','Bennett\'s Point','Capers Nearshore','Fort Johnson']
+
+# dropdown menu of buoys in selected region
+layout = html.Div([
+ html.Div(className='row', children='South Carolina Region Buoy Selection',
+ style={'textAlign': 'left', 'color': 'black', 'fontSize': 24}),
+ dcc.Dropdown(options,'none', id='sc_dropdown'),
+ html.Br(),
+ html.Div(id='sc_output')
+])
+
+
+@callback(
+ Output(component_id='sc_output', component_property='children'),
+ Input(component_id='sc_dropdown', component_property='value')
+)
+def display_single_buoy_data(selected_string):
+ """
+ :param selected_string: this is the selection from the dropdown menu
+ :return: nothing is displayed if the selection is 'none' or displays the weather data from the selection
+ """
+ id_list = ['CHTS1','WYSS1','MROS1','ACXS1','41029','FMNS1']
+
+ # display individual buoy data
+ if selected_string == 'none':
+ # display nothing
+ return ''
+ else:
+ # convert selected_string to station ID using id_list
+ selection_index = options.index(selected_string)
+ selected_station_id = id_list[selection_index - 1]
+ single_buoy = gatheringInfo.Buoy(selected_station_id)
+
+ buoy_name = single_buoy.getNAME()
+ wind_speed = single_buoy.getWSPD()
+ wave_height = single_buoy.getWVHT()
+ pressure = single_buoy.getPRES()
+
+ region = gatheringInfo.BB(id_list)
+
+ avg_wind_speed = region.get_SSI_WSPD()
+ avg_wave_height = region.get_SSI_WVHT()
+ avg_pressure = region.get_SSI_PRES()
+
+        # determine storm strength from the storm strength index (SSI)
+        SSI = (0.5 * ((avg_wind_speed / 60) ** 2) +
+               0.3 * (930 / avg_pressure) +
+               0.2 * avg_wave_height / 12)
+
+        if SSI < 0.2:
+            storm_strength = "The expected storm should be a minimal storm"
+        elif SSI < 0.4:
+            storm_strength = "The expected storm should be a moderate storm"
+        elif SSI < 0.6:
+            storm_strength = "The expected storm should be a strong storm"
+        elif SSI < 0.8:
+            storm_strength = "The expected storm should be a severe storm"
+        else:
+            storm_strength = "The expected storm should be an extreme storm"
+
+        return [f'Weather conditions at {buoy_name} buoy:',
+                html.Br(),
+                f'Wind Speed: {wind_speed} m/s',
+                html.Br(),
+                f'Wave Height: {wave_height} m',
+                html.Br(),
+                f'Pressure: {pressure} millibars',
+                html.Br(),
+                'South Carolina Region Weather Data:',
+                html.Br(),
+                f'Average Wind Speed: {avg_wind_speed} m/s',
+                html.Br(),
+                f'Average Wave Height: {avg_wave_height} m',
+                html.Br(),
+                f'Average Pressure: {avg_pressure} millibars',
+                html.Br(),
+                storm_strength
+                ]
diff --git a/pages/virginia.py b/pages/virginia.py
new file mode 100644
index 0000000..899c379
--- /dev/null
+++ b/pages/virginia.py
@@ -0,0 +1,82 @@
+import dash, gatheringInfo
+from dash import html, dcc, callback, Input, Output
+
+dash.register_page(__name__)
+
+options = ['none','Virginia Beach','Cape Henry','York Spit','Rappahannock Light','Dahlgren','South Craney Island']
+
+# dropdown menu of buoys in selected region
+layout = html.Div([
+    html.Div(className='row', children='Virginia Region Buoy Selection',
+ style={'textAlign': 'left', 'color': 'black', 'fontSize': 24}),
+ dcc.Dropdown(options,'none', id='va_dropdown'),
+ html.Br(),
+ html.Div(id='va_output')
+ ])
+
+# callback decorator identifying input and output, and a function that takes in the input, returns an output to be displayed on the GUI
+@callback(
+ Output(component_id='va_output', component_property='children'),
+ Input(component_id='va_dropdown', component_property='value')
+)
+def display_single_buoy_data(selected_string):
+ """
+ :param selected_string: this is the selection from the dropdown menu
+ :return: nothing is displayed if the selection is 'none' or displays the weather data from the selection
+ """
+ id_list = ['44088','44099','44072','RPLV2','NCDV2','CRYV2']
+
+ # display individual buoy data
+ if selected_string == 'none':
+ # display nothing
+ return ''
+ else:
+ # convert selected_string to station ID using id_list
+ selection_index = options.index(selected_string)
+ selected_station_id = id_list[selection_index - 1]
+ single_buoy = gatheringInfo.Buoy(selected_station_id)
+
+ buoy_name = single_buoy.getNAME()
+ wind_speed = single_buoy.getWSPD()
+ wave_height = single_buoy.getWVHT()
+ pressure = single_buoy.getPRES()
+
+ region = gatheringInfo.BB(id_list)
+
+ avg_wind_speed = region.get_SSI_WSPD()
+ avg_wave_height = region.get_SSI_WVHT()
+ avg_pressure = region.get_SSI_PRES()
+
+        # determine storm strength from the storm strength index (SSI)
+        SSI = (0.5 * ((avg_wind_speed / 60) ** 2) +
+               0.3 * (930 / avg_pressure) +
+               0.2 * avg_wave_height / 12)
+
+        if SSI < 0.2:
+            storm_strength = "The expected storm should be a minimal storm"
+        elif SSI < 0.4:
+            storm_strength = "The expected storm should be a moderate storm"
+        elif SSI < 0.6:
+            storm_strength = "The expected storm should be a strong storm"
+        elif SSI < 0.8:
+            storm_strength = "The expected storm should be a severe storm"
+        else:
+            storm_strength = "The expected storm should be an extreme storm"
+
+        return [f'Weather conditions at {buoy_name} buoy:',
+                html.Br(),
+                f'Wind Speed: {wind_speed} m/s',
+                html.Br(),
+                f'Wave Height: {wave_height} m',
+                html.Br(),
+                f'Pressure: {pressure} millibars',
+                html.Br(),
+                'Virginia Region Weather Data:',
+                html.Br(),
+                f'Average Wind Speed: {avg_wind_speed} m/s',
+                html.Br(),
+                f'Average Wave Height: {avg_wave_height} m',
+                html.Br(),
+                f'Average Pressure: {avg_pressure} millibars',
+                html.Br(),
+                storm_strength
+                ]