Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
6 changes: 5 additions & 1 deletion Tests/test_pvlive_api.py
Original file line number Diff line number Diff line change
Expand Up @@ -5,11 +5,12 @@
"""

import unittest
from unittest.mock import MagicMock
from datetime import datetime, date, time
import pytz

import pandas.api.types as ptypes
from pvlive_api import PVLive
from pvlive_api import PVLive, PVLiveException

class PVLiveTestCase(unittest.TestCase):
"""Tests for `pvlive.py`."""
Expand Down Expand Up @@ -139,6 +140,9 @@ def test_latest(self):
data = self.api.latest(entity_type="gsp", entity_id=103, dataframe=True)
self.check_df_columns(data)
self.check_df_dtypes(data)
self.api._fetch_url = MagicMock(return_value={"notdata": [], "notmeta": []})
with self.assertRaises(PVLiveException):
data = self.api.latest(entity_type="gsp", entity_id=0, dataframe=True)

def test_day_peak(self):
"""Tests the day_peak function."""
Expand Down
4 changes: 2 additions & 2 deletions pvlive_api/__init__.py
Original file line number Diff line number Diff line change
@@ -1,3 +1,3 @@
from pvlive_api.pvlive import PVLive
from pvlive_api.pvlive import PVLive, PVLiveException

__all__ = ["PVLive"]
__all__ = ["PVLive", "PVLiveException"]
13 changes: 13 additions & 0 deletions pvlive_api/pvlive.py
Original file line number Diff line number Diff line change
Expand Up @@ -96,12 +96,14 @@ def _get_gsp_list(self):
"""Fetch the GSP list from the API and convert to Pandas DataFrame."""
url = f"{self.base_url}/gsp_list"
response = self._fetch_url(url)
self._validate_api_response(response, expected_keys=("data", "meta"))
return pd.DataFrame(response["data"], columns=response["meta"])

def _get_pes_list(self):
    """Retrieve the PES region list from the API as a Pandas DataFrame.

    Raises PVLiveException (via _validate_api_response) if the JSON
    response is missing the 'data' or 'meta' keys.
    """
    endpoint = f"{self.base_url}/pes_list"
    payload = self._fetch_url(endpoint)
    # Fail fast with a clear error if the API reply is malformed.
    self._validate_api_response(payload, expected_keys=("data", "meta"))
    return pd.DataFrame(payload["data"], columns=payload["meta"])

def _get_deployment_releases(self):
Expand Down Expand Up @@ -230,6 +232,7 @@ def latest(self,
extra_fields=extra_fields, period=period)
params = self._compile_params(extra_fields, period=period)
response = self._query_api(entity_type, entity_id, params)
self._validate_api_response(response, expected_keys=("data", "meta"))
if response["data"]:
data, meta = response["data"], response["meta"]
data = tuple(data[0])
Expand Down Expand Up @@ -446,12 +449,22 @@ def _between(self, start, end, entity_type="gsp", entity_id=0, extra_fields="",
request_end = min(end, request_start + max_range)
params = self._compile_params(extra_fields, request_start, request_end, period)
response = self._query_api(entity_type, entity_id, params)
self._validate_api_response(response, expected_keys=("data", "meta"))
data += response["data"]
request_start += max_range + timedelta(minutes=period)
if dataframe:
return self._convert_tuple_to_df(data, response["meta"]), response["meta"]
return data, response["meta"]

@staticmethod
def _validate_api_response(response, expected_keys):
"""Check that a JSON API response contains the expected keys."""
if any(key not in response.keys() for key in expected_keys):
raise PVLiveException(
"The API's JSON response did not contain the required fields. Expected keys: "
f"{expected_keys} , available keys: {response.keys()}"
)

def _compile_params(self, extra_fields="", start=None, end=None, period=30):
"""Compile parameters into a Python dict, formatting where necessary."""
params = {}
Expand Down