Skip to content

Commit edf5616

Browse files
committed
feat: add fetch_json util
1 parent 733fef0 commit edf5616

File tree

4 files changed

+109
-96
lines changed

4 files changed

+109
-96
lines changed

openagent/core/utils/fetch_json.py

Lines changed: 42 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,42 @@
1+
from typing import Dict, Optional, Any
2+
3+
import httpx
4+
from loguru import logger
5+
6+
7+
async def fetch_json(
    url: str,
    method: str = "GET",
    params: Optional[Dict[str, Any]] = None,
    headers: Optional[Dict[str, str]] = None,
    timeout: float = 30.0,
    **kwargs,
) -> Any:
    """
    Fetch and decode a JSON payload from a given URL via an async HTTP request.

    :param url: The API endpoint.
    :param method: HTTP method ('GET', 'POST', etc.), defaults to 'GET'.
    :param params: Optional query parameters.
    :param headers: Optional request headers.
    :param timeout: Request timeout in seconds (default 30.0).
    :param kwargs: Additional arguments forwarded to ``httpx.AsyncClient.request``.
    :return: The decoded JSON body. Annotated ``Any`` (not ``Dict``) because
        endpoints may return a JSON array or other value, not only an object.
    :raises httpx.HTTPStatusError: If the response contains an error status.
    :raises httpx.RequestError: If a network error occurs.
    """
    # A fresh client per call keeps the helper self-contained; callers that
    # need connection pooling across many requests should manage their own
    # AsyncClient instead.
    async with httpx.AsyncClient(timeout=timeout) as client:
        try:
            response = await client.request(
                method=method, url=url, params=params, headers=headers, **kwargs
            )
            # Surface 4xx/5xx as a typed exception instead of bad data.
            response.raise_for_status()
            return response.json()
        except httpx.HTTPStatusError as e:
            logger.error(
                f"HTTP error {e.response.status_code} for URL {url}: {e.response.text}"
            )
            raise
        except httpx.RequestError as e:
            logger.error(f"Request error for URL {url}: {e}")
            raise

openagent/tools/arbitrum/compound_market_analysis.py

Lines changed: 41 additions & 58 deletions
Original file line numberDiff line numberDiff line change
@@ -10,6 +10,7 @@
1010
from pydantic import BaseModel, Field
1111
from openagent.agent.config import ModelConfig
1212
from openagent.core.tool import Tool
13+
from openagent.core.utils.fetch_json import fetch_json
1314

1415

1516
@dataclass
@@ -112,69 +113,51 @@ async def __call__(self) -> str:
112113

113114
@staticmethod
async def _fetch_compound_arbitrum_market_data() -> list[CompoundMarketData]:
    """Fetch Compound v3 market summaries on Arbitrum plus 24h APR deltas.

    Pulls the all-networks market summary, keeps only Arbitrum markets
    (chain_id 42161), then fetches each market's historical summary to
    compute the 24h borrow/supply APR changes.

    :return: One CompoundMarketData entry per Arbitrum market that has at
        least two historical data points.
    :raises Exception: propagated from fetch_json if the top-level summary
        request fails; per-market historical failures are logged and skipped.
    """
    results = await fetch_json(
        url="https://v3-api.compound.finance/market/all-networks/all-contracts/summary"
    )

    # Filter for Arbitrum markets (chain_id 42161)
    arbitrum_markets = [market for market in results if market["chain_id"] == 42161]

    market_data = []

    for market in arbitrum_markets:
        address = market["comet"]["address"]

        # Fetch historical data for each address. A failure for a single
        # market must not abort the whole scan (fetch_json raises on HTTP
        # and network errors, and has already logged the details): warn and
        # move on, matching the pre-refactor warn-and-continue behavior.
        try:
            historical_data = await fetch_json(
                f"https://v3-api.compound.finance/market/arbitrum-mainnet/{address}/historical/summary"
            )
        except Exception as e:
            logger.warning(f"Failed to fetch historical data for {address}: {e}")
            continue

        # Sort historical data by timestamp in descending order (newest first)
        sorted_data = sorted(
            historical_data, key=lambda x: x["timestamp"], reverse=True
        )

        # Need at least today's and yesterday's points to compute a delta.
        if len(sorted_data) < 2:
            logger.warning(f"Insufficient historical data for {address}")
            continue

        # Convert string APRs to float
        current_borrow_apr = float(sorted_data[0]["borrow_apr"])
        current_supply_apr = float(sorted_data[0]["supply_apr"])
        yesterday_borrow_apr = float(sorted_data[1]["borrow_apr"])
        yesterday_supply_apr = float(sorted_data[1]["supply_apr"])

        market_data.append(
            CompoundMarketData(
                address=address,
                collateralAssets=market["collateral_asset_symbols"],
                borrowAPR=current_borrow_apr,
                supplyAPR=current_supply_apr,
                # 24h changes: newest minus previous data point.
                borrowAPRChange24h=current_borrow_apr - yesterday_borrow_apr,
                supplyAPRChange24h=current_supply_apr - yesterday_supply_apr,
            )
        )

    return market_data

openagent/tools/pendle/market_analysis.py

Lines changed: 11 additions & 17 deletions
Original file line numberDiff line numberDiff line change
@@ -19,6 +19,7 @@
1919
from openagent.agent.config import ModelConfig
2020
from openagent.core.database import sqlite
2121
from openagent.core.tool import Tool
22+
from openagent.core.utils.fetch_json import fetch_json
2223

2324
Base = declarative_base()
2425

@@ -234,25 +235,18 @@ async def __call__(self) -> str:
234235
return error_msg
235236

236237
async def _fetch_pendle_market_data(self) -> PendleMarket:
    """Fetch all active Pendle markets and build a storable snapshot row.

    Retrieves the active-market list from the Pendle API, condenses it via
    ``_process_market_data``, and wraps the serialized snapshot in a
    ``PendleMarket`` record stamped with the current UTC time.
    """
    # Get Pendle market data from API
    raw_markets = await fetch_json(
        url="https://api-v2.pendle.finance/bff/v3/markets/all?isActive=true"
    )

    # Process the data into a snapshot, then serialize it for storage.
    snapshot = self._process_market_data(raw_markets)
    serialized = json.dumps(asdict(snapshot))

    return PendleMarket(data=serialized, created_at=datetime.now(UTC))
256250

257251
@staticmethod
258252
def _process_market_data(results: dict) -> PendleMarketSnapshot:

openagent/tools/pendle/voter_apy_analysis.py

Lines changed: 15 additions & 21 deletions
Original file line numberDiff line numberDiff line change
@@ -16,6 +16,7 @@
1616
from openagent.agent.config import ModelConfig
1717
from openagent.core.database import sqlite
1818
from openagent.core.tool import Tool
19+
from openagent.core.utils.fetch_json import fetch_json
1920
from openagent.core.utils.json_equal import json_equal
2021

2122
Base = declarative_base()
@@ -149,28 +150,23 @@ async def __call__(self) -> str:
149150
return error_msg
150151

151152
async def _fetch_pendle_voter_apy(self) -> PendleVoterApy:
    """Fetch ve-Pendle pool voter APY data and wrap it as a snapshot row.

    :return: A PendleVoterApy record holding the filtered APY data and a
        UTC creation timestamp.
    :raises Exception: if the API response carries no results.
    """
    # Get Pendle voter data from API
    result = await fetch_json(
        url="https://api-v2.pendle.finance/bff/v1/ve-pendle/pool-voter-apy"
    )

    # Treat a missing "results" key the same as an empty list — both mean
    # the API gave us nothing usable. (.get avoids a bare KeyError on
    # schema drift and routes it through the explicit error below.)
    if not result.get("results"):
        raise Exception("API response is empty")

    data = self._filter_pendle_voter_apy(result)

    # Create new snapshot
    snapshot = PendleVoterApy(
        data=str(data),
        created_at=datetime.now(UTC),
    )

    return snapshot
174170

175171
@staticmethod
176172
def _filter_pendle_voter_apy(apy_data: dict) -> dict:
@@ -186,9 +182,7 @@ def extract_pool_info(item: dict) -> dict:
186182
change_direction = (
187183
"increased"
188184
if last_epoch_change > 0
189-
else "decreased"
190-
if last_epoch_change < 0
191-
else "unchanged"
185+
else "decreased" if last_epoch_change < 0 else "unchanged"
192186
)
193187

194188
# Convert to percentage strings, use absolute value for lastEpochChange

0 commit comments

Comments
 (0)