Skip to content

Commit dff5ecf

Browse files
Centralize job manager route and operation constants
Co-authored-by: Shri Sukhani <shrisukhani@users.noreply.github.com>
1 parent a451f57 commit dff5ecf

15 files changed: +281 additions, −64 deletions

CONTRIBUTING.md

Lines changed: 2 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -105,10 +105,12 @@ This runs lint, format checks, compile checks, tests, and package build.
105105
- `tests/test_guardrail_ast_utils.py` (shared AST guard utility contract),
106106
- `tests/test_job_fetch_helper_boundary.py` (centralization boundary enforcement for retry/paginated-fetch helper primitives),
107107
- `tests/test_job_fetch_helper_usage.py` (shared retry/paginated-fetch defaults helper usage enforcement),
108+
- `tests/test_job_operation_metadata_usage.py` (shared scrape/crawl/extract operation-metadata usage enforcement),
108109
- `tests/test_job_pagination_helper_usage.py` (shared scrape/crawl pagination helper usage enforcement),
109110
- `tests/test_job_poll_helper_boundary.py` (centralization boundary enforcement for terminal-status polling helper primitives),
110111
- `tests/test_job_poll_helper_usage.py` (shared terminal-status polling helper usage enforcement),
111112
- `tests/test_job_query_params_helper_usage.py` (shared scrape/crawl query-param helper usage enforcement),
113+
- `tests/test_job_route_constants_usage.py` (shared scrape/crawl/extract route-constant usage enforcement),
112114
- `tests/test_job_start_payload_helper_usage.py` (shared scrape/crawl start-payload helper usage enforcement),
113115
- `tests/test_job_wait_helper_boundary.py` (centralization boundary enforcement for wait-for-job helper primitives),
114116
- `tests/test_job_wait_helper_usage.py` (shared wait-for-job defaults helper usage enforcement),

hyperbrowser/client/managers/async_manager/crawl.py

Lines changed: 13 additions & 8 deletions
Original file line numberDiff line numberDiff line change
@@ -16,6 +16,8 @@
1616
)
1717
from ..job_status_utils import is_default_terminal_job_status
1818
from ..job_start_payload_utils import build_crawl_start_payload
19+
from ..job_operation_metadata import CRAWL_OPERATION_METADATA
20+
from ..job_route_constants import CRAWL_JOB_ROUTE_PREFIX
1921
from ..job_query_params_utils import build_crawl_get_params
2022
from ..polling_defaults import (
2123
DEFAULT_MAX_WAIT_SECONDS,
@@ -34,43 +36,46 @@
3436

3537

3638
class CrawlManager:
39+
_OPERATION_METADATA = CRAWL_OPERATION_METADATA
40+
_ROUTE_PREFIX = CRAWL_JOB_ROUTE_PREFIX
41+
3742
def __init__(self, client):
3843
self._client = client
3944

4045
async def start(self, params: StartCrawlJobParams) -> StartCrawlJobResponse:
4146
payload = build_crawl_start_payload(params)
4247
response = await self._client.transport.post(
43-
self._client._build_url("/crawl"),
48+
self._client._build_url(self._ROUTE_PREFIX),
4449
data=payload,
4550
)
4651
return parse_response_model(
4752
response.data,
4853
model=StartCrawlJobResponse,
49-
operation_name="crawl start",
54+
operation_name=self._OPERATION_METADATA.start_operation_name,
5055
)
5156

5257
async def get_status(self, job_id: str) -> CrawlJobStatusResponse:
5358
response = await self._client.transport.get(
54-
self._client._build_url(f"/crawl/{job_id}/status")
59+
self._client._build_url(f"{self._ROUTE_PREFIX}/{job_id}/status")
5560
)
5661
return parse_response_model(
5762
response.data,
5863
model=CrawlJobStatusResponse,
59-
operation_name="crawl status",
64+
operation_name=self._OPERATION_METADATA.status_operation_name,
6065
)
6166

6267
async def get(
6368
self, job_id: str, params: Optional[GetCrawlJobParams] = None
6469
) -> CrawlJobResponse:
6570
query_params = build_crawl_get_params(params)
6671
response = await self._client.transport.get(
67-
self._client._build_url(f"/crawl/{job_id}"),
72+
self._client._build_url(f"{self._ROUTE_PREFIX}/{job_id}"),
6873
params=query_params,
6974
)
7075
return parse_response_model(
7176
response.data,
7277
model=CrawlJobResponse,
73-
operation_name="crawl job",
78+
operation_name=self._OPERATION_METADATA.job_operation_name,
7479
)
7580

7681
async def start_and_wait(
@@ -84,8 +89,8 @@ async def start_and_wait(
8489
job_start_resp = await self.start(params)
8590
job_id, operation_name = build_started_job_context(
8691
started_job_id=job_start_resp.job_id,
87-
start_error_message="Failed to start crawl job",
88-
operation_name_prefix="crawl job ",
92+
start_error_message=self._OPERATION_METADATA.start_error_message,
93+
operation_name_prefix=self._OPERATION_METADATA.operation_name_prefix,
8994
)
9095

9196
job_status = await poll_until_terminal_status_async(

hyperbrowser/client/managers/async_manager/extract.py

Lines changed: 13 additions & 8 deletions
Original file line numberDiff line numberDiff line change
@@ -7,6 +7,8 @@
77
StartExtractJobResponse,
88
)
99
from ..extract_payload_utils import build_extract_start_payload
10+
from ..job_operation_metadata import EXTRACT_OPERATION_METADATA
11+
from ..job_route_constants import EXTRACT_JOB_ROUTE_PREFIX
1012
from ..job_status_utils import is_default_terminal_job_status
1113
from ..job_wait_utils import wait_for_job_result_with_defaults_async
1214
from ..polling_defaults import (
@@ -19,40 +21,43 @@
1921

2022

2123
class ExtractManager:
24+
_OPERATION_METADATA = EXTRACT_OPERATION_METADATA
25+
_ROUTE_PREFIX = EXTRACT_JOB_ROUTE_PREFIX
26+
2227
def __init__(self, client):
2328
self._client = client
2429

2530
async def start(self, params: StartExtractJobParams) -> StartExtractJobResponse:
2631
payload = build_extract_start_payload(params)
2732

2833
response = await self._client.transport.post(
29-
self._client._build_url("/extract"),
34+
self._client._build_url(self._ROUTE_PREFIX),
3035
data=payload,
3136
)
3237
return parse_response_model(
3338
response.data,
3439
model=StartExtractJobResponse,
35-
operation_name="extract start",
40+
operation_name=self._OPERATION_METADATA.start_operation_name,
3641
)
3742

3843
async def get_status(self, job_id: str) -> ExtractJobStatusResponse:
3944
response = await self._client.transport.get(
40-
self._client._build_url(f"/extract/{job_id}/status")
45+
self._client._build_url(f"{self._ROUTE_PREFIX}/{job_id}/status")
4146
)
4247
return parse_response_model(
4348
response.data,
4449
model=ExtractJobStatusResponse,
45-
operation_name="extract status",
50+
operation_name=self._OPERATION_METADATA.status_operation_name,
4651
)
4752

4853
async def get(self, job_id: str) -> ExtractJobResponse:
4954
response = await self._client.transport.get(
50-
self._client._build_url(f"/extract/{job_id}")
55+
self._client._build_url(f"{self._ROUTE_PREFIX}/{job_id}")
5156
)
5257
return parse_response_model(
5358
response.data,
5459
model=ExtractJobResponse,
55-
operation_name="extract job",
60+
operation_name=self._OPERATION_METADATA.job_operation_name,
5661
)
5762

5863
async def start_and_wait(
@@ -65,8 +70,8 @@ async def start_and_wait(
6570
job_start_resp = await self.start(params)
6671
job_id, operation_name = build_started_job_context(
6772
started_job_id=job_start_resp.job_id,
68-
start_error_message="Failed to start extract job",
69-
operation_name_prefix="extract job ",
73+
start_error_message=self._OPERATION_METADATA.start_error_message,
74+
operation_name_prefix=self._OPERATION_METADATA.operation_name_prefix,
7075
)
7176

7277
return await wait_for_job_result_with_defaults_async(

hyperbrowser/client/managers/async_manager/scrape.py

Lines changed: 30 additions & 16 deletions
Original file line numberDiff line numberDiff line change
@@ -16,6 +16,14 @@
1616
)
1717
from ..job_status_utils import is_default_terminal_job_status
1818
from ..job_wait_utils import wait_for_job_result_with_defaults_async
19+
from ..job_operation_metadata import (
20+
BATCH_SCRAPE_OPERATION_METADATA,
21+
SCRAPE_OPERATION_METADATA,
22+
)
23+
from ..job_route_constants import (
24+
BATCH_SCRAPE_JOB_ROUTE_PREFIX,
25+
SCRAPE_JOB_ROUTE_PREFIX,
26+
)
1927
from ..job_query_params_utils import build_batch_scrape_get_params
2028
from ..polling_defaults import (
2129
DEFAULT_MAX_WAIT_SECONDS,
@@ -42,6 +50,9 @@
4250

4351

4452
class BatchScrapeManager:
53+
_OPERATION_METADATA = BATCH_SCRAPE_OPERATION_METADATA
54+
_ROUTE_PREFIX = BATCH_SCRAPE_JOB_ROUTE_PREFIX
55+
4556
def __init__(self, client):
4657
self._client = client
4758

@@ -50,37 +61,37 @@ async def start(
5061
) -> StartBatchScrapeJobResponse:
5162
payload = build_batch_scrape_start_payload(params)
5263
response = await self._client.transport.post(
53-
self._client._build_url("/scrape/batch"),
64+
self._client._build_url(self._ROUTE_PREFIX),
5465
data=payload,
5566
)
5667
return parse_response_model(
5768
response.data,
5869
model=StartBatchScrapeJobResponse,
59-
operation_name="batch scrape start",
70+
operation_name=self._OPERATION_METADATA.start_operation_name,
6071
)
6172

6273
async def get_status(self, job_id: str) -> BatchScrapeJobStatusResponse:
6374
response = await self._client.transport.get(
64-
self._client._build_url(f"/scrape/batch/{job_id}/status")
75+
self._client._build_url(f"{self._ROUTE_PREFIX}/{job_id}/status")
6576
)
6677
return parse_response_model(
6778
response.data,
6879
model=BatchScrapeJobStatusResponse,
69-
operation_name="batch scrape status",
80+
operation_name=self._OPERATION_METADATA.status_operation_name,
7081
)
7182

7283
async def get(
7384
self, job_id: str, params: Optional[GetBatchScrapeJobParams] = None
7485
) -> BatchScrapeJobResponse:
7586
query_params = build_batch_scrape_get_params(params)
7687
response = await self._client.transport.get(
77-
self._client._build_url(f"/scrape/batch/{job_id}"),
88+
self._client._build_url(f"{self._ROUTE_PREFIX}/{job_id}"),
7889
params=query_params,
7990
)
8091
return parse_response_model(
8192
response.data,
8293
model=BatchScrapeJobResponse,
83-
operation_name="batch scrape job",
94+
operation_name=self._OPERATION_METADATA.job_operation_name,
8495
)
8596

8697
async def start_and_wait(
@@ -94,8 +105,8 @@ async def start_and_wait(
94105
job_start_resp = await self.start(params)
95106
job_id, operation_name = build_started_job_context(
96107
started_job_id=job_start_resp.job_id,
97-
start_error_message="Failed to start batch scrape job",
98-
operation_name_prefix="batch scrape job ",
108+
start_error_message=self._OPERATION_METADATA.start_error_message,
109+
operation_name_prefix=self._OPERATION_METADATA.operation_name_prefix,
99110
)
100111

101112
job_status = await poll_until_terminal_status_async(
@@ -142,40 +153,43 @@ async def start_and_wait(
142153

143154

144155
class ScrapeManager:
156+
_OPERATION_METADATA = SCRAPE_OPERATION_METADATA
157+
_ROUTE_PREFIX = SCRAPE_JOB_ROUTE_PREFIX
158+
145159
def __init__(self, client):
146160
self._client = client
147161
self.batch = BatchScrapeManager(client)
148162

149163
async def start(self, params: StartScrapeJobParams) -> StartScrapeJobResponse:
150164
payload = build_scrape_start_payload(params)
151165
response = await self._client.transport.post(
152-
self._client._build_url("/scrape"),
166+
self._client._build_url(self._ROUTE_PREFIX),
153167
data=payload,
154168
)
155169
return parse_response_model(
156170
response.data,
157171
model=StartScrapeJobResponse,
158-
operation_name="scrape start",
172+
operation_name=self._OPERATION_METADATA.start_operation_name,
159173
)
160174

161175
async def get_status(self, job_id: str) -> ScrapeJobStatusResponse:
162176
response = await self._client.transport.get(
163-
self._client._build_url(f"/scrape/{job_id}/status")
177+
self._client._build_url(f"{self._ROUTE_PREFIX}/{job_id}/status")
164178
)
165179
return parse_response_model(
166180
response.data,
167181
model=ScrapeJobStatusResponse,
168-
operation_name="scrape status",
182+
operation_name=self._OPERATION_METADATA.status_operation_name,
169183
)
170184

171185
async def get(self, job_id: str) -> ScrapeJobResponse:
172186
response = await self._client.transport.get(
173-
self._client._build_url(f"/scrape/{job_id}")
187+
self._client._build_url(f"{self._ROUTE_PREFIX}/{job_id}")
174188
)
175189
return parse_response_model(
176190
response.data,
177191
model=ScrapeJobResponse,
178-
operation_name="scrape job",
192+
operation_name=self._OPERATION_METADATA.job_operation_name,
179193
)
180194

181195
async def start_and_wait(
@@ -188,8 +202,8 @@ async def start_and_wait(
188202
job_start_resp = await self.start(params)
189203
job_id, operation_name = build_started_job_context(
190204
started_job_id=job_start_resp.job_id,
191-
start_error_message="Failed to start scrape job",
192-
operation_name_prefix="scrape job ",
205+
start_error_message=self._OPERATION_METADATA.start_error_message,
206+
operation_name_prefix=self._OPERATION_METADATA.operation_name_prefix,
193207
)
194208

195209
return await wait_for_job_result_with_defaults_async(
Lines changed: 43 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,43 @@
1+
from dataclasses import dataclass
2+
3+
4+
@dataclass(frozen=True)
5+
class JobOperationMetadata:
6+
start_operation_name: str
7+
status_operation_name: str
8+
job_operation_name: str
9+
start_error_message: str
10+
operation_name_prefix: str
11+
12+
13+
BATCH_SCRAPE_OPERATION_METADATA = JobOperationMetadata(
14+
start_operation_name="batch scrape start",
15+
status_operation_name="batch scrape status",
16+
job_operation_name="batch scrape job",
17+
start_error_message="Failed to start batch scrape job",
18+
operation_name_prefix="batch scrape job ",
19+
)
20+
21+
SCRAPE_OPERATION_METADATA = JobOperationMetadata(
22+
start_operation_name="scrape start",
23+
status_operation_name="scrape status",
24+
job_operation_name="scrape job",
25+
start_error_message="Failed to start scrape job",
26+
operation_name_prefix="scrape job ",
27+
)
28+
29+
CRAWL_OPERATION_METADATA = JobOperationMetadata(
30+
start_operation_name="crawl start",
31+
status_operation_name="crawl status",
32+
job_operation_name="crawl job",
33+
start_error_message="Failed to start crawl job",
34+
operation_name_prefix="crawl job ",
35+
)
36+
37+
EXTRACT_OPERATION_METADATA = JobOperationMetadata(
38+
start_operation_name="extract start",
39+
status_operation_name="extract status",
40+
job_operation_name="extract job",
41+
start_error_message="Failed to start extract job",
42+
operation_name_prefix="extract job ",
43+
)
Lines changed: 4 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,4 @@
1+
SCRAPE_JOB_ROUTE_PREFIX = "/scrape"
2+
BATCH_SCRAPE_JOB_ROUTE_PREFIX = "/scrape/batch"
3+
CRAWL_JOB_ROUTE_PREFIX = "/crawl"
4+
EXTRACT_JOB_ROUTE_PREFIX = "/extract"

0 commit comments

Comments (0)