1616)
1717from ..job_status_utils import is_default_terminal_job_status
1818from ..job_wait_utils import wait_for_job_result_with_defaults_async
19+ from ..job_operation_metadata import (
20+ BATCH_SCRAPE_OPERATION_METADATA ,
21+ SCRAPE_OPERATION_METADATA ,
22+ )
23+ from ..job_route_constants import (
24+ BATCH_SCRAPE_JOB_ROUTE_PREFIX ,
25+ SCRAPE_JOB_ROUTE_PREFIX ,
26+ )
1927from ..job_query_params_utils import build_batch_scrape_get_params
2028from ..polling_defaults import (
2129 DEFAULT_MAX_WAIT_SECONDS ,
4250
4351
class BatchScrapeManager:
    """Manage batch scrape jobs: starting them and retrieving status/results.

    Route prefix and operation metadata come from module-level constants so
    that the endpoint paths and log/error labels stay consistent with the
    rest of the package.
    """

    # Operation names / error-message templates shared by all batch scrape calls.
    _OPERATION_METADATA = BATCH_SCRAPE_OPERATION_METADATA
    # URL prefix for every batch scrape job endpoint.
    _ROUTE_PREFIX = BATCH_SCRAPE_JOB_ROUTE_PREFIX

    def __init__(self, client):
        # The owning API client supplies transport and URL construction.
        self._client = client
@@ -50,37 +61,37 @@ async def start(
5061 ) -> StartBatchScrapeJobResponse :
5162 payload = build_batch_scrape_start_payload (params )
5263 response = await self ._client .transport .post (
53- self ._client ._build_url ("/scrape/batch" ),
64+ self ._client ._build_url (self . _ROUTE_PREFIX ),
5465 data = payload ,
5566 )
5667 return parse_response_model (
5768 response .data ,
5869 model = StartBatchScrapeJobResponse ,
59- operation_name = "batch scrape start" ,
70+ operation_name = self . _OPERATION_METADATA . start_operation_name ,
6071 )
6172
6273 async def get_status (self , job_id : str ) -> BatchScrapeJobStatusResponse :
6374 response = await self ._client .transport .get (
64- self ._client ._build_url (f"/scrape/batch /{ job_id } /status" )
75+ self ._client ._build_url (f"{ self . _ROUTE_PREFIX } /{ job_id } /status" )
6576 )
6677 return parse_response_model (
6778 response .data ,
6879 model = BatchScrapeJobStatusResponse ,
69- operation_name = "batch scrape status" ,
80+ operation_name = self . _OPERATION_METADATA . status_operation_name ,
7081 )
7182
7283 async def get (
7384 self , job_id : str , params : Optional [GetBatchScrapeJobParams ] = None
7485 ) -> BatchScrapeJobResponse :
7586 query_params = build_batch_scrape_get_params (params )
7687 response = await self ._client .transport .get (
77- self ._client ._build_url (f"/scrape/batch /{ job_id } " ),
88+ self ._client ._build_url (f"{ self . _ROUTE_PREFIX } /{ job_id } " ),
7889 params = query_params ,
7990 )
8091 return parse_response_model (
8192 response .data ,
8293 model = BatchScrapeJobResponse ,
83- operation_name = "batch scrape job" ,
94+ operation_name = self . _OPERATION_METADATA . job_operation_name ,
8495 )
8596
8697 async def start_and_wait (
@@ -94,8 +105,8 @@ async def start_and_wait(
94105 job_start_resp = await self .start (params )
95106 job_id , operation_name = build_started_job_context (
96107 started_job_id = job_start_resp .job_id ,
97- start_error_message = "Failed to start batch scrape job" ,
98- operation_name_prefix = "batch scrape job " ,
108+ start_error_message = self . _OPERATION_METADATA . start_error_message ,
109+ operation_name_prefix = self . _OPERATION_METADATA . operation_name_prefix ,
99110 )
100111
101112 job_status = await poll_until_terminal_status_async (
@@ -142,40 +153,43 @@ async def start_and_wait(
142153
143154
class ScrapeManager:
    """Manage single scrape jobs; batch operations are exposed via ``batch``."""

    # Operation names / error-message templates shared by all scrape calls.
    _OPERATION_METADATA = SCRAPE_OPERATION_METADATA
    # URL prefix for every scrape job endpoint.
    _ROUTE_PREFIX = SCRAPE_JOB_ROUTE_PREFIX

    def __init__(self, client):
        self._client = client
        # Batch scrape operations reuse the same underlying client.
        self.batch = BatchScrapeManager(client)
148162
149163 async def start (self , params : StartScrapeJobParams ) -> StartScrapeJobResponse :
150164 payload = build_scrape_start_payload (params )
151165 response = await self ._client .transport .post (
152- self ._client ._build_url ("/scrape" ),
166+ self ._client ._build_url (self . _ROUTE_PREFIX ),
153167 data = payload ,
154168 )
155169 return parse_response_model (
156170 response .data ,
157171 model = StartScrapeJobResponse ,
158- operation_name = "scrape start" ,
172+ operation_name = self . _OPERATION_METADATA . start_operation_name ,
159173 )
160174
161175 async def get_status (self , job_id : str ) -> ScrapeJobStatusResponse :
162176 response = await self ._client .transport .get (
163- self ._client ._build_url (f"/scrape /{ job_id } /status" )
177+ self ._client ._build_url (f"{ self . _ROUTE_PREFIX } /{ job_id } /status" )
164178 )
165179 return parse_response_model (
166180 response .data ,
167181 model = ScrapeJobStatusResponse ,
168- operation_name = "scrape status" ,
182+ operation_name = self . _OPERATION_METADATA . status_operation_name ,
169183 )
170184
171185 async def get (self , job_id : str ) -> ScrapeJobResponse :
172186 response = await self ._client .transport .get (
173- self ._client ._build_url (f"/scrape /{ job_id } " )
187+ self ._client ._build_url (f"{ self . _ROUTE_PREFIX } /{ job_id } " )
174188 )
175189 return parse_response_model (
176190 response .data ,
177191 model = ScrapeJobResponse ,
178- operation_name = "scrape job" ,
192+ operation_name = self . _OPERATION_METADATA . job_operation_name ,
179193 )
180194
181195 async def start_and_wait (
@@ -188,8 +202,8 @@ async def start_and_wait(
188202 job_start_resp = await self .start (params )
189203 job_id , operation_name = build_started_job_context (
190204 started_job_id = job_start_resp .job_id ,
191- start_error_message = "Failed to start scrape job" ,
192- operation_name_prefix = "scrape job " ,
205+ start_error_message = self . _OPERATION_METADATA . start_error_message ,
206+ operation_name_prefix = self . _OPERATION_METADATA . operation_name_prefix ,
193207 )
194208
195209 return await wait_for_job_result_with_defaults_async (
0 commit comments