From 82c173eefe90d5c5fc9cf5e2106d53dba497287b Mon Sep 17 00:00:00 2001
From: Andrej Shadura <andrew.shadura@collabora.co.uk>
Date: Wed, 26 Jul 2023 13:23:22 +0200
Subject: [PATCH] wsclient: Capture more data for JSON-RPC calls

Signed-off-by: Andrej Shadura <andrew.shadura@collabora.co.uk>
---
 obs_proxy/wsclient.py | 38 ++++++++++++++++++++++++++++----------
 1 file changed, 28 insertions(+), 10 deletions(-)

diff --git a/obs_proxy/wsclient.py b/obs_proxy/wsclient.py
index 146c392..670ac42 100644
--- a/obs_proxy/wsclient.py
+++ b/obs_proxy/wsclient.py
@@ -78,17 +78,22 @@ async def worker_log(jobid=None, worker_id=None, **args):
         worker = workers[worker_id]
         uri = worker_uri(worker, 'logfile')
         try:
-            logger.info(f"==> [log {args.get('start', '-')}:{args.get('end', '-')} {args.get('view', 'log')}]")
+            logger.info(
+                f"==> [log {args.get('start', '-')}:{args.get('end', '-')} {args.get('view', 'log')}]",
+                job_id=jobid,
+                worker_id=worker_id,
+                known=jobs.get(worker_id),
+            )
             args['nostream'] = 1
             if jobid:
                 args['jobid'] = jobid
             resp = await client.get(uri, params=args)
         except httpx.ReadTimeout:
-            logger.info(" (timed out)")
+            logger.info(" (timed out)", job_id=jobid, worker_id=worker_id)
             return {'content': '', 'code': 200}
         except httpx.HTTPError:
             raise
-        logger.info(f"<== [{resp.status_code} {resp.reason_phrase}]")
+        logger.info(f"<== [{resp.status_code} {resp.reason_phrase}]", job_id=jobid, worker_id=worker_id)
         return {'content': resp.text, 'code': resp.status_code}
     else:
         raise Exception(
@@ -108,14 +113,20 @@ async def worker_action(action=None, jobid=None, worker_id=None):
         worker = workers[worker_id]
         uri = worker_uri(worker, action)
         try:
-            logger.info(f"==> [{action}]")
+            logger.info(
+                f"==> [{action}]",
+                job_id=jobid,
+                worker_id=worker_id,
+                known=jobs.get(worker_id),
+            )
             job_trace("client", jobid, worker_id, f"=> {action}")
             resp = await client.get(uri, params={'jobid': jobid} if jobid else None)
         except httpx.HTTPError as e:
+            logger.exception(f"=!> [{action}]", job_id=jobid, worker_id=worker_id)
             job_trace("client", jobid, worker_id, f"<= {e}")
             raise
         job_trace("client", jobid, worker_id, f"<= {resp.status_code} {resp.reason_phrase}")
-        logger.info(f"<== [{resp.status_code} {resp.reason_phrase}]")
+        logger.info(f"<== [{resp.status_code} {resp.reason_phrase}]", job_id=jobid, worker_id=worker_id)
         return {'content': resp.text, 'code': resp.status_code}
     else:
         job_trace("client", jobid, worker_id, "<= [no such worker]")
@@ -137,10 +148,16 @@ async def worker_info(jobid=None, worker_id=None):
         action = 'worker'
         uri = worker_uri(worker, action)
         try:
-            logger.info(f"==> [{action}{' job %s' % jobid if jobid else ''}]")
+            logger.info(
+                f"==> [{action}]",
+                job_id=jobid,
+                worker_id=worker_id,
+                known=jobs.get(worker_id),
+            )
             job_trace("client", jobid, worker_id, f"=> {action}")
             resp = await client.get(uri, params={'jobid': jobid} if jobid else None)
         except httpx.HTTPError as e:
+            logger.exception(f"=!> [{action}]", job_id=jobid, worker_id=worker_id)
             job_trace("client", jobid, worker_id, f"<= {e}")
             raise
         job_trace("client", jobid, worker_id, f"<= {resp.status_code} {resp.reason_phrase}")
@@ -148,14 +165,14 @@ async def worker_info(jobid=None, worker_id=None):
             # update cached data
             w = Worker.fromxml(resp.text)
             worker.update(w)
-            logger.info(f"<== [{resp.status_code} {resp.reason_phrase}]")
+            logger.info(f"<== [{resp.status_code} {resp.reason_phrase}]", job_id=jobid, worker_id=worker_id)
             return {
                 'worker': worker.asdict(),
                 'code': resp.status_code,
                 'reason': resp.reason_phrase,
             }
         else:
-            logger.info(f"<== [{resp.status_code} {resp.reason_phrase} {resp.text}]")
+            logger.info(f"<== [{resp.status_code} {resp.reason_phrase} {resp.text}]", job_id=jobid, worker_id=worker_id)
             return {
                 'worker': worker.asdict(),
                 'code': resp.status_code,
@@ -201,7 +218,7 @@ async def submit_job(job=None, jobid=None, worker_id=None, extra=None):
             resp = await client.put(uri, params=extra, content=job_xml)
             if jobid:
                 new_jobid = hashlib.md5(job_xml).hexdigest()
-                logger.info(f"==> [build {worker_id}/{jobid} => {new_jobid}]")
+                logger.info(f"==> [build {worker_id}/{jobid} => {new_jobid}]", orig_job_id=jobid, new_job_id=new_jobid, worker_id=worker_id)

                 notification = JSONRPC20Request(
                     method='job_started',
@@ -209,10 +226,11 @@ async def submit_job(job=None, jobid=None, worker_id=None, extra=None):
                 )
                 job_alias(jobid, new_jobid)
         except httpx.HTTPError as e:
+            logger.exception("submit_job", orig_job_id=jobid, new_job_id=new_jobid, worker_id=worker_id)
             job_trace("client", jobid, worker_id, f"<= {e}")
             raise Exception(f"HTTP error: {e}", uri)
         job_trace("client", jobid, worker_id, f"<= {resp.status_code} {resp.reason_phrase}")
-        logger.info(f"<== [{resp.status_code} {resp.reason_phrase}]")
+        logger.info(f"<== [{resp.status_code} {resp.reason_phrase}]", orig_job_id=jobid, new_job_id=new_jobid, worker_id=worker_id)
         if resp.status_code == 200 and notification:
             queue.put_nowait(notification)
             jobs[worker_id] = (jobid, new_jobid)
-- 
GitLab
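
Note on the logging calls above: passing keyword context such as job_id=, worker_id=, known=, orig_job_id= and new_job_id= to logger.info()/logger.exception() requires a logger that accepts arbitrary key/value pairs, which the standard-library logging module does not do directly. Below is a minimal sketch of such a setup, assuming a structlog-style logger; the processor choices and example values are illustrative only, not obs-proxy's actual configuration.

    # Illustrative sketch only: assumes structlog; obs-proxy's real logging setup may differ.
    import structlog

    structlog.configure(
        processors=[
            structlog.processors.add_log_level,          # add "level" field
            structlog.processors.TimeStamper(fmt="iso"),  # add ISO timestamp
            structlog.processors.JSONRenderer(),          # emit one JSON object per event
        ],
    )

    logger = structlog.get_logger()

    # Keyword arguments become structured fields on the emitted event,
    # which is what the patched calls rely on:
    logger.info("==> [worker]", job_id="42", worker_id="builder-1", known=None)

    try:
        raise RuntimeError("simulated failure")
    except RuntimeError:
        # .exception() logs at error level and captures the active traceback
        logger.exception("submit_job", orig_job_id="42", worker_id="builder-1")

With a configuration along these lines, each call renders a single structured record, so the job and worker identifiers attached in this patch stay queryable after the fact instead of being buried in the message text.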