Skip to content
This repository has been archived by the owner on Nov 19, 2024. It is now read-only.

Commit

Permalink
improve debug logs and error messages (#46)
Browse files Browse the repository at this point in the history
* improve debug logs

* improve exceptions

* add missing debug

* cleanup
  • Loading branch information
malmans2 authored May 10, 2024
1 parent 9865a14 commit ff7d44e
Show file tree
Hide file tree
Showing 2 changed files with 154 additions and 11 deletions.
36 changes: 27 additions & 9 deletions cads_api_client/processing.py
Original file line number Diff line number Diff line change
Expand Up @@ -29,6 +29,17 @@ class DownloadError(RuntimeError):
pass


def error_json_to_message(error_json: dict[str, Any]) -> str:
    """Render a server error payload as a newline-separated human-readable message.

    Collects the ``title``, ``traceback`` and ``detail`` fields (in that
    order) when present, then appends a ``Trace ID is ...`` line if the
    payload carries a truthy ``trace_id``.
    """
    parts: list[str] = []
    for field in ("title", "traceback", "detail"):
        if field in error_json:
            parts.append(str(error_json[field]))
    trace_id = error_json.get("trace_id")
    if trace_id:
        parts.append(f"Trace ID is {trace_id}")
    return "\n".join(parts)


def cads_raise_for_status(response: requests.Response) -> None:
if response.status_code > 499:
response.raise_for_status()
Expand All @@ -39,7 +50,9 @@ def cads_raise_for_status(response: requests.Response) -> None:
except Exception:
pass
if error_json is not None:
raise RuntimeError(f"{response.status_code} Client Error: {error_json}")
raise RuntimeError(
f"{response.status_code} Client Error: {error_json_to_message(error_json)}"
)
else:
response.raise_for_status()

Expand All @@ -59,7 +72,13 @@ def from_request(
retry_options: Dict[str, Any] = {"maximum_tries": 2, "retry_after": 10},
**kwargs: Any,
) -> T_ApiResponse:
method = kwargs["method"] if "method" in kwargs else args[0]
url = kwargs["url"] if "url" in kwargs else args[1]
inputs = kwargs.get("json", {}).get("inputs", {})
logger.debug(f"{method.upper()} {url} {inputs}")
response = multiurl.robust(session.request, **retry_options)(*args, **kwargs)
logger.debug(f"REPLY {response.text}")

if raise_for_status:
cads_raise_for_status(response)
self = cls(response, headers=kwargs.get("headers", {}), session=session)
Expand Down Expand Up @@ -168,6 +187,7 @@ def __init__(
self.headers = headers
self.session = session
self.log_start_time = None
logger.debug(f"Request UID is {self.request_uid}")

def log_metadata(self, metadata: dict[str, Any]) -> None:
logs = metadata.get("log", [])
Expand Down Expand Up @@ -196,6 +216,7 @@ def _get_status(self, robust: bool, **retry_options: Any) -> str:

logger.debug(f"GET {self.url}")
requests_response = get(url=self.url, headers=self.headers, params=params)
logger.debug(f"REPLY {requests_response.text}")
requests_response.raise_for_status()
json = requests_response.json()
self.log_metadata(json.get("metadata", {}))
Expand All @@ -222,14 +243,7 @@ def wait_on_result(self, retry_options: Dict[str, Any] = {}) -> None:
# workaround for the server-side 404 due to database replicas out of sync
time.sleep(1)
results = multiurl.robust(self.make_results, **retry_options)(self.url)
info = results.json
error_message = "processing failed"
if info.get("title"):
error_message = f'{info["title"]}'
if info.get("detail"):
error_message = error_message + f': {info["detail"]}'
raise ProcessingFailedError(error_message)
break
raise ProcessingFailedError(error_json_to_message(results.json))
elif status in ("accepted", "running"):
sleep *= 1.5
if sleep > self.sleep_max:
Expand All @@ -250,7 +264,11 @@ def make_results(self, url: Optional[str] = None) -> Results:
status = self.status
if status not in ("successful", "failed"):
raise ValueError(f"Result not ready, job is {status}")

logger.debug(f"GET {url}")
request_response = self.session.get(url, headers=self.headers)
logger.debug(f"REPLY {request_response.text}")

response = ApiResponse(request_response, session=self.session)
try:
results_url = response.get_link_href(rel="results")
Expand Down
129 changes: 127 additions & 2 deletions tests/test_10_processing.py
Original file line number Diff line number Diff line change
@@ -1,3 +1,4 @@
import json
import logging

import pytest
Expand All @@ -9,6 +10,7 @@
COLLECTION_ID = "reanalysis-era5-pressure-levels"
JOB_RUNNING_ID = "9bfc1362-2832-48e1-a235-359267420bb1"
JOB_SUCCESSFUL_ID = "9bfc1362-2832-48e1-a235-359267420bb2"
JOB_FAILED_ID = "9bfc1362-2832-48e1-a235-359267420bb3"

CATALOGUE_URL = "http://localhost:8080/api/catalogue"
COLLECTIONS_URL = "http://localhost:8080/api/catalogue/v1/datasets"
Expand All @@ -20,13 +22,18 @@

JOB_RUNNING_URL = f"http://localhost:8080/api/retrieve/v1/jobs/{JOB_RUNNING_ID}"
JOB_SUCCESSFUL_URL = f"http://localhost:8080/api/retrieve/v1/jobs/{JOB_SUCCESSFUL_ID}"
JOB_FAILED_URL = f"http://localhost:8080/api/retrieve/v1/jobs/{JOB_FAILED_ID}"

RESULT_RUNNING_URL = (
f"http://localhost:8080/api/retrieve/v1/jobs/{JOB_RUNNING_ID}/results"
)
RESULT_SUCCESSFUL_URL = (
f"http://localhost:8080/api/retrieve/v1/jobs/{JOB_SUCCESSFUL_ID}/results"
)
RESULT_FAILED_URL = (
f"http://localhost:8080/api/retrieve/v1/jobs/{JOB_FAILED_ID}/results"
)


CATALOGUE_JSON = {
"type": "Catalog",
Expand Down Expand Up @@ -140,7 +147,10 @@
},
"year": {
"title": "Year",
"schema": {"type": "array", "items": {"enum": ["2022"], "type": "string"}},
"schema": {
"type": "array",
"items": {"enum": ["2022", "0000"], "type": "string"},
},
},
},
"outputs": {
Expand Down Expand Up @@ -219,6 +229,36 @@
},
}

# Mocked status document for GET /jobs/{JOB_FAILED_ID}: a job that ended in
# "failed" status. The "results" link points at the failed-results endpoint,
# and "metadata.log" carries the log tuples replayed by the client.
JOB_FAILED_JSON = {
    "processID": f"{COLLECTION_ID}",
    "type": "process",
    "jobID": f"{JOB_FAILED_ID}",
    "status": "failed",
    "created": "2022-09-02T17:30:48.201213",
    "started": "2022-09-02T17:32:43.890617",
    "finished": "2022-09-02T17:32:54.308120",
    "updated": "2022-09-02T17:32:54.308116",
    "links": [
        {"href": f"{JOB_FAILED_ID}", "rel": "self", "type": "application/json"},
        {
            "href": f"http://localhost:8080/api/retrieve/v1/jobs/{JOB_FAILED_ID}/results",
            "rel": "results",
        },
        {
            "href": f"{JOB_FAILED_URL}",
            "rel": "monitor",
            "type": "application/json",
            "title": "job status info",
        },
    ],
    "metadata": {
        "log": [
            ["2024-02-09T09:14:47.811223", "This is a log"],
            ["2024-02-09T09:14:50.811223", "WARNING: This is a warning log"],
        ]
    },
}

RESULT_SUCCESSFUL_JSON = {
"asset": {
"value": {
Expand All @@ -244,6 +284,15 @@
"instance": "http://127.0.0.1:8080/api/retrieve/v1/jobs/8b7a1f3d-04b1-425d-96f1-f0634d02ee7f/results",
}

# Mocked error body for GET on the failed job's results endpoint. The
# "title", "traceback" and "trace_id" fields are the ones assembled into the
# ProcessingFailedError message expected by test_wait_on_result_failed.
RESULT_FAILED_JSON = {
    "type": "job results failed",
    "title": "job failed",
    "status": 400,
    "instance": "http://127.0.0.1:8080/api/retrieve/v1/jobs/02135eee-39a8-4d1f-8cd7-87682de5b981/results",
    "trace_id": "ca3e7170-1ce2-48fc-97f8-bbe64fafce44",
    "traceback": "This is a traceback",
}


def responses_add() -> None:
responses.add(
Expand Down Expand Up @@ -295,6 +344,34 @@ def responses_add() -> None:
content_type="application/json",
)

responses.add(
responses.POST,
url=EXECUTE_URL,
json=JOB_FAILED_JSON,
match=[
json_params_matcher(
{
"inputs": {"variable": "temperature", "year": "0000"},
}
)
],
content_type="application/json",
)

responses.add(
responses.GET,
url=JOB_FAILED_URL,
json=JOB_FAILED_JSON,
content_type="application/json",
)

responses.add(
responses.GET,
url=RESULT_FAILED_URL,
json=RESULT_FAILED_JSON,
content_type="application/json",
)


@responses.activate
def test_catalogue_collections() -> None:
Expand Down Expand Up @@ -338,7 +415,21 @@ def test_wait_on_result() -> None:


@responses.activate
def test_log_messages(caplog: pytest.LogCaptureFixture) -> None:
def test_wait_on_result_failed() -> None:
    """A request mocked to fail raises ProcessingFailedError carrying the full server message."""
    responses_add()

    # year="0000" is the input the mocked POST matcher maps to the failed job.
    remote = (
        cads_api_client.Catalogue(CATALOGUE_URL)
        .collection(COLLECTION_ID)
        .submit(variable="temperature", year="0000")
    )
    expected_message = (
        "job failed\nThis is a traceback\n"
        "Trace ID is ca3e7170-1ce2-48fc-97f8-bbe64fafce44"
    )
    with pytest.raises(
        cads_api_client.processing.ProcessingFailedError,
        match=expected_message,
    ):
        remote.wait_on_result()


@responses.activate
def test_remote_logs(caplog: pytest.LogCaptureFixture) -> None:
responses_add()

catalogue = cads_api_client.Catalogue(CATALOGUE_URL)
Expand All @@ -349,6 +440,16 @@ def test_log_messages(caplog: pytest.LogCaptureFixture) -> None:
remote.wait_on_result()

assert caplog.record_tuples == [
(
"cads_api_client.processing",
10,
"GET http://localhost:8080/api/retrieve/v1/processes/reanalysis-era5-pressure-levels {}",
),
(
"cads_api_client.processing",
10,
f"REPLY {json.dumps(PROCESS_JSON)}",
),
(
"cads_api_client.processing",
30,
Expand All @@ -359,11 +460,35 @@ def test_log_messages(caplog: pytest.LogCaptureFixture) -> None:
20,
"[2023-12-12T14:00:00] This is a success message",
),
(
"cads_api_client.processing",
10,
(
"POST http://localhost:8080/api/retrieve/v1/processes/"
"reanalysis-era5-pressure-levels/execute "
"{'variable': 'temperature', 'year': '2022'}"
),
),
(
"cads_api_client.processing",
10,
f"REPLY {json.dumps(JOB_SUCCESSFUL_JSON)}",
),
(
"cads_api_client.processing",
10,
"Request UID is 9bfc1362-2832-48e1-a235-359267420bb2",
),
(
"cads_api_client.processing",
10,
"GET http://localhost:8080/api/retrieve/v1/jobs/9bfc1362-2832-48e1-a235-359267420bb2",
),
(
"cads_api_client.processing",
10,
f"REPLY {json.dumps(JOB_SUCCESSFUL_JSON)}",
),
("cads_api_client.processing", 20, "This is a log"),
("cads_api_client.processing", 30, "This is a warning log"),
("cads_api_client.processing", 20, "status has been updated to successful"),
Expand Down

0 comments on commit ff7d44e

Please sign in to comment.