bring issue-2098 up to date with master #2119

Merged 7 commits on Aug 29, 2023
1 change: 1 addition & 0 deletions ISSUE_TEMPLATES/kg2rollout.md
@@ -192,6 +192,7 @@ Before rolling out, we need to pre-upload the new databases (referenced in `conf
- [ ] verify once more that ARAX is still working properly, even with the self-hosted new-KG2c-version PloverDB service turned off
- [ ] upload the new `kg2c_lite_2.X.Y.json.gz` file to the [translator-lfs-artifacts](https://github.com/ncats/translator-lfs-artifacts/tree/main/files) repo
- [ ] upload the new `kg2_nodes_not_in_sri_nn.tsv` file to the [translator-lfs-artifacts](https://github.com/ncats/translator-lfs-artifacts/tree/main/files) repo
- [ ] upload KG2c TSV tarball to the [Translator Knowledge Graph Exchange (KGE)](https://archive.translator.ncats.io/home).

#### 7. Roll-out to ITRB TEST
- [ ] In GitHub, for the RTXteam/RTX project, merge `master` to `itrb-test`. Record this issue number in the merge message.
19 changes: 16 additions & 3 deletions code/ARAX/ARAXQuery/ARAX_query_tracker.py
@@ -10,7 +10,7 @@ def eprint(*args, **kwargs): print(*args, file=sys.stderr, **kwargs)
import json
import psutil

from datetime import datetime
from datetime import datetime, timezone
import sqlalchemy
from sqlalchemy import create_engine
from sqlalchemy.ext.declarative import declarative_base
@@ -687,6 +687,7 @@ def main():
argparser.add_argument('--show_ongoing', action='count', help='Show all ongoing queries')
argparser.add_argument('--reset_job', action='count', help='Reset the specified job_id(s)')
argparser.add_argument('--job_ids', type=str, help='Job IDs to show (comma separated list)')
argparser.add_argument('--prune_jobs', action='count', help='Simply prune very stale jobs from the active query table')
params = argparser.parse_args()

#### Set verbose
@@ -695,18 +696,30 @@

query_tracker = ARAXQueryTracker()

#### If pruning, then also show
if params.prune_jobs:
params.show_ongoing = True
params.reset_job = True
prune_job_ids = []

#### Check ongoing queries
if params.show_ongoing:
entries = query_tracker.get_entries(ongoing_queries=True)
for entry in entries:
#print(entry.__dict__)
print(f"{entry.query_id}\t{entry.pid}\t{entry.start_datetime}\t{entry.instance_name}\t{entry.hostname}\t{entry.status}\t{entry.elapsed}\t{entry.origin}\t{entry.message_id}\t{entry.message_code}\t{entry.code_description}")
return
now = datetime.now(timezone.utc)
now = now.replace(tzinfo=None)
elapsed = now - datetime.fromisoformat(entry.start_datetime)
elapsed = elapsed.seconds + elapsed.days * 24 * 60 * 60
print(f"{entry.query_id}\t{entry.start_datetime}\t{elapsed}\t{entry.instance_name}\t{entry.hostname}\t{entry.status}\t{entry.origin}\t{entry.pid}\t{entry.message_id}\t{entry.message_code}\t{entry.code_description}")
if params.prune_jobs and elapsed > 70000:
prune_job_ids.append(entry.query_id)

#### Extract job_ids
job_ids = []
if params.job_ids:
job_ids = params.job_ids.split(',')
job_ids.extend(prune_job_ids)

#### If the request is to reset jobs, do it
if params.reset_job and len(job_ids) > 0:
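The heart of the `ARAX_query_tracker.py` change is the elapsed-time check used to flag very stale queries for pruning. Below is a minimal, stand-alone sketch of that calculation, assuming a naive-UTC ISO-8601 `start_datetime` string (the value shown is invented for illustration; the real one comes from the active-query table):

```python
from datetime import datetime, timezone

# Illustrative stand-in for entry.start_datetime from the active-query table;
# assumed to be a naive UTC ISO-8601 string.
start_datetime = "2023-08-28 03:15:00"

# Build a naive UTC "now" so it can be subtracted from the naive stored value
now = datetime.now(timezone.utc).replace(tzinfo=None)

delta = now - datetime.fromisoformat(start_datetime)
elapsed = delta.seconds + delta.days * 24 * 60 * 60   # total whole seconds

# Queries running longer than 70000 seconds (~19.4 hours) are considered very
# stale and become pruning candidates when --prune_jobs is given.
if elapsed > 70000:
    print(f"would prune query started at {start_datetime} ({elapsed} s old)")
```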
18 changes: 13 additions & 5 deletions code/ARAX/ARAXQuery/Expand/kg2_querier.py
@@ -5,8 +5,8 @@
import time
from collections import defaultdict
from typing import Dict, Tuple, Union, Set

import requests
import traceback

sys.path.append(os.path.dirname(os.path.abspath(__file__)))
import expand_utilities as eu
@@ -183,13 +183,21 @@ def _answer_query_using_plover(qg: QueryGraph, log: ARAXResponse) -> Tuple[Dict[
qnode["allow_subclasses"] = True
# Then send the actual query
log.debug(f"Sending query to {rtxc.plover_url}")
response = requests.post(f"{rtxc.plover_url}/query", json=dict_qg, timeout=60,
headers={'accept': 'application/json'})
try:
response = requests.post(f"{rtxc.plover_url}/query",
json=dict_qg,
timeout=60,
headers={'accept': 'application/json'})
except Exception as e:
log.error(f"Error querying PloverDB: {e} "
f"TRACE {traceback.format_exc()}")
raise e
if response.status_code == 200:
log.debug(f"Got response back from Plover")
log.debug(f"Plover returned status code {response.status_code}")
return response.json(), response.status_code
else:
log.warning(f"Plover returned a status code of {response.status_code}. Response was: {response.text}")
log.warning(f"Plover returned status code {response.status_code}."
f" Response was: {response.text}")
return dict(), response.status_code

def _load_plover_answer_into_object_model(self, plover_answer: Dict[str, Dict[str, Union[set, dict]]],
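The `kg2_querier.py` change wraps the PloverDB request in a try/except so that a failed connection is logged with a full traceback before being re-raised, and the status-code handling is logged uniformly. A minimal sketch of that pattern, with a stand-in URL and an empty query graph in place of the real RTX configuration value and incoming QueryGraph:

```python
import traceback

import requests

# Stand-ins for values the real code takes from the RTX config and the query
# graph being expanded; both are assumptions made for this sketch.
plover_url = "https://kg2cploverdb.example.org"
dict_qg = {"nodes": {}, "edges": {}}

try:
    response = requests.post(f"{plover_url}/query",
                             json=dict_qg,
                             timeout=60,
                             headers={"accept": "application/json"})
except Exception as e:
    # Log the exception and full traceback, then re-raise so the caller
    # (Expand) can report the failed KG2 lookup.
    print(f"Error querying PloverDB: {e} TRACE {traceback.format_exc()}")
    raise

if response.status_code == 200:
    answer = response.json()
else:
    print(f"Plover returned status code {response.status_code}. "
          f"Response was: {response.text}")
    answer = {}
```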
14 changes: 0 additions & 14 deletions data/KGmetadata/EdgeTypes.tsv

This file was deleted.

11 changes: 0 additions & 11 deletions data/KGmetadata/NodeLabels.tsv

This file was deleted.

6 changes: 0 additions & 6 deletions data/README.md

This file was deleted.

13 changes: 0 additions & 13 deletions data/convert-obo-to-tsv.sh

This file was deleted.

44 changes: 0 additions & 44 deletions data/convert-uniprot-humchr-txt-file-to-twocol-tsv.pl

This file was deleted.
