Break long lines
xxyzz committed Aug 26, 2023
1 parent e146f22 commit dfd0494
Showing 11 changed files with 247 additions and 93 deletions.
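Every hunk below applies the same technique: an over-long string literal is split into adjacent literals inside the parentheses that already surround it, and Python's implicit string concatenation joins the pieces back into the identical value at compile time. A minimal sketch of the idea, reusing one of the strings from config.py (variable names here are illustrative, not from the plugin):

# Before: a single literal longer than the line-length limit.
message = "This book has multiple supported formats. Choose the format you want to use."

# After: adjacent literals inside parentheses are concatenated at compile
# time, so the runtime value is byte-for-byte the same.
wrapped = (
    "This book has multiple supported formats. Choose the format "
    "you want to use."
)
assert wrapped == message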
config.py: 15 changes (10 additions & 5 deletions)
@@ -107,7 +107,8 @@ def __init__(self):
         )
         self.search_people_box.setToolTip(
             _(
-                "Enable this option for nonfiction books and novels that have character pages on Wikipedia/Fandom"
+                "Enable this option for nonfiction books and novels that have character"
+                " pages on Wikipedia/Fandom"
             )
         )
         self.search_people_box.setChecked(prefs["search_people"])
@@ -122,7 +123,8 @@ def __init__(self):
         self.use_gpu_box = QCheckBox(_("Run spaCy with GPU(requires CUDA)"))
         self.use_gpu_box.setToolTip(
             _(
-                "GPU will be used when creating X-Ray file if spaCy has transformer model for the book language with ner component."
+                "GPU will be used when creating X-Ray file if spaCy has transformer"
+                " model for the book language with ner component."
             )
         )
         self.use_gpu_box.setChecked(prefs["use_gpu"])
@@ -157,7 +159,8 @@ def __init__(self):
         minimal_x_ray_label = QLabel(_("Minimal X-Ray occurrences"))
         minimal_x_ray_label.setToolTip(
             _(
-                "X-Ray entities that appear less then this number and don't have description from Wikipedia/Fandom will be removed"
+                "X-Ray entities that appear less then this number and don't have "
+                "description from Wikipedia/Fandom will be removed"
             )
         )
         form_layout.addRow(minimal_x_ray_label, self.minimal_x_ray_count)
@@ -423,7 +426,8 @@ def __init__(self, formats: list[str]) -> None:

         message = QLabel(
             _(
-                "This book has multiple supported formats. Choose the format you want to use."
+                "This book has multiple supported formats. Choose the format "
+                "you want to use."
             )
         )
         vl.addWidget(message)
@@ -486,7 +490,8 @@ def __init__(self, parent: QObject, is_kindle: bool):
         wiktionary_gloss_label = QLabel(_("Use Wiktionary definition"))
         wiktionary_gloss_label.setToolTip(
             _(
-                "Change Word Wise language to Chinese on your Kindle device to view definition from Wiktionary"
+                "Change Word Wise language to Chinese on your Kindle device to "
+                "view definition from Wiktionary"
             )
         )
         form_layout.addRow(wiktionary_gloss_label, self.use_wiktionary_box)
custom_lemmas.py: 16 changes (12 additions & 4 deletions)
@@ -163,7 +163,8 @@ def init_wiktionary_buttons(
         difficulty_label = QLabel(_("Difficulty limit"))
         difficulty_label.setToolTip(
             _(
-                "Difficult words have lower value. Words have difficulty value higher than this value are disabled."
+                "Difficult words have lower value. Words have difficulty value higher "
+                "than this value are disabled."
             )
         )
         self.difficulty_limit_box = QComboBox()
@@ -227,15 +228,21 @@ def check_empty_kindle_gloss(self) -> None:

         klld_conn = sqlite3.connect(klld_path)
         for sense_id, short_def, full_def, example in klld_conn.execute(
-            "SELECT senses.id, short_def, full_def, example_sentence FROM lemmas JOIN senses ON lemmas.id = display_lemma_id WHERE (full_def IS NOT NULL OR short_def IS NOT NULL) AND lemma NOT like '-%'"
+            """
+            SELECT senses.id, short_def, full_def, example_sentence
+            FROM lemmas JOIN senses ON lemmas.id = display_lemma_id
+            WHERE (full_def IS NOT NULL OR short_def IS NOT NULL)
+            AND lemma NOT like '-%'
+            """
         ):
             short_def = base64.b64decode(short_def if short_def else full_def).decode(
                 "utf-8"
             )
             full_def = base64.b64decode(full_def).decode("utf-8") if full_def else ""
             example = base64.b64decode(example).decode("utf-8") if example else ""
             custom_db_conn.execute(
-                "UPDATE senses SET short_def = ?, full_def = ?, example = ? WHERE id = ?",
+                "UPDATE senses SET short_def = ?, full_def = ?, example = ? "
+                "WHERE id = ?",
                 (short_def, full_def, example, sense_id),
             )
         klld_conn.close()
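The reflowed UPDATE above depends on the trailing space in the first fragment; without it the joined SQL would read "...example = ?WHERE id = ?". A small self-contained check against an in-memory database (the simplified senses table here is an illustrative stand-in for the plugin's real schema):

import sqlite3

conn = sqlite3.connect(":memory:")
conn.execute(
    "CREATE TABLE senses (id INTEGER PRIMARY KEY, short_def TEXT, full_def TEXT, example TEXT)"
)
conn.execute("INSERT INTO senses (id) VALUES (1)")
# Adjacent literals join into one statement; the space before "WHERE" keeps it valid.
conn.execute(
    "UPDATE senses SET short_def = ?, full_def = ?, example = ? "
    "WHERE id = ?",
    ("short", "full", "example", 1),
)
print(conn.execute("SELECT short_def, example FROM senses WHERE id = 1").fetchone())
# prints ('short', 'example')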
@@ -441,7 +448,8 @@ def __init__(self, parent):

         text = QLabel(
             _(
-                'Export text separated by tab, can be imported to Anki.<br/> "Allow HTML in fields" option needs to be enabled in Anki.'
+                "Export text separated by tab, can be imported to Anki.<br/> "
+                '"Allow HTML in fields" option needs to be enabled in Anki.'
             )
         )
         vl.addWidget(text)
database.py: 15 changes (12 additions & 3 deletions)
@@ -52,7 +52,10 @@ def create_lang_layer(

 def insert_lemma(ll_conn: sqlite3.Connection, data: tuple[int, int, int, int]) -> None:
     ll_conn.execute(
-        "INSERT INTO glosses (start, end, difficulty, sense_id, low_confidence) VALUES (?, ?, ?, ?, 0)",
+        """
+        INSERT INTO glosses (start, end, difficulty, sense_id, low_confidence)
+        VALUES (?, ?, ?, ?, 0)
+        """,
         data,
     )

@@ -150,7 +153,10 @@ def create_x_ray_db(

     str_list.append([22, "en", f"{prefs['fandom']}/wiki/%s" if prefs["fandom"] else ""])
     x_ray_conn.execute(
-        "INSERT INTO source (id, label, url, license_label, license_url) VALUES(2, 4, 22, 7, 8)"
+        """
+        INSERT INTO source (id, label, url, license_label, license_url)
+        VALUES(2, 4, 22, 7, 8)
+        """
     )
     x_ray_conn.executemany("INSERT INTO string VALUES(?, ?, ?)", str_list)
     return x_ray_conn, db_path
@@ -176,7 +182,10 @@ def insert_x_entities(
     conn: sqlite3.Connection, data: Iterator[tuple[int, str, int, int]]
 ) -> None:
     conn.executemany(
-        "INSERT INTO entity (id, label, type, count, has_info_card) VALUES(?, ?, ?, ?, 1)",
+        """
+        INSERT INTO entity (id, label, type, count, has_info_card)
+        VALUES(?, ?, ?, ?, 1)
+        """,
         data,
     )

deps.py: 18 changes (13 additions & 5 deletions)
@@ -17,9 +17,9 @@
     custom_lemmas_folder,
     get_plugin_path,
     get_wiktionary_klld_path,
-    mac_bin_path,
     kindle_db_path,
     load_plugin_json,
+    mac_bin_path,
     run_subprocess,
     use_kindle_ww_db,
     wiktionary_db_path,
@@ -50,13 +50,17 @@ def install_deps(pkg: str, notif: Any) -> None:
     model_version = dep_versions[
         "spacy_trf_model" if pkg.endswith("_trf") else "spacy_cpu_model"
     ]
-    url = f"https://github.com/explosion/spacy-models/releases/download/{pkg}-{model_version}/{pkg}-{model_version}-py3-none-any.whl"
+    url = (
+        "https://github.com/explosion/spacy-models/releases/download/"
+        f"{pkg}-{model_version}/{pkg}-{model_version}-py3-none-any.whl"
+    )
     pip_install(pkg, model_version, url=url, notif=notif)
     if pkg.endswith("_trf"):
         from .config import prefs

         pip_install("cupy-wheel", dep_versions["cupy"], notif=notif)
-        # PyTorch's Windows package on pypi.org is CPU build version, reintall the CUDA build version
+        # PyTorch's Windows package on pypi.org is CPU build version,
+        # reintall the CUDA build version
         if iswindows or prefs["cuda"] == "cu118":
             pip_install(
                 "torch",
@@ -159,7 +163,8 @@ def download_word_wise_file(
     notifications.put(
         (
             0,
-            f"Downloading {lemma_lang}-{gloss_lang} {'Kindle' if is_kindle else 'Wiktionary'} file",
+            f"Downloading {lemma_lang}-{gloss_lang} "
+            f"{'Kindle' if is_kindle else 'Wiktionary'} file",
         )
     )
     plugin_path = get_plugin_path()
@@ -179,7 +184,10 @@
     if is_kindle:
         klld_path = get_wiktionary_klld_path(plugin_path, lemma_lang, gloss_lang)
         if not klld_path.exists():
-            url = f"{PROFICIENCY_RELEASE_URL}/kll.{lemma_lang}.{gloss_lang}_v{PROFICIENCY_VERSION}.klld.bz2"
+            url = (
+                PROFICIENCY_RELEASE_URL
+                + f"/kll.{lemma_lang}.{gloss_lang}_v{PROFICIENCY_VERSION}.klld.bz2"
+            )
             download_and_extract(url, extract_folder)

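Both URL reflows above preserve the resulting download URL exactly: in the first, a plain literal and an f-string are adjacent literals and concatenate the same way two plain strings do; in the second, explicit `+` is needed because `PROFICIENCY_RELEASE_URL` is a variable, not a literal. A quick check with made-up values (the package name and version below are examples, not pinned versions from the plugin):

pkg = "en_core_web_trf"  # example model name
model_version = "3.6.1"  # example version
# Pre-commit style: one long f-string on a single line.
old_style = f"https://github.com/explosion/spacy-models/releases/download/{pkg}-{model_version}/{pkg}-{model_version}-py3-none-any.whl"
# Post-commit style: plain literal + f-string, joined by implicit concatenation.
new_style = (
    "https://github.com/explosion/spacy-models/releases/download/"
    f"{pkg}-{model_version}/{pkg}-{model_version}-py3-none-any.whl"
)
assert old_style == new_style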
dump_lemmas.py: 23 changes (19 additions & 4 deletions)
@@ -35,7 +35,9 @@ def spacy_doc_path(
         is_kindle = False
     py_version = ".".join(platform.python_version_tuple()[:2])
     path = custom_lemmas_folder(plugin_path).joinpath(
-        f"{lemma_lang}/{spacy_model}_{'kindle' if is_kindle else 'wiktionary'}_{gloss_lang}_{model_version}_{py_version}"
+        f"{lemma_lang}/{spacy_model}_"
+        f"{'kindle' if is_kindle else 'wiktionary'}"
+        f"_{gloss_lang}_{model_version}_{py_version}"
     )
     if prefs["use_pos"]:
         if is_phrase:
@@ -135,7 +137,11 @@ def save_spacy_docs(


 def create_lemma_patterns_with_pos(lemma_lang, conn, nlp, difficulty_limit):
-    query_sql = "SELECT DISTINCT lemma, lemma_id FROM senses JOIN lemmas ON senses.lemma_id = lemmas.id WHERE enabled = 1"
+    query_sql = """
+        SELECT DISTINCT lemma, lemma_id
+        FROM senses JOIN lemmas ON senses.lemma_id = lemmas.id
+        WHERE enabled = 1
+    """
     if difficulty_limit is not None:
         query_sql += f" AND difficulty <= {difficulty_limit}"
     for lemma, lemma_id in conn.execute(query_sql):
@@ -148,13 +154,22 @@ def create_lemma_patterns_with_pos(lemma_lang, conn, nlp, difficulty_limit):


 def create_lemma_patterns_without_pos(conn, nlp, difficulty_limit):
-    query_sql = "SELECT DISTINCT lemma FROM senses JOIN lemmas ON senses.lemma_id = lemmas.id WHERE enabled = 1"
+    query_sql = """
+        SELECT DISTINCT lemma
+        FROM senses JOIN lemmas ON senses.lemma_id = lemmas.id
+        WHERE enabled = 1
+    """
     if difficulty_limit is not None:
         query_sql += f" AND difficulty <= {difficulty_limit}"
     for (lemma,) in conn.execute(query_sql):
         yield nlp.make_doc(lemma)

-    query_sql = "SELECT DISTINCT form FROM senses JOIN forms ON senses.lemma_id = forms.lemma_id AND senses.pos = forms.pos WHERE enabled = 1"
+    query_sql = """
+        SELECT DISTINCT form
+        FROM senses JOIN forms
+        ON senses.lemma_id = forms.lemma_id AND senses.pos = forms.pos
+        WHERE enabled = 1
+    """
     if difficulty_limit is not None:
         query_sql += f" AND difficulty <= {difficulty_limit}"
     for (form,) in conn.execute(query_sql):
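After the reflow, the optional difficulty filter is still appended to the triple-quoted query with `+=`; the clause lands after the newline and indentation left by the closing `"""`, which SQLite treats as insignificant whitespace. A rough sketch with a made-up limit:

difficulty_limit = 3  # illustrative value
query_sql = """
    SELECT DISTINCT lemma
    FROM senses JOIN lemmas ON senses.lemma_id = lemmas.id
    WHERE enabled = 1
    """
if difficulty_limit is not None:
    query_sql += f" AND difficulty <= {difficulty_limit}"
# The appended condition follows the trailing whitespace from the closing
# triple quote, so the query still parses as a single WHERE clause:
# ... WHERE enabled = 1\n     AND difficulty <= 3
print(query_sql)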