Skip to content

Commit

Permalink
Remove "use GPU" option
Browse files Browse the repository at this point in the history
not much benefit but lots of errors
  • Loading branch information
xxyzz committed Aug 7, 2024
1 parent 951ecde commit dda3e64
Show file tree
Hide file tree
Showing 8 changed files with 6 additions and 100 deletions.
31 changes: 1 addition & 30 deletions config.py
Original file line number Diff line number Diff line change
Expand Up @@ -4,7 +4,7 @@
from pathlib import Path
from typing import TYPE_CHECKING, Any

from calibre.constants import isfrozen, ismacos
from calibre.constants import isfrozen
from calibre.gui2 import Dispatcher
from calibre.gui2.threaded_jobs import ThreadedJob
from calibre.utils.config import JSONConfig
Expand Down Expand Up @@ -55,8 +55,6 @@
prefs.defaults["choose_format_manually"] = True
prefs.defaults["wiktionary_gloss_lang"] = "en"
prefs.defaults["kindle_gloss_lang"] = "en"
prefs.defaults["use_gpu"] = False
prefs.defaults["cuda"] = "cu121"
prefs.defaults["use_wiktionary_for_kindle"] = False
prefs.defaults["remove_link_styles"] = False
prefs.defaults["python_path"] = ""
Expand Down Expand Up @@ -124,30 +122,6 @@ def __init__(self):
self.python_path.setText(prefs["python_path"])
form_layout.addRow(python_path_label, self.python_path)

if not ismacos:
self.use_gpu_box = QCheckBox(_("Run spaCy with GPU(requires CUDA)"))
self.use_gpu_box.setToolTip(
_(
"GPU will be used when creating X-Ray file if spaCy has transformer"
" model for the book language with ner component."
)
)
self.use_gpu_box.setChecked(prefs["use_gpu"])
vl.addWidget(self.use_gpu_box)

cuda_versions = {"cu121": "CUDA 12.1", "cu118": "CUDA 11.8"}
self.cuda_version_box = QComboBox()
for cuda_version, text in cuda_versions.items():
self.cuda_version_box.addItem(text, cuda_version)
if prefs["cuda"] not in cuda_versions:
prefs["cuda"] = "cu121"
self.cuda_version_box.setCurrentText(cuda_versions[prefs["cuda"]])
cuda_version_label = QLabel(_("CUDA version"))
cuda_version_label.setToolTip(
_('Use command "nvcc --version" to check CUDA version')
)
form_layout.addRow(cuda_version_label, self.cuda_version_box)

model_size_label = QLabel(
_('<a href="https://spacy.io/models/en">spaCy model</a> size')
)
Expand Down Expand Up @@ -232,9 +206,6 @@ def save_settings(self) -> None:
prefs["add_locator_map"] = self.locator_map_box.isChecked()
prefs["minimal_x_ray_count"] = self.minimal_x_ray_count.value()
prefs["remove_link_styles"] = self.remove_link_styles.isChecked()
if not ismacos:
prefs["use_gpu"] = self.use_gpu_box.isChecked()
prefs["cuda"] = self.cuda_version_box.currentData()
mediawiki_api = self.mediawiki_api.text().strip("/ ")
if mediawiki_api.endswith("/api.php") or mediawiki_api == "":
prefs["mediawiki_api"] = mediawiki_api
Expand Down
5 changes: 0 additions & 5 deletions data/deps.json
Original file line number Diff line number Diff line change
@@ -1,13 +1,8 @@
{
"cupy": "12.3.0",
"lxml": "5.2.2",
"rapidfuzz": "3.9.4",
"spacy": "3.7.5",
"spacy_cpu_model": "3.7.0",
"spacy_trf_model": "3.7.2",
"en_spacy_cpu_model": "3.7.1",
"en_spacy_trf_model": "3.7.3",
"thinc-apple-ops": "0.1.5",
"torch": "2.4.0",
"typing-extensions": "4.12.2"
}
22 changes: 0 additions & 22 deletions deps.py
Original file line number Diff line number Diff line change
Expand Up @@ -51,26 +51,6 @@ def install_deps(pkg: str, notif: Any) -> None:
f"{pkg}-{model_version}/{pkg}-{model_version}-py3-none-any.whl"
)
pip_install(pkg, model_version, url=url, notif=notif)
if pkg.endswith("_trf"):
from .config import prefs

pip_install("cupy-wheel", dep_versions["cupy"], notif=notif)
# PyTorch's Windows package on pypi.org is CPU build version,
reinstall the CUDA build version
if iswindows or prefs["cuda"] == "cu118":
pip_install(
"torch",
dep_versions["torch"],
extra_index=f"https://download.pytorch.org/whl/{prefs['cuda']}",
notif=notif,
)
# an old version of typing-extensions(4.4.0) is installed
# from pytorch's index which is incompatible with pydantic 2.4.2
pip_install(
"typing-extensions",
dep_versions["typing-extensions"],
notif=notif,
)

if ismacos and platform.machine() == "arm64":
pip_install(
Expand Down Expand Up @@ -125,8 +105,6 @@ def pip_install(
notif: Any = None,
) -> None:
pattern = f"{pkg.replace('-', '_')}-{pkg_version}*"
if pkg == "torch" and extra_index:
pattern = f"torch-{pkg_version}+{extra_index.split('/')[-1]}*"
if not any(LIBS_PATH.glob(pattern)):
if notif:
notif.put((0, f"Installing {pkg}"))
Expand Down
14 changes: 0 additions & 14 deletions docs/installation.rst
Original file line number Diff line number Diff line change
Expand Up @@ -71,20 +71,6 @@ Use Chocolatey or download from https://www.python.org/downloads
.. attention::
- Do not change the default installation settings in the Python installer.
- The dependencies (`PyTorch <https://pytorch.org/get-started/locally>`_) of the transformer model may not support the latest Python and CUDA versions.


Install CUDA(optional)
----------------------

`CUDA <https://en.wikipedia.org/wiki/CUDA>`_ is required for the "Run spaCy with GPU" feature, you can download CUDA from https://developer.nvidia.com/cuda-toolkit-archive

.. attention::
- The latest CUDA release usually is not supported by PyTorch, read https://pytorch.org/get-started/locally to find the supported CUDA versions.

- C/C++ compiler is needed for Windows, download from https://visualstudio.microsoft.com/downloads/#build-tools-for-visual-studio-2022

- Read the installation guide on the CUDA download page for more information.

Install WordDumb plugin
-----------------------
Expand Down
2 changes: 0 additions & 2 deletions docs/usage.rst
Original file line number Diff line number Diff line change
Expand Up @@ -8,8 +8,6 @@ Set preferences

- Enable "Fetch X-Ray people descriptions from Wikipedia or other MediaWiki server" option for nonfiction books and novels that have character pages on Wikipedia or any other MediaWiki server. A quote from the book will be used if it's disabled or the page is not found.

- Enable "Run spaCy with GPU" option if your machine has `CUDA <https://developer.nvidia.com/cuda-toolkit-archive>`_. GPU will be used when creating X-Ray file if spaCy has transformer model for the book language with ner component.

- Larger spaCy model has higher `Named-entity recognition <https://en.wikipedia.org/wiki/Named-entity_recognition>`_ precision therefore improves X-Ray quality, more details at https://spacy.io/models/en

- Enter a `MediaWiki Action API <https://www.mediawiki.org/wiki/API:Main_page>`_ link to get X-Ray descriptions from a MediaWiki server, delete the link to search Wikipedia. Most MediaWiki Action API endpoints are ``https://wiki.domain/w/api.php`` but some servers don't have the ``/w`` part, you can check the API URL in a browser.
Expand Down
10 changes: 0 additions & 10 deletions error_dialogs.py
Original file line number Diff line number Diff line change
Expand Up @@ -89,16 +89,6 @@ def subprocess_error(job: Any, parent: Any) -> None:
)
elif "ModuleNotFoundError" in exception:
module_not_found_error(job.details + exception, parent)
elif "Unable to detect NVIDIA CUDA" in exception:
error_dialog(
_("Can't find CUDA"),
_(
"'Run spaCy with GPU' feature requires "
"<a href='https://developer.nvidia.com/cuda-downloads'>CUDA</a>"
),
job.details + exception,
parent,
)
else:
check_network_error(job.details + exception, parent)

Expand Down
9 changes: 2 additions & 7 deletions parse_job.py
Original file line number Diff line number Diff line change
Expand Up @@ -728,17 +728,12 @@ def load_spacy(model: str, book_path: str | None, lemma_lang: str) -> Any:
if model == "":
return spacy.blank(lemma_lang)

excluded_components = []
excluded_components = ["parser"]
if book_path is None:
excluded_components.append("ner")

if model.endswith("_trf"):
spacy.require_gpu()
else:
excluded_components.append("parser")

nlp = spacy.load(model, exclude=excluded_components)
if not model.endswith("_trf") and book_path is not None:
if book_path is not None:
# simpler and faster https://spacy.io/usage/linguistic-features#sbd
nlp.enable_pipe("senter")

Expand Down
13 changes: 3 additions & 10 deletions utils.py
Original file line number Diff line number Diff line change
Expand Up @@ -28,8 +28,6 @@ class Prefs(TypedDict):
choose_format_manually: bool
wiktionary_gloss_lang: str
kindle_gloss_lang: str
use_gpu: bool
cuda: str
last_opened_kindle_lemmas_language: str
last_opened_wiktionary_lemmas_language: str
use_wiktionary_for_kindle: bool
Expand Down Expand Up @@ -165,11 +163,7 @@ def spacy_model_name(lemma_lang: str, prefs: Prefs) -> str:
spacy_model = languages[lemma_lang]["spacy"]
if spacy_model == "":
return ""
if prefs["use_gpu"] and languages[lemma_lang]["has_trf"]:
spacy_model += "trf"
else:
spacy_model += prefs["model_size"]
return spacy_model
return spacy_model + prefs["model_size"]


def load_languages_data(
Expand All @@ -189,9 +183,8 @@ def load_languages_data(
def get_spacy_model_version(
model_name: str, dependency_versions: dict[str, str]
) -> str:
key = "spacy_trf_model" if model_name.endswith("_trf") else "spacy_cpu_model"
lang_code = model_name[:2]
lang_key = f"{lang_code}_{key}"
lang_key = f"{lang_code}_spacy_cpu_model"
if lang_key in dependency_versions:
return dependency_versions[lang_key]
return dependency_versions.get(key, "")
return dependency_versions.get("spacy_cpu_model", "")

0 comments on commit dda3e64

Please sign in to comment.