You signed in with another tab or window. Reload to refresh your session. You signed out in another tab or window. Reload to refresh your session. You switched accounts on another tab or window. Reload to refresh your session. Dismiss alert
2024-11-01 13:31:13 INFO t5xxl_max_token_length: 512 flux_train_network_comfy.py:158
2024-11-01 13:31:14 ERROR !!! Exception during processing !!! Unable to load vocabulary from file. execution.py:392
Please check that the provided vocabulary is accessible and not corrupted.
ERROR Traceback (most recent call last): execution.py:393
File
"C:\ComfyUI_windows_portable\python_embeded\Lib\site-packages\transformers
\tokenization_utils_base.py", line 2447, in from_pretrained
tokenizer = cls(*init_inputs, **init_kwargs)
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File
"C:\ComfyUI_windows_portable\python_embeded\Lib\site-packages\transformers
\models\t5\tokenization_t5.py", line 150, in __init__
self.sp_model.Load(vocab_file)
File
"C:\ComfyUI_windows_portable\python_embeded\Lib\site-packages\sentencepiec
e\__init__.py", line 961, in Load
return self.LoadFromFile(model_file)
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File
"C:\ComfyUI_windows_portable\python_embeded\Lib\site-packages\sentencepiec
e\__init__.py", line 316, in LoadFromFile
return _sentencepiece.SentencePieceProcessor_LoadFromFile(self, arg)
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
OSError: Not found:
"C:\Users\Николай\.cache\huggingface\hub\models--google--t5-v1_1-xxl\snaps
hots\3db67ab1af984cf10548a73467f0e5bca2aaaeb2\spiece.model": No such file
or directory Error #2
During handling of the above exception, another exception occurred:
Traceback (most recent call last):
File "C:\ComfyUI_windows_portable\ComfyUI\execution.py", line 323, in
execute
output_data, output_ui, has_subgraph = get_output_data(obj,
input_data_all, execution_block_cb=execution_block_cb,
pre_execute_cb=pre_execute_cb)
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
^
File "C:\ComfyUI_windows_portable\ComfyUI\execution.py", line 198, in
get_output_data
return_values = _map_node_over_list(obj, input_data_all, obj.FUNCTION,
allow_interrupt=True, execution_block_cb=execution_block_cb,
pre_execute_cb=pre_execute_cb)
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
^^^^^^^^^^^^^^^^^^
File "C:\ComfyUI_windows_portable\ComfyUI\execution.py", line 169, in
_map_node_over_list
process_inputs(input_dict, i)
File "C:\ComfyUI_windows_portable\ComfyUI\execution.py", line 158, in
process_inputs
results.append(getattr(obj, func)(**inputs))
^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File
"C:\ComfyUI_windows_portable\ComfyUI\custom_nodes\ComfyUI-FluxTrainer\node
s.py", line 523, in init_training
training_loop = network_trainer.init_train(args)
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File
"C:\ComfyUI_windows_portable\ComfyUI\custom_nodes\ComfyUI-FluxTrainer\trai
n_network.py", line 269, in init_train
tokenize_strategy = self.get_tokenize_strategy(args)
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File
"C:\ComfyUI_windows_portable\ComfyUI\custom_nodes\ComfyUI-FluxTrainer\flux
_train_network_comfy.py", line 159, in get_tokenize_strategy
return strategy_flux.FluxTokenizeStrategy(t5xxl_max_token_length,
args.tokenizer_cache_dir)
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
^^^^^^^^^^^^^^^^^^^^^
File
"C:\ComfyUI_windows_portable\ComfyUI\custom_nodes\ComfyUI-FluxTrainer\libr
ary\strategy_flux.py", line 28, in __init__
self.t5xxl = self._load_tokenizer(T5TokenizerFast,
T5_XXL_TOKENIZER_ID, tokenizer_cache_dir=tokenizer_cache_dir)
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File
"C:\ComfyUI_windows_portable\ComfyUI\custom_nodes\ComfyUI-FluxTrainer\libr
ary\strategy_base.py", line 44, in _load_tokenizer
tokenizer = model_class.from_pretrained(model_id, subfolder=subfolder)
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File
"C:\ComfyUI_windows_portable\python_embeded\Lib\site-packages\transformers
\tokenization_utils_base.py", line 2213, in from_pretrained
return cls._from_pretrained(
^^^^^^^^^^^^^^^^^^^^^
File
"C:\ComfyUI_windows_portable\python_embeded\Lib\site-packages\transformers
\tokenization_utils_base.py", line 2251, in _from_pretrained
slow_tokenizer = (cls.slow_tokenizer_class)._from_pretrained(
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File
"C:\ComfyUI_windows_portable\python_embeded\Lib\site-packages\transformers
\tokenization_utils_base.py", line 2462, in _from_pretrained
raise OSError(
OSError: Unable to load vocabulary from file. Please check that the
provided vocabulary is accessible and not corrupted.
INFO Prompt executed in 1.13 seconds main.py:138
Is there any way to fix this?
The text was updated successfully, but these errors were encountered:
2024-11-01 13:31:13 INFO t5xxl_max_token_length: 512 flux_train_network_comfy.py:158
2024-11-01 13:31:14 ERROR !!! Exception during processing !!! Unable to load vocabulary from file. execution.py:392
Please check that the provided vocabulary is accessible and not corrupted.
ERROR Traceback (most recent call last): execution.py:393
File
"C:\ComfyUI_windows_portable\python_embeded\Lib\site-packages\transformers
\tokenization_utils_base.py", line 2447, in from_pretrained
tokenizer = cls(*init_inputs, **init_kwargs)
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File
"C:\ComfyUI_windows_portable\python_embeded\Lib\site-packages\transformers
\models\t5\tokenization_t5.py", line 150, in __init__
self.sp_model.Load(vocab_file)
File
"C:\ComfyUI_windows_portable\python_embeded\Lib\site-packages\sentencepiec
e\__init__.py", line 961, in Load
return self.LoadFromFile(model_file)
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File
"C:\ComfyUI_windows_portable\python_embeded\Lib\site-packages\sentencepiec
e\__init__.py", line 316, in LoadFromFile
return _sentencepiece.SentencePieceProcessor_LoadFromFile(self, arg)
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
OSError: Not found:
"C:\Users\Николай\.cache\huggingface\hub\models--google--t5-v1_1-xxl\snaps
hots\3db67ab1af984cf10548a73467f0e5bca2aaaeb2\spiece.model": No such file
or directory Error #2
Is there any way to fix this?
The text was updated successfully, but these errors were encountered: