#3869 TaskCfgSTT(is_cuda=True, uuid='45b813fab2', cache_folder='D:/hai/AI/win-videotrans/tmp/21720/45b813fab2', target_dir='c:

45.62* Posted at: 2 days ago 👁20

语音识别阶段出错 [Qwen-ASR(本地)] Try: pip install transformers -U or pip install -e '.[dev]' if you're working with git main
concurrent.futures.process._RemoteTraceback:
"""
Traceback (most recent call last):
File "concurrent\futures\process.py", line 246, in _process_worker
File "videotrans\process\stt_fun.py", line 806, in qwen3asr_fun
File "D:\hai\AI\win-videotrans\_internal\qwen_asr\__init__.py", line 20, in

from .inference.qwen3_asr import Qwen3ASRModel

File "D:\hai\AI\win-videotrans\_internal\qwen_asr\inference\qwen3_asr.py", line 21, in

from qwen_asr.core.transformers_backend import (

File "D:\hai\AI\win-videotrans\_internal\qwen_asr\core\transformers_backend\__init__.py", line 16, in

from .configuration_qwen3_asr import Qwen3ASRConfig

File "D:\hai\AI\win-videotrans\_internal\qwen_asr\core\transformers_backend\configuration_qwen3_asr.py", line 15, in

from transformers.configuration_utils import PretrainedConfig

File "pyimod02_importers.py", line 457, in exec_module
File "transformers\__init__.py", line 27, in

from . import dependency_versions_check

File "pyimod02_importers.py", line 457, in exec_module
File "transformers\dependency_versions_check.py", line 57, in

require_version_core(deps[pkg])

File "transformers\utils\versions.py", line 117, in require_version_core

return require_version(requirement, hint)

File "transformers\utils\versions.py", line 111, in require_version

_compare_versions(op, got_ver, want_ver, requirement, pkg, hint)

File "transformers\utils\versions.py", line 44, in _compare_versions

raise ImportError(

ImportError: tokenizers>=0.22.0 is required for a normal functioning of this module, but found tokenizers==0.20.1. Try: pip install transformers -U or pip install -e '.[dev]' if you're working with git main

1 Replies

这是由于 Qwen-ASR(本地) 渠道依赖的 tokenizers 库版本不兼容导致的。软件内置的 tokenizers 版本 (0.20.1) 低于 Qwen-ASR 模块要求的最低版本 (>=0.22.0)。

解决方案:

  1. 临时方案(推荐):更换语音识别渠道。由于您是 Windows 预打包版用户,自行升级依赖库较为复杂。建议暂时使用其他语音识别渠道,例如:

    • faster-whisper(本地):选择 large-v3 模型(对中文支持良好)。
    • openai-whisper(本地):选择 large-v3 模型。
  2. 根本解决:等待软件作者在后续版本中更新内置的 tokenizers 库依赖。您可以关注官方更新日志或重新下载最新完整包。

注意: 如果您使用的是从官网下载的预打包版(.exe),无法通过错误提示中的 pip install 命令来修复,因为那是针对源码部署用户的建议。

更多信息请查阅官方更新日志或项目发布页面的相关文档。

Post Your Reply
Open source and free maintenance is not easy. If this project is helpful to you, please consider making a small donation to help the project continue to maintain and update.

Related resource