Merge pull request #225507 from MayNiklas/update-whisper
openai-whisper: 20230124 -> 20230314
commit e4daee5b6e
pkgs/development/python-modules/openai-whisper/default.nix
@@ -2,6 +2,7 @@
 , fetchFromGitHub
 , buildPythonPackage
 , substituteAll
+, cudaSupport ? false
 
 # runtime
 , ffmpeg
@@ -9,10 +10,15 @@
 # propagates
 , numpy
 , torch
+, torchWithCuda
 , tqdm
 , more-itertools
 , transformers
 , ffmpeg-python
 , numba
+, openai-triton
+, scipy
+, tiktoken
 
 # tests
 , pytestCheckHook
@@ -20,14 +26,14 @@
 
 buildPythonPackage rec {
   pname = "whisper";
-  version = "20230124";
+  version = "20230314";
   format = "setuptools";
 
   src = fetchFromGitHub {
     owner = "openai";
     repo = pname;
     rev = "refs/tags/v${version}";
-    hash = "sha256-+3fs/EXK5NGlISuMTk7r2ZZ4tNFKbNFNkVS2LmHBvwk=";
+    hash = "sha256-qQCELjRFeRCT1k1CBc3netRtFvt+an/EbkrgnmiX/mc=";
   };
 
   patches = [
@@ -39,13 +45,31 @@ buildPythonPackage rec {
 
   propagatedBuildInputs = [
     numpy
-    torch
     tqdm
     more-itertools
     transformers
     ffmpeg-python
     numba
+    scipy
+    tiktoken
+  ] ++ lib.optionals (!cudaSupport) [
+    torch
+  ] ++ lib.optionals (cudaSupport) [
+    openai-triton
+    torchWithCuda
   ];
 
+  postPatch = ''
+    substituteInPlace requirements.txt \
+      --replace "tiktoken==0.3.1" "tiktoken>=0.3.1"
+  ''
+  # openai-triton is only needed for CUDA support.
+  # triton needs CUDA to be built.
+  # -> by making it optional, we can build whisper without unfree packages enabled
+  + lib.optionalString (!cudaSupport) ''
+    sed -i '/if sys.platform.startswith("linux") and platform.machine() == "x86_64":/{N;d}' setup.py
+  '';
+
   preCheck = ''
     export HOME=$TMPDIR
   '';
@@ -56,14 +80,18 @@ buildPythonPackage rec {
 
   disabledTests = [
     # requires network access to download models
     "test_tokenizer"
     "test_transcribe"
+    # requires NVIDIA drivers
+    "test_dtw_cuda_equivalence"
+    "test_median_filter_equivalence"
   ];
 
   meta = with lib; {
+    changelog = "https://github.com/openai/whisper/blob/v${version}/CHANGELOG.md";
     description = "General-purpose speech recognition model";
     homepage = "https://github.com/openai/whisper";
     license = licenses.mit;
-    maintainers = with maintainers; [ hexa ];
+    maintainers = with maintainers; [ hexa MayNiklas ];
   };
 }
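With cudaSupport exposed as a plain argument of the derivation above, the CUDA variant can also be requested per package. A minimal consumer-side sketch (not part of this commit; it assumes the usual .override mechanism that callPackage-based Python packages provide):

  # Hypothetical usage: build openai-whisper with its CUDA code path enabled.
  python3Packages.openai-whisper.override { cudaSupport = true; }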
pkgs/top-level/python-packages.nix
@@ -6815,7 +6815,9 @@ self: super: with self; {
 
   openai-triton = callPackage ../development/python-modules/openai-triton { llvmPackages = pkgs.llvmPackages_rocm; };
 
-  openai-whisper = callPackage ../development/python-modules/openai-whisper { };
+  openai-whisper = callPackage ../development/python-modules/openai-whisper {
+    cudaSupport = pkgs.config.cudaSupport or false;
+  };
 
   openant = callPackage ../development/python-modules/openant { };
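Since the python-packages.nix hunk defaults cudaSupport to pkgs.config.cudaSupport, the switch can also be flipped globally through the nixpkgs config. A rough sketch, assuming a local nixpkgs import; allowUnfree is included because the CUDA toolchain is unfree, which is exactly what the optional triton/torchWithCuda inputs avoid by default:

  # Sketch: evaluate openai-whisper with CUDA support enabled for the whole package set.
  let
    pkgs = import <nixpkgs> {
      config = {
        cudaSupport = true;  # picked up by the "or false" fallback above
        allowUnfree = true;  # CUDA packages are unfree
      };
    };
  in
  pkgs.python3Packages.openai-whisper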