ggerganov committed on
Commit
7cf1c53
·
unverified ·
1 Parent(s): bec875e

models : change default hosting to Hugging Face

Browse files

My Linode is running out of monthly bandwidth due to the big interest in
the project

README.md CHANGED
@@ -428,11 +428,14 @@ The original models are converted to a custom binary format. This allows to pack
428
  - vocabulary
429
  - weights
430
 
431
- You can download the converted models using the [models/download-ggml-model.sh](models/download-ggml-model.sh) script or from here:
 
432
 
433
- https://ggml.ggerganov.com
 
434
 
435
- For more details, see the conversion script [models/convert-pt-to-ggml.py](models/convert-pt-to-ggml.py) or the README in [models](models).
 
436
 
437
  ## Bindings
438
 
 
428
  - vocabulary
429
  - weights
430
 
431
+ You can download the converted models using the [models/download-ggml-model.sh](models/download-ggml-model.sh) script
432
+ or manually from here:
433
 
434
+ - https://huggingface.co/datasets/ggerganov/whisper.cpp
435
+ - https://ggml.ggerganov.com
436
 
437
+ For more details, see the conversion script [models/convert-pt-to-ggml.py](models/convert-pt-to-ggml.py) or the README
438
+ in [models](models).
439
 
440
  ## Bindings
441
 
models/README.md CHANGED
@@ -1,10 +1,13 @@
1
  ## Whisper model files in custom ggml format
2
 
3
  The [original Whisper PyTorch models provided by OpenAI](https://github.com/openai/whisper/blob/main/whisper/__init__.py#L17-L27)
4
- have been converted to custom `ggml` format in order to be able to load them in C/C++. The conversion has been performed using the
5
- [convert-pt-to-ggml.py](convert-pt-to-ggml.py) script. You can either obtain the original models and generate the `ggml` files
6
- yourself using the conversion script, or you can use the [download-ggml-model.sh](download-ggml-model.sh) script to download the
7
- already converted models from https://ggml.ggerganov.com
 
 
 
8
 
9
  Sample usage:
10
 
 
1
  ## Whisper model files in custom ggml format
2
 
3
  The [original Whisper PyTorch models provided by OpenAI](https://github.com/openai/whisper/blob/main/whisper/__init__.py#L17-L27)
4
+ have been converted to custom `ggml` format in order to be able to load them in C/C++. The conversion has been performed
5
+ using the [convert-pt-to-ggml.py](convert-pt-to-ggml.py) script. You can either obtain the original models and generate
6
+ the `ggml` files yourself using the conversion script, or you can use the [download-ggml-model.sh](download-ggml-model.sh)
7
+ script to download the already converted models. Currently, they are hosted on the following locations:
8
+
9
+ - https://huggingface.co/datasets/ggerganov/whisper.cpp
10
+ - https://ggml.ggerganov.com
11
 
12
  Sample usage:
13
 
models/download-ggml-model.cmd CHANGED
@@ -18,7 +18,7 @@ if %argc% neq 1 (
18
 
19
  set model=%1
20
 
21
- for %%b in (%models%) do (
22
  if "%%b"=="%model%" (
23
  CALL :download_model
24
  goto :eof
@@ -41,7 +41,7 @@ if exist "ggml-%model%.bin" (
41
 
42
  PowerShell -NoProfile -ExecutionPolicy Bypass -Command "Invoke-WebRequest -Uri https://ggml.ggerganov.com/ggml-model-whisper-%model%.bin -OutFile ggml-%model%.bin"
43
 
44
- if %ERRORLEVEL% neq 0 (
45
  echo Failed to download ggml model %model%
46
  echo Please try again later or download the original Whisper model files and convert them yourself.
47
  goto :eof
 
18
 
19
  set model=%1
20
 
21
+ for %%b in (%models%) do (
22
  if "%%b"=="%model%" (
23
  CALL :download_model
24
  goto :eof
 
41
 
42
  PowerShell -NoProfile -ExecutionPolicy Bypass -Command "Invoke-WebRequest -Uri https://ggml.ggerganov.com/ggml-model-whisper-%model%.bin -OutFile ggml-%model%.bin"
43
 
44
+ if %ERRORLEVEL% neq 0 (
45
  echo Failed to download ggml model %model%
46
  echo Please try again later or download the original Whisper model files and convert them yourself.
47
  goto :eof
models/download-ggml-model.sh CHANGED
@@ -3,6 +3,12 @@
3
  # This script downloads Whisper model files that have already been converted to ggml format.
4
  # This way you don't have to convert them yourself.
5
 
 
 
 
 
 
 
6
  # get the path of this script
7
  function get_script_path() {
8
  if [ -x "$(command -v realpath)" ]; then
@@ -46,7 +52,7 @@ fi
46
 
47
  # download ggml model
48
 
49
- printf "Downloading ggml model $model ...\n"
50
 
51
  cd $models_path
52
 
@@ -56,9 +62,9 @@ if [ -f "ggml-$model.bin" ]; then
56
  fi
57
 
58
  if [ -x "$(command -v wget)" ]; then
59
- wget --quiet --show-progress -O ggml-$model.bin https://ggml.ggerganov.com/ggml-model-whisper-$model.bin
60
  elif [ -x "$(command -v curl)" ]; then
61
- curl --output ggml-$model.bin https://ggml.ggerganov.com/ggml-model-whisper-$model.bin
62
  else
63
  printf "Either wget or curl is required to download models.\n"
64
  exit 1
 
3
  # This script downloads Whisper model files that have already been converted to ggml format.
4
  # This way you don't have to convert them yourself.
5
 
6
+ #src="https://ggml.ggerganov.com"
7
+ #pfx="ggml-model-whisper"
8
+
9
+ src="https://huggingface.co/datasets/ggerganov/whisper.cpp"
10
+ pfx="resolve/main/ggml"
11
+
12
  # get the path of this script
13
  function get_script_path() {
14
  if [ -x "$(command -v realpath)" ]; then
 
52
 
53
  # download ggml model
54
 
55
+ printf "Downloading ggml model $model from '$src' ...\n"
56
 
57
  cd $models_path
58
 
 
62
  fi
63
 
64
  if [ -x "$(command -v wget)" ]; then
65
+ wget --quiet --show-progress -O ggml-$model.bin $src/$pfx-$model.bin
66
  elif [ -x "$(command -v curl)" ]; then
67
+ curl --output ggml-$model.bin $src/$pfx-$model.bin
68
  else
69
  printf "Either wget or curl is required to download models.\n"
70
  exit 1