whitphx (HF Staff) committed
Commit e1dd8ad · verified · 1 Parent(s): e2a4ce5

Add/update the quantized ONNX model files and README.md for Transformers.js v3


## Applied Quantizations

### ✅ Based on `decoder_model.onnx` *with* slimming

↳ ✅ `fp16`: `decoder_model_fp16.onnx` (added)
↳ ✅ `int8`: `decoder_model_int8.onnx` (added)
↳ ✅ `uint8`: `decoder_model_uint8.onnx` (added)
↳ ✅ `q4`: `decoder_model_q4.onnx` (added)
↳ ✅ `q4f16`: `decoder_model_q4f16.onnx` (added)
↳ ✅ `bnb4`: `decoder_model_bnb4.onnx` (added)

### ✅ Based on `encoder_model.onnx` *with* slimming

↳ ❌ `int8`: `encoder_model_int8.onnx` (added, but the JS-based E2E test failed; error log below, and see the per-module `dtype` sketch after this list)
```
dtype not specified for "decoder_model_merged". Using the default dtype (fp32) for this device (cpu).
/home/ubuntu/src/tjsmigration/node_modules/.pnpm/[email protected]/node_modules/onnxruntime-node/dist/backend.js:25
__classPrivateFieldGet(this, _OnnxruntimeSessionHandler_inferenceSession, "f").loadModel(pathOrBuffer, options);
^

Error: Could not find an implementation for ConvInteger(10) node with name '/conv1/Conv_quant'
at new OnnxruntimeSessionHandler (/home/ubuntu/src/tjsmigration/node_modules/.pnpm/[email protected]/node_modules/onnxruntime-node/dist/backend.js:25:92)
at Immediate.<anonymous> (/home/ubuntu/src/tjsmigration/node_modules/.pnpm/[email protected]/node_modules/onnxruntime-node/dist/backend.js:67:29)
at process.processImmediate (node:internal/timers:485:21)

Node.js v22.16.0
```
↳ ✅ `uint8`: `encoder_model_uint8.onnx` (added)
↳ ✅ `q4`: `encoder_model_q4.onnx` (added)
↳ ✅ `q4f16`: `encoder_model_q4f16.onnx` (added)
↳ ✅ `bnb4`: `encoder_model_bnb4.onnx` (added)
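
The `ConvInteger` failure above only affects the int8 encoder in onnxruntime-node on CPU. A minimal sketch of a workaround, assuming Transformers.js v3's per-module `dtype` object and the `Xenova/whisper-tiny.en` repo id used in the README below; the specific dtype choices are illustrative, not the configuration used in the E2E test:

```js
import { pipeline } from '@huggingface/transformers';

// Pick a working quantization per module so the failing int8 encoder is never loaded.
const transcriber = await pipeline('automatic-speech-recognition', 'Xenova/whisper-tiny.en', {
  dtype: {
    encoder_model: 'uint8',     // int8 hit the ConvInteger error above; uint8 passed
    decoder_model_merged: 'q4', // any of the decoder_model_merged_* variants listed below
  },
});

const url = 'https://huggingface.co/datasets/Xenova/transformers.js-docs/resolve/main/jfk.wav';
console.log(await transcriber(url));
```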

### ✅ Based on `decoder_with_past_model.onnx` *with* slimming

↳ ✅ `fp16`: `decoder_with_past_model_fp16.onnx` (added)
↳ ✅ `int8`: `decoder_with_past_model_int8.onnx` (added)
↳ ✅ `uint8`: `decoder_with_past_model_uint8.onnx` (added)
↳ ✅ `q4`: `decoder_with_past_model_q4.onnx` (added)
↳ ✅ `q4f16`: `decoder_with_past_model_q4f16.onnx` (added)
↳ ✅ `bnb4`: `decoder_with_past_model_bnb4.onnx` (added)

### ✅ Based on `decoder_model_merged.onnx` *without* slimming

↳ ✅ `fp16`: `decoder_model_merged_fp16.onnx` (replaced because it was invalid)
↳ ✅ `int8`: `decoder_model_merged_int8.onnx` (added)
↳ ✅ `uint8`: `decoder_model_merged_uint8.onnx` (added)
↳ ✅ `q4`: `decoder_model_merged_q4.onnx` (added)
↳ ✅ `q4f16`: `decoder_model_merged_q4f16.onnx` (added)
↳ ✅ `bnb4`: `decoder_model_merged_bnb4.onnx` (added)
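
All of the variants above are selected at load time through the `dtype` option documented in the README update below. A minimal sketch, assuming the `Xenova/whisper-tiny.en` repo id from the README; `'q4'` is just one of the listed options:

```js
import { pipeline } from '@huggingface/transformers';

// Load the q4-quantized ONNX weights added in this commit
// (README options: "fp32", "fp16", "q8", "q4").
const transcriber = await pipeline('automatic-speech-recognition', 'Xenova/whisper-tiny.en', {
  dtype: 'q4',
});

const url = 'https://huggingface.co/datasets/Xenova/transformers.js-docs/resolve/main/jfk.wav';
const output = await transcriber(url);
console.log(output); // { text: "..." }
```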

README.md CHANGED
@@ -3,13 +3,10 @@ base_model: openai/whisper-tiny.en
 library_name: transformers.js
 ---
 
-
 # Whisper
 
 [openai/whisper-tiny.en](https://huggingface.co/openai/whisper-tiny.en) with ONNX weights to be compatible with [Transformers.js](https://huggingface.co/docs/transformers.js).
 
-
-
 ## Usage (Transformers.js)
 
 If you haven't already, you can install the [Transformers.js](https://huggingface.co/docs/transformers.js) JavaScript library from [NPM](https://www.npmjs.com/package/@huggingface/transformers) using:
@@ -19,12 +16,13 @@ npm i @huggingface/transformers
 
 **Example:** Transcribe English.
 
-
 ```js
 import { pipeline } from '@huggingface/transformers';
 
 // Create speech recognition pipeline
-const transcriber = await pipeline('automatic-speech-recognition', 'Xenova/whisper-tiny.en');
+const transcriber = await pipeline('automatic-speech-recognition', 'Xenova/whisper-tiny.en', {
+  dtype: 'fp32', // Options: "fp32", "fp16", "q8", "q4"
+});
 
 // Transcribe audio from URL
 const url = 'https://huggingface.co/datasets/Xenova/transformers.js-docs/resolve/main/jfk.wav';
@@ -38,7 +36,9 @@ const output = await transcriber(url);
 import { pipeline } from '@huggingface/transformers';
 
 // Create speech recognition pipeline
-const transcriber = await pipeline('automatic-speech-recognition', 'Xenova/whisper-tiny.en');
+const transcriber = await pipeline('automatic-speech-recognition', 'Xenova/whisper-tiny.en', {
+  dtype: 'fp32', // Options: "fp32", "fp16", "q8", "q4"
+});
 
 // Transcribe audio from URL with timestamps
 const url = 'https://huggingface.co/datasets/Xenova/transformers.js-docs/resolve/main/jfk.wav';
@@ -58,7 +58,9 @@ const output = await transcriber(url, { return_timestamps: true });
 import { pipeline } from '@huggingface/transformers';
 
 // Create speech recognition pipeline
-const transcriber = await pipeline('automatic-speech-recognition', 'Xenova/whisper-tiny.en');
+const transcriber = await pipeline('automatic-speech-recognition', 'Xenova/whisper-tiny.en', {
+  dtype: 'fp32', // Options: "fp32", "fp16", "q8", "q4"
+});
 
 // Transcribe audio from URL with word-level timestamps
 const url = 'https://huggingface.co/datasets/Xenova/transformers.js-docs/resolve/main/jfk.wav';
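
The third README hunk is truncated at the URL line above. For completeness, a hedged sketch of how the new `dtype` option combines with word-level timestamps; `return_timestamps: 'word'` is the Transformers.js convention for word-level output and is not part of the visible hunk:

```js
import { pipeline } from '@huggingface/transformers';

// Create speech recognition pipeline with an explicit dtype, as in the updated README
const transcriber = await pipeline('automatic-speech-recognition', 'Xenova/whisper-tiny.en', {
  dtype: 'fp32', // Options: "fp32", "fp16", "q8", "q4"
});

// Transcribe audio from URL with word-level timestamps
const url = 'https://huggingface.co/datasets/Xenova/transformers.js-docs/resolve/main/jfk.wav';
const output = await transcriber(url, { return_timestamps: 'word' });
console.log(output.chunks); // [{ text: '...', timestamp: [start, end] }, ...]
```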
onnx/decoder_model_bnb4.onnx ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:95b70fcb440a3ac72ae548e054d9d1e1ea3a62e29928629291a38feb7a5ea443
+size 85952527

onnx/decoder_model_fp16.onnx ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:c048ad447e8f1d76b79532d0df7aaac879be93f5668ae67749bd49e099980ea7
+size 59283763

onnx/decoder_model_int8.onnx ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:3543fb176f09438923e530029492dc0aee05dc19dc5458b6584f622026a95cc7
+size 110040036

onnx/decoder_model_merged_bnb4.onnx ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:c18806ab89e8dc0dee96bf20132c314c2358f1a3fc8749c548e716cc289043fc
+size 86148650

onnx/decoder_model_merged_fp16.onnx CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:6260d80026e23bfa91b47917e534edf73f11fbf03d733836424e3e5236991a38
-size 59599490
+oid sha256:2186c6567a2a10ea74f425a369cad5e598c17953d731aa05d23e5fff16afa61f
+size 59602260

onnx/decoder_model_merged_int8.onnx ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:e28fbe180689bb016ed4c938512c0f789cb51b6ba61055b8d7e1aca79e048a2f
+size 30727547

onnx/decoder_model_merged_q4.onnx ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:d9278519f295f300e8f66f557fcab7b22a48e5e98c2e5ad57694a4397ad6bd59
+size 86737938

onnx/decoder_model_merged_q4f16.onnx ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:0321a7484af0630a8c3ae837c8fe48e96734af435512d7a43fe07a27f229c61b
+size 46040376

onnx/decoder_model_merged_uint8.onnx ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:d25cbda3719da7ef3ac69c18f3d93267ccf10e17b0110fac55b51f58810ff425
+size 30727570

onnx/decoder_model_q4.onnx ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:5145e4a1f8748ad0443d85c7ce2176359799fa99578f06088d72ef122496905a
+size 86542103

onnx/decoder_model_q4f16.onnx ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:a825e1ef7a76989635f445986ce9a349028a6d119f3bf7fe3bf2397b435ac1bf
+size 45723734

onnx/decoder_model_uint8.onnx ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:31fe007826bdd9ed2212da1d3c4d5397943d87a33a57a834cf69258c328f93c4
+size 110040059

onnx/decoder_with_past_model_bnb4.onnx ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:dd7471775f5eb10ae42cd80e7dc2a5f8020a28ab449e327e97a84eed9399120a
+size 85285469

onnx/decoder_with_past_model_fp16.onnx ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:03df0d6070df692adf77dfad1dfeeaf7202df504fb13cf2790cd37ce9004ec75
+size 56927741

onnx/decoder_with_past_model_int8.onnx ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:9f7014b3faea22c37d2225ebd9e20dd05b061aa8c1d6a27d59fef86888ef517f
+size 108850111

onnx/decoder_with_past_model_q4.onnx ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:40a880819ea48e92244b18fdca0a079ba0900a46831439b8a6d549a9d230e755
+size 85801365

onnx/decoder_with_past_model_q4f16.onnx ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:ad8d96aeda21889ace8590f87e136f051cadc1e83de9c12cee7d472727b49b0d
+size 45062280

onnx/decoder_with_past_model_uint8.onnx ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:50a73923d61f8ccd4a084db1f33fd1554d56ece6fa3305acfab8202ab342f60b
+size 108850130

onnx/encoder_model_bnb4.onnx ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:e38d6ab78841542154c1dc54bf5d54ef0204d820fa04c928ec9ed29ed5603c6c
+size 8563828

onnx/encoder_model_q4.onnx ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:72cda0fe855ebe366b62131facdbe8d89783f1911e2a21e52cc49d3ca535cf46
+size 9006044

onnx/encoder_model_q4f16.onnx ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:b834b21e0f0b7ddffa3b80f83b141aeda300db8108111414d67a5e05716ced12
+size 6303086

onnx/encoder_model_uint8.onnx ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:67316ca75ab167a0039b8390b184e5119e7f6c79ffc3fdc696cbb01889593457
+size 10079605
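
Each ONNX file above is stored as a Git LFS pointer (version, oid sha256, size). A small Node sketch for sanity-checking a downloaded file against its pointer; the local path is hypothetical and the expected values are copied from the `encoder_model_uint8.onnx` pointer above:

```js
import { createHash } from 'node:crypto';
import { readFileSync, statSync } from 'node:fs';

// Hypothetical local copy of one of the files added in this commit.
const path = './onnx/encoder_model_uint8.onnx';

// Values copied from the LFS pointer diff above.
const expected = {
  oid: '67316ca75ab167a0039b8390b184e5119e7f6c79ffc3fdc696cbb01889593457',
  size: 10079605,
};

const actualOid = createHash('sha256').update(readFileSync(path)).digest('hex');
const actualSize = statSync(path).size;

console.log('oid matches: ', actualOid === expected.oid);
console.log('size matches:', actualSize === expected.size);
```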