Add transformers.js sample code (#6)
Browse files- Add transformers.js sample code (f2ec3aa090aaa90df1481872580bf64c338fada8)
Co-authored-by: Joshua <[email protected]>
README.md
CHANGED
@@ -185,6 +185,26 @@ embeddings = model.encode([
 185      print(cos_sim(embeddings[0], embeddings[1]))
 186      ```
 187
 188  +  You can also use the [Transformers.js](https://huggingface.co/docs/transformers.js) library to compute embeddings in JavaScript.
 189  +
 190  +  ```js
// npm i @xenova/transformers
import { pipeline, cos_sim } from '@xenova/transformers';

// Load the feature-extraction pipeline with the Jina code-embedding model.
// quantized: false requests the full-precision weights.
const extractor = await pipeline('feature-extraction', 'jinaai/jina-embeddings-v2-base-code', {
  quantized: false, // Comment out this line to use the 8-bit quantized version
});

// A natural-language question and a code snippet that answers it.
const texts = [
  'How do I access the index while iterating over a sequence with a for loop?',
  '# Use the built-in enumerator\nfor idx, x in enumerate(xs):\n print(idx, x)',
];

// Mean-pool token embeddings into one vector per input text.
const embeddings = await extractor(texts, { pooling: 'mean' });

// Cosine similarity between the two embedding vectors (each tensor's
// raw values live in its .data field).
const score = cos_sim(embeddings[0].data, embeddings[1].data);
console.log(score);
// 0.7281748759529421
 206  +  ```
 207  +
 208      ## Plans
 209
 210      1. Bilingual embedding models supporting more European & Asian languages, including Spanish, French, Italian and Japanese.