MRasheq committed
Commit b0f6c9f · verified · 1 Parent(s): 3f4f3ba

Update index.js

Files changed (1)
  1. index.js +30 -77
index.js CHANGED
@@ -1,79 +1,32 @@
-import { pipeline, env } from 'https://cdn.jsdelivr.net/npm/@xenova/transformers@2.10.1';
-
-// Since we will download the model from the Hugging Face Hub, we can skip the local model check
-env.allowLocalModels = false;
-
-// Reference the elements that we will need
-const status = document.getElementById('status');
-const fileUpload = document.getElementById('upload');
-const imageContainer = document.getElementById('container');
-const example = document.getElementById('example');
-
-const EXAMPLE_URL = 'https://huggingface.co/datasets/Xenova/transformers.js-docs/resolve/main/city-streets.jpg';
-
-// Create a new object detection pipeline
-status.textContent = 'Loading model...';
-const detector = await pipeline('object-detection', 'Xenova/detr-resnet-50');
-status.textContent = 'Ready';
-
-example.addEventListener('click', (e) => {
-    e.preventDefault();
-    detect(EXAMPLE_URL);
+import { pipeline } from "@huggingface/transformers";
+
+// Create a text-generation pipeline
+const generator = await pipeline(
+    "text-generation",
+    "HuggingFaceTB/SmolLM2-360M", // You can replace this with other models like "EleutherAI/gpt-neo-125M" or "facebook/opt-125m"
+    { device: "webgpu" }
+);
+
+// Generate text
+const prompts = [
+    "Once upon a time",
+    "The artificial intelligence"
+];
+
+const results = await generator(prompts, {
+    max_length: 50,
+    num_return_sequences: 1,
+    temperature: 0.7,
+    top_p: 0.9
 });
 
-fileUpload.addEventListener('change', function (e) {
-    const file = e.target.files[0];
-    if (!file) {
-        return;
-    }
-
-    const reader = new FileReader();
-
-    // Set up a callback when the file is loaded
-    reader.onload = e2 => detect(e2.target.result);
-
-    reader.readAsDataURL(file);
-});
-
-
-// Detect objects in the image
-async function detect(img) {
-    imageContainer.innerHTML = '';
-    imageContainer.style.backgroundImage = `url(${img})`;
-
-    status.textContent = 'Analysing...';
-    const output = await detector(img, {
-        threshold: 0.5,
-        percentage: true,
-    });
-    status.textContent = '';
-    output.forEach(renderBox);
-}
-
-// Render a bounding box and label on the image
-function renderBox({ box, label }) {
-    const { xmax, xmin, ymax, ymin } = box;
-
-    // Generate a random color for the box
-    const color = '#' + Math.floor(Math.random() * 0xFFFFFF).toString(16).padStart(6, 0);
-
-    // Draw the box
-    const boxElement = document.createElement('div');
-    boxElement.className = 'bounding-box';
-    Object.assign(boxElement.style, {
-        borderColor: color,
-        left: 100 * xmin + '%',
-        top: 100 * ymin + '%',
-        width: 100 * (xmax - xmin) + '%',
-        height: 100 * (ymax - ymin) + '%',
-    })
-
-    // Draw label
-    const labelElement = document.createElement('span');
-    labelElement.textContent = label;
-    labelElement.className = 'bounding-box-label';
-    labelElement.style.backgroundColor = color;
-
-    boxElement.appendChild(labelElement);
-    imageContainer.appendChild(boxElement);
-}
+console.log(results);
+// Will output something like:
+// [
+//   [{
+//     "generated_text": "Once upon a time there was a young princess who lived in a castle..."
+//   }],
+//   [{
+//     "generated_text": "The artificial intelligence revolution has transformed the way we..."
+//   }]
+// ]
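
Note: the new script imports from the bare specifier "@huggingface/transformers", which only resolves through a bundler or an import map, whereas the previous version loaded transformers.js from a full CDN URL. A minimal sketch of the CDN-style equivalent is below; it is not part of this commit, and the version pin and WASM fallback are assumptions.

// Sketch: load transformers.js v3 directly from a CDN instead of the bare specifier.
// The version pin is an assumption; newer 3.x releases expose the same pipeline API.
import { pipeline } from "https://cdn.jsdelivr.net/npm/@huggingface/transformers@3.0.0";

// Same text-generation pipeline as in the commit, falling back to WASM
// when the browser has no WebGPU support.
const generator = await pipeline("text-generation", "HuggingFaceTB/SmolLM2-360M", {
    device: navigator.gpu ? "webgpu" : "wasm",
});

// For a single prompt, the pipeline returns an array of { generated_text } objects.
const output = await generator("Once upon a time", { max_new_tokens: 50 });
console.log(output[0].generated_text);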