-
Notifications
You must be signed in to change notification settings - Fork 0
Expand file tree
/
Copy pathscript.js
More file actions
92 lines (73 loc) · 2.61 KB
/
script.js
File metadata and controls
92 lines (73 loc) · 2.61 KB
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
import { GestureRecognizer, FilesetResolver } from "https://cdn.jsdelivr.net/npm/@mediapipe/tasks-vision@0.10.0/vision_bundle.js";
// DOM handles: the <video> element that receives the webcam stream, and
// the canvas we draw frames plus gesture labels onto.
const videoElement = document.getElementById("webcam");
const canvasElement = document.getElementById("outputCanvas");
const ctx = canvasElement.getContext("2d");
// Set asynchronously by createGestureRecognizer(); predict() bails out
// with a warning until it is populated.
let gestureRecognizer;
// Request the user's webcam, attach the stream to the <video> element,
// and size the canvas to the native video resolution once metadata is
// available. Resolves when the canvas is ready for drawing.
async function setupCamera() {
  const stream = await navigator.mediaDevices.getUserMedia({
    video: { width: 640, height: 480 },
    audio: false,
  });
  videoElement.srcObject = stream;

  await new Promise((resolve) => {
    videoElement.onloadedmetadata = () => {
      // Match canvas to the incoming video so rendering stays crisp.
      canvasElement.width = videoElement.videoWidth;
      canvasElement.height = videoElement.videoHeight;
      resolve();
    };
  });
}
// Load the MediaPipe Tasks Vision WASM runtime and build the gesture
// recognizer in VIDEO mode. On failure the error is logged and
// `gestureRecognizer` stays undefined; predict() guards against that.
//
// Fix: `maxResults` is not a GestureRecognizer option and was silently
// ignored — the documented option for limiting detections is `numHands`.
async function createGestureRecognizer() {
  try {
    const vision = await FilesetResolver.forVisionTasks(
      "https://cdn.jsdelivr.net/npm/@mediapipe/tasks-vision@0.10.0/wasm"
    );
    gestureRecognizer = await GestureRecognizer.createFromOptions(vision, {
      baseOptions: {
        modelAssetPath: "./gesture_model.task", // exported .task model bundle
      },
      runningMode: "VIDEO",
      numHands: 1, // track a single hand (replaces invalid `maxResults`)
    });
    console.log("GestureRecognizer created successfully.");
  } catch (error) {
    // Keep the page alive; the render loop will warn that init failed.
    console.error("Failed to create GestureRecognizer:", error);
  }
}
// Per-frame loop: run gesture recognition on the current video frame,
// redraw the frame, overlay the top gesture label, and schedule the
// next frame via requestAnimationFrame.
//
// Fixes:
// - removed the per-frame console.log of full results (debug leftover
//   executing every animation frame);
// - skip frames until the video has pixel data — recognizeForVideo
//   throws on a zero-size frame;
// - dropped the needless `await`: recognizeForVideo returns its result
//   synchronously in VIDEO running mode.
async function predict() {
  if (!gestureRecognizer) {
    // main() only starts the loop after init, so this ends it cleanly.
    console.warn("GestureRecognizer not initialized yet.");
    return;
  }
  try {
    if (videoElement.videoWidth > 0 && videoElement.videoHeight > 0) {
      const results = gestureRecognizer.recognizeForVideo(videoElement, performance.now());

      // Repaint the current camera frame.
      ctx.clearRect(0, 0, canvasElement.width, canvasElement.height);
      ctx.drawImage(videoElement, 0, 0, canvasElement.width, canvasElement.height);

      // Overlay the top prediction for the first detected hand, if any.
      if (results.gestures && results.gestures.length > 0) {
        const [topGesture] = results.gestures[0];
        ctx.font = "30px Arial";
        ctx.fillStyle = "red";
        ctx.fillText(
          `Gesture: ${topGesture.categoryName} (${(topGesture.score * 100).toFixed(1)}%)`,
          10,
          40
        );
      }
    }
  } catch (error) {
    console.error("Error during gesture recognition:", error);
  }
  requestAnimationFrame(predict);
}
// Entry point: camera first (so the canvas is sized), then the model,
// then start the render loop.
//
// Fixes:
// - videoElement.play() returns a promise that rejects under autoplay
//   restrictions; await it so the failure is catchable;
// - the top-level main() promise was floating — attach a .catch so
//   startup failures surface instead of becoming unhandled rejections.
async function main() {
  await setupCamera();
  await videoElement.play();
  await createGestureRecognizer();
  predict();
}

main().catch((err) => console.error("Startup failed:", err));