practisebook committed (verified)
Commit 1c87666 · Parent: eee4541

Delete assets/detection.js

Files changed (1)
  1. assets/detection.js +0 -94
assets/detection.js DELETED
@@ -1,94 +0,0 @@
- const video = document.getElementById("video");
- const canvas = document.getElementById("canvas");
- const ctx = canvas.getContext("2d");
- const startButton = document.getElementById("start");
-
- async function loadModel() {
-   console.log("Loading model...");
-   const model = await tf.loadGraphModel(
-     "https://tfhub.dev/tensorflow/ssd_mobilenet_v2/1/default/1", // Replace with your model URL if required
-     { fromTFHub: true }
-   );
-   console.log("Model loaded successfully.");
-   return model;
- }
-
- function speak(text) {
-   const synth = window.speechSynthesis;
-   const utterance = new SpeechSynthesisUtterance(text);
-   synth.speak(utterance);
- }
-
- async function setupCamera() {
-   const stream = await navigator.mediaDevices.getUserMedia({
-     video: { width: 640, height: 480 },
-   });
-   video.srcObject = stream;
-   return new Promise((resolve) => {
-     video.onloadedmetadata = () => resolve(video);
-   });
- }
-
- async function detectObjects(model) {
-   console.log("Starting detection...");
-
-   video.play();
-   canvas.width = video.videoWidth;
-   canvas.height = video.videoHeight;
-
-   async function processFrame() {
-     ctx.drawImage(video, 0, 0, canvas.width, canvas.height);
-
-     const inputTensor = tf.browser
-       .fromPixels(video)
-       .resizeBilinear([300, 300])
-       .expandDims(0)
-       .div(tf.scalar(255));
-
-     const predictions = await model.executeAsync(inputTensor);
-
-     const [boxes, scores, classes] = predictions;
-
-     ctx.clearRect(0, 0, canvas.width, canvas.height);
-     ctx.drawImage(video, 0, 0, canvas.width, canvas.height);
-
-     const detectionThreshold = 0.5;
-
-     scores.arraySync()[0].forEach((score, index) => {
-       if (score > detectionThreshold) {
-         const bbox = boxes.arraySync()[0][index];
-         const classId = classes.arraySync()[0][index];
-         const label = `Object ${classId}`;
-
-         const x = bbox[1] * canvas.width;
-         const y = bbox[0] * canvas.height;
-         const width = (bbox[3] - bbox[1]) * canvas.width;
-         const height = (bbox[2] - bbox[0]) * canvas.height;
-
-         // Draw bounding box
-         ctx.strokeStyle = "red";
-         ctx.lineWidth = 2;
-         ctx.strokeRect(x, y, width, height);
-
-         // Draw label
-         ctx.fillStyle = "red";
-         ctx.font = "16px Arial";
-         ctx.fillText(label, x, y > 10 ? y - 5 : y + 20);
-
-         // Speak detected object
-         speak(`Detected ${label}`);
-       }
-     });
-
-     requestAnimationFrame(processFrame);
-   }
-
-   processFrame();
- }
-
- startButton.addEventListener("click", async () => {
-   startButton.disabled = true;
-   const model = await loadModel();
-   await setupCamera();
-   detectObjects(model);
- });