Compare commits
8 Commits
master
...
feature/rt
Author | SHA1 | Date | |
---|---|---|---|
d2fa91dcf5 | |||
139cca44f5 | |||
75c8c99481 | |||
a0d6b7a73a | |||
adf9d8ef3e | |||
d50ac4ed64 | |||
ca38b7c7f4 | |||
99f5e148b9 |
@ -1,3 +1,66 @@
|
||||
@tailwind base;
|
||||
@tailwind components;
|
||||
@tailwind utilities;
|
||||
@layer base {
|
||||
:root {
|
||||
--background: 0 0% 100%;
|
||||
--foreground: 0 0% 3.9%;
|
||||
--card: 0 0% 100%;
|
||||
--card-foreground: 0 0% 3.9%;
|
||||
--popover: 0 0% 100%;
|
||||
--popover-foreground: 0 0% 3.9%;
|
||||
--primary: 0 0% 9%;
|
||||
--primary-foreground: 0 0% 98%;
|
||||
--secondary: 0 0% 96.1%;
|
||||
--secondary-foreground: 0 0% 9%;
|
||||
--muted: 0 0% 96.1%;
|
||||
--muted-foreground: 0 0% 45.1%;
|
||||
--accent: 0 0% 96.1%;
|
||||
--accent-foreground: 0 0% 9%;
|
||||
--destructive: 0 84.2% 60.2%;
|
||||
--destructive-foreground: 0 0% 98%;
|
||||
--border: 0 0% 89.8%;
|
||||
--input: 0 0% 89.8%;
|
||||
--ring: 0 0% 3.9%;
|
||||
--chart-1: 12 76% 61%;
|
||||
--chart-2: 173 58% 39%;
|
||||
--chart-3: 197 37% 24%;
|
||||
--chart-4: 43 74% 66%;
|
||||
--chart-5: 27 87% 67%;
|
||||
--radius: 0.5rem
|
||||
}
|
||||
.dark {
|
||||
--background: 0 0% 3.9%;
|
||||
--foreground: 0 0% 98%;
|
||||
--card: 0 0% 3.9%;
|
||||
--card-foreground: 0 0% 98%;
|
||||
--popover: 0 0% 3.9%;
|
||||
--popover-foreground: 0 0% 98%;
|
||||
--primary: 0 0% 98%;
|
||||
--primary-foreground: 0 0% 9%;
|
||||
--secondary: 0 0% 14.9%;
|
||||
--secondary-foreground: 0 0% 98%;
|
||||
--muted: 0 0% 14.9%;
|
||||
--muted-foreground: 0 0% 63.9%;
|
||||
--accent: 0 0% 14.9%;
|
||||
--accent-foreground: 0 0% 98%;
|
||||
--destructive: 0 62.8% 30.6%;
|
||||
--destructive-foreground: 0 0% 98%;
|
||||
--border: 0 0% 14.9%;
|
||||
--input: 0 0% 14.9%;
|
||||
--ring: 0 0% 83.1%;
|
||||
--chart-1: 220 70% 50%;
|
||||
--chart-2: 160 60% 45%;
|
||||
--chart-3: 30 80% 55%;
|
||||
--chart-4: 280 65% 60%;
|
||||
--chart-5: 340 75% 55%
|
||||
}
|
||||
}
|
||||
@layer base {
|
||||
* {
|
||||
@apply border-border;
|
||||
}
|
||||
body {
|
||||
@apply bg-background text-foreground;
|
||||
}
|
||||
}
|
||||
|
@ -1,6 +1,7 @@
|
||||
import type { Metadata } from "next";
|
||||
import { Geist, Geist_Mono } from "next/font/google";
|
||||
import "./globals.css";
|
||||
import { Toaster } from "@/components/ui/toaster";
|
||||
|
||||
const geistSans = Geist({
|
||||
variable: "--font-geist-sans",
|
||||
@ -28,6 +29,7 @@ export default function RootLayout({
|
||||
className={`${geistSans.variable} ${geistMono.variable} antialiased`}
|
||||
>
|
||||
{children}
|
||||
<Toaster />
|
||||
</body>
|
||||
</html>
|
||||
);
|
||||
|
21
components.json
Normal file
21
components.json
Normal file
@ -0,0 +1,21 @@
|
||||
{
|
||||
"$schema": "https://ui.shadcn.com/schema.json",
|
||||
"style": "default",
|
||||
"rsc": true,
|
||||
"tsx": true,
|
||||
"tailwind": {
|
||||
"config": "tailwind.config.ts",
|
||||
"css": "app/globals.css",
|
||||
"baseColor": "neutral",
|
||||
"cssVariables": true,
|
||||
"prefix": ""
|
||||
},
|
||||
"aliases": {
|
||||
"components": "@/components",
|
||||
"utils": "@/lib/utils",
|
||||
"ui": "@/components/ui",
|
||||
"lib": "@/lib",
|
||||
"hooks": "@/hooks"
|
||||
},
|
||||
"iconLibrary": "lucide"
|
||||
}
|
@ -2,10 +2,21 @@
|
||||
import React, { useState } from "react";
|
||||
import Register from "./register/Register";
|
||||
import Search from "./search/Search";
|
||||
import "./MainForm.css"; // Import CSS for styling
|
||||
import "./MainForm.css";
|
||||
import FaceLiveness from "./faceLivelinessCheck/FaceLivelinessCheck";
|
||||
import FaceMovementDetection from "./faceMovementDetection/FaceMovementDetection";
|
||||
import RealtimeCount from "./realtimeCount/RealtimeCount";
|
||||
import RealtimeDetection from "./realtimeDetection/RealtimeDetection";
|
||||
|
||||
const MainForm: React.FC = () => {
|
||||
const [activeTab, setActiveTab] = useState<"register" | "search">("register");
|
||||
const [activeTab, setActiveTab] = useState<
|
||||
| "register"
|
||||
| "search"
|
||||
| "realtime"
|
||||
| "liveliness"
|
||||
| "realtime-count"
|
||||
| "facemovement"
|
||||
>("register");
|
||||
|
||||
return (
|
||||
<div className="main-container">
|
||||
@ -22,10 +33,42 @@ const MainForm: React.FC = () => {
|
||||
>
|
||||
Search
|
||||
</button>
|
||||
<button
|
||||
className={`tab-button ${activeTab === "realtime" ? "active" : ""}`}
|
||||
onClick={() => setActiveTab("realtime")}
|
||||
>
|
||||
Realtime Detection
|
||||
</button>
|
||||
{/* <button
|
||||
className={`tab-button ${activeTab === "liveliness" ? "active" : ""}`}
|
||||
onClick={() => setActiveTab("liveliness")}
|
||||
>
|
||||
Liveliness Detection
|
||||
</button> */}
|
||||
<button
|
||||
className={`tab-button ${
|
||||
activeTab === "realtime-count" ? "active" : ""
|
||||
}`}
|
||||
onClick={() => setActiveTab("realtime-count")}
|
||||
>
|
||||
Realtime Count
|
||||
</button>
|
||||
<button
|
||||
className={`tab-button ${
|
||||
activeTab === "facemovement" ? "active" : ""
|
||||
}`}
|
||||
onClick={() => setActiveTab("facemovement")}
|
||||
>
|
||||
Face Movement Detection
|
||||
</button>
|
||||
</div>
|
||||
<div className="tab-content">
|
||||
{activeTab === "register" && <Register />}
|
||||
{activeTab === "search" && <Search />}
|
||||
{activeTab === "realtime" && <RealtimeDetection />}
|
||||
{activeTab === "liveliness" && <FaceLiveness />}
|
||||
{activeTab === "realtime-count" && <RealtimeCount />}
|
||||
{activeTab === "facemovement" && <FaceMovementDetection />}
|
||||
</div>
|
||||
</div>
|
||||
);
|
||||
|
192
components/faceLivelinessCheck/FaceLivelinessCheck.tsx
Normal file
192
components/faceLivelinessCheck/FaceLivelinessCheck.tsx
Normal file
@ -0,0 +1,192 @@
|
||||
/* eslint-disable @typescript-eslint/no-explicit-any */
|
||||
"use client";
|
||||
|
||||
import { useState, useRef, useEffect } from "react";
|
||||
import Webcam from "react-webcam";
|
||||
import { Card, CardContent } from "@/components/ui/card";
|
||||
import { Loader2 } from "lucide-react";
|
||||
import { useToast } from "@/hooks/use-toast";
|
||||
import * as faceapi from "face-api.js";
|
||||
|
||||
export default function FaceLiveness() {
|
||||
const webcamRef = useRef<Webcam>(null);
|
||||
const [isModelLoading, setIsModelLoading] = useState(true);
|
||||
const [isProcessing, setIsProcessing] = useState(false);
|
||||
const [previousExpressions, setPreviousExpressions] = useState<any>(null);
|
||||
const processingTimeoutRef = useRef<NodeJS.Timeout | null>(null);
|
||||
const { toast } = useToast();
|
||||
|
||||
useEffect(() => {
|
||||
const loadModels = async () => {
|
||||
try {
|
||||
const MODEL_URL =
|
||||
"https://justadudewhohacks.github.io/face-api.js/models";
|
||||
await Promise.all([
|
||||
faceapi.nets.tinyFaceDetector.loadFromUri(MODEL_URL),
|
||||
faceapi.nets.faceLandmark68Net.loadFromUri(MODEL_URL),
|
||||
faceapi.nets.faceExpressionNet.loadFromUri(MODEL_URL),
|
||||
]);
|
||||
setIsModelLoading(false);
|
||||
} catch (error) {
|
||||
console.error("Error loading models:", error);
|
||||
toast({
|
||||
title: "Error",
|
||||
description:
|
||||
"Failed to load face detection models. Please refresh the page.",
|
||||
variant: "destructive",
|
||||
});
|
||||
}
|
||||
};
|
||||
loadModels();
|
||||
}, [toast]);
|
||||
|
||||
const checkLiveness = (expressions: any, landmarks: any) => {
|
||||
if (!previousExpressions) {
|
||||
setPreviousExpressions(expressions);
|
||||
return false;
|
||||
}
|
||||
|
||||
// Check for expression changes
|
||||
const expressionThreshold = 0.1;
|
||||
let hasExpressionChange = false;
|
||||
for (const expression in expressions) {
|
||||
const diff = Math.abs(
|
||||
expressions[expression] - previousExpressions[expression]
|
||||
);
|
||||
if (diff > expressionThreshold) {
|
||||
hasExpressionChange = true;
|
||||
break;
|
||||
}
|
||||
}
|
||||
|
||||
// Check for natural facial movement using landmarks
|
||||
const eyeBlinkDetected = detectEyeBlink(landmarks);
|
||||
|
||||
setPreviousExpressions(expressions);
|
||||
return hasExpressionChange || eyeBlinkDetected;
|
||||
};
|
||||
|
||||
const detectEyeBlink = (landmarks: any) => {
|
||||
const leftEye = landmarks.getLeftEye();
|
||||
const rightEye = landmarks.getRightEye();
|
||||
|
||||
// Calculate eye aspect ratio
|
||||
const leftEAR = getEyeAspectRatio(leftEye);
|
||||
const rightEAR = getEyeAspectRatio(rightEye);
|
||||
|
||||
// If either eye is closed (low aspect ratio), consider it a blink
|
||||
const blinkThreshold = 0.2;
|
||||
return leftEAR < blinkThreshold || rightEAR < blinkThreshold;
|
||||
};
|
||||
|
||||
const getEyeAspectRatio = (eye: any) => {
|
||||
// Calculate the eye aspect ratio using the landmark points
|
||||
const height1 = distance(eye[1], eye[5]);
|
||||
const height2 = distance(eye[2], eye[4]);
|
||||
const width = distance(eye[0], eye[3]);
|
||||
return (height1 + height2) / (2.0 * width);
|
||||
};
|
||||
|
||||
const distance = (point1: any, point2: any) => {
|
||||
return Math.sqrt(
|
||||
Math.pow(point1.x - point2.x, 2) + Math.pow(point1.y - point2.y, 2)
|
||||
);
|
||||
};
|
||||
|
||||
useEffect(() => {
|
||||
const processFrame = async () => {
|
||||
if (!webcamRef.current || isProcessing || isModelLoading) return;
|
||||
|
||||
setIsProcessing(true);
|
||||
try {
|
||||
const imageSrc = webcamRef.current.getScreenshot();
|
||||
if (!imageSrc) return;
|
||||
|
||||
const img = new Image();
|
||||
img.src = imageSrc;
|
||||
await new Promise((resolve) => (img.onload = resolve));
|
||||
|
||||
const detections = await faceapi
|
||||
.detectAllFaces(img, new faceapi.TinyFaceDetectorOptions())
|
||||
.withFaceLandmarks()
|
||||
.withFaceExpressions();
|
||||
|
||||
if (detections.length > 0) {
|
||||
// Process each detected face with high confidence
|
||||
detections
|
||||
.filter((detection) => detection.detection.score > 0.7)
|
||||
.forEach((detection) => {
|
||||
const isLive = checkLiveness(
|
||||
detection.expressions,
|
||||
detection.landmarks
|
||||
);
|
||||
if (isLive) {
|
||||
toast({
|
||||
title: "Liveness Detected",
|
||||
description: "Real face detected with natural movements",
|
||||
});
|
||||
} else {
|
||||
toast({
|
||||
title: "Liveness Check",
|
||||
description: "Please move or blink naturally",
|
||||
variant: "destructive",
|
||||
});
|
||||
}
|
||||
});
|
||||
}
|
||||
} catch (error) {
|
||||
console.error("Processing error:", error);
|
||||
} finally {
|
||||
setIsProcessing(false);
|
||||
// Schedule next frame processing
|
||||
processingTimeoutRef.current = setTimeout(processFrame, 1000); // Process every second
|
||||
}
|
||||
};
|
||||
|
||||
if (!isModelLoading) {
|
||||
processFrame();
|
||||
}
|
||||
|
||||
return () => {
|
||||
if (processingTimeoutRef.current) {
|
||||
clearTimeout(processingTimeoutRef.current);
|
||||
}
|
||||
};
|
||||
// eslint-disable-next-line react-hooks/exhaustive-deps
|
||||
}, [isModelLoading, isProcessing, toast]);
|
||||
|
||||
if (isModelLoading) {
|
||||
return (
|
||||
<Card>
|
||||
<CardContent className="p-6 text-center">
|
||||
<Loader2 className="h-8 w-8 animate-spin mx-auto" />
|
||||
<p className="mt-2">Loading face detection models...</p>
|
||||
</CardContent>
|
||||
</Card>
|
||||
);
|
||||
}
|
||||
|
||||
return (
|
||||
<Card>
|
||||
<CardContent className="p-6">
|
||||
<div className="space-y-6">
|
||||
<div className="relative rounded-lg overflow-hidden bg-black">
|
||||
<Webcam
|
||||
ref={webcamRef}
|
||||
screenshotFormat="image/jpeg"
|
||||
className="w-full"
|
||||
videoConstraints={{
|
||||
width: 640,
|
||||
height: 480,
|
||||
facingMode: "user",
|
||||
}}
|
||||
/>
|
||||
</div>
|
||||
<p className="text-center text-sm text-muted-foreground">
|
||||
Move your face naturally or blink to verify liveness
|
||||
</p>
|
||||
</div>
|
||||
</CardContent>
|
||||
</Card>
|
||||
);
|
||||
}
|
28
components/faceMovementDetection/FaceMovementDetection.css
Normal file
28
components/faceMovementDetection/FaceMovementDetection.css
Normal file
@ -0,0 +1,28 @@
|
||||
.video-container {
|
||||
position: relative;
|
||||
width: 100%;
|
||||
max-width: 768px;
|
||||
margin: 0 auto;
|
||||
}
|
||||
|
||||
.webcam {
|
||||
width: 100%;
|
||||
border-radius: 8px;
|
||||
}
|
||||
|
||||
.alert-warning {
|
||||
display: flex;
|
||||
align-items: center;
|
||||
background-color: #facc15; /* Yellow */
|
||||
color: #000;
|
||||
padding: 12px;
|
||||
border-radius: 6px;
|
||||
margin-top: 16px;
|
||||
font-weight: bold;
|
||||
}
|
||||
|
||||
.button-container {
|
||||
margin-top: 24px;
|
||||
display: flex;
|
||||
justify-content: center;
|
||||
}
|
112
components/faceMovementDetection/FaceMovementDetection.tsx
Normal file
112
components/faceMovementDetection/FaceMovementDetection.tsx
Normal file
@ -0,0 +1,112 @@
|
||||
"use client";
|
||||
|
||||
import { useEffect, useRef, useState } from "react";
|
||||
import Webcam from "react-webcam";
|
||||
import * as faceapi from "face-api.js";
|
||||
import { Button } from "@/components/ui/button";
|
||||
import { AlertTriangle, Camera } from "lucide-react";
|
||||
import { useToast } from "@/hooks/use-toast";
|
||||
|
||||
const MODEL_URL = "https://cdn.jsdelivr.net/npm/@vladmandic/face-api/model";
|
||||
const CHECK_INTERVAL = 500;
|
||||
|
||||
const FaceMovementDetection = () => {
|
||||
const webcamRef = useRef<Webcam>(null);
|
||||
const [isModelLoaded, setIsModelLoaded] = useState(false);
|
||||
const [isDetecting, setIsDetecting] = useState(false);
|
||||
const prevBoxSizeRef = useRef<number | null>(null);
|
||||
const [movingForward, setMovingForward] = useState(false);
|
||||
const { toast } = useToast();
|
||||
|
||||
useEffect(() => {
|
||||
const loadModels = async () => {
|
||||
try {
|
||||
await Promise.all([
|
||||
faceapi.nets.tinyFaceDetector.loadFromUri(MODEL_URL),
|
||||
faceapi.nets.faceLandmark68Net.loadFromUri(MODEL_URL),
|
||||
]);
|
||||
setIsModelLoaded(true);
|
||||
toast({
|
||||
title: "Models Loaded",
|
||||
description: "Face detection models ready.",
|
||||
});
|
||||
} catch (error) {
|
||||
console.error("Error loading models:", error);
|
||||
toast({
|
||||
title: "Error",
|
||||
description: "Failed to load models.",
|
||||
variant: "destructive",
|
||||
});
|
||||
}
|
||||
};
|
||||
|
||||
loadModels();
|
||||
}, [toast]);
|
||||
|
||||
const detectMovement = async () => {
|
||||
if (!webcamRef.current?.video) return;
|
||||
|
||||
const video = webcamRef.current.video;
|
||||
const detections = await faceapi.detectSingleFace(
|
||||
video,
|
||||
new faceapi.TinyFaceDetectorOptions()
|
||||
);
|
||||
|
||||
console.log("prevBoxSizeRef:", prevBoxSizeRef.current);
|
||||
|
||||
if (detections) {
|
||||
const { width, height } = detections.box;
|
||||
const currentBoxSize = width * height;
|
||||
console.log("currentBoxSize:", currentBoxSize);
|
||||
|
||||
if (prevBoxSizeRef.current !== null) {
|
||||
const sizeIncrease = currentBoxSize - prevBoxSizeRef.current;
|
||||
console.log("Size Increase:", sizeIncrease);
|
||||
|
||||
if (sizeIncrease > 3000) {
|
||||
setMovingForward(true);
|
||||
// toast({
|
||||
// title: "Movement Detected",
|
||||
// description: "The person is moving closer!",
|
||||
// variant: "destructive",
|
||||
// });
|
||||
} else {
|
||||
setMovingForward(false);
|
||||
}
|
||||
}
|
||||
|
||||
prevBoxSizeRef.current = currentBoxSize; // Update ref directly
|
||||
}
|
||||
};
|
||||
|
||||
const startDetection = () => {
|
||||
if (!isModelLoaded) return;
|
||||
setIsDetecting(true);
|
||||
setInterval(detectMovement, CHECK_INTERVAL);
|
||||
};
|
||||
|
||||
return (
|
||||
<div className="max-w-3xl mx-auto">
|
||||
<div className="relative">
|
||||
<Webcam ref={webcamRef} mirrored className="w-full rounded-lg" />
|
||||
</div>
|
||||
{movingForward && (
|
||||
<div className="mt-4 p-3 bg-yellow-300 text-black rounded-md flex items-center">
|
||||
<AlertTriangle className="mr-2 h-5 w-5" />
|
||||
Person is moving forward!
|
||||
</div>
|
||||
)}
|
||||
<div className="mt-6 flex justify-center">
|
||||
<Button
|
||||
onClick={startDetection}
|
||||
disabled={!isModelLoaded || isDetecting}
|
||||
>
|
||||
<Camera className="mr-2 h-4 w-4" />
|
||||
{isDetecting ? "Detecting..." : "Start Movement Detection"}
|
||||
</Button>
|
||||
</div>
|
||||
</div>
|
||||
);
|
||||
};
|
||||
|
||||
export default FaceMovementDetection;
|
39
components/realtimeCount/RealtimeCount.tsx
Normal file
39
components/realtimeCount/RealtimeCount.tsx
Normal file
@ -0,0 +1,39 @@
|
||||
"use client";
|
||||
import React, { useState } from "react";
|
||||
import RegisterFaceCount from "./registerFaceCount/RegisterFaceCount";
|
||||
import HeadCount from "./headCount/HeadCount";
|
||||
|
||||
const RealtimeCount: React.FC = () => {
|
||||
const [activeTab, setActiveTab] = useState<
|
||||
"register-face-count" | "get-face-count"
|
||||
>("register-face-count");
|
||||
|
||||
return (
|
||||
<div className="main-container">
|
||||
<div className="tabs">
|
||||
<button
|
||||
className={`tab-button ${
|
||||
activeTab === "register-face-count" ? "active" : ""
|
||||
}`}
|
||||
onClick={() => setActiveTab("register-face-count")}
|
||||
>
|
||||
Register Face Count
|
||||
</button>
|
||||
<button
|
||||
className={`tab-button ${
|
||||
activeTab === "get-face-count" ? "active" : ""
|
||||
}`}
|
||||
onClick={() => setActiveTab("get-face-count")}
|
||||
>
|
||||
Get Face Count
|
||||
</button>
|
||||
</div>
|
||||
<div className="tab-content">
|
||||
{activeTab === "register-face-count" && <RegisterFaceCount />}
|
||||
{activeTab === "get-face-count" && <HeadCount />}
|
||||
</div>
|
||||
</div>
|
||||
);
|
||||
};
|
||||
|
||||
export default RealtimeCount;
|
120
components/realtimeCount/headCount/HeadCount.css
Normal file
120
components/realtimeCount/headCount/HeadCount.css
Normal file
@ -0,0 +1,120 @@
|
||||
.headcount-container {
|
||||
max-width: 500px;
|
||||
margin: 0 auto;
|
||||
padding: 20px;
|
||||
font-family: Arial, sans-serif;
|
||||
background-color: #f9f9f9;
|
||||
border-radius: 8px;
|
||||
box-shadow: 0 4px 8px rgba(0, 0, 0, 0.1);
|
||||
}
|
||||
|
||||
.heading {
|
||||
text-align: center;
|
||||
color: #333;
|
||||
margin-bottom: 20px;
|
||||
}
|
||||
|
||||
.form {
|
||||
display: flex;
|
||||
flex-direction: column;
|
||||
gap: 15px;
|
||||
}
|
||||
|
||||
.input-group {
|
||||
display: flex;
|
||||
flex-direction: column;
|
||||
gap: 5px;
|
||||
}
|
||||
|
||||
.label {
|
||||
font-size: 14px;
|
||||
color: #555;
|
||||
}
|
||||
|
||||
.input {
|
||||
padding: 10px;
|
||||
font-size: 16px;
|
||||
border-radius: 4px;
|
||||
border: 1px solid #ccc;
|
||||
outline: none;
|
||||
}
|
||||
|
||||
.button {
|
||||
padding: 10px;
|
||||
font-size: 16px;
|
||||
background-color: #007bff;
|
||||
color: #fff;
|
||||
border: none;
|
||||
border-radius: 4px;
|
||||
cursor: pointer;
|
||||
transition: background-color 0.3s ease;
|
||||
}
|
||||
|
||||
.button:disabled {
|
||||
background-color: #ccc;
|
||||
cursor: not-allowed;
|
||||
}
|
||||
|
||||
.error {
|
||||
color: red;
|
||||
text-align: center;
|
||||
margin-top: 10px;
|
||||
}
|
||||
|
||||
.names-container {
|
||||
margin-top: 20px;
|
||||
}
|
||||
|
||||
.sub-heading {
|
||||
color: #333;
|
||||
margin-bottom: 10px;
|
||||
}
|
||||
|
||||
.list {
|
||||
list-style: none;
|
||||
padding: 0;
|
||||
}
|
||||
|
||||
.list-item {
|
||||
padding: 10px;
|
||||
background-color: #fff;
|
||||
border: 1px solid #ddd;
|
||||
border-radius: 4px;
|
||||
margin-bottom: 5px;
|
||||
font-size: 14px;
|
||||
}
|
||||
.daily-counts-container {
|
||||
margin-top: 20px;
|
||||
padding: 15px;
|
||||
border-radius: 8px;
|
||||
background: #f8f9fa;
|
||||
box-shadow: 0px 4px 6px rgba(0, 0, 0, 0.1);
|
||||
}
|
||||
|
||||
.sub-heading {
|
||||
font-size: 1.5rem;
|
||||
margin-bottom: 10px;
|
||||
color: #333;
|
||||
}
|
||||
|
||||
.list {
|
||||
list-style-type: none;
|
||||
padding: 0;
|
||||
}
|
||||
|
||||
.list-item {
|
||||
font-size: 1.1rem;
|
||||
padding: 8px 0;
|
||||
border-bottom: 1px solid #ddd;
|
||||
display: flex;
|
||||
justify-content: space-between;
|
||||
}
|
||||
|
||||
.date {
|
||||
font-weight: bold;
|
||||
color: #007bff;
|
||||
}
|
||||
|
||||
.count {
|
||||
color: #555;
|
||||
}
|
183
components/realtimeCount/headCount/HeadCount.tsx
Normal file
183
components/realtimeCount/headCount/HeadCount.tsx
Normal file
@ -0,0 +1,183 @@
|
||||
import React, { useState } from "react";
|
||||
import "./HeadCount.css";
|
||||
|
||||
interface ApiResponse {
|
||||
total_unique_faces: number;
|
||||
daily_counts: { date: string; unique_faces: number }[];
|
||||
}
|
||||
|
||||
interface DateTimeRange {
|
||||
date: string;
|
||||
time: string;
|
||||
}
|
||||
|
||||
const HeadCount: React.FC = () => {
|
||||
const [from, setFrom] = useState<DateTimeRange>({ date: "", time: "" });
|
||||
const [to, setTo] = useState<DateTimeRange>({ date: "", time: "" });
|
||||
const [count, setCount] = useState<number | null>(null);
|
||||
const [loading, setLoading] = useState<boolean>(false);
|
||||
const [error, setError] = useState<string | null>(null);
|
||||
const [dailyCounts, setDailyCounts] = useState<
|
||||
{ date: string; count: number }[]
|
||||
>([]);
|
||||
|
||||
const handleInputChange = (
|
||||
e: React.ChangeEvent<HTMLInputElement>,
|
||||
field: "from" | "to"
|
||||
) => {
|
||||
const { name, value } = e.target;
|
||||
if (field === "from") {
|
||||
setFrom((prev) => ({ ...prev, [name]: value }));
|
||||
} else {
|
||||
setTo((prev) => ({ ...prev, [name]: value }));
|
||||
}
|
||||
};
|
||||
|
||||
const formatDateTime = (date: string, time: string) => {
|
||||
return `${date}T${time}:00+00:00`;
|
||||
};
|
||||
|
||||
const handleSubmit = async (e: React.FormEvent) => {
|
||||
e.preventDefault();
|
||||
|
||||
if (!from.date || !from.time || !to.date || !to.time) {
|
||||
setError("Please fill in all date and time fields.");
|
||||
return;
|
||||
}
|
||||
|
||||
setLoading(true);
|
||||
setError(null);
|
||||
|
||||
const start = formatDateTime(from.date, from.time);
|
||||
const end = formatDateTime(to.date, to.time);
|
||||
console.log(start, end);
|
||||
try {
|
||||
const response = await fetch(
|
||||
`${
|
||||
process.env.NEXT_PUBLIC_BASE_URL
|
||||
}/face/headcount?start_time=${encodeURIComponent(
|
||||
start
|
||||
)}&end_time=${encodeURIComponent(end)}`,
|
||||
{
|
||||
method: "GET",
|
||||
headers: {
|
||||
"Content-Type": "application/json",
|
||||
},
|
||||
}
|
||||
);
|
||||
|
||||
if (!response.ok) {
|
||||
throw new Error("Failed to fetch data");
|
||||
}
|
||||
|
||||
const data: ApiResponse = await response.json();
|
||||
setCount(data.total_unique_faces);
|
||||
if (data?.daily_counts) {
|
||||
setDailyCounts(
|
||||
data.daily_counts.map((d) => ({
|
||||
date: d.date,
|
||||
count: d.unique_faces,
|
||||
}))
|
||||
);
|
||||
}
|
||||
} catch (err) {
|
||||
setError("An error occurred while fetching data.");
|
||||
console.error(err);
|
||||
} finally {
|
||||
setLoading(false);
|
||||
}
|
||||
};
|
||||
|
||||
return (
|
||||
<div className="headcount-container">
|
||||
<h1 className="heading">Head Count</h1>
|
||||
<form onSubmit={handleSubmit} className="form">
|
||||
<div className="input-group">
|
||||
<label htmlFor="from-date" className="label">
|
||||
From Date:
|
||||
</label>
|
||||
<input
|
||||
type="date"
|
||||
id="from-date"
|
||||
name="date"
|
||||
value={from.date}
|
||||
onChange={(e) => handleInputChange(e, "from")}
|
||||
required
|
||||
className="input"
|
||||
/>
|
||||
</div>
|
||||
<div className="input-group">
|
||||
<label htmlFor="from-time" className="label">
|
||||
From Time:
|
||||
</label>
|
||||
<input
|
||||
type="time"
|
||||
id="from-time"
|
||||
name="time"
|
||||
value={from.time}
|
||||
onChange={(e) => handleInputChange(e, "from")}
|
||||
required
|
||||
className="input"
|
||||
/>
|
||||
</div>
|
||||
|
||||
<div className="input-group">
|
||||
<label htmlFor="to-date" className="label">
|
||||
To Date:
|
||||
</label>
|
||||
<input
|
||||
type="date"
|
||||
id="to-date"
|
||||
name="date"
|
||||
value={to.date}
|
||||
onChange={(e) => handleInputChange(e, "to")}
|
||||
required
|
||||
className="input"
|
||||
/>
|
||||
</div>
|
||||
<div className="input-group">
|
||||
<label htmlFor="to-time" className="label">
|
||||
To Time:
|
||||
</label>
|
||||
<input
|
||||
type="time"
|
||||
id="to-time"
|
||||
name="time"
|
||||
value={to.time}
|
||||
onChange={(e) => handleInputChange(e, "to")}
|
||||
required
|
||||
className="input"
|
||||
/>
|
||||
</div>
|
||||
|
||||
<button type="submit" disabled={loading} className="button">
|
||||
{loading ? "Submitting..." : "Submit"}
|
||||
</button>
|
||||
</form>
|
||||
|
||||
{error && <p className="error">{error}</p>}
|
||||
|
||||
{count && (
|
||||
<div className="names-container">
|
||||
<h2 className="sub-heading">Total Unique Face Count:</h2>
|
||||
<ul className="list">{count}</ul>
|
||||
</div>
|
||||
)}
|
||||
{dailyCounts?.length > 0 && (
|
||||
<div className="daily-counts-container">
|
||||
<h2 className="sub-heading">Daily Counts:</h2>
|
||||
<ul className="list">
|
||||
{dailyCounts.map((item, index) => (
|
||||
<li key={index} className="list-item">
|
||||
<span className="date">{item.date}:</span>
|
||||
<span className="count"> {item.count}</span>
|
||||
</li>
|
||||
))}
|
||||
</ul>
|
||||
</div>
|
||||
)}
|
||||
</div>
|
||||
);
|
||||
};
|
||||
|
||||
export default HeadCount;
|
166
components/realtimeCount/registerFaceCount/RegisterFaceCount.tsx
Normal file
166
components/realtimeCount/registerFaceCount/RegisterFaceCount.tsx
Normal file
@ -0,0 +1,166 @@
|
||||
import React from "react";
|
||||
import { useEffect, useRef, useState } from "react";
|
||||
import Webcam from "react-webcam";
|
||||
import * as faceapi from "face-api.js";
|
||||
import { Button } from "@/components/ui/button";
|
||||
import { Camera } from "lucide-react";
|
||||
import { useToast } from "@/hooks/use-toast";
|
||||
|
||||
const MODEL_URL = "https://cdn.jsdelivr.net/npm/@vladmandic/face-api/model";
|
||||
const PADDING = 60;
|
||||
|
||||
const RegisterFaceCount = () => {
|
||||
const webcamRef = useRef<Webcam>(null);
|
||||
const canvasRef = useRef<HTMLCanvasElement>(null);
|
||||
const [isModelLoaded, setIsModelLoaded] = useState(false);
|
||||
const [isDetecting, setIsDetecting] = useState(false);
|
||||
const { toast } = useToast();
|
||||
|
||||
useEffect(() => {
|
||||
const loadModels = async () => {
|
||||
try {
|
||||
await faceapi.nets.tinyFaceDetector.loadFromUri(MODEL_URL);
|
||||
await faceapi.nets.faceLandmark68Net.loadFromUri(MODEL_URL);
|
||||
await faceapi.nets.faceRecognitionNet.loadFromUri(MODEL_URL);
|
||||
setIsModelLoaded(true);
|
||||
} catch (error) {
|
||||
console.error("Error loading models:", error);
|
||||
toast({
|
||||
title: "Error",
|
||||
description: "Failed to load face detection models.",
|
||||
variant: "destructive",
|
||||
});
|
||||
}
|
||||
};
|
||||
loadModels();
|
||||
}, [toast]);
|
||||
|
||||
const extractFaceWithPadding = (
|
||||
video: HTMLVideoElement,
|
||||
box: faceapi.Box
|
||||
): HTMLCanvasElement => {
|
||||
const canvas = document.createElement("canvas");
|
||||
const context = canvas.getContext("2d");
|
||||
|
||||
// Calculate padded dimensions
|
||||
const x = Math.max(0, box.x - PADDING);
|
||||
const y = Math.max(0, box.y - PADDING);
|
||||
const width = Math.min(video.videoWidth - x, box.width + 2 * PADDING);
|
||||
const height = Math.min(video.videoHeight - y, box.height + 2 * PADDING);
|
||||
|
||||
canvas.width = width;
|
||||
canvas.height = height;
|
||||
|
||||
if (context) {
|
||||
context.drawImage(video, x, y, width, height, 0, 0, width, height);
|
||||
}
|
||||
|
||||
return canvas;
|
||||
};
|
||||
|
||||
const detectFace = async () => {
|
||||
if (!webcamRef.current?.video || !canvasRef.current) return;
|
||||
|
||||
const video = webcamRef.current.video;
|
||||
const canvas = canvasRef.current;
|
||||
const context = canvas.getContext("2d");
|
||||
|
||||
if (!context) return;
|
||||
|
||||
canvas.width = video.videoWidth;
|
||||
canvas.height = video.videoHeight;
|
||||
context.clearRect(0, 0, canvas.width, canvas.height);
|
||||
context.translate(canvas.width, 0);
|
||||
context.scale(-1, 1);
|
||||
|
||||
const detections = await faceapi
|
||||
.detectAllFaces(video, new faceapi.TinyFaceDetectorOptions())
|
||||
.withFaceLandmarks()
|
||||
.withFaceDescriptors();
|
||||
|
||||
if (detections.length > 0) {
|
||||
const highConfidenceDetections = detections.filter(
|
||||
(detection) => detection.detection.score > 0.5
|
||||
);
|
||||
|
||||
for (const detection of highConfidenceDetections) {
|
||||
const { box } = detection.detection;
|
||||
context.strokeStyle = "#00FF00";
|
||||
context.lineWidth = 2;
|
||||
context.strokeRect(box.x, box.y, box.width, box.height);
|
||||
context.save();
|
||||
context.scale(-1, 1);
|
||||
context.fillStyle = "#00FF00";
|
||||
context.font = "16px Arial";
|
||||
context.fillText(
|
||||
`Confidence: ${Math.round(detection.detection.score * 100)}%`,
|
||||
-box.x - box.width,
|
||||
box.y - 5
|
||||
);
|
||||
context.restore();
|
||||
|
||||
const faceCanvas = extractFaceWithPadding(video, box);
|
||||
faceCanvas.toBlob(
|
||||
(blob) => {
|
||||
if (blob) sendFaceDataToAPI(blob);
|
||||
},
|
||||
"image/jpeg",
|
||||
0.95
|
||||
);
|
||||
}
|
||||
}
|
||||
};
|
||||
|
||||
const sendFaceDataToAPI = async (imageBlob: Blob) => {
|
||||
try {
|
||||
const formData = new FormData();
|
||||
formData.append("image", imageBlob, "face.jpg");
|
||||
|
||||
const response = await fetch(
|
||||
`${process.env.NEXT_PUBLIC_BASE_URL}/face/search`,
|
||||
{
|
||||
method: "POST",
|
||||
body: formData,
|
||||
}
|
||||
);
|
||||
|
||||
const data = await response.json();
|
||||
toast({ title: data?.name, description: data.message });
|
||||
} catch (error) {
|
||||
console.error("Error sending face data:", error);
|
||||
toast({
|
||||
title: "Error",
|
||||
description: "Failed to send face data.",
|
||||
variant: "destructive",
|
||||
});
|
||||
}
|
||||
};
|
||||
|
||||
const startDetection = () => {
|
||||
if (!isModelLoaded) return;
|
||||
setIsDetecting(true);
|
||||
setInterval(detectFace, 1000);
|
||||
};
|
||||
return (
|
||||
<div className="max-w-3xl mx-auto">
|
||||
<div className="relative">
|
||||
<Webcam ref={webcamRef} mirrored className="w-full rounded-lg" />
|
||||
<canvas
|
||||
ref={canvasRef}
|
||||
className="absolute top-0 left-0 w-full h-full"
|
||||
/>
|
||||
</div>
|
||||
<div className="mt-6 flex justify-center">
|
||||
<Button
|
||||
onClick={startDetection}
|
||||
disabled={!isModelLoaded || isDetecting}
|
||||
>
|
||||
<Camera className="mr-2 h-4 w-4" />
|
||||
{isDetecting ? "Detecting..." : "Start Realtime Detection"}
|
||||
</Button>
|
||||
</div>
|
||||
</div>
|
||||
);
|
||||
};
|
||||
|
||||
export default RegisterFaceCount;
|
33
components/realtimeDetection/RealtimeDetection.tsx
Normal file
33
components/realtimeDetection/RealtimeDetection.tsx
Normal file
@ -0,0 +1,33 @@
|
||||
"use client";
|
||||
import React, { useState } from "react";
|
||||
import WebcamDetection from "./webcam/Webcam";
|
||||
import RtspStream from "./rtspStream/RtspStream";
|
||||
|
||||
const RealtimeDetection: React.FC = () => {
|
||||
const [activeTab, setActiveTab] = useState<"webcam" | "rtsp">("webcam");
|
||||
|
||||
return (
|
||||
<div className="main-container">
|
||||
<div className="tabs">
|
||||
<button
|
||||
className={`tab-button ${activeTab === "webcam" ? "active" : ""}`}
|
||||
onClick={() => setActiveTab("webcam")}
|
||||
>
|
||||
Webcam
|
||||
</button>
|
||||
<button
|
||||
className={`tab-button ${activeTab === "rtsp" ? "active" : ""}`}
|
||||
onClick={() => setActiveTab("rtsp")}
|
||||
>
|
||||
RTSP
|
||||
</button>
|
||||
</div>
|
||||
<div className="tab-content">
|
||||
{activeTab === "webcam" && <WebcamDetection />}
|
||||
{activeTab === "rtsp" && <RtspStream />}
|
||||
</div>
|
||||
</div>
|
||||
);
|
||||
};
|
||||
|
||||
export default RealtimeDetection;
|
270
components/realtimeDetection/rtspStream/RtspStream.tsx
Normal file
270
components/realtimeDetection/rtspStream/RtspStream.tsx
Normal file
@ -0,0 +1,270 @@
|
||||
import React, { useState, useEffect, useRef } from "react";
|
||||
import Hls from "hls.js";
|
||||
import * as faceapi from "face-api.js";
|
||||
import { Button } from "@/components/ui/button";
|
||||
import { Camera } from "lucide-react";
|
||||
import { useToast } from "@/hooks/use-toast";
|
||||
|
||||
const MODEL_URL = "https://cdn.jsdelivr.net/npm/@vladmandic/face-api/model";
|
||||
const PADDING = 60;
|
||||
const API_URL = "http://localhost:8081/start";
|
||||
|
||||
const RtspStream: React.FC = () => {
|
||||
const [rtspUrl, setRtspUrl] = useState<string>("");
|
||||
const [cameraName, setCameraName] = useState<string>("");
|
||||
const [m3u8Url, setM3u8Url] = useState<string | null>(null);
|
||||
const [loading, setLoading] = useState<boolean>(false);
|
||||
const [isModelLoaded, setIsModelLoaded] = useState(false);
|
||||
const [isDetecting, setIsDetecting] = useState(false);
|
||||
const videoRef = useRef<HTMLVideoElement | null>(null);
|
||||
const canvasRef = useRef<HTMLCanvasElement>(null);
|
||||
const detectionIntervalRef = useRef<ReturnType<typeof setInterval> | null>(
|
||||
null
|
||||
);
|
||||
const { toast } = useToast();
|
||||
|
||||
useEffect(() => {
|
||||
const loadModels = async () => {
|
||||
try {
|
||||
await faceapi.nets.tinyFaceDetector.loadFromUri(MODEL_URL);
|
||||
await faceapi.nets.faceLandmark68Net.loadFromUri(MODEL_URL);
|
||||
await faceapi.nets.faceRecognitionNet.loadFromUri(MODEL_URL);
|
||||
setIsModelLoaded(true);
|
||||
} catch (error) {
|
||||
console.error("Error loading models:", error);
|
||||
toast({
|
||||
title: "Error",
|
||||
description: "Failed to load face detection models.",
|
||||
variant: "destructive",
|
||||
});
|
||||
}
|
||||
};
|
||||
loadModels();
|
||||
}, [toast]);
|
||||
|
||||
useEffect(() => {
|
||||
if (m3u8Url && videoRef.current) {
|
||||
if (Hls.isSupported()) {
|
||||
const hls = new Hls();
|
||||
hls.loadSource(m3u8Url);
|
||||
hls.attachMedia(videoRef.current);
|
||||
} else if (
|
||||
videoRef.current.canPlayType("application/vnd.apple.mpegurl")
|
||||
) {
|
||||
videoRef.current.src = m3u8Url;
|
||||
}
|
||||
}
|
||||
}, [m3u8Url]);
|
||||
|
||||
const extractFaceWithPadding = (
|
||||
video: HTMLVideoElement,
|
||||
box: faceapi.Box
|
||||
): HTMLCanvasElement => {
|
||||
const canvas = document.createElement("canvas");
|
||||
const context = canvas.getContext("2d");
|
||||
|
||||
const x = Math.max(0, box.x - PADDING);
|
||||
const y = Math.max(0, box.y - PADDING);
|
||||
const width = Math.min(video.videoWidth - x, box.width + 2 * PADDING);
|
||||
const height = Math.min(video.videoHeight - y, box.height + 2 * PADDING);
|
||||
|
||||
canvas.width = width;
|
||||
canvas.height = height;
|
||||
|
||||
if (context) {
|
||||
context.drawImage(video, x, y, width, height, 0, 0, width, height);
|
||||
}
|
||||
|
||||
return canvas;
|
||||
};
|
||||
|
||||
const detectFace = async () => {
|
||||
if (!videoRef.current || !canvasRef.current || !videoRef.current.videoWidth)
|
||||
return;
|
||||
|
||||
const video = videoRef.current;
|
||||
const canvas = canvasRef.current;
|
||||
const context = canvas.getContext("2d");
|
||||
|
||||
if (!context) return;
|
||||
|
||||
canvas.width = video.videoWidth;
|
||||
canvas.height = video.videoHeight;
|
||||
context.clearRect(0, 0, canvas.width, canvas.height);
|
||||
|
||||
const detections = await faceapi
|
||||
.detectAllFaces(video, new faceapi.TinyFaceDetectorOptions())
|
||||
.withFaceLandmarks()
|
||||
.withFaceDescriptors();
|
||||
|
||||
if (detections.length > 0) {
|
||||
const highConfidenceDetections = detections.filter(
|
||||
(detection) => detection.detection.score > 0.5
|
||||
);
|
||||
|
||||
for (const detection of highConfidenceDetections) {
|
||||
const { box } = detection.detection;
|
||||
context.strokeStyle = "#00FF00";
|
||||
context.lineWidth = 2;
|
||||
context.strokeRect(box.x, box.y, box.width, box.height);
|
||||
context.fillStyle = "#00FF00";
|
||||
context.font = "16px Arial";
|
||||
context.fillText(
|
||||
`Confidence: ${Math.round(detection.detection.score * 100)}%`,
|
||||
box.x,
|
||||
box.y - 5
|
||||
);
|
||||
|
||||
const faceCanvas = extractFaceWithPadding(video, box);
|
||||
faceCanvas.toBlob(
|
||||
(blob) => {
|
||||
if (blob) sendFaceDataToAPI(blob);
|
||||
},
|
||||
"image/jpeg",
|
||||
0.95
|
||||
);
|
||||
}
|
||||
}
|
||||
};
|
||||
|
||||
const sendFaceDataToAPI = async (imageBlob: Blob) => {
|
||||
try {
|
||||
const formData = new FormData();
|
||||
formData.append("image", imageBlob, "face.jpg");
|
||||
|
||||
const response = await fetch(
|
||||
`${process.env.NEXT_PUBLIC_BASE_URL}/search`,
|
||||
{
|
||||
method: "POST",
|
||||
body: formData,
|
||||
}
|
||||
);
|
||||
|
||||
const data = await response.json();
|
||||
toast({ title: data?.name, description: data.message });
|
||||
} catch (error) {
|
||||
console.error("Error sending face data:", error);
|
||||
toast({
|
||||
title: "Error",
|
||||
description: "Failed to send face data.",
|
||||
variant: "destructive",
|
||||
});
|
||||
}
|
||||
};
|
||||
|
||||
const startDetection = () => {
|
||||
if (!isModelLoaded || !videoRef.current) return;
|
||||
console.log("Starting detection...");
|
||||
setIsDetecting(true);
|
||||
detectionIntervalRef.current = setInterval(detectFace, 1000);
|
||||
};
|
||||
|
||||
const stopDetection = () => {
|
||||
if (detectionIntervalRef.current) {
|
||||
clearInterval(detectionIntervalRef.current);
|
||||
}
|
||||
setIsDetecting(false);
|
||||
if (canvasRef.current) {
|
||||
const context = canvasRef.current.getContext("2d");
|
||||
if (context) {
|
||||
context.clearRect(
|
||||
0,
|
||||
0,
|
||||
canvasRef.current.width,
|
||||
canvasRef.current.height
|
||||
);
|
||||
}
|
||||
}
|
||||
};
|
||||
|
||||
const handleSubmit = async (e: React.FormEvent) => {
|
||||
e.preventDefault();
|
||||
setLoading(true);
|
||||
stopDetection(); // Stop any ongoing detection
|
||||
|
||||
try {
|
||||
const response = await fetch(API_URL, {
|
||||
method: "POST",
|
||||
headers: { "Content-Type": "application/json" },
|
||||
body: JSON.stringify({
|
||||
uri: rtspUrl,
|
||||
alias: cameraName,
|
||||
}),
|
||||
});
|
||||
|
||||
if (!response.ok) {
|
||||
throw new Error("Failed to fetch stream URL");
|
||||
}
|
||||
|
||||
const data = await response.json();
|
||||
setM3u8Url(`http://localhost:8081${data?.uri}`);
|
||||
console.log("isModelLoaded", isModelLoaded);
|
||||
console.log("m3u8Url", m3u8Url);
|
||||
} catch (error) {
|
||||
console.error("Error fetching stream:", error);
|
||||
toast({
|
||||
title: "Error",
|
||||
description: "Failed to load stream.",
|
||||
variant: "destructive",
|
||||
});
|
||||
} finally {
|
||||
setLoading(false);
|
||||
}
|
||||
};
|
||||
|
||||
return (
|
||||
<div className="max-w-3xl mx-auto p-4">
|
||||
<h2 className="text-2xl font-bold mb-4">
|
||||
RTSP Stream with Face Detection
|
||||
</h2>
|
||||
<form onSubmit={handleSubmit} className="space-y-4 mb-6">
|
||||
<input
|
||||
type="text"
|
||||
value={rtspUrl}
|
||||
onChange={(e) => setRtspUrl(e.target.value)}
|
||||
placeholder="Enter RTSP URL"
|
||||
className="w-full p-2 border rounded"
|
||||
required
|
||||
/>
|
||||
<input
|
||||
type="text"
|
||||
value={cameraName}
|
||||
onChange={(e) => setCameraName(e.target.value)}
|
||||
placeholder="Enter Camera Name"
|
||||
className="w-full p-2 border rounded"
|
||||
required
|
||||
/>
|
||||
<Button type="submit" disabled={loading} className="w-full">
|
||||
{loading ? "Starting stream..." : "Start Stream"}
|
||||
</Button>
|
||||
</form>
|
||||
|
||||
{m3u8Url && !loading && (
|
||||
<div className="relative">
|
||||
<video
|
||||
ref={videoRef}
|
||||
controls
|
||||
autoPlay
|
||||
className="w-full rounded-lg"
|
||||
/>
|
||||
<canvas
|
||||
ref={canvasRef}
|
||||
className="absolute top-0 left-0 w-full h-full z-0 pointer-events-none"
|
||||
/>
|
||||
|
||||
<div className="mt-4 flex justify-center">
|
||||
<Button
|
||||
onClick={isDetecting ? stopDetection : startDetection}
|
||||
disabled={!isModelLoaded || !m3u8Url}
|
||||
>
|
||||
<Camera className="mr-2 h-4 w-4" />
|
||||
{isDetecting ? "Stop Detection" : "Start Detection"}
|
||||
</Button>
|
||||
</div>
|
||||
</div>
|
||||
)}
|
||||
</div>
|
||||
);
|
||||
};
|
||||
|
||||
export default RtspStream;
|
167
components/realtimeDetection/webcam/Webcam.tsx
Normal file
167
components/realtimeDetection/webcam/Webcam.tsx
Normal file
@ -0,0 +1,167 @@
|
||||
"use client";
|
||||
|
||||
import { useEffect, useRef, useState } from "react";
|
||||
import Webcam from "react-webcam";
|
||||
import * as faceapi from "face-api.js";
|
||||
import { Button } from "@/components/ui/button";
|
||||
import { Camera } from "lucide-react";
|
||||
import { useToast } from "@/hooks/use-toast";
|
||||
|
||||
const MODEL_URL = "https://cdn.jsdelivr.net/npm/@vladmandic/face-api/model";
|
||||
const PADDING = 60;
|
||||
|
||||
const WebcamDetection = () => {
|
||||
const webcamRef = useRef<Webcam>(null);
|
||||
const canvasRef = useRef<HTMLCanvasElement>(null);
|
||||
const [isModelLoaded, setIsModelLoaded] = useState(false);
|
||||
const [isDetecting, setIsDetecting] = useState(false);
|
||||
const { toast } = useToast();
|
||||
|
||||
useEffect(() => {
|
||||
const loadModels = async () => {
|
||||
try {
|
||||
await faceapi.nets.tinyFaceDetector.loadFromUri(MODEL_URL);
|
||||
await faceapi.nets.faceLandmark68Net.loadFromUri(MODEL_URL);
|
||||
await faceapi.nets.faceRecognitionNet.loadFromUri(MODEL_URL);
|
||||
setIsModelLoaded(true);
|
||||
} catch (error) {
|
||||
console.error("Error loading models:", error);
|
||||
toast({
|
||||
title: "Error",
|
||||
description: "Failed to load face detection models.",
|
||||
variant: "destructive",
|
||||
});
|
||||
}
|
||||
};
|
||||
loadModels();
|
||||
}, [toast]);
|
||||
|
||||
const extractFaceWithPadding = (
|
||||
video: HTMLVideoElement,
|
||||
box: faceapi.Box
|
||||
): HTMLCanvasElement => {
|
||||
const canvas = document.createElement("canvas");
|
||||
const context = canvas.getContext("2d");
|
||||
|
||||
// Calculate padded dimensions
|
||||
const x = Math.max(0, box.x - PADDING);
|
||||
const y = Math.max(0, box.y - PADDING);
|
||||
const width = Math.min(video.videoWidth - x, box.width + 2 * PADDING);
|
||||
const height = Math.min(video.videoHeight - y, box.height + 2 * PADDING);
|
||||
|
||||
canvas.width = width;
|
||||
canvas.height = height;
|
||||
|
||||
if (context) {
|
||||
context.drawImage(video, x, y, width, height, 0, 0, width, height);
|
||||
}
|
||||
|
||||
return canvas;
|
||||
};
|
||||
|
||||
const detectFace = async () => {
|
||||
if (!webcamRef.current?.video || !canvasRef.current) return;
|
||||
|
||||
const video = webcamRef.current.video;
|
||||
const canvas = canvasRef.current;
|
||||
const context = canvas.getContext("2d");
|
||||
|
||||
if (!context) return;
|
||||
|
||||
canvas.width = video.videoWidth;
|
||||
canvas.height = video.videoHeight;
|
||||
context.clearRect(0, 0, canvas.width, canvas.height);
|
||||
context.translate(canvas.width, 0);
|
||||
context.scale(-1, 1);
|
||||
|
||||
const detections = await faceapi
|
||||
.detectAllFaces(video, new faceapi.TinyFaceDetectorOptions())
|
||||
.withFaceLandmarks()
|
||||
.withFaceDescriptors();
|
||||
|
||||
if (detections.length > 0) {
|
||||
const highConfidenceDetections = detections.filter(
|
||||
(detection) => detection.detection.score > 0.5
|
||||
);
|
||||
|
||||
for (const detection of highConfidenceDetections) {
|
||||
const { box } = detection.detection;
|
||||
context.strokeStyle = "#00FF00";
|
||||
context.lineWidth = 2;
|
||||
context.strokeRect(box.x, box.y, box.width, box.height);
|
||||
context.save();
|
||||
context.scale(-1, 1);
|
||||
context.fillStyle = "#00FF00";
|
||||
context.font = "16px Arial";
|
||||
context.fillText(
|
||||
`Confidence: ${Math.round(detection.detection.score * 100)}%`,
|
||||
-box.x - box.width,
|
||||
box.y - 5
|
||||
);
|
||||
context.restore();
|
||||
|
||||
const faceCanvas = extractFaceWithPadding(video, box);
|
||||
faceCanvas.toBlob(
|
||||
(blob) => {
|
||||
if (blob) sendFaceDataToAPI(blob);
|
||||
},
|
||||
"image/jpeg",
|
||||
0.95
|
||||
);
|
||||
}
|
||||
}
|
||||
};
|
||||
|
||||
const sendFaceDataToAPI = async (imageBlob: Blob) => {
|
||||
try {
|
||||
const formData = new FormData();
|
||||
formData.append("image", imageBlob, "face.jpg");
|
||||
|
||||
const response = await fetch(
|
||||
`${process.env.NEXT_PUBLIC_BASE_URL}/search`,
|
||||
{
|
||||
method: "POST",
|
||||
body: formData,
|
||||
}
|
||||
);
|
||||
|
||||
const data = await response.json();
|
||||
toast({ title: data?.name, description: data.message });
|
||||
} catch (error) {
|
||||
console.error("Error sending face data:", error);
|
||||
toast({
|
||||
title: "Error",
|
||||
description: "Failed to send face data.",
|
||||
variant: "destructive",
|
||||
});
|
||||
}
|
||||
};
|
||||
|
||||
const startDetection = () => {
|
||||
if (!isModelLoaded) return;
|
||||
setIsDetecting(true);
|
||||
setInterval(detectFace, 1000);
|
||||
};
|
||||
return (
|
||||
<div className="max-w-3xl mx-auto">
|
||||
<div className="relative">
|
||||
<Webcam ref={webcamRef} mirrored className="w-full rounded-lg" />
|
||||
<canvas
|
||||
ref={canvasRef}
|
||||
className="absolute top-0 left-0 w-full h-full"
|
||||
/>
|
||||
</div>
|
||||
<div className="mt-6 flex justify-center">
|
||||
<Button
|
||||
onClick={startDetection}
|
||||
disabled={!isModelLoaded || isDetecting}
|
||||
>
|
||||
<Camera className="mr-2 h-4 w-4" />
|
||||
{isDetecting ? "Detecting..." : "Start Realtime Detection"}
|
||||
</Button>
|
||||
</div>
|
||||
</div>
|
||||
);
|
||||
};
|
||||
|
||||
export default WebcamDetection;
|
167
components/realtimeFaceDetection/RealtimeFaceDetection.tsx
Normal file
167
components/realtimeFaceDetection/RealtimeFaceDetection.tsx
Normal file
@ -0,0 +1,167 @@
|
||||
"use client";
|
||||
|
||||
import { useEffect, useRef, useState } from "react";
|
||||
import Webcam from "react-webcam";
|
||||
import * as faceapi from "face-api.js";
|
||||
import { Button } from "@/components/ui/button";
|
||||
import { Camera } from "lucide-react";
|
||||
import { useToast } from "@/hooks/use-toast";
|
||||
|
||||
const MODEL_URL = "https://cdn.jsdelivr.net/npm/@vladmandic/face-api/model";
|
||||
const PADDING = 60;
|
||||
|
||||
const RealtimeFaceDetection = () => {
|
||||
const webcamRef = useRef<Webcam>(null);
|
||||
const canvasRef = useRef<HTMLCanvasElement>(null);
|
||||
const [isModelLoaded, setIsModelLoaded] = useState(false);
|
||||
const [isDetecting, setIsDetecting] = useState(false);
|
||||
const { toast } = useToast();
|
||||
|
||||
useEffect(() => {
|
||||
const loadModels = async () => {
|
||||
try {
|
||||
await faceapi.nets.tinyFaceDetector.loadFromUri(MODEL_URL);
|
||||
await faceapi.nets.faceLandmark68Net.loadFromUri(MODEL_URL);
|
||||
await faceapi.nets.faceRecognitionNet.loadFromUri(MODEL_URL);
|
||||
setIsModelLoaded(true);
|
||||
} catch (error) {
|
||||
console.error("Error loading models:", error);
|
||||
toast({
|
||||
title: "Error",
|
||||
description: "Failed to load face detection models.",
|
||||
variant: "destructive",
|
||||
});
|
||||
}
|
||||
};
|
||||
loadModels();
|
||||
}, [toast]);
|
||||
|
||||
const extractFaceWithPadding = (
|
||||
video: HTMLVideoElement,
|
||||
box: faceapi.Box
|
||||
): HTMLCanvasElement => {
|
||||
const canvas = document.createElement("canvas");
|
||||
const context = canvas.getContext("2d");
|
||||
|
||||
// Calculate padded dimensions
|
||||
const x = Math.max(0, box.x - PADDING);
|
||||
const y = Math.max(0, box.y - PADDING);
|
||||
const width = Math.min(video.videoWidth - x, box.width + 2 * PADDING);
|
||||
const height = Math.min(video.videoHeight - y, box.height + 2 * PADDING);
|
||||
|
||||
canvas.width = width;
|
||||
canvas.height = height;
|
||||
|
||||
if (context) {
|
||||
context.drawImage(video, x, y, width, height, 0, 0, width, height);
|
||||
}
|
||||
|
||||
return canvas;
|
||||
};
|
||||
|
||||
const detectFace = async () => {
|
||||
if (!webcamRef.current?.video || !canvasRef.current) return;
|
||||
|
||||
const video = webcamRef.current.video;
|
||||
const canvas = canvasRef.current;
|
||||
const context = canvas.getContext("2d");
|
||||
|
||||
if (!context) return;
|
||||
|
||||
canvas.width = video.videoWidth;
|
||||
canvas.height = video.videoHeight;
|
||||
context.clearRect(0, 0, canvas.width, canvas.height);
|
||||
context.translate(canvas.width, 0);
|
||||
context.scale(-1, 1);
|
||||
|
||||
const detections = await faceapi
|
||||
.detectAllFaces(video, new faceapi.TinyFaceDetectorOptions())
|
||||
.withFaceLandmarks()
|
||||
.withFaceDescriptors();
|
||||
|
||||
if (detections.length > 0) {
|
||||
const highConfidenceDetections = detections.filter(
|
||||
(detection) => detection.detection.score > 0.5
|
||||
);
|
||||
|
||||
for (const detection of highConfidenceDetections) {
|
||||
const { box } = detection.detection;
|
||||
context.strokeStyle = "#00FF00";
|
||||
context.lineWidth = 2;
|
||||
context.strokeRect(box.x, box.y, box.width, box.height);
|
||||
context.save();
|
||||
context.scale(-1, 1);
|
||||
context.fillStyle = "#00FF00";
|
||||
context.font = "16px Arial";
|
||||
context.fillText(
|
||||
`Confidence: ${Math.round(detection.detection.score * 100)}%`,
|
||||
-box.x - box.width,
|
||||
box.y - 5
|
||||
);
|
||||
context.restore();
|
||||
|
||||
const faceCanvas = extractFaceWithPadding(video, box);
|
||||
faceCanvas.toBlob(
|
||||
(blob) => {
|
||||
if (blob) sendFaceDataToAPI(blob);
|
||||
},
|
||||
"image/jpeg",
|
||||
0.95
|
||||
);
|
||||
}
|
||||
}
|
||||
};
|
||||
|
||||
const sendFaceDataToAPI = async (imageBlob: Blob) => {
|
||||
try {
|
||||
const formData = new FormData();
|
||||
formData.append("image", imageBlob, "face.jpg");
|
||||
|
||||
const response = await fetch(
|
||||
`${process.env.NEXT_PUBLIC_BASE_URL}/search`,
|
||||
{
|
||||
method: "POST",
|
||||
body: formData,
|
||||
}
|
||||
);
|
||||
|
||||
const data = await response.json();
|
||||
toast({ title: data?.name, description: data.message });
|
||||
} catch (error) {
|
||||
console.error("Error sending face data:", error);
|
||||
toast({
|
||||
title: "Error",
|
||||
description: "Failed to send face data.",
|
||||
variant: "destructive",
|
||||
});
|
||||
}
|
||||
};
|
||||
|
||||
const startDetection = () => {
|
||||
if (!isModelLoaded) return;
|
||||
setIsDetecting(true);
|
||||
setInterval(detectFace, 1000);
|
||||
};
|
||||
return (
|
||||
<div className="max-w-3xl mx-auto">
|
||||
<div className="relative">
|
||||
<Webcam ref={webcamRef} mirrored className="w-full rounded-lg" />
|
||||
<canvas
|
||||
ref={canvasRef}
|
||||
className="absolute top-0 left-0 w-full h-full"
|
||||
/>
|
||||
</div>
|
||||
<div className="mt-6 flex justify-center">
|
||||
<Button
|
||||
onClick={startDetection}
|
||||
disabled={!isModelLoaded || isDetecting}
|
||||
>
|
||||
<Camera className="mr-2 h-4 w-4" />
|
||||
{isDetecting ? "Detecting..." : "Start Realtime Detection"}
|
||||
</Button>
|
||||
</div>
|
||||
</div>
|
||||
);
|
||||
};
|
||||
|
||||
export default RealtimeFaceDetection;
|
@ -1,144 +1,8 @@
|
||||
// "use client";
|
||||
// import React, { useRef, useState, useEffect } from "react";
|
||||
// import "./Register.css"; // Import CSS for styling
|
||||
|
||||
// const Register: React.FC = () => {
|
||||
// const videoRef = useRef<HTMLVideoElement>(null);
|
||||
// const canvasRef = useRef<HTMLCanvasElement>(null);
|
||||
// const [name, setName] = useState<string>("");
|
||||
// const [error, setError] = useState<string>(""); // State for error message
|
||||
|
||||
// // Automatically open the camera when the component mounts
|
||||
// useEffect(() => {
|
||||
// const openCamera = async () => {
|
||||
// try {
|
||||
// const stream = await navigator.mediaDevices.getUserMedia({
|
||||
// video: true,
|
||||
// });
|
||||
// if (videoRef.current) {
|
||||
// videoRef.current.srcObject = stream;
|
||||
// }
|
||||
// } catch (err) {
|
||||
// console.error("Error accessing the camera", err);
|
||||
// }
|
||||
// };
|
||||
|
||||
// openCamera();
|
||||
// }, []);
|
||||
|
||||
// // Capture image from video stream
|
||||
// const captureImage = () => {
|
||||
// if (!name.trim()) {
|
||||
// setError("Name is required"); // Set error message if name is empty
|
||||
// return;
|
||||
// }
|
||||
|
||||
// const video = videoRef.current;
|
||||
// const canvas = canvasRef.current;
|
||||
|
||||
// if (video && canvas) {
|
||||
// const context = canvas.getContext("2d");
|
||||
// if (context) {
|
||||
// // Draw the current frame from the video on the canvas
|
||||
// context.drawImage(video, 0, 0, canvas.width, canvas.height);
|
||||
// canvas.toBlob((blob) => {
|
||||
// if (blob) {
|
||||
// const file = new File([blob], "captured-image.png", {
|
||||
// type: "image/png",
|
||||
// });
|
||||
// callApi(name, file);
|
||||
// }
|
||||
// }, "image/png");
|
||||
// }
|
||||
// }
|
||||
// };
|
||||
|
||||
// // Call API with name and image file
|
||||
// const callApi = async (name: string, file: File) => {
|
||||
// const formData = new FormData();
|
||||
// formData.append("name", name);
|
||||
// formData.append("image", file);
|
||||
|
||||
// try {
|
||||
// const startTime = performance.now();
|
||||
// const response = await fetch(
|
||||
// `${process.env.NEXT_PUBLIC_BASE_URL}/register`,
|
||||
// {
|
||||
// method: "POST",
|
||||
// body: formData,
|
||||
// }
|
||||
// );
|
||||
|
||||
// // End measuring time
|
||||
// const endTime = performance.now();
|
||||
// const totalTime = (endTime - startTime) / 1000; // Convert milliseconds to seconds
|
||||
|
||||
// if (response.ok) {
|
||||
// const data = await response.json();
|
||||
// console.log("API call successful");
|
||||
// alert(
|
||||
// `Message: ${data.message}\nName: ${
|
||||
// data?.name
|
||||
// }\nTime taken: ${totalTime.toFixed(2)} seconds`
|
||||
// ); // Show success message with time taken
|
||||
// setError(""); // Clear error message on success
|
||||
// } else {
|
||||
// console.error("API call failed");
|
||||
// setError("Failed to register. Please try again."); // Set error message on failure
|
||||
// }
|
||||
// } catch (error) {
|
||||
// console.error("Error calling API", error);
|
||||
// setError("An error occurred. Please try again."); // Set error message on exception
|
||||
// }
|
||||
// };
|
||||
|
||||
// return (
|
||||
// <div className="container">
|
||||
// <h1 className="title">Face Capture Form</h1>
|
||||
// <form className="form">
|
||||
// <div className="form-group">
|
||||
// <label htmlFor="name" className="label">
|
||||
// Name:
|
||||
// </label>
|
||||
// <input
|
||||
// id="name"
|
||||
// type="text"
|
||||
// value={name}
|
||||
// onChange={(e) => {
|
||||
// setName(e.target.value);
|
||||
// setError(""); // Clear error message when user starts typing
|
||||
// }}
|
||||
// className="input"
|
||||
// placeholder="Enter your name"
|
||||
// required
|
||||
// />
|
||||
// {error && !name.trim() && (
|
||||
// <p className="error-message">{error}</p> // Display error message if name is empty
|
||||
// )}
|
||||
// </div>
|
||||
// <div className="video-container">
|
||||
// <video ref={videoRef} autoPlay playsInline className="video" />
|
||||
// </div>
|
||||
// <button type="button" onClick={captureImage} className="capture-button">
|
||||
// Capture Image
|
||||
// </button>
|
||||
// <canvas
|
||||
// ref={canvasRef}
|
||||
// style={{ display: "none" }}
|
||||
// width="640"
|
||||
// height="480"
|
||||
// />
|
||||
// </form>
|
||||
// </div>
|
||||
// );
|
||||
// };
|
||||
|
||||
// export default Register;
|
||||
|
||||
|
||||
"use client";
|
||||
import React, { useRef, useState, useEffect } from "react";
|
||||
import "./Register.css"; // Import CSS for styling
|
||||
import { toast } from "@/hooks/use-toast";
|
||||
import { Button } from "../ui/button";
|
||||
|
||||
const Register: React.FC = () => {
|
||||
const videoRef = useRef<HTMLVideoElement>(null);
|
||||
@ -235,18 +99,19 @@ const Register: React.FC = () => {
|
||||
|
||||
if (response.ok) {
|
||||
const data = await response.json();
|
||||
console.log("API call successful");
|
||||
alert(
|
||||
`Message: ${data.message}\nName: ${data?.name}\nTime taken: ${totalTime.toFixed(2)} seconds`
|
||||
); // Show success message with time taken
|
||||
setError(""); // Clear error message on success
|
||||
console.log("API call successful", totalTime);
|
||||
toast({
|
||||
title: data.message,
|
||||
description: `Name: ${data?.name} `,
|
||||
});
|
||||
setError("");
|
||||
} else {
|
||||
console.error("API call failed");
|
||||
setError("Failed to register. Please try again."); // Set error message on failure
|
||||
setError("Failed to register. Please try again.");
|
||||
}
|
||||
} catch (error) {
|
||||
console.error("Error calling API", error);
|
||||
setError("An error occurred. Please try again."); // Set error message on exception
|
||||
setError("An error occurred. Please try again.");
|
||||
}
|
||||
};
|
||||
|
||||
@ -275,15 +140,20 @@ const Register: React.FC = () => {
|
||||
)}
|
||||
</div>
|
||||
|
||||
<div className="video-container">
|
||||
<video ref={videoRef} autoPlay playsInline className="video" />
|
||||
<button
|
||||
<div className="mb-5">
|
||||
<video
|
||||
ref={videoRef}
|
||||
autoPlay
|
||||
playsInline
|
||||
className="w-full max-w-[500px] rounded-lg shadow-md scale-x-[-1]"
|
||||
/>
|
||||
<Button
|
||||
type="button"
|
||||
onClick={captureImage}
|
||||
className="capture-button"
|
||||
>
|
||||
Capture Image
|
||||
</button>
|
||||
</Button>
|
||||
</div>
|
||||
<div>Or</div>
|
||||
|
||||
|
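The hunks above call a captureImage helper whose body is not shown in this diff; based on the commented-out version of the same canvas-to-File flow further down in this compare view, it presumably looks roughly like this sketch (names and dimensions are assumptions, not confirmed by the new code):

const captureImage = () => {
  const video = videoRef.current;
  const canvas = canvasRef.current;
  if (!video || !canvas) return;

  const context = canvas.getContext("2d");
  if (!context) return;

  // Draw the current webcam frame, then hand it to the register API as a File
  context.drawImage(video, 0, 0, canvas.width, canvas.height);
  canvas.toBlob((blob) => {
    if (blob) {
      const file = new File([blob], "captured-image.png", { type: "image/png" });
      callApi(name, file);
    }
  }, "image/png");
};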
@ -1,105 +1,7 @@
// import React, { useRef, useEffect } from "react";
// import "./Search.css"; // Import CSS for styling

// const Search: React.FC = () => {
// const videoRef = useRef<HTMLVideoElement>(null);
// const canvasRef = useRef<HTMLCanvasElement>(null);

// // Automatically open the camera when the component mounts
// useEffect(() => {
// const openCamera = async () => {
// try {
// const stream = await navigator.mediaDevices.getUserMedia({
// video: true,
// });
// if (videoRef.current) {
// videoRef.current.srcObject = stream;
// }
// } catch (err) {
// console.error("Error accessing the camera", err);
// }
// };

// openCamera();
// }, []);

// // Capture image from video stream
// const captureImage = () => {
// const video = videoRef.current;
// const canvas = canvasRef.current;

// if (video && canvas) {
// const context = canvas.getContext("2d");
// if (context) {
// context.drawImage(video, 0, 0, canvas.width, canvas.height);
// canvas.toBlob((blob) => {
// if (blob) {
// const file = new File([blob], "captured-image.png", {
// type: "image/png",
// });
// callApi(file);
// }
// }, "image/png");
// }
// }
// };

// // Call API with the captured image file
// const callApi = async (file: File) => {
// const formData = new FormData();
// formData.append("image", file); // Append the file to the form data

// try {
// const startTime = performance.now();
// const response = await fetch(
// `${process.env.NEXT_PUBLIC_BASE_URL}/search`,
// {
// method: "POST",
// body: formData, // Send the form data
// }
// );
// // End measuring time
// const endTime = performance.now();
// const totalTime = (endTime - startTime) / 1000;
// if (response.ok) {
// const data = await response.json();
// console.log("API call successful", data);
// alert(
// `Search result: ${data.message}\nName: ${
// data?.name
// }\nTime taken: ${totalTime.toFixed(2)} seconds`
// );
// } else {
// console.error("API call failed");
// }
// } catch (error) {
// console.error("Error calling API", error);
// }
// };

// return (
// <div className="search-container">
// <h2>Search</h2>
// <div className="video-container">
// <video ref={videoRef} autoPlay playsInline className="video" />
// </div>
// <button type="button" onClick={captureImage} className="capture-button">
// Capture Image
// </button>
// <canvas
// ref={canvasRef}
// style={{ display: "none" }}
// width="640"
// height="480"
// />
// </div>
// );
// };

// export default Search;

import React, { useRef, useEffect } from "react";
import "./Search.css"; // Import CSS for styling
import { Button } from "../ui/button";
import { toast } from "@/hooks/use-toast";

const Search: React.FC = () => {
const videoRef = useRef<HTMLVideoElement>(null);
@ -169,11 +71,12 @@ const Search: React.FC = () => {
if (response.ok) {
const data = await response.json();
console.log("API call successful", data);
alert(
`Search result: ${data.message}\nName: ${
data?.name
}\nTime taken: ${totalTime.toFixed(2)} seconds`
);
toast({
title: data.message,
description: `Name: ${data?.name}\n\nTime taken: ${totalTime.toFixed(
2
)} seconds`,
});
} else {
console.error("API call failed");
}
@ -188,9 +91,9 @@ const Search: React.FC = () => {
<div className="video-container">
<video ref={videoRef} autoPlay playsInline className="video" />
</div>
<button type="button" onClick={captureImage} className="capture-button">
<Button type="button" onClick={captureImage} className="capture-button">
Capture Image
</button>
</Button>
<div>Or</div>
<div className="upload-container">
<label htmlFor="upload" className="upload-label">
56 components/ui/button.tsx Normal file
@ -0,0 +1,56 @@
import * as React from "react"
import { Slot } from "@radix-ui/react-slot"
import { cva, type VariantProps } from "class-variance-authority"

import { cn } from "@/lib/utils"

const buttonVariants = cva(
  "inline-flex items-center justify-center gap-2 whitespace-nowrap rounded-md text-sm font-medium ring-offset-background transition-colors focus-visible:outline-none focus-visible:ring-2 focus-visible:ring-ring focus-visible:ring-offset-2 disabled:pointer-events-none disabled:opacity-50 [&_svg]:pointer-events-none [&_svg]:size-4 [&_svg]:shrink-0",
  {
    variants: {
      variant: {
        default: "bg-primary text-primary-foreground hover:bg-primary/90",
        destructive:
          "bg-destructive text-destructive-foreground hover:bg-destructive/90",
        outline:
          "border border-input bg-background hover:bg-accent hover:text-accent-foreground",
        secondary:
          "bg-secondary text-secondary-foreground hover:bg-secondary/80",
        ghost: "hover:bg-accent hover:text-accent-foreground",
        link: "text-primary underline-offset-4 hover:underline",
      },
      size: {
        default: "h-10 px-4 py-2",
        sm: "h-9 rounded-md px-3",
        lg: "h-11 rounded-md px-8",
        icon: "h-10 w-10",
      },
    },
    defaultVariants: {
      variant: "default",
      size: "default",
    },
  }
)

export interface ButtonProps
  extends React.ButtonHTMLAttributes<HTMLButtonElement>,
    VariantProps<typeof buttonVariants> {
  asChild?: boolean
}

const Button = React.forwardRef<HTMLButtonElement, ButtonProps>(
  ({ className, variant, size, asChild = false, ...props }, ref) => {
    const Comp = asChild ? Slot : "button"
    return (
      <Comp
        className={cn(buttonVariants({ variant, size, className }))}
        ref={ref}
        {...props}
      />
    )
  }
)
Button.displayName = "Button"

export { Button, buttonVariants }
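For reference, a minimal usage sketch of this Button: the variant, size, and asChild props come straight from the cva config above, while the surrounding component and the Link target are hypothetical.

import Link from "next/link";
import { Button } from "@/components/ui/button";

export function ButtonExample() {
  return (
    <div className="flex gap-2">
      {/* Plain button using the default variant and size */}
      <Button onClick={() => console.log("clicked")}>Save</Button>
      {/* Styled as a button but rendered as its child element via Radix Slot */}
      <Button asChild variant="outline" size="sm">
        <Link href="/register">Go to register</Link>
      </Button>
    </div>
  );
}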
79 components/ui/card.tsx Normal file
@ -0,0 +1,79 @@
import * as React from "react"

import { cn } from "@/lib/utils"

const Card = React.forwardRef<
  HTMLDivElement,
  React.HTMLAttributes<HTMLDivElement>
>(({ className, ...props }, ref) => (
  <div
    ref={ref}
    className={cn(
      "rounded-lg border bg-card text-card-foreground shadow-sm",
      className
    )}
    {...props}
  />
))
Card.displayName = "Card"

const CardHeader = React.forwardRef<
  HTMLDivElement,
  React.HTMLAttributes<HTMLDivElement>
>(({ className, ...props }, ref) => (
  <div
    ref={ref}
    className={cn("flex flex-col space-y-1.5 p-6", className)}
    {...props}
  />
))
CardHeader.displayName = "CardHeader"

const CardTitle = React.forwardRef<
  HTMLDivElement,
  React.HTMLAttributes<HTMLDivElement>
>(({ className, ...props }, ref) => (
  <div
    ref={ref}
    className={cn(
      "text-2xl font-semibold leading-none tracking-tight",
      className
    )}
    {...props}
  />
))
CardTitle.displayName = "CardTitle"

const CardDescription = React.forwardRef<
  HTMLDivElement,
  React.HTMLAttributes<HTMLDivElement>
>(({ className, ...props }, ref) => (
  <div
    ref={ref}
    className={cn("text-sm text-muted-foreground", className)}
    {...props}
  />
))
CardDescription.displayName = "CardDescription"

const CardContent = React.forwardRef<
  HTMLDivElement,
  React.HTMLAttributes<HTMLDivElement>
>(({ className, ...props }, ref) => (
  <div ref={ref} className={cn("p-6 pt-0", className)} {...props} />
))
CardContent.displayName = "CardContent"

const CardFooter = React.forwardRef<
  HTMLDivElement,
  React.HTMLAttributes<HTMLDivElement>
>(({ className, ...props }, ref) => (
  <div
    ref={ref}
    className={cn("flex items-center p-6 pt-0", className)}
    {...props}
  />
))
CardFooter.displayName = "CardFooter"

export { Card, CardHeader, CardFooter, CardTitle, CardDescription, CardContent }
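A small composition sketch showing how these Card pieces are meant to nest; the component name and content strings are placeholders, not taken from this repo.

import {
  Card,
  CardContent,
  CardDescription,
  CardHeader,
  CardTitle,
} from "@/components/ui/card";

export function ExampleCard() {
  return (
    <Card className="max-w-sm">
      <CardHeader>
        <CardTitle>Face capture</CardTitle>
        <CardDescription>Register or search with a webcam frame.</CardDescription>
      </CardHeader>
      <CardContent>{/* video preview, capture button, etc. */}</CardContent>
    </Card>
  );
}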
129 components/ui/toast.tsx Normal file
@ -0,0 +1,129 @@
"use client"
|
||||
|
||||
import * as React from "react"
|
||||
import * as ToastPrimitives from "@radix-ui/react-toast"
|
||||
import { cva, type VariantProps } from "class-variance-authority"
|
||||
import { X } from "lucide-react"
|
||||
|
||||
import { cn } from "@/lib/utils"
|
||||
|
||||
const ToastProvider = ToastPrimitives.Provider
|
||||
|
||||
const ToastViewport = React.forwardRef<
|
||||
React.ElementRef<typeof ToastPrimitives.Viewport>,
|
||||
React.ComponentPropsWithoutRef<typeof ToastPrimitives.Viewport>
|
||||
>(({ className, ...props }, ref) => (
|
||||
<ToastPrimitives.Viewport
|
||||
ref={ref}
|
||||
className={cn(
|
||||
"fixed top-0 z-[100] flex max-h-screen w-full flex-col-reverse p-4 sm:bottom-0 sm:right-0 sm:top-auto sm:flex-col md:max-w-[420px]",
|
||||
className
|
||||
)}
|
||||
{...props}
|
||||
/>
|
||||
))
|
||||
ToastViewport.displayName = ToastPrimitives.Viewport.displayName
|
||||
|
||||
const toastVariants = cva(
|
||||
"group pointer-events-auto relative flex w-full items-center justify-between space-x-4 overflow-hidden rounded-md border p-6 pr-8 shadow-lg transition-all data-[swipe=cancel]:translate-x-0 data-[swipe=end]:translate-x-[var(--radix-toast-swipe-end-x)] data-[swipe=move]:translate-x-[var(--radix-toast-swipe-move-x)] data-[swipe=move]:transition-none data-[state=open]:animate-in data-[state=closed]:animate-out data-[swipe=end]:animate-out data-[state=closed]:fade-out-80 data-[state=closed]:slide-out-to-right-full data-[state=open]:slide-in-from-top-full data-[state=open]:sm:slide-in-from-bottom-full",
|
||||
{
|
||||
variants: {
|
||||
variant: {
|
||||
default: "border bg-background text-foreground",
|
||||
destructive:
|
||||
"destructive group border-destructive bg-destructive text-destructive-foreground",
|
||||
},
|
||||
},
|
||||
defaultVariants: {
|
||||
variant: "default",
|
||||
},
|
||||
}
|
||||
)
|
||||
|
||||
const Toast = React.forwardRef<
|
||||
React.ElementRef<typeof ToastPrimitives.Root>,
|
||||
React.ComponentPropsWithoutRef<typeof ToastPrimitives.Root> &
|
||||
VariantProps<typeof toastVariants>
|
||||
>(({ className, variant, ...props }, ref) => {
|
||||
return (
|
||||
<ToastPrimitives.Root
|
||||
ref={ref}
|
||||
className={cn(toastVariants({ variant }), className)}
|
||||
{...props}
|
||||
/>
|
||||
)
|
||||
})
|
||||
Toast.displayName = ToastPrimitives.Root.displayName
|
||||
|
||||
const ToastAction = React.forwardRef<
|
||||
React.ElementRef<typeof ToastPrimitives.Action>,
|
||||
React.ComponentPropsWithoutRef<typeof ToastPrimitives.Action>
|
||||
>(({ className, ...props }, ref) => (
|
||||
<ToastPrimitives.Action
|
||||
ref={ref}
|
||||
className={cn(
|
||||
"inline-flex h-8 shrink-0 items-center justify-center rounded-md border bg-transparent px-3 text-sm font-medium ring-offset-background transition-colors hover:bg-secondary focus:outline-none focus:ring-2 focus:ring-ring focus:ring-offset-2 disabled:pointer-events-none disabled:opacity-50 group-[.destructive]:border-muted/40 group-[.destructive]:hover:border-destructive/30 group-[.destructive]:hover:bg-destructive group-[.destructive]:hover:text-destructive-foreground group-[.destructive]:focus:ring-destructive",
|
||||
className
|
||||
)}
|
||||
{...props}
|
||||
/>
|
||||
))
|
||||
ToastAction.displayName = ToastPrimitives.Action.displayName
|
||||
|
||||
const ToastClose = React.forwardRef<
|
||||
React.ElementRef<typeof ToastPrimitives.Close>,
|
||||
React.ComponentPropsWithoutRef<typeof ToastPrimitives.Close>
|
||||
>(({ className, ...props }, ref) => (
|
||||
<ToastPrimitives.Close
|
||||
ref={ref}
|
||||
className={cn(
|
||||
"absolute right-2 top-2 rounded-md p-1 text-foreground/50 opacity-0 transition-opacity hover:text-foreground focus:opacity-100 focus:outline-none focus:ring-2 group-hover:opacity-100 group-[.destructive]:text-red-300 group-[.destructive]:hover:text-red-50 group-[.destructive]:focus:ring-red-400 group-[.destructive]:focus:ring-offset-red-600",
|
||||
className
|
||||
)}
|
||||
toast-close=""
|
||||
{...props}
|
||||
>
|
||||
<X className="h-4 w-4" />
|
||||
</ToastPrimitives.Close>
|
||||
))
|
||||
ToastClose.displayName = ToastPrimitives.Close.displayName
|
||||
|
||||
const ToastTitle = React.forwardRef<
|
||||
React.ElementRef<typeof ToastPrimitives.Title>,
|
||||
React.ComponentPropsWithoutRef<typeof ToastPrimitives.Title>
|
||||
>(({ className, ...props }, ref) => (
|
||||
<ToastPrimitives.Title
|
||||
ref={ref}
|
||||
className={cn("text-sm font-semibold", className)}
|
||||
{...props}
|
||||
/>
|
||||
))
|
||||
ToastTitle.displayName = ToastPrimitives.Title.displayName
|
||||
|
||||
const ToastDescription = React.forwardRef<
|
||||
React.ElementRef<typeof ToastPrimitives.Description>,
|
||||
React.ComponentPropsWithoutRef<typeof ToastPrimitives.Description>
|
||||
>(({ className, ...props }, ref) => (
|
||||
<ToastPrimitives.Description
|
||||
ref={ref}
|
||||
className={cn("text-sm opacity-90", className)}
|
||||
{...props}
|
||||
/>
|
||||
))
|
||||
ToastDescription.displayName = ToastPrimitives.Description.displayName
|
||||
|
||||
type ToastProps = React.ComponentPropsWithoutRef<typeof Toast>
|
||||
|
||||
type ToastActionElement = React.ReactElement<typeof ToastAction>
|
||||
|
||||
export {
|
||||
type ToastProps,
|
||||
type ToastActionElement,
|
||||
ToastProvider,
|
||||
ToastViewport,
|
||||
Toast,
|
||||
ToastTitle,
|
||||
ToastDescription,
|
||||
ToastClose,
|
||||
ToastAction,
|
||||
}
|
35 components/ui/toaster.tsx Normal file
@ -0,0 +1,35 @@
"use client"
|
||||
|
||||
import { useToast } from "@/hooks/use-toast"
|
||||
import {
|
||||
Toast,
|
||||
ToastClose,
|
||||
ToastDescription,
|
||||
ToastProvider,
|
||||
ToastTitle,
|
||||
ToastViewport,
|
||||
} from "@/components/ui/toast"
|
||||
|
||||
export function Toaster() {
|
||||
const { toasts } = useToast()
|
||||
|
||||
return (
|
||||
<ToastProvider>
|
||||
{toasts.map(function ({ id, title, description, action, ...props }) {
|
||||
return (
|
||||
<Toast key={id} {...props}>
|
||||
<div className="grid gap-1">
|
||||
{title && <ToastTitle>{title}</ToastTitle>}
|
||||
{description && (
|
||||
<ToastDescription>{description}</ToastDescription>
|
||||
)}
|
||||
</div>
|
||||
{action}
|
||||
<ToastClose />
|
||||
</Toast>
|
||||
)
|
||||
})}
|
||||
<ToastViewport />
|
||||
</ToastProvider>
|
||||
)
|
||||
}
|
@ -1 +0,0 @@
Subproject commit 300e624454be86585500a57bd54a1e0cb28b7a94
194 hooks/use-toast.ts Normal file
@ -0,0 +1,194 @@
"use client"
|
||||
|
||||
// Inspired by react-hot-toast library
|
||||
import * as React from "react"
|
||||
|
||||
import type {
|
||||
ToastActionElement,
|
||||
ToastProps,
|
||||
} from "@/components/ui/toast"
|
||||
|
||||
const TOAST_LIMIT = 1
|
||||
const TOAST_REMOVE_DELAY = 1000000
|
||||
|
||||
type ToasterToast = ToastProps & {
|
||||
id: string
|
||||
title?: React.ReactNode
|
||||
description?: React.ReactNode
|
||||
action?: ToastActionElement
|
||||
}
|
||||
|
||||
const actionTypes = {
|
||||
ADD_TOAST: "ADD_TOAST",
|
||||
UPDATE_TOAST: "UPDATE_TOAST",
|
||||
DISMISS_TOAST: "DISMISS_TOAST",
|
||||
REMOVE_TOAST: "REMOVE_TOAST",
|
||||
} as const
|
||||
|
||||
let count = 0
|
||||
|
||||
function genId() {
|
||||
count = (count + 1) % Number.MAX_SAFE_INTEGER
|
||||
return count.toString()
|
||||
}
|
||||
|
||||
type ActionType = typeof actionTypes
|
||||
|
||||
type Action =
|
||||
| {
|
||||
type: ActionType["ADD_TOAST"]
|
||||
toast: ToasterToast
|
||||
}
|
||||
| {
|
||||
type: ActionType["UPDATE_TOAST"]
|
||||
toast: Partial<ToasterToast>
|
||||
}
|
||||
| {
|
||||
type: ActionType["DISMISS_TOAST"]
|
||||
toastId?: ToasterToast["id"]
|
||||
}
|
||||
| {
|
||||
type: ActionType["REMOVE_TOAST"]
|
||||
toastId?: ToasterToast["id"]
|
||||
}
|
||||
|
||||
interface State {
|
||||
toasts: ToasterToast[]
|
||||
}
|
||||
|
||||
const toastTimeouts = new Map<string, ReturnType<typeof setTimeout>>()
|
||||
|
||||
const addToRemoveQueue = (toastId: string) => {
|
||||
if (toastTimeouts.has(toastId)) {
|
||||
return
|
||||
}
|
||||
|
||||
const timeout = setTimeout(() => {
|
||||
toastTimeouts.delete(toastId)
|
||||
dispatch({
|
||||
type: "REMOVE_TOAST",
|
||||
toastId: toastId,
|
||||
})
|
||||
}, TOAST_REMOVE_DELAY)
|
||||
|
||||
toastTimeouts.set(toastId, timeout)
|
||||
}
|
||||
|
||||
export const reducer = (state: State, action: Action): State => {
|
||||
switch (action.type) {
|
||||
case "ADD_TOAST":
|
||||
return {
|
||||
...state,
|
||||
toasts: [action.toast, ...state.toasts].slice(0, TOAST_LIMIT),
|
||||
}
|
||||
|
||||
case "UPDATE_TOAST":
|
||||
return {
|
||||
...state,
|
||||
toasts: state.toasts.map((t) =>
|
||||
t.id === action.toast.id ? { ...t, ...action.toast } : t
|
||||
),
|
||||
}
|
||||
|
||||
case "DISMISS_TOAST": {
|
||||
const { toastId } = action
|
||||
|
||||
// ! Side effects ! - This could be extracted into a dismissToast() action,
|
||||
// but I'll keep it here for simplicity
|
||||
if (toastId) {
|
||||
addToRemoveQueue(toastId)
|
||||
} else {
|
||||
state.toasts.forEach((toast) => {
|
||||
addToRemoveQueue(toast.id)
|
||||
})
|
||||
}
|
||||
|
||||
return {
|
||||
...state,
|
||||
toasts: state.toasts.map((t) =>
|
||||
t.id === toastId || toastId === undefined
|
||||
? {
|
||||
...t,
|
||||
open: false,
|
||||
}
|
||||
: t
|
||||
),
|
||||
}
|
||||
}
|
||||
case "REMOVE_TOAST":
|
||||
if (action.toastId === undefined) {
|
||||
return {
|
||||
...state,
|
||||
toasts: [],
|
||||
}
|
||||
}
|
||||
return {
|
||||
...state,
|
||||
toasts: state.toasts.filter((t) => t.id !== action.toastId),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
const listeners: Array<(state: State) => void> = []
|
||||
|
||||
let memoryState: State = { toasts: [] }
|
||||
|
||||
function dispatch(action: Action) {
|
||||
memoryState = reducer(memoryState, action)
|
||||
listeners.forEach((listener) => {
|
||||
listener(memoryState)
|
||||
})
|
||||
}
|
||||
|
||||
type Toast = Omit<ToasterToast, "id">
|
||||
|
||||
function toast({ ...props }: Toast) {
|
||||
const id = genId()
|
||||
|
||||
const update = (props: ToasterToast) =>
|
||||
dispatch({
|
||||
type: "UPDATE_TOAST",
|
||||
toast: { ...props, id },
|
||||
})
|
||||
const dismiss = () => dispatch({ type: "DISMISS_TOAST", toastId: id })
|
||||
|
||||
dispatch({
|
||||
type: "ADD_TOAST",
|
||||
toast: {
|
||||
...props,
|
||||
id,
|
||||
open: true,
|
||||
onOpenChange: (open) => {
|
||||
if (!open) dismiss()
|
||||
},
|
||||
},
|
||||
})
|
||||
|
||||
return {
|
||||
id: id,
|
||||
dismiss,
|
||||
update,
|
||||
}
|
||||
}
|
||||
|
||||
function useToast() {
|
||||
const [state, setState] = React.useState<State>(memoryState)
|
||||
|
||||
React.useEffect(() => {
|
||||
listeners.push(setState)
|
||||
return () => {
|
||||
const index = listeners.indexOf(setState)
|
||||
if (index > -1) {
|
||||
listeners.splice(index, 1)
|
||||
}
|
||||
}
|
||||
}, [state])
|
||||
|
||||
return {
|
||||
...state,
|
||||
toast,
|
||||
dismiss: (toastId?: string) => dispatch({ type: "DISMISS_TOAST", toastId }),
|
||||
}
|
||||
}
|
||||
|
||||
export { useToast, toast }
|
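Beyond the toast({ title, description }) calls used in Register and Search above, the handle returned by toast() also exposes update and dismiss; a minimal sketch of that flow (the messages and timing are illustrative, not from this repo):

import { toast } from "@/hooks/use-toast";

function notifyUpload() {
  // Show an initial toast and keep the returned handle
  const t = toast({ title: "Uploading image…" });

  // Later, update the same toast in place (update expects the id back)...
  t.update({ id: t.id, title: "Upload complete", description: "Face registered." });

  // ...or dismiss it programmatically (it also dismisses itself via onOpenChange)
  setTimeout(() => t.dismiss(), 3000);
}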
6 lib/utils.ts Normal file
@ -0,0 +1,6 @@
import { clsx, type ClassValue } from "clsx"
import { twMerge } from "tailwind-merge"

export function cn(...inputs: ClassValue[]) {
  return twMerge(clsx(inputs))
}
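cn simply feeds clsx's conditional class handling into tailwind-merge, so later Tailwind classes win over conflicting earlier ones; for example (values are illustrative):

import { cn } from "@/lib/utils";

// clsx drops the falsy entry, twMerge resolves the p-2 / p-4 conflict
cn("p-2 text-sm", false && "hidden", "p-4");
// => "text-sm p-4"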
1061 package-lock.json generated
File diff suppressed because it is too large
27 package.json
@ -9,19 +9,32 @@
"lint": "next lint"
|
||||
},
|
||||
"dependencies": {
|
||||
"react": "^19.0.0",
|
||||
"react-dom": "^19.0.0",
|
||||
"next": "15.1.6"
|
||||
"@radix-ui/react-slot": "^1.1.1",
|
||||
"@radix-ui/react-toast": "^1.2.5",
|
||||
"class-variance-authority": "^0.7.1",
|
||||
"clsx": "^2.1.1",
|
||||
"encoding": "^0.1.13",
|
||||
"face-api.js": "^0.22.2",
|
||||
"hls.js": "^1.0.3-0.canary.7275",
|
||||
"lucide-react": "^0.474.0",
|
||||
"next": "15.1.6",
|
||||
"react": "^18.3.1",
|
||||
"react-dom": "^18.3.1",
|
||||
"react-webcam": "^7.2.0",
|
||||
"tailwind-merge": "^3.0.1",
|
||||
"tailwindcss-animate": "^1.0.7"
|
||||
},
|
||||
"devDependencies": {
|
||||
"typescript": "^5",
|
||||
"@babel/core": "^7.26.7",
|
||||
"@babel/helper-plugin-utils": "^7.26.5",
|
||||
"@eslint/eslintrc": "^3",
|
||||
"@types/node": "^20",
|
||||
"@types/react": "^19",
|
||||
"@types/react-dom": "^19",
|
||||
"postcss": "^8",
|
||||
"tailwindcss": "^3.4.1",
|
||||
"eslint": "^9",
|
||||
"eslint-config-next": "15.1.6",
|
||||
"@eslint/eslintrc": "^3"
|
||||
"postcss": "^8",
|
||||
"tailwindcss": "^3.4.1",
|
||||
"typescript": "^5"
|
||||
}
|
||||
}
|
||||
|
@ -1,6 +1,7 @@
import type { Config } from "tailwindcss";

export default {
darkMode: ["class"],
content: [
"./pages/**/*.{js,ts,jsx,tsx,mdx}",
"./components/**/*.{js,ts,jsx,tsx,mdx}",
@ -9,10 +10,53 @@ export default {
theme: {
extend: {
colors: {
background: "var(--background)",
foreground: "var(--foreground)",
background: 'hsl(var(--background))',
foreground: 'hsl(var(--foreground))',
card: {
DEFAULT: 'hsl(var(--card))',
foreground: 'hsl(var(--card-foreground))'
},
popover: {
DEFAULT: 'hsl(var(--popover))',
foreground: 'hsl(var(--popover-foreground))'
},
primary: {
DEFAULT: 'hsl(var(--primary))',
foreground: 'hsl(var(--primary-foreground))'
},
plugins: [],
secondary: {
DEFAULT: 'hsl(var(--secondary))',
foreground: 'hsl(var(--secondary-foreground))'
},
muted: {
DEFAULT: 'hsl(var(--muted))',
foreground: 'hsl(var(--muted-foreground))'
},
accent: {
DEFAULT: 'hsl(var(--accent))',
foreground: 'hsl(var(--accent-foreground))'
},
destructive: {
DEFAULT: 'hsl(var(--destructive))',
foreground: 'hsl(var(--destructive-foreground))'
},
border: 'hsl(var(--border))',
input: 'hsl(var(--input))',
ring: 'hsl(var(--ring))',
chart: {
'1': 'hsl(var(--chart-1))',
'2': 'hsl(var(--chart-2))',
'3': 'hsl(var(--chart-3))',
'4': 'hsl(var(--chart-4))',
'5': 'hsl(var(--chart-5))'
}
},
borderRadius: {
lg: 'var(--radius)',
md: 'calc(var(--radius) - 2px)',
sm: 'calc(var(--radius) - 4px)'
}
}
},
plugins: [require("tailwindcss-animate")],
} satisfies Config;
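With this config, each color name maps to a CSS custom property through hsl(var(--…)), so ordinary utilities pick up the active theme (and flip when a dark class is present, per darkMode: ["class"]); a minimal illustration, with the component name being hypothetical:

export function ThemedBadge() {
  // bg-primary / text-primary-foreground / border-border resolve to hsl(var(--primary)) etc.
  return (
    <span className="rounded-md border border-border bg-primary px-2 py-1 text-primary-foreground">
      live
    </span>
  );
}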