diff --git a/components/MainForm.tsx b/components/MainForm.tsx
index 9dec51a..31490fb 100644
--- a/components/MainForm.tsx
+++ b/components/MainForm.tsx
@@ -4,10 +4,18 @@ import Register from "./register/Register";
import Search from "./search/Search";
import "./MainForm.css";
import RealtimeFaceDetection from "./realtimeFaceDetection/RealtimeFaceDetection";
+import FaceLiveness from "./faceLivelinessCheck/FaceLivelinessCheck";
+import FaceMovementDetection from "./faceMovementDetection/FaceMovementDetection";
+import RealtimeCount from "./realtimeCount/RealtimeCount";
const MainForm: React.FC = () => {
const [activeTab, setActiveTab] = useState<
- "register" | "search" | "realtime"
+ | "register"
+ | "search"
+ | "realtime"
+ | "liveliness"
+ | "realtime-count"
+ | "facemovement"
>("register");
return (
@@ -31,11 +39,36 @@ const MainForm: React.FC = () => {
>
Realtime Detection
+ {/* */}
+
+
{activeTab === "register" && <Register />}
{activeTab === "search" && <Search />}
{activeTab === "realtime" && <RealtimeFaceDetection />}
+ {activeTab === "liveliness" && <FaceLiveness />}
+ {activeTab === "realtime-count" && <RealtimeCount />}
+ {activeTab === "facemovement" && <FaceMovementDetection />}
);
diff --git a/components/faceLivelinessCheck/FaceLivelinessCheck.tsx b/components/faceLivelinessCheck/FaceLivelinessCheck.tsx
new file mode 100644
index 0000000..79f85ca
--- /dev/null
+++ b/components/faceLivelinessCheck/FaceLivelinessCheck.tsx
@@ -0,0 +1,192 @@
+/* eslint-disable @typescript-eslint/no-explicit-any */
+"use client";
+
+import { useState, useRef, useEffect } from "react";
+import Webcam from "react-webcam";
+import { Card, CardContent } from "@/components/ui/card";
+import { Loader2 } from "lucide-react";
+import { useToast } from "@/hooks/use-toast";
+import * as faceapi from "face-api.js";
+
+export default function FaceLiveness() {
+ const webcamRef = useRef<Webcam>(null);
+ const [isModelLoading, setIsModelLoading] = useState(true);
+ const [isProcessing, setIsProcessing] = useState(false);
+ const [previousExpressions, setPreviousExpressions] = useState<any>(null);
+ const processingTimeoutRef = useRef<ReturnType<typeof setTimeout> | null>(null);
+ const { toast } = useToast();
+
+ useEffect(() => {
+ const loadModels = async () => {
+ try {
+ const MODEL_URL =
+ "https://justadudewhohacks.github.io/face-api.js/models"; // NOTE(review): other components load models from the @vladmandic CDN — confirm compatibility / unify the source
+ await Promise.all([
+ faceapi.nets.tinyFaceDetector.loadFromUri(MODEL_URL),
+ faceapi.nets.faceLandmark68Net.loadFromUri(MODEL_URL),
+ faceapi.nets.faceExpressionNet.loadFromUri(MODEL_URL),
+ ]);
+ setIsModelLoading(false);
+ } catch (error) {
+ console.error("Error loading models:", error);
+ toast({
+ title: "Error",
+ description:
+ "Failed to load face detection models. Please refresh the page.",
+ variant: "destructive",
+ });
+ }
+ };
+ loadModels();
+ }, [toast]);
+
+ const checkLiveness = (expressions: any, landmarks: any) => {
+ if (!previousExpressions) {
+ setPreviousExpressions(expressions);
+ return false;
+ }
+
+ // Check for expression changes
+ const expressionThreshold = 0.1;
+ let hasExpressionChange = false;
+ for (const expression in expressions) {
+ const diff = Math.abs(
+ expressions[expression] - previousExpressions[expression]
+ );
+ if (diff > expressionThreshold) {
+ hasExpressionChange = true;
+ break;
+ }
+ }
+
+ // Check for natural facial movement using landmarks
+ const eyeBlinkDetected = detectEyeBlink(landmarks);
+
+ setPreviousExpressions(expressions);
+ return hasExpressionChange || eyeBlinkDetected;
+ };
+
+ const detectEyeBlink = (landmarks: any) => {
+ const leftEye = landmarks.getLeftEye();
+ const rightEye = landmarks.getRightEye();
+
+ // Calculate eye aspect ratio
+ const leftEAR = getEyeAspectRatio(leftEye);
+ const rightEAR = getEyeAspectRatio(rightEye);
+
+ // If either eye is closed (low aspect ratio), consider it a blink
+ const blinkThreshold = 0.2;
+ return leftEAR < blinkThreshold || rightEAR < blinkThreshold;
+ };
+
+ const getEyeAspectRatio = (eye: any) => {
+ // Calculate the eye aspect ratio using the landmark points
+ const height1 = distance(eye[1], eye[5]);
+ const height2 = distance(eye[2], eye[4]);
+ const width = distance(eye[0], eye[3]);
+ return (height1 + height2) / (2.0 * width);
+ };
+
+ const distance = (point1: any, point2: any) => {
+ return Math.sqrt(
+ Math.pow(point1.x - point2.x, 2) + Math.pow(point1.y - point2.y, 2)
+ );
+ };
+
+ useEffect(() => {
+ const processFrame = async () => {
+ if (!webcamRef.current || isProcessing || isModelLoading) return;
+
+ setIsProcessing(true);
+ try {
+ const imageSrc = webcamRef.current.getScreenshot();
+ if (!imageSrc) return;
+
+ const img = new Image();
+ img.src = imageSrc;
+ await new Promise((resolve) => (img.onload = resolve));
+
+ const detections = await faceapi
+ .detectAllFaces(img, new faceapi.TinyFaceDetectorOptions())
+ .withFaceLandmarks()
+ .withFaceExpressions();
+
+ if (detections.length > 0) {
+ // Process each detected face with high confidence
+ detections
+ .filter((detection) => detection.detection.score > 0.7)
+ .forEach((detection) => {
+ const isLive = checkLiveness(
+ detection.expressions,
+ detection.landmarks
+ );
+ if (isLive) {
+ toast({
+ title: "Liveness Detected",
+ description: "Real face detected with natural movements",
+ });
+ } else {
+ toast({
+ title: "Liveness Check",
+ description: "Please move or blink naturally",
+ variant: "destructive",
+ });
+ }
+ });
+ }
+ } catch (error) {
+ console.error("Processing error:", error);
+ } finally {
+ setIsProcessing(false);
+ // Schedule next frame processing
+ processingTimeoutRef.current = setTimeout(processFrame, 1000); // Process every second
+ }
+ };
+
+ if (!isModelLoading) {
+ processFrame();
+ }
+
+ return () => {
+ if (processingTimeoutRef.current) {
+ clearTimeout(processingTimeoutRef.current);
+ }
+ };
+ // eslint-disable-next-line react-hooks/exhaustive-deps
+ }, [isModelLoading, toast]); // isProcessing excluded: it flips every frame, which would cancel/restart the loop and defeat the 1s throttle
+
+ if (isModelLoading) {
+ return (
+
+
+
+ Loading face detection models...
+
+
+ );
+ }
+
+ return (
+
+
+
+
+
+
+
+ Move your face naturally or blink to verify liveness
+
+
+
+
+ );
+}
diff --git a/components/faceMovementDetection/FaceMovementDetection.css b/components/faceMovementDetection/FaceMovementDetection.css
new file mode 100644
index 0000000..c536140
--- /dev/null
+++ b/components/faceMovementDetection/FaceMovementDetection.css
@@ -0,0 +1,28 @@
+.video-container {
+ position: relative;
+ width: 100%;
+ max-width: 768px;
+ margin: 0 auto;
+}
+
+.webcam {
+ width: 100%;
+ border-radius: 8px;
+}
+
+.alert-warning {
+ display: flex;
+ align-items: center;
+ background-color: #facc15; /* Yellow */
+ color: #000;
+ padding: 12px;
+ border-radius: 6px;
+ margin-top: 16px;
+ font-weight: bold;
+}
+
+.button-container {
+ margin-top: 24px;
+ display: flex;
+ justify-content: center;
+}
diff --git a/components/faceMovementDetection/FaceMovementDetection.tsx b/components/faceMovementDetection/FaceMovementDetection.tsx
new file mode 100644
index 0000000..a32677f
--- /dev/null
+++ b/components/faceMovementDetection/FaceMovementDetection.tsx
@@ -0,0 +1,112 @@
+"use client";
+
+import { useEffect, useRef, useState } from "react";
+import Webcam from "react-webcam";
+import * as faceapi from "face-api.js";
+import { Button } from "@/components/ui/button";
+import { AlertTriangle, Camera } from "lucide-react";
+import { useToast } from "@/hooks/use-toast";
+
+const MODEL_URL = "https://cdn.jsdelivr.net/npm/@vladmandic/face-api/model";
+const CHECK_INTERVAL = 500;
+
+const FaceMovementDetection = () => {
+ const webcamRef = useRef<Webcam>(null);
+ const [isModelLoaded, setIsModelLoaded] = useState(false);
+ const [isDetecting, setIsDetecting] = useState(false);
+ const prevBoxSizeRef = useRef<number | null>(null);
+ const [movingForward, setMovingForward] = useState(false);
+ const { toast } = useToast();
+
+ useEffect(() => {
+ const loadModels = async () => {
+ try {
+ await Promise.all([
+ faceapi.nets.tinyFaceDetector.loadFromUri(MODEL_URL),
+ faceapi.nets.faceLandmark68Net.loadFromUri(MODEL_URL),
+ ]);
+ setIsModelLoaded(true);
+ toast({
+ title: "Models Loaded",
+ description: "Face detection models ready.",
+ });
+ } catch (error) {
+ console.error("Error loading models:", error);
+ toast({
+ title: "Error",
+ description: "Failed to load models.",
+ variant: "destructive",
+ });
+ }
+ };
+
+ loadModels();
+ }, [toast]);
+
+ const detectMovement = async () => {
+ if (!webcamRef.current?.video) return;
+
+ const video = webcamRef.current.video;
+ const detections = await faceapi.detectSingleFace(
+ video,
+ new faceapi.TinyFaceDetectorOptions()
+ );
+
+ console.log("prevBoxSizeRef:", prevBoxSizeRef.current);
+
+ if (detections) {
+ const { width, height } = detections.box;
+ const currentBoxSize = width * height;
+ console.log("currentBoxSize:", currentBoxSize);
+
+ if (prevBoxSizeRef.current !== null) {
+ const sizeIncrease = currentBoxSize - prevBoxSizeRef.current;
+ console.log("Size Increase:", sizeIncrease);
+
+ if (sizeIncrease > 3000) {
+ setMovingForward(true);
+ // toast({
+ // title: "Movement Detected",
+ // description: "The person is moving closer!",
+ // variant: "destructive",
+ // });
+ } else {
+ setMovingForward(false);
+ }
+ }
+
+ prevBoxSizeRef.current = currentBoxSize; // Update ref directly
+ }
+ };
+
+ const startDetection = () => {
+ if (!isModelLoaded || isDetecting) return; // also guard repeat clicks: the interval is never cleared, so stacking would multiply detection work
+ setIsDetecting(true);
+ setInterval(detectMovement, CHECK_INTERVAL);
+ };
+
+ return (
+
+
+
+
+ {movingForward && (
+
+
+ Person is moving forward!
+
+ )}
+
+
+
+
+ );
+};
+
+export default FaceMovementDetection;
diff --git a/components/realtimeCount/RealtimeCount.tsx b/components/realtimeCount/RealtimeCount.tsx
new file mode 100644
index 0000000..3940f18
--- /dev/null
+++ b/components/realtimeCount/RealtimeCount.tsx
@@ -0,0 +1,39 @@
+"use client";
+import React, { useState } from "react";
+import RegisterFaceCount from "./registerFaceCount/RegisterFaceCount";
+import HeadCount from "./headCount/HeadCount";
+
+const RealtimeCount: React.FC = () => {
+ const [activeTab, setActiveTab] = useState<
+ "register-face-count" | "get-face-count"
+ >("register-face-count");
+
+ return (
+
+
+
+
+
+
+ {activeTab === "register-face-count" && <RegisterFaceCount />}
+ {activeTab === "get-face-count" && <HeadCount />}
+
+
+ );
+};
+
+export default RealtimeCount;
diff --git a/components/realtimeCount/headCount/HeadCount.css b/components/realtimeCount/headCount/HeadCount.css
new file mode 100644
index 0000000..7f607b9
--- /dev/null
+++ b/components/realtimeCount/headCount/HeadCount.css
@@ -0,0 +1,120 @@
+.headcount-container {
+ max-width: 500px;
+ margin: 0 auto;
+ padding: 20px;
+ font-family: Arial, sans-serif;
+ background-color: #f9f9f9;
+ border-radius: 8px;
+ box-shadow: 0 4px 8px rgba(0, 0, 0, 0.1);
+}
+
+.heading {
+ text-align: center;
+ color: #333;
+ margin-bottom: 20px;
+}
+
+.form {
+ display: flex;
+ flex-direction: column;
+ gap: 15px;
+}
+
+.input-group {
+ display: flex;
+ flex-direction: column;
+ gap: 5px;
+}
+
+.label {
+ font-size: 14px;
+ color: #555;
+}
+
+.input {
+ padding: 10px;
+ font-size: 16px;
+ border-radius: 4px;
+ border: 1px solid #ccc;
+ outline: none;
+}
+
+.button {
+ padding: 10px;
+ font-size: 16px;
+ background-color: #007bff;
+ color: #fff;
+ border: none;
+ border-radius: 4px;
+ cursor: pointer;
+ transition: background-color 0.3s ease;
+}
+
+.button:disabled {
+ background-color: #ccc;
+ cursor: not-allowed;
+}
+
+.error {
+ color: red;
+ text-align: center;
+ margin-top: 10px;
+}
+
+.names-container {
+ margin-top: 20px;
+}
+
+.sub-heading {
+ color: #333;
+ margin-bottom: 10px;
+}
+
+.list {
+ list-style: none;
+ padding: 0;
+}
+
+.list-item {
+ padding: 10px;
+ background-color: #fff;
+ border: 1px solid #ddd;
+ border-radius: 4px;
+ margin-bottom: 5px;
+ font-size: 14px;
+}
+.daily-counts-container {
+ margin-top: 20px;
+ padding: 15px;
+ border-radius: 8px;
+ background: #f8f9fa;
+ box-shadow: 0px 4px 6px rgba(0, 0, 0, 0.1);
+}
+
.sub-heading { /* NOTE(review): duplicates the .sub-heading rule above — later declarations win; consider merging the two */
+ font-size: 1.5rem;
+ margin-bottom: 10px;
+ color: #333;
+}
+
+.list {
+ list-style-type: none;
+ padding: 0;
+}
+
+.list-item {
+ font-size: 1.1rem;
+ padding: 8px 0;
+ border-bottom: 1px solid #ddd;
+ display: flex;
+ justify-content: space-between;
+}
+
+.date {
+ font-weight: bold;
+ color: #007bff;
+}
+
+.count {
+ color: #555;
+}
diff --git a/components/realtimeCount/headCount/HeadCount.tsx b/components/realtimeCount/headCount/HeadCount.tsx
new file mode 100644
index 0000000..0a29db2
--- /dev/null
+++ b/components/realtimeCount/headCount/HeadCount.tsx
@@ -0,0 +1,183 @@
+import React, { useState } from "react";
+import "./HeadCount.css";
+
+interface ApiResponse {
+ total_unique_faces: number;
+ daily_counts: { date: string; unique_faces: number }[];
+}
+
+interface DateTimeRange {
+ date: string;
+ time: string;
+}
+
+const HeadCount: React.FC = () => {
+ const [from, setFrom] = useState({ date: "", time: "" });
+ const [to, setTo] = useState({ date: "", time: "" });
+ const [count, setCount] = useState<number | null>(null);
+ const [loading, setLoading] = useState(false);
+ const [error, setError] = useState<string | null>(null);
+ const [dailyCounts, setDailyCounts] = useState<
+ { date: string; count: number }[]
+ >([]);
+
+ const handleInputChange = (
+ e: React.ChangeEvent<HTMLInputElement>,
+ field: "from" | "to"
+ ) => {
+ const { name, value } = e.target;
+ if (field === "from") {
+ setFrom((prev) => ({ ...prev, [name]: value }));
+ } else {
+ setTo((prev) => ({ ...prev, [name]: value }));
+ }
+ };
+
+ const formatDateTime = (date: string, time: string) => {
+ return `${date}T${time}:00+00:00`; // NOTE(review): stamps local wall-clock input as UTC (+00:00) — confirm intended timezone handling
+ };
+
+ const handleSubmit = async (e: React.FormEvent) => {
+ e.preventDefault();
+
+ if (!from.date || !from.time || !to.date || !to.time) {
+ setError("Please fill in all date and time fields.");
+ return;
+ }
+
+ setLoading(true);
+ setError(null);
+
+ const start = formatDateTime(from.date, from.time);
+ const end = formatDateTime(to.date, to.time);
+ console.log(start, end);
+ try {
+ const response = await fetch(
+ `${
+ process.env.NEXT_PUBLIC_BASE_URL
+ }/face/headcount?start_time=${encodeURIComponent(
+ start
+ )}&end_time=${encodeURIComponent(end)}`,
+ {
+ method: "GET",
+ headers: {
+ "Content-Type": "application/json",
+ },
+ }
+ );
+
+ if (!response.ok) {
+ throw new Error("Failed to fetch data");
+ }
+
+ const data: ApiResponse = await response.json();
+ setCount(data.total_unique_faces);
+ if (data?.daily_counts) {
+ setDailyCounts(
+ data.daily_counts.map((d) => ({
+ date: d.date,
+ count: d.unique_faces,
+ }))
+ );
+ }
+ } catch (err) {
+ setError("An error occurred while fetching data.");
+ console.error(err);
+ } finally {
+ setLoading(false);
+ }
+ };
+
+ return (
+
+
Head Count
+
+
+ {error && (
+   <p className="error">{error}</p>
+ )}
+
+ {count !== null && (
+
+
Total Unique Face Count:
+
+
+ )}
+ {dailyCounts?.length > 0 && (
+
+
Daily Counts:
+
+ {dailyCounts.map((item, index) => (
+ -
+ {item.date}:
+ {item.count}
+
+ ))}
+
+
+ )}
+
+ );
+};
+
+export default HeadCount;
diff --git a/components/realtimeCount/registerFaceCount/RegisterFaceCount.tsx b/components/realtimeCount/registerFaceCount/RegisterFaceCount.tsx
new file mode 100644
index 0000000..261467a
--- /dev/null
+++ b/components/realtimeCount/registerFaceCount/RegisterFaceCount.tsx
@@ -0,0 +1,166 @@
+import React from "react";
+import { useEffect, useRef, useState } from "react";
+import Webcam from "react-webcam";
+import * as faceapi from "face-api.js";
+import { Button } from "@/components/ui/button";
+import { Camera } from "lucide-react";
+import { useToast } from "@/hooks/use-toast";
+
+const MODEL_URL = "https://cdn.jsdelivr.net/npm/@vladmandic/face-api/model";
+const PADDING = 60;
+
+const RegisterFaceCount = () => {
+ const webcamRef = useRef<Webcam>(null);
+ const canvasRef = useRef<HTMLCanvasElement>(null);
+ const [isModelLoaded, setIsModelLoaded] = useState(false);
+ const [isDetecting, setIsDetecting] = useState(false);
+ const { toast } = useToast();
+
+ useEffect(() => {
+ const loadModels = async () => {
+ try {
+ await faceapi.nets.tinyFaceDetector.loadFromUri(MODEL_URL);
+ await faceapi.nets.faceLandmark68Net.loadFromUri(MODEL_URL);
+ await faceapi.nets.faceRecognitionNet.loadFromUri(MODEL_URL);
+ setIsModelLoaded(true);
+ } catch (error) {
+ console.error("Error loading models:", error);
+ toast({
+ title: "Error",
+ description: "Failed to load face detection models.",
+ variant: "destructive",
+ });
+ }
+ };
+ loadModels();
+ }, [toast]);
+
+ const extractFaceWithPadding = (
+ video: HTMLVideoElement,
+ box: faceapi.Box
+ ): HTMLCanvasElement => {
+ const canvas = document.createElement("canvas");
+ const context = canvas.getContext("2d");
+
+ // Calculate padded dimensions
+ const x = Math.max(0, box.x - PADDING);
+ const y = Math.max(0, box.y - PADDING);
+ const width = Math.min(video.videoWidth - x, box.width + 2 * PADDING);
+ const height = Math.min(video.videoHeight - y, box.height + 2 * PADDING);
+
+ canvas.width = width;
+ canvas.height = height;
+
+ if (context) {
+ context.drawImage(video, x, y, width, height, 0, 0, width, height);
+ }
+
+ return canvas;
+ };
+
+ const detectFace = async () => {
+ if (!webcamRef.current?.video || !canvasRef.current) return;
+
+ const video = webcamRef.current.video;
+ const canvas = canvasRef.current;
+ const context = canvas.getContext("2d");
+
+ if (!context) return;
+
+ canvas.width = video.videoWidth;
+ canvas.height = video.videoHeight;
+ context.clearRect(0, 0, canvas.width, canvas.height);
+ context.translate(canvas.width, 0);
+ context.scale(-1, 1);
+
+ const detections = await faceapi
+ .detectAllFaces(video, new faceapi.TinyFaceDetectorOptions())
+ .withFaceLandmarks()
+ .withFaceDescriptors();
+
+ if (detections.length > 0) {
+ const highConfidenceDetections = detections.filter(
+ (detection) => detection.detection.score > 0.5
+ );
+
+ for (const detection of highConfidenceDetections) {
+ const { box } = detection.detection;
+ context.strokeStyle = "#00FF00";
+ context.lineWidth = 2;
+ context.strokeRect(box.x, box.y, box.width, box.height);
+ context.save();
+ context.scale(-1, 1);
+ context.fillStyle = "#00FF00";
+ context.font = "16px Arial";
+ context.fillText(
+ `Confidence: ${Math.round(detection.detection.score * 100)}%`,
+ -box.x - box.width,
+ box.y - 5
+ );
+ context.restore();
+
+ const faceCanvas = extractFaceWithPadding(video, box);
+ faceCanvas.toBlob(
+ (blob) => {
+ if (blob) sendFaceDataToAPI(blob);
+ },
+ "image/jpeg",
+ 0.95
+ );
+ }
+ }
+ };
+
+ const sendFaceDataToAPI = async (imageBlob: Blob) => {
+ try {
+ const formData = new FormData();
+ formData.append("image", imageBlob, "face.jpg");
+
+ const response = await fetch(
+ `${process.env.NEXT_PUBLIC_BASE_URL}/face/search`,
+ {
+ method: "POST",
+ body: formData,
+ }
+ );
+
+ const data = await response.json();
+ toast({ title: data?.name, description: data.message });
+ } catch (error) {
+ console.error("Error sending face data:", error);
+ toast({
+ title: "Error",
+ description: "Failed to send face data.",
+ variant: "destructive",
+ });
+ }
+ };
+
+ const startDetection = () => {
+ if (!isModelLoaded || isDetecting) return; // also guard repeat clicks so uncleared setInterval handles don't stack
+ setIsDetecting(true);
+ setInterval(detectFace, 1000);
+ };
+ return (
+
+
+
+
+
+
+
+
+
+ );
+};
+
+export default RegisterFaceCount;
diff --git a/components/realtimeFaceDetection/RealtimeFaceDetection.tsx b/components/realtimeFaceDetection/RealtimeFaceDetection.tsx
index 1508a9c..0147b34 100644
--- a/components/realtimeFaceDetection/RealtimeFaceDetection.tsx
+++ b/components/realtimeFaceDetection/RealtimeFaceDetection.tsx
@@ -81,7 +81,7 @@ const RealtimeFaceDetection = () => {
if (detections.length > 0) {
const highConfidenceDetections = detections.filter(
- (detection) => detection.detection.score > 0.7
+ (detection) => detection.detection.score > 0.5
);
for (const detection of highConfidenceDetections) {
diff --git a/components/ui/card.tsx b/components/ui/card.tsx
new file mode 100644
index 0000000..f62edea
--- /dev/null
+++ b/components/ui/card.tsx
@@ -0,0 +1,79 @@
+import * as React from "react"
+
+import { cn } from "@/lib/utils"
+
+const Card = React.forwardRef<
+ HTMLDivElement,
+  React.HTMLAttributes<HTMLDivElement>
+>(({ className, ...props }, ref) => (
+  <div ref={ref} className={cn("rounded-lg border bg-card text-card-foreground shadow-sm", className)} {...props} />
+))
+Card.displayName = "Card"
+
+const CardHeader = React.forwardRef<
+ HTMLDivElement,
+  React.HTMLAttributes<HTMLDivElement>
+>(({ className, ...props }, ref) => (
+  <div ref={ref} className={cn("flex flex-col space-y-1.5 p-6", className)} {...props} />
+))
+CardHeader.displayName = "CardHeader"
+
+const CardTitle = React.forwardRef<
+ HTMLDivElement,
+  React.HTMLAttributes<HTMLDivElement>
+>(({ className, ...props }, ref) => (
+  <div ref={ref} className={cn("text-2xl font-semibold leading-none tracking-tight", className)} {...props} />
+))
+CardTitle.displayName = "CardTitle"
+
+const CardDescription = React.forwardRef<
+ HTMLDivElement,
+  React.HTMLAttributes<HTMLDivElement>
+>(({ className, ...props }, ref) => (
+  <div ref={ref} className={cn("text-sm text-muted-foreground", className)} {...props} />
+))
+CardDescription.displayName = "CardDescription"
+
+const CardContent = React.forwardRef<
+ HTMLDivElement,
+  React.HTMLAttributes<HTMLDivElement>
+>(({ className, ...props }, ref) => (
+  <div ref={ref} className={cn("p-6 pt-0", className)} {...props} />
+))
+CardContent.displayName = "CardContent"
+
+const CardFooter = React.forwardRef<
+ HTMLDivElement,
+  React.HTMLAttributes<HTMLDivElement>
+>(({ className, ...props }, ref) => (
+  <div ref={ref} className={cn("flex items-center p-6 pt-0", className)} {...props} />
+))
+CardFooter.displayName = "CardFooter"
+
+export { Card, CardHeader, CardFooter, CardTitle, CardDescription, CardContent }