| Author | SHA1 | Message | Date |
|---|---|---|---|
| ayan.ghoshal | d5c6c7c0c4 | Upload files to '': docker with FAISS | 5 days ago |
| ayan.ghoshal | 3fd33539d5 | Upload files to '': Added the scripts for accessing the database | 5 days ago |
@@ -0,0 +1,34 @@
# Python
__pycache__
*.pyc
*.pyo
*.pyd
.Python
env/
venv/
.env
*.log

# Docker
Dockerfile
docker-compose.yml
.dockerignore

# Git
.git
.gitignore

# IDE
.vscode/
.idea/

# Face data (will be mounted as volume)
*.pkl
*.faiss
data/

# Others
README.md
*.md
tests/
.pytest_cache/
@@ -0,0 +1,34 @@
# Use Python 3.11 slim as base image
FROM python:3.11-slim

# Set working directory
WORKDIR /app

# Install system dependencies.
# Note: libgl1 replaces libgl1-mesa-glx, which is no longer packaged in
# Debian bookworm (the base of python:3.11-slim); curl is added so the
# compose healthcheck below can run inside the container.
RUN apt-get update && apt-get install -y \
    build-essential \
    cmake \
    curl \
    libgl1 \
    libglib2.0-0 \
    libsm6 \
    libxext6 \
    libxrender-dev \
    && rm -rf /var/lib/apt/lists/*

# Copy requirements first for better layer caching
COPY requirements.txt .

# Install Python dependencies
RUN pip install --no-cache-dir -r requirements.txt

# Copy application code
COPY . .

# Create directory for face data persistence
RUN mkdir -p /app/data

# Expose port
EXPOSE 8000

# Command to run the application
CMD ["uvicorn", "main:app", "--host", "0.0.0.0", "--port", "8000"]
@@ -0,0 +1,34 @@
version: '3.8'

services:
  face-recognition-api:
    build: .
    container_name: face-recognition-api
    ports:
      - "8000:8000"
    volumes:
      - ./data:/app/data  # For persisting face data
    environment:
      - PYTHONUNBUFFERED=1
      - MAX_WORKERS=4
      - FACE_DETECTION_SIZE=640
      - SIMILARITY_THRESHOLD=0.6
    deploy:
      resources:
        limits:
          cpus: '2'
          memory: 4G
        reservations:
          cpus: '1'
          memory: 2G
    restart: unless-stopped
    healthcheck:
      test: ["CMD", "curl", "-f", "http://localhost:8000/"]
      interval: 30s
      timeout: 10s
      retries: 3
      start_period: 40s

networks:
  default:
    driver: bridge
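
With the compose file in place, the stack is typically brought up and checked as below (a sketch assuming Docker Compose v2; the container name comes from the file above):

```bash
# Build and start in the background
docker compose up -d --build

# Follow the API logs
docker compose logs -f face-recognition-api

# Inspect the healthcheck status (reports "healthy" once a probe
# passes after start_period)
docker inspect --format '{{.State.Health.Status}}' face-recognition-api
```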
@@ -0,0 +1,32 @@
import faiss
import numpy as np

# Load the FAISS index
index = faiss.read_index("face_index.faiss")

# Check the total number of vectors in the index
total_vectors = index.ntotal
print(f"Number of vectors in the index: {total_vectors}")

# Reconstruct vectors one by one.
# Note: reconstruct() works directly on flat indexes (e.g. IndexFlatL2 /
# IndexFlatIP); IVF-style indexes need index.make_direct_map() first.
vectors = []
for i in range(total_vectors):
    vectors.append(index.reconstruct(i))

# Convert the list of vectors to a NumPy array
vectors_array = np.array(vectors)

# Save the array to a text file for inspection
np.savetxt("faiss_vectors.txt", vectors_array, fmt="%.6f")
print("Vectors saved to faiss_vectors.txt")
@@ -0,0 +1,70 @@
import pickle
import json
import numpy as np

# Function to convert non-serializable objects (like numpy arrays) to a serializable format
def convert_to_serializable(data):
    if isinstance(data, np.ndarray):
        return data.tolist()  # Convert ndarray to list
    elif isinstance(data, dict):  # If data is a dictionary, recursively convert values
        return {key: convert_to_serializable(value) for key, value in data.items()}
    elif isinstance(data, list):  # If data is a list, recursively convert items
        return [convert_to_serializable(item) for item in data]
    else:
        return data  # For other types, return as is

# Load the pickle file
with open("face_store.pkl", "rb") as f:
    data = pickle.load(f)

# Convert the data to a JSON-serializable format
data_serializable = convert_to_serializable(data)

# Save to a JSON file
with open("face_store.json", "w") as json_file:
    json.dump(data_serializable, json_file, indent=4)

print("Data has been saved to face_store.json")