facial_recognition_photobooth/CrestronOpenCvSharp/Capture/FacialRecognition.cs

using FaceAiSharp;
using FaceAiSharp.Extensions;
using SixLabors.ImageSharp;
using SixLabors.ImageSharp.PixelFormats;

namespace CrestronOpenCvSharp.Capture;
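/// <summary>
/// Wraps FaceAiSharp face detection and embedding generation to recognize previously
/// registered people from a captured photo.
/// </summary>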
public class FacialRecognition
{
    private readonly IFaceDetectorWithLandmarks _detector;
    private readonly IFaceEmbeddingsGenerator _recognizer;
    private readonly string? _baseDirectory;
    private Image<Rgb24>? _image;
    private float[]? _referenceEmbeddings;
    private readonly Dictionary<string, string> _faceImagesDict;

    // Reuse a single HttpClient instance instead of creating one per request
    private static readonly HttpClient Http = new();

    private string? FaceImagePath { get; set; }

    public FacialRecognition(string? baseDirectory)
    {
        _baseDirectory = baseDirectory;
        _detector = FaceAiSharpBundleFactory.CreateFaceDetectorWithLandmarks();
        _recognizer = FaceAiSharpBundleFactory.CreateFaceEmbeddingsGenerator();

        if (_baseDirectory != null)
            FaceImagePath = Path.Combine(_baseDirectory, "aligned.png");

        // Seed the dictionary with the default set of known faces
        _faceImagesDict = new Dictionary<string, string>
        {
            { "Yuri Staal", "https://ise2025.local.staal.one/VirtualControl/MA/Rooms/MYFIRSTAI/Html/yuri.jpg" },
            { "Toine C. Leerentveld", "https://ise2025.local.staal.one/VirtualControl/MA/Rooms/MYFIRSTAI/Html/toine.jpg" },
            { "Oliver Hall", "https://ise2025.local.staal.one/VirtualControl/MA/Rooms/MYFIRSTAI/Html/oliver.jpg" }
        };
    }
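    /// <summary>
    /// Loads the photo at the given path, detects faces and, when one is found, aligns it,
    /// stores its embedding as the reference and saves the aligned crop to disk.
    /// </summary>
    /// <returns>True when at least one face was detected.</returns>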
    public bool CheckForFace(string imageFilePath)
    {
        try
        {
            // Load and decode the photo
            var photo = File.ReadAllBytes(imageFilePath);
            _image = Image.Load<Rgb24>(photo);

            // Detect faces in this photo
            var faces = _detector.DetectFaces(_image);
            if (faces.Count != 0)
            {
                // Align the first detected face and store its embedding as the reference
                _recognizer.AlignFaceUsingLandmarks(_image, faces.First().Landmarks!);
                _referenceEmbeddings = _recognizer.GenerateEmbedding(_image);

                if (FaceImagePath != null)
                    _image.Save(FaceImagePath);

                Console.WriteLine("Aligned faces!");
            }
            else
            {
                Console.WriteLine("No faces were found!");
            }

            return faces.Count != 0;
        }
        catch (Exception e)
        {
            Console.WriteLine($"Exception detecting faces: {e.Message}");
            throw;
        }
    }
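    /// <summary>
    /// Downloads each known face image, generates its embedding and compares it to the reference
    /// embedding captured by <see cref="CheckForFace"/> via the dot product.
    /// </summary>
    /// <returns>The name of the first person whose similarity reaches 0.42, or null if nobody matches.</returns>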
    public async Task<string?> CompareFaces()
    {
        foreach (var (name, url) in _faceImagesDict)
        {
            var faceImage = await LoadImageAsync(url);
            var faces = _detector.DetectFaces(faceImage);
            if (faces.Count == 0)
            {
                Console.WriteLine($"No face found in the reference image for {name}");
                continue;
            }

            // Align the detected face and generate its embedding
            _recognizer.AlignFaceUsingLandmarks(faceImage, faces.First().Landmarks!);
            var faceEmbedding = _recognizer.GenerateEmbedding(faceImage);

            // Compare embeddings
            var similarity = _referenceEmbeddings?.Dot(faceEmbedding);
            Console.WriteLine($"Similarity with {name}: {similarity}");
            if (similarity >= 0.42)
            {
                return name;
            }
        }

        return null;
    }
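    /// <summary>
    /// Registers the last aligned face under the given name: copies the aligned image into the
    /// web-served directory and adds the matching URL to the known-faces dictionary.
    /// </summary>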
    public void AddPersonToDatabase(string name)
    {
        var shortName = name.Replace(" ", "");

        // Copy the aligned image to a file named after the person
        if (_baseDirectory != null)
        {
            var newFile = Path.Combine(_baseDirectory, $"{shortName}.jpg");
            File.Copy(FaceImagePath!, newFile, overwrite: true);
            Console.WriteLine($"Saved new image to {newFile}");
        }

        // Use the indexer so re-registering the same name does not throw
        _faceImagesDict[name] = $"https://ise2025.local.staal.one/VirtualControl/MA/Rooms/MYFIRSTAI/Html/{shortName}.jpg";
        Console.WriteLine("Added new image to dictionary");
    }
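    /// <summary>
    /// Downloads an image from the given URL and decodes it as an Rgb24 ImageSharp image.
    /// </summary>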
    private async Task<Image<Rgb24>> LoadImageAsync(string path)
    {
        // Download the image bytes and decode them into an ImageSharp image
        var imageBytes = await Http.GetByteArrayAsync(path);
        return Image.Load<Rgb24>(imageBytes);
    }
}