// FaceRecognizer/FaceRecognizer/MainForm.cs
using System;
using System.Collections.Generic;
using System.ComponentModel;
using System.Data;
using System.Drawing;
using System.Linq;
using System.Text;
using System.Threading.Tasks;
using System.Windows.Forms;
using System.IO;
using Emgu.CV;
using Emgu.CV.CvEnum;
using Emgu.CV.Structure;
using Emgu.CV.UI;
using Emgu.Util;
using Emgu.CV.Face;
namespace FaceRecognizer
{
    public partial class MainForm : Form
    {
        // Recognition engine and face detector; created fresh in train()/recognize().
        LBPHFaceRecognizer recognizer;
        CascadeClassifier faceCascade;
        Image<Gray, byte> temp;
        List<Image<Gray, byte>> imageList = new List<Image<Gray, byte>>();
        List<string> imageLabels = new List<string>();
        List<int> imageIndices = new List<int>();
        List<int> predictions = new List<int>();
        Rectangle[] detectedFaces;
        string trainImagesFolder = Application.StartupPath + "\\Train";
        string recognizeImagesFolder = Application.StartupPath + "\\photo";

        public MainForm()
        {
            InitializeComponent();
            console.AppendText("Main OK!");
        }
        void train(string personName)
        {
            faceCascade = new CascadeClassifier("haarcascade_frontalface_alt_tree.xml");
            recognizer = new LBPHFaceRecognizer();
            // Start from a clean slate so a repeated training run does not mix
            // stale labels/indices with the new image list.
            imageList.Clear();
            imageLabels.Clear();
            imageIndices.Clear();
            foreach (var file in Directory.GetFiles(trainImagesFolder))
            {
                if (file.EndsWith("jpg"))
                {
                    temp = new Image<Gray, byte>(file);
                    temp._EqualizeHist();
                    var faces = faceCascade.DetectMultiScale(temp, 1.1, 2, new Size(24, 24), Size.Empty);
                    if (faces.Length == 0) { continue; }
                    // Crop the first detected face and normalize its size for LBPH.
                    temp.ROI = faces[0];
                    temp = temp.Copy();
                    temp = temp.Resize(24, 24, Inter.Cubic);
                    imageList.Add(temp);
                    imageLabels.Add(Path.GetFileNameWithoutExtension(file));
                }
            }
            for (int i = 0; i < imageList.Count; i++)
            {
                imageIndices.Add(i);
            }
            try
            {
                recognizer.Train(imageList.ToArray(), imageIndices.ToArray());
                // Persist the trained model to a file named after the person.
                recognizer.Write(personName);
                recognizer.Dispose();
                imageList.Clear();
                faceCascade.Dispose();
                temp.Dispose();
            }
            catch (Exception)
            {
                // Swallowed: console.AppendText cannot be called from the worker
                // thread; see the logging sketch further down.
            }
        }
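
        // Sketch (not in the original): train() collects imageLabels (the source
        // file names) but never persists them, so recognize() can only report
        // numeric indices. One possible approach, assuming a hypothetical sidecar
        // file "<personName>.labels.txt" next to the model, is to write one label
        // per line so that line i corresponds to predicted label i.
        void saveLabels(string personName)
        {
            // imageLabels[i] was added in the same order as imageIndices[i],
            // so the line number doubles as the predicted label.
            File.WriteAllLines(personName + ".labels.txt", imageLabels.ToArray());
        }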
        void recognize(string personName)
        {
            int i = 0;
            try
            {
                //recognizer = new LBPHFaceRecognizer(1, 8, 8, 9, 65);
                recognizer = new LBPHFaceRecognizer();
                faceCascade = new CascadeClassifier("haarcascade_frontalface_alt_tree.xml");
                predictions.Clear();
                // Load the model previously written by train().
                recognizer.Read(personName);
                foreach (var file in Directory.GetFiles(recognizeImagesFolder))
                {
                    if (file.EndsWith("jpg"))
                    {
                        temp = new Image<Gray, byte>(file);
                        temp._EqualizeHist();
                        var faces = faceCascade.DetectMultiScale(temp, 1.1, 2, new Size(24, 24), Size.Empty);
                        if (faces.Length == 0) { continue; }
                        temp.ROI = faces[0];
                        temp = temp.Copy();
                        // Resize returns a new image; assign it back instead of discarding it.
                        temp = temp.Resize(24, 24, Inter.Cubic);
                        // Save the cropped face next to the input photos for inspection.
                        temp.ToBitmap().Save(recognizeImagesFolder + "\\" + i + ".jpg");
                        var result = recognizer.Predict(temp);
                        predictions.Add(result.Label);
                    }
                    i++;
                }
                recognizer.Dispose();
                imageList.Clear();
                faceCascade.Dispose();
                temp.Dispose();
            }
            // Rethrow without resetting the stack trace; the BackgroundWorker
            // reports the failure via e.Error.
            catch (Exception) { throw; }
        }
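
        // Sketch (not in the original): Predict() also reports a Distance
        // (assuming the Emgu FaceRecognizer.PredictionResult exposes it, as in
        // recent 3.x releases); lower means a closer LBPH match. A cutoff such
        // as the 65 in the commented-out constructor above could reject unknown
        // faces instead of always accepting the nearest label.
        bool isConfidentMatch(FaceRecognizer.PredictionResult result, double maxDistance = 65)
        {
            // Label -1 means the recognizer itself rejected the sample.
            return result.Label != -1 && result.Distance <= maxDistance;
        }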
        private void TrainBtn_Click(object sender, EventArgs e)
        {
            if (trainBackWorker.IsBusy) { return; }
            // Capture the name on the UI thread and hand it to the worker.
            trainBackWorker.RunWorkerAsync(nameTxtBx.Text);
            console.AppendText("Training started for " + nameTxtBx.Text);
        }
        private void RecconizeBtn_Click(object sender, EventArgs e)
        {
            if (recognizeBackWorker.IsBusy) { return; }
            recognizeBackWorker.RunWorkerAsync(nameTxtBx.Text);
            console.AppendText("Started recognition");
        }
        private void console_TextChanged(object sender, EventArgs e)
        {
            // Appending unconditionally here re-raises TextChanged and recurses;
            // only add the line break when one is actually missing.
            if (!console.Text.EndsWith("\n"))
            {
                console.AppendText("\n");
            }
            console.SelectionStart = console.Text.Length;
            console.ScrollToCaret();
        }
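
        // Sketch (not in the original): the AppendText calls inside train() and
        // recognize() were commented out because they run on a BackgroundWorker
        // thread. A small hypothetical helper that marshals the call back to the
        // UI thread via Invoke would let the workers log safely.
        void log(string message)
        {
            if (console.InvokeRequired)
            {
                // Hop back onto the UI thread before touching the control.
                console.Invoke(new Action<string>(log), message);
                return;
            }
            console.AppendText(message);
        }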
        private void trainBackWorker_DoWork(object sender, DoWorkEventArgs e)
        {
            // Runs on a worker thread; the person name is passed in from the click handler.
            train((string)e.Argument);
        }
        private void trainBackWorker_RunWorkerCompleted(object sender, RunWorkerCompletedEventArgs e)
        {
            console.AppendText("Finished training: " + nameTxtBx.Text);
        }
        private void recognizeBackWorker_DoWork(object sender, DoWorkEventArgs e)
        {
            recognize((string)e.Argument);
        }
        private void recognizeBackWorker_RunWorkerCompleted(object sender, RunWorkerCompletedEventArgs e)
        {
            // recognize() rethrows, so any failure is surfaced here via e.Error.
            if (e.Error != null)
            {
                console.AppendText("Recognition failed: " + e.Error.Message);
                return;
            }
            console.AppendText("Recognition finished\nResults:");
            printResults();
        }
        void printResults()
        {
            // Each prediction is an index into the training set built by train().
            foreach (var number in predictions)
            {
                console.AppendText(number.ToString());
            }
        }
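
        // Sketch (not in the original): printResults() only prints raw label
        // numbers. If the labels were persisted as in the saveLabels sketch above
        // (a hypothetical "<personName>.labels.txt" file), the indices could be
        // mapped back to the original file names like this.
        void printNamedResults(string personName)
        {
            string labelsFile = personName + ".labels.txt";
            if (!File.Exists(labelsFile)) { return; }
            string[] names = File.ReadAllLines(labelsFile);
            foreach (var label in predictions)
            {
                // Guard against labels that fall outside the saved list.
                if (label >= 0 && label < names.Length)
                {
                    console.AppendText(names[label]);
                }
            }
        }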
    }
}