Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
104 changes: 104 additions & 0 deletions dotnet/Face/Detect.cs
Original file line number Diff line number Diff line change
@@ -0,0 +1,104 @@
using System.Drawing;

using Azure;
using Azure.AI.Vision.Face;

namespace FaceQuickstart
{
class Program
{
    // Quickstart credentials. NOTE(review): prefer loading these from
    // environment variables instead of hard-coding (see FindSimilar.cs).
    static string SUBSCRIPTION_KEY = "PASTE_YOUR_FACE_SUBSCRIPTION_KEY_HERE";
    static string ENDPOINT = "PASTE_YOUR_FACE_ENDPOINT_HERE";

    /// <summary>
    /// Demonstrates face detection with the Azure AI Vision Face client:
    /// basic detection, facial landmarks, and face attributes.
    /// </summary>
    // FIX: was 'async static void' — an async void method's exceptions are
    // unobservable and the caller cannot await completion. Return Task so
    // Main can await it and failures surface.
    static async Task Quickstart()
    {
        FaceClient faceClient = new FaceClient(new Uri(ENDPOINT), new AzureKeyCredential(SUBSCRIPTION_KEY));

        var imageUrl = "https://raw.githubusercontent.com/Azure-Samples/cognitive-services-sample-data-files/master/ComputerVision/Images/faces.jpg";

        // <basic1>
        var response = await faceClient.DetectAsync(new Uri(imageUrl), FaceDetectionModel.Detection03, FaceRecognitionModel.Recognition04, returnFaceId: false);
        IReadOnlyList<FaceDetectionResult> faces = response.Value;
        // </basic1>

        // <basic2>
        foreach (var face in faces)
        {
            // NOTE(review): the detection call above passed returnFaceId: false,
            // so face.FaceId is null here and 'id' is the empty string. Pass
            // returnFaceId: true if a usable face ID is needed.
            string id = face.FaceId.ToString();
            FaceRectangle rect = face.FaceRectangle;
        }
        // </basic2>

        // <landmarks1>
        // Note DetectionModel.Detection02 cannot be used with returnFaceLandmarks.
        var response2 = await faceClient.DetectAsync(new Uri(imageUrl), FaceDetectionModel.Detection03, FaceRecognitionModel.Recognition04, returnFaceId: false, returnFaceLandmarks: true);
        IReadOnlyList<FaceDetectionResult> faces2 = response2.Value;
        // </landmarks1>

        // <landmarks2>
        foreach (var face in faces2)
        {
            var landmarks = face.FaceLandmarks;

            double noseX = landmarks.NoseTip.X;
            double noseY = landmarks.NoseTip.Y;

            double leftPupilX = landmarks.PupilLeft.X;
            double leftPupilY = landmarks.PupilLeft.Y;

            double rightPupilX = landmarks.PupilRight.X;
            double rightPupilY = landmarks.PupilRight.Y;
            // </landmarks2>

            // <direction>
            var upperLipBottom = landmarks.UpperLipBottom;
            var underLipTop = landmarks.UnderLipTop;

            // Midpoint between the upper lip's bottom edge and the lower
            // lip's top edge approximates the center of the mouth.
            var centerOfMouth = new Point(
                (int)((upperLipBottom.X + underLipTop.X) / 2),
                (int)((upperLipBottom.Y + underLipTop.Y) / 2));

            var eyeLeftInner = landmarks.EyeLeftInner;
            var eyeRightInner = landmarks.EyeRightInner;

            var centerOfTwoEyes = new Point(
                (int)((eyeLeftInner.X + eyeRightInner.X) / 2),
                (int)((eyeLeftInner.Y + eyeRightInner.Y) / 2));

            // Mouth-to-eyes vector gives a rough "up" direction for the face.
            var faceDirectionVectorX = centerOfTwoEyes.X - centerOfMouth.X;
            var faceDirectionVectorY = centerOfTwoEyes.Y - centerOfMouth.Y;
        }
        // </direction>

        // <attributes1>
        var requiredFaceAttributes = new FaceAttributeType[] {
            FaceAttributeType.Detection03.Blur,
            FaceAttributeType.Detection03.HeadPose,
            FaceAttributeType.Detection03.Mask,
            FaceAttributeType.Recognition04.QualityForRecognition
        };
        // Note DetectionModel.Detection02 cannot be used with returnFaceAttributes.
        var response3 = await faceClient.DetectAsync(new Uri(imageUrl), FaceDetectionModel.Detection03, FaceRecognitionModel.Recognition04, returnFaceId: false, returnFaceAttributes: requiredFaceAttributes);
        IReadOnlyList<FaceDetectionResult> faces3 = response3.Value;
        // </attributes1>

        // <attributes2>
        foreach (var face in faces3)
        {
            var attributes = face.FaceAttributes;
            var blur = attributes.Blur;
            var headPose = attributes.HeadPose;
            var mask = attributes.Mask;
            var qualityForRecognition = attributes.QualityForRecognition;
        }
        // </attributes2>
    }

    // FIX: previously a synchronous Main fire-and-forgot the async void
    // Quickstart(); an async Task Main awaits it so exceptions propagate
    // and the work is guaranteed to finish before the exit prompt.
    static async Task Main(string[] args)
    {
        await Quickstart();
        Console.WriteLine("Press any key to exit.");
        Console.ReadKey();
    }
}
}
107 changes: 107 additions & 0 deletions dotnet/Face/FindSimilar.cs
Original file line number Diff line number Diff line change
@@ -0,0 +1,107 @@
// <snippet_using>
using Azure;
using Azure.AI.Vision.Face;
// </snippet_using>

namespace FaceQuickstart
{
class Program
{
    // <snippet_image_url>
    const string IMAGE_BASE_URL = "https://raw.githubusercontent.com/Azure-Samples/cognitive-services-sample-data-files/master/Face/images/";
    // </snippet_image_url>

    // <snippet_creds>
    static readonly string SUBSCRIPTION_KEY = Environment.GetEnvironmentVariable("FACE_APIKEY") ?? "<apikey>";
    static readonly string ENDPOINT = Environment.GetEnvironmentVariable("FACE_ENDPOINT") ?? "<endpoint>";
    // </snippet_creds>

    // FIX: was a synchronous Main that called FindSimilar(...).Wait() —
    // blocking on async code wraps failures in AggregateException and risks
    // deadlock; an async Task Main awaits it instead.
    static async Task Main(string[] args)
    {

        // <snippet_detect_models>
        FaceRecognitionModel RECOGNITION_MODEL4 = FaceRecognitionModel.Recognition04;
        // </snippet_detect_models>

        // <snippet_maincalls>
        FaceClient client = Authenticate(ENDPOINT, SUBSCRIPTION_KEY);
        await FindSimilar(client, IMAGE_BASE_URL, RECOGNITION_MODEL4);
        // </snippet_maincalls>
    }

    // <snippet_auth>
    /// <summary>
    /// Creates a FaceClient authenticated with an API key credential.
    /// </summary>
    public static FaceClient Authenticate(string endpoint, string key)
    {
        return new FaceClient(new Uri(endpoint), new AzureKeyCredential(key));
    }
    // </snippet_auth>

    // <snippet_face_detect_recognize>
    /// <summary>
    /// Detects faces in the image at <paramref name="url"/> and returns only
    /// those whose QualityForRecognition is medium or high (i.e. not Low).
    /// </summary>
    private static async Task<List<FaceDetectionResult>> DetectFaceRecognize(FaceClient faceClient, string url, FaceRecognitionModel recognition_model)
    {
        // Detect faces from image URL.
        Response<IReadOnlyList<FaceDetectionResult>> response = await faceClient.DetectAsync(new Uri(url), FaceDetectionModel.Detection03, recognition_model, returnFaceId: true, [FaceAttributeType.QualityForRecognition]);
        IReadOnlyList<FaceDetectionResult> detectedFaces = response.Value;
        List<FaceDetectionResult> sufficientQualityFaces = new List<FaceDetectionResult>();
        foreach (FaceDetectionResult detectedFace in detectedFaces)
        {
            var faceQualityForRecognition = detectedFace.FaceAttributes.QualityForRecognition;
            if (faceQualityForRecognition.HasValue && (faceQualityForRecognition.Value != QualityForRecognition.Low))
            {
                sufficientQualityFaces.Add(detectedFace);
            }
        }
        Console.WriteLine($"{detectedFaces.Count} face(s) with {sufficientQualityFaces.Count} having sufficient quality for recognition detected from image `{Path.GetFileName(url)}`");

        return sufficientQualityFaces;
    }
    // </snippet_face_detect_recognize>

    /// <summary>
    /// Detects faces in a set of target images and a source image, then finds
    /// which target faces are similar to the first source face.
    /// </summary>
    public static async Task FindSimilar(FaceClient client, string base_url, FaceRecognitionModel recognition_model)
    {
        // <snippet_loadfaces>
        Console.WriteLine("========FIND SIMILAR========");
        Console.WriteLine();

        List<string> targetImageFileNames = new List<string>
        {
            "Family1-Dad1.jpg",
            "Family1-Daughter1.jpg",
            "Family1-Mom1.jpg",
            "Family1-Son1.jpg",
            "Family2-Lady1.jpg",
            "Family2-Man1.jpg",
            "Family3-Lady1.jpg",
            "Family3-Man1.jpg"
        };

        string sourceImageFileName = "findsimilar.jpg";
        IList<Guid> targetFaceIds = new List<Guid>();
        foreach (var targetImageFileName in targetImageFileNames)
        {
            // Detect faces from target image url.
            var faces = await DetectFaceRecognize(client, $"{base_url}{targetImageFileName}", recognition_model);
            // Add detected faceId to list of GUIDs.
            // NOTE(review): throws if the image yields no sufficient-quality
            // face — acceptable for this sample's known test images.
            targetFaceIds.Add(faces[0].FaceId.Value);
        }

        // Detect faces from source image url.
        IList<FaceDetectionResult> detectedFaces = await DetectFaceRecognize(client, $"{base_url}{sourceImageFileName}", recognition_model);
        Console.WriteLine();
        // </snippet_loadfaces>

        // <snippet_find_similar>
        // Find a similar face(s) in the list of IDs. Comparing only the first in list for testing purposes.
        Response<IReadOnlyList<FaceFindSimilarResult>> response = await client.FindSimilarAsync(detectedFaces[0].FaceId.Value, targetFaceIds);
        IList<FaceFindSimilarResult> similarResults = response.Value.ToList();
        // </snippet_find_similar>
        // <snippet_find_similar_print>
        foreach (var similarResult in similarResults)
        {
            Console.WriteLine($"Faces from {sourceImageFileName} & ID:{similarResult.FaceId} are similar with confidence: {similarResult.Confidence}.");
        }
        Console.WriteLine();
        // </snippet_find_similar_print>
    }
}
}
Loading