From 6cb6315c5562e3ec89a19ad507101ede99580a93 Mon Sep 17 00:00:00 2001
From: "Zhuoqun(Linda) Li" <55338278+linndaqun@users.noreply.github.com>
Date: Wed, 22 Jun 2022 10:03:41 -0700
Subject: [PATCH 1/4] Update FaceQuickstart-single.cs
---
dotnet/Face/FaceQuickstart-single.cs | 16 +++++++++++++++-
1 file changed, 15 insertions(+), 1 deletion(-)
diff --git a/dotnet/Face/FaceQuickstart-single.cs b/dotnet/Face/FaceQuickstart-single.cs
index f8444f6e..9be5eb91 100644
--- a/dotnet/Face/FaceQuickstart-single.cs
+++ b/dotnet/Face/FaceQuickstart-single.cs
@@ -15,15 +15,18 @@ class Program
{
static string personGroupId = Guid.NewGuid().ToString();
+ //
// URL path for the images.
const string IMAGE_BASE_URL = "https://csdx.blob.core.windows.net/resources/Face/Images/";
+ //
+ //
// From your Face subscription in the Azure portal, get your subscription key and endpoint.
const string SUBSCRIPTION_KEY = "PASTE_YOUR_FACE_SUBSCRIPTION_KEY_HERE";
const string ENDPOINT = "PASTE_YOUR_FACE_SUBSCRIPTION_ENDPOINT_HERE";
//
- static void Main(string[] args)
+ static void Main(string[] args)
{
// Recognition model 4 was released in February 2021.
// It is recommended since its accuracy is improved
@@ -32,15 +35,18 @@ static void Main(string[] args)
// with models 1 and 2.
const string RECOGNITION_MODEL4 = RecognitionModel.Recognition04;
+ //
// Authenticate.
IFaceClient client = Authenticate(ENDPOINT, SUBSCRIPTION_KEY);
// Identify - recognize one or more faces in a person group (a person group is created in this example).
IdentifyInPersonGroup(client, IMAGE_BASE_URL, RECOGNITION_MODEL4).Wait();
+ //
Console.WriteLine("End of quickstart.");
}
+ //
/*
* AUTHENTICATE
* Uses subscription key and region to create a client.
@@ -49,7 +55,9 @@ public static IFaceClient Authenticate(string endpoint, string key)
{
return new FaceClient(new ApiKeyServiceClientCredentials(key)) { Endpoint = endpoint };
}
+ //
+ //
// Detect faces from an image URL for recognition purposes. This is a helper method for other functions in this quickstart.
// Parameter `returnFaceId` of `DetectWithUrlAsync` must be set to `true` (the default) for recognition purposes.
// Parameter `FaceAttributes` is set to include the QualityForRecognition attribute.
@@ -73,6 +81,7 @@ private static async Task<List<DetectedFace>> DetectFaceRecognize(IFaceClient fa
return sufficientQualityFaces.ToList();
}
+ //
/*
* IDENTIFY FACES
@@ -142,7 +151,9 @@ public static async Task IdentifyInPersonGroup(IFaceClient client, string url, s
$"{url}{similarImage}", similarImage);
}
}
+ //
+ //
// Start to train the person group.
Console.WriteLine();
Console.WriteLine($"Train person group {personGroupId}.");
@@ -157,6 +168,7 @@ public static async Task IdentifyInPersonGroup(IFaceClient client, string url, s
if (trainingStatus.Status == TrainingStatusType.Succeeded) { break; }
}
Console.WriteLine();
+ //
List<Guid> sourceFaceIds = new List<Guid>();
// Detect faces from source image url.
@@ -165,6 +177,7 @@ public static async Task IdentifyInPersonGroup(IFaceClient client, string url, s
// Add detected faceId to sourceFaceIds.
foreach (var detectedFace in detectedFaces) { sourceFaceIds.Add(detectedFace.FaceId.Value); }
+ //
// Identify the faces in a person group.
var identifyResults = await client.Face.IdentifyAsync(sourceFaceIds, personGroupId);
@@ -178,6 +191,7 @@ public static async Task IdentifyInPersonGroup(IFaceClient client, string url, s
Console.WriteLine($"Person '{person.Name}' is identified for the face in: {sourceImageFileName} - {identifyResult.FaceId}," +
$" confidence: {identifyResult.Candidates[0].Confidence}.");
}
+ //
Console.WriteLine();
}
}
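Note on the training loop in the context above: the sample polls GetTrainingStatusAsync in a `while (true)` with no upper bound. A minimal sketch of a bounded wait, assuming the same Microsoft.Azure.CognitiveServices.Vision.Face client as the quickstart; `maxAttempts` is a hypothetical limit, not part of the sample:

    // Sketch only: poll person group training with an upper bound instead of while (true).
    // maxAttempts is a hypothetical parameter, not from the original sample.
    static async Task WaitForTrainingAsync(IFaceClient client, string personGroupId, int maxAttempts = 30)
    {
        for (int attempt = 0; attempt < maxAttempts; attempt++)
        {
            await Task.Delay(1000);
            var trainingStatus = await client.PersonGroup.GetTrainingStatusAsync(personGroupId);
            Console.WriteLine($"Training status: {trainingStatus.Status}.");
            if (trainingStatus.Status == TrainingStatusType.Succeeded) { return; }
            if (trainingStatus.Status == TrainingStatusType.Failed)
            {
                throw new InvalidOperationException($"Training failed: {trainingStatus.Message}");
            }
        }
        throw new TimeoutException($"Person group {personGroupId} did not finish training after {maxAttempts} attempts.");
    }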
From 5209ff4ceedcaae8acdb2932e64d701976c8cf51 Mon Sep 17 00:00:00 2001
From: "Zhuoqun(Linda) Li" <55338278+linndaqun@users.noreply.github.com>
Date: Tue, 28 Jun 2022 11:35:09 -0700
Subject: [PATCH 2/4] Create IdentityVerification.cs
---
dotnet/Face/IdentityVerification.cs | 222 ++++++++++++++++++++++++++++
1 file changed, 222 insertions(+)
create mode 100644 dotnet/Face/IdentityVerification.cs
diff --git a/dotnet/Face/IdentityVerification.cs b/dotnet/Face/IdentityVerification.cs
new file mode 100644
index 00000000..bb5b18c5
--- /dev/null
+++ b/dotnet/Face/IdentityVerification.cs
@@ -0,0 +1,222 @@
+//
+using System;
+using System.Collections.Generic;
+using System.IO;
+using System.Linq;
+using System.Threading;
+using System.Threading.Tasks;
+
+using Microsoft.Azure.CognitiveServices.Vision.Face;
+using Microsoft.Azure.CognitiveServices.Vision.Face.Models;
+
+namespace FaceQuickstart
+{
+ class Program
+ {
+ static string personGroupId = Guid.NewGuid().ToString();
+
+ //
+ // URL path for the images.
+ const string IMAGE_BASE_URL = "https://csdx.blob.core.windows.net/resources/Face/Images/";
+ //
+
+ //
+ // From your Face subscription in the Azure portal, get your subscription key and endpoint.
+ const string SUBSCRIPTION_KEY = "PASTE YOUR KEY";
+ const string ENDPOINT = "PASTE YOUR ENDPOINT";
+ //
+
+ static void Main(string[] args)
+ {
+ // Recognition model 4 was released in February 2021.
+ // It is recommended since its accuracy is improved
+ // on faces wearing masks compared with model 3,
+ // and its overall accuracy is improved compared
+ // with models 1 and 2.
+ const string RECOGNITION_MODEL4 = RecognitionModel.Recognition04;
+
+ //
+ // Authenticate.
+ IFaceClient client = Authenticate(ENDPOINT, SUBSCRIPTION_KEY);
+
+ // Identify - recognize one or more faces in a person group (a person group is created in this example).
+ IdentifyInPersonGroup(client, IMAGE_BASE_URL, RECOGNITION_MODEL4).Wait();
+ //
+
+ Console.WriteLine("End of quickstart.");
+ }
+
+ //
+ /*
+ * AUTHENTICATE
+ * Uses subscription key and region to create a client.
+ */
+ public static IFaceClient Authenticate(string endpoint, string key)
+ {
+ return new FaceClient(new ApiKeyServiceClientCredentials(key)) { Endpoint = endpoint };
+ }
+ //
+
+ //
+ // Detect faces from an image URL for recognition purposes. This is a helper method for other functions in this quickstart.
+ // Parameter `returnFaceId` of `DetectWithUrlAsync` must be set to `true` (the default) for recognition purposes.
+ // Parameter `FaceAttributes` is set to include the QualityForRecognition attribute.
+ // The recognition model must be set to recognition_03 or recognition_04 as a result.
+ // Result faces with insufficient quality for recognition are filtered out.
+ // The field `faceId` in the returned `DetectedFace`s will be used in Face - Find Similar, Face - Verify, and Face - Identify.
+ // It will expire 24 hours after the detection call.
+ private static async Task<List<DetectedFace>> DetectFaceRecognize(IFaceClient faceClient, string url, string recognition_model)
+ {
+ // Detect faces from the image URL, using the recognition model passed in by the caller.
+ // We use detection model 3 because it supports the QualityForRecognition attribute.
+ IList<DetectedFace> detectedFaces = await faceClient.Face.DetectWithUrlAsync(url, recognitionModel: recognition_model, detectionModel: DetectionModel.Detection03, returnFaceAttributes: new List<FaceAttributeType> { FaceAttributeType.QualityForRecognition });
+ List<DetectedFace> sufficientQualityFaces = new List<DetectedFace>();
+ foreach (DetectedFace detectedFace in detectedFaces){
+ var faceQualityForRecognition = detectedFace.FaceAttributes.QualityForRecognition;
+ if (faceQualityForRecognition.HasValue && (faceQualityForRecognition.Value >= QualityForRecognition.Medium)){
+ sufficientQualityFaces.Add(detectedFace);
+ }
+ }
+ Console.WriteLine($"{detectedFaces.Count} face(s) with {sufficientQualityFaces.Count} having sufficient quality for recognition detected from image `{Path.GetFileName(url)}`");
+
+ return sufficientQualityFaces.ToList();
+ }
+ //
+
+ /*
+ * IDENTIFY FACES
+ * To identify faces, you need to create and define a person group.
+ * The Identify operation takes one or several face IDs from DetectedFace or PersistedFace and a PersonGroup and returns
+ * a list of Person objects that each face might belong to. Returned Person objects are wrapped as Candidate objects,
+ * which have a prediction confidence value.
+ */
+ //
+ public static async Task IdentifyInPersonGroup(IFaceClient client, string url, string recognitionModel)
+ {
+ Console.WriteLine("========IDENTIFY FACES========");
+ Console.WriteLine();
+
+ // Create a dictionary for all your images, grouping similar ones under the same key.
+ Dictionary<string, string[]> personDictionary =
+ new Dictionary<string, string[]>
+ { { "Family1-Dad", new[] { "Family1-Dad1.jpg", "Family1-Dad2.jpg" } },
+ { "Family1-Mom", new[] { "Family1-Mom1.jpg", "Family1-Mom2.jpg" } },
+ { "Family1-Son", new[] { "Family1-Son1.jpg", "Family1-Son2.jpg" } },
+ { "Family1-Daughter", new[] { "Family1-Daughter1.jpg", "Family1-Daughter2.jpg" } },
+ { "Family2-Lady", new[] { "Family2-Lady1.jpg", "Family2-Lady2.jpg" } },
+ { "Family2-Man", new[] { "Family2-Man1.jpg", "Family2-Man2.jpg" } }
+ };
+ // A group photo that includes some of the persons you seek to identify from your dictionary.
+ string sourceImageFileName = "identification1.jpg";
+ //
+
+ //
+ // Create a person group.
+ Console.WriteLine($"Create a person group ({personGroupId}).");
+ await client.PersonGroup.CreateAsync(personGroupId, personGroupId, recognitionModel: recognitionModel);
+ // The similar faces will be grouped into a single person group person.
+ foreach (var groupedFace in personDictionary.Keys)
+ {
+ // Limit TPS
+ await Task.Delay(250);
+ Person person = await client.PersonGroupPerson.CreateAsync(personGroupId: personGroupId, name: groupedFace);
+ Console.WriteLine($"Create a person group person '{groupedFace}'.");
+
+ // Add face to the person group person.
+ foreach (var similarImage in personDictionary[groupedFace])
+ {
+ Console.WriteLine($"Check whether image is of sufficient quality for recognition");
+ IList detectedFaces1 = await client.Face.DetectWithUrlAsync($"{url}{similarImage}",
+ recognitionModel: recognitionModel,
+ detectionModel: DetectionModel.Detection03,
+ returnFaceAttributes: new List { FaceAttributeType.QualityForRecognition });
+ bool sufficientQuality = true;
+ foreach (var face1 in detectedFaces1)
+ {
+ var faceQualityForRecognition = face1.FaceAttributes.QualityForRecognition;
+ // Only "high" quality images are recommended for person enrollment
+ if (faceQualityForRecognition.HasValue && (faceQualityForRecognition.Value != QualityForRecognition.High)){
+ sufficientQuality = false;
+ break;
+ }
+ }
+
+ if (!sufficientQuality){
+ continue;
+ }
+
+
+ Console.WriteLine($"Add face to the person group person({groupedFace}) from image `{similarImage}`");
+ PersistedFace face = await client.PersonGroupPerson.AddFaceFromUrlAsync(personGroupId, person.PersonId,
+ $"{url}{similarImage}", similarImage);
+ }
+ }
+ //
+
+ //
+ // Start to train the person group.
+ Console.WriteLine();
+ Console.WriteLine($"Train person group {personGroupId}.");
+ await client.PersonGroup.TrainAsync(personGroupId);
+
+ // Wait until the training is completed.
+ while (true)
+ {
+ await Task.Delay(1000);
+ var trainingStatus = await client.PersonGroup.GetTrainingStatusAsync(personGroupId);
+ Console.WriteLine($"Training status: {trainingStatus.Status}.");
+ if (trainingStatus.Status == TrainingStatusType.Succeeded) { break; }
+ }
+ Console.WriteLine();
+ //
+
+ List<Guid> sourceFaceIds = new List<Guid>();
+ // Detect faces from source image url.
+ List<DetectedFace> detectedFaces = await DetectFaceRecognize(client, $"{url}{sourceImageFileName}", recognitionModel);
+
+ // Add detected faceId to sourceFaceIds.
+ foreach (var detectedFace in detectedFaces) { sourceFaceIds.Add(detectedFace.FaceId.Value); }
+
+ //
+ // Identify the faces in a person group.
+ var identifyResults = await client.Face.IdentifyAsync(sourceFaceIds, personGroupId);
+
+ foreach (var identifyResult in identifyResults)
+ {
+ if (identifyResult.Candidates.Count == 0) {
+ Console.WriteLine($"No person is identified for the face in: {sourceImageFileName} - {identifyResult.FaceId}.");
+ continue;
+ }
+ Person person = await client.PersonGroupPerson.GetAsync(personGroupId, identifyResult.Candidates[0].PersonId);
+ Console.WriteLine($"Person '{person.Name}' is identified for the face in: {sourceImageFileName} - {identifyResult.FaceId}," +
+ $" confidence: {identifyResult.Candidates[0].Confidence}.");
+ }
+ //
+ Console.WriteLine();
+
+ //
+ Console.WriteLine("verify");
+ IList<Person> persons = await client.PersonGroupPerson.ListAsync(personGroupId);
+ Guid faceId = detectedFaces[0].FaceId.Value;
+ foreach (var person in persons)
+ {
+ Console.WriteLine($"faceID: {faceId}");
+ try
+ {
+ VerifyResult result = await client.Face.VerifyFaceToPersonAsync(faceId, person.PersonId, personGroupId);
+ if (result.IsIdentical)
+ {
+ Console.WriteLine($"verify face {faceId} is person {person.Name}");
+ }
+ }
+ catch (APIErrorException e)
+ {
+ Console.WriteLine(e.Response);
+ }
+
+ }
+ //
+ }
+ }
+}
+//
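Note on the verify step added in this patch: the loop keys off VerifyResult.IsIdentical alone, but VerifyResult also carries a Confidence score. A minimal sketch that surfaces both, using the same IFaceClient and VerifyFaceToPersonAsync call as the file above; the 0.7 threshold is an illustrative assumption, not an SDK default:

    // Sketch only: report the boolean decision together with the confidence score.
    static async Task VerifyAgainstPersonAsync(IFaceClient client, Guid faceId, Person person, string personGroupId)
    {
        VerifyResult result = await client.Face.VerifyFaceToPersonAsync(faceId, person.PersonId, personGroupId);
        Console.WriteLine($"Face {faceId} vs person '{person.Name}': identical={result.IsIdentical}, confidence={result.Confidence:F2}.");
        // 0.7 is an illustrative cut-off, not an SDK default; tune it for your scenario.
        if (result.IsIdentical && result.Confidence >= 0.7)
        {
            Console.WriteLine($"Treating face {faceId} as verified for '{person.Name}'.");
        }
    }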
From 2d71d19eca76cd370b21fb0fcc87c25856eb29be Mon Sep 17 00:00:00 2001
From: "Zhuoqun(Linda) Li" <55338278+linndaqun@users.noreply.github.com>
Date: Tue, 12 Jul 2022 15:01:56 -0700
Subject: [PATCH 3/4] Update ImageCaptioningQuickstart.cs
---
dotnet/ComputerVision/ImageCaptioningQuickstart.cs | 2 +-
1 file changed, 1 insertion(+), 1 deletion(-)
diff --git a/dotnet/ComputerVision/ImageCaptioningQuickstart.cs b/dotnet/ComputerVision/ImageCaptioningQuickstart.cs
index 18309677..a2c869c9 100644
--- a/dotnet/ComputerVision/ImageCaptioningQuickstart.cs
+++ b/dotnet/ComputerVision/ImageCaptioningQuickstart.cs
@@ -74,7 +74,7 @@ static void Main(string[] args)
AnalyzeImageUrl(client, ANALYZE_URL_IMAGE).Wait();
//
- // AnalyzeImageLocal(client, ANALYZE_LOCAL_IMAGE).Wait();
+ AnalyzeImageLocal(client, ANALYZE_LOCAL_IMAGE).Wait();
Console.WriteLine("----------------------------------------------------------");
Console.WriteLine();
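Note on the line re-enabled in this patch: AnalyzeImageLocal itself is not shown in the hunk. For context, a minimal sketch of what a local-image caption call can look like with the v7 Microsoft.Azure.CognitiveServices.Vision.ComputerVision package; the method body in the repo may differ:

    // Sketch only: caption a local image by streaming it to the service.
    static async Task AnalyzeImageLocal(ComputerVisionClient client, string localImagePath)
    {
        using (Stream imageStream = File.OpenRead(localImagePath))
        {
            // DescribeImageInStreamAsync returns an ImageDescription whose
            // Captions collection holds the generated captions.
            ImageDescription description = await client.DescribeImageInStreamAsync(imageStream);
            foreach (var caption in description.Captions)
            {
                Console.WriteLine($"'{caption.Text}' with confidence {caption.Confidence * 100:F2}%");
            }
        }
    }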
From c37ec26e1b32e0a7cfaaa7ae9b810438efe52628 Mon Sep 17 00:00:00 2001
From: "Zhuoqun(Linda) Li" <55338278+linndaqun@users.noreply.github.com>
Date: Tue, 12 Jul 2022 15:13:28 -0700
Subject: [PATCH 4/4] Update ImageCaptioningQuickstart.py
---
python/ComputerVision/ImageCaptioningQuickstart.py | 4 ++--
1 file changed, 2 insertions(+), 2 deletions(-)
diff --git a/python/ComputerVision/ImageCaptioningQuickstart.py b/python/ComputerVision/ImageCaptioningQuickstart.py
index 33ffdbd4..26251f3f 100644
--- a/python/ComputerVision/ImageCaptioningQuickstart.py
+++ b/python/ComputerVision/ImageCaptioningQuickstart.py
@@ -83,10 +83,10 @@
# Get the captions (descriptions) from the response, with confidence level
print("Description of local image: ")
-if ( not description_result.description):
+if (not description_result.captions):
print("No description detected.")
else:
- for caption in description_result.description.captions:
+ for caption in description_result.captions:
print("'{}' with confidence {:.2f}%".format(caption.text, caption.confidence * 100))
print()
'''
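Note on the fix above: `description_result` here evidently comes from describe_image_in_stream, which returns an ImageDescription; that model exposes `captions` directly and has no nested `description` attribute (that shape belongs to analyze_image's ImageAnalysis result). The C# SDK draws the same distinction, as in this minimal sketch (v7 Microsoft.Azure.CognitiveServices.Vision.ComputerVision, usual usings assumed):

    // Sketch only: the two caption shapes in the C# SDK, mirroring the Python fix.
    static async Task ShowCaptionShapes(ComputerVisionClient client, string imageUrl, Stream localImage)
    {
        // AnalyzeImageAsync returns ImageAnalysis; captions are nested under Description.
        ImageAnalysis analysis = await client.AnalyzeImageAsync(imageUrl,
            visualFeatures: new List<VisualFeatureTypes?> { VisualFeatureTypes.Description });
        foreach (var caption in analysis.Description.Captions)
        {
            Console.WriteLine($"analyze: '{caption.Text}' with confidence {caption.Confidence * 100:F2}%");
        }

        // DescribeImageInStreamAsync returns ImageDescription; captions hang directly off it.
        ImageDescription description = await client.DescribeImageInStreamAsync(localImage);
        foreach (var caption in description.Captions)
        {
            Console.WriteLine($"describe: '{caption.Text}' with confidence {caption.Confidence * 100:F2}%");
        }
    }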