Skip to content

Commit e1015d3

Browse files
committed
Updated to Microblink SDK v 5.11 and Ver-ID SDK v 2.0.1
1 parent 86ebf12 commit e1015d3

File tree

9 files changed

+223
-166
lines changed

9 files changed

+223
-166
lines changed

README.md

Lines changed: 81 additions & 108 deletions
Original file line numberDiff line numberDiff line change
@@ -42,8 +42,7 @@ The project contains a sample application that uses Microblink's [BlinkID SDK](h
4242
}
4343

4444
dependencies {
45-
implementation 'com.appliedrec.verid:rx:[1.10,2.0['
46-
implementation 'com.appliedrec.verid:ui:[1.20,2.0['
45+
implementation 'com.appliedrec.verid:ui2:[2.0,3.0['
4746
}
4847
~~~
4948

@@ -59,7 +58,7 @@ The project contains a sample application that uses Microblink's [BlinkID SDK](h
5958
}
6059

6160
dependencies {
62-
implementation('com.microblink:blinkid:5.0.0@aar') {
61+
implementation('com.microblink:blinkid:5.11.0@aar') {
6362
transitive = true
6463
}
6564
}
@@ -74,13 +73,14 @@ The project contains a sample application that uses Microblink's [BlinkID SDK](h
7473
public class MyActivity extends AppCompatActivity {
7574

7675
private RecognizerBundle recognizerBundle;
76+
private VerID verID; // See Example 2 for instructions on how to load VerID
7777
private static final int REQUEST_CODE_ID_CAPTURE = 0;
7878

7979
/**
8080
* Call this method to start the ID capture
8181
* (for example in response to a button click).
8282
*/
83-
void runIdCapture() {
83+
void startCapture() {
8484
try {
8585
// Set the Microblink licence key
8686
// This example assumes the key is set in your build.gradle file
@@ -91,14 +91,12 @@ public class MyActivity extends AppCompatActivity {
9191
}
9292
// To enable high-res images in intents
9393
MicroblinkSDK.setIntentDataTransferMode(IntentDataTransferMode.PERSISTED_OPTIMISED);
94-
// To detect US or Canadian ID card
95-
UsdlCombinedRecognizer recognizer = new UsdlCombinedRecognizer();
94+
// Create a recognizer to detect an ID card
95+
BlinkIdCombinedRecognizer recognizer = new BlinkIdCombinedRecognizer();
9696
recognizer.setEncodeFullDocumentImage(true);
97-
// For ID cards issued outside USA or Canada uncomment the following 2 lines and delete the 2 lines above
98-
// BlinkIdCombinedRecognizerrecognizer = new BlinkIdCombinedRecognizer();
99-
// recognizer.setEncodeFullDocumentImage(true);
100-
SuccessFrameGrabberRecognizer successFrameGrabberRecognizer = new SuccessFrameGrabberRecognizer(recognizer);
101-
recognizerBundle = new RecognizerBundle(successFrameGrabberRecognizer);
97+
recognizerBundle = new RecognizerBundle(recognizer);
98+
BlinkIdUISettings uiSettings = new BlinkIdUISettings(recognizerBundle);
99+
uiSettings.enableHighResSuccessFrameCapture(true);
102100
startActivityForResult(intent, REQUEST_CODE_ID_CAPTURE);
103101
}
104102

@@ -110,31 +108,35 @@ public class MyActivity extends AppCompatActivity {
110108
protected void onActivityResult(int requestCode, int resultCode, Intent data) {
111109
super.onActivityResult(requestCode, resultCode, data);
112110
if (requestCode == REQUEST_CODE_ID_CAPTURE && resultCode == RESULT_OK && data != null) {
113-
// Load the ID capture result from the data intent
114-
recognizerBundle.loadFromIntent(data);
115-
116-
Recognizer firstRecognizer = recognizerBundle.getRecognizers()[0];
117-
SuccessFrameGrabberRecognizer successFrameGrabberRecognizer = (SuccessFrameGrabberRecognizer) firstRecognizer;
118-
119-
byte[] frontImage;
120-
if (successFrameGrabberRecognizer.getSlaveRecognizer() instanceof UsdlCombinedRecognizer) {
121-
frontImage = ((UsdlCombinedRecognizer) successFrameGrabberRecognizer.getSlaveRecognizer()).getResult().getEncodedFullDocumentImage();
122-
} else if (successFrameGrabberRecognizer.getSlaveRecognizer() instanceof BlinkIdCombinedRecognizer) {
123-
frontImage = ((BlinkIdCombinedRecognizer) successFrameGrabberRecognizer.getSlaveRecognizer()).getResult().getEncodedFrontFullDocumentImage();
124-
} else {
125-
return;
126-
}
127-
// Save the image of the front of the card in your app's files
128-
File imageFile = new File(getFilesDir(), "cardFront.jpg");
129-
FileOutputStream outputStream = new FileOutputStream(imageFile);
130-
ByteArrayInputStream inputStream = new ByteArrayInputStream(frontImage);
131-
int read;
132-
byte[] buffer = new byte[512];
133-
while ((read = inputStream.read(buffer, 0, buffer.length)) > 0) {
134-
outputStream.write(buffer, 0, read);
111+
try {
112+
// Load the ID capture result from the data intent
113+
recognizerBundle.loadFromIntent(data);
114+
115+
Recognizer<?> firstRecognizer = recognizerBundle.getRecognizers()[0];
116+
if (!(firstRecognizer instanceof BlinkIdCombinedRecognizer)) {
117+
return;
118+
}
119+
BlinkIdCombinedRecognizer.Result result = ((BlinkIdCombinedRecognizer)firstRecognizer).getResult();
120+
if (result.getDocumentDataMatch() == DataMatchResult.Failed) {
121+
// The back and front of the card don't match
122+
throw new Exception("Front and back of the ID card don't match");
}
123+
byte[] frontImage = result.getEncodedFrontFullDocumentImage();
124+
if (frontImage.length == 0) {
125+
throw new Exception("Unable to retrieve an image of the ID card");
126+
}
127+
Bitmap frontImageBitmap = BitmapFactory.decodeByteArray(frontImage, 0, frontImage.length);
128+
VerIDImageBitmap image = new VerIDImageBitmap(frontImageBitmap, ExifInterface.ORIENTATION_NORMAL);
129+
Face[] faces = verID.getFaceDetection().detectFacesInImage(image.createFaceDetectionImage(), 1, 0);
130+
if (faces.length > 0) {
131+
RecognizableFace[] recognizableFaces = verID.getFaceRecognition().createRecognizableFacesFromFaces(faces, image);
132+
RecognizableFace face = recognizableFaces[0];
133+
// Face can be used for face recognition
134+
} else {
135+
throw new Exception("Failed to detect a face on the ID card");
136+
}
137+
} catch (Exception e) {
138+
// TODO: Handle exception
135139
}
136-
outputStream.close();
137-
inputStream.close();
138140
}
139141
}
140142
}
@@ -143,97 +145,72 @@ public class MyActivity extends AppCompatActivity {
143145
## Example 2 - Capture live face
144146

145147
~~~java
146-
public class MyActivity extends AppCompatActivity {
147-
148-
private static final int REQUEST_CODE_LIVENESS_DETECTION = 1;
149-
private RxVerID rxVerID;
150-
148+
public class MyActivity extends AppCompatActivity implements VerIDFactoryDelegate, VerIDSessionDelegate {
149+
151150
@Override
152151
protected void onCreate(Bundle savedInstanceState) {
153152
super.onCreate(savedInstanceState);
154153
setContentView(R.layout.activity_my);
155-
rxVerID = new RxVerID.Builder(this).build();
156154
}
157155

158156
/**
159157
* Call this method to start the liveness detection session
160158
* (for example in response to a button click).
161159
*/
162-
void runLivenessDetection() {
163-
rxVerID.getVerID() // Load Ver-ID
164-
.subscribeOn(Schedulers.io())
165-
.observeOn(AndroidSchedulers.mainThread())
166-
.subscribe(
167-
verID -> {
168-
// Create liveness detection settings
169-
LivenessDetectionSessionSettings settings = new LivenessDetectionSessionSettings();
170-
// Construct the liveness detection intent
171-
VerIDLivenessDetectionIntent intent = new VerIDLivenessDetectionIntent(this, verID, settings);
172-
// Start the liveness detection activity
173-
startActivityForResult(intent, REQUEST_CODE_LIVENESS_DETECTION);
174-
},
175-
error -> {
176-
// Ver-ID failed to load
177-
});
160+
void startLivenessDetection() {
161+
// Load Ver-ID
162+
VerIDFactory verIDFactory = new VerIDFactory(this, this);
163+
verIDFactory.createVerID();
178164
}
179-
180-
/**
181-
* Listen for the result of the liveness detection
182-
*/
165+
166+
//region VerIDFactoryDelegate
167+
183168
@Override
184-
protected void onActivityResult(int requestCode, int resultCode, Intent data) {
185-
super.onActivityResult(requestCode, resultCode, data);
186-
if (requestCode == REQUEST_CODE_LIVENESS_DETECTION && resultCode == RESULT_OK) {
187-
rxVerID.getSessionResultFromIntent(data)
188-
.flatMapObservable(result -> rxVerID.getFacesAndImageUrisFromSessionResult(result, Bearing.STRAIGHT))
189-
.filter(detectedFace -> detectedFace.getFace() instanceof RecognizableFace)
190-
.firstOrError()
191-
.subscribeOn(Schedulers.io())
192-
.observeOn(AndroidSchedulers.mainThread())
193-
.subscribe(
194-
detectedFace -> {
195-
// You can now use the face for face recognition:
196-
RecognizableFace recognizableFace = (RecognizableFace) detectedFace.getFace();
197-
},
198-
error -> {
199-
// Failed to get the first face from the result
200-
}
201-
));
169+
public void onVerIDCreated(VerIDFactory factory, VerID verID) {
170+
// Create liveness detection settings
171+
LivenessDetectionSessionSettings settings = new LivenessDetectionSessionSettings();
172+
// Create VerIDSession
173+
VerIDSession session = new VerIDSession(verID, settings);
174+
session.setDelegate(this);
175+
session.start();
176+
}
177+
178+
@Override
179+
public void onVerIDCreationFailed(VerIDFactory factory, Exception error) {
180+
}
181+
182+
//endregion
183+
184+
//region VerIDSessionDelegate
185+
186+
@Override
187+
public void onSessionFinished(IVerIDSession<?> session, VerIDSessionResult result) {
188+
// Check if result contains an error
189+
if (result.getError().isPresent()) {
190+
// Liveness detection failed
191+
return;
202192
}
203-
}
193+
// Grab the first face capture where the user is looking straight at the camera
194+
result.getFirstFaceCapture(Bearing.STRAIGHT).ifPresent(capture -> {
195+
// Face from capture.getFace() can be used for face recognition
196+
});
197+
}
198+
199+
//endregion
204200
}
205201
~~~
206202

207203
## Example 3 - Compare face on ID card with live face
208204

209205
Building on example 1 and 2, you can use the results of the ID capture and liveness detection sessions and compare their faces.
210206

211-
This class takes as input the image file of the front of the card captured in example 1 and the `recognizableFace` captured in example 2.
212-
213207
~~~java
214208
class FaceComparison {
215-
216-
private final RxVerID rxVerID;
217-
218-
/**
219-
* Pass an instance of RxVerID to the constructor
220-
*/
221-
FaceComparison(RxVerID rxVerID) {
222-
this.rxVerID = rxVerID;
223-
}
224-
225-
/**
226-
* This function returns a Single whose value is a pair of Floats.
227-
* The first Float is the comparison score between the two faces.
228-
* The second Float in the pair is the threshold required to consider the two faces as authenticated against each other.
229-
*/
230-
Single<Pair<Float,Float>> compareIDCardToLiveFace(Uri imageFileUri, RecognizableFace face) {
231-
return rxVerID.detectRecognizableFacesInImage(imageFileUri, 1)
232-
.singleOrError()
233-
.flatMap(cardFace -> rxVerID.compareFaceToFaces(cardFace, new RecognizableFace[]{face}))
234-
.flatMap(score -> rxVerID.getVerID().map(verID -> new Pair<>(score, verID.getFaceRecognition().getAuthenticationThreshold())))
235-
.subscribeOn(Schedulers.io())
236-
.observeOn(AndroidSchedulers.mainThread());
209+
210+
// Refer to Example 1 to see how to detect a face in an ID card
211+
// Refer to Example 2 to see how to load VerID and detect a "live" face
212+
static float compareFaces(VerID verID, RecognizableFace face1, RecognizableFace face2) throws Exception {
213+
return verID.getFaceRecognition().compareSubjectFacesToFaces(new IRecognizable[]{face1}, new IRecognizable[]{face2});
237214
}
238215
}
239216
~~~
@@ -244,10 +221,6 @@ class FaceComparison {
244221
- [Github](https://github.com/AppliedRecognition/Ver-ID-UI-Android)
245222
- [Reference documentation](https://appliedrecognition.github.io/Ver-ID-UI-Android/)
246223

247-
### Rx-Ver-ID
248-
- [Github](https://github.com/AppliedRecognition/Rx-Ver-ID-Android)
249-
- [Reference documentation](https://appliedrecognition.github.io/Rx-Ver-ID-Android/)
250-
251224
### BlinkID
252225
- [Github](https://github.com/BlinkID/blinkid-android)
253226
- [Reference documentation](https://blinkid.github.io/blinkid-android/)

app/src/main/java/com/appliedrec/credentials/app/CredentialsApplication.java

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -36,7 +36,7 @@ public void onCreate() {
3636

3737
private void setupMicroblink() {
3838
MicroblinkSDK.setIntentDataTransferMode(IntentDataTransferMode.PERSISTED_OPTIMISED);
39-
MicroblinkSDK.setShowTimeLimitedLicenseWarning(false);
39+
MicroblinkSDK.setShowTrialLicenseWarning(false);
4040
}
4141

4242
//region ActivityLifecycleCallbacks
Lines changed: 26 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,26 @@
1+
package com.appliedrec.credentials.app;
2+
3+
import android.graphics.Bitmap;
4+
5+
import com.appliedrec.verid.core2.RecognizableFace;
6+
import com.appliedrec.verid.core2.serialization.CborAdapter;
7+
8+
@CborAdapter(FaceWithImageCborAdapter.class)
9+
public class FaceWithImage {
10+
11+
private RecognizableFace face;
12+
private Bitmap bitmap;
13+
14+
public FaceWithImage(RecognizableFace face, Bitmap bitmap) {
15+
this.face = face;
16+
this.bitmap = bitmap;
17+
}
18+
19+
public RecognizableFace getFace() {
20+
return face;
21+
}
22+
23+
public Bitmap getBitmap() {
24+
return bitmap;
25+
}
26+
}
Lines changed: 58 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,58 @@
1+
package com.appliedrec.credentials.app;
2+
3+
import android.graphics.Bitmap;
4+
import android.graphics.BitmapFactory;
5+
6+
import com.appliedrec.verid.core2.RecognizableFace;
7+
import com.appliedrec.verid.core2.serialization.Cbor;
8+
import com.appliedrec.verid.core2.serialization.CborCoder;
9+
import com.fasterxml.jackson.core.JsonToken;
10+
import com.fasterxml.jackson.dataformat.cbor.CBORGenerator;
11+
import com.fasterxml.jackson.dataformat.cbor.CBORParser;
12+
13+
import java.io.ByteArrayInputStream;
14+
import java.io.ByteArrayOutputStream;
15+
16+
public class FaceWithImageCborAdapter implements CborCoder<FaceWithImage> {
17+
18+
public static final String FIELD_FACE = "face";
19+
public static final String FIELD_IMAGE = "image";
20+
21+
@Override
22+
public FaceWithImage decodeFromCbor(CBORParser parser) throws Exception {
23+
RecognizableFace face = null;
24+
Bitmap bitmap = null;
25+
if ((parser.hasCurrentToken() && parser.getCurrentToken() == JsonToken.START_OBJECT) || parser.nextToken() == JsonToken.START_OBJECT) {
26+
while (parser.nextToken() == JsonToken.FIELD_NAME) {
27+
String fieldName = parser.getCurrentName();
28+
if (FIELD_FACE.equals(fieldName) && parser.nextToken() != JsonToken.VALUE_NULL) {
29+
byte[] encodedFace = parser.getBinaryValue();
30+
try (ByteArrayInputStream inputStream = new ByteArrayInputStream(encodedFace)) {
31+
face = Cbor.decodeStream(inputStream, RecognizableFace.class);
32+
}
33+
} else if (FIELD_IMAGE.equals(fieldName) && parser.nextToken() != JsonToken.VALUE_NULL) {
34+
byte[] encodedBitmap = parser.getBinaryValue();
35+
bitmap = BitmapFactory.decodeByteArray(encodedBitmap, 0, encodedBitmap.length);
36+
}
37+
}
38+
}
39+
if (face != null && bitmap != null) {
40+
return new FaceWithImage(face, bitmap);
41+
}
42+
throw new Exception("Failed to decode face and image from CBOR");
43+
}
44+
45+
@Override
46+
public void encodeToCbor(FaceWithImage faceWithImage, CBORGenerator cborGenerator) throws Exception {
47+
cborGenerator.writeStartObject();
48+
try (ByteArrayOutputStream outputStream = new ByteArrayOutputStream()) {
49+
Cbor.encodeToStream(faceWithImage.getFace(), outputStream);
50+
cborGenerator.writeBinaryField(FIELD_FACE, outputStream.toByteArray());
51+
}
52+
try (ByteArrayOutputStream outputStream = new ByteArrayOutputStream()) {
53+
faceWithImage.getBitmap().compress(Bitmap.CompressFormat.PNG, 100, outputStream);
54+
cborGenerator.writeBinaryField(FIELD_IMAGE, outputStream.toByteArray());
55+
}
56+
cborGenerator.writeEndObject();
57+
}
58+
}

0 commit comments

Comments
 (0)