Skip to content

Instantly share code, notes, and snippets.

@wiseminds
Created April 3, 2024 15:58
Show Gist options
  • Star 0 You must be signed in to star a gist
  • Fork 0 You must be signed in to fork a gist
Save wiseminds/ccc6eb9b3c1278a119e5f174459ceb1e to your computer and use it in GitHub Desktop.
Facial recognition
import 'dart:io';
import 'package:camera/camera.dart';
import 'package:flutter/cupertino.dart';
import 'package:flutter/foundation.dart';
import 'package:flutter/material.dart';
import 'package:flutter/services.dart';
import 'package:flutter_bloc/flutter_bloc.dart';
import 'package:flutter_screenutil/flutter_screenutil.dart';
import 'package:get_it/get_it.dart';
import 'package:google_mlkit_face_detection/google_mlkit_face_detection.dart';
import 'package:image/image.dart' as imglib;
import 'package:mobile/constants/app_colors.dart';
import 'package:mobile/core/extensions/index.dart';
import 'package:mobile/modules/common/widgets/circle_navbar.dart';
import 'package:mobile/modules/common/widgets/large_button.dart';
import 'package:mobile/modules/common/widgets/toast/slide_toast.dart';
import 'package:mobile/modules/common/widgets/toast/toast_controller.dart';
import 'package:mobile/modules/kyc/cubit/kyc_cubit.dart';
import 'package:mobile/modules/kyc/models/camera_list.dart';
import 'package:mobile/service/upload/image_convert.dart';
import 'package:path_provider/path_provider.dart';
import 'package:uuid/uuid.dart';
import 'upload_popup.dart';
/// Steps of the selfie liveness check. The flow advances
/// none -> blink -> smile -> features, where `features` is the final
/// ears/mouth/eyes-visibility pass before the selfie is captured.
enum LivenessTypes { none, smile, blink, features }
/// Full-screen selfie-capture view that runs ML Kit based liveness checks
/// before submitting the photo to the KYC flow.
class AnalyzeView extends StatefulWidget {
const AnalyzeView({super.key});
@override
AnalyzeViewState createState() => AnalyzeViewState();
}
class AnalyzeViewState extends State<AnalyzeView>
with SingleTickerProviderStateMixin, WidgetsBindingObserver {
// late CameraController cameraController;
// NOTE(review): 'animationConrtroller' is a typo for 'animationController';
// it is referenced from several methods, so renaming is left to a
// dedicated refactor.
late AnimationController animationConrtroller;
// Sweep position and fade for the (currently commented-out) AnimatedLine
// scanning overlay in build().
late Animation<double> offsetAnimation;
late Animation<double> opacityAnimation;
// late List<CameraDescription> _cameras;
// Nullable: stays null when no camera is available or init() fails.
CameraController? cameraController;
// Current step of the liveness state machine; the prompt UI rebuilds off this.
ValueNotifier<LivenessTypes> liveness = ValueNotifier(LivenessTypes.none);
// Latest user-facing guidance/error text; null means "no message yet".
ValueNotifier<String?> errorMessage = ValueNotifier(null);
// @override
// void didChangeAppLifecycleState(AppLifecycleState state) {
// final CameraController? c = cameraController;
// // App state changed before we got the chance to initialize.
// if (c == null || c.value.isInitialized != true) {
// return;
// }
// if (state == AppLifecycleState.inactive) {
// cameraController?.dispose();
// } else if (state == AppLifecycleState.resumed) {
// init();
// }
// }
@override
void initState() {
super.initState();
// Fire-and-forget async camera setup.
init();
// cameraController = CameraController(CameraFacing.front);
animationConrtroller =
AnimationController(duration: const Duration(seconds: 2), vsync: this);
// Sweep from 20% to 80% of the frame, fading in/out via OpacityCurve.
offsetAnimation = Tween(begin: 0.2, end: 0.8).animate(animationConrtroller);
opacityAnimation =
CurvedAnimation(parent: animationConrtroller, curve: OpacityCurve());
animationConrtroller.repeat();
// start();
}
// void _startImageStream() {
// final CameraController? cameraController = _controller;
// if (cameraController != null) {
// cameraController.startImageStream(_processImage);
// }
// }
/// Picks a front-facing camera (falling back to the first available one),
/// initialises the controller at low resolution and starts streaming frames
/// into [_processCameraImage]. Pops the route with an error toast when no
/// camera exists or camera access is denied.
init() async {
  if (kDebugMode) {
    print(GetIt.I<CameraList>().cameras.map((e) => e.lensDirection));
  }
  if (GetIt.I<CameraList>().cameras.isEmpty) {
    Navigator.pop(context);
    showToast('No camera found', type: ToastType.error);
    return;
  }
  cameraController = CameraController(
      GetIt.I<CameraList>().cameras.firstWhereOrNull(
            (element) => element.lensDirection == CameraLensDirection.front,
          ) ??
          GetIt.I<CameraList>().cameras[0],
      ResolutionPreset.low,
      enableAudio: false,
      // ML Kit input: nv21 on Android, yuv420 on iOS (see
      // _inputImageFromCameraImage for the matching validation).
      imageFormatGroup: Platform.isAndroid
          ? ImageFormatGroup.nv21
          : ImageFormatGroup.yuv420);
  cameraController?.initialize().then((_) {
    if (!mounted) {
      return;
    }
    // BUG FIX: the fetched max zoom level was previously ignored and 1.5 was
    // set unconditionally, which throws on devices whose max zoom is < 1.5.
    // Clamp the desired zoom to the device's supported maximum.
    cameraController?.getMaxZoomLevel().then((maxZoom) {
      cameraController?.setZoomLevel(maxZoom < 1.5 ? maxZoom : 1.5);
    });
    cameraController?.startImageStream(_processCameraImage).then((value) {});
    setState(() {});
  }).catchError((Object e) {
    if (kDebugMode) {
      print('object $e');
    }
    if (e is CameraException) {
      switch (e.code) {
        case 'CameraAccessDenied':
        case 'CameraAccessDeniedWithoutPrompt':
        case 'CameraAccessRestricted':
          context.pop();
          showToast('Allow camera access to continue', type: ToastType.error);
          break;
        default:
          // Other camera errors are swallowed; the preview simply stays blank.
          break;
      }
    }
  });
}
// Detector configured with classification (smile / eye-open probabilities),
// landmarks (ears, mouth), contours and tracking ids — all consumed by
// _processCameraImage.
final faceDetector = FaceDetector(
options: FaceDetectorOptions(
enableClassification: true,
enableLandmarks: true,
enableContours: true,
enableTracking: true));
// Shows `message` as an error toast.
showError(message) {
showToast(message, type: ToastType.error);
}
// Consecutive-frame counters driving the liveness state machine.
int openEyeCount = 0;
int closedEyeCount = 0;
int multipleFacesCount = 0;
int smileCount = 0;
// True once a smile was seen on more than 10 frames.
// NOTE(review): 'Smilling' is a typo for 'Smiling'; renaming would also
// touch _processCameraImage.
bool get hasDetectedSmilling => smileCount > 10;
// ML Kit tracking id of the face currently being verified.
int? trackingID;
int nofaceCount = 0;
bool alreadyNotifiedMultipleFaces = false;
// Restarts the liveness flow and clears per-face counters.
resetLiveness() {
liveness.value = LivenessTypes.none;
// multipleFacesCount = 0;
openEyeCount = 0;
closedEyeCount = 0;
smileCount = 0;
trackingID = null;
}
/// Processes one camera frame: runs ML Kit face detection and advances the
/// liveness state machine (none -> blink -> smile -> features). Once all
/// feature checks pass, the frame is converted to a bitmap, saved as a PNG
/// and handed to [KycCubit] as the selfie.
void _processCameraImage(CameraImage image) async {
  final inputImage = _inputImageFromCameraImage(image);
  if (inputImage == null) return;
  final List<Face> faces = await faceDetector.processImage(inputImage);
  // More than one face for over 10 consecutive frames: reset progress and
  // warn once until a clean frame is seen again.
  if (faces.length > 1 && multipleFacesCount > 10) {
    if (alreadyNotifiedMultipleFaces) return;
    resetLiveness();
    alreadyNotifiedMultipleFaces = true;
    errorMessage.value =
        'Multiple faces detected, please make sure there is no one else in your background';
    return;
  }
  if (faces.length > 1) {
    multipleFacesCount++;
    return;
  }
  alreadyNotifiedMultipleFaces = false;
  multipleFacesCount = 0;
  var face = faces.firstItem;
  if (face == null) {
    nofaceCount++;
  } else {
    nofaceCount = 0;
  }
  // Tolerate up to 4 consecutive face-less frames before resetting.
  if (nofaceCount > 4) {
    resetLiveness();
    errorMessage.value =
        'No face detected, please make sure to centre your face in the circle';
    return;
  }
  if (face == null) return;
  // Guard against a different person taking over mid-check.
  if (trackingID != null && trackingID != face.trackingId) {
    resetLiveness();
    showError(
        'Different faces detected, please make sure there is no one else in your background');
    return;
  }
  // BUG FIX: trackingID was never assigned anywhere, so the face-swap guard
  // above could never fire. Remember the first tracked id here.
  trackingID ??= face.trackingId;
  switch (liveness.value) {
    case LivenessTypes.none:
      liveness.value = LivenessTypes.blink;
      return;
    case LivenessTypes.smile:
      if (kDebugMode) {
        print('face.smilingProbability:: ${face.smilingProbability}');
      }
      if ((face.smilingProbability ?? 0) >= .65) smileCount++;
      if (hasDetectedSmilling) {
        // openEyeCount is re-used below for the final eyes-open confirmation.
        openEyeCount = 0;
        liveness.value = LivenessTypes.features;
        return;
      }
      return;
    case LivenessTypes.blink:
      if (kDebugMode) {
        print(
            'face.leftEyeOpenProbability:: ${face.leftEyeOpenProbability} face.rightEyeOpenProbability:: ${face.rightEyeOpenProbability}');
      }
      // NOTE(review): the left/right eye thresholds are asymmetric
      // (.65 vs .45) — presumably tuned deliberately; confirm.
      if ((face.leftEyeOpenProbability ?? 0) > .65 &&
          (face.rightEyeOpenProbability ?? 0) > .45) openEyeCount++;
      if ((face.leftEyeOpenProbability ?? 0) < .65 &&
          (face.rightEyeOpenProbability ?? 0) < .45) closedEyeCount++;
      // A blink means both open and closed eyes were each seen on >10 frames.
      if (closedEyeCount > 10 && openEyeCount > 10) {
        liveness.value = LivenessTypes.smile;
        return;
      }
      return;
    case LivenessTypes.features:
    default:
  }
  // LivenessTypes.features: require both ears and the mouth to be visible so
  // the face is centred and fully in frame.
  var leftEar = face.landmarks[FaceLandmarkType.leftEar];
  var rightEar = face.landmarks[FaceLandmarkType.rightEar];
  var bottomMouth = face.landmarks[FaceLandmarkType.bottomMouth];
  if (leftEar == null) {
    errorMessage.value =
        'Left ear not detected, adjust camera to show left ear';
    return;
  }
  if (rightEar == null) {
    errorMessage.value =
        'Right ear not detected, adjust camera to show right ear';
    return;
  }
  if (bottomMouth == null) {
    errorMessage.value =
        'Mouth not detected, adjust camera to show your mouth';
    return;
  }
  if ((face.leftEyeOpenProbability ?? 0) < .50 ||
      (face.rightEyeOpenProbability ?? 0) < .50) {
    errorMessage.value =
        'Eyes closed, please open your eye to complete verification';
    return;
  }
  if (!((face.leftEyeOpenProbability ?? 0) > .90 &&
      (face.rightEyeOpenProbability ?? 0) > .90)) {
    errorMessage.value =
        'Eyes closed, please open your eye to complete verification';
    return;
  } else {
    openEyeCount++;
  }
  // Require 10 fully-open-eye frames before capturing.
  if (openEyeCount < 10) {
    return;
  }
  errorMessage.value = '';
  cameraController?.pausePreview();
  cameraController?.stopImageStream();
  Directory tempDir = await getApplicationDocumentsDirectory();
  if (inputImage.bytes != null) {
    // Convert the raw camera frame to a bitmap and persist it as a PNG.
    var bitmap = ImageUtils.convertCameraImage(image);
    var file = File("${tempDir.path}/${const Uuid().v4()}.png")
      ..createSync()
      ..writeAsBytesSync(imglib.encodePng(bitmap));
    // ignore: use_build_context_synchronously
    context.read<KycCubit>().onSelfieChanged(file.path);
  }
  resetLiveness();
}
// Device orientation -> clockwise rotation in degrees; used on Android to
// compute the rotation compensation ML Kit needs.
final _orientations = {
DeviceOrientation.portraitUp: 0,
DeviceOrientation.landscapeLeft: 90,
DeviceOrientation.portraitDown: 180,
DeviceOrientation.landscapeRight: 270,
};
/// Converts a [CameraImage] frame into an ML Kit [InputImage], computing the
/// rotation compensation required on Android. Returns null when the camera is
/// gone, the rotation cannot be determined, or the pixel format is not one ML
/// Kit accepts (nv21 on Android, bgra8888 on iOS).
InputImage? _inputImageFromCameraImage(CameraImage image) {
final camera = cameraController?.description;
if (camera == null) return null;
// get image rotation
// it is used in android to convert the InputImage from Dart to Java: https://github.com/flutter-ml/google_ml_kit_flutter/blob/master/packages/google_mlkit_commons/android/src/main/java/com/google_mlkit_commons/InputImageConverter.java
// `rotation` is not used in iOS to convert the InputImage from Dart to Obj-C: https://github.com/flutter-ml/google_ml_kit_flutter/blob/master/packages/google_mlkit_commons/ios/Classes/MLKVisionImage%2BFlutterPlugin.m
// in both platforms `rotation` and `camera.lensDirection` can be used to compensate `x` and `y` coordinates on a canvas: https://github.com/flutter-ml/google_ml_kit_flutter/blob/master/packages/example/lib/vision_detector_views/painters/coordinates_translator.dart
final sensorOrientation = camera.sensorOrientation;
// print(
//     'lensDirection: ${camera.lensDirection}, sensorOrientation: $sensorOrientation, ${_controller?.value.deviceOrientation} ${_controller?.value.lockedCaptureOrientation} ${_controller?.value.isCaptureOrientationLocked}');
InputImageRotation? rotation;
if (Platform.isIOS) {
rotation = InputImageRotationValue.fromRawValue(sensorOrientation);
} else if (Platform.isAndroid) {
var rotationCompensation =
_orientations[cameraController!.value.deviceOrientation];
if (rotationCompensation == null) return null;
if (camera.lensDirection == CameraLensDirection.front) {
// front-facing
rotationCompensation = (sensorOrientation + rotationCompensation) % 360;
} else {
// back-facing
rotationCompensation =
(sensorOrientation - rotationCompensation + 360) % 360;
}
rotation = InputImageRotationValue.fromRawValue(rotationCompensation);
// print('rotationCompensation: $rotationCompensation');
}
if (rotation == null) return null;
// print('final rotation: $rotation');
// get image format
final format = InputImageFormatValue.fromRawValue(image.format.raw);
// validate format depending on platform
// only supported formats:
// * nv21 for Android
// * bgra8888 for iOS
if (format == null ||
(Platform.isAndroid && format != InputImageFormat.nv21) ||
(Platform.isIOS && format != InputImageFormat.bgra8888)) return null;
// since format is constraint to nv21 or bgra8888, both only have one plane
if (image.planes.length != 1) return null;
final plane = image.planes.first;
// compose InputImage using bytes
return InputImage.fromBytes(
bytes: plane.bytes,
metadata: InputImageMetadata(
size: Size(image.width.toDouble(), image.height.toDouble()),
rotation: rotation, // used only in Android
format: format, // used only in iOS
bytesPerRow: plane.bytesPerRow, // used only in iOS
),
);
}
/// Pauses the preview, captures a still image and forwards its file path to
/// [KycCubit]. Capture failures are logged in debug builds only.
takePicture() {
  cameraController?.pausePreview();
  cameraController?.takePicture().then((shot) {
    context.read<KycCubit>().onSelfieChanged(shot.path);
  }).catchError((Object error, StackTrace trace) {
    if (kDebugMode) {
      print('error $error, $trace');
    }
  });
}
@override
// Renders the camera preview (or the captured selfie), the liveness prompts,
// and the submit/retake controls. Layout flips between "capturing" and
// "review" modes based on whether state.selfie is set.
Widget build(BuildContext context) {
var cubit = context.read<KycCubit>();
// Dark background while capturing; normal background once a selfie exists.
var background = context.watch<KycCubit>().state.selfie == null
? AppColors.backgroundDark
: context.backgroundColor;
return Scaffold(
appBar: CircleNavbar(
background: background,
context: context,
textColor: Colors.white,
arrowColor: Colors.white,
title:
(context.watch<KycCubit>().state.selfie == null) ? 'Selfie' : ''),
backgroundColor: background,
// Bottom bar: submit / retake controls, shown only after a selfie exists.
bottomNavigationBar: SafeArea(
child: SizedBox(
height: 135,
child: Padding(
padding: const EdgeInsets.symmetric(horizontal: 16.0),
child: context.watch<KycCubit>().state.selfie != null
? Column(
children: [
const Spacer(),
SizedBox(
width: 600,
child: LargeButton(
isLoading: context
.read<KycCubit>()
.state
.isSubmitting,
// backgroundColor: Colors.white,
// textColor: BrandColors.blue,
text: 'Submit selfie',
// Without an upload target, ask for one via UploadPopup;
// otherwise submit straight to kycOne.
onPressed: () {
if (context.read<KycCubit>().state.upload ==
null) {
showDialog(
context: context,
builder: (c) => UploadPopup(
onUpload: (file) => context
.read<KycCubit>()
.onUploadChanged(file),
path: context
.read<KycCubit>()
.state
.selfie!,
));
} else {
context.read<KycCubit>().kycOne(context
.read<KycCubit>()
.state
.upload!);
}
})),
14.0.ht,
// "Take a new photo": clears the selfie and restarts streaming.
Material(
shape: RoundedRectangleBorder(
borderRadius: 100.0.toBorderRadius,
side: BorderSide(color: context.textColor)),
child: CupertinoButton(
onPressed: () {
cubit.resetSelfie();
cameraController?.resumePreview();
cameraController
?.startImageStream(_processCameraImage);
},
child: Center(
child: Text('Take a new photo',
style: context.bodyMedium?.copyWith(
fontWeight: FontWeight.w500)),
)),
),
const SizedBox(height: 20)
],
)
: const SizedBox()
// : Center(
//     child: Material(
//       type: MaterialType.transparency,
//       shape: const CircleBorder(
//           side: BorderSide(width: 1.6, color: Colors.white)),
//       child: CupertinoButton(
//         minSize: 1,
//         padding: const EdgeInsets.all(10.0),
//         onPressed: takePicture,
//         borderRadius: BorderRadius.circular(100),
//         child: const Material(
//           shape: CircleBorder(),
//           child: SizedBox.square(dimension: 44),
//         ),
//       ),
//     ),
//   ),
)),
),
// Body: liveness prompts above the camera preview / captured image.
body: SafeArea(
child: Padding(
padding: const EdgeInsets.symmetric(horizontal: 20.0),
child: Column(
crossAxisAlignment: CrossAxisAlignment.center,
children: [
if (context.watch<KycCubit>().state.selfie == null) ...[
// Prompt text switches with the current liveness step.
ValueListenableBuilder(
valueListenable: liveness,
builder: (c, s, child) {
return switch (s) {
LivenessTypes.none ||
LivenessTypes.features =>
ValueListenableBuilder(
valueListenable: errorMessage,
builder: (c, error, child) => Padding(
padding: const EdgeInsets.all(30.0),
child: Text(
error ??
'Center your face in the frame, make sure both ears are visible in the frame',
textAlign: TextAlign.center,
style: context.bodyMedium?.copyWith(
fontSize: 16,
color: Colors.red,
fontWeight: FontWeight.w500)),
)),
LivenessTypes.blink || LivenessTypes.smile => Padding(
padding: const EdgeInsets.all(20.0),
child: Material(
borderRadius: 10.0.toBorderRadius,
color: AppColors.alertSuccess,
child: Padding(
padding: const EdgeInsets.all(8.0),
child: Text(
s == LivenessTypes.blink
? 'Blink slowly'
: 'Smile',
style: context.bodyMedium?.copyWith(
fontSize: 30,
color: Colors.white,
fontWeight: FontWeight.w500),
),
),
),
),
};
})
] else ...[
// Review mode header: progress pill + quality-check copy.
const SizedBox(height: 60),
Material(
color: context.isDark
? const Color.fromARGB(255, 179, 182, 97).withOpacity(.6)
: const Color(0xff0047FF).withOpacity(.1),
borderRadius: 8.0.toBorderRadius,
child: Padding(
padding: const EdgeInsets.only(right: 19.0),
child: Material(
color: context.textColor,
borderRadius: 8.0.toBorderRadius,
child: const SizedBox(
width: 64,
height: 8.0,
),
),
),
),
16.0.ht,
Text(
'Check quality',
style: context.bodyMedium?.copyWith(
fontSize: 24, height: 1.3, fontWeight: FontWeight.w600),
),
4.0.ht,
Text(
'Make sure your face is not blurred \nor out of frame before continuing',
textAlign: TextAlign.center,
style: context.bodyMedium?.copyWith()),
72.0.h.ht,
],
// Camera preview (or the captured selfie) in a rounded frame; a plain
// white box is shown until the controller finishes initialising.
Center(
child: Padding(
padding: const EdgeInsets.symmetric(horizontal: 60.0),
child: SizedBox(
width: 239,
height: 290,
child: Material(
clipBehavior: Clip.antiAlias,
shape: RoundedRectangleBorder(
borderRadius:
(context.watch<KycCubit>().state.selfie != null
? 40.0
: 100.0)
.toBorderRadius,
// side: BorderSide(
//     width: .30, color: context.primaryColor
//     )
),
child: (cameraController?.value.isInitialized != true)
? Container(color: Colors.white)
: context.watch<KycCubit>().state.selfie != null
? Image.file(
File(context
.watch<KycCubit>()
.state
.selfie!),
fit: BoxFit.cover,
)
: CameraPreview(cameraController!),
)),
),
),
// AnimatedLine(
//   offsetAnimation: offsetAnimation,
//   opacityAnimation: opacityAnimation,
// ),
// Caption shown only while capturing.
if (context.watch<KycCubit>().state.selfie == null) ...[
SizedBox(height: 60.h),
Text(
'Center your face',
style: TextStyle(
color: Colors.white,
fontSize: 24.sp,
height: 1.3,
fontWeight: FontWeight.w700),
),
4.0.ht,
Text(
'Align your face to the center of the selfie \narea and then take a photo',
textAlign: TextAlign.center,
style: context.bodyMedium?.copyWith(color: Colors.white)),
]
// const Positioned(
//     left: 24.0,
//     top: 32.0,
//     right: 20,
//     bottom: 20,
//     child: Material(color: Colors.red)),
// Container(
//   alignment: Alignment.bottomCenter,
//   margin: const EdgeInsets.only(bottom: 80.0),
//   child: IconButton(
//     icon: ValueListenableBuilder(
//       valueListenable: cameraController.torchState,
//       builder: (context, state, child) {
//         final color =
//             state == TorchState.off ? Colors.grey : Colors.white;
//         return Icon(Icons.bolt, color: color);
//       },
//     ),
//     iconSize: 32.0,
//     onPressed: () => cameraController.torch(),
//   ),
// ),
],
),
),
),
);
}
/// Tears down camera and animation resources.
///
/// BUG FIX: dispose() was declared `async` and awaited stopImageStream(),
/// which postponed animation/controller disposal and super.dispose() until
/// after the first await — violating the State lifecycle contract (dispose
/// must complete synchronously). It is now synchronous; stream-stop errors
/// are handled via catchError instead of await.
@override
void dispose() {
  try {
    cameraController?.stopImageStream().catchError((Object e) {
      if (kDebugMode) {
        print(e);
      }
    });
  } catch (e) {
    // stopImageStream may also throw synchronously (e.g. when no stream is
    // running) — keep the original log-and-continue behaviour.
    if (kDebugMode) {
      print(e);
    }
  }
  animationConrtroller.dispose();
  cameraController?.dispose();
  super.dispose();
}
// void start() async {
// await cameraController.startAsync();
// try {
// final face = await cameraController.args;
// // display(face.value?.);
// } catch (e) {
// print(e);
// }
// }
// void display(Barcode barcode) {
// Navigator.of(context).popAndPushNamed('display', arguments: barcode);
// }
}
/// Curve that fades in over the first 10% of the animation, stays fully
/// opaque through the middle, and fades out over the final 10%.
class OpacityCurve extends Curve {
  @override
  double transform(double t) {
    if (t < 0.1) return t * 10;
    if (t > 0.9) return (1.0 - t) * 10;
    return 1.0;
  }
}
Sign up for free to join this conversation on GitHub. Already have an account? Sign in to comment