
Face detection from phone camera

In this tutorial, you'll learn how to use the Processing Block API objects from the Face SDK to detect faces and estimate their age and gender.

Detected faces will be highlighted with a green bounding box (bbox). Information about the person's age and gender will appear next to the box.

To follow this tutorial, you will need a phone running Android 7.0 or higher, as well as Android Studio.

The project source code is available in the Face SDK distribution at examples/tutorials/kotlin/KotlinTutorial.

Project preparation

  1. Launch Android Studio and create a new project via File > New > Project. Select the Empty Views Activity template and click Next.
  2. Specify the name and location of your project, set the minimum SDK to API 24 or higher, and click Finish.

Working with the camera

Obtaining permissions to use the camera

  1. Add the camera feature and permission to AndroidManifest.xml.

    <manifest xmlns:android="http://schemas.android.com/apk/res/android"
        xmlns:tools="http://schemas.android.com/tools">
        <!-- ................................................ -->
        <uses-feature
            android:name="android.hardware.camera"
            android:required="false" />

        <uses-permission android:name="android.permission.CAMERA" />
  2. In the MainActivity class in MainActivity.kt, add a getPermission method and call it from the onCreate method (a minimal onCreate sketch follows this list).

    private fun getPermission() {
        if (ContextCompat.checkSelfPermission(this, android.Manifest.permission.CAMERA) !=
            PackageManager.PERMISSION_GRANTED) {
            requestPermissions(arrayOf(android.Manifest.permission.CAMERA), 0)
        }
    }
  3. Override the onRequestPermissionsResult method and call the getPermission method in it.

    override fun onRequestPermissionsResult(
        requestCode: Int,
        permissions: Array<out String>,
        grantResults: IntArray
    ) {
        super.onRequestPermissionsResult(requestCode, permissions, grantResults)
        getPermission()
    }
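
At this point, a minimal onCreate looks roughly like this (a sketch assuming the template-generated setContentView call; later steps will add more initialization here):

    override fun onCreate(savedInstanceState: Bundle?) {
        super.onCreate(savedInstanceState)
        setContentView(R.layout.activity_main)

        // ask for camera access before any camera work starts
        getPermission()
    }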

Receiving frames from the camera

  1. In /app/src/main/res/layout/activity_main.xml, remove the default TextView and add a TextureView to display the camera image.

    <TextureView
        android:layout_height="match_parent"
        android:layout_width="match_parent"
        android:id="@+id/textureView" />
  2. Add the variables cameraManager, cameraDevice, cameraCaptureSession, handlerThread, handler, textureView, previewSize to the MainActivity class.

    class MainActivity : AppCompatActivity() {
        //........................................
        private lateinit var cameraManager: CameraManager
        lateinit var cameraDevice: CameraDevice
        lateinit var cameraCaptureSession: CameraCaptureSession

        private lateinit var handlerThread: HandlerThread
        lateinit var handler: Handler

        lateinit var textureView: TextureView
        val previewSize = Size(1280, 720)
  3. Add the getFrontalCameraId and openCamera methods.

    private fun getFrontalCameraId(): String {
        // pick the first camera whose lens faces the user
        return cameraManager.cameraIdList.first {
            cameraManager
                .getCameraCharacteristics(it)
                .get(CameraCharacteristics.LENS_FACING) == CameraCharacteristics.LENS_FACING_FRONT
        }
    }

    @SuppressLint("MissingPermission")
    fun openCamera() {
        cameraManager.openCamera(getFrontalCameraId(), object : CameraDevice.StateCallback() {
            override fun onOpened(p0: CameraDevice) {
                cameraDevice = p0
                val surfaceTexture = textureView.surfaceTexture
                surfaceTexture?.setDefaultBufferSize(previewSize.width, previewSize.height)

                // render the repeating preview into the TextureView's surface
                val capReq = cameraDevice.createCaptureRequest(CameraDevice.TEMPLATE_PREVIEW)
                val surface = Surface(surfaceTexture)
                capReq.addTarget(surface)

                cameraDevice.createCaptureSession(listOf(surface), object : CameraCaptureSession.StateCallback() {
                    override fun onConfigured(p0: CameraCaptureSession) {
                        cameraCaptureSession = p0
                        cameraCaptureSession.setRepeatingRequest(capReq.build(), null, null)
                    }

                    override fun onConfigureFailed(p0: CameraCaptureSession) {}
                }, handler)
            }

            override fun onDisconnected(p0: CameraDevice) {}

            override fun onError(p0: CameraDevice, p1: Int) {}
        }, handler)
    }
  4. Initialize all variables in onCreate after calling getPermission.

    textureView = findViewById(R.id.textureView)
    cameraManager = getSystemService(Context.CAMERA_SERVICE) as CameraManager

    handlerThread = HandlerThread("videoThread")
    handlerThread.start()
    handler = Handler(handlerThread.looper)

    textureView.surfaceTextureListener = object : TextureView.SurfaceTextureListener {
        override fun onSurfaceTextureAvailable(p0: SurfaceTexture, p1: Int, p2: Int) { openCamera() }

        override fun onSurfaceTextureUpdated(p0: SurfaceTexture) { }

        override fun onSurfaceTextureDestroyed(p0: SurfaceTexture): Boolean { return false }

        override fun onSurfaceTextureSizeChanged(p0: SurfaceTexture, p1: Int, p2: Int) { }
    }
  5. Lock MainActivity to portrait orientation. To do this, open AndroidManifest.xml and add android:screenOrientation="portrait" to the <activity ...> element.

    <activity
        android:name=".MainActivity"
        android:exported="true"
        android:screenOrientation="portrait">

Connecting Face SDK to the project

Setting up build.gradle.kts

  1. Add the Face SDK native libraries to your project. To do this, add the following sourceSets block inside the android block of build.gradle.kts.

    sourceSets {
        this.getByName("main") {
            jniLibs {
                srcDir("/path/to/face_sdk/lib")
            }
        }
    }
  2. Add Gradle tasks that copy the Face SDK configuration files and models into your project.

    task("computeAssetsHash") {
    doLast {
    mkdir("$projectDir/src/main/assets/")
    File("$projectDir/src/main/assets/", "assets-hash.txt").writeText(
    "Buildtime ${LocalDateTime.now().format(DateTimeFormatter.ofPattern("YYYY:MM:dd:HH:mm:ss"))}"
    )
    }
    }

    task<Copy>("copyFiles") {
    description = "Copy files"
    from("/path/to/face_sdk/") {
    include(
    "conf/**",
    "share/processing_block/age_estimator/light/2.enc",
    "share/processing_block/gender_estimator/light/2.enc",
    "share/processing_block/face_fitter/fda/1.enc",
    "share/processing_block/face_detector/blf_front/1.enc",
    "license/**"
    )
    }
    into("$projectDir/src/main/assets/")
    }
  3. Import dependsOn and make the preBuild task depend on the computeAssetsHash and copyFiles tasks.

    import com.android.build.gradle.internal.tasks.factory.dependsOn
    //......................................................................
    project.tasks.preBuild.dependsOn("computeAssetsHash")
    project.tasks.preBuild.dependsOn("copyFiles")
  4. In dependencies, include facerec.jar (a consolidated build.gradle.kts sketch follows this list).

    implementation(files("/path/to/face_sdk/lib/facerec.jar"))
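
For orientation, this is roughly how the edited build.gradle.kts fits together (plugin blocks and your project's other configuration are elided; the /path/to/face_sdk placeholders are the same ones used above):

    import com.android.build.gradle.internal.tasks.factory.dependsOn
    import java.time.LocalDateTime
    import java.time.format.DateTimeFormatter

    android {
        // ...your existing android configuration...
        sourceSets {
            this.getByName("main") {
                jniLibs {
                    srcDir("/path/to/face_sdk/lib")
                }
            }
        }
    }

    // ...computeAssetsHash and copyFiles tasks from step 2...

    project.tasks.preBuild.dependsOn("computeAssetsHash")
    project.tasks.preBuild.dependsOn("copyFiles")

    dependencies {
        implementation(files("/path/to/face_sdk/lib/facerec.jar"))
        // ...your project's other dependencies...
    }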

Getting Face SDK assets inside the application

Inside the APK, assets are stored in compressed form. To make the Face SDK assets usable, we will add a new entry point to the application that unpacks them to a persistent location.

  1. Create a new file UnpackAssetsActivity.kt with an UnpackAssetsActivity class.

    // Activity to unpack all assets
    class UnpackAssetsActivity : Activity() {
        public override fun onCreate(savedInstanceState: Bundle?) {
            super.onCreate(savedInstanceState)
            try {
                // read the first line from assets-hash.txt
                val newHash =
                    BufferedReader(InputStreamReader(assets.open("assets-hash.txt"))).readLine()

                // and compare it with what we have already
                val sharedPreferences = getSharedPreferences("fe9733f0bfb7", 0)

                val prevHash = sharedPreferences.getString("assets-hash", null)

                // unpack everything again if something has changed
                if (prevHash == null || prevHash != newHash) {
                    val buffer = ByteArray(10240)

                    val persistentDir = applicationInfo.dataDir

                    val queue: Queue<String?> = ArrayDeque<String?>()
                    queue.add("conf")
                    queue.add("share")
                    queue.add("license")

                    while (!queue.isEmpty()) {
                        val path = queue.element()
                        queue.remove()

                        val list = assets.list(path!!)

                        // an empty listing means this path is a file, so copy it
                        if (list!!.isEmpty()) {
                            val fileStream = assets.open(path)

                            val fullPath = "$persistentDir/fsdk/$path"

                            File(fullPath).parentFile?.mkdirs()

                            val outFile = FileOutputStream(fullPath)

                            while (true) {
                                val read = fileStream.read(buffer)

                                if (read <= 0) break

                                outFile.write(buffer, 0, read)
                            }

                            fileStream.close()
                            outFile.close()
                        } else {
                            // otherwise it is a directory: enqueue its children
                            for (p in list) queue.add("$path/$p")
                        }
                    }

                    val editor = sharedPreferences.edit()
                    editor.putString("assets-hash", newHash)
                    while (!editor.commit());
                }

                val intent = Intent(applicationContext, MainActivity::class.java)
                startActivity(intent)
                finish()
            } catch (e: Exception) {
                Log.e("UnpackAssetsActivity", e.message!!)
                e.printStackTrace()
                finishAffinity()
            }
        }
    }
  2. In AndroidManifest.xml, add an <activity> entry for UnpackAssetsActivity and make it the first activity on startup (see the manifest sketch after this list).

    <activity
        android:name="UnpackAssetsActivity"
        android:directBootAware="true"
        android:exported="true">

        <intent-filter>
            <action android:name="android.intent.action.MAIN" />
            <category android:name="android.intent.category.LAUNCHER" />
        </intent-filter>
    </activity>
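
Note that the Empty Views Activity template normally places the MAIN/LAUNCHER intent filter on MainActivity; it should live only on UnpackAssetsActivity so that unpacking runs first. As a sketch, the two activity entries end up looking roughly like this:

    <!-- UnpackAssetsActivity launches first and then starts MainActivity -->
    <activity
        android:name="UnpackAssetsActivity"
        android:directBootAware="true"
        android:exported="true">
        <intent-filter>
            <action android:name="android.intent.action.MAIN" />
            <category android:name="android.intent.category.LAUNCHER" />
        </intent-filter>
    </activity>

    <activity
        android:name=".MainActivity"
        android:exported="true"
        android:screenOrientation="portrait" />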

Using Face SDK modules

  1. Add the variables service, faceDetector, faceFitter, ageEstimator, genderEstimator to the MainActivity class and initialize them in the onCreate method.

    private lateinit var service: FacerecService
    private lateinit var faceDetector: ProcessingBlock
    private lateinit var faceFitter: ProcessingBlock
    private lateinit var ageEstimator: ProcessingBlock
    private lateinit var genderEstimator: ProcessingBlock
    //...................................................

    service = FacerecService.createService(
    "libfacerec.so",
    applicationInfo.dataDir + "/fsdk/conf/facerec",
    applicationInfo.dataDir + "/fsdk/license"
    )

    val configDetector = service.createContext()
    configDetector["unit_type"].string = "FACE_DETECTOR"
    configDetector["modification"].string = "blf_front"

    faceDetector = service.createProcessingBlock(configDetector)

    val configFitter = service.createContext()
    configFitter["unit_type"].string = "FACE_FITTER"
    configFitter["modification"].string = "fda"

    faceFitter = service.createProcessingBlock(configFitter)

    val configAgeEstimator = service.createContext()
    configAgeEstimator["unit_type"].string = "AGE_ESTIMATOR"
    configAgeEstimator["modification"].string = "light"
    configAgeEstimator["version"].long = 2

    ageEstimator = service.createProcessingBlock(configAgeEstimator)

    val configGenderEstimator = service.createContext()
    configGenderEstimator["unit_type"].string = "GENDER_ESTIMATOR"
    configGenderEstimator["modification"].string = "light"
    configGenderEstimator["version"].long = 2

    genderEstimator = service.createProcessingBlock(configGenderEstimator)
  2. To display the received detections, we need paint, bitmap, and imageView variables in the MainActivity class, as well as an ImageView in /app/src/main/res/layout/activity_main.xml.

    ```kotlin
    val paint = Paint()
    lateinit var bitmap: Bitmap
    lateinit var imageView: ImageView
    ```

    ```xml
    <ImageView
        android:layout_width="match_parent"
        android:layout_height="match_parent"
        android:background="#000"
        android:id="@+id/imageView" />
    ```
  3. Initialize imageView in the onCreate method.

    imageView = findViewById(R.id.imageView)
  4. In textureView.surfaceTextureListener, flesh out the onSurfaceTextureUpdated method: build an input Context container with the raw RGB image, call the Face SDK modules, and display the result.

    override fun onSurfaceTextureUpdated(p0: SurfaceTexture) {
        bitmap = textureView.bitmap!!

        val width = bitmap.width
        val height = bitmap.height

        val pixels = IntArray(width * height)
        val imageData = ByteArray(width * height * 3)

        bitmap.getPixels(pixels, 0, width, 0, 0, width, height)

        // unpack ARGB pixels into a tightly packed RGB byte array
        for (i in pixels.indices) {
            imageData[i * 3 + 0] = (pixels[i] shr 16).toByte()
            imageData[i * 3 + 1] = (pixels[i] shr 8).toByte()
            imageData[i * 3 + 2] = (pixels[i] shr 0).toByte()
        }

        val ioData = service.createContextFromFrame(imageData, width, height,
            com.vdt.face_recognition.sdk.Context.Format.FORMAT_RGB, 0)
        faceDetector.process(ioData)
        faceFitter.process(ioData)
        ageEstimator.process(ioData)
        genderEstimator.process(ioData)

        val objects = ioData["objects"]

        val mutable = bitmap.copy(Bitmap.Config.ARGB_8888, true)

        val canvas = Canvas(mutable)

        paint.textSize = height / 50f
        paint.strokeWidth = width / 100f
        val indentionX = (0.01 * width).toFloat()
        val indentionY = paint.textSize
        for (i in 0..< objects.size()) {
            val obj = objects[i]
            // bbox coordinates are normalized to [0, 1]
            val bbox = obj["bbox"]
            val x1 = (bbox[0].double).toFloat() * width
            val y1 = (bbox[1].double).toFloat() * height
            val x2 = (bbox[2].double).toFloat() * width
            val y2 = (bbox[3].double).toFloat() * height

            paint.color = Color.GREEN
            paint.style = Paint.Style.STROKE
            canvas.drawRect(RectF(x1, y1, x2, y2), paint)
            paint.style = Paint.Style.FILL
            canvas.drawText("age: " + obj["age"].long,
                x2 + indentionX, y1 + indentionY, paint)
            canvas.drawText("gender: " + obj["gender"].string,
                x2 + indentionX, y1 + 2 * indentionY, paint)
        }

        imageView.setImageBitmap(mutable)
    }
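
The tutorial stops here, but in a real application you will likely want to release the camera and the background thread when the activity goes away. A minimal sketch, assuming the lateinit properties declared earlier (onDestroy is the standard Activity teardown callback):

    override fun onDestroy() {
        super.onDestroy()
        // stop the repeating preview request and release camera resources
        if (::cameraCaptureSession.isInitialized) cameraCaptureSession.close()
        if (::cameraDevice.isInitialized) cameraDevice.close()
        // shut down the thread that served the camera callbacks
        handlerThread.quitSafely()
    }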