
Preface

In the earlier posts Building a Fully Custom Whole-Home Smart System (Part 1: Monitoring, Server Side) and Embedded Face Recognition Module Development from Scratch, the Full Pipeline (Raspberry Pi 5 as an Example), we used InspireFace for face detection and recognition, but for the sake of portability we ran the CPU compute models. To work well on RK chips such as the RV1109 or RV1126, we need to make more use of the NPU. This post also gives a general solution for liveness detection and the tracking box, saving as many resources as possible without hurting the experience.

The following is demonstrated on the RV1109; other chips are handled similarly, so they are not covered one by one.

Implementation

Compiling InspireFace

I have already built everything here; just pull it with docker pull astercass/arm-gcc-8.3.0-dev-toolchain:1.1.0 and use:

services:
  ccu:
    image: astercass/arm-gcc-8.3.0-dev-toolchain:1.1.0
    hostname: 'ccua'
    container_name: 'ccua'
    working_dir: /data/app
    tty: true
    stdin_open: true
    volumes:
      - /yourDir/app:/data/app

Then run docker compose up -d, enter the container, and you can find the relevant libraries and build outputs under /data/src/InspireFace.

If you need to build your own environment, you must keep GCC and the related library versions compatible. For example, following the official project's logic and its reference environment, run docker-compose up build-cross-rv1109rv1126-armhf; from docker-compose.yml we can see the corresponding service definition:

    build:
      context: .
      dockerfile: docker/Dockerfile.arm-linux-gnueabihf # Use the arm-linux-gnueabihf tool chain
    environment:
      - VERSION=${VERSION}
    working_dir: /workspace
    volumes:
      - .:/workspace # Mount the project root directory to the container
    command: bash command/build_cross_rv1109rv1126_armhf.sh

The build environment is docker/Dockerfile.arm-linux-gnueabihf:

# Use Ubuntu 18.04 as the base image
FROM ubuntu:18.04

# Update the package list and install basic development tools
RUN apt-get update && \
    apt-get install -y --no-install-recommends \
        build-essential \
        software-properties-common \
        wget \
        curl \
        git \
        vim

# Install CMake 3.20.6
RUN cd /opt && \
    wget https://github.com/Kitware/CMake/releases/download/v3.20.6/cmake-3.20.6-linux-x86_64.tar.gz && \
    tar -zxvf cmake-3.20.6-linux-x86_64.tar.gz && \
    ln -s /opt/cmake-3.20.6-linux-x86_64/bin/* /usr/local/bin/ && \
    rm cmake-3.20.6-linux-x86_64.tar.gz

# Set the URL and installation path for the Linaro toolchain
ARG LINARO_URL="https://releases.linaro.org/components/toolchain/binaries/6.3-2017.05/arm-linux-gnueabihf/gcc-linaro-6.3.1-2017.05-x86_64_arm-linux-gnueabihf.tar.xz"
ARG TOOLCHAIN_PATH="/opt/linaro-toolchain"

# Create the installation path, download, and extract the Linaro toolchain
RUN mkdir -p ${TOOLCHAIN_PATH} && \
    wget -qO- ${LINARO_URL} | tar -xJ -C ${TOOLCHAIN_PATH} --strip-components=1

# Set environment variables to point to the toolchain directory
ENV ARM_CROSS_COMPILE_TOOLCHAIN=${TOOLCHAIN_PATH}
ENV PATH="${TOOLCHAIN_PATH}/bin:${PATH}"

# Clean temporary files to reduce image size
RUN apt-get clean && \
    rm -rf /var/lib/apt/lists/*

# Set the working directory
WORKDIR /workspace

# Default to running Bash
CMD ["/bin/bash"]

Make sure your own application's build environment is compatible with this one. For example, since gcc-linaro-6.3.1-2017.05-x86_64_arm-linux-gnueabihf.tar.xz is used here, your GCC version must not be lower than GCC 6.3.1; the compiler itself runs on an x86_64 host and emits ARM hard-float executables, i.e. the target is the 32-bit ARM hard-float ABI; and you also need to set ARM_CROSS_COMPILE_TOOLCHAIN, and so on.
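
If you want to double-check from the target side that a binary really matches this ABI, a tiny probe like the following can be cross-compiled and run on the device. This is only a sketch; the macros used are standard GCC/glibc predefines:

// toolchain_check.cpp - sketch: build with arm-linux-gnueabihf-g++, run on the device
#include <cstdio>
#include <features.h> // exposes __GLIBC__ / __GLIBC_MINOR__ on glibc systems

int main() {
    std::printf("compiler: %s\n", __VERSION__); // GCC version that built this binary
#ifdef __ARM_PCS_VFP
    // Defined by GCC when compiling for the hard-float ABI (-mfloat-abi=hard)
    std::printf("float ABI: hard (gnueabihf)\n");
#else
    std::printf("float ABI: soft/softfp - incompatible with gnueabihf libraries\n");
#endif
#ifdef __GLIBC__
    // The device's glibc must be no older than what the toolchain links against
    std::printf("glibc: %d.%d\n", __GLIBC__, __GLIBC_MINOR__);
#endif
    return 0;
}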

Once the environment checks out, run bash command/build_cross_rv1109rv1126_armhf.sh in the InspireFace directory of your container (or of your build machine, although for the sake of isolation it is still best to maintain the environment in a container) to finish the build. The resulting libInspireFace.so and librknn_api.so are needed when building and running the application; copy them to the appropriate locations.

Building the Application

Our application uses the NPU through InspireFace, so both the target device and the cross-compilation machine must have the RK environment and runtime libraries. InspireFace has already wrapped the calls, so we do not have to work against Rockchip's interface documentation ourselves. A sample CMakeLists.txt:

cmake_minimum_required(VERSION 3.10)  
  
#...
  
if (WIN32)  
	#...
elseif (UNIX)  
	#...
    set(RK_ROOT /data/src/rockchip/_install)  
    set(INSPIRE_ROOT /data/src/InspireFace/InspireFace-master/build/inspireface-linux-armv7-rv1109rv1126-armhf/InspireFace)  
    set(CMAKE_PREFIX_PATH  
            "${INSPIRE_ROOT}"  
			#...
    )  
else ()  
    message(FATAL_ERROR "Unsupported operating system")  
endif ()  
  
	#...
  
  
if (WIN32)  
	#...
elseif (UNIX)  
    include_directories(  
		#...
    )  
    link_directories(  
            ${INSPIRE_ROOT}/lib  
            ${RK_ROOT}/lib  
    )  
else ()  
    message(FATAL_ERROR "Unsupported operating system")  
endif ()  
  
  
	#...
  
  
if (WIN32)  
	#...
elseif (UNIX)  
    target_link_libraries(Demo PRIVATE  
			#...
            # InspireFace  
            InspireFace  
            # Rock chip  
            -lrkaiq -leasymedia -ldrm -lrockchip_mpp -lavformat -lavcodec -lswresample -lavutil -lasound  
            -lRKAP_3A -lRKAP_ANR -lRKAP_Common -lv4l2 -lrga -lmd_share -lod_share -lrkaiq -lz -lv4lconvert -ljpeg  
            -lVSC_Lite -lGAL -lArchModelSw -lNNArchPerf -lOpenVX -lrknn_runtime -lrknn_api  
			#...
    )  
else ()  
    message(FATAL_ERROR "Unsupported operating system")  
endif ()

The corresponding toolchain.cmake:

set(CMAKE_SYSTEM_NAME Linux)  
set(CMAKE_SYSTEM_PROCESSOR arm)  
  
set(CMAKE_C_COMPILER arm-linux-gnueabihf-gcc)  
set(CMAKE_CXX_COMPILER arm-linux-gnueabihf-g++)

Then just run:

cmake -DCMAKE_TOOLCHAIN_FILE=../toolchain.cmake .. && cmake --build . -j $(nproc)

and the build is done.
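
Before wiring everything into the application, it is worth a quick sanity check on the device that the cross-compiled binary can actually load libInspireFace.so and librknn_api.so. A minimal sketch; the header path and the HFSession handle name are assumptions based on the InspireFace C API calls used later in this post, so adjust them to your build tree:

// npu_smoke_test.cpp - sketch: verify the InspireFace/RKNN libraries load on the device
#include <cstdio>
#include "inspireface.h" // header path may differ in your InspireFace build

int main() {
    // Path to the deployed RV1109 model pack; adjust to your layout
    HResult ret = HFLaunchInspireFace("model/Gundam_RV1109");
    if (ret != HSUCCEED) {
        std::printf("launch failed: %d (missing libs? driver/model mismatch?)\n",
                    static_cast<int>(ret));
        return 1;
    }
    // Same options as the real application below: recognition + light tracking
    HFSession session = nullptr;
    ret = HFCreateInspireFaceSessionOptional(
        HF_ENABLE_FACE_RECOGNITION, HF_DETECT_MODE_LIGHT_TRACK,
        1 /*maxDetectNum*/, 160 /*detectPixelLevel*/, -1, &session);
    std::printf("session create: %s\n", ret == HSUCCEED ? "ok" : "failed");
    return ret == HSUCCEED ? 0 : 1;
}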

Runtime Environment

The Gundam series provided by InspireFace are non-precompiled models. To run on a machine with the mini driver, we either compile the models into precompiled ones or switch the driver to the full driver. Taking the driver route as the example, following rknpu, run commands like:

adb push drivers/linux-armhf-puma/*   /
adb push drivers/npu_ko/galcore_puma.ko /lib/modules/galcore.ko

and similar. Rather than pushing every file with adb push, I suggest downloading/pushing the archive to the device, extracting it there, and then running cp -r; this avoids the various odd problems that can creep in while the data is being moved around.

If the application still complains that the driver is not the full version after you switch to the full driver, first run the commands that install the mini driver, then run the full-driver commands again; the issue may be caused by certain mini-driver libraries missing from the machine.

Code

Here we use infrared detection to judge liveness, and combine it with face detection in a high-frequency loop, so that actual face recognition runs less often and wastes fewer system resources.

mutex mtx;
bool started = false;
bool closeProcess = false;

int g_appWidth;
int g_appHeight;

int g_appWidthIr;
int g_appHeightIr;

bool g_onFaceFrameIr = false;
bool g_onFaceFrameRga = false;

// Note: no trailing semicolons in macros, or they expand into expressions
#define CAMERA_WIDTH 1920
#define CAMERA_HEIGHT 1080
#define CAMERA_WIDTH_VI 1920
#define CAMERA_HEIGHT_VI 1080
  
void faceRecognitionPreFun(uchar *irFrame, uchar *rgaFrame) {  
    static uchar *s_irFrame = nullptr;  
    static uchar *s_rgaFrame = nullptr;  
    if (nullptr != irFrame) {  
        s_irFrame = irFrame;  
    }  
    if (nullptr != rgaFrame) {  
        s_rgaFrame = rgaFrame;  
    }  
    if (s_irFrame == nullptr || s_rgaFrame == nullptr) {  
        return;  
    }  
  
    const cv::Mat frameIr(g_appHeightIr, g_appWidthIr, CV_8UC3, s_irFrame);  
    const cv::Mat frameRga(g_appHeight, g_appWidth, CV_8UC3, s_rgaFrame);  
    try {  
	    faceRecognition(frameRga, frameIr);      
    } catch (const exception &e) {  
        logPrintln("Face Recognition fail : " + string(e.what()),  
                   airstrip::ERROR, __FUNCTION__);  
    }  
  
    delete [] s_irFrame;  
    delete [] s_rgaFrame;  
    s_irFrame = nullptr;  
    s_rgaFrame = nullptr;  
    g_onFaceFrameRga = false;  
    g_onFaceFrameIr = false;  
}  
  
void processWithMb(bool isIr, MEDIA_BUFFER mb) {  
    const void *data = RK_MPI_MB_GetPtr(mb);  
    const size_t size = RK_MPI_MB_GetSize(mb);  
    auto *buff = new uchar[size];  
    memcpy(buff, data, size);  
  
    uchar *otherBuff = nullptr;  
    auto boundFunction = isIr  
                             ? bind(faceRecognitionPreFun, buff, otherBuff)  
                             : bind(faceRecognitionPreFun, otherBuff, buff);  
  
    boundFunction();  
    RK_MPI_MB_ReleaseBuffer(mb);  
}  
  
void processWithMbIr(MEDIA_BUFFER mb) {  
    if (closeProcess) return;
    if (g_onFaceFrameIr || g_closeFaceRecognition || g_closeFaceRecognitionRegister || !g_allowFaceOpen) {  
        RK_MPI_MB_ReleaseBuffer(mb);  
        return;  
    }  
    g_onFaceFrameIr = true;  
  
    static int64_t lastMillisecondCount = 0L;  
    const int64_t currentMillisecondCount =  
            std::chrono::duration_cast<chrono::milliseconds>(  
                chrono::system_clock::now().time_since_epoch()).  
            count();  
    if (currentMillisecondCount - lastMillisecondCount < 100) {  
        RK_MPI_MB_ReleaseBuffer(mb);  
        g_onFaceFrameIr = false;  
        return;  
    }  
    lastMillisecondCount = currentMillisecondCount;  
  
    processWithMb(true, mb);  
}  
  
void processWithMbRga(MEDIA_BUFFER mb) {  
    if (closeProcess) return;
    if (g_onFaceFrameRga || g_closeFaceRecognition || g_closeFaceRecognitionRegister || !g_allowFaceOpen) {  
        RK_MPI_MB_ReleaseBuffer(mb);  
        return;  
    }  
    g_onFaceFrameRga = true;  
  
    static int64_t lastMillisecondCount = 0L;  
    const int64_t currentMillisecondCount =  
            std::chrono::duration_cast<chrono::milliseconds>(  
                chrono::system_clock::now().time_since_epoch()).  
            count();  
    if (currentMillisecondCount - lastMillisecondCount < 100) {  
        RK_MPI_MB_ReleaseBuffer(mb);  
        g_onFaceFrameRga = false;  
        return;  
    }  
    lastMillisecondCount = currentMillisecondCount;  
  
    processWithMb(false, mb);  
}  
  
  
void startCameraRk() {  
    std::lock_guard<std::mutex> lock(mtx);  
    if (started) {  
        logPrintln("Camera RK has started", airstrip::WARN, __FUNCTION__);  
        return;  
    }  
  
    int appWidth, appHeight;  
    airstrip::getProgramOptions(PRO_OPT_APP_WIDTH, &appWidth);  
    airstrip::getProgramOptions(PRO_OPT_APP_HEIGHT, &appHeight);  
    g_appWidth = appWidth;  
    g_appHeight = appHeight;  
    g_appWidthIr = static_cast<int>(appWidth * IR_SCALE);  
    g_appHeightIr = static_cast<int>(appHeight * IR_SCALE);  
  
  
    display_init(0, 0);  
    display_exit();  
  
    closeProcess = false;  
    int ret = 0;  
  
    // Init  
    RK_MPI_SYS_Init();  
  
    constexpr RK_S32 irCameraId = 0;  
    constexpr RK_S32 rgaCameraId = 1;  
    SAMPLE_COMM_ISP_Init(rgaCameraId, RK_AIQ_WORKING_MODE_NORMAL, RK_FALSE, "/etc/iqfiles");  
    SAMPLE_COMM_ISP_Run(rgaCameraId);  
    SAMPLE_COMM_ISP_SetFrameRate(rgaCameraId, 30);  
  
    SAMPLE_COMM_ISP_Init(irCameraId, RK_AIQ_WORKING_MODE_NORMAL, RK_FALSE, "/etc/iqfiles");  
    SAMPLE_COMM_ISP_Run(irCameraId);  
    SAMPLE_COMM_ISP_SetFrameRate(irCameraId, 10);  
  
  
    SAMPLE_COMM_ISP_SET_ManualExposureManualGain(1, 0, 0);  
  
  
    // Init vi 0  
    VI_CHN_ATTR_S vi_chn_attr;  
    vi_chn_attr.pcVideoNode = "rkispp_scale0";  
    vi_chn_attr.u32BufCnt = 3;  
    vi_chn_attr.u32Width = CAMERA_WIDTH;  
    vi_chn_attr.u32Height = CAMERA_HEIGHT;  
    vi_chn_attr.enPixFmt = IMAGE_TYPE_NV12;  
    vi_chn_attr.enWorkMode = VI_WORK_MODE_NORMAL;  
    ret = RK_MPI_VI_SetChnAttr(rgaCameraId, 0, &vi_chn_attr);  
    ret |= RK_MPI_VI_EnableChn(rgaCameraId, 0);  
    if (ret) {  
        logPrintln("Create vi[0] failed! ret = " + ret,  
                   airstrip::CRITICAL, __FUNCTION__);  
        exit(-1);  
    }  
  
    // Init vi 1  
    vi_chn_attr.pcVideoNode = "rkispp_scale0";  
    vi_chn_attr.u32BufCnt = 3;  
    vi_chn_attr.u32Width = CAMERA_WIDTH_VI;  
    vi_chn_attr.u32Height = CAMERA_HEIGHT_VI;  
    vi_chn_attr.enPixFmt = IMAGE_TYPE_NV12;  
    vi_chn_attr.enWorkMode = VI_WORK_MODE_NORMAL;  
    ret = RK_MPI_VI_SetChnAttr(irCameraId, 1, &vi_chn_attr);  
    ret |= RK_MPI_VI_EnableChn(irCameraId, 1);  
    if (ret) {  
        logPrintln("Create vi[1] failed! ret = " + ret,  
                   airstrip::CRITICAL, __FUNCTION__);  
        exit(-1);  
    }  
  
    // Init rga 0  
    RGA_ATTR_S stRgaAttr = {};  
    stRgaAttr.bEnBufPool = RK_TRUE;  
    stRgaAttr.u16BufPoolCnt = 2;  
    stRgaAttr.u16Rotaion = 90;  
    stRgaAttr.stImgIn.u32X = 0;  
    stRgaAttr.stImgIn.u32Y = 0;  
    stRgaAttr.stImgIn.imgType = IMAGE_TYPE_NV12;  
    stRgaAttr.stImgIn.u32Width = CAMERA_WIDTH;  
    stRgaAttr.stImgIn.u32Height = CAMERA_HEIGHT;  
    stRgaAttr.stImgIn.u32HorStride = CAMERA_WIDTH;  
    stRgaAttr.stImgIn.u32VirStride = CAMERA_HEIGHT;  
    stRgaAttr.stImgOut.u32X = 0;  
    stRgaAttr.stImgOut.u32Y = 0;  
    stRgaAttr.stImgOut.imgType = IMAGE_TYPE_RGB888;  
    stRgaAttr.stImgOut.u32Width = g_appWidth;  
    stRgaAttr.stImgOut.u32Height = g_appHeight;  
    stRgaAttr.stImgOut.u32HorStride = g_appWidth;  
    stRgaAttr.stImgOut.u32VirStride = g_appHeight;  
    ret = RK_MPI_RGA_CreateChn(0, &stRgaAttr);  
    if (ret) {  
        logPrintln("Create rga[0] failed! ret = " + ret,  
                   airstrip::CRITICAL, __FUNCTION__);  
        exit(-1);  
    }  
  
    // Init rga 1  
    stRgaAttr.bEnBufPool = RK_TRUE;  
    stRgaAttr.u16BufPoolCnt = 4;  
    stRgaAttr.u16Rotaion = 270;  
    stRgaAttr.stImgIn.u32X = 0;  
    stRgaAttr.stImgIn.u32Y = 0;  
    stRgaAttr.stImgIn.imgType = IMAGE_TYPE_NV12;  
    stRgaAttr.stImgIn.u32Width = CAMERA_WIDTH_VI;  
    stRgaAttr.stImgIn.u32Height = CAMERA_HEIGHT_VI;  
    stRgaAttr.stImgIn.u32HorStride = CAMERA_WIDTH_VI;  
    stRgaAttr.stImgIn.u32VirStride = CAMERA_HEIGHT_VI;  
    stRgaAttr.stImgOut.u32X = 0;  
    stRgaAttr.stImgOut.u32Y = 0;  
    stRgaAttr.stImgOut.imgType = IMAGE_TYPE_RGB888;  
    stRgaAttr.stImgOut.u32Width = g_appWidthIr;  
    stRgaAttr.stImgOut.u32Height = g_appHeightIr;  
    stRgaAttr.stImgOut.u32HorStride = g_appWidthIr;  
    stRgaAttr.stImgOut.u32VirStride = g_appHeightIr;  
    ret = RK_MPI_RGA_CreateChn(1, &stRgaAttr);  
    if (ret) {  
        logPrintln("Create rga[1] failed! ret = " + ret,  
                   airstrip::CRITICAL, __FUNCTION__);  
        exit(-1);  
    }  
  
    // Init rga 2  
    stRgaAttr.bEnBufPool = RK_TRUE;  
    stRgaAttr.u16BufPoolCnt = 4;  
    stRgaAttr.u16Rotaion = 90;  
    stRgaAttr.stImgIn.u32X = 0;  
    stRgaAttr.stImgIn.u32Y = 0;  
    stRgaAttr.stImgIn.imgType = IMAGE_TYPE_NV12;  
    stRgaAttr.stImgIn.u32Width = CAMERA_WIDTH;  
    stRgaAttr.stImgIn.u32Height = CAMERA_HEIGHT;  
    stRgaAttr.stImgIn.u32HorStride = CAMERA_WIDTH;  
    stRgaAttr.stImgIn.u32VirStride = CAMERA_HEIGHT;  
    stRgaAttr.stImgOut.u32X = 0;  
    stRgaAttr.stImgOut.u32Y = 0;  
    stRgaAttr.stImgOut.imgType = IMAGE_TYPE_RGB888;  
    stRgaAttr.stImgOut.u32Width = g_appWidth;  
    stRgaAttr.stImgOut.u32Height = g_appHeight;  
    stRgaAttr.stImgOut.u32HorStride = g_appWidth;  
    stRgaAttr.stImgOut.u32VirStride = g_appHeight;  
    ret = RK_MPI_RGA_CreateChn(2, &stRgaAttr);  
    if (ret) {  
        logPrintln("Create rga[2] failed! ret = " + ret,  
                   airstrip::CRITICAL, __FUNCTION__);  
        exit(-1);  
    }  
  
  
    // Init vo 0  
    VO_CHN_ATTR_S stVoAttr = {};  
    stVoAttr.pcDevNode = "/dev/dri/card0";  
    stVoAttr.emPlaneType = VO_PLANE_OVERLAY;  
    stVoAttr.enImgType = IMAGE_TYPE_RGB888;  
    stVoAttr.u16Zpos = 0;  
    stVoAttr.stDispRect.s32X = 0;  
    stVoAttr.stDispRect.s32Y = 0;  
    stVoAttr.stDispRect.u32Width = g_appWidth;  
    stVoAttr.stDispRect.u32Height = g_appHeight;  
    ret = RK_MPI_VO_CreateChn(0, &stVoAttr);  
    if (ret) {  
        logPrintln("Create vo[0] failed! ret = " + ret,  
                   airstrip::CRITICAL, __FUNCTION__);  
        exit(-1);  
    }  
  
  
    // Bind  
    MPP_CHN_S stSrcChn = {};  
    MPP_CHN_S stDestChn = {};  
  
    logPrintln("Bind VI[0] to RGA[0]...", airstrip::INFO, __FUNCTION__);  
    stSrcChn.enModId = RK_ID_VI;  
    stSrcChn.s32ChnId = 0;  
    stDestChn.enModId = RK_ID_RGA;  
    stDestChn.s32ChnId = 0;  
    ret = RK_MPI_SYS_Bind(&stSrcChn, &stDestChn);  
    if (ret) {  
        logPrintln("Bind vi[0] to rga[0] failed! ret = " + ret,  
                   airstrip::CRITICAL, __FUNCTION__);  
        exit(-1);  
    }  
  
    logPrintln("Bind VI[0] to RGA[2]...", airstrip::INFO, __FUNCTION__);  
    stSrcChn.enModId = RK_ID_VI;  
    stSrcChn.s32ChnId = 0;  
    stDestChn.enModId = RK_ID_RGA;  
    stDestChn.s32ChnId = 2;  
    ret = RK_MPI_SYS_Bind(&stSrcChn, &stDestChn);  
    if (ret) {  
        logPrintln("Bind vi[0] to rga[2] failed! ret = " + ret,  
                   airstrip::CRITICAL, __FUNCTION__);  
        exit(-1);  
    }  
  
    logPrintln("Bind VI[1] to RGA[1]...", airstrip::INFO, __FUNCTION__);  
    stSrcChn.enModId = RK_ID_VI;  
    stSrcChn.s32ChnId = 1;  
    stDestChn.enModId = RK_ID_RGA;  
    stDestChn.s32ChnId = 1;  
    ret = RK_MPI_SYS_Bind(&stSrcChn, &stDestChn);  
    if (ret) {  
        logPrintln("Bind vi[1] to rga[1] failed! ret = " + ret,  
                   airstrip::CRITICAL, __FUNCTION__);  
        exit(-1);  
    }  
  
    logPrintln("Bind RGA[0] to VO[0]...", airstrip::INFO, __FUNCTION__);  
    stSrcChn.enModId = RK_ID_RGA;  
    stSrcChn.s32ChnId = 0;  
    stDestChn.enModId = RK_ID_VO;  
    stDestChn.s32ChnId = 0;  
    ret = RK_MPI_SYS_Bind(&stSrcChn, &stDestChn);  
    if (ret) {  
        logPrintln("Bind rga[0] to vo[0] failed! ret = " + ret,  
                   airstrip::CRITICAL, __FUNCTION__);  
        exit(-1);  
    }  
  
    MPP_CHN_S stEncChn;  
    stEncChn.enModId = RK_ID_RGA;  
    stEncChn.s32DevId = 1;  
    stEncChn.s32ChnId = 1;  
    ret = RK_MPI_SYS_RegisterOutCb(&stEncChn, processWithMbIr);  
    if (ret) {  
        logPrintln("Register out cb ir failed! ret = " + ret,  
                   airstrip::CRITICAL, __FUNCTION__);  
        exit(-1);  
    }  
  
    stEncChn.enModId = RK_ID_RGA;  
    stEncChn.s32DevId = 2;  
    stEncChn.s32ChnId = 2;  
    ret = RK_MPI_SYS_RegisterOutCb(&stEncChn, processWithMbRga);  
    if (ret) {  
        logPrintln("Register out rga cb failed! ret = " + ret,  
                   airstrip::CRITICAL, __FUNCTION__);  
        exit(-1);  
    }  
  
    logPrintln("Camera RK initial finish", airstrip::INFO, __FUNCTION__);  
    started = true;  
}  
  
void stopCameraRk() {  
    std::lock_guard<std::mutex> lock(mtx);  
    if (!started) {  
        logPrintln("Camera RK has stoped", airstrip::WARN, __FUNCTION__);  
        return;  
    }  
  
    closeProcess = true;  
    int ret = 0;  
  
    constexpr RK_S32 irCameraId = 0;  
    constexpr RK_S32 rgaCameraId = 1;  
  
    // Unbind  
    MPP_CHN_S stSrcChn = {};  
    MPP_CHN_S stDestChn = {};  
  
    stSrcChn.enModId = RK_ID_VI;  
    stSrcChn.s32ChnId = 0;  
    stDestChn.enModId = RK_ID_RGA;  
    stDestChn.s32ChnId = 0;  
    ret = RK_MPI_SYS_UnBind(&stSrcChn, &stDestChn);  
    if (ret) {  
        logPrintln("Unbind vi[0] to rga[0] failed! ret = " + ret,  
                   airstrip::CRITICAL, __FUNCTION__);  
        exit(-1);  
    }  
  
    stSrcChn.enModId = RK_ID_VI;  
    stSrcChn.s32ChnId = 0;  
    stDestChn.enModId = RK_ID_RGA;  
    stDestChn.s32ChnId = 2;  
    ret = RK_MPI_SYS_UnBind(&stSrcChn, &stDestChn);  
    if (ret) {  
        logPrintln("Unbind vi[0] to rga[2] failed! ret = " + ret,  
                   airstrip::CRITICAL, __FUNCTION__);  
        exit(-1);  
    }  
  
    stSrcChn.enModId = RK_ID_VI;  
    stSrcChn.s32ChnId = 1;  
    stDestChn.enModId = RK_ID_RGA;  
    stDestChn.s32ChnId = 1;  
    ret = RK_MPI_SYS_UnBind(&stSrcChn, &stDestChn);  
    if (ret) {  
        logPrintln("Unbind vi[1] to rga[1] failed! ret = " + ret,  
                   airstrip::CRITICAL, __FUNCTION__);  
        exit(-1);  
    }  
  
    stSrcChn.enModId = RK_ID_RGA;  
    stSrcChn.s32ChnId = 0;  
    stDestChn.enModId = RK_ID_VO;  
    stDestChn.s32ChnId = 0;  
    ret = RK_MPI_SYS_UnBind(&stSrcChn, &stDestChn);  
    if (ret) {  
        logPrintln("Unbind rga[0] to vo[0] failed! ret = " + ret,  
                   airstrip::CRITICAL, __FUNCTION__);  
        exit(-1);  
    }  
  
    RK_MPI_VO_DestroyChn(0);  
    RK_MPI_RGA_DestroyChn(0);  
    RK_MPI_RGA_DestroyChn(1);  
    RK_MPI_RGA_DestroyChn(2);  
    RK_MPI_VI_DisableChn(rgaCameraId, 0);  
    RK_MPI_VI_DisableChn(irCameraId, 1);  
  
    logPrintln("Camera RK stop finish", airstrip::INFO, __FUNCTION__);  
  
    started = false;  
}

To keep the frame rate on the VO channel high, we run the cameras at a fairly high frame rate; but in processWithMbIr and processWithMbRga, which actually capture frames for computation, we use std::chrono::duration_cast<chrono::milliseconds>(chrono::system_clock::now().time_since_epoch()).count() to stretch the gap between two computations to more than 100 milliseconds. The memory copies and the subsequent face-tracking frame rate drop somewhat as a result, but resource consumption falls noticeably; you can make your own trade-off based on your scenario and the machine's performance.
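
If you need this interval gate in more than one callback, the pattern factors out into a small helper. The sketch below uses steady_clock rather than the system_clock above, since steady_clock cannot jump backwards when the device's clock gets synced:

#include <chrono>

// Returns true at most once per intervalMs; the caller owns the `last`
// timestamp, so each call site keeps its own state.
static bool throttlePassed(std::chrono::steady_clock::time_point &last,
                           const int intervalMs) {
    const auto now = std::chrono::steady_clock::now();
    if (now - last < std::chrono::milliseconds(intervalMs)) {
        return false; // too soon: the caller releases the buffer and skips the frame
    }
    last = now;
    return true;
}

// Usage inside a callback such as processWithMbIr:
//   static auto s_lastIr = std::chrono::steady_clock::time_point{};
//   if (!throttlePassed(s_lastIr, 100)) {
//       RK_MPI_MB_ReleaseBuffer(mb);
//       g_onFaceFrameIr = false;
//       return;
//   }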

In the faceRecognitionPreFun function we synchronize the infrared frame and the RGA frame, handing the two camera images captured at the same moment to faceRecognition for processing:


void initFaceRecognition() {  
    if (initializedFaceRec) {  
        return;  
    }  
  
    logPrintln("Start init face model", airstrip::INFO, __FUNCTION__);  
  
    const string modelPath = g_appWorkDir + "model/Gundam_RV1109";  
    HResult ret = HFLaunchInspireFace(modelPath.c_str());  
    if (ret != HSUCCEED) {  
        logPrintln("Load resource error: " + ret, airstrip::CRITICAL, __FUNCTION__);  
        exit(-1);  
    }  
  
    constexpr HOption option = HF_ENABLE_FACE_RECOGNITION;  
    constexpr HFDetectMode detMode = HF_DETECT_MODE_LIGHT_TRACK;  
    constexpr HInt32 maxDetectNum = 1;  
    constexpr HInt32 detectPixelLevel = 160;  
    ret = HFCreateInspireFaceSessionOptional(  
        option, detMode, maxDetectNum, detectPixelLevel, -1, &faceRecognitionSession);  
    if (ret != HSUCCEED) {  
        logPrintln("Create face context error: " + ret, airstrip::CRITICAL, __FUNCTION__);  
        exit(-1);  
    }  
  
    HFSessionSetTrackPreviewSize(faceRecognitionSession, detectPixelLevel);  
    HFSessionSetFilterMinimumFacePixelSize(faceRecognitionSession, 30);  
  
    HFFeatureHubConfiguration configuration;  
    configuration.primaryKeyMode = HF_PK_MANUAL_INPUT;  
    configuration.enablePersistence = 0;  
    configuration.persistenceDbPath = nullptr;  
    if (g_fullFaceCompare) {  
        configuration.searchMode = HF_SEARCH_MODE_EXHAUSTIVE;  
    } else {  
        configuration.searchMode = HF_SEARCH_MODE_EAGER;  
    }  
    configuration.searchThreshold = static_cast<float>(std::min(g_faceThreshold, g_faceThresholdNight));  
    ret = HFFeatureHubDataEnable(configuration);  
    if (ret != HSUCCEED) {  
        logPrintln("Create face db error: " + ret, airstrip::CRITICAL, __FUNCTION__);  
        exit(-1);  
    }  
  
    logPrintln("Face model init finish", airstrip::INFO, __FUNCTION__);  
    initializedFaceRec = true;  
}

bool faceDetectInspire(const cv::Mat &frame, const cv::Mat &frameIr, cv::Rect &rectOutput, bool moreAction) {  
    bool ret = false;  
    if (!initializedFaceRec || frame.empty() || frameIr.empty()) {  
        return ret;  
    }  
  
    HFImageStream stream = nullptr;  
    HFImageData imageData = {};  
    imageData.data = frameIr.data;  
    imageData.format = HF_STREAM_BGR;  
    imageData.height = frameIr.rows;  
    imageData.width = frameIr.cols;  
    imageData.rotation = HF_CAMERA_ROTATION_0;  
    HResult retI = HFCreateImageStream(&imageData, &stream);  
    if (retI != HSUCCEED) {  
        logPrintln("Face recognition build image fail " + retI,  
                   airstrip::WARN, __FUNCTION__);  
        return ret;  
    }  
  
    HFMultipleFaceData multipleFaceData = {};  
    retI = HFExecuteFaceTrack(faceRecognitionSession, stream, &multipleFaceData);  
    if (retI != HSUCCEED) {  
        logPrintln("Face recognition track image fail " + retI,  
                   airstrip::WARN, __FUNCTION__);  
        HFReleaseImageStream(stream);  
        return ret;  
    }  
  
    if (multipleFaceData.detectedNum <= 0) {  
        HFReleaseImageStream(stream);  
        return ret;  
    }  
    ret = true;  
  
    logPrintln("Track id: " + to_string(multipleFaceData.trackIds[0]),  
               airstrip::DEBUG, __FUNCTION__);  
  
    const cv::Rect rectIr(multipleFaceData.rects->x, multipleFaceData.rects->y,  
                          multipleFaceData.rects->width, multipleFaceData.rects->height);  
  
    // Clamp the box to the image bounds
    const int maxWidth = frameIr.cols;  
    const int maxHeight = frameIr.rows;  
    const int faceX = std::max(0, rectIr.x);  
    const int faceY = std::max(0, rectIr.y);  
    int faceW = std::max(0, rectIr.width);  
    int faceH = std::max(0, rectIr.height);  
    faceW = faceX + faceW > maxWidth ? maxWidth - faceX : faceW;  
    faceH = faceY + faceH > maxHeight ? maxHeight - faceY : faceH;  
    rectOutput = cv::Rect(faceX, faceY, faceW, faceH);  
  
    // Take the corresponding region of the RGB image
    const auto frameRgbFace = frame(cv::Rect(faceX, faceY, faceW, faceH));  
  
    // Check the minimum size
    const auto minSide = min(frameRgbFace.cols, frameRgbFace.rows);  
    logPrintln("Size min side =  " + to_string(minSide) +  
               " faceDistance = " + to_string(g_faceDistance), airstrip::DEBUG, __FUNCTION__);  
    if (!g_longDistanceDetect && minSide < 120) {  
        ret = false;  
    }  

	// Business logic...
  
    HFReleaseImageStream(stream);  
    return ret;  
}  
  
  
void faceRecognition(const cv::Mat &frame, const cv::Mat &frameIr) {  
    if (!initializedFaceRec) {  
        return;  
    }  
  
    static int isCheckFaceReco = 0;  
    if (isCheckFaceReco > 1) {  
        logPrintln("In Recognition ... ", airstrip::DEBUG, __FUNCTION__);  
        return;  
    }  
    isCheckFaceReco++;  
    cv::Mat frameCopy = frame.clone();  
    cv::Mat frameIrCopy = frameIr.clone();  
  
    static_cast<airstrip::ThreadPool *>(g_mainThreadPool)->enqueue([frameCopy, frameIrCopy] {  
    
            // Decide whether to run face recognition or only detection
            bool onlyDetect = true;  
            static int64_t lastMillisecondCount = 0L;  
            const int64_t currentMillisecondCount =  
                    std::chrono::duration_cast<chrono::milliseconds>(chrono::system_clock::now().time_since_epoch()).  
                    count();  
                    
            // If the interval has elapsed and no recognition is running, start a new one
            if (currentMillisecondCount - lastMillisecondCount > g_faceRegCoreIvMillSec && !g_isCheckFace) {  
                onlyDetect = false;  
                g_isCheckFace = true;  
                lastMillisecondCount = currentMillisecondCount;  
            }  
  
            // Infrared face detection
            cv::Rect rectIr;  
            const bool retDetect = faceDetectInspire(frameCopy, frameIrCopy, rectIr, !onlyDetect);  
            if (retDetect) {  
                CameraFrame::getInstance()->setFaceRects(rectIr.x, rectIr.y, rectIr.width, rectIr.height);  
            } else {  
                CameraFrame::getInstance()->setFaceRects(0, 0, 0, 0);  
            }  
  
            if (onlyDetect) {  
                --isCheckFaceReco;  
                return;  
            }  
  
            if (!retDetect && g_enableFaceSpoof) {  
                g_isCheckFace = false;  
                --isCheckFaceReco;  
                return;  
            }  
  
            // Run face detection on the RGB frame
            HFImageStream stream = nullptr;  
            HFImageData imageData = {};  
            imageData.data = frameCopy.data;  
            imageData.format = HF_STREAM_BGR;  
            imageData.height = frameCopy.rows;  
            imageData.width = frameCopy.cols;  
            imageData.rotation = HF_CAMERA_ROTATION_0;  
            HResult ret = HFCreateImageStream(&imageData, &stream);  
            if (ret != HSUCCEED) {  
                logPrintln("Face recognition build image fail " + ret,  
                           airstrip::WARN, __FUNCTION__);  
                g_isCheckFace = false;  
                --isCheckFaceReco;  
                return;  
            }  
  
            HFMultipleFaceData multipleFaceData = {};  
            ret = HFExecuteFaceTrack(faceRecognitionSession, stream, &multipleFaceData);  
            if (ret != HSUCCEED) {  
                logPrintln("Face recognition track image fail " + ret,  
                           airstrip::WARN, __FUNCTION__);  
                HFReleaseImageStream(stream);  
                g_isCheckFace = false;  
                --isCheckFaceReco;  
                return;  
            }  
  
            if (multipleFaceData.detectedNum <= 0) {  
                HFReleaseImageStream(stream);  
                g_isCheckFace = false;  
                --isCheckFaceReco;  
                return;  
            }  
  
  
            // Confirm the current IR and RGB face boxes essentially coincide
            if (g_enableFaceSpoof) {  
                const cv::Rect rectRgb(multipleFaceData.rects->x, multipleFaceData.rects->y,  
                                       multipleFaceData.rects->width, multipleFaceData.rects->height);  
                const cv::Point rectRgbCenter(rectRgb.x + rectRgb.width / 2, rectRgb.y + rectRgb.height / 2);  
                const cv::Point rectIrCenter(rectIr.x + rectIr.width / 2, rectIr.y + rectIr.height / 2);  
                if (!rectRgb.contains(rectIrCenter) || !rectIr.contains(rectRgbCenter)) {  
                    logPrintln("Face fake face !!!!!", airstrip::WARN, __FUNCTION__);  
                    HFReleaseImageStream(stream);  
                    g_isCheckFace = false;  
                    --isCheckFaceReco;  
                    return;  
                }  
            }  
  
            HFFaceFeature feature = {};  
            ret = HFFaceFeatureExtract(faceRecognitionSession, stream, multipleFaceData.tokens[0], &feature);  
            if (ret != HSUCCEED) {  
                logPrintln("Face recognition feature extract fail " + ret,  
                           airstrip::WARN, __FUNCTION__);  
                HFReleaseImageStream(stream);  
                g_isCheckFace = false;  
                --isCheckFaceReco;  
                return;  
            }  
  
            HFloat confidence;  
            HFFaceFeatureIdentity searchResult = {};  
            ret = HFFeatureHubFaceSearch(feature, &confidence, &searchResult);  
            if (ret != HSUCCEED) {  
                logPrintln("Face recognition feature search fail " + ret,  
                           airstrip::WARN, __FUNCTION__);  
                HFReleaseImageStream(stream);  
                g_isCheckFace = false;  
                --isCheckFaceReco;  
                return;  
            }  

			// Business logic...
  
            HFReleaseImageStream(stream);  
            g_isCheckFace = false;  
            --isCheckFaceReco;  
        }  
  
    );  
}

First, the static variable isCheckFaceReco limits the number of threads that may enter to at most two. This too can be tuned to your scenario and device performance, but two is the minimum; with fewer, the tracking box would stall during face recognition and the business logic that follows. After the new thread is spawned, g_faceRegCoreIvMillSec decides whether this pass should perform face recognition or only tracking; this is likewise configurable, and the longer the interval, the lower the CPU load.
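
Because --isCheckFaceReco has to be repeated on every early-return path above, an RAII guard is one way to make the counter harder to leak. A sketch only, assuming the counter is turned into a std::atomic<int>; the names here are illustrative, not from the code above:

#include <atomic>

// Releases the slot on every exit path automatically; std::atomic also keeps
// the counter correct if the callbacks ever fire from different threads.
struct SlotGuard {
    std::atomic<int> &counter;
    explicit SlotGuard(std::atomic<int> &c) : counter(c) { ++counter; }
    ~SlotGuard() { --counter; }
};

// Usage sketch, replacing the manual ++/-- pairs in faceRecognition:
//   static std::atomic<int> s_checkFaceReco{0};
//   if (s_checkFaceReco.load() > 1) return;   // at most two workers in flight
//   pool->enqueue([frameCopy, frameIrCopy] {
//       SlotGuard slot(s_checkFaceReco);
//       // ... detection / recognition / business logic ...
//   });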

When only face detection is performed, we just take the infrared camera data, clamp the detected box, and draw it on the image; note that with Qt this must be posted to the main thread, otherwise unpredictable errors can occur at runtime. When face recognition is needed as well, faces in the infrared image are distorted and compare poorly, so we take the face box from the RGA (RGB) stream, check that it overlaps the infrared box well enough to confirm both views see the same subject, and only then run the face comparison and return the result.
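
For the Qt case just mentioned, the usual approach is to post the rectangle through the event loop instead of touching UI state from the worker. A sketch assuming Qt 5.10 or newer (for the functor overload of QMetaObject::invokeMethod) and the CameraFrame singleton from the code above:

#include <QCoreApplication>
#include <QMetaObject>

// Called from the worker thread; never touch widgets or painting here directly.
// Qt::QueuedConnection posts the functor to qApp's thread, i.e. the main thread.
void publishFaceRect(const int x, const int y, const int w, const int h) {
    QMetaObject::invokeMethod(qApp, [x, y, w, h] {
        // Runs in the GUI thread; safe to update the overlay from here
        CameraFrame::getInstance()->setFaceRects(x, y, w, h);
    }, Qt::QueuedConnection);
}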
