当前位置: 首页 > web >正文

Camera相机人脸识别系列专题分析之十七:人脸特征检测FFD算法之libhci_face_camera_api.so 296点位人脸识别检测流程详解

【关注我,后续持续新增专题博文,谢谢!!!】

上一篇我们讲了

        这一篇我们开始讲 Camera相机人脸识别系列专题分析之十七:人脸特征检测FFD算法之libhci_face_camera_api.so 296点位人脸识别检测流程详解

目录

一、背景

二、FFD算法libhci_face_camera_api.so人脸识别检测流程详解

    2.1:FFD初始化

    2.2 :FFD人脸识别检测process

    2.3 :setFdAlgoInfo

    2.4 :FFD卸载


一、背景

人脸特征检测FFD算法有很多三方FFD算法,我们以FFD算法libhci_face_camera_api.so为例讲解人脸识别检测流程。和libcvface_api.so的流程类似,区别在于:

  1. libcvface_api.so是137点位FFD
  2. libhci_face_camera_api.so是296点位FFD,更加精细化。

二、FFD算法libhci_face_camera_api.so人脸识别检测流程详解

    2.1:FFD初始化

  1. 首先通过vendorFDLoad()通过dlopen加载libhci_face_camera_api.so,并对算法库API进行初始化。
  2. 再通过vendorFDInit对算法内部进行初始化。
    1. get sdk version
    2. set debug log info
    3. set license
    4. create tracker,参数是CV_DETECT_ENABLE_ALIGN_296,296点位FFD参数。
void CustomHciFadApi::vendorFDLoad(senseTime_lib_struct* p_lib)
{if (p_lib->cv_face_lib_ptr != nullptr) {return;}memset(p_lib, 0x0, sizeof(senseTime_lib_struct));p_lib->cv_face_lib_ptr = dlopen(LIB_PATH_HCI_FD, RTLD_NOW | RTLD_NODELETE);if (p_lib->cv_face_lib_ptr == nullptr) {CAM_LOGE("Error opening libcvface_api.so lib");return;}CAM_LOGI("%s cv_face_lib_ptr is %p", __FUNCTION__, p_lib->cv_face_lib_ptr);IF_CTRL_GET_ALGO_INF(p_lib->cv_face_lib_ptr, p_lib->cv_face_algorithm_info, "cv_face_algorithm_info");IF_CTRL_GET_ALGO_INF(p_lib->cv_face_lib_ptr, p_lib->cv_face_init_license_config, "cv_face_init_license_config");IF_CTRL_GET_ALGO_INF(p_lib->cv_face_lib_ptr, p_lib->cv_face_get_version, "cv_face_get_version");IF_CTRL_GET_ALGO_INF(p_lib->cv_face_lib_ptr, p_lib->cv_face_detect_get_threshold, "cv_face_detect_get_threshold");IF_CTRL_GET_ALGO_INF(p_lib->cv_face_lib_ptr, p_lib->cv_face_detect_set_threshold, "cv_face_detect_set_threshold");IF_CTRL_GET_ALGO_INF(p_lib->cv_face_lib_ptr, p_lib->cv_face_destroy_detector, "cv_face_destroy_detector");IF_CTRL_GET_ALGO_INF(p_lib->cv_face_lib_ptr, p_lib->cv_face_detect, "cv_face_detect");IF_CTRL_GET_ALGO_INF(p_lib->cv_face_lib_ptr, p_lib->cv_face_release_detector_result, "cv_face_release_detector_result");IF_CTRL_GET_ALGO_INF(p_lib->cv_face_lib_ptr, p_lib->cv_face_create_tracker, "cv_face_create_tracker");IF_CTRL_GET_ALGO_INF(p_lib->cv_face_lib_ptr, p_lib->cv_face_destroy_tracker, "cv_face_destroy_tracker");IF_CTRL_GET_ALGO_INF(p_lib->cv_face_lib_ptr, p_lib->cv_face_track, "cv_face_track");IF_CTRL_GET_ALGO_INF(p_lib->cv_face_lib_ptr, p_lib->cv_face_reset_tracker, "cv_face_reset_tracker");IF_CTRL_GET_ALGO_INF(p_lib->cv_face_lib_ptr, p_lib->cv_face_release_tracker_result, "cv_face_release_tracker_result");IF_CTRL_GET_ALGO_INF(p_lib->cv_face_lib_ptr, p_lib->cv_face_track_set_detect_face_cnt_limit, "cv_face_track_set_detect_face_cnt_limit");IF_CTRL_GET_ALGO_INF(p_lib->cv_face_lib_ptr, p_lib->cv_face_track_set_detect_interval, 
"cv_face_track_set_detect_interval");IF_CTRL_GET_ALGO_INF(p_lib->cv_face_lib_ptr, p_lib->cv_face_track_set_alignment_threshold, "cv_face_track_set_alignment_threshold");IF_CTRL_GET_ALGO_INF(p_lib->cv_face_lib_ptr ,p_lib->cv_face_create_attribute_detector, "cv_face_create_attribute_detector");IF_CTRL_GET_ALGO_INF(p_lib->cv_face_lib_ptr, p_lib->cv_face_attribute_detector_detect, "cv_face_attribute_detector_detect");IF_CTRL_GET_ALGO_INF(p_lib->cv_face_lib_ptr, p_lib->cv_face_destroy_attribute_detector, "cv_face_destroy_attribute_detector");IF_CTRL_GET_ALGO_INF(p_lib->cv_face_lib_ptr, p_lib->cv_face_open_log, "cv_face_open_log");IF_CTRL_GET_ALGO_INF(p_lib->cv_face_lib_ptr, p_lib->cv_face_create_matrix_attribute_detector, "cv_face_create_matrix_attribute_detector");IF_CTRL_GET_ALGO_INF(p_lib->cv_face_lib_ptr, p_lib->cv_face_matrix_attribute_detect, "cv_face_matrix_attribute_detect");IF_CTRL_GET_ALGO_INF(p_lib->cv_face_lib_ptr, p_lib->cv_face_destroy_matrix_attribute_detector, "cv_face_destroy_matrix_attribute_detector");IF_CTRL_GET_ALGO_INF(p_lib->cv_face_lib_ptr, p_lib->cv_face_attribute_classify_detect, "cv_face_attribute_classify_detect");IF_CTRL_GET_ALGO_INF(p_lib->cv_face_lib_ptr, p_lib->cv_face_attribute_classify_reset, "cv_face_attribute_classify_reset");IF_CTRL_GET_ALGO_INF(p_lib->cv_face_lib_ptr, p_lib->cv_face_create_attribute_handle, "cv_face_create_attribute_handle");IF_CTRL_GET_ALGO_INF(p_lib->cv_face_lib_ptr, p_lib->cv_face_destroy_attribute_handle, "cv_face_destroy_attribute_handle");
}int32_t CustomHciFadApi::cusHalFDInit(int camId, thirdFaceInitData* faceData)
{pSenseTimelib = reinterpret_cast<senseTime_lib_struct*>(malloc(sizeof(senseTime_lib_struct)));pSenseTimelib->cv_face_lib_ptr = nullptr;pSenseTimelib->g_SenseTimeEngine.attr_Buff = nullptr;pSenseTimelib->g_engine_counter = 0;parent = std::make_shared<FdAlgoApi>();parent->mDisplaySize = faceData->previewImgSize;parent->mIsEisOn = faceData->isEISOn;vendorFDLoad(pSenseTimelib);vendorFDInit(pSenseTimelib, camId);return 0;
}int CustomHciFadApi::vendorFDInit(senseTime_lib_struct* p_lib, int camId)
{int ret = -1;NSCam::IHalSensorList* const pIHalSensorList = NSCam::IHalSensorList::get();int isfacing = pIHalSensorList->queryFacingDirection(camId);if (isfacing) {m_ImageSensorFacing = ImageSensorFacingFront;} else {m_ImageSensorFacing = ImageSensorFacingBack;}if (p_lib->cv_face_lib_ptr == nullptr) {CAM_LOGE("Get NULL face lib pointer");return ret;}int init_flag = p_lib->cv_face_init_license_config((const char*)pLicense_Data);if (init_flag != CV_OK) {CAM_LOGE("cv_face_init_license_config error");}const char* version = p_lib->cv_face_get_version();int32_t value = property_get_int32("third.camera.pfdLog.enable", 0);p_lib->cv_face_open_log(value);CAM_LOGI("fad version is %s, setAlgo log is %d, isfacing is %d", version, value, isfacing);unsigned int flag = 0;unsigned int attr_flag = 0;if (p_lib->g_cv_tracker_handle_front == nullptr) {int cvDetectEnableAlign = CV_DETECT_ENABLE_ALIGN_296;flag = cvDetectEnableAlign | CV_FACE_RESIZE_IMG_320W;ret = p_lib->cv_face_create_tracker(&(p_lib->g_cv_tracker_handle_front), NULL, flag);if ((ret != CV_OK) || (p_lib->g_cv_tracker_handle_front == nullptr)) {CAM_LOGE("%s: FRONT: cv_face_create_tracker error: %d", __FUNCTION__, ret);return ret;}}p_lib->g_SenseTimeEngine.cv_tracker_handle = p_lib->g_cv_tracker_handle_front;if (p_lib->g_cv_attr_handle == nullptr) {int cvDetectAttrAlign = CV_DETECT_ENABLE_ALIGN_296;attr_flag = cvDetectAttrAlign | CV_FACE_RESIZE_IMG_640W;ret = p_lib->cv_face_create_attribute_handle(&(p_lib->g_cv_attr_handle), ATTR_MODE, attr_flag);if ((ret != CV_OK) || (p_lib->g_cv_attr_handle == nullptr)) {CAM_LOGE("%s: cv_face_create_attribute_handle error: %d", __FUNCTION__, ret);return ret;}}p_lib->g_SenseTimeEngine.cv_attr_handle = p_lib->g_cv_attr_handle;memset(&p_lib->attribute_result, 0, sizeof(cv_face_attribute_classify_t) * HCI_FD_MAX_FACE_NUM);p_lib->g_engine_counter++;return ret;
}

    2.2 :FFD人脸识别检测process

process进行人脸识别检测主要通过cv_fad_process进行人脸识别:

  1. cv_face_track: 对连续视频帧进行实时快速人脸跟踪
  2. cv_face_track_set_detect_interval: 设置人脸检测的帧间隔(检测频率,有人脸与无人脸时采用不同间隔)
  3. setFdAlgoInfo:设置人脸信息
MINT32 CustomCvFadApi::process(struct FD_Frame_Parameters& param, MtkCameraFaceMetadata* p3AFaceResult)
{CustomFDImage* imgBuffer = reinterpret_cast<CustomFDImage*>(param.imgBuffer);cv_image_t frame;frame.width = imgBuffer->w;frame.height = imgBuffer->h;frame.stride = imgBuffer->w;cv_fad_process(&param, frame, p3AFaceResult);return 0;
}void CustomCvFadApi::cv_fad_process(struct FD_Frame_Parameters* in, cv_image_t frame, MtkCameraFaceMetadata* p3AFaceResult)
{cv_face_orientation mFaceOrientation = CV_FACE_LEFT;cv_face_t* p_faces_array = nullptr;int faceCount = 0;int rotation = in->Rotation_Info;memset(&in->faceData, 0, sizeof(faceDataAppJoint));mFaceOrientation = setFaceRation(mFaceOrientation, rotation);int cv_result = pSenseTimelib->cv_face_track(pSenseTimelib->g_SenseTimeEngine.cv_tracker_handle,in->pImageBufferY,CV_PIX_FMT_NV12,frame.width, frame.height, frame.stride, mFaceOrientation,&p_faces_array, &faceCount);faceCount = std::min(faceCount, CV_FAD_MAX_FACE_NUM);p3AFaceResult->number_of_faces = faceCount;in->faceData.faceInfoOri.face_num = std::min(faceCount, MAX_ATTRI_FACE_NUM);in->faceData.faceInfoOri.versionId = Version_855;in->faceData.faceInfoOri.master_index = -1;in->faceData.faceInfoOri.points_count = MAX_FFD_NUM;in->faceData.faceInfoOri.fdDimensionW = parent->mDisplaySize.w;in->faceData.faceInfoOri.fdDimensionH = parent->mDisplaySize.h;in->faceData.fdProcessInfo.sensorSize.w = in->thirdCusSensorSize.w;in->faceData.fdProcessInfo.sensorSize.h = in->thirdCusSensorSize.h;in->faceData.fdProcessInfo.previewSize.w = parent->mDisplaySize.w;in->faceData.fdProcessInfo.previewSize.h = parent->mDisplaySize.h;in->faceData.fdProcessInfo.ImgSize.w = frame.width;in->faceData.fdProcessInfo.ImgSize.h = frame.height;in->faceData.fdProcessInfo.isEisOn = parent->mIsEisOn;int interval = 0;if (m_ImageSensorFacing == ImageSensorFacingBack) {interval = (faceCount > 0) ? FREQ_FACE_BACK : FREQ_NOFACE_BACK;} else {interval = (faceCount > 0) ? 
FREQ_FACE_FRONT : FREQ_NOFACE_FRONT;}int val = -1;if (g_currFreq != interval) {cv_result = pSenseTimelib->cv_face_track_set_detect_interval(pSenseTimelib->g_SenseTimeEngine.cv_tracker_handle,interval,&val);if (cv_result != CV_OK) {CAM_LOGE("cv_face_track_set_detect_interval error");} else {g_currFreq = interval;}}if (faceCount > 0) {for (int i = 0; i < faceCount; i++) {setFdAlgoInfo(in, frame, p3AFaceResult, p_faces_array, i);}}fd_algo_face_attribute(in, frame, p3AFaceResult, faceCount, p_faces_array);
}

    2.3 :setFdAlgoInfo

主要设置p3AFaceResult,填充ffd_data数据和faceInfoOri结构体。后面APP和3A会使用这些FFD数据。

/**
 * Fills per-face output for face index @p i: the 3A metadata entry
 * (p3AFaceResult->faces[i], landmark shortcuts, pose fields) and the
 * app-facing faceInfoOri structure including all MAX_FFD_NUM landmark points.
 *
 * Landmark indices (132, 160, 229, ...) address the 296-point model's
 * points_more array; the specific point semantics come from the SDK's
 * point map and are not verifiable from this file.
 *
 * @param in             Frame parameters; faceData.faceInfoOri is written.
 * @param frame          Image geometry (unused directly here).
 * @param p3AFaceResult  3A face metadata being populated.
 * @param p_faces_array  Tracker output array from cv_face_track.
 * @param i              Face index within p_faces_array.
 */
void CustomCvFadApi::setFdAlgoInfo(struct FD_Frame_Parameters* in, cv_image_t frame, MtkCameraFaceMetadata* p3AFaceResult, cv_face_t* p_faces_array, int i)
{
    // Basic identity and bounding rect for 3A.
    p3AFaceResult->faces[i].id = (int32_t)p_faces_array[i].ID;
    p3AFaceResult->faces[i].score = FDFaceMinConfidence;
    p3AFaceResult->faces[i].rect[0] = (int32_t)p_faces_array[i].rect.left;
    p3AFaceResult->faces[i].rect[1] = (int32_t)p_faces_array[i].rect.top;
    p3AFaceResult->faces[i].rect[2] = (int32_t)p_faces_array[i].rect.right;
    p3AFaceResult->faces[i].rect[3] = (int32_t)p_faces_array[i].rect.bottom;
    // Face-center motion estimate from a 2-slot sliding window of centers.
    // NOTE(review): mCcoordinate is shared across faces and frames, so with
    // more than one face the delta mixes different faces' centers — confirm
    // this is only meaningful for the single-face case.
    coordinate center;
    center.x = (int32_t)(p_faces_array[i].rect.left + p_faces_array[i].rect.right) / 2;
    center.y = (int32_t)(p_faces_array[i].rect.top + p_faces_array[i].rect.bottom) / 2;
    mCcoordinate[0] = mCcoordinate[1];
    mCcoordinate[1] = center;
    if (center.x != 0 || center.y != 0) {
        p3AFaceResult->motion[i][0] = mCcoordinate[1].x - mCcoordinate[0].x;
        p3AFaceResult->motion[i][1] = mCcoordinate[1].y - mCcoordinate[0].y;
    }
    // Landmark shortcuts picked from the 296-point array (eye/mouth/nose
    // anchors used by 3A).
    p3AFaceResult->faces[i].left_eye[0] = (int32_t)p_faces_array[i].points_more[132].x;
    p3AFaceResult->faces[i].left_eye[1] = (int32_t)p_faces_array[i].points_more[132].y;
    p3AFaceResult->faces[i].right_eye[0] = (int32_t)p_faces_array[i].points_more[160].x;
    p3AFaceResult->faces[i].right_eye[1] = (int32_t)p_faces_array[i].points_more[160].y;
    p3AFaceResult->faces[i].mouth[0] = (int32_t)p_faces_array[i].points_more[229].x;
    p3AFaceResult->faces[i].mouth[1] = (int32_t)p_faces_array[i].points_more[229].y;
    p3AFaceResult->leyex0[i] = (int32_t)p_faces_array[i].points_more[108].x;
    p3AFaceResult->leyey0[i] = (int32_t)p_faces_array[i].points_more[108].y;
    p3AFaceResult->leyex1[i] = (int32_t)p_faces_array[i].points_more[120].x;
    p3AFaceResult->leyey1[i] = (int32_t)p_faces_array[i].points_more[120].y;
    p3AFaceResult->reyex0[i] = (int32_t)p_faces_array[i].points_more[136].x;
    p3AFaceResult->reyey0[i] = (int32_t)p_faces_array[i].points_more[136].y;
    p3AFaceResult->reyex1[i] = (int32_t)p_faces_array[i].points_more[148].x;
    p3AFaceResult->reyey1[i] = (int32_t)p_faces_array[i].points_more[148].y;
    p3AFaceResult->nosex[i] = (int32_t)p_faces_array[i].points_more[167].x;
    p3AFaceResult->nosey[i] = (int32_t)p_faces_array[i].points_more[167].y;
    p3AFaceResult->mouthx0[i] = (int32_t)p_faces_array[i].points_more[191].x;
    p3AFaceResult->mouthy0[i] = (int32_t)p_faces_array[i].points_more[191].y;
    p3AFaceResult->mouthx1[i] = (int32_t)p_faces_array[i].points_more[209].x;
    p3AFaceResult->mouthy1[i] = (int32_t)p_faces_array[i].points_more[209].y;
    p3AFaceResult->leyeux[i] = (int32_t)p_faces_array[i].points_more[133].x;
    p3AFaceResult->leyeuy[i] = (int32_t)p_faces_array[i].points_more[133].y;
    p3AFaceResult->leyedx[i] = (int32_t)p_faces_array[i].points_more[134].x;
    p3AFaceResult->leyedy[i] = (int32_t)p_faces_array[i].points_more[134].y;
    p3AFaceResult->reyeux[i] = (int32_t)p_faces_array[i].points_more[161].x;
    p3AFaceResult->reyeuy[i] = (int32_t)p_faces_array[i].points_more[161].y;
    p3AFaceResult->reyedx[i] = (int32_t)p_faces_array[i].points_more[162].x;
    p3AFaceResult->reyedy[i] = (int32_t)p_faces_array[i].points_more[162].y;
    // Full landmark dump (x/y/occlusion per point) for the app layer.
    for (int k = 0; k < MAX_FFD_NUM; k++) {
        in->faceData.faceInfoOri.ffd_data[i].x[k] = (int32_t)(p_faces_array[i].points_more[k].x);
        in->faceData.faceInfoOri.ffd_data[i].y[k] = (int32_t)(p_faces_array[i].points_more[k].y);
        in->faceData.faceInfoOri.ffd_data[i].occlusion[k] = (int32_t)p_faces_array[i].landmarks.occlusion[k];
    }
    // Rotation-out-of-plane direction from yaw (5-degree dead zone).
    if (p_faces_array[i].yaw < NEGATIVE_ROTATE_5) {
        p3AFaceResult->posInfo[i].rop_dir = FACE_RIGHT;
    } else if (p_faces_array[i].yaw > ROTATE_5) {
        p3AFaceResult->posInfo[i].rop_dir = FACE_LEFT;
    } else {
        p3AFaceResult->posInfo[i].rop_dir = FACE_FRONT;
    }
    // Rotation-in-plane bucket from roll, in 30-degree steps
    // (0 = upright; 1..3 positive roll; 9..11 negative roll).
    if (p_faces_array[i].roll >= NEGATIVE_ROTATE_15 && p_faces_array[i].roll < ROTATE_15) {
        p3AFaceResult->posInfo[i].rip_dir = 0;
    } else if (p_faces_array[i].roll >= NEGATIVE_ROTATE_90 && p_faces_array[i].roll < NEGATIVE_ROTATE_75) {
        p3AFaceResult->posInfo[i].rip_dir = 9;
    } else if (p_faces_array[i].roll >= NEGATIVE_ROTATE_75 && p_faces_array[i].roll < NEGATIVE_ROTATE_45) {
        p3AFaceResult->posInfo[i].rip_dir = 10;
    } else if (p_faces_array[i].roll >= NEGATIVE_ROTATE_45 && p_faces_array[i].roll < NEGATIVE_ROTATE_15) {
        p3AFaceResult->posInfo[i].rip_dir = 11;
    } else if (p_faces_array[i].roll >= ROTATE_15 && p_faces_array[i].roll < ROTATE_45) {
        p3AFaceResult->posInfo[i].rip_dir = 1;
    } else if (p_faces_array[i].roll >= ROTATE_45 && p_faces_array[i].roll < ROTATE_75) {
        p3AFaceResult->posInfo[i].rip_dir = 2;
    } else if (p_faces_array[i].roll >= ROTATE_75 && p_faces_array[i].roll < ROTATE_90) {
        p3AFaceResult->posInfo[i].rip_dir = 3;
    }
    // Signed in-plane angle relative to device rotation, normalized below to
    // (-180, 180].
    switch (in->Rotation_Info) {
    case 0:
    case 180:
        p3AFaceResult->fld_rip[i] = in->Rotation_Info - (int)p_faces_array[i].roll;
        break;
    case 90:
    case 270:
        p3AFaceResult->fld_rip[i] = (in->Rotation_Info - 180) - (int)p_faces_array[i].roll;
        break;
    default:
        break;
    }
    if (p3AFaceResult->fld_rip[i] > 180) {
        p3AFaceResult->fld_rip[i] -= 360;
    }
    // Yaw sign is mirrored for the front camera.
    if (m_ImageSensorFacing == ImageSensorFacingFront) {
        p3AFaceResult->fld_rop[i] = -(int)p_faces_array[i].yaw;
    } else {
        p3AFaceResult->fld_rop[i] = (int)p_faces_array[i].yaw;
    }
    faceRect face;
    face.left = (float)p_faces_array[i].rect.left;
    face.top = (float)p_faces_array[i].rect.top;
    face.right = (float)p_faces_array[i].rect.right;
    face.bottom = (float)p_faces_array[i].rect.bottom;
    // NOTE(review): (int)(angle + 0.5) truncates toward zero, which rounds
    // incorrectly for negative angles — confirm intended.
    in->faceData.faceInfoOri.roll[i] = (int)(p_faces_array[i].roll + 0.5);
    in->faceData.faceInfoOri.yaw[i] = (int)(p_faces_array[i].yaw + 0.5);
    in->faceData.faceInfoOri.pitch[i] = (int)(p_faces_array[i].pitch + 0.5);
    // App-facing ROI stored as left/top/width/height.
    in->faceData.faceInfoOri.face_roi[i].id = (int32_t)p_faces_array[i].ID;
    in->faceData.faceInfoOri.face_roi[i].confidence = FDFaceMinConfidence;
    in->faceData.faceInfoOri.face_roi[i].faceRect.left = (int32_t)face.left;
    in->faceData.faceInfoOri.face_roi[i].faceRect.top = (int32_t)face.top;
    in->faceData.faceInfoOri.face_roi[i].faceRect.width = (int32_t)(face.right - face.left);
    in->faceData.faceInfoOri.face_roi[i].faceRect.height = (int32_t)(face.bottom - face.top);
    in->faceData.faceInfoOri.faceid[i] = (int32_t)p_faces_array[i].ID;
    CAM_LOGD("faceInfo %d_%d, %d x %d x %d x %d", p3AFaceResult->faces[i].id, p3AFaceResult->faces[i].score,
        p3AFaceResult->faces[i].rect[0], p3AFaceResult->faces[i].rect[1], p3AFaceResult->faces[i].rect[2], p3AFaceResult->faces[i].rect[3]);
}

    2.4 :FFD卸载

FFD卸载比较简单,主要通过vendorFDUnload对所有资源进行释放。

/**
 * Tears down the FD session and releases all SDK resources.
 *
 * @return 0 on success, -1 if the session was never initialized.
 */
MINT32 CustomCvFadApi::uninit()
{
    if (pSenseTimelib == nullptr) {
        CAM_LOGE("Error: pSenseTimelib is NULL");
        return -1;
    }
    vendorFDUnload(pSenseTimelib);
    return 0;
}

/**
 * Releases SDK handles (via vendorFDDeinit), closes the shared library and
 * frees the state struct.
 *
 * @param p_lib  State struct to destroy; freed on return.
 */
void CustomCvFadApi::vendorFDUnload(senseTime_lib_struct* p_lib)
{
    if (p_lib->g_engine_counter > 0) {
        CAM_LOGI("g_engine_counter is %d", p_lib->g_engine_counter);
        vendorFDDeinit(p_lib);
    }
    if (p_lib->cv_face_lib_ptr != nullptr) {
        CAM_LOGI("%s: close fd lib", __FUNCTION__);
        // dlopen used RTLD_NODELETE, so the library image stays resident.
        dlclose(p_lib->cv_face_lib_ptr);
        p_lib->cv_face_lib_ptr = nullptr;
    }
    // Fix: freeing p_lib left the member pSenseTimelib dangling, so a second
    // uninit() would pass its null-check and touch freed memory.
    if (p_lib == pSenseTimelib) {
        pSenseTimelib = nullptr;
    }
    free(p_lib);
}

/**
 * Destroys the tracker and attribute handles and frees the attribute buffer.
 *
 * @param p_lib  State struct whose handles/buffers are released.
 */
void CustomCvFadApi::vendorFDDeinit(senseTime_lib_struct* p_lib)
{
    if ((p_lib->g_engine_counter > 0) && (p_lib->cv_face_lib_ptr != nullptr)) {
        p_lib->g_engine_counter--;
    }
    if (p_lib->g_cv_tracker_handle_front != nullptr) {
        p_lib->cv_face_destroy_tracker(p_lib->g_cv_tracker_handle_front);
        p_lib->g_cv_tracker_handle_front = nullptr;
    }
    if (p_lib->g_cv_attr_handle != nullptr) {
        p_lib->cv_face_destroy_attribute_handle(p_lib->g_cv_attr_handle);
        p_lib->g_cv_attr_handle = nullptr;
    }
    // Fix: this branch inconsistently used the member pSenseTimelib instead of
    // the p_lib parameter used everywhere else in this function.
    if (p_lib->g_SenseTimeEngine.attr_Buff != nullptr) {
        free(p_lib->g_SenseTimeEngine.attr_Buff);
        p_lib->g_SenseTimeEngine.attr_Buff = nullptr;
    }
}

【关注我,后续持续新增专题博文,谢谢!!!】

下一篇讲解

http://www.xdnf.cn/news/15775.html

相关文章:

  • Flutter——Android原生View是如何通过Flutter进行加载
  • 关于Mysql开启慢查询日志报错:13 - Permission denied的解决方案
  • logback日志控制服务器日志输出
  • 对Yii2中开启`authenticator`后出现的跨域问题-修复
  • 图机器学习(11)——链接预测
  • 现代R语言【Tidyverse、Tidymodel】的机器学习方法
  • Typecho博客集成阿里云CDN+OSS实现全站加速方案
  • 关于字符编辑器vi、vim版本的安装过程及其常用命令:
  • 第七章 愿景09 海波龙的坑
  • 数字化转型:概念性名词浅谈(第三十讲)
  • Kotlin集合过滤
  • 文档处理控件TX Text Control系列教程:使用 C# .NET 将二维码添加到 PDF 文档
  • 从拆分到导出图片,这款工具如何简化PDF处理流程
  • 基于R语言piecewiseSEM结构方程模型在生态环境领域实践技术应用
  • 无标记点动捕:如何突破传统娱乐边界,打造沉浸式交互体验
  • RuoYi-Cloud框架功能分析与请求处理流程解析
  • docker,防火墙关闭后,未重启docker,导致端口映射失败
  • 【3D并排条状图】:附Origin详细画图教程
  • CertiK创始人顾荣辉出席上海Conflux大会,聚焦Web3全球化中的安全与合规路径
  • 区块链加密技术全景解析
  • (nice!!!)(LeetCode 每日一题) 2163. 删除元素后和的最小差值 (贪心+优先队列)
  • Java学习第五十三部分——后端常用函数
  • 从抓包GitHub Copilot认证请求,认识OAuth 2.0技术
  • 性能远超Spring Cloud Gateway!Apache ShenYu如何重新定义API网关!
  • 集成开发环境:在IntelliJ IDEA中高效运行与调试Spring Boot
  • LangChain 源码剖析(三):连接提示词与大语言模型的核心纽带——LLMChain
  • Mock 单元测试
  • AI驱动数据质量优化:破局数据治理难题
  • PyCharm高效入门指南
  • 力扣 hot100 Day48