Author: 潘浩彬

Merge branch 'dev' into 'master'

Dev



See merge request !11

Showing 48 changed files with 2445 additions and 1243 deletions


... ... @@ -56,7 +56,23 @@
"mediaparm.h": "c",
"videostream_push.h": "c",
"v4l2_record.h": "c",
"deviceinfo.h": "c"
"deviceinfo.h": "c",
"mediaproc.h": "c",
"jzsdk_network.h": "c",
"kt_irc.h": "c",
"rtk_mmp_dec.h": "c",
"rtk_mmp_enc.h": "c",
"videostreamtranscode.h": "c",
"time.h": "c",
"cam_framecatch.h": "c",
"audiostreamdeal.h": "c",
"ff_statement.h": "c",
"configparams.h": "c",
"im2d.hpp": "c",
"rk_rga.h": "c",
"rgautils.h": "c",
"im2d_type.h": "c",
"im2d.h": "c"
},
"Codegeex.GenerationPreference": "automatic",
"C_Cpp.dimInactiveRegions": false
... ...
# 编译链的配置
#1、编译链与设备类型的选择
set(DEVICE_NAME JZ_H150S)
set(DEVICE_NAME JZ_U3S)
#上一行为禁止修改行
message("**************************JZSDK构建编译开始***************************\n")
... ...
... ... @@ -7,7 +7,8 @@ set(IFLAY_TTS_MODULE VERSION_SWITCH_OFF)
set(ESPEAK_TTS_MODULE VERSION_SWITCH_OFF)
set(ALSALIB_MODULE VERSION_SWITCH_OFF)
set(OPUS_MODULE VERSION_SWITCH_OFF)
set(LIB_RTK_MMP VERSION_SWITCH_OFF)
set(LIB_RK_MMP VERSION_SWITCH_OFF)
set(LIB_RK_RGB VERSION_SWITCH_OFF)
set(LIB_USB VERSION_SWITCH_OFF)
set(LIB_KT_IRC VERSION_SWITCH_OFF)
... ... @@ -40,6 +41,13 @@ if(${DEVICE_INFO_MODULE} STREQUAL "VERSION_SWITCH_ON")
endif()
#如果要加载设备配置
if(${DEVICE_CONFIG_MODULE} STREQUAL "VERSION_SWITCH_ON")
message("\n设备配置源码加载中")
file(GLOB_RECURSE DEVICE_CONFOG_SRC ${ROOT_DIRS}Module/DeviceConfig/*.c)
list(APPEND ALL_SRC_FILES ${DEVICE_CONFOG_SRC})
endif()
#如果要加载云台模块,需要加载以下附属内容
if(${GIMBAL_MODULE} STREQUAL "VERSION_SWITCH_ON")
message("\n云台模块源码加载中")
... ... @@ -86,8 +94,11 @@ if(${MEDIA_PROC_MODULE} STREQUAL "VERSION_SWITCH_ON")
message("加载红外相机模块")
add_definitions(-DMACRO_IRC_MODULE)
message("加载RTK模块")
set(LIB_RTK_MMP VERSION_SWITCH_ON)
message("加载RK MMP模块")
set(LIB_RK_MMP VERSION_SWITCH_ON)
message("加载RK RGA模块")
set(LIB_RK_RGB VERSION_SWITCH_ON)
message("加载USB模块")
set(LIB_USB VERSION_SWITCH_ON)
... ...
... ... @@ -9,6 +9,9 @@ set(AUDIODEAL_MODULE VERSION_SWITCH_OFF)
# 信息模块
set(DEVICE_INFO_MODULE VERSION_SWITCH_OFF)
# 设备配置模块
set(DEVICE_CONFIG_MODULE VERSION_SWITCH_OFF)
# Gimbal 云台处理模块
set(GIMBAL_MODULE VERSION_SWITCH_OFF)
... ... @@ -57,12 +60,16 @@ set(IMAGEPROCESSING_MODULE VERSION_SWITCH_OFF)
# 添加信息模块
set(DEVICE_INFO_MODULE VERSION_SWITCH_ON)
# 添加设备配置模块
set(DEVICE_CONFIG_MODULE VERSION_SWITCH_ON)
# 添加UI管理模块
set(UI_CONTROL_MODULE VERSION_SWITCH_ON)
# 添加电源管理模块
set(POWER_MANAGER_MODULE VERSION_SWITCH_ON)
message("通用库加载完成")
########################### 独立库加载 ##########################################
... ...
... ... @@ -130,16 +130,28 @@ if(${FFMPEG_MODULE} STREQUAL "VERSION_SWITCH_ON")
endif()
#rtk_mmp库
if(${LIB_RTK_MMP} STREQUAL "VERSION_SWITCH_ON")
message("RTK_MMP库已加载\n")
add_definitions(-DMACRO_RTK_MPP_MODULE) #加载usb模块
#RK_mmp库
if(${LIB_RK_MMP} STREQUAL "VERSION_SWITCH_ON")
message("RK_MMP库已加载\n")
add_definitions(-DMACRO_RK_MPP_MODULE) #加载模块
include_directories(${ROOT_DIRS}/ThirdParty/RTK_mmp/aarch64-none-linux-gnu/include)
include_directories(${ROOT_DIRS}/ThirdParty/RK_mmp/aarch64-none-linux-gnu/include)
target_link_libraries(
${PROJECT_NAME}
${ROOT_DIRS}/ThirdParty/RTK_mmp/aarch64-none-linux-gnu/lib/librockchip_mpp.so.0
${ROOT_DIRS}/ThirdParty/RTK_mmp/aarch64-none-linux-gnu/lib/librockchip_vpu.so.0
${ROOT_DIRS}/ThirdParty/RK_mmp/aarch64-none-linux-gnu/lib/librockchip_mpp.so.0
${ROOT_DIRS}/ThirdParty/RK_mmp/aarch64-none-linux-gnu/lib/librockchip_vpu.so.0
)
endif()
#RK_rgb库
if(${LIB_RK_RGB} STREQUAL "VERSION_SWITCH_ON")
message("RK_RGB库已加载\n")
add_definitions(-DMACRO_RK_RGA_MODULE) #加载模块
include_directories(${ROOT_DIRS}/ThirdParty/RK_rga/aarch64-none-linux-gnu/include)
target_link_libraries(
${PROJECT_NAME}
${ROOT_DIRS}/ThirdParty/RK_rga/aarch64-none-linux-gnu/lib/librga.so
)
endif()
... ...
... ... @@ -32,7 +32,7 @@ typedef enum JZsdk_Widget_Control
JZSDK_WIDGET_SHUTTER_SWITCH = 0x0023, //快门开关
JZSDK_WIDGET_FREEZE_VIDEO = 0x0024, //冻结视频流
JZSDK_WIDGET_PSEUDO_MODE = 0x0025, //色彩输出模式
JZSDK_WIDGET_FOCAL_LENGTH = 0x0026, //焦距
JZSDK_WIDGET_ZOOM_SIZE = 0x0026, //焦距
JZSDK_WIDGET_SPC_RESET = 0x0027, //spc重置
JZSDK_WIDGET_PSEUDO_COLOR = 0x0028, //伪彩颜色
JZSDK_WIDGET_CAMERA_CORRECT_MODE = 0x0029, //相机纠正模式
... ...
... ... @@ -205,6 +205,7 @@ T_JZsdkReturnCode JZsdk_LoggerInit()
char logMessage[256];
snprintf(logMessage,256,"LOG_FILE:%s\n",LogFileName);
fprintf(logFile, "%s", logMessage);
fflush(logFile);
//dfclose(logFile);
... ... @@ -248,7 +249,7 @@ T_JZsdkReturnCode writeToLogFile(const char *data)
}
fprintf(logFile, "%s", data); // 写入新的数据
fflush(logFile);
//fclose(logFile);
// 解锁
... ... @@ -279,6 +280,7 @@ T_JZsdkReturnCode HexToLogFile(const char *data)
}
fprintf(logFile, "%x ", data[0]); // 写入新的数据
fflush(logFile);
//fclose(logFile);
... ...
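The only functional change in the logger hunks above is the fflush() added after each write: stdio buffers fprintf() output in user space, so without the flush the most recent log lines can be lost if the process dies. A minimal sketch of that pattern, assuming nothing beyond standard C and pthreads (the handle and lock names are placeholders, not the SDK's globals):

#include <stdio.h>
#include <pthread.h>

static FILE *s_logFile;                                  /* opened once at logger init */
static pthread_mutex_t s_logLock = PTHREAD_MUTEX_INITIALIZER;

static void log_write(const char *line)
{
    pthread_mutex_lock(&s_logLock);
    fprintf(s_logFile, "%s", line);                      /* lands in stdio's user-space buffer */
    fflush(s_logFile);                                   /* push it to the kernel right away */
    pthread_mutex_unlock(&s_logLock);
}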
... ... @@ -48,6 +48,8 @@ extern "C" {
#define JZ_MATH_SWAP(a, b) do { typeof(a) temp = (a); (a) = (b); (b) = temp; } while (0) //交换两个变量的值(这个宏使用了一个临时变量,但它仍然是通过宏来完成的):
#define JZ_ALIGN(x, a) (((x)+(a)-1)&~((a)-1)) //用于将 x 的值对齐到最近的 a 的倍数。这里的对齐是通过加上 a-1 然后使用位运算 &~((a)-1) 来实现的。
#ifdef __cplusplus
}
#endif
... ...
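A quick worked example of JZ_ALIGN (values chosen for illustration; the macro requires a to be a power of two):

#include <stdio.h>

#define JZ_ALIGN(x, a) (((x)+(a)-1)&~((a)-1))

int main(void)
{
    printf("%d\n", JZ_ALIGN(1080, 16)); /* 1088: 1080+15 = 1095, masked down to a multiple of 16 */
    printf("%d\n", JZ_ALIGN(1088, 16)); /* 1088: already aligned, unchanged */
    printf("%d\n", JZ_ALIGN(25, 8));    /* 32 */
    return 0;
}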
... ... @@ -15,14 +15,9 @@
#define THREAD_POOL_SIZE 5 // 线程池大小
typedef struct {
void (*task_function)(void*); //任务函数指针,用于指定 执行的任务
void* data; // 任务参数 //任务参数的地址 用于输入任务内容
} t_JZsdk_TaskMgmt_TaskInput;
typedef struct {
pthread_t thread; // 线程 //调用的线程
int is_busy; // 标识线程是否忙碌
t_JZsdk_TaskMgmt_TaskInput* task; // 线程执行的任务
t_JZsdk_TaskFuntionInput* task; // 线程执行的任务
pthread_mutex_t lock; // 互斥锁
pthread_cond_t condition; // 条件变量
} t_ThreadPool;
... ... @@ -73,7 +68,7 @@ T_JZsdkReturnCode TaskManagement_SubmitTask(void (*task_function)(void*), void*
return JZ_ERROR_SYSTEM_MODULE_CODE_FAILURE;
}
t_JZsdk_TaskMgmt_TaskInput *task = (t_JZsdk_TaskMgmt_TaskInput*)malloc(sizeof(t_JZsdk_TaskMgmt_TaskInput));
t_JZsdk_TaskFuntionInput *task = (t_JZsdk_TaskFuntionInput*)malloc(sizeof(t_JZsdk_TaskFuntionInput));
if (task == NULL)
{
// 处理内存分配失败的情况
... ... @@ -81,7 +76,7 @@ T_JZsdkReturnCode TaskManagement_SubmitTask(void (*task_function)(void*), void*
}
task->task_function = task_function;
task->data = data;
task->args = data;
//将发送内容放入任务
TaskPool[i].task = task;
... ... @@ -109,7 +104,7 @@ static void* TaskManagement_ThreadFunction(void* arg)
if (pool->task->task_function != NULL)
{
pool->task->task_function(pool->task->data);
pool->task->task_function(pool->task->args);
}
// 执行完成,将状态置为未忙碌
... ...
... ... @@ -23,6 +23,12 @@ extern "C" {
/* Exported types ------------------------------------------------------------*/
//用于传递指针函数的类型
typedef struct t_JZsdk_TaskFuntionInput{
void (*task_function)(void*); //任务函数指针,用于指定 执行的任务
void *args; // 任务参数 //任务参数的地址 用于输入任务内容
} t_JZsdk_TaskFuntionInput;
/* Exported functions --------------------------------------------------------*/
T_JZsdkReturnCode TaskManagement_Init();
T_JZsdkReturnCode TaskManagement_SubmitTask(void (*task_function)(void*), void* data);
... ...
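For orientation, a small usage sketch of the refactored task pool. It assumes an SDK header on the include path declares TaskManagement_Init / TaskManagement_SubmitTask (the header's file name is not visible in this diff); the payload struct and values are made up.

#include <stdio.h>
#include <stdlib.h>
#include "JZsdkLib.h"                          /* assumed to pull in the TaskManagement prototypes */

typedef struct { int camera_index; } t_DemoArgs;   /* hypothetical payload */

static void demo_task(void *args)
{
    t_DemoArgs *a = (t_DemoArgs *)args;
    printf("worker thread handling camera %d\n", a->camera_index);
    free(a);                                   /* payload ownership stays with the caller's design */
}

void demo_submit(void)
{
    TaskManagement_Init();                     /* creates THREAD_POOL_SIZE detached workers */

    t_DemoArgs *a = (t_DemoArgs *)malloc(sizeof(*a));
    a->camera_index = 1;
    TaskManagement_SubmitTask(demo_task, a);   /* stored as task_function/args in the new struct */
}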
... ... @@ -22,6 +22,7 @@
#include "JZsdk_haldata_deal/JZsdk_data_transmisson.h"
#include "MediaProc/MediaProc.h"
#include "UI_control/WidegMgmt/JZsdk_Widget.h"
#include "DeviceConfig/DeviceConfig.h"
#if APP_VERSION == APP_PSDK
#include "fc_subscription/test_fc_subscription.h"
... ... @@ -347,11 +348,8 @@ T_JZsdkReturnCode Main_APP_Psdk()
else if (DEVICE_VERSION == JZ_C1)
{
//引脚初始化
Ircut_Init();
//视频流模块初始化
MediaProc_Init();
//初始化C1
JZC1_Init();
}
... ...
... ... @@ -7,19 +7,19 @@
#define VERSION_CHOOSE_H
#include "./ConfigParams.h"
//1~10行 除了D可以修改版本选择 禁止动任何东西
#define DEVICE_VERSION JZ_C1
#define DEVICE_VERSION JZ_U3S
//禁止修改行 选择是串口程序 还是 psdk程序
#define APP_VERSION APP_PSDK
//禁止修改行 板子型号
#define PLATFORM_VERSION PLATFORM_H3
#define PLATFORM_VERSION PLATFORM_V3S
//禁止修改行 串口连接程序的软件版本号
#define MAJOR_VERSION 0x01
#define MINOR_VERSION 0x03
#define MODIFY_VERSION 0x09
#define DEBUG_VERSION 0x05
#define DEBUG_VERSION 0x03
//禁止修改行 滤波方式
#define FILTERING_TYPE HIGH_PASS_FILTERING
... ... @@ -48,6 +48,7 @@
//是否开启媒体功能
#ifdef MACRO_MEDIA_PROC_MODULE
#define MEDIA_PROC_CONFIG_STATUS VERSION_SWITCH_ON
#define MEDIA_PROC_CONFIG_STATUS_ON
#else
#define MEDIA_PROC_CONFIG_STATUS VERSION_SWITCH_OFF
#endif
... ... @@ -73,10 +74,13 @@
#endif
//是否加载RTK_MPP模块
#ifdef MACRO_RTK_MPP_MODULE
#define RTK_MPP_STATUS VERSION_SWITCH_ON
#else
#define RTK_MPP_STATUS VERSION_SWITCH_OFF
#ifdef MACRO_RK_MPP_MODULE
#define RTK_MPP_STATUS_ON
#endif
//是否加载RTK_RGA模块
#ifdef MACRO_RK_RGA_MODULE
#define RTK_RGA_STATUS_ON
#endif
//是否加载WIRINGPI模块
... ... @@ -100,6 +104,7 @@
//是否开启媒体管理功能
#define MEDIA_PROC_CONFIG_STATUS VERSION_SWITCH_ON
#define MEDIA_PROC_CONFIG_STATUS_ON
//是否开启红外相机功能
#define IRC_CONFIG_STATUS VERSION_SWITCH_ON
... ... @@ -108,7 +113,10 @@
#define USB_CONFIG_STATUS VERSION_SWITCH_ON
//是否加载RTK_MPP模块
#define RTK_MPP_STATUS VERSION_SWITCH_ON
#define RTK_MPP_STATUS_ON
//是否加载RGA模块
#define RTK_RGA_STATUS_ON
//是否加载WIRINGPI模块
#define WIRINGPI_STATUS_ON
... ...
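Both switch styles now coexist in the config above: the value-style macros are tested with #if, while the newly added *_ON macros are tested with #ifdef, as the later source files in this merge do. A minimal illustration (module name taken from the diff):

/* old value-style switch: the macro always exists and carries a value */
#if MEDIA_PROC_CONFIG_STATUS == VERSION_SWITCH_ON
/*     ... media code ... */
#endif

/* new presence-style switch: the macro is either defined or absent */
#ifdef MEDIA_PROC_CONFIG_STATUS_ON
/*     ... media code ... */
#endif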
... ... @@ -401,7 +401,7 @@ static int RecvDeal_RealTimeMP2_Limit_transmission(int Port, char *getbuf, int l
T_JZsdkReturnCode ret;
//提取出数据
int DataLen = ((int)getbuf[3] << 8 ) + (int)getbuf[4] - 2 - 9;
int DataLen = (((int)getbuf[3] << 8 ) + (int)getbuf[4]) - 9 - 2;
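    /* Worked numbers for the line above (byte values are illustrative): with getbuf[3] = 0x00
     * and getbuf[4] = 0x8b the big-endian 16-bit field is 139; stripping the 9-byte header and
     * the 2-byte tail leaves DataLen = 128, exactly the limit checked just below. The new
     * grouping does not change the result, it only makes the order of the subtraction explicit. */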
if (DataLen > 128)
{
JZSDK_LOG_ERROR("传输得到的mp2实时数据帧长度超出128上限");
... ... @@ -1065,6 +1065,8 @@ static T_JZsdkReturnCode RecvDeal_CheckStatus_AudioDetailMessage(int Port, char
**********/
static int RecvDeal_Amplifier_stop(int Port, char *getbuf)
{
T_JZsdkReturnCode ret;
JZSDK_LOG_INFO("%s,强制关闭功放",RecvDeal_GetPortName(Port));
//获取帧的序列号
... ... @@ -1073,7 +1075,15 @@ static int RecvDeal_Amplifier_stop(int Port, char *getbuf)
int status = JZ_FLAGCODE_OFF;
#if MEGAPHONE_CONFIG_STATUS == VERSION_SWITCH_ON
return Megaphone_Amplifier_param(JZ_FLAGCODE_SET, &status);
//先关闭播放
ret = UIcontrol_StopPlayAudio(NO_SPECIFIED);
if (ret == JZ_ERRORCODE_REALTIMEVOICE_HAS_BEEN_ON) //如果喊话器正处于实时播放,禁止关闭功放操作
{
JZsdk_Uart_SendDeal_Reply_Failure(Port, FrameSequence);
}
Megaphone_Amplifier_param(JZ_FLAGCODE_SET, &status);
#endif
//回复操作成功
... ... @@ -1088,6 +1098,8 @@ static int RecvDeal_Amplifier_stop(int Port, char *getbuf)
**********/
static int RecvDeal_Amplifier_open(int Port, char *getbuf)
{
T_JZsdkReturnCode ret;
JZSDK_LOG_INFO("%s,强制开启功放",RecvDeal_GetPortName(Port));
//获取帧的序列号
... ... @@ -1096,7 +1108,15 @@ static int RecvDeal_Amplifier_open(int Port, char *getbuf)
int status = JZ_FLAGCODE_ON;
#if MEGAPHONE_CONFIG_STATUS == VERSION_SWITCH_ON
return Megaphone_Amplifier_param(JZ_FLAGCODE_SET, &status);
//先关闭播放
ret = UIcontrol_StopPlayAudio(NO_SPECIFIED);
if (ret == JZ_ERRORCODE_REALTIMEVOICE_HAS_BEEN_ON) //如果喊话器正处于实时播放,禁止关闭功放操作
{
JZsdk_Uart_SendDeal_Reply_Failure(Port, FrameSequence);
}
Megaphone_Amplifier_param(JZ_FLAGCODE_SET, &status);
#endif
//回复操作成功
... ... @@ -1123,12 +1143,13 @@ static int RecvDeal_Amplifier_auto(int Port)
**********/
static int RecvDeal_SetVolume(int Port, char *getbuf)
{
JZSDK_LOG_INFO("%s,调节音量",RecvDeal_GetPortName(Port));
//获取帧的序列号
int FrameSequence = JZsdk_Get_FrameSequence(getbuf);
int value = (int)getbuf[9];
JZSDK_LOG_INFO("%s,调节音量:%d",RecvDeal_GetPortName(Port),value);
UIcontrol_SetVolume(Port, value);
//回复操作成功
... ... @@ -3384,9 +3405,9 @@ static int RecvDeal_ObtainGimbalLinkage(int Port, char *getbuf)
*
*
**********/
static int RecvDeal_FrameErrorReply(int Port, char *getbuf)
static int RecvDeal_FrameErrorReply(int Port, char *getbuf, int len)
{
JZSDK_LOG_INFO("%s,帧指令错误:帧错误或者是帧无对应操作",RecvDeal_GetPortName(Port));
JZSDK_LOG_INFO("%s,帧指令错误:帧错误或者是帧无对应操作,长度为:%d",RecvDeal_GetPortName(Port),len);
//无法正常获取帧的序列号
//回复操作失败
... ... @@ -4024,13 +4045,13 @@ int RecvDeal_InstructInput(int Port, int Receive_mode, unsigned char *getbuf, in
//帧错误回复操作失败
case JZ_ERROR_SYSTEM_FRAME_ERROR:
RecvDeal_FrameErrorReply(Port,getbuf);
RecvDeal_FrameErrorReply(Port,getbuf,len);
return JZ_ERROR_SYSTEM_FRAME_ERROR;
break;
default:
//无指令,发送操作失败
RecvDeal_FrameErrorReply(Port,getbuf);
RecvDeal_FrameErrorReply(Port,getbuf,len);
return JZ_ERROR_SYSTEM_FRAME_ERROR;
break;
... ...
... ... @@ -399,15 +399,15 @@ int UartDeal_Recv_interface(int type, int Uart_fd , unsigned char *getbuf, int g
{
if (Uart_fd == Uart_DEV1_fd)
{
JZSDK_LOG_INFO("串口-设备1号,接受到数据+未处理数据的长度len: %d", getbufLen);
//JZSDK_LOG_INFO("串口-设备1号,接受到数据+未处理数据的长度len: %d", getbufLen);
}
else if (Uart_fd == Uart_DEV2_fd)
{
JZSDK_LOG_INFO("串口-设备2号,接受到数据+未处理数据的长度len: %d", getbufLen);
//JZSDK_LOG_INFO("串口-设备2号,接受到数据+未处理数据的长度len: %d", getbufLen);
}
else if (Uart_fd == Uart_4G_fd)
{
JZSDK_LOG_INFO("串口-设备4G,接受到数据+未处理数据的长度len: %d", getbufLen);
//JZSDK_LOG_INFO("串口-设备4G,接受到数据+未处理数据的长度len: %d", getbufLen);
}
else
{
... ... @@ -419,7 +419,7 @@ int UartDeal_Recv_interface(int type, int Uart_fd , unsigned char *getbuf, int g
{
if (Uart_fd == HAL_DATA_TRANSMISSION)
{
JZSDK_LOG_INFO("hal_data,接受到数据+未处理数据的长度len: %d", getbufLen);
//JZSDK_LOG_INFO("hal_data,接受到数据+未处理数据的长度len: %d", getbufLen);
}
else
{
... ... @@ -549,7 +549,7 @@ int UartDeal_Recv_interface(int type, int Uart_fd , unsigned char *getbuf, int g
HaveReadLen = HaveReadLen + FrameLen;
HaveDealLen = HaveReadLen;
JZSDK_LOG_INFO("framelen%d read%d [read]:%x get%d",FrameLen ,HaveReadLen, getbuf[HaveReadLen], getbufLen);
JZSDK_LOG_INFO("fd:%x f_len%d h_r%d [h_r]:%x get%d", Uart_fd,FrameLen ,HaveReadLen, getbuf[HaveReadLen-1], getbufLen);
FrameLen = 0;
FrameFlag = 0;
... ...
... ... @@ -69,6 +69,9 @@ T_JZsdkReturnCode AudioDeal_Init()
AudioDeakInfo_index->FilterInfo = NULL;
FF_Filter_Init(AudioDeakInfo_index, 0x00);
//初始化mp2音频流
File_Stream_deal_Init(AV_CODEC_ID_MP2);
Audiodeal_status = JZ_FLAGCODE_ON;
JZSDK_LOG_INFO("MODULE_AUDIODEL_INIT_COMPLETE");
... ... @@ -261,6 +264,9 @@ T_JZsdkReturnCode AudioDeal_StopDeal()
//清空alsa里的缓冲区
Alsa_DropPcm(AudioDeakInfo_index);
//清空mp3音频流
Stream_Player_Stop(AudioDeakInfo_index);
while (AudioDeakInfo_index->AudioDeal_Alsa_Finish_Flag != JZ_FLAGCODE_OFF)
{
delayMs(1);
... ...
... ... @@ -31,6 +31,8 @@ int PCM_PooL_Interface_PcmData_WithoutReply(struct AudioDealInfo *AD_Info,unsign
T_JZsdkReturnCode AudioFile_Stream_Interface_PcmData(struct AudioDealInfo *AD_Info, AVFrame *frame);
T_JZsdkReturnCode mp3_Stream_Interface_Mp3Data(struct AudioDealInfo *AD_Info, unsigned int in_sampleRate, unsigned char *data, int dataSize);
T_JZsdkReturnCode File_Stream_deal_Init(enum AVCodecID id);
T_JZsdkReturnCode Stream_Player_Stop(struct AudioDealInfo *AD_Info);
#ifdef __cplusplus
}
... ...
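A short usage sketch of the stream-player interface declared above, matching how AudioDeal_Init and AudioDeal_StopDeal use it. The 48000 Hz sample rate and the chunk source are illustrative; the three prototypes are assumed to come from this header.

#include <libavcodec/avcodec.h>   /* for AV_CODEC_ID_MP2 */

void demo_mp2_stream(struct AudioDealInfo *AD_Info, unsigned char *chunk, int chunk_len)
{
    File_Stream_deal_Init(AV_CODEC_ID_MP2);                  /* once, at audio-module init */

    /* feed each received chunk; the parser inside cuts it into whole MP2 frames */
    mp3_Stream_Interface_Mp3Data(AD_Info, 48000, chunk, chunk_len);

    Stream_Player_Stop(AD_Info);                             /* on stop: flushes the decoder */
}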
... ... @@ -21,52 +21,71 @@ static const AVCodec *codec;
T_JZsdkReturnCode Stream_Player_decode(struct AudioDealInfo *AD_Info, AVCodecContext *dec_ctx, AVPacket *pkt, AVFrame *frame);
int File_Stream_deal_Init(enum AVCodecID id)
T_JZsdkReturnCode File_Stream_deal_Init(enum AVCodecID id)
{
//注册包
pkt = av_packet_alloc();
if(!pkt)
{
JZSDK_LOG_ERROR("av_packet_alloc failed.");
}
//寻找解码器
codec = avcodec_find_decoder(id);
if (!codec) {
JZSDK_LOG_ERROR("Codec not found\n");
}
//获得裸流的解析器
parser = av_parser_init(codec->id);
if (!parser) {
JZSDK_LOG_ERROR("Parser not found\n");
}
//分配解码上下文
cdc_ctx = avcodec_alloc_context3(codec);
if (!cdc_ctx) {
JZSDK_LOG_ERROR("Could not allocate audio codec context\n");
}
/* open it */
//将解码器和解码上下文绑定
if (avcodec_open2(cdc_ctx, codec, NULL) < 0)
{
JZSDK_LOG_ERROR("Could not open codec\n");
}
//如果解码器不存在,初始化解码器
if (!decoded_frame)
{
if (!(decoded_frame = av_frame_alloc()))
{
JZSDK_LOG_ERROR("Could not allocate audio frame\n");
return JZ_ERROR_SYSTEM_MODULE_CODE_FAILURE;
}
}
JZSDK_LOG_INFO("file stream init complete");
return JZ_ERROR_SYSTEM_MODULE_CODE_SUCCESS;
}
//输入mp3的实时数据,以及本次数据的长度
T_JZsdkReturnCode mp3_Stream_Interface_Mp3Data(struct AudioDealInfo *AD_Info, unsigned int in_sampleRate, unsigned char *data, int dataSize)
{
//JZSDK_LOG_DEBUG("mp3 stream输入 %d 字节数据", dataSize);
//重置重采样器
FF_Resample_Reset(AD_Info, in_sampleRate, (AVChannelLayout)AV_CHANNEL_LAYOUT_MONO, AV_SAMPLE_FMT_S16);
//检查滤波器
FF_Filter_Init(AD_Info, 0x01);
int ret = 0;
unsigned char *databufPtr = data;
int databufSize = dataSize;
//将数据输入到
while(dataSize > 0)
while(databufSize > 0)
{
//如果解码器不存在,初始化解码器
if (!decoded_frame)
{
if (!(decoded_frame = av_frame_alloc()))
{
JZSDK_LOG_ERROR("Could not allocate audio frame\n");
return JZ_ERROR_SYSTEM_MODULE_CODE_FAILURE;
}
}
//检查参数,并将正确的数据输入到pkt中
//parser 解析器
... ... @@ -74,22 +93,16 @@ T_JZsdkReturnCode mp3_Stream_Interface_Mp3Data(struct AudioDealInfo *AD_Info, un
//pkt输出的数据指针
//data datasize 输入的数据指针
//pts、dts、pos:时间戳和位置信息,一般可以设置为AV_NOPTS_VALUE和0。
int ret = av_parser_parse2(parser, cdc_ctx, &pkt->data, &pkt->size, data, dataSize, AV_NOPTS_VALUE, AV_NOPTS_VALUE, 0);
ret = av_parser_parse2(parser, cdc_ctx, &pkt->data, &pkt->size, databufPtr, databufSize, AV_NOPTS_VALUE, AV_NOPTS_VALUE, 0);
if (ret < 0) {
printf("Error while parsing\n");
return -1;
return JZ_ERROR_SYSTEM_MODULE_CODE_FAILURE;
}
//重置重采样器
FF_Resample_Reset(AD_Info, in_sampleRate, (AVChannelLayout)AV_CHANNEL_LAYOUT_MONO, AV_SAMPLE_FMT_S16);
//检查滤波器
FF_Filter_Init(AD_Info, 0x01);
//数据指针 往后一个解析长度
//长度指针 减少一个被解析数据的长度
data += ret;
dataSize -= ret;
databufPtr += ret;
databufSize -= ret;
//如果输出有长度 解码输出的数据
if (pkt->size > 0)
... ... @@ -97,6 +110,8 @@ T_JZsdkReturnCode mp3_Stream_Interface_Mp3Data(struct AudioDealInfo *AD_Info, un
Stream_Player_decode(AD_Info, cdc_ctx, pkt, decoded_frame);
}
}
return JZ_ERROR_SYSTEM_MODULE_CODE_SUCCESS;
}
... ... @@ -108,7 +123,13 @@ T_JZsdkReturnCode Stream_Player_decode(struct AudioDealInfo *AD_Info, AVCodecCon
//发送数据包给解码器解码,已将数据解码为pcm原始数据
ret = avcodec_send_packet(dec_ctx, pkt);
if (ret < 0)
if (ret == AVERROR(EAGAIN))
{
char errbuf[128];
av_strerror(ret, errbuf, sizeof(errbuf));
JZSDK_LOG_ERROR("Error while sending a packet to the decoder %s",errbuf);
}
else if (ret < 0)
{
JZSDK_LOG_ERROR("Error submitting the packet to the decoder, ret=%d\n",ret);
return JZ_ERROR_SYSTEM_MODULE_CODE_FAILURE;
... ... @@ -120,7 +141,7 @@ T_JZsdkReturnCode Stream_Player_decode(struct AudioDealInfo *AD_Info, AVCodecCon
/* read all the output frames (in general there may be any number of them */
//读取输出的帧
while ( (ret >= 0) && (AD_Info->Flag_AudioDataGenerationImplement == JZ_FLAGCODE_ON) )
while ( (ret >= 0) && AD_Info->AudioDeal_ResampleAndFilter_Execute_Flag == JZ_FLAGCODE_ON)
{
//从解码器中读取解码后的音频帧数据
ret = avcodec_receive_frame(dec_ctx, frame);
... ... @@ -134,6 +155,8 @@ T_JZsdkReturnCode Stream_Player_decode(struct AudioDealInfo *AD_Info, AVCodecCon
JZSDK_LOG_ERROR("Error during decoding\n");
return JZ_ERROR_SYSTEM_MODULE_CODE_FAILURE;
}
//printf("输出了:%d的数据\n",frame->nb_samples);
int out_nb_samples = 0;
... ... @@ -149,7 +172,7 @@ T_JZsdkReturnCode Stream_Player_decode(struct AudioDealInfo *AD_Info, AVCodecCon
//将临时帧 放入 均衡滤波器
FF_Filter_push_frame_to_fliter(AD_Info, temp_frame);
while(AD_Info->Flag_AudioDataGenerationImplement == JZ_FLAGCODE_ON)
while(AD_Info->AudioDeal_ResampleAndFilter_Execute_Flag == JZ_FLAGCODE_ON)
{
//得到滤波器输出的音频帧 eq_frame
int fret = FF_Filter_get_frame_from_filter(AD_Info, eq_frame);
... ... @@ -158,6 +181,8 @@ T_JZsdkReturnCode Stream_Player_decode(struct AudioDealInfo *AD_Info, AVCodecCon
break;
}
//printf("pcm播放 %d 数据\n",eq_frame->nb_samples);
//播放改滤波后的帧
Pcm_AlsaPlay(AD_Info, (unsigned char*)eq_frame->data[0], eq_frame->nb_samples);
... ... @@ -179,4 +204,14 @@ T_JZsdkReturnCode Stream_Player_decode(struct AudioDealInfo *AD_Info, AVCodecCon
//释放掉输出的变量
av_frame_unref(temp_frame);
av_frame_unref(eq_frame);
}
T_JZsdkReturnCode Stream_Player_Stop(struct AudioDealInfo *AD_Info)
{
pkt->data = NULL;
pkt->size = 0;
Stream_Player_decode(AD_Info, cdc_ctx, pkt, decoded_frame);
return JZ_ERROR_SYSTEM_MODULE_CODE_SUCCESS;
}
\ No newline at end of file
... ...
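For reference, a minimal standalone sketch of the FFmpeg parse/decode loop that the file above implements, including the NULL-packet flush that Stream_Player_Stop relies on. Error handling is trimmed, and AV_CODEC_ID_MP2 plus the input buffer are illustrative; this is not the SDK code itself.

#include <libavcodec/avcodec.h>

static void demo_decode(const uint8_t *data, int size)
{
    const AVCodec *codec          = avcodec_find_decoder(AV_CODEC_ID_MP2);
    AVCodecParserContext *parser  = av_parser_init(codec->id);
    AVCodecContext *ctx           = avcodec_alloc_context3(codec);
    AVPacket *pkt                 = av_packet_alloc();
    AVFrame  *frame               = av_frame_alloc();
    avcodec_open2(ctx, codec, NULL);

    while (size > 0) {
        /* the parser cuts the raw byte stream into whole compressed frames */
        int used = av_parser_parse2(parser, ctx, &pkt->data, &pkt->size,
                                    data, size, AV_NOPTS_VALUE, AV_NOPTS_VALUE, 0);
        data += used;
        size -= used;
        if (pkt->size == 0)
            continue;
        avcodec_send_packet(ctx, pkt);
        /* drain every PCM frame the decoder can return for this packet */
        while (avcodec_receive_frame(ctx, frame) == 0) {
            /* frame->data[0] now holds decoded samples -> resample / filter / play */
        }
    }

    /* flushing: a NULL-data packet tells the decoder no more input is coming,
     * which is what Stream_Player_Stop() above does */
    pkt->data = NULL;
    pkt->size = 0;
    avcodec_send_packet(ctx, pkt);
    while (avcodec_receive_frame(ctx, frame) == 0) { /* last buffered frames */ }

    av_frame_free(&frame);
    av_packet_free(&pkt);
    avcodec_free_context(&ctx);
    av_parser_close(parser);
}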
... ... @@ -296,13 +296,25 @@ T_JZsdkReturnCode FF_Filter_Init(struct AudioDealInfo *AD_Info, int AudioType)
{
T_JZsdkReturnCode ret;
int NewType = FILTER_NORMAL;
int NewType = FILTER_NORMAL_AUDIO;
//通过g_FilterMode 与 音频内容 得出滤波类型
if (g_FilterMode == 0x00) //默认滤波
{
//无须管音频类型,直接同一个默认滤波器
NewType = FILTER_NORMAL;
if (AudioType == 0x00) //文本类型
{
NewType = FILTER_NORMAL_TTS;
}
else if (AudioType == 0x01) //音频类型
{
NewType = FILTER_NORMAL_AUDIO;
}
else
{
JZSDK_LOG_ERROR("错误的音频类型");
return JZ_ERROR_SYSTEM_MODULE_CODE_FAILURE;
}
}
else if (g_FilterMode == 0x01) //30M滤波
{
... ... @@ -348,8 +360,12 @@ T_JZsdkReturnCode FF_Filter_Init(struct AudioDealInfo *AD_Info, int AudioType)
//初始化滤波器
switch (NewType)
{
case FILTER_NORMAL:
ret = FF_Filter_ParamInit(AD_Info, FILTER_PARAM_NORMAL);
case FILTER_NORMAL_AUDIO:
ret = FF_Filter_ParamInit(AD_Info, FILTER_PARAM_NORMAL_AUDIO);
break;
case FILTER_NORMAL_TTS:
ret = FF_Filter_ParamInit(AD_Info, FILTER_PARAM_NORMAL_TTS);
break;
case FILTER_NORMAL_M30_TTS:
... ... @@ -361,7 +377,7 @@ T_JZsdkReturnCode FF_Filter_Init(struct AudioDealInfo *AD_Info, int AudioType)
break;
default:
ret = FF_Filter_ParamInit(AD_Info, FILTER_PARAM_NORMAL);
ret = FF_Filter_ParamInit(AD_Info, FILTER_PARAM_NORMAL_AUDIO);
break;
}
... ...
... ... @@ -12,7 +12,7 @@
/* Includes ------------------------------------------------------------------*/
#include "JZsdk_Base/JZsdk_Code/JZsdk_Code.h"
#include "BaseConfig.h"
#ifdef __cplusplus
extern "C" {
... ... @@ -46,7 +46,18 @@ extern "C" {
equalizer=f=8000:t=q:w=2.0:g=0, \
equalizer=f=16000:t=q:w=2.0:g=0"
#define FILTER_FORMAL_3 "equalizer=f=31:t=q:w=2.0:g=-48, \
#define FILTER_FORMAL_3_AUDIO "equalizer=f=31:t=q:w=2.0:g=-48, \
equalizer=f=62:t=q:w=2.0:g=-36, \
equalizer=f=125:t=q:w=2.0:g=-30, \
equalizer=f=250:t=q:w=2.0:g=-26, \
equalizer=f=500:t=q:w=2.0:g=-20, \
equalizer=f=1000:t=q:w=2.0:g=-12, \
equalizer=f=2000:t=q:w=2.0:g=-8, \
equalizer=f=4000:t=q:w=2.0:g=+1, \
equalizer=f=8000:t=q:w=2.0:g=+2, \
equalizer=f=16000:t=q:w=2.0:g=+2"
#define FILTER_FORMAL_3_TTS "equalizer=f=31:t=q:w=2.0:g=-48, \
equalizer=f=62:t=q:w=2.0:g=-36, \
equalizer=f=125:t=q:w=2.0:g=-30, \
equalizer=f=250:t=q:w=2.0:g=-26, \
... ... @@ -83,6 +94,21 @@ extern "C" {
anequalizer=c0 f=3800 w=1200 g=-22 t=0|c1 f=3800 w=1200 g=-22 t=0, \
anequalizer=c0 f=3100 w=600 g=-16 t=0|c1 f=3100 w=600 g=-16 t=0"
#define FILTER_FORMAL_1_M30_2 "equalizer=f=31:t=q:w=2.0:g=-48, \
equalizer=f=62:t=q:w=2.0:g=-36, \
equalizer=f=125:t=q:w=2.0:g=-30, \
equalizer=f=250:t=q:w=2.0:g=-26, \
equalizer=f=500:t=q:w=2.0:g=-20, \
equalizer=f=1000:t=q:w=2.0:g=-12, \
equalizer=f=2000:t=q:w=2.0:g=-8, \
equalizer=f=4000:t=q:w=2.0:g=+1, \
equalizer=f=8000:t=q:w=2.0:g=+2, \
equalizer=f=16000:t=q:w=2.0:g=+2, \
anequalizer=c0 f=4000 w=1400 g=-28 t=0|c1 f=4000 w=1400 g=-28 t=0, \
anequalizer=c0 f=3700 w=1400 g=-24 t=0|c1 f=3700 w=1400 g=-24 t=0"
/*00 35
02 37
05 39
... ... @@ -118,15 +144,31 @@ extern "C" {
typedef enum FilterList{
FILTER_NORMAL = 1,
FILTER_NORMAL_AUDIO = 1,
FILTER_NORMAL_TTS = 2,
FILTER_NORMAL_M30_AUDIO = 5,
FILTER_NORMAL_M30_TTS = 6,
}FilterList;
#define FILTER_PARAM_NORMAL FILTER_FORMAL_3
//普通音频滤波参数
#define FILTER_PARAM_NORMAL_AUDIO FILTER_FORMAL_3_AUDIO
//普通TTS滤波参数
#define FILTER_PARAM_NORMAL_TTS FILTER_FORMAL_3_TTS
#define FILTER_PARAM_M30_TTS FILTER_FORMAL_1_M30_TTS
//h10t的滤波器跟其他的不一样,所以单独定义
#if DEVICE_VERSION == JZ_H10T
#define FILTER_PARAM_M30_AUDIO FILTER_FORMAL_1_M30_2
#else
#define FILTER_PARAM_M30_AUDIO FILTER_FORMAL_1_M30_1
#endif
/* Exported types ------------------------------------------------------------*/
/* Exported functions --------------------------------------------------------*/
... ...
/**
********************************************************************
* @file Kt_Irc.h
* Kt_Irc的头文件
* @file DeviceConfig.h
* DeviceConfig.h的头文件
*
*********************************************************************
*/
/* Define to prevent recursive inclusion 避免重定义 -------------------------------------*/
#ifndef KT_IRC_H
#define KT_IRC_H
#ifndef DEVICE_CONFIG_H
#define DEVICE_CONFIG_H
/* Includes ------------------------------------------------------------------*/
#include "JZsdk_Base/JZsdk_Code/JZsdk_Code.h"
#include "DeviceConfig/JZC1/JZC1.h"
/* Includes ------------------------------------------------------------------*/
#ifdef __cplusplus
extern "C" {
#endif
/* Exported constants --------------------------------------------------------*/
/* 常亮定义*/
/* Exported types ------------------------------------------------------------*/
/* Exported functions --------------------------------------------------------*/
T_JZsdkReturnCode JZsdk_Kt_Irc_Camera_Init();
T_JZsdkReturnCode JZsdk_Kt_Irc_ShutterSwitch(int value);
#ifdef __cplusplus
}
... ...
#include <stdio.h>
#include <stdlib.h>
#include <string.h>
#include <pthread.h>
#include <time.h>
#include <unistd.h>
#include "JZsdkLib.h"
#include "BaseConfig.h"
#include "JZsdk_usb_bulk/JZsdk_usb_bulk.h"
#include "IRCUT/ircut.h"
#ifdef RTK_MPP_STATUS_ON
#include "MediaProc/MultProc/RTK_mmp/RTK_mmp.h"
#include "MediaProc/MultProc/RTK_mmp/Dec/RTK_mmp_dec.h"
#include "MediaProc/MultProc/RTK_mmp/Enc/RTK_mmp_enc.h"
#include "MediaProc/Camera/Camera.h"
#endif
#ifdef RTK_RGA_STATUS_ON
#include "MediaProc/RgaProc/RK_Rga/RK_Rga.h"
//c1的rga结构体
typedef struct C1_RgaInfo
{
//源图像
RK_RgaImage *src_img;
//裁剪图像
RK_RgaImage *corp_img;
//目标图像
RK_RgaImage *dst_img;
//放大倍数
int scale;
}C1_RgaInfo;
static C1_RgaInfo *g_C1_RgaIrcInfo = NULL;
static C1_RgaInfo *g_C1_RgaOptInfo = NULL;
static unsigned char *g_MixedIrc_Buffer = NULL;
static unsigned char *g_MixedOpt_Buffer = NULL;
static unsigned int g_MixedOptBuffer_UseFlag = JZ_FLAGCODE_OFF;
#endif
#ifdef MEDIA_PROC_CONFIG_STATUS_ON
#include "MediaProc/Camera/Cam_FrameCatch/Cam_FrameCatch.h"
#include "MediaProc/MediaParm.h"
#include "MediaProc/VideoMgmt/VideoStreamPush/VideoStream_Push.h"
#include "MediaProc/VideoMgmt/VideoMgmt.h"
#include "MediaProc/IRC_funtion/IRC_Param.h"
#include "MediaProc/IRC_funtion/IRC_funtion.h"
#include "MediaProc/MediaProc_Param.h"
static void *g_usb_index = NULL;
// 定义 昆腾的 帧头长度和帧头内容
#define FRAME_HEADER_SIZE 4
static const unsigned char FRAME_HEADER[FRAME_HEADER_SIZE] = {0xaa, 0xbb, 0xcc, 0xdd};
#define NSEC_PER_SEC 1000000000L //1秒的纳秒数
#define TARGET_FPS 30
static unsigned char FrameBuffer[FIRST_HEIGHT * FIRST_WIDTH *2]; //用于存储帧数据的缓冲区
static unsigned int FrameBufferLen = 0; //用于存储帧数据的长度
static unsigned int FrameBuffer_UseFlag = JZ_FLAGCODE_OFF;
#ifdef RTK_RGA_STATUS_ON
static T_JZsdkReturnCode JZC1_RgaDeal(C1_RgaInfo *rga_info, int resize, unsigned char *image, unsigned int *imgage_size);
static T_JZsdkReturnCode JZC1_RgaInit(C1_RgaInfo **rgaInfo, int dst_width, int dst_height, int dst_format);
#endif
//数据推送函数
static T_JZsdkReturnCode JZC1_PushFrame(int CameraIndex, unsigned char* data, unsigned int data_len)
{
int currentIndex = VideoMgmt_GetVideoStreamFlowIndexNum(); //获取当前视频流索引
//无视频流
if (currentIndex == 0)
{
//不推送视频
return JZ_ERROR_SYSTEM_MODULE_CODE_SUCCESS;
}
//红外相机
if (currentIndex == VIDEOMGMT_STREAMING_FLOW_INDEX_FIRST && CameraIndex == VIDEOMGMT_STREAMING_FLOW_INDEX_FIRST)
{
//推送数据到流转模块
VideoMgmt_Single_FrameIn(data, data_len);
}
//光学相机
if(CameraIndex == VIDEOMGMT_STREAMING_FLOW_INDEX_SECOND && currentIndex == VIDEOMGMT_STREAMING_FLOW_INDEX_SECOND)
{
//推送数据到流转模块
VideoMgmt_Single_FrameIn(data, data_len);
}
//组合视频流
if (currentIndex == VIDEOMGMT_STREAMING_FLOW_INDEX_THIRD && CameraIndex == VIDEOMGMT_STREAMING_FLOW_INDEX_THIRD)
{
//推送数据到流转模块
VideoMgmt_Single_FrameIn(data, data_len);
}
return JZ_ERROR_SYSTEM_MODULE_CODE_SUCCESS;
}
static T_JZsdkReturnCode JZC1_OptDeal(unsigned char *data, unsigned int data_len)
{
#ifdef RTK_MPP_STATUS_ON
//JZSDK_LOG_INFO("JZC1_OptDeal");
MppFrame yuv_data = NULL; //用于传递yuv数据的地址
MppPacket Packet = NULL;
//输入数据进入解码器
RTK_mmp_dec_input(JZsdk_RtkMmpGetDecHandleAddr(1), data, data_len, &yuv_data);
// int width = mpp_frame_get_width(yuv_data);
// int height = mpp_frame_get_height(yuv_data);
// int h_stride = mpp_frame_get_hor_stride(yuv_data);
// int v_stride = mpp_frame_get_ver_stride(yuv_data);
// JZSDK_LOG_INFO("w:%d h:%d hor:%d ver:%d",width,height,h_stride,v_stride);
//光学数据缩放
int resize = JZ_FLAGCODE_OFF;
Camera_param(JZ_FLAGCODE_GET, CAMERA_RESIZE, &resize);
//将size转化为倍数
int ZoomRatio = 0;
switch (resize)
{
case 0:
ZoomRatio = JZ_FLAGCODE_OFF;
break;
case 1:
ZoomRatio = 2;
break;
case 2:
ZoomRatio = 4;
break;
case 3:
ZoomRatio = 8;
break;
default:
ZoomRatio = JZ_FLAGCODE_OFF;
break;
}
if (ZoomRatio != JZ_FLAGCODE_OFF)
{
MppBuffer temp = mpp_frame_get_buffer(yuv_data);
RK_U32 h = mpp_frame_get_hor_stride(yuv_data);
RK_U32 w = mpp_frame_get_ver_stride(yuv_data);
int size = (h * w * 1.5);
unsigned char *temp_data = (unsigned char *)malloc(size);
memcpy(temp_data, mpp_buffer_get_ptr(temp), size);
JZC1_RgaDeal(g_C1_RgaOptInfo, ZoomRatio, temp_data, &size);
//重新将数据放回
memcpy(mpp_buffer_get_ptr(temp), temp_data, size);
free(temp_data);
}
//将数据放入混合缓冲区
MppBuffer temp = mpp_frame_get_buffer(yuv_data);
if (g_MixedOpt_Buffer != NULL && g_MixedOptBuffer_UseFlag == JZ_FLAGCODE_OFF)
{
g_MixedOptBuffer_UseFlag = JZ_FLAGCODE_ON;
memcpy(g_MixedOpt_Buffer, mpp_buffer_get_ptr(temp), (mpp_frame_get_hor_stride(yuv_data) * mpp_frame_get_ver_stride(yuv_data) * 1.5));
g_MixedOptBuffer_UseFlag = JZ_FLAGCODE_OFF;
}
//将返回的数据输入进编码器
RTK_mmp_enc_yuv_to_h264_byFrame(JZsdk_RtkMmpGetEncHandleAddr(1), yuv_data, &Packet);
//获取数据指针与长度
int packet_len = mpp_packet_get_length(Packet);
void *ptr = mpp_packet_get_pos(Packet);
//推送视频流
JZC1_PushFrame(2, (unsigned char *)ptr, packet_len);
//释放掉编码图像
mpp_packet_deinit(&Packet);
#endif
return JZ_ERROR_SYSTEM_MODULE_CODE_SUCCESS;
}
//红外数据纠正函数, 暂定全部替换
static T_JZsdkReturnCode JZC1_Irc_DataCorrect(unsigned char *data)
{
//像素修正
data[0] = data[5];
data[1] = data[6];
data[2] = data[5];
data[3] = data[6];
    return JZ_ERROR_SYSTEM_MODULE_CODE_SUCCESS;
}
//在这里将灰度图数据转换成目标数据
static T_JZsdkReturnCode JZC1_IrcDeal(unsigned char *data, unsigned int data_len)
{
//JZSDK_LOG_DEBUG("irc数据处理");
if (data_len == 0)
{
JZSDK_LOG_ERROR("无数据错误");
return JZ_ERROR_SYSTEM_MODULE_CODE_FAILURE;
}
//红外数据纠正处理
JZC1_Irc_DataCorrect(data);
//将 8位的raw数据 合并为16位数据
U16_t * u16Data = (U16_t *)malloc(sizeof(U16_t) * (data_len / 2));
int u16DataSize = data_len / 2;
//JZSDK_LOG_DEBUG("data_len:%d u16DataSize:%d", data_len / 2, FIRST_HEIGHT * FIRST_WIDTH);
//合成像素,u8转换合并成u16
JZsdk_Merge_U8_to_U16_byReverse(data, data_len, u16Data, &u16DataSize);
//将灰度图数据转换为原始码流数据
unsigned char *raw_data = NULL;
int raw_data_len = 0;
//将原始码流数据转换为rgb数据
IRC_FrameDeal(u16Data, u16DataSize, &raw_data, &raw_data_len);
//获取放大的倍数
//光学数据缩放
int resize = JZ_FLAGCODE_OFF;
Camera_param(JZ_FLAGCODE_GET, CAMERA_RESIZE, &resize);
//将size转化为倍数
int ZoomRatio = 0;
switch (resize)
{
case 0:
ZoomRatio = JZ_FLAGCODE_OFF;
break;
case 1:
ZoomRatio = 2;
break;
case 2:
ZoomRatio = 4;
break;
case 3:
//红外做不了8倍放大咧
ZoomRatio = 4;
//ZoomRatio = 8;
break;
default:
ZoomRatio = 0;
break;
}
if (ZoomRatio != JZ_FLAGCODE_OFF)
{
JZC1_RgaDeal(g_C1_RgaIrcInfo, ZoomRatio, raw_data, &raw_data_len);
}
//将数据放入混合缓冲区
if (g_MixedIrc_Buffer != NULL)
{
memcpy(g_MixedIrc_Buffer, raw_data, raw_data_len);
}
//将原始码流数据写入到编码器 并转换为h264
unsigned char *h264Data = NULL;
unsigned int h264DataLen = 0;
#ifdef RTK_MPP_STATUS_ON
MppPacket Packet = NULL;
RTK_mmp_enc_data_to_h264(JZsdk_RtkMmpGetEncHandleAddr(0), raw_data, raw_data_len, &Packet);
h264DataLen = mpp_packet_get_length(Packet);
h264Data = (unsigned char *)mpp_packet_get_pos(Packet);
//EncCfg->Packet_eos = mpp_packet_get_eos(packet);
// printf("获取到编码内容 len:%d\n",packet_len);
//释放掉packet
mpp_packet_deinit(&Packet);
#endif
//将h264数据推送
JZC1_PushFrame(1, h264Data, h264DataLen);
//释放内存
if (raw_data != NULL)
{
free(raw_data);
raw_data = NULL;
}
if (u16Data != NULL)
{
free(u16Data);
u16Data = NULL;
}
return JZ_ERROR_SYSTEM_MODULE_CODE_SUCCESS;
}
static void *JZC1_IrcDataBuffer_Thread(void *args)
{
struct timespec now;
//获取起始时间
struct timespec start_time;
clock_gettime(CLOCK_MONOTONIC, &start_time);
long long prev_time = start_time.tv_sec * NSEC_PER_SEC + start_time.tv_nsec;
//设置间隔时间
long long period = NSEC_PER_SEC / TARGET_FPS;
unsigned char *TempBuffer = (unsigned char *)malloc(163840);
unsigned int TempBufferLen = 0;
while (1)
{
//获取当前时间
clock_gettime(CLOCK_MONOTONIC, &now);
long long current_time = now.tv_sec * NSEC_PER_SEC + now.tv_nsec;
//计算时间差
long long elapsed_time = current_time - prev_time;
//超过33ms
if (elapsed_time >= period)
{
while (FrameBuffer_UseFlag == JZ_FLAGCODE_ON)
{
delayUs(100);
}
FrameBuffer_UseFlag = JZ_FLAGCODE_ON;
memset(TempBuffer, 0, 163840);   /* TempBuffer is heap-allocated, so clear its full 163840-byte size */
memcpy(TempBuffer, FrameBuffer, FrameBufferLen);
TempBufferLen = FrameBufferLen;
FrameBuffer_UseFlag = JZ_FLAGCODE_OFF;
//红外数据缓冲线程
JZC1_IrcDeal(TempBuffer, TempBufferLen);
prev_time = current_time;
}
// 为了防止过于频繁地调用 clock_gettime,可以添加一个小的睡眠时间
// 例如,休眠1毫秒(100000000纳秒),以减少CPU占用
struct timespec req = { .tv_sec = 0, .tv_nsec = 1000000 };
nanosleep(&req, NULL);
}
}
static T_JZsdkReturnCode JZC1_IrcDataSave(unsigned char *data, unsigned int data_len)
{
//避免缓冲区被同时操作
while (FrameBuffer_UseFlag == JZ_FLAGCODE_ON)
{
delayUs(100);
}
FrameBuffer_UseFlag = JZ_FLAGCODE_ON;
memset(FrameBuffer, 0, sizeof(FrameBuffer));
memcpy(FrameBuffer, data, data_len);
FrameBufferLen = data_len;
FrameBuffer_UseFlag = JZ_FLAGCODE_OFF;
return JZ_ERROR_SYSTEM_MODULE_CODE_SUCCESS;
}
//红外数据接收线程
static void *JZC1_IrcDataRecv_Thread(void *args)
{
int frame_len = FIRST_WIDTH *2* FIRST_HEIGHT; //163840
unsigned char buf[frame_len]; //usb数据缓冲区需要为512的倍数
unsigned char frameData[frame_len]; // 存储整帧数据的画面缓冲区
unsigned int lineNum = 0;
int frame = 0;
int frameDataLen = 0;//缓冲区的数据长度
/*****
*
* 数据格式说明
*
* 帧头 0xaa 0xbb 0x02 0x80 0x01 0x00 + 行数 如第一行0x00 0x01
* 数据包长度为 0x0280
* 数据包 一包的行数为256 0x0100
* 当前数据为第x行 x= 0x0001
* 两位数据为一个点
* 接着把前4个点的数据 用第五个点替换掉
* *****/
while (1)
{
int realLen;
memset(buf,0,sizeof(buf));
T_JZsdkReturnCode ret = JZsdk_HalUsbBulk_ReadData(&g_usb_index, buf, sizeof(buf), &realLen);
if (ret != JZ_ERROR_SYSTEM_MODULE_CODE_SUCCESS)
{
// 处理读取错误
JZSDK_LOG_ERROR("读取错误");
continue;
//return JZ_ERROR_SYSTEM_MODULE_CODE_FAILURE;
}
if (realLen != 0)
{
//JZSDK_LOG_INFO("读取到%d 字节",realLen);
}
//寻找数据是否存在帧头
for (int i = 0; i < realLen; i++)
{
// 验证帧头
if (memcmp(buf+i, FRAME_HEADER, FRAME_HEADER_SIZE) != 0)
{
// 帧头不匹配,可能是噪声或错误的数据包
continue;
}
/**********************
*
* 方法一,将usb缓冲区调整大小 到超过640*256, 然后直接输出整段画面,避免重复复制,节省处理时间
*
* ******************************/
//如果查找到帧头
//查看是否是第0帧
if (frame == 0)
{
//重置掉画面缓冲区
memset(frameData,0,sizeof(frameData));
//将数据置于缓冲区
frameDataLen = (realLen-i);
memcpy( &frameData[0], buf + i, frameDataLen);
//让画面帧强跳到第一帧
frame = 1;
continue;
}
//如果是第一帧
if (frame == 1)
{
memcpy( &frameData[frameDataLen], buf, frame_len-frameDataLen );
JZC1_IrcDataSave(frameData, frame_len);
frame = 2;
frameDataLen = 0;
//memset(frameData,0,sizeof(frameData));
}
//如果不是第1帧,且上段数据小于一画面段,说明为数据被切割
if ( i<frame_len)
{
//则于前端残余数据拼接,并输出
if (frame%2==0 && (frame != 1) )
{
memcpy( &frameData[frame_len-i], buf, i);
//将未处理raw数据放入缓冲区
//JZSDK_LOG_INFO("写入1 %d %x", i, frameData[20]);
JZC1_IrcDataSave(frameData, frame_len);
//JZSDK_LOG_INFO("放入数据到缓冲区");
//memset(frameData,0,sizeof(frameData));
}
frame++;
}
//如果剩余长度超出一画数据,将画面数据整段输出
if ( (i + frame_len) < realLen)
{
if (frame%2==0)
{
//JZSDK_LOG_INFO("写入2");
memcpy( &frameData[0], buf, frame_len);
//将未处理raw数据放入缓冲区
JZC1_IrcDataSave(frameData, frame_len);
//JZSDK_LOG_INFO("放入数据到缓冲区");
}
frame++;
continue;
}
//JZSDK_LOG_INFO("i:%d, frame_len:%d realLen:%d frame:%d",i,frame_len,realLen,frame);
//如果剩余数据小于一画,存进画面缓冲区
//memset(frameData,0,sizeof(frameData));
memcpy(frameData, buf+i, (realLen-i));
break;
}
}
}
//C1 红外相机数据的初始化
static T_JZsdkReturnCode JZsdk_JZC1_Irc_Data_Init()
{
T_JZsdkReturnCode ret;
//初始化接收的usb口
ret = JZsdk_HalUsbBulk_Init(&g_usb_index, 0, 0, LINUX_USB_PID, LINUX_USB_VID, USB_IN_POINT, USB_OUT_POINT);
if (ret != JZ_ERROR_SYSTEM_MODULE_CODE_SUCCESS)
{
return ret;
}
//初始化usb接收线程
pthread_t ReadDataTask;
pthread_attr_t task_attribute; //线程属性
pthread_attr_init(&task_attribute); //初始化线程属性
pthread_attr_setdetachstate(&task_attribute, PTHREAD_CREATE_DETACHED); //设置线程分离属性
int Urcdata_Protection = pthread_create(&ReadDataTask,&task_attribute,JZC1_IrcDataRecv_Thread,NULL); //线程
if(Urcdata_Protection != 0)
{
JZSDK_LOG_ERROR("创建视频usb线程失败!");
return JZ_ERROR_SYSTEM_MODULE_CODE_FAILURE;
}
//初始化送usb数据去处理的线程
pthread_t BufferDataTask;
pthread_attr_t BufferDataTask_attribute; //线程属性
pthread_attr_init(&BufferDataTask_attribute); //初始化线程属性
pthread_attr_setdetachstate(&BufferDataTask_attribute, PTHREAD_CREATE_DETACHED); //设置线程分离属性
int bufferdata_Protection = pthread_create(&BufferDataTask,&BufferDataTask_attribute,JZC1_IrcDataBuffer_Thread,NULL); //线程
if(bufferdata_Protection != 0)
{
JZSDK_LOG_ERROR("创建usb缓冲失败!");
return JZ_ERROR_SYSTEM_MODULE_CODE_FAILURE;
}
return JZ_ERROR_SYSTEM_MODULE_CODE_SUCCESS;
}
/********************
*
* 混合视频说明
*
* 1920*1088 做处理 并输出为 1920*1080
* __________________________________________________________________
* |_________________________________ |
* | |________________________________|
* | | |
* | | 1920*1088 /2 960* 544 |
* | | |
* | 320*256 拉伸4到 1280*1024 | |
* | 裁剪出 3/4 到 960 * 768 | |
* | | |
* | |________________________________|
* |_________________________________| |
* |__________________________________________________________________|
*
* ******************/
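/* Worked numbers for the layout above (assuming FIRST = 320x256 and SECOND = 1920x1080, which is
 * what the sizes in the sketch imply, not values read from MediaParm.h):
 *   IR source:        320 x 256  -> raw USB frame = 320 * 2 * 256 = 163840 bytes (two bytes per pixel)
 *   IR upscaled x4:   1280 x 1024
 *   IR crop to 3/4:    960 x 768, centred inside the 1280 x 1024 image
 *   Optical source:   1920 x 1088 (1080 aligned up to 16) -> halved to 960 x 544
 *   Output canvas:    1920 x 1088 NV12 = 1920 * 1088 * 3 / 2 bytes, IR crop on the left and the
 *                     halved optical picture on the right, displayed as 1920 x 1080 per the note above. */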
/******************
*
* 混合视频处理
*
*
* ********************/
static void JZC1_MixedVideo_Deal()
{
}
static void *MixedVideo_Thread(void *args)
{
#ifdef RTK_RGA_STATUS_ON
struct timespec now;
int d_ret = 0;
//获取起始时间
struct timespec start_time;
clock_gettime(CLOCK_MONOTONIC, &start_time);
long long prev_time = start_time.tv_sec * NSEC_PER_SEC + start_time.tv_nsec;
//设置间隔时间
long long period = NSEC_PER_SEC / TARGET_FPS;
/***** 红外图像参数 **********************************************************************************************************/
//红外源图像
im_rect Irc_Rect;
Irc_Rect.x = 0;
Irc_Rect.y = 0;
Irc_Rect.width = JZ_ALIGN(FIRST_WIDTH, 16);
Irc_Rect.height = JZ_ALIGN(FIRST_HEIGHT, 16);
int IRc_Format = RK_FORMAT_YCbCr_420_SP;
//红外放大图像
im_rect Irc_ResizeRect;
Irc_ResizeRect.x = 0;
Irc_ResizeRect.y = 0;
Irc_ResizeRect.width = JZ_ALIGN(FIRST_WIDTH, 16) * 4;
Irc_ResizeRect.height = JZ_ALIGN(FIRST_HEIGHT, 16) * 4;
//红外裁剪区域
im_rect Irc_CropRect;
Irc_CropRect.width = Irc_ResizeRect.width / 4 * 3;
Irc_CropRect.height = Irc_ResizeRect.height / 4 * 3;
Irc_CropRect.x = (Irc_ResizeRect.width - Irc_CropRect.width)/2;
Irc_CropRect.y = (Irc_ResizeRect.height - Irc_CropRect.height)/2;
//红外源图像
RK_RgaImage *Irc_SrcImg = NULL;
RK_Rga_ImageInit(&Irc_SrcImg, Irc_Rect.width, Irc_Rect.height, IRc_Format, Irc_Rect.x, Irc_Rect.y, Irc_Rect.width, Irc_Rect.height);
//红外放大图像
RK_RgaImage *Irc_ResizeImg = NULL;
RK_Rga_ImageInit(&Irc_ResizeImg, Irc_ResizeRect.width, Irc_ResizeRect.height, IRc_Format, Irc_ResizeRect.x, Irc_ResizeRect.y, Irc_ResizeRect.width, Irc_ResizeRect.height);
//红外裁剪图像
RK_RgaImage *Irc_CropImg = NULL;
RK_Rga_ImageInit(&Irc_CropImg, Irc_CropRect.width, Irc_CropRect.height, IRc_Format, 0, 0, Irc_CropRect.width, Irc_CropRect.height);
/***** 光学图像参数 **********************************************************************************************************/
//光学源图像
im_rect Opt_Rect;
Opt_Rect.x = 0;
Opt_Rect.y = 0;
Opt_Rect.width = JZ_ALIGN(SECOND_WIDTH, 16);
Opt_Rect.height = JZ_ALIGN(SECOND_HEIGHT, 16);
int Opt_Format = RK_FORMAT_YCbCr_420_SP;
//光学源缩小图像
im_rect Opt_ResizeRect;
Opt_ResizeRect.x = 0;
Opt_ResizeRect.y = 0;
Opt_ResizeRect.width = JZ_ALIGN(SECOND_WIDTH, 16) / 2;
Opt_ResizeRect.height = JZ_ALIGN(SECOND_HEIGHT, 16) / 2;
//光学源图像
RK_RgaImage *Opt_SrcImg = NULL;
RK_Rga_ImageInit(&Opt_SrcImg, Opt_Rect.width, Opt_Rect.height, Opt_Format, Opt_Rect.x, Opt_Rect.y, Opt_Rect.width, Opt_Rect.height);
//光学的缩小图像
RK_RgaImage *Opt_ResizeImg = NULL;
RK_Rga_ImageInit(&Opt_ResizeImg, Opt_ResizeRect.width, Opt_ResizeRect.height, Opt_Format, Opt_ResizeRect.x, Opt_ResizeRect.y, Opt_ResizeRect.width, Opt_ResizeRect.height);
/***** 目标图像参数 ***************************************************************************************************/
//目标图像的矩形
im_rect Dst_Rect;
Dst_Rect.x = 0;
Dst_Rect.y = 0;
Dst_Rect.width = JZ_ALIGN(SECOND_WIDTH, 16);
Dst_Rect.height = JZ_ALIGN(SECOND_HEIGHT, 16);
int Dst_Format = RK_FORMAT_YCbCr_420_SP;
//目标图像中光学图像的矩形
im_rect Dst_OptRect;
Dst_OptRect.x = Dst_Rect.width / 2;
Dst_OptRect.y = Dst_Rect.height / 4;
Dst_OptRect.width = Opt_ResizeRect.width;
Dst_OptRect.height = Opt_ResizeRect.height;
//目标图像中红外图像的矩形
im_rect Dst_IrcRect;
Dst_IrcRect.x = 0;
Dst_IrcRect.y = (Dst_Rect.height - Irc_CropRect.height) / 2;
Dst_IrcRect.width = Irc_CropRect.width;
Dst_IrcRect.height = Irc_CropRect.height;
//目标图像
RK_RgaImage *DstImg = NULL;
RK_Rga_ImageInit(&DstImg, Dst_Rect.width, Dst_Rect.height, Dst_Format, Dst_Rect.x, Dst_Rect.y, Dst_Rect.width, Dst_Rect.height);
JZSDK_LOG_DEBUG("Dstimg witdh :%d height:%d DstImg->buf_size:%d", DstImg->width, DstImg->height, DstImg->buf_size);
//空缓冲区
rga_buffer_t EmptyImg = {0};
im_rect EmptyRect = {0};
//开始绘制画面,待优化,如多步骤合成为一个步骤
while (1)
{
//获取当前时间
clock_gettime(CLOCK_MONOTONIC, &now);
long long current_time = now.tv_sec * NSEC_PER_SEC + now.tv_nsec;
//计算时间差
long long elapsed_time = current_time - prev_time;
//超过33ms
if (elapsed_time >= period)
{
if (g_MixedIrc_Buffer == NULL || g_MixedOpt_Buffer == NULL)
{
delayMs(100);
continue;
}
while (g_MixedOptBuffer_UseFlag == JZ_FLAGCODE_ON)
{
delayUs(100);
}
g_MixedOptBuffer_UseFlag = JZ_FLAGCODE_ON;
//填充输出图像
memset(DstImg->buf, 0x80, DstImg->buf_size);
//混合视频处理
memset(Irc_SrcImg->buf, 0, Irc_SrcImg->buf_size);
memset(Opt_SrcImg->buf, 0, Opt_SrcImg->buf_size);
//将数据放入缓冲区
memcpy(Irc_SrcImg->buf, g_MixedIrc_Buffer, (JZ_ALIGN(FIRST_WIDTH, 16)) * (JZ_ALIGN(FIRST_HEIGHT, 16)) * 3 / 2);
memcpy(Opt_SrcImg->buf, g_MixedOpt_Buffer, JZ_ALIGN(SECOND_WIDTH, 16) * JZ_ALIGN(SECOND_HEIGHT, 16) * 3 / 2);
g_MixedOptBuffer_UseFlag = JZ_FLAGCODE_OFF;
//光学数据处理
//缩小图像到1/2
d_ret = imresize(Opt_SrcImg->img, Opt_ResizeImg->img);
if (d_ret != IM_STATUS_SUCCESS)
{
printf("irc resize failed\n");
continue;
}
//红外数据处理
//放大图像到4倍
d_ret = imresize(Irc_SrcImg->img, Irc_ResizeImg->img);
if (d_ret != IM_STATUS_SUCCESS)
{
printf("opt resize failed\n");
continue;
}
//裁切红外图像
d_ret = imcrop(Irc_ResizeImg->img, Irc_CropImg->img, Irc_CropRect);
if (d_ret != IM_STATUS_SUCCESS)
{
printf("opt crop failed\n");
continue;
}
//将缩放好的光学画面放入目标画面
d_ret = improcess(Opt_ResizeImg->img, DstImg->img, EmptyImg, Opt_ResizeRect, Dst_OptRect, EmptyRect, IM_SYNC);
if (d_ret != IM_STATUS_SUCCESS)
{
printf("opt improcess failed\n");
continue;
}
//将裁切好的红外画面放入目标画面
d_ret = improcess(Irc_CropImg->img, DstImg->img, EmptyImg, Irc_CropImg->rect, Dst_IrcRect, EmptyRect, IM_SYNC);
if (d_ret != IM_STATUS_SUCCESS)
{
printf("irc improcess failed\n");
}
//将原始码流数据写入到编码器 并转换为h264
unsigned char *h264Data = NULL;
unsigned int h264DataLen = 0;
//JZSDK_LOG_DEBUG("DstImg->buf_size:%d", DstImg->buf_size);
#ifdef RTK_MPP_STATUS_ON
MppPacket Packet = NULL;
RTK_mmp_enc_data_to_h264(JZsdk_RtkMmpGetEncHandleAddr(2), DstImg->buf, DstImg->buf_size, &Packet);
h264DataLen = mpp_packet_get_length(Packet);
h264Data = (unsigned char *)mpp_packet_get_pos(Packet);
//EncCfg->Packet_eos = mpp_packet_get_eos(packet);
// printf("获取到编码内容 len:%d\n",packet_len);
//释放掉packet
mpp_packet_deinit(&Packet);
#endif
//推送视频流
JZC1_PushFrame(VIDEOMGMT_STREAMING_FLOW_INDEX_THIRD, h264Data, h264DataLen);
//JZSDK_LOG_DEBUG("混合一帧 :%d", h264DataLen);
//更新时间
prev_time = current_time;
}
// 为了防止过于频繁地调用 clock_gettime,可以添加一个小的睡眠时间
// 例如,休眠1毫秒(100000000纳秒),以减少CPU占用
struct timespec req = { .tv_sec = 0, .tv_nsec = 1000000 };
nanosleep(&req, NULL);
}
#endif
}
/******************
*
* 混合视频初始化
*
*
* ********************/
static T_JZsdkReturnCode JZC1_MixedVideo_Init()
{
g_MixedIrc_Buffer = (unsigned char *)malloc(JZ_ALIGN(FIRST_WIDTH, 16)*JZ_ALIGN(FIRST_HEIGHT, 16)*3/2);
g_MixedOpt_Buffer = (unsigned char *)malloc(JZ_ALIGN(SECOND_WIDTH, 16)*JZ_ALIGN(SECOND_HEIGHT, 16)*3/2);
//混合视频初始化
pthread_t Task;
pthread_attr_t attribute; //线程属性
pthread_attr_init(&attribute); //初始化线程属性
pthread_attr_setdetachstate(&attribute, PTHREAD_CREATE_DETACHED); //设置线程分离属性
int bufferdata_Protection = pthread_create(&Task,&attribute,MixedVideo_Thread,NULL); //线程
if(bufferdata_Protection != 0)
{
JZSDK_LOG_ERROR("创建混合视频初始化失败!");
return JZ_ERROR_SYSTEM_MODULE_CODE_FAILURE;
}
JZSDK_LOG_INFO("MixedVidoe_Init Success");
return JZ_ERROR_SYSTEM_MODULE_CODE_SUCCESS;
}
#endif
//JZ_C1 的媒体初始化
static T_JZsdkReturnCode JZC1_MediaInit()
{
T_JZsdkReturnCode ret = JZ_ERROR_SYSTEM_MODULE_CODE_SUCCESS;
//初始化媒体模块
#ifdef MEDIA_PROC_CONFIG_STATUS_ON
//初始化videoMgmt模块
VideoMgmt_Init();
//初始化Mulit模块
#ifdef RTK_MPP_STATUS_ON
//初始化红外的编解码器
RTK_mmp_enc_Init(JZsdk_RtkMmpGetEncHandleAddr(0), MPP_VIDEO_CodingAVC, MPP_FMT_YUV420SP, FIRST_WIDTH, FIRST_HEIGHT, 30, 5);
//初始化光学的编解码器
RTK_mmp_dec_Init(JZsdk_RtkMmpGetDecHandleAddr(1), MPP_VIDEO_CodingMJPEG, MPP_FMT_YUV420SP, SECOND_WIDTH, SECOND_HEIGHT);
RTK_mmp_enc_Init(JZsdk_RtkMmpGetEncHandleAddr(1), MPP_VIDEO_CodingAVC, MPP_FMT_YUV420SP, SECOND_WIDTH, SECOND_HEIGHT, 30, 15);
// RTK_mmp_dec_Init(JZsdk_RtkMmpGetDecHandleAddr(1), MPP_VIDEO_CodingMJPEG, MPP_FMT_YUV420SP, SECOND_WIDTH, SECOND_HEIGHT);
// RTK_mmp_enc_Init(JZsdk_RtkMmpGetEncHandleAddr(1), MPP_VIDEO_CodingAVC, MPP_FMT_YUV420SP, SECOND_WIDTH, SECOND_HEIGHT, 30, 15);
//初始化混合视频流的编码器
RTK_mmp_enc_Init(JZsdk_RtkMmpGetEncHandleAddr(2), MPP_VIDEO_CodingAVC, MPP_FMT_YUV420SP, SECOND_WIDTH, SECOND_HEIGHT, 30, 15);
#endif
//初始化Camera模块
int CameraFd = -1;
ret = V4l2_Camarainit2(&CameraFd, SECOND_WIDTH, SECOND_HEIGHT, 30);
if (ret != JZ_ERROR_SYSTEM_MODULE_CODE_SUCCESS)
{
JZSDK_LOG_ERROR("光学相机启动失败");
}
else
{
//光学相机的抓取
ret = JZsdk_FrameCatch_Single(JZC1_OptDeal);
}
//初始化IRC_funtion模块
ret = IRC_ParamInit(FIRST_HEIGHT, FIRST_WIDTH, 25);
if (ret != JZ_ERROR_SYSTEM_MODULE_CODE_SUCCESS)
{
JZSDK_LOG_ERROR("初始化红外的数据处理失败");
}
else
{
//初始化红外的数据输入
ret = JZsdk_JZC1_Irc_Data_Init();
if (ret != JZ_ERROR_SYSTEM_MODULE_CODE_SUCCESS)
{
JZSDK_LOG_ERROR("红外相机初始化失败");
}
}
//设置默认参数
//默认推送红外摄像头 后续改成 红外+光学 的组合画面
VideoMgmt_VideoStreamFlowIndex(VIDEOMGMT_STREAMING_FLOW_INDEX_THIRD);
//设置快门为开
JZsdk_Camera_ShutterSwitch(JZ_FLAGCODE_ON);
//设置伪彩颜色为hot
int value = 8;
Camera_param(JZ_FLAGCODE_SET, CAMERA_PSEUDO_COLOR, &value);
//设置为默认输出模式
value = 0;
Camera_param(JZ_FLAGCODE_SET, CAMERA_PIXEL_PSEUDO_COLOR_MODE, &value);
//设置默认打开自动校正
value = JZ_FLAGCODE_ON;
Proc_IRC_param(JZ_FLAGCODE_SET, JZSDK_WIDGET_BAD_PIXEL_CORRECT_MODE, &value);
//初始化变焦模块
Cam_Zoom_Init();
//初始化RGA模块
#ifdef RTK_RGA_STATUS_ON
ret = JZC1_RgaInit(&g_C1_RgaIrcInfo, JZ_ALIGN(FIRST_WIDTH, 16), JZ_ALIGN(FIRST_HEIGHT, 16), RK_FORMAT_YCbCr_420_SP);
ret = JZC1_RgaInit(&g_C1_RgaOptInfo, JZ_ALIGN(SECOND_WIDTH, 16), JZ_ALIGN(SECOND_HEIGHT, 16), RK_FORMAT_YCbCr_420_SP);
//混合视频初始化
JZC1_MixedVideo_Init();
#endif
#endif
return JZ_ERROR_SYSTEM_MODULE_CODE_SUCCESS;
}
#ifdef RTK_RGA_STATUS_ON
static T_JZsdkReturnCode JZC1_RgaInit(C1_RgaInfo **rgaInfo, int dst_width, int dst_height, int dst_format)
{
//初始化rga结构体
(*rgaInfo) = (C1_RgaInfo *)malloc(sizeof(C1_RgaInfo));
if ((*rgaInfo) == NULL)
{
JZSDK_LOG_ERROR("RGA初始化失败");
return JZ_ERROR_SYSTEM_MODULE_CODE_FAILURE;
}
int width = dst_width;
int height = dst_height;
int rect_x = 0;
int rect_y = 0;
int rect_w = dst_width;
int rect_h = dst_height;
(*rgaInfo)->src_img = NULL;
RK_Rga_ImageInit(&(*rgaInfo)->src_img, width, height, dst_format, rect_x, rect_y, rect_w, rect_h); //初始化输入模块
(*rgaInfo)->corp_img = NULL;
RK_Rga_ImageInit(&(*rgaInfo)->corp_img, width, height, dst_format, rect_x, rect_y, rect_w, rect_h); //初始化裁剪模块
(*rgaInfo)->dst_img = NULL;
RK_Rga_ImageInit(&(*rgaInfo)->dst_img, width, height, dst_format, rect_x, rect_y, rect_w, rect_h); //初始化输出模块
    return JZ_ERROR_SYSTEM_MODULE_CODE_SUCCESS;
}
#endif
//rga处理
#ifdef RTK_RGA_STATUS_ON
static T_JZsdkReturnCode JZC1_RgaDeal(C1_RgaInfo *rga_info, int resize, unsigned char *image, unsigned int *imgage_size)
{
int d_ret;
if (g_C1_RgaIrcInfo == NULL || *imgage_size != rga_info->src_img->buf_size)
{
printf("C1_Rga_Deal failed imagesize:%d bufsize:%d\n", *imgage_size, rga_info->src_img->buf_size);
return JZ_ERROR_SYSTEM_MODULE_CODE_FAILURE;
}
//检查裁剪倍数是否改变
if (resize != rga_info->scale)
{
rga_info->scale = resize;
//释放裁剪区域, 并重新注册
if (rga_info->corp_img != NULL)
{
RK_Rga_ImageDeInit(&(rga_info->corp_img));
int width = rga_info->src_img->width / resize;
int height = rga_info->src_img->height / resize;
int rect_x = (rga_info->src_img->width) / 2 - width / 2;
int rect_y = (rga_info->src_img->height) / 2 - height / 2;
int rect_w = width;
int rect_h = height;
int dst_format = rga_info->src_img->format;
RK_Rga_ImageInit(&(rga_info->corp_img), width, height, dst_format, rect_x, rect_y, rect_w, rect_h); //初始化裁剪模块
}
}
//将图像放入处理器
memcpy(rga_info->src_img->buf, image, *imgage_size);
//JZSDK_LOG_DEBUG("裁剪倍率%d",resize);
if (resize == 0)
{
return JZ_ERROR_SYSTEM_MODULE_CODE_SUCCESS;
}
//裁剪图像
d_ret = imcrop(rga_info->src_img->img, rga_info->corp_img->img, rga_info->corp_img->rect);
if (d_ret != IM_STATUS_SUCCESS)
{
printf("crop failed resize:%d\n",resize);
return JZ_ERROR_SYSTEM_MODULE_CODE_FAILURE;
}
//缩放图像
d_ret = imresize(rga_info->corp_img->img, rga_info->dst_img->img);
if (d_ret != IM_STATUS_SUCCESS)
{
printf("resize failed\n");
return JZ_ERROR_SYSTEM_MODULE_CODE_FAILURE;
}
//返回图像
memcpy(image, rga_info->dst_img->buf, rga_info->dst_img->buf_size);
//printf("image[0]:%d image[1]:%d\n", image[0], image[1]);
*imgage_size = rga_info->dst_img->buf_size;
return JZ_ERROR_SYSTEM_MODULE_CODE_SUCCESS;
}
#endif
T_JZsdkReturnCode JZC1_Init()
{
T_JZsdkReturnCode ret;
//初始化引脚
Ircut_Init();
//初始化媒体模块
ret = JZC1_MediaInit();
JZSDK_LOG_INFO("JZ_C1 INIT COMPLETED\n");
return ret;
}
... ...
/**
********************************************************************
* @file JZC1.h
* JZC1.h的头文件
*
*********************************************************************
*/
/* Define to prevent recursive inclusion 避免重定义 -------------------------------------*/
#ifndef JZC1_H
#define JZC1_H
#include "JZsdk_Base/JZsdk_Code/JZsdk_Code.h"
/* Includes ------------------------------------------------------------------*/
#ifdef __cplusplus
extern "C" {
#endif
/* Exported constants --------------------------------------------------------*/
/* 常亮定义*/
/* Exported types ------------------------------------------------------------*/
T_JZsdkReturnCode JZC1_Init();
#ifdef __cplusplus
}
#endif
#endif
... ...
... ... @@ -14,7 +14,6 @@ sysfs接口与GPIO(通用输入/输出)模块
**/
/*******************
*
* sysfs的gpio引脚注册
* 引脚号计算公式为 pin = bank * 32 + number, number = group*8 + X
* 格式为 GPIO + bank + '_' + group + X
... ...
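A worked example of the pin formula in the comment above. The pin name GPIO3_B2 is chosen for illustration, and the A..D -> 0..3 group mapping is the usual Rockchip convention, assumed here rather than taken from the file:

int bank = 3, group = 1 /* 'B' */, x = 2;   /* GPIO3_B2 */
int number = group * 8 + x;                 /* 1 * 8 + 2 = 10 */
int pin    = bank * 32 + number;            /* 3 * 32 + 10 = 106 -> /sys/class/gpio/gpio106 */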
... ... @@ -17,6 +17,14 @@
static int CameraFd = 0;
//临时方案 后续会改写法合并到t_JZsdk_TaskFuntionInput(搞起来有点麻烦)
typedef struct t_FrameCatch_TaskFuntionInput
{
void (*task_function)(unsigned char*, unsigned int); //任务函数指针,用于指定 执行的任务
unsigned char* data; //数据指针
unsigned int data_size; //数据大小
} t_FrameCatch_TaskFuntionInput;
//多线程抓取数据线程
static void *JZsdk_Catch_MultiThread(void *args)
{
... ... @@ -45,8 +53,49 @@ static void *JZsdk_Catch_MultiThread(void *args)
}
//单线程抓取数据线程
static void *JZsdk_Catch_SingleThread(void *args)
/******************************
*
* 相机抓取初始化
* ThreadMode: 0为单线程 1为多线程
*
* ******************************/
T_JZsdkReturnCode JZsdk_FrameCatch_Init(int ThreadMode)
{
T_JZsdkReturnCode ret;
//初始化数据接收线程
pthread_t ReadDataTask;
pthread_attr_t task_attribute; //线程属性
pthread_attr_init(&task_attribute); //初始化线程属性
pthread_attr_setdetachstate(&task_attribute, PTHREAD_CREATE_DETACHED); //设置线程分离属性
if (ThreadMode == 0)
{
// int opus_Protection = pthread_create(&ReadDataTask,&task_attribute,JZsdk_Catch_SingleThread,NULL); //线程
// if(opus_Protection != 0)
// {
// JZSDK_LOG_ERROR("创建相机抓取并处理初始化线程失败!");
// return JZ_ERROR_SYSTEM_MODULE_CODE_FAILURE;
// }
}
else if (ThreadMode == 1)
{
int opus_Protection = pthread_create(&ReadDataTask,&task_attribute,JZsdk_Catch_MultiThread,NULL); //线程
if(opus_Protection != 0)
{
JZSDK_LOG_ERROR("创建相机抓取并处理初始化线程失败!");
return JZ_ERROR_SYSTEM_MODULE_CODE_FAILURE;
}
}
return JZ_ERROR_SYSTEM_MODULE_CODE_SUCCESS;
}
/*******************************************************************************************************************************************/
//多线程抓取数据线程
static void *JZsdk_Catch_MultiThread2(void *args)
{
while (1)
{
... ... @@ -64,21 +113,55 @@ static void *JZsdk_Catch_SingleThread(void *args)
continue;
}
//放入缓冲池 //将数据放入缓冲池,从而被其他线程使用
VideoMgmt_write_data(&args, buf, buf_size);
//归还图片
V4L2_CameraFrameRecord_OnlyReturnFrame();
}
}
//单线程抓取数据线程
static void *JZsdk_Catch_SingleThread2(void *args)
{
t_FrameCatch_TaskFuntionInput *task = (t_FrameCatch_TaskFuntionInput *)args;
while (1)
{
unsigned int buf_size = 0;
unsigned char *buf = NULL;
//从相机中读取一张照片
V4L2_CameraFrameRecord_OnlyGetFrame(&buf, &buf_size);
//JZSDK_LOG_INFO("read: len:%d data[3]:%x data[4]:%x\n", buf_size, buf[3], buf[4]);
if (buf == NULL)
{
JZSDK_LOG_ERROR("相机数据读取失败");
continue;
}
//进行数据处理
VideoMgmt_Single_FrameIn(buf, buf_size);
task->task_function(buf, buf_size);
//归还图片
V4L2_CameraFrameRecord_OnlyReturnFrame();
}
}
/******************************
/********************************************
*
* 相机抓取初始化
* ThreadMode: 0为单线程 1为多线程
*
* ******************************/
T_JZsdkReturnCode JZsdk_FrameCatch_Init(int ThreadMode)
* 相机抓取多线程单线程
*
* 传入线程的处理函数 task_function
*
* *****************************************/
T_JZsdkReturnCode JZsdk_FrameCatch_Single(T_JZsdkReturnCode (*task_function)(unsigned char*, unsigned int))
{
T_JZsdkReturnCode ret;
... ... @@ -88,24 +171,61 @@ T_JZsdkReturnCode JZsdk_FrameCatch_Init(int ThreadMode)
pthread_attr_init(&task_attribute); //初始化线程属性
pthread_attr_setdetachstate(&task_attribute, PTHREAD_CREATE_DETACHED); //设置线程分离属性
if (ThreadMode == 0)
//单线程模式
if (task_function == NULL)
{
int opus_Protection = pthread_create(&ReadDataTask,&task_attribute,JZsdk_Catch_SingleThread,NULL); //线程
if(opus_Protection != 0)
{
JZSDK_LOG_ERROR("创建相机抓取并处理初始化线程失败!");
return JZ_ERROR_SYSTEM_MODULE_CODE_FAILURE;
}
return JZ_ERROR_SYSTEM_MODULE_CODE_INVALID_PARAMETER;
}
else if (ThreadMode == 1)
t_FrameCatch_TaskFuntionInput *task = (t_FrameCatch_TaskFuntionInput*)malloc(sizeof(t_FrameCatch_TaskFuntionInput));
if (task == NULL)
{
int opus_Protection = pthread_create(&ReadDataTask,&task_attribute,JZsdk_Catch_MultiThread,NULL); //线程
if(opus_Protection != 0)
{
JZSDK_LOG_ERROR("创建相机抓取并处理初始化线程失败!");
return JZ_ERROR_SYSTEM_MODULE_CODE_FAILURE;
}
// 处理内存分配失败的情况
return JZ_ERROR_SYSTEM_MODULE_CODE_INVALID_PARAMETER;
}
task->task_function = task_function;
int opus_Protection = pthread_create(&ReadDataTask,&task_attribute,JZsdk_Catch_SingleThread2,(void *)task); //线程
if(opus_Protection != 0)
{
JZSDK_LOG_ERROR("创建相机抓取并处理初始化线程失败!");
return JZ_ERROR_SYSTEM_MODULE_CODE_FAILURE;
}
return JZ_ERROR_SYSTEM_MODULE_CODE_SUCCESS;
}
/*************************
*
*
* 相机抓取多线程
*
*
* ******************/
T_JZsdkReturnCode JZsdk_FrameCatch_Multi(void *FrameIndex)
{
T_JZsdkReturnCode ret;
//初始化数据接收线程
pthread_t ReadDataTask;
pthread_attr_t task_attribute; //线程属性
pthread_attr_init(&task_attribute); //初始化线程属性
pthread_attr_setdetachstate(&task_attribute, PTHREAD_CREATE_DETACHED); //设置线程分离属性
if (FrameIndex == NULL)
{
return JZ_ERROR_SYSTEM_MODULE_CODE_INVALID_PARAMETER;
}
int opus_Protection = pthread_create(&ReadDataTask,&task_attribute,JZsdk_Catch_MultiThread2, FrameIndex); //线程
if(opus_Protection != 0)
{
JZSDK_LOG_ERROR("创建相机抓取并处理初始化线程失败!");
return JZ_ERROR_SYSTEM_MODULE_CODE_FAILURE;
}
return JZ_ERROR_SYSTEM_MODULE_CODE_SUCCESS;
}
\ No newline at end of file
... ...
... ... @@ -25,7 +25,8 @@ extern "C" {
/* Exported functions --------------------------------------------------------*/
T_JZsdkReturnCode JZsdk_FrameCatch_Init(int ThreadMode);
T_JZsdkReturnCode JZsdk_FrameCatch_Single(T_JZsdkReturnCode (*task_function)(unsigned char*, unsigned int));
T_JZsdkReturnCode JZsdk_FrameCatch_Multi(void *FrameIndex);
#ifdef __cplusplus
... ...
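A usage sketch of the new single-thread catch API; the handler body is illustrative, only JZsdk_FrameCatch_Single() and its callback shape come from the header above.

#include "MediaProc/Camera/Cam_FrameCatch/Cam_FrameCatch.h"

static T_JZsdkReturnCode demo_frame_handler(unsigned char *frame, unsigned int frame_len)
{
    /* frame is one captured V4L2 buffer; it is handed back to the driver right after
     * this callback returns, so copy anything that must outlive the call */
    return JZ_ERROR_SYSTEM_MODULE_CODE_SUCCESS;
}

void demo_frame_catch_setup(void)
{
    JZsdk_FrameCatch_Single(demo_frame_handler);   /* spawns the detached single-grab thread */
}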
#include <stdio.h>
#include <pthread.h>
#include "BaseConfig.h"
#include "JZsdkLib.h"
#define ZOOM_VALUE_DETECTION_FREQUENCY 50 //每秒检测50次变焦值
static int g_Cam_ZoomTempValue = 0; //相机对焦临时变量 用于各个模块的输入使用
static int g_Cam_ZoomValue = 0; //相机对焦变量的实际对焦值, 0为最远(完整画面), 1000为最近(最大放大画面)
/*****************
*
*
* 设置临时变焦值
*
*
* *******************/
T_JZsdkReturnCode Cam_Zoom_SetTempValue(int *value)
{
if (*value > 1000)
{
*value = 1000;
}
else if (*value < 0)
{
*value = 0;
}
g_Cam_ZoomTempValue = *value;
return JZ_ERROR_SYSTEM_MODULE_CODE_SUCCESS;
}
/*************
*
*
* 获取变焦值
*
*
* *****************/
T_JZsdkReturnCode Cam_Zoom_GetValue(int *value)
{
*value = g_Cam_ZoomValue;
return JZ_ERROR_SYSTEM_MODULE_CODE_SUCCESS;
}
/*********************
*
*
* 设置实际变焦值
*
*
* *********************/
static T_JZsdkReturnCode Cam_Zoom_SetValue(int value)
{
g_Cam_ZoomValue = value;
//对外广播变焦值
return JZ_ERROR_SYSTEM_MODULE_CODE_SUCCESS;
}
static void *ZoomValue_Thread(void *args)
{
while (1)
{
//每一段时间检测输入的变焦值是否有变化
delayMs(1000/ZOOM_VALUE_DETECTION_FREQUENCY);
if (g_Cam_ZoomTempValue != g_Cam_ZoomValue)
{
Cam_Zoom_SetValue(g_Cam_ZoomTempValue);
}
//JZSDK_LOG_DEBUG("g_Cam_ZoomValue = %d , g_Cam_ZoomTempValue = %d", g_Cam_ZoomValue, g_Cam_ZoomTempValue);
}
}
/******************
*
*
* 变焦模块初始化
*
* *****************/
T_JZsdkReturnCode Cam_Zoom_Init()
{
//初始化处理现场
pthread_t ZoomValue_Task;
pthread_attr_t attribute; //线程属性
pthread_attr_init(&attribute); //初始化线程属性
pthread_attr_setdetachstate(&attribute, PTHREAD_CREATE_DETACHED); //设置线程分离属性
int bufferdata_Protection = pthread_create(&ZoomValue_Task,&attribute,ZoomValue_Thread,NULL); //线程
if(bufferdata_Protection != 0)
{
JZSDK_LOG_ERROR("创建变焦模块失败!");
return JZ_ERROR_SYSTEM_MODULE_CODE_FAILURE;
}
return JZ_ERROR_SYSTEM_MODULE_CODE_SUCCESS;
}
\ No newline at end of file
... ...
/**
********************************************************************
* @file Cam_Zoom.h
* Cam_Zoom.c 的头文件
*
*********************************************************************
*/
/* Define to prevent recursive inclusion 避免重定义 -------------------------------------*/
#ifndef CAM_ZOOM_H
#define CAM_ZOOM_H
/* Includes ------------------------------------------------------------------*/
#include "JZsdk_Base/JZsdk_Code/JZsdk_Code.h"
#ifdef __cplusplus
extern "C" {
#endif
/* Exported constants --------------------------------------------------------*/
/* 常量定义*/
/* Exported types ------------------------------------------------------------*/
/* Exported functions --------------------------------------------------------*/
T_JZsdkReturnCode Cam_Zoom_SetTempValue(int *value);
T_JZsdkReturnCode Cam_Zoom_GetValue(int *value);
T_JZsdkReturnCode Cam_Zoom_Init();
#ifdef __cplusplus
}
#endif
#endif
... ...
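配合上文"0 为最远、1000 为最大放大"的取值约定,这里补一个变焦接口的最小使用示意(草案):Demo_SetZoom 为示例自拟函数,写入后由 50Hz 检测线程生效,存在最多 1/50 秒的延迟。
#include "MediaProc/Camera/Cam_Zoom/Cam_Zoom.h"
static void Demo_SetZoom(void)
{
    //模块初始化,内部创建分离的变焦检测线程
    Cam_Zoom_Init();
    //写入期望变焦值,超出范围会被钳位到 [0, 1000]
    int target = 500;
    Cam_Zoom_SetTempValue(&target);
    //之后可读取实际生效的变焦值
    int current = 0;
    Cam_Zoom_GetValue(&current);
}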
... ... @@ -8,9 +8,9 @@
#include "./Camera.h"
#include "version_choose.h"
#include "BaseConfig.h"
#include "./Kt_Irc/Kt_Irc.h"
#include "../ImageProc/PseudoColor/PseudoColor.h"
#include "MediaProc/MediaProc_Param.h"
#include "Ircut/ircut.h"
#include "MediaProc/IRC_funtion/IRC_funtion.h"
... ... @@ -68,6 +68,8 @@ T_JZsdkReturnCode Camera_Init(int ThreadMode, int width, int height, int frame_n
}
//快门开关
/***********************************
*
... ... @@ -79,7 +81,20 @@ T_JZsdkReturnCode JZsdk_Camera_ShutterSwitch(int value)
{
T_JZsdkReturnCode ret;
#if DEVICE_VERSION == JZ_C1
ret = JZsdk_Kt_Irc_ShutterSwitch(value);
if (value == JZ_FLAGCODE_ON)
{
ret = SysfsGPIO_Set_ircut(KT_IRC_SHUTTER_GPIO_NUM, 1);
}
else if (value == JZ_FLAGCODE_OFF)
{
ret = SysfsGPIO_Set_ircut(KT_IRC_SHUTTER_GPIO_NUM, 0);
}
else
{
ret = JZ_ERROR_SYSTEM_MODULE_CODE_FAILURE;
}
#else
ret = JZ_ERROR_SYSTEM_MODULE_CODE_FAILURE;
#endif
... ... @@ -298,6 +313,13 @@ T_JZsdkReturnCode Camera_param(int flagcode, enum CameraParam paramflag, int *va
{
*value = g_CameraGasEnhancementColor;
}
break;
case CAMERA_RESIZE:
{
Cam_Zoom_GetValue(value);
}
break;
default:
{
... ... @@ -374,6 +396,15 @@ T_JZsdkReturnCode Camera_param(int flagcode, enum CameraParam paramflag, int *va
}
break;
case CAMERA_RESIZE:
{
ret = Cam_Zoom_SetTempValue(value);
if (ret != JZ_ERROR_SYSTEM_MODULE_CODE_SUCCESS)
{
return JZ_ERROR_SYSTEM_MODULE_CODE_FAILURE;
}
}
break;
default:
{
*value = JZ_FLAGCODE_OFF;
... ...
... ... @@ -14,6 +14,11 @@
#include "JZsdk_Base/JZsdk_Code/JZsdk_Code.h"
#include "MediaProc/MediaProc_Param.h"
#include "MediaProc/Camera/Cam_FrameCatch/Cam_FrameCatch.h"
#include "MediaProc/Camera/V4L2_camera/V4L2_CameraParameterSetting.h"
#include "MediaProc/Camera/V4L2_camera/V4L2_Record.h"
#include "MediaProc/Camera/Cam_Zoom/Cam_Zoom.h"
#ifdef __cplusplus
extern "C" {
#endif
... ...
#include <stdio.h>
#include <string.h>
#include <stdlib.h>
#include <pthread.h>
#include <unistd.h>
#include "JZsdkLib.h"
#include "version_choose.h"
#include "BaseConfig.h"
#include "JZsdk_usb_bulk/JZsdk_usb_bulk.h"
#include "../V4L2_camera/V4L2_Record.h"
#include "../../MultProc/MultProc.h"
#include "../../MediaParm.h"
#include "../../VideoMgmt/VideoMgmt.h"
#include "Ircut/ircut.h"
#include "../Camera.h"
#include "UI_control/UI_control.h"
#include "../../IRC_funtion/IRC_Param.h"
#include "../../IRC_funtion/IRC_funtion.h"
#define IRC_WIDTH FIRST_WIDTH
#define IRC_HEIGHT FIRST_HEIGHT
#define CAMERA_WIDTH SECOND_WIDTH
#define CAMERA_HEIGHT SECOND_HEIGHT
static int Kt_Camera_fd;
extern IRC_param *g_IRC_Param;
#define IRC_WIDTH FIRST_WIDTH
#define IRC_HEIGHT FIRST_HEIGHT
// 定义帧头长度和帧头内容
#define FRAME_HEADER_SIZE 4
static const unsigned char FRAME_HEADER[FRAME_HEADER_SIZE] = {0xaa, 0xbb, 0xcc, 0xdd};
static void *Irc_usb_index = NULL;
//红外数据纠正函数, 暂定全部替换
T_JZsdkReturnCode Kt_Irc_DataCorrect(unsigned char *data)
{
//像素修正
data[0] = data[5];
data[1] = data[6];
data[2] = data[5];
data[3] = data[6];
return JZ_ERROR_SYSTEM_MODULE_CODE_SUCCESS;
}
//红外数据接收线程
static void *JZsdk_Kt_Irc_DataRecv_Thread(void *args)
{
int frame_len = IRC_WIDTH*2*IRC_HEIGHT; //163840
unsigned char buf[163840]; //usb数据缓冲区需要为512的倍数 4194304 后续考虑为 262144
unsigned char frameData[frame_len]; // 存储整帧数据的画面缓冲区
unsigned int lineNum = 0;
int frame = 0;
int frameDataLen = 0;//缓冲区的数据长度
/*****
*
* 数据格式说明
*
* 帧头 0xaa 0xbb 0x02 0x80 0x01 0x00 + 行数 如第一行0x00 0x01
* 数据包长度为 0x0280
* 数据包 一包的行数为256 0x0100
* 当前数据为第x行 x= 0x0001
* 两位数据为一个点
* 接着把前4个点的数据 用第五个点替换掉
* *****/
while (1)
{
int realLen;
memset(buf,0,sizeof(buf));
T_JZsdkReturnCode ret = JZsdk_HalUsbBulk_ReadData(&Irc_usb_index, buf, sizeof(buf), &realLen);
if (ret != JZ_ERROR_SYSTEM_MODULE_CODE_SUCCESS)
{
// 处理读取错误
JZSDK_LOG_ERROR("读取错误");
continue;
//return JZ_ERROR_SYSTEM_MODULE_CODE_FAILURE;
}
if (realLen != 0)
{
//JZSDK_LOG_INFO("读取到%d 字节",realLen);
}
//寻找数据是否存在帧头
for (int i = 0; i < realLen; i++)
{
// 验证帧头
if (memcmp(buf+i, FRAME_HEADER, FRAME_HEADER_SIZE) != 0)
{
// 帧头不匹配,可能是噪声或错误的数据包
continue;
}
/**********************
*
* 方法一,将usb缓冲区调整大小 到超过640*256, 然后直接输出整段画面,避免重复复制,节省处理时间
*
* ******************************/
//如果查找到帧头
//查看是否是第0帧
if (frame == 0)
{
//重置掉画面缓冲区
memset(frameData,0,sizeof(frameData));
//将数据置于缓冲区
frameDataLen = (realLen-i);
memcpy( &frameData[0], buf + i, frameDataLen);
//让画面帧强跳到第一帧
frame = 1;
continue;
}
//如果是第一帧
if (frame == 1)
{
memcpy( &frameData[frameDataLen], buf, frame_len-frameDataLen );
VideoMgmt_write_data(&VideoMgmt_FirstRaw_index, frameData, frame_len);
frame = 2;
frameDataLen = 0;
//memset(frameData,0,sizeof(frameData));
}
//如果不是第1帧,且上段数据小于一画面段,说明为数据被切割
if ( i<frame_len)
{
//则于前端残余数据拼接,并输出
if (frame%2==0 && (frame != 1) )
{
memcpy( &frameData[frame_len-i], buf, i);
//将未处理raw数据放入缓冲区
//JZSDK_LOG_INFO("写入1 %d %x", i, frameData[20]);
VideoMgmt_write_data(&VideoMgmt_FirstRaw_index, frameData, frame_len);
//JZSDK_LOG_INFO("放入数据到缓冲区");
//memset(frameData,0,sizeof(frameData));
}
frame++;
}
//如果剩余长度超出一画数据,将画面数据整段输出
if ( (i + frame_len) < realLen)
{
if (frame%2==0)
{
//JZSDK_LOG_INFO("写入2");
memcpy( &frameData[0], buf, frame_len);
//将未处理raw数据放入缓冲区
VideoMgmt_write_data(&VideoMgmt_FirstRaw_index, frameData, frame_len);
//JZSDK_LOG_INFO("放入数据到缓冲区");
}
frame++;
continue;
}
//JZSDK_LOG_INFO("i:%d, frame_len:%d realLen:%d frame:%d",i,frame_len,realLen,frame);
//如果剩余数据小于一画,存进画面缓冲区
//memset(frameData,0,sizeof(frameData));
memcpy(frameData, buf+i, (realLen-i));
break;
}
}
}
//红外相机数据的初始化
T_JZsdkReturnCode JZsdk_Kt_Irc_Data_Init()
{
T_JZsdkReturnCode ret;
//初始化接收的usb口
ret = JZsdk_HalUsbBulk_Init(&Irc_usb_index, 0, 0, LINUX_USB_PID, LINUX_USB_VID, USB_IN_POINT, USB_OUT_POINT);
if (ret != JZ_ERROR_SYSTEM_MODULE_CODE_SUCCESS)
{
return ret;
}
//初始化usb接收线程
pthread_t ReadDataTask;
pthread_attr_t task_attribute; //线程属性
pthread_attr_init(&task_attribute); //初始化线程属性
pthread_attr_setdetachstate(&task_attribute, PTHREAD_CREATE_DETACHED); //设置线程分离属性
int opus_Protection = pthread_create(&ReadDataTask,&task_attribute,JZsdk_Kt_Irc_DataRecv_Thread,NULL); //线程
if(opus_Protection != 0)
{
JZSDK_LOG_ERROR("创建视频usb线程失败!");
return JZ_ERROR_SYSTEM_MODULE_CODE_FAILURE;
}
return JZ_ERROR_SYSTEM_MODULE_CODE_SUCCESS;
}
//红外数据的处理
static void *JZsdk_Kt_Irc_DataDeal_Thread(void *args)
{
int DealFrameNum = 0;
unsigned char BaseBuffer[IRC_WIDTH*2*IRC_HEIGHT];
while (1)
{
unsigned char *gary_data = NULL;
unsigned int gary_data_len;
//1、从原始流缓冲区中取出raw数据
VideoMgmt_read_data(&VideoMgmt_FirstRaw_index, &gary_data, &gary_data_len,JZ_FLAGCODE_OFF ,JZ_FLAGCODE_OFF);
//2、红外数据纠正处理
Kt_Irc_DataCorrect(gary_data);
//3、将灰度图数据转换为原始码流数据
unsigned char *raw_data = NULL;
int raw_data_len = 0;
IRC_FrameDeal(gary_data, gary_data_len, &raw_data, &raw_data_len);
//4、将原始码流数据转换成h264流,为避免多次复制,这里的h264会直接放入视频流管理的缓冲区
JZsdk_RTKMMP_RawData_to_h264(raw_data, raw_data_len);
//5、释放内存
if (raw_data != NULL)
{
free(raw_data);
raw_data = NULL;
}
if (gary_data != NULL)
{
free(gary_data);
gary_data = NULL;
}
//JZSDK_LOG_DEBUG("得到了一帧红外h264");
}
}
//红外相机数据的处理线程
static T_JZsdkReturnCode JZsdk_Kt_Irc_DataDeal_Init()
{
pthread_t ReadDataTask;
pthread_attr_t task_attribute; //线程属性
pthread_attr_init(&task_attribute); //初始化线程属性
pthread_attr_setdetachstate(&task_attribute, PTHREAD_CREATE_DETACHED); //设置线程分离属性
int opus_Protection = pthread_create(&ReadDataTask,&task_attribute,JZsdk_Kt_Irc_DataDeal_Thread,NULL); //线程
if(opus_Protection != 0)
{
JZSDK_LOG_ERROR("创建红外相机数据的处理线程失败!");
return JZ_ERROR_SYSTEM_MODULE_CODE_FAILURE;
}
return JZ_ERROR_SYSTEM_MODULE_CODE_SUCCESS;
}
//光学相机数据的处理线程
static void *JZsdk_Kt_Cam_DataDeal_Thread(void *args)
{
int DealFrameNum = 0;
while (1)
{
unsigned char *raw_data = NULL;
unsigned int raw_data_len;
//1、从原始流缓冲区中取出raw数据
VideoMgmt_read_data(&VideoMgmt_SecondRaw_index, &raw_data, &raw_data_len,JZ_FLAGCODE_OFF ,JZ_FLAGCODE_OFF);
//2、将raw数据流转换成h264流,并放置到视频流缓冲区
JZsdk_Kt_CamMMP_Mjpeg_to_h264(raw_data, raw_data_len);
free(raw_data);
raw_data = NULL;
//DealFrameNum++;
//printf("get Cam Frame%d\n",DealFrameNum);
}
}
//光学相机数据的处理线程
static T_JZsdkReturnCode JZsdk_Kt_Cam_DataDeal_Init()
{
pthread_t ReadDataTask;
pthread_attr_t task_attribute; //线程属性
pthread_attr_init(&task_attribute); //初始化线程属性
pthread_attr_setdetachstate(&task_attribute, PTHREAD_CREATE_DETACHED); //设置线程分离属性
int opus_Protection = pthread_create(&ReadDataTask,&task_attribute,JZsdk_Kt_Cam_DataDeal_Thread,NULL); //线程
if(opus_Protection != 0)
{
JZSDK_LOG_ERROR("创建红外相机数据的处理线程失败!");
return JZ_ERROR_SYSTEM_MODULE_CODE_FAILURE;
}
return JZ_ERROR_SYSTEM_MODULE_CODE_SUCCESS;
}
//光学相机数据读取线程
static void *JZsdk_Kt_Cam_Data_Thread(void *args)
{
while (1)
{
unsigned int buf_size = 0;
unsigned char *buf = NULL;
//从相机中读取一张照片
V4L2_CameraFrameRecord_OnlyGetFrame(&buf, &buf_size);
if (buf == NULL)
{
JZSDK_LOG_ERROR("相机数据读取失败");
continue;
}
//放入缓冲池
VideoMgmt_write_data(&VideoMgmt_SecondRaw_index, buf, buf_size);
//归还图片
V4L2_CameraFrameRecord_OnlyReturnFrame();
}
}
//光学相机初始化
static T_JZsdkReturnCode JZsdk_Kt_Camera_Init()
{
T_JZsdkReturnCode ret;
//初始化摄像头
ret = V4l2_Camarainit2(&Kt_Camera_fd,CAMERA_WIDTH,CAMERA_HEIGHT,30);
if (ret != JZ_ERROR_SYSTEM_MODULE_CODE_SUCCESS)
{
return ret;
}
//初始化数据接收线程
pthread_t ReadDataTask;
pthread_attr_t task_attribute; //线程属性
pthread_attr_init(&task_attribute); //初始化线程属性
pthread_attr_setdetachstate(&task_attribute, PTHREAD_CREATE_DETACHED); //设置线程分离属性
int opus_Protection = pthread_create(&ReadDataTask,&task_attribute,JZsdk_Kt_Cam_Data_Thread,NULL); //线程
if(opus_Protection != 0)
{
JZSDK_LOG_ERROR("创建v4l2线程失败!");
return JZ_ERROR_SYSTEM_MODULE_CODE_FAILURE;
}
return JZ_ERROR_SYSTEM_MODULE_CODE_SUCCESS;
}
//昆腾相机初始化
T_JZsdkReturnCode JZsdk_Kt_Irc_Camera_Init()
{
T_JZsdkReturnCode ret;
//1、初始化光学相机
ret = JZsdk_Kt_Camera_Init();
if (ret != JZ_ERROR_SYSTEM_MODULE_CODE_SUCCESS)
{
JZSDK_LOG_ERROR("光学相机初始化失败");
}
else
{
//初始化数据转换模块
JZsdk_Kt_Cam_DataDeal_Init();
}
//2、初始化红外的数据处理
ret = IRC_ParamInit(&g_IRC_Param, IRC_HEIGHT, IRC_WIDTH, 25);
if (ret != JZ_ERROR_SYSTEM_MODULE_CODE_SUCCESS)
{
JZSDK_LOG_ERROR("初始化红外的数据处理失败");
}
//3、初始化红外的数据输入
ret = JZsdk_Kt_Irc_Data_Init();
if (ret != JZ_ERROR_SYSTEM_MODULE_CODE_SUCCESS)
{
JZSDK_LOG_ERROR("红外相机初始化失败");
}
else
{
//初始化数据转换模块
JZsdk_Kt_Irc_DataDeal_Init();
}
JZSDK_LOG_INFO("KT_Irc init complete");
return JZ_ERROR_SYSTEM_MODULE_CODE_SUCCESS;
}
//昆腾的红外相机的快门开关
/***********************************
*
* value on为开 off为关
*
*
* ***************************************/
T_JZsdkReturnCode JZsdk_Kt_Irc_ShutterSwitch(int value)
{
T_JZsdkReturnCode ret;
if (value == JZ_FLAGCODE_ON)
{
ret = SysfsGPIO_Set_ircut(KT_IRC_SHUTTER_GPIO_NUM, 1);
}
else if (value == JZ_FLAGCODE_OFF)
{
ret = SysfsGPIO_Set_ircut(KT_IRC_SHUTTER_GPIO_NUM, 0);
}
else
{
ret = JZ_ERROR_SYSTEM_MODULE_CODE_FAILURE;
}
return ret;
}
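上文接收线程需要先在一次 bulk 读取中定位 4 字节帧头,再处理被切割到两次读取里的帧。下面单独给出帧头检索这一步的最小示意(草案):Demo_FindFrameHeader 为示例自拟函数,完整的跨包拼帧逻辑以上文线程实现为准。
static int Demo_FindFrameHeader(const unsigned char *buf, int len)
{
    //逐字节滑动比较,i + FRAME_HEADER_SIZE <= len 可避免越界读取
    for (int i = 0; i + FRAME_HEADER_SIZE <= len; i++)
    {
        if (memcmp(buf + i, FRAME_HEADER, FRAME_HEADER_SIZE) == 0)
        {
            return i;   //返回帧头起始偏移
        }
    }
    return -1;          //本次读取中没有找到帧头
}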
... ... @@ -57,10 +57,12 @@ typedef struct IRC_param
//单点校正(Single-Point Correction -> SPC)
double *SPC_Diff; // spc截距数组 即计算得到的增益
double *SPC_Slope; // spc斜率数组 即计算得到的补正
U16_t *SPC_Mark_Data;
unsigned int FirstSPC_flag; //第一次使用spc的标志位
unsigned int SPC_ResetFlag; //spc校准标志位 开启时进行spc校准
U16_t *SPC_Mark_Data; //spc标定帧灰度
unsigned int SPC_mode; //spc模式 0低温画面打档 1手动打档 2定时打档
unsigned int SPC_LastMode; //上一次spc模式
... ... @@ -69,15 +71,24 @@ typedef struct IRC_param
unsigned int SPC_auto_time_flag; //spc定时自动打档标志位,用于计算打档时间
//两点矫正(Two-Point Correction -> TPC)
double *TPC_Diff; // tpc截距数组 即计算得到的增益
double *TPC_Slope; // tpc斜率数组 即计算得到的补正
double *TPC_Gain; // tpc斜率数组 即计算得到的校准增益
double *TPC_Offset; // tpc截距数组 即计算得到的校正偏移值
unsigned int TPC_mode; //tpc模式 0出厂打档 1手动打档
unsigned int TPC_ResetFlag; //tpc校准标志位 开启时进行tpc校准
unsigned int TPC_mode; //tpc数据模式 0出厂数据 1手动数据A 2手动数据B 3手动数据C 4手动数据D
unsigned int TPC_LastTPCMode; //上一次tpc数据模式
//画面纠正模式
unsigned int FrameCorrectMode; //画面纠正模式 0 无纠正 1 单点纠正 2 两点纠正
// 高低温标定
U16_t *Factory_HighT_Mark_Data; //出厂的高温标定帧灰度
unsigned int Factory_HighT_Mark_Data_flag; //出厂的高温标定帧灰度标志位
U16_t *Factory_LowT_Mark_Data; //出厂的低温标定帧灰度
unsigned int Factory_LowT_Mark_Data_flag; //出厂的高温标定帧灰度标志位
U16_t *HighT_NineFrameAdd; //高温温度判定时的9帧综合数组
U16_t *HighT_NineFrame_Avg; //高温温度判定时的9帧综合数组平均值
unsigned int HighT_flag; //高温判定标志 0~9 0关闭,1~9 为计数
... ... @@ -117,13 +128,27 @@ typedef struct IRC_param
typedef enum TEMP_CHOOSE
{
HIGH_SD_TEMP = 0,
LOW_SD_TEMP = 1,
HIGH_LOCAL_TEMP = 2,
LOW_LOCAL_TEMP = 3,
SPC_MARK_DATA = 4,
HIGH_DGCE_THRESHOLD = 5,
LOW_DGCE_THRESHOLD = 6,
FACTORY_HIGH_TEMP = 10, //出厂高温数据
FACTORY_LOW_TEMP = 11, //出厂低温数据
LOCAL_HIGH_TEMP_1 = 12, //手动高温数据1型
LOCAL_LOW_TEMP_1 = 13, //手动低温数据1型
LOCAL_HIGH_TEMP_2 = 14, //手动高温数据2型
LOCAL_LOW_TEMP_2 = 15, //手动低温数据2型
LOCAL_HIGH_TEMP_3 = 16, //手动高温数据3型
LOCAL_LOW_TEMP_3 = 17, //手动低温数据3型
LOCAL_HIGH_TEMP_4 = 18, //手动高温数据4型
LOCAL_LOW_TEMP_4 = 19, //手动低温数据4型
}TEMP_CHOOSE;
/* Exported types ------------------------------------------------------------*/
... ...
... ... @@ -9,10 +9,21 @@
#include "./IRC_data_deal.h"
#define IRC_FILE_DIR "/root/IRC_File"
#define HIGH_SD_DIR "AvgMarkedHighT.txt"
#define LOW_SD_DIR "AvgMarkedLowT.txt"
#define HIGH_LOCAL_DIR "AvgMarkedHighTLocal.txt"
#define LOW_LOCAL_DIR "AvgMarkedLowTLocal.txt"
#define FACTORY_HIGH_DATA_DIR "AvgMarkedHighT.txt"
#define FACTORY_LOW_DATA_DIR "AvgMarkedLowT.txt"
#define LOCAL_HIGH_SPC_DATA_DIR "AvgMarkedHighTLocal.txt" //沿用之前的
#define LOCAL_LOW_SPC_DATA_DIR "AvgMarkedLowTLocal.txt" //沿用之前的
#define LOCAL_HIGH_SPC_DATA_DIR_2 "AvgMarkedHighTLocal_2.txt"
#define LOCAL_LOW_SPC_DATA_DIR_2 "AvgMarkedLowTLocal_2.txt"
#define LOCAL_HIGH_SPC_DATA_DIR_3 "AvgMarkedHighTLocal_3.txt"
#define LOCAL_LOW_SPC_DATA_DIR_3 "AvgMarkedLowTLocal_3.txt"
#define LOCAL_HIGH_SPC_DATA_DIR_4 "AvgMarkedHighTLocal_4.txt"
#define LOCAL_LOW_SPC_DATA_DIR_4 "AvgMarkedLowTLocal_4.txt"
#define SPC_MARK_DATA_DIR "SPCMarkData.txt"
#define HIGH_THRESHOLD_DIR "/root/highThreshold.txt"
... ... @@ -55,8 +66,11 @@ T_JZsdkReturnCode IRC_LowT_CycleCalibration(U16_t *ImageData,struct IRC_param *d
dealInfo->LowT_NineFrameAdd[i] += ImageData[i];
}
dealInfo->LowT_flag++;
}
JZSDK_LOG_DEBUG("低温标定基准值已存入%d组",dealInfo->LowT_flag - 1);
if (dealInfo->LowT_flag > 9) //已经存储了9组数据
{
for (int i = 0; i < (dealInfo->Width * dealInfo->Height); i++)
... ... @@ -64,9 +78,6 @@ T_JZsdkReturnCode IRC_LowT_CycleCalibration(U16_t *ImageData,struct IRC_param *d
dealInfo->LowT_NineFrame_Avg[i] = dealInfo->LowT_NineFrameAdd[i] / 9;
}
//存放到本地
IRC_LocalFrame_DataWrite(LOW_LOCAL_TEMP,dealInfo, (U16_t *)dealInfo->LowT_NineFrame_Avg, dealInfo->PixelNum);
dealInfo->LowT_flag = JZ_FLAGCODE_OFF; //将标志位归位
JZSDK_LOG_DEBUG("低温数据记录完成");
... ... @@ -110,7 +121,9 @@ T_JZsdkReturnCode IRC_HighT_CycleCalibration(U16_t *ImageData, struct IRC_param
}
dealInfo->HighT_flag++;
}
JZSDK_LOG_DEBUG("高温标定基准值已存入%d组",dealInfo->HighT_flag - 1);
if (dealInfo->HighT_flag > 9) //已经存储了9组数据
{
for (int i = 0; i < (dealInfo->Width * dealInfo->Height); i++)
... ... @@ -118,9 +131,6 @@ T_JZsdkReturnCode IRC_HighT_CycleCalibration(U16_t *ImageData, struct IRC_param
dealInfo->HighT_NineFrame_Avg[i] = dealInfo->HighT_NineFrameAdd[i] / 9;
}
//存放到本地
IRC_LocalFrame_DataWrite(HIGH_LOCAL_TEMP,dealInfo, (U16_t *)dealInfo->HighT_NineFrame_Avg, dealInfo->PixelNum);
dealInfo->HighT_flag = JZ_FLAGCODE_OFF; //将标志位归位
JZSDK_LOG_DEBUG("高温数据记录完成");
... ... @@ -661,19 +671,44 @@ T_JZsdkReturnCode IRC_LocalFrame_DataRead(int flag,struct IRC_param *dealInfo)
switch (flag)
{
case HIGH_SD_TEMP:
snprintf(str, sizeof(str), "%s/%s",IRC_FILE_DIR, HIGH_SD_DIR);
case FACTORY_HIGH_TEMP:
snprintf(str, sizeof(str), "%s/%s",IRC_FILE_DIR, FACTORY_HIGH_DATA_DIR);
break;
case LOCAL_HIGH_TEMP_1:
snprintf(str, sizeof(str), "%s/%s",IRC_FILE_DIR, LOCAL_HIGH_SPC_DATA_DIR);
break;
case LOCAL_HIGH_TEMP_2:
snprintf(str, sizeof(str), "%s/%s",IRC_FILE_DIR, LOCAL_HIGH_SPC_DATA_DIR_2);
break;
case LOCAL_HIGH_TEMP_3:
snprintf(str, sizeof(str), "%s/%s",IRC_FILE_DIR, LOCAL_HIGH_SPC_DATA_DIR_3);
break;
case LOCAL_HIGH_TEMP_4:
snprintf(str, sizeof(str), "%s/%s",IRC_FILE_DIR, LOCAL_HIGH_SPC_DATA_DIR_4);
break;
case FACTORY_LOW_TEMP:
snprintf(str, sizeof(str), "%s/%s",IRC_FILE_DIR, FACTORY_LOW_DATA_DIR);
break;
case LOCAL_LOW_TEMP_1:
snprintf(str, sizeof(str), "%s/%s",IRC_FILE_DIR, LOCAL_LOW_SPC_DATA_DIR);
break;
case LOW_SD_TEMP:
snprintf(str, sizeof(str), "%s/%s",IRC_FILE_DIR, LOW_SD_DIR);
case LOCAL_LOW_TEMP_2:
snprintf(str, sizeof(str), "%s/%s",IRC_FILE_DIR, LOCAL_LOW_SPC_DATA_DIR_2);
break;
case HIGH_LOCAL_TEMP:
snprintf(str, sizeof(str), "%s/%s",IRC_FILE_DIR, HIGH_LOCAL_DIR);
case LOCAL_LOW_TEMP_3:
snprintf(str, sizeof(str), "%s/%s",IRC_FILE_DIR, LOCAL_LOW_SPC_DATA_DIR_3);
break;
case LOW_LOCAL_TEMP:
snprintf(str, sizeof(str), "%s/%s",IRC_FILE_DIR, LOW_LOCAL_DIR);
case LOCAL_LOW_TEMP_4:
snprintf(str, sizeof(str), "%s/%s",IRC_FILE_DIR, LOCAL_LOW_SPC_DATA_DIR_4);
break;
case SPC_MARK_DATA:
... ... @@ -697,7 +732,6 @@ T_JZsdkReturnCode IRC_LocalFrame_DataRead(int flag,struct IRC_param *dealInfo)
int capacity = 0; // 用于记录当前分配的内存容量
U16_t *numbers = NULL; // 用于存储已读取的整数
// 读取数据
while (fscanf(fp, "%d", &num) == 1) {
// 如果当前容量不足以存储新元素,则增加容量
if (count >= capacity) {
... ... @@ -726,24 +760,22 @@ T_JZsdkReturnCode IRC_LocalFrame_DataRead(int flag,struct IRC_param *dealInfo)
switch (flag)
{
case HIGH_SD_TEMP:
memcpy(dealInfo->HighT_NineFrame_Avg, numbers, count * sizeof(U16_t));
JZSDK_LOG_DEBUG("读取高SD温度数据");
break;
case LOW_SD_TEMP:
memcpy(dealInfo->LowT_NineFrame_Avg, numbers, count * sizeof(U16_t));
JZSDK_LOG_DEBUG("读取低SD温度数据");
break;
case HIGH_LOCAL_TEMP:
case FACTORY_HIGH_TEMP:
case LOCAL_HIGH_TEMP_1:
case LOCAL_HIGH_TEMP_2:
case LOCAL_HIGH_TEMP_3:
case LOCAL_HIGH_TEMP_4:
memcpy(dealInfo->HighT_NineFrame_Avg, numbers, count * sizeof(U16_t));
JZSDK_LOG_DEBUG("读取高本地温度数据");
JZSDK_LOG_DEBUG("读取高数据");
break;
case LOW_LOCAL_TEMP:
case FACTORY_LOW_TEMP:
case LOCAL_LOW_TEMP_1:
case LOCAL_LOW_TEMP_2:
case LOCAL_LOW_TEMP_3:
case LOCAL_LOW_TEMP_4:
memcpy(dealInfo->LowT_NineFrame_Avg, numbers, count * sizeof(U16_t));
JZSDK_LOG_DEBUG("读取低本地温度数据");
JZSDK_LOG_DEBUG("读取低数据");
break;
case SPC_MARK_DATA:
... ... @@ -796,19 +828,44 @@ T_JZsdkReturnCode IRC_LocalFrame_DataWrite(int flag,struct IRC_param *dealInfo,
switch (flag)
{
case HIGH_SD_TEMP:
snprintf(str, sizeof(str), "%s/%s",IRC_FILE_DIR, HIGH_SD_DIR);
case FACTORY_HIGH_TEMP:
snprintf(str, sizeof(str), "%s/%s",IRC_FILE_DIR, FACTORY_HIGH_DATA_DIR);
break;
case LOCAL_HIGH_TEMP_1:
snprintf(str, sizeof(str), "%s/%s",IRC_FILE_DIR, LOCAL_HIGH_SPC_DATA_DIR);
break;
case LOW_SD_TEMP:
snprintf(str, sizeof(str), "%s/%s",IRC_FILE_DIR, LOW_SD_DIR);
case LOCAL_HIGH_TEMP_2:
snprintf(str, sizeof(str), "%s/%s",IRC_FILE_DIR, LOCAL_HIGH_SPC_DATA_DIR_2);
break;
case LOCAL_HIGH_TEMP_3:
snprintf(str, sizeof(str), "%s/%s",IRC_FILE_DIR, LOCAL_HIGH_SPC_DATA_DIR_3);
break;
case LOCAL_HIGH_TEMP_4:
snprintf(str, sizeof(str), "%s/%s",IRC_FILE_DIR, LOCAL_HIGH_SPC_DATA_DIR_4);
break;
case FACTORY_LOW_TEMP:
snprintf(str, sizeof(str), "%s/%s",IRC_FILE_DIR, FACTORY_LOW_DATA_DIR);
break;
case LOCAL_LOW_TEMP_1:
snprintf(str, sizeof(str), "%s/%s",IRC_FILE_DIR, LOCAL_LOW_SPC_DATA_DIR);
break;
case LOCAL_LOW_TEMP_2:
snprintf(str, sizeof(str), "%s/%s",IRC_FILE_DIR, LOCAL_LOW_SPC_DATA_DIR_2);
break;
case HIGH_LOCAL_TEMP:
snprintf(str, sizeof(str), "%s/%s",IRC_FILE_DIR, HIGH_LOCAL_DIR);
case LOCAL_LOW_TEMP_3:
snprintf(str, sizeof(str), "%s/%s",IRC_FILE_DIR, LOCAL_LOW_SPC_DATA_DIR_3);
break;
case LOW_LOCAL_TEMP:
snprintf(str, sizeof(str), "%s/%s",IRC_FILE_DIR, LOW_LOCAL_DIR);
case LOCAL_LOW_TEMP_4:
snprintf(str, sizeof(str), "%s/%s",IRC_FILE_DIR, LOCAL_LOW_SPC_DATA_DIR_4);
break;
case SPC_MARK_DATA:
... ...
... ... @@ -30,11 +30,11 @@ static T_JZsdkReturnCode IRC_param_Init(struct IRC_param **index, int height, in
备注:
*/
T_JZsdkReturnCode IRC_ParamInit(struct IRC_param **index, int height, int width, int frameRate)
T_JZsdkReturnCode IRC_ParamInit(int height, int width, int frameRate)
{
T_JZsdkReturnCode ret;
IRC_param_Init(index, height, width, frameRate);
ret = IRC_param_Init(&g_IRC_Param, height, width, frameRate);
if (g_IRC_Param->DealWay ==IRC_DEALMODE_KTLIB)
{
... ... @@ -44,86 +44,256 @@ T_JZsdkReturnCode IRC_ParamInit(struct IRC_param **index, int height, int width,
return ret;
}
/*
功能:IRC数据预处理
参数:U8_t *rawData, int dataSize, U16_t **returnData, unsigned int *returnDataSize, struct IRC_param *dealInfo
返回值:T_JZsdkReturnCode
备注:
*/
static T_JZsdkReturnCode IRC_data_PreliminaryDeal(U8_t *rawData , int dataSize, U16_t *returnData, unsigned int *returnDataSize, struct IRC_param *dealInfo)
static T_JZsdkReturnCode IRC_data_PreliminaryDeal(U16_t *rawData ,unsigned int *rawSize, struct IRC_param *dealInfo)
{
T_JZsdkReturnCode ret;
//无图像
if (rawData == NULL || dealInfo == NULL)
{
JZSDK_LOG_ERROR("irc数据预处理失败,参数错误");
return JZ_ERROR_SYSTEM_MODULE_CODE_FAILURE;
}
//合成像素,u8转换合并成u16
ret = JZsdk_Merge_U8_to_U16_byReverse(rawData, dataSize, returnData, returnDataSize);
if (ret != JZ_ERROR_SYSTEM_MODULE_CODE_SUCCESS || *returnDataSize != dealInfo->PixelNum)
{
JZSDK_LOG_ERROR("像素合成失败");
return ret;
}
//数据检查
for (int i = 0; i < *returnDataSize; i++)
for (int i = 0; i < *rawSize; i++)
{
if (returnData[i] > dealInfo->ExpectedMax)
if (rawData[i] > dealInfo->ExpectedMax)
{
returnData[i] = dealInfo->ExpectedMax;
rawData[i] = dealInfo->ExpectedMax;
}
}
/*
手动盲点纠正部分
手动盲点纠正默认打开
*/
/*************************************手动盲点部分******************************************************************/
//手动盲点处理
if (dealInfo->BlindNum > 0)
{
JZsdk_CutBadPixel_U16(returnData, dealInfo->Width, dealInfo->Height, dealInfo->Blind, dealInfo->BlindNum,dealInfo->BadPixelExtern, 1);
JZsdk_CutBadPixel_U16(rawData, dealInfo->Width, dealInfo->Height, dealInfo->Blind, dealInfo->BlindNum,dealInfo->BadPixelExtern, 1);
}
/*
自动盲点纠正部分
*/
/*************************************自动盲点部分******************************************************************/
//如果自动盲点纠正已打开
if (dealInfo->AutoBadPixelReset == JZ_FLAGCODE_ON)
{
JZsdk_RawCheckisBadPixel_U16(returnData, dealInfo->Width, dealInfo->Height, dealInfo->BadPixel, &dealInfo->BadPixelNum);
//检查自动盲点的位置
JZsdk_RawCheckisBadPixel_U16(rawData, dealInfo->Width, dealInfo->Height, dealInfo->BadPixel, &dealInfo->BadPixelNum);
//关闭自动盲点校正
dealInfo->AutoBadPixelReset = JZ_FLAGCODE_OFF;
}
//盲元纠正
//如果自动盲点校正已打开
if (dealInfo->AutoBadPixel_flag == JZ_FLAGCODE_ON)
{
JZsdk_CutBadPixel_U16(returnData, dealInfo->Width, dealInfo->Height, dealInfo->BadPixel, dealInfo->BadPixelNum,dealInfo->BadPixelExtern, 1);
//对数据进行自动盲点校正
JZsdk_CutBadPixel_U16(rawData, dealInfo->Width, dealInfo->Height, dealInfo->BadPixel, dealInfo->BadPixelNum,dealInfo->BadPixelExtern, 1);
}
/*************************************两点纠正部分******************************************************************/
//判断两点的数据模式是否有改变
//注:该判断默认拥有出厂的数据
if (dealInfo->TPC_mode != dealInfo->TPC_LastTPCMode)
{
/********读取高温数据**********************************************************/
switch (dealInfo->TPC_mode)
{
//读取出厂的高温数据
case 0:
ret = IRC_LocalFrame_DataRead(FACTORY_HIGH_TEMP, dealInfo);
break;
//读取手动数据1
case 1:
ret = IRC_LocalFrame_DataRead(LOCAL_HIGH_TEMP_1, dealInfo);
break;
//读取手动数据2
case 2:
ret = IRC_LocalFrame_DataRead(LOCAL_HIGH_TEMP_2, dealInfo);
break;
//读取手动数据3
case 3:
ret = IRC_LocalFrame_DataRead(LOCAL_HIGH_TEMP_3, dealInfo);
break;
//读取手动数据4
case 4:
ret = IRC_LocalFrame_DataRead(LOCAL_HIGH_TEMP_4, dealInfo);
break;
//其余读取出厂数据
default:
ret = IRC_LocalFrame_DataRead(FACTORY_HIGH_TEMP, dealInfo);
break;
}
//如果读取数据失败,则当场开启录入标志位
if (ret != JZ_ERROR_SYSTEM_MODULE_CODE_SUCCESS)
{
JZSDK_LOG_ERROR("数据读取失败");
//先填入出厂数据
IRC_LocalFrame_DataRead(FACTORY_HIGH_TEMP, dealInfo);
//且开启数据录入标志位
dealInfo->HighT_flag = JZ_FLAGCODE_ON;
}
/********读取低温数据**********************************************************/
switch (dealInfo->TPC_mode)
{
//读取出厂的低温数据
case 0:
ret = IRC_LocalFrame_DataRead(FACTORY_LOW_TEMP, dealInfo);
break;
//读取手动数据1
case 1:
ret = IRC_LocalFrame_DataRead(LOCAL_LOW_TEMP_1, dealInfo);
break;
//读取手动数据2
case 2:
ret = IRC_LocalFrame_DataRead(LOCAL_LOW_TEMP_2, dealInfo);
break;
//读取手动数据3
case 3:
ret = IRC_LocalFrame_DataRead(LOCAL_LOW_TEMP_3, dealInfo);
break;
//读取手动数据4
case 4:
ret = IRC_LocalFrame_DataRead(LOCAL_LOW_TEMP_4, dealInfo);
break;
//其余读取出厂数据
default:
ret = IRC_LocalFrame_DataRead(FACTORY_LOW_TEMP, dealInfo);
break;
}
//如果读取数据失败,则当场开启录入标志位
if (ret != JZ_ERROR_SYSTEM_MODULE_CODE_SUCCESS)
{
JZSDK_LOG_ERROR("数据读取失败");
//先填入出厂数据
IRC_LocalFrame_DataRead(FACTORY_LOW_TEMP, dealInfo);
//且开启数据录入标志位
dealInfo->LowT_flag = JZ_FLAGCODE_ON;
}
//***********重新计算两点校正的斜率**************************************/
JZIrcLib_TPC_Slope_Calculation2(dealInfo);
}
/*
两点纠正部分
*/
//如果开启了低温循环标定
//如果开启了低温循环标定,计算低温循环数据
if (dealInfo->LowT_flag != JZ_FLAGCODE_OFF)
{
IRC_LowT_CycleCalibration(returnData, dealInfo);
//将数据写入缓冲区
IRC_LowT_CycleCalibration(rawData, dealInfo);
//数据重新缓存完毕,将缓存保存到本地
if (dealInfo->LowT_flag == JZ_FLAGCODE_OFF)
{
switch (dealInfo->TPC_mode)
{
//写入出厂的低温数据
case 0:
JZSDK_LOG_DEBUG("无法覆盖出厂的低温数据");
break;
//写入手动数据1
case 1:
IRC_LocalFrame_DataWrite(LOCAL_LOW_TEMP_1,dealInfo, dealInfo->LowT_NineFrame_Avg, dealInfo->PixelNum);
break;
//写入手动数据2
case 2:
IRC_LocalFrame_DataWrite(LOCAL_LOW_TEMP_2,dealInfo, dealInfo->LowT_NineFrame_Avg, dealInfo->PixelNum);
break;
//写入手动数据3
case 3:
IRC_LocalFrame_DataWrite(LOCAL_LOW_TEMP_3,dealInfo, dealInfo->LowT_NineFrame_Avg, dealInfo->PixelNum);
break;
//写入手动数据4
case 4:
IRC_LocalFrame_DataWrite(LOCAL_LOW_TEMP_4,dealInfo, dealInfo->LowT_NineFrame_Avg, dealInfo->PixelNum);
break;
default:
JZSDK_LOG_DEBUG("无法覆盖出厂的低温数据");
break;
}
}
//***********重新计算两点校正的斜率**************************************/
JZIrcLib_TPC_Slope_Calculation2(dealInfo);
}
//如果开启了高温循环标定
//如果开启了高温循环标定,计算高温循环数据
if (dealInfo->HighT_flag != JZ_FLAGCODE_OFF)
{
IRC_HighT_CycleCalibration(returnData, dealInfo);
//将数据写入缓冲区
IRC_HighT_CycleCalibration(rawData, dealInfo);
//数据重新缓存完毕,将缓存保存到本地
if (dealInfo->HighT_flag == JZ_FLAGCODE_OFF)
{
switch (dealInfo->TPC_mode)
{
//写入出厂的高温数据
case 0:
JZSDK_LOG_DEBUG("无法覆盖出厂的高温数据");
break;
//写入手动数据1
case 1:
IRC_LocalFrame_DataWrite(LOCAL_HIGH_TEMP_1,dealInfo, dealInfo->HighT_NineFrame_Avg, dealInfo->PixelNum);
break;
//写入手动数据2
case 2:
IRC_LocalFrame_DataWrite(LOCAL_HIGH_TEMP_2,dealInfo, dealInfo->HighT_NineFrame_Avg, dealInfo->PixelNum);
break;
//写入手动数据3
case 3:
IRC_LocalFrame_DataWrite(LOCAL_HIGH_TEMP_3,dealInfo, dealInfo->HighT_NineFrame_Avg, dealInfo->PixelNum);
break;
//写入手动数据4
case 4:
IRC_LocalFrame_DataWrite(LOCAL_HIGH_TEMP_4,dealInfo, dealInfo->HighT_NineFrame_Avg, dealInfo->PixelNum);
break;
default:
JZSDK_LOG_DEBUG("无法覆盖出厂的高温数据");
break;
}
}
//***********重新计算两点校正的斜率**************************************/
JZIrcLib_TPC_Slope_Calculation2(dealInfo);
}
/*
单点纠正部分
*/
//两点校正模式
dealInfo->TPC_LastTPCMode = dealInfo->TPC_mode;
/*************************************单点纠正部分******************************************************************/
//单点校正spc如果未进行过数据的标定,则进行一次
if (dealInfo->FirstSPC_flag == JZ_FLAGCODE_OFF)
{
... ... @@ -155,7 +325,7 @@ static T_JZsdkReturnCode IRC_data_PreliminaryDeal(U8_t *rawData , int dataSize,
{
JZSDK_LOG_INFO("SPC重置标定");
memcpy(dealInfo->SPC_Mark_Data, returnData, dealInfo->PixelNum * sizeof(U16_t));
memcpy(dealInfo->SPC_Mark_Data, rawData, dealInfo->PixelNum * sizeof(U16_t));
//保存spc标定点
IRC_LocalFrame_DataWrite(SPC_MARK_DATA, dealInfo, (U16_t *)dealInfo->SPC_Mark_Data, dealInfo->PixelNum);
... ... @@ -163,25 +333,8 @@ static T_JZsdkReturnCode IRC_data_PreliminaryDeal(U8_t *rawData , int dataSize,
dealInfo->SPC_ResetFlag = JZ_FLAGCODE_OFF;
}
//备注:这部分使用我们自己的库才要这样做
//如果开启了spc单点校正
if(dealInfo->FrameCorrectMode == IRC_CORRCTION_SPC && dealInfo->DealWay == IRC_DEALMODE_JZSDK)
{
//计算spc参数(后续要改成只计算一次,总不能每帧都计算吧)
if (dealInfo->SPC_mode == 0 && dealInfo->LowT_NineFrame_Avg != NULL)
{
IRC_SPC_ParamCorrect(dealInfo, dealInfo->LowT_NineFrame_Avg);
}
else
{
IRC_SPC_ParamCorrect(dealInfo, dealInfo->SPC_Mark_Data);
}
}
/*************************************气体增强部分,标记气体增图像图像******************************************************************/
/*
气体增强部分
标记气体增图像图像
*/
//如果开启气体增强时,未标记过气体画面,则标记
if (dealInfo->OutputPixelColorMode == 2 && dealInfo->First_DGCE_flag == JZ_FLAGCODE_OFF)
{
... ... @@ -194,14 +347,13 @@ static T_JZsdkReturnCode IRC_data_PreliminaryDeal(U8_t *rawData , int dataSize,
if (dealInfo->DGCE_ResetFlag == JZ_FLAGCODE_ON)
{
JZSDK_LOG_INFO("气体增强重置标定");
memcpy(dealInfo->DGCE_Mark_Data, returnData, dealInfo->PixelNum * sizeof(U16_t));
memcpy(dealInfo->DGCE_Mark_Data, rawData, dealInfo->PixelNum * sizeof(U16_t));
dealInfo->DGCE_ResetFlag = JZ_FLAGCODE_OFF;
}
return JZ_ERROR_SYSTEM_MODULE_CODE_SUCCESS;
}
unsigned int Get_2DImage_PointToArray(int x, int y, int width, int height)
{
return (x + y*width);
... ... @@ -215,7 +367,6 @@ unsigned int Get_2DImage_PointToArray(int x, int y, int width, int height)
*/
static T_JZsdkReturnCode IRC_Postdeal(unsigned char *rgb_data, struct IRC_param *dealInfo)
{
//将rgb图形镜像
int MirrorImageFlag = JZ_FLAGCODE_ON;
... ... @@ -235,7 +386,7 @@ static T_JZsdkReturnCode IRC_Postdeal(unsigned char *rgb_data, struct IRC_param
for (y = 0; y < dealInfo->Height; y++)
{
// 复制当前行到临时缓冲区
memcpy(temp_row, rgb_data + y * bytes_per_row, bytes_per_row);
memcpy(temp_row, rgb_data + (y * bytes_per_row), bytes_per_row);
// 从右向左(即反向)复制临时缓冲区的数据回原位置
for (x = 0; x < dealInfo->Width; x++) {
... ... @@ -252,7 +403,6 @@ static T_JZsdkReturnCode IRC_Postdeal(unsigned char *rgb_data, struct IRC_param
free(temp_row); // 释放临时缓冲区
}
//修复外圈图像
if (dealInfo->RingRepair == JZ_FLAGCODE_ON)
{
... ... @@ -398,35 +548,35 @@ static T_JZsdkReturnCode IRC_Postdeal(unsigned char *rgb_data, struct IRC_param
}
/*
功能:IRC 帧处理
参数:
返回值:T_JZsdkReturnCode
备注:
/****************************
*/
T_JZsdkReturnCode IRC_FrameDeal(unsigned char *rawData ,unsigned int dataSize, unsigned char **outData, unsigned int *outDataSize)
功能:IRC 帧处理
参数:
返回值:T_JZsdkReturnCode
备注:
* *************/
T_JZsdkReturnCode IRC_FrameDeal(U16_t *rawData ,unsigned int dataSize, unsigned char **outData, unsigned int *outDataSize)
{
//1、数据预处理(将红外相机的8位数据,合成为16位数据,移除盲点,并记录标定点)
U16_t *U16_data = (U16_t *)malloc(dataSize/2 * sizeof(U16_t));
unsigned int U16_dataSize = 0;
IRC_data_PreliminaryDeal(rawData , dataSize, U16_data, &U16_dataSize, g_IRC_Param);
//1、数据预处理(移除数据的盲点,并记录标定数据)
IRC_data_PreliminaryDeal(rawData , &dataSize, g_IRC_Param);
//printf("预处理完成\n");
//JZSDK_LOG_DEBUG("数据预处理完成");
//2、数据前处理,将原始数据和16位数据 合成为rgb数据
U8_t *RGB_data = NULL;
unsigned int RGB_dataSize = 0;
//选择前处理的方式
switch (g_IRC_Param->DealWay)
{
case IRC_DEALMODE_KTLIB:
KtLib_DataDeal(U16_data, U16_dataSize, &RGB_data, &RGB_dataSize,g_IRC_Param);
KtLib_DataDeal(rawData, dataSize, &RGB_data, &RGB_dataSize,g_IRC_Param);
break;
case IRC_DEALMODE_JZSDK:
JZIrcLib_DataDeal(U16_data, U16_dataSize, &RGB_data, &RGB_dataSize,g_IRC_Param);
JZIrcLib_DataDeal(rawData, dataSize, &RGB_data, &RGB_dataSize,g_IRC_Param);
break;
default:
... ... @@ -434,12 +584,12 @@ T_JZsdkReturnCode IRC_FrameDeal(unsigned char *rawData ,unsigned int dataSize, u
break;
}
//printf("前处理完成\n");
//JZSDK_LOG_DEBUG("数据前处理完成");
//3、数据后处理,在rgb数据上,进行图像处理
IRC_Postdeal(RGB_data, g_IRC_Param);
//printf("后处理完成\n");
//JZSDK_LOG_DEBUG("数据后处理完成");
//4、数据转换,将rgb数据,转换为yuv数据
if (g_IRC_Param->IRC_outPixelMode == PIXEL_MODE_YUV420)
... ... @@ -447,7 +597,8 @@ T_JZsdkReturnCode IRC_FrameDeal(unsigned char *rawData ,unsigned int dataSize, u
U8_t *yuv_frame = (U8_t *)malloc(g_IRC_Param->Width*g_IRC_Param->Height*3/2 *sizeof(U8_t));
// 调用转换函数
Stream_rgb888_to_yuv420p(RGB_data,g_IRC_Param->Width,g_IRC_Param->Height, yuv_frame);
//Stream_rgb888_to_yuv420p(RGB_data,g_IRC_Param->Width,g_IRC_Param->Height, yuv_frame);
Stream_rgb888_to_yuv420sp(RGB_data,g_IRC_Param->Width,g_IRC_Param->Height, yuv_frame);
*outData = yuv_frame;
*outDataSize = g_IRC_Param->Width*g_IRC_Param->Height*3/2;
... ... @@ -458,7 +609,6 @@ T_JZsdkReturnCode IRC_FrameDeal(unsigned char *rawData ,unsigned int dataSize, u
}
//printf("输出yuv\n");
}
/*****************
... ... @@ -467,18 +617,12 @@ T_JZsdkReturnCode IRC_FrameDeal(unsigned char *rawData ,unsigned int dataSize, u
* 如果码流类型要求为rgb888
*
* **********************/
if (g_IRC_Param->IRC_outPixelMode == PIXEL_MODE_RGB888)
else if (g_IRC_Param->IRC_outPixelMode == PIXEL_MODE_RGB888)
{
*outData = RGB_data;
*outDataSize = g_IRC_Param->Width*g_IRC_Param->Height*3;
}
//5、数据释放
if (U16_data != NULL)
{
JZsdk_Free((void *)U16_data);
}
return JZ_ERROR_SYSTEM_MODULE_CODE_SUCCESS;
}
... ... @@ -790,7 +934,6 @@ static T_JZsdkReturnCode IRC_Set_SpcTime(int timebase)
return JZ_ERROR_SYSTEM_MODULE_CODE_SUCCESS;
}
int Last_TpcMode = 0;
/********************************************
*
* 设置两点纠正模式
... ... @@ -804,28 +947,16 @@ static T_JZsdkReturnCode IRC_Set_TpcMode(int mode)
if (mode == 0)
{
JZSDK_LOG_INFO("出厂高低温模式");
if (Last_TpcMode != mode)
{
//重新读取数据
IRC_LocalFrame_DataRead(HIGH_SD_TEMP, g_IRC_Param);
}
}
else if (mode == 1)
else if (mode == 1 || mode == 2 || mode == 3 || mode == 4)
{
JZSDK_LOG_INFO("手动高低温模式");
if (Last_TpcMode != mode)
{
//重新读取数据
IRC_LocalFrame_DataRead(HIGH_LOCAL_TEMP, g_IRC_Param);
}
JZSDK_LOG_INFO("手动高低温模式 %d", mode);
}
else
{
return JZ_ERROR_SYSTEM_MODULE_CODE_FAILURE;
}
Last_TpcMode = mode;
g_IRC_Param->TPC_mode = mode;
return JZ_ERROR_SYSTEM_MODULE_CODE_SUCCESS;
... ... @@ -968,71 +1099,6 @@ T_JZsdkReturnCode Proc_IRC_param(int flagcode, enum JZsdk_Widget_Control paramfl
/*
填入高温数据
*/
static T_JZsdkReturnCode IRC_Set_HighTempData(struct IRC_param *dealInfo)
{
T_JZsdkReturnCode ret;
//先尝试 填入录入的数据
ret = IRC_LocalFrame_DataRead(HIGH_SD_TEMP, dealInfo);
if (ret == JZ_ERROR_SYSTEM_MODULE_CODE_SUCCESS)
{
JZSDK_LOG_INFO("外置高温数据录入成功");
return ret;
}
//失败了录入 本地数据
ret = IRC_LocalFrame_DataRead(HIGH_LOCAL_TEMP, dealInfo);
if (ret != JZ_ERROR_SYSTEM_MODULE_CODE_SUCCESS)
{
JZSDK_LOG_ERROR("没有本地高温数据数据,打开录入标志");
dealInfo->HighT_flag = JZ_FLAGCODE_ON; //打开录入标志
return JZ_ERROR_SYSTEM_MODULE_CODE_FAILURE;
}
JZSDK_LOG_INFO("本地高温数据录入成功");
return JZ_ERROR_SYSTEM_MODULE_CODE_SUCCESS;
}
/*
填入低温数据
*/
static T_JZsdkReturnCode IRC_Set_LowTempData(struct IRC_param *dealInfo)
{
T_JZsdkReturnCode ret;
//先尝试 填入录入的数据
ret = IRC_LocalFrame_DataRead(LOW_SD_TEMP, dealInfo);
if (ret == JZ_ERROR_SYSTEM_MODULE_CODE_SUCCESS)
{
JZSDK_LOG_INFO("外置低温数据录入成功");
return ret;
}
//失败了录入 本地数据
ret = IRC_LocalFrame_DataRead(LOW_LOCAL_TEMP, dealInfo);
if (ret != JZ_ERROR_SYSTEM_MODULE_CODE_SUCCESS)
{
JZSDK_LOG_ERROR("没有本地低温数据数据,打开录入标志");
dealInfo->LowT_flag = JZ_FLAGCODE_ON; //打开录入标志
}
JZSDK_LOG_INFO("本地低温数据录入成功");
return JZ_ERROR_SYSTEM_MODULE_CODE_SUCCESS;
}
... ... @@ -1089,86 +1155,104 @@ static T_JZsdkReturnCode IRC_param_Init(struct IRC_param **index, int height, in
IrcDealCfg->LowT_flag = JZ_FLAGCODE_OFF;
IrcDealCfg->HighT_flag = JZ_FLAGCODE_OFF;
IrcDealCfg->TPC_Diff = (double *)malloc(IrcDealCfg->Height * IrcDealCfg->Width * sizeof(double));
if (IrcDealCfg->TPC_Diff == NULL)
IrcDealCfg->TPC_Offset = (double *)malloc(IrcDealCfg->PixelNum * sizeof(double));
if (IrcDealCfg->TPC_Offset == NULL)
{
JZSDK_LOG_ERROR("TPC_Diff注册失败");
JZSDK_LOG_ERROR("TPC_Offset注册失败");
}
IrcDealCfg->TPC_Slope = (double *)malloc(IrcDealCfg->Height * IrcDealCfg->Width * sizeof(double));
if (IrcDealCfg->TPC_Slope == NULL)
IrcDealCfg->TPC_Gain = (double *)malloc(IrcDealCfg->PixelNum * sizeof(double));
if (IrcDealCfg->TPC_Gain == NULL)
{
JZSDK_LOG_ERROR("TPC_Slope注册失败");
JZSDK_LOG_ERROR("TPC_Gain注册失败");
}
IrcDealCfg->HighT_NineFrameAdd = (U16_t *)malloc(IrcDealCfg->Height * IrcDealCfg->Width * sizeof(U16_t));
IrcDealCfg->HighT_NineFrameAdd = (U16_t *)malloc(IrcDealCfg->PixelNum * sizeof(U16_t));
if (IrcDealCfg->HighT_NineFrameAdd == NULL)
{
JZSDK_LOG_ERROR("HighT_NineFrameAdd注册失败");
}
IrcDealCfg->LowT_NineFrameAdd = (U16_t *)malloc(IrcDealCfg->Height * IrcDealCfg->Width * sizeof(U16_t));
IrcDealCfg->LowT_NineFrameAdd = (U16_t *)malloc(IrcDealCfg->PixelNum * sizeof(U16_t));
if (IrcDealCfg->LowT_NineFrameAdd == NULL)
{
JZSDK_LOG_ERROR("LowT_NineFrameAdd注册失败");
}
IrcDealCfg->HighT_NineFrame_Avg = (U16_t *)malloc(IrcDealCfg->Height * IrcDealCfg->Width * sizeof(U16_t));
IrcDealCfg->HighT_NineFrame_Avg = (U16_t *)malloc(IrcDealCfg->PixelNum * sizeof(U16_t));
if (IrcDealCfg->HighT_NineFrame_Avg == NULL)
{
JZSDK_LOG_ERROR("HighT_NineFrame_Avg注册失败");
}
IrcDealCfg->LowT_NineFrame_Avg = (U16_t *)malloc(IrcDealCfg->Height * IrcDealCfg->Width * sizeof(U16_t));
IrcDealCfg->LowT_NineFrame_Avg = (U16_t *)malloc(IrcDealCfg->PixelNum * sizeof(U16_t));
if (IrcDealCfg->LowT_NineFrame_Avg == NULL)
{
JZSDK_LOG_ERROR("LowT_NineFrame_Avg注册失败");
}
IrcDealCfg->SPC_Diff = (double *)malloc(IrcDealCfg->Height * IrcDealCfg->Width * sizeof(double));
IrcDealCfg->SPC_Diff = (double *)malloc(IrcDealCfg->PixelNum * sizeof(double));
if (IrcDealCfg->SPC_Diff == NULL)
{
JZSDK_LOG_ERROR("SPC_Diff注册失败");
}
IrcDealCfg->SPC_Slope = (double *)malloc(IrcDealCfg->Height * IrcDealCfg->Width * sizeof(double));
IrcDealCfg->SPC_Slope = (double *)malloc(IrcDealCfg->PixelNum * sizeof(double));
if (IrcDealCfg->SPC_Slope == NULL)
{
JZSDK_LOG_ERROR("SPC_Slope注册失败");
}
IrcDealCfg->DGCE_Area = (U16_t *)malloc(IrcDealCfg->Height * IrcDealCfg->Width * sizeof(U16_t));
IrcDealCfg->DGCE_Area = (U16_t *)malloc(IrcDealCfg->PixelNum * sizeof(U16_t));
if (IrcDealCfg->DGCE_Area == NULL)
{
JZSDK_LOG_ERROR("DGCE_Area注册失败");
}
IrcDealCfg->DGCE_Mark_Data = (U16_t *)malloc(IrcDealCfg->Height * IrcDealCfg->Width * sizeof(U16_t));
IrcDealCfg->DGCE_Mark_Data = (U16_t *)malloc(IrcDealCfg->PixelNum * sizeof(U16_t));
if (IrcDealCfg->DGCE_Mark_Data == NULL)
{
JZSDK_LOG_ERROR("DGCE_Mark_Data注册失败");
}
IrcDealCfg->SPC_Mark_Data = (U16_t *)malloc(IrcDealCfg->Height * IrcDealCfg->Width * sizeof(U16_t));
IrcDealCfg->SPC_Mark_Data = (U16_t *)malloc(IrcDealCfg->PixelNum * sizeof(U16_t));
if (IrcDealCfg->SPC_Mark_Data == NULL)
{
JZSDK_LOG_ERROR("SPC_Mark_Data注册失败");
}
//填入本地高温数据
ret = IRC_Set_HighTempData(IrcDealCfg);
if (ret == JZ_ERROR_SYSTEM_MODULE_CODE_SUCCESS)
IrcDealCfg->Factory_HighT_Mark_Data = (U16_t *)malloc(IrcDealCfg->PixelNum * sizeof(U16_t));
if (IrcDealCfg->Factory_HighT_Mark_Data == NULL)
{
JZSDK_LOG_INFO("本地高温数据 0:%d 1:%d 2:%d", IrcDealCfg->HighT_NineFrame_Avg[2550], IrcDealCfg->HighT_NineFrame_Avg[2551], IrcDealCfg->HighT_NineFrame_Avg[2552]);
JZSDK_LOG_ERROR("Factory_HighT_Mark_Data注册失败");
}
IrcDealCfg->Factory_HighT_Mark_Data_flag = JZ_FLAGCODE_OFF;
//填入低温数据
ret = IRC_Set_LowTempData(IrcDealCfg);
if (ret == JZ_ERROR_SYSTEM_MODULE_CODE_SUCCESS)
IrcDealCfg->Factory_LowT_Mark_Data = (U16_t *)malloc(IrcDealCfg->PixelNum * sizeof(U16_t));
if (IrcDealCfg->Factory_LowT_Mark_Data == NULL)
{
JZSDK_LOG_INFO("本地低温数据 0:%d 1:%d 2:%d", IrcDealCfg->LowT_NineFrame_Avg[2550], IrcDealCfg->LowT_NineFrame_Avg[2551], IrcDealCfg->LowT_NineFrame_Avg[2552]);
JZSDK_LOG_ERROR("Factory_LowT_Mark_Data注册失败");
}
IrcDealCfg->Factory_LowT_Mark_Data_flag = JZ_FLAGCODE_OFF;
/*******************************************************************************************
*
* 加载文件数据
*
*
**********************************************************************************************/
//填入出厂的高温数据
IRC_LocalFrame_DataRead(FACTORY_HIGH_TEMP, IrcDealCfg);
//填入低温数据
IRC_LocalFrame_DataRead(FACTORY_LOW_TEMP, IrcDealCfg);
//计算两点系数***********重新计算两点校正的斜率**************************************/
JZIrcLib_TPC_Slope_Calculation2(IrcDealCfg);
//开启tpc校验标志
IrcDealCfg->TPC_ResetFlag = JZ_FLAGCODE_ON;
//获取markdata的本地数据
ret = IRC_LocalFrame_DataRead(SPC_MARK_DATA, IrcDealCfg);
... ...
... ... @@ -26,8 +26,8 @@ extern "C" {
/* Exported functions --------------------------------------------------------*/
T_JZsdkReturnCode IRC_ParamInit(struct IRC_param **index, int height, int width, int frameRate);
T_JZsdkReturnCode IRC_FrameDeal(unsigned char *rawData ,unsigned int dataSize, unsigned char **outData, unsigned int *outDataSize);
T_JZsdkReturnCode IRC_ParamInit(int height, int width, int frameRate);
T_JZsdkReturnCode IRC_FrameDeal(U16_t *rawData ,unsigned int dataSize, unsigned char **outData, unsigned int *outDataSize);
T_JZsdkReturnCode IRC_SetRawPixel_ResetFlag();
T_JZsdkReturnCode IRC_SetGrayPixel_ResetFlag();
... ...
... ... @@ -202,274 +202,6 @@ static T_JZsdkReturnCode JZsdk_vKT(U16_t *in_str, U8_t **out_str, int *out_str_l
}
// //将14位灰度图数据转换为原始码流
// T_JZsdkReturnCode IRC_14bitGrayData_to_RawData(U8_t *data, int data_len, U8_t **raw_data, int *raw_data_len, struct IRC_param *dealInfo, int rawType)
// {
// T_JZsdkReturnCode ret;
// //无图像
// if (data == NULL || dealInfo == NULL)
// {
// return JZ_ERROR_SYSTEM_MODULE_CODE_INVALID_PARAMETER;
// }
// U16_t u16_RawData[dealInfo->PixelNum]; //原始16位码流
// U16_t u16_CorrentData[dealInfo->PixelNum];
// unsigned int u16_data_len; //16位码流的数据长度
// //1、合成像素,u8转换合并成u16
// JZsdk_Merge_U8_to_U16_byReverse(data, data_len,u16_RawData, &u16_data_len);
// if (u16_data_len != dealInfo->PixelNum)
// {
// JZSDK_LOG_ERROR("像素合成失败");
// }
// //2、数据检查
// for (int i = 0; i < dealInfo->PixelNum; i++)
// {
// if (u16_RawData[i] > dealInfo->ExpectedMax)
// {
// u16_RawData[i] = dealInfo->ExpectedMax;
// }
// }
// //3、如果开启了原始流坏点寻找
// if (dealInfo->AutoBadPixelReset == JZ_FLAGCODE_ON)
// {
// JZsdk_RawCheckisBadPixel_U16(u16_RawData, dealInfo->Width, dealInfo->Height, dealInfo->BadPixel, &dealInfo->BadPixelNum);
// dealInfo->AutoBadPixelReset = JZ_FLAGCODE_OFF;
// }
// //盲元纠正
// JZsdk_CutBadPixel_U16(u16_RawData, dealInfo->Width, dealInfo->Height, dealInfo->BadPixel, dealInfo->BadPixelNum,dealInfo->BadPixelExtern, 1);
// /***************************************************************************
// *
// * 图像标定记录
// *
// *
// * *******************************************************************/
// //3、如果开启了低温循环标定
// if (dealInfo->LowT_flag != JZ_FLAGCODE_OFF)
// {
// IRC_LowT_CycleCalibration(u16_RawData, dealInfo);
// }
// //4、如果开启了高温循环标定
// if (dealInfo->HighT_flag != JZ_FLAGCODE_OFF)
// {
// IRC_HighT_CycleCalibration(u16_RawData, dealInfo);
// }
// //单点校正spc如果未进行过数据的标定,则进行一次
// if (dealInfo->FirstSPC_flag == JZ_FLAGCODE_ON)
// {
// JZSDK_LOG_INFO("未进行过数据标定");
// memcpy(dealInfo->SPC_Mark_Data, u16_RawData, dealInfo->PixelNum * sizeof(U16_t));
// IRC_SPC_FrameSaveOrRead(dealInfo, 1, dealInfo->SPC_Mark_Data);
// IRC_SPC_ParamCorrect(dealInfo, dealInfo->SPC_Mark_Data);
// dealInfo->FirstSPC_flag = JZ_FLAGCODE_OFF;
// }
// //如果开启spc重置标定
// if (dealInfo->SPC_ResetFlag == JZ_FLAGCODE_ON)
// {
// JZSDK_LOG_INFO("SPC重置标定");
// memcpy(dealInfo->SPC_Mark_Data, u16_RawData, dealInfo->PixelNum * sizeof(U16_t));
// IRC_SPC_FrameSaveOrRead(dealInfo, 1, dealInfo->SPC_Mark_Data);
// IRC_SPC_ParamCorrect(dealInfo, dealInfo->SPC_Mark_Data);
// dealInfo->SPC_ResetFlag = JZ_FLAGCODE_OFF;
// }
// //标记气体增图像图像
// //如果开启气体增强时,未标记过
// if (dealInfo->OutputPixelColorMode == 2 && dealInfo->First_DGCE_flag == JZ_FLAGCODE_ON)
// {
// memcpy(dealInfo->DGCE_Mark_Data, u16_RawData, dealInfo->PixelNum * sizeof(U16_t) );
// dealInfo->First_DGCE_flag = JZ_FLAGCODE_OFF;
// }
// //如果开启气体增强重置标定
// if (dealInfo->DGCE_ResetFlag == JZ_FLAGCODE_ON)
// {
// JZSDK_LOG_INFO("气体增强重置标定");
// memcpy(dealInfo->DGCE_Mark_Data, u16_RawData, dealInfo->PixelNum * sizeof(U16_t));
// dealInfo->DGCE_ResetFlag = JZ_FLAGCODE_OFF;
// }
// /***************************************************************************
// *
// * 校正
// *
// *
// * *******************************************************************/
// //5、校正
// //先复制一份纠正用数据
// memcpy(u16_CorrentData, u16_RawData, dealInfo->PixelNum * sizeof(U16_t));
// //如果打开了两点校正
// if (dealInfo->FrameCorrectMode == IRC_CORRCTION_TPC)
// {
// IRC_TPC(u16_CorrentData, dealInfo);
// }
// if (dealInfo->FrameCorrectMode == IRC_CORRCTION_SPC)
// {
// IRC_SPC(u16_CorrentData, dealInfo);
// }
// //7、图像输出模式
// U8_t *GrayImage = NULL; //灰度图数组
// unsigned int GrayImageLen = 0; //灰度图长度
// U8_t *RgbImage = NULL; //rgb888的图像数组
// unsigned int RgbImageLen = 0; //rgb数组的长度
// switch (dealInfo->OutputPixelColorMode)
// {
// case 0: //默认输出模式
// {
// //u16转换为灰度图
// //IRC_Histogram_vKT(u16_CorrentData, &GrayImage, &GrayImageLen, dealInfo);
// JZsdk_vKT(u16_CorrentData, &GrayImage, &GrayImageLen, dealInfo);
// if (JZsdk_GrayBadPixelCheck_flag == JZ_FLAGCODE_ON)
// {
// JZsdk_GrayCheckisBadPixel_U8(GrayImage, dealInfo->Width, dealInfo->Height, dealInfo->GrayPixel, &dealInfo->GrayPixelNum);
// JZsdk_GrayBadPixelCheck_flag = JZ_FLAGCODE_OFF;
// }
// //JZsdk_CutBadPixel_U8(GrayImage, dealInfo->Width, dealInfo->Height, dealInfo->GrayPixel, dealInfo->GrayPixelNum,dealInfo->GrayPixelExtern, 1);
// //灰度图转rgb888
// IRC_GrayTo_RGB(GrayImage, &RgbImage, &RgbImageLen,dealInfo);
// }
// break;
// case 1: //伪彩输出模式
// {
// //u16转换为灰度图
// //IRC_Histogram_vKT(u16_CorrentData, &GrayImage, &GrayImageLen, dealInfo);
// JZsdk_vKT(u16_CorrentData, &GrayImage, &GrayImageLen, dealInfo);
// if (JZsdk_GrayBadPixelCheck_flag == JZ_FLAGCODE_ON)
// {
// JZsdk_GrayCheckisBadPixel_U8(GrayImage, dealInfo->Width, dealInfo->Height, dealInfo->GrayPixel, &dealInfo->GrayPixelNum);
// JZsdk_GrayBadPixelCheck_flag = JZ_FLAGCODE_OFF;
// }
// //JZsdk_CutBadPixel_U8(GrayImage, dealInfo->Width, dealInfo->Height, dealInfo->GrayPixel, dealInfo->GrayPixelNum,dealInfo->GrayPixelExtern, 1);
// //灰度图转伪彩rgb888
// ret = PseudoColor_Gray2Rgb(GrayImage, &RgbImage, &RgbImageLen, dealInfo->PixelNum);
// if (ret != JZ_ERROR_SYSTEM_MODULE_CODE_SUCCESS)
// {
// IRC_GrayTo_RGB(GrayImage, &RgbImage, &RgbImageLen,dealInfo);
// }
// }
// break;
// case 2: //气体色彩增强输出模式
// {
// //进行前需要自动打开单点纠正模式
// //u16转换为灰度图
// //IRC_Histogram_vKT(u16_CorrentData, &GrayImage, &GrayImageLen, dealInfo);
// JZsdk_vKT(u16_CorrentData, &GrayImage, &GrayImageLen, dealInfo);
// // if (JZsdk_GrayBadPixelCheck_flag == JZ_FLAGCODE_ON)
// // {
// // JZsdk_GrayCheckisBadPixel_U8(GrayImage, dealInfo->Width, dealInfo->Height, dealInfo->GrayPixel, &dealInfo->GrayPixelNum);
// // JZsdk_GrayBadPixelCheck_flag = JZ_FLAGCODE_OFF;
// // }
// // JZsdk_CutBadPixel_U8(GrayImage, dealInfo->Width, dealInfo->Height, dealInfo->GrayPixel, dealInfo->GrayPixelNum,dealInfo->GrayPixelExtern, 1);
// //转为rgb
// IRC_GrayTo_RGB(GrayImage, &RgbImage, &RgbImageLen,dealInfo);
// //灰度图转气体增强rgb888
// IRC_DynamicGasesColorEnhance(RgbImage, u16_RawData, dealInfo);
// }
// break;
// default:
// return JZ_ERROR_SYSTEM_MODULE_CODE_FAILURE;
// break;
// }
// // 在rgb图上画图形
// IRC_WriteGraphical(RgbImage, dealInfo);
// /*****************
// *
// *
// * 如果码流类型要求为yuv420p
// *
// * **********************/
// if (rawType == 0)
// {
// //rgb888转yuv
// U8_t *yuv_frame = (U8_t *)malloc(dealInfo->Width*dealInfo->Height*3/2 );
// if (yuv_frame == NULL)
// {
// printf("内存注册失败\n");
// if (GrayImage != NULL)
// {
// free(GrayImage);
// GrayImage = NULL;
// }
// if (RgbImage != NULL)
// {
// free(RgbImage);
// RgbImage = NULL;
// }
// return JZ_ERROR_SYSTEM_MODULE_CODE_INVALID_PARAMETER;
// }
// // 调用转换函数
// Stream_rgb888_to_yuv420p(RgbImage,dealInfo->Width,dealInfo->Height,yuv_frame);
// // 释放临时缓冲区,保留yuv_frame
// if (GrayImage != NULL)
// {
// free(GrayImage);
// GrayImage = NULL;
// }
// if (RgbImage != NULL)
// {
// free(RgbImage);
// RgbImage = NULL;
// }
// *raw_data = yuv_frame;
// *raw_data_len = dealInfo->Width*dealInfo->Height*3/2;
// }
// /*****************
// *
// *
// * 如果码流类型要求为rgb888
// *
// * **********************/
// if (rawType == 1)
// {
// // 释放临时缓冲区
// if (GrayImage != NULL)
// {
// free(GrayImage);
// GrayImage = NULL;
// }
// *raw_data = RgbImage;
// *raw_data_len = dealInfo->Width*dealInfo->Height*3;
// }
// return JZ_ERROR_SYSTEM_MODULE_CODE_SUCCESS;
// }
/******
*
* 单点矫正
... ... @@ -501,9 +233,38 @@ static T_JZsdkReturnCode JZIrcLib_SPC(U16_t *ImageData,struct IRC_param *dealInf
}
}
//JZSDK_LOG_INFO("单点校正");
return JZ_ERROR_SYSTEM_MODULE_CODE_SUCCESS;
}
/*******************
*
* 单点校正斜率计算
*
*
* *******************/
static T_JZsdkReturnCode JZIrcLib_SPC_Slope_Calculation(struct IRC_param *dealInfo)
{
if (dealInfo->SPC_mode == 0)
{
//采用出厂模式的斜率
if (dealInfo->TPC_mode == 0 && dealInfo->Factory_LowT_Mark_Data != NULL)
{
IRC_SPC_ParamCorrect(dealInfo, dealInfo->Factory_LowT_Mark_Data);
}
//采用手动模式的斜率
else
{
IRC_SPC_ParamCorrect(dealInfo, dealInfo->LowT_NineFrame_Avg);
}
}
//都不满足的情况下,采用默认的斜率
else
{
IRC_SPC_ParamCorrect(dealInfo, dealInfo->SPC_Mark_Data);
}
    return JZ_ERROR_SYSTEM_MODULE_CODE_SUCCESS;
}
/******
*
... ... @@ -519,15 +280,50 @@ T_JZsdkReturnCode JZIrcLib_TPC(U16_t *ImageData,struct IRC_param *dealInfo)
return JZ_ERROR_SYSTEM_MODULE_CODE_INVALID_PARAMETER;
}
// 应用两点校正公式
for (int i = 0; i < dealInfo->PixelNum; i++)
{
ImageData[i] = (int)(dealInfo->TPC_Gain[i] * ImageData[i] + dealInfo->TPC_Offset[i]);
if (ImageData[i] < 0)
{
ImageData[i] = 0;
}
else if (ImageData[i] > dealInfo->ExpectedMax)
{
ImageData[i] = dealInfo->ExpectedMax;
}
}
//JZSDK_LOG_INFO("两点校正");
return JZ_ERROR_SYSTEM_MODULE_CODE_SUCCESS; // 返回校正后的灰度数组
}
/*******************
*
* 两点校正斜率计算
*
*
* *******************/
static T_JZsdkReturnCode JZIrcLib_TPC_Slope_Calculation(struct IRC_param *dealInfo)
{
//无图像
if (dealInfo == NULL)
{
return JZ_ERROR_SYSTEM_MODULE_CODE_INVALID_PARAMETER;
}
// 如果tpc的斜率和tpc的截距都为0,重新计算斜率与斜距
int allZeroSlope_flag = 1, allZeroDiff_flag = 1;
for (int i = 0; i < dealInfo->PixelNum; i++)
{
if (dealInfo->TPC_Slope[i] != 0)
if (dealInfo->TPC_Gain[i] != 0)
{
allZeroSlope_flag = 0;
}
if (dealInfo->TPC_Diff[i] != 0) {
if (dealInfo->TPC_Offset[i] != 0)
{
allZeroDiff_flag = 0;
}
}
... ... @@ -550,33 +346,59 @@ T_JZsdkReturnCode JZIrcLib_TPC(U16_t *ImageData,struct IRC_param *dealInfo)
{
if (dealInfo->HighT_NineFrame_Avg[i] > dealInfo->LowT_NineFrame_Avg[i])
{
dealInfo->TPC_Slope[i] = (AvgSingleFrame_HighT - AvgSingleFrame_LowT) / (dealInfo->HighT_NineFrame_Avg[i] - dealInfo->LowT_NineFrame_Avg[i]);
dealInfo->TPC_Gain[i] = (AvgSingleFrame_HighT - AvgSingleFrame_LowT) / (dealInfo->HighT_NineFrame_Avg[i] - dealInfo->LowT_NineFrame_Avg[i]);
}
else
{
dealInfo->TPC_Slope[i] = 0;
dealInfo->TPC_Gain[i] = 0;
}
dealInfo->TPC_Diff[i] = AvgSingleFrame_LowT - dealInfo->TPC_Slope[i] * dealInfo->LowT_NineFrame_Avg[i];
dealInfo->TPC_Offset[i] = AvgSingleFrame_LowT - dealInfo->TPC_Gain[i] * dealInfo->LowT_NineFrame_Avg[i];
}
}
// 应用两点校正公式
}
/*******************
*
* 两点校正斜率计算
*
*
* *******************/
T_JZsdkReturnCode JZIrcLib_TPC_Slope_Calculation2(struct IRC_param *dealInfo)
{
//判断是否存在结构体
if (dealInfo == NULL)
{
return JZ_ERROR_SYSTEM_MODULE_CODE_INVALID_PARAMETER;
}
//计算像元平均响应
double AvgSingleFrame_LowT = 0, AvgSingleFrame_HighT = 0;
for (int i = 0; i < dealInfo->PixelNum; i++)
{
ImageData[i] = (int)(dealInfo->TPC_Slope[i] * ImageData[i] + dealInfo->TPC_Diff[i]);
if (ImageData[i] < 0)
AvgSingleFrame_LowT += dealInfo->LowT_NineFrame_Avg[i];
AvgSingleFrame_HighT += dealInfo->HighT_NineFrame_Avg[i];
}
AvgSingleFrame_HighT = AvgSingleFrame_HighT / dealInfo->PixelNum;
AvgSingleFrame_LowT = AvgSingleFrame_LowT / dealInfo->PixelNum;
for (int i = 0; i < dealInfo->PixelNum; i++)
{
if (dealInfo->HighT_NineFrame_Avg[i] != dealInfo->LowT_NineFrame_Avg[i])
{
ImageData[i] = 0;
dealInfo->TPC_Gain[i] = (AvgSingleFrame_HighT - AvgSingleFrame_LowT) / (dealInfo->HighT_NineFrame_Avg[i] - dealInfo->LowT_NineFrame_Avg[i]);
dealInfo->TPC_Offset[i] = AvgSingleFrame_LowT - dealInfo->TPC_Gain[i] * dealInfo->LowT_NineFrame_Avg[i];
}
else if (ImageData[i] > dealInfo->ExpectedMax)
else
{
ImageData[i] = dealInfo->ExpectedMax;
dealInfo->TPC_Gain[i] = 1;
dealInfo->TPC_Offset[i] = 0;
}
}
return JZ_ERROR_SYSTEM_MODULE_CODE_SUCCESS; // 返回校正后的灰度数组
}
}
return JZ_ERROR_SYSTEM_MODULE_CODE_SUCCESS;
}
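为便于核对上面的系数计算,这里补一个单像素的数值算例(草案),数值均为假设:
//设全图平均响应 AvgHighT = 8000、AvgLowT = 4000,某像元的高/低温标定响应为 H_i = 9000、L_i = 3000:
//  TPC_Gain_i   = (8000 - 4000) / (9000 - 3000) ≈ 0.667
//  TPC_Offset_i = 4000 - 0.667 * 3000           ≈ 2000
//校正时 corrected = TPC_Gain_i * raw + TPC_Offset_i:
//  raw = 9000 -> 约 8000(拉回全图高温平均),raw = 3000 -> 约 4000(拉回全图低温平均)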
/*计算直方图均衡化并转换为8位灰度值
... ... @@ -780,19 +602,32 @@ T_JZsdkReturnCode JZIrcLib_DataDeal(U16_t* U16_data, unsigned int U16_dataSize,
//如果打开了单点纠正模式
if (dealInfo->FrameCorrectMode == IRC_CORRCTION_SPC)
{
JZIrcLib_SPC(u16_CorrentData, dealInfo);
//计算单点校正的斜率
JZIrcLib_SPC_Slope_Calculation(dealInfo);
//通过斜率进行单点校正
ret = JZIrcLib_SPC(u16_CorrentData, dealInfo);
if (ret != JZ_ERROR_SYSTEM_MODULE_CODE_SUCCESS)
{
JZSDK_LOG_ERROR("两点校正失败");
}
}
// 如果打开了两点校正
if (dealInfo->FrameCorrectMode == IRC_CORRCTION_TPC)
{
JZIrcLib_TPC(u16_CorrentData, dealInfo);
//对数据进行两点校正
ret = JZIrcLib_TPC(u16_CorrentData, dealInfo);
if (ret != JZ_ERROR_SYSTEM_MODULE_CODE_SUCCESS)
{
JZSDK_LOG_ERROR("两点校正失败");
}
}
U8_t *GrayImage = NULL;
int GrayImageLen = 0;
//直方图
//直方图均衡化
JZIrcLib_vKT(u16_CorrentData, &GrayImage, &GrayImageLen, dealInfo);
//图像输出模式
... ...
... ... @@ -16,6 +16,7 @@ extern "C" {
T_JZsdkReturnCode JZIrcLib_DataDeal(U16_t* U16_data, unsigned int U16_dataSize,
U8_t **RGB_data, unsigned int *RGB_dataSize,
struct IRC_param *dealInfo);
T_JZsdkReturnCode JZIrcLib_TPC_Slope_Calculation2(struct IRC_param *dealInfo);
#ifdef __cplusplus
... ...
... ... @@ -116,6 +116,7 @@ T_JZsdkReturnCode PseudoColor_Gray2Rgb(U8_t *in_str, U8_t **out_str, unsigned in
{
if (P_Color_FinishFlag != JZ_FLAGCODE_ON)
{
JZSDK_LOG_ERROR("伪彩配置文件未初始化");
return JZ_ERROR_SYSTEM_MODULE_CODE_FAILURE;
}
... ... @@ -123,6 +124,7 @@ T_JZsdkReturnCode PseudoColor_Gray2Rgb(U8_t *in_str, U8_t **out_str, unsigned in
*out_str = (unsigned char*)malloc(*out_str_len * sizeof(unsigned char));
if (*out_str == NULL)
{
JZSDK_LOG_ERROR("内存分配失败");
return JZ_ERROR_SYSTEM_MODULE_CODE_INVALID_PARAMETER;
}
... ...
... ... @@ -36,7 +36,6 @@ extern "C" {
#define SATURATION (128) //饱和度 摄像头饱和度范围为 1-255 默认128
#define EXPSURE (100) //曝光值 摄像头曝光值范围: 5-2500 默认auto设置
#define SECOND_WIDTH 1920
#define SECOND_HEIGHT 1080
#define SECOND_FRAME_NUM 30
... ...
... ... @@ -12,7 +12,6 @@
#include "./MultProc/MultProc.h"
#include "./MediaParm.h"
#include "./Camera/Camera.h"
#include "./Camera/Kt_Irc/Kt_Irc.h"
//单线程方案
static T_JZsdkReturnCode MediaProc_SingleThreading()
... ... @@ -48,41 +47,6 @@ static T_JZsdkReturnCode MediaProc_MultiThreading()
VideoMgmt_VideoStreamFlowIndex(VIDEOMGMT_STREAMING_FLOW_INDEX_FIRST); //默认推送光学摄像头
}
//如果是昆腾相机 红外+光学 则红外为1号,光学为2号
if (DEVICE_VERSION == JZ_C1)
{
//1、启动视频流缓冲区模块
VideoMgmt_init_buffer(&VideoMgmt_FirstVideo_index);
VideoMgmt_init_buffer(&VideoMgmt_FirstRaw_index);
VideoMgmt_init_buffer(&VideoMgmt_SecondVideo_index);
VideoMgmt_init_buffer(&VideoMgmt_SecondRaw_index);
//2、初始化编解码处理模块
JZsdk_Kt_IrcMMP_Init(FIRST_WIDTH ,FIRST_HEIGHT, 25, 5 ,SECOND_WIDTH ,SECOND_HEIGHT, 30, 15);
//3、相机初始化
JZsdk_Kt_Irc_Camera_Init();
//4、启用推流模块
VideoMgmt_VideoStreamFlow_Init(25, &VideoMgmt_FirstVideo_index, VIDEOMGMT_STREAMING_FLOW_INDEX_FIRST);
VideoMgmt_VideoStreamFlow_Init(30, &VideoMgmt_SecondVideo_index, VIDEOMGMT_STREAMING_FLOW_INDEX_SECOND);
//转码模块
VideoStreamTransCode_Init();
//5、打开默认选项
VideoMgmt_VideoStreamFlowIndex(VIDEOMGMT_STREAMING_FLOW_INDEX_FIRST); //默认推送红外摄像头 后续改成 红外+光学 的组合画面
JZsdk_Kt_Irc_ShutterSwitch(JZ_FLAGCODE_ON);
//6、修改部分参数
int value = 8;
Camera_param(JZ_FLAGCODE_SET, CAMERA_PSEUDO_COLOR, &value);
value = 1;
Camera_param(JZ_FLAGCODE_SET, CAMERA_PIXEL_PSEUDO_COLOR_MODE, &value);
}
}
//视频流模块初始化
... ... @@ -91,8 +55,6 @@ T_JZsdkReturnCode MediaProc_Init()
#if DEVICE_VERSION == JZ_H150S || DEVICE_VERSION == JZ_H150T
MediaProc_SingleThreading();
# elif DEVICE_VERSION == JZ_C1
MediaProc_MultiThreading();
#endif
return JZ_ERROR_SYSTEM_MODULE_CODE_SUCCESS;
... ...
... ... @@ -26,13 +26,13 @@ extern "C" {
typedef enum CameraParam
{
CAMERA_PSEUDO_COLOR = 0x000001,
CAMERA_PIXEL_PSEUDO_COLOR_MODE = 0x000002,
CAMERA_REGION_BOX = 0x000003,
CAMERA_CORRCTION_MODE = 0x000004,
CAMERA_GAS_ENHANCEMENT_COLOR = 0x000005,
CAMERA_GAS_ENHANCEMENT_CORRCTION = 0x000006,
CAMERA_PSEUDO_COLOR = 0x000001, //伪彩色
CAMERA_PIXEL_PSEUDO_COLOR_MODE = 0x000002, //像素输出模式
CAMERA_REGION_BOX = 0x000003, //区域框
CAMERA_CORRCTION_MODE = 0x000004, //纠正模式
CAMERA_GAS_ENHANCEMENT_COLOR = 0x000005, //气体增强颜色
CAMERA_GAS_ENHANCEMENT_CORRCTION = 0x000006, //气体增强纠正
CAMERA_RESIZE= 0x000007, //相机缩放图像
}CameraParam;
typedef enum IRC_CorrectionMode
... ...
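上面的枚举新增了 CAMERA_RESIZE。下面给出一个下发变焦值的调用示意(草案):写入方向沿用工程内 Camera_param(JZ_FLAGCODE_SET, ...) 的既有写法,Demo_SetCameraResize 为示例自拟函数,读取方向使用对应的读取标志,此处从略。
static void Demo_SetCameraResize(void)
{
    int value = 500;                                       //期望变焦值,范围 0~1000
    Camera_param(JZ_FLAGCODE_SET, CAMERA_RESIZE, &value);  //内部转交 Cam_Zoom_SetTempValue
}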
... ... @@ -10,7 +10,7 @@
#include "JZsdkLib.h"
// 将RGB888格式转换为YUV420P格
// 将RGB888格式转换为YUV420P格式(I420)
T_JZsdkReturnCode Stream_rgb888_to_yuv420p(U8_t *rgb_data, int width, int height, U8_t *yuv_data)
{
// YUV420P格式的大小:Y平面后面跟着U和V平面,它们的高度和宽度都是原图像的一半
... ... @@ -55,6 +55,60 @@ T_JZsdkReturnCode Stream_rgb888_to_yuv420p(U8_t *rgb_data, int width, int height
return JZ_ERROR_SYSTEM_MODULE_CODE_SUCCESS;
}
//rgb888 to yuv420sp  MPP_FMT_YUV420SP = (MPP_FRAME_FMT_YUV + 0), /* YYYY... UV... (NV12) */
T_JZsdkReturnCode Stream_rgb888_to_yuv420sp(U8_t *rgb_data, int width, int height, U8_t *yuv_data)
{
// Size of the YUV420SP (NV12) format: the Y plane is followed by a single interleaved UV plane
int y_size = width * height;
int uv_size = (width / 2) * (height / 2) * 2; // the UV plane is stored interleaved, so it is twice the size of a single U or V plane
// Planes of the YUV420SP layout
U8_t *y_plane = yuv_data;
U8_t *uv_plane = yuv_data + y_size;
// RGB888 to YUV420SP conversion
for (int j = 0; j < height; j++) {
for (int i = 0; i < width; i++) {
// Index of the Y component
int y_idx = j * width + i;
// Index of the UV components (stored once per 2x2 block of pixels)
int uv_idx = (j / 2) * (width / 2) + (i / 2);
// Extract the RGB components
U8_t r = rgb_data[y_idx * 3];
U8_t g = rgb_data[y_idx * 3 + 1];
U8_t b = rgb_data[y_idx * 3 + 2];
// Convert RGB to YUV
int y = ((66 * r + 129 * g + 25 * b + 128) >> 8);
int u = ((-38 * r - 74 * g + 112 * b + 128) >> 8) + 128;
int v = ((112 * r - 94 * g - 18 * b + 128) >> 8) + 128;
// Store the Y component
y_plane[y_idx] = (U8_t)y;
// Store the U and V components only for every second pixel in both directions
if ((i & 1) == 0 && (j & 1) == 0)
{
// Store the U component
uv_plane[uv_idx * 2] = (U8_t)u;
// Store the V component
uv_plane[uv_idx * 2 + 1] = (U8_t)v;
}
}
}
// Note: if the image width or height is not even, the code above may not handle the last row or column correctly.
// In practice, make sure the image dimensions are multiples of 2, or add proper boundary handling.
return JZ_ERROR_SYSTEM_MODULE_CODE_SUCCESS;
}
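/*
Caller sketch for the NV12 path (illustrative; buffer names are assumptions): NV12 uses the same
width*height*3/2 bytes as I420, only the chroma layout differs (one interleaved UV plane), and the
dimensions are assumed to be even as noted above. This matches an encoder opened with MPP_FMT_YUV420SP.

    U8_t *nv12_data = (U8_t *)malloc(width * height * 3 / 2);
    if (nv12_data != NULL) {
        Stream_rgb888_to_yuv420sp(rgb_data, width, height, nv12_data);   //rgb_data is width*height*3 bytes of RGB888
    }
*/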
/*************************
*
* rgb888 image: draw a rectangle
... ...
... ... @@ -26,6 +26,7 @@ extern "C" {
/* Exported functions --------------------------------------------------------*/
T_JZsdkReturnCode Stream_rgb888_to_yuv420p(U8_t *rgb_data, int width, int height, U8_t *yuv_data);
T_JZsdkReturnCode Stream_rgb888_to_yuv420sp(U8_t *rgb_data, int width, int height, U8_t *yuv_data);
T_JZsdkReturnCode Stream_rgb888_WriteRectangle(U8_t *rgb_data, int width, int height, int Point1_X, int Point1_Y, int Point2_X, int Point2_Y, int R_Color,int G_Color, int B_Color, int DrawWidth);
T_JZsdkReturnCode Stream_rgb888_WriteCross(U8_t *rgb_data, int width, int height, int PointX, int PointY, int R_Color,int G_Color, int B_Color, int DrawWidth, int DrawHeight);
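/*
Usage sketch for the drawing helpers above (values are illustrative assumptions):
draw a 2-pixel-wide red rectangle from (100,100) to (300,200) on an RGB888 frame.

    Stream_rgb888_WriteRectangle(rgb_data, width, height,
                                 100, 100, 300, 200,    //corner points
                                 255, 0, 0,             //R, G, B color
                                 2);                    //line width in pixels
*/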
... ...
... ... @@ -7,7 +7,7 @@
#include "JZsdkLib.h"
#include "./RTK_mmp_dec.h"
#if RTK_MPP_STATUS == VERSION_SWITCH_ON
#ifdef RTK_MPP_STATUS_ON
#include "rockchip/mpp_common.h" //这个.h能在mpp的源码中找到
#include "rockchip/mpp_packet.h"
#include "rockchip/rk_mpi.h"
... ... @@ -93,39 +93,39 @@ void dump_frame(MppFrame frame, FILE *out_fp)
}
void dump_frame_to_file(MppCtx ctx, MppApi *mpi, MppFrame frame, FILE *out_fp)
{
printf("decode_and_dump_to_file\n");
MPP_RET ret;
if (mpp_frame_get_info_change(frame)) {
printf("mpp_frame_get_info_change\n");
/**
* The first decode hits this branch; a frame buffer must be configured for the decoder.
* The decoder buffer supports 3 modes; see "image memory allocation and interaction modes" in Rockchip_Developer_Guide_MPP_CN.pdf.
* Pure internal mode is used here.
*/
ret = mpi->control(ctx, MPP_DEC_SET_INFO_CHANGE_READY, NULL);
if (ret) {
printf("mpp_frame_get_info_change mpi->control error"
"MPP_DEC_SET_INFO_CHANGE_READY %d\n", ret);
}
return;
}
RK_U32 err_info = mpp_frame_get_errinfo(frame);
RK_U32 discard = mpp_frame_get_discard(frame);
printf("err_info: %u discard: %u\n", err_info, discard);
if (err_info) {
return;
}
// void dump_frame_to_file(MppCtx ctx, MppApi *mpi, MppFrame frame, FILE *out_fp)
// {
// printf("decode_and_dump_to_file\n");
// MPP_RET ret;
// if (mpp_frame_get_info_change(frame)) {
// printf("mpp_frame_get_info_change\n");
// /**
// * The first decode hits this branch; a frame buffer must be configured for the decoder.
// * The decoder buffer supports 3 modes; see "image memory allocation and interaction modes" in Rockchip_Developer_Guide_MPP_CN.pdf.
// * Pure internal mode is used here.
// */
// ret = mpi->control(ctx, MPP_DEC_SET_INFO_CHANGE_READY, NULL);
// if (ret) {
// printf("mpp_frame_get_info_change mpi->control error"
// "MPP_DEC_SET_INFO_CHANGE_READY %d\n", ret);
// }
// return;
// }
// RK_U32 err_info = mpp_frame_get_errinfo(frame);
// RK_U32 discard = mpp_frame_get_discard(frame);
// printf("err_info: %u discard: %u\n", err_info, discard);
// if (err_info) {
// return;
// }
// save
dump_frame(frame, out_fp);
return;
}
// // save
// dump_frame(frame, out_fp);
// return;
// }
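// Condensed sketch of the per-frame handling that the commented-out dump path performed
// (kept for reference; the function name is illustrative, the calls are the ones used above):
// static void handle_decoded_frame(MppCtx ctx, MppApi *mpi, MppFrame frame)
// {
//     if (mpp_frame_get_info_change(frame)) {
//         //first frame after opening the decoder: acknowledge the resolution/format change
//         mpi->control(ctx, MPP_DEC_SET_INFO_CHANGE_READY, NULL);
//         return;
//     }
//     if (mpp_frame_get_errinfo(frame) || mpp_frame_get_discard(frame)) {
//         return;    //skip broken or discarded frames
//     }
//     //...consume the frame, e.g. dump_frame(frame, out_fp)...
// }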
//rtk decoder initialization
... ... @@ -298,8 +298,10 @@ T_JZsdkReturnCode RTK_mmp_dec_Init(void **index, MppCodingType int_type, MppFram
}
//Output frame format of the decoder
param = &out_format;
ret = DecConfigInput->mpi->control(DecConfigInput->ctx, MPP_DEC_SET_OUTPUT_FORMAT, param);
//param = &out_format;
//Not sure why only 420sp can be set here and 420p cannot
MppFrameFormat format = out_format;
ret = DecConfigInput->mpi->control(DecConfigInput->ctx, MPP_DEC_SET_OUTPUT_FORMAT, &format);
if (ret == MPP_OK)
{
printf("Output format set successfully\n");
... ...
... ... @@ -3,7 +3,7 @@
#include "version_choose.h"
#if RTK_MPP_STATUS == VERSION_SWITCH_ON
#ifdef RTK_MPP_STATUS_ON
#include "rockchip/rk_type.h"
#include "rockchip/mpp_frame.h"
... ...
... ... @@ -5,7 +5,7 @@
#include "JZsdkLib.h"
#include "./RTK_mmp_enc.h"
#if RTK_MPP_STATUS == VERSION_SWITCH_ON
#ifdef RTK_MPP_STATUS_ON
#include "rockchip/mpp_packet.h"
#include "rockchip/rk_mpi.h"
#include "rockchip/mpp_env.h" //这个.h能在mpp的源码中找到
... ...
... ... @@ -3,7 +3,7 @@
#include "version_choose.h"
#if RTK_MPP_STATUS == VERSION_SWITCH_ON
#ifdef RTK_MPP_STATUS_ON
#include "rockchip/rk_type.h"
#include "rockchip/mpp_frame.h"
... ...
... ... @@ -7,88 +7,69 @@
#include "version_choose.h"
#include "MediaProc/VideoMgmt/VideoMgmt.h"
void *Kt_Irc_enc_index = NULL; //handle index of the Kt infrared encoder
void *Kt_Cam_enc_index = NULL; //handle index of the Kt optical encoder
void *Kt_Cam_dec_index = NULL; //handle index of the Kt optical decoder
//c1: index 0 is the infrared handle, 1 is the optical handle, 2 is a spare (empty) handle
static void *RtkMmpEncHandle[3] = { NULL }; // all elements are initialized to NULL
static void *RtkMmpDecHandle[3] = { NULL }; // all elements are initialized to NULL
//mmp initialization section for the Kt infrared camera
T_JZsdkReturnCode JZsdk_Kt_IrcMMP_Init(int Irc_width, int Irc_height, int Irc_frame, int Irc_gop, int Cam_width, int Cam_height, int Cam_frame,int Cam_gop)
{
#if RTK_MPP_STATUS == VERSION_SWITCH_ON
//Initialize the encoder for the infrared data
RTK_mmp_enc_Init(&Kt_Irc_enc_index, MPP_VIDEO_CodingAVC, MPP_FMT_YUV420P, Irc_width, Irc_height, Irc_frame, Irc_gop);
//RTK_mmp_enc_Init(&Kt_Irc_enc_index, MPP_VIDEO_CodingAVC, MPP_FMT_RGB888, Irc_width, Irc_height, Irc_frame);
/*
Get the rtk module's encoder handle
(returns the handle that belongs to the given camera index parameter)
//Initialize the codec for the optical data
RTK_mmp_dec_Init(&Kt_Cam_dec_index, MPP_VIDEO_CodingMJPEG, MPP_FMT_YUV420SP, Cam_width, Cam_height);
RTK_mmp_enc_Init(&Kt_Cam_enc_index, MPP_VIDEO_CodingAVC, MPP_FMT_YUV420SP, Cam_width, Cam_height, Cam_frame, Cam_gop);
#endif
*/
void *JZsdk_RtkMmpGetEncHandle(int CameraIndex)
{
return RtkMmpEncHandle[CameraIndex];
}
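/*
Usage sketch for the handle getters (index convention from the comment above: 0 = infrared,
1 = optical, 2 = spare; variable names are illustrative):

    void *irc_enc = JZsdk_RtkMmpGetEncHandle(0);    //encoder handle of the infrared camera
    void *cam_enc = JZsdk_RtkMmpGetEncHandle(1);    //encoder handle of the optical camera
    if (irc_enc == NULL) {
        //the encoder for this index has not been initialized yet
    }
*/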
//Raw video stream converted to h264 via rtkmmp
T_JZsdkReturnCode JZsdk_RTKMMP_RawData_to_h264(unsigned char *RawData, int data_len)
{
#if RTK_MPP_STATUS == VERSION_SWITCH_ON
MppPacket Packet = NULL;
RTK_mmp_enc_data_to_h264(&Kt_Irc_enc_index, RawData, data_len, &Packet);
int packet_len = mpp_packet_get_length(Packet);
void *ptr = mpp_packet_get_pos(Packet);
//EncCfg->Packet_eos = mpp_packet_get_eos(packet);
// printf("获取到编码内容 len:%d\n",packet_len);
//3、将h264流输出到视频流缓冲区
VideoMgmt_write_data(&VideoMgmt_FirstVideo_index, ptr, packet_len);
/*
rtk模块获取解码权柄
属于参数即可权柄
//释放掉packet
mpp_packet_deinit(&Packet);
#endif
*/
void *JZsdk_RtkMmpGetDecHandle(int CameraIndex)
{
return RtkMmpDecHandle[CameraIndex];
}
//Data input section of the Kt optical camera
T_JZsdkReturnCode JZsdk_Kt_CamMMP_Mjpeg_to_h264(unsigned char *data, int data_len)
/*********
*
* Return the address of the dec handle
*
*
* ***/
void **JZsdk_RtkMmpGetDecHandleAddr(int CameraIndex)
{
#if RTK_MPP_STATUS == VERSION_SWITCH_ON
MppFrame yuv_data = NULL; //used to pass the address of the yuv data
MppPacket Packet = NULL;
//Feed the input data into the decoder
RTK_mmp_dec_input(&Kt_Cam_dec_index, data, data_len, &yuv_data);
// int width = mpp_frame_get_width(yuv_data);
// int height = mpp_frame_get_height(yuv_data);
// int h_stride = mpp_frame_get_hor_stride(yuv_data);
// int v_stride = mpp_frame_get_ver_stride(yuv_data);
// JZSDK_LOG_INFO("w:%d h:%d hor:%d ver:%d",width,height,h_stride,v_stride);
//Feed the returned data into the encoder
RTK_mmp_enc_yuv_to_h264_byFrame(&Kt_Cam_enc_index, yuv_data, &Packet);
//Get the data pointer and length
int packet_len = mpp_packet_get_length(Packet);
void *ptr = mpp_packet_get_pos(Packet);
//Put into the video buffer
VideoMgmt_write_data(&VideoMgmt_SecondVideo_index, (unsigned char *)ptr, (unsigned int)packet_len);
return &(RtkMmpDecHandle[CameraIndex]);
}
//Release the encoded packet
mpp_packet_deinit(&Packet);
#endif
/*********
*
* Return the address of the enc handle
*
*
* ***/
void **JZsdk_RtkMmpGetEncHandleAddr(int CameraIndex)
{
return &(RtkMmpEncHandle[CameraIndex]);
}
//Set the next frame of the Kt camera to an IDR frame
T_JZsdkReturnCode JZsdk_Kt_CamMMPenc_SetNextFrame_IDR(int CameraIndex)
{
#if RTK_MPP_STATUS == VERSION_SWITCH_ON
#ifdef RTK_MPP_STATUS_ON
if (CameraIndex == 0)
{
RTK_mmp_enc_SetNextFrame_IDR(&Kt_Irc_enc_index);
RTK_mmp_enc_SetNextFrame_IDR(JZsdk_RtkMmpGetEncHandleAddr(0));
}
else if(CameraIndex == 1)
{
RTK_mmp_enc_SetNextFrame_IDR(&Kt_Cam_enc_index);
RTK_mmp_enc_SetNextFrame_IDR(JZsdk_RtkMmpGetEncHandleAddr(1));
}
else if(CameraIndex == 2)
{
RTK_mmp_enc_SetNextFrame_IDR(JZsdk_RtkMmpGetEncHandleAddr(2));
}
else
{
... ...
... ... @@ -12,6 +12,15 @@
/* Includes ------------------------------------------------------------------*/
#include "JZsdk_Base/JZsdk_Code/JZsdk_Code.h"
#include "BaseConfig.h"
#ifdef RTK_MPP_STATUS_ON
#include "MediaProc/MultProc/RTK_mmp/Dec/RTK_mmp_dec.h"
#include "MediaProc/MultProc/RTK_mmp/Enc/RTK_mmp_enc.h"
#endif
#ifdef __cplusplus
extern "C" {
... ... @@ -26,9 +35,17 @@ extern "C" {
/* Exported functions --------------------------------------------------------*/
T_JZsdkReturnCode JZsdk_Kt_CamMMP_Mjpeg_to_h264(unsigned char *data, int data_len);
T_JZsdkReturnCode JZsdk_RTKMMP_RawData_to_h264(unsigned char *RawData, int data_len);
T_JZsdkReturnCode JZsdk_RTKMMP_RawData_to_h264_Return(void **index, unsigned char *RawData, int *data_len);
T_JZsdkReturnCode JZsdk_Kt_IrcMMP_Init(int Irc_width, int Irc_height, int Irc_frame, int Irc_gop, int Cam_width, int Cam_height, int Cam_frame,int Cam_gop);
T_JZsdkReturnCode JZsdk_Kt_CamMMPenc_SetNextFrame_IDR(int CameraIndex);
void *JZsdk_RtkMmpGetEncHandle(int CameraIndex);
void *JZsdk_RtkMmpGetDecHandle(int CameraIndex);
void **JZsdk_RtkMmpGetDecHandleAddr(int CameraIndex);
void **JZsdk_RtkMmpGetEncHandleAddr(int CameraIndex);
#ifdef __cplusplus
... ...