作者 ookk303


正在显示 45 个修改的文件,包含 1341 行增加、420 行删除
... ... @@ -64,7 +64,15 @@
"rtk_mmp_enc.h": "c",
"videostreamtranscode.h": "c",
"time.h": "c",
"cam_framecatch.h": "c"
"cam_framecatch.h": "c",
"audiostreamdeal.h": "c",
"ff_statement.h": "c",
"configparams.h": "c",
"im2d.hpp": "c",
"rk_rga.h": "c",
"rgautils.h": "c",
"im2d_type.h": "c",
"im2d.h": "c"
},
"Codegeex.GenerationPreference": "automatic",
"C_Cpp.dimInactiveRegions": false
... ...
... ... @@ -7,7 +7,8 @@ set(IFLAY_TTS_MODULE VERSION_SWITCH_OFF)
set(ESPEAK_TTS_MODULE VERSION_SWITCH_OFF)
set(ALSALIB_MODULE VERSION_SWITCH_OFF)
set(OPUS_MODULE VERSION_SWITCH_OFF)
set(LIB_RTK_MMP VERSION_SWITCH_OFF)
set(LIB_RK_MMP VERSION_SWITCH_OFF)
set(LIB_RK_RGB VERSION_SWITCH_OFF)
set(LIB_USB VERSION_SWITCH_OFF)
set(LIB_KT_IRC VERSION_SWITCH_OFF)
... ... @@ -93,8 +94,11 @@ if(${MEDIA_PROC_MODULE} STREQUAL "VERSION_SWITCH_ON")
message("加载红外相机模块")
add_definitions(-DMACRO_IRC_MODULE)
message("加载RTK模块")
set(LIB_RTK_MMP VERSION_SWITCH_ON)
message("加载RK MMP模块")
set(LIB_RK_MMP VERSION_SWITCH_ON)
message("加载RK RGA模块")
set(LIB_RK_RGB VERSION_SWITCH_ON)
message("加载USB模块")
set(LIB_USB VERSION_SWITCH_ON)
... ...
... ... @@ -130,16 +130,28 @@ if(${FFMPEG_MODULE} STREQUAL "VERSION_SWITCH_ON")
endif()
#rtk_mmp库
if(${LIB_RTK_MMP} STREQUAL "VERSION_SWITCH_ON")
message("RTK_MMP库已加载\n")
add_definitions(-DMACRO_RTK_MPP_MODULE) #加载usb模块
#RK_mmp库
if(${LIB_RK_MMP} STREQUAL "VERSION_SWITCH_ON")
message("RK_MMP库已加载\n")
add_definitions(-DMACRO_RK_MPP_MODULE) #加载模块
include_directories(${ROOT_DIRS}/ThirdParty/RTK_mmp/aarch64-none-linux-gnu/include)
include_directories(${ROOT_DIRS}/ThirdParty/RK_mmp/aarch64-none-linux-gnu/include)
target_link_libraries(
${PROJECT_NAME}
${ROOT_DIRS}/ThirdParty/RTK_mmp/aarch64-none-linux-gnu/lib/librockchip_mpp.so.0
${ROOT_DIRS}/ThirdParty/RTK_mmp/aarch64-none-linux-gnu/lib/librockchip_vpu.so.0
${ROOT_DIRS}/ThirdParty/RK_mmp/aarch64-none-linux-gnu/lib/librockchip_mpp.so.0
${ROOT_DIRS}/ThirdParty/RK_mmp/aarch64-none-linux-gnu/lib/librockchip_vpu.so.0
)
endif()
#RK_rgb库
if(${LIB_RK_RGB} STREQUAL "VERSION_SWITCH_ON")
message("RK_RGB库已加载\n")
add_definitions(-DMACRO_RK_RGA_MODULE) #加载模块
include_directories(${ROOT_DIRS}/ThirdParty/RK_rga/aarch64-none-linux-gnu/include)
target_link_libraries(
${PROJECT_NAME}
${ROOT_DIRS}/ThirdParty/RK_rga/aarch64-none-linux-gnu/lib/librga.so
)
endif()
... ...
... ... @@ -32,7 +32,7 @@ typedef enum JZsdk_Widget_Control
JZSDK_WIDGET_SHUTTER_SWITCH = 0x0023, //快门开关
JZSDK_WIDGET_FREEZE_VIDEO = 0x0024, //冻结视频流
JZSDK_WIDGET_PSEUDO_MODE = 0x0025, //色彩输出模式
JZSDK_WIDGET_FOCAL_LENGTH = 0x0026, //焦距
JZSDK_WIDGET_ZOOM_SIZE = 0x0026, //变焦倍数
JZSDK_WIDGET_SPC_RESET = 0x0027, //spc重置
JZSDK_WIDGET_PSEUDO_COLOR = 0x0028, //伪彩颜色
JZSDK_WIDGET_CAMERA_CORRECT_MODE = 0x0029, //相机纠正模式
... ...
... ... @@ -205,6 +205,7 @@ T_JZsdkReturnCode JZsdk_LoggerInit()
char logMessage[256];
snprintf(logMessage,256,"LOG_FILE:%s\n",LogFileName);
fprintf(logFile, "%s", logMessage);
fflush(logFile);
//fclose(logFile);
... ... @@ -248,7 +249,7 @@ T_JZsdkReturnCode writeToLogFile(const char *data)
}
fprintf(logFile, "%s", data); // 写入新的数据
fflush(logFile);
//fclose(logFile);
// 解锁
... ... @@ -279,6 +280,7 @@ T_JZsdkReturnCode HexToLogFile(const char *data)
}
fprintf(logFile, "%x ", data[0]); // 写入新的数据
fflush(logFile);
//fclose(logFile);
... ...
... ... @@ -48,6 +48,8 @@ extern "C" {
#define JZ_MATH_SWAP(a, b) do { typeof(a) temp = (a); (a) = (b); (b) = temp; } while (0) //交换两个变量的值(这个宏使用了一个临时变量,但它仍然是通过宏来完成的):
#define JZ_ALIGN(x, a) (((x)+(a)-1)&~((a)-1)) //用于将 x 的值对齐到最近的 a 的倍数。这里的对齐是通过加上 a-1 然后使用位运算 &~((a)-1) 来实现的。
#ifdef __cplusplus
}
#endif
... ...
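下面补充一个仅作示意的最小用例(非仓库代码,宏定义照抄自上文),演示 JZ_ALIGN 与 JZ_MATH_SWAP 的效果:

#include <stdio.h>

#define JZ_MATH_SWAP(a, b) do { typeof(a) temp = (a); (a) = (b); (b) = temp; } while (0)
#define JZ_ALIGN(x, a) (((x)+(a)-1)&~((a)-1))

int main(void)
{
    int w = 1920, h = 1080;
    //1080 向上对齐到 16 的倍数得到 1088,1920 本身已对齐
    printf("JZ_ALIGN(%d,16)=%d, JZ_ALIGN(%d,16)=%d\n", w, JZ_ALIGN(w, 16), h, JZ_ALIGN(h, 16));
    JZ_MATH_SWAP(w, h); //交换后 w=1080, h=1920
    printf("w=%d h=%d\n", w, h);
    return 0;
}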
... ... @@ -19,7 +19,7 @@
#define MAJOR_VERSION 0x01
#define MINOR_VERSION 0x03
#define MODIFY_VERSION 0x09
#define DEBUG_VERSION 0x05
#define DEBUG_VERSION 0x07
//禁止修改行 滤波方式
#define FILTERING_TYPE HIGH_PASS_FILTERING
... ... @@ -74,14 +74,14 @@
#endif
//是否加载RTK_MPP模块
#ifdef MACRO_RTK_MPP_MODULE
#define RTK_MPP_STATUS VERSION_SWITCH_ON
#ifdef MACRO_RK_MPP_MODULE
#define RTK_MPP_STATUS_ON
#else
#define RTK_MPP_STATUS VERSION_SWITCH_OFF
#endif
//是否加载RTK_RGA模块
#ifdef MACRO_RK_RGA_MODULE
#define RTK_RGA_STATUS_ON
#endif
//是否加载WIRINGPI模块
#ifdef MACRO_WIRINGPI_MODULE
... ... @@ -113,9 +113,11 @@
#define USB_CONFIG_STATUS VERSION_SWITCH_ON
//是否加载RTK_MPP模块
#define RTK_MPP_STATUS VERSION_SWITCH_ON
#define RTK_MPP_STATUS_ON
//是否加载RGA模块
#define RTK_RGA_STATUS_ON
//是否加载WIRINGPI模块
#define WIRINGPI_STATUS_ON
... ...
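下面补充一个仅作示意的对照片段(非仓库代码),说明这次从带取值的开关宏改为 #ifdef 判断的差别:

/* 旧写法:要求 RTK_MPP_STATUS 被赋值;若宏未定义,#if 中会按 0 参与比较,容易误判 */
#if RTK_MPP_STATUS == VERSION_SWITCH_ON
/* ... MPP 相关代码 ... */
#endif

/* 新写法:只判断宏是否被定义,未定义时该分支整体被剔除 */
#ifdef RTK_MPP_STATUS_ON
/* ... MPP 相关代码 ... */
#endif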
... ... @@ -401,7 +401,7 @@ static int RecvDeal_RealTimeMP2_Limit_transmission(int Port, char *getbuf, int l
T_JZsdkReturnCode ret;
//提取出数据
int DataLen = ((int)getbuf[3] << 8 ) + (int)getbuf[4] - 2 - 9;
int DataLen = (((int)getbuf[3] << 8 ) + (int)getbuf[4]) - 9 - 2;
if (DataLen > 128)
{
JZSDK_LOG_ERROR("传输得到的mp2实时数据帧长度超出128上限");
... ... @@ -1065,6 +1065,8 @@ static T_JZsdkReturnCode RecvDeal_CheckStatus_AudioDetailMessage(int Port, char
**********/
static int RecvDeal_Amplifier_stop(int Port, char *getbuf)
{
T_JZsdkReturnCode ret;
JZSDK_LOG_INFO("%s,强制关闭功放",RecvDeal_GetPortName(Port));
//获取帧的序列号
... ... @@ -1073,7 +1075,15 @@ static int RecvDeal_Amplifier_stop(int Port, char *getbuf)
int status = JZ_FLAGCODE_OFF;
#if MEGAPHONE_CONFIG_STATUS == VERSION_SWITCH_ON
return Megaphone_Amplifier_param(JZ_FLAGCODE_SET, &status);
//先关闭播放
ret = UIcontrol_StopPlayAudio(NO_SPECIFIED);
if (ret == JZ_ERRORCODE_REALTIMEVOICE_HAS_BEEN_ON) //如果喊话器正处于实时播放,禁止关闭功放操作
{
JZsdk_Uart_SendDeal_Reply_Failure(Port, FrameSequence);
return JZ_ERROR_SYSTEM_MODULE_CODE_FAILURE;
}
Megaphone_Amplifier_param(JZ_FLAGCODE_SET, &status);
#endif
//回复操作成功
... ... @@ -1088,6 +1098,8 @@ static int RecvDeal_Amplifier_stop(int Port, char *getbuf)
**********/
static int RecvDeal_Amplifier_open(int Port, char *getbuf)
{
T_JZsdkReturnCode ret;
JZSDK_LOG_INFO("%s,强制开启功放",RecvDeal_GetPortName(Port));
//获取帧的序列号
... ... @@ -1096,7 +1108,15 @@ static int RecvDeal_Amplifier_open(int Port, char *getbuf)
int status = JZ_FLAGCODE_ON;
#if MEGAPHONE_CONFIG_STATUS == VERSION_SWITCH_ON
return Megaphone_Amplifier_param(JZ_FLAGCODE_SET, &status);
//先关闭播放
ret = UIcontrol_StopPlayAudio(NO_SPECIFIED);
if (ret == JZ_ERRORCODE_REALTIMEVOICE_HAS_BEEN_ON) //如果喊话器正处于实时播放,禁止关闭功放操作
{
JZsdk_Uart_SendDeal_Reply_Failure(Port, FrameSequence);
return JZ_ERROR_SYSTEM_MODULE_CODE_FAILURE;
}
Megaphone_Amplifier_param(JZ_FLAGCODE_SET, &status);
#endif
//回复操作成功
... ... @@ -1123,12 +1143,13 @@ static int RecvDeal_Amplifier_auto(int Port)
**********/
static int RecvDeal_SetVolume(int Port, char *getbuf)
{
JZSDK_LOG_INFO("%s,调节音量",RecvDeal_GetPortName(Port));
//获取帧的序列号
int FrameSequence = JZsdk_Get_FrameSequence(getbuf);
int value = (int)getbuf[9];
JZSDK_LOG_INFO("%s,调节音量:%d",RecvDeal_GetPortName(Port),value);
UIcontrol_SetVolume(Port, value);
//回复操作成功
... ... @@ -3384,9 +3405,9 @@ static int RecvDeal_ObtainGimbalLinkage(int Port, char *getbuf)
*
*
**********/
static int RecvDeal_FrameErrorReply(int Port, char *getbuf)
static int RecvDeal_FrameErrorReply(int Port, char *getbuf, int len)
{
JZSDK_LOG_INFO("%s,帧指令错误:帧错误或者是帧无对应操作",RecvDeal_GetPortName(Port));
JZSDK_LOG_INFO("%s,帧指令错误:帧错误或者是帧无对应操作,长度为:%d",RecvDeal_GetPortName(Port),len);
//无法正常获取帧的序列号
//回复操作失败
... ... @@ -4024,13 +4045,13 @@ int RecvDeal_InstructInput(int Port, int Receive_mode, unsigned char *getbuf, in
//帧错误回复操作失败
case JZ_ERROR_SYSTEM_FRAME_ERROR:
RecvDeal_FrameErrorReply(Port,getbuf);
RecvDeal_FrameErrorReply(Port,getbuf,len);
return JZ_ERROR_SYSTEM_FRAME_ERROR;
break;
default:
//无指令,发送操作失败
RecvDeal_FrameErrorReply(Port,getbuf);
RecvDeal_FrameErrorReply(Port,getbuf,len);
return JZ_ERROR_SYSTEM_FRAME_ERROR;
break;
... ...
... ... @@ -399,15 +399,15 @@ int UartDeal_Recv_interface(int type, int Uart_fd , unsigned char *getbuf, int g
{
if (Uart_fd == Uart_DEV1_fd)
{
JZSDK_LOG_INFO("串口-设备1号,接受到数据+未处理数据的长度len: %d", getbufLen);
//JZSDK_LOG_INFO("串口-设备1号,接受到数据+未处理数据的长度len: %d", getbufLen);
}
else if (Uart_fd == Uart_DEV2_fd)
{
JZSDK_LOG_INFO("串口-设备2号,接受到数据+未处理数据的长度len: %d", getbufLen);
//JZSDK_LOG_INFO("串口-设备2号,接受到数据+未处理数据的长度len: %d", getbufLen);
}
else if (Uart_fd == Uart_4G_fd)
{
JZSDK_LOG_INFO("串口-设备4G,接受到数据+未处理数据的长度len: %d", getbufLen);
//JZSDK_LOG_INFO("串口-设备4G,接受到数据+未处理数据的长度len: %d", getbufLen);
}
else
{
... ... @@ -419,7 +419,7 @@ int UartDeal_Recv_interface(int type, int Uart_fd , unsigned char *getbuf, int g
{
if (Uart_fd == HAL_DATA_TRANSMISSION)
{
JZSDK_LOG_INFO("hal_data,接受到数据+未处理数据的长度len: %d", getbufLen);
//JZSDK_LOG_INFO("hal_data,接受到数据+未处理数据的长度len: %d", getbufLen);
}
else
{
... ... @@ -549,7 +549,7 @@ int UartDeal_Recv_interface(int type, int Uart_fd , unsigned char *getbuf, int g
HaveReadLen = HaveReadLen + FrameLen;
HaveDealLen = HaveReadLen;
JZSDK_LOG_INFO("framelen%d read%d [read]:%x get%d",FrameLen ,HaveReadLen, getbuf[HaveReadLen], getbufLen);
JZSDK_LOG_INFO("fd:%x f_len%d h_r%d [h_r]:%x get%d", Uart_fd,FrameLen ,HaveReadLen, getbuf[HaveReadLen-1], getbufLen);
FrameLen = 0;
FrameFlag = 0;
... ...
... ... @@ -69,6 +69,9 @@ T_JZsdkReturnCode AudioDeal_Init()
AudioDeakInfo_index->FilterInfo = NULL;
FF_Filter_Init(AudioDeakInfo_index, 0x00);
//初始化mp2音频流
File_Stream_deal_Init(AV_CODEC_ID_MP2);
Audiodeal_status = JZ_FLAGCODE_ON;
JZSDK_LOG_INFO("MODULE_AUDIODEL_INIT_COMPLETE");
... ... @@ -261,6 +264,9 @@ T_JZsdkReturnCode AudioDeal_StopDeal()
//清空alsa里的缓冲区
Alsa_DropPcm(AudioDeakInfo_index);
//清空mp3音频流
Stream_Player_Stop(AudioDeakInfo_index);
while (AudioDeakInfo_index->AudioDeal_Alsa_Finish_Flag != JZ_FLAGCODE_OFF)
{
delayMs(1);
... ...
... ... @@ -31,6 +31,8 @@ int PCM_PooL_Interface_PcmData_WithoutReply(struct AudioDealInfo *AD_Info,unsign
T_JZsdkReturnCode AudioFile_Stream_Interface_PcmData(struct AudioDealInfo *AD_Info, AVFrame *frame);
T_JZsdkReturnCode mp3_Stream_Interface_Mp3Data(struct AudioDealInfo *AD_Info, unsigned int in_sampleRate, unsigned char *data, int dataSize);
T_JZsdkReturnCode File_Stream_deal_Init(enum AVCodecID id);
T_JZsdkReturnCode Stream_Player_Stop(struct AudioDealInfo *AD_Info);
#ifdef __cplusplus
}
... ...
... ... @@ -21,35 +21,62 @@ static const AVCodec *codec;
T_JZsdkReturnCode Stream_Player_decode(struct AudioDealInfo *AD_Info, AVCodecContext *dec_ctx, AVPacket *pkt, AVFrame *frame);
int File_Stream_deal_Init(enum AVCodecID id)
T_JZsdkReturnCode File_Stream_deal_Init(enum AVCodecID id)
{
//注册包
pkt = av_packet_alloc();
if(!pkt)
{
JZSDK_LOG_ERROR("av_packet_alloc failed.");
}
//寻找解码器
codec = avcodec_find_decoder(id);
if (!codec) {
JZSDK_LOG_ERROR("Codec not found\n");
}
//获得裸流的解析器
parser = av_parser_init(codec->id);
if (!parser) {
JZSDK_LOG_ERROR("Parser not found\n");
}
//分配解码上下文
cdc_ctx = avcodec_alloc_context3(codec);
if (!cdc_ctx) {
JZSDK_LOG_ERROR("Could not allocate audio codec context\n");
}
/* open it */
//将解码器和解码上下文绑定
if (avcodec_open2(cdc_ctx, codec, NULL) < 0)
{
JZSDK_LOG_ERROR("Could not open codec\n");
}
JZSDK_LOG_INFO("file stream init complete");
return JZ_ERROR_SYSTEM_MODULE_CODE_SUCCESS;
}
//输入mp3的实时数据,以及本次数据的长度
T_JZsdkReturnCode mp3_Stream_Interface_Mp3Data(struct AudioDealInfo *AD_Info, unsigned int in_sampleRate, unsigned char *data, int dataSize)
{
//JZSDK_LOG_DEBUG("mp3 stream输入 %d 字节数据", dataSize);
//重置重采样器
FF_Resample_Reset(AD_Info, in_sampleRate, (AVChannelLayout)AV_CHANNEL_LAYOUT_MONO, AV_SAMPLE_FMT_S16);
//检查滤波器
FF_Filter_Init(AD_Info, 0x01);
int ret = 0;
unsigned char *databufPtr = data;
int databufSize = dataSize;
//将数据输入到
while(databufSize > 0)
{
//如果解码器不存在,初始化解码器
if (!decoded_frame)
{
... ... @@ -59,14 +86,6 @@ int File_Stream_deal_Init(enum AVCodecID id)
return JZ_ERROR_SYSTEM_MODULE_CODE_FAILURE;
}
}
}
//输入mp3的实时数据,以及本次数据的长度
T_JZsdkReturnCode mp3_Stream_Interface_Mp3Data(struct AudioDealInfo *AD_Info, unsigned int in_sampleRate, unsigned char *data, int dataSize)
{
//将数据输入到
while(dataSize > 0)
{
//检查参数,并将正确的数据输入到pkt中
//parser 解析器
... ... @@ -74,22 +93,16 @@ T_JZsdkReturnCode mp3_Stream_Interface_Mp3Data(struct AudioDealInfo *AD_Info, un
//pkt输出的数据指针
//data datasize 输入的数据指针
//pts、dts、pos:时间戳和位置信息,一般可以设置为AV_NOPTS_VALUE和0。
int ret = av_parser_parse2(parser, cdc_ctx, &pkt->data, &pkt->size, data, dataSize, AV_NOPTS_VALUE, AV_NOPTS_VALUE, 0);
ret = av_parser_parse2(parser, cdc_ctx, &pkt->data, &pkt->size, databufPtr, databufSize, AV_NOPTS_VALUE, AV_NOPTS_VALUE, 0);
if (ret < 0) {
printf("Error while parsing\n");
return -1;
return JZ_ERROR_SYSTEM_MODULE_CODE_FAILURE;
}
//重置重采样器
FF_Resample_Reset(AD_Info, in_sampleRate, (AVChannelLayout)AV_CHANNEL_LAYOUT_MONO, AV_SAMPLE_FMT_S16);
//检查滤波器
FF_Filter_Init(AD_Info, 0x01);
//数据指针 往后一个解析长度
//长度指针 减少一个被解析数据的长度
data += ret;
dataSize -= ret;
databufPtr += ret;
databufSize -= ret;
//如果输出有长度 解码输出的数据
if (pkt->size > 0)
... ... @@ -97,6 +110,8 @@ T_JZsdkReturnCode mp3_Stream_Interface_Mp3Data(struct AudioDealInfo *AD_Info, un
Stream_Player_decode(AD_Info, cdc_ctx, pkt, decoded_frame);
}
}
return JZ_ERROR_SYSTEM_MODULE_CODE_SUCCESS;
}
... ... @@ -108,7 +123,13 @@ T_JZsdkReturnCode Stream_Player_decode(struct AudioDealInfo *AD_Info, AVCodecCon
//发送数据包给解码器解码,已将数据解码为pcm原始数据
ret = avcodec_send_packet(dec_ctx, pkt);
if (ret < 0)
if (ret == AVERROR(EAGAIN))
{
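//按 FFmpeg 的约定,EAGAIN 表示解码器当前不接收新包,需要先用 avcodec_receive_frame 取走已解码帧;此处仅记录错误信息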
char errbuf[128];
av_strerror(ret, errbuf, sizeof(errbuf));
JZSDK_LOG_ERROR("Error while sending a packet to the decoder %s",errbuf);
}
else if (ret < 0)
{
JZSDK_LOG_ERROR("Error submitting the packet to the decoder, ret=%d\n",ret);
return JZ_ERROR_SYSTEM_MODULE_CODE_FAILURE;
... ... @@ -120,7 +141,7 @@ T_JZsdkReturnCode Stream_Player_decode(struct AudioDealInfo *AD_Info, AVCodecCon
/* read all the output frames (in general there may be any number of them */
//读取输出的帧
while ( (ret >= 0) && (AD_Info->Flag_AudioDataGenerationImplement == JZ_FLAGCODE_ON) )
while ( (ret >= 0) && AD_Info->AudioDeal_ResampleAndFilter_Execute_Flag == JZ_FLAGCODE_ON)
{
//从解码器中读取解码后的音频帧数据
ret = avcodec_receive_frame(dec_ctx, frame);
... ... @@ -135,6 +156,8 @@ T_JZsdkReturnCode Stream_Player_decode(struct AudioDealInfo *AD_Info, AVCodecCon
return JZ_ERROR_SYSTEM_MODULE_CODE_FAILURE;
}
//printf("输出了:%d的数据\n",frame->nb_samples);
int out_nb_samples = 0;
//重采样解码后的数据
... ... @@ -149,7 +172,7 @@ T_JZsdkReturnCode Stream_Player_decode(struct AudioDealInfo *AD_Info, AVCodecCon
//将临时帧 放入 均衡滤波器
FF_Filter_push_frame_to_fliter(AD_Info, temp_frame);
while(AD_Info->Flag_AudioDataGenerationImplement == JZ_FLAGCODE_ON)
while(AD_Info->AudioDeal_ResampleAndFilter_Execute_Flag == JZ_FLAGCODE_ON)
{
//得到滤波器输出的音频帧 eq_frame
int fret = FF_Filter_get_frame_from_filter(AD_Info, eq_frame);
... ... @@ -158,6 +181,8 @@ T_JZsdkReturnCode Stream_Player_decode(struct AudioDealInfo *AD_Info, AVCodecCon
break;
}
//printf("pcm播放 %d 数据\n",eq_frame->nb_samples);
//播放该滤波后的帧
Pcm_AlsaPlay(AD_Info, (unsigned char*)eq_frame->data[0], eq_frame->nb_samples);
... ... @@ -179,4 +204,14 @@ T_JZsdkReturnCode Stream_Player_decode(struct AudioDealInfo *AD_Info, AVCodecCon
//释放掉输出的变量
av_frame_unref(temp_frame);
av_frame_unref(eq_frame);
}
T_JZsdkReturnCode Stream_Player_Stop(struct AudioDealInfo *AD_Info)
{
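//发送 data=NULL、size=0 的空包即 FFmpeg 约定的冲刷(flush)包,促使解码器吐出内部缓存的剩余音频帧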
pkt->data = NULL;
pkt->size = 0;
Stream_Player_decode(AD_Info, cdc_ctx, pkt, decoded_frame);
return JZ_ERROR_SYSTEM_MODULE_CODE_SUCCESS;
}
\ No newline at end of file
... ...
... ... @@ -296,13 +296,25 @@ T_JZsdkReturnCode FF_Filter_Init(struct AudioDealInfo *AD_Info, int AudioType)
{
T_JZsdkReturnCode ret;
int NewType = FILTER_NORMAL;
int NewType = FILTER_NORMAL_AUDIO;
//通过g_FilterMode 与 音频内容 得出滤波类型
if (g_FilterMode == 0x00) //默认滤波
{
//无须管音频类型,直接同一个默认滤波器
NewType = FILTER_NORMAL;
if (AudioType == 0x00) //文本类型
{
NewType = FILTER_NORMAL_AUDIO;
}
else if (AudioType == 0x01) //音频类型
{
NewType = FILTER_NORMAL_TTS;
}
else
{
JZSDK_LOG_ERROR("错误的音频类型");
return JZ_ERROR_SYSTEM_MODULE_CODE_FAILURE;
}
}
else if (g_FilterMode == 0x01) //30M滤波
{
... ... @@ -348,8 +360,12 @@ T_JZsdkReturnCode FF_Filter_Init(struct AudioDealInfo *AD_Info, int AudioType)
//初始化滤波器
switch (NewType)
{
case FILTER_NORMAL:
ret = FF_Filter_ParamInit(AD_Info, FILTER_PARAM_NORMAL);
case FILTER_NORMAL_AUDIO:
ret = FF_Filter_ParamInit(AD_Info, FILTER_PARAM_NORMAL_AUDIO);
break;
case FILTER_NORMAL_TTS:
ret = FF_Filter_ParamInit(AD_Info, FILTER_PARAM_NORMAL_TTS);
break;
case FILTER_NORMAL_M30_TTS:
... ... @@ -361,7 +377,7 @@ T_JZsdkReturnCode FF_Filter_Init(struct AudioDealInfo *AD_Info, int AudioType)
break;
default:
ret = FF_Filter_ParamInit(AD_Info, FILTER_PARAM_NORMAL);
ret = FF_Filter_ParamInit(AD_Info, FILTER_PARAM_NORMAL_AUDIO);
break;
}
... ...
... ... @@ -12,7 +12,7 @@
/* Includes ------------------------------------------------------------------*/
#include "JZsdk_Base/JZsdk_Code/JZsdk_Code.h"
#include "BaseConfig.h"
#ifdef __cplusplus
extern "C" {
... ... @@ -46,7 +46,18 @@ extern "C" {
equalizer=f=8000:t=q:w=2.0:g=0, \
equalizer=f=16000:t=q:w=2.0:g=0"
#define FILTER_FORMAL_3 "equalizer=f=31:t=q:w=2.0:g=-48, \
#define FILTER_FORMAL_3_AUDIO "equalizer=f=31:t=q:w=2.0:g=-48, \
equalizer=f=62:t=q:w=2.0:g=-36, \
equalizer=f=125:t=q:w=2.0:g=-30, \
equalizer=f=250:t=q:w=2.0:g=-26, \
equalizer=f=500:t=q:w=2.0:g=-20, \
equalizer=f=1000:t=q:w=2.0:g=-12, \
equalizer=f=2000:t=q:w=2.0:g=-8, \
equalizer=f=4000:t=q:w=2.0:g=+1, \
equalizer=f=8000:t=q:w=2.0:g=+2, \
equalizer=f=16000:t=q:w=2.0:g=+2"
#define FILTER_FORMAL_3_TTS "equalizer=f=31:t=q:w=2.0:g=-48, \
equalizer=f=62:t=q:w=2.0:g=-36, \
equalizer=f=125:t=q:w=2.0:g=-30, \
equalizer=f=250:t=q:w=2.0:g=-26, \
... ... @@ -83,6 +94,21 @@ extern "C" {
anequalizer=c0 f=3800 w=1200 g=-22 t=0|c1 f=3800 w=1200 g=-22 t=0, \
anequalizer=c0 f=3100 w=600 g=-16 t=0|c1 f=3100 w=600 g=-16 t=0"
#define FILTER_FORMAL_1_M30_2 "equalizer=f=31:t=q:w=2.0:g=-48, \
equalizer=f=62:t=q:w=2.0:g=-36, \
equalizer=f=125:t=q:w=2.0:g=-30, \
equalizer=f=250:t=q:w=2.0:g=-26, \
equalizer=f=500:t=q:w=2.0:g=-20, \
equalizer=f=1000:t=q:w=2.0:g=-12, \
equalizer=f=2000:t=q:w=2.0:g=-8, \
equalizer=f=4000:t=q:w=2.0:g=+1, \
equalizer=f=8000:t=q:w=2.0:g=+2, \
equalizer=f=16000:t=q:w=2.0:g=+2, \
anequalizer=c0 f=4000 w=1400 g=-28 t=0|c1 f=4000 w=1400 g=-28 t=0, \
anequalizer=c0 f=3700 w=1400 g=-24 t=0|c1 f=3700 w=1400 g=-24 t=0"
/*00 35
02 37
05 39
... ... @@ -118,15 +144,31 @@ extern "C" {
typedef enum FilterList{
FILTER_NORMAL = 1,
FILTER_NORMAL_AUDIO = 1,
FILTER_NORMAL_TTS = 2,
FILTER_NORMAL_M30_AUDIO = 5,
FILTER_NORMAL_M30_TTS = 6,
}FilterList;
#define FILTER_PARAM_NORMAL FILTER_FORMAL_3
//普通音频滤波参数
#define FILTER_PARAM_NORMAL_AUDIO FILTER_FORMAL_3_AUDIO
//普通TTS滤波参数
#define FILTER_PARAM_NORMAL_TTS FILTER_FORMAL_3_TTS
#define FILTER_PARAM_M30_TTS FILTER_FORMAL_1_M30_TTS
//h10t的滤波器跟其他的不一样,所以单独定义
#if DEVICE_VERSION == JZ_H10T
#define FILTER_PARAM_M30_AUDIO FILTER_FORMAL_1_M30_2
#else
#define FILTER_PARAM_M30_AUDIO FILTER_FORMAL_1_M30_1
#endif
/* Exported types ------------------------------------------------------------*/
/* Exported functions --------------------------------------------------------*/
... ...
... ... @@ -6,7 +6,6 @@
#include <time.h>
#include <unistd.h>
#include "JZsdkLib.h"
#include "BaseConfig.h"
#include "JZsdk_usb_bulk/JZsdk_usb_bulk.h"
... ... @@ -18,6 +17,33 @@
#include "MediaProc/MultProc/RTK_mmp/RTK_mmp.h"
#include "MediaProc/MultProc/RTK_mmp/Dec/RTK_mmp_dec.h"
#include "MediaProc/MultProc/RTK_mmp/Enc/RTK_mmp_enc.h"
#include "MediaProc/Camera/Camera.h"
#endif
#ifdef RTK_RGA_STATUS_ON
#include "MediaProc/RgaProc/RK_Rga/RK_Rga.h"
//c1的rga结构体
typedef struct C1_RgaInfo
{
//源图像
RK_RgaImage *src_img;
//裁剪图像
RK_RgaImage *corp_img;
//目标图像
RK_RgaImage *dst_img;
//放大倍数
int scale;
}C1_RgaInfo;
static C1_RgaInfo *g_C1_RgaIrcInfo = NULL;
static C1_RgaInfo *g_C1_RgaOptInfo = NULL;
#endif
... ... @@ -45,6 +71,10 @@ static unsigned char FrameBuffer[FIRST_HEIGHT * FIRST_WIDTH *2]; //用于存储
static unsigned int FrameBufferLen = 0; //用于存储帧数据的长度
static int FrameBuffer_UseFlag = JZ_FLAGCODE_OFF;
#ifdef RTK_RGA_STATUS_ON
static T_JZsdkReturnCode JZC1_RgaDeal(C1_RgaInfo *rga_info, int resize, unsigned char *image, unsigned int *imgage_size);
static T_JZsdkReturnCode JZC1_RgaInit(C1_RgaInfo **rgaInfo, int dst_width, int dst_height, int dst_format);
#endif
//数据推送函数
static T_JZsdkReturnCode JZC1_PushFrame(int CameraIndex, unsigned char* data, unsigned int data_len)
... ... @@ -74,12 +104,14 @@ static T_JZsdkReturnCode JZC1_PushFrame(int CameraIndex, unsigned char* data, un
return JZ_ERROR_SYSTEM_MODULE_CODE_SUCCESS;
}
static T_JZsdkReturnCode JZC1_OptDeal(unsigned char *data, unsigned int data_len)
{
//JZSDK_LOG_INFO("JZC1_OptDeal");
#ifdef RTK_MPP_STATUS_ON
//JZSDK_LOG_INFO("JZC1_OptDeal");
MppFrame yuv_data = NULL; //用于传递yuv数据的地址
MppPacket Packet = NULL;
... ... @@ -93,6 +125,68 @@ static T_JZsdkReturnCode JZC1_OptDeal(unsigned char *data, unsigned int data_len
// JZSDK_LOG_INFO("w:%d h:%d hor:%d ver:%d",width,height,h_stride,v_stride);
//光学数据缩放
int resize = JZ_FLAGCODE_OFF;
Camera_param(JZ_FLAGCODE_GET, CAMERA_RESIZE, &resize);
//将size转化为倍数
int ZoomRatio = 0;
switch (resize)
{
case 0:
ZoomRatio = JZ_FLAGCODE_OFF;
break;
case 1:
ZoomRatio = 2;
break;
case 2:
ZoomRatio = 4;
break;
case 3:
ZoomRatio = 8;
break;
default:
ZoomRatio = JZ_FLAGCODE_OFF;
break;
}
if (ZoomRatio != JZ_FLAGCODE_OFF)
{
MppBuffer temp = mpp_frame_get_buffer(yuv_data);
RK_U32 h = mpp_frame_get_hor_stride(yuv_data);
RK_U32 w = mpp_frame_get_ver_stride(yuv_data);
int size = (h * w * 1.5);
unsigned char *temp_data = (unsigned char *)malloc(size);
memcpy(temp_data, mpp_buffer_get_ptr(temp), size);
JZC1_RgaDeal(g_C1_RgaOptInfo, ZoomRatio, temp_data, &size);
//重新将数据放回
memcpy(mpp_buffer_get_ptr(temp), temp_data, size);
free(temp_data);
}
//测试
MppBuffer temp = mpp_frame_get_buffer(yuv_data);
RK_U32 h = mpp_frame_get_hor_stride(yuv_data);
RK_U32 w = mpp_frame_get_ver_stride(yuv_data);
int size = (h * w * 1.5);
unsigned char *temp_data = (unsigned char *)malloc(size);
memcpy(temp_data, mpp_buffer_get_ptr(temp), size);
//将返回的数据输入进编码器
RTK_mmp_enc_yuv_to_h264_byFrame(JZsdk_RtkMmpGetEncHandleAddr(1), yuv_data, &Packet);
... ... @@ -150,6 +244,41 @@ static T_JZsdkReturnCode JZC1_IrcDeal(unsigned char *data, unsigned int data_len
//将原始码流数据转换为rgb数据
IRC_FrameDeal(u16Data, u16DataSize, &raw_data, &raw_data_len);
//获取放大的倍数
//光学数据缩放
int resize = JZ_FLAGCODE_OFF;
Camera_param(JZ_FLAGCODE_GET, CAMERA_RESIZE, &resize);
//将size转化为倍数
int ZoomRatio = 0;
switch (resize)
{
case 0:
ZoomRatio = JZ_FLAGCODE_OFF;
break;
case 1:
ZoomRatio = 2;
break;
case 2:
ZoomRatio = 4;
break;
case 3:
ZoomRatio = 8;
break;
default:
ZoomRatio = 0;
break;
}
if (ZoomRatio != JZ_FLAGCODE_OFF)
{
JZC1_RgaDeal(g_C1_RgaIrcInfo, ZoomRatio, raw_data, &raw_data_len);
}
//将原始码流数据写入到编码器 并转换为h264
unsigned char *h264Data = NULL;
unsigned int h264DataLen = 0;
... ... @@ -436,6 +565,205 @@ static T_JZsdkReturnCode JZsdk_JZC1_Irc_Data_Init()
}
/********************
*
* 混合视频说明
*
* 1920*1088 做处理 并输出为 1920*1080
* __________________________________________________________________
* |_________________________________ |
* | |________________________________|
* | | |
* | | 1920*1088 /2 960* 544 |
* | | |
* | 320*256 拉伸4到 1280*1024 | |
* | 裁剪出 3/4 到 960 * 768 | |
* | | |
* | |________________________________|
* |_________________________________| |
* |__________________________________________________________________|
*
* ******************/
static unsigned char *g_MixedIrc_Buffer = NULL;
static unsigned char *g_MixedOpt_Buffer = NULL;
/******************
*
* 混合视频处理
*
*
* ********************/
static void JZC1_MixedVideo_Deal()
{
}
static void *MixedVideo_Thread(void *args)
{
#ifdef RTK_RGA_STATUS_ON
struct timespec now;
int d_ret = 0;
//获取起始时间
struct timespec start_time;
clock_gettime(CLOCK_MONOTONIC, &start_time);
long long prev_time = start_time.tv_sec * NSEC_PER_SEC + start_time.tv_nsec;
//设置间隔时间
long long period = NSEC_PER_SEC / TARGET_FPS;
//混合视频缓冲区
unsigned char * MixedIrc_Buffer = (unsigned char *)malloc((JZ_ALIGN(FIRST_WIDTH, 16)) * (JZ_ALIGN(FIRST_HEIGHT, 16)) * 3 / 2);
unsigned char * MixedOpt_Buffer = (unsigned char *)malloc(JZ_ALIGN(SECOND_WIDTH, 16) * JZ_ALIGN(SECOND_HEIGHT, 16) * 3 / 2);
//初始化混合视频的rga
int Irc_width = JZ_ALIGN(FIRST_WIDTH, 16);
int Irc_height = JZ_ALIGN(FIRST_HEIGHT, 16);
int IRc_Rect_x = 0;
int IRc_Rect_y = 0;
int Irc_Rect_w = JZ_ALIGN(FIRST_WIDTH, 16);
int Irc_Rect_h = JZ_ALIGN(FIRST_HEIGHT, 16);
int Irc_Format = RK_FORMAT_YCbCr_420_SP;
int Opt_width = JZ_ALIGN(SECOND_WIDTH, 16);
int Opt_height = JZ_ALIGN(SECOND_HEIGHT, 16);
int Opt_Rect_x = 0;
int Opt_Rect_y = 0;
int Opt_Rect_w = JZ_ALIGN(SECOND_WIDTH, 16);
int Opt_Rect_h = JZ_ALIGN(SECOND_HEIGHT, 16);
int Opt_Format = RK_FORMAT_YCbCr_420_SP;
//红外源图像
RK_RgaImage *Irc_SrcImg = NULL;
RK_Rga_ImageInit(&Irc_SrcImg, Irc_width, Irc_height, Irc_Format, IRc_Rect_x, IRc_Rect_y, Irc_Rect_w, Irc_Rect_h);
//红外放大图像
RK_RgaImage *Irc_ResizeImg = NULL;
RK_Rga_ImageInit(&Irc_ResizeImg, Irc_width*4, Irc_height*4, Irc_Format, IRc_Rect_x, IRc_Rect_y, Irc_Rect_w*4, Irc_Rect_h*4);
//红外裁剪图像
RK_RgaImage *Irc_CropImg = NULL;
RK_Rga_ImageInit(&Irc_CropImg, (Irc_width*4) /4 *3, (Irc_height*4) /4 *3, Irc_Format, (Irc_width*4)/4, (Irc_height*4)/4, (Irc_width*4) /4 *3, (Irc_height*4) /4 *3);
//光学源图像
RK_RgaImage *Opt_SrcImg = NULL;
RK_Rga_ImageInit(&Opt_SrcImg, Opt_width, Opt_height, Opt_Format, Opt_Rect_x, Opt_Rect_y, Opt_Rect_w, Opt_Rect_h);
//光学的缩小图像
RK_RgaImage *Opt_ResizeImg = NULL;
RK_Rga_ImageInit(&Opt_ResizeImg, Opt_width/2, Opt_height/2, Opt_Format, Opt_Rect_x, Opt_Rect_y, Opt_Rect_w/2, Opt_Rect_h/2);
//目标图像
RK_RgaImage *DstImg = NULL;
RK_Rga_ImageInit(&DstImg, Irc_width, Irc_height, Irc_Format, IRc_Rect_x, IRc_Rect_y, Irc_Rect_w, Irc_Rect_h);
while (1)
{
//获取当前时间
clock_gettime(CLOCK_MONOTONIC, &now);
long long current_time = now.tv_sec * NSEC_PER_SEC + now.tv_nsec;
//计算时间差
long long elapsed_time = current_time - prev_time;
//超过33ms
if (elapsed_time >= period)
{
// while (FrameBuffer_UseFlag == JZ_FLAGCODE_ON)
// {
// delayUs(100);
// }
//将数据放入缓冲区
memcpy(MixedIrc_Buffer, g_MixedIrc_Buffer, (JZ_ALIGN(FIRST_WIDTH, 16)) * (JZ_ALIGN(FIRST_HEIGHT, 16)) * 3 / 2);
memcpy(MixedOpt_Buffer, g_MixedOpt_Buffer, JZ_ALIGN(SECOND_WIDTH, 16) * JZ_ALIGN(SECOND_HEIGHT, 16) * 3 / 2);
//混合视频处理
//FrameBuffer_UseFlag = JZ_FLAGCODE_OFF;
//光学数据处理
//缩小图像到1/2
d_ret = imresize(Opt_SrcImg->img, Opt_ResizeImg->img);
if (d_ret != IM_STATUS_SUCCESS)
{
printf("irc resize failed\n");
continue;
}
//红外数据处理
//放大图像到4倍
d_ret = imresize(Irc_SrcImg->img, Irc_ResizeImg->img);
if (d_ret != IM_STATUS_SUCCESS)
{
printf("opt resize failed\n");
continue;
}
//裁切红外图像
d_ret = imcrop(Irc_ResizeImg->img, Irc_CropImg->img, Irc_CropImg->rect);
if (d_ret != IM_STATUS_SUCCESS)
{
printf("opt crop failed\n");
continue;
}
//以下参考rga_alpha_yuv_demo.cpp
//将缩放好的光学画面放入目标画面
int usage = IM_SYNC | IM_ALPHA_BLEND_DST_OVER | IM_ALPHA_BLEND_PRE_MUL;
// d_ret = improcess()
// prev_time = current_time;
}
// 为了防止过于频繁地调用 clock_gettime,可以添加一个小的睡眠时间
// 例如,休眠1毫秒(1000000纳秒),以减少CPU占用
struct timespec req = { .tv_sec = 0, .tv_nsec = 1000000 };
nanosleep(&req, NULL);
}
#endif
}
/******************
*
* 混合视频初始化
*
*
* ********************/
static T_JZsdkReturnCode JZC1_MixedVideo_Init()
{
g_MixedIrc_Buffer = (unsigned char *)malloc(JZ_ALIGN(FIRST_WIDTH, 16)*JZ_ALIGN(FIRST_HEIGHT, 16)*3/2);
g_MixedOpt_Buffer = (unsigned char *)malloc(JZ_ALIGN(SECOND_WIDTH, 16)*JZ_ALIGN(SECOND_HEIGHT, 16)*3/2);
//混合视频初始化
pthread_t Task;
pthread_attr_t attribute; //线程属性
pthread_attr_init(&attribute); //初始化线程属性
pthread_attr_setdetachstate(&attribute, PTHREAD_CREATE_DETACHED); //设置线程分离属性
int bufferdata_Protection = pthread_create(&Task,&attribute,MixedVideo_Thread,NULL); //线程
if(bufferdata_Protection != 0)
{
JZSDK_LOG_ERROR("创建混合视频初始化失败!");
return JZ_ERROR_SYSTEM_MODULE_CODE_FAILURE;
}
JZSDK_LOG_INFO("MixedVidoe_Init Success");
return JZ_ERROR_SYSTEM_MODULE_CODE_SUCCESS;
}
#endif
//JZ_C1 的媒体初始化
... ... @@ -453,11 +781,14 @@ static T_JZsdkReturnCode JZC1_MediaInit()
//初始化Mulit模块
#ifdef RTK_MPP_STATUS_ON
//初始化红外的编解码器
RTK_mmp_enc_Init(JZsdk_RtkMmpGetEncHandleAddr(0), MPP_VIDEO_CodingAVC, MPP_FMT_YUV420P, FIRST_WIDTH, FIRST_HEIGHT, 30, 5);
RTK_mmp_enc_Init(JZsdk_RtkMmpGetEncHandleAddr(0), MPP_VIDEO_CodingAVC, MPP_FMT_YUV420SP, FIRST_WIDTH, FIRST_HEIGHT, 30, 5);
//初始化光学的编解码器
RTK_mmp_dec_Init(JZsdk_RtkMmpGetDecHandleAddr(1), MPP_VIDEO_CodingMJPEG, MPP_FMT_YUV420SP, SECOND_WIDTH, SECOND_HEIGHT);
RTK_mmp_enc_Init(JZsdk_RtkMmpGetEncHandleAddr(1), MPP_VIDEO_CodingAVC, MPP_FMT_YUV420SP, SECOND_WIDTH, SECOND_HEIGHT, 30, 15);
// RTK_mmp_dec_Init(JZsdk_RtkMmpGetDecHandleAddr(1), MPP_VIDEO_CodingMJPEG, MPP_FMT_YUV420SP, SECOND_WIDTH, SECOND_HEIGHT);
// RTK_mmp_enc_Init(JZsdk_RtkMmpGetEncHandleAddr(1), MPP_VIDEO_CodingAVC, MPP_FMT_YUV420SP, SECOND_WIDTH, SECOND_HEIGHT, 30, 15);
#endif
//初始化Camera模块
... ... @@ -469,6 +800,7 @@ static T_JZsdkReturnCode JZC1_MediaInit()
}
else
{
//光学相机的抓取
ret = JZsdk_FrameCatch_Single(JZC1_OptDeal);
}
... ... @@ -505,12 +837,132 @@ static T_JZsdkReturnCode JZC1_MediaInit()
value = 0;
Camera_param(JZ_FLAGCODE_SET, CAMERA_PIXEL_PSEUDO_COLOR_MODE, &value);
//初始化变焦模块
Cam_Zoom_Init();
//初始化RGA模块
#ifdef RTK_RGA_STATUS_ON
ret = JZC1_RgaInit(&g_C1_RgaIrcInfo, JZ_ALIGN(FIRST_WIDTH, 16), JZ_ALIGN(FIRST_HEIGHT, 16), RK_FORMAT_YCbCr_420_SP);
ret = JZC1_RgaInit(&g_C1_RgaOptInfo, JZ_ALIGN(SECOND_WIDTH, 16), JZ_ALIGN(SECOND_HEIGHT, 16), RK_FORMAT_YCbCr_420_SP);
#endif
#endif
return JZ_ERROR_SYSTEM_MODULE_CODE_SUCCESS;
}
#ifdef RTK_RGA_STATUS_ON
static T_JZsdkReturnCode JZC1_RgaInit(C1_RgaInfo **rgaInfo, int dst_width, int dst_height, int dst_format)
{
//初始化rga结构体
(*rgaInfo) = (C1_RgaInfo *)malloc(sizeof(C1_RgaInfo));
if ((*rgaInfo) == NULL)
{
JZSDK_LOG_ERROR("RGA初始化失败");
return JZ_ERROR_SYSTEM_MODULE_CODE_FAILURE;
}
int width = dst_width;
int height = dst_height;
int rect_x = 0;
int rect_y = 0;
int rect_w = dst_width;
int rect_h = dst_height;
(*rgaInfo)->src_img = NULL;
RK_Rga_ImageInit(&(*rgaInfo)->src_img, width, height, dst_format, rect_x, rect_y, rect_w, rect_h); //初始化输入模块
(*rgaInfo)->corp_img = NULL;
RK_Rga_ImageInit(&(*rgaInfo)->corp_img, width, height, dst_format, rect_x, rect_y, rect_w, rect_h); //初始化裁剪模块
(*rgaInfo)->dst_img = NULL;
RK_Rga_ImageInit(&(*rgaInfo)->dst_img, width, height, dst_format, rect_x, rect_y, rect_w, rect_h); //初始化输出模块
(*rgaInfo)->scale = 0;
return JZ_ERROR_SYSTEM_MODULE_CODE_SUCCESS;
}
#endif
//rga处理
#ifdef RTK_RGA_STATUS_ON
static T_JZsdkReturnCode JZC1_RgaDeal(C1_RgaInfo *rga_info, int resize, unsigned char *image, unsigned int *imgage_size)
{
int d_ret;
if (rga_info == NULL || *imgage_size != rga_info->src_img->buf_size)
{
printf("C1_Rga_Deal failed imagesize:%d bufsize:%d\n", *imgage_size, rga_info->src_img->buf_size);
return JZ_ERROR_SYSTEM_MODULE_CODE_FAILURE;
}
//检查裁剪倍数是否改变
if (resize != rga_info->scale)
{
rga_info->scale = resize;
//释放裁剪区域, 并重新注册
if (rga_info->corp_img != NULL)
{
RK_Rga_ImageDeInit(&(rga_info->corp_img));
int width = rga_info->src_img->width / resize;
int height = rga_info->src_img->height / resize;
int rect_x = (rga_info->src_img->width) / 2 - width / 2;
int rect_y = (rga_info->src_img->height) / 2 - height / 2;
int rect_w = width;
int rect_h = height;
int dst_format = rga_info->src_img->format;
RK_Rga_ImageInit(&(rga_info->corp_img), width, height, dst_format, rect_x, rect_y, rect_w, rect_h); //初始化裁剪模块
}
}
//将图像放入处理器
memcpy(rga_info->src_img->buf, image, *imgage_size);
//JZSDK_LOG_DEBUG("裁剪倍率%d",resize);
if (resize == 0)
{
return JZ_ERROR_SYSTEM_MODULE_CODE_SUCCESS;
}
//裁剪图像
d_ret = imcrop(rga_info->src_img->img, rga_info->corp_img->img, rga_info->corp_img->rect);
if (d_ret != IM_STATUS_SUCCESS)
{
printf("crop failed\n");
return JZ_ERROR_SYSTEM_MODULE_CODE_FAILURE;
}
//缩放图像
d_ret = imresize(rga_info->corp_img->img, rga_info->dst_img->img);
if (d_ret != IM_STATUS_SUCCESS)
{
printf("resize failed\n");
return JZ_ERROR_SYSTEM_MODULE_CODE_FAILURE;
}
//返回图像
memcpy(image, rga_info->dst_img->buf, rga_info->dst_img->buf_size);
//printf("image[0]:%d image[1]:%d\n", image[0], image[1]);
*imgage_size = rga_info->dst_img->buf_size;
return JZ_ERROR_SYSTEM_MODULE_CODE_SUCCESS;
}
#endif
T_JZsdkReturnCode JZC1_Init()
{
... ...
... ... @@ -14,7 +14,6 @@ sysfs接口与GPIO(通用输入/输出)模块
**/
/*******************
*
* sysfs的gpio引脚注册
* 引脚号计算公式为 pin = bank * 32 + number, number = group*8 + X
* 格式为 GPIO + bank + '_' + group + X
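* 例如(假设 group 按 A=0、B=1、C=2、D=3 取值,仅作示意):GPIO3_B2 → number = 1*8 + 2 = 10,pin = 3*32 + 10 = 106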
... ...
#include <stdio.h>
#include <pthread.h>
#include "BaseConfig.h"
#include "JZsdkLib.h"
#define ZOOM_VALUE_DETECTION_FREQUENCY 50 //每秒检测50次变焦值
static int g_Cam_ZoomTempValue = 0; //相机对焦临时变量 用于各个模块的输入使用
static int g_Cam_ZoomValue = 0; //相机对焦变量的实际对焦值, 0为最远(完整画面), 1000为最近(最大放大画面)
/*****************
*
*
* 设置临时变焦值
*
*
* *******************/
T_JZsdkReturnCode Cam_Zoom_SetTempValue(int *value)
{
if (*value > 1000)
{
*value = 1000;
}
else if (*value < 0)
{
*value = 0;
}
g_Cam_ZoomTempValue = *value;
return JZ_ERROR_SYSTEM_MODULE_CODE_SUCCESS;
}
/*************
*
*
* 获取变焦值
*
*
* *****************/
T_JZsdkReturnCode Cam_Zoom_GetValue(int *value)
{
*value = g_Cam_ZoomValue;
return JZ_ERROR_SYSTEM_MODULE_CODE_SUCCESS;
}
/*********************
*
*
* 设置实际变焦值
*
*
* *********************/
static T_JZsdkReturnCode Cam_Zoom_SetValue(int value)
{
g_Cam_ZoomValue = value;
//对外广播变焦值
return JZ_ERROR_SYSTEM_MODULE_CODE_SUCCESS;
}
static void *ZoomValue_Thread(void *args)
{
while (1)
{
//每一段时间检测输入的变焦值是否有变化
delayMs(1000/ZOOM_VALUE_DETECTION_FREQUENCY);
if (g_Cam_ZoomTempValue != g_Cam_ZoomValue)
{
Cam_Zoom_SetValue(g_Cam_ZoomTempValue);
}
//JZSDK_LOG_DEBUG("g_Cam_ZoomValue = %d , g_Cam_ZoomTempValue = %d", g_Cam_ZoomValue, g_Cam_ZoomTempValue);
}
}
/******************
*
*
* 变焦模块初始化
*
* *****************/
T_JZsdkReturnCode Cam_Zoom_Init()
{
//初始化处理线程
pthread_t ZoomValue_Task;
pthread_attr_t attribute; //线程属性
pthread_attr_init(&attribute); //初始化线程属性
pthread_attr_setdetachstate(&attribute, PTHREAD_CREATE_DETACHED); //设置线程分离属性
int bufferdata_Protection = pthread_create(&ZoomValue_Task,&attribute,ZoomValue_Thread,NULL); //线程
if(bufferdata_Protection != 0)
{
JZSDK_LOG_ERROR("创建变焦模块失败!");
return JZ_ERROR_SYSTEM_MODULE_CODE_FAILURE;
}
return JZ_ERROR_SYSTEM_MODULE_CODE_SUCCESS;
}
\ No newline at end of file
... ...
/**
********************************************************************
* @file Cam_Zoom.h
* Cam_Zoom.c 的头文件
*
*********************************************************************
*/
/* Define to prevent recursive inclusion 避免重定义 -------------------------------------*/
#ifndef CAM_ZOOM_H
#define CAM_ZOOM_H
/* Includes ------------------------------------------------------------------*/
#include "JZsdk_Base/JZsdk_Code/JZsdk_Code.h"
#ifdef __cplusplus
extern "C" {
#endif
/* Exported constants --------------------------------------------------------*/
/* 常量定义 */
/* Exported types ------------------------------------------------------------*/
/* Exported functions --------------------------------------------------------*/
T_JZsdkReturnCode Cam_Zoom_SetTempValue(int *value);
T_JZsdkReturnCode Cam_Zoom_GetValue(int *value);
T_JZsdkReturnCode Cam_Zoom_Init();
#ifdef __cplusplus
}
#endif
#endif
... ...
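下面是一个仅作示意的调用片段(假设性示例,非仓库代码),演示变焦接口的基本用法:

#include "MediaProc/Camera/Cam_Zoom/Cam_Zoom.h"

void zoom_usage_example(void)
{
    //初始化变焦模块(内部会创建检测线程,按每秒 50 次检查临时变焦值)
    Cam_Zoom_Init();

    //写入期望的变焦值,超出 0~1000 会被接口内部钳位
    int want = 500;
    Cam_Zoom_SetTempValue(&want);

    //稍后读取生效的实际变焦值
    int now = 0;
    Cam_Zoom_GetValue(&now);
}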
... ... @@ -313,6 +313,13 @@ T_JZsdkReturnCode Camera_param(int flagcode, enum CameraParam paramflag, int *va
{
*value = g_CameraGasEnhancementColor;
}
break;
case CAMERA_RESIZE:
{
Cam_Zoom_GetValue(value);
}
break;
default:
{
... ... @@ -389,6 +396,15 @@ T_JZsdkReturnCode Camera_param(int flagcode, enum CameraParam paramflag, int *va
}
break;
case CAMERA_RESIZE:
{
ret = Cam_Zoom_SetTempValue(value);
if (ret != JZ_ERROR_SYSTEM_MODULE_CODE_SUCCESS)
{
return JZ_ERROR_SYSTEM_MODULE_CODE_FAILURE;
}
}
break;
default:
{
*value = JZ_FLAGCODE_OFF;
... ...
... ... @@ -14,6 +14,11 @@
#include "JZsdk_Base/JZsdk_Code/JZsdk_Code.h"
#include "MediaProc/MediaProc_Param.h"
#include "MediaProc/Camera/Cam_FrameCatch/Cam_FrameCatch.h"
#include "MediaProc/Camera/V4L2_camera/V4L2_CameraParameterSetting.h"
#include "MediaProc/Camera/V4L2_camera/V4L2_Record.h"
#include "MediaProc/Camera/Cam_Zoom/Cam_Zoom.h"
#ifdef __cplusplus
extern "C" {
#endif
... ...
... ... @@ -71,8 +71,11 @@ typedef struct IRC_param
unsigned int SPC_auto_time_flag; //spc定时自动打档标志位,用于计算打档时间
//两点矫正(Two-Point Correction -> TPC)
double *TPC_Diff; // tpc截距数组 即计算得到的增益
double *TPC_Slope; // tpc斜率数组 即计算得到的补正
double *TPC_Gain; // tpc斜率数组 即计算得到的校准增益
double *TPC_Offset; // tpc截距数组 即计算得到的校正偏移值
unsigned int TPC_ResetFlag; //tpc校准标志位 开启时进行tpc校准
unsigned int TPC_mode; //tpc模式 0出厂打档 1手动打档
... ...
... ... @@ -407,7 +407,8 @@ T_JZsdkReturnCode IRC_FrameDeal(U16_t *rawData ,unsigned int dataSize, unsigned
U8_t *yuv_frame = (U8_t *)malloc(g_IRC_Param->Width*g_IRC_Param->Height*3/2 *sizeof(U8_t));
// 调用转换函数
Stream_rgb888_to_yuv420p(RGB_data,g_IRC_Param->Width,g_IRC_Param->Height, yuv_frame);
//Stream_rgb888_to_yuv420p(RGB_data,g_IRC_Param->Width,g_IRC_Param->Height, yuv_frame);
Stream_rgb888_to_yuv420sp(RGB_data,g_IRC_Param->Width,g_IRC_Param->Height, yuv_frame);
*outData = yuv_frame;
*outDataSize = g_IRC_Param->Width*g_IRC_Param->Height*3/2;
... ... @@ -1044,16 +1045,16 @@ static T_JZsdkReturnCode IRC_param_Init(struct IRC_param **index, int height, in
IrcDealCfg->LowT_flag = JZ_FLAGCODE_OFF;
IrcDealCfg->HighT_flag = JZ_FLAGCODE_OFF;
IrcDealCfg->TPC_Diff = (double *)malloc(IrcDealCfg->PixelNum * sizeof(double));
if (IrcDealCfg->TPC_Diff == NULL)
IrcDealCfg->TPC_Offset = (double *)malloc(IrcDealCfg->PixelNum * sizeof(double));
if (IrcDealCfg->TPC_Offset == NULL)
{
JZSDK_LOG_ERROR("TPC_Diff注册失败");
JZSDK_LOG_ERROR("TPC_Offset注册失败");
}
IrcDealCfg->TPC_Slope = (double *)malloc(IrcDealCfg->PixelNum * sizeof(double));
if (IrcDealCfg->TPC_Slope == NULL)
IrcDealCfg->TPC_Gain = (double *)malloc(IrcDealCfg->PixelNum * sizeof(double));
if (IrcDealCfg->TPC_Gain == NULL)
{
JZSDK_LOG_ERROR("TPC_Slope注册失败");
JZSDK_LOG_ERROR("TPC_Gain注册失败");
}
IrcDealCfg->HighT_NineFrameAdd = (U16_t *)malloc(IrcDealCfg->PixelNum * sizeof(U16_t));
... ... @@ -1137,6 +1138,9 @@ static T_JZsdkReturnCode IRC_param_Init(struct IRC_param **index, int height, in
//填入低温数据
IRC_Set_LowTempData(IrcDealCfg);
//开启tpc校验标志
IrcDealCfg->TPC_ResetFlag = JZ_FLAGCODE_ON;
//获取markdata的本地数据
ret = IRC_LocalFrame_DataRead(SPC_MARK_DATA, IrcDealCfg);
if (ret != JZ_ERROR_SYSTEM_MODULE_CODE_SUCCESS)
... ...
... ... @@ -202,274 +202,6 @@ static T_JZsdkReturnCode JZsdk_vKT(U16_t *in_str, U8_t **out_str, int *out_str_l
}
// //将14位灰度图数据转换为原始码流
// T_JZsdkReturnCode IRC_14bitGrayData_to_RawData(U8_t *data, int data_len, U8_t **raw_data, int *raw_data_len, struct IRC_param *dealInfo, int rawType)
// {
// T_JZsdkReturnCode ret;
// //无图像
// if (data == NULL || dealInfo == NULL)
// {
// return JZ_ERROR_SYSTEM_MODULE_CODE_INVALID_PARAMETER;
// }
// U16_t u16_RawData[dealInfo->PixelNum]; //原始16位码流
// U16_t u16_CorrentData[dealInfo->PixelNum];
// unsigned int u16_data_len; //16位码流的数据长度
// //1、合成像素,u8转换合并成u16
// JZsdk_Merge_U8_to_U16_byReverse(data, data_len,u16_RawData, &u16_data_len);
// if (u16_data_len != dealInfo->PixelNum)
// {
// JZSDK_LOG_ERROR("像素合成失败");
// }
// //2、数据检查
// for (int i = 0; i < dealInfo->PixelNum; i++)
// {
// if (u16_RawData[i] > dealInfo->ExpectedMax)
// {
// u16_RawData[i] = dealInfo->ExpectedMax;
// }
// }
// //3、如果开启了原始流坏点寻找
// if (dealInfo->AutoBadPixelReset == JZ_FLAGCODE_ON)
// {
// JZsdk_RawCheckisBadPixel_U16(u16_RawData, dealInfo->Width, dealInfo->Height, dealInfo->BadPixel, &dealInfo->BadPixelNum);
// dealInfo->AutoBadPixelReset = JZ_FLAGCODE_OFF;
// }
// //盲元纠正
// JZsdk_CutBadPixel_U16(u16_RawData, dealInfo->Width, dealInfo->Height, dealInfo->BadPixel, dealInfo->BadPixelNum,dealInfo->BadPixelExtern, 1);
// /***************************************************************************
// *
// * 图像标定记录
// *
// *
// * *******************************************************************/
// //3、如果开启了低温循环标定
// if (dealInfo->LowT_flag != JZ_FLAGCODE_OFF)
// {
// IRC_LowT_CycleCalibration(u16_RawData, dealInfo);
// }
// //4、如果开启了高温循环标定
// if (dealInfo->HighT_flag != JZ_FLAGCODE_OFF)
// {
// IRC_HighT_CycleCalibration(u16_RawData, dealInfo);
// }
// //单点校正spc如果未进行过数据的标定,则进行一次
// if (dealInfo->FirstSPC_flag == JZ_FLAGCODE_ON)
// {
// JZSDK_LOG_INFO("未进行过数据标定");
// memcpy(dealInfo->SPC_Mark_Data, u16_RawData, dealInfo->PixelNum * sizeof(U16_t));
// IRC_SPC_FrameSaveOrRead(dealInfo, 1, dealInfo->SPC_Mark_Data);
// IRC_SPC_ParamCorrect(dealInfo, dealInfo->SPC_Mark_Data);
// dealInfo->FirstSPC_flag = JZ_FLAGCODE_OFF;
// }
// //如果开启spc重置标定
// if (dealInfo->SPC_ResetFlag == JZ_FLAGCODE_ON)
// {
// JZSDK_LOG_INFO("SPC重置标定");
// memcpy(dealInfo->SPC_Mark_Data, u16_RawData, dealInfo->PixelNum * sizeof(U16_t));
// IRC_SPC_FrameSaveOrRead(dealInfo, 1, dealInfo->SPC_Mark_Data);
// IRC_SPC_ParamCorrect(dealInfo, dealInfo->SPC_Mark_Data);
// dealInfo->SPC_ResetFlag = JZ_FLAGCODE_OFF;
// }
// //标记气体增图像图像
// //如果开启气体增强时,未标记过
// if (dealInfo->OutputPixelColorMode == 2 && dealInfo->First_DGCE_flag == JZ_FLAGCODE_ON)
// {
// memcpy(dealInfo->DGCE_Mark_Data, u16_RawData, dealInfo->PixelNum * sizeof(U16_t) );
// dealInfo->First_DGCE_flag = JZ_FLAGCODE_OFF;
// }
// //如果开启气体增强重置标定
// if (dealInfo->DGCE_ResetFlag == JZ_FLAGCODE_ON)
// {
// JZSDK_LOG_INFO("气体增强重置标定");
// memcpy(dealInfo->DGCE_Mark_Data, u16_RawData, dealInfo->PixelNum * sizeof(U16_t));
// dealInfo->DGCE_ResetFlag = JZ_FLAGCODE_OFF;
// }
// /***************************************************************************
// *
// * 校正
// *
// *
// * *******************************************************************/
// //5、校正
// //先复制一份纠正用数据
// memcpy(u16_CorrentData, u16_RawData, dealInfo->PixelNum * sizeof(U16_t));
// //如果打开了两点校正
// if (dealInfo->FrameCorrectMode == IRC_CORRCTION_TPC)
// {
// IRC_TPC(u16_CorrentData, dealInfo);
// }
// if (dealInfo->FrameCorrectMode == IRC_CORRCTION_SPC)
// {
// IRC_SPC(u16_CorrentData, dealInfo);
// }
// //7、图像输出模式
// U8_t *GrayImage = NULL; //灰度图数组
// unsigned int GrayImageLen = 0; //灰度图长度
// U8_t *RgbImage = NULL; //rgb888的图像数组
// unsigned int RgbImageLen = 0; //rgb数组的长度
// switch (dealInfo->OutputPixelColorMode)
// {
// case 0: //默认输出模式
// {
// //u16转换为灰度图
// //IRC_Histogram_vKT(u16_CorrentData, &GrayImage, &GrayImageLen, dealInfo);
// JZsdk_vKT(u16_CorrentData, &GrayImage, &GrayImageLen, dealInfo);
// if (JZsdk_GrayBadPixelCheck_flag == JZ_FLAGCODE_ON)
// {
// JZsdk_GrayCheckisBadPixel_U8(GrayImage, dealInfo->Width, dealInfo->Height, dealInfo->GrayPixel, &dealInfo->GrayPixelNum);
// JZsdk_GrayBadPixelCheck_flag = JZ_FLAGCODE_OFF;
// }
// //JZsdk_CutBadPixel_U8(GrayImage, dealInfo->Width, dealInfo->Height, dealInfo->GrayPixel, dealInfo->GrayPixelNum,dealInfo->GrayPixelExtern, 1);
// //灰度图转rgb888
// IRC_GrayTo_RGB(GrayImage, &RgbImage, &RgbImageLen,dealInfo);
// }
// break;
// case 1: //伪彩输出模式
// {
// //u16转换为灰度图
// //IRC_Histogram_vKT(u16_CorrentData, &GrayImage, &GrayImageLen, dealInfo);
// JZsdk_vKT(u16_CorrentData, &GrayImage, &GrayImageLen, dealInfo);
// if (JZsdk_GrayBadPixelCheck_flag == JZ_FLAGCODE_ON)
// {
// JZsdk_GrayCheckisBadPixel_U8(GrayImage, dealInfo->Width, dealInfo->Height, dealInfo->GrayPixel, &dealInfo->GrayPixelNum);
// JZsdk_GrayBadPixelCheck_flag = JZ_FLAGCODE_OFF;
// }
// //JZsdk_CutBadPixel_U8(GrayImage, dealInfo->Width, dealInfo->Height, dealInfo->GrayPixel, dealInfo->GrayPixelNum,dealInfo->GrayPixelExtern, 1);
// //灰度图转伪彩rgb888
// ret = PseudoColor_Gray2Rgb(GrayImage, &RgbImage, &RgbImageLen, dealInfo->PixelNum);
// if (ret != JZ_ERROR_SYSTEM_MODULE_CODE_SUCCESS)
// {
// IRC_GrayTo_RGB(GrayImage, &RgbImage, &RgbImageLen,dealInfo);
// }
// }
// break;
// case 2: //气体色彩增强输出模式
// {
// //进行前需要自动打开单点纠正模式
// //u16转换为灰度图
// //IRC_Histogram_vKT(u16_CorrentData, &GrayImage, &GrayImageLen, dealInfo);
// JZsdk_vKT(u16_CorrentData, &GrayImage, &GrayImageLen, dealInfo);
// // if (JZsdk_GrayBadPixelCheck_flag == JZ_FLAGCODE_ON)
// // {
// // JZsdk_GrayCheckisBadPixel_U8(GrayImage, dealInfo->Width, dealInfo->Height, dealInfo->GrayPixel, &dealInfo->GrayPixelNum);
// // JZsdk_GrayBadPixelCheck_flag = JZ_FLAGCODE_OFF;
// // }
// // JZsdk_CutBadPixel_U8(GrayImage, dealInfo->Width, dealInfo->Height, dealInfo->GrayPixel, dealInfo->GrayPixelNum,dealInfo->GrayPixelExtern, 1);
// //转为rgb
// IRC_GrayTo_RGB(GrayImage, &RgbImage, &RgbImageLen,dealInfo);
// //灰度图转气体增强rgb888
// IRC_DynamicGasesColorEnhance(RgbImage, u16_RawData, dealInfo);
// }
// break;
// default:
// return JZ_ERROR_SYSTEM_MODULE_CODE_FAILURE;
// break;
// }
// // 在rgb图上画图形
// IRC_WriteGraphical(RgbImage, dealInfo);
// /*****************
// *
// *
// * 如果码流类型要求为yuv420p
// *
// * **********************/
// if (rawType == 0)
// {
// //rgb888转yuv
// U8_t *yuv_frame = (U8_t *)malloc(dealInfo->Width*dealInfo->Height*3/2 );
// if (yuv_frame == NULL)
// {
// printf("内存注册失败\n");
// if (GrayImage != NULL)
// {
// free(GrayImage);
// GrayImage = NULL;
// }
// if (RgbImage != NULL)
// {
// free(RgbImage);
// RgbImage = NULL;
// }
// return JZ_ERROR_SYSTEM_MODULE_CODE_INVALID_PARAMETER;
// }
// // 调用转换函数
// Stream_rgb888_to_yuv420p(RgbImage,dealInfo->Width,dealInfo->Height,yuv_frame);
// // 释放临时缓冲区,保留yuv_frame
// if (GrayImage != NULL)
// {
// free(GrayImage);
// GrayImage = NULL;
// }
// if (RgbImage != NULL)
// {
// free(RgbImage);
// RgbImage = NULL;
// }
// *raw_data = yuv_frame;
// *raw_data_len = dealInfo->Width*dealInfo->Height*3/2;
// }
// /*****************
// *
// *
// * 如果码流类型要求为rgb888
// *
// * **********************/
// if (rawType == 1)
// {
// // 释放临时缓冲区
// if (GrayImage != NULL)
// {
// free(GrayImage);
// GrayImage = NULL;
// }
// *raw_data = RgbImage;
// *raw_data_len = dealInfo->Width*dealInfo->Height*3;
// }
// return JZ_ERROR_SYSTEM_MODULE_CODE_SUCCESS;
// }
/******
*
* 单点矫正
... ... @@ -556,7 +288,7 @@ T_JZsdkReturnCode JZIrcLib_TPC(U16_t *ImageData,struct IRC_param *dealInfo)
// 应用两点校正公式
for (int i = 0; i < dealInfo->PixelNum; i++)
{
ImageData[i] = (int)(dealInfo->TPC_Slope[i] * ImageData[i] + dealInfo->TPC_Diff[i]);
ImageData[i] = (int)(dealInfo->TPC_Gain[i] * ImageData[i] + dealInfo->TPC_Offset[i]);
if (ImageData[i] < 0)
{
ImageData[i] = 0;
... ... @@ -590,12 +322,12 @@ static T_JZsdkReturnCode JZIrcLib_TPC_Slope_Calculation(struct IRC_param *dealIn
int allZeroSlope_flag = 1, allZeroDiff_flag = 1;
for (int i = 0; i < dealInfo->PixelNum; i++)
{
if (dealInfo->TPC_Slope[i] != 0)
if (dealInfo->TPC_Gain[i] != 0)
{
allZeroSlope_flag = 0;
}
if (dealInfo->TPC_Diff[i] != 0)
if (dealInfo->TPC_Offset[i] != 0)
{
allZeroDiff_flag = 0;
}
... ... @@ -619,17 +351,61 @@ static T_JZsdkReturnCode JZIrcLib_TPC_Slope_Calculation(struct IRC_param *dealIn
{
if (dealInfo->HighT_NineFrame_Avg[i] > dealInfo->LowT_NineFrame_Avg[i])
{
dealInfo->TPC_Slope[i] = (AvgSingleFrame_HighT - AvgSingleFrame_LowT) / (dealInfo->HighT_NineFrame_Avg[i] - dealInfo->LowT_NineFrame_Avg[i]);
dealInfo->TPC_Gain[i] = (AvgSingleFrame_HighT - AvgSingleFrame_LowT) / (dealInfo->HighT_NineFrame_Avg[i] - dealInfo->LowT_NineFrame_Avg[i]);
}
else
{
dealInfo->TPC_Slope[i] = 0;
dealInfo->TPC_Gain[i] = 0;
}
dealInfo->TPC_Offset[i] = AvgSingleFrame_LowT - dealInfo->TPC_Gain[i] * dealInfo->LowT_NineFrame_Avg[i];
}
}
}
/*******************
*
* 两点校正斜率计算
*
*
* *******************/
static T_JZsdkReturnCode JZIrcLib_TPC_Slope_Calculation2(struct IRC_param *dealInfo)
{
//判断是否存在结构体
if (dealInfo == NULL)
{
return JZ_ERROR_SYSTEM_MODULE_CODE_INVALID_PARAMETER;
}
//计算像元平均响应
double AvgSingleFrame_LowT = 0, AvgSingleFrame_HighT = 0;
for (int i = 0; i < dealInfo->PixelNum; i++)
{
AvgSingleFrame_LowT += dealInfo->LowT_NineFrame_Avg[i];
AvgSingleFrame_HighT += dealInfo->HighT_NineFrame_Avg[i];
}
AvgSingleFrame_HighT = AvgSingleFrame_HighT / dealInfo->PixelNum;
AvgSingleFrame_LowT = AvgSingleFrame_LowT / dealInfo->PixelNum;
for (int i = 0; i < dealInfo->PixelNum; i++)
{
if (dealInfo->HighT_NineFrame_Avg[i] != dealInfo->LowT_NineFrame_Avg[i])
{
dealInfo->TPC_Gain[i] = (AvgSingleFrame_HighT - AvgSingleFrame_LowT) / (dealInfo->HighT_NineFrame_Avg[i] - dealInfo->LowT_NineFrame_Avg[i]);
dealInfo->TPC_Offset[i] = AvgSingleFrame_LowT - dealInfo->TPC_Gain[i] * dealInfo->LowT_NineFrame_Avg[i];
}
else
{
dealInfo->TPC_Gain[i] = 1;
dealInfo->TPC_Offset[i] = 0;
}
}
return JZ_ERROR_SYSTEM_MODULE_CODE_SUCCESS;
}
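/* 数值示意(非仓库代码,仅帮助理解上面的两点校正公式):
* 设某像元 LowT_NineFrame_Avg[i] = 980,HighT_NineFrame_Avg[i] = 1520,
* 全图平均 AvgSingleFrame_LowT = 1000,AvgSingleFrame_HighT = 1500,
* 则 TPC_Gain[i]   = (1500 - 1000) / (1520 - 980) ≈ 0.926,
*    TPC_Offset[i] = 1000 - 0.926 * 980           ≈ 92.6,
* 校正后该像元的响应被拉到与全图平均一致的斜率与截距上。*/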
/*计算直方图均衡化并转换为8位灰度值
U16_t in_str 输入的数据
int in_str_len 输入的数据长度
... ... @@ -845,8 +621,11 @@ T_JZsdkReturnCode JZIrcLib_DataDeal(U16_t* U16_data, unsigned int U16_dataSize,
// 如果打开了两点校正
if (dealInfo->FrameCorrectMode == IRC_CORRCTION_TPC)
{
if (dealInfo->TPC_ResetFlag == JZ_FLAGCODE_ON)
{
//计算两点校正的斜率
JZIrcLib_TPC_Slope_Calculation(dealInfo);
JZIrcLib_TPC_Slope_Calculation2(dealInfo);
}
//对数据进行两点校正
ret = JZIrcLib_TPC(u16_CorrentData, dealInfo);
... ...
... ... @@ -32,7 +32,7 @@ typedef enum CameraParam
CAMERA_CORRCTION_MODE = 0x000004, //纠正模式
CAMERA_GAS_ENHANCEMENT_COLOR = 0x000005, //气体增强颜色
CAMERA_GAS_ENHANCEMENT_CORRCTION = 0x000006, //气体增强纠正
CAMERA_RESIZE= 0x000007, //相机缩放图像
}CameraParam;
typedef enum IRC_CorrectionMode
... ...
... ... @@ -10,7 +10,7 @@
#include "JZsdkLib.h"
// 将RGB888格式转换为YUV420P格
// 将RGB888格式转换为YUV420P格式(I420)
T_JZsdkReturnCode Stream_rgb888_to_yuv420p(U8_t *rgb_data, int width, int height, U8_t *yuv_data)
{
// YUV420P格式的大小:Y平面后面跟着U和V平面,它们的高度和宽度都是原图像的一半
... ... @@ -55,6 +55,80 @@ T_JZsdkReturnCode Stream_rgb888_to_yuv420p(U8_t *rgb_data, int width, int height
return JZ_ERROR_SYSTEM_MODULE_CODE_SUCCESS;
}
//rgb888 转yuv420sp(NV12)
T_JZsdkReturnCode Stream_rgb888_to_yuv420sp(U8_t *rgb_data, int width, int height, U8_t *yuv_data)
{
// YUV420SP(NV12)格式的大小:Y平面后面跟着一个交织的UV平面
int y_size = width * height;
int uv_size = (width / 2) * (height / 2) * 2; // UV平面交织存储,所以大小是U或V平面的两倍
// YUV420SP的各个平面
U8_t *y_plane = yuv_data;
U8_t *uv_plane = yuv_data + y_size;
// RGB888到YUV420SP的转换
for (int j = 0; j < height; j++) {
for (int i = 0; i < width; i++) {
// 计算Y分量的索引
int y_idx = j * width + i;
// 提取RGB分量
U8_t r = rgb_data[y_idx * 3];
U8_t g = rgb_data[y_idx * 3 + 1];
U8_t b = rgb_data[y_idx * 3 + 2];
// 将RGB转换为YUV
int y = ((66 * r + 129 * g + 25 * b + 128) >> 8);
int u = ((-38 * r - 74 * g + 112 * b + 128) >> 8) + 128;
int v = ((112 * r - 94 * g - 18 * b + 128) >> 8) + 128;
// 存储Y分量
y_plane[y_idx] = (U8_t)y;
// 对于U和V分量,每个2x2像素块只存储一次:取块内左上角像素的U/V作为该块的色度采样
if ((i & 1) == 0 && (j & 1) == 0) {
// 计算UV分量的索引(NV12中UV交织存储)
int uv_idx = ((j / 2) * (width / 2) + (i / 2)) * 2;
// 存储U分量
uv_plane[uv_idx] = (U8_t)u;
// 存储V分量
uv_plane[uv_idx + 1] = (U8_t)v;
}
}
}
// 注意:如果图像的宽度或高度不是偶数,上面的代码可能无法正确处理最后一行或一列。
// 在实际应用中,通常需要确保图像的尺寸是2的倍数,或者添加适当的边界处理代码。
return JZ_ERROR_SYSTEM_MODULE_CODE_SUCCESS;
}
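下面补充一个仅作示意的调用片段(假设性示例,非仓库代码),说明 NV12 输出缓冲区的大小要求:

#include <stdlib.h>

static void rgb_to_nv12_usage_example(const U8_t *rgb, int width, int height)
{
    // NV12 = Y 平面(width*height) + 交织 UV 平面(width*height/2)
    U8_t *nv12 = (U8_t *)malloc(width * height * 3 / 2);
    if (nv12 == NULL)
    {
        return;
    }
    Stream_rgb888_to_yuv420sp((U8_t *)rgb, width, height, nv12);
    // ... 将 nv12 送入编码器等后续处理 ...
    free(nv12);
}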
/*************************
*
* /rgb888图片画框
... ...
... ... @@ -26,6 +26,7 @@ extern "C" {
/* Exported functions --------------------------------------------------------*/
T_JZsdkReturnCode Stream_rgb888_to_yuv420p(U8_t *rgb_data, int width, int height, U8_t *yuv_data);
T_JZsdkReturnCode Stream_rgb888_to_yuv420sp(U8_t *rgb_data, int width, int height, U8_t *yuv_data);
T_JZsdkReturnCode Stream_rgb888_WriteRectangle(U8_t *rgb_data, int width, int height, int Point1_X, int Point1_Y, int Point2_X, int Point2_Y, int R_Color,int G_Color, int B_Color, int DrawWidth);
T_JZsdkReturnCode Stream_rgb888_WriteCross(U8_t *rgb_data, int width, int height, int PointX, int PointY, int R_Color,int G_Color, int B_Color, int DrawWidth, int DrawHeight);
... ...
... ... @@ -7,7 +7,7 @@
#include "JZsdkLib.h"
#include "./RTK_mmp_dec.h"
#if RTK_MPP_STATUS == VERSION_SWITCH_ON
#ifdef RTK_MPP_STATUS_ON
#include "rockchip/mpp_common.h" //这个.h能在mpp的源码中找到
#include "rockchip/mpp_packet.h"
#include "rockchip/rk_mpi.h"
... ... @@ -93,39 +93,39 @@ void dump_frame(MppFrame frame, FILE *out_fp)
}
void dump_frame_to_file(MppCtx ctx, MppApi *mpi, MppFrame frame, FILE *out_fp)
{
printf("decode_and_dump_to_file\n");
MPP_RET ret;
if (mpp_frame_get_info_change(frame)) {
printf("mpp_frame_get_info_change\n");
/**
* 第一次解码会到这个分支,需要为解码器设置缓冲区.
* 解码器缓冲区支持3种模式。参考【图像内存分配以及交互模式】Rockchip_Developer_Guide_MPP_CN.pdf
* 这里使用纯内部模式。
*/
ret = mpi->control(ctx, MPP_DEC_SET_INFO_CHANGE_READY, NULL);
if (ret) {
printf("mpp_frame_get_info_change mpi->control error"
"MPP_DEC_SET_INFO_CHANGE_READY %d\n", ret);
}
return;
}
RK_U32 err_info = mpp_frame_get_errinfo(frame);
RK_U32 discard = mpp_frame_get_discard(frame);
printf("err_info: %u discard: %u\n", err_info, discard);
if (err_info) {
return;
}
// save
dump_frame(frame, out_fp);
return;
}
// void dump_frame_to_file(MppCtx ctx, MppApi *mpi, MppFrame frame, FILE *out_fp)
// {
// printf("decode_and_dump_to_file\n");
// MPP_RET ret;
// if (mpp_frame_get_info_change(frame)) {
// printf("mpp_frame_get_info_change\n");
// /**
// * 第一次解码会到这个分支,需要为解码器设置缓冲区.
// * 解码器缓冲区支持3种模式。参考【图像内存分配以及交互模式】Rockchip_Developer_Guide_MPP_CN.pdf
// * 这里使用纯内部模式。
// */
// ret = mpi->control(ctx, MPP_DEC_SET_INFO_CHANGE_READY, NULL);
// if (ret) {
// printf("mpp_frame_get_info_change mpi->control error"
// "MPP_DEC_SET_INFO_CHANGE_READY %d\n", ret);
// }
// return;
// }
// RK_U32 err_info = mpp_frame_get_errinfo(frame);
// RK_U32 discard = mpp_frame_get_discard(frame);
// printf("err_info: %u discard: %u\n", err_info, discard);
// if (err_info) {
// return;
// }
// // save
// dump_frame(frame, out_fp);
// return;
// }
//rtk解码器初始化
... ... @@ -298,8 +298,10 @@ T_JZsdkReturnCode RTK_mmp_dec_Init(void **index, MppCodingType int_type, MppFram
}
//输出的码流格式
param = &out_format;
ret = DecConfigInput->mpi->control(DecConfigInput->ctx, MPP_DEC_SET_OUTPUT_FORMAT, param);
//param = &out_format;
//也不知道为什么只能设420sp没法设420p
MppFrameFormat format = out_format;
ret = DecConfigInput->mpi->control(DecConfigInput->ctx, MPP_DEC_SET_OUTPUT_FORMAT, &format);
if (ret == MPP_OK)
{
printf("输出格式正确\n");
... ...
... ... @@ -3,7 +3,7 @@
#include "version_choose.h"
#if RTK_MPP_STATUS == VERSION_SWITCH_ON
#ifdef RTK_MPP_STATUS_ON
#include "rockchip/rk_type.h"
#include "rockchip/mpp_frame.h"
... ...
... ... @@ -5,7 +5,7 @@
#include "JZsdkLib.h"
#include "./RTK_mmp_enc.h"
#if RTK_MPP_STATUS == VERSION_SWITCH_ON
#ifdef RTK_MPP_STATUS_ON
#include "rockchip/mpp_packet.h"
#include "rockchip/rk_mpi.h"
#include "rockchip/mpp_env.h" //这个.h能在mpp的源码中找到
... ...
... ... @@ -3,7 +3,7 @@
#include "version_choose.h"
#if RTK_MPP_STATUS == VERSION_SWITCH_ON
#ifdef RTK_MPP_STATUS_ON
#include "rockchip/rk_type.h"
#include "rockchip/mpp_frame.h"
... ...
... ... @@ -58,7 +58,7 @@ void **JZsdk_RtkMmpGetEncHandleAddr(int CameraIndex)
//昆腾相机设置下一帧为I帧
T_JZsdkReturnCode JZsdk_Kt_CamMMPenc_SetNextFrame_IDR(int CameraIndex)
{
#if RTK_MPP_STATUS == VERSION_SWITCH_ON
#ifdef RTK_MPP_STATUS_ON
if (CameraIndex == 0)
{
RTK_mmp_enc_SetNextFrame_IDR(JZsdk_RtkMmpGetEncHandleAddr(0));
... ...
#include <stdint.h>
#include <stdlib.h>
#include <stdio.h>
#include <string.h>
#include "BaseConfig.h"
#include "JZsdkLib.h"
#include "./RK_Rga.h"
#include "MediaProc/RgaProc/RgaParam.h"
#ifdef RTK_RGA_STATUS_ON
float get_bpp_from_format(int format)
{
float bpp = 0;
switch (format)
{
case RK_FORMAT_RGBA2BPP:
return 0.25;
case RK_FORMAT_Y4:
bpp = 0.5;
break;
case RK_FORMAT_BPP1:
case RK_FORMAT_BPP2:
case RK_FORMAT_BPP4:
case RK_FORMAT_BPP8:
case RK_FORMAT_YCbCr_400:
bpp = 1;
break;
case RK_FORMAT_YCbCr_420_SP:
case RK_FORMAT_YCbCr_420_P:
case RK_FORMAT_YCrCb_420_P:
case RK_FORMAT_YCrCb_420_SP:
bpp = 1.5;
break;
case RK_FORMAT_RGB_565:
case RK_FORMAT_RGBA_5551:
case RK_FORMAT_RGBA_4444:
case RK_FORMAT_BGR_565:
case RK_FORMAT_BGRA_5551:
case RK_FORMAT_BGRA_4444:
case RK_FORMAT_ARGB_5551:
case RK_FORMAT_ARGB_4444:
case RK_FORMAT_ABGR_5551:
case RK_FORMAT_ABGR_4444:
case RK_FORMAT_YCbCr_422_SP:
case RK_FORMAT_YCbCr_422_P:
case RK_FORMAT_YCrCb_422_SP:
case RK_FORMAT_YCrCb_422_P:
/* yuyv */
case RK_FORMAT_YVYU_422:
case RK_FORMAT_VYUY_422:
case RK_FORMAT_YUYV_422:
case RK_FORMAT_UYVY_422:
case RK_FORMAT_YVYU_420:
case RK_FORMAT_VYUY_420:
case RK_FORMAT_YUYV_420:
case RK_FORMAT_UYVY_420:
bpp = 2;
break;
/*RK encoder requires alignment of odd multiples of 256.*/
/*Here bpp=2 guarantee to read complete data.*/
case RK_FORMAT_YCbCr_420_SP_10B:
case RK_FORMAT_YCrCb_420_SP_10B:
bpp = 2;
break;
case RK_FORMAT_YCbCr_422_10b_SP:
case RK_FORMAT_YCrCb_422_10b_SP:
bpp = 2.5;
break;
case RK_FORMAT_BGR_888:
case RK_FORMAT_RGB_888:
bpp = 3;
break;
case RK_FORMAT_RGBA_8888:
case RK_FORMAT_RGBX_8888:
case RK_FORMAT_BGRA_8888:
case RK_FORMAT_BGRX_8888:
case RK_FORMAT_ARGB_8888:
case RK_FORMAT_XRGB_8888:
case RK_FORMAT_ABGR_8888:
case RK_FORMAT_XBGR_8888:
bpp = 4;
break;
default:
printf("Is unsupport format now, please fix \n");
return 0;
}
return bpp;
}
T_JZsdkReturnCode RK_Rga_ImageInit(struct RK_RgaImage **RgaImage, int width, int height, int format,
int Rect_x, int Rect_y, int Rect_w, int Rect_h)
{
if ((*RgaImage) != NULL)
{
return JZ_ERROR_SYSTEM_MODULE_CODE_INVALID_PARAMETER;
}
(*RgaImage) = (struct RK_RgaImage *)malloc(sizeof(struct RK_RgaImage));
if ((*RgaImage) == NULL)
{
return JZ_ERROR_SYSTEM_MODULE_CODE_FAILURE;
}
(*RgaImage)->width = width;
(*RgaImage)->height = height;
(*RgaImage)->format = format;
(*RgaImage)->buf_size = width * height * get_bpp_from_format(format);
(*RgaImage)->buf = (unsigned char *)malloc((*RgaImage)->buf_size * sizeof(unsigned char));
if ((*RgaImage)->buf == NULL)
{
free((*RgaImage)); /* avoid leaking the wrapper when the image buffer allocation fails */
(*RgaImage) = NULL;
return JZ_ERROR_SYSTEM_MODULE_CODE_FAILURE;
}
(*RgaImage)->handle_param.width = width;
(*RgaImage)->handle_param.height = height;
(*RgaImage)->handle_param.format = format;
(*RgaImage)->handle = importbuffer_virtualaddr((*RgaImage)->buf, &(*RgaImage)->handle_param);
if ((*RgaImage)->handle == 0)
{
printf("importbuffer_virtualaddr failed\n");
free((*RgaImage)->buf); /* release the image buffer and the wrapper before failing */
free((*RgaImage));
(*RgaImage) = NULL;
return JZ_ERROR_SYSTEM_MODULE_CODE_FAILURE;
}
memset(&(*RgaImage)->img, 0, sizeof((*RgaImage)->img));
(*RgaImage)->img = wrapbuffer_handle((*RgaImage)->handle, (*RgaImage)->width, (*RgaImage)->height, (*RgaImage)->format);
(*RgaImage)->rect.x = Rect_x;
(*RgaImage)->rect.y = Rect_y;
(*RgaImage)->rect.width = Rect_w;
(*RgaImage)->rect.height = Rect_h;
return JZ_ERROR_SYSTEM_MODULE_CODE_SUCCESS;
}
T_JZsdkReturnCode RK_Rga_ImageDeInit(struct RK_RgaImage **RgaImage)
{
/* guard against a NULL or already-released wrapper before dereferencing it */
if (RgaImage == NULL || (*RgaImage) == NULL)
{
return JZ_ERROR_SYSTEM_MODULE_CODE_INVALID_PARAMETER;
}
if ((*RgaImage)->handle != 0)
{
releasebuffer_handle((*RgaImage)->handle);
(*RgaImage)->handle = 0;
}
if ((*RgaImage)->buf != NULL)
{
free((*RgaImage)->buf);
(*RgaImage)->buf = NULL;
}
free((*RgaImage));
(*RgaImage) = NULL;
return JZ_ERROR_SYSTEM_MODULE_CODE_SUCCESS;
}
#endif
... ...
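Buffer sizing in RK_Rga_ImageInit follows directly from the table in get_bpp_from_format: buf_size = width * height * bpp. As an illustrative check (example numbers, not project configuration), a 1920x1080 NV12 frame uses bpp = 1.5, so it needs 1920 * 1080 * 1.5 = 3110400 bytes:

/* Illustrative sizing example for RK_FORMAT_YCbCr_420_SP (NV12) at 1080p. */
int nv12_1080p_size = (int)(1920 * 1080 * get_bpp_from_format(RK_FORMAT_YCbCr_420_SP)); /* 3110400 */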
#ifndef RK_RGA_H
#define RK_RGA_H
#include "JZsdk_Base/JZsdk_Code/JZsdk_Code.h"
#include "BaseConfig.h"
#ifdef RTK_RGA_STATUS_ON
#include "rga.h"
#include "RgaUtils.h"
#include "im2d.hpp"
#include "im2d_type.h"
typedef struct RK_RgaImage
{
int width; //图像的宽度
int height; //图像的高度
int format; //图像的格式
unsigned char *buf; //图像的缓冲区
int buf_size; //图像缓冲区的大小
rga_buffer_t img; //图像处理区
rga_buffer_handle_t handle; //图像处理区句柄
im_handle_param_t handle_param; //图像处理区参数
im_rect rect; //处理的区域
}RK_RgaImage;
T_JZsdkReturnCode RK_Rga_ImageInit(struct RK_RgaImage **RgaImage, int width, int height, int format,
int Rect_x, int Rect_y, int Rect_w, int Rect_h);
T_JZsdkReturnCode RK_Rga_ImageDeInit(struct RK_RgaImage **RgaImage);
#endif
#endif
\ No newline at end of file
... ...
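A hedged usage sketch of the new wrapper together with the librga im2d API. The helper name, the resolutions, and the error handling are illustrative assumptions; imresize() and IM_STATUS come from the im2d headers already included by RK_Rga.h:

#include <stdio.h>
#include "RK_Rga.h"

/* Sketch: wrap two CPU buffers, hardware-scale 1080p NV12 down to 720p, then release both. */
static T_JZsdkReturnCode demo_rga_scale_1080p_to_720p(void)
{
    struct RK_RgaImage *src = NULL;
    struct RK_RgaImage *dst = NULL;

    if (RK_Rga_ImageInit(&src, 1920, 1080, RK_FORMAT_YCbCr_420_SP, 0, 0, 1920, 1080) != JZ_ERROR_SYSTEM_MODULE_CODE_SUCCESS ||
        RK_Rga_ImageInit(&dst, 1280, 720, RK_FORMAT_YCbCr_420_SP, 0, 0, 1280, 720) != JZ_ERROR_SYSTEM_MODULE_CODE_SUCCESS)
    {
        RK_Rga_ImageDeInit(&src); /* DeInit tolerates a NULL wrapper after the guard added above */
        RK_Rga_ImageDeInit(&dst);
        return JZ_ERROR_SYSTEM_MODULE_CODE_FAILURE;
    }

    /* ... fill src->buf with a decoded NV12 frame here ... */

    IM_STATUS status = imresize(src->img, dst->img); /* RGA hardware scaling src -> dst */
    if (status != IM_STATUS_SUCCESS)
    {
        printf("imresize failed, status = %d\n", status);
    }

    RK_Rga_ImageDeInit(&src);
    RK_Rga_ImageDeInit(&dst);
    return JZ_ERROR_SYSTEM_MODULE_CODE_SUCCESS;
}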
#ifndef RGA_PARAM_H
#define RGA_PARAM_H
#include "JZsdk_Base/JZsdk_Code/JZsdk_Code.h"
#include "BaseConfig.h"
typedef enum RGA_PARAM_PROCESSING_TYPE
{
RGA_NO_PROCESSING = 0x0000, //无处理类型
RGA_CROP = 0x0001, //裁剪类型
RGA_RESIZE = 0x0002, //缩放类型
}RGA_PARAM_PROCESSING_TYPE;
#endif
\ No newline at end of file
... ...
... ... @@ -96,6 +96,8 @@ T_JZsdkReturnCode RealTimeMP2_start()
return ret;
}
JZSDK_LOG_INFO("MP2实时喊话初始化成功");
return JZ_ERROR_SYSTEM_MODULE_CODE_SUCCESS;
}
... ... @@ -130,6 +132,9 @@ T_JZsdkReturnCode RealTimeMP2_Close()
int amplifier = JZ_FLAGCODE_OFF;
Megaphone_Amplifier_param(JZ_FLAGCODE_SET, &amplifier);
//关闭音频库
AudioDeal_StopDeal();
return JZ_ERROR_SYSTEM_MODULE_CODE_SUCCESS;
}
... ... @@ -164,9 +169,14 @@ static T_JZsdkReturnCode RealTimeMP2_Play_init()
//解码并播放
static T_JZsdkReturnCode RealTimeMP2_PlayData(unsigned char *data,int len)
{
AudioDeal_Mp3DataInput(8000, data, len);
unsigned char playFile[len];
//清空
memset(playFile, 0, len);
memcpy(playFile, data, len);
printf("播放结束\n");
AudioDeal_Mp3DataInput(8000, playFile, len);
return JZ_ERROR_SYSTEM_MODULE_CODE_SUCCESS;
}
... ... @@ -196,7 +206,7 @@ static void *RealTimeMP2_LoopPlay(void *arg)
{
if(RealTimeMP2_PlayData_LoopHead!=RealTimeMP2_PlayData_LoopEnd)
{
printf("播放线程执行中\n");
//printf("播放线程执行中\n");
ret = RealTimeMP2_PlayData(RealTimeMP2_PlayData_Loop[RealTimeMP2_PlayData_LoopHead],128);
RealTimeMP2_PlayData_LoopHead++;
... ... @@ -274,7 +284,7 @@ static void *RealTimeMP2_WriteData_Task(void *arg)
{
int ret;
printf("MP2实施喊话,输入数据缓冲线程建立\n");
JZSDK_LOG_DEBUG("MP2实施喊话,输入数据缓冲线程建立");
RealTimeMP2_WriteFlag = JZ_FLAGCODE_ON;
while(RealTimeMP2_WriteFlag == JZ_FLAGCODE_ON)
... ... @@ -284,7 +294,7 @@ static void *RealTimeMP2_WriteData_Task(void *arg)
if(RealTimeMP2_WriteData_LoopHead!=RealTimeMP2_WriteData_LoopEnd)
{
printf("缓冲线程写入中\n");
//printf("缓冲线程写入中\n");
//将输入缓冲池的数据放入播放缓冲池
RealTimeMP2_Put_WriteData_In_PlayLoop();
... ...
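Both MP2 threads above exchange data through fixed arrays driven by LoopHead/LoopEnd indices. A generic single-producer/single-consumer sketch of that ring-buffer hand-off (names, sizes, and the drop-on-full policy are illustrative, not the module's actual globals):

#include <string.h>

#define DEMO_RING_SLOTS 64
#define DEMO_SLOT_BYTES 128

static unsigned char demo_ring[DEMO_RING_SLOTS][DEMO_SLOT_BYTES];
static volatile int demo_head = 0; /* next slot the play thread consumes */
static volatile int demo_end = 0;  /* next slot the write thread fills */

/* Producer side: copy one frame in, or report full. */
static int demo_ring_put(const unsigned char *data, int len)
{
    int next = (demo_end + 1) % DEMO_RING_SLOTS;
    if (next == demo_head)
    {
        return -1; /* full: caller decides whether to drop or retry */
    }
    memcpy(demo_ring[demo_end], data, len < DEMO_SLOT_BYTES ? len : DEMO_SLOT_BYTES);
    demo_end = next;
    return 0;
}

/* Consumer side: copy one frame out, or report empty. */
static int demo_ring_get(unsigned char *out)
{
    if (demo_head == demo_end)
    {
        return -1; /* empty */
    }
    memcpy(out, demo_ring[demo_head], DEMO_SLOT_BYTES);
    demo_head = (demo_head + 1) % DEMO_RING_SLOTS;
    return 0;
}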
... ... @@ -173,11 +173,15 @@ static T_JZsdkReturnCode Opus_RealTimeVoice_PlayData(unsigned char *data,int len
//转换
opus_int16 TempPcm[WIDGET_SPEAKER_AUDIO_OPUS_MAX_FRAME_SIZE * WIDGET_SPEAKER_AUDIO_OPUS_CHANNELS];
for (i = 0; i < WIDGET_SPEAKER_AUDIO_OPUS_CHANNELS * frame_size; i++)
{
TempPcm[i] = out[i] & 0xFF | (out[i] >> 8) << 8;
TempPcm[i] = PcmNoiseReduction(TempPcm[i]);
#if SPECIAL_VERSION == SPECIAL_DAOTONG
TempPcm[i] = PcmNoiseReduction(TempPcm[i]);
#endif
pcm_bytes[2 * i] = TempPcm[i] & 0xFF;
pcm_bytes[2 * i + 1] = (TempPcm[i] >> 8) & 0xFF;
}
... ... @@ -543,11 +547,15 @@ T_JZsdkReturnCode Opus_RealTimeVoice_Close()
JZSDK_LOG_DEBUG("解码器释放完毕");
//产生标志位
Megaphone_MegDataGenFlag(JZ_FLAGCODE_SET, JZ_FLAGCODE_OFF);
Megaphone_MegDataGenFinshFlag(JZ_FLAGCODE_SET, JZ_FLAGCODE_OFF);
int amplifier = JZ_FLAGCODE_OFF;
Megaphone_Amplifier_param(JZ_FLAGCODE_SET, &amplifier);
//关闭音频库
AudioDeal_StopDeal();
return JZ_ERROR_SYSTEM_MODULE_CODE_SUCCESS;
}
... ...
... ... @@ -230,6 +230,7 @@ static T_JZsdkReturnCode UI_control_WidgetSet(int type, int value)
case JZSDK_WIDGET_SPC_AUTO_TIME:
case JZSDK_WIDGET_TPC_MODE:
case JZSDK_WIDGET_BAD_PIXEL_CORRECT_MODE:
case JZSDK_WIDGET_ZOOM_SIZE:
UI_control_WidgetArraySet(index, value);
break;
... ... @@ -1329,6 +1330,24 @@ T_JZsdkReturnCode JZsdk_Psdk_UI_io_Set_PixelColorMode(int wheather_ChangeWidget,
return JZ_ERROR_SYSTEM_MODULE_CODE_SUCCESS;
}
// psdk 变焦
T_JZsdkReturnCode JZsdk_Psdk_UI_io_Set_ZoomSize(int wheather_ChangeWidget, int wheather_control, int value)
{
//如果修改控件
if (wheather_ChangeWidget == JZ_FLAGCODE_ON)
{
UI_control_WidgetSet(JZSDK_WIDGET_ZOOM_SIZE, value);
}
//如果进行控制
if (wheather_control == JZ_FLAGCODE_ON)
{
UIcontrol_Set_ZoomSize(DEVICE_PSDK, value);
}
return JZ_ERROR_SYSTEM_MODULE_CODE_SUCCESS;
}
// psdk 气体增强颜色
T_JZsdkReturnCode JZsdk_Psdk_UI_io_Set_GasEnhancementColor(int wheather_ChangeWidget, int wheather_control, int value)
{
... ...
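The new zoom setter above follows the existing two-flag convention: the first flag mirrors the value into the widget list, the second forwards it to device control. A hypothetical call (the zoom value 2 is only an example):

/* Hypothetical example: record zoom level 2 in the widget state and apply it to the camera. */
JZsdk_Psdk_UI_io_Set_ZoomSize(JZ_FLAGCODE_ON, JZ_FLAGCODE_ON, 2);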
... ... @@ -147,6 +147,8 @@ T_JZsdkReturnCode JZsdk_Psdk_UI_io_Set_ShutterSwitch(int wheather_ChangeWidget,
T_JZsdkReturnCode JZsdk_Psdk_UI_io_Set_PseudoColor(int wheather_ChangeWidget, int wheather_control, int value);
// psdk 像素色彩模式
T_JZsdkReturnCode JZsdk_Psdk_UI_io_Set_PixelColorMode(int wheather_ChangeWidget, int wheather_control, int value);
T_JZsdkReturnCode JZsdk_Psdk_UI_io_Set_ZoomSize(int wheather_ChangeWidget, int wheather_control, int value);
T_JZsdkReturnCode JZsdk_Psdk_UI_io_Camera_BadPixelSwitch(int wheather_ChangeWidget, int wheather_control, int value);
T_JZsdkReturnCode JZsdk_Psdk_UI_io_Camera_SPC_ResetSwitch(int wheather_ChangeWidget, int wheather_control, int value);
... ...
... ... @@ -1957,6 +1957,30 @@ T_JZsdkReturnCode UIcontrol_Set_PixelColorMode(int DeviceName, int value)
return JZ_ERROR_SYSTEM_MODULE_CODE_SUCCESS;
}
/*********************
*
* 设置变焦尺寸
*
*
* **************************/
T_JZsdkReturnCode UIcontrol_Set_ZoomSize(int DeviceName, int value)
{
T_JZsdkReturnCode ret = JZ_ERROR_SYSTEM_MODULE_CODE_SUCCESS;
#if MEDIA_PROC_CONFIG_STATUS == VERSION_SWITCH_ON
//1、设置变焦值
ret = Camera_param(JZ_FLAGCODE_SET, CAMERA_RESIZE, &value);
if (ret != JZ_ERROR_SYSTEM_MODULE_CODE_SUCCESS)
{
return ret;
}
//无须另外通知其他设备变化
#endif
return ret;
}
/*********
*
* 气体增强的颜色
... ...
... ... @@ -109,6 +109,8 @@ T_JZsdkReturnCode UIcontrol_Camera_BadPixelSwitch(int DeviceName, int value);
T_JZsdkReturnCode UIcontrol_Camera_SPC_ResetSwitch(int DeviceName, int value);
T_JZsdkReturnCode UIcontrol_FreezeVideo(int DeviceName, int value);
T_JZsdkReturnCode UIcontrol_Set_PseudoColor(int DeviceName, int value);
T_JZsdkReturnCode UIcontrol_Set_ZoomSize(int DeviceName, int value);
T_JZsdkReturnCode UIcontrol_Set_PixelColorMode(int DeviceName, int value);
T_JZsdkReturnCode UIcontrol_CameraRegionbox(int DeviceName, int value1, int value2);
T_JZsdkReturnCode UIcontrol_CameraCorrectionMode(int DeviceName, int value);
... ...
... ... @@ -69,8 +69,8 @@ static int WidgetMgMT_Control_WorkFuntion(int Inscode, int value)
JZsdk_Psdk_UI_io_Set_PixelColorMode(JZ_FLAGCODE_ON, JZ_FLAGCODE_ON, value);
break;
case JZSDK_WIDGET_FOCAL_LENGTH:
printf("未设置");
case JZSDK_WIDGET_ZOOM_SIZE:
JZsdk_Psdk_UI_io_Set_ZoomSize(JZ_FLAGCODE_ON, JZ_FLAGCODE_ON, value);
break;
case JZSDK_WIDGET_SPC_RESET:
... ...
... ... @@ -66,7 +66,7 @@ static T_JZsdkReturnCode JZSDK_WidgetMgMT_DJI_GetWidgetIndex(int tpye, int *inde
}
break;
case JZSDK_WIDGET_FOCAL_LENGTH:
case JZSDK_WIDGET_ZOOM_SIZE:
{
ReturnIndex1 = 5;
}
... ...
... ... @@ -35,7 +35,7 @@
#include "gimbal_emu/test_payload_gimbal_emu.h"
#include "Psdk_UI_io.h"
#include "BaseConfig.h"
/* Private constants ---------------------------------------------------------*/
#define PAYLOAD_CAMERA_EMU_TASK_FREQ (100)
... ... @@ -50,8 +50,8 @@
#define ZOOM_DIGITAL_BASE_FACTOR (1.0)
#define ZOOM_DIGITAL_STEP_FACTOR (0.1)
#define ZOOM_DIGITAL_MAX_FACTOR (6.0)
#define FOCUS_MAX_RINGVALUE (1000)
#define FOCUS_MID_RINGVALUE (500)
#define FOCUS_MAX_RINGVALUE (3) //0~3
#define FOCUS_MID_RINGVALUE (0)
#define IMAGE_SENSOR_X_SIZE (88.0f) // unit: 0.1mm
#define IMAGE_SENSOR_Y_SIZE (66.0f) // unit: 0.1mm
#define CENTER_POINT_IN_SCREEN_X_VALUE (0.5f)
... ... @@ -653,9 +653,13 @@ static T_DjiReturnCode GetFocusAssistantSettings(T_DjiCameraFocusAssistantSettin
return DJI_ERROR_SYSTEM_MODULE_CODE_SUCCESS;
}
//设置对焦值
static T_DjiReturnCode SetFocusRingValue(uint32_t value)
{
USER_LOG_INFO("set focus ring value:%d", value);
//有x1~x16
UIcontrol_Set_ZoomSize(DEVICE_PSDK, value);
s_cameraFocusRingValue = value;
return DJI_ERROR_SYSTEM_MODULE_CODE_SUCCESS;
... ...
... ... @@ -69,6 +69,8 @@ extern int height_Volume_lock;
extern int SpeakerWidgetUseLock;
extern char Jz_SerialNumber[128]; //设备序列号
extern float g_height_value;
extern int Widget_RealTimeOpusFlag; //用于标志ui里的实时语音开关
/* Private types -------------------------------------------------------------*/
... ... @@ -253,7 +255,10 @@ T_DjiReturnCode DjiTest_WidgetStartService(void)
if (JZsdk_check_file_exists("/root/ShakedownTest") == JZ_ERROR_SYSTEM_MODULE_CODE_SUCCESS)
{
JZSDK_LOG_INFO("开启调试模式");
JZsdk_delete_file("/root/ShakedownTest");
if(JZsdk_delete_file("/root/ShakedownTest") != JZ_ERROR_SYSTEM_MODULE_CODE_SUCCESS)
{
JZSDK_LOG_ERROR("关闭调试模式失败");
}
memset(curFileDirPath, 0, WIDGET_DIR_PATH_LEN_MAX);
snprintf(curFileDirPath, WIDGET_DIR_PATH_LEN_MAX, "%s/debug",WIDGET_FILE_DIR);
}
... ... @@ -556,12 +561,15 @@ static T_JZsdkReturnCode Megphone_Widget(unsigned int index, unsigned int value)
switch(value){
case 0:
Widget_RealTimeOpusFlag = JZ_FLAGCODE_OFF;
Opus_PlayMode = 0;
break;
case 1:
Widget_RealTimeOpusFlag = JZ_FLAGCODE_ON;
Opus_PlayMode = 1;
break;
default:
Widget_RealTimeOpusFlag = JZ_FLAGCODE_OFF;
Opus_PlayMode = 0;
break;
}
... ... @@ -806,20 +814,21 @@ static T_JZsdkReturnCode Irc_Widget(unsigned int index, unsigned int value)
break;
}
//该功能移动到media管理中
case 5://画面放大
{
if (value == 1)
{
EnlargeScreen+=1;
if (EnlargeScreen >= 5)
{
EnlargeScreen = JZ_FLAGCODE_OFF;
}
// if (value == 1)
// {
// EnlargeScreen+=1;
// if (EnlargeScreen >= 4)
// {
// EnlargeScreen = JZ_FLAGCODE_OFF;
// }
JZSDK_LOG_DEBUG("画面放大%d",EnlargeScreen);
// JZSDK_LOG_DEBUG("画面变焦%d",EnlargeScreen);
//JZSDK_WidgetMgMT_ConrtrolInputTask(JZSDK_WIDGET_FOCAL_LENGTH, value);
}
// JZSDK_WidgetMgMT_ConrtrolInputTask(JZSDK_WIDGET_ZOOM_SIZE, value);
// }
break;
}
... ...