Audio & Video: Converting a Raw H.264 Stream to YUV
Table of Contents
- 1. Introduction
- 2. Step-by-Step Breakdown
- 2.1 Initialize the decoder
- 2.2 Release decoder resources
- 2.3 Assemble a complete frame
- 2.4 Decode H.264 to YUV
- 2.5 Convert an H.264 elementary-stream file to YUV
- 2.6 Decode H.264 to YUV after RTP depacketization
- 3. Complete Code
1. Introduction
This article covers converting raw H.264 (Annex B) data into YUV, in two scenarios. In the first, an H.264 file is read and all of its data is handed to the decoder at once to produce YUV. In the second, RTP packets carrying H.264 are received from the network; each packet must first be depacketized to extract the H.264 payload, and only once a complete frame has been assembled is it handed to the decoder. The resulting YUV file plays back normally with ffmpeg. RTP depacketization was analyzed in the previous article and is not repeated here.
2. Step-by-Step Breakdown
2.1 Initialize the decoder
AVCodec* codec = NULL;
AVCodecContext* pAVCodecCtx = NULL;
AVFrame* frame = NULL;
AVCodecParserContext* avParserContext = NULL;

int initDecode()
{
    av_register_all();
    codec = avcodec_find_decoder(AV_CODEC_ID_H264);
    if (NULL == codec)
    {
        qDebug("Codec not found\n");
        return -1;
    }
    pAVCodecCtx = avcodec_alloc_context3(codec);
    if (NULL == pAVCodecCtx)
    {
        qDebug("Could not allocate video codec context\n");
        return -2;
    }
    pAVCodecCtx->pix_fmt = AV_PIX_FMT_YUV420P;
    pAVCodecCtx->codec_id = AV_CODEC_ID_H264;
    pAVCodecCtx->codec_type = AVMEDIA_TYPE_VIDEO;
    if (avcodec_open2(pAVCodecCtx, codec, NULL) < 0)
    {
        qDebug("Could not open codec\n");
        return -3;
    }
    avParserContext = av_parser_init(AV_CODEC_ID_H264);
    if (NULL == avParserContext)
    {
        qDebug("Could not init avParserContext\n");
        return -4;
    }
    frame = av_frame_alloc();
    if (NULL == frame)
    {
        qDebug("Could not allocate video frame\n");
        return -5;
    }
    return 0;
}
2.2 Release decoder resources
int unInitDecode()
{
    if (frame)
    {
        av_frame_free(&frame);
        frame = NULL;
    }
    if (avParserContext)
    {
        av_parser_close(avParserContext);
        avParserContext = NULL;
    }
    if (pAVCodecCtx)
    {
        avcodec_close(pAVCodecCtx);
        avcodec_free_context(&pAVCodecCtx);
        pAVCodecCtx = NULL;
    }
    return 0;
}
2.3 Assemble a complete frame
// IDR frame buffer: how large does it need to be -- at most width x height?
// 0x67 (SPS)  0x68 (PPS)  0x66 (SEI)  0x65 (IDR slice)  0x61/0x41 (P/B slice)
//
// Search srcStr for the start code {00,00,00,01} and read the NALU type from srcStr[4] & 0x1f.
// Types 7/8/6/5 are appended to the IDR frame buffer; when a type-1 NALU is found, the IDR buffer
// is handed to the decoder, and the type-1 data (P/B slice) is then decoded as well.
int getFullFrame(unsigned char* srcStr, int srcStrLen, unsigned char* findStr, int findStrLen)
{
    int srcIndex = 0;
    int lastNaluPos = -1;
    int lastNaluType = -1;
    int lastNaluLen = 0;
    int naluCount = 0;
    int maxLen = 0;

    if (!srcStr || !findStr || !findStrLen || !srcStrLen)
        return -1;
    qDebug("srcLen: %d findstrlen: %d", srcStrLen, findStrLen);

    while (srcStrLen)
    {
        if (memcmp(srcStr + srcIndex, findStr, findStrLen) == 0)
        {
            if (lastNaluPos != -1)
            {
                // A start code was found before, so the previous NALU is complete
                // and its length is srcIndex - lastNaluPos.
                lastNaluLen = srcIndex - lastNaluPos;
                if (lastNaluType == 1)
                {
                    if (fullFrameLen)
                    {
                        // A complete IDR frame has been assembled; hand it to the decoder.
                        qDebug("get a full IDR frame len: %d", fullFrameLen);
                        toDecodeH264(fullFrame, fullFrameLen, 0, 0, DEC_OUTPUT_BGRA);
                        fullFrameLen = 0;
                    }
                    // The previous NALU is a P/B slice; hand it to the decoder as well.
                    memcpy(fullFrame + fullFrameLen, &srcStr[lastNaluPos], lastNaluLen);
                    fullFrameLen += lastNaluLen;
                    qDebug("copy %d to fullFrame, fullFrameLen = %d", lastNaluLen, fullFrameLen);
                    qDebug("get a full B/P frame len: %d", lastNaluLen);
                    toDecodeH264(fullFrame, fullFrameLen, 0, 0, DEC_OUTPUT_BGRA);
                    fullFrameLen = 0;
                }
                else
                {
                    memcpy(fullFrame + fullFrameLen, &srcStr[lastNaluPos], lastNaluLen);
                    fullFrameLen += lastNaluLen;
                    qDebug("copy %d to fullFrame, fullFrameLen = %d", lastNaluLen, fullFrameLen);
                }
            }
            lastNaluType = srcStr[srcIndex + 4] & 0x1f;
            lastNaluPos = srcIndex;
        }
        srcStrLen--;
        srcIndex++;

        // Reached the last byte: there is no further NALU, so flush the final one.
        if (!srcStrLen)
        {
            if (lastNaluPos != -1)
            {
                lastNaluLen = srcIndex - lastNaluPos;
                if (lastNaluType == 1)
                {
                    if (fullFrameLen)
                    {
                        qDebug("get a full IDR frame len: %d", fullFrameLen);
                        toDecodeH264(fullFrame, fullFrameLen, 0, 0, DEC_OUTPUT_BGRA);
                        fullFrameLen = 0;
                    }
                    memcpy(fullFrame + fullFrameLen, &srcStr[lastNaluPos], lastNaluLen);
                    fullFrameLen += lastNaluLen;
                    qDebug("copy %d to fullFrame, fullFrameLen = %d", lastNaluLen, fullFrameLen);
                    qDebug("get a full B/P frame len: %d", lastNaluLen);
                    toDecodeH264(fullFrame, fullFrameLen, 0, 0, DEC_OUTPUT_BGRA);
                    fullFrameLen = 0;
                }
                else
                {
                    memcpy(fullFrame + fullFrameLen, &srcStr[lastNaluPos], lastNaluLen);
                    fullFrameLen += lastNaluLen;
                    qDebug("copy %d to fullFrame, fullFrameLen = %d", lastNaluLen, fullFrameLen);
                }
            }
        }
    }
    return 0;
}
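One caveat about the scanner above: it only matches the 4-byte start code 00 00 00 01, while Annex B streams may also use the 3-byte form 00 00 01, and it reads the type byte at srcIndex + 4 without checking how many bytes remain. As a rough sketch (the helper name and struct are hypothetical, not part of the code above), a more tolerant scanner could look like this:
// Sketch only: find the next Annex B start code (3- or 4-byte) and report the NALU type.
typedef struct {
    int offset;      // offset of the start code in buf
    int startLen;    // 3 or 4
    int naluType;    // nal_unit_type (low 5 bits), -1 if no header byte follows
} NaluStart;

static int findNextStartCode(const unsigned char* buf, int len, int from, NaluStart* out)
{
    for (int i = from; i + 3 <= len; i++)
    {
        if (buf[i] == 0x00 && buf[i + 1] == 0x00)
        {
            int startLen = 0;
            if (buf[i + 2] == 0x01)
                startLen = 3;
            else if (i + 4 <= len && buf[i + 2] == 0x00 && buf[i + 3] == 0x01)
                startLen = 4;
            if (startLen)
            {
                out->offset = i;
                out->startLen = startLen;
                out->naluType = (i + startLen < len) ? (buf[i + startLen] & 0x1F) : -1;
                return 0;   // found a start code
            }
        }
    }
    return -1;              // no start code in the remaining data
}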
2.4 Decode H.264 to YUV
int toDecodeH264(unsigned char* inbuf, int inbufLen, int videoWidth, int videoHeight, DecodeOutputImageType outType)
{
    AVPacket avpkt;
    int parse_len = 0;
    int frame_count = 0;
    int got_frame;
    int total_parse_len = 0;

    memset(&avpkt, 0, sizeof(AVPacket));
    av_init_packet(&avpkt);

    while (inbufLen)
    {
        memset(&avpkt, 0, sizeof(AVPacket));
        av_init_packet(&avpkt);

        // parse_len is the number of input bytes consumed by the parser.
        parse_len = av_parser_parse2(avParserContext, pAVCodecCtx, &avpkt.data, &avpkt.size,
                                     inbuf, inbufLen, AV_NOPTS_VALUE, AV_NOPTS_VALUE, AV_NOPTS_VALUE);
        total_parse_len += parse_len;
        inbuf += parse_len;
        inbufLen -= parse_len;
        qDebug("inbufLen=%d, parse_len=%d\n", inbufLen, parse_len);

        if (avpkt.size != 0)
        {
            int decode_len = avcodec_decode_video2(pAVCodecCtx, frame, &got_frame, &avpkt);
            if (decode_len < 0)
                qDebug("Error while decoding frame %d\n", frame_count);
            if (got_frame)
            {
                qDebug("decode success: frame %dx%d, codec %dx%d, coded %dx%d\n",
                       frame->width, frame->height,
                       pAVCodecCtx->width, pAVCodecCtx->height,
                       pAVCodecCtx->coded_width, pAVCodecCtx->coded_height);
                int width = frame->width;
                int height = frame->height;
                if (width != 720 || height != 480)
                {
                    // Only 720x480 frames are handled here.
                    continue;
                }
                if (outType == DEC_OUTPUT_YUV420)
                {
                    unsigned char* yuv_buf = (unsigned char*)malloc(width * height * 3 / 2);
                    if (!yuv_buf)
                    {
                        qDebug("malloc yuv buf failed");
                        continue;
                    }
                    memset(yuv_buf, 0, width * height * 3 / 2);
                    // Y plane
                    for (int i = 0; i < height; i++)
                    {
                        memcpy(yuv_buf + i * width, frame->data[0] + i * frame->linesize[0], width);
                    }
                    // U plane
                    BYTE* pUOut = yuv_buf + height * width;
                    for (int i = 0; i < height / 2; i++)
                    {
                        memcpy(pUOut + i * width / 2, frame->data[1] + i * frame->linesize[1], width / 2);
                    }
                    // V plane
                    BYTE* pVOut = pUOut + height * width / 4;
                    for (int i = 0; i < height / 2; i++)
                    {
                        memcpy(pVOut + i * width / 2, frame->data[2] + i * frame->linesize[2], width / 2);
                    }
#ifdef TEST_PARESERTP
                    // Write the decoded data to a file so the result can be verified.
                    if (decodeFile)
                    {
                        qDebug("write to decode file");
                        fwrite(yuv_buf, sizeof(unsigned char), width * height * 3 / 2, decodeFile);
                    }
#endif
                    free(yuv_buf);
                }
                else if (outType == DEC_OUTPUT_BGRA)
                {
                    unsigned char* brgabuf = (unsigned char*)malloc(width * height * 4);
                    if (!brgabuf)
                    {
                        qDebug("malloc bgra buf failed");
                        continue;
                    }
                    memset(brgabuf, 0, width * height * 4);
                    myYUVtoRGBA32(frame, width, height, brgabuf, false);
                    // Write the decoded data to a file so the result can be verified.
                    if (decodeFile)
                    {
                        qDebug("write to decode file");
                        fwrite(brgabuf, sizeof(unsigned char), width * height * 4, decodeFile);
                    }
                    free(brgabuf);
                }
                frame_count++;
            }
            else
            {
                qDebug("decode fail\n");
            }
            av_packet_unref(&avpkt);
        }
    }
    return total_parse_len;
}
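Note that avcodec_decode_video2(), av_init_packet() and av_register_all() are deprecated in FFmpeg 4.x and removed in FFmpeg 5.x; the code above targets the older API. As a minimal sketch under the assumption of FFmpeg 4.x or newer (same parser feeding packets, hypothetical helper name), the decode step would instead go through the send/receive API:
// Sketch only: decode one parsed AVPacket with the FFmpeg 4.x+ send/receive API.
static int decodeParsedPacket(AVCodecContext* ctx, AVPacket* pkt, AVFrame* frm)
{
    int ret = avcodec_send_packet(ctx, pkt);      // feed one packet produced by av_parser_parse2()
    if (ret < 0)
        return ret;
    for (;;)
    {
        ret = avcodec_receive_frame(ctx, frm);    // pull decoded frames out of the codec
        if (ret == AVERROR(EAGAIN) || ret == AVERROR_EOF)
            return 0;                             // decoder needs more input / is fully drained
        if (ret < 0)
            return ret;                           // a real decoding error
        // frm now holds a decoded picture; copy frm->data[0..2] row by row using
        // frm->linesize[], exactly as the YUV420 branch above does.
    }
}
Only the decode call changes; the per-plane copy and the file writes stay the same.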
2.5 Convert an H.264 elementary-stream file to YUV
// Read the raw H.264 file and feed it straight to FFmpeg, which parses it into frames and decodes them.
void testH264File2YuvFile()
{
    // A 4 MB buffer is too large for the default thread stack, so keep it static.
    static unsigned char h264Data[4096000] = { 0 };
    size_t dataLen = 0;

    FILE* h264File = fopen("G:\\receive.h264", "rb");
    if (!h264File)
        return;

    initDecode();
    initTest();

    // fread(buf, 1, n, fp) returns the number of bytes read.
    dataLen = fread(h264Data, 1, sizeof(h264Data), h264File);
    while (dataLen)
    {
        toDecodeH264(h264Data, (int)dataLen, 0, 0, DEC_OUTPUT_YUV420);
        dataLen = fread(h264Data, 1, sizeof(h264Data), h264File);
    }
    fclose(h264File);

    unInitDecode();
    uninitTest();
}
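The test routine above keeps the whole file in a fixed 4 MB buffer and reads it in chunks. As a sketch of an alternative (readWholeFile() is a hypothetical helper, not part of the article's code), the file can be sized with fseek/ftell and read into a heap buffer in one go:
// Sketch only: read an entire file into a malloc'd buffer and return its length.
static unsigned char* readWholeFile(const char* path, long* outLen)
{
    FILE* fp = fopen(path, "rb");
    unsigned char* buf = NULL;
    long len = 0;

    *outLen = 0;
    if (!fp)
        return NULL;
    fseek(fp, 0, SEEK_END);
    len = ftell(fp);
    fseek(fp, 0, SEEK_SET);
    if (len > 0)
    {
        buf = (unsigned char*)malloc((size_t)len);
        if (buf && fread(buf, 1, (size_t)len, fp) != (size_t)len)
        {
            free(buf);
            buf = NULL;
        }
    }
    fclose(fp);
    if (buf)
        *outLen = len;
    return buf;
}

// Usage: long n = 0;
//        unsigned char* p = readWholeFile("G:\\receive.h264", &n);
//        if (p) { toDecodeH264(p, (int)n, 0, 0, DEC_OUTPUT_YUV420); free(p); }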
2.6 Decode H.264 to YUV after RTP depacketization
// When receiving packets from the network, a complete frame must be assembled before the data is handed to FFmpeg for decoding.
// RTP receive callback
void on_rx_rtp(void* pkt, pj_ssize_t size) // an RTP video packet has arrived
{
    BYTE tmpbuff[2048] = { 0 };
    unsigned int tmpbuffLen = 0;
    unsigned char findcode[] = { 0x00, 0x00, 0x00, 0x01 };

    DC_PRINT_DEBUG << "to rtp_unpackage start";
    rtp_unpackage((char*)pkt, size, tmpbuff, &tmpbuffLen);
    DC_PRINT_DEBUG << "to rtp_unpackage end";

    // IDR frame buffer: how large does it need to be -- at most width x height?
    // 0x67 (SPS)  0x68 (PPS)  0x66 (SEI)  0x65 (IDR slice)  0x61/0x41 (P/B slice)
    // If this packet is part of an FU fragmented NALU, wait until all fragments have been received,
    // then search fuData for {00,00,00,01} and read the NALU type from fuData[4] & 0x1f.
    // If it is not a fragmented packet, search tmpbuff for {00,00,00,01} and read the NALU type
    // from tmpbuff[4] & 0x1f.
    // Types 7/8/6/5 are appended to the IDR frame buffer; when a type-1 NALU arrives, the IDR
    // buffer is handed to the decoder, and the type-1 data (P/B slice) is decoded as well.
    if (isFU && !fuReviceEnd) // an FU fragment has arrived but the fragment series is not complete yet
    {
        // Do not decode until all FU fragments have been received.
        DC_PRINT_DEBUG << "wait fu data receive end";
        return;
    }
    if (fuReviceEnd)
    {
        DC_PRINT_DEBUG << "fu data receive end, total len is: " << fuDataLen;
        getFullFrame(fuData, fuDataLen, findcode, sizeof(findcode));
        isFU = 0;
        fuReviceEnd = 0;
        fuDataLen = 0;
    }
    else
    {
        DC_PRINT_DEBUG << "tmpbuff tmpbuffLen is: " << tmpbuffLen;
        getFullFrame(tmpbuff, tmpbuffLen, findcode, sizeof(findcode));
    }
}
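rtp_unpackage() and the FU flags come from the previous article, so only their use is shown here. For reference, this is a rough sketch of how the FU-A start/end bits (RFC 6184, NAL unit type 28) could drive the isFU / fuReviceEnd / fuData state used above; handleFuA() is a hypothetical helper, not the actual depacketizer, and the globals are the ones declared in the complete code below.
// Sketch only: append one FU-A fragment (RTP payload) to the fuData buffer.
static void handleFuA(const unsigned char* payload, unsigned int payloadLen)
{
    if (payloadLen < 2)
        return;
    unsigned char fuIndicator = payload[0];
    unsigned char fuHeader = payload[1];
    if ((fuIndicator & 0x1F) != 28)                  // 28 = FU-A
        return;
    if (fuHeader & 0x80)                             // S bit: first fragment of the NALU
    {
        isFU = 1;
        fuDataLen = 0;
        fuData[fuDataLen++] = 0x00;                  // re-insert the Annex B start code
        fuData[fuDataLen++] = 0x00;
        fuData[fuDataLen++] = 0x00;
        fuData[fuDataLen++] = 0x01;
        // Rebuild the original NAL header: F/NRI bits from the indicator, type from the FU header.
        fuData[fuDataLen++] = (unsigned char)((fuIndicator & 0xE0) | (fuHeader & 0x1F));
    }
    if (fuDataLen + (payloadLen - 2) > sizeof(fuData))
        return;                                      // guard against overflowing MAXFREAMLEN
    memcpy(fuData + fuDataLen, payload + 2, payloadLen - 2); // strip FU indicator + FU header
    fuDataLen += payloadLen - 2;
    if (fuHeader & 0x40)                             // E bit: last fragment of the NALU
        fuReviceEnd = 1;
}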
3. Complete Code
The resulting YUV file can be played with ffplay: ffplay -f rawvideo -video_size 720x480 G:\decode_720x480.yuv (adding -pixel_format yuv420p makes the input format explicit).
#define DECODEH264
#define TEST_PARESERTP

#ifdef TEST_PARESERTP
static FILE* poutfile;
static FILE* decodeFile;
const char* outputfilename = "G:\\receive.h264";
const char* decode = "G:\\decode_720x480.yuv";

int initTest()
{
    poutfile = fopen(outputfilename, "ab+");
    if (!poutfile)
    {
        return -1;
    }
    decodeFile = fopen(decode, "ab+");
    if (!decodeFile)
    {
        return -1;
    }
    return 0;
}

void uninitTest()
{
    if (poutfile)
    {
        fclose(poutfile);
        poutfile = NULL;
    }
    if (decodeFile)
    {
        fclose(decodeFile);
        decodeFile = NULL;
    }
}
#endif

#ifdef DECODEH264
// #define DEBUG_DECODE_H264

// C headers must be wrapped in extern "C" when included from C++.
extern "C" {
#include <libavcodec/avcodec.h>
#include <libswscale/swscale.h>
}

#pragma comment(lib, "swscale.lib")
#pragma comment(lib, "avutil.lib")
#pragma comment(lib, "avcodec.lib")
#pragma comment(lib, "avformat.lib")
#pragma comment(lib, "swresample.lib")
#pragma comment(lib, "avfilter.lib")

#define MAXFREAMLEN (720*480)

static unsigned char fullFrame[MAXFREAMLEN];
static unsigned int fullFrameLen;
static unsigned char isFU;
static unsigned char fuReviceEnd;
static unsigned char fuData[MAXFREAMLEN];
static unsigned int fuDataLen;

AVCodec* codec = NULL;
AVCodecContext* pAVCodecCtx = NULL;
AVFrame* frame = NULL;
AVCodecParserContext* avParserContext = NULL;

int initDecode()
{
    av_register_all();
    codec = avcodec_find_decoder(AV_CODEC_ID_H264);
    if (NULL == codec)
    {
        qDebug("Codec not found\n");
        return -1;
    }
    pAVCodecCtx = avcodec_alloc_context3(codec);
    if (NULL == pAVCodecCtx)
    {
        qDebug("Could not allocate video codec context\n");
        return -2;
    }
    pAVCodecCtx->pix_fmt = AV_PIX_FMT_YUV420P;
    pAVCodecCtx->codec_id = AV_CODEC_ID_H264;
    pAVCodecCtx->codec_type = AVMEDIA_TYPE_VIDEO;
    if (avcodec_open2(pAVCodecCtx, codec, NULL) < 0)
    {
        qDebug("Could not open codec\n");
        return -3;
    }
    avParserContext = av_parser_init(AV_CODEC_ID_H264);
    if (NULL == avParserContext)
    {
        qDebug("Could not init avParserContext\n");
        return -4;
    }
    frame = av_frame_alloc();
    if (NULL == frame)
    {
        qDebug("Could not allocate video frame\n");
        return -5;
    }
    return 0;
}

int unInitDecode()
{
    if (frame)
    {
        av_frame_free(&frame);
        frame = NULL;
    }
    if (avParserContext)
    {
        av_parser_close(avParserContext);
        avParserContext = NULL;
    }
    if (pAVCodecCtx)
    {
        avcodec_close(pAVCodecCtx);
        avcodec_free_context(&pAVCodecCtx);
        pAVCodecCtx = NULL;
    }
    return 0;
}

// Convert YUV to RGBA32 / BGRA32
BOOL myYUVtoRGBA32(AVFrame* pAVFrame, int width, int height, BYTE* byOutbuf, bool bIsRGBOut)
{
    struct SwsContext* img_convert_ctx = NULL;
    AVFrame* pAVFrameRGBA = av_frame_alloc();
    if (pAVFrameRGBA == NULL)
    {
        return FALSE;
    }
    if (bIsRGBOut)
    {
        avpicture_fill((AVPicture*)pAVFrameRGBA, byOutbuf, AV_PIX_FMT_RGBA, width, height);
        img_convert_ctx = sws_getContext(width, height, AV_PIX_FMT_YUV420P,
                                         width, height, AV_PIX_FMT_RGBA,
                                         SWS_BICUBIC, NULL, NULL, NULL);
    }
    else
    {
        avpicture_fill((AVPicture*)pAVFrameRGBA, byOutbuf, AV_PIX_FMT_BGRA, width, height);
        img_convert_ctx = sws_getContext(width, height, AV_PIX_FMT_YUV420P,
                                         width, height, AV_PIX_FMT_BGRA,
                                         SWS_BICUBIC, NULL, NULL, NULL);
    }
    // PIX_FMT_RGB24 / PIX_FMT_BGR24
    sws_scale(img_convert_ctx, pAVFrame->data, pAVFrame->linesize,
              0, height, pAVFrameRGBA->data, pAVFrameRGBA->linesize);
    av_frame_free(&pAVFrameRGBA);
    sws_freeContext(img_convert_ctx);
    return TRUE;
}

// Decode H.264 to YUV
int toDecodeH264(unsigned char* inbuf, int inbufLen, int videoWidth, int videoHeight, DecodeOutputImageType outType)
{
    AVPacket avpkt;
    int parse_len = 0;
    int frame_count = 0;
    int got_frame;
    int total_parse_len = 0;

    memset(&avpkt, 0, sizeof(AVPacket));
    av_init_packet(&avpkt);

    while (inbufLen)
    {
        memset(&avpkt, 0, sizeof(AVPacket));
        av_init_packet(&avpkt);

        // parse_len is the number of input bytes consumed by the parser.
        parse_len = av_parser_parse2(avParserContext, pAVCodecCtx, &avpkt.data, &avpkt.size,
                                     inbuf, inbufLen, AV_NOPTS_VALUE, AV_NOPTS_VALUE, AV_NOPTS_VALUE);
        total_parse_len += parse_len;
        inbuf += parse_len;
        inbufLen -= parse_len;
        qDebug("inbufLen=%d, parse_len=%d\n", inbufLen, parse_len);

        if (avpkt.size != 0)
        {
            int decode_len = avcodec_decode_video2(pAVCodecCtx, frame, &got_frame, &avpkt);
            if (decode_len < 0)
                qDebug("Error while decoding frame %d\n", frame_count);
            if (got_frame)
            {
                qDebug("decode success: frame %dx%d, codec %dx%d, coded %dx%d\n",
                       frame->width, frame->height,
                       pAVCodecCtx->width, pAVCodecCtx->height,
                       pAVCodecCtx->coded_width, pAVCodecCtx->coded_height);
                int width = frame->width;
                int height = frame->height;
                if (width != 720 || height != 480)
                {
                    // Only 720x480 frames are handled here.
                    continue;
                }
                if (outType == DEC_OUTPUT_YUV420)
                {
                    unsigned char* yuv_buf = (unsigned char*)malloc(width * height * 3 / 2);
                    if (!yuv_buf)
                    {
                        qDebug("malloc yuv buf failed");
                        continue;
                    }
                    memset(yuv_buf, 0, width * height * 3 / 2);
                    // Y plane
                    for (int i = 0; i < height; i++)
                    {
                        memcpy(yuv_buf + i * width, frame->data[0] + i * frame->linesize[0], width);
                    }
                    // U plane
                    BYTE* pUOut = yuv_buf + height * width;
                    for (int i = 0; i < height / 2; i++)
                    {
                        memcpy(pUOut + i * width / 2, frame->data[1] + i * frame->linesize[1], width / 2);
                    }
                    // V plane
                    BYTE* pVOut = pUOut + height * width / 4;
                    for (int i = 0; i < height / 2; i++)
                    {
                        memcpy(pVOut + i * width / 2, frame->data[2] + i * frame->linesize[2], width / 2);
                    }
#ifdef TEST_PARESERTP
                    // Write the decoded data to a file so the result can be verified.
                    if (decodeFile)
                    {
                        qDebug("write to decode file");
                        fwrite(yuv_buf, sizeof(unsigned char), width * height * 3 / 2, decodeFile);
                    }
#endif
                    free(yuv_buf);
                }
                else if (outType == DEC_OUTPUT_BGRA)
                {
                    unsigned char* brgabuf = (unsigned char*)malloc(width * height * 4);
                    if (!brgabuf)
                    {
                        qDebug("malloc bgra buf failed");
                        continue;
                    }
                    memset(brgabuf, 0, width * height * 4);
                    myYUVtoRGBA32(frame, width, height, brgabuf, false);
                    // Write the decoded data to a file so the result can be verified.
                    if (decodeFile)
                    {
                        qDebug("write to decode file");
                        fwrite(brgabuf, sizeof(unsigned char), width * height * 4, decodeFile);
                    }
                    free(brgabuf);
                }
                frame_count++;
            }
            else
            {
                qDebug("decode fail\n");
            }
            av_packet_unref(&avpkt);
        }
    }
    return total_parse_len;
}

int getFullFrame(unsigned char* srcStr, int srcStrLen, unsigned char* findStr, int findStrLen)
{
    int srcIndex = 0;
    int lastNaluPos = -1;
    int lastNaluType = -1;
    int lastNaluLen = 0;
    int naluCount = 0;
    int maxLen = 0;

    if (!srcStr || !findStr || !findStrLen || !srcStrLen)
        return -1;
    qDebug("srcLen: %d findstrlen: %d", srcStrLen, findStrLen);

    while (srcStrLen)
    {
        if (memcmp(srcStr + srcIndex, findStr, findStrLen) == 0)
        {
            if (lastNaluPos != -1)
            {
                // A start code was found before, so the previous NALU is complete
                // and its length is srcIndex - lastNaluPos.
                lastNaluLen = srcIndex - lastNaluPos;
                if (lastNaluType == 1)
                {
                    if (fullFrameLen)
                    {
                        // A complete IDR frame has been assembled; hand it to the decoder.
                        qDebug("get a full IDR frame len: %d", fullFrameLen);
                        toDecodeH264(fullFrame, fullFrameLen, 0, 0, DEC_OUTPUT_BGRA);
                        fullFrameLen = 0;
                    }
                    // The previous NALU is a P/B slice; hand it to the decoder as well.
                    memcpy(fullFrame + fullFrameLen, &srcStr[lastNaluPos], lastNaluLen);
                    fullFrameLen += lastNaluLen;
                    qDebug("copy %d to fullFrame, fullFrameLen = %d", lastNaluLen, fullFrameLen);
                    qDebug("get a full B/P frame len: %d", lastNaluLen);
                    toDecodeH264(fullFrame, fullFrameLen, 0, 0, DEC_OUTPUT_BGRA);
                    fullFrameLen = 0;
                }
                else
                {
                    memcpy(fullFrame + fullFrameLen, &srcStr[lastNaluPos], lastNaluLen);
                    fullFrameLen += lastNaluLen;
                    qDebug("copy %d to fullFrame, fullFrameLen = %d", lastNaluLen, fullFrameLen);
                }
            }
            lastNaluType = srcStr[srcIndex + 4] & 0x1f;
            lastNaluPos = srcIndex;
        }
        srcStrLen--;
        srcIndex++;

        // Reached the last byte: there is no further NALU, so flush the final one.
        if (!srcStrLen)
        {
            if (lastNaluPos != -1)
            {
                lastNaluLen = srcIndex - lastNaluPos;
                if (lastNaluType == 1)
                {
                    if (fullFrameLen)
                    {
                        qDebug("get a full IDR frame len: %d", fullFrameLen);
                        toDecodeH264(fullFrame, fullFrameLen, 0, 0, DEC_OUTPUT_BGRA);
                        fullFrameLen = 0;
                    }
                    memcpy(fullFrame + fullFrameLen, &srcStr[lastNaluPos], lastNaluLen);
                    fullFrameLen += lastNaluLen;
                    qDebug("copy %d to fullFrame, fullFrameLen = %d", lastNaluLen, fullFrameLen);
                    qDebug("get a full B/P frame len: %d", lastNaluLen);
                    toDecodeH264(fullFrame, fullFrameLen, 0, 0, DEC_OUTPUT_BGRA);
                    fullFrameLen = 0;
                }
                else
                {
                    memcpy(fullFrame + fullFrameLen, &srcStr[lastNaluPos], lastNaluLen);
                    fullFrameLen += lastNaluLen;
                    qDebug("copy %d to fullFrame, fullFrameLen = %d", lastNaluLen, fullFrameLen);
                }
            }
        }
    }
    return 0;
}

// Read the raw H.264 file and feed it straight to FFmpeg, which parses it into frames and decodes them.
void testH264File2YuvFile()
{
    // A 4 MB buffer is too large for the default thread stack, so keep it static.
    static unsigned char h264Data[4096000] = { 0 };
    size_t dataLen = 0;

    FILE* h264File = fopen("G:\\receive.h264", "rb");
    if (!h264File)
        return;

    initDecode();
    initTest();

    // fread(buf, 1, n, fp) returns the number of bytes read.
    dataLen = fread(h264Data, 1, sizeof(h264Data), h264File);
    while (dataLen)
    {
        toDecodeH264(h264Data, (int)dataLen, 0, 0, DEC_OUTPUT_YUV420);
        dataLen = fread(h264Data, 1, sizeof(h264Data), h264File);
    }
    fclose(h264File);

    unInitDecode();
    uninitTest();
}

// When receiving packets from the network, a complete frame must be assembled before the data is handed to FFmpeg for decoding.
// RTP receive callback
void on_rx_rtp(void* pkt, pj_ssize_t size) // an RTP video packet has arrived
{
    BYTE tmpbuff[2048] = { 0 };
    unsigned int tmpbuffLen = 0;
    unsigned char findcode[] = { 0x00, 0x00, 0x00, 0x01 };

    DC_PRINT_DEBUG << "to rtp_unpackage start";
    rtp_unpackage((char*)pkt, size, tmpbuff, &tmpbuffLen);
    DC_PRINT_DEBUG << "to rtp_unpackage end";

    // IDR frame buffer: how large does it need to be -- at most width x height?
    // 0x67 (SPS)  0x68 (PPS)  0x66 (SEI)  0x65 (IDR slice)  0x61/0x41 (P/B slice)
    // If this packet is part of an FU fragmented NALU, wait until all fragments have been received,
    // then search fuData for {00,00,00,01} and read the NALU type from fuData[4] & 0x1f.
    // If it is not a fragmented packet, search tmpbuff for {00,00,00,01} and read the NALU type
    // from tmpbuff[4] & 0x1f.
    // Types 7/8/6/5 are appended to the IDR frame buffer; when a type-1 NALU arrives, the IDR
    // buffer is handed to the decoder, and the type-1 data (P/B slice) is decoded as well.
    if (isFU && !fuReviceEnd) // an FU fragment has arrived but the fragment series is not complete yet
    {
        // Do not decode until all FU fragments have been received.
        DC_PRINT_DEBUG << "wait fu data receive end";
        return;
    }
    if (fuReviceEnd)
    {
        DC_PRINT_DEBUG << "fu data receive end, total len is: " << fuDataLen;
        getFullFrame(fuData, fuDataLen, findcode, sizeof(findcode));
        isFU = 0;
        fuReviceEnd = 0;
        fuDataLen = 0;
    }
    else
    {
        DC_PRINT_DEBUG << "tmpbuff tmpbuffLen is: " << tmpbuffLen;
        getFullFrame(tmpbuff, tmpbuffLen, findcode, sizeof(findcode));
    }
}
#endif
