Video Encoding
Functional Constraints
The functional constraints of video encoding are listed in the table below; a configuration sketch that maps these constraints onto the encoder attributes follows the table.
Table: Video encoding functional constraints
| Constraint | Attribute | Parameter |
| --- | --- | --- |
| Input | Resolution | |
| | Rate control | Three rate-control modes are supported: CBR, VBR, and FixQP |
| | Maximum bitrate | 400 Mbps |
| | GopType | Currently only B-frame encoding with GopType set to Low Delay is supported, and bFramesNum may be at most 5 |
| | Format | NV12 8bit and NV12 10bit are supported; bitDepth must be set to 8 and 10 respectively |
| | Input memory | Input memory for video encoding may only be allocated and released with lynMalloc and lynFree |
| Output | Output format | |
| | Output memory | Output memory for video encoding may only be allocated and released with lynMalloc and lynFree |
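As a quick illustration of how these constraints map onto the encoder configuration, the sketch below sets up an H.264 encoder for NV12 8bit input (bitDepth 8), keeps the B-frame count within the maximum of 5, and allocates the input buffer with lynMalloc/lynFree. It reuses only types, fields, and functions that also appear in the sample later in this section; the function name and parameter values are illustrative, and error handling is omitted.
void ConfigureEncoderExample()
{
lynVencAttr_t attr;
attr.codecType = LYN_CODEC_ID_H264;  // select the codec, then fill in the defaults
lynVencSetDefaultParams(&attr);
attr.width = 1920;
attr.height = 1080;
attr.inputFormat = LYN_PIX_FMT_NV12; // NV12 8bit input
attr.bitdepth = 8;                   // must match the input format: 8 for NV12 8bit, 10 for NV12 10bit
attr.bframesNum = 2;                 // Low Delay B-frame encoding only; bFramesNum must not exceed 5
// Rate control (CBR/VBR/FixQP) and the maximum bitrate (up to 400 Mbps) are also
// configured through lynVencAttr_t; see its definition for the exact field names.

// Input memory must be allocated with lynMalloc and released with lynFree.
void *inputBuf = nullptr;
int frameSize = attr.width * attr.height * 3 / 2; // one NV12 8bit frame
lynMalloc(&inputBuf, frameSize);
// ... open the encoder with lynVencOpen(&handle, &attr) and send frames ...
lynFree(inputBuf);
}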
Video Encoding Example
The complete sample can be found in the example code shipped with LynSDK; there, every API call is followed by error checking and handling. The code below shows only the key steps, is for reference only, and cannot be copied and compiled directly.
// Standard headers used by the snippets in this section. The LynSDK headers that
// declare lynVenc*, lynStream*, lynMalloc/lynFree and lynMemcpy*, as well as the
// headers providing the LOG and TEST/EXPECT_* macros used by the sample, must
// also be included and are omitted here.
#include <cassert>
#include <cstdint>
#include <cstdio>
#include <cstdlib>
#include <fstream>
#include <iomanip>
#include <memory>
#include <mutex>
#include <sstream>
#include <string>
#include <thread>
#include <unordered_map>
#include <vector>
using namespace std;

// Read a whole YUV file into host memory.
lynCodecBuf_t GetInputFromFile(std::string srcFileName)
{
lynCodecBuf_t buf = {0};
// Open in binary mode so the raw YUV data is read back unmodified.
FILE *fp = fopen(srcFileName.c_str(), "rb");
if (fp)
{
fseek(fp, 0, SEEK_END);
buf.size = (int)ftell(fp);
buf.data = (uint8_t *)malloc(buf.size);
fseek(fp, 0, SEEK_SET);
fread(buf.data, 1, buf.size, fp);
fclose(fp);
}
else
{
LOG(ERROR) << "Open file failed, path: " << srcFileName;
}
return buf;
}
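// Host-side copies of the YUV test frames. ReadAllResource loads the ten source
// files only once (std::call_once); FillInputFrame then reuses them round-robin.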
static vector<lynCodecBuf_t> resourceList;
void ReadAllResource(vector<lynCodecBuf_t> &resource)
{
static once_flag flag;
std::call_once(flag, [&] {
for (size_t i = 0; i < 10; i++)
{
stringstream ss;
ss << "../source/encode_yuv_src/dump_1_00" << setw(2) << setfill('0') << i + 1 << ".yuv";
resource.push_back(GetInputFromFile(ss.str()));
}
});
}
// Prepare an input frame: asynchronously copy the next YUV image from host memory into the device-side frame buffer.
void FillInputFrame(lynFrame_t *frame, size_t i, lynStream_t stream)
{
size_t index = i % 10;
if (lynMemcpyAsync(stream, frame->data, resourceList[index].data, resourceList[index].size, ClientToServer) != 0)
{
LOG(ERROR) << "lynMemcpyAsync error!!!!!";
}
}
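// A fixed pool of frame/packet descriptors whose data buffers live in device
// memory (allocated with lynMalloc). Pop() hands out a free slot and marks it
// busy; Push() returns a slot to the pool once the asynchronous operation that
// used it has completed (signalled through a stream callback).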
template <typename T>
class Queue
{
public:
Queue(uint32_t bufNum, int bufSize)
{
for (size_t i = 0; i < bufNum; i++)
{
// Allocate one descriptor and its device-side buffer for this slot.
T *frame = new T;
void *buf = nullptr;
assert(lynMalloc(&buf, bufSize) == 0);
frame->data = (uint8_t *)buf;
frame->size = bufSize;
frame->eos = false;
m_map[frame] = false;
}
it = m_map.begin();
}
~Queue()
{
for (auto it = m_map.begin(); it != m_map.end(); ++it)
{
lynFree(it->first->data);
delete(it->first);
}
m_map.clear();
}
void Push(void *x)
{
std::lock_guard<std::mutex> locker(m_mutex);
for (auto it = m_map.begin(); it != m_map.end(); ++it)
{
if ((void*)it->first->data == x)
{
LOG(INFO) << "Restore " << it->first;
it->second = false;
break;
}
}
}
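// Scan the pool for a free slot. If every buffer is still in flight, keep
// rescanning (busy-wait) until a callback releases one via Push().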
T *Pop()
{
loop:
{
std::lock_guard<std::mutex> locker(m_mutex);
for (; it != m_map.end(); ++it)
{
if (it->second == false)
{
LOG(INFO) << "Pop " << it->first;
it->second = true;
return it->first;
}
}
}
if (it == m_map.end())
{
it = m_map.begin();
goto loop;
}
return nullptr;
}
private:
std::unordered_map<T *, bool> m_map;
typename std::unordered_map<T *, bool>::iterator it; // cursor used by Pop() to resume scanning
std::mutex m_mutex;
};
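// User data passed to the send-stream callback: identifies which input frame
// buffer can be returned to its pool once the send has completed.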
typedef struct inData
{
std::shared_ptr<Queue<lynFrame_t>> pQueue;
lynFrame_t *frame;
} inData;
lynError_t SendFrameCallback(void *userData)
{
inData *pData = (inData *)userData;
pData->pQueue->Push(pData->frame->data);
delete pData;
return 0;
}
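// User data passed to the receive-stream callback: the packet to drain, a host
// staging buffer for the device-to-host copy, and the destination file.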
typedef struct rcvData
{
std::shared_ptr<Queue<lynPacket_t>> pQueue;
lynPacket_t *packet;
lynCodecBuf_t *localBuf;
std::string dstFile; // destination output file
} rcvData;
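// Query how many bytes of the device-side packet are valid, copy them back to
// host memory, and append them to the output bitstream file.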
lynError_t WriteRemotePacketToFile(const lynPacket_t *packet, lynCodecBuf_t *localBuf, const std::string &fileName)
{
uint32_t data_size = 0;
lynError_t ret = lynEncGetRemotePacketValidSize(packet, &data_size);
if (ret != 0)
{
LOG(ERROR) << "lynEncGetRemotePacketValidSize error:" << ret;
return ret;
}
ret = lynMemcpy(localBuf->data, packet->data, data_size, ServerToClient);
if (ret != 0)
{
LOG(ERROR) << "lynMemcpy data_size error:" << ret;
return ret;
}
ofstream dump_file;
dump_file.open(fileName, ios::binary | ios::app);
dump_file.write((char *)localBuf->data, data_size);
dump_file.close();
return 0;
}
lynError_t RecvPacketCallback(void *userData)
{
rcvData *pData = (rcvData *)userData;
if (WriteRemotePacketToFile(pData->packet, pData->localBuf, pData->dstFile) != 0)
{
LOG(ERROR) << "WriteRemotePacketToFile failed.";
return -1;
}
if (pData->pQueue)
{
pData->pQueue->Push(pData->packet->data);
}
delete pData;
return 0;
}
int frameNum = 10; // number of frames to encode per channel
uint32_t width = 1920;
uint32_t height = 1080;
uint8_t bNums = 0;
uint8_t pNums = 0;
uint8_t codecType = LYN_CODEC_ID_H264;
uint8_t inputFormat = LYN_PIX_FMT_NV12;
uint8_t bitDepth = 8;
// Buffer pool sizes used below; they are not defined in this excerpt, so the
// values here are only illustrative.
uint32_t inBufNum = 8;
uint32_t outBufNum = 8;
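// One encoding channel: create a context, open an encoder, create one stream for
// sending input frames and one for receiving encoded packets, fetch the parameter
// sets, then pump frames and packets asynchronously. The stream callbacks return
// buffers to their pools and dump packets to the output file. Finally both
// streams are synchronized and all resources are released.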
void MultiChannelThreadFunc(std::string outputFileName)
{
// Delete the output file if it already exists.
FileManager::deleteFile(outputFileName);
lynContext_t context = nullptr;
EXPECT_EQ(lynCreateContext(&context, 0), 0);
EXPECT_NE(context, nullptr);
lynVencAttr_t attr;
attr.codecType = LYN_CODEC_ID_H264;
lynVencSetDefaultParams(&attr);
attr.width = width;
attr.height = height;
attr.bitdepth = bitDepth;
attr.bframesNum = bNums;
attr.pframesNum = pNums;
attr.codecType = (lynCodecId_t)codecType;
attr.inputFormat = (lynPixelFormat_t)inputFormat;
int bufSize = attr.width * attr.height * 3 / 2; // size of one NV12 8bit frame
// Open the encoder.
lynVencHandle_t vencHdl;
EXPECT_EQ(lynVencOpen(&vencHdl, &attr), 0);
EXPECT_NE(vencHdl, nullptr);
// Create the send stream.
lynStream_t sendStream = nullptr;
EXPECT_EQ(lynCreateStream(&sendStream), 0);
EXPECT_NE(sendStream, nullptr);
// Create the receive stream.
lynStream_t recvStream = nullptr;
EXPECT_EQ(lynCreateStream(&recvStream), 0);
EXPECT_NE(recvStream, nullptr);
std::shared_ptr<Queue<lynFrame_t>> pQueue = std::make_shared<Queue<lynFrame_t>>(inBufNum, bufSize);
std::shared_ptr<Queue<lynPacket_t>> pOutQueue = std::make_shared<Queue<lynPacket_t>>(outBufNum, bufSize);
lynFrame_t *frame = nullptr;
lynPacket_t *packet = nullptr;
lynCodecBuf_t *outbuf = new lynCodecBuf_t;
outbuf->data = (uint8_t *)malloc(bufSize);
outbuf->size = bufSize;
packet = pOutQueue->Pop();
EXPECT_EQ(lynVencGetParamsSetAsync(recvStream, vencHdl, packet), 0);
rcvData *params_data = new rcvData;
params_data->pQueue = pOutQueue;
params_data->packet = packet;
params_data->localBuf = outbuf;
params_data->dstFile = outputFileName;
EXPECT_EQ(lynStreamAddAsyncCallback(recvStream, RecvPacketCallback, params_data), 0);
for (size_t i = 0; i <= frameNum; i++)
{
frame = pQueue->Pop();
if (i == frameNum)
{
frame->eos = true; // mark the last (end-of-stream) frame
}
FillInputFrame(frame, i, sendStream);
EXPECT_EQ(lynVencSendFrameAsync(sendStream, vencHdl, frame), 0);
inData *data = new inData;
data->pQueue = pQueue;
data->frame = frame;
EXPECT_EQ(lynStreamAddAsyncCallback(sendStream, SendFrameCallback, data), 0);
packet = pOutQueue->Pop();
EXPECT_EQ(lynVencRecvPacketAsync(recvStream, vencHdl, packet), 0);
rcvData *out_data = new rcvData;
out_data->pQueue = pOutQueue;
out_data->packet = packet;
out_data->localBuf = outbuf;
out_data->dstFile = outputFileName;
EXPECT_EQ(lynStreamAddAsyncCallback(recvStream, RecvPacketCallback, out_data), 0);
}
LOG(INFO) << "send cmd finish";
EXPECT_EQ(lynSynchronizeStream(sendStream), 0);
EXPECT_EQ(lynSynchronizeStream(recvStream), 0);
EXPECT_EQ(lynVencClose(vencHdl), 0);
EXPECT_EQ(lynDestroyStream(sendStream), 0);
EXPECT_EQ(lynDestroyStream(recvStream), 0);
free(outbuf->data);
delete outbuf;
pQueue = nullptr;
pOutQueue = nullptr;
EXPECT_EQ(lynDestroyContext(context), 0);
LOG(INFO) << "destory context finish";
}
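// Number of concurrent encoding channels; the test below starts one thread per
// channel, each writing its own H.264 output file.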
const uint32_t threadNum = 3;
TEST(lynEncoderPATest, MultiChannelTest)
{
std::string outputFileName;
std::thread threads[threadNum];
for (size_t i = 0; i < threadNum; i++)
{
outputFileName = "./output/MultiChannelTest_" + to_string(i) + ".h264";
threads[i] = std::thread(MultiChannelThreadFunc, outputFileName);
}
for (thread &th : threads)
{
th.join();
}
}
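Because every thread creates its own context, encoder handle, and pair of streams, the channels encode independently of one another; the number of channels is controlled by threadNum.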