YUYV格式推流

This commit is contained in:
zhangpeng 2025-06-30 17:02:22 +08:00
parent 15599a6ee4
commit bba581e510

View File

@@ -18,243 +18,194 @@
#include "mk_mediakit.h" #include "mk_mediakit.h"
typedef struct typedef struct {
{
MppEncoder *encoder; MppEncoder *encoder;
mk_media media; mk_media media;
mk_pusher pusher; mk_pusher pusher;
const char *push_url; const char *push_url;
int video_type=264; int video_type = 264;
int push_rtsp_port; int push_rtsp_port;
std::string push_path_first; std::string push_path_first;
std::string push_path_second; std::string push_path_second;
} rknn_app_context_t; } rknn_app_context_t;
// Release a ZLMediaKit media handle and null the caller's pointer so the
// same handle cannot be released twice. Safe on NULL / already-cleared input.
void release_media(mk_media *ptr) {
    if (ptr && *ptr) {
        mk_media_release(*ptr);
        *ptr = NULL;
    }
}
// Release an RTSP pusher handle and null the caller's pointer so the same
// handle cannot be released twice. Safe on NULL / already-cleared input.
void release_pusher(mk_pusher *ptr) {
    if (ptr && *ptr) {
        mk_pusher_release(*ptr);
        *ptr = NULL;
    }
}
// Drop one reference on a track handle and null the caller's pointer so the
// same reference cannot be dropped twice. Safe on NULL / already-cleared input.
void release_track(mk_track *ptr) {
    if (ptr && *ptr) {
        mk_track_unref(*ptr);
        *ptr = NULL;
    }
}
// Round `number` up to the nearest multiple of 16 (e.g. 17 -> 32, 16 -> 16).
// Used to compute the stride/height padding the hardware encoder requires.
int padToMultipleOf16(int number) {
    int remainder = number % 16;
    if (remainder == 0) {
        // Already aligned; nothing to add.
        return number;
    }
    // Add the shortfall to reach the next multiple of 16.
    return number + (16 - remainder);
}
// Result/shutdown callback for the RTSP pusher. err_code == 0 means the push
// (re)connected successfully; any other code is logged and the pusher is torn
// down (the source-regist callback recreates it if the source comes back).
void API_CALL on_mk_push_event_func(void *user_data, int err_code, const char *err_msg) {
    rknn_app_context_t *ctx = (rknn_app_context_t *)user_data;
    if (err_code == 0) {
        log_info("push %s success!", ctx->push_url);
        printf("push %s success!\n", ctx->push_url);
    } else {
        log_warn("push %s failed:%d %s", ctx->push_url, err_code, err_msg);
        printf("push %s failed:%d %s\n", ctx->push_url, err_code, err_msg);
        release_pusher(&(ctx->pusher));
    }
}
void API_CALL on_mk_media_source_regist_func(void *user_data, mk_media_source sender, int regist) void API_CALL on_mk_media_source_regist_func(void *user_data, mk_media_source sender, int regist) {
{
rknn_app_context_t *ctx = (rknn_app_context_t *)user_data; rknn_app_context_t *ctx = (rknn_app_context_t *)user_data;
const char *schema = mk_media_source_get_schema(sender); const char *schema = mk_media_source_get_schema(sender);
if (strncmp(schema, ctx->push_url, strlen(schema)) == 0) if (strncmp(schema, ctx->push_url, strlen(schema)) == 0) {
{
release_pusher(&(ctx->pusher)); release_pusher(&(ctx->pusher));
if (regist) if (regist) {
{
ctx->pusher = mk_pusher_create_src(sender); ctx->pusher = mk_pusher_create_src(sender);
mk_pusher_set_on_result(ctx->pusher, on_mk_push_event_func, ctx); mk_pusher_set_on_result(ctx->pusher, on_mk_push_event_func, ctx);
mk_pusher_set_on_shutdown(ctx->pusher, on_mk_push_event_func, ctx); mk_pusher_set_on_shutdown(ctx->pusher, on_mk_push_event_func, ctx);
log_info("push started!"); log_info("push started!");
printf("push started!\n"); printf("push started!\n");
} } else {
else
{
log_info("push stoped!"); log_info("push stoped!");
printf("push stoped!\n"); printf("push stoped!\n");
} }
printf("push_url:%s\n", ctx->push_url); printf("push_url:%s\n", ctx->push_url);
} } else {
else
{
printf("unknown schema:%s\n", schema); printf("unknown schema:%s\n", schema);
} }
} }
void API_CALL on_mk_shutdown_func(void *user_data, int err_code, const char *err_msg, mk_track tracks[], int track_count) int process_camera_stream(rknn_app_context_t *ctx) {
{ // Open camera device
printf("play interrupted: %d %s", err_code, err_msg); cv::VideoCapture cap("/dev/video61");
} if (!cap.isOpened()) {
printf("Failed to open camera device: /dev/video61\n");
int process_video_file(rknn_app_context_t *ctx, const char *video_path) return -1;
{
// 读取视频
cv::VideoCapture cap(video_path);
if (!cap.isOpened())
{
printf("Failed to open video file: %s", video_path);
} }
// 使用前需要使用v4l2-ctl --device=/dev/video0 --list-formats-ext检查一下设备支持范围
cap.set(cv::CAP_PROP_FOURCC, cv::VideoWriter::fourcc('M', 'J', 'P', 'G')); // Set camera parameters based on v4l2-ctl output
// set width cap.set(cv::CAP_PROP_FOURCC, cv::VideoWriter::fourcc('Y', 'U', 'Y', 'V'));
cap.set(cv::CAP_PROP_FRAME_WIDTH, 1920); cap.set(cv::CAP_PROP_FRAME_WIDTH, 2452);
// set height cap.set(cv::CAP_PROP_FRAME_HEIGHT, 2056);
cap.set(cv::CAP_PROP_FRAME_HEIGHT, 1080); cap.set(cv::CAP_PROP_FPS, 30);
// set fps
cap.set(cv::CAP_PROP_FPS,30); // Get actual parameters
// 获取视频尺寸、帧率
int cap_width = cap.get(cv::CAP_PROP_FRAME_WIDTH); int cap_width = cap.get(cv::CAP_PROP_FRAME_WIDTH);
int cap_height = cap.get(cv::CAP_PROP_FRAME_HEIGHT); int cap_height = cap.get(cv::CAP_PROP_FRAME_HEIGHT);
int fps = cap.get(cv::CAP_PROP_FPS); int fps = cap.get(cv::CAP_PROP_FPS);
ctx->push_url = "rtsp://localhost/live/stream"; printf("Camera opened with resolution: %dx%d, FPS: %d\n", cap_width, cap_height, fps);
ctx->media = mk_media_create("__defaultVhost__", ctx->push_path_first.c_str(), ctx->push_path_second.c_str(), 0, 0, 0); // RTSP stream setup
ctx->push_url = "rtsp://localhost/live/stream";
ctx->media = mk_media_create("__defaultVhost__", ctx->push_path_first.c_str(),
ctx->push_path_second.c_str(), 0, 0, 0);
codec_args v_args = {0}; codec_args v_args = {0};
mk_track v_track = mk_track_create(MKCodecH264, &v_args); mk_track v_track = mk_track_create(MKCodecH264, &v_args);
mk_media_init_track(ctx->media, v_track); mk_media_init_track(ctx->media, v_track);
mk_media_init_complete(ctx->media); mk_media_init_complete(ctx->media);
mk_media_set_on_regist(ctx->media, on_mk_media_source_regist_func, ctx); mk_media_set_on_regist(ctx->media, on_mk_media_source_regist_func, ctx);
// 初始化编码器 // Initialize encoder
MppEncoder *mpp_encoder = new MppEncoder(); MppEncoder *mpp_encoder = new MppEncoder();
MppEncoderParams enc_params; MppEncoderParams enc_params;
memset(&enc_params, 0, sizeof(MppEncoderParams)); memset(&enc_params, 0, sizeof(MppEncoderParams));
enc_params.width = cap_width; enc_params.width = cap_width;
enc_params.height = cap_height; enc_params.height = cap_height;
enc_params.fmt = MPP_FMT_YUV420SP; enc_params.fmt = MPP_FMT_YUV420SP; // We'll convert from YUYV to YUV420SP
enc_params.type = MPP_VIDEO_CodingAVC; enc_params.type = MPP_VIDEO_CodingAVC;
mpp_encoder->Init(enc_params, ctx); mpp_encoder->Init(enc_params, ctx);
ctx->encoder = mpp_encoder; ctx->encoder = mpp_encoder;
// mpp编码配置 // Variables for frame processing
void *mpp_frame = NULL; void *mpp_frame = NULL;
int mpp_frame_fd = 0; int mpp_frame_fd = 0;
void *mpp_frame_addr = NULL; void *mpp_frame_addr = NULL;
int enc_data_size; int enc_data_size;
cv::Mat frame, yuv_frame;
int frame_index = 0; while (true) {
int ret = 0; // Read frame from camera
if (!cap.read(frame)) {
// 画面 printf("Failed to read frame from camera\n");
cv::Mat img;
while (true)
{
// 读取视频帧
cap >> img;
if (img.empty())
{
printf("Video end.");
break; break;
} }
frame_index++; if (frame.empty()) {
// 结束计时 printf("Empty frame received\n");
auto end_time = std::chrono::high_resolution_clock::now(); continue;
// 将当前时间点转换为毫秒级别的时间戳 }
auto millis = std::chrono::time_point_cast<std::chrono::milliseconds>(end_time).time_since_epoch().count();
// 获取解码后的帧 // Get current timestamp
auto millis = std::chrono::duration_cast<std::chrono::milliseconds>(
std::chrono::system_clock::now().time_since_epoch()).count();
// Get encoder input buffer
mpp_frame = ctx->encoder->GetInputFrameBuffer(); mpp_frame = ctx->encoder->GetInputFrameBuffer();
// 获取解码后的帧fd
mpp_frame_fd = ctx->encoder->GetInputFrameBufferFd(mpp_frame); mpp_frame_fd = ctx->encoder->GetInputFrameBufferFd(mpp_frame);
// 获取解码后的帧地址
mpp_frame_addr = ctx->encoder->GetInputFrameBufferAddr(mpp_frame); mpp_frame_addr = ctx->encoder->GetInputFrameBufferAddr(mpp_frame);
rga_buffer_t src = wrapbuffer_fd(mpp_frame_fd, cap_width, cap_height, RK_FORMAT_YCbCr_420_SP,padToMultipleOf16(cap_width),padToMultipleOf16(cap_height)); // Convert YUYV to YUV420SP (NV12) using RGA
rga_buffer_t src = wrapbuffer_virtualaddr((void *)frame.data,
cap_width, cap_height,
RK_FORMAT_YUYV_422);
rga_buffer_t dst = wrapbuffer_fd(mpp_frame_fd,
cap_width, cap_height,
RK_FORMAT_YCbCr_420_SP,
padToMultipleOf16(cap_width),
padToMultipleOf16(cap_height));
// Perform color space conversion
imcvtcolor(src, dst, RK_FORMAT_YUYV_422, RK_FORMAT_YCbCr_420_SP);
// Encode frame
int enc_buf_size = ctx->encoder->GetFrameSize(); int enc_buf_size = ctx->encoder->GetFrameSize();
char *enc_data = (char *)malloc(enc_buf_size); char *enc_data = (char *)malloc(enc_buf_size);
rga_buffer_t rgb_img = wrapbuffer_virtualaddr((void *)img.data, cap_width, cap_height, RK_FORMAT_BGR_888);
// 将RGB图像复制到src中
imcopy(rgb_img, src);
if (frame_index == 1)
{
enc_data_size = ctx->encoder->GetHeader(enc_data, enc_buf_size);
}
// 内存初始化
memset(enc_data, 0, enc_buf_size); memset(enc_data, 0, enc_buf_size);
enc_data_size = ctx->encoder->Encode(mpp_frame, enc_data, enc_buf_size); enc_data_size = ctx->encoder->Encode(mpp_frame, enc_data, enc_buf_size);
ret = mk_media_input_h264(ctx->media, enc_data, enc_data_size, millis, millis); // Push to RTSP
if (ret != 1) int ret = mk_media_input_h264(ctx->media, enc_data, enc_data_size, millis, millis);
{ if (ret != 1) {
printf("mk_media_input_frame failed\n"); printf("mk_media_input_frame failed\n");
} }
if (enc_data != nullptr)
{ if (enc_data != nullptr) {
free(enc_data); free(enc_data);
} }
// Small delay to maintain frame rate
usleep(1000000 / fps);
} }
// 释放资源
// Release resources
cap.release(); cap.release();
release_track(&v_track); release_track(&v_track);
release_media(&ctx->media); release_media(&ctx->media);
return 0;
} }
int main(int argc, char **argv) int main(int argc, char **argv) {
{ // Initialize media kit
int status = 0;
int ret;
if (argc != 2)
{
printf("Usage: %s<video_path>\n", argv[0]);
return -1;
}
char *stream_url = argv[1]; // 视频流地址
int video_type = 264;
// 初始化流媒体
mk_config config; mk_config config;
memset(&config, 0, sizeof(mk_config)); memset(&config, 0, sizeof(mk_config));
config.log_mask = LOG_CONSOLE; config.log_mask = LOG_CONSOLE;
@@ -262,19 +213,20 @@ int main(int argc, char **argv) mk_env_init(&config); mk_env_init(&config);
mk_env_init(&config); mk_env_init(&config);
mk_rtsp_server_start(3554, 0); mk_rtsp_server_start(3554, 0);
rknn_app_context_t app_ctx; // 创建上下文 // Initialize application context
memset(&app_ctx, 0, sizeof(rknn_app_context_t)); // 初始化上下文 rknn_app_context_t app_ctx;
app_ctx.video_type = video_type; memset(&app_ctx, 0, sizeof(rknn_app_context_t));
app_ctx.push_path_first = "yunyan-live"; app_ctx.video_type = 264; // H.264
app_ctx.push_path_second = "test"; app_ctx.push_path_first = "live";
app_ctx.push_path_second = "stream";
process_video_file(&app_ctx, stream_url); // Start processing camera stream
process_camera_stream(&app_ctx);
printf("waiting finish\n"); printf("Streaming finished\n");
usleep(3 * 1000 * 1000); usleep(3 * 1000 * 1000);
if (app_ctx.encoder != nullptr) if (app_ctx.encoder != nullptr) {
{
delete (app_ctx.encoder); delete (app_ctx.encoder);
app_ctx.encoder = nullptr; app_ctx.encoder = nullptr;
} }