diff --git a/rknn_usb_rtsp-master/src/usb_to_rtsp.cpp b/rknn_usb_rtsp-master/src/usb_to_rtsp.cpp
index 50c70c4..6b22b3f 100644
--- a/rknn_usb_rtsp-master/src/usb_to_rtsp.cpp
+++ b/rknn_usb_rtsp-master/src/usb_to_rtsp.cpp
@@ -18,243 +18,194 @@
 #include "mk_mediakit.h"
 
-typedef struct
-{
+typedef struct {
     MppEncoder *encoder;
     mk_media media;
     mk_pusher pusher;
     const char *push_url;
-    int video_type=264;
-
+    int video_type = 264;
    int push_rtsp_port;
     std::string push_path_first;
     std::string push_path_second;
-
-
 } rknn_app_context_t;
 
-void release_media(mk_media *ptr)
-{
-    if (ptr && *ptr)
-    {
+void release_media(mk_media *ptr) {
+    if (ptr && *ptr) {
         mk_media_release(*ptr);
         *ptr = NULL;
     }
 }
 
-void release_pusher(mk_pusher *ptr)
-{
-    if (ptr && *ptr)
-    {
+void release_pusher(mk_pusher *ptr) {
+    if (ptr && *ptr) {
         mk_pusher_release(*ptr);
         *ptr = NULL;
     }
 }
 
-void release_track(mk_track *ptr)
-{
-    if (ptr && *ptr)
-    {
+void release_track(mk_track *ptr) {
+    if (ptr && *ptr) {
         mk_track_unref(*ptr);
         *ptr = NULL;
     }
 }
 
-
-
-// 函数定义
-int padToMultipleOf16(int number) {
-    // 如果number已经是16的倍数,则直接返回
-    if (number % 16 == 0) {
-        return number;
-    }
-    // 否则,计算需要添加的额外量(即16 - (number % 16))
-    // 这等价于找到比number大的最小16的倍数,并减去number
-    int extra = 16 - (number % 16);
-    // 返回扩充后的数
-    return number + extra;
+int padToMultipleOf16(int number) {
+    if (number % 16 == 0) {
+        return number;
+    }
+    int extra = 16 - (number % 16);
+    return number + extra;
 }
 
-void API_CALL on_mk_push_event_func(void *user_data, int err_code, const char *err_msg)
-{
+void API_CALL on_mk_push_event_func(void *user_data, int err_code, const char *err_msg) {
     rknn_app_context_t *ctx = (rknn_app_context_t *)user_data;
-    if (err_code == 0)
-    {
-        // push success
+    if (err_code == 0) {
         log_info("push %s success!", ctx->push_url);
         printf("push %s success!\n", ctx->push_url);
-    }
-    else
-    {
+    } else {
         log_warn("push %s failed:%d %s", ctx->push_url, err_code, err_msg);
         printf("push %s failed:%d %s\n", ctx->push_url, err_code, err_msg);
         release_pusher(&(ctx->pusher));
     }
 }
 
-void API_CALL on_mk_media_source_regist_func(void *user_data, mk_media_source sender, int regist)
-{
+void API_CALL on_mk_media_source_regist_func(void *user_data, mk_media_source sender, int regist) {
     rknn_app_context_t *ctx = (rknn_app_context_t *)user_data;
     const char *schema = mk_media_source_get_schema(sender);
-    if (strncmp(schema, ctx->push_url, strlen(schema)) == 0)
-    {
+    if (strncmp(schema, ctx->push_url, strlen(schema)) == 0) {
         release_pusher(&(ctx->pusher));
-        if (regist)
-        {
+        if (regist) {
             ctx->pusher = mk_pusher_create_src(sender);
             mk_pusher_set_on_result(ctx->pusher, on_mk_push_event_func, ctx);
             mk_pusher_set_on_shutdown(ctx->pusher, on_mk_push_event_func, ctx);
             log_info("push started!");
             printf("push started!\n");
-        }
-        else
-        {
+        } else {
             log_info("push stoped!");
             printf("push stoped!\n");
         }
         printf("push_url:%s\n", ctx->push_url);
-    }
-    else
-    {
+    } else {
         printf("unknown schema:%s\n", schema);
     }
 }
 
-void API_CALL on_mk_shutdown_func(void *user_data, int err_code, const char *err_msg, mk_track tracks[], int track_count)
-{
-    printf("play interrupted: %d %s", err_code, err_msg);
-}
-
-int process_video_file(rknn_app_context_t *ctx, const char *video_path)
-{
-    // 读取视频
-    cv::VideoCapture cap(video_path);
-    if (!cap.isOpened())
-    {
-        printf("Failed to open video file: %s", video_path);
+int process_camera_stream(rknn_app_context_t *ctx) {
+    // Open camera device
+    cv::VideoCapture cap("/dev/video61");
+    if (!cap.isOpened()) {
+        printf("Failed to open camera device: /dev/video61\n");
+        return -1;
     }
-    // 使用前需要使用v4l2-ctl --device=/dev/video0 --list-formats-ext检查一下设备支持范围
-    cap.set(cv::CAP_PROP_FOURCC, cv::VideoWriter::fourcc('M', 'J', 'P', 'G'));
-    // set width
-    cap.set(cv::CAP_PROP_FRAME_WIDTH, 1920);
-    // set height
-    cap.set(cv::CAP_PROP_FRAME_HEIGHT, 1080);
-    // set fps
-    cap.set(cv::CAP_PROP_FPS,30);
-    // 获取视频尺寸、帧率
+
+    // Set camera parameters based on v4l2-ctl output
+    cap.set(cv::CAP_PROP_FOURCC, cv::VideoWriter::fourcc('Y', 'U', 'Y', 'V'));
+    cap.set(cv::CAP_PROP_FRAME_WIDTH, 2452);
+    cap.set(cv::CAP_PROP_FRAME_HEIGHT, 2056);
+    cap.set(cv::CAP_PROP_FPS, 30);
+
+    // Get actual parameters
     int cap_width = cap.get(cv::CAP_PROP_FRAME_WIDTH);
     int cap_height = cap.get(cv::CAP_PROP_FRAME_HEIGHT);
     int fps = cap.get(cv::CAP_PROP_FPS);
+
+    printf("Camera opened with resolution: %dx%d, FPS: %d\n", cap_width, cap_height, fps);
+    // RTSP stream setup
     ctx->push_url = "rtsp://localhost/live/stream";
-
-    ctx->media = mk_media_create("__defaultVhost__", ctx->push_path_first.c_str(), ctx->push_path_second.c_str(), 0, 0, 0);
+    ctx->media = mk_media_create("__defaultVhost__", ctx->push_path_first.c_str(),
+                                 ctx->push_path_second.c_str(), 0, 0, 0);
     codec_args v_args = {0};
     mk_track v_track = mk_track_create(MKCodecH264, &v_args);
     mk_media_init_track(ctx->media, v_track);
-
     mk_media_init_complete(ctx->media);
     mk_media_set_on_regist(ctx->media, on_mk_media_source_regist_func, ctx);
 
-    // 初始化编码器
+    // Initialize encoder
     MppEncoder *mpp_encoder = new MppEncoder();
     MppEncoderParams enc_params;
     memset(&enc_params, 0, sizeof(MppEncoderParams));
     enc_params.width = cap_width;
     enc_params.height = cap_height;
-    enc_params.fmt = MPP_FMT_YUV420SP;
+    enc_params.fmt = MPP_FMT_YUV420SP; // We'll convert from YUYV to YUV420SP
     enc_params.type = MPP_VIDEO_CodingAVC;
     mpp_encoder->Init(enc_params, ctx);
     ctx->encoder = mpp_encoder;
 
-    // mpp编码配置
+    // Variables for frame processing
     void *mpp_frame = NULL;
     int mpp_frame_fd = 0;
     void *mpp_frame_addr = NULL;
     int enc_data_size;
-
-    int frame_index = 0;
-    int ret = 0;
+    cv::Mat frame, yuv_frame;
 
-    // 画面
-    cv::Mat img;
-
-    while (true)
-    {
-        // 读取视频帧
-        cap >> img;
-        if (img.empty())
-        {
-            printf("Video end.");
+    while (true) {
+        // Read frame from camera
+        if (!cap.read(frame)) {
+            printf("Failed to read frame from camera\n");
             break;
         }
-        frame_index++;
-        // 结束计时
-        auto end_time = std::chrono::high_resolution_clock::now();
-        // 将当前时间点转换为毫秒级别的时间戳
-        auto millis = std::chrono::time_point_cast<std::chrono::milliseconds>(end_time).time_since_epoch().count();
+        if (frame.empty()) {
+            printf("Empty frame received\n");
+            continue;
+        }
 
-        // 获取解码后的帧
+        // Get current timestamp
+        auto millis = std::chrono::duration_cast<std::chrono::milliseconds>(
+            std::chrono::system_clock::now().time_since_epoch()).count();
+
+        // Get encoder input buffer
         mpp_frame = ctx->encoder->GetInputFrameBuffer();
-        // 获取解码后的帧fd
         mpp_frame_fd = ctx->encoder->GetInputFrameBufferFd(mpp_frame);
-        // 获取解码后的帧地址
         mpp_frame_addr = ctx->encoder->GetInputFrameBufferAddr(mpp_frame);
 
-        rga_buffer_t src = wrapbuffer_fd(mpp_frame_fd, cap_width, cap_height, RK_FORMAT_YCbCr_420_SP,padToMultipleOf16(cap_width),padToMultipleOf16(cap_height));
+        // Convert YUYV to YUV420SP (NV12) using RGA
+        rga_buffer_t src = wrapbuffer_virtualaddr((void *)frame.data,
+                                                  cap_width, cap_height,
+                                                  RK_FORMAT_YUYV_422);
+        rga_buffer_t dst = wrapbuffer_fd(mpp_frame_fd,
+                                         cap_width, cap_height,
+                                         RK_FORMAT_YCbCr_420_SP,
+                                         padToMultipleOf16(cap_width),
+                                         padToMultipleOf16(cap_height));
+        // Perform color space conversion
+        imcvtcolor(src, dst, RK_FORMAT_YUYV_422, RK_FORMAT_YCbCr_420_SP);
+
+        // Encode frame
         int enc_buf_size = ctx->encoder->GetFrameSize();
-
         char *enc_data = (char *)malloc(enc_buf_size);
-
-        rga_buffer_t rgb_img = wrapbuffer_virtualaddr((void *)img.data, cap_width, cap_height, RK_FORMAT_BGR_888);
-        // 将RGB图像复制到src中
-        imcopy(rgb_img, src);
-
-        if (frame_index == 1)
-        {
-            enc_data_size = ctx->encoder->GetHeader(enc_data, enc_buf_size);
-        }
-        // 内存初始化
         memset(enc_data, 0, enc_buf_size);
         enc_data_size = ctx->encoder->Encode(mpp_frame, enc_data, enc_buf_size);
-        ret = mk_media_input_h264(ctx->media, enc_data, enc_data_size, millis, millis);
-        if (ret != 1)
-        {
+        // Push to RTSP
+        int ret = mk_media_input_h264(ctx->media, enc_data, enc_data_size, millis, millis);
+        if (ret != 1) {
             printf("mk_media_input_frame failed\n");
         }
-        if (enc_data != nullptr)
-        {
+
+        if (enc_data != nullptr) {
             free(enc_data);
         }
+
+        // Small delay to maintain frame rate
+        usleep(1000000 / fps);
     }
-    // 释放资源
+
+    // Release resources
     cap.release();
     release_track(&v_track);
     release_media(&ctx->media);
-
+
     return 0;
 }
 
-int main(int argc, char **argv)
-{
-    int status = 0;
-    int ret;
-
-    if (argc != 2)
-    {
-        printf("Usage: %s\n", argv[0]);
-        return -1;
-    }
-    char *stream_url = argv[1]; // 视频流地址
-    int video_type = 264;
-
-    // 初始化流媒体
+int main(int argc, char **argv) {
+    // Initialize media kit
     mk_config config;
     memset(&config, 0, sizeof(mk_config));
     config.log_mask = LOG_CONSOLE;
@@ -262,19 +213,20 @@ int main(int argc, char **argv)
     mk_env_init(&config);
     mk_rtsp_server_start(3554, 0);
 
-    rknn_app_context_t app_ctx; // 创建上下文
-    memset(&app_ctx, 0, sizeof(rknn_app_context_t)); // 初始化上下文
-    app_ctx.video_type = video_type;
-    app_ctx.push_path_first = "yunyan-live";
-    app_ctx.push_path_second = "test";
+    // Initialize application context
+    rknn_app_context_t app_ctx;
+    memset(&app_ctx, 0, sizeof(rknn_app_context_t));
+    app_ctx.video_type = 264; // H.264
+    app_ctx.push_path_first = "live";
+    app_ctx.push_path_second = "stream";
 
-    process_video_file(&app_ctx, stream_url);
+    // Start processing camera stream
+    process_camera_stream(&app_ctx);
 
-    printf("waiting finish\n");
+    printf("Streaming finished\n");
     usleep(3 * 1000 * 1000);
 
-    if (app_ctx.encoder != nullptr)
-    {
+    if (app_ctx.encoder != nullptr) {
         delete (app_ctx.encoder);
         app_ctx.encoder = nullptr;
     }
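Note on the capture format: cv::VideoCapture's V4L2 backend converts captured frames to BGR by default, so frame.data is not guaranteed to hold raw YUYV bytes just because the FOURCC was set; if the default conversion is left on, the buffer wrapped as RK_FORMAT_YUYV_422 would actually contain BGR data. The standalone sketch below is not part of the patch; it only checks what the camera actually delivers, assuming the same device path and resolution as the diff, and uses OpenCV's CAP_PROP_CONVERT_RGB property to request the raw driver buffer.

// capture_format_check.cpp - standalone sketch, assumes /dev/video61 and 2452x2056 as in the patch.
#include <opencv2/opencv.hpp>
#include <cstdio>

int main() {
    cv::VideoCapture cap("/dev/video61", cv::CAP_V4L2);
    if (!cap.isOpened()) {
        printf("Failed to open camera device: /dev/video61\n");
        return -1;
    }
    cap.set(cv::CAP_PROP_FOURCC, cv::VideoWriter::fourcc('Y', 'U', 'Y', 'V'));
    cap.set(cv::CAP_PROP_FRAME_WIDTH, 2452);
    cap.set(cv::CAP_PROP_FRAME_HEIGHT, 2056);
    // Ask OpenCV for the raw driver buffer instead of a BGR-converted frame.
    cap.set(cv::CAP_PROP_CONVERT_RGB, false);

    cv::Mat frame;
    if (!cap.read(frame)) {
        printf("Failed to read frame from camera\n");
        return -1;
    }
    // Raw YUYV is 2 bytes per pixel; a BGR-converted frame is 3 bytes per pixel.
    size_t bytes = frame.total() * frame.elemSize();
    printf("frame buffer: %zu bytes, %dx%d reported, %d channel(s)\n",
           bytes, frame.cols, frame.rows, frame.channels());
    return 0;
}

If the reported buffer works out to 3 bytes per pixel, process_camera_stream should either disable the conversion in the same way before wrapping frame.data as RK_FORMAT_YUYV_422, or keep wrapping the frame as a BGR buffer the way the removed imcopy path did.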