结合前面的 v4l2 视频采集, 使用 live555, 通过 rtsp 发布实时流. capture.h, capture.cpp, vcompress.h, vcompress.cpp 需要参考前面几篇文章. 这里仅仅贴出 v4l2_x264_service.cpp
以下为 v4l2_x264_service.cpp 的完整代码:
#include #include #include #include #include #include #include #include #include #include capture.h #include vcompress.h static usageenvironment *_env = 0; #define sink_port 3030 #define video_width 320 #define video_height 240 #define frame_per_sec 5.0 pid_t gettid() { return syscall(sys_gettid); } // 使用 webcam + x264 class webcamframesource : public framedsource { void *mp_capture, *mp_compress; // v4l2 + x264 encoder int m_started; void *mp_token; public: webcamframesource (usageenvironment &env) : framedsource(env) { fprintf(stderr, [%d] %s .... calling\n, gettid(), __func__); mp_capture = capture_open(/dev/video0, video_width, video_height, pix_fmt_yuv420p); if (!mp_capture) { fprintf(stderr, %s: open /dev/video0 err\n, __func__); exit(-1); } mp_compress = vc_open(video_width, video_height, frame_per_sec); if (!mp_compress) { fprintf(stderr, %s: open x264 err\n, __func__); exit(-1); } m_started = 0; mp_token = 0; } ~webcamframesource () { fprintf(stderr, [%d] %s .... calling\n, gettid(), __func__); if (m_started) { envir().taskscheduler().unscheduledelayedtask(mp_token); } if (mp_compress) vc_close(mp_compress); if (mp_capture) capture_close(mp_capture); } protected: virtual void dogetnextframe () { if (m_started) return; m_started = 1; // 根据 fps, 计算等待时间 double delay = 1000.0 / frame_per_sec; int to_delay = delay * 1000; // us mp_token = envir().taskscheduler().scheduledelayedtask(to_delay, getnextframe, this); }
[cpp] view plaincopy
virtual unsigned maxframesize() const // 这个很重要, 如果不设置, 可能导致 getnextframe() 出现 fmaxsize 小于实际编码帧的情况, 导致图像不完整
[cpp] view plaincopy
{ return 100*1024; }
[cpp] view plaincopy
private: static void getnextframe (void *ptr) { ((webcamframesource*)ptr)->getnextframe1(); } void getnextframe1 () { // capture: picture pic; if (capture_get_picture(mp_capture, &pic) fprintf(stderr, ==== %s: capture_get_picture err\n, __func__); m_started = 0; return; } // compress const void *outbuf; int outlen; if (vc_compress(mp_compress, pic.data, pic.stride, &outbuf, &outlen) fprintf(stderr, ==== %s: vc_compress err\n, __func__); m_started = 0; return; } int64_t pts, dts; int key; vc_get_last_frame_info(mp_compress, &key, &pts, &dts); // save outbuf gettimeofday(&fpresentationtime, 0); fframesize = outlen; if (fframesize > fmaxsize) { fnumtruncatedbytes = fframesize - fmaxsize; fframesize = fmaxsize; } else { fnumtruncatedbytes = 0; } memmove(fto, outbuf, fframesize); // notify aftergetting(this); m_started = 0; } }; class webcamondemandmediasubsession : public ondemandservermediasubsession { public: static webcamondemandmediasubsession *createnew (usageenvironment &env, framedsource *source) { return new webcamondemandmediasubsession(env, source); } protected: webcamondemandmediasubsession (usageenvironment &env, framedsource *source) : ondemandservermediasubsession(env, true) // reuse the first source { fprintf(stderr, [%d] %s .... calling\n, gettid(), __func__); mp_source = source; mp_sdp_line = 0; } ~webcamondemandmediasubsession () { fprintf(stderr, [%d] %s .... calling\n, gettid(), __func__); if (mp_sdp_line) free(mp_sdp_line); } private: static void afterplayingdummy (void *ptr) { fprintf(stderr, [%d] %s .... calling\n, gettid(), __func__); // ok webcamondemandmediasubsession *this = (webcamondemandmediasubsession*)ptr; this->m_done = 0xff; } static void chkforauxsdpline (void *ptr) { webcamondemandmediasubsession *this = (webcamondemandmediasubsession *)ptr; this->chkforauxsdpline1(); } void chkforauxsdpline1 () { fprintf(stderr, [%d] %s .... 
calling\n, gettid(), __func__); if (mp_dummy_rtpsink->auxsdpline()) m_done = 0xff; else { int delay = 100*1000; // 100ms nexttask() = envir().taskscheduler().scheduledelayedtask(delay, chkforauxsdpline, this); } } protected: virtual const char *getauxsdpline (rtpsink *sink, framedsource *source) { fprintf(stderr, [%d] %s .... calling\n, gettid(), __func__); if (mp_sdp_line) return mp_sdp_line; mp_dummy_rtpsink = sink; mp_dummy_rtpsink->startplaying(*source, 0, 0); //mp_dummy_rtpsink->startplaying(*source, afterplayingdummy, this); chkforauxsdpline(this); m_done = 0; envir().taskscheduler().doeventloop(&m_done); mp_sdp_line = strdup(mp_dummy_rtpsink->auxsdpline()); mp_dummy_rtpsink->stopplaying(); return mp_sdp_line; } virtual rtpsink *createnewrtpsink(groupsock *rtpsock, unsigned char type, framedsource *source) { fprintf(stderr, [%d] %s .... calling\n, gettid(), __func__); return h264videortpsink::createnew(envir(), rtpsock, type); } virtual framedsource *createnewstreamsource (unsigned sid, unsigned &bitrate) { fprintf(stderr, [%d] %s .... 
calling\n, gettid(), __func__); bitrate = 500; return h264videostreamframer::createnew(envir(), new webcamframesource(envir())); } private: framedsource *mp_source; // 对应 webcamframesource char *mp_sdp_line; rtpsink *mp_dummy_rtpsink; char m_done; }; static void test_task (void *ptr) { fprintf(stderr, test: task ....\n); _env->taskscheduler().scheduledelayedtask(100000, test_task, 0); } static void test (usageenvironment &env) { fprintf(stderr, test: begin...\n); char done = 0; int delay = 100 * 1000; env.taskscheduler().scheduledelayedtask(delay, test_task, 0); env.taskscheduler().doeventloop(&done); fprintf(stderr, test: end..\n); } int main (int argc, char **argv) { // env taskscheduler *scheduler = basictaskscheduler::createnew(); _env = basicusageenvironment::createnew(*scheduler); // test //test(*_env); // rtsp server rtspserver *rtspserver = rtspserver::createnew(*_env, 8554); if (!rtspserver) { fprintf(stderr, err: create rtspserver err\n); ::exit(-1); } // add live stream do { webcamframesource *webcam_source = 0; servermediasession *sms = servermediasession::createnew(*_env, webcam, 0, session from /dev/video0); sms->addsubsession(webcamondemandmediasubsession::createnew(*_env, webcam_source)); rtspserver->addservermediasession(sms); char *url = rtspserver->rtspurl(sms); *_env using url \ \\n; delete [] url; } while (0); // run loop _env->taskscheduler().doeventloop(); return 1; }
编译需要 live555 + libavcodec + libswscale + libx264. 客户端可用 VLC, MPlayer, QuickTime 等打开 rtsp://<主机地址>:8554/webcam 播放.
