
Implementing pjsip video calls with a third-party camera


Summary:

I have recently been working on a video calling feature based on pjsip. This write-up records the process; it may be useful to others doing similar work.

The approach: follow pjsip's original flow of capturing video from a device, encoding it, and packetizing/sending it over RTP, then modify that flow where needed.

Key points:

1. The stream coming out of the camera is already encoded H.264/H.265, so pjsip's encode/decode path does not need to run;

2. Packetization and sending: H.264 FU-A packetization and GB28181 PS encapsulation (a byte-level FU-A sketch follows below);
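For orientation before the code: the FU-A packetization used in the put_frame changes later in this post boils down to two prefix bytes derived from the NAL header byte. The helper below is my own illustration (not part of the patch) of how those bytes are formed per RFC 6184:

#include <stdint.h>

/* Illustrative helper (not from the patch): build the two FU-A prefix bytes
 * (RFC 6184) from the first byte of a NAL unit.
 *   nal_hdr  : NAL header byte (the byte right after the Annex-B start code)
 *   is_start : 1 for the first fragment of this NAL unit
 *   is_end   : 1 for the last fragment
 *   out[0]   : FU indicator = F(0) | NRI from nal_hdr | type 28 (FU-A)
 *   out[1]   : FU header    = S | E | R(0) | original NAL type
 */
static void make_fu_a_prefix(uint8_t nal_hdr, int is_start, int is_end,
                             uint8_t out[2])
{
    out[0] = (uint8_t)((nal_hdr & 0x60) | 28);
    out[1] = (uint8_t)(((is_start ? 1 : 0) << 7) |
                       ((is_end ? 1 : 0) << 6) |
                       (nal_hdr & 0x1f));
}

A NAL unit that fits into a single RTP packet is sent as-is without these two bytes; only oversized NAL units are split into FU-A fragments.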

First, the overall flow, which breaks down into the following points:

1. Camera device adaptation (several data sources can be considered here: a file, a socket, or the camera itself);

2. Implement an ov5000_dev.c modeled on android_dev.c:

/* $Id$ */
/*
 * Copyright (C) 2015 Teluu Inc. (http://www.teluu.com)
 *
 * This program is free software; you can redistribute it and/or modify
 * it under the terms of the GNU General Public License as published by
 * the Free Software Foundation; either version 2 of the License, or
 * (at your option) any later version.
 *
 * This program is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
 * GNU General Public License for more details.
 *
 * You should have received a copy of the GNU General Public License
 * along with this program; if not, write to the Free Software
 * Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA  02111-1307  USA
 */
#include "util.h"
#include <pjmedia-videodev/videodev_imp.h>
#include <pj/assert.h>
#include <pj/log.h>
#include <pj/math.h>
#include <pj/os.h>

//#define ARM_LINUX
#define VIDEO_USE_SOCKET
 


extern void set_take_ps_packet(int enable_value);
int log_printf(int a,...)
{
    return 0;
}
#define PJMEDIA_VIDEO_DEV_HAS_OV5000 1
#if defined(PJMEDIA_HAS_VIDEO) && PJMEDIA_HAS_VIDEO != 0 && \
    defined(PJMEDIA_VIDEO_DEV_HAS_OV5000) 
 

#define THIS_FILE		"ov5000_dev.c"

/* Default video params */
#define DEFAULT_CLOCK_RATE	90000
#define DEFAULT_WIDTH		352
#define DEFAULT_HEIGHT		288
#define DEFAULT_FPS		15
#define ALIGN16(x)		((((x)+15) >> 4) << 4)

/* Define whether we should maintain the aspect ratio when rotating the image.
 * For more details, please refer to util.h.
 */
#define MAINTAIN_ASPECT_RATIO 	PJ_TRUE

#define JNIEnv (void *)
/* Format map info */
typedef struct ov5000_fmt_map
{
    pjmedia_format_id   fmt_id;
    pj_uint32_t		ov5000_fmt_id;
} ov5000_fmt_map;


/* Format map.
 * Note: it seems that most of Ov5000 devices don't support I420, while
 * unfortunately, our converter (libyuv based) only support I420 & RGBA,
 * so in this case, we'd just pretend that we support I420 and we'll do
 * the NV21/YV12 -> I420 conversion here.
 */
static ov5000_fmt_map fmt_map[] =
{
    {PJMEDIA_FORMAT_NV21, 0x00000011},
    {PJMEDIA_FORMAT_YV12, 0x32315659},
    {PJMEDIA_FORMAT_I420, 0x00000023}, /* YUV_420_888 */
};


/* Device info */
typedef struct ov5000_dev_info
{
    pjmedia_vid_dev_info	 info;		/**< Base info         */
    unsigned			 dev_idx;	/**< Original dev ID   */
    pj_bool_t			 facing;	/**< Front/back camera?*/
    unsigned			 sup_size_cnt;	/**< # of supp'd size  */
    pjmedia_rect_size		*sup_size;	/**< Supported size    */
    unsigned			 sup_fps_cnt;	/**< # of supp'd FPS   */
    pjmedia_rect_size		*sup_fps;	/**< Supported FPS     */
    pj_bool_t			 has_yv12;	/**< Support YV12?     */
    pj_bool_t			 has_nv21;	/**< Support NV21?     */
    pj_bool_t			 forced_i420;	/**< Support I420 with
						     conversion		*/
} ov5000_dev_info;


/* Video factory */
typedef struct ov5000_factory
{
    pjmedia_vid_dev_factory	 base;		/**< Base factory      */
    pj_pool_t			*pool;		/**< Memory pool       */
    pj_pool_factory		*pf;		/**< Pool factory      */

    pj_pool_t			*dev_pool;	/**< Device list pool  */
    unsigned			 dev_count;	/**< Device count      */
    ov5000_dev_info		*dev_info;	/**< Device info list  */
} ov5000_factory;


/* Video stream. */
typedef struct ov5000_stream
{
    pjmedia_vid_dev_stream  base;		/**< Base stream       */
    pjmedia_vid_dev_param   param;		/**< Settings	       */
    pj_pool_t		   *pool;		/**< Memory pool       */
    ov5000_factory		   *factory;            /**< Factory           */
    
    pjmedia_vid_dev_cb	    vid_cb;		/**< Stream callback   */
    void		   *user_data;          /**< Application data  */
    pj_bool_t		    is_running;		/**< Stream running?   */
    
    void*		    jcam;		/**< PjCamera instance */

    pj_timestamp            frame_ts;		/**< Current timestamp */
    unsigned                ts_inc;		/**< Timestamp interval*/
    unsigned		    convert_to_i420;	/**< Need to convert to I420?
						     0: no
						     1: from NV21
						     2: from YV12	*/
    
    /** Capture thread info */
    pj_bool_t		    thread_initialized;
    pj_thread_desc	    thread_desc;
    pj_thread_t		   *thread;

    /** NV21/YV12 -> I420 Conversion buffer  */
    pj_uint8_t		   *convert_buf;
    pjmedia_rect_size	    cam_size;
    
    /** Converter to rotate frame  */
    pjmedia_vid_dev_conv    conv;

    //
    pj_uint8_t *sps_pps;
	int sps_pps_p_index;
	int pps_len;
	int sps_pps_len;
	int recive_video_packet_count;
    
    
    /** Frame format param for NV21/YV12 -> I420 conversion */
    pjmedia_video_apply_fmt_param vafp;

    //capture thread.
    pj_thread_t		*ca_thread;
} ov5000_stream;


/* Prototypes */
static pj_status_t ov5000_factory_init(pjmedia_vid_dev_factory *f);
static pj_status_t ov5000_factory_destroy(pjmedia_vid_dev_factory *f);
static pj_status_t ov5000_factory_refresh(pjmedia_vid_dev_factory *f); 
static unsigned    ov5000_factory_get_dev_count(pjmedia_vid_dev_factory *f);
static pj_status_t ov5000_factory_get_dev_info(pjmedia_vid_dev_factory *f,
					    unsigned index,
					    pjmedia_vid_dev_info *info);
static pj_status_t ov5000_factory_default_param(pj_pool_t *pool,
                                             pjmedia_vid_dev_factory *f,
					     unsigned index,
					     pjmedia_vid_dev_param *param);
static pj_status_t ov5000_factory_create_stream(
					pjmedia_vid_dev_factory *f,
					pjmedia_vid_dev_param *param,
					const pjmedia_vid_dev_cb *cb,
					void *user_data,
					pjmedia_vid_dev_stream **p_vid_strm);


static pj_status_t ov5000_stream_get_param(pjmedia_vid_dev_stream *strm,
                                        pjmedia_vid_dev_param *param);
static pj_status_t ov5000_stream_get_cap(pjmedia_vid_dev_stream *strm,
                                      pjmedia_vid_dev_cap cap,
                                      void *value);
static pj_status_t ov5000_stream_set_cap(pjmedia_vid_dev_stream *strm,
                                      pjmedia_vid_dev_cap cap,
                                      const void *value);
static pj_status_t ov5000_stream_start(pjmedia_vid_dev_stream *strm);
static pj_status_t ov5000_stream_stop(pjmedia_vid_dev_stream *strm);
static pj_status_t ov5000_stream_destroy(pjmedia_vid_dev_stream *strm);
static void  OnGetFrame2(uint8_t* data, int length, void* user_data);


/* Operations */
static pjmedia_vid_dev_factory_op factory_op =
{
    &ov5000_factory_init,
    &ov5000_factory_destroy,
    &ov5000_factory_get_dev_count,
    &ov5000_factory_get_dev_info,
    &ov5000_factory_default_param,
    &ov5000_factory_create_stream,
    &ov5000_factory_refresh
};

static pjmedia_vid_dev_stream_op stream_op =
{
    &ov5000_stream_get_param,
    &ov5000_stream_get_cap,
    &ov5000_stream_set_cap,
    &ov5000_stream_start,
    NULL,
    NULL,
    &ov5000_stream_stop,
    &ov5000_stream_destroy
};


/****************************************************************************
 * OTHER stuff
 */

/* Use camera2 (since Ov5000 API level 21) */
#define USE_CAMERA2	0

#if USE_CAMERA2
#define PJ_CAMERA			"PjCamera2"
#define PJ_CAMERA_INFO			"PjCameraInfo2"
#else
#define PJ_CAMERA			"PjCamera"
#define PJ_CAMERA_INFO			"PjCameraInfo"
#endif

#define PJ_CLASS_PATH			"org/pjsip/"
#define PJ_CAMERA_CLASS_PATH		PJ_CLASS_PATH PJ_CAMERA
#define PJ_CAMERA_INFO_CLASS_PATH	PJ_CLASS_PATH PJ_CAMERA_INFO


static volatile ov5000_stream *vid_stream =NULL;
static void  OnGetFrame(uint8_t* data, int length,
			       void* user_data); 
 

/* GB28181 SDP session name: s=Play / s=Download / s=Playback */
enum gb28181_history_play_source_type
{
	 PJMEDIA_VID_S_PLAY = 0,
	 PJMEDIA_VID_S_DOWNLOAD,
	 PJMEDIA_VID_S_PLAYBACK,
	 PJMEDIA_VID_S_OTHER
};

static char gb28181_history_video_id[255];
static int   gb28181_history_play_source_type = PJMEDIA_VID_S_PLAY;//
void set_gb28181_history_play_source_type(int source_type, char *history_file_id){
	gb28181_history_play_source_type = source_type;
	if (source_type != PJMEDIA_VID_S_PLAY){
		memset(gb28181_history_video_id, 0x00, sizeof(gb28181_history_video_id));
		strcpy(gb28181_history_video_id, history_file_id);
	}
}
static volatile int ov_stream_capture_end = 0;
static volatile int ov5000_samplerate = 600000;
static volatile int ov5000_framerate = 25;


/****************************************************************************
 * Helper functions
 */
static pjmedia_format_id ov5000_fmt_to_pj(pj_uint32_t fmt)
{
    unsigned i;
    for (i = 0; i < PJ_ARRAY_SIZE(fmt_map); i++) {
        if (fmt_map[i].ov5000_fmt_id == fmt)
            return fmt_map[i].fmt_id;
    }
    return 0;
}

static pj_uint32_t pj_fmt_to_and(pjmedia_format_id fmt)
{
    unsigned i;
    for (i = 0; i < PJ_ARRAY_SIZE(fmt_map); i++) {
        if (fmt_map[i].fmt_id == fmt)
            return fmt_map[i].ov5000_fmt_id;
    }
    return 0;
}


/****************************************************************************
 * Factory operations
 */
 
int video_encode_callback_test_f(const unsigned char *_data, int _data_len)
{
#if 1
    pj_thread_t  *call_thread = NULL;
    /* Register this (external capture) thread with pjlib before calling
     * into pjmedia, if it is not registered yet. */
    if(!pj_thread_is_registered())
    {    
      pj_thread_desc      thread_desc;

        if (pj_thread_register("ov5000_capture_thread_func", thread_desc, &call_thread) == PJ_SUCCESS)
        {
          printf("pj_thread_register ok.\r\n");  
        }
    }
#endif

    //PJ_LOG(4, (THIS_FILE, "video_encode_callback_test_f"));
    OnGetFrame2((uint8_t *)_data, _data_len, (void *)vid_stream);
    return 0;
}


/*
 * Init ov5000_ video driver.
 */
pjmedia_vid_dev_factory* pjmedia_ov5000_factory(pj_pool_factory *pf)
{
    ov5000_factory *f;
    pj_pool_t *pool;

    pool = pj_pool_create(pf, "ov5000_video", 512, 512, NULL);
    f = PJ_POOL_ZALLOC_T(pool, ov5000_factory);
    f->pf = pf;
    f->pool = pool;
    f->base.op = &factory_op;
    f->dev_pool = pj_pool_create(pf, "ov5000_video_dev", 512, 512, NULL);

    return &f->base;
}


/* API: init factory */
static pj_status_t ov5000_factory_init(pjmedia_vid_dev_factory *ff)
{
    pj_status_t status;
  
    status = ov5000_factory_refresh(ff);
    if (status != PJ_SUCCESS)
	return status;

    PJ_LOG(4, (THIS_FILE, "ov5000 video src initialized with 1 device(s)"));


    return PJ_SUCCESS;
}


/* API: destroy factory */
static pj_status_t ov5000_factory_destroy(pjmedia_vid_dev_factory *ff)
{
    ov5000_factory *f = (ov5000_factory*)ff;

    pj_pool_safe_release(&f->dev_pool);
    pj_pool_safe_release(&f->pool);

    return PJ_SUCCESS;
}


/* API: refresh the list of devices */
static pj_status_t ov5000_factory_refresh(pjmedia_vid_dev_factory *ff)
{
    ov5000_factory *f = (ov5000_factory*)ff;
    pj_status_t status = PJ_SUCCESS;
 
    pj_bool_t with_attach, found_front = PJ_FALSE;
    int i, dev_count = 1;
	//need adaptor by , recreate camera?

    /* Clean up device info and pool */
    f->dev_count = 1;
    pj_pool_reset(f->dev_pool); 

    /* Start querying device info */
    f->dev_info = (ov5000_dev_info*)
 		  pj_pool_calloc(f->dev_pool, dev_count,
 				 sizeof(ov5000_dev_info));
 
    for (i = 0; i < dev_count; i++) {
	ov5000_dev_info *adi = &f->dev_info[i];
	pjmedia_vid_dev_info *vdi = &adi->info;

	/* Set device ID, direction, and has_callback info */
	adi->dev_idx = i;
	//adi->has_nv21 = 1;
	vdi->fmt_cnt = 0;	/* pjmedia_format_init_video() below fills fmt[0] and increments this */
	vdi->id = f->dev_count;
	vdi->dir = PJMEDIA_DIR_CAPTURE;
	vdi->has_callback = PJ_TRUE;
	vdi->caps = 
		    PJMEDIA_VID_DEV_CAP_INPUT_PREVIEW;
      adi->forced_i420 = 1;
      adi->sup_size_cnt = 1;
	/* Landscape video */
      adi->sup_size = pj_pool_calloc(f->dev_pool, adi->sup_size_cnt,
					   sizeof(adi->sup_size[0]));
	adi->sup_size[0].w = 640;
	adi->sup_size[0].h = 480;
	pjmedia_format_init_video(&vdi->fmt[vdi->fmt_cnt++],
				  PJMEDIA_FORMAT_I420,
				  adi->sup_size[0].w,
				  adi->sup_size[0].h,
				  DEFAULT_FPS, 1);

	/* Set driver & name info */
	pj_ansi_strncpy(vdi->driver, "linux", sizeof(vdi->driver));
	pj_ansi_strncpy(vdi->name, "ov5000", sizeof(vdi->name));
    }
    return status;
}


/* API: get number of devices */
static unsigned ov5000_factory_get_dev_count(pjmedia_vid_dev_factory *ff)
{
    ov5000_factory *f = (ov5000_factory*)ff;
    return f->dev_count;
}


/* API: get device info */
static pj_status_t ov5000_factory_get_dev_info(pjmedia_vid_dev_factory *f,
					    unsigned index,
					    pjmedia_vid_dev_info *info)
{
    ov5000_factory *cf = (ov5000_factory*)f;

    PJ_ASSERT_RETURN(index < cf->dev_count, PJMEDIA_EVID_INVDEV);

    pj_memcpy(info, &cf->dev_info[index].info, sizeof(*info));

    return PJ_SUCCESS;
}


/* API: create default device parameter */
static pj_status_t ov5000_factory_default_param(pj_pool_t *pool,
                                             pjmedia_vid_dev_factory *f,
					     unsigned index,
					     pjmedia_vid_dev_param *param)
{
    ov5000_factory *cf = (ov5000_factory*)f;
    ov5000_dev_info *di = &cf->dev_info[index];

    PJ_ASSERT_RETURN(index < cf->dev_count, PJMEDIA_EVID_INVDEV);

    PJ_UNUSED_ARG(pool);

    pj_bzero(param, sizeof(*param));
    param->dir = PJMEDIA_DIR_CAPTURE;
    param->cap_id = index;
    param->rend_id = PJMEDIA_VID_INVALID_DEV;
    param->flags = PJMEDIA_VID_DEV_CAP_FORMAT;
    param->clock_rate = DEFAULT_CLOCK_RATE;
    pj_memcpy(&param->fmt, &di->info.fmt[0], sizeof(param->fmt));
 


    return PJ_SUCCESS;
}


/* API: create stream */
static pj_status_t ov5000_factory_create_stream(
					pjmedia_vid_dev_factory *ff,
					pjmedia_vid_dev_param *param,
					const pjmedia_vid_dev_cb *cb,
					void *user_data,
					pjmedia_vid_dev_stream **p_vid_strm)
{
    ov5000_factory *f = (ov5000_factory*)ff;
    pj_pool_t *pool;
    ov5000_stream *strm;
    ov5000_dev_info *adi;
    const pjmedia_video_format_detail *vfd;
    const pjmedia_video_format_info *vfi;
    pjmedia_video_apply_fmt_param vafp;
    pj_uint32_t ov5000_fmt = 0;
    unsigned convert_to_i420 = 0;
    pj_status_t status = PJ_SUCCESS;
 
    PJ_ASSERT_RETURN(f && param && p_vid_strm, PJ_EINVAL);
    PJ_ASSERT_RETURN(param->fmt.type == PJMEDIA_TYPE_VIDEO &&
		     param->fmt.detail_type == PJMEDIA_FORMAT_DETAIL_VIDEO &&
                     param->dir == PJMEDIA_DIR_CAPTURE,
		     PJ_EINVAL);

/* Camera2 supports only I420 for now */
#if USE_CAMERA2
    if (param->fmt.id != PJMEDIA_FORMAT_I420)
        return PJMEDIA_EVID_BADFORMAT;
#endif

    pj_bzero(&vafp, sizeof(vafp));
    adi = &f->dev_info[param->cap_id];
    vfd = pjmedia_format_get_video_format_detail(&param->fmt, PJ_TRUE);
    vfi = pjmedia_get_video_format_info(NULL, param->fmt.id);

    if (param->fmt.id == PJMEDIA_FORMAT_I420 && adi->forced_i420) {
	convert_to_i420 = 0;
    } 
    if (!vfi)
        return PJMEDIA_EVID_BADFORMAT;

    vafp.size = vfd->size;
    if (vfi->apply_fmt(vfi, &vafp) != PJ_SUCCESS)
        return PJMEDIA_EVID_BADFORMAT;

    /* Create and Initialize stream descriptor */
    pool = pj_pool_create(f->pf, "ov5000-dev", 512, 512, NULL);
    PJ_ASSERT_RETURN(pool != NULL, PJ_ENOMEM);

    strm = PJ_POOL_ZALLOC_T(pool, ov5000_stream);
    pj_memcpy(&strm->param, param, sizeof(*param));
    strm->pool = pool;
    strm->factory = f;
    pj_memcpy(&strm->vid_cb, cb, sizeof(*cb));
    strm->user_data = user_data;
    pj_memcpy(&strm->vafp, &vafp, sizeof(vafp));
    strm->ts_inc = PJMEDIA_SPF2(param->clock_rate, &vfd->fps, 1);

	strm->sps_pps = (pj_uint8_t*)
		  pj_pool_calloc(f->dev_pool, 56,
				 sizeof(pj_uint8_t));
	strm->recive_video_packet_count = 0;
	strm->sps_pps_len = 0;
	
    /* Allocate buffer for YV12 -> I420 conversion.
     * The camera2 is a bit tricky with format, for example it reports
     * for I420 support (and no NV21 support), however the incoming frame
     * buffers are actually in NV21 format (e.g: pixel stride is 2), so
     * we should always check and conversion buffer may be needed.
     */
    #if 0
    if (USE_CAMERA2 || convert_to_i420) {
	pj_assert(vfi->plane_cnt > 1);
	strm->convert_to_i420 = convert_to_i420;
	strm->convert_buf = pj_pool_alloc(pool, vafp.plane_bytes[1]);
    }

    /* Native preview */
    if (param->flags & PJMEDIA_VID_DEV_CAP_INPUT_PREVIEW) {
    }
 
    strm->jcam = NULL; 
	//need adaptor by , create camera?
	
    
    /* Video orientation.
     * If we send in portrait, we need to set up orientation converter
     * as well.
     */
    if ((param->flags & PJMEDIA_VID_DEV_CAP_ORIENTATION) ||
        (vfd->size.h > vfd->size.w))
    {
        if (param->orient == PJMEDIA_ORIENT_UNKNOWN)
    	    param->orient = PJMEDIA_ORIENT_NATURAL;
        ov5000_stream_set_cap(&strm->base, PJMEDIA_VID_DEV_CAP_ORIENTATION,
    		           &param->orient);
    }
    #endif//

	
    // add for adaptor: remember this stream so the external capture callback can reach it.
    vid_stream = strm;

on_return: 
    /* Success */
    if (status == PJ_SUCCESS) {
	strm->base.op = &stream_op;
	*p_vid_strm = &strm->base;
    }

    return status;
}


/****************************************************************************
 * Stream operations
 */


/* API: Get stream info. */
static pj_status_t ov5000_stream_get_param(pjmedia_vid_dev_stream *s,
                                        pjmedia_vid_dev_param *pi)
{
    ov5000_stream *strm = (ov5000_stream*)s;
    
    PJ_ASSERT_RETURN(strm && pi, PJ_EINVAL);
    
    pj_memcpy(pi, &strm->param, sizeof(*pi));

    if (ov5000_stream_get_cap(s, PJMEDIA_VID_DEV_CAP_OUTPUT_WINDOW,
                             &pi->window) == PJ_SUCCESS)
    {
        pi->flags |= PJMEDIA_VID_DEV_CAP_OUTPUT_WINDOW;
    }
    
    return PJ_SUCCESS;
}


/* API: get capability */
static pj_status_t ov5000_stream_get_cap(pjmedia_vid_dev_stream *s,
                                      pjmedia_vid_dev_cap cap,
                                      void *pval)
{
    ov5000_stream *strm = (ov5000_stream*)s;
    
    PJ_UNUSED_ARG(strm);
    
    PJ_ASSERT_RETURN(s && pval, PJ_EINVAL);
    
    if (cap == PJMEDIA_VID_DEV_CAP_OUTPUT_WINDOW) {
        //pjmedia_vid_dev_hwnd *wnd = (pjmedia_vid_dev_hwnd *)pval;
        //wnd->info.Ov5000.window = strm->window;
        //return PJ_SUCCESS;
    }

    return PJMEDIA_EVID_INVCAP;
}


/* API: set capability */
static pj_status_t ov5000_stream_set_cap(pjmedia_vid_dev_stream *s,
                                      pjmedia_vid_dev_cap cap,
                                      const void *pval)
{
    ov5000_stream *strm = (ov5000_stream*)s; 
    pj_bool_t with_attach;
    pj_status_t status = PJ_SUCCESS;
    
    PJ_ASSERT_RETURN(s && pval, PJ_EINVAL);

    switch (cap) {
       case PJMEDIA_VID_DEV_CAP_INPUT_PREVIEW:
	 {
        }
	break;
	case PJMEDIA_VID_DEV_CAP_SWITCH:
	{
	    pjmedia_vid_dev_switch_param *p = (pjmedia_vid_dev_switch_param*)
					      pval;
	    ov5000_dev_info *adi;
	    int res;
            
	    /* Just return if current and target device are the same */
            if (strm->param.cap_id == p->target_id)
                return PJ_SUCCESS;

	    /* Verify target capture ID */
	    if (p->target_id < 0 || p->target_id >= strm->factory->dev_count)
		return PJ_EINVAL;

	    /* Ok, let's do the switch */
	    adi = &strm->factory->dev_info[p->target_id];
	    PJ_LOG(4, (THIS_FILE, "Switching camera to %s..", adi->info.name));
 
	    break;
	}

        case PJMEDIA_VID_DEV_CAP_ORIENTATION:
        {
            pjmedia_orient orient = *(pjmedia_orient *)pval;
            pjmedia_orient eff_ori;
            ov5000_dev_info *adi;

	    pj_assert(orient >= PJMEDIA_ORIENT_UNKNOWN &&
	              orient <= PJMEDIA_ORIENT_ROTATE_270DEG);

            if (orient == PJMEDIA_ORIENT_UNKNOWN)
                return PJ_EINVAL;

            pj_memcpy(&strm->param.orient, pval,
                      sizeof(strm->param.orient));

	    if (!strm->conv.conv) {
	        status = pjmedia_vid_dev_conv_create_converter(
	        				 &strm->conv, strm->pool,
	        		        	 &strm->param.fmt,
	        		        	 strm->cam_size,
	        		        	 strm->param.fmt.det.vid.size,
	        		        	 PJ_TRUE,
	        		        	 MAINTAIN_ASPECT_RATIO);
	    	
	    	if (status != PJ_SUCCESS)
	    	    return status;
	    }
	    
	    eff_ori = strm->param.orient;
	    adi = &strm->factory->dev_info[strm->param.cap_id];
	    /* Normalize the orientation for back-facing camera */
	    if (!adi->facing) {
		if (eff_ori == PJMEDIA_ORIENT_ROTATE_90DEG)
		    eff_ori = PJMEDIA_ORIENT_ROTATE_270DEG;
		else if (eff_ori == PJMEDIA_ORIENT_ROTATE_270DEG)
		    eff_ori = PJMEDIA_ORIENT_ROTATE_90DEG;
	    }
	    pjmedia_vid_dev_conv_set_rotation(&strm->conv, eff_ori);
	    
	    PJ_LOG(4, (THIS_FILE, "Video capture orientation set to %d",
	    			  strm->param.orient));

            break;
        }

	default:
	    status = PJMEDIA_EVID_INVCAP;
	    break;
    }
    
    return status;
}


static int ov5000_capture_thread_func (void *arg)
{
    struct ov5000_stream* stream = (struct ov5000_stream*) arg;

    pj_thread_t  *call_thread = NULL;
    /* Register this thread with pjlib if it is not registered yet. */
    if(!pj_thread_is_registered())
    {    
      pj_thread_desc      thread_desc;

        if (pj_thread_register("ov5000_capture_thread_func", thread_desc, &call_thread) == PJ_SUCCESS)
        {
          printf("pj_thread_register ok.\r\n");  
        }
    } 

    PJ_UNUSED_ARG(stream);
    return 0;
}


typedef enum {
    NALU_TYPE_SLICE = 1,
    NALU_TYPE_DPA = 2,
    NALU_TYPE_DPB = 3,
    NALU_TYPE_DPC = 4,
    NALU_TYPE_IDR = 5,
    NALU_TYPE_SEI = 6,
    NALU_TYPE_SPS = 7,
    NALU_TYPE_PPS = 8,
    NALU_TYPE_AUD = 9,
    NALU_TYPE_EOSEQ = 10,
    NALU_TYPE_EOSTREAM = 11,
    NALU_TYPE_FILL = 12,
} NaluType;

typedef struct Nalu_ {
    char * packet;
    int length;
    NaluType type;
}NALU;
typedef enum {
	NALU_PRIPORITY_DISPOSABLE = 0,
	NALU_PRIORITY_LOW = 1,
	NALU_PRIORITY_HIGH = 2,
	NALU_PRIORITY_HIGHTEST = 3,
} NaluPriority;

typedef struct {
	int             startcodeprefix_len;        //! 4 for parameter sets and first slice in picture, 3 for everything else (suggest)
	unsigned        len;                        //! Length of the NAL unit (excluding the start code, which does not belong to the NALU)
	int             max_size;                   //! Nalu Unit Buffer size
	int             forbidden_bit;              //! should be always FALSE
	int             nal_reference_idc;          //! NALU_PRIPORITY_xxxx
	int             nal_unit_type;              //! NALU_TYPE_xxxx
	char*           buf;                        //! contains the first byte followed by the EBSP
} NALU_t;
static int info2 = 0, info3 = 0;

static int FindStartCode2(unsigned char *Buf) {
	if (Buf[0] != 0 || Buf[1] != 0 || Buf[2] != 1) return 0;    /* is it the 3-byte 0x000001 start code? */
	else return 1;
}

static int FindStartCode3(unsigned char *Buf) {
	if (Buf[0] != 0 || Buf[1] != 0 || Buf[2] != 0 || Buf[3] != 1) return 0;     /* is it the 4-byte 0x00000001 start code? */
	else return 1;
}

static int GetAnnexbNALU(FILE *h264bitstream, NALU_t *nalu) {
	int pos = 0;
	int startCodeFound, rewind;
	unsigned char Buf[50000];	/* temporary fixed-size buffer on the stack (originally heap-allocated) */

	if (h264bitstream == NULL || nalu == NULL){
	    return 0;
	}
	//if ((Buf = (unsigned char*)calloc(nalu->max_size, sizeof(char))) == NULL)
		//printf("GetAnnexbNALU: Could not allocate Buf memory\n");

	nalu->startcodeprefix_len = 3;

	if (3 != fread(Buf, 1, 3, h264bitstream)) {
		return 0;
	}

	info2 = FindStartCode2(Buf);
	if (info2 != 1) {   /* not a 3-byte 0x000001 start code */
		if (1 != fread(Buf + 3, 1, 1, h264bitstream)) {
			return -1;
		}
		info3 = FindStartCode3(Buf);
		if (info3 != 1) {       /* not a 4-byte 0x00000001 start code either */
			return -1;
		}
		else {
			pos = 4;
			nalu->startcodeprefix_len = 4;
		}
	}
	else {
		pos = 3;
		nalu->startcodeprefix_len = 3;
	}

	startCodeFound = 0;
	info2 = 0;
	info3 = 0;

	while (!startCodeFound) {
		if (feof(h264bitstream)) {
			if ((pos - 1) < nalu->startcodeprefix_len){
				printf("GetAnnexbNALU: failed, pos:%d, nalu->startcodeprefix_len:%d\n", pos, nalu->startcodeprefix_len);
				return pos - 1;
			}
		    nalu->len = (pos - 1) - nalu->startcodeprefix_len;
			memcpy(nalu->buf, &Buf[nalu->startcodeprefix_len], nalu->len);
			nalu->forbidden_bit = nalu->buf[0] & 0x80;       //1 bit
			nalu->nal_reference_idc = nalu->buf[0] & 0x60;   //2 bit
			nalu->nal_unit_type = (nalu->buf[0]) & 0x1f;     //5 bit
			//free(Buf);
			return pos - 1;
		}
		if (pos > (sizeof(Buf)-1)){
		       printf("error GetAnnexbNALU: failed, pos:%d, nalu->startcodeprefix_len:%d\n", pos, nalu->startcodeprefix_len);
			break;
		}
		Buf[pos++] = fgetc(h264bitstream);
		info3 = FindStartCode3(&Buf[pos - 4]);
		if (info3 != 1)
			info2 = FindStartCode2(&Buf[pos - 3]);
		startCodeFound = (info2 == 1 || info3 == 1);
	}

	// Here we have found another start code (and read length of startcode bytes more than we should
	// have, hence ,go back in the file)
	rewind = ((info3 == 1) ? -4 : -3);
	if (0 != fseek(h264bitstream, rewind, SEEK_CUR)) {
		//free(Buf);
		printf("GetAnnexbNALU:Cannot fseek in the bit stream file");
	}


	// Here the Start code, the complete NALU, and the next start code is in the Buf
	// The size of Buf is pos , pos+rewind are the number of bytes excluding the next
	// start code, and (pos+rewind)-startcodeprefix_len is the size of the NALU excluding the start code
	nalu->len = (pos + rewind) - nalu->startcodeprefix_len;
	memcpy(nalu->buf, Buf, nalu->len+ nalu->startcodeprefix_len);
	nalu->forbidden_bit = nalu->buf[nalu->startcodeprefix_len] & 0x80;       // 1 bit
	nalu->nal_reference_idc = nalu->buf[nalu->startcodeprefix_len] & 0x60;   // 2 bit
	nalu->nal_unit_type = nalu->buf[nalu->startcodeprefix_len] & 0x1f;       // 5 bit
	
	//nalu->len = (pos + rewind) - nalu->startcodeprefix_len;
	//memcpy(nalu->buf, &Buf[nalu->startcodeprefix_len], nalu->len);
	//nalu->forbidden_bit = nalu->buf[0] & 0x80;       // 1 bit
	//nalu->nal_reference_idc = nalu->buf[0] & 0x60;   // 2 bit
	//nalu->nal_unit_type = nalu->buf[0] & 0x1f;       // 5 bit
	
	//free(Buf);
 
	return (pos + rewind);
}


static int ov5000_read_h264file_thread_func (void *arg)
{
    struct ov5000_stream* stream = (struct ov5000_stream*) arg;

	
    pj_thread_t  *call_thread = NULL;
	
    if(!pj_thread_is_registered())
    {    
      pj_thread_desc      thread_desc;

        if (pj_thread_register("ov5000_read_h264file_thread_func", thread_desc, &call_thread) == PJ_SUCCESS)
        {
          printf("pj_thread_register ok.\r\n");  
        }
    }
    pj_thread_sleep(100);//send_packet_interval

    ov_stream_capture_end = 0;
   
    int time_base = 90000;
	int fps = 24;
	int send_packet_interval = 1000 / fps;

	int interval = time_base / fps;
	long pts = 0;
	char  filename[256]; 
	set_take_ps_packet(1);
	sprintf(filename, "/home//work/broadcast_app/app_linux/thirds_libs_src/pjproject-2.12.1/AW_VirviEncoder.H264");  
	FILE *fd = fopen(filename, "rb");
	if (fd == NULL){
	    return -1;
	}
	
	int buffersize = 500000;
       FILE *myout = stdout;
	NALU_t *n;
	n = (NALU_t *)calloc(1, sizeof(NALU_t));
	if (n == NULL) {
		fclose(fd);
		printf("Alloc NALU Error");
	    return -1;
	}
	n->max_size = buffersize;
	n->buf = (char *)calloc(buffersize, sizeof(char));
	if (n->buf == NULL) {
		fclose(fd);
		free(n);
		printf("AllocNALU:n->buf");
	    return -1;
	}

	int data_offset = 0;
	int nal_num = 0;
	printf("-----+-------- NALU Table ------+---------+\n");
	printf(" NUM |    POS  |    IDC |  TYPE |   LEN   |\n");
	printf("-----+---------+--------+-------+---------+\n");
	//read h264 file to rtp port.
	do {
		
		int data_lenth = GetAnnexbNALU(fd, n);
		char type_str[20] = { 0 };
		switch (n->nal_unit_type) {
			case NALU_TYPE_SLICE:       sprintf(type_str, "SLICE");      break;
			case NALU_TYPE_DPA:         sprintf(type_str, "DPA");        break;
			case NALU_TYPE_DPB:         sprintf(type_str, "DPB");        break;
			case NALU_TYPE_DPC:         sprintf(type_str, "DPC");        break;
			case NALU_TYPE_IDR:         sprintf(type_str, "IDR");	     break;
			case NALU_TYPE_SEI:         sprintf(type_str, "SEI");        break;
			case NALU_TYPE_SPS:         sprintf(type_str, "SPS");        break;
			case NALU_TYPE_PPS:         sprintf(type_str, "PPS");		 break;
			case NALU_TYPE_AUD:         sprintf(type_str, "AUD");        break;
			case NALU_TYPE_EOSEQ:       sprintf(type_str, "EOSEQ");      break;
			case NALU_TYPE_EOSTREAM:    sprintf(type_str, "EOSTREAM");   break;
			case NALU_TYPE_FILL:        sprintf(type_str, "FILL");       break;
		}

		char idc_str[20] = { 0 };
		switch (n->nal_reference_idc >> 5) {
			case NALU_PRIPORITY_DISPOSABLE: sprintf(idc_str, "DISPOS");     break;
			case NALU_PRIORITY_LOW:         sprintf(idc_str, "LOW");        break;
			case NALU_PRIORITY_HIGH:        sprintf(idc_str, "HIGH");       break;
			case NALU_PRIORITY_HIGHTEST:    sprintf(idc_str, "HIGHTEST");   break;
		}
		
		fprintf(myout, "%5d| %8d| %7s| %6s| %8d| %d|\n", nal_num, data_offset, idc_str, type_str, n->len, data_lenth);

		if (data_lenth == 0){
			break;
		} 
	       /* take one NALU and hand it to the frame callback */
	       OnGetFrame2((uint8_t *)n->buf, data_lenth, stream );
		 data_offset = data_offset + data_lenth;
		 nal_num++;
		  
		  /* Sleep to allow log messages to flush */
		 //sleep(1);
		 if (feof(fd)){
			fseek(fd, 0, SEEK_SET);
		 }
		  pj_thread_sleep(10);//send_packet_interval
	} while(stream->is_running);

	ov_stream_capture_end = 1;
	
       PJ_LOG(4,(THIS_FILE, "2---ov5000_read_h264file_thread_func-run end."));
	free(n->buf);
	free(n);
	fclose((FILE*)fd);

}

void set_frame_rate(int rate, int samplerate){
	ov5000_framerate = rate;
	ov5000_samplerate = samplerate;
}

#ifdef VIDEO_USE_SOCKET
typedef  void (*get_video_frame_t)(char *data, int len);
get_video_frame_t	g_video_frame_callback = NULL;
void set_get_video_frame_callback(get_video_frame_t function){
	g_video_frame_callback=function;
}

static void start_connect_local_tcpserver(void *param ){	
    struct ov5000_stream* stream = (struct ov5000_stream*) param;

	
    pj_thread_t  *call_thread = NULL;
    if(!pj_thread_is_registered())
    {    
      pj_thread_desc      thread_desc;

        if (pj_thread_register("ov5000_socket_capture_thread_func", thread_desc, &call_thread) == PJ_SUCCESS)
        {
          printf("ov5000_socket_capture_thread_func ok.\r\n");  
        }
    }

    int server_socket;
    struct sockaddr_in server_addr;
    unsigned char flag;
    unsigned int length = 0;
    unsigned int data_length = 0;
    unsigned char *data = NULL;
    ssize_t bytes_received;

    // create the TCP socket
    server_socket = socket(AF_INET, SOCK_STREAM, 0);
    if (server_socket == -1) {
        perror("socket");
	return;
    }
	
    int rcv_size = 204800; /* receive buffer size */
    int optlen = sizeof(rcv_size);
    setsockopt(server_socket , SOL_SOCKET , SO_RCVBUF , &rcv_size , optlen);

    // set the local frame-server address and port
    server_addr.sin_family = AF_INET;
    server_addr.sin_addr.s_addr = inet_addr("127.0.0.1");
    server_addr.sin_port = htons(11603);

    // connect to the frame server
    if (connect(server_socket, (struct sockaddr *)&server_addr, sizeof(server_addr)) == -1) {
        perror("connect");
	return;
    }

    int MAX_RECV_FRAME_SIZE = 150*1024;

    data = (unsigned char *)malloc(MAX_RECV_FRAME_SIZE);
    if (data == NULL){
		return ;
    }
    while(stream->is_running){
	        // receive the 1-byte frame-type flag
	        bytes_received = recv(server_socket, &flag, 1, 0);
	        if (bytes_received <= 0) {
	            // server closed the connection or a receive error occurred
	            close(server_socket);
	            break;
	        }

	        /* read the 4-byte big-endian frame length (MSG_WAITALL so a
	         * partial TCP read does not corrupt the length field) */
	        bytes_received = recv(server_socket, &length, 4, MSG_WAITALL);
	        if (bytes_received <= 0) {
	            // server closed the connection or a receive error occurred
	            close(server_socket);
	            break;
	        }

		 int old_length = length;  
	        length = ntohl(length);  
			if (length == 0 || length > 5000000){
    		    PJ_LOG(4, (THIS_FILE, "!!!!!! invalid data, length:%d,old_length:%d\r\n", length, old_length));
				continue;
			} 
	        if (length >= MAX_RECV_FRAME_SIZE) {
    			PJ_LOG(4, (THIS_FILE, "length:%d\r\n", length));
				/* grow the receive buffer and remember its new size */
				data = realloc(data, length);
				MAX_RECV_FRAME_SIZE = length;
	        }
	        /* read the complete frame payload (MSG_WAITALL: wait until
	         * 'length' bytes have arrived) */
	        bytes_received = recv(server_socket, data, length, MSG_WAITALL);
	        if (bytes_received <= 0) {
	            // server closed the connection or a receive error occurred
	            close(server_socket);
	            break;
	        }
	        //handle_data(flag, length, data);
	        // flag : 0 ppsinfo; 1: I Frame; 2: P Frame
	        OnGetFrame2(data, length, stream);
    }

    free(data);
    // close the socket
    close(server_socket);

	
    PJ_LOG(4, (THIS_FILE, "start_connect_local_tcpserver end\r\n"));
}

#endif
/* API: Start stream. */
static pj_status_t ov5000_stream_start(pjmedia_vid_dev_stream *s)
{
    ov5000_stream *stream = (ov5000_stream*)s; 
    int res;
    pj_status_t status = PJ_SUCCESS;

    PJ_LOG(4, (THIS_FILE, "Starting Ov5000 camera stream, gb28181_history_play_source_type:%d", gb28181_history_play_source_type));

    stream->is_running = PJ_TRUE;
	
    if (gb28181_history_play_source_type != PJMEDIA_VID_S_PLAY){
		status = pj_thread_create (stream->pool,
					   "ov5000_file_read",
					   ov5000_read_h264file_thread_func,
					   stream,
					   0, //ZERO,
					   0,
					   &stream->ca_thread);
		if (status != PJ_SUCCESS) {
		    stream->is_running = PJ_FALSE;
			return status;
		}
	   return PJ_SUCCESS;
    }
	
	#ifdef VIDEO_USE_SOCKET
		status = pj_thread_create (stream->pool,
					   "ov5000_capture",
					   (pj_thread_proc*)&start_connect_local_tcpserver,
					   stream,
					   0, //ZERO,
					   0,
					   &stream->ca_thread);
		if (status != PJ_SUCCESS) {
		    stream->is_running = PJ_FALSE;
			return status;
		}
		
	#endif
    system("echo 1 > /sys/class/gpio/gpio226/value");
 
    /* Call PjCamera::Start() method */


on_return: 
    return status;
}


/* API: Stop stream. */
static pj_status_t ov5000_stream_stop(pjmedia_vid_dev_stream *s)
{
    ov5000_stream *strm = (ov5000_stream*)s; 
    pj_status_t status = PJ_SUCCESS;

    PJ_ASSERT_RETURN(strm != NULL, PJ_EINVAL);
    
    PJ_LOG(4, (THIS_FILE, "*Stopping Ov5000 camera stream, gb28181_history_play_source_type:%d", gb28181_history_play_source_type));
    strm->is_running = PJ_FALSE;

    system("echo 0 > /sys/class/gpio/gpio226/value");
	
    pj_thread_sleep(10);//send_packet_interval
	
   //ov_stream_capture_end = 0;
    if (gb28181_history_play_source_type != PJMEDIA_VID_S_PLAY){
	  int wait_times = 0;
	   while(!ov_stream_capture_end){
		pj_thread_sleep(10);

    		PJ_LOG(4, (THIS_FILE, "*Stopping Ov5000 wait:%d", wait_times++));
	   }
	   return PJ_SUCCESS;
    } 
    PJ_LOG(4, (THIS_FILE, "Stop Ov5000 camera stream end."));

    return status;
}

 
/* API: Destroy stream. */
static pj_status_t ov5000_stream_destroy(pjmedia_vid_dev_stream *s)
{
    ov5000_stream *strm = (ov5000_stream*)s; 
    pj_bool_t with_attach;

    if (strm == NULL){
	return PJ_SUCCESS;
	}
    //PJ_ASSERT_RETURN(strm != NULL, PJ_EINVAL);
     

	
    //pjmedia_vid_dev_conv_destroy_converter(&strm->conv);
       
    if (strm->pool){
	    pj_pool_release(strm->pool);
    	
	}

    PJ_LOG(4, (THIS_FILE, "Ov5000 camera stream destroyed"));

    return PJ_SUCCESS;
}
 
PJ_INLINE(void) strip_padding(void *dst, void *src, int w, int h, int stride)
{
    int i;
    for (i = 0; i < h; ++i) {
	pj_memmove(dst, src, w);
	src += stride;
	dst += w;
    }
}

static void printf_data(uint8_t *data, int size)
{
    static char resp_str[512];
    memset(resp_str, 0x0, 512);

    /* each byte prints as 3 chars ("xx "), so cap the dump so it fits in resp_str */
    if (size > (int)(sizeof(resp_str) / 3 - 1))
    {
        size = (int)(sizeof(resp_str) / 3 - 1);
    }

    int index = 0;

    for(int i = 0; i < size; i++)
    {
        sprintf(resp_str + index, "%02x ", data[i]);
        index += 3;
    }

    PJ_LOG(4,(THIS_FILE,"data %d:%s\r\n", size, resp_str));
}

static void  OnGetFrame2(uint8_t* data, int length,
			       void* user_data)
{
    ov5000_stream *strm = (ov5000_stream*)(intptr_t)user_data;
    pjmedia_frame f;
    pj_uint8_t *Y, *U, *V;
    pj_status_t status; 
    void *frame_buf, *data_buf;
    if (strm == NULL|| !strm->vid_cb.capture_cb || length < 0){
		return;
    }  
    strm->frame_ts.u64 += strm->ts_inc;

#if 0
    if (strm->thread_initialized == 0 || !pj_thread_is_registered()) {
	pj_status_t status;
	pj_bzero(strm->thread_desc, sizeof(pj_thread_desc));
	status = pj_thread_register("ov5000_cam", strm->thread_desc,
				    &strm->ca_thread);
	if (status != PJ_SUCCESS)
	    return;
	strm->thread_initialized = 1;
	PJ_LOG(5,(THIS_FILE, "Ov5000 camera thread registered"));
    }
#endif//
    f.type = PJMEDIA_FRAME_TYPE_VIDEO;
    f.size = length;
    f.timestamp.u64 = strm->frame_ts.u64;
    f.buf = data_buf = data;// (*env)->GetByteArrayElements(env, data, 0);

  	//PJ_LOG(4,(THIS_FILE, "2---OnGetFrame2-length:%d",length));
   uint8_t found_pps = 0;
   uint8_t send_pps = 0;
    if ( length < 56 && (*data == 0x00 && *(data+1)==0x00 && *(data+2) == 0x00 && *(data+3) == 0x01 && *(data+4) == 0x67)){
		//printf_data(data, length);
		
		//save SPS/PPS
		pj_memcpy(strm->sps_pps, data, length);
		strm->sps_pps_len = length;
		#if 0
		uint8_t *p_sps_data = data+5;
		int  i = 0;
		while(i < length-5){
			//pps
			if ((*(p_sps_data+i) == 0x00) && *(p_sps_data+i +1)==0x00 && *(p_sps_data+i +2) == 0x00 && *(p_sps_data+i +3) == 0x01 && *(p_sps_data+i +4) == 0x68){
				//pps
				found_pps = 1;
				strm->sps_pps_len = i+5;//26-8
				strm->pps_len = length - strm->sps_pps_len;//8
				break;
			}
			i++;
		}  
		#else
		strm->pps_len = 0;
		#endif//no need split pps
    		//PJ_LOG(4,(THIS_FILE, "2--get sps len:%d,found_pps:%d, sps_len:%d, pps_len:%d",length, found_pps, strm->sps_pps_len, strm->pps_len));
	}
	strm->recive_video_packet_count++;
	#if 1
	 if ((*data == 0x00 && *(data+1)==0x00 && *(data+2) == 0x01 && *(data+3) == 0x65) ||
	 	(*data == 0x00 && *(data+1)==0x00 && *(data+2) == 0x00 && *(data+3) == 0x01 && *(data+4) == 0x65)){
    		//PJ_LOG(4,(THIS_FILE, "1--send sps len:%d",length));
		send_pps = 1;
	}
	if (found_pps){
	    return;
	}
	if (send_pps){
				//sps
    		f.buf = data_buf = strm->sps_pps;
   	 	f.size = strm->sps_pps_len;
   		 (*strm->vid_cb.capture_cb)(&strm->base, strm->user_data, &f);
		 
		//pps
		if (strm->pps_len > 0){
	    		f.buf = data_buf = strm->sps_pps + strm->sps_pps_len;
	   	 	f.size = strm->pps_len;
	   		 (*strm->vid_cb.capture_cb)(&strm->base, strm->user_data, &f);
		}
	}
	
	f.size = length;
    	f.buf = data_buf = data;
	#endif//
    	//maybe get the encoded data ,no need encoded, direct to rtp packetlization
   	 (*strm->vid_cb.capture_cb)(&strm->base, strm->user_data, &f);
     //PJ_LOG(4,(THIS_FILE, "2---OnGetFrame2-length:%d end",length));
}
 

#endif	/* PJMEDIA_VIDEO_DEV_HAS_OV5000 */
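How pjmedia_ov5000_factory() gets hooked into the video device subsystem is not shown above. Below is a minimal sketch of one way to register it, assuming the stock pjmedia_vid_register_factory() API (check your pjsip version); it would be called once during application init, after the video device subsystem has been initialized:

#include <pjmedia_videodev.h>

/* Declared in ov5000_dev.c */
pjmedia_vid_dev_factory* pjmedia_ov5000_factory(pj_pool_factory *pf);

/* Sketch: register the custom capture device so pjmedia enumerates it.
 * Passing the create function (instead of an instance) lets the video
 * device subsystem re-create the factory whenever it is re-initialized. */
static pj_status_t register_ov5000_factory(void)
{
    return pjmedia_vid_register_factory(&pjmedia_ov5000_factory, NULL);
}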

3. The data captured by ov5000_dev is eventually delivered to strm->vid_cb.capture_cb; that callback is the vidstream_cap_cb function in vid_port.c, whose frame-buffering helper copy_frame_to_buffer is modified as follows:

static void copy_frame_to_buffer(pjmedia_vid_port *vp,
                                 pjmedia_frame *frame)
{
	if (frame == NULL){
		return;
	}
    pj_mutex_lock(vp->frm_mutex);
	#if PJMEDIA_VIDEO_DEV_HAS_OV5000
	 //PJ_LOG(4, (THIS_FILE, "-1--copy_frame_to_buffer: frame.size:%d, %d len", frame->size, vp->frm_buf->size));
		#if 1
        if (vp->frm_buf == NULL){
    		pj_mutex_unlock(vp->frm_mutex);
			return;
		}

		ringput(frame->buf, frame->size, 0);
		vp->frm_buf->size = frame->size;
		    vp->frm_buf->type = frame->type;
		    vp->frm_buf->timestamp = frame->timestamp;
		    vp->frm_buf->bit_info = frame->bit_info;
		#else
		//direct put frame?
    		pjmedia_frame_copy(vp->frm_buf, frame);
		
		#endif
	#endif//
    pj_mutex_unlock(vp->frm_mutex);
}

In addition, frm_buf in vid_port buffers too little data and frames were visibly skipped, so, following the earlier fifo-buffer idea, a fifo (ring) buffer was implemented (a usage sketch follows after the code):

//add for ring buffer
#if PJMEDIA_VIDEO_DEV_HAS_OV5000
#include <pthread.h>

pthread_mutex_t ring_mutex;

struct ringbuf {
    unsigned char *buffer;
	int frame_type;
    int size;
};
static int addring (int i);
static int ringget(struct ringbuf *getinfo);
static void ringput(unsigned char *buffer,int size,int encode_type);
static void ringfree();
static void ringmalloc(int size);
static void ringreset();


#define NMAX 10//30
#define RING_BUFFER_SIZE 145000//50000
 
static volatile  int iput = 0; /* next write index */
static volatile  int iget = 0; /* next read index  */
static volatile  int n = 0;    /* number of frames currently buffered */
#define USE_MALLOC_MEM
#ifndef USE_MALLOC_MEM
static uint8_t mem_buffer[RING_BUFFER_SIZE*NMAX];
#endif
static volatile struct ringbuf ringfifo[NMAX]; 
static volatile int init_flag = 0;

static void ringmalloc(int size)
{
    int i;
	#ifdef USE_MALLOC_MEM
	//
    pthread_mutex_init(&ring_mutex, 0);
	if (init_flag){ 
	    return;
	}
    for(i =0; i<NMAX; i++)
    {  
        ringfifo[i].buffer = malloc(size);
        ringfifo[i].size = 0;
        ringfifo[i].frame_type = 0;
       // printf("FIFO INFO:idx:%d,len:%d,ptr:%x\n",i,ringfifo[i].size,(int)(ringfifo[i].buffer));
    }
	init_flag = 1;
	#else
    for(i =0; i<NMAX; i++)
    { 
        ringfifo[i].buffer = &mem_buffer[i*RING_BUFFER_SIZE];
        ringfifo[i].size = 0;
        ringfifo[i].frame_type = 0;
       // printf("FIFO INFO:idx:%d,len:%d,ptr:%x\n",i,ringfifo[i].size,(int)(ringfifo[i].buffer));
    }
	#endif
    iput = 0; /* next write index */
    iget = 0; /* next read index  */
    n = 0;    /* number of buffered frames */
}
/**************************************************************************************************
**
**
**
**************************************************************************************************/
static void ringreset()
{

    pthread_mutex_lock(&ring_mutex);
    iput = 0; /* next write index */
    iget = 0; /* next read index  */
    n = 0;    /* number of buffered frames */ 
    pthread_mutex_unlock(&ring_mutex);
}
/**************************************************************************************************
**
**
**
**************************************************************************************************/
static void ringfree(void)
{
    int i;
    printf("begin free mem\n");
    for(i =0; i<NMAX; i++)
    {
       // printf("FREE FIFO INFO:idx:%d,len:%d,ptr:%x\n",i,ringfifo[i].size,(int)(ringfifo[i].buffer));
	#ifdef USE_MALLOC_MEM
       free(ringfifo[i].buffer);
	 ringfifo[i].buffer = NULL;
	#endif//#ifdef USE_MALLOC_MEM
        ringfifo[i].size = 0;
    }
	init_flag = 0;
    //pthread_mutex_destroy(&ring_mutex);
}
/**************************************************************************************************
**
**
**
**************************************************************************************************/
static int addring(int i)
{
    return (i+1) == NMAX ? 0 : i+1;
}

/**************************************************************************************************
**
**
**
**************************************************************************************************/ 

static int ringget(struct ringbuf *getinfo)
{
    int Pos;
    if(n>0)
    {
    	pthread_mutex_lock(&ring_mutex);
        Pos = iget;
        iget = addring(iget);
        n--;
        getinfo->buffer = (ringfifo[Pos].buffer);
		if (getinfo->buffer == NULL){
    		pthread_mutex_unlock(&ring_mutex);
			return 0;
		}
        getinfo->frame_type = ringfifo[Pos].frame_type;
        getinfo->size = ringfifo[Pos].size;
    	pthread_mutex_unlock(&ring_mutex);
        //printf("Get FIFO INFO:idx:%d,len:%d,ptr:%x,type:%d\n",Pos,getinfo->size,(int)(getinfo->buffer),getinfo->frame_type);
        return ringfifo[Pos].size;
    }
    else
    {
        //printf("Buffer is empty\n");
        return 0;
    }
}
/**************************************************************************************************
**
**
**
**************************************************************************************************/ 
static void ringput(unsigned char *buffer,int size,int encode_type)
{

    if (size > RING_BUFFER_SIZE){
	    PJ_PERROR(4,(THIS_FILE, 0, "Error ringput, size:%d > %d", size, RING_BUFFER_SIZE));
		return;
    }
    if(n >= 0 && n<NMAX)
    {
    	pthread_mutex_lock(&ring_mutex);
		if (ringfifo[iput].buffer == NULL){
    		pthread_mutex_unlock(&ring_mutex);
			return;
		}
		if (size > RING_BUFFER_SIZE){
    			//pthread_mutex_unlock(&ring_mutex);
			//return;
			ringfifo[iput].buffer = realloc(ringfifo[iput].buffer, size);
		} 
        memcpy(ringfifo[iput].buffer,buffer,size);
        ringfifo[iput].size= size;
        ringfifo[iput].frame_type = encode_type;
        //printf("Put FIFO INFO:idx:%d,len:%d,ptr:%x,type:%d\n",iput,ringfifo[iput].size,(int)(ringfifo[iput].buffer),ringfifo[iput].frame_type);
        iput = addring(iput);
        n++;
    	pthread_mutex_unlock(&ring_mutex);
    }
    else
    {
        //  printf("Buffer is full\n");
    }
}
 
#endif
//add end.
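To make the producer/consumer relationship explicit, here is a small standalone usage sketch of the fifo above (illustrative only; in the actual patch ringput() is called from copy_frame_to_buffer() on the capture side and ringget() from get_frame_from_buffer() on the clock side):

/* Illustrative usage of the ring buffer above (not part of the patch). */
static void ringbuf_usage_example(void)
{
    unsigned char frame_in[4096] = {0};   /* pretend this is one encoded frame */
    struct ringbuf item;

    ringmalloc(RING_BUFFER_SIZE);         /* allocate the NMAX slots once */

    /* producer side: push one encoded frame into the fifo */
    ringput(frame_in, sizeof(frame_in), 0);

    /* consumer side: pop it again; ringget() returns 0 when the fifo is empty */
    if (ringget(&item) > 0) {
        /* item.buffer points INTO the fifo slot, so copy/consume it before
         * the producer wraps around and overwrites that slot */
    }

    ringfree();
}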


get_frame_from_buffer in vid_port.c is modified as follows:
static pj_status_t get_frame_from_buffer(pjmedia_vid_port *vp,
                                         pjmedia_frame *frame)
{
    pj_status_t status = PJ_SUCCESS;

    pj_mutex_lock(vp->frm_mutex);
    if (vp->conv.conv)
        status = convert_frame(vp, vp->frm_buf, frame);
    else{
		
		//for bug
		//PJ_LOG(4, (THIS_FILE, "-1--get_frame_from_buffer: frame.size:%d, %d len", frame->size, vp->frm_buf->size));
	 
		#if PJMEDIA_VIDEO_DEV_HAS_OV5000
		struct ringbuf ringitem ;
		int itemlen  = ringget(&ringitem);
		if (itemlen > 0 && frame->buf != NULL){
			int copy_len = itemlen;
			if (itemlen > frame->size){
				copy_len = frame->size;	
			}
			memcpy(frame->buf, ringitem.buffer, copy_len);
			frame->size = copy_len;
		}else{
		    frame->size = 0;
	    		pj_mutex_unlock(vp->frm_mutex);
			return -1;
		}
		if (vp->frm_buf != NULL){
	       frame->type = vp->frm_buf->type;
	       frame->timestamp = vp->frm_buf->timestamp;
	       frame->bit_info = vp->frm_buf->bit_info;
		   //PJ_LOG(4, (THIS_FILE, "-2--get_frame_from_buffer: frame.size:%d, %d len, itemlen:%d", frame->size, vp->frm_buf->size, itemlen));
    
	    }
		#else
		int itemlen  = vp->frm_buf->size;
		pjmedia_frame_copy(frame, vp->frm_buf);
		#endif
		}
    pj_mutex_unlock(vp->frm_mutex);
    
    return status;
}

4. Adapting the clock that drives video frame fetching. During debugging the video was noticeably choppy; the cause was that the driving clock ticked too slowly. This involves the on_clock_tick function in vid_conf.c: with the settings below, usec_interval is 1000000/1000 = 1000 µs, so the tick fires every millisecond instead of once per nominal frame period.

    clock_param.clock_rate = 900000;//TS_CLOCK_RATE;   
    clock_param.usec_interval = 1000000 /1000;// vid_conf->opt.frame_rate;
    status = pjmedia_clock_create2(pool, &clock_param, 0, &on_clock_tick,
                                   vid_conf, &vid_conf->clock);
                                   
                                   
Additionally, in vid_port.c the format converter is bypassed, since the frames are already encoded:

	#if PJMEDIA_VIDEO_DEV_HAS_OV5000	/* no converter needed */

	vp->conv.conv_param.src.id = vp->conv.conv_param.dst.id;
	vp->conv.conv_param.src.det.vid.size.w = vp->conv.conv_param.dst.det.vid.size.w;
	vp->conv.conv_param.src.det.vid.size.h= vp->conv.conv_param.dst.det.vid.size.h;
	//vp->role = ROLE_ACTIVE;
    	//return PJ_SUCCESS;
	#endif
	
Add the following members to the vconf_port struct:
    pj_size_t		 get_buf_real_size;	/**< Data size for get_frame().   */
    pj_size_t		put_buf_real_size;	/**< Data size for put_frame().   */

In the on_clock_tick function:
                status = pjmedia_port_get_frame(src->port, &frame);
                if (status != PJ_SUCCESS) {
                    PJ_PERROR(5, (THIS_FILE, status,
                                  "Failed to get frame from port %d [%s]!",
                                  src->idx, src->port->info.name.ptr));
                    src->got_frame = PJ_FALSE;
                } else {
			#if PJMEDIA_VIDEO_DEV_HAS_OV5000//just set got_frame by 
		       //PJ_PERROR(4, (THIS_FILE, status, "get frame from port %d  ,len:%d, src_buf_size:%d!", src->idx, frame.size, src->get_buf_size));
			src->got_frame = PJ_TRUE;
			src->get_buf_real_size = frame.size;
			#else
		    	src->got_frame = (frame.size == src->get_buf_size);
			#endif

                    /* There is a possibility that the source port's format has
                     * changed, but we haven't received the event yet.
                     */
                    cur_fmt = &src->format;
                    new_fmt = &src->port->info.fmt;
                    if (cmp_fps(cur_fmt, new_fmt) ||
                        cmp_size(cur_fmt, new_fmt))
                    {
                        op_param prm;
                        prm.update_port.port = src->idx;
                        op_update_port(vid_conf, &prm);
                    }
                }

The following change is made in render_src_frame:
static pj_status_t render_src_frame(vconf_port *src, vconf_port *sink,
                                    unsigned transmitter_idx)
    if (sink->transmitter_cnt == 1 && (!rs || !rs->converter)) {
        /* The only transmitter and no conversion needed */
	    #if PJMEDIA_VIDEO_DEV_HAS_OV5000//just set got_frame  
		int get_buf_size = src->get_buf_real_size < sink->put_buf_size?src->get_buf_real_size:sink->put_buf_size;
		sink->put_buf_real_size = get_buf_size;
		#else
		/* The only transmitter and no conversion needed */
		if (src->get_buf_size != sink->put_buf_size)
		    return PJMEDIA_EVID_BADFORMAT;
		int get_buf_size = src->put_buf_size;
		#endif//
		pj_memcpy(sink->put_buf, src->get_buf, get_buf_size);
    } else if (rs && rs->converter) {

5. RTP H.264 FU-A packetization, sending, and the callback; the file involved is vid_stream.c, in its put_frame function.

   /*
	 mark need modify later.
   */
   #if PJMEDIA_VIDEO_DEV_HAS_OV5000
   int rtp_per_packet_len = 1200;//1300;
   int i=0;
   int send_len = 0;
   int reserved_len = frame->size;
   int data_start_index = 0;

#if 1//SUPPORT_PS_ENPACKED
	char ps_header[PS_HDR_LEN];

	char ps_system_header[SYS_HDR_LEN];

	char ps_map_header[PSM_HDR_LEN];

	char pes_header[PES_HDR_LEN];
	char temp_frame[1024 * 128];	/* NOTE: 128 KB on the stack; a static or heap buffer may be safer */
#endif//

   uint8_t nalu = 0;
   uint8_t *data = (uint8_t *)frame->buf;
	if ( *data == 0x00 && *(data+1)==0x00 && *(data+2) == 0x00 && *(data+3) == 0x01){
		nalu = *(data+4);
		data_start_index = 4;
		if (reserved_len > rtp_per_packet_len){
		    //fu-a
			data_start_index = 5;			
		}
	}else  if ( *data == 0x00 && *(data+1)==0x00 && *(data+2) == 0x01 ){
		nalu = *(data+3); 
		data_start_index = 3;
		if (reserved_len > rtp_per_packet_len){
		    //fu-a
			data_start_index = 4;			 
		}
	}else{
		nalu = *(data);
		data_start_index = 0;
	}
    
      int index = 0;
	if (ps_packet_flag){
		
		int time_base = 90000;
		int fps = 24;
		int send_packet_interval = 1000 / fps;

		int interval = time_base / fps;
		stream->pts += interval;
	       long pts = stream->pts;
		/* PS encapsulation */
		if (nalu == 0x67 || nalu == 0x68 || nalu == 0x65){
			/* SPS/PPS/IDR: prepend PS pack header, system header and PSM */
                gb28181_make_ps_header(ps_header, pts);
                memcpy(temp_frame,ps_header,PS_HDR_LEN);
                index += PS_HDR_LEN;
                gb28181_make_sys_header(ps_system_header, 0x3f);

                memcpy(temp_frame+ index, ps_system_header, SYS_HDR_LEN);
                index += SYS_HDR_LEN;

                gb28181_make_psm_header(ps_map_header);

                memcpy(temp_frame + index, ps_map_header, PSM_HDR_LEN);
                index += PSM_HDR_LEN;
			
		}else{

                gb28181_make_ps_header(ps_header, pts);

                memcpy(temp_frame, ps_header, PS_HDR_LEN);
                index += PS_HDR_LEN;
		}
            /* build the PES header */
            gb28181_make_pes_header(pes_header, 0xe0, reserved_len, pts, pts);

            memcpy(temp_frame+index, pes_header, PES_HDR_LEN);
            index += PES_HDR_LEN;

            memcpy(temp_frame + index, data, reserved_len);
            index += reserved_len;
			
            data = temp_frame;
	      reserved_len = index;
	      data_start_index = 0; 
	}else{
		//data_start_index = 0;
		reserved_len -= data_start_index;
       }
   
    while(1){
   	    send_len = rtp_per_packet_len;
		if (reserved_len < rtp_per_packet_len){
			send_len = reserved_len;
			has_more_data = PJ_FALSE;
		}else{
			has_more_data = PJ_TRUE;
		}
		
		status = pjmedia_rtp_encode_rtp(&channel->rtp,
		                                channel->pt,
	                                (has_more_data == PJ_FALSE ? 1 : 0),
		                                (int)send_len,
	 	                                rtp_ts_len,
		                                (const void**)&rtphdr,
		                                &rtphdrlen);
		if (status != PJ_SUCCESS) {
		    LOGERR_((channel->port.info.name.ptr, status,
			    "RTP encode_rtp() error"));
		    return status;
		}
		/* When the payload length is zero, we should not send anything,
		 * but proceed the rest normally.
		 */
		 int fu_a_index = 0;
		uint8_t *p_data = (uint8_t *)channel->buf;
		if (reserved_len > 0) {
			#if 1
			if (frame->size > rtp_per_packet_len){
				//fu-a
				if (total_sent == 0){
					//start
					p_data[sizeof(pjmedia_rtp_hdr)] =  (nalu & 0x60) | 28;
					// |S|E|R|  Type   |
					//S 1 E 0 R 0
					p_data[sizeof(pjmedia_rtp_hdr)+1] = (1 << 7) | (nalu & 0x1f);
					fu_a_index += 2;
				}else{
					if (has_more_data){
						//middle fragment
						p_data[sizeof(pjmedia_rtp_hdr)] = (nalu & 0x60) | 28;
						// |S|E|R|  Type   |
						//S 0 E 0 R 0
						p_data[sizeof(pjmedia_rtp_hdr)+1] = (nalu & 0x1f);
						fu_a_index += 2;
					}else{
						//end fragment
						p_data[sizeof(pjmedia_rtp_hdr)] = (nalu & 0x60) | 28;
						// |S|E|R|  Type   |
						//S 0 E 1 R 0
						p_data[sizeof(pjmedia_rtp_hdr)+1] = (1 << 6) | (nalu & 0x1f);
						fu_a_index += 2;
					}
				} 
				//send_len+=fu_a_index;
			}
			#endif//no -fu-a
		    /* Copy RTP header to the beginning of packet */
		    pj_memcpy(channel->buf, rtphdr, sizeof(pjmedia_rtp_hdr));
			//copy the payload after the RTP header (and the FU-A bytes, if any)
		    pj_memcpy(channel->buf + fu_a_index + sizeof(pjmedia_rtp_hdr), data +total_sent + data_start_index, send_len);

		    if (stream->transport == NULL){
				break;
		    }
		    /* Send the RTP packet to the transport. */
		    status = pjmedia_transport_send_rtp(stream->transport,
							(char*)channel->buf,
							send_len +
							    sizeof(pjmedia_rtp_hdr) +fu_a_index);
		    if (status != PJ_SUCCESS) {
				if (stream->rtp_tx_err_cnt++ == 0) {
				    LOGERR_((channel->port.info.name.ptr, status,
					     "Error sending RTP"));
				}
				if (stream->rtp_tx_err_cnt > SEND_ERR_COUNT_TO_REPORT) {
				    stream->rtp_tx_err_cnt = 0;
				}
				break;
		    }
		    pjmedia_rtcp_tx_rtp(&stream->rtcp, (unsigned)send_len);
		    //total_sent += frame_out.size;
			pj_thread_sleep(2);//2ms
		    pkt_cnt++;
		}
			
		/* Next packets use same timestamp */
		rtp_ts_len = 0; 
		
		reserved_len -= send_len; 
		total_sent += send_len;
		if (reserved_len <= 0){
			break;
		}
   	}
       //PJ_PERROR(4,(THIS_FILE, status,  "put_frame len:%d,total_sent:%d", frame->size,total_sent));
	goto ov5000_end;
   #endif
   
  ov5000_end:
#if TRACE_RC
    /* Trace log for rate control */
    {
        pj_timestamp end_time;
        unsigned total_sleep;

        pj_get_timestamp(&end_time);
        total_sleep = pj_elapsed_msec(&initial_time, &end_time);
        PJ_LOG(5, (stream->name.ptr, "total pkt=%d size=%d sleep=%d",
                   pkt_cnt, total_sent, total_sleep));

        if (stream->tx_start.u64 == 0)
            stream->tx_start = initial_time;
        stream->tx_end = end_time;
        stream->rc_total_pkt += pkt_cnt;
        stream->rc_total_sleep += total_sleep;
        stream->rc_total_img++;
    }
#endif

Other changes:

In vid_port_destroy:

	#if PJMEDIA_VIDEO_DEV_HAS_OV5000
	//free ringbuffer  
	ringfree();
	//add end.
	#endif
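The allocation counterpart is not shown in the post; presumably ringmalloc() is called once when the video port is created. A hypothetical placement (my assumption, adjust to where the port is actually created):

	#if PJMEDIA_VIDEO_DEV_HAS_OV5000
	/* Hypothetical counterpart to ringfree(): allocate the fifo once when the
	 * capture video port is created (e.g. in pjmedia_vid_port_create()). */
	ringmalloc(RING_BUFFER_SIZE);	/* NMAX slots of RING_BUFFER_SIZE bytes each */
	#endif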
 

From: https://www.cnblogs.com/kn-zheng/p/17784660.html
