[GEM-dev] ffmpeg plugin

Patrice Colet colet.patrice at free.fr
Wed Aug 14 15:45:04 CEST 2019


  Hello,

  I'm writing a gem_film plugin using FFMPEG libraries, and expect to 
write a record and a video plugin with same libraries.

The plugin is able to open and render correctly many video files, but I 
couldn't find a way to seek those files.

When a file is opened, if thread message is set to '1', the cpu meter 
goes to 100%, and the seeking process is almost working.

If thread message is set to '0' the cpu is almost not working and the 
video file is rendered until the end.

I don't really understand how the film object is managed by the thread 
functions, so I couldn't go further with this code.

Attached is the .cpp and it's header file if someone could give any help.

-------------- next part --------------
////////////////////////////////////////////////////////
//
// GEM - Graphics Environment for Multimedia
//
// zmoelnig at iem.at
//
// Implementation file
//
//    Copyright (c) 1997-1999 Mark Danks.
//    Copyright (c) Günther Geiger.
//    Copyright (c) 2001-2011 IOhannes m zmölnig. forum::für::umläute. IEM. zmoelnig at iem.at
//    For information on usage and redistribution, and for a DISCLAIMER OF ALL
//    WARRANTIES, see the file, "GEM.LICENSE.TERMS" in this distribution.
//
/////////////////////////////////////////////////////////
#ifdef HAVE_CONFIG_H
# include "config.h"
#endif

#include <string.h>
#include "filmFFMPEG.h"
#include "plugins/PluginFactory.h"
#include "Gem/RTE.h"
#include "Gem/Properties.h"

using namespace gem::plugins;

REGISTER_FILMFACTORY("ffmpeg", filmFFMPEG);

/////////////////////////////////////////////////////////
//
// filmFFMPEG
//
/////////////////////////////////////////////////////////
// Constructor
//
/////////////////////////////////////////////////////////

filmFFMPEG :: filmFFMPEG(void) :
   m_fps(20.), m_fps_num(1), m_fps_denum(1),
   m_numFrames(-1), m_numTracks(-1),
    m_codec(NULL),
   m_codecParam(NULL),
   sws_ctx(NULL) 
  {
  }

/////////////////////////////////////////////////////////
// open the file
//
/////////////////////////////////////////////////////////
bool filmFFMPEG :: open(const std::string&filename,
                      const gem::Properties&wantProps)
{

// define destination pix_fmt format	
  double d;
  if(wantProps.get("colorspace", d) && d>0) {
    m_wantedFormat=d;
  }

  switch(m_wantedFormat) {
    case GEM_RGBA:
      dst_pix_fmt = AV_PIX_FMT_RGBA;
      break;
    case GEM_YUV:
      dst_pix_fmt = AV_PIX_FMT_YUV422P;
      break;
    case GEM_GRAY:
      dst_pix_fmt = AV_PIX_FMT_GRAY10;
      break;
    default:
      AV_PIX_FMT_RGBA;
      break;
  }
    
  close();  
 
// create libavformat context, find best video stream and get best codec 
  m_formatContext = avformat_alloc_context();  
  
   if (avformat_open_input(&m_formatContext, filename.c_str(), NULL, NULL) != 0) {
	error("could not open the file %s", filename.c_str());
    return false;
  } 

  post("format %s", m_formatContext->iformat->name);
  post("format %d", m_formatContext->nb_streams);
  m_numTracks =  m_formatContext->nb_streams;
   
  if (avformat_find_stream_info(m_formatContext,  NULL) < 0) {
    error("could not get the stream info");
    return false;
  }

    // find the video stream information
    ret = av_find_best_stream(m_formatContext, AVMEDIA_TYPE_VIDEO, -1, -1, &m_codec, 0);
    if (ret < 0) {
        error("Cannot find a video stream in the input file\n");
        return false;
    }
    video_stream = ret;  
  m_in_stream = m_formatContext->streams[video_stream];
  m_codecParam = m_in_stream->codecpar;
  verbose(0,"AVStream->start_time %" PRId64, m_in_stream->start_time);


  m_codec = avcodec_find_decoder(m_codecParam->codec_id);
  if (m_codec==NULL) {
      error("unsupported codec!");
      return false;
    } 	
   	
// print codec name, id and bitrate
  post("FFMPEG: \tCodec %s", m_codec->name);

  m_codecContext = avcodec_alloc_context3(m_codec);
  if (avcodec_parameters_to_context(m_codecContext, m_codecParam) < 0)
  {
    error("failed to copy codec params to codec context");
    return NULL;
  }

     
  if (avcodec_open2(m_codecContext, m_codec, NULL) < 0)
  {
    error("failed to open codec through avcodec_open2");
    return NULL;
  }
  
// now we can define video properties

  m_fps_num = m_in_stream->r_frame_rate.num;
  m_fps_denum = m_in_stream->r_frame_rate.den;
  m_fps = (float)m_fps_num / (float)m_fps_denum;
  m_numFrames =  m_in_stream->nb_frames;
  m_width = m_codecContext->width;
  m_height = m_codecContext->height;
  src_pix_fmt = m_codecContext->pix_fmt;
// get the image size 
  m_image.newfilm=true;
  m_image.image.notowned=true;
  m_image.image.xsize=m_width;
  m_image.image.ysize=m_height;
  m_image.image.setCsizeByFormat(m_wantedFormat);  
  
  const char* colorSpace = av_get_pix_fmt_name(m_codecContext->pix_fmt);
  post("FFMPEG: codec_type %d, color space %s ( %d )", m_codecParam->codec_type, colorSpace, m_codecContext->pix_fmt); 



  m_avframe = av_frame_alloc();
  m_finalFrame = av_frame_alloc(); 
  m_packet = av_packet_alloc();
   
   
  uint8_t *buffer = NULL;
  int numBytes; 
 // Determine required buffer size and allocate buffer
  numBytes=avpicture_get_size(dst_pix_fmt, m_width,
                            m_height);
  buffer=(uint8_t *)av_malloc(numBytes*sizeof(uint8_t));

 
  avpicture_fill((AVPicture *)m_finalFrame, buffer, dst_pix_fmt,
                m_width, m_height); 
   // create scaling context
    sws_ctx = sws_getContext(m_width, m_height, src_pix_fmt,
                             m_width, m_height, dst_pix_fmt,
                             SWS_BILINEAR, NULL, NULL, NULL);
    if (!sws_ctx) {
        error(
                "Impossible to create scale context for the conversion "
                "fmt:%s s:%dx%d -> fmt:%s s:%dx%d\n",
                av_get_pix_fmt_name(src_pix_fmt), m_width, m_height,
                av_get_pix_fmt_name(dst_pix_fmt), m_width, m_height);
        ret = AVERROR(EINVAL);
        return false;
    }

 							  
  return true;
}

void filmFFMPEG::close(void) {

// crash if I uncomment those lines
	
//	avformat_close_input(&m_formatContext);
//	 sws_freeContext	(ctx)	;
//	avformat_free_context(m_formatContext);
//	av_packet_free(&m_packet);
//	av_frame_free(&m_avframe);
//	avcodec_free_context(&m_codecContext);
}
bool filmFFMPEG::isThreadable(void)
{
  return true;
}

/////////////////////////////////////////////////////////
// changeFrame
//
/////////////////////////////////////////////////////////

film::errCode filmFFMPEG :: changeImage(int imgNum, int trackNum)
{
//	post("track: %d", trackNum);
 	
  if(trackNum<0) {
	  error ("no track defined");
    // just automatically proceed to the next frame: this might speed up things for linear decoding 
    return film::FAILURE;
  }

  if(imgNum>=m_numFrames || imgNum<0) {
    return film::FAILURE;
  }

// convert timestamp to frame for seeking		
  int64_t seekTarget = frameToPts(imgNum);
  
  av_seek_frame( m_formatContext , 0 ,  seekTarget , 
  AVSEEK_FLAG_ANY );
 
  return film::SUCCESS;
}

 
int64_t filmFFMPEG::frameToPts(int frame) const
{
  return (int64_t(frame) * m_fps_denum * m_in_stream->time_base.den) / 
    (int64_t(m_fps_num) * m_in_stream->time_base.num);
}
 
/////////////////////////////////////////////////////////
// render
// 
/////////////////////////////////////////////////////////
pixBlock* filmFFMPEG :: getFrame()
{ 
//post("rendering"); 
  response = 0;
  how_many_packets_to_process = 4; 
  while ( av_read_frame(m_formatContext, m_packet) >= 0) 
  {
	if (m_packet->stream_index == video_stream) {
      response = decode_packet(m_packet, m_codecContext, m_avframe);
      if (response < 0)
        break;
      if (--how_many_packets_to_process <= 0) break;
	}
	av_packet_unref(m_packet);
  }    
// av_packet_free(&m_packet);
// av_frame_free(&m_avframe);
  m_image.newimage=true;
  return &m_image;
//  return NULL;
}

filmFFMPEG :: decode_packet(AVPacket *m_packet,
							AVCodecContext *m_codecContext,
							AVFrame *m_avframe) 
{
   
  int response = avcodec_send_packet(m_codecContext, m_packet);

  if (response < 0) {
    error("Error while sending a packet to the decoder");
    return response;
  }
  while (response >= 0)
  {
    response = avcodec_receive_frame(m_codecContext, m_avframe);
    if (response == AVERROR(EAGAIN) || response == AVERROR_EOF) { 
//	how_many_packets_to_process = 0;
      break;
    } else if (response < 0) {
      error("Error while receiving a frame from the decoder");
      return response;
    }
	if (response >= 0) {
		
        sws_scale(sws_ctx,m_avframe->data,
                  m_avframe->linesize, 0, m_height, m_finalFrame->data, m_finalFrame->linesize); 

//	

    verbose(0,"frame : %d", m_avframe->coded_picture_number);
    m_image.image.data = m_finalFrame->data[0];
    break;
      }
    } 
  return 0;	
}


bool filmFFMPEG::enumProperties(gem::Properties&readprops,
                              gem::Properties&writeprops)
{
  readprops.clear();
  writeprops.clear();

  double value=0.;

  readprops.set("width", value);
  readprops.set("height", value);
  readprops.set("fps", value);
  readprops.set("frames", value);

  return true;
}

void filmFFMPEG::setProperties(gem::Properties&props)
{
}

void filmFFMPEG::getProperties(gem::Properties&props)
{
  std::vector<std::string> keys=props.keys();
  gem::any value;
  double d;
  unsigned int i=0;
  for(i=0; i<keys.size(); i++) {
    std::string key=keys[i];
    props.erase(key);
    if("fps"==key) {
      d=m_fps;
      value=d;
      props.set(key, value);
    }
    if("frames"==key && m_numFrames>=0) {
      d=m_numFrames;
      value=d;
      props.set(key, value);
    }
    if("tracks"==key && m_numTracks>=0) {
      d=m_numTracks;
      value=d;
      props.set(key, value);
    } 
    if("width"==key) {
      d=m_image.image.xsize;
      value=d;
      props.set(key, value);
    }
    if("height"==key) {
      d=m_image.image.ysize;
      value=d;
      props.set(key, value);
    }
  }

}
-------------- next part --------------
/*-----------------------------------------------------------------

GEM - Graphics Environment for Multimedia

Load an digital video (like AVI, Mpeg, Quicktime) into a pix block (Linux)

Copyright (c) 1997-1999 Mark Danks. mark at danks.org
Copyright (c) Günther Geiger. geiger at epy.co.at
Copyright (c) 2001-2011 IOhannes m zmölnig. forum::für::umläute. IEM. zmoelnig at iem.at
For information on usage and redistribution, and for a DISCLAIMER OF ALL
WARRANTIES, see the file, "GEM.LICENSE.TERMS" in this distribution.


-----------------------------------------------------------------*/

#ifndef _INCLUDE_GEMPLUGIN__FILMFFMPEG_FILMFFMPEG_H_
#define _INCLUDE_GEMPLUGIN__FILMFFMPEG_FILMFFMPEG_H_
#include "plugins/film.h"
#include "Gem/Image.h"


# ifdef __cplusplus
extern "C" {
# endif
 
#include <libavcodec/avcodec.h>
#include <libavformat/avformat.h>


#include <libswscale/swscale.h>

#include <libavutil/imgutils.h> 

# ifdef __cplusplus
}
# endif

/*-----------------------------------------------------------------
  -------------------------------------------------------------------
  CLASS
  filmFFMPEG

  Loads in a film

  KEYWORDS
  pix

  DESCRIPTION

  -----------------------------------------------------------------*/
namespace gem
{
namespace plugins
{
class GEM_EXPORT filmFFMPEG : public film
{
public:

  //////////
  // Constructor
  filmFFMPEG(void);

  //////////
  // open a movie up
  virtual bool open(const std::string&filename, const gem::Properties&);

  virtual void close(void);

  //////////
  // get the next frame
  virtual pixBlock* getFrame(void);

  //////////
  // set the next frame to read;
  virtual errCode changeImage(int imgNum, int trackNum = -1);

  virtual bool enumProperties(gem::Properties&readprops,
                              gem::Properties&writeprops);

  virtual void getProperties(gem::Properties&props);
  virtual void setProperties(gem::Properties&props);
  

  virtual bool isThreadable(void);
  
 
  virtual int64_t frameToPts(int frame) const;  
  virtual int decode_packet(AVPacket *m_packet,
								   AVCodecContext *m_codecContext,
								   AVFrame *m_avframe); 

  //-----------------------------------
  pixBlock m_image;
  unsigned int  m_wantedFormat; // format requested by the user
  double m_fps;  // the frame-rate
  int m_fps_num, m_fps_denum;
  int m_width,m_height;
  unsigned int m_numFrames, m_numTracks;
  int             m_avframeBytes;
  uint8_t         *m_avframeBuffer;
  int ret, video_stream;
   
 
  enum AVPixelFormat src_pix_fmt, dst_pix_fmt;

  struct SwsContext *sws_ctx;  
  
  AVFormatContext *m_formatContext;
  AVStream *m_in_stream;
  AVCodecParameters *m_codecParam;
  
  AVCodec *m_codec;
  AVCodecContext *m_codecContext;
  AVFrame *m_avframe;
  AVPacket *m_packet;
  AVFrame *m_finalFrame;

  
  unsigned char *m_rawdata;
  
  int how_many_packets_to_process,response;
};
};
};

#endif  // for header file
-------------- next part --------------

ACLOCAL_AMFLAGS = -I $(top_srcdir)/m4
AM_CPPFLAGS = -I$(top_srcdir)/src $(GEM_EXTERNAL_CPPFLAGS)

pkglib_LTLIBRARIES= gem_filmFFMPEG.la

gem_filmFFMPEG_la_CXXFLAGS =
gem_filmFFMPEG_la_LDFLAGS  = -module -avoid-version -shared -static-libgcc
if WINDOWS
gem_filmFFMPEG_la_LDFLAGS += -no-undefined
endif
gem_filmFFMPEG_la_LIBADD   =

# RTE
gem_filmFFMPEG_la_CXXFLAGS += $(GEM_RTE_CFLAGS) $(GEM_ARCH_CXXFLAGS)
gem_filmFFMPEG_la_LDFLAGS  += $(GEM_RTE_LIBS)   $(GEM_ARCH_LDFLAGS)
# flags for building Gem externals
gem_filmFFMPEG_la_CXXFLAGS += $(GEM_EXTERNAL_CFLAGS)
gem_filmFFMPEG_la_LIBADD   += -L$(top_builddir) $(GEM_EXTERNAL_LIBS)
# gem_filmFFMPEG_la @MOREFLAGS@

# Dependencies
gem_filmFFMPEG_la_CXXFLAGS +=
gem_filmFFMPEG_la_LIBADD   += -lavcodec -lavformat -lavutil -lswscale

# convenience symlinks
include $(srcdir)/../symlink_ltlib.mk


### SOURCES
gem_filmFFMPEG_la_SOURCES= filmFFMPEG.cpp filmFFMPEG.h



More information about the GEM-dev mailing list