MediaPlayer to ACodec to OMX Flow (Android 7.1 NuPlayer)

Copyright notice: This is an original article by the author; do not repost without permission. https://blog.csdn.net/u010164190/article/details/82287797
********************************************************************************
Where the OpenMAX (OMX) framework sits and what it does
1. Android relies on OpenMAX for its codecs, so the framework adds an OMXCodec abstraction on top of it for the upper-layer player to use.
   The player's audio and video decoders, mVideoSource and mAudioSource, are both OMXCodec instances.
2. OMXCodec obtains the OMX service through IOMX over the Binder mechanism; the OMX service is the actual OpenMAX implementation in Android.
3. OMX manages software codecs and hardware codecs uniformly, treating both as plugins (see the sketch below).
*********************************************************************************
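To make item 3 concrete: every plugin, whether Google's software codecs or a vendor's hardware codecs, exposes the same small interface to OMX. The sketch below is a simplified rendering of that OMXPluginBase contract; the member names follow AOSP, but treat the exact signatures as an assumption rather than a copy of the real header.

// Simplified sketch of the plugin contract OMX programs against (assumed shape;
// the real header is frameworks/av/include/media/hardware/OMXPluginBase.h).
#include <stddef.h>
#include <OMX_Component.h>   // OMX_ERRORTYPE, OMX_CALLBACKTYPE, OMX_COMPONENTTYPE

struct OMXPluginBase {
    virtual ~OMXPluginBase() {}

    // Create a codec component by name and hand back its OMX handle.
    virtual OMX_ERRORTYPE makeComponentInstance(
            const char *name,
            const OMX_CALLBACKTYPE *callbacks,
            OMX_PTR appData,
            OMX_COMPONENTTYPE **component) = 0;

    // Tear the component down again.
    virtual OMX_ERRORTYPE destroyComponentInstance(
            OMX_COMPONENTTYPE *component) = 0;

    // Enumerate the component names this plugin provides.
    virtual OMX_ERRORTYPE enumerateComponents(
            OMX_STRING name, size_t size, OMX_U32 index) = 0;
};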

Example:
Basic MediaPlayer usage: playing an MP3 file
MediaPlayer mp = new MediaPlayer();
//1. Set the path of the audio file
mp.setDataSource("/sdcard/test.mp3");
//2. Move the MediaPlayer into the prepared state
mp.prepare();
//3. Start audio playback
mp.start();

I. Analyzing the mp.setDataSource("/sdcard/test.mp3") flow
1. IMediaPlayer.cpp/h
<1>. Header file
frameworks/av/include/media/IMediaPlayer.h
#include <utils/RefBase.h> 
#include <binder/IInterface.h> 
#include <binder/Parcel.h>

class IMediaPlayer: public IInterface {
public:
  // Macro that declares the meta interface: MediaPlayer ——> IMediaPlayer
  DECLARE_META_INTERFACE(MediaPlayer);

  // Pure virtual setDataSource() overloads
  virtual status_t  setDataSource(const sp<IDataSource>& source) = 0;
  virtual status_t  setDataSource(const sp<IMediaHTTPService> &httpService, const char *url, const KeyedVector<String8, String8>* headers) = 0;
  virtual status_t  setDataSource(int fd, int64_t offset, int64_t length) = 0; 
  virtual status_t  setDataSource(const sp<IStreamSource>& source) = 0;
};

// Declaration of the server-side BnMediaPlayer
class BnMediaPlayer: public BnInterface<IMediaPlayer> {
  public:
      virtual status_t  onTransact( uint32_t code, const Parcel& data, Parcel* reply, uint32_t flags = 0);
};

<2>. Source file
frameworks/av/media/libmedia/IMediaPlayer.cpp

class BpMediaPlayer: public BpInterface<IMediaPlayer> {

public:
 // BpMediaPlayer proxy (client-side) constructor
 BpMediaPlayer(const sp<IBinder>& impl) : BpInterface<IMediaPlayer>(impl) {

 }

 // Client-side implementation of setDataSource()
 status_t setDataSource(const sp<IStreamSource> &source) {
  Parcel data, reply;
  data.writeInterfaceToken(IMediaPlayer::getInterfaceDescriptor());
  data.writeStrongBinder(IInterface::asBinder(source));

  // Send the SET_DATA_SOURCE_STREAM command to invoke setDataSource() remotely on the Bn side
  remote()->transact(SET_DATA_SOURCE_STREAM, data, &reply);

  // Read the status the remote Bn side wrote back and hand it up to the Java application layer
  return reply.readInt32();
 }
};

// Bind the proxy to the interface descriptor so the remote setDataSource() call can be made through a Binder reference
IMPLEMENT_META_INTERFACE(MediaPlayer, "android.media.IMediaPlayer");

// Server side: receive the remote transaction and handle it in onTransact()
status_t BnMediaPlayer::onTransact(uint32_t code, const Parcel& data, Parcel* reply, uint32_t flags) {
  switch (code) {
    // Handling of the SET_DATA_SOURCE_STREAM command
    case SET_DATA_SOURCE_STREAM: {
         CHECK_INTERFACE(IMediaPlayer, data, reply);
         sp<IStreamSource> source = interface_cast<IStreamSource>(data.readStrongBinder());
         if (source == NULL) {
             reply->writeInt32(BAD_VALUE);
         } else {
             // Call the Bn-side (server) setDataSource() implementation and write the result back to the client
             reply->writeInt32(setDataSource(source));
         }
         return NO_ERROR;
    }
  }
}
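The IMediaPlayer excerpt above is one instance of the standard native Binder pattern. For reference, here is a minimal, self-contained sketch of the same pattern for a hypothetical IFoo interface; the interface name, command code and method are invented for illustration, while the Binder macros and classes are the real ones.

// Hypothetical IFoo interface, illustrating the Bp/Bn pattern only.
#include <binder/IInterface.h>
#include <binder/Parcel.h>

using namespace android;

class IFoo : public IInterface {
public:
    DECLARE_META_INTERFACE(Foo);                 // declares asInterface()/getInterfaceDescriptor()
    enum { DO_THING = IBinder::FIRST_CALL_TRANSACTION };
    virtual status_t doThing(int32_t value) = 0; // the remote method
};

class BnFoo : public BnInterface<IFoo> {
public:
    virtual status_t onTransact(uint32_t code, const Parcel& data, Parcel* reply, uint32_t flags = 0);
};

// Client-side proxy: marshals arguments and ships them across the Binder.
class BpFoo : public BpInterface<IFoo> {
public:
    explicit BpFoo(const sp<IBinder>& impl) : BpInterface<IFoo>(impl) {}
    virtual status_t doThing(int32_t value) {
        Parcel data, reply;
        data.writeInterfaceToken(IFoo::getInterfaceDescriptor());
        data.writeInt32(value);
        remote()->transact(DO_THING, data, &reply);   // same role as SET_DATA_SOURCE_STREAM above
        return reply.readInt32();
    }
};

IMPLEMENT_META_INTERFACE(Foo, "demo.IFoo");

// Server side: unmarshal and dispatch to the concrete implementation.
status_t BnFoo::onTransact(uint32_t code, const Parcel& data, Parcel* reply, uint32_t flags) {
    switch (code) {
        case DO_THING: {
            CHECK_INTERFACE(IFoo, data, reply);
            reply->writeInt32(doThing(data.readInt32()));
            return NO_ERROR;
        }
    }
    return BBinder::onTransact(code, data, reply, flags);
}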

2. MediaPlayerService.cpp/h
<1>. Header file
frameworks/av/media/libmediaplayerservice/MediaPlayerService.h
#include <media/MediaPlayerInterface.h> 
class MediaPlayerService : public BnMediaPlayerService{
  private:
         // The Client class inherits from BnMediaPlayer, the remote server side
         class Client : public BnMediaPlayer {
           // The BnMediaPlayer (server-side) setDataSource() is implemented here
           virtual status_t  setDataSource(const sp<IStreamSource> &source);
  };

};

<2>. Source file
frameworks/av/media/libmediaplayerservice/MediaPlayerService.cpp
First:
MediaPlayerService::Client::Client(const sp<MediaPlayerService>& service, pid_t pid, int32_t connId, 
                                   const sp<IMediaPlayerClient>& client, audio_session_t audioSessionId, uid_t uid){
    // Client class initialization    
}
Second:
// setDataSource() as executed on the Bn (server) side: this is where Client, which inherits the remote server side BnMediaPlayer, actually runs it
status_t MediaPlayerService::Client::setDataSource(const sp<IStreamSource> &source) {
   player_type playerType = MediaPlayerFactory::getPlayerType(this, source);
   sp<MediaPlayerBase> p = setDataSource_pre(playerType);
   if(p == NULL) {
      return NO_INIT;
    }

   // now set data source
   // This setDataSource() ultimately calls status_t NuPlayerDriver::setDataSource() in NuPlayerDriver.cpp (step Seven)
   setDataSource_post(p, p->setDataSource(source));
   return mStatus;    
}
NuPlayerDriver.cpp
 ————> setDataSource(){ mPlayer->setDataSourceAsync(httpService, url, headers); }
NuPlayer.cpp
   —————> setDataSourceAsync(){
          // Post the kWhatSetDataSource command and create the kWhatSourceNotify reply message
          sp<AMessage> msg = new AMessage(kWhatSetDataSource, this);
          sp<AMessage> notify = new AMessage(kWhatSourceNotify, this);
          msg->setObject("source", new StreamingSource(notify, source));
          msg->post();
         }
      —————> onMessageReceived(){
              switch (msg->what()) {
                case kWhatSetDataSource: {
                  status_t err = OK; // OK == 0
                  sp<RefBase> obj;
                  mSource = static_cast<Source *>(obj.get());
                  // Promote the weak pointer to a strong one; declared in NuPlayer.h as: wp<NuPlayerDriver> mDriver;
                  sp<NuPlayerDriver> driver = mDriver.promote();
                  if (driver != NULL)
                      driver->notifySetDataSourceCompleted(err);
                }
              }
            }
    ————————-> frameworks/av/media/libmediaplayerservice/nuplayer/StreamingSource.cpp
         NuPlayer::StreamingSource::StreamingSource(const sp<AMessage> &notify, const sp<IStreamSource> &source) : Source(notify), mSource(source) {
         }
         ————————> void NuPlayer::StreamingSource::start(){
                mStreamListener = new NuPlayerStreamListener(mSource, NULL);
                mTSParser = new ATSParser(parserFlags);
                mStreamListener->start();
                postReadBuffer();
            }
            ——————————> status_t NuPlayer::StreamingSource::postReadBuffer() {
                   (new AMessage(kWhatReadBuffer, this))->post(); // post the kWhatReadBuffer command
               }
               ———————> void NuPlayer::StreamingSource::onMessageReceived(){
                      case kWhatReadBuffer:
                          onReadBuffer();
                  }
               ———————> void NuPlayer::StreamingSource::onReadBuffer() {
                      for (int32_t i = 0; i < kNumListenerQueuePackets; ++i) { // loop to keep reading data
                          ssize_t n = mStreamListener->read(buffer, sizeof(buffer), &extra);
                          status_t err = mTSParser->feedTSPacket(buffer, sizeof(buffer));
                      }
                  }
               —————————> frameworks/av/media/libstagefright/mpeg2ts/ATSParser.cpp
                      status_t ATSParser::feedTSPacket(){
                          ABitReader br((const uint8_t *)data, kTSPacketSize);
                          return parseTS(&br, event);
                      }
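Every hop in the trace above relies on the stagefright foundation classes ALooper/AHandler/AMessage: a handler posts an AMessage targeted at itself, and the message comes back on the looper thread through onMessageReceived(). The following is a minimal, self-contained sketch of that pattern; the handler and command names are hypothetical, only the foundation API is real.

#define LOG_TAG "PingHandler"
#include <utils/Log.h>

#include <media/stagefright/foundation/ADebug.h>
#include <media/stagefright/foundation/AHandler.h>
#include <media/stagefright/foundation/ALooper.h>
#include <media/stagefright/foundation/AMessage.h>

using namespace android;

// Hypothetical handler used only to illustrate the post()/onMessageReceived() round trip.
struct PingHandler : public AHandler {
    enum { kWhatPing = 'ping' };

    void ping(int32_t seq) {
        sp<AMessage> msg = new AMessage(kWhatPing, this);  // target = this handler
        msg->setInt32("seq", seq);                         // attach a payload, like "source" above
        msg->post();                                       // delivered asynchronously on the looper thread
    }

protected:
    virtual void onMessageReceived(const sp<AMessage> &msg) {
        switch (msg->what()) {
            case kWhatPing: {
                int32_t seq;
                CHECK(msg->findInt32("seq", &seq));
                ALOGV("got ping #%d", seq);
                break;
            }
        }
    }
};

// Usage: the looper runs its own thread and the handler must be registered on it.
// sp<ALooper> looper = new ALooper;
// looper->setName("demo");
// looper->start();
// sp<PingHandler> handler = new PingHandler;
// looper->registerHandler(handler);
// handler->ping(1);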
Third:
sp<MediaPlayerBase> MediaPlayerService::Client::setDataSource_pre(player_type playerType){
   sp<MediaPlayerBase> p = createPlayer(playerType);
    if(p == NULL) 
      return p;
}

Fourth:
sp<MediaPlayerBase> MediaPlayerService::Client::createPlayer(player_type playerType){
  sp<MediaPlayerBase> p = getPlayer();
  if ((p != NULL) && (p->playerType() != playerType)) {
    p.clear(); 
  }

  if (p == NULL) {
    p = MediaPlayerFactory::createPlayer(playerType, this, notify, mPid);
  }

  return p;
}

Fifth:
frameworks/av/media/libmediaplayerservice/MediaPlayerFactory.cpp
#include "nuplayer/NuPlayerDriver.h" 
sp<MediaPlayerBase> MediaPlayerFactory::createPlayer(player_type playerType, void* cookie, notify_callback_f notifyFunc,  pid_t pid) {
  sp<MediaPlayerBase> p;
  factory = sFactoryMap.valueFor(playerType);
  p = factory->createPlayer(pid);
  if (p == NULL) {
    return p;
  }
}

Sixth:
class NuPlayerFactory : public MediaPlayerFactory::IFactory { 
  public:
       virtual sp<MediaPlayerBase> createPlayer(pid_t pid) { 
         ALOGV(" create NuPlayer");
         // The factory's createPlayer() creates the player by instantiating NuPlayerDriver
         return new NuPlayerDriver(pid);
     }
};

Seventh:
frameworks/av/media/libmediaplayerservice/nuplayer/NuPlayerDriver.cpp
NuPlayerDriver::NuPlayerDriver(pid_t pid) : mState(STATE_IDLE), mIsAsyncPrepare(false), mAsyncResult(UNKNOWN_ERROR), mSetSurfaceInProgress(false), ….){
  ALOGV("NuPlayerDriver(%p)", this);
  mLooper->setName("NuPlayerDriver Looper");
  mLooper->start(false, /* runOnCallingThread */ true,  /* canCallJava */  PRIORITY_AUDIO); 
  // The player itself is created here
  mPlayer = AVNuFactory::get()->createNuPlayer(pid);
  mLooper->registerHandler(mPlayer);
  mPlayer->setDriver(this);
} 

Eighth:
frameworks/av/media/libavextensions/mediaplayerservice/AVNuFactory.cpp
sp<NuPlayer> AVNuFactory::createNuPlayer(pid_t pid) {
   return new NuPlayer(pid);
} 

Ninth:
frameworks/av/media/libmediaplayerservice/nuplayer/NuPlayer.cpp
NuPlayer::NuPlayer(pid_t pid) : mUIDValid(false), mPID(pid), mPausedForBuffering(false) {
    clearFlushComplete(); 
}

II. Analyzing the mp.prepare() flow

III. Analyzing the mp.start() flow
Once setDataSource() has completed, the app layer issues the start() command to begin playback, which is where the decoders get created.
frameworks/av/media/libmediaplayerservice/nuplayer/NuPlayer.cpp
1.void NuPlayer::start(){
   (new AMessage(kWhatStart, this))->post();
}

2.void NuPlayer::onMessageReceived(const sp<AMessage> &msg) {
    switch (msg->what()) {
        case kWhatStart:
        {
            ALOGV("kWhatStart");
            if (mStarted) {
                // do not resume yet if the source is still buffering
                if (!mPausedForBuffering) {
                    onResume();
                }
            } else {
                onStart(); // this branch is taken
            }
            mPausedByClient = false;
            break;
        }
   }
}

3.void NuPlayer::onStart(int64_t startPositionUs) {
    postScanSources();
}

4.void NuPlayer::postScanSources() {
    sp<AMessage> msg = new AMessage(kWhatScanSources, this);
    msg->setInt32("generation", mScanSourcesGeneration);
    msg->post();
}

5.void NuPlayer::onMessageReceived(const sp<AMessage> &msg) {
    switch (msg->what()) {
        case kWhatScanSources:
        {
 
            // initialize video before audio because successful initialization of
            // video may change deep buffer mode of audio.
            if (mSurface != NULL) {
                instantiateDecoder(false, &mVideoDecoder); // initialize the video decoder first
            }
 
            // Don't try to re-open audio sink if there's an existing decoder.
            if (mAudioSink != NULL && mAudioDecoder == NULL) {
                instantiateDecoder(true, &mAudioDecoder); // then initialize the audio decoder
            }
}

6.status_t NuPlayer::instantiateDecoder(bool audio, sp<DecoderBase> *decoder) {
    if (audio) {
        sp<AMessage> notify = new AMessage(kWhatAudioNotify, this);
        ++mAudioDecoderGeneration;
        notify->setInt32("generation", mAudioDecoderGeneration);

        if (mOffloadAudio) {
            const bool hasVideo = (mSource->getFormat(false /*audio */) != NULL);
            format->setInt32("has-video", hasVideo);
            *decoder = AVNuFactory::get()->createPassThruDecoder(notify, mSource, mRenderer);
        } else {
            AVNuUtils::get()->setCodecOutputFormat(format);
            mSource->setOffloadAudio(false /* offload */);
            *decoder = AVNuFactory::get()->createDecoder(notify, mSource, mPID, mRenderer);
        }
    } else {
        sp<AMessage> notify = new AMessage(kWhatVideoNotify, this);

        *decoder = new Decoder(notify, mSource, mPID, mRenderer, mSurface, mCCDecoder);

        // enable FRC if high-quality AV sync is requested, even if not
        // directly queuing to display, as this will even improve textureview
        // playback.
        {
            char value[PROPERTY_VALUE_MAX];
            if (property_get("persist.sys.media.avsync", value, NULL) &&
                    (!strcmp("1", value) || !strcasecmp("true", value))) {
                format->setInt32("auto-frc", 1);
            }
        }
    }
    (*decoder)->init();
    (*decoder)->configure(format); // decoder is declared as: sp<DecoderBase> *decoder
}

7.frameworks/av/media/libmediaplayerservice/nuplayer/NuPlayerDecoderBase.cpp
void NuPlayer::DecoderBase::configure(const sp<AMessage> &format) {
   sp<AMessage> msg = new AMessage(kWhatConfigure, this);
   msg->setMessage("format", format);
   msg->post();
}

void NuPlayer::DecoderBase::onMessageReceived(const sp<AMessage> &msg) {
   switch (msg->what()) {
    case kWhatConfigure: {
         sp<AMessage> format;
         onConfigure(format); 
	 break;
    }
  }
}

8.frameworks/av/media/libmediaplayerservice/nuplayer/NuPlayerDecoder.cpp 
// The Decoder class derives from DecoderBase
NuPlayer::Decoder::Decoder(const sp<AMessage> &notify, const sp<CCDecoder> &ccDecoder) : DecoderBase(notify) {
}

void NuPlayer::Decoder::onConfigure(const sp<AMessage> &format) {
   MediaCodec::CreateByType(mCodecLooper, mime.c_str(), false /* encoder */, NULL /* err */, mPid); 
}
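onConfigure() creates a codec by MIME type exactly the way any native MediaCodec client would. As a reference, here is a hedged sketch of that standalone usage; the looper name and format values are made up for illustration, and the MediaCodec calls mirror the Android 7.1 framework C++ API.

#include <gui/Surface.h>
#include <media/stagefright/MediaCodec.h>
#include <media/stagefright/foundation/ALooper.h>
#include <media/stagefright/foundation/AMessage.h>

using namespace android;

static sp<MediaCodec> createAvcDecoder() {
    sp<ALooper> looper = new ALooper;
    looper->setName("codec-looper");
    looper->start();

    // Same call NuPlayer::Decoder makes: pick a decoder matching the MIME type.
    sp<MediaCodec> codec = MediaCodec::CreateByType(looper, "video/avc", false /* encoder */);
    if (codec == NULL) {
        return NULL;
    }

    // Describe the stream in an AMessage-based format, then configure and start.
    sp<AMessage> format = new AMessage;
    format->setString("mime", "video/avc");
    format->setInt32("width", 1280);
    format->setInt32("height", 720);

    if (codec->configure(format, NULL /* surface */, NULL /* crypto */, 0 /* flags */) != OK
            || codec->start() != OK) {
        return NULL;
    }
    return codec;   // from here on, buffers are exchanged with ACodec/OMX underneath
}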

9.frameworks/av/media/libstagefright/MediaCodec.cpp
sp<MediaCodec> MediaCodec::CreateByType(){
  sp<MediaCodec> codec = new MediaCodec(looper, pid);
  const status_t ret = codec->init(mime, true /* nameIsType */, encoder);
}

status_t MediaCodec::init(const AString &name, bool nameIsType, bool encoder) { 
   sp<AMessage> msg = new AMessage(kWhatInit, this);
}

void MediaCodec::onMessageReceived(const sp<AMessage> &msg) {
   case kWhatInit: {
        mCodec->initiateAllocateComponent(format);
  }
}

10.frameworks/av/media/libstagefright/ACodec.cpp
void ACodec::initiateAllocateComponent(const sp<AMessage> &msg) {
    msg->setWhat(kWhatAllocateComponent); 
    msg->setTarget(this); 
    msg->post();
}

// Message handling
bool ACodec::UninitializedState::onMessageReceived(const sp<AMessage> &msg) {
     onAllocateComponent(msg);
     handled = true;
     break;
}

// Component initialization
/*
ACodec stores the OMX Bp proxy object and the node id in its private members so it can talk to OMX later. The OMX Bp proxy communicates with the OMX Bn side, and the OMX Bn uses the node id to look up the OMXNodeInstance object kept in mNodeIDToInstance; that object holds the operating handle of the codec component.
In onAllocateComponent(), an OMXClient connects to OMX and, through its interface() method, returns the OMX Bp object obtained during the connection to ACodec, which stores and uses it. Connecting to OMX means obtaining OMX Bp proxy objects from the media.player and media.codec processes.
*/
bool ACodec::UninitializedState::onAllocateComponent(const sp<AMessage> &msg) {
    // (1) The OMX client connects to OMX
    OMXClient client;
    if (client.connect() != OK) {
        mCodec->signalError(OMX_ErrorUndefined, NO_INIT);
        return false;
    }

    // (2) Obtain the OMX Bp proxy object
    sp<IOMX> omx = client.interface();

    // Look up decoders matching the video information carried in mVideoTrack.
    MediaCodecList::findMatchingCodecs(mime.c_str(), encoder, 0, &matchingCodecs);
   
    // Create a CodecObserver instance and a node id initialized to 0.
    sp<CodecObserver> observer = new CodecObserver;
    IOMX::node_id node = 0;

    // (3) Allocate the node.
    // Through the omx entry point, allocateNode() in the OMX service is invoked over Binder; the matched
    // component name, the CodecObserver instance and the zero-initialized node are all passed in.
    err = omx->allocateNode(componentName.c_str(), observer, &mCodec->mNodeBinder, &node);

    notify = new AMessage(kWhatOMXMessageList, mCodec);
    observer->setNotificationMessage(notify);
    mCodec->mOMX = omx;
    mCodec->mNode = node;
}

(1). The OMX client connects to OMX
frameworks/av/media/libstagefright/OMXClient.cpp
// Obtain the IOMX Bp proxy objects from the media.player and media.codec services and wrap them in MuxOMX; what ACodec gets back is the mOMX member.
status_t OMXClient::connect() { 
  sp<IServiceManager> sm = defaultServiceManager();
  sp<IBinder> playerbinder = sm->getService(String16("media.player"));
  sp<IMediaPlayerService> mediaservice = interface_cast<IMediaPlayerService>(playerbinder);

  sp<IOMX> mediaServerOMX = mediaservice->getOMX();

  sp<IBinder> codecbinder = sm->getService(String16("media.codec"));
  sp<IMediaCodecService> codecservice = interface_cast<IMediaCodecService>(codecbinder); 
  sp<IOMX> mediaCodecOMX = codecservice->getOMX();

  mOMX = new MuxOMX(mediaServerOMX, mediaCodecOMX); // MuxOMX wraps both; mOMX is what ACodec later fetches via interface()
}
// MuxOMX class implementation
MuxOMX::MuxOMX(const sp<IOMX> &mediaServerOMX, const sp<IOMX> &mediaCodecOMX): mMediaServerOMX(mediaServerOMX), mMediaCodecOMX(mediaCodecOMX) {
    ALOGI("MuxOMX ctor");    
}

frameworks/av/media/libmediaplayerservice/MediaPlayerService.cpp
sp<IOMX> MediaPlayerService::getOMX() {
  mOMX = new OMX;

  return mOMX;
}

frameworks/av/media/libstagefright/omx/OMX.cpp
OMX::OMX(): mMaster(new OMXMaster), mNodeCounter(0) {
}

frameworks/av/media/libstagefright/omx/OMXMaster.cpp
OMXMaster::OMXMaster(){
   addVendorPlugin(); ======> calls addPlugin("libstagefrighthw.so"); loads the vendor hardware codec library
   addPlugin(new SoftOMXPlugin); =======> loads the software codec plugin
}
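Loading a vendor plugin by library name boils down to dlopen plus a lookup of a well-known factory symbol. The sketch below (the helper name loadOmxPlugin is hypothetical) is a simplified reconstruction of that mechanism, not the exact OMXMaster code; the symbol name createOMXPlugin follows the convention used by stagefright hardware plugins.

#include <dlfcn.h>
#include <media/hardware/OMXPluginBase.h>

// Simplified sketch of dynamically loading an OMX plugin library, roughly what
// addPlugin("libstagefrighthw.so") has to do before handing the plugin to OMXMaster.
static android::OMXPluginBase *loadOmxPlugin(const char *libName) {
    void *handle = dlopen(libName, RTLD_NOW);
    if (handle == NULL) {
        return NULL;   // vendor library not present on this device
    }

    // Plugins export a factory function with this conventional name.
    typedef android::OMXPluginBase *(*CreateOMXPluginFunc)();
    CreateOMXPluginFunc createOMXPlugin =
            (CreateOMXPluginFunc)dlsym(handle, "createOMXPlugin");

    return (createOMXPlugin != NULL) ? createOMXPlugin() : NULL;
}

// e.g. loadOmxPlugin("libstagefrighthw.so") for the vendor hardware codecs.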

(2). Obtaining the OMX Bp proxy object
frameworks/av/include/media/stagefright/OMXClient.h
// This ties back to step (1): interface() simply returns the mOMX object constructed there. The implementation:
sp<IOMX> interface() {
    return mOMX; // hand the instance created in connect() back to the caller
}

(3). Allocating the node
/*
The local variable omx in onAllocateComponent() is actually a MuxOMX object, so the allocateNode() being called lives in OMXClient.cpp. The return value of getPreferredCodecLocation() decides whether the IOMX Bp proxy used afterwards belongs to the media.player service or to the media.codec process; either way, the node is obtained by talking to OMX through an IOMX Bp proxy. The node itself is just an unsigned 32-bit integer, but through a key-value mapping it corresponds to the operating handle of a codec component.
*/
frameworks/av/media/libstagefright/OMXClient.cpp
status_t MuxOMX::allocateNode(){
   sp<IOMX> omx;
   node_location loc = getPreferredCodecLocation(name);
   status_t err = omx->allocateNode(name, observer, nodeBinder, node); 
}

// Judging from the comments in the function below, non-secure decoders and OMX.google.* software codecs normally run in the media.codec process, while hardware decoding runs in the mediaserver process, which is more secure.
MuxOMX::node_location MuxOMX::getPreferredCodecLocation(const char *name) { 
   if (sCodecProcessEnabled) {
        // all codecs go to codec process unless excluded using system property, in which case
        // all non-secure decoders, OMX.google.* codecs and encoders can go in the codec process
        // (non-OMX.google.* encoders can be excluded using system property.)
        if ((strcasestr(name, "decoder")
                        && strcasestr(name, ".secure") != name + strlen(name) - 7)
                || (strcasestr(name, "encoder")
                        && !property_get_bool("media.stagefright.legacyencoder", false))
                || !property_get_bool("media.stagefright.less-secure", false)
                || !strncasecmp(name, "OMX.google.", 11)) {
            return CODECPROCESS;
        }
        // everything else runs in the media server
        return MEDIAPROCESS;
    } else {
        ......  
    }
}

The IOMX Bp proxy communicates with OMX by sending the ALLOCATE_NODE command; OMX calls its allocateNode() interface and packs the resulting node into the reply for the IOMX Bp proxy, so the two can keep interacting afterwards.
Analysis of OMX's allocateNode() interface: roughly three steps.
frameworks/av/media/libmedia/IOMX.cpp
class BpOMX : public BpInterface<IOMX> {
  virtual status_t allocateNode(const char *name, const sp<IOMXObserver> &observer,sp<IBinder> *nodeBinder,node_id *node) {
       Parcel data, reply;
       remote()->transact(ALLOCATE_NODE, data, &reply);
  }
}

// Bn-side dispatch of the IOMX interface; the call lands in OMX.cpp
IMPLEMENT_META_INTERFACE(OMX, "android.hardware.IOMX");
status_t BnOMX::onTransact(int32_t code, const Parcel &data, Parcel *reply, uint32_t flags) {
  case ALLOCATE_NODE:{
       sp<IOMXObserver> observer =interface_cast<IOMXObserver>(data.readStrongBinder());
       status_t err = allocateNode(name, observer, NULL /* nodeBinder */, &node);
       reply->writeInt32(err);
  }
}

// From IOMX to OMX: the proxy type (IOMX.cpp) resolves to the concrete implementation (OMX.cpp)
frameworks/av/include/media/IOMX.h
class IOMX : public IInterface {
  public:
         DECLARE_META_INTERFACE(OMX);
};

frameworks/av/media/libstagefright/omx/OMX.cpp
status_t OMX::allocateNode(
    const char *name, const sp<IOMXObserver> &observer,
    sp<IBinder> *nodeBinder, node_id *node) {
   Mutex::Autolock autoLock(mLock);

   *node = 0;
   if (nodeBinder != NULL) {
       *nodeBinder = NULL;
   }

   if (mNodeIDToInstance.size() == kMaxNodeInstances) {
       // all possible node IDs are in use
       return NO_MEMORY;
   }

   // Step (3)-1: instantiate an OMXNodeInstance object that stores the node id, the codec component handle, the observer passed down from ACodec, and so on
   OMXNodeInstance *instance = new OMXNodeInstance(this, observer, name);

   OMX_COMPONENTTYPE *handle;

   // Step (3)-2: the master asks the plugin to create the corresponding codec component and return its operating handle
   OMX_ERRORTYPE err = mMaster->makeComponentInstance(
           name, &OMXNodeInstance::kCallbacks,
           instance, &handle);

   if (err != OMX_ErrorNone) {
       ALOGE("FAILED to allocate omx component '%s' err=%s(%#x)", name, asString(err), err);

       instance->onGetHandleFailed();

       return StatusFromOMXError(err);
   }

   // Step (3)-3: generate the node id and store it together with the instance
   *node = makeNodeID_l(instance);
   mDispatchers.add(*node, new CallbackDispatcher(instance));

   instance->setHandle(*node, handle);

   mLiveNodes.add(IInterface::asBinder(observer), instance);
   IInterface::asBinder(observer)->linkToDeath(this);

   return OK;
}

Step (3)-1: instantiate the OMXNodeInstance object that stores the key information
frameworks/av/media/libstagefright/omx/OMXNodeInstance.cpp
/* The OMXNodeInstance object holds several key pieces of state: the generated node id (mNodeID), the observer passed down from ACodec, the handle of the codec component created inside the plugin, and the omx object passed in when the OMXNodeInstance was constructed.
mNodeID: eventually returned to ACodec; the node id is mainly used inside OMX. OMX's private members mNodeIDToInstance and mDispatchers map a node id to its OMXNodeInstance and CallbackDispatcher. Later calls use the node id handed down by ACodec to find the corresponding OMXNodeInstance or CallbackDispatcher, and ultimately the operating handle of the codec component.
observer: the OMX observer Bn object created by ACodec when initializing the codec component; it is used to pass FillBufferDone (FBD), EmptyBufferDone (EBD) and event callbacks from OMXNodeInstance back to ACodec.
handle: the operating handle of the codec component, the object that actually gets operated on. Everything above is plumbing; the real work is done through this handle, which drives the plugin.
omx: merely a copy kept inside OMXNodeInstance; it appears to be used, but subsequent operations come back to OMXNodeInstance anyway.
*/

OMXNodeInstance::OMXNodeInstance(
    OMX *owner, const sp<IOMXObserver> &observer, const char *name)
: mOwner(owner),
  mNodeID(0),
  mHandle(NULL),
  mObserver(observer),
  mDying(false),
  mSailed(false),
  mQueriedProhibitedExtensions(false),
  mBufferIDCount(0)
{
    ......
}

Step (3)-2: the master asks the plugin to create the corresponding codec component and return its operating handle
/*
The key part of makeComponentInstance() is first locating the specified plugin and then asking that plugin to create the corresponding codec component. That part is examined in more detail later.
*/
OMX_ERRORTYPE OMXMaster::makeComponentInstance(
        const char *name,
        const OMX_CALLBACKTYPE *callbacks,
        OMX_PTR appData,
        OMX_COMPONENTTYPE **component) {
        ALOGI("makeComponentInstance(%s) in %s process", name, mProcessName);
        Mutex::Autolock autoLock(mLock);

        *component = NULL;

        ssize_t index = mPluginByComponentName.indexOfKey(String8(name));

        if (index < 0) {
            return OMX_ErrorInvalidComponentName;
        }

        OMXPluginBase *plugin = mPluginByComponentName.valueAt(index);
        OMX_ERRORTYPE err =
            plugin->makeComponentInstance(name, callbacks, appData, component);

        if (err != OMX_ErrorNone) {
            return err;
        }

        mPluginByInstance.add(*component, plugin);

        return err;
}

Step (3)-3: generate the node id and store it together with the instance
/*
The generated node id is of type node_id, essentially a 32-bit unsigned int: the high 16 bits hold the process id and the low 16 bits hold the mNodeCounter count. The key point is that this node id is associated with the instance variable and stored as a key-value pair in mNodeIDToInstance. Later, given only the node id, the corresponding instance can be retrieved, and from it the decoder operating handle stored inside.
*/
OMX::node_id OMX::makeNodeID_l(OMXNodeInstance *instance) {
    // mLock is already held.

    node_id prefix = node_id(getpid() << 16);
    node_id node = 0;
    do  {
        if (++mNodeCounter >= kMaxNodeInstances) {
            mNodeCounter = 0; // OK to use because we're combining with the pid
        }
        node = node_id(prefix | mNodeCounter);
    } while (mNodeIDToInstance.indexOfKey(node) >= 0);
    mNodeIDToInstance.add(node, instance);

    return node;
}
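As a quick illustration of the packing scheme described above (illustration only, not AOSP code; the pid and counter values are made up):

#include <cstdint>
#include <cstdio>

int main() {
    // Build a node_id the way makeNodeID_l() does: pid in the high 16 bits,
    // counter in the low 16 bits.
    uint32_t node = (1234u << 16) | 7u;

    uint32_t pid     = node >> 16;       // high 16 bits: process id
    uint32_t counter = node & 0xFFFFu;   // low 16 bits: mNodeCounter value
    printf("node=0x%08x pid=%u counter=%u\n", node, pid, counter);
    return 0;
}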

While creating a codec component, ACodec obtains the OMX Bp proxy object through OMXClient and uses it to communicate with OMX. After receiving the ALLOCATE_NODE request, OMX calls its allocateNode() interface, which in turn asks the plugin to create the corresponding codec component and return its operating handle; all subsequent work is done through that handle. The handle is stored in an instance object (of type OMXNodeInstance), paired with a node id, a 32-bit unsigned int; the two are stored as a key-value pair in mNodeIDToInstance. What ACodec ends up holding is just this node id and the OMX Bp proxy object.

On the O release, the OMX Bp/Bn communication changed substantially because of HIDL, but what is ultimately invoked is still the instance object, and what is operated on is still the handle stored inside it.
