Android Camera模块(二)

Camera Framework层

Camera Framework层即CameraServer服务实现。CameraServer是Native Service,代码位于frameworks/av/services/camera/libcameraservice/。
CameraServer承上启下:上对应用提供AOSP的接口服务,下和HAL直接交互。一般而言,CameraServer出现问题的概率极低,大部分问题还是出现在App层及HAL层。
我们这里仍然是从上往下进行分析:

一、OpenCamera流程

1.1 CameraManager.openCamera

在Camera模块(一)中2.3小节提到过打开相机调用的是CameraManager的openCamera返回CameraDevice。这节我们继续深入分析。

/frameworks/base/core/java/android/hardware/camera2/CameraManager.java
720      @RequiresPermission(android.Manifest.permission.CAMERA)
721      public void openCamera(@NonNull String cameraId,
722              @NonNull final CameraDevice.StateCallback callback, @Nullable Handler handler)
723              throws CameraAccessException {
724  
725          openCameraForUid(cameraId, callback, CameraDeviceImpl.checkAndWrapHandler(handler),
726                  USE_CALLING_UID);
727      }
853      public void openCameraForUid(@NonNull String cameraId,
854              @NonNull final CameraDevice.StateCallback callback, @NonNull Executor executor,
855              int clientUid, int oomScoreOffset) throws CameraAccessException {
856  
857          if (cameraId == null) {
858              throw new IllegalArgumentException("cameraId was null");
859          } else if (callback == null) {
860              throw new IllegalArgumentException("callback was null");
861          }
862          if (CameraManagerGlobal.sCameraServiceDisabled) {
863              throw new IllegalArgumentException("No cameras available on device");
864          }
865  
866          openCameraDeviceUserAsync(cameraId, callback, executor, clientUid, oomScoreOffset);
867      }
567      private CameraDevice openCameraDeviceUserAsync(String cameraId,
568              CameraDevice.StateCallback callback, Executor executor, final int uid,
569              final int oomScoreOffset) throws CameraAccessException {
570          CameraCharacteristics characteristics = getCameraCharacteristics(cameraId);
571          CameraDevice device = null;
572          Map<String, CameraCharacteristics> physicalIdsToChars =
573                  getPhysicalIdToCharsMap(characteristics);
574          synchronized (mLock) {
575  
576              ICameraDeviceUser cameraUser = null;
                 //1.构建需要返回的CameraDevice
577              android.hardware.camera2.impl.CameraDeviceImpl deviceImpl =
578                      new android.hardware.camera2.impl.CameraDeviceImpl(
579                          cameraId,
580                          callback,
581                          executor,
582                          characteristics,
583                          physicalIdsToChars,
584                          mContext.getApplicationInfo().targetSdkVersion,
585                          mContext);
586  
587              ICameraDeviceCallbacks callbacks = deviceImpl.getCallbacks();
588  
589              try {
590                  ICameraService cameraService = CameraManagerGlobal.get().getCameraService();
591                  if (cameraService == null) {
592                      throw new ServiceSpecificException(
593                          ICameraService.ERROR_DISCONNECTED,
594                          "Camera service is currently unavailable");
595                  }
                     //2.通过binder调用CameraService的connectDevice
596                  cameraUser = cameraService.connectDevice(callbacks, cameraId,
597                      mContext.getOpPackageName(),  mContext.getAttributionTag(), uid,
598                      oomScoreOffset, mContext.getApplicationInfo().targetSdkVersion);
599              } catch (ServiceSpecificException e) {
600                  ...
622              } catch (RemoteException e) {
623                  ...
629              }
630  
631              // TODO: factor out callback to be non-nested, then move setter to constructor
632              // For now, calling setRemoteDevice will fire initial
633              // onOpened/onUnconfigured callbacks.
634              // This function call may post onDisconnected and throw CAMERA_DISCONNECTED if
635              // cameraUser dies during setup.
    
                 //3.将ICameraDeviceUser设置给deviceImpl返回给CameraApp
636              deviceImpl.setRemoteDevice(cameraUser);
637              device = deviceImpl;
638          }
639  
640          return device;
641      }

CameraManager是通过ICameraService和CameraServer进行通信,Service的name为“media.camera”,最终调用到CameraService的connectDevice。

1.2 CameraService.connectDevice

/frameworks/av/services/camera/libcameraservice/CameraService.cpp
1609  Status CameraService::connectDevice(
1610          const sp<hardware::camera2::ICameraDeviceCallbacks>& cameraCb,
1611          const String16& cameraId,
1612          const String16& clientPackageName,
1613          const std::optional<String16>& clientFeatureId,
1614          int clientUid, int oomScoreOffset, int targetSdkVersion,
1615          /*out*/
1616          sp<hardware::camera2::ICameraDeviceUser>* device) {
1617  
1618      ATRACE_CALL();
1619      Status ret = Status::ok();
1620      String8 id = String8(cameraId);
1621      sp<CameraDeviceClient> client = nullptr;
1622      String16 clientPackageNameAdj = clientPackageName;
1623      int callingPid = CameraThreadState::getCallingPid();
1624  
1625      if (getCurrentServingCall() == BinderCallType::HWBINDER) {
1626          std::string vendorClient =
1627                  StringPrintf("vendor.client.pid<%d>", CameraThreadState::getCallingPid());
1628          clientPackageNameAdj = String16(vendorClient.c_str());
1629      }
1630  
1631      if (oomScoreOffset < 0) {
1632          String8 msg =
1633                  String8::format("Cannot increase the priority of a client %s pid %d for "
1634                          "camera id %s", String8(clientPackageNameAdj).string(), callingPid,
1635                          id.string());
1636          ALOGE("%s: %s", __FUNCTION__, msg.string());
1637          return STATUS_ERROR(ERROR_ILLEGAL_ARGUMENT, msg.string());
1638      }
1639       //1.检查系统的camera权限 
1640      // enforce system camera permissions
1641      if (oomScoreOffset > 0 &&
1642              !hasPermissionsForSystemCamera(callingPid, CameraThreadState::getCallingUid())) {
1643          String8 msg =
1644                  String8::format("Cannot change the priority of a client %s pid %d for "
1645                          "camera id %s without SYSTEM_CAMERA permissions",
1646                          String8(clientPackageNameAdj).string(), callingPid, id.string());
1647          ALOGE("%s: %s", __FUNCTION__, msg.string());
1648          return STATUS_ERROR(ERROR_PERMISSION_DENIED, msg.string());
1649      }
1650      //2.调用connectHelper来构建返回的CameraDevice,这里是CameraDeviceClient
1651      ret = connectHelper<hardware::camera2::ICameraDeviceCallbacks,CameraDeviceClient>(cameraCb, id,
1652              /*api1CameraId*/-1, clientPackageNameAdj, clientFeatureId,
1653              clientUid, USE_CALLING_PID, API_2, /*shimUpdateOnly*/ false, oomScoreOffset,
1654              targetSdkVersion, /*out*/client);
1655  
1656      if(!ret.isOk()) {
1657          logRejected(id, callingPid, String8(clientPackageNameAdj), ret.toString8());
1658          return ret;
1659      }
1660  
1661      *device = client;
           ...
1678      return ret;
1679  }
1680

这时已经通过Binder调用到CameraServer进程。connectDevice主要做了两件事:1.检查权限;2.调用connectHelper来连接Camera,并传入client来接收返回的结果。接着我们来看connectHelper是如何返回client的:

/frameworks/av/services/camera/libcameraservice/CameraService.cpp
1681  template<class CALLBACK, class CLIENT>
1682  Status CameraService::connectHelper(const sp<CALLBACK>& cameraCb, const String8& cameraId,
1683          int api1CameraId, const String16& clientPackageName,
1684          const std::optional<String16>& clientFeatureId, int clientUid, int clientPid,
1685          apiLevel effectiveApiLevel, bool shimUpdateOnly, int oomScoreOffset, int targetSdkVersion,
1686          /*out*/sp<CLIENT>& device) {
          ...
1698  
1699      sp<CLIENT> client = nullptr;
              ...
1773  
1774          sp<BasicClient> tmp = nullptr;
1775          bool overrideForPerfClass = SessionConfigurationUtils::targetPerfClassPrimaryCamera(
1776                  mPerfClassPrimaryCameraIds, cameraId.string(), targetSdkVersion);
              //1.通过makeClient创建BasicClient,传入tmp接收结果
1777          if(!(ret = makeClient(this, cameraCb, clientPackageName, clientFeatureId,
1778                  cameraId, api1CameraId, facing, orientation,
1779                  clientPid, clientUid, getpid(),
1780                  deviceVersion, effectiveApiLevel, overrideForPerfClass,
1781                  /*out*/&tmp)).isOk()) {
1782              return ret;
1783          }
1784          client = static_cast<CLIENT*>(tmp.get());
1785          
1786          LOG_ALWAYS_FATAL_IF(client.get() == nullptr, "%s: CameraService in invalid state",
1787                  __FUNCTION__);
1788          //2.初始化client
1789          err = client->initialize(mCameraProviderManager, mMonitorTags);
              // client->initialize的错误处理
1790          if (err != OK) {
1791              ALOGE("%s: Could not initialize client from HAL.", __FUNCTION__);
1792              // Errors could be from the HAL module open call or from AppOpsManager
1793              switch(err) {
1794                  ...
1816          }
1817          //这里effectiveApiLevel传入的是API_2
1818          // Update shim paremeters for legacy clients
1819          if (effectiveApiLevel == API_1) {
1820              ...
1832          }
1833  
              ...
1882  
1883      // Important: release the mutex here so the client can call back into the service from its
1884      // destructor (can be at the end of the call)
          //3.将client赋给device返回
1885      device = client;
1886  
1887      int32_t openLatencyMs = ns2ms(systemTime() - openTimeNs);
1888      CameraServiceProxyWrapper::logOpen(cameraId, facing, clientPackageName,
1889              effectiveApiLevel, isNdk, openLatencyMs);
1890  
1891      return ret;
1892  }

connectHelper首先会通过makeClient创建Client对象:如果CameraDevice的HAL version为1.0那么会直接报错;如果effectiveApiLevel是API_1,那么会新建一个Camera2Client;如果是API_2则会新建一个CameraDeviceClient对象。这里我们的effectiveApiLevel是API_2,这两个Client都继承自BasicClient。创建完CameraDeviceClient后会调用其initialize来完成初始化。

1.3 initialize初始化

接着看一下initialize都做了一些什么吧。

/frameworks/av/services/camera/libcameraservice/api2/CameraDeviceClient.cpp
109  status_t CameraDeviceClient::initialize(sp<CameraProviderManager> manager,
110          const String8& monitorTags) {
111      return initializeImpl(manager, monitorTags);
112  }
114  template<typename TProviderPtr>
115  status_t CameraDeviceClient::initializeImpl(TProviderPtr providerPtr, const String8& monitorTags) {
116      ATRACE_CALL();
117      status_t res;
118      //1.调用Camera2ClientBase的initialize最终调用到mDevice->initialize初始化
119      res = Camera2ClientBase::initialize(providerPtr, monitorTags);
120      if (res != OK) {
121          return res;
122      }
123  
124      String8 threadName;
125      mFrameProcessor = new FrameProcessorBase(mDevice);
126      threadName = String8::format("CDU-%s-FrameProc", mCameraIdStr.string());
127      mFrameProcessor->run(threadName.string());
128  
129      mFrameProcessor->registerListener(camera2::FrameProcessorBase::FRAME_PROCESSOR_LISTENER_MIN_ID,
130                                        camera2::FrameProcessorBase::FRAME_PROCESSOR_LISTENER_MAX_ID,
131                                        /*listener*/this,
132                                        /*sendPartials*/true);
133  
134      const CameraMetadata &deviceInfo = mDevice->info();
135      camera_metadata_ro_entry_t physicalKeysEntry = deviceInfo.find(
136              ANDROID_REQUEST_AVAILABLE_PHYSICAL_CAMERA_REQUEST_KEYS);
137      if (physicalKeysEntry.count > 0) {
138          mSupportedPhysicalRequestKeys.insert(mSupportedPhysicalRequestKeys.begin(),
139                  physicalKeysEntry.data.i32,
140                  physicalKeysEntry.data.i32 + physicalKeysEntry.count);
141      }
142  
143      mProviderManager = providerPtr;
144      // Cache physical camera ids corresponding to this device and also the high
145      // resolution sensors in this device + physical camera ids
146      mProviderManager->isLogicalCamera(mCameraIdStr.string(), &mPhysicalCameraIds);
147      if (isUltraHighResolutionSensor(mCameraIdStr)) {
148          mHighResolutionSensors.insert(mCameraIdStr.string());
149      }
150      for (auto &physicalId : mPhysicalCameraIds) {
151          if (isUltraHighResolutionSensor(String8(physicalId.c_str()))) {
152              mHighResolutionSensors.insert(physicalId.c_str());
153          }
154      }
155      return OK;
156  }

CameraDeviceClient继承自Camera2ClientBase在CameraDeviceClient新建时Camera2ClientBase的构造函数也会新建一个Camera3Device

/frameworks/av/services/camera/libcameraservice/common/Camera2ClientBase.cpp
45  template <typename TClientBase>
46  Camera2ClientBase<TClientBase>::Camera2ClientBase(
47          ...
            //新建一个Camera3Device对象给mDevice
64          mDevice(new Camera3Device(cameraId, overrideForPerfClass)),
65          mDeviceActive(false), mApi1CameraId(api1CameraId)
66  {
67      ALOGI("Camera %s: Opened. Client: %s (PID %d, UID %d)", cameraId.string(),
68              String8(clientPackageName).string(), clientPid, clientUid);
69  
70      mInitialClientPid = clientPid;
71      LOG_ALWAYS_FATAL_IF(mDevice == 0, "Device should never be NULL here.");
72  }
    
86  template <typename TClientBase>
87  status_t Camera2ClientBase<TClientBase>::initialize(sp<CameraProviderManager> manager,
88          const String8& monitorTags) {
89      return initializeImpl(manager, monitorTags);
90  }
91  
92  template <typename TClientBase>
93  template <typename TProviderPtr>
94  status_t Camera2ClientBase<TClientBase>::initializeImpl(TProviderPtr providerPtr,
95          const String8& monitorTags) {
96      ATRACE_CALL();
97      ALOGV("%s: Initializing client for camera %s", __FUNCTION__,
98            TClientBase::mCameraIdStr.string());
99      status_t res;
100      //验证appop 权限
101      // Verify ops permissions
102      res = TClientBase::startCameraOps();
103      if (res != OK) {
104          return res;
105      }
106  
         ...
112      //这里的mDevice指的是Camera3Device。
113      res = mDevice->initialize(providerPtr, monitorTags);
114      if (res != OK) {
115          ALOGE("%s: Camera %s: unable to initialize device: %s (%d)",
116                  __FUNCTION__, TClientBase::mCameraIdStr.string(), strerror(-res), res);
117          return res;
118      }
119  
120      wp<NotificationListener> weakThis(this);
121      res = mDevice->setNotifyCallback(weakThis);
122  
123      return OK;
124  }
125

所以当调用到Camera2ClientBase的initialize时,最终会调用到Camera3Device的initialize:

/frameworks/av/services/camera/libcameraservice/device3/Camera3Device.cpp
114  status_t Camera3Device::initialize(sp<CameraProviderManager> manager, const String8& monitorTags) {
         ...
         //1.获取hal层接口ICameraDeviceSession 这里的ICameraDeviceSession是和CameraHal沟通的主要接口
126      sp<ICameraDeviceSession> session;
127      ATRACE_BEGIN("CameraHal::openSession");
128      status_t res = manager->openSession(mId.string(), this,
129              /*out*/ &session);
130      ATRACE_END();
131      if (res != OK) {
132          SET_ERR_L("Could not open camera session: %s (%d)", strerror(-res), res);
133          return res;
134      }
135  
         ...
183      //2.从session中获取RequestMetadataQueue的queue
184      std::shared_ptr<RequestMetadataQueue> queue;
185      auto requestQueueRet = session->getCaptureRequestMetadataQueue(
186          [&queue](const auto& descriptor) {
187              queue = std::make_shared<RequestMetadataQueue>(descriptor);
188              if (!queue->isValid() || queue->availableToWrite() <= 0) {
189                  ALOGE("HAL returns empty request metadata fmq, not use it");
190                  queue = nullptr;
191                  // don't use the queue onwards.
192              }
193          });
194      if (!requestQueueRet.isOk()) {
195          ALOGE("Transaction error when getting request metadata fmq: %s, not use it",
196                  requestQueueRet.description().c_str());
197          return DEAD_OBJECT;
198      }
199  
         ...
         //3.利用session和获取到的RequestMetadataQueue新建一个HalInterface
240      mInterface = new HalInterface(session, queue, mUseHalBufManager, mSupportOfflineProcessing);
241      std::string providerType;
242      mVendorTagId = manager->getProviderTagIdLocked(mId.string());
243      mTagMonitor.initialize(mVendorTagId);
244      if (!monitorTags.isEmpty()) {
245          mTagMonitor.parseTagsToMonitor(String8(monitorTags));
246      }
247  
248      // Metadata tags needs fixup for monochrome camera device version less
249      // than 3.5.
250      hardware::hidl_version maxVersion{0,0};
251      res = manager->getHighestSupportedVersion(mId.string(), &maxVersion);
252      if (res != OK) {
253          ALOGE("%s: Error in getting camera device version id: %s (%d)",
254                  __FUNCTION__, strerror(-res), res);
255          return res;
256      }
257      int deviceVersion = HARDWARE_DEVICE_API_VERSION(
258              maxVersion.get_major(), maxVersion.get_minor());
259  
260      bool isMonochrome = false;
261      for (size_t i = 0; i < capabilities.count; i++) {
262          uint8_t capability = capabilities.data.u8[i];
263          if (capability == ANDROID_REQUEST_AVAILABLE_CAPABILITIES_MONOCHROME) {
264              isMonochrome = true;
265          }
266      }
267      mNeedFixupMonochromeTags = (isMonochrome && deviceVersion < CAMERA_DEVICE_API_VERSION_3_5);
268      //4.完成剩余的初始化工作
269      return initializeCommonLocked();
270  }

Camera3Device在初始化时会做以下几件事情:

1.通过CameraProviderManager的opensession获取hal层的ICameraDeviceSession接口。

2.从上面的获取到的session中获取到类型为RequestMetadataQueue的queue

3.将上面的session和queue作为参数构建一个HalInterface

4.完成剩余的初始化的工作。

下面我们主要看一下如何获取session的

1.3.1 CameraProviderManager.openSession

/frameworks/av/services/camera/libcameraservice/common/CameraProviderManager.cpp
374  status_t CameraProviderManager::openSession(const std::string &id,
375          const sp<device::V3_2::ICameraDeviceCallback>& callback,
376          /*out*/
377          sp<device::V3_2::ICameraDeviceSession> *session) {
378  
379      std::lock_guard<std::mutex> lock(mInterfaceMutex);
380      //1.通过findDeviceInfoLocked查找对应的DeviceInfo
381      auto deviceInfo = findDeviceInfoLocked(id,
382              /*minVersion*/ {3,0}, /*maxVersion*/ {4,0});
383      if (deviceInfo == nullptr) return NAME_NOT_FOUND;
384       
385      auto *deviceInfo3 = static_cast<ProviderInfo::DeviceInfo3*>(deviceInfo);
         ...
395  
396      Status status;
397      hardware::Return<void> ret;
398      auto interface = deviceInfo3->startDeviceInterface<
399              CameraProviderManager::ProviderInfo::DeviceInfo3::InterfaceT>();
400      if (interface == nullptr) {
401          return DEAD_OBJECT;
402      }
403      //2.调用open方法并返回一个hal的代理接口ICameraDeviceSession
404      ret = interface->open(callback, [&status, &session]
405              (Status s, const sp<device::V3_2::ICameraDeviceSession>& cameraSession) {
406                  status = s;
407                  if (status == Status::OK) {
408                      *session = cameraSession;
409                  }
410              });
411      if (!ret.isOk()) {
412          removeRef(DeviceMode::CAMERA, id);
413          ALOGE("%s: Transaction error opening a session for camera device %s: %s",
414                  __FUNCTION__, id.c_str(), ret.description().c_str());
415          return DEAD_OBJECT;
416      }
417      return mapToStatusT(status);
418  }

CameraProviderManager.openSession会先调用findDeviceInfoLocked获取DeviceInfo,再通过startDeviceInterface拿到设备接口,接着调用open打开HAL并返回一个ICameraDeviceSession用于和HAL Service进行通信。拿到session后接着从session中获取queue,并将session和queue构建成一个HalInterface,最后调用initializeCommonLocked完成初始化的工作。

1.3.2 initializeCommonLocked

/frameworks/av/services/camera/libcameraservice/device3/Camera3Device.cpp 
272  status_t Camera3Device::initializeCommonLocked() {
273       //1.开启StatusTracker线程
274      /** Start up status tracker thread */
275      mStatusTracker = new StatusTracker(this);
276      status_t res = mStatusTracker->run(String8::format("C3Dev-%s-Status", mId.string()).string());
277      if (res != OK) {
278          SET_ERR_L("Unable to start status tracking thread: %s (%d)",
279                  strerror(-res), res);
280          mInterface->close();
281          mStatusTracker.clear();
282          return res;
283      }
284  
285      /** Register in-flight map to the status tracker */
286      mInFlightStatusId = mStatusTracker->addComponent("InflightRequests");
287  
288      if (mUseHalBufManager) {
289          res = mRequestBufferSM.initialize(mStatusTracker);
290          if (res != OK) {
291              SET_ERR_L("Unable to start request buffer state machine: %s (%d)",
292                      strerror(-res), res);
293              mInterface->close();
294              mStatusTracker.clear();
295              return res;
296          }
297      }
298       //2.创建Camera3BufferManager
299      /** Create buffer manager */
300      mBufferManager = new Camera3BufferManager();
301  
302      Vector<int32_t> sessionParamKeys;
303      camera_metadata_entry_t sessionKeysEntry = mDeviceInfo.find(
304              ANDROID_REQUEST_AVAILABLE_SESSION_KEYS);
305      if (sessionKeysEntry.count > 0) {
306          sessionParamKeys.insertArrayAt(sessionKeysEntry.data.i32, 0, sessionKeysEntry.count);
307      }
308  
309      camera_metadata_entry_t availableTestPatternModes = mDeviceInfo.find(
310              ANDROID_SENSOR_AVAILABLE_TEST_PATTERN_MODES);
311      for (size_t i = 0; i < availableTestPatternModes.count; i++) {
312          if (availableTestPatternModes.data.i32[i] ==
313                  ANDROID_SENSOR_TEST_PATTERN_MODE_SOLID_COLOR) {
314              mSupportCameraMute = true;
315              mSupportTestPatternSolidColor = true;
316              break;
317          } else if (availableTestPatternModes.data.i32[i] ==
318                  ANDROID_SENSOR_TEST_PATTERN_MODE_BLACK) {
319              mSupportCameraMute = true;
320              mSupportTestPatternSolidColor = false;
321          }
322      }
323      //3.启动Request线程,循环等待app层request的到来
324      /** Start up request queue thread */
325      mRequestThread = new RequestThread(
326              this, mStatusTracker, mInterface, sessionParamKeys,
327              mUseHalBufManager, mSupportCameraMute);
328      res = mRequestThread->run(String8::format("C3Dev-%s-ReqQueue", mId.string()).string());
329      if (res != OK) {
330          SET_ERR_L("Unable to start request queue thread: %s (%d)",
331                  strerror(-res), res);
332          mInterface->close();
333          mRequestThread.clear();
334          return res;
335      }
336     
         ...
384  
385      return OK;
386  }

此时app层就和Camera硬件建立了联系,接着app获取到的device对象需要回到1.1小节中的openCamera中继续处理。

1.4 openCameraDeviceUserAsync

回到CameraManager中的openCameraDeviceUserAsync中,1.1小节中的OpenCamera主要是调用CameraService connectDevice获取ICameraDeviceUser的流程,接着会走到android.hardware.camera2.impl.CameraDeviceImpl的setRemoteDevice:

/frameworks/base/core/java/android/hardware/camera2/impl/CameraDeviceImpl.java
153      private final Runnable mCallOnOpened = new Runnable() {
154          @Override
155          public void run() {
156              StateCallbackKK sessionCallback = null;
157              synchronized(mInterfaceLock) {
158                  if (mRemoteDevice == null) return; // Camera already closed
159                  //mSessionStateCallback是拍照时创建session时传进来的callback
160                  sessionCallback = mSessionStateCallback;
161              }
162              if (sessionCallback != null) {
163                  sessionCallback.onOpened(CameraDeviceImpl.this);
164              }
                 //我们在openCamera时传入的callback是mDeviceCallback,最终返回的Camera就是CameraDeviceImpl
165              mDeviceCallback.onOpened(CameraDeviceImpl.this);
166          }
167      };
309      public void setRemoteDevice(ICameraDeviceUser remoteDevice) throws CameraAccessException {
310          synchronized(mInterfaceLock) {
311              // TODO: Move from decorator to direct binder-mediated exceptions
312              // If setRemoteFailure already called, do nothing
313              if (mInError) return;
314  
315              mRemoteDevice = new ICameraDeviceUserWrapper(remoteDevice);
316  
317              IBinder remoteDeviceBinder = remoteDevice.asBinder();
318              // For legacy camera device, remoteDevice is in the same process, and
319              // asBinder returns NULL.
320              if (remoteDeviceBinder != null) {
321                  try {
322                      remoteDeviceBinder.linkToDeath(this, /*flag*/ 0);
323                  } catch (RemoteException e) {
324                      CameraDeviceImpl.this.mDeviceExecutor.execute(mCallOnDisconnected);
325  
326                      throw new CameraAccessException(CameraAccessException.CAMERA_DISCONNECTED,
327                              "The camera device has encountered a serious error");
328                  }
329              }
330              //执行回调
331              mDeviceExecutor.execute(mCallOnOpened);
332              mDeviceExecutor.execute(mCallOnUnconfigured);
333          }
334      }

可以看到setRemoteDevice执行了mCallOnOpened并执行了onOpened回调。

二、startPreview

onOpened回调,最后会回调到CaptureModule的onCameraOpened,在onCameraOpened中会执行oneCamera的startPreview传入一个surface和CaptureReadyCallback开启预览,预览画面最终会被绘制到传入的surface上。详见**Camera模块(一)**的2.4小节。

这里的oneCamera是一个接口,具体传回来的是什么实例还得看Camera2OneCameraOpenerImpl在接收到CameraManager的onOpened回调后是如何初始化OneCamera实例的,如下:

/packages/apps/Camera2/src/com/android/camera/one/v2/Camera2OneCameraOpenerImpl.java
158                  @Override
159                  public void onOpened(CameraDevice device) {
160                      if (isFirstCallback) {
161                          isFirstCallback = false;
162                          try {
163                              CameraCharacteristics characteristics = mCameraManager
164                                      .getCameraCharacteristics(device.getId());
165                              // TODO: Set boolean based on whether HDR+ is
166                              // enabled.
167                              OneCamera oneCamera = OneCameraCreator.create(
168                                      device,
169                                      characteristics,
170                                      mFeatureConfig,
171                                      captureSetting,
172                                      mDisplayMetrics,
173                                      mContext,
174                                      mainThread,
175                                      imageRotationCalculator,
176                                      burstController,
177                                      soundPlayer, fatalErrorHandler);
178  
179                              if (oneCamera != null) {
                                     //这里的openCallback就是CaptureModule在调用 mOneCameraOpener.open传入的回调接口
180                                  openCallback.onCameraOpened(oneCamera);
181                              ...
214      }

这里的重点是OneCamera是如何实例化的:

2.1 OneCamera的实例化

/packages/apps/Camera2/src_pd/com/android/camera/one/v2/OneCameraCreator.java
48      public static OneCamera create(CameraDevice device,...) throws OneCameraAccessException {
60          // TODO: Might want to switch current camera to vendor HDR.
61  
62          CaptureSupportLevel captureSupportLevel = featureConfig
63                  .getCaptureSupportLevel(characteristics);
64          Log.i(TAG, "Camera support level: " + captureSupportLevel.name());
65  
66          OneCameraCharacteristics oneCharacteristics =
67                  new OneCameraCharacteristicsImpl(characteristics);
68  
69          PictureSizeCalculator pictureSizeCalculator =
70                  new PictureSizeCalculator(oneCharacteristics);
71          PictureSizeCalculator.Configuration configuration = null;
72  
73          OneCameraFactory cameraFactory = null;
74          ImageSaver.Builder imageSaverBuilder = null;
75          ImageBackend imageBackend = ProcessingServiceManager.instance().getImageBackend();
76  		//
77          // Depending on the support level of the camera, choose the right
78          // configuration.
79          switch (captureSupportLevel) {
80              case LIMITED_JPEG:
81              case LEGACY_JPEG:
82                  // LIMITED and LEGACY have different picture takers which will
83                  // be selected by the support level that is passes into
84                  // #createOneCamera below - otherwise they use the same OneCamera and image backend.
85                  cameraFactory = new SimpleOneCameraFactory(ImageFormat.JPEG,
86                          featureConfig.getMaxAllowedImageReaderCount(),
87                          imageRotationCalculator);
88                  configuration = pictureSizeCalculator.computeConfiguration(
89                          captureSetting.getCaptureSize(),
90                          ImageFormat.JPEG);
91                  imageSaverBuilder = new JpegImageBackendImageSaver(imageRotationCalculator,
92                          imageBackend, configuration.getPostCaptureCrop());
93                  break;
94              case LIMITED_YUV:
95                  // Same as above, but we're using YUV images.
96                  cameraFactory = new SimpleOneCameraFactory(ImageFormat.YUV_420_888,
97                          featureConfig.getMaxAllowedImageReaderCount(),
98                          imageRotationCalculator);
99                  configuration = pictureSizeCalculator.computeConfiguration(
100                          captureSetting.getCaptureSize(),
101                          ImageFormat.YUV_420_888);
102                  imageSaverBuilder = new YuvImageBackendImageSaver(imageRotationCalculator,
103                          imageBackend,
104                          configuration.getPostCaptureCrop());
105                  break;
106              case ZSL:
107                  // ZSL has its own OneCamera and produces YUV images.
108                  cameraFactory = new ZslOneCameraFactory(ImageFormat.YUV_420_888,
109                          featureConfig.getMaxAllowedImageReaderCount());
110                  configuration = pictureSizeCalculator.computeConfiguration(
111                          captureSetting.getCaptureSize(),
112                          ImageFormat.YUV_420_888);
113                  imageSaverBuilder = new YuvImageBackendImageSaver(imageRotationCalculator,
114                          imageBackend, configuration.getPostCaptureCrop());
115                  break;
116          }
117  
118          Log.i(TAG, "Picture Size Configuration: " + configuration);
119  
120          return cameraFactory.createOneCamera(new AndroidCameraDeviceProxy(device),
121                  new OneCameraCharacteristicsImpl(characteristics),
122                  captureSupportLevel,
123                  mainThread,
124                  configuration.getNativeOutputSize(),
125                  imageSaverBuilder,
126                  captureSetting.getFlashSetting(),
127                  captureSetting.getExposureSetting(),
128                  captureSetting.getHdrSceneSetting(),
129                  burstController,
130                  fatalErrorHandler);
131      }

这里会根据featureConfig的CaptureSupportLevel的类型创建不同的OneCameraFactory,接着调用createOneCamera去初始化一个OneCamera实例,具体的实例化过程经过了如下的过程:(这里以SimpleOneCameraFactory为例)

/packages/apps/Camera2/src/com/android/camera/one/v2/SimpleOneCameraFactory.java
99      @Override
100      public OneCamera createOneCamera(final CameraDeviceProxy device,
101              final OneCameraCharacteristics characteristics,
102              final OneCameraFeatureConfig.CaptureSupportLevel supportLevel,
103              final MainThread mainExecutor,
104              final Size pictureSize,
105              final ImageSaver.Builder imageSaverBuilder,
106              final Observable<OneCamera.PhotoCaptureParameters.Flash> flashSetting,
107              final Observable<Integer> exposureSetting,
108              final Observable<Boolean> hdrSceneSetting,
109              final BurstFacade burstFacade,
110              final FatalErrorHandler fatalErrorHandler) {
111          final Lifetime lifetime = new Lifetime();
112  
113          final ImageReaderProxy imageReader = new CloseWhenDoneImageReader(new LoggingImageReader(
114                  AndroidImageReaderProxy.newInstance(
115                          pictureSize.getWidth(), pictureSize.getHeight(),
116                          mImageFormat, mMaxImageCount),
117                  Loggers.tagFactory()));
118  
119          lifetime.add(imageReader);
120          lifetime.add(device);
121  
122          List<Surface> outputSurfaces = new ArrayList<>();
123          outputSurfaces.add(imageReader.getSurface());
        
    		...
252          return new InitializedOneCameraFactory(lifetime, cameraStarter, device, outputSurfaces,
253                  mainExecutor, new HandlerFactory(), maxZoom, supportedPreviewSizes,
254                  characteristics.getLensFocusRange(), direction)
255                  .provideOneCamera();
				}

createOneCamera返回的是InitializedOneCameraFactory提供的oneCamera。

/packages/apps/Camera2/src/com/android/camera/one/v2/initialization/InitializedOneCameraFactory.java 
71      public InitializedOneCameraFactory(
72              final Lifetime lifetime, final CameraStarter cameraStarter, CameraDeviceProxy device,
73              List<Surface> outputSurfaces, MainThread mainThreadExecutor,
74              HandlerFactory handlerFactory, float maxZoom, List<Size> supportedPreviewSizes,
75              LinearScale lensRange, OneCamera.Facing direction) {
76          // Assembles and returns a OneCamera based on the CameraStarter.
77  
             ....
119  
120          // The following handles the initialization sequence in which we receive
121          // various dependencies at different times in the following sequence:
122          // 1. CameraDevice
123          // 2. The Surface on which to render the preview stream
124          // 3. The CaptureSession
125          // When all three of these are available, the {@link #CameraFactory} can
126          // be used to assemble the actual camera functionality (e.g. to take
127          // pictures, and run AF scans).
128  
129          // Note that these must be created in reverse-order to when they are run
130          // because each stage depends on the previous one.
131          final CaptureSessionCreator captureSessionCreator = new CaptureSessionCreator(device,
132                  cameraHandler);
133  
134          PreviewStarter mPreviewStarter = new PreviewStarter(outputSurfaces,
135                  captureSessionCreator,
136                  new PreviewStarter.CameraCaptureSessionCreatedListener() {
137                      @Override
138                      public void onCameraCaptureSessionCreated(CameraCaptureSessionProxy session,
139                              Surface previewSurface) {
140                          CameraStarter.CameraControls controls = cameraStarter.startCamera(
141                                  new Lifetime(lifetime),
142                                  session, previewSurface,
143                                  zoomState, metadataCallback, readyState);
144                          mPictureTaker.set(controls.getPictureTaker());
145                          mManualAutoFocus.set(controls.getManualAutoFocus());
146                      }
147                  });
148  
149          PreviewSizeSelector previewSizeSelector =
150                new Camera2PreviewSizeSelector(supportedPreviewSizes);
151  
152          mOneCamera = new GenericOneCameraImpl(lifetime, pictureTaker, manualAutoFocus, lensRange,
153                  mainThreadExecutor, afStateListenable, focusStateListenable, readyStateListenable,
154                  maxZoom, zoomState, direction, previewSizeSelector, mPreviewStarter);
155      } 
157      public OneCamera provideOneCamera() {
158          return mOneCamera;
159      }

所以我们最后获取到的oneCamera就是GenericOneCameraImpl实例对象。

2.2 oneCamera.startPreview

在调用startPreview时最终也会调用到GenericOneCameraImpl的startPreview

/packages/apps/Camera2/src/com/android/camera/one/v2/initialization/GenericOneCameraImpl.java
150      @Override
151      public void startPreview(Surface surface, final CaptureReadyCallback listener) {
             //这里的mPreviewStarter就是在InitializedOneCameraFactory初始化时创建的。
152          ListenableFuture<Void> result = mPreviewStarter.startPreview(surface);
153          Futures.addCallback(result, new FutureCallback<Void>() {
154              @Override
155              public void onSuccess(@Nonnull Void aVoid) {
156                  listener.onReadyForCapture();
157              }
158  
159              @Override
160              public void onFailure(@Nonnull Throwable throwable) {
161                  listener.onSetupFailed();
162              }
163          }, MoreExecutors.directExecutor());
164      }

GenericOneCameraImpl的startPreview调用的是PreviewStarter的startPreview并把surface传入用来预览。

/packages/apps/Camera2/src/com/android/camera/one/v2/initialization/PreviewStarter.java
66      public ListenableFuture<Void> startPreview(final Surface surface) {
67          // When we have the preview surface, start the capture session.
68          List<Surface> surfaceList = new ArrayList<>();
69  
70          // Workaround of the face detection failure on Nexus 5 and L. (b/21039466)
71          // Need to create a capture session with the single preview stream first
72          // to lock it as the first stream. Then resend the another session with preview
73          // and JPEG stream.
            //1.由于我们目标设备是Android12 走else分支
74          if (ApiHelper.isLorLMr1() && ApiHelper.IS_NEXUS_5) {
75              surfaceList.add(surface);
76              mCaptureSessionCreator.createCaptureSession(surfaceList);
77              surfaceList.addAll(mOutputSurfaces);
78          } else {
                //这里的mOutputSurfaces里面只保存了imageReader的surface用于拍照
79              surfaceList.addAll(mOutputSurfaces);
80              surfaceList.add(surface);
81          }
82          //mCaptureSessionCreator也是在InitializedOneCameraFactory中初始化并传给PreviewStarter的
83          final ListenableFuture<CameraCaptureSessionProxy> sessionFuture =
84                  mCaptureSessionCreator.createCaptureSession(surfaceList);
85  
86          return Futures.transformAsync(sessionFuture,
87                  new AsyncFunction<CameraCaptureSessionProxy, Void>() {
88                      @Override
89                      public ListenableFuture<Void> apply(
90                              CameraCaptureSessionProxy captureSession) throws Exception {
91                          mSessionListener.onCameraCaptureSessionCreated(captureSession, surface);
92                          return Futures.immediateFuture(null);
93                      }
94                  }, MoreExecutors.directExecutor());
95      }

最终startPreview调用的是CaptureSessionCreator.createCaptureSession来开启预览,传入的surface被包含在了surfaceList中。

2.3 createCaptureSession

/packages/apps/Camera2/src/com/android/camera/one/v2/initialization/CaptureSessionCreator.java
54      public ListenableFuture<CameraCaptureSessionProxy> createCaptureSession(
55              List<Surface> surfaces) {
56          final SettableFuture<CameraCaptureSessionProxy> sessionFuture = SettableFuture.create();
57          try {
                //这里的mDevice是2.1小节中调用createOneCamera时传入的AndroidCameraDeviceProxy,最终调用到了CameraDevice的createCaptureSession
58              mDevice.createCaptureSession(surfaces, new CameraCaptureSessionProxy.StateCallback() {
59                  @Override
60                  public void onActive(CameraCaptureSessionProxy session) {
61                      // Ignore.
62                  }
63  
64                  @Override
65                  public void onConfigureFailed(CameraCaptureSessionProxy session) {
66                      sessionFuture.cancel(true);
67                      session.close();
68                  }
69  
70                  @Override
71                  public void onConfigured(CameraCaptureSessionProxy session) {
72                      boolean valueSet = sessionFuture.set(session);
73                      if (!valueSet) {
74                          // If the future was already marked with cancellation or
75                          // an exception, close the session.
76                          session.close();
77                      }
78                  }
79  
80                  @Override
81                  public void onReady(CameraCaptureSessionProxy session) {
82                      // Ignore.
83                  }
84  
85                  @Override
86                  public void onClosed(CameraCaptureSessionProxy session) {
87                      sessionFuture.cancel(true);
88                      session.close();
89                  }
90              }, mCameraHandler);
91          } catch (CameraAccessException e) {
92              sessionFuture.setException(e);
93          }
94          return sessionFuture;
95      }

这里的mDevice是2.1小节中调用createOneCamera时传入的AndroidCameraDeviceProxy,最终调用到了CameraDevice的createCaptureSession。而在1.3小节中我们知道,返回给app的CameraDevice就是CameraDeviceImpl,所以最后会调用到CameraDeviceImpl的createCaptureSession。

/frameworks/base/core/java/android/hardware/camera2/impl/CameraDeviceImpl.java
545      public void createCaptureSession(List<Surface> outputs,
546              CameraCaptureSession.StateCallback callback, Handler handler)
547              throws CameraAccessException {
548          List<OutputConfiguration> outConfigurations = new ArrayList<>(outputs.size());
549          for (Surface surface : outputs) {
                 //将传入的surface都包装成OutputConfiguration
550              outConfigurations.add(new OutputConfiguration(surface));
551          }
552          createCaptureSessionInternal(null, outConfigurations, callback,
553                  checkAndWrapHandler(handler), /*operatingMode*/ICameraDeviceUser.NORMAL_MODE,
554                  /*sessionParams*/ null);
555      }
674      private void createCaptureSessionInternal(InputConfiguration inputConfig,
675              List<OutputConfiguration> outputConfigurations,
676              CameraCaptureSession.StateCallback callback, Executor executor,
677              int operatingMode, CaptureRequest sessionParams) throws CameraAccessException {
678          long createSessionStartTime = SystemClock.uptimeMillis();
679          synchronized(mInterfaceLock) {
680              if (DEBUG) {
681                  Log.d(TAG, "createCaptureSessionInternal");
682              }
683  
684              checkIfCameraClosedOrInError();
685              //因为传进来的operatingMode为ICameraDeviceUser.NORMAL_MODE,所以isConstrainedHighSpeed为false
686              boolean isConstrainedHighSpeed =
687                      (operatingMode == ICameraDeviceUser.CONSTRAINED_HIGH_SPEED_MODE);
                 //这里由于inputConfig传进来的为null所以不会抛出异常 
688              if (isConstrainedHighSpeed && inputConfig != null) {
689                  throw new IllegalArgumentException("Constrained high speed session doesn't support"
690                          + " input configuration yet.");
691              }
692  
                 ....
708  
709              // TODO: dont block for this
710              boolean configureSuccess = true;
711              CameraAccessException pendingException = null;
712              Surface input = null;
713              try {
714                  // configure streams and then block until IDLE 
                     //configureStreamsChecked会一直block直到完成 其中outputConfigurations中包含我们的surface
715                  configureSuccess = configureStreamsChecked(inputConfig, outputConfigurations,
716                          operatingMode, sessionParams, createSessionStartTime);
717                  if (configureSuccess == true && inputConfig != null) {
718                      input = mRemoteDevice.getInputSurface();
719                  }
720              } catch (CameraAccessException e) {
721                  configureSuccess = false;
722                  pendingException = e;
723                  input = null;
724                  if (DEBUG) {
725                      Log.v(TAG, "createCaptureSession - failed with exception ", e);
726                  }
727              }
728              //如果configureStreamsChecked成功,那么将会回调onConfigured 否则会回调onConfigureFailed
729              // Fire onConfigured if configureOutputs succeeded, fire onConfigureFailed otherwise.
730              CameraCaptureSessionCore newSession = null;
    			//isConstrainedHighSpeed为false所以走else
731              if (isConstrainedHighSpeed) {
732                 ...
743              } else {
744                  newSession = new CameraCaptureSessionImpl(mNextSessionId++, input,
745                          callback, executor, this, mDeviceExecutor, configureSuccess);
746              }
747  
748              // TODO: wait until current session closes, then create the new session
749              mCurrentSession = newSession;
750  
751              if (pendingException != null) {
752                  throw pendingException;
753              }
754  
755              mSessionStateCallback = mCurrentSession.getDeviceStateCallback();
756          }
757      }

在createCaptureSessionInternal中主要就是将Surface包装成OutputConfiguration,然后调用configureStreamsChecked将surface传给CameraServer。

这一步会阻塞直到调用完成,如果成功,那么会回调到onConfigured否则会回调到onConfigureFailed。同时会新建一个CameraCaptureSessionImpl保存在mCurrentSession中

2.4 configureStreamsChecked

/frameworks/base/core/java/android/hardware/camera2/impl/CameraDeviceImpl.java
424      public boolean configureStreamsChecked(InputConfiguration inputConfig,
425              List<OutputConfiguration> outputs, int operatingMode, CaptureRequest sessionParams,
426              long createSessionStartTime)
427                      throws CameraAccessException {
428          // Treat a null input the same an empty list
             //这里outputs包含我们传入的surface,operatingMode为ICameraDeviceUser.NORMAL_MODE ,inputConfig还是null 
429          if (outputs == null) {
430              outputs = new ArrayList<OutputConfiguration>();
431          }
432          if (outputs.size() == 0 && inputConfig != null) {
433              throw new IllegalArgumentException("cannot configure an input stream without " +
434                      "any output streams");
435          }
436  
437          checkInputConfiguration(inputConfig);
438  
439          boolean success = false;
440  
441          synchronized(mInterfaceLock) {
442              checkIfCameraClosedOrInError();
443              // Streams to create
444              HashSet<OutputConfiguration> addSet = new HashSet<OutputConfiguration>(outputs);
445              // Streams to delete 一些延期的configure和一些当前输入不包含在内的configure 
446              List<Integer> deleteList = new ArrayList<Integer>();
447  
448              // Determine which streams need to be created, which to be deleted
                 //mConfiguredOutputs中保存的是之前已经创建过的stream
449              for (int i = 0; i < mConfiguredOutputs.size(); ++i) {                    
450                  int streamId = mConfiguredOutputs.keyAt(i);
451                  OutputConfiguration outConfig = mConfiguredOutputs.valueAt(i);
452                  //如果之前创建过的stream不在当前传进来的surface中那么就需要删除这个stream否则就不用再创建了
453                  if (!outputs.contains(outConfig) || outConfig.isDeferredConfiguration()) {
454                      // Always delete the deferred output configuration when the session
455                      // is created, as the deferred output configuration doesn't have unique surface
456                      // related identifies.
457                      deleteList.add(streamId);
458                  } else {
459                      addSet.remove(outConfig);  // Don't create a stream previously created
460                  }
461              }
462  
463              mDeviceExecutor.execute(mCallOnBusy);
464              stopRepeating();
465  
466              try {
467                  waitUntilIdle();
468                  //1.开启真正的configureStream流程 CameraDeviceClient若未实现beginConfigure则直接返回OK
469                  mRemoteDevice.beginConfigure();
470                   //如果输入的配置改变了,那么需要重新配置input stream
471                  // reconfigure the input stream if the input configuration is different.
472                  InputConfiguration currentInputConfig = mConfiguredInput.getValue();
473                  if (inputConfig != currentInputConfig &&
474                          (inputConfig == null || !inputConfig.equals(currentInputConfig))) {
475                      if (currentInputConfig != null) {
476                          mRemoteDevice.deleteStream(mConfiguredInput.getKey());
477                          mConfiguredInput = new SimpleEntry<Integer, InputConfiguration>(
478                                  REQUEST_ID_NONE, null);
479                      }
480                      if (inputConfig != null) {
481                          int streamId = mRemoteDevice.createInputStream(inputConfig.getWidth(),
482                                  inputConfig.getHeight(), inputConfig.getFormat(),
483                                  inputConfig.isMultiResolution());
484                          mConfiguredInput = new SimpleEntry<Integer, InputConfiguration>(
485                                  streamId, inputConfig);
486                      }
487                  }
488                  //删除一些需要删除的stream 可以释放hw的资源
489                  // Delete all streams first (to free up HW resources)
490                  for (Integer streamId : deleteList) {
491                      mRemoteDevice.deleteStream(streamId);
492                      mConfiguredOutputs.delete(streamId);
493                  }
494  									
495                  // Add all new streams
496                  for (OutputConfiguration outConfig : outputs) {
497                      if (addSet.contains(outConfig)) {
                             //2.创建我们传入的surface对应的stream
498                          int streamId = mRemoteDevice.createStream(outConfig);
499                          mConfiguredOutputs.put(streamId, outConfig);
500                      }
501                  }
502  
503                  int offlineStreamIds[];
504                  if (sessionParams != null) {
                           //3.结束configure
505                      offlineStreamIds = mRemoteDevice.endConfigure(operatingMode,
506                              sessionParams.getNativeCopy(), createSessionStartTime);
507                  } else {
508                      offlineStreamIds = mRemoteDevice.endConfigure(operatingMode, null,
509                              createSessionStartTime);
510                  }
511  
512                  mOfflineSupport.clear();
513                  if ((offlineStreamIds != null) && (offlineStreamIds.length > 0)) {
514                      for (int offlineStreamId : offlineStreamIds) {
515                          mOfflineSupport.add(offlineStreamId);
516                      }
517                  }
518  
519                  success = true;
520              } catch (IllegalArgumentException e) {
521                  // OK. camera service can reject stream config if it's not supported by HAL
522                  // This is only the result of a programmer misusing the camera2 api.
523                  Log.w(TAG, "Stream configuration failed due to: " + e.getMessage());
524                  return false;
525              } catch (CameraAccessException e) {
526                  if (e.getReason() == CameraAccessException.CAMERA_IN_USE) {
527                      throw new IllegalStateException("The camera is currently busy." +
528                              " You must wait until the previous operation completes.", e);
529                  }
530                  throw e;
531              } finally {
532                  if (success && outputs.size() > 0) {
533                      mDeviceExecutor.execute(mCallOnIdle);
534                  } else {
535                      // Always return to the 'unconfigured' state if we didn't hit a fatal error
536                      mDeviceExecutor.execute(mCallOnUnconfigured);
537                  }
538              }
539          }
540  
541          return success;
542      }

configureStreamsChecked主要是配置stream,决定哪些stream需要创建,哪些需要删除,创建和删除都是通过调用mRemoteDevice的API来完成的,outputs中包含以我们传入的surface创建的OutputConfiguration。接着我们需要知道mRemoteDevice是如何产生的。在2.3小节,我们知道mRemoteDevice是由传入的ICameraDeviceUser创建出来的ICameraDeviceUserWrapper,而ICameraDeviceUser则是Camera App进程和cameraserver进程进行通信的接口。这里我们主要关注创建stream的流程。ICameraDeviceUserWrapper的createStream是通过传入的ICameraDeviceUser调用到cameraserver的createStream。那么最终会调用到cameraserver的哪个createStream呢?我们在2.1小节知道openCamera在调用cameraservice的connectDevice时返回的ICameraDeviceUser就是我们要找的mRemoteDevice,最终返回的是CameraDeviceClient,所以createStream最终调用到了CameraDeviceClient的createStream。

2.5 CameraDeviceClient.createStream

从这边开始进入camera server进程

/frameworks/av/services/camera/libcameraservice/api2/CameraDeviceClient.cpp
778  binder::Status CameraDeviceClient::createStream(
779          const hardware::camera2::params::OutputConfiguration &outputConfiguration,
780          /*out*/
781          int32_t* newStreamId) {
782      ATRACE_CALL();
783  
784      binder::Status res;
785      if (!(res = checkPidStatus(__FUNCTION__)).isOk()) return res;
786  
787      Mutex::Autolock icl(mBinderSerializationLock);
788      //从outputConfiguration其实就是surface中取出GraphicBufferProducer
789      const std::vector<sp<IGraphicBufferProducer>>& bufferProducers =
790              outputConfiguration.getGraphicBufferProducers();
         //numBufferProducers为1因为只有一个surface
791      size_t numBufferProducers = bufferProducers.size();
         //deferredConsumer 为false
792      bool deferredConsumer = outputConfiguration.isDeferred();
793      bool isShared = outputConfiguration.isShared();
794      String8 physicalCameraId = String8(outputConfiguration.getPhysicalCameraId());
795      bool deferredConsumerOnly = deferredConsumer && numBufferProducers == 0;
796      bool isMultiResolution = outputConfiguration.isMultiResolution();
797      //检查Surface的类型 surface的type是SURFACE_TYPE_SURFACE_TEXTURE
798      res = SessionConfigurationUtils::checkSurfaceType(numBufferProducers, deferredConsumer,
799              outputConfiguration.getSurfaceType());
800      if (!res.isOk()) {
801          return res;
802      }
803  
804      if (!mDevice.get()) {
805          return STATUS_ERROR(CameraService::ERROR_DISCONNECTED, "Camera device no longer alive");
806      }
807      res = SessionConfigurationUtils::checkPhysicalCameraId(mPhysicalCameraIds,
808              physicalCameraId, mCameraIdStr);
809      if (!res.isOk()) {
810          return res;
811      }
812  
813      std::vector<sp<Surface>> surfaces;
814      std::vector<sp<IBinder>> binders;
815      status_t err;
816  
817      // Create stream for deferred surface case.
818      if (deferredConsumerOnly) {
819          return createDeferredSurfaceStreamLocked(outputConfiguration, isShared, newStreamId);
820      }
821  
822      OutputStreamInfo streamInfo;
823      bool isStreamInfoValid = false;
824      const std::vector<int32_t> &sensorPixelModesUsed =
825              outputConfiguration.getSensorPixelModesUsed();
826      for (auto& bufferProducer : bufferProducers) {
827          // Don't create multiple streams for the same target surface
828          sp<IBinder> binder = IInterface::asBinder(bufferProducer);
829          ssize_t index = mStreamMap.indexOfKey(binder);
830          if (index != NAME_NOT_FOUND) {
831              String8 msg = String8::format("Camera %s: Surface already has a stream created for it "
832                      "(ID %zd)", mCameraIdStr.string(), index);
833              ALOGW("%s: %s", __FUNCTION__, msg.string());
834              return STATUS_ERROR(CameraService::ERROR_ALREADY_EXISTS, msg.string());
835          }
836          //从outputConfiguration创建surface
837          sp<Surface> surface;
838          res = SessionConfigurationUtils::createSurfaceFromGbp(streamInfo,
839                  isStreamInfoValid, surface, bufferProducer, mCameraIdStr,
840                  mDevice->infoPhysical(physicalCameraId), sensorPixelModesUsed);
841  
842          if (!res.isOk())
843              return res;
844  
845          if (!isStreamInfoValid) {
846              isStreamInfoValid = true;
847          }
848  
849          binders.push_back(IInterface::asBinder(bufferProducer));
850          surfaces.push_back(surface);
851      }
852  
853      // If mOverrideForPerfClass is true, do not fail createStream() for small
854      // JPEG sizes because existing createSurfaceFromGbp() logic will find the
855      // closest possible supported size.
856  
857      int streamId = camera3::CAMERA3_STREAM_ID_INVALID;
858      std::vector<int> surfaceIds;
859      bool isDepthCompositeStream =
860              camera3::DepthCompositeStream::isDepthCompositeStream(surfaces[0]);
861      bool isHeicCompisiteStream = camera3::HeicCompositeStream::isHeicCompositeStream(surfaces[0]);
862      if (isDepthCompositeStream || isHeicCompisiteStream) {
    		 //创建特殊的stream 这里讨论一般的情况
863           ...
879      } else {
             //创建一般的stream 这里的surfaces保存着由传入surface的GraphicBufferProducer重新创建出的Surface
880          err = mDevice->createStream(surfaces, deferredConsumer, streamInfo.width,
881                  streamInfo.height, streamInfo.format, streamInfo.dataSpace,
882                  static_cast<camera_stream_rotation_t>(outputConfiguration.getRotation()),
883                  &streamId, physicalCameraId, streamInfo.sensorPixelModesUsed, &surfaceIds,
884                  outputConfiguration.getSurfaceSetID(), isShared, isMultiResolution);
885      }
886      ...
924  
925      return res;
926  }

接着会调用mDevice->createStream来创建stream,这里的mDevice在4.2小节提到过,CameraDeviceClient继承自Camera2ClientBase,在CameraDeviceClient新建时Camera2ClientBase的构造函数也会新建一个Camera3Device,而mDevice就是Camera3Device对象。所以接着会调用到Camera3Device的createStream。

/frameworks/av/services/camera/libcameraservice/device3/Camera3Device.cpp
1388  status_t Camera3Device::createStream(const std::vector<sp<Surface>>& consumers,
1389          bool hasDeferredConsumer, uint32_t width, uint32_t height, int format,
1390          android_dataspace dataSpace, camera_stream_rotation_t rotation, int *id,
1391          const String8& physicalCameraId, const std::unordered_set<int32_t> &sensorPixelModesUsed,
1392          std::vector<int> *surfaceIds, int streamSetId, bool isShared, bool isMultiResolution,
1393          uint64_t consumerUsage) {
1394      ATRACE_CALL();
1395  
1396      Mutex::Autolock il(mInterfaceLock);
1397      nsecs_t maxExpectedDuration = getExpectedInFlightDuration();
1398      Mutex::Autolock l(mLock);
1399      ALOGV("Camera %s: Creating new stream %d: %d x %d, format %d, dataspace %d rotation %d"
1400              " consumer usage %" PRIu64 ", isShared %d, physicalCameraId %s, isMultiResolution %d",
1401              mId.string(), mNextStreamId, width, height, format, dataSpace, rotation,
1402              consumerUsage, isShared, physicalCameraId.string(), isMultiResolution);
1403  
1404      status_t res;
1405      bool wasActive = false;
1406      //检查当前Camera的状态 可能直接返回 有效的状态是STATUS_UNCONFIGURED,STATUS_CONFIGURED,STATUS_ACTIVE
1407      switch (mStatus) {
1408          case STATUS_ERROR:
1409              CLOGE("Device has encountered a serious error");
1410              return INVALID_OPERATION;
1411          case STATUS_UNINITIALIZED:
1412              CLOGE("Device not initialized");
1413              return INVALID_OPERATION;
1414          case STATUS_UNCONFIGURED:
1415          case STATUS_CONFIGURED:
1416              // OK
1417              break;
1418          case STATUS_ACTIVE:
1419              ALOGV("%s: Stopping activity to reconfigure streams", __FUNCTION__);
1420              res = internalPauseAndWaitLocked(maxExpectedDuration);
1421              if (res != OK) {
1422                  SET_ERR_L("Can't pause captures to reconfigure streams!");
1423                  return res;
1424              }
1425              wasActive = true;
1426              break;
1427          default:
1428              SET_ERR_L("Unexpected status: %d", mStatus);
1429              return INVALID_OPERATION;
1430      }
1431      assert(mStatus != STATUS_ACTIVE);
1432  
1433      sp<Camera3OutputStream> newStream;
1434  
1435      if (consumers.size() == 0 && !hasDeferredConsumer) {
1436          ALOGE("%s: Number of consumers cannot be smaller than 1", __FUNCTION__);
1437          return BAD_VALUE;
1438      }
1439  
1440      if (hasDeferredConsumer && format != HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED) {
1441          ALOGE("Deferred consumer stream creation only support IMPLEMENTATION_DEFINED format");
1442          return BAD_VALUE;
1443      }
1444  
1445      if (isRawFormat(format) && sensorPixelModesUsed.size() > 1) {
1446          // We can't use one stream with a raw format in both sensor pixel modes since its going to
1447          // be found in only one sensor pixel mode.
1448          ALOGE("%s: RAW opaque stream cannot be used with > 1 sensor pixel modes", __FUNCTION__);
1449          return BAD_VALUE;
1450      }
         //分条件创建Camera3OutputStream
1451      if (format == HAL_PIXEL_FORMAT_BLOB) {
1452          ssize_t blobBufferSize;
1453          if (dataSpace == HAL_DATASPACE_DEPTH) {
1454              blobBufferSize = getPointCloudBufferSize();
1455              if (blobBufferSize <= 0) {
1456                  SET_ERR_L("Invalid point cloud buffer size %zd", blobBufferSize);
1457                  return BAD_VALUE;
1458              }
1459          } else if (dataSpace == static_cast<android_dataspace>(HAL_DATASPACE_JPEG_APP_SEGMENTS)) {
1460              blobBufferSize = width * height;
1461          } else {
1462              blobBufferSize = getJpegBufferSize(width, height);
1463              if (blobBufferSize <= 0) {
1464                  SET_ERR_L("Invalid jpeg buffer size %zd", blobBufferSize);
1465                  return BAD_VALUE;
1466              }
1467          }
1468          newStream = new Camera3OutputStream(mNextStreamId, consumers[0],
1469                  width, height, blobBufferSize, format, dataSpace, rotation,
1470                  mTimestampOffset, physicalCameraId, sensorPixelModesUsed, streamSetId,
1471                  isMultiResolution);
1472      } else if (format == HAL_PIXEL_FORMAT_RAW_OPAQUE) {
1473          bool maxResolution =
1474                  sensorPixelModesUsed.find(ANDROID_SENSOR_PIXEL_MODE_MAXIMUM_RESOLUTION) !=
1475                          sensorPixelModesUsed.end();
1476          ssize_t rawOpaqueBufferSize = getRawOpaqueBufferSize(width, height, maxResolution);
1477          if (rawOpaqueBufferSize <= 0) {
1478              SET_ERR_L("Invalid RAW opaque buffer size %zd", rawOpaqueBufferSize);
1479              return BAD_VALUE;
1480          }
1481          newStream = new Camera3OutputStream(mNextStreamId, consumers[0],
1482                  width, height, rawOpaqueBufferSize, format, dataSpace, rotation,
1483                  mTimestampOffset, physicalCameraId, sensorPixelModesUsed, streamSetId,
1484                  isMultiResolution);
1485      } else if (isShared) {
1486          newStream = new Camera3SharedOutputStream(mNextStreamId, consumers,
1487                  width, height, format, consumerUsage, dataSpace, rotation,
1488                  mTimestampOffset, physicalCameraId, sensorPixelModesUsed, streamSetId,
1489                  mUseHalBufManager);
1490      } else if (consumers.size() == 0 && hasDeferredConsumer) {
1491          newStream = new Camera3OutputStream(mNextStreamId,
1492                  width, height, format, consumerUsage, dataSpace, rotation,
1493                  mTimestampOffset, physicalCameraId, sensorPixelModesUsed, streamSetId,
1494                  isMultiResolution);
1495      } else {
1496          newStream = new Camera3OutputStream(mNextStreamId, consumers[0],
1497                  width, height, format, dataSpace, rotation,
1498                  mTimestampOffset, physicalCameraId, sensorPixelModesUsed, streamSetId,
1499                  isMultiResolution);
1500      }
1501  
1502      size_t consumerCount = consumers.size();
1503      for (size_t i = 0; i < consumerCount; i++) {
1504          int id = newStream->getSurfaceId(consumers[i]);
1505          if (id < 0) {
1506              SET_ERR_L("Invalid surface id");
1507              return BAD_VALUE;
1508          }
1509          if (surfaceIds != nullptr) {
1510              surfaceIds->push_back(id);
1511          }
1512      }
1513  
1514      newStream->setStatusTracker(mStatusTracker);
1515  
1516      newStream->setBufferManager(mBufferManager);
1517  
1518      newStream->setImageDumpMask(mImageDumpMask);
1519      //将新创建的Stream加入到mOutputStreams中
1520      res = mOutputStreams.add(mNextStreamId, newStream);
1521      if (res < 0) {
1522          SET_ERR_L("Can't add new stream to set: %s (%d)", strerror(-res), res);
1523          return res;
1524      }
1525  
1526      mSessionStatsBuilder.addStream(mNextStreamId);
1527      
1528      *id = mNextStreamId++;
1529      mNeedConfig = true;
1530  
1531      // Continue captures if active at start
1532      if (wasActive) {
1533          ALOGV("%s: Restarting activity to reconfigure streams", __FUNCTION__);
1534          // Reuse current operating mode and session parameters for new stream config
1535          res = configureStreamsLocked(mOperatingMode, mSessionParams);
1536          if (res != OK) {
1537              CLOGE("Can't reconfigure device for new stream %d: %s (%d)",
1538                      mNextStreamId, strerror(-res), res);
1539              return res;
1540          }
1541          internalResumeLocked();
1542      }
1543      ALOGV("Camera %s: Created new stream", mId.string());
1544      return OK;
1545  }

到这里创建stream的流程就完成了,最终是创建了一个Camera3OutputStream,接下来就是设置stream的流程了,设置stream的流程从mRemoteDevice.endConfigure开始,而mRemoteDevice则是CameraDeviceClient的代理对象,最终通过binder跨进程调用到camera server进程中。

2.6 CameraDeviceClient.endConfigure

/frameworks/av/services/camera/libcameraservice/api2/CameraDeviceClient.cpp
520  binder::Status CameraDeviceClient::endConfigure(int operatingMode,
521          const hardware::camera2::impl::CameraMetadataNative& sessionParams, int64_t startTimeMs,
522          std::vector<int>* offlineStreamIds /*out*/) {
523      ATRACE_CALL();
524      ALOGV("%s: ending configure (%d input stream, %zu output surfaces)",
525              __FUNCTION__, mInputStream.configured ? 1 : 0,
526              mStreamMap.size());
		 ...
         //Hand the session parameters and operating mode down to Camera3Device::configureStreams
549      status_t err = mDevice->configureStreams(sessionParams, operatingMode);
         ...
603  
604      return res;
605  }

接着调用Camera3Device.configureStreams:

/frameworks/av/services/camera/libcameraservice/device3/Camera3Device.cpp
1672  status_t Camera3Device::configureStreams(const CameraMetadata& sessionParams, int operatingMode) {
1673      ATRACE_CALL();
1674      ALOGV("%s: E", __FUNCTION__);
1675  
1676      Mutex::Autolock il(mInterfaceLock);
1677      Mutex::Autolock l(mLock);
1678  
1679      // In case the client doesn't include any session parameter, try a
1680      // speculative configuration using the values from the last cached
1681      // default request.
          //If the caller supplied no session parameters, reuse the ones cached from the last default request template
1682      if (sessionParams.isEmpty() &&
1683              ((mLastTemplateId > 0) && (mLastTemplateId < CAMERA_TEMPLATE_COUNT)) &&
1684              (!mRequestTemplateCache[mLastTemplateId].isEmpty())) {
1685          ALOGV("%s: Speculative session param configuration with template id: %d", __func__,
1686                  mLastTemplateId);
1687          return filterParamsAndConfigureLocked(mRequestTemplateCache[mLastTemplateId],
1688                  operatingMode);
1689      }
1690  
1691      return filterParamsAndConfigureLocked(sessionParams, operatingMode);
1692  }
      
1694  status_t Camera3Device::filterParamsAndConfigureLocked(const CameraMetadata& sessionParams,
1695          int operatingMode) {
1696      //Keep only the keys advertised in ANDROID_REQUEST_AVAILABLE_SESSION_KEYS from the incoming session parameters
1697      const CameraMetadata params(sessionParams);
1698      camera_metadata_entry_t availableSessionKeys = mDeviceInfo.find(
1699              ANDROID_REQUEST_AVAILABLE_SESSION_KEYS);
1700      CameraMetadata filteredParams(availableSessionKeys.count);
1701      camera_metadata_t *meta = const_cast<camera_metadata_t *>(
1702              filteredParams.getAndLock());
1703      set_camera_metadata_vendor_id(meta, mVendorTagId);
1704      filteredParams.unlock(meta);
1705      if (availableSessionKeys.count > 0) {
1706          for (size_t i = 0; i < availableSessionKeys.count; i++) {
1707              camera_metadata_ro_entry entry = params.find(
1708                      availableSessionKeys.data.i32[i]);
1709              if (entry.count > 0) {
1710                  filteredParams.update(entry);
1711              }
1712          }
1713      }
1714  
1715      return configureStreamsLocked(operatingMode, filteredParams);
1716  }

2590  status_t Camera3Device::configureStreamsLocked(int operatingMode,
2591          const CameraMetadata& sessionParams, bool notifyRequestThread) {
          ...
2650  
2651      // Start configuring the streams
2652      ALOGV("%s: Camera %s: Starting stream configuration", __FUNCTION__, mId.string());
2653  
2654      mPreparerThread->pause();
2655  	  //Create the camera_stream_configuration that will be handed to the HAL
2656      camera_stream_configuration config;
2657      config.operation_mode = mOperatingMode;
2658      config.num_streams = (mInputStream != NULL) + mOutputStreams.size();
2659      config.input_is_multi_resolution = false;
2660  
2661      Vector<camera3::camera_stream_t*> streams;
2662      streams.setCapacity(config.num_streams);
2663      std::vector<uint32_t> bufferSizes(config.num_streams, 0);
2664  
2665  
          ...
2678  
2679      mGroupIdPhysicalCameraMap.clear();
2680      for (size_t i = 0; i < mOutputStreams.size(); i++) {
2681  
2682          // Don't configure bidi streams twice, nor add them twice to the list
2683          if (mOutputStreams[i].get() ==
2684              static_cast<Camera3StreamInterface*>(mInputStream.get())) {
2685  
2686              config.num_streams--;
2687              continue;
2688          }
2689  
2690          camera3::camera_stream_t *outputStream;
              //For each stream created in section 2.5, call startConfiguration
2691          outputStream = mOutputStreams[i]->startConfiguration();
2692          if (outputStream == NULL) {
2693              CLOGE("Can't start output stream configuration");
2694              cancelStreamsConfigurationLocked();
2695              return INVALID_OPERATION;
2696          }
              //Collect the returned outputStream into the streams vector
2697          streams.add(outputStream);
2698  
2699          if (outputStream->format == HAL_PIXEL_FORMAT_BLOB) {
2700              size_t k = i + ((mInputStream != nullptr) ? 1 : 0); // Input stream if present should
2701                                                                  // always occupy the initial entry.
2702              if (outputStream->data_space == HAL_DATASPACE_V0_JFIF) {
2703                  bufferSizes[k] = static_cast<uint32_t>(
2704                          getJpegBufferSize(outputStream->width, outputStream->height));
2705              } else if (outputStream->data_space ==
2706                      static_cast<android_dataspace>(HAL_DATASPACE_JPEG_APP_SEGMENTS)) {
2707                  bufferSizes[k] = outputStream->width * outputStream->height;
2708              } else {
2709                  ALOGW("%s: Blob dataSpace %d not supported",
2710                          __FUNCTION__, outputStream->data_space);
2711              }
2712          }
2713  
2714          if (mOutputStreams[i]->isMultiResolution()) {
2715              int32_t streamGroupId = mOutputStreams[i]->getHalStreamGroupId();
2716              const String8& physicalCameraId = mOutputStreams[i]->getPhysicalCameraId();
2717              mGroupIdPhysicalCameraMap[streamGroupId].insert(physicalCameraId);
2718          }
2719      }
2720  	  //Assign the collected stream array to config.streams
2721      config.streams = streams.editArray();
2722  
2723      // Do the HAL configuration; will potentially touch stream
2724      // max_buffers, usage, and priv fields, as well as data_space and format
2725      // fields for IMPLEMENTATION_DEFINED formats.
2726  
2727      const camera_metadata_t *sessionBuffer = sessionParams.getAndLock();
          //Hand the stream configuration down to the HAL's configureStreams
2728      res = mInterface->configureStreams(sessionBuffer, &config, bufferSizes);
2729      sessionParams.unlock(sessionBuffer);
2730  
2731      if (res == BAD_VALUE) {
2732          // HAL rejected this set of streams as unsupported, clean up config
2733          // attempt and return to unconfigured state
2734          CLOGE("Set of requested inputs/outputs not supported by HAL");
2735          cancelStreamsConfigurationLocked();
2736          return BAD_VALUE;
2737      } else if (res != OK) {
2738          // Some other kind of error from configure_streams - this is not
2739          // expected
2740          SET_ERR_L("Unable to configure streams with HAL: %s (%d)",
2741                  strerror(-res), res);
2742          return res;
2743      }
2744  
2745      // Finish all stream configuration immediately.
2746      // TODO: Try to relax this later back to lazy completion, which should be
2747      // faster
2748  
2749      if (mInputStream != NULL && mInputStream->isConfiguring()) {
2750          bool streamReConfigured = false;
2751          res = mInputStream->finishConfiguration(&streamReConfigured);
2752          if (res != OK) {
2753              CLOGE("Can't finish configuring input stream %d: %s (%d)",
2754                      mInputStream->getId(), strerror(-res), res);
2755              cancelStreamsConfigurationLocked();
2756              if ((res == NO_INIT || res == DEAD_OBJECT) && mInputStream->isAbandoned()) {
2757                  return DEAD_OBJECT;
2758              }
2759              return BAD_VALUE;
2760          }
2761          if (streamReConfigured) {
2762              mInterface->onStreamReConfigured(mInputStream->getId());
2763          }
2764      }
2765  
2766      for (size_t i = 0; i < mOutputStreams.size(); i++) {
2767          sp<Camera3OutputStreamInterface> outputStream = mOutputStreams[i];
2768          if (outputStream->isConfiguring() && !outputStream->isConsumerConfigurationDeferred()) {
2769              bool streamReConfigured = false;
2770              res = outputStream->finishConfiguration(&streamReConfigured);
2771              if (res != OK) {
2772                  CLOGE("Can't finish configuring output stream %d: %s (%d)",
2773                          outputStream->getId(), strerror(-res), res);
2774                  cancelStreamsConfigurationLocked();
2775                  if ((res == NO_INIT || res == DEAD_OBJECT) && outputStream->isAbandoned()) {
2776                      return DEAD_OBJECT;
2777                  }
2778                  return BAD_VALUE;
2779              }
2780              if (streamReConfigured) {
2781                  mInterface->onStreamReConfigured(outputStream->getId());
2782              }
2783          }
2784      }
2785  
2786      // Request thread needs to know to avoid using repeat-last-settings protocol
2787      // across configure_streams() calls
2788      if (notifyRequestThread) {
2789          mRequestThread->configurationComplete(mIsConstrainedHighSpeedConfiguration,
2790                  sessionParams, mGroupIdPhysicalCameraMap);
2791      }
2792  
2793      char value[PROPERTY_VALUE_MAX];
2794      property_get("camera.fifo.disable", value, "0");
2795      int32_t disableFifo = atoi(value);
2796      if (disableFifo != 1) {
2797          // Boost priority of request thread to SCHED_FIFO.
2798          pid_t requestThreadTid = mRequestThread->getTid();
2799          res = requestPriority(getpid(), requestThreadTid,
2800                  kRequestThreadPriority, /*isForApp*/ false, /*asynchronous*/ false);
2801          if (res != OK) {
2802              ALOGW("Can't set realtime priority for request processing thread: %s (%d)",
2803                      strerror(-res), res);
2804          } else {
2805              ALOGD("Set real time priority for request queue thread (tid %d)", requestThreadTid);
2806          }
2807      }
2808  
2809      // Update device state
2810      const camera_metadata_t *newSessionParams = sessionParams.getAndLock();
2811      const camera_metadata_t *currentSessionParams = mSessionParams.getAndLock();
2812      bool updateSessionParams = (newSessionParams != currentSessionParams) ? true : false;
2813      sessionParams.unlock(newSessionParams);
2814      mSessionParams.unlock(currentSessionParams);
2815      if (updateSessionParams)  {
2816          mSessionParams = sessionParams;
2817      }
2818  
2819      mNeedConfig = false;
2820  
2821      internalUpdateStatusLocked((mFakeStreamId == NO_STREAM) ?
2822              STATUS_CONFIGURED : STATUS_UNCONFIGURED);
2823  
2824      ALOGV("%s: Camera %s: Stream configuration complete", __FUNCTION__, mId.string());
2825  
2826      // tear down the deleted streams after configure streams.
2827      mDeletedStreams.clear();
2828  
2829      auto rc = mPreparerThread->resume();
2830      if (rc != OK) {
2831          SET_ERR_L("%s: Camera %s: Preparer thread failed to resume!", __FUNCTION__, mId.string());
2832          return rc;
2833      }
2834  
2835      if (mFakeStreamId == NO_STREAM) {
2836          mRequestBufferSM.onStreamsConfigured();
2837      }
2838  
2839      // Since the streams configuration of the injection camera is based on the internal camera, we
2840      // must wait until the internal camera configure streams before calling injectCamera() to
2841      // configure the injection streams.
2842      if (mInjectionMethods->isInjecting()) {
2843          ALOGV("%s: Injection camera %s: Start to configure streams.",
2844                __FUNCTION__, mInjectionMethods->getInjectedCamId().string());
2845          res = mInjectionMethods->injectCamera(config, bufferSizes);
2846          if (res != OK) {
2847              ALOGE("Can't finish inject camera process!");
2848              return res;
2849          }
2850      }
2852      return OK;
2853  }

CameraDevice在配置stream时会首先过滤即将到来的session Params,接着将调用configureStreamsLocked进行配置stream,在configureStreamsLocked中会先将outputStream的stream信息保存在camera_stream_configuration中,然后调用hal接口的mInterface->configureStreams传给hal层去进行配置。在传入前会进行HAL版本的判断,如果没有更高版本的HAL service,那么就会使用v3.2版本的HAL接口来配置stream。

2.7 CaptureSession

上面两小节我们说到了创建和设置stream,其中创建stream主要是新建了一个Camera3OutputStream并将其加入到了StreamSet类型的mOutputStreams,接着在设置stream的流程中取出对应的stream信息放到camera_stream_configuration中,最终传给hal调用configureStreams去设置stream。创建和设置stream都是由上层Java层调用CameraDeviceImpl的configureStreamsChecked触发的。

/frameworks/base/core/java/android/hardware/camera2/impl/CameraDeviceImpl.java
674      private void createCaptureSessionInternal(InputConfiguration inputConfig,
675              List<OutputConfiguration> outputConfigurations,
676              CameraCaptureSession.StateCallback callback, Executor executor,
677              int operatingMode, CaptureRequest sessionParams) throws CameraAccessException {
    			 ...
713              try {
714                  // configure streams and then block until IDLE 
                     //configureStreamsChecked blocks until configuration finishes; outputConfigurations contains our surfaces
715                  configureSuccess = configureStreamsChecked(inputConfig, outputConfigurations,
716                          operatingMode, sessionParams, createSessionStartTime);
717                  if (configureSuccess == true && inputConfig != null) {
718                      input = mRemoteDevice.getInputSurface();
719                  }
720              } catch (CameraAccessException e) {
721                  configureSuccess = false;
722                  pendingException = e;
723                  input = null;
724                  if (DEBUG) {
725                      Log.v(TAG, "createCaptureSession - failed with exception ", e);
726                  }
727              }
728              //If configureStreamsChecked succeeded, onConfigured will be fired; otherwise onConfigureFailed
729              // Fire onConfigured if configureOutputs succeeded, fire onConfigureFailed otherwise.
730              CameraCaptureSessionCore newSession = null;
    			//isConstrainedHighSpeed is false in this scenario, so the else branch is taken
731              if (isConstrainedHighSpeed) {
732                 ...
743              } else {
744                  newSession = new CameraCaptureSessionImpl(mNextSessionId++, input,
745                          callback, executor, this, mDeviceExecutor, configureSuccess);
746              }

可以看到调用configureStreamsChecked会返回一个boolean值configureSuccess代表是否设置stream成功,接着会创建一个CameraCaptureSessionImpl,并将configureSuccess传入。

/frameworks/base/core/java/android/hardware/camera2/impl/CameraCaptureSessionImpl.java
90      CameraCaptureSessionImpl(int id, Surface input,
91              CameraCaptureSession.StateCallback callback, Executor stateExecutor,
92              android.hardware.camera2.impl.CameraDeviceImpl deviceImpl,
93              Executor deviceStateExecutor, boolean configureSuccess) {
             ...
121  
122          // CameraDevice should call configureOutputs and have it finish before constructing us
123  
            //Dispatch onConfigured or onConfigureFailed depending on whether the stream configuration succeeded
124          if (configureSuccess) {
125              mStateCallback.onConfigured(this);
126              if (DEBUG) Log.v(TAG, mIdString + "Created session successfully");
127              mConfigureSuccess = true;
128          } else {
129              mStateCallback.onConfigureFailed(this);
130              mClosed = true; // do not fire any other callbacks, do not allow any other work
131              Log.e(TAG, mIdString + "Failed to create capture session; configuration failed");
132              mConfigureSuccess = false;
133          }
134      }

这边通过传进来的 configureSuccess来决定是回调onConfigured还是onConfigureFailed,同时回调时会将CameraCaptureSessionImpl传回去。所以我们APP拿到的CaptureSession就是CameraCaptureSessionImpl。这里的mStateCallback其实是当时2.3小节createCaptureSession传入的CameraCaptureSessionProxy.StateCallback被包装成AndroidCaptureSessionStateCallback,而AndroidCaptureSessionStateCallback则是继承于CameraCaptureSession.StateCallback。所以会回调到2.3小节的CameraCaptureSessionProxy.StateCallback的onConfigured。在CameraCaptureSessionProxy.StateCallback的onConfigured中会将session赋给sessionFuture。

/packages/apps/Camera2/src/com/android/camera/one/v2/initialization/CaptureSessionCreator.java
54      public ListenableFuture<CameraCaptureSessionProxy> createCaptureSession(
55              List<Surface> surfaces) {
56          final SettableFuture<CameraCaptureSessionProxy> sessionFuture = SettableFuture.create();
57          try {
                //mDevice is the AndroidCameraDeviceProxy passed in by createOneCamera (section 2.1); this ends up calling CameraDevice.createCaptureSession
58              mDevice.createCaptureSession(surfaces, new CameraCaptureSessionProxy.StateCallback() {
                   ...
69  
70                  @Override
71                  public void onConfigured(CameraCaptureSessionProxy session) {
72                      boolean valueSet = sessionFuture.set(session);
73                      if (!valueSet) {
74                          // If the future was already marked with cancellation or
75                          // an exception, close the session.
76                          session.close();
77                      }
78                  }
    				...
90              }, mCameraHandler);
91          } catch (CameraAccessException e) {
92              sessionFuture.setException(e);
93          }
94          return sessionFuture;
95      }

接着返回到2.2小节的PreviewStarter的startPreview

/packages/apps/Camera2/src/com/android/camera/one/v2/initialization/PreviewStarter.java
66      public ListenableFuture<Void> startPreview(final Surface surface) {
           ...
82          //mCaptureSessionCreator is also created in InitializedOneCameraFactory and handed to PreviewStarter
83          final ListenableFuture<CameraCaptureSessionProxy> sessionFuture =
84                  mCaptureSessionCreator.createCaptureSession(surfaceList);
85  
86          return Futures.transformAsync(sessionFuture,
87                  new AsyncFunction<CameraCaptureSessionProxy, Void>() {
88                      @Override
89                      public ListenableFuture<Void> apply(
90                              CameraCaptureSessionProxy captureSession) throws Exception {
91                          mSessionListener.onCameraCaptureSessionCreated(captureSession, surface);
92                          return Futures.immediateFuture(null);
93                      }
94                  }, MoreExecutors.directExecutor());

接着回调到mSessionListener.onCameraCaptureSessionCreated将surface和创建的session都传过去,接着就是这个mSessionListener是什么时候创建的了,在PreviewStarter中搜索可以知道这个Listener是在创建PreviewStarter传入的,而PreviewStarter则是在InitializedOneCameraFactory初始化时创建的。如下:

/packages/apps/Camera2/src/com/android/camera/one/v2/initialization/InitializedOneCameraFactory.java
71      public InitializedOneCameraFactory(
72              final Lifetime lifetime, final CameraStarter cameraStarter, CameraDeviceProxy device,
73              List<Surface> outputSurfaces, MainThread mainThreadExecutor,
74              HandlerFactory handlerFactory, float maxZoom, List<Size> supportedPreviewSizes,
75              LinearScale lensRange, OneCamera.Facing direction) {
            //Once the capture session is created, forward it together with the preview surface into cameraStarter.startCamera
134          PreviewStarter mPreviewStarter = new PreviewStarter(outputSurfaces,
135                  captureSessionCreator,
136                  new PreviewStarter.CameraCaptureSessionCreatedListener() {
137                      @Override
138                      public void onCameraCaptureSessionCreated(CameraCaptureSessionProxy session,
139                              Surface previewSurface) {
140                          CameraStarter.CameraControls controls = cameraStarter.startCamera(
141                                  new Lifetime(lifetime),
142                                  session, previewSurface,
143                                  zoomState, metadataCallback, readyState);
144                          mPictureTaker.set(controls.getPictureTaker());
145                          mManualAutoFocus.set(controls.getManualAutoFocus());
146                      }
147                  });
 }

所以当回调onCameraCaptureSessionCreated时就调到了cameraStarter.startCamera,这里的cameraStarter又是哪里来的呢?

2.8 cameraStarter.startCamera

接着往上找可以知道是在SimpleOneCameraFactory的createOneCamera中创建的。

/packages/apps/Camera2/src/com/android/camera/one/v2/SimpleOneCameraFactory.java
99      @Override
100      public OneCamera createOneCamera(final CameraDeviceProxy device,
101              final OneCameraCharacteristics characteristics,
102              final OneCameraFeatureConfig.CaptureSupportLevel supportLevel,
103              final MainThread mainExecutor,
104              final Size pictureSize,
105              final ImageSaver.Builder imageSaverBuilder,
106              final Observable<OneCamera.PhotoCaptureParameters.Flash> flashSetting,
107              final Observable<Integer> exposureSetting,
108              final Observable<Boolean> hdrSceneSetting,
109              final BurstFacade burstFacade,
110              final FatalErrorHandler fatalErrorHandler) {
            //Construction of cameraStarter
129          CameraStarter cameraStarter = new CameraStarter() {
130              @Override
131              public CameraStarter.CameraControls startCamera(Lifetime cameraLifetime,
132                      CameraCaptureSessionProxy cameraCaptureSession,
133                      Surface previewSurface,
134                      Observable<Float> zoomState,
135                      Updatable<TotalCaptureResultProxy> metadataCallback,
136                      Updatable<Boolean> readyState) {
137                  // 1. Create a FrameServerFactory from the cameraCaptureSession passed in
138                  FrameServerFactory frameServerComponent = new FrameServerFactory(
139                          new Lifetime(cameraLifetime), cameraCaptureSession, new HandlerFactory());
140                  //2. Build the executor for CameraCommands, backed by a cached thread pool
141                  CameraCommandExecutor cameraCommandExecutor = new CameraCommandExecutor(
142                          Loggers.tagFactory(),
143                          new Provider<ExecutorService>() {
144                              @Override
145                              public ExecutorService get() {
146                                  // Use a dynamically-expanding thread pool to
147                                  // allow any number of commands to execute
148                                  // simultaneously.
149                                  return Executors.newCachedThreadPool();
150                              }
151                          });
152  
153                  // Create the shared image reader.
154                  SharedImageReaderFactory sharedImageReaderFactory =
155                          new SharedImageReaderFactory(new Lifetime(cameraLifetime), imageReader,
156                                  new HandlerFactory());
157                  Updatable<Long> globalTimestampCallback =
158                          sharedImageReaderFactory.provideGlobalTimestampQueue();
159                  ManagedImageReader managedImageReader =
160                          sharedImageReaderFactory.provideSharedImageReader();
161                  //3. Create the request builder shared by all camera operations; streams and ResponseListeners are added to it for the different operations
162                  // Create the request builder used by all camera operations.
163                  // Streams, ResponseListeners, and Parameters added to
164                  // this will be applied to *all* requests sent to the camera.
165                  RequestTemplate rootBuilder = new RequestTemplate
166                          (new CameraDeviceRequestBuilderFactory(device));
167                  // The shared image reader must be wired to receive every
168                  // timestamp for every image (including the preview).
169                  rootBuilder.addResponseListener(
170                          ResponseListeners.forTimestamps(globalTimestampCallback));
171                  rootBuilder.addStream(new SimpleCaptureStream(previewSurface));
172                  rootBuilder.addResponseListener(ResponseListeners.forFinalMetadata(
173                          metadataCallback));
174                  //4. Obtain an ephemeral FrameServer from the FrameServerFactory built above
175                  FrameServer ephemeralFrameServer =
176                        frameServerComponent.provideEphemeralFrameServer();
177                  //5. Build the preview request on top of the root request template created above
178                  // Create basic functionality (zoom, AE, AF).
179                  BasicCameraFactory basicCameraFactory = new BasicCameraFactory(new Lifetime
180                          (cameraLifetime),
181                          characteristics,
182                          ephemeralFrameServer,
183                          rootBuilder,
184                          cameraCommandExecutor,
185                          new BasicPreviewCommandFactory(ephemeralFrameServer),
186                          flashSetting,
187                          exposureSetting,
188                          zoomState,
189                          hdrSceneSetting,
190                          CameraDevice.TEMPLATE_PREVIEW);
191  
192                  // Register the dynamic updater via orientation supplier
193                  rootBuilder.setParam(CaptureRequest.JPEG_ORIENTATION,
194                          mImageRotationCalculator.getSupplier());
195  
196                  if (GservicesHelper.isJankStatisticsEnabled(AndroidContext.instance().get()
197                        .getContentResolver())) {
198                      rootBuilder.addResponseListener(
199                            new FramerateJankDetector(Loggers.tagFactory(),
200                                  UsageStatistics.instance()));
201                  }
202  
203                  RequestBuilder.Factory meteredZoomedRequestBuilder =
204                          basicCameraFactory.provideMeteredZoomedRequestBuilder();
205                    
206                  // Create the picture-taker.
207                  PictureTaker pictureTaker;
208                  if (supportLevel == OneCameraFeatureConfig.CaptureSupportLevel.LEGACY_JPEG) {
209                      pictureTaker = new LegacyPictureTakerFactory(imageSaverBuilder,
210                              cameraCommandExecutor, mainExecutor,
211                              frameServerComponent.provideFrameServer(),
212                              meteredZoomedRequestBuilder, managedImageReader).providePictureTaker();
213                  } else {
214                      pictureTaker = PictureTakerFactory.create(Loggers.tagFactory(), mainExecutor,
215                              cameraCommandExecutor, imageSaverBuilder,
216                              frameServerComponent.provideFrameServer(),
217                              meteredZoomedRequestBuilder, managedImageReader, flashSetting,
218                              characteristics.isContinuousPictureAutoFocusSupported())
219                              .providePictureTaker();
220                  }
221  
222                  // Wire-together ready-state.
223                  final Observable<Integer> availableImageCount = sharedImageReaderFactory
224                          .provideAvailableImageCount();
225                  final Observable<Boolean> frameServerAvailability = frameServerComponent
226                          .provideReadyState();
227                  Observable<Boolean> ready = Observables.transform(
228                          Arrays.asList(availableImageCount, frameServerAvailability),
229                          new Supplier<Boolean>() {
230                              @Override
231                              public Boolean get() {
232                                  boolean atLeastOneImageAvailable = availableImageCount.get() >= 1;
233                                  boolean frameServerAvailable = frameServerAvailability.get();
234                                  return atLeastOneImageAvailable && frameServerAvailable;
235                              }
236                          });
237  
238                  lifetime.add(Observables.addThreadSafeCallback(ready, readyState));
239                  //6. Kick off the preview
240                  basicCameraFactory.providePreviewUpdater().run();
241  
242                  return new CameraStarter.CameraControls(
243                          pictureTaker,
244                          basicCameraFactory.provideManualAutoFocus());
245              }
246          };
247  
248          float maxZoom = characteristics.getAvailableMaxDigitalZoom();
249          List<Size> supportedPreviewSizes = characteristics.getSupportedPreviewSizes();
250          OneCamera.Facing direction = characteristics.getCameraDirection();
251  
252          return new InitializedOneCameraFactory(lifetime, cameraStarter, device, outputSurfaces,
253                  mainExecutor, new HandlerFactory(), maxZoom, supportedPreviewSizes,
254                  characteristics.getLensFocusRange(), direction)
255                  .provideOneCamera();

从cameraStarter的创建过程可以看到,startCamera所做的工作主要有以下几点:

1.先看我们传进来的比较重要的两个参数 cameraCaptureSession 和 previewSurface

2.在CameraStarter的startCamera中首先就用我们传进来的cameraCaptureSession 创建了一个FrameServerFactory

3.接着构建了一个执行CameraCommand的可缓存线程池,可缓存线程池的特点是如果线程池长度超过处理需要,可灵活回收空闲线程,若无可回收,则新建线程。

4.构造Camera操作的基本模板RequestTemplate,先向模版中加入两个addResponseListener globalTimestampCallback和metadataCallback接着将我们传入的另外一个重要参数previewSurface包装成SimpleCaptureStream也加入到模板中

5.从我们第二步构建的FrameServerFactory获取一个临时的FrameServer,这个FrameServer包含了我们传入的cameraCaptureSession

6.以上一步获取的FrameServer构建一个BasicPreviewCommandFactory并将这个BasicPreviewCommandFactory和之前构建的线程池,请求模板作为参数构建一个BasicCameraFactory,请求类型是CameraDevice.TEMPLATE_PREVIEW。

7.调用basicCameraFactory中的PreviewUpdater的run来执行预览操作。

首先看一下BasicCameraFactory中的PreviewUpdater是如何构建和执行的

/packages/apps/Camera2/src/com/android/camera/one/v2/common/BasicCameraFactory.java
78      public BasicCameraFactory(Lifetime lifetime,
79              OneCameraCharacteristics cameraCharacteristics,
80              FrameServer frameServer,
81              RequestBuilder.Factory rootTemplate,
82              CameraCommandExecutor cameraCommandExecutor,
83              PreviewCommandFactory previewCommandFactory,
84              Observable<OneCamera.PhotoCaptureParameters.Flash> flash,
85              Observable<Integer> exposure,
86              Observable<Float> zoom,
87              Observable<Boolean> hdrSceneSetting,
88              int templateType) {
            // 1. Build a request template on top of the root template passed in
89          RequestTemplate requestTemplate = new RequestTemplate(rootTemplate);
119  		// 2. Apply settings (flash/exposure/zoom/HDR, per the parameters above) to the template (elided)
    		...
    		// 3. Build a CameraCommand from the template and the request type;
    		//    on the preview path templateType is CameraDevice.TEMPLATE_PREVIEW
120          CameraCommand previewUpdaterCommand =
121                previewCommandFactory.get(requestTemplate, templateType);
122  
123          // 4. Wrap the executor and the command into a Runnable preview updater
125          mPreviewUpdater = new ResettingRunnableCameraCommand(cameraCommandExecutor,
126                previewUpdaterCommand);
127  
              ...
163      }
// Exposes the ResettingRunnableCameraCommand built in the constructor;
// callers run() it to (re)start the repeating preview request.
173      public Runnable providePreviewUpdater() {
174          return mPreviewUpdater;
175      }

可以看到这里的mPreviewUpdater是一个ResettingRunnableCameraCommand包含了线程池和CameraDevice.TEMPLATE_PREVIEW类型的CameraCommand

看一下ResettingRunnableCameraCommand的run函数:

/packages/apps/Camera2/src/com/android/camera/one/v2/commands/ResettingRunnableCameraCommand.java
44      public ResettingRunnableCameraCommand(CameraCommandExecutor executor, CameraCommand command) {
45          mExecutor = executor;
46          mCommand = command;
47          mLock = new Object();
            // Seed with an already-completed future so the first run() has a
            // non-null, safe-to-cancel in-progress command.
48          mInProgressCommand = Futures.immediateFuture(new Object());
49      }
50  
51      @Override
52      public void run() {
53          synchronized (mLock) {
54              // Cancel, via interruption, the already-running command, if one
55              // has been started and has not yet completed, then resubmit.
56              mInProgressCommand.cancel(true /* mayInterruptIfRunning */);
57              mInProgressCommand = mExecutor.execute(mCommand);
58          }
59      }

ResettingRunnableCameraCommand的run函数就是用传入的线程池去执行传入的CameraCommand。传入的CameraCommand是根据请求模板和请求类型从previewCommandFactory中获取的,而previewCommandFactory则是在startCamera中创建的。

179                  BasicCameraFactory basicCameraFactory = new BasicCameraFactory(new Lifetime
180                          (cameraLifetime),
181                          characteristics,
182                          ephemeralFrameServer,
183                          rootBuilder,
184                          cameraCommandExecutor,
185                          new BasicPreviewCommandFactory(ephemeralFrameServer),
186                          flashSetting,
187                          exposureSetting,
188                          zoomState,
189                          hdrSceneSetting,
190                          CameraDevice.TEMPLATE_PREVIEW);

这里的previewCommandFactory是一个BasicPreviewCommandFactory,传入的ephemeralFrameServer则是从包含cameraCaptureSession的FrameServerFactory中获取到的。

/packages/apps/Camera2/src/com/android/camera/one/v2/commands/BasicPreviewCommandFactory.java
// Holds the FrameServer that PreviewCommands built by this factory submit requests to.
28      public BasicPreviewCommandFactory(FrameServer frameServer) {
29          mFrameServer = frameServer;
30      }
31  
32      @Override
33      public CameraCommand get(RequestBuilder.Factory previewRequestBuilder, int templateType) {
            // Wrap the request template and template type (e.g. TEMPLATE_PREVIEW)
            // together with the frame server into a runnable command.
34          return new PreviewCommand(mFrameServer, previewRequestBuilder, templateType);
35      }

BasicPreviewCommandFactory的类很简单,就是获取PreviewCommand,上面我们说过mPreviewUpdater的run就是用线程池执行CameraCommand,这里的CameraCommand就是PreviewCommand,看一下PreviewCommand的run函数

/packages/apps/Camera2/src/com/android/camera/one/v2/commands/PreviewCommand.java
49      public void run() throws InterruptedException, CameraAccessException,
50              CameraCaptureSessionClosedException, ResourceAcquisitionFailedException {
            // Acquire exclusive access to the camera; try-with-resources closes
            // the session (releasing the lock) when the submit returns or throws.
51          try (FrameServer.Session session = mFrameServer.createExclusiveSession()) {
52              RequestBuilder photoRequest = mBuilderFactory.create(mRequestType);
                // REPEATING: the framework keeps resubmitting this request for
                // every preview frame until it is replaced or stopped.
53              session.submitRequest(Arrays.asList(photoRequest.build()),
54                      FrameServer.RequestType.REPEATING);
55          }
56      }

PreviewCommand的run函数则是创建一个FrameServer.Session的session,接着以传入的模板和请求类型构建一个RequestBuilder,最终通过session.submitRequest提交给了FrameServer.Session。此时的RequestType变成了FrameServer.RequestType.REPEATING。

我们首先找一下mFrameServer,这个mFrameServer是传进来的ephemeralFrameServer也就是最初包含cameraCaptureSession 的FrameServerFactory中获取到的。是一个FrameServerImpl对象,最终调用的createExclusiveSession和submitRequest如下:

/packages/apps/Camera2/src/com/android/camera/one/v2/core/FrameServerImpl.java 
88      @Override
89      @Nonnull
90      public Session createExclusiveSession() throws InterruptedException {
            // Re-acquiring on the same thread would self-deadlock, so fail fast.
91          checkState(!mCameraLock.isHeldByCurrentThread(), "Cannot acquire another " +
92                  "FrameServer.Session on the same thread.");
            // Block (interruptibly) until the camera lock is free;
            // Session.close() is responsible for releasing it.
93          mCameraLock.lockInterruptibly();
94          return new Session();
95      }

createExclusiveSession返回的session则是FrameServerImpl的一个内部类:

/packages/apps/Camera2/src/com/android/camera/one/v2/core/FrameServerImpl.java
36      public class Session implements FrameServer.Session {
37          private final Object mLock;
            // Guarded by mLock; once true, further submits throw SessionClosedException.
38          private boolean mClosed;
39  
40          private Session() {
41              mLock = new Object();
42              mClosed = false;
43          }
44  
            // Forwards the burst to the wrapped capture session; on any failure
            // every request in the burst is aborted so its resources are released.
45          @Override
46          public void submitRequest(List<Request> burstRequests, RequestType type)
47                  throws CameraAccessException, InterruptedException,
48                  CameraCaptureSessionClosedException, ResourceAcquisitionFailedException {
49              synchronized (mLock) {
50                  try {
51                      if (mClosed) {
52                          throw new SessionClosedException();
53                      }
54  
55                      mCaptureSession.submitRequest(burstRequests, type);
56                  } catch (Exception e) {
57                      for (Request r : burstRequests) {
58                          r.abort();
59                      }
60                      throw e;
61                  }
62              }
63          }
64  
            // Idempotent: releases the enclosing FrameServerImpl's camera lock exactly once.
65          @Override
66          public void close() {
67              synchronized (mLock) {
68                  if (!mClosed) {
69                      mClosed = true;
70                      mCameraLock.unlock();
71                  }
72              }
73          }
74      }

可以看到session的submitRequest最终调用到了mCaptureSession.submitRequest,而这里的mCaptureSession则是创建FrameServerImpl时将我们最初传入的

cameraCaptureSession包装成TagDispatchCaptureSession传进来的,所以最后会调用到TagDispatchCaptureSession的submitRequest

/packages/apps/Camera2/src/com/android/camera/one/v2/core/TagDispatchCaptureSession.java
133      public void submitRequest(List<Request> burstRequests, FrameServer.RequestType requestType)
134              throws
135              CameraAccessException, InterruptedException, CameraCaptureSessionClosedException,
136              ResourceAcquisitionFailedException {
137          try {
                 // Maps each generated tag back to its request's ResponseListener so the
                 // shared CaptureCallback can dispatch results to the right request.
138              Map<Object, ResponseListener> tagListenerMap = new HashMap<Object, ResponseListener>();
139              List<CaptureRequest> captureRequests = new ArrayList<>(burstRequests.size());
140  
141              for (Request request : burstRequests) {
142                  Object tag = generateTag();
143  
144                  tagListenerMap.put(tag, request.getResponseListener());
145  
146                  CaptureRequestBuilderProxy builder = request.allocateCaptureRequest();
147                  builder.setTag(tag);
148                  captureRequests.add(builder.build());
149              }
150  
                 // Preview uses REPEATING (setRepeatingBurst); one-shot captures use captureBurst.
151              if (requestType == FrameServer.RequestType.REPEATING) {
152                  mCaptureSession.setRepeatingBurst(captureRequests, new
153                          CaptureCallback(tagListenerMap), mCameraHandler);
154              } else {
155                  mCaptureSession.captureBurst(captureRequests, new
156                          CaptureCallback(tagListenerMap), mCameraHandler);
157              }
158          } catch (Exception e) {
                 // Abort every request so acquired resources are returned before rethrowing.
159              for (Request r : burstRequests) {
160                  r.abort();
161              }
162              throw e;
163          }
164      }

TagDispatchCaptureSession的submitRequest则是遍历传入的requestlist,接着根据requestType分别调用mCaptureSession的setRepeatingBurst或者captureBurst,上面我们也提到过在构建PreviewCommand时传入的requestType是FrameServer.RequestType.REPEATING,所以这里走if分支。接着我们确定一下mCaptureSession,这个mCaptureSession就是我们最初传入的cameraCaptureSession。它的对象实例则是我们在2.7小节提到过的CameraCaptureSessionImpl。

/frameworks/base/core/java/android/hardware/camera2/impl/CameraCaptureSessionImpl.java
298      @Override
299      public int setRepeatingRequest(CaptureRequest request, CaptureCallback callback,
300              Handler handler) throws CameraAccessException {
301          checkRepeatingRequest(request);
302  
303          synchronized (mDeviceImpl.mInterfaceLock) {
304              checkNotClosed();
305  
306              handler = checkHandler(handler, callback);
307  
308              if (DEBUG) {
309                  Log.v(TAG, mIdString + "setRepeatingRequest - request " + request + ", callback " +
310                          callback + " handler" + " " + handler);
311              }
312              // Forward to CameraDeviceImpl.setRepeatingRequest and register the
                 // returned sequence id so completion callbacks are delivered in order.
313              return addPendingSequence(mDeviceImpl.setRepeatingRequest(request,
314                      createCaptureCallbackProxy(handler, callback), mDeviceExecutor));
315          }
316      }

当mDeviceExecutor有空闲时间时则会调用mDeviceImpl.setRepeatingRequest继续进行:

2.9 mDeviceImpl.setRepeatingRequest

/frameworks/base/core/java/android/hardware/camera2/impl/CameraDeviceImpl.java
// Repeating single request: wrap it in a one-element list and submit with streaming=true.
59      public int setRepeatingRequest(CaptureRequest request, CaptureCallback callback,
1260              Executor executor) throws CameraAccessException {
1261          List<CaptureRequest> requestList = new ArrayList<CaptureRequest>();
1262          requestList.add(request);
1263          return submitCaptureRequest(requestList, callback, executor, /*streaming*/true);
1264      }
1182      private int submitCaptureRequest(List<CaptureRequest> requestList, CaptureCallback callback,
1183              Executor executor, boolean repeating) throws CameraAccessException {
1184  
1185          // Need a valid executor, or current thread needs to have a looper, if
1186          // callback is valid
1187          executor = checkExecutor(executor, callback);
1188  
1189          synchronized(mInterfaceLock) {
1190              checkIfCameraClosedOrInError();
1191  
1192              // Make sure that there all requests have at least 1 surface; all surfaces are non-null;
1193              for (CaptureRequest request : requestList) {
1194                  if (request.getTargets().isEmpty()) {
1195                      throw new IllegalArgumentException(
1196                              "Each request must have at least one Surface target");
1197                  }
1198  
1199                  for (Surface surface : request.getTargets()) {
1200                      if (surface == null) {
1201                          throw new IllegalArgumentException("Null Surface targets are not allowed");
1202                      }
1203                  }
1204              }
1205              // 1. repeating is true on the preview path: cancel the previous repeating request first
1206              if (repeating) {
1207                  stopRepeating();
1208              }
1209  
1210              SubmitInfo requestInfo;
1211  
1212              CaptureRequest[] requestArray = requestList.toArray(new CaptureRequest[requestList.size()]);
                  // 2. Map each request's Surface to the streamIdx/surfaceIdx recorded in mConfiguredOutputs
1213              // Convert Surface to streamIdx and surfaceIdx
1214              for (CaptureRequest request : requestArray) {
1215                  request.convertSurfaceToStreamId(mConfiguredOutputs);
1216              }
1217              // 3. Binder call into CameraDeviceClient.submitRequestList (repeating is true here)
1218              requestInfo = mRemoteDevice.submitRequestList(requestArray, repeating);
1219              if (DEBUG) {
1220                  Log.v(TAG, "last frame number " + requestInfo.getLastFrameNumber());
1221              }
1222              
1223              for (CaptureRequest request : requestArray) {
1224                  request.recoverStreamIdToSurface();
1225              }
1226              // 4. Register a CaptureCallbackHolder to receive result callbacks for this request id
1227              if (callback != null) {
1228                  mCaptureCallbackMap.put(requestInfo.getRequestId(),
1229                          new CaptureCallbackHolder(
1230                              callback, requestList, executor, repeating, mNextSessionId - 1));
1231              } else {
1232                  if (DEBUG) {
1233                      Log.d(TAG, "Listen for request " + requestInfo.getRequestId() + " is null");
1234                  }
1235              }
1236  
1237              if (repeating) {
1238                  if (mRepeatingRequestId != REQUEST_ID_NONE) {
1239                      checkEarlyTriggerSequenceCompleteLocked(mRepeatingRequestId,
1240                              requestInfo.getLastFrameNumber(),
1241                              mRepeatingRequestTypes);
1242                  }
1243                  mRepeatingRequestId = requestInfo.getRequestId();
1244                  mRepeatingRequestTypes = getRequestTypes(requestArray);
1245              } else {
1246                  mRequestLastFrameNumbersList.add(
1247                      new RequestLastFrameNumbersHolder(requestList, requestInfo));
1248              }
1249  
1250              if (mIdle) {
1251                  mDeviceExecutor.execute(mCallOnActive);
1252              }
1253              mIdle = false;
1254  
1255              return requestInfo.getRequestId();
1256          }
1257      }

submitCaptureRequest首先会取消上一次重复的请求,接着通过Binder调用 CameraDeviceClient的submitRequestList来请求预览,传入的repeating为true。

/frameworks/av/services/camera/libcameraservice/api2/CameraDeviceClient.cpp
217  binder::Status CameraDeviceClient::submitRequestList(
218          const std::vector<hardware::camera2::CaptureRequest>& requests,
219          bool streaming,
220          /*out*/
221          hardware::camera2::utils::SubmitInfo *submitInfo) {
         // 1. Validate the call arguments (elided in this excerpt)
         ...
242  
243      List<const CameraDeviceBase::PhysicalCameraSettingsList> metadataRequestList;
244      std::list<const SurfaceMap> surfaceMapList;
245      submitInfo->mRequestId = mRequestIdCounter;
246      uint32_t loopCounter = 0;
247  
248      for (auto&& request: requests) {
              // Per-request validity checks (elided in this excerpt)
292           ...
293          /**
294           * Write in the output stream IDs and map from stream ID to surface ID
295           * which we calculate from the capture request's list of surface target
296           */
297          SurfaceMap surfaceMap;
298          Vector<int32_t> outputStreamIds;
299          std::vector<std::string> requestedPhysicalIds;
    		// Build the stream-id <-> surface-id mapping for the surfaces carried by this request
300          if (request.mSurfaceList.size() > 0) {
301              for (const sp<Surface>& surface : request.mSurfaceList) {
302                  if (surface == 0) continue;
303  
304                  int32_t streamId;
305                  sp<IGraphicBufferProducer> gbp = surface->getIGraphicBufferProducer();
306                  res = insertGbpLocked(gbp, &surfaceMap, &outputStreamIds, &streamId);
307                  if (!res.isOk()) {
308                      return res;
309                  }
310  
311                  ssize_t index = mConfiguredOutputs.indexOfKey(streamId);
312                  if (index >= 0) {
313                      String8 requestedPhysicalId(
314                              mConfiguredOutputs.valueAt(index).getPhysicalCameraId());
315                      requestedPhysicalIds.push_back(requestedPhysicalId.string());
316                  } else {
317                      ALOGW("%s: Output stream Id not found among configured outputs!", __FUNCTION__);
318                  }
319              }
320          } else {
321              for (size_t i = 0; i < request.mStreamIdxList.size(); i++) {
322                  int streamId = request.mStreamIdxList.itemAt(i);
323                  int surfaceIdx = request.mSurfaceIdxList.itemAt(i);
324  
325                  ssize_t index = mConfiguredOutputs.indexOfKey(streamId);
326                  if (index < 0) {
327                      ALOGE("%s: Camera %s: Tried to submit a request with a surface that"
328                              " we have not called createStream on: stream %d",
329                              __FUNCTION__, mCameraIdStr.string(), streamId);
330                      return STATUS_ERROR(CameraService::ERROR_ILLEGAL_ARGUMENT,
331                              "Request targets Surface that is not part of current capture session");
332                  }
333  
334                  const auto& gbps = mConfiguredOutputs.valueAt(index).getGraphicBufferProducers();
335                  if ((size_t)surfaceIdx >= gbps.size()) {
336                      ALOGE("%s: Camera %s: Tried to submit a request with a surface that"
337                              " we have not called createStream on: stream %d, surfaceIdx %d",
338                              __FUNCTION__, mCameraIdStr.string(), streamId, surfaceIdx);
339                      return STATUS_ERROR(CameraService::ERROR_ILLEGAL_ARGUMENT,
340                              "Request targets Surface has invalid surface index");
341                  }
342  
343                  res = insertGbpLocked(gbps[surfaceIdx], &surfaceMap, &outputStreamIds, nullptr);
344                  if (!res.isOk()) {
345                      return res;
346                  }
347  
348                  String8 requestedPhysicalId(
349                          mConfiguredOutputs.valueAt(index).getPhysicalCameraId());
350                  requestedPhysicalIds.push_back(requestedPhysicalId.string());
351              }
352          }
353  
354          CameraDeviceBase::PhysicalCameraSettingsList physicalSettingsList;
355          for (const auto& it : request.mPhysicalCameraSettings) {
356              if (it.settings.isEmpty()) {
357                  ALOGE("%s: Camera %s: Sent empty metadata packet. Rejecting request.",
358                          __FUNCTION__, mCameraIdStr.string());
359                  return STATUS_ERROR(CameraService::ERROR_ILLEGAL_ARGUMENT,
360                          "Request settings are empty");
361              }
362  
363              // Check whether the physical / logical stream has settings
364              // consistent with the sensor pixel mode(s) it was configured with.
365              // mCameraIdToStreamSet will only have ids that are high resolution
366              const auto streamIdSetIt = mHighResolutionCameraIdToStreamIdSet.find(it.id);
367              if (streamIdSetIt != mHighResolutionCameraIdToStreamIdSet.end()) {
368                  std::list<int> streamIdsUsedInRequest = getIntersection(streamIdSetIt->second,
369                          outputStreamIds);
370                  if (!request.mIsReprocess &&
371                          !isSensorPixelModeConsistent(streamIdsUsedInRequest, it.settings)) {
372                       ALOGE("%s: Camera %s: Request settings CONTROL_SENSOR_PIXEL_MODE not "
373                              "consistent with configured streams. Rejecting request.",
374                              __FUNCTION__, it.id.c_str());
375                      return STATUS_ERROR(CameraService::ERROR_ILLEGAL_ARGUMENT,
376                          "Request settings CONTROL_SENSOR_PIXEL_MODE are not consistent with "
377                          "streams configured");
378                  }
379              }
380  
381              String8 physicalId(it.id.c_str());
382              if (physicalId != mDevice->getId()) {
383                  auto found = std::find(requestedPhysicalIds.begin(), requestedPhysicalIds.end(),
384                          it.id);
385                  if (found == requestedPhysicalIds.end()) {
386                      ALOGE("%s: Camera %s: Physical camera id: %s not part of attached outputs.",
387                              __FUNCTION__, mCameraIdStr.string(), physicalId.string());
388                      return STATUS_ERROR(CameraService::ERROR_ILLEGAL_ARGUMENT,
389                              "Invalid physical camera id");
390                  }
391  
392                  if (!mSupportedPhysicalRequestKeys.empty()) {
393                      // Filter out any unsupported physical request keys.
394                      CameraMetadata filteredParams(mSupportedPhysicalRequestKeys.size());
395                      camera_metadata_t *meta = const_cast<camera_metadata_t *>(
396                              filteredParams.getAndLock());
397                      set_camera_metadata_vendor_id(meta, mDevice->getVendorTagId());
398                      filteredParams.unlock(meta);
399  
400                      for (const auto& keyIt : mSupportedPhysicalRequestKeys) {
401                          camera_metadata_ro_entry entry = it.settings.find(keyIt);
402                          if (entry.count > 0) {
403                              filteredParams.update(entry);
404                          }
405                      }
406  
407                      physicalSettingsList.push_back({it.id, filteredParams});
408                  }
409              } else {
410                  physicalSettingsList.push_back({it.id, it.settings});
411              }
412          }
413  
414          if (!enforceRequestPermissions(physicalSettingsList.begin()->metadata)) {
415              // Callee logs
416              return STATUS_ERROR(CameraService::ERROR_PERMISSION_DENIED,
417                      "Caller does not have permission to change restricted controls");
418          }
419  
420          physicalSettingsList.begin()->metadata.update(ANDROID_REQUEST_OUTPUT_STREAMS,
421                  &outputStreamIds[0], outputStreamIds.size());
422  
423          if (request.mIsReprocess) {
424              physicalSettingsList.begin()->metadata.update(ANDROID_REQUEST_INPUT_STREAMS,
425                      &mInputStream.id, 1);
426          }
427  
428          physicalSettingsList.begin()->metadata.update(ANDROID_REQUEST_ID,
429                  &(submitInfo->mRequestId), /*size*/1);
430          loopCounter++; // loopCounter starts from 1
431          ALOGV("%s: Camera %s: Creating request with ID %d (%d of %zu)",
432                  __FUNCTION__, mCameraIdStr.string(), submitInfo->mRequestId,
433                  loopCounter, requests.size());
434  
435          metadataRequestList.push_back(physicalSettingsList);
436          surfaceMapList.push_back(surfaceMap);
437      }
438      mRequestIdCounter++;
439      // streaming is true on the preview (repeating) path
440      if (streaming) {
              // Key call: hand the repeating request list down to Camera3Device
441          err = mDevice->setStreamingRequestList(metadataRequestList, surfaceMapList,
442                  &(submitInfo->mLastFrameNumber));
443          if (err != OK) {
444              String8 msg = String8::format(
445                  "Camera %s:  Got error %s (%d) after trying to set streaming request",
446                  mCameraIdStr.string(), strerror(-err), err);
447              ALOGE("%s: %s", __FUNCTION__, msg.string());
448              res = STATUS_ERROR(CameraService::ERROR_INVALID_OPERATION,
449                      msg.string());
450          } else {
451              Mutex::Autolock idLock(mStreamingRequestIdLock);
452              mStreamingRequestId = submitInfo->mRequestId;
453          }
454      } else {
455          err = mDevice->captureList(metadataRequestList, surfaceMapList,
456                  &(submitInfo->mLastFrameNumber));
457          if (err != OK) {
458              String8 msg = String8::format(
459                  "Camera %s: Got error %s (%d) after trying to submit capture request",
460                  mCameraIdStr.string(), strerror(-err), err);
461              ALOGE("%s: %s", __FUNCTION__, msg.string());
462              res = STATUS_ERROR(CameraService::ERROR_INVALID_OPERATION,
463                      msg.string());
464          }
465          ALOGV("%s: requestId = %d ", __FUNCTION__, submitInfo->mRequestId);
466      }
467  
468      ALOGV("%s: Camera %s: End of function", __FUNCTION__, mCameraIdStr.string());
469      return res;
470  }

CameraDeviceClient的submitRequestList主要从requeset中分离出surfaceMapList和physicalSettingsList继续向下传递,调用mDevice->setStreamingRequestList,这里的mDevice则是Camera3Device。

/frameworks/av/services/camera/libcameraservice/device3/Camera3Device.cpp
1220  status_t Camera3Device::setStreamingRequestList(
1221          const List<const PhysicalCameraSettingsList> &requestsList,
1222          const std::list<const SurfaceMap> &surfaceMaps, int64_t *lastFrameNumber) {
1223      ATRACE_CALL();
1224  
          // Streaming (preview) path: same helper as one-shot capture, but repeating=true.
1225      return submitRequestsHelper(requestsList, surfaceMaps, /*repeating*/true, lastFrameNumber);
1226  }

981  status_t Camera3Device::submitRequestsHelper(
982          const List<const PhysicalCameraSettingsList> &requests,
983          const std::list<const SurfaceMap> &surfaceMaps,
984          bool repeating,
985          /*out*/
986          int64_t *lastFrameNumber) {
987      ATRACE_CALL();
988      nsecs_t requestTimeNs = systemTime();
989  
990      Mutex::Autolock il(mInterfaceLock);
991      Mutex::Autolock l(mLock);
992  
993      status_t res = checkStatusOkToCaptureLocked();
994      if (res != OK) {
995          // error logged by previous call
996          return res;
997      }
998  
999      RequestList requestList;
1000      // 1. Convert the metadata/surface-map pairs into a RequestList of CaptureRequests
1001      res = convertMetadataListToRequestListLocked(requests, surfaceMaps,
1002              repeating, requestTimeNs, /*out*/&requestList);
1003      if (res != OK) {
1004          // error logged by previous call
1005          return res;
1006      }
1007      // repeating is true on the preview path
1008      if (repeating) {
              // 2. Hand the repeating request list to the RequestThread
1009          res = mRequestThread->setRepeatingRequests(requestList, lastFrameNumber);
1010      } else {
1011          res = mRequestThread->queueRequestList(requestList, lastFrameNumber);
1012      }
1013  
          ...
1026  
1027      return res;
1028  }

Camera3Device::setStreamingRequestList会调用submitRequestsHelper,后者先通过convertMetadataListToRequestListLocked将传入的PhysicalCameraSettingsList和SurfaceMap转换成RequestList,接着将构建好的RequestList传给mRequestThread。

/frameworks/av/services/camera/libcameraservice/device3/Camera3Device.cpp
4293  status_t Camera3Device::RequestThread::setRepeatingRequests(
4294          const RequestList &requests,
4295          /*out*/
4296          int64_t *lastFrameNumber) {
4297      ATRACE_CALL();
4298      Mutex::Autolock l(mRequestLock);
          // Report the last frame number of the previous repeating burst so the
          // caller can finish its sequence bookkeeping.
4299      if (lastFrameNumber != NULL) {
4300          *lastFrameNumber = mRepeatingLastFrameNumber;
4301      }
          // Replace (not append to) the current repeating burst.
4302      mRepeatingRequests.clear();
4303      mFirstRepeating = true;
4304      mRepeatingRequests.insert(mRepeatingRequests.begin(),
4305              requests.begin(), requests.end());
4306  
          // Wake the request thread if it was paused waiting for work.
4307      unpauseForNewRequests();
4308  
4309      mRepeatingLastFrameNumber = hardware::camera2::ICameraDeviceUser::NO_IN_FLIGHT_REPEATING_FRAMES;
4310      return OK;
4311  }

RequestThread::setRepeatingRequests将请求插入到mRepeatingRequests中,然后调用unpauseForNewRequests激活RequestThread线程

在RequestThread的threadLoop中取出消费:

/frameworks/av/services/camera/libcameraservice/device3/Camera3Device.cpp
4653  bool Camera3Device::RequestThread::threadLoop() {
4654      ATRACE_CALL();
4655      status_t res;
4656      // Any function called from threadLoop() must not hold mInterfaceLock since
4657      // it could lead to deadlocks (disconnect() -> hold mInterfaceMutex -> wait for request thread
4658      // to finish -> request thread waits on mInterfaceMutex) http://b/143513518
4659  
4660      // Handle paused state.
4661      if (waitIfPaused()) {
4662          return true;
4663      }
4664  
4665      // Wait for the next batch of requests.
4666      waitForNextRequestBatch();
4667      if (mNextRequests.size() == 0) {
4668          return true;
4669      }
4670  
4671      // Get the latest request ID, if any
4672      int latestRequestId;
4673      camera_metadata_entry_t requestIdEntry = mNextRequests[mNextRequests.size() - 1].
4674              captureRequest->mSettingsList.begin()->metadata.find(ANDROID_REQUEST_ID);
4675      if (requestIdEntry.count > 0) {
4676          latestRequestId = requestIdEntry.data.i32[0];
4677      } else {
4678          ALOGW("%s: Did not have android.request.id set in the request.", __FUNCTION__);
4679          latestRequestId = NAME_NOT_FOUND;
4680      }
4681  
4682      // 'mNextRequests' will at this point contain either a set of HFR batched requests
4683      //  or a single request from streaming or burst. In either case the first element
4684      //  should contain the latest camera settings that we need to check for any session
4685      //  parameter updates.
4686      if (updateSessionParameters(mNextRequests[0].captureRequest->mSettingsList.begin()->metadata)) {
4687          res = OK;
4688  
4689          //Input stream buffers are already acquired at this point so an input stream
4690          //will not be able to move to idle state unless we force it.
4691          if (mNextRequests[0].captureRequest->mInputStream != nullptr) {
4692              res = mNextRequests[0].captureRequest->mInputStream->forceToIdle();
4693              if (res != OK) {
4694                  ALOGE("%s: Failed to force idle input stream: %d", __FUNCTION__, res);
4695                  cleanUpFailedRequests(/*sendRequestError*/ false);
4696                  return false;
4697              }
4698          }
4699  
4700          if (res == OK) {
4701              sp<Camera3Device> parent = mParent.promote();
4702              if (parent != nullptr) {
4703                  mReconfigured |= parent->reconfigureCamera(mLatestSessionParams, mStatusId);
4704              }
4705              setPaused(false);
4706  
4707              if (mNextRequests[0].captureRequest->mInputStream != nullptr) {
4708                  mNextRequests[0].captureRequest->mInputStream->restoreConfiguredState();
4709                  if (res != OK) {
4710                      ALOGE("%s: Failed to restore configured input stream: %d", __FUNCTION__, res);
4711                      cleanUpFailedRequests(/*sendRequestError*/ false);
4712                      return false;
4713                  }
4714              }
4715          }
4716      }
4717  
4718      // Prepare a batch of HAL requests and output buffers.
4719      res = prepareHalRequests();
4720      if (res == TIMED_OUT) {
4721          // Not a fatal error if getting output buffers time out.
4722          cleanUpFailedRequests(/*sendRequestError*/ true);
4723          // Check if any stream is abandoned.
4724          checkAndStopRepeatingRequest();
4725          return true;
4726      } else if (res != OK) {
4727          cleanUpFailedRequests(/*sendRequestError*/ false);
4728          return false;
4729      }
4730  
4731      // Inform waitUntilRequestProcessed thread of a new request ID
4732      {
4733          Mutex::Autolock al(mLatestRequestMutex);
4734  
4735          mLatestRequestId = latestRequestId;
4736          mLatestRequestSignal.signal();
4737      }
4738  
4739      // Submit a batch of requests to HAL.
4740      // Use flush lock only when submitting multilple requests in a batch.
4741      // TODO: The problem with flush lock is flush() will be blocked by process_capture_request()
4742      // which may take a long time to finish so synchronizing flush() and
4743      // process_capture_request() defeats the purpose of cancelling requests ASAP with flush().
4744      // For now, only synchronize for high speed recording and we should figure something out for
4745      // removing the synchronization.
4746      bool useFlushLock = mNextRequests.size() > 1;
4747  
4748      if (useFlushLock) {
4749          mFlushLock.lock();
4750      }
4751  
4752      ALOGVV("%s: %d: submitting %zu requests in a batch.", __FUNCTION__, __LINE__,
4753              mNextRequests.size());
4754  
4755      sp<Camera3Device> parent = mParent.promote();
4756      if (parent != nullptr) {
4757          parent->mRequestBufferSM.onSubmittingRequest();
4758      }
4759  
4760      bool submitRequestSuccess = false;
4761      nsecs_t tRequestStart = systemTime(SYSTEM_TIME_MONOTONIC);
4762      submitRequestSuccess = sendRequestsBatch();
4763  
4764      nsecs_t tRequestEnd = systemTime(SYSTEM_TIME_MONOTONIC);
4765      mRequestLatency.add(tRequestStart, tRequestEnd);
4766  
4767      if (useFlushLock) {
4768          mFlushLock.unlock();
4769      }
4770  
4771      // Unset as current request
4772      {
4773          Mutex::Autolock l(mRequestLock);
4774          mNextRequests.clear();
4775      }
4776      mRequestSubmittedSignal.signal();
4777  
4778      return submitRequestSuccess;
4779  }

在调用完RequestThread::setRepeatingRequests后,mNextRequests中就添加了我们传进来的request,接着会调用prepareHalRequests准备向HAL发送的request,主要是将mOutputStream中的buffer出列后交给halRequest,最后调用sendRequestsBatch向HAL发送request。

/frameworks/av/services/camera/libcameraservice/device3/Camera3Device.cpp
4464  bool Camera3Device::RequestThread::sendRequestsBatch() {
4465      ATRACE_CALL();
4466      status_t res;
4467      size_t batchSize = mNextRequests.size();
4468      std::vector<camera_capture_request_t*> requests(batchSize);
4469      uint32_t numRequestProcessed = 0;
4470      for (size_t i = 0; i < batchSize; i++) {
4471          requests[i] = &mNextRequests.editItemAt(i).halRequest;
4472          ATRACE_ASYNC_BEGIN("frame capture", mNextRequests[i].halRequest.frame_number);
4473      }
4474      //调用hal接口processBatchCaptureRequests发送request到hal层
4475      res = mInterface->processBatchCaptureRequests(requests, &numRequestProcessed);
4476  
          ...
4513      return true;
4514  }

最终是调用 mInterface->processBatchCaptureRequests将request发送给hal层的。

/frameworks/av/services/camera/libcameraservice/device3/Camera3Device.cpp
3798  status_t Camera3Device::HalInterface::processBatchCaptureRequests(
3799          std::vector<camera_capture_request_t*>& requests,/*out*/uint32_t* numRequestProcessed) {
          ...
3934  
3935      hardware::details::return_status err;
3936      auto resultCallback =
3937          [&status, &numRequestProcessed] (auto s, uint32_t n) {
3938                  status = s;
3939                  *numRequestProcessed = n;
3940          };
3941      if (hidlSession_3_7 != nullptr) {
3942          err = hidlSession_3_7->processCaptureRequest_3_7(captureRequests_3_7, cachesToRemove,
3943                                                           resultCallback);
3944      } else if (hidlSession_3_4 != nullptr) {
3945          err = hidlSession_3_4->processCaptureRequest_3_4(captureRequests_3_4, cachesToRemove,
3946                                                           resultCallback);
3947      } else {
3948          err = mHidlSession->processCaptureRequest(captureRequests, cachesToRemove,
3949                                                    resultCallback);
3950      }

3976      return res;
3977  }

从processBatchCaptureRequests可以看到,真正是通过hidlSession来和HAL层进行通信的。当HAL接收到request后会将数据填充到prepareHalRequests准备好的buffer中,接着回调processCaptureResult来处理从HAL层返回的数据。此时数据被存放在之前出列的buffer中,这些buffer就是surface中的GraphicBuffer;由于这个surface是preview传进来的,所以最终会交给SurfaceFlinger去消费。

三、takePicture

在**Camera 模块(一)**中的第三章提到过Camera的拍照流程,最终是调用到了CaptureModule的takePictureNow函数:

/packages/apps/Camera2/src/com/android/camera/CaptureModule.java
530      private void takePictureNow() {
531          if (mCamera == null) {
532              Log.i(TAG, "Not taking picture since Camera is closed.");
533              return;
534          }
535          //1.创建并启动CaptureSession
536          CaptureSession session = createAndStartCaptureSession();
537          int orientation = mAppController.getOrientationManager().getDeviceOrientation()
538                  .getDegrees();
539          //2.组装PhotoCaptureParameters
540          // TODO: This should really not use getExternalCacheDir and instead use
541          // the SessionStorage API. Need to sync with gcam if that's OK.
542          PhotoCaptureParameters params = new PhotoCaptureParameters(
543                  session.getTitle(), orientation, session.getLocation(),
544                  mContext.getExternalCacheDir(), this, mPictureSaverCallback,
545                  mHeadingSensor.getCurrentHeading(), mZoomValue, 0);
546          decorateSessionAtCaptureTime(session);
             //调用Camera的takePicture进行拍照
547          mCamera.takePicture(params, session);
548      }

takePictureNow主要干了三件事情:

1.创建并启动CaptureSession

2.组装PhotoCaptureParameters

3.调用mCamera.takePicture进行拍照 这里的mCamera是OneCamera具体实现从2.1小节我们知道是 GenericOneCameraImpl

3.1 createAndStartCaptureSession

/packages/apps/Camera2/src/com/android/camera/CaptureModule.java
554      private CaptureSession createAndStartCaptureSession() {
555          long sessionTime = getSessionTime();
556          Location location = mLocationManager.getCurrentLocation();
557          String title = CameraUtil.instance().createJpegName(sessionTime);
558          CaptureSession session = getServices().getCaptureSessionManager()
559                  .createNewSession(title, sessionTime, location);
560  
561          session.startEmpty(new CaptureStats(mHdrPlusEnabled),
562                new Size((int) mPreviewArea.width(), (int) mPreviewArea.height()));
563          return session;
564      }

这里主要看CaptureSession的获取是通过CaptureSessionManager创建的。首先getServices在CaptureModule中没有实现,但是CaptureModule继承于 CameraModule所以去 CameraModule中找getServices如下:

/packages/apps/Camera2/src/com/android/camera/CameraModule.java
32      public CameraModule(AppController app) {
33          mServices = app.getServices();
34          mCameraProvider = app.getCameraProvider();
35      }
60      protected CameraServices getServices() {
61          return mServices;
62      }

由**Camera模块(一)**中的2.2小节可以知道在创建CameraModule时传进来的AppController就是CameraActivity,所以接着去看CameraActivity中的getServices

/packages/apps/Camera2/src/com/android/camera/CameraActivity.java
2584      @Override
2585      public CameraServices getServices() {
2586          return CameraServicesImpl.instance();
2587      }

从上面代码可以知道CaptureSession是通过CameraServicesImpl的CaptureSessionManager创建的

/packages/apps/Camera2/src/com/android/camera/app/CameraServicesImpl.java
63      private CameraServicesImpl(Context context) {
64          mMediaSaver = new MediaSaverImpl(context.getContentResolver());
65          PlaceholderManager mPlaceHolderManager = new PlaceholderManager(context);
66          SessionStorageManager mSessionStorageManager = SessionStorageManagerImpl.create(context);
67  
68          StackSaverFactory mStackSaverFactory = new StackSaverFactory(Storage.instance().DIRECTORY,
69                context.getContentResolver());
70          CaptureSessionFactory captureSessionFactory = new CaptureSessionFactoryImpl(
71                  mMediaSaver, mPlaceHolderManager, mSessionStorageManager, mStackSaverFactory);
72          mSessionManager = new CaptureSessionManagerImpl(
73                  captureSessionFactory, mSessionStorageManager, MainThread.create());
74          mMemoryManager = MemoryManagerImpl.create(context, mMediaSaver);
75          mRemoteShutterListener = RemoteShutterHelper.create(context);
76          mSettingsManager = new SettingsManager(context);
77  
78          mMotionManager = new MotionManager(context);
79      }
80  
81      @Override
82      public CaptureSessionManager getCaptureSessionManager() {
83          return mSessionManager;
84      }

所以最后是调用的CaptureSessionFactoryImpl的createNewSession来创建session

/packages/apps/Camera2/src/com/android/camera/session/CaptureSessionManagerImpl.java
278      @Override
279      public CaptureSession createNewSession(String title, long sessionStartMillis, Location location) {
280          return mSessionFactory.createNewSession(this, mSessionNotifier, title, sessionStartMillis,
281                  location);
282      }
/packages/apps/Camera2/src/com/android/camera/session/CaptureSessionFactoryImpl.java
40      @Override
41      public CaptureSession createNewSession(CaptureSessionManager sessionManager,
42              SessionNotifier sessionNotifier, String title, long sessionStartTime,
43              Location location) {
44          TemporarySessionFile temporarySessionFile = new TemporarySessionFile(
45                  mSessionStorageManager, TEMP_SESSIONS, title);
46          return new CaptureSessionImpl(title, sessionStartTime, location, temporarySessionFile,
47                  sessionManager, sessionNotifier, mPlaceholderManager, mMediaSaver,
48                  mStackSaverFactory.create(title, location));
49      }

最终返回的是一个CaptureSessionImpl。

3.2 mCamera.takePicture

在上面也说过mCamera是一个GenericOneCameraImpl实例,所以最后会调用GenericOneCameraImpl的takepicture。

/packages/apps/Camera2/src/com/android/camera/one/v2/initialization/GenericOneCameraImpl.java
96      @Override
97      public void takePicture(PhotoCaptureParameters params, CaptureSession session) {
98          mPictureTaker.takePicture(params, session);
99      }

这里的mPictureTaker则是在InitializedOneCameraFactory的构造函数中创建的:

/packages/apps/Camera2/src/com/android/camera/one/v2/initialization/InitializedOneCameraFactory.java
71      public InitializedOneCameraFactory(
72              final Lifetime lifetime, final CameraStarter cameraStarter, CameraDeviceProxy device,
73              List<Surface> outputSurfaces, MainThread mainThreadExecutor,
74              HandlerFactory handlerFactory, float maxZoom, List<Size> supportedPreviewSizes,
75              LinearScale lensRange, OneCamera.Facing direction) {
            ...
84          final SettableFuture<PictureTaker> mPictureTaker = SettableFuture.create();
85          PictureTaker pictureTaker = new DeferredPictureTaker(mPictureTaker);
    
134          PreviewStarter mPreviewStarter = new PreviewStarter(outputSurfaces,
135                  captureSessionCreator,
136                  new PreviewStarter.CameraCaptureSessionCreatedListener() {
137                      @Override
138                      public void onCameraCaptureSessionCreated(CameraCaptureSessionProxy session,
139                              Surface previewSurface) {
140                          CameraStarter.CameraControls controls = cameraStarter.startCamera(
141                                  new Lifetime(lifetime),
142                                  session, previewSurface,
143                                  zoomState, metadataCallback, readyState);
144                          mPictureTaker.set(controls.getPictureTaker());
145                          mManualAutoFocus.set(controls.getManualAutoFocus());
146                      }
147                  });
            ...
152          mOneCamera = new GenericOneCameraImpl(lifetime, pictureTaker, manualAutoFocus, lensRange,
153                  mainThreadExecutor, afStateListenable, focusStateListenable, readyStateListenable,
154                  maxZoom, zoomState, direction, previewSizeSelector, mPreviewStarter);
155      }

这里的pictureTaker的takePicture有点绕,我们一步一步来看:

首先传入的pictureTaker是一个DeferredPictureTaker

/packages/apps/Camera2/src/com/android/camera/one/v2/initialization/DeferredPictureTaker.java
34      public DeferredPictureTaker(Future<PictureTaker> pictureTakerFuture) {
35          mPictureTakerFuture = pictureTakerFuture;
36      }
37  
38      @Override
39      public void takePicture(OneCamera.PhotoCaptureParameters params, CaptureSession session) {
40          if (mPictureTakerFuture.isDone()) {
41              try {
42                  PictureTaker taker = mPictureTakerFuture.get();
43                  taker.takePicture(params, session);
44              } catch (InterruptedException | ExecutionException | CancellationException e) {
45                  return;
46              }
47          }
48      }

可以看到DeferredPictureTaker的takePicture需要在mPictureTakerFuture执行完毕后才会得到执行,而mPictureTakerFuture就是传进来的SettableFuture,其作用主要就是设置PictureTaker。所以要确认真正的PictureTaker,就需要知道mPictureTaker是在哪儿调用的set函数:搜索mPictureTaker可以知道,只有当CameraCaptureSession创建完成时,才会把CameraStarter.CameraControls中的pictureTaker设置进去,而CameraControls的创建方式在2.8小节已经介绍过,其中正有pictureTaker的创建:

/packages/apps/Camera2/src/com/android/camera/one/v2/SimpleOneCameraFactory.java
99      @Override
100      public OneCamera createOneCamera(...) {
111          final Lifetime lifetime = new Lifetime();
112  
113          final ImageReaderProxy imageReader = new CloseWhenDoneImageReader(new LoggingImageReader(
114                  AndroidImageReaderProxy.newInstance(
115                          pictureSize.getWidth(), pictureSize.getHeight(),
116                          mImageFormat, mMaxImageCount),
117                  Loggers.tagFactory()));
118  
119          lifetime.add(imageReader);
120          lifetime.add(device);
121  
122          List<Surface> outputSurfaces = new ArrayList<>();
123          outputSurfaces.add(imageReader.getSurface());
124  
125          /**
126           * Finishes constructing the camera when prerequisites, e.g. the preview
127           * stream and capture session, are ready.
128           */
129          CameraStarter cameraStarter = new CameraStarter() {
130              @Override
131              public CameraStarter.CameraControls startCamera(...) {
                     ...
206                  // Create the picture-taker.
207                  PictureTaker pictureTaker;
208                  if (supportLevel == OneCameraFeatureConfig.CaptureSupportLevel.LEGACY_JPEG) {
209                      pictureTaker = new LegacyPictureTakerFactory(imageSaverBuilder,
210                              cameraCommandExecutor, mainExecutor,
211                              frameServerComponent.provideFrameServer(),
212                              meteredZoomedRequestBuilder, managedImageReader).providePictureTaker();
213                  } else {
214                      pictureTaker = PictureTakerFactory.create(Loggers.tagFactory(), mainExecutor,
215                              cameraCommandExecutor, imageSaverBuilder,
216                              frameServerComponent.provideFrameServer(),
217                              meteredZoomedRequestBuilder, managedImageReader, flashSetting,
218                              characteristics.isContinuousPictureAutoFocusSupported())
219                              .providePictureTaker();
220                  }
242                  return new CameraStarter.CameraControls(
243                          pictureTaker,
244                          basicCameraFactory.provideManualAutoFocus());
245              }
246          };

252          return new InitializedOneCameraFactory(...)
255                  .provideOneCamera();
256      }

所以真正的pictureTaker是PictureTakerFactory提供出来的

/packages/apps/Camera2/src/com/android/camera/one/v2/photo/PictureTakerFactory.java
36      private PictureTakerFactory(PictureTakerImpl pictureTaker) {
37          mPictureTaker = pictureTaker;
38      }
39  
40      public static PictureTakerFactory create(Logger.Factory logFactory, MainThread mainExecutor,
41              CameraCommandExecutor commandExecutor,
42              ImageSaver.Builder imageSaverBuilder,
43              FrameServer frameServer,
44              RequestBuilder.Factory rootRequestBuilder,
45              ManagedImageReader sharedImageReader,
46              Supplier<OneCamera.PhotoCaptureParameters.Flash> flashMode, boolean cafSupport) {
47          // When flash is ON, always use the ConvergedImageCaptureCommand which
48          // performs the AE precapture sequence and AF precapture if supported.
49          ImageCaptureCommand flashOnCommand = cafSupport ? new ConvergedImageCaptureCommand() :
54              new ConvergedImageCaptureCommand();
59  
60          // When flash is OFF, wait for AF convergence if AF is supported, but not AE convergence
61          // (which can be very slow).
62          ImageCaptureCommand flashOffCommand = cafSupport ? new ConvergedImageCaptureCommand() :
67              new ConvergedImageCaptureCommand();
72  
73          // When flash is AUTO, wait for AE and AF if supported.
74          // TODO OPTIMIZE If the last converged-AE state indicates that flash is
75          // not necessary, then this could skip waiting for AE convergence.
76          ImageCaptureCommand flashAutoCommand = cafSupport ? new ConvergedImageCaptureCommand(
77                  sharedImageReader, frameServer, rootRequestBuilder,
78                  CameraDevice.TEMPLATE_PREVIEW /* repeatingRequestTemplate */,
79                  CameraDevice.TEMPLATE_STILL_CAPTURE /* stillCaptureRequestTemplate */,
80                  Arrays.asList(rootRequestBuilder), true /* ae */, true /* af */) :
81              new ConvergedImageCaptureCommand(
82                      sharedImageReader, frameServer, rootRequestBuilder,
83                      CameraDevice.TEMPLATE_PREVIEW /* repeatingRequestTemplate */,
84                      CameraDevice.TEMPLATE_STILL_CAPTURE /* stillCaptureRequestTemplate */,
85                      Arrays.asList(rootRequestBuilder), true /* ae */);
86  
87          ImageCaptureCommand flashBasedCommand = new FlashBasedPhotoCommand(logFactory, flashMode,
88                  flashOnCommand, flashAutoCommand, flashOffCommand);
89          return new PictureTakerFactory(new PictureTakerImpl(mainExecutor, commandExecutor,
90                  imageSaverBuilder, flashBasedCommand));
91      }
92  
93      public PictureTaker providePictureTaker() {
94          return mPictureTaker;
95      }

到这里我们就知道PictureTaker的实现是PictureTakerImpl,调用的takepicture最终是调用的PictureTakerImpl的takepicture

/packages/apps/Camera2/src/com/android/camera/one/v2/photo/PictureTakerImpl.java
79      @Override
80      public void takePicture(OneCamera.PhotoCaptureParameters params, final CaptureSession session) {
81          OneCamera.PictureCallback pictureCallback = params.callback;
82  
83          // Wrap the pictureCallback with a thread-safe adapter which guarantees
84          // that they are always invoked on the main thread.
85          PictureCallbackAdapter pictureCallbackAdapter =
86                  new PictureCallbackAdapter(pictureCallback, mMainExecutor);
87  
88          final Updatable<Void> imageExposureCallback =
89                  pictureCallbackAdapter.provideQuickExposeUpdatable();
90  
91          final ImageSaver imageSaver = mImageSaverBuilder.build(
92                  params.saverCallback,
93                  OrientationManager.DeviceOrientation.from(params.orientation),
94                  session);
95  
96          mCameraCommandExecutor.execute(new PictureTakerCommand(
97                  imageExposureCallback, imageSaver, session));
98      }

PictureTakerImpl的takePicture主要是执行了PictureTakerCommand我们看一下PictureTakerCommand主要是干嘛的:

/packages/apps/Camera2/src/com/android/camera/one/v2/photo/PictureTakerImpl.java
private final class PictureTakerCommand implements CameraCommand {
52          private PictureTakerCommand(Updatable<Void> imageExposureCallback,
53                  ImageSaver imageSaver,
54                  CaptureSession session) {
55              mImageExposureCallback = imageExposureCallback;
56              mImageSaver = imageSaver;
57              mSession = session;
58          }
59  
60          @Override
61          public void run() throws InterruptedException, CameraAccessException,
62                  CameraCaptureSessionClosedException, ResourceAcquisitionFailedException {
63              try {
64                  mCommand.run(mImageExposureCallback, mImageSaver);
65              } catch (Exception e) {
66                  mSession.cancel();
67                  throw e;
68              }
69          }

PictureTakerCommand实现了CameraCommand接口,它的run方法主要是执行了mCommand的run。这个mCommand就是创建PictureTakerImpl时传入的flashBasedCommand,它是在PictureTakerFactory的create函数中创建的。

/packages/apps/Camera2/src/com/android/camera/one/v2/photo/FlashBasedPhotoCommand.java
53      public void run(Updatable<Void> imageExposeCallback, ImageSaver imageSaver)
54              throws InterruptedException, CameraAccessException,
55              CameraCaptureSessionClosedException,
56              ResourceAcquisitionFailedException {
57          OneCamera.PhotoCaptureParameters.Flash flashMode = mFlashMode.get();
58          if (flashMode == OneCamera.PhotoCaptureParameters.Flash.ON) {
59              mLog.i("running flash-on command: " + mFlashOnCommand);
60              mFlashOnCommand.run(imageExposeCallback, imageSaver);
61          } else if (flashMode == OneCamera.PhotoCaptureParameters.Flash.AUTO) {
62              mLog.i("running flash-auto command: " + mFlashAutoCommand);
63              mFlashAutoCommand.run(imageExposeCallback, imageSaver);
64          } else {
65              mLog.i("running flash-off command: " + mFlashOffCommand);
66              mFlashOffCommand.run(imageExposeCallback, imageSaver);
67          }
68      }

可以看到主要是闪光灯模式的区别。无论调用哪个Command,最终都会调用到ConvergedImageCaptureCommand的run函数,这里以mFlashAutoCommand为例:

76          ImageCaptureCommand flashAutoCommand = cafSupport ? new ConvergedImageCaptureCommand(
77                  sharedImageReader, frameServer, rootRequestBuilder,
78                  CameraDevice.TEMPLATE_PREVIEW /* repeatingRequestTemplate */,
79                  CameraDevice.TEMPLATE_STILL_CAPTURE /* stillCaptureRequestTemplate */,
80                  Arrays.asList(rootRequestBuilder), true /* ae */, true /* af */) :
81              new ConvergedImageCaptureCommand(
82                      sharedImageReader, frameServer, rootRequestBuilder,
83                      CameraDevice.TEMPLATE_PREVIEW /* repeatingRequestTemplate */,
84                      CameraDevice.TEMPLATE_STILL_CAPTURE /* stillCaptureRequestTemplate */,
85                      Arrays.asList(rootRequestBuilder), true /* ae */);

接着我们看一下ConvergedImageCaptureCommand的执行流程:

/packages/apps/Camera2/src/com/android/camera/one/v2/photo/ConvergedImageCaptureCommand.java
149      @Override
150      public void run(Updatable<Void> imageExposureUpdatable, ImageSaver imageSaver) throws
151              InterruptedException, CameraAccessException, CameraCaptureSessionClosedException,
152              ResourceAcquisitionFailedException {
153          try (FrameServer.Session session = mFrameServer.createExclusiveSession()) {
154              try (ImageStream imageStream = mImageReader.createPreallocatedStream(mBurst.size())) {
155                  if (mWaitForAFConvergence) {
156                      waitForAFConvergence(session);
157                  }
158                  if (mWaitForAEConvergence) {
159                      waitForAEConvergence(session);
160                  }
161                  captureBurst(session, imageStream, imageExposureUpdatable, imageSaver);
162              } finally {
163                  // Always reset the repeating stream to ensure AF/AE are not
164                  // locked when this exits.
165                  // Note that this may still throw if the camera or session is
166                  // closed.
167                  resetRepeating(session);
168              }
169          } finally {
170              imageSaver.close();
171          }
172      }

主要是调用captureBurst传进来了session imageStream

packages/apps/Camera2/src/com/android/camera/one/v2/photo/ConvergedImageCaptureCommand.java
220      private void captureBurst(FrameServer.Session session, ImageStream imageStream, Updatable<Void>
221              imageExposureUpdatable, ImageSaver imageSaver) throws CameraAccessException,
222              InterruptedException, ResourceAcquisitionFailedException,
223              CameraCaptureSessionClosedException {
224          List<Request> burstRequest = new ArrayList<>(mBurst.size());
225          List<ListenableFuture<TotalCaptureResultProxy>> metadata = new ArrayList<>(mBurst.size());
226          boolean first = true;
             //这里的mStillCaptureRequestTemplate是创建时传进来的CameraDevice.TEMPLATE_STILL_CAPTURE
227          for (RequestBuilder.Factory builderTemplate : mBurst) {
228              RequestBuilder builder = builderTemplate.create(mStillCaptureRequestTemplate);
229  
230              if (mCAFSupport) {
231                  builder.setParam(CaptureRequest.CONTROL_AF_MODE, CaptureRequest
232                          .CONTROL_AF_MODE_CONTINUOUS_PICTURE);
233              }
234              builder.setParam(CaptureRequest.CONTROL_CAPTURE_INTENT,
235                      CaptureRequest.CONTROL_CAPTURE_INTENT_STILL_CAPTURE);
236  
237              if (first) {
238                  first = false;
239                  builder.addResponseListener(forFrameExposure(imageExposureUpdatable));
240              }
241              //接受camera 回传的数据
242              MetadataFuture metadataFuture = new MetadataFuture();
243              builder.addResponseListener(metadataFuture);
244              metadata.add(metadataFuture.getMetadata());
245  
246              builder.addStream(imageStream);
247  
248              burstRequest.add(builder.build());
249          }
250          //到这里就和我们之前在startPreview的流程差不多了就是RequestType变化了
251          session.submitRequest(burstRequest, FrameServer.RequestType.NON_REPEATING);
252  
253          for (int i = 0; i < mBurst.size(); i++) {
254              try {
255                  ImageProxy image = imageStream.getNext();
256                  imageSaver.addFullSizeImage(image, metadata.get(i));
257              } catch (BufferQueue.BufferQueueClosedException e) {
258                  // No more images will be available, so just quit.
259                  return;
260              }
261          }
262      }

最终也是通过CameraCaptureSessionImpl调用到了CameraDeviceImpl的captureBurst函数不过这次的streaming/repeating设置成了false

所以拍照也是Camera Server发送request只不过发送的request的类型为CameraDevice.TEMPLATE_STILL_CAPTURE,并且不需要repeating。Camera server接收到request后也会将转发给camera hal接着camera hal填充数据到ImageReader的surface中,接着通过BufferQueue的机制回传给app去生成图片消费。

四、关闭相机

模式切换,前后置相机切换,相机关闭等会调用到相机的close,这里以相机退出时的情况为例介绍:当Activity执行到onPause时会关闭相机代码如下:

/packages/apps/Camera2/src/com/android/camera/CameraActivity.java
1833      @Override
1834      public void onPauseTasks() {
             ...
1877          // Camera is in fatal state. A fatal dialog is presented to users, but users just hit home
1878          // button. Let's just kill the process.
1879          if (mCameraFatalError && !isFinishing()) {
1880              Log.v(TAG, "onPause when camera is in fatal state, call Activity.finish()");
1881              finish();
1882          } else {
1883              // Close the camera and wait for the operation done.
1884              Log.v(TAG, "onPause closing camera");
1885              if (mCameraController != null) {
1886                  mCameraController.closeCamera(true);
1887              }
1888          }
1889  
1890          profile.stop();
1891      }

其中关闭相机主要是调用mCameraController.closeCamera关闭相机的,那么这个mCameraController是什么时候创建的呢?答案是在Camera 模块(一)2.1小节的onCreateTasks中创建的如下:

/packages/apps/Camera2/src/com/android/camera/CameraActivity.java
1481              mCameraController = new CameraController(mAppContext, this, mMainHandler,
1482                      CameraAgentFactory.getAndroidCameraAgent(mAppContext,
1483                              CameraAgentFactory.CameraApi.API_1),
1484                      CameraAgentFactory.getAndroidCameraAgent(mAppContext,
1485                              CameraAgentFactory.CameraApi.AUTO),
1486                      mActiveCameraDeviceTracker);
1487              mCameraController.setCameraExceptionHandler(
1488                      new CameraExceptionHandler(mCameraExceptionCallback, mMainHandler));
/packages/apps/Camera2/src/com/android/camera/app/CameraController.java
79      public CameraController(@Nonnull Context context,
80            @Nullable CameraAgent.CameraOpenCallback callbackReceiver,
81            @Nonnull Handler handler,
82            @Nonnull CameraAgent cameraManager,
83            @Nonnull CameraAgent cameraManagerNg,
84            @Nonnull ActiveCameraDeviceTracker activeCameraDeviceTracker) {
85          mContext = context;
86          mCallbackReceiver = callbackReceiver;
87          mCallbackHandler = handler;
            //cameraManager是AndroidCameraAgentImpl对象
88          mCameraAgent = cameraManager;
89          // If the new implementation is the same as the old, the
90          // CameraAgentFactory decided this device doesn't support the new API.
            //这里的mCameraAgentNg就是AndroidCamera2AgentImpl对象
91          mCameraAgentNg = cameraManagerNg != cameraManager ? cameraManagerNg : null;
92          mActiveCameraDeviceTracker = activeCameraDeviceTracker;
93          mInfo = mCameraAgent.getCameraDeviceInfo();
94          if (mInfo == null && mCallbackReceiver != null) {
95              mCallbackReceiver.onDeviceOpenFailure(-1, "GETTING_CAMERA_INFO");
96          }
97      }

可以看到创建CameraController传入了两个AndroidCameraAgent其中一个是CameraApi.API_1的,另外一个则是CameraApi.AUTO,其中AUTO类型则是指的这个设备最高支持的Camera的API,比如SDK > 21 以后的设备就支持CameraApi.API_2,而之前的仅支持API_1.如果设备的SDK大于21那么构建mCameraController时传进来的两个CameraAgent就分别是AndroidCameraAgentImpl和AndroidCamera2AgentImpl,而mCameraController.closeCamera则是调用的

/packages/apps/Camera2/src/com/android/camera/app/CameraController.java
305      public void closeCamera(boolean synced) {
306          Log.v(TAG, "Closing camera");
307          mCameraProxy = null;
             //由于我们看的是Android12的代码,所以一定是支持CameraApi.API_2的这里的mUsingNewApi = true
308          if (mUsingNewApi) {
309              mCameraAgentNg.closeCamera(mCameraProxy, synced);
310          } else {
311              mCameraAgent.closeCamera(mCameraProxy, synced);
312          }
313          mRequestingCameraId = EMPTY_REQUEST;
314          mUsingNewApi = false;
315      }

所以接着会调用到AndroidCamera2AgentImpl的closeCamera。但是AndroidCamera2AgentImpl没有closeCamera的实现,所以实际调用到了它的父类CameraAgent的closeCamera:

/frameworks/ex/camera2/portability/src/com/android/ex/camera2/portability/CameraAgent.java
307      public void closeCamera(CameraProxy camera, boolean synced) {
308          try {
                 //这里传进来的synced为true
309              if (synced) {
310                  // Don't bother to wait since camera is in bad state.
311                  if (getCameraState().isInvalid()) {
312                      return;
313                  }
314                  final WaitDoneBundle bundle = new WaitDoneBundle();
315  
316                  getDispatchThread().runJobSync(new Runnable() {
317                      @Override
318                      public void run() {
319                          get().obtainMessage(CameraActions.RELEASE).sendToTarget();
320                          getCameraHandler().post(bundle.mUnlockRunnable);
321                      }}, bundle.mWaitLock, CAMERA_OPERATION_TIMEOUT_MS, "camera release");
322              } else {
323                  getDispatchThread().runJob(new Runnable() {
324                      @Override
325                      public void run() {
326                          getCameraHandler().removeCallbacksAndMessages(null);
327                          getCameraHandler().obtainMessage(CameraActions.RELEASE).sendToTarget();
328                      }});
329              }
330          } catch (final RuntimeException ex) {
331              getCameraExceptionHandler().onDispatchThreadException(ex);
332          }
333      }

CameraAgent的closeCamera主要是向DispatchThread的CameraHandler发送了一条CameraActions.RELEASE的message。而DispatchThread和CameraHandler都是在新建AndroidCamera2AgentImpl时构建的,所以这条CameraActions.RELEASE的message会在AndroidCamera2AgentImpl对象的DispatchThread中处理,具体由Camera2Handler处理:

/frameworks/ex/camera2/portability/src/com/android/ex/camera2/portability/AndroidCamera2AgentImpl.java$Camera2Handler
254                      case CameraActions.RELEASE: {
255                          if (mCameraState.getState() == AndroidCamera2StateHolder.CAMERA_UNOPENED) {
256                              Log.w(TAG, "Ignoring release at inappropriate time");
257                              break;
258                          }
259                          //这里的mSession是CameraCaptureSession 在2.3小节中可以知道具体就是CameraCaptureSessionImpl对象
260                          if (mSession != null) {
261                              closePreviewSession();
262                              mSession = null;
263                          }
                             //mCamera是CameraDevice 从1.1小节可以知道具体就是CameraDeviceImpl
264                          if (mCamera != null) {
265                              mCamera.close();
266                              mCamera = null;
267                          }
268                          mCameraProxy = null;
269                          mPersistentSettings = null;
270                          mActiveArray = null;
271                          if (mPreviewSurface != null) {
272                              mPreviewSurface.release();
273                              mPreviewSurface = null;
274                          }
275                          mPreviewTexture = null;
276                          if (mCaptureReader != null) {
277                              mCaptureReader.close();
278                              mCaptureReader = null;
279                          }
280                          mPreviewSize = null;
281                          mPhotoSize = null;
282                          mCameraIndex = 0;
283                          mCameraId = null;
284                          changeState(AndroidCamera2StateHolder.CAMERA_UNOPENED);
285                          break;
286                      }

上面主要的工作有两条:

  1. 调用closePreviewSession关闭预览画面
  2. 调用mCamera.close()关闭相机

4.1 closePreviewSession

/frameworks/ex/camera2/portability/src/com/android/ex/camera2/portability/AndroidCamera2AgentImpl.java
755          private void closePreviewSession() {
756              try {
757                  mSession.abortCaptures();
758                  mSession = null;
759              } catch (CameraAccessException ex) {
760                  Log.e(TAG, "Failed to close existing camera capture session", ex);
761              }
762              changeState(AndroidCamera2StateHolder.CAMERA_CONFIGURED);
763          }
/frameworks/base/core/java/android/hardware/camera2/impl/CameraCaptureSessionImpl.java
429      public void abortCaptures() throws CameraAccessException {
430          synchronized (mDeviceImpl.mInterfaceLock) {
431              checkNotClosed();
432  
433              if (DEBUG) {
434                  Log.v(TAG, mIdString + "abortCaptures");
435              }
436  
437              if (mAborting) {
438                  Log.w(TAG, mIdString + "abortCaptures - Session is already aborting; doing nothing");
439                  return;
440              }
441  
442              mAborting = true;
443              mAbortDrainer.taskStarted();
444              // 重点就是调用了CameraDeviceImpl的flush()函数
445              mDeviceImpl.flush();
446              // The next BUSY -> IDLE set of transitions will mark the end of the abort.
447          }
448      }

从上面可以看到closePreviewSession最终调用到了CameraDeviceImpl的flush()函数。

/frameworks/base/core/java/android/hardware/camera2/impl/CameraDeviceImpl.java
1314      public void flush() throws CameraAccessException {
1315          synchronized(mInterfaceLock) {
1316              checkIfCameraClosedOrInError();
1317  
1318              mDeviceExecutor.execute(mCallOnBusy);
1319  
1320              // If already idle, just do a busy->idle transition immediately, don't actually
1321              // flush.
1322              if (mIdle) {
1323                  mDeviceExecutor.execute(mCallOnIdle);
1324                  return;
1325              }
1326              //调用 mRemoteDevice.flush先flush正在处理的request
1327              long lastFrameNumber = mRemoteDevice.flush();
1328              if (mRepeatingRequestId != REQUEST_ID_NONE) {
1329                  checkEarlyTriggerSequenceCompleteLocked(mRepeatingRequestId, lastFrameNumber,
1330                          mRepeatingRequestTypes);
1331                  mRepeatingRequestId = REQUEST_ID_NONE;
1332                  mRepeatingRequestTypes = null;
1333              }
1334          }
1335      }

这里的mRemoteDevice是ICameraDeviceUserWrapper,它包装了一个用于进程间通信的AIDL接口,其服务端是1.2小节创建的CameraDeviceClient。

/frameworks/av/services/camera/libcameraservice/api2/CameraDeviceClient.cpp
1332  binder::Status CameraDeviceClient::flush(
1333          /*out*/
1334          int64_t* lastFrameNumber) {
          ...
1346  
1347      Mutex::Autolock idLock(mStreamingRequestIdLock);
1348      mStreamingRequestId = REQUEST_ID_NONE;
1349      status_t err = mDevice->flush(lastFrameNumber);
1350      if (err != OK) {
1351          res = STATUS_ERROR_FMT(CameraService::ERROR_INVALID_OPERATION,
1352                  "Camera %s: Error flushing device: %s (%d)", mCameraIdStr.string(), strerror(-err), err);
1353      }
1354      return res;
1355  }

CameraDeviceClient的flush主要是调用mDevice->flush并将最后一帧的序号传出去,这里的mDevice是1.3小节提到的Camera3Device

/frameworks/av/services/camera/libcameraservice/device3/Camera3Device.cpp
2068  status_t Camera3Device::flush(int64_t *frameNumber) {
2069      ATRACE_CALL();
2070      ALOGV("%s: Camera %s: Flushing all requests", __FUNCTION__, mId.string());
2071      Mutex::Autolock il(mInterfaceLock);
2072  
2073      {
2074          Mutex::Autolock l(mLock);
2075  
2076          // b/116514106 "disconnect()" can get called twice for the same device. The
2077          // camera device will not be initialized during the second run.
2078          if (mStatus == STATUS_UNINITIALIZED) {
2079              return OK;
2080          }
2081  
2082          mRequestThread->clear(/*out*/frameNumber);
2083  
2084          // Stop session and stream counter
2085          mSessionStatsBuilder.stopCounter();
2086      }
2087  
2088      return mRequestThread->flush();
2089  }

Camera3Device::flush主要是对RequestThread的操作,先调用clear接着调用flush

其中clear的代码如下:

/frameworks/av/services/camera/libcameraservice/device3/Camera3Device.cpp
4340  status_t Camera3Device::RequestThread::clear(
4341          /*out*/int64_t *lastFrameNumber) {
4342      ATRACE_CALL();
4343      Mutex::Autolock l(mRequestLock);
4344      ALOGV("RequestThread::%s:", __FUNCTION__);
4345      // 1.首先清空mRepeatingRequests
4346      mRepeatingRequests.clear();
4347  
4348      // Send errors for all requests pending in the request queue, including
4349      // pending repeating requests
4350      sp<NotificationListener> listener = mListener.promote();
4351      if (listener != NULL) {
4352          for (RequestList::iterator it = mRequestQueue.begin();
4353                   it != mRequestQueue.end(); ++it) {
4354              // Abort the input buffers for reprocess requests.
4355              if ((*it)->mInputStream != NULL) {
4356                  camera_stream_buffer_t inputBuffer;
4357                  camera3::Size inputBufferSize;
4358                  status_t res = (*it)->mInputStream->getInputBuffer(&inputBuffer,
4359                          &inputBufferSize, /*respectHalLimit*/ false);
4360                  if (res != OK) {
4361                      ALOGW("%s: %d: couldn't get input buffer while clearing the request "
4362                              "list: %s (%d)", __FUNCTION__, __LINE__, strerror(-res), res);
4363                  } else {
4364                      inputBuffer.status = CAMERA_BUFFER_STATUS_ERROR;
4365                      res = (*it)->mInputStream->returnInputBuffer(inputBuffer);
4366                      if (res != OK) {
4367                          ALOGE("%s: %d: couldn't return input buffer while clearing the request "
4368                                  "list: %s (%d)", __FUNCTION__, __LINE__, strerror(-res), res);
4369                      }
4370                  }
4371              }
4372              // Set the frame number this request would have had, if it
4373              // had been submitted; this frame number will not be reused.
4374              // The requestId and burstId fields were set when the request was
4375              // submitted originally (in convertMetadataListToRequestListLocked)
4376              (*it)->mResultExtras.frameNumber = mFrameNumber++;
                  //2. 发送Error给所有的Request
4377              listener->notifyError(hardware::camera2::ICameraDeviceCallbacks::ERROR_CAMERA_REQUEST,
4378                      (*it)->mResultExtras);
4379          }
4380      }
          //3.清除mRequestQueue
4381      mRequestQueue.clear();
4382  
4383      Mutex::Autolock al(mTriggerMutex);
4384      mTriggerMap.clear();
4385      if (lastFrameNumber != NULL) {
4386          *lastFrameNumber = mRepeatingLastFrameNumber;
4387      }
4388      mRepeatingLastFrameNumber = hardware::camera2::ICameraDeviceUser::NO_IN_FLIGHT_REPEATING_FRAMES;
4389      mRequestSignal.signal();
4390      return OK;
4391  }

RequestThread的Clear动作主要就是

1.清空mRepeatingRequests

2.发送Error给当前队列中所有的Request

3.清除mRequestQueue

RequestThread的flush代码如下:

/frameworks/av/services/camera/libcameraservice/device3/Camera3Device.cpp
4393  status_t Camera3Device::RequestThread::flush() {
4394      ATRACE_CALL();
4395      Mutex::Autolock l(mFlushLock);
4396  
4397      return mInterface->flush();
4398  }
3979  status_t Camera3Device::HalInterface::flush() {
3980      ATRACE_NAME("CameraHal::flush");
3981      if (!valid()) return INVALID_OPERATION;
3982      status_t res = OK;
3983  
3984      auto err = mHidlSession->flush();
3985      if (!err.isOk()) {
3986          ALOGE("%s: Transaction error: %s", __FUNCTION__, err.description().c_str());
3987          res = DEAD_OBJECT;
3988      } else {
3989          res = CameraProviderManager::mapToStatusT(err);
3990      }
3991  
3992      return res;
3993  }

其中mHidlSession用来和Camera Hal进行通信,所以RequestThread的flush最后会调用到Camera Hal的flush。

4.2 mCamera.close()

mCamera.close会调用到CameraDeviceImpl的close:

/frameworks/base/core/java/android/hardware/camera2/impl/CameraDeviceImpl.java
1337      @Override
1338      public void close() {
1339          synchronized (mInterfaceLock) {
1340              if (mClosing.getAndSet(true)) {
1341                  return;
1342              }
1343  
1344              if (mOfflineSwitchService != null) {
1345                  mOfflineSwitchService.shutdownNow();
1346                  mOfflineSwitchService = null;
1347              }
1348              //mRemoteDevice是Native的CameraClient的远程代理 主要是调用disconnect完成关闭的动作
1349              if (mRemoteDevice != null) {
1350                  mRemoteDevice.disconnect();
1351                  mRemoteDevice.unlinkToDeath(this, /*flags*/0);
1352              }
1363  
1364              // Only want to fire the onClosed callback once;
1365              // either a normal close where the remote device is valid
1366              // or a close after a startup error (no remote device but in error state)
                  //通知APP camera已经关闭
1367              if (mRemoteDevice != null || mInError) {
1368                  mDeviceExecutor.execute(mCallOnClosed);
1369              }
1370  
1371              mRemoteDevice = null;
1372          }
1373      }

这里的mRemoteDevice从1.2小节可以知道是CameraDeviceClient的远程代理对象,所以disconnect也会调用到CameraDeviceClient的disconnect。但是CameraDeviceClient中没有disconnect函数,所以接着从它的父类一层层往上找,最后发现disconnect方法在Camera2ClientBase中:

/frameworks/av/services/camera/libcameraservice/common/Camera2ClientBase.cpp
186  template <typename TClientBase>
187  binder::Status Camera2ClientBase<TClientBase>::disconnect() {
188      ATRACE_CALL();
189      Mutex::Autolock icl(mBinderSerializationLock);
		 //这里会调用到CameraDeviceClient的detachDevice
206      detachDevice();
    
208      CameraService::BasicClient::disconnect();
209  
210      ALOGV("Camera %s: Shut down complete", TClientBase::mCameraIdStr.string());
211  
212      return res;
213  }
215  template <typename TClientBase>
216  void Camera2ClientBase<TClientBase>::detachDevice() {
217      if (mDevice == 0) return;
         //这里的mDevice是Camera3Device
218      mDevice->disconnect();
219  
220      ALOGV("Camera %s: Detach complete", TClientBase::mCameraIdStr.string());
221  }

/frameworks/av/services/camera/libcameraservice/api2/CameraDeviceClient.cpp
1870  void CameraDeviceClient::detachDevice() {
1871      if (mDevice == 0) return;
1872  
1873      nsecs_t startTime = systemTime();
1874      ALOGV("Camera %s: Stopping processors", mCameraIdStr.string());
1875  	  //移除mFrameProcessor回调并停止线程
1876      mFrameProcessor->removeListener(camera2::FrameProcessorBase::FRAME_PROCESSOR_LISTENER_MIN_ID,
1877                                      camera2::FrameProcessorBase::FRAME_PROCESSOR_LISTENER_MAX_ID,
1878                                      /*listener*/this);
1879      mFrameProcessor->requestExit();
1880      ALOGV("Camera %s: Waiting for threads", mCameraIdStr.string());
1881      mFrameProcessor->join();
1882      ALOGV("Camera %s: Disconnecting device", mCameraIdStr.string());
1883  
1884      // WORKAROUND: HAL refuses to disconnect while there's streams in flight
1885      {
1886          int64_t lastFrameNumber;
1887          status_t code;
1888          if ((code = mDevice->flush(&lastFrameNumber)) != OK) {
1889              ALOGE("%s: flush failed with code 0x%x", __FUNCTION__, code);
1890          }
1891  
1892          if ((code = mDevice->waitUntilDrained()) != OK) {
1893              ALOGE("%s: waitUntilDrained failed with code 0x%x", __FUNCTION__,
1894                    code);
1895          }
1896      }
1897  
1898      for (size_t i = 0; i < mCompositeStreamMap.size(); i++) {
1899          auto ret = mCompositeStreamMap.valueAt(i)->deleteInternalStreams();
1900          if (ret != OK) {
1901              ALOGE("%s: Failed removing composite stream  %s (%d)", __FUNCTION__,
1902                      strerror(-ret), ret);
1903          }
1904      }
1905      mCompositeStreamMap.clear();
1906      //调用父类的Camera2ClientBase的detachDevice
1907      Camera2ClientBase::detachDevice();
1908  
1909      int32_t closeLatencyMs = ns2ms(systemTime() - startTime);
1910      CameraServiceProxyWrapper::logClose(mCameraIdStr, closeLatencyMs);
}

从上面可以看出disconnect最终会调用到Camera3Device的disconnect

/frameworks/av/services/camera/libcameraservice/device3/Camera3Device.cpp
388  status_t Camera3Device::disconnect() {
389      return disconnectImpl();
390  }
391  
392  status_t Camera3Device::disconnectImpl() {
393      ATRACE_CALL();
394      ALOGI("%s: E", __FUNCTION__);
395  
396      status_t res = OK;
397      std::vector<wp<Camera3StreamInterface>> streams;
398      {
399          Mutex::Autolock il(mInterfaceLock);
400          nsecs_t maxExpectedDuration = getExpectedInFlightDuration();
401          {
402              Mutex::Autolock l(mLock);
403              if (mStatus == STATUS_UNINITIALIZED) return res;
404  
405              if (mStatus == STATUS_ACTIVE ||
406                      (mStatus == STATUS_ERROR && mRequestThread != NULL)) {
407                  res = mRequestThread->clearRepeatingRequests();
408                  if (res != OK) {
409                      SET_ERR_L("Can't stop streaming");
410                      // Continue to close device even in case of error
411                  } else {
412                      res = waitUntilStateThenRelock(/*active*/ false, maxExpectedDuration);
413                      if (res != OK) {
414                          SET_ERR_L("Timeout waiting for HAL to drain (% " PRIi64 " ns)",
415                                  maxExpectedDuration);
416                          // Continue to close device even in case of error
417                      }
418                  }
419              }
420  
421              if (mStatus == STATUS_ERROR) {
422                  CLOGE("Shutting down in an error state");
423              }
424  
425              if (mStatusTracker != NULL) {
426                  mStatusTracker->requestExit();
427              }
428  
429              if (mRequestThread != NULL) {
430                  mRequestThread->requestExit();
431              }
432  
433              streams.reserve(mOutputStreams.size() + (mInputStream != nullptr ? 1 : 0));
434              for (size_t i = 0; i < mOutputStreams.size(); i++) {
435                  streams.push_back(mOutputStreams[i]);
436              }
437              if (mInputStream != nullptr) {
438                  streams.push_back(mInputStream);
439              }
440          }
441      }
442      // Joining done without holding mLock and mInterfaceLock, otherwise deadlocks may ensue
443      // as the threads try to access parent state (b/143513518)
444      if (mRequestThread != NULL && mStatus != STATUS_ERROR) {
445          // HAL may be in a bad state, so waiting for request thread
446          // (which may be stuck in the HAL processCaptureRequest call)
447          // could be dangerous.
448          // give up mInterfaceLock here and then lock it again. Could this lead
449          // to other deadlocks
450          mRequestThread->join();
451      }
452      {
453          Mutex::Autolock il(mInterfaceLock);
454          if (mStatusTracker != NULL) {
455              mStatusTracker->join();
456          }
457  
458          if (mInjectionMethods->isInjecting()) {
459              mInjectionMethods->stopInjection();
460          }
461  
462          HalInterface* interface;
463          {
464              Mutex::Autolock l(mLock);
465              mRequestThread.clear();
466              Mutex::Autolock stLock(mTrackerLock);
467              mStatusTracker.clear();
468              interface = mInterface.get();
469          }
470  
471          // Call close without internal mutex held, as the HAL close may need to
472          // wait on assorted callbacks,etc, to complete before it can return.
473          interface->close();
474  
475          flushInflightRequests();
476  
477          {
478              Mutex::Autolock l(mLock);
479              mInterface->clear();
480              mOutputStreams.clear();
481              mInputStream.clear();
482              mDeletedStreams.clear();
483              mBufferManager.clear();
484              internalUpdateStatusLocked(STATUS_UNINITIALIZED);
485          }
486  
487          for (auto& weakStream : streams) {
488                sp<Camera3StreamInterface> stream = weakStream.promote();
489              if (stream != nullptr) {
490                  ALOGE("%s: Stream %d leaked! strong reference (%d)!",
491                          __FUNCTION__, stream->getId(), stream->getStrongCount() - 1);
492              }
493          }
494      }
495      ALOGI("%s: X", __FUNCTION__);
496      return res;
497  }

Camera3Device的disconnect主要就是停止RequestThread,并调用interface->close关闭HAL session、断开和HAL service的连接。自此,Camera的关闭流程就完成了。