5 changes: 5 additions & 0 deletions packages/camera/camera_avfoundation/CHANGELOG.md
@@ -1,3 +1,8 @@
## 0.9.21+4

* Migrates `updateOrientation` and `setCaptureSessionPreset` methods to Swift.
* Removes `FLTCam` class.
Contributor comment: Yay!

## 0.9.21+3

* Removes code for versions of iOS older than 13.0.
@@ -9,7 +9,7 @@ import CoreMotion
import camera_avfoundation_objc
#endif

final class DefaultCamera: FLTCam, Camera {
final class DefaultCamera: NSObject, Camera {
var dartAPI: FCPCameraEventApi?
var onFrameAvailable: (() -> Void)?

@@ -23,16 +23,6 @@ final class DefaultCamera: FLTCam, Camera {

private(set) var isPreviewPaused = false

override var deviceOrientation: UIDeviceOrientation {
get { super.deviceOrientation }
set {
guard newValue != super.deviceOrientation else { return }

super.deviceOrientation = newValue
updateOrientation()
}
}

var minimumExposureOffset: CGFloat { CGFloat(captureDevice.minExposureTargetBias) }
var maximumExposureOffset: CGFloat { CGFloat(captureDevice.maxExposureTargetBias) }
var minimumAvailableZoomFactor: CGFloat { captureDevice.minAvailableVideoZoomFactor }
@@ -53,18 +43,34 @@ final class DefaultCamera: FLTCam, Camera {
private let mediaSettings: FCPPlatformMediaSettings
private let mediaSettingsAVWrapper: FLTCamMediaSettingsAVWrapper

private let videoCaptureSession: FLTCaptureSession
private let audioCaptureSession: FLTCaptureSession

/// A wrapper for AVCaptureDevice creation to allow for dependency injection in tests.
private let captureDeviceFactory: CaptureDeviceFactory
private let audioCaptureDeviceFactory: AudioCaptureDeviceFactory
private let captureDeviceInputFactory: FLTCaptureDeviceInputFactory
private let assetWriterFactory: AssetWriterFactory
private let inputPixelBufferAdaptorFactory: InputPixelBufferAdaptorFactory

/// A wrapper for CMVideoFormatDescriptionGetDimensions.
/// Allows for alternate implementations in tests.
private let videoDimensionsForFormat: VideoDimensionsForFormat

private let deviceOrientationProvider: FLTDeviceOrientationProviding
private let motionManager = CMMotionManager()

private(set) var captureDevice: FLTCaptureDevice
// Setter exposed for tests.
var captureVideoOutput: FLTCaptureVideoDataOutput
// Setter exposed for tests.
var capturePhotoOutput: FLTCapturePhotoOutput
private var captureVideoInput: FLTCaptureInput

private var videoWriter: FLTAssetWriter?
private var videoWriterInput: FLTAssetWriterInput?
private var audioWriterInput: FLTAssetWriterInput?
private var assetWriterPixelBufferAdaptor: FLTAssetWriterInputPixelBufferAdaptor?
private var videoAdaptor: FLTAssetWriterInputPixelBufferAdaptor?

/// A dictionary to retain all in-progress FLTSavePhotoDelegates. The key of the dictionary is the
@@ -76,11 +82,20 @@ final class DefaultCamera: FLTCam, Camera {

private var imageStreamHandler: FLTImageStreamHandler?

private var previewSize: CGSize?
var deviceOrientation: UIDeviceOrientation {
didSet {
guard deviceOrientation != oldValue else { return }
updateOrientation()
}
}

/// Tracks the latest pixel buffer sent from AVFoundation's sample buffer delegate callback.
/// Used to deliver the latest pixel buffer to the flutter engine via the `copyPixelBuffer` API.
private var latestPixelBuffer: CVPixelBuffer?

private var videoRecordingPath: String?
private var isRecording = false
private var isRecordingPaused = false
private var isFirstVideoSample = false
private var videoIsDisconnected = false
@@ -103,6 +118,8 @@ final class DefaultCamera: FLTCam, Camera {
/// https://github.com/flutter/plugins/pull/4520#discussion_r766335637
private var maxStreamingPendingFramesCount = 4

private var fileFormat = FCPPlatformImageFileFormat.jpeg
private var lockedCaptureOrientation = UIDeviceOrientation.unknown
private var exposureMode = FCPPlatformExposureMode.auto
private var focusMode = FCPPlatformFocusMode.auto
private var flashMode: FCPPlatformFlashMode
@@ -146,24 +163,19 @@ final class DefaultCamera: FLTCam, Camera {
captureSessionQueue = configuration.captureSessionQueue
mediaSettings = configuration.mediaSettings
mediaSettingsAVWrapper = configuration.mediaSettingsWrapper
videoCaptureSession = configuration.videoCaptureSession
audioCaptureSession = configuration.audioCaptureSession
captureDeviceFactory = configuration.captureDeviceFactory
audioCaptureDeviceFactory = configuration.audioCaptureDeviceFactory
captureDeviceInputFactory = configuration.captureDeviceInputFactory
assetWriterFactory = configuration.assetWriterFactory
inputPixelBufferAdaptorFactory = configuration.inputPixelBufferAdaptorFactory
videoDimensionsForFormat = configuration.videoDimensionsForFormat
deviceOrientationProvider = configuration.deviceOrientationProvider

let captureDevice = captureDeviceFactory(configuration.initialCameraName)
captureDevice = captureDeviceFactory(configuration.initialCameraName)
flashMode = captureDevice.hasFlash ? .auto : .off

super.init()

videoCaptureSession = configuration.videoCaptureSession
audioCaptureSession = configuration.audioCaptureSession
videoDimensionsForFormat = configuration.videoDimensionsForFormat

self.captureDevice = captureDevice

capturePhotoOutput = FLTDefaultCapturePhotoOutput(photoOutput: AVCapturePhotoOutput())
capturePhotoOutput.highResolutionCaptureEnabled = true

@@ -178,6 +190,8 @@ final class DefaultCamera: FLTCam, Camera {
videoFormat: videoFormat,
captureDeviceInputFactory: configuration.captureDeviceInputFactory)

super.init()

captureVideoOutput.setSampleBufferDelegate(self, queue: captureSessionQueue)

videoCaptureSession.addInputWithNoConnections(captureVideoInput)
@@ -196,11 +210,6 @@ final class DefaultCamera: FLTCam, Camera {
mediaSettingsAVWrapper.beginConfiguration(for: videoCaptureSession)
defer { mediaSettingsAVWrapper.commitConfiguration(for: videoCaptureSession) }

// Possible values for presets are hard-coded in FLT interface having
// corresponding AVCaptureSessionPreset counterparts.
// If _resolutionPreset is not supported by camera there is
// fallback to lower resolution presets.
// If none can be selected there is error condition.
try setCaptureSessionPreset(mediaSettings.resolutionPreset)

FLTSelectBestFormatForRequestedFrameRate(
@@ -225,6 +234,110 @@ final class DefaultCamera: FLTCam, Camera {
updateOrientation()
}

// Possible values for presets are hard-coded in FLT interface having
// corresponding AVCaptureSessionPreset counterparts.
// If _resolutionPreset is not supported by camera there is
// fallback to lower resolution presets.
// If none can be selected there is error condition.
private func setCaptureSessionPreset(
_ resolutionPreset: FCPPlatformResolutionPreset
) throws {
switch resolutionPreset {
case .max:
if let bestFormat = highestResolutionFormat(forCaptureDevice: captureDevice) {
videoCaptureSession.sessionPreset = .inputPriority
do {
try captureDevice.lockForConfiguration()
// Set the best device format found and finish the device configuration.
captureDevice.activeFormat = bestFormat
captureDevice.unlockForConfiguration()
break
}
}
fallthrough
case .ultraHigh:
if videoCaptureSession.canSetSessionPreset(.hd4K3840x2160) {
videoCaptureSession.sessionPreset = .hd4K3840x2160
break
}
if videoCaptureSession.canSetSessionPreset(.high) {
videoCaptureSession.sessionPreset = .high
break
}
fallthrough
case .veryHigh:
if videoCaptureSession.canSetSessionPreset(.hd1920x1080) {
videoCaptureSession.sessionPreset = .hd1920x1080
break
}
fallthrough
case .high:
if videoCaptureSession.canSetSessionPreset(.hd1280x720) {
videoCaptureSession.sessionPreset = .hd1280x720
break
}
fallthrough
case .medium:
if videoCaptureSession.canSetSessionPreset(.vga640x480) {
videoCaptureSession.sessionPreset = .vga640x480
break
}
fallthrough
case .low:
if videoCaptureSession.canSetSessionPreset(.cif352x288) {
videoCaptureSession.sessionPreset = .cif352x288
break
}
fallthrough
default:
if videoCaptureSession.canSetSessionPreset(.low) {
videoCaptureSession.sessionPreset = .low
} else {
throw NSError(
domain: NSCocoaErrorDomain,
code: URLError.unknown.rawValue,
userInfo: [
NSLocalizedDescriptionKey: "No capture session available for current capture session."
])
}
}

let size = videoDimensionsForFormat(captureDevice.activeFormat)
previewSize = CGSize(width: CGFloat(size.width), height: CGFloat(size.height))
audioCaptureSession.sessionPreset = videoCaptureSession.sessionPreset
}

/// Finds the highest available resolution in terms of pixel count for the given device.
/// Preferred are formats with the same subtype as current activeFormat.
private func highestResolutionFormat(forCaptureDevice captureDevice: FLTCaptureDevice)
-> FLTCaptureDeviceFormat?
{
let preferredSubType = CMFormatDescriptionGetMediaSubType(
captureDevice.activeFormat.formatDescription)
var bestFormat: FLTCaptureDeviceFormat? = nil
var maxPixelCount: UInt = 0
var isBestSubTypePreferred = false

for format in captureDevice.formats {
let resolution = videoDimensionsForFormat(format)
let height = UInt(resolution.height)
let width = UInt(resolution.width)
let pixelCount = height * width
let subType = CMFormatDescriptionGetMediaSubType(format.formatDescription)
let isSubTypePreferred = subType == preferredSubType

if pixelCount > maxPixelCount
|| (pixelCount == maxPixelCount && isSubTypePreferred && !isBestSubTypePreferred)
{
bestFormat = format
maxPixelCount = pixelCount
isBestSubTypePreferred = isSubTypePreferred
}
}

return bestFormat
}

func setUpCaptureSessionForAudioIfNeeded() {
// Don't setup audio twice or we will lose the audio.
guard !mediaSettings.enableAudio || !isAudioSetup else { return }
@@ -315,8 +428,9 @@ final class DefaultCamera: FLTCam, Camera {
// Get all the state on the current thread, not the main thread.
let state = FCPPlatformCameraState.make(
withPreviewSize: FCPPlatformSize.make(
withWidth: Double(previewSize.width),
height: Double(previewSize.height)
// previewSize is set during init, so it will never be nil.
withWidth: previewSize!.width,
height: previewSize!.height
),
exposureMode: exposureMode,
focusMode: focusMode,
@@ -621,6 +735,45 @@ final class DefaultCamera: FLTCam, Camera {
return file
}

private func updateOrientation() {
guard !isRecording else { return }

let orientation =
(lockedCaptureOrientation != .unknown)
? lockedCaptureOrientation
: deviceOrientation

updateOrientation(orientation, forCaptureOutput: capturePhotoOutput)
updateOrientation(orientation, forCaptureOutput: captureVideoOutput)
}

private func updateOrientation(
_ orientation: UIDeviceOrientation, forCaptureOutput captureOutput: FLTCaptureOutput
) {
if let connection = captureOutput.connection(withMediaType: .video),
connection.isVideoOrientationSupported
{
connection.videoOrientation = videoOrientation(forDeviceOrientation: orientation)
}
}

private func videoOrientation(forDeviceOrientation deviceOrientation: UIDeviceOrientation)
-> AVCaptureVideoOrientation
{
switch deviceOrientation {
case .portrait:
return .portrait
case .landscapeLeft:
return .landscapeRight
case .landscapeRight:
return .landscapeLeft
case .portraitUpsideDown:
return .portraitUpsideDown
default:
return .portrait
}
}

func lockCaptureOrientation(_ pigeonOrientation: FCPPlatformDeviceOrientation) {
let orientation = FCPGetUIDeviceOrientationForPigeonDeviceOrientation(pigeonOrientation)
if lockedCaptureOrientation != orientation {
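
The orientation handling in this diff replaces FLTCam's overridden deviceOrientation property (which called through to super and then updateOrientation) with a stored property and a didSet observer on DefaultCamera. A minimal standalone sketch of that property-observer pattern, with illustrative names that are not part of the plugin's API:

import UIKit

final class OrientationTracker {
  // Stored property with an observer: the update runs only when the value
  // actually changes, mirroring the guard in DefaultCamera.deviceOrientation.
  var deviceOrientation: UIDeviceOrientation = .unknown {
    didSet {
      guard deviceOrientation != oldValue else { return }
      updateOrientation()
    }
  }

  private func updateOrientation() {
    // In DefaultCamera this forwards the orientation to the photo and video outputs.
    print("orientation changed: \(deviceOrientation.rawValue)")
  }
}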