
Commit 97c5ed8

Merge branch 'main' into main
2 parents 9d52c0d + c99425e

File tree

127 files changed: +9723, -7152 lines


.ci/flutter_master.version

Lines changed: 1 addition & 1 deletion
@@ -1 +1 @@
-8303a96a0a9968b98eb3dadc5475d1324cbcab7b
+d733bea58c1a761547a3566476ef2d5840631376

.ci/flutter_stable.version

Lines changed: 1 addition & 1 deletion
@@ -1 +1 @@
-b25305a8832cfc6ba632a7f87ad455e319dccce8
+6fba2447e95c451518584c35e25f5433f14d888c

packages/camera/camera_avfoundation/CHANGELOG.md

Lines changed: 10 additions & 1 deletion
@@ -1,8 +1,17 @@
+## 0.9.20+1
+
+* Migrates lifecycle methods (`start`, `stop`, `close`) to Swift.
+* Migrates exposure and focus related methods to Swift.
+* Migrates `receivedImageStreamData` and `reportInitializationState` methods to Swift.
+
+## 0.9.20
+
+* Fixes incorrect types in image stream events.
+
 ## 0.9.19+3
 
 * Fixes race condition when starting image stream.
 
-
 ## 0.9.19+2
 
 * Adds the `Camera` Swift protocol.

packages/camera/camera_avfoundation/example/ios/RunnerTests/StreamingTests.swift

Lines changed: 37 additions & 0 deletions
@@ -123,4 +123,41 @@ final class StreamingTests: XCTestCase {
 
     waitForExpectations(timeout: 30, handler: nil)
   }
+
+  func testImageStreamEventFormat() {
+    let (camera, testAudioOutput, sampleBuffer, testAudioConnection) = createCamera()
+
+    let expectation = expectation(description: "Received a valid event")
+
+    let handlerMock = MockImageStreamHandler()
+    handlerMock.eventSinkStub = { event in
+      let imageBuffer = event as! [String: Any]
+
+      XCTAssertTrue(imageBuffer["width"] is NSNumber)
+      XCTAssertTrue(imageBuffer["height"] is NSNumber)
+      XCTAssertTrue(imageBuffer["format"] is NSNumber)
+      XCTAssertTrue(imageBuffer["lensAperture"] is NSNumber)
+      XCTAssertTrue(imageBuffer["sensorExposureTime"] is NSNumber)
+      XCTAssertTrue(imageBuffer["sensorSensitivity"] is NSNumber)
+
+      let planes = imageBuffer["planes"] as! [[String: Any]]
+      let planeBuffer = planes[0]
+
+      XCTAssertTrue(planeBuffer["bytesPerRow"] is NSNumber)
+      XCTAssertTrue(planeBuffer["width"] is NSNumber)
+      XCTAssertTrue(planeBuffer["height"] is NSNumber)
+      XCTAssertTrue(planeBuffer["bytes"] is FlutterStandardTypedData)
+
+      expectation.fulfill()
+    }
+    let messenger = MockFlutterBinaryMessenger()
+    camera.startImageStream(with: messenger, imageStreamHandler: handlerMock) { _ in }
+
+    waitForQueueRoundTrip(with: DispatchQueue.main)
+    XCTAssertEqual(camera.isStreamingImages, true)
+
+    camera.captureOutput(testAudioOutput, didOutput: sampleBuffer, from: testAudioConnection)
+
+    waitForExpectations(timeout: 30, handler: nil)
+  }
 }
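The test above only asserts runtime types on the raw [String: Any] event. For reference, the same fields could be bridged to concrete Swift values roughly as sketched below; ImageStreamFrame is a hypothetical helper name for illustration and is not part of the plugin or of this commit.

import Foundation

// Illustrative decoding of the event dictionary asserted above; ImageStreamFrame is a
// hypothetical helper for this sketch, not part of the plugin or of this commit.
struct ImageStreamFrame {
  let width: Int
  let height: Int
  let format: Int
  let lensAperture: Double
  let sensorExposureTime: Int
  let sensorSensitivity: Double
  let planes: [[String: Any]]

  init?(event: [String: Any]) {
    // Numeric fields arrive as NSNumber; bridge them to concrete Swift types.
    guard
      let width = (event["width"] as? NSNumber)?.intValue,
      let height = (event["height"] as? NSNumber)?.intValue,
      let format = (event["format"] as? NSNumber)?.intValue,
      let lensAperture = (event["lensAperture"] as? NSNumber)?.doubleValue,
      let sensorExposureTime = (event["sensorExposureTime"] as? NSNumber)?.intValue,
      let sensorSensitivity = (event["sensorSensitivity"] as? NSNumber)?.doubleValue,
      let planes = event["planes"] as? [[String: Any]]
    else { return nil }
    self.width = width
    self.height = height
    self.format = format
    self.lensAperture = lensAperture
    self.sensorExposureTime = sensorExposureTime
    self.sensorSensitivity = sensorSensitivity
    self.planes = planes
  }
}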

packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation/Camera.swift

Lines changed: 27 additions & 0 deletions
@@ -34,15 +34,23 @@ protocol Camera: FlutterTexture, AVCaptureVideoDataOutputSampleBufferDelegate,
 
   func setUpCaptureSessionForAudioIfNeeded()
 
+  /// Informs the Dart side of the plugin of the current camera state and capabilities.
   func reportInitializationState()
 
   /// Acknowledges the receipt of one image stream frame.
+  ///
+  /// This should be called each time a frame is received. Failing to call it may
+  /// cause later frames to be dropped instead of streamed.
   func receivedImageStreamData()
 
   func start()
   func stop()
 
   /// Starts recording a video with an optional streaming messenger.
+  /// If the messenger is non-nil then it will be called for each
+  /// captured frame, allowing streaming concurrently with recording.
+  ///
+  /// @param messenger Nullable messenger for capturing each frame.
   func startVideoRecording(
     completion: @escaping (_ error: FlutterError?) -> Void,
     messengerForStreaming: FlutterBinaryMessenger?
@@ -61,12 +69,31 @@
 
   func setExposureMode(_ mode: FCPPlatformExposureMode)
   func setExposureOffset(_ offset: Double)
+
+  /// Sets the exposure point, in a (0,1) coordinate system.
+  ///
+  /// If @c point is nil, the exposure point will reset to the center.
   func setExposurePoint(
     _ point: FCPPlatformPoint?,
     withCompletion: @escaping (_ error: FlutterError?) -> Void
   )
 
+  /// Sets FocusMode on the current AVCaptureDevice.
+  ///
+  /// If the @c focusMode is set to FocusModeAuto the AVCaptureDevice is configured to use
+  /// AVCaptureFocusModeContinuousModeAutoFocus when supported, otherwise it is set to
+  /// AVCaptureFocusModeAutoFocus. If neither AVCaptureFocusModeContinuousModeAutoFocus nor
+  /// AVCaptureFocusModeAutoFocus are supported focus mode will not be set.
+  /// If @c focusMode is set to FocusModeLocked the AVCaptureDevice is configured to use
+  /// AVCaptureFocusModeAutoFocus. If AVCaptureFocusModeAutoFocus is not supported focus mode will not
+  /// be set.
+  ///
+  /// @param mode The focus mode that should be applied.
   func setFocusMode(_ mode: FCPPlatformFocusMode)
+
+  /// Sets the focus point, in a (0,1) coordinate system.
+  ///
+  /// If @c point is nil, the focus point will reset to the center.
   func setFocusPoint(
     _ point: FCPPlatformPoint?,
     completion: @escaping (_ error: FlutterError?) -> Void
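For orientation, here is a minimal caller-side sketch of the point-setting APIs documented above. It is an illustration only, not part of the commit: it assumes it lives alongside the plugin sources (so the Camera protocol and FCPPlatformPoint are visible), and that some value `camera` conforming to Camera is available. Points use the documented (0,1) coordinate system, and passing nil resets to the center.

// Hypothetical caller-side usage of the Camera protocol; for illustration only.
// Assumes this sits inside the plugin target, where Camera and FCPPlatformPoint are visible.
func focusAndExposeAtCenter(_ camera: Camera) {
  camera.setFocusPoint(FCPPlatformPoint.makeWith(x: 0.5, y: 0.5)) { error in
    if let error = error {
      // Surfaces as "setFocusPointFailed" when the device lacks focus point support.
      print("setFocusPoint failed: \(error.message ?? "unknown error")")
    }
  }
  camera.setExposurePoint(nil) { error in
    // A nil point resets the exposure point of interest to the center.
    if error == nil { print("Exposure point reset to center") }
  }
}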

packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation/DefaultCamera.swift

Lines changed: 192 additions & 5 deletions
@@ -23,10 +23,182 @@ final class DefaultCamera: FLTCam, Camera {
 
   /// Maximum number of frames pending processing.
   /// To limit memory consumption, limit the number of frames pending processing.
-  /// After some testing, 4 was determined to be the best maximuńm value.
+  /// After some testing, 4 was determined to be the best maximum value.
   /// https://github.com/flutter/plugins/pull/4520#discussion_r766335637
   private var maxStreamingPendingFramesCount = 4
 
+  private var exposureMode = FCPPlatformExposureMode.auto
+  private var focusMode = FCPPlatformFocusMode.auto
+
+  func reportInitializationState() {
+    // Get all the state on the current thread, not the main thread.
+    let state = FCPPlatformCameraState.make(
+      withPreviewSize: FCPPlatformSize.make(
+        withWidth: Double(previewSize.width),
+        height: Double(previewSize.height)
+      ),
+      exposureMode: exposureMode,
+      focusMode: focusMode,
+      exposurePointSupported: captureDevice.isExposurePointOfInterestSupported,
+      focusPointSupported: captureDevice.isFocusPointOfInterestSupported
+    )
+
+    FLTEnsureToRunOnMainQueue { [weak self] in
+      self?.dartAPI?.initialized(with: state) { _ in
+        // Ignore any errors, as this is just an event broadcast.
+      }
+    }
+  }
+
+  func receivedImageStreamData() {
+    streamingPendingFramesCount -= 1
+  }
+
+  func start() {
+    videoCaptureSession.startRunning()
+    audioCaptureSession.startRunning()
+  }
+
+  func stop() {
+    videoCaptureSession.stopRunning()
+    audioCaptureSession.stopRunning()
+  }
+
+  func setExposureMode(_ mode: FCPPlatformExposureMode) {
+    exposureMode = mode
+    applyExposureMode()
+  }
+
+  private func applyExposureMode() {
+    try? captureDevice.lockForConfiguration()
+    switch exposureMode {
+    case .locked:
+      // AVCaptureExposureMode.autoExpose automatically adjusts the exposure one time, and then locks exposure for the device
+      captureDevice.setExposureMode(.autoExpose)
+    case .auto:
+      if captureDevice.isExposureModeSupported(.continuousAutoExposure) {
+        captureDevice.setExposureMode(.continuousAutoExposure)
+      } else {
+        captureDevice.setExposureMode(.autoExpose)
+      }
+    @unknown default:
+      assertionFailure("Unknown exposure mode")
+    }
+    captureDevice.unlockForConfiguration()
+  }
+
+  func setExposureOffset(_ offset: Double) {
+    try? captureDevice.lockForConfiguration()
+    captureDevice.setExposureTargetBias(Float(offset), completionHandler: nil)
+    captureDevice.unlockForConfiguration()
+  }
+
+  func setExposurePoint(
+    _ point: FCPPlatformPoint?, withCompletion completion: @escaping (FlutterError?) -> Void
+  ) {
+    guard captureDevice.isExposurePointOfInterestSupported else {
+      completion(
+        FlutterError(
+          code: "setExposurePointFailed",
+          message: "Device does not have exposure point capabilities",
+          details: nil))
+      return
+    }
+
+    let orientation = UIDevice.current.orientation
+    try? captureDevice.lockForConfiguration()
+    // A nil point resets to the center.
+    let exposurePoint = cgPoint(
+      for: point ?? FCPPlatformPoint.makeWith(x: 0.5, y: 0.5), withOrientation: orientation)
+    captureDevice.setExposurePointOfInterest(exposurePoint)
+    captureDevice.unlockForConfiguration()
+    // Retrigger auto exposure
+    applyExposureMode()
+    completion(nil)
+  }
+
+  func setFocusMode(_ mode: FCPPlatformFocusMode) {
+    focusMode = mode
+    applyFocusMode()
+  }
+
+  func setFocusPoint(_ point: FCPPlatformPoint?, completion: @escaping (FlutterError?) -> Void) {
+    guard captureDevice.isFocusPointOfInterestSupported else {
+      completion(
+        FlutterError(
+          code: "setFocusPointFailed",
+          message: "Device does not have focus point capabilities",
+          details: nil))
+      return
+    }
+
+    let orientation = deviceOrientationProvider.orientation()
+    try? captureDevice.lockForConfiguration()
+    // A nil point resets to the center.
+    captureDevice.setFocusPointOfInterest(
+      cgPoint(
+        for: point ?? .makeWith(x: 0.5, y: 0.5),
+        withOrientation: orientation)
+    )
+    captureDevice.unlockForConfiguration()
+    // Retrigger auto focus
+    applyFocusMode()
+    completion(nil)
+  }
+
+  private func applyFocusMode() {
+    applyFocusMode(focusMode, onDevice: captureDevice)
+  }
+
+  private func applyFocusMode(
+    _ focusMode: FCPPlatformFocusMode, onDevice captureDevice: FLTCaptureDevice
+  ) {
+    try? captureDevice.lockForConfiguration()
+    switch focusMode {
+    case .locked:
+      // AVCaptureFocusMode.autoFocus automatically adjusts the focus one time, and then locks focus
+      if captureDevice.isFocusModeSupported(.autoFocus) {
+        captureDevice.setFocusMode(.autoFocus)
+      }
+    case .auto:
+      if captureDevice.isFocusModeSupported(.continuousAutoFocus) {
+        captureDevice.setFocusMode(.continuousAutoFocus)
+      } else if captureDevice.isFocusModeSupported(.autoFocus) {
+        captureDevice.setFocusMode(.autoFocus)
+      }
+    @unknown default:
+      assertionFailure("Unknown focus mode")
+    }
+    captureDevice.unlockForConfiguration()
+  }
+
+  private func cgPoint(
+    for point: FCPPlatformPoint, withOrientation orientation: UIDeviceOrientation
+  )
+    -> CGPoint
+  {
+    var x = point.x
+    var y = point.y
+    switch orientation {
+    case .portrait:  // 90 ccw
+      y = 1 - point.x
+      x = point.y
+    case .portraitUpsideDown:  // 90 cw
+      x = 1 - point.y
+      y = point.x
+    case .landscapeRight:  // 180
+      x = 1 - point.x
+      y = 1 - point.y
+    case .landscapeLeft:
+      // No rotation required
+      break
+    default:
+      // No rotation required
+      break
+    }
+    return CGPoint(x: x, y: y)
+  }
+
   func captureOutput(
     _ output: AVCaptureOutput,
     didOutput sampleBuffer: CMSampleBuffer,
@@ -105,10 +277,9 @@
       "height": imageHeight,
       "format": videoFormat,
       "planes": planes,
-      "lensAperture": captureDevice.lensAperture,
-      "sensorExposureTime": NSNumber(
-        value: captureDevice.exposureDuration().seconds * 1_000_000_000),
-      "sensorSensitivity": NSNumber(value: captureDevice.iso()),
+      "lensAperture": Double(captureDevice.lensAperture()),
+      "sensorExposureTime": Int(captureDevice.exposureDuration().seconds * 1_000_000_000),
+      "sensorSensitivity": Double(captureDevice.iso()),
     ]
 
     DispatchQueue.main.async {
@@ -241,6 +412,22 @@ final class DefaultCamera: FLTCam, Camera {
     }
   }
 
+  func close() {
+    stop()
+    for input in videoCaptureSession.inputs {
+      videoCaptureSession.removeInput(FLTDefaultCaptureInput(input: input))
+    }
+    for output in videoCaptureSession.outputs {
+      videoCaptureSession.removeOutput(output)
+    }
+    for input in audioCaptureSession.inputs {
+      audioCaptureSession.removeInput(FLTDefaultCaptureInput(input: input))
+    }
+    for output in audioCaptureSession.outputs {
+      audioCaptureSession.removeOutput(output)
+    }
+  }
+
   func copyPixelBuffer() -> Unmanaged<CVPixelBuffer>? {
     var pixelBuffer: CVPixelBuffer?
     pixelBufferSynchronizationQueue.sync {
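The private cgPoint(for:withOrientation:) helper added above remaps a normalized point of interest to account for device orientation. As a quick sanity check of that mapping (an illustration only, not part of the commit), the .portrait case behaves like a 90-degree counter-clockwise remap of the (0,1) coordinates; rotateForPortrait below is a hypothetical name.

import CoreGraphics

// Standalone re-statement of the .portrait branch of cgPoint(for:withOrientation:),
// for illustration only; rotateForPortrait is not plugin code.
func rotateForPortrait(_ p: CGPoint) -> CGPoint {
  // Mirrors `x = point.y; y = 1 - point.x` above: a 90-degree counter-clockwise remap.
  CGPoint(x: p.y, y: 1 - p.x)
}

assert(rotateForPortrait(CGPoint(x: 0.5, y: 0.5)) == CGPoint(x: 0.5, y: 0.5))  // the center is invariant
assert(rotateForPortrait(CGPoint(x: 0.25, y: 0.75)) == CGPoint(x: 0.75, y: 0.75))  // an off-center point is remapped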
