The method captureOutput captures both video and audio frames inside my class CameraViewModel (not a good name). The method references many properties declared in CameraViewModel that are isolated to the main actor, and since captureOutput is nonisolated, this generates a lot of warnings. For example, self.videoWriterInput: AVAssetWriterInput and self.audioWriterInput: AVAssetWriterInput give
Main actor-isolated property 'videoWriterInput' can not be referenced from a nonisolated context
.
My question: I could move all of the code inside captureOutput onto the MainActor, but is that really the best approach? Since the delegate is called on a background capture queue, it feels like it would be more efficient to keep the work on that queue rather than hopping to the main actor for every frame.
class CameraViewModel: UIViewController, AVCaptureDepthDataOutputDelegate, AVCaptureVideoDataOutputSampleBufferDelegate, AVCaptureAudioDataOutputSampleBufferDelegate {
    // ...

    /// Receives video and audio sample buffers from the capture session and
    /// appends them to the asset writers.
    ///
    /// This is invoked on the queue given to `setSampleBufferDelegate(_:queue:)`,
    /// not the main thread — hence `nonisolated`. The writer state it touches
    /// (`videoWriter`, `videoWriterInput`, …) is main-actor isolated, which is
    /// the source of the isolation warnings.
    /// NOTE(review): rather than hopping to the main actor per frame, the
    /// cleaner long-term fix is to move the writing state into a separate
    /// recorder type confined to the capture queue (or an actor), so the hot
    /// path never crosses isolation boundaries — TODO confirm against the
    /// rest of the class.
    nonisolated func captureOutput(_ output: AVCaptureOutput,
                                   didOutput sampleBuffer: CMSampleBuffer,
                                   from connection: AVCaptureConnection) {
        guard CMSampleBufferDataIsReady(sampleBuffer), self.canWrite() else { return }

        // Presentation timestamp (PTS) of this buffer.
        let timestamp = sampleBuffer.presentationTimeStamp

        // `sessionAtSourceTime` is the PTS of the first buffer written to the file.
        if self.sessionAtSourceTime == nil {
            // Must start with a video buffer; starting with audio corrupts the file.
            guard output == self.videoDataOutput else { return }
            // The buffer always lags real time, so wait until it reaches the
            // requested start time. Bind the optionals instead of force-unwrapping:
            // if the writers were torn down mid-capture, drop the frame rather
            // than crash.
            guard let recordFromTime = self.recordFromTime,
                  timestamp >= recordFromTime,
                  let videoWriter = self.videoWriter,
                  let audioWriter = self.audioWriter
            else { return }
            self.sessionAtSourceTime = timestamp
            videoWriter.startSession(atSourceTime: timestamp)
            audioWriter.startSession(atSourceTime: timestamp)
        }

        if output == self.videoDataOutput {
            guard let videoWriterInput = self.videoWriterInput,
                  videoWriterInput.isReadyForMoreMediaData
            else { return }
            videoWriterInput.append(sampleBuffer)
            self.videoTimestamps.append(
                Timestamp(
                    frame: self.videoTimestamps.count,
                    value: timestamp.value,
                    timescale: timestamp.timescale
                )
            )
        } else if output == self.audioDataOutput {
            guard let audioWriterInput = self.audioWriterInput,
                  audioWriterInput.isReadyForMoreMediaData
            else { return }
            audioWriterInput.append(sampleBuffer)
            self.audioTimestamps.append(
                Timestamp(
                    frame: self.audioTimestamps.count,
                    value: timestamp.value,
                    timescale: timestamp.timescale
                )
            )
        }
    }

    // ...
}