Who this article is for
- Teams that need a custom camera UI beyond what UIImagePickerController offers
- Developers who want to add real-time filters or AR effects to the camera
- iOS developers implementing video recording and editing features
Introduction
UIImagePickerController is convenient, but customization is limited. Implementing the camera directly with AVFoundation gives you the freedom to build real-time filters, AR overlays, and fully custom UI.
This article was written with reference to the iOS media processing guide on bluefoxdev.kr.
1. AVFoundation Architecture
[AVCaptureSession composition]
Input:
AVCaptureDeviceInput (camera)
AVCaptureAudioDeviceInput (microphone)
Output:
AVCapturePhotoOutput → still photo capture
AVCaptureMovieFileOutput → video files
AVCaptureVideoDataOutput → real-time frames
AVCaptureMetadataOutput → QR codes / barcodes
Preview:
AVCaptureVideoPreviewLayer → attached to a view
Section 2 below implements this graph end to end.
[Camera types]
.builtInWideAngleCamera: the standard camera
.builtInTelephotoCamera: telephoto
.builtInUltraWideCamera: ultra-wide
.builtInDualCamera: dual-camera system
.builtInTrueDepthCamera: TrueDepth (the Face ID camera)
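Which of these device types a given phone actually has can be checked with AVCaptureDevice.DiscoverySession; a minimal sketch:

import AVFoundation

// Enumerate the physical cameras this device supports;
// DiscoverySession returns only the types that actually exist on the hardware
let discovery = AVCaptureDevice.DiscoverySession(
    deviceTypes: [.builtInWideAngleCamera, .builtInTelephotoCamera,
                  .builtInUltraWideCamera, .builtInTrueDepthCamera],
    mediaType: .video,
    position: .unspecified
)
for device in discovery.devices {
    print(device.localizedName) // e.g. "Back Telephoto Camera"
}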
[Real-time filter pipeline]
AVCaptureVideoDataOutput
→ sample buffer (CMSampleBuffer)
→ convert to CIImage
→ apply CIFilter
→ render with Metal (OpenGL ES is deprecated on iOS)
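The delegate end of this pipeline might look like the sketch below, assuming a sepia-tone CIFilter and a reused CIContext; a real app would typically draw the filtered CIImage into a Metal-backed view rather than rendering back into the buffer in place:

import AVFoundation
import CoreImage

final class FilterRenderer: NSObject, AVCaptureVideoDataOutputSampleBufferDelegate {
    // Reuse one CIContext; creating a new one per frame is expensive
    private let ciContext = CIContext()
    private let filter = CIFilter(name: "CISepiaTone")!

    func captureOutput(_ output: AVCaptureOutput,
                       didOutput sampleBuffer: CMSampleBuffer,
                       from connection: AVCaptureConnection) {
        // CMSampleBuffer → CVPixelBuffer → CIImage
        guard let pixelBuffer = CMSampleBufferGetImageBuffer(sampleBuffer) else { return }
        let input = CIImage(cvPixelBuffer: pixelBuffer)

        // Apply the filter
        filter.setValue(input, forKey: kCIInputImageKey)
        filter.setValue(0.8, forKey: kCIInputIntensityKey)
        guard let filtered = filter.outputImage else { return }

        // Render the result (here, back into the same pixel buffer for brevity)
        ciContext.render(filtered, to: pixelBuffer)
    }
}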
[Permissions]
NSCameraUsageDescription (Info.plist)
NSMicrophoneUsageDescription (for video recording)
NSPhotoLibraryAddUsageDescription (for saving to the photo library)
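The Info.plist keys only supply the usage strings; access still has to be requested at runtime. A minimal sketch of requesting all three up front (the CameraManager in section 2 wraps the camera and microphone requests itself):

import AVFoundation
import Photos

// Ask for camera, microphone, and add-to-library access before configuring the session
func requestMediaPermissions() async -> Bool {
    let camera = await AVCaptureDevice.requestAccess(for: .video)
    let microphone = await AVCaptureDevice.requestAccess(for: .audio) // movie recording
    let library = await PHPhotoLibrary.requestAuthorization(for: .addOnly)
    return camera && microphone && library == .authorized
}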
2. Implementing a Custom Camera
import AVFoundation
import SwiftUI
import Photos
class CameraManager: NSObject, ObservableObject {
@Published var previewLayer: AVCaptureVideoPreviewLayer?
@Published var isRecording = false
@Published var currentZoomFactor: CGFloat = 1.0
private let session = AVCaptureSession()
private let photoOutput = AVCapturePhotoOutput()
private let videoOutput = AVCaptureMovieFileOutput()
private var currentDevice: AVCaptureDevice?
private var currentInput: AVCaptureDeviceInput?
// Strong reference: AVCapturePhotoOutput does not retain its capture delegate
private var photoCaptureDelegate: PhotoCaptureDelegate?
private let sessionQueue = DispatchQueue(label: "camera.session")
// Configure the capture session (off the main thread, per Apple's guidance)
func configure() async {
guard await requestPermissions() else { return }
sessionQueue.async { [weak self] in
guard let self = self else { return }
self.session.beginConfiguration()
self.session.sessionPreset = .photo
// Add the back camera as input
if let device = AVCaptureDevice.default(.builtInWideAngleCamera, for: .video, position: .back),
let input = try? AVCaptureDeviceInput(device: device) {
if self.session.canAddInput(input) {
self.session.addInput(input)
self.currentDevice = device
self.currentInput = input
}
}
// Photo Output
if self.session.canAddOutput(self.photoOutput) {
self.session.addOutput(self.photoOutput)
self.photoOutput.maxPhotoQualityPrioritization = .quality
}
// Movie file output (note: no audio input is added here, so recordings will be silent)
if self.session.canAddOutput(self.videoOutput) {
self.session.addOutput(self.videoOutput)
}
self.session.commitConfiguration()
// Create the preview layer on the main thread
DispatchQueue.main.async {
let preview = AVCaptureVideoPreviewLayer(session: self.session)
preview.videoGravity = .resizeAspectFill
self.previewLayer = preview
}
self.session.startRunning()
}
}
// Capture a still photo
func capturePhoto(completion: @escaping (UIImage?) -> Void) {
let settings = AVCapturePhotoSettings()
// Setting an unsupported flash mode raises an exception, so check support first
if photoOutput.supportedFlashModes.contains(.auto) {
settings.flashMode = .auto
}
// Keep the delegate alive for the duration of the capture
let delegate = PhotoCaptureDelegate { [weak self] photo in
self?.photoCaptureDelegate = nil
guard let photo = photo,
let data = photo.fileDataRepresentation(),
let image = UIImage(data: data) else {
completion(nil)
return
}
completion(image)
}
photoCaptureDelegate = delegate
photoOutput.capturePhoto(with: settings, delegate: delegate)
}
// Start/stop video recording
func toggleRecording(outputURL: URL) {
if isRecording {
videoOutput.stopRecording()
} else {
videoOutput.startRecording(to: outputURL, recordingDelegate: self)
isRecording = true
}
}
// Switch between the front and back cameras
func switchCamera() {
sessionQueue.async { [weak self] in
guard let self = self,
let currentInput = self.currentInput else { return }
let newPosition: AVCaptureDevice.Position = currentInput.device.position == .back ? .front : .back
guard let newDevice = AVCaptureDevice.default(.builtInWideAngleCamera, for: .video, position: newPosition),
let newInput = try? AVCaptureDeviceInput(device: newDevice) else { return }
self.session.beginConfiguration()
self.session.removeInput(currentInput)
if self.session.canAddInput(newInput) {
self.session.addInput(newInput)
self.currentDevice = newDevice
self.currentInput = newInput
} else {
// Restore the previous input so the session is never left without one
self.session.addInput(currentInput)
}
self.session.commitConfiguration()
}
}
// Zoom control
func setZoom(_ factor: CGFloat) {
guard let device = currentDevice else { return }
do {
try device.lockForConfiguration()
// Clamp to the range the active format supports
let clampedFactor = max(1.0, min(factor, device.activeFormat.videoMaxZoomFactor))
device.videoZoomFactor = clampedFactor
device.unlockForConfiguration()
DispatchQueue.main.async {
self.currentZoomFactor = clampedFactor
}
} catch {
// Another caller holds the configuration lock; skip this update
}
}
// Save to the photo library (assumes add-only authorization was granted)
func saveToLibrary(_ image: UIImage) async throws {
try await PHPhotoLibrary.shared().performChanges {
PHAssetChangeRequest.creationRequestForAsset(from: image)
}
}
private func requestPermissions() async -> Bool {
let video = await AVCaptureDevice.requestAccess(for: .video)
let audio = await AVCaptureDevice.requestAccess(for: .audio) // for movie recording
return video && audio
}
}
extension CameraManager: AVCaptureFileOutputRecordingDelegate {
func fileOutput(_ output: AVCaptureFileOutput, didFinishRecordingTo outputFileURL: URL, from connections: [AVCaptureConnection], error: Error?) {
DispatchQueue.main.async {
self.isRecording = false
}
if error == nil {
// Save the video to the photo library
PHPhotoLibrary.shared().performChanges {
PHAssetChangeRequest.creationRequestForAssetFromVideo(atFileURL: outputFileURL)
} completionHandler: { _, _ in }
}
}
}
// Photo capture delegate: forwards the result (or nil on error) to its completion handler
class PhotoCaptureDelegate: NSObject, AVCapturePhotoCaptureDelegate {
let completion: (AVCapturePhoto?) -> Void
init(completion: @escaping (AVCapturePhoto?) -> Void) {
self.completion = completion
}
func photoOutput(_ output: AVCapturePhotoOutput, didFinishProcessingPhoto photo: AVCapturePhoto, error: Error?) {
// Always call completion so the caller's callback fires even on failure
completion(error == nil ? photo : nil)
}
}
// SwiftUI camera view
struct CameraView: View {
@StateObject private var cameraManager = CameraManager()
@State private var capturedImage: UIImage?
@State private var baseZoomFactor: CGFloat = 1.0
var body: some View {
ZStack {
// Camera preview
CameraPreviewView(previewLayer: cameraManager.previewLayer)
.ignoresSafeArea()
.gesture(
MagnificationGesture()
.onChanged { value in
// Scale from the zoom level at gesture start;
// multiplying by the live zoom factor would compound on every change
cameraManager.setZoom(baseZoomFactor * value)
}
.onEnded { _ in
baseZoomFactor = cameraManager.currentZoomFactor
}
)
// Control UI
VStack {
Spacer()
HStack(spacing: 40) {
Button { cameraManager.switchCamera() } label: {
Image(systemName: "arrow.triangle.2.circlepath.camera")
.font(.title).foregroundColor(.white)
}
Button {
cameraManager.capturePhoto { image in
capturedImage = image
}
} label: {
Circle().fill(.white).frame(width: 70, height: 70)
}
Button {
let url = FileManager.default.temporaryDirectory
.appendingPathComponent(UUID().uuidString + ".mov")
cameraManager.toggleRecording(outputURL: url)
} label: {
Circle()
.fill(cameraManager.isRecording ? .red : .clear)
.overlay(Circle().stroke(.white, lineWidth: 3))
.frame(width: 50, height: 50)
}
}
.padding(.bottom, 40)
}
}
.task { await cameraManager.configure() }
}
}
struct CameraPreviewView: UIViewRepresentable {
let previewLayer: AVCaptureVideoPreviewLayer?
// UIView subclass that keeps the preview layer sized to its bounds;
// updateUIView alone is not called on rotation or layout changes
final class PreviewContainer: UIView {
override func layoutSubviews() {
super.layoutSubviews()
layer.sublayers?.forEach { $0.frame = bounds }
}
}
func makeUIView(context: Context) -> PreviewContainer {
let view = PreviewContainer()
view.backgroundColor = .black
return view
}
func updateUIView(_ uiView: PreviewContainer, context: Context) {
guard let layer = previewLayer else { return }
if layer.superlayer == nil {
layer.frame = uiView.bounds
uiView.layer.addSublayer(layer)
}
}
}
Wrapping up
The heart of an AVFoundation camera is AVCaptureSession configuration and thread management: configure the session on a dedicated background queue and perform UI updates on the main queue. Real-time filters are built as a pipeline that takes CMSampleBuffers from AVCaptureVideoDataOutput, applies CIFilters, and renders with Metal.
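Attaching that output follows the same threading rule: frames are delivered on a dedicated serial queue, never the main one. A minimal sketch, assuming an existing session and a delegate like the FilterRenderer outlined in section 1:

// Deliver frames to a dedicated queue and drop late ones
// so the preview never lags behind the capture
let videoDataOutput = AVCaptureVideoDataOutput()
videoDataOutput.alwaysDiscardsLateVideoFrames = true
videoDataOutput.setSampleBufferDelegate(filterRenderer,
                                        queue: DispatchQueue(label: "camera.video.frames"))
session.beginConfiguration()
if session.canAddOutput(videoDataOutput) {
    session.addOutput(videoDataOutput)
}
session.commitConfiguration()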