こんにちは、私はJared Davidsonのコースに従って、カスタムカメラビューを作成し、AVFoundationを使用して写真を保存しました。 https://www.youtube.com/watch?v=w0O3ZGUS3pk
ただし、画像ではなく動画を記録して保存したい。誰かが私をここで助けてくれますか?確かにシンプルですが、AppleのドキュメントはObjective-Cで書かれており、解読できません。
これは私のコードです。ありがとう。
import UIKit
import AVFoundation
// Question code (Swift 2 era): shows a live front-camera preview and saves a
// still JPEG to the photo library via AVCaptureStillImageOutput.
class ViewController: UIViewController {
// Capture pipeline: one session, a still-image output, and a preview layer.
var captureSession = AVCaptureSession()
var sessionOutput = AVCaptureStillImageOutput()
var previewLayer = AVCaptureVideoPreviewLayer()
@IBOutlet var cameraView: UIView!
override func viewWillAppear(animated: Bool) {
// Enumerate all video-capable devices and pick the front camera.
let devices = AVCaptureDevice.devicesWithMediaType(AVMediaTypeVideo)
for device in devices {
if device.position == AVCaptureDevicePosition.Front{
do{
// AVCaptureDeviceInput(device:) throws if the device cannot be opened.
let input = try AVCaptureDeviceInput(device: device as! AVCaptureDevice)
if captureSession.canAddInput(input){
captureSession.addInput(input)
// Capture stills as JPEG.
sessionOutput.outputSettings = [AVVideoCodecKey : AVVideoCodecJPEG]
if captureSession.canAddOutput(sessionOutput){
captureSession.addOutput(sessionOutput)
captureSession.startRunning()
// Attach a preview layer, filling and centred in cameraView.
previewLayer = AVCaptureVideoPreviewLayer(session: captureSession)
previewLayer.videoGravity = AVLayerVideoGravityResizeAspectFill
previewLayer.connection.videoOrientation = AVCaptureVideoOrientation.Portrait
cameraView.layer.addSublayer(previewLayer)
previewLayer.position = CGPoint(x: self.cameraView.frame.width / 2, y: self.cameraView.frame.height / 2)
previewLayer.bounds = cameraView.frame
}
}
}
catch{
// NOTE(review): error details are discarded here; consider logging `error`.
print("Error")
}
}
}
}
// Captures one still frame asynchronously and writes it to the photo album.
@IBAction func TakePhoto(sender: AnyObject) {
if let videoConnection = sessionOutput.connectionWithMediaType(AVMediaTypeVideo){
sessionOutput.captureStillImageAsynchronouslyFromConnection(videoConnection, completionHandler: {
buffer, error in
// Convert the sample buffer to JPEG data, then save.
// NOTE(review): the force-unwraps below crash if capture failed.
let imageData = AVCaptureStillImageOutput.jpegStillImageNSDataRepresentation(buffer)
UIImageWriteToSavedPhotosAlbum(UIImage(data: imageData)!, nil, nil, nil)
})
}
}
}
AVCaptureMovieFileOutput
を作成してキャプチャセッションに追加し、ViewController
をAVCaptureFileOutputRecordingDelegate
に準拠させることで、ビデオをファイルに記録して保存できます。
この例では、アプリのドキュメントディレクトリにある「output.mov」というファイルに5秒間のビデオを記録します。
// Answer code (Swift 2 era): records a 5-second movie from the front camera to
// Documents/output.mov and saves it to the camera roll when recording finishes.
class ViewController: UIViewController, AVCaptureFileOutputRecordingDelegate {

    var captureSession = AVCaptureSession()
    var sessionOutput = AVCaptureStillImageOutput()
    // Movie (video file) output — this is what enables video recording.
    var movieOutput = AVCaptureMovieFileOutput()
    var previewLayer = AVCaptureVideoPreviewLayer()

    @IBOutlet var cameraView: UIView!

    override func viewWillAppear(animated: Bool) {
        self.cameraView = self.view

        // Pick the front camera among all video devices.
        let devices = AVCaptureDevice.devicesWithMediaType(AVMediaTypeVideo)
        for device in devices {
            if device.position == AVCaptureDevicePosition.Front {
                do {
                    let input = try AVCaptureDeviceInput(device: device as! AVCaptureDevice)
                    if captureSession.canAddInput(input) {
                        captureSession.addInput(input)
                        sessionOutput.outputSettings = [AVVideoCodecKey : AVVideoCodecJPEG]
                        if captureSession.canAddOutput(sessionOutput) {
                            captureSession.addOutput(sessionOutput)
                            // Preview layer, filling and centred in cameraView.
                            previewLayer = AVCaptureVideoPreviewLayer(session: captureSession)
                            previewLayer.videoGravity = AVLayerVideoGravityResizeAspectFill
                            previewLayer.connection.videoOrientation = AVCaptureVideoOrientation.Portrait
                            cameraView.layer.addSublayer(previewLayer)
                            previewLayer.position = CGPoint(x: self.cameraView.frame.width / 2, y: self.cameraView.frame.height / 2)
                            previewLayer.bounds = cameraView.frame
                        }
                        // FIX: guard the movie output like the still-image output
                        // above; the original called addOutput unconditionally.
                        if captureSession.canAddOutput(movieOutput) {
                            captureSession.addOutput(movieOutput)
                        }
                        captureSession.startRunning()

                        // Record into Documents/output.mov; the recorder fails if
                        // the file already exists, so remove any stale one first.
                        let paths = NSFileManager.defaultManager().URLsForDirectory(.DocumentDirectory, inDomains: .UserDomainMask)
                        let fileUrl = paths[0].URLByAppendingPathComponent("output.mov")
                        try? NSFileManager.defaultManager().removeItemAtURL(fileUrl)
                        movieOutput.startRecordingToOutputFileURL(fileUrl, recordingDelegate: self)

                        // Stop automatically after 5 seconds (demo only).
                        let delayTime = dispatch_time(DISPATCH_TIME_NOW, Int64(5 * Double(NSEC_PER_SEC)))
                        dispatch_after(delayTime, dispatch_get_main_queue()) {
                            print("stopping")
                            self.movieOutput.stopRecording()
                        }
                    }
                }
                catch {
                    // FIX: surface the actual error instead of a bare "Error".
                    print("Error configuring capture session: \(error)")
                }
            }
        }
    }

    // AVCaptureFileOutputRecordingDelegate: called once the movie file is
    // finalized; on success, copy it into the photo library.
    func captureOutput(captureOutput: AVCaptureFileOutput!, didFinishRecordingToOutputFileAtURL outputFileURL: NSURL!, fromConnections connections: [AnyObject]!, error: NSError!) {
        print("FINISHED \(error)")
        // save video to camera roll
        if error == nil {
            UISaveVideoAtPathToSavedPhotosAlbum(outputFileURL.path!, nil, nil, nil)
        }
    }
}
これありがとう。とても助かりました。これは、必要なインポートステートメントとデリゲートメソッドを含むSwift 3に移植された Rhythmic Fistman の回答のバージョンです。
import UIKit
import AVFoundation
// Swift 3 port: records a 5-second movie from the front camera to
// Documents/output.mov and saves it to the camera roll when recording finishes.
class ViewController: UIViewController,
AVCaptureFileOutputRecordingDelegate {

    var captureSession = AVCaptureSession()
    var sessionOutput = AVCaptureStillImageOutput()
    // Movie (video file) output — this is what enables video recording.
    var movieOutput = AVCaptureMovieFileOutput()
    var previewLayer = AVCaptureVideoPreviewLayer()

    @IBOutlet var cameraView: UIView!

    override func viewWillAppear(_ animated: Bool) {
        self.cameraView = self.view

        // FIX: devices(withMediaType:) returns an optional array; the original
        // force-unwrapped it with `devices!`.
        guard let devices = AVCaptureDevice.devices(withMediaType: AVMediaTypeVideo) else { return }
        for device in devices {
            if (device as AnyObject).position == AVCaptureDevicePosition.front {
                do {
                    let input = try AVCaptureDeviceInput(device: device as! AVCaptureDevice)
                    if captureSession.canAddInput(input) {
                        captureSession.addInput(input)
                        sessionOutput.outputSettings = [AVVideoCodecKey : AVVideoCodecJPEG]
                        if captureSession.canAddOutput(sessionOutput) {
                            captureSession.addOutput(sessionOutput)
                            // Preview layer, filling and centred in cameraView.
                            previewLayer = AVCaptureVideoPreviewLayer(session: captureSession)
                            previewLayer.videoGravity = AVLayerVideoGravityResizeAspectFill
                            previewLayer.connection.videoOrientation = AVCaptureVideoOrientation.portrait
                            cameraView.layer.addSublayer(previewLayer)
                            previewLayer.position = CGPoint(x: self.cameraView.frame.width / 2, y: self.cameraView.frame.height / 2)
                            previewLayer.bounds = cameraView.frame
                        }
                        // FIX: guard the movie output like the still-image output
                        // above; the original called addOutput unconditionally.
                        if captureSession.canAddOutput(movieOutput) {
                            captureSession.addOutput(movieOutput)
                        }
                        captureSession.startRunning()

                        // Record into Documents/output.mov; remove any stale file
                        // first, since the recorder fails if it already exists.
                        let paths = FileManager.default.urls(for: .documentDirectory, in: .userDomainMask)
                        let fileUrl = paths[0].appendingPathComponent("output.mov")
                        try? FileManager.default.removeItem(at: fileUrl)
                        movieOutput.startRecording(toOutputFileURL: fileUrl, recordingDelegate: self)

                        // Stop automatically after 5 seconds (demo only).
                        let delayTime = DispatchTime.now() + 5
                        DispatchQueue.main.asyncAfter(deadline: delayTime) {
                            print("stopping")
                            self.movieOutput.stopRecording()
                        }
                    }
                }
                catch {
                    // FIX: surface the actual error instead of a bare "Error".
                    print("Error configuring capture session: \(error)")
                }
            }
        }
    }

    //MARK: AVCaptureFileOutputRecordingDelegate Methods
    func capture(_ captureOutput: AVCaptureFileOutput!, didStartRecordingToOutputFileAt fileURL: URL!, fromConnections connections: [Any]!) {
    }

    // Called once the movie file is finalized; on success, copy it into the
    // photo library.
    func capture(_ captureOutput: AVCaptureFileOutput!, didFinishRecordingToOutputFileAt outputFileURL: URL!, fromConnections connections: [Any]!, error: Error!) {
        print("FINISHED \(error)")
        // save video to camera roll
        if error == nil {
            UISaveVideoAtPathToSavedPhotosAlbum(outputFileURL.path, nil, nil, nil)
        }
    }
}
このコードを使用してビデオをフォトライブラリに保存できます。次のパラメータを指定する必要があります。最も重要なのは、カメラロールアルバムに保存するムービーファイルへのファイルシステムパスである outputFileURL.path です。残りのパラメーターにはそれぞれの値を渡すか、必要に応じてnilを割り当てることもできます
// Delegate callback fired when the movie file has been finalized.
// On success the file at outputFileURL.path is copied to the camera roll and
// video(_:didFinishSavingWithError:contextInfo:) is invoked when saving ends.
func fileOutput(_ output: AVCaptureFileOutput, didFinishRecordingTo outputFileURL: URL, from connections: [AVCaptureConnection], error: Error?) {
    if let error = error {
        print("Error recording movie: \(error.localizedDescription)")
    } else {
        // FIX: UISaveVideoAtPathToSavedPhotosAlbum only calls the completion
        // selector on a non-nil completionTarget; the original passed `nil`,
        // so the selector was never invoked. Pass `self` as the target.
        UISaveVideoAtPathToSavedPhotosAlbum(outputFileURL.path, self, #selector(CameraController.video(_:didFinishSavingWithError:contextInfo:)), nil)
    }
    // Clear the stored URL so a new recording starts from a clean state.
    outputURL = nil
}
録音問題の場合
CaptureSessionを作成するときにこのコードを追加します
// Ask for microphone permission, then attach the audio device to the capture
// session so recorded movies include sound.
askMicroPhonePermission(completion: { (isMicrophonePermissionGiven) in
    if isMicrophonePermissionGiven {
        do {
            // AVCaptureDeviceInput(device:) throws if the device can't be opened.
            try self.captureSession.addInput(AVCaptureDeviceInput(device: self.captureAudio))
        } catch {
            // FIX: the original printed "Error creating the database", a
            // copy-paste message unrelated to the actual failure.
            print("Error adding audio input: \(error)")
        }
    }
})
////////////////////////////////////////////////////////////////
askMicroPhonePermission関数は以下の通りです
/// Checks (and, if still undetermined, requests) microphone record permission,
/// reporting the outcome through `completion`.
/// - Parameter completion: Receives `true` when recording is allowed,
///   `false` otherwise. May be called asynchronously for the undetermined case.
func askMicroPhonePermission(completion: @escaping (_ success: Bool) -> Void) {
    let session = AVAudioSession.sharedInstance()
    switch session.recordPermission() {
    case AVAudioSessionRecordPermission.granted:
        completion(true)
    case AVAudioSessionRecordPermission.denied:
        // show alert if required
        completion(false)
    case AVAudioSessionRecordPermission.undetermined:
        // First time: prompt the user and forward their answer.
        session.requestRecordPermission { granted in
            completion(granted) // show alert on false if required
        }
    default:
        completion(false)
    }
}
そして、NSMicrophoneUsageDescriptionキー値をinfo.plistファイルに追加する必要があります。
if (device as AnyObject).position == AVCaptureDevicePosition.front{
の後に、以下のコードを追加します。
// Audio Input
// Grab the default microphone (deprecated Swift 3 API; returns an implicitly
// unwrapped device) and attach it to the capture session so movies get sound.
let audioInputDevice = AVCaptureDevice.defaultDevice(withMediaType: AVMediaTypeAudio)
do
{
// Throws if the microphone is unavailable or in use by another session.
let audioInput = try AVCaptureDeviceInput(device: audioInputDevice)
// Add Audio Input
if captureSession.canAddInput(audioInput)
{
captureSession.addInput(audioInput)
}
else
{
NSLog("Can't Add Audio Input")
}
}
catch let error
{
NSLog("Error Getting Input Device: \(error)")
}
ありがとう