[iOS App Development] Recording & Saving Video with Swift 2 x AVFoundation x PhotoKit

Here is sample code showing how to record video with AVFoundation and save it in Swift 2.

import UIKit
import AVKit
import AVFoundation
import Photos

class ViewController: UIViewController, AVCaptureFileOutputRecordingDelegate {

    private var isRecording = false

    private var myButtonStart: UIButton!
    private var myButtonStop: UIButton!

    var session: AVCaptureSession!
    var preView: UIView!
    var camera: AVCaptureDevice!
    var input: AVCaptureDeviceInput!
    var output: AVCaptureMovieFileOutput!

    override func viewDidLoad() {
        super.viewDidLoad()

        // Record / stop buttons
        myButtonStart = UIButton(frame: CGRectMake(0, 0, 120, 50))
        myButtonStop = UIButton(frame: CGRectMake(0, 0, 120, 50))

        myButtonStart.backgroundColor = UIColor.redColor()
        myButtonStop.backgroundColor = UIColor.grayColor()

        myButtonStart.layer.masksToBounds = true
        myButtonStop.layer.masksToBounds = true

        myButtonStart.setTitle("Record", forState: .Normal)
        myButtonStop.setTitle("Stop", forState: .Normal)

        myButtonStart.layer.cornerRadius = 20.0
        myButtonStop.layer.cornerRadius = 20.0

        myButtonStart.layer.position = CGPoint(x: self.view.bounds.width/2 - 70, y: self.view.bounds.height - 50)
        myButtonStop.layer.position = CGPoint(x: self.view.bounds.width/2 + 70, y: self.view.bounds.height - 50)

        myButtonStart.addTarget(self, action: "onClickMyButton:", forControlEvents: .TouchUpInside)
        myButtonStop.addTarget(self, action: "onClickMyButton:", forControlEvents: .TouchUpInside)

        self.view.addSubview(myButtonStart)
        self.view.addSubview(myButtonStop)
    }

    override func viewWillAppear(animated: Bool) {
        super.viewWillAppear(animated)
        // Set up the preview area
        setupDisplay()
        // Set up the camera and capture session
        setupCamera()
    }

    override func viewDidDisappear(animated: Bool) {
        super.viewDidDisappear(animated)
        // Tear down the capture session when the view goes away
        session.stopRunning()

        for output in session.outputs {
            session.removeOutput(output as? AVCaptureOutput)
        }

        for input in session.inputs {
            session.removeInput(input as? AVCaptureInput)
        }
        session = nil
        camera = nil
    }

    func setupDisplay() {
        let screenWidth = UIScreen.mainScreen().bounds.size.width
        let screenHeight = UIScreen.mainScreen().bounds.size.height

        // The preview occupies the top two thirds of the screen
        preView = UIView(frame: CGRectMake(0.0, 0.0, screenWidth, screenHeight * 2 / 3))
    }

    func setupCamera() {
        session = AVCaptureSession()

        // Pick the back camera
        for captureDevice: AnyObject in AVCaptureDevice.devices() {
            if captureDevice.position == AVCaptureDevicePosition.Back {
                camera = captureDevice as? AVCaptureDevice
            }
        }

        do {
            input = try AVCaptureDeviceInput(device: camera)
        } catch let error as NSError {
            print(error)
        }

        if session.canAddInput(input) {
            session.addInput(input)
        }

        output = AVCaptureMovieFileOutput()

        if session.canAddOutput(output) {
            session.addOutput(output)
        }

        // Show the camera preview
        let previewLayer = AVCaptureVideoPreviewLayer(session: session)
        previewLayer.frame = preView.frame
        previewLayer.videoGravity = AVLayerVideoGravityResizeAspectFill

        self.view.layer.addSublayer(previewLayer)

        session.startRunning()
    }

    internal func onClickMyButton(sender: UIButton) {
        if sender == myButtonStart {
            self.startRecordingVideo()
        } else {
            self.stopRecordingVideo()
        }
    }

    // AVCaptureFileOutputRecordingDelegate: recording finished
    func captureOutput(captureOutput: AVCaptureFileOutput!, didFinishRecordingToOutputFileAtURL outputFileURL: NSURL!, fromConnections connections: [AnyObject]!, error: NSError!) {
        self.saveFile(outputFileURL)
    }

    // AVCaptureFileOutputRecordingDelegate: recording started
    func captureOutput(captureOutput: AVCaptureFileOutput!, didStartRecordingToOutputFileAtURL fileURL: NSURL!, fromConnections connections: [AnyObject]!) {
    }

    override func didReceiveMemoryWarning() {
        super.didReceiveMemoryWarning()
    }

    func startRecordingVideo() {
        if self.isRecording == false {
            self.isRecording = true
            output.startRecordingToOutputFileURL(createTempFileURL(), recordingDelegate: self)
            print("STARTED RECORDING VIDEO.")
        }
    }

    func stopRecordingVideo() {
        if self.isRecording == true {
            self.isRecording = false
            output.stopRecording()
            print("STOPPED RECORDING VIDEO.")
        }
    }

    // Build a temporary .mov path that does not exist yet
    func createTempFileURL() -> NSURL {
        var path: String = ""
        let fm = NSFileManager.defaultManager()
        var i = 0
        while path == "" || fm.fileExistsAtPath(path) {
            path = "\(NSTemporaryDirectory())output-\(i).mov"
            i += 1
        }
        return NSURL(fileURLWithPath: path)
    }

    // Save the recorded movie to the photo library via PhotoKit
    func saveFile(fileURL: NSURL) {
        let library = PHPhotoLibrary.sharedPhotoLibrary()
        library.performChanges({
            PHAssetChangeRequest.creationRequestForAssetFromVideoAtFileURL(fileURL)
        }, completionHandler: { (success: Bool, error: NSError?) -> Void in
            if success {
                print("VIDEO FILE SAVED.")
            } else {
                print("FAILED TO SAVE VIDEO: \(error)")
            }
        })
    }
}

One thing to note: ALAssetsLibrary has been deprecated since iOS 9, so the recorded video is saved with PHPhotoLibrary (PhotoKit) instead.
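The user also has to grant photo library access before PHPhotoLibrary can write anything. The sample above does not show that step, so here is a minimal sketch of how it might be wired into the same ViewController; the saveFileIfAuthorized name is just an illustrative helper, not part of the original sample.

    // A rough sketch (not from the original sample): ask for photo library
    // permission first, then reuse the saveFile(_:) method defined above.
    func saveFileIfAuthorized(fileURL: NSURL) {   // hypothetical helper name
        PHPhotoLibrary.requestAuthorization { status in
            if status == .Authorized {
                // Permission granted: save the recorded movie as before
                self.saveFile(fileURL)
            } else {
                print("PHOTO LIBRARY ACCESS DENIED.")
            }
        }
    }

Calling saveFileIfAuthorized(outputFileURL) from the didFinishRecordingToOutputFileAtURL delegate method instead of saveFile(outputFileURL) would then cover the first-run permission prompt as well.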

Reference sites
1: http://codegur.com/34427360/ios-9-swift-2-how-do-i-save-video-in-the-landscape-orientation-to-the-video-r

2: https://sites.google.com/a/gclue.jp/swift-docs/