Commit 17b67d99 by Daniel Dahan

Capture progression commit for Swift 2.3

parent d5096568
@@ -43,14 +43,14 @@ public class CapturePreview : MaterialView {
:name: captureDevicePointOfInterestForPoint
*/
public func captureDevicePointOfInterestForPoint(point: CGPoint) -> CGPoint {
- return (layer as! AVCaptureVideoPreviewLayer).captureDevicePointOfInterestForPoint(point)
+ return (layer as! AVCaptureVideoPreviewLayer).captureDevicePointOfInterest(for: point)
}
/**
:name: pointForCaptureDevicePointOfInterest
*/
public func pointForCaptureDevicePointOfInterest(point: CGPoint) -> CGPoint {
- return (layer as! AVCaptureVideoPreviewLayer).pointForCaptureDevicePointOfInterest(point)
+ return (layer as! AVCaptureVideoPreviewLayer).pointForCaptureDevicePoint(ofInterest: point)
}
/**
@@ -65,7 +65,7 @@ public class CapturePreview : MaterialView {
:name: preparePreviewLayer
*/
private func preparePreviewLayer() {
- layer.backgroundColor = Color.black.CGColor
+ layer.backgroundColor = Color.black.cgColor
layer.masksToBounds = true
(layer as! AVCaptureVideoPreviewLayer).videoGravity = AVLayerVideoGravityResizeAspectFill
}
...
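Note: the two helpers above wrap AVCaptureVideoPreviewLayer's conversion between view coordinates and the normalized (0,0)–(1,1) device space. A minimal usage sketch, not part of the diff, assuming a default CapturePreview initializer; the frame and tap point are illustrative:

    import UIKit

    let preview = CapturePreview()
    preview.frame = CGRect(x: 0, y: 0, width: 375, height: 667)
    let tap = CGPoint(x: 120, y: 200)
    // Convert a touch location into a capture-device point of interest...
    let devicePoint = preview.captureDevicePointOfInterestForPoint(point: tap)
    // ...and map a device point back into the preview layer's coordinates.
    let layerPoint = preview.pointForCaptureDevicePointOfInterest(point: devicePoint)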
@@ -87,57 +87,76 @@ public func CaptureSessionPresetToString(preset: CaptureSessionPreset) -> String
@objc(CaptureSessionDelegate)
public protocol CaptureSessionDelegate {
/**
- :name: captureSessionFailedWithError
+ A delegation method that is fired when the captureSession fails with an error.
+ - Parameter captureSession: A reference to the calling CaptureSession.
+ - Parameter error: An NSError corresponding to the error.
*/
- optional func captureSessionFailedWithError(capture: CaptureSession, error: NSError)
+ @objc
+ optional func captureSessionFailedWithError(captureSession: CaptureSession, error: NSError)
/**
- :name: captureSessionDidSwitchCameras
+ A delegation method that is fired when the camera has been switched to another.
+ - Parameter captureSession: A reference to the calling CaptureSession.
+ - Parameter position: An AVCaptureDevicePosition that the camera has switched to.
*/
- optional func captureSessionDidSwitchCameras(capture: CaptureSession, position: AVCaptureDevicePosition)
+ @objc
+ optional func captureSessionDidSwitchCameras(captureSession: CaptureSession, position: AVCaptureDevicePosition)
/**
- :name: captureSessionWillSwitchCameras
+ A delegation method that is fired before the camera switches to another.
+ - Parameter captureSession: A reference to the calling CaptureSession.
+ - Parameter position: An AVCaptureDevicePosition that the camera will switch to.
*/
- optional func captureSessionWillSwitchCameras(capture: CaptureSession, position: AVCaptureDevicePosition)
+ @objc
+ optional func captureSessionWillSwitchCameras(captureSession: CaptureSession, position: AVCaptureDevicePosition)
/**
- :name: captureStillImageAsynchronously
+ A delegation method that is fired when an image has been captured asynchronously.
+ - Parameter captureSession: A reference to the calling CaptureSession.
+ - Parameter image: An image that has been captured.
*/
- optional func captureStillImageAsynchronously(capture: CaptureSession, image: UIImage)
+ @objc
+ optional func captureStillImageAsynchronously(captureSession: CaptureSession, image: UIImage)
/**
- :name: captureStillImageAsynchronouslyFailedWithError
+ A delegation method that is fired when capturing an image asynchronously has failed.
+ - Parameter captureSession: A reference to the calling CaptureSession.
+ - Parameter error: An NSError corresponding to the error.
*/
- optional func captureStillImageAsynchronouslyFailedWithError(capture: CaptureSession, error: NSError)
+ @objc
+ optional func captureStillImageAsynchronouslyFailedWithError(captureSession: CaptureSession, error: NSError)
/**
:name: captureCreateMovieFileFailedWithError
*/
- optional func captureCreateMovieFileFailedWithError(capture: CaptureSession, error: NSError)
+ @objc
+ optional func captureCreateMovieFileFailedWithError(captureSession: CaptureSession, error: NSError)
/**
:name: captureMovieFailedWithError
*/
- optional func captureMovieFailedWithError(capture: CaptureSession, error: NSError)
+ @objc
+ optional func captureMovieFailedWithError(captureSession: CaptureSession, error: NSError)
/**
:name: captureDidStartRecordingToOutputFileAtURL
*/
- optional func captureDidStartRecordingToOutputFileAtURL(capture: CaptureSession, captureOutput: AVCaptureFileOutput, fileURL: NSURL, fromConnections connections: [AnyObject])
+ @objc
+ optional func captureDidStartRecordingToOutputFileAtURL(captureSession: CaptureSession, captureOutput: AVCaptureFileOutput, fileURL: NSURL, fromConnections connections: [AnyObject])
/**
:name: captureDidFinishRecordingToOutputFileAtURL
*/
- optional func captureDidFinishRecordingToOutputFileAtURL(capture: CaptureSession, captureOutput: AVCaptureFileOutput, outputFileURL: NSURL, fromConnections connections: [AnyObject], error: NSError!)
+ @objc
+ optional func captureDidFinishRecordingToOutputFileAtURL(captureSession: CaptureSession, captureOutput: AVCaptureFileOutput, outputFileURL: NSURL, fromConnections connections: [AnyObject], error: NSError!)
}
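Note: an illustrative sketch of adopting the renamed delegate methods above, not part of the diff; the observer class is hypothetical and only two of the optional methods are implemented:

    import AVFoundation

    class CaptureObserver: NSObject, CaptureSessionDelegate {
        // Called when the session reports a failure.
        func captureSessionFailedWithError(captureSession: CaptureSession, error: NSError) {
            print("Capture failed: \(error.localizedDescription)")
        }

        // Called after the active camera has been switched.
        func captureSessionDidSwitchCameras(captureSession: CaptureSession, position: AVCaptureDevicePosition) {
            print("Switched camera to position \(position.rawValue)")
        }
    }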
@objc(CaptureSession)
- public class CaptureSession : NSObject, AVCaptureFileOutputRecordingDelegate {
+ public class CaptureSession: NSObject, AVCaptureFileOutputRecordingDelegate {
/**
:name: sessionQueue
*/
- private lazy var sessionQueue: dispatch_queue_t = dispatch_queue_create("io.material.CaptureSession", DISPATCH_QUEUE_SERIAL)
+ private var sessionQueue: DispatchQueue!
/**
:name: activeVideoInput
@@ -199,10 +218,10 @@ public class CaptureSession : NSObject, AVCaptureFileOutputRecordingDelegate {
public var inactiveCamera: AVCaptureDevice? {
var device: AVCaptureDevice?
if 1 < cameraCount {
- if activeCamera?.position == .Back {
+ if activeCamera?.position == .back {
- device = cameraWithPosition(.Front)
+ device = cameraWithPosition(position: .front)
} else {
- device = cameraWithPosition(.Back)
+ device = cameraWithPosition(position: .back)
}
}
return device
@@ -212,7 +231,7 @@ public class CaptureSession : NSObject, AVCaptureFileOutputRecordingDelegate {
:name: cameraCount
*/
public var cameraCount: Int {
- return AVCaptureDevice.devicesWithMediaType(AVMediaTypeVideo).count
+ return AVCaptureDevice.devices(withMediaType: AVMediaTypeVideo).count
}
/**
@@ -226,14 +245,14 @@ public class CaptureSession : NSObject, AVCaptureFileOutputRecordingDelegate {
:name: cameraSupportsTapToFocus
*/
public var cameraSupportsTapToFocus: Bool {
- return nil == activeCamera ? false : activeCamera!.focusPointOfInterestSupported
+ return nil == activeCamera ? false : activeCamera!.isFocusPointOfInterestSupported
}
/**
:name: cameraSupportsTapToExpose
*/
public var cameraSupportsTapToExpose: Bool {
- return nil == activeCamera ? false : activeCamera!.exposurePointOfInterestSupported
+ return nil == activeCamera ? false : activeCamera!.isExposurePointOfInterestSupported
}
/**
@@ -266,7 +285,7 @@ public class CaptureSession : NSObject, AVCaptureFileOutputRecordingDelegate {
}
set(value) {
var error: NSError?
- if isFocusModeSupported(focusMode) {
+ if isFocusModeSupported(focusMode: focusMode) {
do {
let device: AVCaptureDevice = activeCamera!
try device.lockForConfiguration()
@@ -283,7 +302,7 @@ public class CaptureSession : NSObject, AVCaptureFileOutputRecordingDelegate {
userInfo[NSUnderlyingErrorKey] = error
}
if let e: NSError = error {
- delegate?.captureSessionFailedWithError?(self, error: e)
+ delegate?.captureSessionFailedWithError?(capture: self, error: e)
}
}
}
@@ -297,7 +316,7 @@ public class CaptureSession : NSObject, AVCaptureFileOutputRecordingDelegate {
}
set(value) {
var error: NSError?
- if isFlashModeSupported(flashMode) {
+ if isFlashModeSupported(flashMode: flashMode) {
do {
let device: AVCaptureDevice = activeCamera!
try device.lockForConfiguration()
@@ -314,7 +333,7 @@ public class CaptureSession : NSObject, AVCaptureFileOutputRecordingDelegate {
userInfo[NSUnderlyingErrorKey] = error
}
if let e: NSError = error {
- delegate?.captureSessionFailedWithError?(self, error: e)
+ delegate?.captureSessionFailedWithError?(capture: self, error: e)
}
}
}
@@ -328,7 +347,7 @@ public class CaptureSession : NSObject, AVCaptureFileOutputRecordingDelegate {
}
set(value) {
var error: NSError?
- if isTorchModeSupported(torchMode) {
+ if isTorchModeSupported(torchMode: torchMode) {
do {
let device: AVCaptureDevice = activeCamera!
try device.lockForConfiguration()
@@ -345,7 +364,7 @@ public class CaptureSession : NSObject, AVCaptureFileOutputRecordingDelegate {
userInfo[NSUnderlyingErrorKey] = error
}
if let e: NSError = error {
- delegate?.captureSessionFailedWithError?(self, error: e)
+ delegate?.captureSessionFailedWithError?(capture: self, error: e)
}
}
}
@@ -353,22 +372,22 @@ public class CaptureSession : NSObject, AVCaptureFileOutputRecordingDelegate {
/// The session quality preset.
public var sessionPreset: CaptureSessionPreset {
didSet {
- session.sessionPreset = CaptureSessionPresetToString(sessionPreset)
+ session.sessionPreset = CaptureSessionPresetToString(preset: sessionPreset)
}
}
/// The capture video orientation.
public var videoOrientation: AVCaptureVideoOrientation {
var orientation: AVCaptureVideoOrientation
- switch UIDevice.currentDevice().orientation {
+ switch UIDevice.current().orientation {
- case .Portrait:
+ case .portrait:
- orientation = .Portrait
+ orientation = .portrait
- case .LandscapeRight:
+ case .landscapeRight:
- orientation = .LandscapeLeft
+ orientation = .landscapeLeft
- case .PortraitUpsideDown:
+ case .portraitUpsideDown:
- orientation = .PortraitUpsideDown
+ orientation = .portraitUpsideDown
default:
- orientation = .LandscapeRight
+ orientation = .landscapeRight
}
return orientation
}
@@ -386,7 +405,7 @@ public class CaptureSession : NSObject, AVCaptureFileOutputRecordingDelegate {
/// Starts the session.
public func startSession() {
if !isRunning {
- dispatch_async(sessionQueue) { [weak self] in
+ sessionQueue.async() { [weak self] in
self?.session.startRunning()
}
}
@@ -395,7 +414,7 @@ public class CaptureSession : NSObject, AVCaptureFileOutputRecordingDelegate {
/// Stops the session.
public func stopSession() {
if isRunning {
- dispatch_async(sessionQueue) { [weak self] in
+ sessionQueue.async() { [weak self] in
self?.session.stopRunning()
}
}
@@ -406,7 +425,7 @@ public class CaptureSession : NSObject, AVCaptureFileOutputRecordingDelegate {
if canSwitchCameras {
do {
if let v: AVCaptureDevicePosition = cameraPosition {
- delegate?.captureSessionWillSwitchCameras?(self, position: v)
+ delegate?.captureSessionWillSwitchCameras?(capture: self, position: v)
let videoInput: AVCaptureDeviceInput? = try AVCaptureDeviceInput(device: inactiveCamera!)
session.beginConfiguration()
session.removeInput(activeVideoInput)
@@ -418,10 +437,10 @@ public class CaptureSession : NSObject, AVCaptureFileOutputRecordingDelegate {
session.addInput(activeVideoInput)
}
session.commitConfiguration()
- delegate?.captureSessionDidSwitchCameras?(self, position: cameraPosition!)
+ delegate?.captureSessionDidSwitchCameras?(capture: self, position: cameraPosition!)
}
} catch let e as NSError {
- delegate?.captureSessionFailedWithError?(self, error: e)
+ delegate?.captureSessionFailedWithError?(capture: self, error: e)
}
}
}
@@ -459,12 +478,12 @@ public class CaptureSession : NSObject, AVCaptureFileOutputRecordingDelegate {
*/
public func focusAtPoint(point: CGPoint) {
var error: NSError?
- if cameraSupportsTapToFocus && isFocusModeSupported(.AutoFocus) {
+ if cameraSupportsTapToFocus && isFocusModeSupported(focusMode: .autoFocus) {
do {
let device: AVCaptureDevice = activeCamera!
try device.lockForConfiguration()
device.focusPointOfInterest = point
- device.focusMode = .AutoFocus
+ device.focusMode = .autoFocus
device.unlockForConfiguration()
} catch let e as NSError {
error = e
@@ -477,7 +496,7 @@ public class CaptureSession : NSObject, AVCaptureFileOutputRecordingDelegate {
userInfo[NSUnderlyingErrorKey] = error
}
if let e: NSError = error {
- delegate?.captureSessionFailedWithError?(self, error: e)
+ delegate?.captureSessionFailedWithError?(capture: self, error: e)
}
}
@@ -486,14 +505,14 @@ public class CaptureSession : NSObject, AVCaptureFileOutputRecordingDelegate {
*/
public func exposeAtPoint(point: CGPoint) {
var error: NSError?
- if cameraSupportsTapToExpose && isExposureModeSupported(.ContinuousAutoExposure) {
+ if cameraSupportsTapToExpose && isExposureModeSupported(exposureMode: .continuousAutoExposure) {
do {
let device: AVCaptureDevice = activeCamera!
try device.lockForConfiguration()
device.exposurePointOfInterest = point
- device.exposureMode = .ContinuousAutoExposure
+ device.exposureMode = .continuousAutoExposure
- if device.isExposureModeSupported(.Locked) {
+ if device.isExposureModeSupported(.locked) {
- device.addObserver(self, forKeyPath: "adjustingExposure", options: .New, context: &CaptureSessionAdjustingExposureContext)
+ device.addObserver(self, forKeyPath: "adjustingExposure", options: .new, context: &CaptureSessionAdjustingExposureContext)
}
device.unlockForConfiguration()
} catch let e as NSError {
@@ -507,30 +526,30 @@ public class CaptureSession : NSObject, AVCaptureFileOutputRecordingDelegate {
userInfo[NSUnderlyingErrorKey] = error
}
if let e: NSError = error {
- delegate?.captureSessionFailedWithError?(self, error: e)
+ delegate?.captureSessionFailedWithError?(capture: self, error: e)
}
}
/**
:name: observeValueForKeyPath
*/
- public override func observeValueForKeyPath(keyPath: String?, ofObject object: AnyObject?, change: [String : AnyObject]?, context: UnsafeMutablePointer<Void>) {
+ public override func observeValue(forKeyPath keyPath: String?, of object: AnyObject?, change: [NSKeyValueChangeKey: AnyObject]?, context: UnsafeMutablePointer<Void>?) {
if context == &CaptureSessionAdjustingExposureContext {
let device: AVCaptureDevice = object as! AVCaptureDevice
- if !device.adjustingExposure && device.isExposureModeSupported(.Locked) {
+ if !device.isAdjustingExposure && device.isExposureModeSupported(.locked) {
object!.removeObserver(self, forKeyPath: "adjustingExposure", context: &CaptureSessionAdjustingExposureContext)
- dispatch_async(dispatch_get_main_queue()) {
+ DispatchQueue.main.async() {
do {
try device.lockForConfiguration()
- device.exposureMode = .Locked
+ device.exposureMode = .locked
device.unlockForConfiguration()
} catch let e as NSError {
- self.delegate?.captureSessionFailedWithError?(self, error: e)
+ self.delegate?.captureSessionFailedWithError?(capture: self, error: e)
}
}
}
} else {
- super.observeValueForKeyPath(keyPath, ofObject: object, change: change, context: context)
+ super.observeValue(forKeyPath: keyPath, of: object, change: change, context: context)
}
}
@@ -539,22 +558,22 @@ public class CaptureSession : NSObject, AVCaptureFileOutputRecordingDelegate {
*/
public func resetFocusAndExposureModes() {
let device: AVCaptureDevice = activeCamera!
- let canResetFocus: Bool = device.focusPointOfInterestSupported && device.isFocusModeSupported(.ContinuousAutoFocus)
+ let canResetFocus: Bool = device.isFocusPointOfInterestSupported && device.isFocusModeSupported(.continuousAutoFocus)
- let canResetExposure: Bool = device.exposurePointOfInterestSupported && device.isExposureModeSupported(.ContinuousAutoExposure)
+ let canResetExposure: Bool = device.isExposurePointOfInterestSupported && device.isExposureModeSupported(.continuousAutoExposure)
- let centerPoint: CGPoint = CGPointMake(0.5, 0.5)
+ let centerPoint: CGPoint = CGPoint(x: 0.5, y: 0.5)
do {
try device.lockForConfiguration()
if canResetFocus {
- device.focusMode = .ContinuousAutoFocus
+ device.focusMode = .continuousAutoFocus
device.focusPointOfInterest = centerPoint
}
if canResetExposure {
- device.exposureMode = .ContinuousAutoExposure
+ device.exposureMode = .continuousAutoExposure
device.exposurePointOfInterest = centerPoint
}
device.unlockForConfiguration()
} catch let e as NSError {
- delegate?.captureSessionFailedWithError?(self, error: e)
+ delegate?.captureSessionFailedWithError?(capture: self, error: e)
}
}
@@ -562,18 +581,18 @@ public class CaptureSession : NSObject, AVCaptureFileOutputRecordingDelegate {
:name: captureStillImage
*/
public func captureStillImage() {
- dispatch_async(sessionQueue) { [weak self] in
+ sessionQueue.async() { [weak self] in
if let s: CaptureSession = self {
- if let v: AVCaptureConnection = s.imageOutput.connectionWithMediaType(AVMediaTypeVideo) {
+ if let v: AVCaptureConnection = s.imageOutput.connection(withMediaType: AVMediaTypeVideo) {
v.videoOrientation = s.videoOrientation
- s.imageOutput.captureStillImageAsynchronouslyFromConnection(v) { [weak self] (sampleBuffer: CMSampleBuffer!, error: NSError!) -> Void in
+ s.imageOutput.captureStillImageAsynchronously(from: v) { [weak self] (sampleBuffer: CMSampleBuffer?, error: NSError?) -> Void in
if let s: CaptureSession = self {
var captureError: NSError? = error
if nil == captureError {
let data: NSData = AVCaptureStillImageOutput.jpegStillImageNSDataRepresentation(sampleBuffer)
- if let image1: UIImage = UIImage(data: data) {
+ if let image1: UIImage = UIImage(data: data as Data) {
- if let image2: UIImage = s.adjustOrientationForImage(image1) {
+ if let image2: UIImage = s.adjustOrientationForImage(image: image1) {
- s.delegate?.captureStillImageAsynchronously?(s, image: image2)
+ s.delegate?.captureStillImageAsynchronously?(capture: s, image: image2)
} else {
var userInfo: Dictionary<String, AnyObject> = Dictionary<String, AnyObject>()
userInfo[NSLocalizedDescriptionKey] = "[Material Error: Cannot fix image orientation.]"
@@ -591,7 +610,7 @@ public class CaptureSession : NSObject, AVCaptureFileOutputRecordingDelegate {
}
if let e: NSError = captureError {
- s.delegate?.captureStillImageAsynchronouslyFailedWithError?(s, error: e)
+ s.delegate?.captureStillImageAsynchronouslyFailedWithError?(capture: s, error: e)
}
}
}
@@ -605,26 +624,26 @@ public class CaptureSession : NSObject, AVCaptureFileOutputRecordingDelegate {
*/
public func startRecording() {
if !isRecording {
- dispatch_async(sessionQueue) { [weak self] in
+ sessionQueue.async() { [weak self] in
if let s: CaptureSession = self {
- if let v: AVCaptureConnection = s.movieOutput.connectionWithMediaType(AVMediaTypeVideo) {
+ if let v: AVCaptureConnection = s.movieOutput.connection(withMediaType: AVMediaTypeVideo) {
v.videoOrientation = s.videoOrientation
- v.preferredVideoStabilizationMode = .Auto
+ v.preferredVideoStabilizationMode = .auto
}
if let v: AVCaptureDevice = s.activeCamera {
- if v.smoothAutoFocusSupported {
+ if v.isSmoothAutoFocusSupported {
do {
try v.lockForConfiguration()
- v.smoothAutoFocusEnabled = true
+ v.isSmoothAutoFocusEnabled = true
v.unlockForConfiguration()
} catch let e as NSError {
- s.delegate?.captureSessionFailedWithError?(s, error: e)
+ s.delegate?.captureSessionFailedWithError?(capture: s, error: e)
}
}
s.movieOutputURL = s.uniqueURL()
if let v: NSURL = s.movieOutputURL {
- s.movieOutput.startRecordingToOutputFileURL(v, recordingDelegate: s)
+ s.movieOutput.startRecording(toOutputFileURL: v as URL!, recordingDelegate: s)
}
}
}
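Note: an illustrative sketch of the recording flow in this hunk, not part of the diff; session setup, camera permissions, and the preview wiring are assumed to happen elsewhere:

    let session = CaptureSession()
    session.startSession()
    // Begins writing a movie to the unique .mov URL produced by uniqueURL().
    session.startRecording()
    // ... later, stop only if a recording is actually in flight.
    if session.isRecording {
        session.stopRecording()
    }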
@@ -644,17 +663,22 @@ public class CaptureSession : NSObject, AVCaptureFileOutputRecordingDelegate {
/**
:name: captureOutput
*/
- public func captureOutput(captureOutput: AVCaptureFileOutput!, didStartRecordingToOutputFileAtURL fileURL: NSURL!, fromConnections connections: [AnyObject]!) {
+ public func capture(_ captureOutput: AVCaptureFileOutput!, didStartRecordingToOutputFileAt fileURL: URL!, fromConnections connections: [AnyObject]!) {
isRecording = true
- delegate?.captureDidStartRecordingToOutputFileAtURL?(self, captureOutput: captureOutput, fileURL: fileURL, fromConnections: connections)
+ delegate?.captureDidStartRecordingToOutputFileAtURL?(capture: self, captureOutput: captureOutput, fileURL: fileURL, fromConnections: connections)
}
/**
:name: captureOutput
*/
- public func captureOutput(captureOutput: AVCaptureFileOutput!, didFinishRecordingToOutputFileAtURL outputFileURL: NSURL!, fromConnections connections: [AnyObject]!, error: NSError!) {
+ public func capture(_ captureOutput: AVCaptureFileOutput!, didFinishRecordingToOutputFileAt outputFileURL: URL!, fromConnections connections: [AnyObject]!, error: NSError!) {
isRecording = false
- delegate?.captureDidFinishRecordingToOutputFileAtURL?(self, captureOutput: captureOutput, outputFileURL: outputFileURL, fromConnections: connections, error: error)
+ delegate?.captureDidFinishRecordingToOutputFileAtURL?(capture: self, captureOutput: captureOutput, outputFileURL: outputFileURL, fromConnections: connections, error: error)
}
+ /// Prepares the sessionQueue.
+ private func prepareSessionQueue() {
+ sessionQueue = DispatchQueue(label: "io.cosmicmind.Material.CaptureSession", attributes: .serial, target: nil)
+ }
/**
@@ -672,12 +696,12 @@ public class CaptureSession : NSObject, AVCaptureFileOutputRecordingDelegate {
*/
private func prepareVideoInput() {
do {
- activeVideoInput = try AVCaptureDeviceInput(device: AVCaptureDevice.defaultDeviceWithMediaType(AVMediaTypeVideo))
+ activeVideoInput = try AVCaptureDeviceInput(device: AVCaptureDevice.defaultDevice(withMediaType: AVMediaTypeVideo))
if session.canAddInput(activeVideoInput) {
session.addInput(activeVideoInput)
}
} catch let e as NSError {
- delegate?.captureSessionFailedWithError?(self, error: e)
+ delegate?.captureSessionFailedWithError?(capture: self, error: e)
}
}
@@ -686,12 +710,12 @@ public class CaptureSession : NSObject, AVCaptureFileOutputRecordingDelegate {
*/
private func prepareAudioInput() {
do {
- activeAudioInput = try AVCaptureDeviceInput(device: AVCaptureDevice.defaultDeviceWithMediaType(AVMediaTypeAudio))
+ activeAudioInput = try AVCaptureDeviceInput(device: AVCaptureDevice.defaultDevice(withMediaType: AVMediaTypeAudio))
if session.canAddInput(activeAudioInput) {
session.addInput(activeAudioInput)
}
} catch let e as NSError {
- delegate?.captureSessionFailedWithError?(self, error: e)
+ delegate?.captureSessionFailedWithError?(capture: self, error: e)
}
}
@@ -718,7 +742,7 @@ public class CaptureSession : NSObject, AVCaptureFileOutputRecordingDelegate {
:name: cameraWithPosition
*/
private func cameraWithPosition(position: AVCaptureDevicePosition) -> AVCaptureDevice? {
- let devices: Array<AVCaptureDevice> = AVCaptureDevice.devicesWithMediaType(AVMediaTypeVideo) as! Array<AVCaptureDevice>
+ let devices: Array<AVCaptureDevice> = AVCaptureDevice.devices(withMediaType: AVMediaTypeVideo) as! Array<AVCaptureDevice>
for device in devices {
if device.position == position {
return device
@@ -732,13 +756,13 @@ public class CaptureSession : NSObject, AVCaptureFileOutputRecordingDelegate {
*/
private func uniqueURL() -> NSURL? {
do {
- let directory: NSURL = try NSFileManager.defaultManager().URLForDirectory(.DocumentDirectory, inDomain: .UserDomainMask, appropriateForURL: nil, create: true)
+ let directory: NSURL = try FileManager.default().urlForDirectory(.documentDirectory, in: .userDomainMask, appropriateFor: nil, create: true)
- let dateFormatter = NSDateFormatter()
+ let dateFormatter = DateFormatter()
- dateFormatter.dateStyle = .FullStyle
+ dateFormatter.dateStyle = .fullStyle
- dateFormatter.timeStyle = .FullStyle
+ dateFormatter.timeStyle = .fullStyle
- return directory.URLByAppendingPathComponent(dateFormatter.stringFromDate(NSDate()) + ".mov")
+ return directory.appendingPathComponent(dateFormatter.string(from: NSDate() as Date) + ".mov")
} catch let e as NSError {
- delegate?.captureCreateMovieFileFailedWithError?(self, error: e)
+ delegate?.captureCreateMovieFileFailedWithError?(capture: self, error: e)
}
return nil
}
@@ -751,55 +775,55 @@ public class CaptureSession : NSObject, AVCaptureFileOutputRecordingDelegate {
- Returns: An optional UIImage if successful.
*/
private func adjustOrientationForImage(image: UIImage) -> UIImage? {
- guard .Up != image.imageOrientation else {
+ guard .up != image.imageOrientation else {
return image
}
- var transform: CGAffineTransform = CGAffineTransformIdentity
+ var transform: CGAffineTransform = .identity
// Rotate if Left, Right, or Down.
switch image.imageOrientation {
- case .Down, .DownMirrored:
+ case .down, .downMirrored:
- transform = CGAffineTransformTranslate(transform, image.size.width, image.size.height)
+ transform = transform.translateBy(x: image.size.width, y: image.size.height)
- transform = CGAffineTransformRotate(transform, CGFloat(M_PI))
+ transform = transform.rotate(CGFloat(M_PI))
- case .Left, .LeftMirrored:
+ case .left, .leftMirrored:
- transform = CGAffineTransformTranslate(transform, image.size.width, 0)
+ transform = transform.translateBy(x: image.size.width, y: 0)
- transform = CGAffineTransformRotate(transform, CGFloat(M_PI_2))
+ transform = transform.rotate(CGFloat(M_PI_2))
- case .Right, .RightMirrored:
+ case .right, .rightMirrored:
- transform = CGAffineTransformTranslate(transform, 0, image.size.height)
+ transform = transform.translateBy(x: 0, y: image.size.height)
- transform = CGAffineTransformRotate(transform, -CGFloat(M_PI_2))
+ transform = transform.rotate(-CGFloat(M_PI_2))
default:break
}
// Flip if mirrored.
switch image.imageOrientation {
- case .UpMirrored, .DownMirrored:
+ case .upMirrored, .downMirrored:
- transform = CGAffineTransformTranslate(transform, image.size.width, 0)
+ transform = transform.translateBy(x: image.size.width, y: 0)
- transform = CGAffineTransformScale(transform, -1, 1)
+ transform = transform.scaleBy(x: -1, y: 1)
- case .LeftMirrored, .RightMirrored:
+ case .leftMirrored, .rightMirrored:
- transform = CGAffineTransformTranslate(transform, image.size.height, 0)
+ transform = transform.translateBy(x: image.size.height, y: 0)
- transform = CGAffineTransformScale(transform, -1, 1)
+ transform = transform.scaleBy(x: -1, y: 1)
default:break
}
// Draw the underlying CGImage with the calculated transform.
- guard let context = CGBitmapContextCreate(nil, Int(image.size.width), Int(image.size.height), CGImageGetBitsPerComponent(image.CGImage), 0, CGImageGetColorSpace(image.CGImage), CGImageGetBitmapInfo(image.CGImage).rawValue) else {
+ guard let context = CGContext(data: nil, width: Int(image.size.width), height: Int(image.size.height), bitsPerComponent: image.cgImage!.bitsPerComponent, bytesPerRow: 0, space: image.cgImage!.colorSpace!, bitmapInfo: image.cgImage!.bitmapInfo.rawValue) else {
return nil
}
- CGContextConcatCTM(context, transform)
+ context.concatCTM(transform)
switch image.imageOrientation {
- case .Left, .LeftMirrored, .Right, .RightMirrored:
+ case .left, .leftMirrored, .right, .rightMirrored:
- CGContextDrawImage(context, CGRect(x: 0, y: 0, width: image.size.height, height: image.size.width), image.CGImage)
+ context.draw(in: CGRect(x: 0, y: 0, width: image.size.height, height: image.size.width), image: image.cgImage!)
default:
- CGContextDrawImage(context, CGRect(origin: .zero, size: image.size), image.CGImage)
+ context.draw(in: CGRect(origin: .zero, size: image.size), image: image.cgImage!)
}
- guard let CGImage = CGBitmapContextCreateImage(context) else {
+ guard let cgImage = context.makeImage() else {
return nil
}
- return UIImage(CGImage: CGImage)
+ return UIImage(cgImage: cgImage)
}
}
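Note: an illustrative sketch of driving the session above from a delegate, not part of the diff; the PhotoTaker type is hypothetical, and it assumes the session exposes the assignable delegate property used throughout this file:

    import UIKit

    class PhotoTaker: NSObject, CaptureSessionDelegate {
        let session = CaptureSession()

        func takePhoto() {
            session.delegate = self
            session.startSession()
            // In practice you would capture once the session is running.
            session.captureStillImage()
        }

        // The captured image arrives here, already orientation-corrected by
        // adjustOrientationForImage(image:).
        func captureStillImageAsynchronously(captureSession: CaptureSession, image: UIImage) {
            print("Captured image of size \(image.size)")
        }
    }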
@@ -32,268 +32,259 @@ import UIKit
import AVFoundation
public enum CaptureMode {
- case Photo
+ case photo
- case Video
+ case video
}
@objc(CaptureViewDelegate)
- public protocol CaptureViewDelegate : MaterialDelegate {
+ public protocol CaptureViewDelegate: MaterialDelegate {
/**
- :name: captureViewDidStartRecordTimer
+ A delegation method that is fired when the record timer has started.
+ - Parameter captureView: A reference to the calling captureView.
*/
+ @objc
optional func captureViewDidStartRecordTimer(captureView: CaptureView)
/**
- :name: captureViewDidUpdateRecordTimer
+ A delegation method that is fired when the record timer was updated.
+ - Parameter captureView: A reference to the calling captureView.
+ - Parameter hours: An integer representing hours.
+ - Parameter minutes: An integer representing minutes.
+ - Parameter seconds: An integer representing seconds.
*/
+ @objc
optional func captureViewDidUpdateRecordTimer(captureView: CaptureView, hours: Int, minutes: Int, seconds: Int)
/**
- :name: captureViewDidStopRecordTimer
+ A delegation method that is fired when the record timer has stopped.
+ - Parameter captureView: A reference to the calling captureView.
+ - Parameter hours: An integer representing hours.
+ - Parameter minutes: An integer representing minutes.
+ - Parameter seconds: An integer representing seconds.
*/
+ @objc
optional func captureViewDidStopRecordTimer(captureView: CaptureView, hours: Int, minutes: Int, seconds: Int)
/**
- :name: captureViewDidTapToFocusAtPoint
+ A delegation method that is fired when the user tapped to adjust the focus.
+ - Parameter captureView: A reference to the calling captureView.
+ - Parameter point: CGPoint that the user tapped at.
*/
+ @objc
optional func captureViewDidTapToFocusAtPoint(captureView: CaptureView, point: CGPoint)
/**
- :name: captureViewDidTapToExposeAtPoint
+ A delegation method that is fired when the user tapped to adjust the exposure.
+ - Parameter captureView: A reference to the calling captureView.
+ - Parameter point: CGPoint that the user tapped at.
*/
+ @objc
optional func captureViewDidTapToExposeAtPoint(captureView: CaptureView, point: CGPoint)
/**
- :name: captureViewDidTapToResetAtPoint
+ A delegation method that is fired when the user tapped to reset.
+ - Parameter captureView: A reference to the calling captureView.
+ - Parameter point: CGPoint that the user tapped at.
*/
+ @objc
optional func captureViewDidTapToResetAtPoint(captureView: CaptureView, point: CGPoint)
/**
- :name: captureViewDidPressFlashButton
+ A delegation method that is fired when the user pressed the flash button.
+ - Parameter captureView: A reference to the calling captureView.
+ - Parameter button: A reference to the UIButton that the user pressed.
*/
+ @objc
optional func captureViewDidPressFlashButton(captureView: CaptureView, button: UIButton)
/**
- :name: captureViewDidPressSwitchCamerasButton
+ A delegation method that is fired when the user pressed the switch camera button.
+ - Parameter captureView: A reference to the calling captureView.
+ - Parameter button: A reference to the UIButton that the user pressed.
*/
+ @objc
optional func captureViewDidPressSwitchCamerasButton(captureView: CaptureView, button: UIButton)
/**
- :name: captureViewDidPressCaptureButton
+ A delegation method that is fired when the user pressed the capture button.
+ - Parameter captureView: A reference to the calling captureView.
+ - Parameter button: A reference to the UIButton that the user pressed.
*/
+ @objc
optional func captureViewDidPressCaptureButton(captureView: CaptureView, button: UIButton)
/**
- :name: captureViewDidPressCameraButton
+ A delegation method that is fired when the user enabled the photo camera.
+ - Parameter captureView: A reference to the calling captureView.
+ - Parameter button: A reference to the UIButton that the user pressed.
*/
+ @objc
optional func captureViewDidPressCameraButton(captureView: CaptureView, button: UIButton)
/**
- :name: captureViewDidPressVideoButton
+ A delegation method that is fired when the user enabled the video camera.
+ - Parameter captureView: A reference to the calling captureView.
+ - Parameter button: A reference to the UIButton that the user pressed.
*/
+ @objc
optional func captureViewDidPressVideoButton(captureView: CaptureView, button: UIButton)
}
public class CaptureView : MaterialView, UIGestureRecognizerDelegate {
- /**
- :name: timer
- */
- private var timer: NSTimer?
+ /// A Timer reference for when recording is enabled.
+ private var timer: Timer?
- /**
- :name: tapToFocusGesture
- */
+ /// A tap gesture reference for focus events.
private var tapToFocusGesture: UITapGestureRecognizer?
- /**
- :name: tapToExposeGesture
- */
+ /// A tap gesture reference for exposure events.
private var tapToExposeGesture: UITapGestureRecognizer?
- /**
- :name: tapToResetGesture
- */
+ /// A tap gesture reference for reset events.
private var tapToResetGesture: UITapGestureRecognizer?
- /**
- :name: captureMode
- */
- public lazy var captureMode: CaptureMode = .Video
+ /// A reference to the capture mode.
+ public lazy var captureMode: CaptureMode = .video
- /**
- :name: tapToFocusEnabled
- */
- @IBInspectable public var tapToFocusEnabled: Bool = false {
+ /// A boolean indicating whether to enable tap to focus.
+ @IBInspectable public var enableTapToFocus: Bool = false {
didSet {
- if tapToFocusEnabled {
+ if enableTapToFocus {
- tapToResetEnabled = true
+ enableTapToReset = true
prepareFocusLayer()
- prepareTapGesture(&tapToFocusGesture, numberOfTapsRequired: 1, numberOfTouchesRequired: 1, selector: #selector(handleTapToFocusGesture))
+ prepareTapGesture(gesture: &tapToFocusGesture, numberOfTapsRequired: 1, numberOfTouchesRequired: 1, selector: #selector(handleTapToFocusGesture))
if let v: UITapGestureRecognizer = tapToExposeGesture {
- tapToFocusGesture!.requireGestureRecognizerToFail(v)
+ tapToFocusGesture!.require(toFail: v)
}
} else {
- removeTapGesture(&tapToFocusGesture)
+ removeTapGesture(gesture: &tapToFocusGesture)
focusLayer?.removeFromSuperlayer()
focusLayer = nil
}
}
}
- /**
- :name: tapToExposeEnabled
- */
- @IBInspectable public var tapToExposeEnabled: Bool = false {
+ /// A boolean indicating whether to enable tap to expose.
+ @IBInspectable public var enableTapToExpose: Bool = false {
didSet {
- if tapToExposeEnabled {
+ if enableTapToExpose {
- tapToResetEnabled = true
+ enableTapToReset = true
prepareExposureLayer()
- prepareTapGesture(&tapToExposeGesture, numberOfTapsRequired: 2, numberOfTouchesRequired: 1, selector: #selector(handleTapToExposeGesture))
+ prepareTapGesture(gesture: &tapToExposeGesture, numberOfTapsRequired: 2, numberOfTouchesRequired: 1, selector: #selector(handleTapToExposeGesture))
if let v: UITapGestureRecognizer = tapToFocusGesture {
- v.requireGestureRecognizerToFail(tapToExposeGesture!)
+ v.require(toFail: tapToExposeGesture!)
}
} else {
- removeTapGesture(&tapToExposeGesture)
+ removeTapGesture(gesture: &tapToExposeGesture)
exposureLayer?.removeFromSuperlayer()
exposureLayer = nil
}
}
}
- /**
- :name: tapToResetEnabled
- */
- @IBInspectable public var tapToResetEnabled: Bool = false {
+ /// A boolean indicating whether to enable tap to reset.
+ @IBInspectable public var enableTapToReset: Bool = false {
didSet {
- if tapToResetEnabled {
+ if enableTapToReset {
prepareResetLayer()
- prepareTapGesture(&tapToResetGesture, numberOfTapsRequired: 2, numberOfTouchesRequired: 2, selector: #selector(handleTapToResetGesture))
+ prepareTapGesture(gesture: &tapToResetGesture, numberOfTapsRequired: 2, numberOfTouchesRequired: 2, selector: #selector(handleTapToResetGesture))
if let v: UITapGestureRecognizer = tapToFocusGesture {
- v.requireGestureRecognizerToFail(tapToResetGesture!)
+ v.require(toFail: tapToResetGesture!)
}
if let v: UITapGestureRecognizer = tapToExposeGesture {
- v.requireGestureRecognizerToFail(tapToResetGesture!)
+ v.require(toFail: tapToResetGesture!)
}
} else {
- removeTapGesture(&tapToResetGesture)
+ removeTapGesture(gesture: &tapToResetGesture)
resetLayer?.removeFromSuperlayer()
resetLayer = nil
}
}
}
- /**
- :name: contentInsets
- */
+ /// MaterialEdgeInset preset value for content.
public var contentInsetPreset: MaterialEdgeInset = .None {
didSet {
- contentInset = MaterialEdgeInsetToValue(contentInsetPreset)
+ contentInset = MaterialEdgeInsetToValue(inset: contentInsetPreset)
}
}
- /**
- :name: contentInset
- */
- public var contentInset: UIEdgeInsets = MaterialEdgeInsetToValue(.Square4) {
+ /// Content inset value.
+ public var contentInset: UIEdgeInsets = MaterialEdgeInsetToValue(inset: .Square4) {
didSet {
reloadView()
}
}
- /**
- :name: previewView
- */
- public private(set) lazy var previewView: CapturePreview = CapturePreview()
+ /// A reference to the CapturePreview view.
+ public private(set) var previewView: CapturePreview!
- /**
- :name: capture
- */
- public private(set) lazy var captureSession: CaptureSession = CaptureSession()
+ /// A reference to the CaptureSession.
+ public private(set) var captureSession: CaptureSession!
- /**
- :name: focusLayer
- */
+ /// A reference to the focus layer used in focus animations.
public private(set) var focusLayer: MaterialLayer?
- /**
- :name: exposureLayer
- */
+ /// A reference to the exposure layer used in exposure animations.
public private(set) var exposureLayer: MaterialLayer?
- /**
- :name: resetLayer
- */
+ /// A reference to the reset layer used in reset animations.
public private(set) var resetLayer: MaterialLayer?
- /**
- :name: cameraButton
- */
+ /// A reference to the cameraButton.
public var cameraButton: UIButton? {
didSet {
if let v: UIButton = cameraButton {
- v.addTarget(self, action: #selector(handleCameraButton), forControlEvents: .TouchUpInside)
+ v.addTarget(self, action: #selector(handleCameraButton), for: .touchUpInside)
}
reloadView()
}
}
- /**
- :name: captureButton
- */
+ /// A reference to the captureButton.
public var captureButton: UIButton? {
didSet {
if let v: UIButton = captureButton {
- v.addTarget(self, action: #selector(handleCaptureButton), forControlEvents: .TouchUpInside)
+ v.addTarget(self, action: #selector(handleCaptureButton), for: .touchUpInside)
}
reloadView()
}
}
- /**
- :name: videoButton
- */
+ /// A reference to the videoButton.
public var videoButton: UIButton? {
didSet {
if let v: UIButton = videoButton {
- v.addTarget(self, action: #selector(handleVideoButton), forControlEvents: .TouchUpInside)
+ v.addTarget(self, action: #selector(handleVideoButton), for: .touchUpInside)
}
reloadView()
}
}
- /**
- :name: switchCamerasButton
- */
+ /// A reference to the switchCamerasButton.
public var switchCamerasButton: UIButton? {
didSet {
if let v: UIButton = switchCamerasButton {
- v.addTarget(self, action: #selector(handleSwitchCamerasButton), forControlEvents: .TouchUpInside)
+ v.addTarget(self, action: #selector(handleSwitchCamerasButton), for: .touchUpInside)
}
}
}
- /**
- :name: flashButton
- */
+ /// A reference to the flashButton.
public var flashButton: UIButton? {
didSet {
if let v: UIButton = flashButton {
- v.addTarget(self, action: #selector(handleFlashButton), forControlEvents: .TouchUpInside)
+ v.addTarget(self, action: #selector(handleFlashButton), for: .touchUpInside)
}
}
}
- /**
- :name: init
- */
+ /// A convenience initializer.
public convenience init() {
self.init(frame: CGRect.zero)
}
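Note: an illustrative configuration sketch for the view above, not part of the diff; the buttons and any surrounding view controller wiring are hypothetical:

    import UIKit

    let captureView = CaptureView()
    captureView.enableTapToFocus = true
    captureView.enableTapToExpose = true
    captureView.captureMode = .photo
    // Assigning buttons triggers reloadView() and wires the touch handlers.
    captureView.captureButton = UIButton()
    captureView.switchCamerasButton = UIButton()
    captureView.flashButton = UIButton()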
- /**
- :name: layoutSubviews
- */
public override func layoutSubviews() {
super.layoutSubviews()
previewView.frame = bounds
@@ -316,17 +307,20 @@ public class CaptureView : MaterialView, UIGestureRecognizerDelegate {
}
/**
- :name: prepareView
+ Prepares the view instance when initialized. When subclassing,
+ it is recommended to override the prepareView method
+ to initialize property values and other setup operations.
+ The super.prepareView method should always be called immediately
+ when subclassing.
*/
public override func prepareView() {
super.prepareView()
backgroundColor = Color.black
+ prepareCaptureSession()
preparePreviewView()
}
- /**
- :name: reloadView
- */
+ /// Reloads the view.
public func reloadView() {
// clear constraints so new ones do not conflict
removeConstraints(constraints)
@@ -334,79 +328,76 @@ public class CaptureView : MaterialView, UIGestureRecognizerDelegate {
v.removeFromSuperview()
}
- insertSubview(previewView, atIndex: 0)
+ insertSubview(previewView, at: 0)
if let v: UIButton = captureButton {
- insertSubview(v, atIndex: 1)
+ insertSubview(v, at: 1)
}
if let v: UIButton = cameraButton {
- insertSubview(v, atIndex: 2)
+ insertSubview(v, at: 2)
}
if let v: UIButton = videoButton {
- insertSubview(v, atIndex: 3)
+ insertSubview(v, at: 3)
}
}
- /**
- :name: startTimer
- */
+ /// Starts the timer for recording.
internal func startTimer() {
timer?.invalidate()
- timer = NSTimer(timeInterval: 0.5, target: self, selector: #selector(updateTimer), userInfo: nil, repeats: true)
+ timer = Timer(timeInterval: 0.5, target: self, selector: #selector(updateTimer), userInfo: nil, repeats: true)
- NSRunLoop.mainRunLoop().addTimer(timer!, forMode: NSRunLoopCommonModes)
+ RunLoop.main().add(timer!, forMode: .commonModes)
- (delegate as? CaptureViewDelegate)?.captureViewDidStartRecordTimer?(self)
+ (delegate as? CaptureViewDelegate)?.captureViewDidStartRecordTimer?(captureView: self)
}
- /**
- :name: updateTimer
- */
+ /// Updates the timer when recording.
internal func updateTimer() {
let duration: CMTime = captureSession.recordedDuration
let time: Double = CMTimeGetSeconds(duration)
let hours: Int = Int(time / 3600)
- let minutes: Int = Int((time / 60) % 60)
+ let minutes: Int = Int((time / 60).truncatingRemainder(dividingBy: 60))
- let seconds: Int = Int(time % 60)
+ let seconds: Int = Int(time.truncatingRemainder(dividingBy: 60))
- (delegate as? CaptureViewDelegate)?.captureViewDidUpdateRecordTimer?(self, hours: hours, minutes: minutes, seconds: seconds)
+ (delegate as? CaptureViewDelegate)?.captureViewDidUpdateRecordTimer?(captureView: self, hours: hours, minutes: minutes, seconds: seconds)
}
- /**
- :name: stopTimer
- */
+ /// Stops the timer when recording.
internal func stopTimer() {
let duration: CMTime = captureSession.recordedDuration
let time: Double = CMTimeGetSeconds(duration)
let hours: Int = Int(time / 3600)
- let minutes: Int = Int((time / 60) % 60)
+ let minutes: Int = Int((time / 60).truncatingRemainder(dividingBy: 60))
- let seconds: Int = Int(time % 60)
+ let seconds: Int = Int(time.truncatingRemainder(dividingBy: 60))
timer?.invalidate()
timer = nil
- (delegate as? CaptureViewDelegate)?.captureViewDidStopRecordTimer?(self, hours: hours, minutes: minutes, seconds: seconds)
+ (delegate as? CaptureViewDelegate)?.captureViewDidStopRecordTimer?(captureView: self, hours: hours, minutes: minutes, seconds: seconds)
}
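Note: a standalone sketch of the duration breakdown used in updateTimer and stopTimer above; Swift 3 drops % for floating-point values, so truncatingRemainder(dividingBy:) replaces it. The duration value is illustrative:

    let time: Double = 3725 // 1 hour, 2 minutes, 5 seconds
    let hours = Int(time / 3600)                                        // 1
    let minutes = Int((time / 60).truncatingRemainder(dividingBy: 60))  // 2
    let seconds = Int(time.truncatingRemainder(dividingBy: 60))         // 5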
/**
- :name: handleFlashButton
+ Handler for the flashButton.
+ - Parameter button: A UIButton that is associated with the event.
*/
internal func handleFlashButton(button: UIButton) {
- (delegate as? CaptureViewDelegate)?.captureViewDidPressFlashButton?(self, button: button)
+ (delegate as? CaptureViewDelegate)?.captureViewDidPressFlashButton?(captureView: self, button: button)
}
/**
- :name: handleSwitchCamerasButton
+ Handler for the switchCamerasButton.
+ - Parameter button: A UIButton that is associated with the event.
*/
internal func handleSwitchCamerasButton(button: UIButton) {
captureSession.switchCameras()
- (delegate as? CaptureViewDelegate)?.captureViewDidPressSwitchCamerasButton?(self, button: button)
+ (delegate as? CaptureViewDelegate)?.captureViewDidPressSwitchCamerasButton?(captureView: self, button: button)
}
/**
- :name: handleCaptureButton
+ Handler for the captureButton.
+ - Parameter button: A UIButton that is associated with the event.
*/
internal func handleCaptureButton(button: UIButton) {
- if .Photo == captureMode {
+ if .photo == captureMode {
captureSession.captureStillImage()
- } else if .Video == captureMode {
+ } else if .video == captureMode {
if captureSession.isRecording {
captureSession.stopRecording()
stopTimer()
...@@ -415,69 +406,80 @@ public class CaptureView : MaterialView, UIGestureRecognizerDelegate { ...@@ -415,69 +406,80 @@ public class CaptureView : MaterialView, UIGestureRecognizerDelegate {
startTimer() startTimer()
} }
} }
(delegate as? CaptureViewDelegate)?.captureViewDidPressCaptureButton?(self, button: button) (delegate as? CaptureViewDelegate)?.captureViewDidPressCaptureButton?(captureView: self, button: button)
} }
/** /**
:name: handleCameraButton Handler for the cameraButton.
- Parameter button: A UIButton that is associated with the event.
*/ */
internal func handleCameraButton(button: UIButton) { internal func handleCameraButton(button: UIButton) {
captureMode = .Photo captureMode = .photo
(delegate as? CaptureViewDelegate)?.captureViewDidPressCameraButton?(self, button: button) (delegate as? CaptureViewDelegate)?.captureViewDidPressCameraButton?(captureView: self, button: button)
} }
/** /**
:name: handleVideoButton Handler for the videoButton.
- Parameter button: A UIButton that is associated with the event.
*/ */
internal func handleVideoButton(button: UIButton) { internal func handleVideoButton(button: UIButton) {
captureMode = .Video captureMode = .video
(delegate as? CaptureViewDelegate)?.captureViewDidPressVideoButton?(self, button: button) (delegate as? CaptureViewDelegate)?.captureViewDidPressVideoButton?(captureView: self, button: button)
} }
/** /**
:name: handleTapToFocusGesture Handler for the tapToFocusGesture.
- Parameter recognizer: A UITapGestureRecognizer that is associated with the event.
*/ */
@objc(handleTapToFocusGesture:) @objc
internal func handleTapToFocusGesture(recognizer: UITapGestureRecognizer) { internal func handleTapToFocusGesture(recognizer: UITapGestureRecognizer) {
if tapToFocusEnabled && captureSession.cameraSupportsTapToFocus { if enableTapToFocus && captureSession.cameraSupportsTapToFocus {
let point: CGPoint = recognizer.locationInView(self) let point: CGPoint = recognizer.location(in: self)
captureSession.focusAtPoint(previewView.captureDevicePointOfInterestForPoint(point)) captureSession.focusAtPoint(point: previewView.captureDevicePointOfInterestForPoint(point: point))
animateTapLayer(layer: focusLayer!, point: point) animateTapLayer(layer: focusLayer!, point: point)
(delegate as? CaptureViewDelegate)?.captureViewDidTapToFocusAtPoint?(self, point: point) (delegate as? CaptureViewDelegate)?.captureViewDidTapToFocusAtPoint?(captureView: self, point: point)
} }
} }
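Tap-to-focus converts the touch location from the preview's coordinate space into the camera's normalized point-of-interest space before the session applies it; the tap-to-expose handler below uses the same conversion, and the tap-to-reset handler uses the reverse mapping. A library-free sketch of what focusAtPoint(point:) presumably does with plain AVFoundation (the free function here is an assumption, not the library's implementation):

import AVFoundation
import UIKit

// Maps a tap in the preview layer's coordinates to the normalized (0...1, 0...1)
// device space and applies it as the focus point of interest.
func focus(device: AVCaptureDevice, at tap: CGPoint, in previewLayer: AVCaptureVideoPreviewLayer) {
    let devicePoint = previewLayer.captureDevicePointOfInterest(for: tap)
    guard device.isFocusPointOfInterestSupported, device.isFocusModeSupported(.autoFocus) else {
        return
    }
    do {
        try device.lockForConfiguration()
        device.focusPointOfInterest = devicePoint
        device.focusMode = .autoFocus
        device.unlockForConfiguration()
    } catch {
        // Could not lock the device for configuration; nothing more to do in this sketch.
    }
}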
/** /**
:name: handleTapToExposeGesture Handler for the tapToExposeGesture.
- Parameter recognizer: A UITapGestureRecognizer that is associated with the event.
*/ */
@objc(handleTapToExposeGesture:) @objc
internal func handleTapToExposeGesture(recognizer: UITapGestureRecognizer) { internal func handleTapToExposeGesture(recognizer: UITapGestureRecognizer) {
if tapToExposeEnabled && captureSession.cameraSupportsTapToExpose { if enableTapToExpose && captureSession.cameraSupportsTapToExpose {
let point: CGPoint = recognizer.locationInView(self) let point: CGPoint = recognizer.location(in: self)
captureSession.exposeAtPoint(previewView.captureDevicePointOfInterestForPoint(point)) captureSession.exposeAtPoint(point: previewView.captureDevicePointOfInterestForPoint(point: point))
animateTapLayer(layer: exposureLayer!, point: point) animateTapLayer(layer: exposureLayer!, point: point)
(delegate as? CaptureViewDelegate)?.captureViewDidTapToExposeAtPoint?(self, point: point) (delegate as? CaptureViewDelegate)?.captureViewDidTapToExposeAtPoint?(captureView: self, point: point)
} }
} }
/** /**
:name: handleTapToResetGesture Handler for the tapToResetGesture.
- Parameter recognizer: A UITapGestureRecognizer that is associated with the event.
*/ */
@objc(handleTapToResetGesture:) @objc
internal func handleTapToResetGesture(recognizer: UITapGestureRecognizer) { internal func handleTapToResetGesture(recognizer: UITapGestureRecognizer) {
if tapToResetEnabled { if enableTapToReset {
captureSession.resetFocusAndExposureModes() captureSession.resetFocusAndExposureModes()
let point: CGPoint = previewView.pointForCaptureDevicePointOfInterest(CGPointMake(0.5, 0.5)) let point: CGPoint = previewView.pointForCaptureDevicePointOfInterest(point: CGPoint(x: 0.5, y: 0.5))
animateTapLayer(layer: resetLayer!, point: point) animateTapLayer(layer: resetLayer!, point: point)
(delegate as? CaptureViewDelegate)?.captureViewDidTapToResetAtPoint?(self, point: point) (delegate as? CaptureViewDelegate)?.captureViewDidTapToResetAtPoint?(captureView: self, point: point)
} }
} }
/** /**
:name: prepareTapGesture Prepares a given tap gesture.
- Parameter gesture: An optional UITapGestureRecognizer to prepare.
- Parameter numberOfTapsRequired: The number of taps required
to activate the gesture.
- Parameter numberOfTouchesRequired: The number of touches (fingers)
required to activate the gesture.
- Parameter selector: A Selector to handle the event.
*/ */
private func prepareTapGesture(inout gesture: UITapGestureRecognizer?, numberOfTapsRequired: Int, numberOfTouchesRequired: Int, selector: Selector) { private func prepareTapGesture(gesture: inout UITapGestureRecognizer?, numberOfTapsRequired: Int, numberOfTouchesRequired: Int, selector: Selector) {
removeTapGesture(&gesture) removeTapGesture(gesture: &gesture)
gesture = UITapGestureRecognizer(target: self, action: selector) gesture = UITapGestureRecognizer(target: self, action: selector)
gesture!.delegate = self gesture!.delegate = self
gesture!.numberOfTapsRequired = numberOfTapsRequired gesture!.numberOfTapsRequired = numberOfTapsRequired
...@@ -486,77 +488,74 @@ public class CaptureView : MaterialView, UIGestureRecognizerDelegate { ...@@ -486,77 +488,74 @@ public class CaptureView : MaterialView, UIGestureRecognizerDelegate {
} }
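The prepareTapGesture signature shows another Swift 3 migration: `inout` moves from before the parameter name to the type position, while call sites still pass the argument with `&`. A minimal standalone sketch (the function and its callers are illustrative):

import UIKit

// Swift 2.x:  func prepare(inout gesture: UITapGestureRecognizer?)
// Swift 3:    func prepare(gesture: inout UITapGestureRecognizer?)
func resetGesture(gesture: inout UITapGestureRecognizer?, target: Any, selector: Selector) {
    gesture = UITapGestureRecognizer(target: target, action: selector)
    gesture?.numberOfTapsRequired = 1
}

// Call sites are unchanged: pass the optional by reference with `&`.
// var tap: UITapGestureRecognizer? = nil
// resetGesture(gesture: &tap, target: someTarget, selector: #selector(SomeClass.handleTap))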
/** /**
:name: removeTapToFocusGesture Removes a given tap gesture.
- Parameter gesture: An optional UITapGestureRecognizer to remove.
*/ */
private func removeTapGesture(inout gesture: UITapGestureRecognizer?) { private func removeTapGesture(gesture: inout UITapGestureRecognizer?) {
if let v: UIGestureRecognizer = gesture { if let v: UIGestureRecognizer = gesture {
removeGestureRecognizer(v) removeGestureRecognizer(v)
gesture = nil gesture = nil
} }
} }
/** /// Prepares the captureSession.
:name: preparePreviewView private func prepareCaptureSession() {
*/ captureSession = CaptureSession()
}
/// Prepares the previewView.
private func preparePreviewView() { private func preparePreviewView() {
previewView = CapturePreview()
(previewView.layer as! AVCaptureVideoPreviewLayer).session = captureSession.session (previewView.layer as! AVCaptureVideoPreviewLayer).session = captureSession.session
captureSession.startSession() captureSession.startSession()
} }
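preparePreviewView force-casts the view's backing layer to AVCaptureVideoPreviewLayer, hands it the session, and then starts the session. A library-free sketch of that wiring, assuming a view whose layerClass is the preview layer (PreviewView and attach(session:to:) are illustrative names):

import AVFoundation
import UIKit

// A plain UIView backed by an AVCaptureVideoPreviewLayer, so the force-cast
// used in preparePreviewView() always succeeds.
final class PreviewView: UIView {
    override class var layerClass: AnyClass {
        return AVCaptureVideoPreviewLayer.self
    }

    var previewLayer: AVCaptureVideoPreviewLayer {
        return layer as! AVCaptureVideoPreviewLayer
    }
}

// Attach a session to the preview and start it, mirroring preparePreviewView().
func attach(session: AVCaptureSession, to preview: PreviewView) {
    preview.previewLayer.session = session
    session.startRunning()
}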
/** /// Prepares the focusLayer.
:name: prepareFocusLayer
*/
private func prepareFocusLayer() { private func prepareFocusLayer() {
if nil == focusLayer { if nil == focusLayer {
focusLayer = MaterialLayer(frame: CGRectMake(0, 0, 150, 150)) focusLayer = MaterialLayer(frame: CGRect(x: 0, y: 0, width: 150, height: 150))
focusLayer!.hidden = true focusLayer!.isHidden = true
focusLayer!.borderWidth = 2 focusLayer!.borderWidth = 2
focusLayer!.borderColor = Color.white.CGColor focusLayer!.borderColor = Color.white.cgColor
previewView.layer.addSublayer(focusLayer!) previewView.layer.addSublayer(focusLayer!)
} }
} }
/** /// Prepares the exposureLayer.
:name: prepareExposureLayer
*/
private func prepareExposureLayer() { private func prepareExposureLayer() {
if nil == exposureLayer { if nil == exposureLayer {
exposureLayer = MaterialLayer(frame: CGRectMake(0, 0, 150, 150)) exposureLayer = MaterialLayer(frame: CGRect(x: 0, y: 0, width: 150, height: 150))
exposureLayer!.hidden = true exposureLayer!.isHidden = true
exposureLayer!.borderWidth = 2 exposureLayer!.borderWidth = 2
exposureLayer!.borderColor = Color.yellow.darken1.CGColor exposureLayer!.borderColor = Color.yellow.darken1.cgColor
previewView.layer.addSublayer(exposureLayer!) previewView.layer.addSublayer(exposureLayer!)
} }
} }
/** /// Prepares the resetLayer.
:name: prepareResetLayer
*/
private func prepareResetLayer() { private func prepareResetLayer() {
if nil == resetLayer { if nil == resetLayer {
resetLayer = MaterialLayer(frame: CGRectMake(0, 0, 150, 150)) resetLayer = MaterialLayer(frame: CGRect(x: 0, y: 0, width: 150, height: 150))
resetLayer!.hidden = true resetLayer!.isHidden = true
resetLayer!.borderWidth = 2 resetLayer!.borderWidth = 2
resetLayer!.borderColor = Color.red.accent1.CGColor resetLayer!.borderColor = Color.red.accent1.cgColor
previewView.layer.addSublayer(resetLayer!) previewView.layer.addSublayer(resetLayer!)
} }
} }
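prepareFocusLayer, prepareExposureLayer and prepareResetLayer differ only in border color: each builds a hidden 150x150 layer with a 2pt colored border and adds it to the preview's layer. A sketch of that shared setup using a plain CALayer (MaterialLayer is the library's subclass; the factory function is an assumption):

import UIKit

// Builds a hidden 150x150 indicator layer with a 2pt colored border,
// the same configuration the three prepare*Layer() methods apply.
func makeTapIndicatorLayer(borderColor: UIColor) -> CALayer {
    let indicator = CALayer()
    indicator.frame = CGRect(x: 0, y: 0, width: 150, height: 150)
    indicator.isHidden = true
    indicator.borderWidth = 2
    indicator.borderColor = borderColor.cgColor
    return indicator
}

// e.g. previewView.layer.addSublayer(makeTapIndicatorLayer(borderColor: .white))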
/** /// Animates a tap indicator layer at the given point.
:name: animateTapLayer
*/
private func animateTapLayer(layer v: MaterialLayer, point: CGPoint) { private func animateTapLayer(layer v: MaterialLayer, point: CGPoint) {
MaterialAnimation.animationDisabled { MaterialAnimation.animationDisabled {
v.transform = CATransform3DIdentity v.transform = CATransform3DIdentity
v.position = point v.position = point
v.hidden = false v.isHidden = false
} }
MaterialAnimation.animateWithDuration(0.25, animations: { MaterialAnimation.animateWithDuration(duration: 0.25, animations: {
v.transform = CATransform3DMakeScale(0.5, 0.5, 1) v.transform = CATransform3DMakeScale(0.5, 0.5, 1)
}) { }) {
MaterialAnimation.delay(0.4) { MaterialAnimation.delay(0.4) {
MaterialAnimation.animationDisabled { MaterialAnimation.animationDisabled {
v.hidden = true v.isHidden = true
} }
} }
} }
......
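animateTapLayer runs a three-step sequence: place and reveal the indicator with implicit animations disabled, animate a scale down to half size over 0.25s, then hide it again 0.4s after that completes. A sketch of the same sequence using Core Animation primitives in place of the library's MaterialAnimation helpers, assuming the indicator is a standalone sublayer (so implicit animations apply):

import UIKit

// Core Animation version of the show -> shrink -> hide sequence above.
func animateTapIndicator(_ indicator: CALayer, at point: CGPoint) {
    // Position and reveal the layer with implicit animations disabled.
    CATransaction.begin()
    CATransaction.setDisableActions(true)
    indicator.transform = CATransform3DIdentity
    indicator.position = point
    indicator.isHidden = false
    CATransaction.commit()

    // Shrink to half size over 0.25s, then hide 0.4s after the animation finishes.
    CATransaction.begin()
    CATransaction.setAnimationDuration(0.25)
    CATransaction.setCompletionBlock {
        DispatchQueue.main.asyncAfter(deadline: .now() + 0.4) {
            CATransaction.begin()
            CATransaction.setDisableActions(true)
            indicator.isHidden = true
            CATransaction.commit()
        }
    }
    indicator.transform = CATransform3DMakeScale(0.5, 0.5, 1)
    CATransaction.commit()
}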