Commit 17b67d99 by Daniel Dahan

Capture progression commit for Swift 2.3

parent d5096568
......@@ -43,14 +43,14 @@ public class CapturePreview : MaterialView {
:name: captureDevicePointOfInterestForPoint
*/
public func captureDevicePointOfInterestForPoint(point: CGPoint) -> CGPoint {
return (layer as! AVCaptureVideoPreviewLayer).captureDevicePointOfInterestForPoint(point)
return (layer as! AVCaptureVideoPreviewLayer).captureDevicePointOfInterest(for: point)
}
/**
:name: pointForCaptureDevicePointOfInterest
*/
public func pointForCaptureDevicePointOfInterest(point: CGPoint) -> CGPoint {
return (layer as! AVCaptureVideoPreviewLayer).pointForCaptureDevicePointOfInterest(point)
return (layer as! AVCaptureVideoPreviewLayer).pointForCaptureDevicePoint(ofInterest: point)
}
/**
......@@ -65,7 +65,7 @@ public class CapturePreview : MaterialView {
:name: preparePreviewLayer
*/
private func preparePreviewLayer() {
layer.backgroundColor = Color.black.CGColor
layer.backgroundColor = Color.black.cgColor
layer.masksToBounds = true
(layer as! AVCaptureVideoPreviewLayer).videoGravity = AVLayerVideoGravityResizeAspectFill
}
......
......@@ -87,57 +87,76 @@ public func CaptureSessionPresetToString(preset: CaptureSessionPreset) -> String
@objc(CaptureSessionDelegate)
public protocol CaptureSessionDelegate {
/**
:name: captureSessionFailedWithError
A delegation method that is fired when the captureSession fails with an error.
- Parameter captureSession: A reference to the calling CaptureSession.
- Parameter error: A NSError corresponding to the error.
*/
optional func captureSessionFailedWithError(capture: CaptureSession, error: NSError)
@objc
optional func captureSessionFailedWithError(captureSession: CaptureSession, error: NSError)
/**
:name: captureSessionDidSwitchCameras
A delegation method that is fired when the camera has been switched to another.
- Parameter captureSession: A reference to the calling CaptureSession.
- Parameter position: An AVCaptureDevicePosition that the camera has switched to.
*/
optional func captureSessionDidSwitchCameras(capture: CaptureSession, position: AVCaptureDevicePosition)
@objc
optional func captureSessionDidSwitchCameras(captureSession: CaptureSession, position: AVCaptureDevicePosition)
/**
:name: captureSessionWillSwitchCameras
A delegation method that is fired before the camera has been switched to another.
- Parameter captureSession: A reference to the calling CaptureSession.
- Parameter position: An AVCaptureDevicePosition that the camera will switch to.
*/
optional func captureSessionWillSwitchCameras(capture: CaptureSession, position: AVCaptureDevicePosition)
@objc
optional func captureSessionWillSwitchCameras(captureSession: CaptureSession, position: AVCaptureDevicePosition)
/**
:name: captureStillImageAsynchronously
A delegation method that is fired when an image has been captured asynchronously.
- Parameter captureSession: A reference to the calling CaptureSession.
- Parameter image: An image that has been captured.
*/
optional func captureStillImageAsynchronously(capture: CaptureSession, image: UIImage)
@objc
optional func captureStillImageAsynchronously(captureSession: CaptureSession, image: UIImage)
/**
:name: captureStillImageAsynchronouslyFailedWithError
A delegation method that is fired when capturing an image asynchronously has failed.
- Parameter captureSession: A reference to the calling CaptureSession.
- Parameter error: A NSError corresponding to the error.
*/
optional func captureStillImageAsynchronouslyFailedWithError(capture: CaptureSession, error: NSError)
@objc
optional func captureStillImageAsynchronouslyFailedWithError(captureSession: CaptureSession, error: NSError)
/**
:name: captureCreateMovieFileFailedWithError
*/
optional func captureCreateMovieFileFailedWithError(capture: CaptureSession, error: NSError)
@objc
optional func captureCreateMovieFileFailedWithError(captureSession: CaptureSession, error: NSError)
/**
:name: captureMovieFailedWithError
*/
optional func captureMovieFailedWithError(capture: CaptureSession, error: NSError)
@objc
optional func captureMovieFailedWithError(captureSession: CaptureSession, error: NSError)
/**
:name: captureDidStartRecordingToOutputFileAtURL
*/
optional func captureDidStartRecordingToOutputFileAtURL(capture: CaptureSession, captureOutput: AVCaptureFileOutput, fileURL: NSURL, fromConnections connections: [AnyObject])
@objc
optional func captureDidStartRecordingToOutputFileAtURL(captureSession: CaptureSession, captureOutput: AVCaptureFileOutput, fileURL: NSURL, fromConnections connections: [AnyObject])
/**
:name: captureDidFinishRecordingToOutputFileAtURL
*/
optional func captureDidFinishRecordingToOutputFileAtURL(capture: CaptureSession, captureOutput: AVCaptureFileOutput, outputFileURL: NSURL, fromConnections connections: [AnyObject], error: NSError!)
@objc
optional func captureDidFinishRecordingToOutputFileAtURL(captureSession: CaptureSession, captureOutput: AVCaptureFileOutput, outputFileURL: NSURL, fromConnections connections: [AnyObject], error: NSError!)
}
@objc(CaptureSession)
public class CaptureSession : NSObject, AVCaptureFileOutputRecordingDelegate {
public class CaptureSession: NSObject, AVCaptureFileOutputRecordingDelegate {
/**
:name: sessionQueue
*/
private lazy var sessionQueue: dispatch_queue_t = dispatch_queue_create("io.material.CaptureSession", DISPATCH_QUEUE_SERIAL)
private var sessionQueue: DispatchQueue!
/**
:name: activeVideoInput
......@@ -199,10 +218,10 @@ public class CaptureSession : NSObject, AVCaptureFileOutputRecordingDelegate {
public var inactiveCamera: AVCaptureDevice? {
var device: AVCaptureDevice?
if 1 < cameraCount {
if activeCamera?.position == .Back {
device = cameraWithPosition(.Front)
if activeCamera?.position == .back {
device = cameraWithPosition(position: .front)
} else {
device = cameraWithPosition(.Back)
device = cameraWithPosition(position: .back)
}
}
return device
......@@ -212,7 +231,7 @@ public class CaptureSession : NSObject, AVCaptureFileOutputRecordingDelegate {
:name: cameraCount
*/
public var cameraCount: Int {
return AVCaptureDevice.devicesWithMediaType(AVMediaTypeVideo).count
return AVCaptureDevice.devices(withMediaType: AVMediaTypeVideo).count
}
/**
......@@ -226,14 +245,14 @@ public class CaptureSession : NSObject, AVCaptureFileOutputRecordingDelegate {
:name: cameraSupportsTapToFocus
*/
public var cameraSupportsTapToFocus: Bool {
return nil == activeCamera ? false : activeCamera!.focusPointOfInterestSupported
return nil == activeCamera ? false : activeCamera!.isFocusPointOfInterestSupported
}
/**
:name: cameraSupportsTapToExpose
*/
public var cameraSupportsTapToExpose: Bool {
return nil == activeCamera ? false : activeCamera!.exposurePointOfInterestSupported
return nil == activeCamera ? false : activeCamera!.isExposurePointOfInterestSupported
}
/**
......@@ -266,7 +285,7 @@ public class CaptureSession : NSObject, AVCaptureFileOutputRecordingDelegate {
}
set(value) {
var error: NSError?
if isFocusModeSupported(focusMode) {
if isFocusModeSupported(focusMode: focusMode) {
do {
let device: AVCaptureDevice = activeCamera!
try device.lockForConfiguration()
......@@ -283,7 +302,7 @@ public class CaptureSession : NSObject, AVCaptureFileOutputRecordingDelegate {
userInfo[NSUnderlyingErrorKey] = error
}
if let e: NSError = error {
delegate?.captureSessionFailedWithError?(self, error: e)
delegate?.captureSessionFailedWithError?(capture: self, error: e)
}
}
}
......@@ -297,7 +316,7 @@ public class CaptureSession : NSObject, AVCaptureFileOutputRecordingDelegate {
}
set(value) {
var error: NSError?
if isFlashModeSupported(flashMode) {
if isFlashModeSupported(flashMode: flashMode) {
do {
let device: AVCaptureDevice = activeCamera!
try device.lockForConfiguration()
......@@ -314,7 +333,7 @@ public class CaptureSession : NSObject, AVCaptureFileOutputRecordingDelegate {
userInfo[NSUnderlyingErrorKey] = error
}
if let e: NSError = error {
delegate?.captureSessionFailedWithError?(self, error: e)
delegate?.captureSessionFailedWithError?(capture: self, error: e)
}
}
}
......@@ -328,7 +347,7 @@ public class CaptureSession : NSObject, AVCaptureFileOutputRecordingDelegate {
}
set(value) {
var error: NSError?
if isTorchModeSupported(torchMode) {
if isTorchModeSupported(torchMode: torchMode) {
do {
let device: AVCaptureDevice = activeCamera!
try device.lockForConfiguration()
......@@ -345,7 +364,7 @@ public class CaptureSession : NSObject, AVCaptureFileOutputRecordingDelegate {
userInfo[NSUnderlyingErrorKey] = error
}
if let e: NSError = error {
delegate?.captureSessionFailedWithError?(self, error: e)
delegate?.captureSessionFailedWithError?(capture: self, error: e)
}
}
}
......@@ -353,22 +372,22 @@ public class CaptureSession : NSObject, AVCaptureFileOutputRecordingDelegate {
/// The session quality preset.
public var sessionPreset: CaptureSessionPreset {
didSet {
session.sessionPreset = CaptureSessionPresetToString(sessionPreset)
session.sessionPreset = CaptureSessionPresetToString(preset: sessionPreset)
}
}
/// The capture video orientation.
public var videoOrientation: AVCaptureVideoOrientation {
var orientation: AVCaptureVideoOrientation
switch UIDevice.currentDevice().orientation {
case .Portrait:
orientation = .Portrait
case .LandscapeRight:
orientation = .LandscapeLeft
case .PortraitUpsideDown:
orientation = .PortraitUpsideDown
switch UIDevice.current().orientation {
case .portrait:
orientation = .portrait
case .landscapeRight:
orientation = .landscapeLeft
case .portraitUpsideDown:
orientation = .portraitUpsideDown
default:
orientation = .LandscapeRight
orientation = .landscapeRight
}
return orientation
}
......@@ -386,7 +405,7 @@ public class CaptureSession : NSObject, AVCaptureFileOutputRecordingDelegate {
/// Starts the session.
public func startSession() {
if !isRunning {
dispatch_async(sessionQueue) { [weak self] in
sessionQueue.async() { [weak self] in
self?.session.startRunning()
}
}
......@@ -395,7 +414,7 @@ public class CaptureSession : NSObject, AVCaptureFileOutputRecordingDelegate {
/// Stops the session.
public func stopSession() {
if isRunning {
dispatch_async(sessionQueue) { [weak self] in
sessionQueue.async() { [weak self] in
self?.session.stopRunning()
}
}
......@@ -406,7 +425,7 @@ public class CaptureSession : NSObject, AVCaptureFileOutputRecordingDelegate {
if canSwitchCameras {
do {
if let v: AVCaptureDevicePosition = cameraPosition {
delegate?.captureSessionWillSwitchCameras?(self, position: v)
delegate?.captureSessionWillSwitchCameras?(capture: self, position: v)
let videoInput: AVCaptureDeviceInput? = try AVCaptureDeviceInput(device: inactiveCamera!)
session.beginConfiguration()
session.removeInput(activeVideoInput)
......@@ -418,10 +437,10 @@ public class CaptureSession : NSObject, AVCaptureFileOutputRecordingDelegate {
session.addInput(activeVideoInput)
}
session.commitConfiguration()
delegate?.captureSessionDidSwitchCameras?(self, position: cameraPosition!)
delegate?.captureSessionDidSwitchCameras?(capture: self, position: cameraPosition!)
}
} catch let e as NSError {
delegate?.captureSessionFailedWithError?(self, error: e)
delegate?.captureSessionFailedWithError?(capture: self, error: e)
}
}
}
......@@ -459,12 +478,12 @@ public class CaptureSession : NSObject, AVCaptureFileOutputRecordingDelegate {
*/
public func focusAtPoint(point: CGPoint) {
var error: NSError?
if cameraSupportsTapToFocus && isFocusModeSupported(.AutoFocus) {
if cameraSupportsTapToFocus && isFocusModeSupported(focusMode: .autoFocus) {
do {
let device: AVCaptureDevice = activeCamera!
try device.lockForConfiguration()
device.focusPointOfInterest = point
device.focusMode = .AutoFocus
device.focusMode = .autoFocus
device.unlockForConfiguration()
} catch let e as NSError {
error = e
......@@ -477,7 +496,7 @@ public class CaptureSession : NSObject, AVCaptureFileOutputRecordingDelegate {
userInfo[NSUnderlyingErrorKey] = error
}
if let e: NSError = error {
delegate?.captureSessionFailedWithError?(self, error: e)
delegate?.captureSessionFailedWithError?(capture: self, error: e)
}
}
......@@ -486,14 +505,14 @@ public class CaptureSession : NSObject, AVCaptureFileOutputRecordingDelegate {
*/
public func exposeAtPoint(point: CGPoint) {
var error: NSError?
if cameraSupportsTapToExpose && isExposureModeSupported(.ContinuousAutoExposure) {
if cameraSupportsTapToExpose && isExposureModeSupported(exposureMode: .continuousAutoExposure) {
do {
let device: AVCaptureDevice = activeCamera!
try device.lockForConfiguration()
device.exposurePointOfInterest = point
device.exposureMode = .ContinuousAutoExposure
if device.isExposureModeSupported(.Locked) {
device.addObserver(self, forKeyPath: "adjustingExposure", options: .New, context: &CaptureSessionAdjustingExposureContext)
device.exposureMode = .continuousAutoExposure
if device.isExposureModeSupported(.locked) {
device.addObserver(self, forKeyPath: "adjustingExposure", options: .new, context: &CaptureSessionAdjustingExposureContext)
}
device.unlockForConfiguration()
} catch let e as NSError {
......@@ -507,30 +526,30 @@ public class CaptureSession : NSObject, AVCaptureFileOutputRecordingDelegate {
userInfo[NSUnderlyingErrorKey] = error
}
if let e: NSError = error {
delegate?.captureSessionFailedWithError?(self, error: e)
delegate?.captureSessionFailedWithError?(capture: self, error: e)
}
}
/**
:name: observeValueForKeyPath
*/
public override func observeValueForKeyPath(keyPath: String?, ofObject object: AnyObject?, change: [String : AnyObject]?, context: UnsafeMutablePointer<Void>) {
public override func observeValue(forKeyPath keyPath: String?, of object: AnyObject?, change: [NSKeyValueChangeKey: AnyObject]?, context: UnsafeMutablePointer<Void>?) {
if context == &CaptureSessionAdjustingExposureContext {
let device: AVCaptureDevice = object as! AVCaptureDevice
if !device.adjustingExposure && device.isExposureModeSupported(.Locked) {
if !device.isAdjustingExposure && device.isExposureModeSupported(.locked) {
object!.removeObserver(self, forKeyPath: "adjustingExposure", context: &CaptureSessionAdjustingExposureContext)
dispatch_async(dispatch_get_main_queue()) {
DispatchQueue.main.async() {
do {
try device.lockForConfiguration()
device.exposureMode = .Locked
device.exposureMode = .locked
device.unlockForConfiguration()
} catch let e as NSError {
self.delegate?.captureSessionFailedWithError?(self, error: e)
self.delegate?.captureSessionFailedWithError?(capture: self, error: e)
}
}
}
} else {
super.observeValueForKeyPath(keyPath, ofObject: object, change: change, context: context)
super.observeValue(forKeyPath: keyPath, of : object, change: change, context: context)
}
}
......@@ -539,22 +558,22 @@ public class CaptureSession : NSObject, AVCaptureFileOutputRecordingDelegate {
*/
public func resetFocusAndExposureModes() {
let device: AVCaptureDevice = activeCamera!
let canResetFocus: Bool = device.focusPointOfInterestSupported && device.isFocusModeSupported(.ContinuousAutoFocus)
let canResetExposure: Bool = device.exposurePointOfInterestSupported && device.isExposureModeSupported(.ContinuousAutoExposure)
let centerPoint: CGPoint = CGPointMake(0.5, 0.5)
let canResetFocus: Bool = device.isFocusPointOfInterestSupported && device.isFocusModeSupported(.continuousAutoFocus)
let canResetExposure: Bool = device.isExposurePointOfInterestSupported && device.isExposureModeSupported(.continuousAutoExposure)
let centerPoint: CGPoint = CGPoint(x: 0.5, y: 0.5)
do {
try device.lockForConfiguration()
if canResetFocus {
device.focusMode = .ContinuousAutoFocus
device.focusMode = .continuousAutoFocus
device.focusPointOfInterest = centerPoint
}
if canResetExposure {
device.exposureMode = .ContinuousAutoExposure
device.exposureMode = .continuousAutoExposure
device.exposurePointOfInterest = centerPoint
}
device.unlockForConfiguration()
} catch let e as NSError {
delegate?.captureSessionFailedWithError?(self, error: e)
delegate?.captureSessionFailedWithError?(capture: self, error: e)
}
}
......@@ -562,18 +581,18 @@ public class CaptureSession : NSObject, AVCaptureFileOutputRecordingDelegate {
:name: captureStillImage
*/
public func captureStillImage() {
dispatch_async(sessionQueue) { [weak self] in
sessionQueue.async() { [weak self] in
if let s: CaptureSession = self {
if let v: AVCaptureConnection = s.imageOutput.connectionWithMediaType(AVMediaTypeVideo) {
if let v: AVCaptureConnection = s.imageOutput.connection(withMediaType: AVMediaTypeVideo) {
v.videoOrientation = s.videoOrientation
s.imageOutput.captureStillImageAsynchronouslyFromConnection(v) { [weak self] (sampleBuffer: CMSampleBuffer!, error: NSError!) -> Void in
s.imageOutput.captureStillImageAsynchronously(from: v) { [weak self] (sampleBuffer: CMSampleBuffer?, error: NSError?) -> Void in
if let s: CaptureSession = self {
var captureError: NSError? = error
if nil == captureError {
let data: NSData = AVCaptureStillImageOutput.jpegStillImageNSDataRepresentation(sampleBuffer)
if let image1: UIImage = UIImage(data: data) {
if let image2: UIImage = s.adjustOrientationForImage(image1) {
s.delegate?.captureStillImageAsynchronously?(s, image: image2)
if let image1: UIImage = UIImage(data: data as Data) {
if let image2: UIImage = s.adjustOrientationForImage(image: image1) {
s.delegate?.captureStillImageAsynchronously?(capture: s, image: image2)
} else {
var userInfo: Dictionary<String, AnyObject> = Dictionary<String, AnyObject>()
userInfo[NSLocalizedDescriptionKey] = "[Material Error: Cannot fix image orientation.]"
......@@ -591,7 +610,7 @@ public class CaptureSession : NSObject, AVCaptureFileOutputRecordingDelegate {
}
if let e: NSError = captureError {
s.delegate?.captureStillImageAsynchronouslyFailedWithError?(s, error: e)
s.delegate?.captureStillImageAsynchronouslyFailedWithError?(capture: s, error: e)
}
}
}
......@@ -605,26 +624,26 @@ public class CaptureSession : NSObject, AVCaptureFileOutputRecordingDelegate {
*/
public func startRecording() {
if !isRecording {
dispatch_async(sessionQueue) { [weak self] in
sessionQueue.async() { [weak self] in
if let s: CaptureSession = self {
if let v: AVCaptureConnection = s.movieOutput.connectionWithMediaType(AVMediaTypeVideo) {
if let v: AVCaptureConnection = s.movieOutput.connection(withMediaType: AVMediaTypeVideo) {
v.videoOrientation = s.videoOrientation
v.preferredVideoStabilizationMode = .Auto
v.preferredVideoStabilizationMode = .auto
}
if let v: AVCaptureDevice = s.activeCamera {
if v.smoothAutoFocusSupported {
if v.isSmoothAutoFocusSupported {
do {
try v.lockForConfiguration()
v.smoothAutoFocusEnabled = true
v.isSmoothAutoFocusEnabled = true
v.unlockForConfiguration()
} catch let e as NSError {
s.delegate?.captureSessionFailedWithError?(s, error: e)
s.delegate?.captureSessionFailedWithError?(capture: s, error: e)
}
}
s.movieOutputURL = s.uniqueURL()
if let v: NSURL = s.movieOutputURL {
s.movieOutput.startRecordingToOutputFileURL(v, recordingDelegate: s)
s.movieOutput.startRecording(toOutputFileURL: v as URL!, recordingDelegate: s)
}
}
}
......@@ -644,17 +663,22 @@ public class CaptureSession : NSObject, AVCaptureFileOutputRecordingDelegate {
/**
:name: captureOutput
*/
public func captureOutput(captureOutput: AVCaptureFileOutput!, didStartRecordingToOutputFileAtURL fileURL: NSURL!, fromConnections connections: [AnyObject]!) {
public func capture(_ captureOutput: AVCaptureFileOutput!, didStartRecordingToOutputFileAt fileURL: URL!, fromConnections connections: [AnyObject]!) {
isRecording = true
delegate?.captureDidStartRecordingToOutputFileAtURL?(self, captureOutput: captureOutput, fileURL: fileURL, fromConnections: connections)
delegate?.captureDidStartRecordingToOutputFileAtURL?(capture: self, captureOutput: captureOutput, fileURL: fileURL, fromConnections: connections)
}
/**
:name: captureOutput
*/
public func captureOutput(captureOutput: AVCaptureFileOutput!, didFinishRecordingToOutputFileAtURL outputFileURL: NSURL!, fromConnections connections: [AnyObject]!, error: NSError!) {
public func capture(_ captureOutput: AVCaptureFileOutput!, didFinishRecordingToOutputFileAt outputFileURL: URL!, fromConnections connections: [AnyObject]!, error: NSError!) {
isRecording = false
delegate?.captureDidFinishRecordingToOutputFileAtURL?(self, captureOutput: captureOutput, outputFileURL: outputFileURL, fromConnections: connections, error: error)
delegate?.captureDidFinishRecordingToOutputFileAtURL?(capture: self, captureOutput: captureOutput, outputFileURL: outputFileURL, fromConnections: connections, error: error)
}
/// Prepares the sessionQueue.
private func prepareSessionQueue() {
sessionQueue = DispatchQueue(label: "io.cosmicmind.Material.CaptureSession", attributes: .serial, target: nil)
}
/**
......@@ -672,12 +696,12 @@ public class CaptureSession : NSObject, AVCaptureFileOutputRecordingDelegate {
*/
private func prepareVideoInput() {
do {
activeVideoInput = try AVCaptureDeviceInput(device: AVCaptureDevice.defaultDeviceWithMediaType(AVMediaTypeVideo))
activeVideoInput = try AVCaptureDeviceInput(device: AVCaptureDevice.defaultDevice(withMediaType: AVMediaTypeVideo))
if session.canAddInput(activeVideoInput) {
session.addInput(activeVideoInput)
}
} catch let e as NSError {
delegate?.captureSessionFailedWithError?(self, error: e)
delegate?.captureSessionFailedWithError?(capture: self, error: e)
}
}
......@@ -686,12 +710,12 @@ public class CaptureSession : NSObject, AVCaptureFileOutputRecordingDelegate {
*/
private func prepareAudioInput() {
do {
activeAudioInput = try AVCaptureDeviceInput(device: AVCaptureDevice.defaultDeviceWithMediaType(AVMediaTypeAudio))
activeAudioInput = try AVCaptureDeviceInput(device: AVCaptureDevice.defaultDevice(withMediaType: AVMediaTypeAudio))
if session.canAddInput(activeAudioInput) {
session.addInput(activeAudioInput)
}
} catch let e as NSError {
delegate?.captureSessionFailedWithError?(self, error: e)
delegate?.captureSessionFailedWithError?(capture: self, error: e)
}
}
......@@ -718,7 +742,7 @@ public class CaptureSession : NSObject, AVCaptureFileOutputRecordingDelegate {
:name: cameraWithPosition
*/
private func cameraWithPosition(position: AVCaptureDevicePosition) -> AVCaptureDevice? {
let devices: Array<AVCaptureDevice> = AVCaptureDevice.devicesWithMediaType(AVMediaTypeVideo) as! Array<AVCaptureDevice>
let devices: Array<AVCaptureDevice> = AVCaptureDevice.devices(withMediaType: AVMediaTypeVideo) as! Array<AVCaptureDevice>
for device in devices {
if device.position == position {
return device
......@@ -732,13 +756,13 @@ public class CaptureSession : NSObject, AVCaptureFileOutputRecordingDelegate {
*/
private func uniqueURL() -> NSURL? {
do {
let directory: NSURL = try NSFileManager.defaultManager().URLForDirectory(.DocumentDirectory, inDomain: .UserDomainMask, appropriateForURL: nil, create: true)
let dateFormatter = NSDateFormatter()
dateFormatter.dateStyle = .FullStyle
dateFormatter.timeStyle = .FullStyle
return directory.URLByAppendingPathComponent(dateFormatter.stringFromDate(NSDate()) + ".mov")
let directory: NSURL = try FileManager.default().urlForDirectory(.documentDirectory, in: .userDomainMask, appropriateFor: nil, create: true)
let dateFormatter = DateFormatter()
dateFormatter.dateStyle = .fullStyle
dateFormatter.timeStyle = .fullStyle
return directory.appendingPathComponent(dateFormatter.string(from: NSDate() as Date) + ".mov")
} catch let e as NSError {
delegate?.captureCreateMovieFileFailedWithError?(self, error: e)
delegate?.captureCreateMovieFileFailedWithError?(capture: self, error: e)
}
return nil
}
......@@ -751,55 +775,55 @@ public class CaptureSession : NSObject, AVCaptureFileOutputRecordingDelegate {
- Returns: An optional UIImage if successful.
*/
private func adjustOrientationForImage(image: UIImage) -> UIImage? {
guard .Up != image.imageOrientation else {
guard .up != image.imageOrientation else {
return image
}
var transform: CGAffineTransform = CGAffineTransformIdentity
var transform: CGAffineTransform = .identity
// Rotate if Left, Right, or Down.
switch image.imageOrientation {
case .Down, .DownMirrored:
transform = CGAffineTransformTranslate(transform, image.size.width, image.size.height)
transform = CGAffineTransformRotate(transform, CGFloat(M_PI))
case .Left, .LeftMirrored:
transform = CGAffineTransformTranslate(transform, image.size.width, 0)
transform = CGAffineTransformRotate(transform, CGFloat(M_PI_2))
case .Right, .RightMirrored:
transform = CGAffineTransformTranslate(transform, 0, image.size.height)
transform = CGAffineTransformRotate(transform, -CGFloat(M_PI_2))
case .down, .downMirrored:
transform = transform.translateBy(x: image.size.width, y: image.size.height)
transform = transform.rotate(CGFloat(M_PI))
case .left, .leftMirrored:
transform = transform.translateBy(x: image.size.width, y: 0)
transform = transform.rotate(CGFloat(M_PI_2))
case .right, .rightMirrored:
transform = transform.translateBy(x: 0, y: image.size.height)
transform = transform.rotate(-CGFloat(M_PI_2))
default:break
}
// Flip if mirrored.
switch image.imageOrientation {
case .UpMirrored, .DownMirrored:
transform = CGAffineTransformTranslate(transform, image.size.width, 0)
transform = CGAffineTransformScale(transform, -1, 1)
case .LeftMirrored, .RightMirrored:
transform = CGAffineTransformTranslate(transform, image.size.height, 0)
transform = CGAffineTransformScale(transform, -1, 1)
case .upMirrored, .downMirrored:
transform = transform.translateBy(x: image.size.width, y: 0)
transform = transform.scaleBy(x: -1, y: 1)
case .leftMirrored, .rightMirrored:
transform = transform.translateBy(x: image.size.height, y: 0)
transform = transform.scaleBy(x: -1, y: 1)
default:break
}
// Draw the underlying CGImage with the calculated transform.
guard let context = CGBitmapContextCreate(nil, Int(image.size.width), Int(image.size.height), CGImageGetBitsPerComponent(image.CGImage), 0, CGImageGetColorSpace(image.CGImage), CGImageGetBitmapInfo(image.CGImage).rawValue) else {
guard let context = CGContext(data: nil, width: Int(image.size.width), height: Int(image.size.height), bitsPerComponent: image.cgImage!.bitsPerComponent, bytesPerRow: 0, space: image.cgImage!.colorSpace!, bitmapInfo: image.cgImage!.bitmapInfo.rawValue) else {
return nil
}
CGContextConcatCTM(context, transform)
context.concatCTM(transform)
switch image.imageOrientation {
case .Left, .LeftMirrored, .Right, .RightMirrored:
CGContextDrawImage(context, CGRect(x: 0, y: 0, width: image.size.height, height: image.size.width), image.CGImage)
case .left, .leftMirrored, .right, .rightMirrored:
context.draw(in: CGRect(x: 0, y: 0, width: image.size.height, height: image.size.width), image: image.cgImage!)
default:
CGContextDrawImage(context, CGRect(origin: .zero, size: image.size), image.CGImage)
context.draw(in: CGRect(origin: .zero, size: image.size), image: image.cgImage!)
}
guard let CGImage = CGBitmapContextCreateImage(context) else {
guard let cgImage = context.makeImage() else {
return nil
}
return UIImage(CGImage: CGImage)
return UIImage(cgImage: cgImage)
}
}
......@@ -32,268 +32,259 @@ import UIKit
import AVFoundation
public enum CaptureMode {
case Photo
case Video
case photo
case video
}
@objc(CaptureViewDelegate)
public protocol CaptureViewDelegate : MaterialDelegate {
public protocol CaptureViewDelegate: MaterialDelegate {
/**
:name: captureViewDidStartRecordTimer
A delegation method that is fired when the record timer has started.
- Parameter captureView: A reference to the calling captureView.
*/
@objc
optional func captureViewDidStartRecordTimer(captureView: CaptureView)
/**
:name: captureViewDidUpdateRecordTimer
A delegation method that is fired when the record timer was updated.
- Parameter captureView: A reference to the calling captureView.
- Parameter hours: An integer representing hours.
- Parameter minutes: An integer representing minutes.
- Parameter seconds: An integer representing seconds.
*/
@objc
optional func captureViewDidUpdateRecordTimer(captureView: CaptureView, hours: Int, minutes: Int, seconds: Int)
/**
:name: captureViewDidStopRecordTimer
A delegation method that is fired when the record timer has stopped.
- Parameter captureView: A reference to the calling captureView.
- Parameter hours: An integer representing hours.
- Parameter minutes: An integer representing minutes.
- Parameter seconds: An integer representing seconds.
*/
@objc
optional func captureViewDidStopRecordTimer(captureView: CaptureView, hours: Int, minutes: Int, seconds: Int)
/**
:name: captureViewDidTapToFocusAtPoint
A delegation method that is fired when the user tapped to adjust the focus.
- Parameter captureView: A reference to the calling captureView.
- Parameter point: CGPoint that the user tapped at.
*/
@objc
optional func captureViewDidTapToFocusAtPoint(captureView: CaptureView, point: CGPoint)
/**
:name: captureViewDidTapToExposeAtPoint
A delegation method that is fired when the user tapped to adjust the exposure.
- Parameter captureView: A reference to the calling captureView.
- Parameter point: CGPoint that the user tapped at.
*/
@objc
optional func captureViewDidTapToExposeAtPoint(captureView: CaptureView, point: CGPoint)
/**
:name: captureViewDidTapToResetAtPoint
A delegation method that is fired when the user tapped to reset.
- Parameter captureView: A reference to the calling captureView.
- Parameter point: CGPoint that the user tapped at.
*/
@objc
optional func captureViewDidTapToResetAtPoint(captureView: CaptureView, point: CGPoint)
/**
:name: captureViewDidPressFlashButton
A delegation method that is fired when the user pressed the flash button.
- Parameter captureView: A reference to the calling captureView.
- Parameter button: A reference to the UIButton that the user pressed.
*/
@objc
optional func captureViewDidPressFlashButton(captureView: CaptureView, button: UIButton)
/**
:name: captureViewDidPressSwitchCamerasButton
A delegation method that is fired when the user pressed the switch camera button.
- Parameter captureView: A reference to the calling captureView.
- Parameter button: A reference to the UIButton that the user pressed.
*/
@objc
optional func captureViewDidPressSwitchCamerasButton(captureView: CaptureView, button: UIButton)
/**
:name: captureViewDidPressCaptureButton
A delegation method that is fired when the user pressed capture button.
- Parameter captureView: A reference to the calling captureView.
- Parameter button: A reference to the UIButton that the user pressed.
*/
@objc
optional func captureViewDidPressCaptureButton(captureView: CaptureView, button: UIButton)
/**
:name: captureViewDidPressCameraButton
A delegation method that is fired when the user enabled the photo camera.
- Parameter captureView: A reference to the calling captureView.
- Parameter button: A reference to the UIButton that the user pressed.
*/
@objc
optional func captureViewDidPressCameraButton(captureView: CaptureView, button: UIButton)
/**
:name: captureViewDidPressVideoButton
A delegation method that is fired when the user enabled the video camera.
- Parameter captureView: A reference to the calling captureView.
- Parameter button: A reference to the UIButton that the user pressed.
*/
@objc
optional func captureViewDidPressVideoButton(captureView: CaptureView, button: UIButton)
}
public class CaptureView : MaterialView, UIGestureRecognizerDelegate {
/**
:name: timer
*/
private var timer: NSTimer?
/// A Timer reference for when recording is enabled.
private var timer: Timer?
/**
:name: tapToFocusGesture
*/
/// A tap gesture reference for focus events.
private var tapToFocusGesture: UITapGestureRecognizer?
/**
:name: tapToExposeGesture
*/
/// A tap gesture reference for exposure events.
private var tapToExposeGesture: UITapGestureRecognizer?
/**
:name: tapToResetGesture
*/
/// A tap gesture reference for reset events.
private var tapToResetGesture: UITapGestureRecognizer?
/**
:name: captureMode
*/
public lazy var captureMode: CaptureMode = .Video
/// A reference to the capture mode.
public lazy var captureMode: CaptureMode = .video
/// A boolean indicating whether to enable tap to focus.
/// Enabling this also enables tap to reset, so a focus change can be undone.
@IBInspectable public var enableTapToFocus: Bool = false {
	didSet {
		if enableTapToFocus {
			// Tap to reset is required so the user can revert focus adjustments.
			enableTapToReset = true
			prepareFocusLayer()
			prepareTapGesture(gesture: &tapToFocusGesture, numberOfTapsRequired: 1, numberOfTouchesRequired: 1, selector: #selector(handleTapToFocusGesture))
			if let v: UITapGestureRecognizer = tapToExposeGesture {
				// The single-tap (focus) gesture must wait for the double-tap (expose) gesture to fail.
				tapToFocusGesture!.require(toFail: v)
			}
		} else {
			removeTapGesture(gesture: &tapToFocusGesture)
			focusLayer?.removeFromSuperlayer()
			focusLayer = nil
		}
	}
}
/// A boolean indicating whether to enable tap to expose.
/// Enabling this also enables tap to reset, so an exposure change can be undone.
@IBInspectable public var enableTapToExpose: Bool = false {
	didSet {
		if enableTapToExpose {
			// Tap to reset is required so the user can revert exposure adjustments.
			enableTapToReset = true
			prepareExposureLayer()
			prepareTapGesture(gesture: &tapToExposeGesture, numberOfTapsRequired: 2, numberOfTouchesRequired: 1, selector: #selector(handleTapToExposeGesture))
			if let v: UITapGestureRecognizer = tapToFocusGesture {
				// The single-tap (focus) gesture must wait for the double-tap (expose) gesture to fail.
				v.require(toFail: tapToExposeGesture!)
			}
		} else {
			removeTapGesture(gesture: &tapToExposeGesture)
			exposureLayer?.removeFromSuperlayer()
			exposureLayer = nil
		}
	}
}
/// A boolean indicating whether to enable tap to reset.
/// A two-finger double tap restores the default focus and exposure modes.
@IBInspectable public var enableTapToReset: Bool = false {
	didSet {
		if enableTapToReset {
			prepareResetLayer()
			prepareTapGesture(gesture: &tapToResetGesture, numberOfTapsRequired: 2, numberOfTouchesRequired: 2, selector: #selector(handleTapToResetGesture))
			// The focus and expose gestures must wait for the reset gesture to fail.
			if let v: UITapGestureRecognizer = tapToFocusGesture {
				v.require(toFail: tapToResetGesture!)
			}
			if let v: UITapGestureRecognizer = tapToExposeGesture {
				v.require(toFail: tapToResetGesture!)
			}
		} else {
			removeTapGesture(gesture: &tapToResetGesture)
			resetLayer?.removeFromSuperlayer()
			resetLayer = nil
		}
	}
}
/// MaterialEdgeInset preset value for content. Assigning updates contentInset.
public var contentInsetPreset: MaterialEdgeInset = .None {
	didSet {
		contentInset = MaterialEdgeInsetToValue(inset: contentInsetPreset)
	}
}
/// Content inset value. Assigning reloads the view hierarchy.
public var contentInset: UIEdgeInsets = MaterialEdgeInsetToValue(inset: .Square4) {
	didSet {
		reloadView()
	}
}
/// A reference to the CapturePreview view.
public private(set) var previewView: CapturePreview!
/// A reference to the CaptureSession.
public private(set) var captureSession: CaptureSession!
/// A reference to the focus layer used in focus animations.
public private(set) var focusLayer: MaterialLayer?
/// A reference to the exposure layer used in exposure animations.
public private(set) var exposureLayer: MaterialLayer?
/// A reference to the reset layer used in reset animations.
public private(set) var resetLayer: MaterialLayer?
/// A reference to the cameraButton. Assigning wires the button's
/// touch-up-inside event to handleCameraButton and reloads the view.
public var cameraButton: UIButton? {
	didSet {
		if let v: UIButton = cameraButton {
			v.addTarget(self, action: #selector(handleCameraButton), for: .touchUpInside)
		}
		reloadView()
	}
}
/// A reference to the captureButton. Assigning wires the button's
/// touch-up-inside event to handleCaptureButton and reloads the view.
public var captureButton: UIButton? {
	didSet {
		if let v: UIButton = captureButton {
			v.addTarget(self, action: #selector(handleCaptureButton), for: .touchUpInside)
		}
		reloadView()
	}
}
/// A reference to the videoButton. Assigning wires the button's
/// touch-up-inside event to handleVideoButton and reloads the view.
public var videoButton: UIButton? {
	didSet {
		if let v: UIButton = videoButton {
			v.addTarget(self, action: #selector(handleVideoButton), for: .touchUpInside)
		}
		reloadView()
	}
}
/// A reference to the switchCamerasButton. Assigning wires the button's
/// touch-up-inside event to handleSwitchCamerasButton.
public var switchCamerasButton: UIButton? {
	didSet {
		if let v: UIButton = switchCamerasButton {
			v.addTarget(self, action: #selector(handleSwitchCamerasButton), for: .touchUpInside)
		}
	}
}
/// A reference to the flashButton. Assigning wires the button's
/// touch-up-inside event to handleFlashButton.
public var flashButton: UIButton? {
	didSet {
		if let v: UIButton = flashButton {
			v.addTarget(self, action: #selector(handleFlashButton), for: .touchUpInside)
		}
	}
}
/// A convenience initializer that creates the view with a zero frame.
public convenience init() {
	self.init(frame: CGRect.zero)
}
/**
:name: layoutSubviews
*/
public override func layoutSubviews() {
super.layoutSubviews()
previewView.frame = bounds
......@@ -316,17 +307,20 @@ public class CaptureView : MaterialView, UIGestureRecognizerDelegate {
}
/**
Prepares the view instance when initialized. When subclassing,
it is recommended to override the prepareView method
to initialize property values and other setup operations.
The super.prepareView method should always be called immediately
when subclassing.
*/
public override func prepareView() {
	super.prepareView()
	backgroundColor = Color.black
	// The captureSession must exist before preparePreviewView binds to its session.
	prepareCaptureSession()
	preparePreviewView()
}
/**
:name: reloadView
*/
/// Reloads the view.
public func reloadView() {
// clear constraints so new ones do not conflict
removeConstraints(constraints)
......@@ -334,79 +328,76 @@ public class CaptureView : MaterialView, UIGestureRecognizerDelegate {
v.removeFromSuperview()
}
insertSubview(previewView, atIndex: 0)
insertSubview(previewView, at: 0)
if let v: UIButton = captureButton {
insertSubview(v, atIndex: 1)
insertSubview(v, at: 1)
}
if let v: UIButton = cameraButton {
insertSubview(v, atIndex: 2)
insertSubview(v, at: 2)
}
if let v: UIButton = videoButton {
insertSubview(v, atIndex: 3)
insertSubview(v, at: 3)
}
}
/// Starts the timer for recording, firing updateTimer every 0.5s on the
/// main run loop, and notifies the delegate that the record timer started.
internal func startTimer() {
	// Invalidate any previous timer before scheduling a new one.
	timer?.invalidate()
	timer = Timer(timeInterval: 0.5, target: self, selector: #selector(updateTimer), userInfo: nil, repeats: true)
	RunLoop.main().add(timer!, forMode: .commonModes)
	(delegate as? CaptureViewDelegate)?.captureViewDidStartRecordTimer?(captureView: self)
}
/// Updates the timer when recording, reporting the recorded duration to the
/// delegate broken down into hours, minutes, and seconds.
internal func updateTimer() {
	let duration: CMTime = captureSession.recordedDuration
	let time: Double = CMTimeGetSeconds(duration)
	let hours: Int = Int(time / 3600)
	let minutes: Int = Int((time / 60).truncatingRemainder(dividingBy: 60))
	let seconds: Int = Int(time.truncatingRemainder(dividingBy: 60))
	(delegate as? CaptureViewDelegate)?.captureViewDidUpdateRecordTimer?(captureView: self, hours: hours, minutes: minutes, seconds: seconds)
}
/// Stops the timer when recording, invalidating it and reporting the final
/// recorded duration to the delegate as hours, minutes, and seconds.
internal func stopTimer() {
	// Capture the final duration before tearing down the timer.
	let duration: CMTime = captureSession.recordedDuration
	let time: Double = CMTimeGetSeconds(duration)
	let hours: Int = Int(time / 3600)
	let minutes: Int = Int((time / 60).truncatingRemainder(dividingBy: 60))
	let seconds: Int = Int(time.truncatingRemainder(dividingBy: 60))
	timer?.invalidate()
	timer = nil
	(delegate as? CaptureViewDelegate)?.captureViewDidStopRecordTimer?(captureView: self, hours: hours, minutes: minutes, seconds: seconds)
}
/**
Handler for the flashButton.
- Parameter button: A UIButton that is associated with the event.
*/
internal func handleFlashButton(button: UIButton) {
	(delegate as? CaptureViewDelegate)?.captureViewDidPressFlashButton?(captureView: self, button: button)
}
/**
Handler for the switchCamerasButton. Switches the active camera and
notifies the delegate.
- Parameter button: A UIButton that is associated with the event.
*/
internal func handleSwitchCamerasButton(button: UIButton) {
	captureSession.switchCameras()
	(delegate as? CaptureViewDelegate)?.captureViewDidPressSwitchCamerasButton?(captureView: self, button: button)
}
/**
:name: handleCaptureButton
Handler for the captureButton.
- Parameter button: A UIButton that is associated with the event.
*/
internal func handleCaptureButton(button: UIButton) {
if .Photo == captureMode {
if .photo == captureMode {
captureSession.captureStillImage()
} else if .Video == captureMode {
} else if .video == captureMode {
if captureSession.isRecording {
captureSession.stopRecording()
stopTimer()
......@@ -415,69 +406,80 @@ public class CaptureView : MaterialView, UIGestureRecognizerDelegate {
startTimer()
}
}
(delegate as? CaptureViewDelegate)?.captureViewDidPressCaptureButton?(self, button: button)
(delegate as? CaptureViewDelegate)?.captureViewDidPressCaptureButton?(captureView: self, button: button)
}
/**
Handler for the cameraButton. Switches the captureMode to .photo and
notifies the delegate.
- Parameter button: A UIButton that is associated with the event.
*/
internal func handleCameraButton(button: UIButton) {
	captureMode = .photo
	(delegate as? CaptureViewDelegate)?.captureViewDidPressCameraButton?(captureView: self, button: button)
}
/**
Handler for the videoButton. Switches the captureMode to .video and
notifies the delegate.
- Parameter button: A UIButton that is associated with the event.
*/
internal func handleVideoButton(button: UIButton) {
	captureMode = .video
	(delegate as? CaptureViewDelegate)?.captureViewDidPressVideoButton?(captureView: self, button: button)
}
/**
Handler for the tapToFocusGesture. Maps the tap location to a capture
device point of interest, focuses there, animates the focus layer, and
notifies the delegate.
- Parameter recognizer: A UITapGestureRecognizer that is associated with the event.
*/
@objc
internal func handleTapToFocusGesture(recognizer: UITapGestureRecognizer) {
	if enableTapToFocus && captureSession.cameraSupportsTapToFocus {
		let point: CGPoint = recognizer.location(in: self)
		captureSession.focusAtPoint(point: previewView.captureDevicePointOfInterestForPoint(point: point))
		animateTapLayer(layer: focusLayer!, point: point)
		(delegate as? CaptureViewDelegate)?.captureViewDidTapToFocusAtPoint?(captureView: self, point: point)
	}
}
/**
Handler for the tapToExposeGesture. Maps the tap location to a capture
device point of interest, adjusts exposure there, animates the exposure
layer, and notifies the delegate.
- Parameter recognizer: A UITapGestureRecognizer that is associated with the event.
*/
@objc
internal func handleTapToExposeGesture(recognizer: UITapGestureRecognizer) {
	if enableTapToExpose && captureSession.cameraSupportsTapToExpose {
		let point: CGPoint = recognizer.location(in: self)
		captureSession.exposeAtPoint(point: previewView.captureDevicePointOfInterestForPoint(point: point))
		animateTapLayer(layer: exposureLayer!, point: point)
		(delegate as? CaptureViewDelegate)?.captureViewDidTapToExposeAtPoint?(captureView: self, point: point)
	}
}
/**
Handler for the tapToResetGesture. Restores the default focus and exposure
modes, animates the reset layer at the view point corresponding to the
device-space center (0.5, 0.5), and notifies the delegate.
- Parameter recognizer: A UITapGestureRecognizer that is associated with the event.
*/
@objc
internal func handleTapToResetGesture(recognizer: UITapGestureRecognizer) {
	if enableTapToReset {
		captureSession.resetFocusAndExposureModes()
		let point: CGPoint = previewView.pointForCaptureDevicePointOfInterest(point: CGPoint(x: 0.5, y: 0.5))
		animateTapLayer(layer: resetLayer!, point: point)
		(delegate as? CaptureViewDelegate)?.captureViewDidTapToResetAtPoint?(captureView: self, point: point)
	}
}
/**
:name: prepareTapGesture
Prepares a given tap gesture.
- Parameter gesture: An optional UITapGestureRecognizer to prepare.
- Parameter numberOfTapsRequired: An integer of the number of taps required
to activate the gesture.
- Parameter numberOfTouchesRequired: An integer of the number of touches, fingers,
required to activate the gesture.
- Parameter selector: A Selector to handle the event.
*/
private func prepareTapGesture(inout gesture: UITapGestureRecognizer?, numberOfTapsRequired: Int, numberOfTouchesRequired: Int, selector: Selector) {
removeTapGesture(&gesture)
private func prepareTapGesture(gesture: inout UITapGestureRecognizer?, numberOfTapsRequired: Int, numberOfTouchesRequired: Int, selector: Selector) {
removeTapGesture(gesture: &gesture)
gesture = UITapGestureRecognizer(target: self, action: selector)
gesture!.delegate = self
gesture!.numberOfTapsRequired = numberOfTapsRequired
......@@ -486,77 +488,74 @@ public class CaptureView : MaterialView, UIGestureRecognizerDelegate {
}
/**
Removes a given tap gesture from the view and clears the reference.
- Parameter gesture: An optional UITapGestureRecognizer to remove.
*/
private func removeTapGesture(gesture: inout UITapGestureRecognizer?) {
	if let v: UIGestureRecognizer = gesture {
		removeGestureRecognizer(v)
		gesture = nil
	}
}
/// Prepares the captureSession.
private func prepareCaptureSession() {
	captureSession = CaptureSession()
}
/// Prepares the previewView and starts the capture session.
private func preparePreviewView() {
	previewView = CapturePreview()
	// Bind the preview layer to the session so captured frames render in this view.
	// Assumes prepareCaptureSession has already run — TODO confirm call order in prepareView.
	(previewView.layer as! AVCaptureVideoPreviewLayer).session = captureSession.session
	captureSession.startSession()
}
/// Prepares the focusLayer, a hidden 150x150 white-bordered layer that is
/// shown and animated at the tap location during focus events.
private func prepareFocusLayer() {
	if nil == focusLayer {
		focusLayer = MaterialLayer(frame: CGRect(x: 0, y: 0, width: 150, height: 150))
		focusLayer!.isHidden = true
		focusLayer!.borderWidth = 2
		focusLayer!.borderColor = Color.white.cgColor
		previewView.layer.addSublayer(focusLayer!)
	}
}
/// Prepares the exposureLayer, a hidden 150x150 yellow-bordered layer that is
/// shown and animated at the tap location during exposure events.
private func prepareExposureLayer() {
	if nil == exposureLayer {
		exposureLayer = MaterialLayer(frame: CGRect(x: 0, y: 0, width: 150, height: 150))
		exposureLayer!.isHidden = true
		exposureLayer!.borderWidth = 2
		exposureLayer!.borderColor = Color.yellow.darken1.cgColor
		previewView.layer.addSublayer(exposureLayer!)
	}
}
/// Prepares the resetLayer, a hidden 150x150 red-bordered layer that is
/// shown and animated at the view center during reset events.
private func prepareResetLayer() {
	if nil == resetLayer {
		resetLayer = MaterialLayer(frame: CGRect(x: 0, y: 0, width: 150, height: 150))
		resetLayer!.isHidden = true
		resetLayer!.borderWidth = 2
		resetLayer!.borderColor = Color.red.accent1.cgColor
		previewView.layer.addSublayer(resetLayer!)
	}
}
/**
:name: animateTapLayer
*/
/// Animates the tap and layer.
private func animateTapLayer(layer v: MaterialLayer, point: CGPoint) {
MaterialAnimation.animationDisabled {
v.transform = CATransform3DIdentity
v.position = point
v.hidden = false
v.isHidden = false
}
MaterialAnimation.animateWithDuration(0.25, animations: {
MaterialAnimation.animateWithDuration(duration: 0.25, animations: {
v.transform = CATransform3DMakeScale(0.5, 0.5, 1)
}) {
MaterialAnimation.delay(0.4) {
MaterialAnimation.animationDisabled {
v.hidden = true
v.isHidden = true
}
}
}
......
Markdown is supported
0% or
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or to comment