Commit 8b825830 by Daniel Dahan

autolayout adjustments made for convenience

parent d01fc27f
////
//// Copyright (C) 2015 GraphKit, Inc. <http://graphkit.io> and other GraphKit contributors.
////
//// This program is free software: you can redistribute it and/or modify
//// it under the terms of the GNU Affero General Public License as published
//// by the Free Software Foundation, either version 3 of the License, or
//// (at your option) any later version.
////
//// This program is distributed in the hope that it will be useful,
//// but WITHOUT ANY WARRANTY; without even the implied warranty of
//// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
//// GNU Affero General Public License for more details.
////
//// You should have received a copy of the GNU Affero General Public License
//// along with this program located at the root of the software package
//// in a file called LICENSE. If not, see <http://www.gnu.org/licenses/>.
////
// //
// Copyright (C) 2015 GraphKit, Inc. <http://graphkit.io> and other GraphKit contributors. //import UIKit
// //import AVFoundation
// This program is free software: you can redistribute it and/or modify //import AssetsLibrary
// it under the terms of the GNU Affero General Public License as published //
// by the Free Software Foundation, either version 3 of the License, or //@objc(CaptureDelegate)
// (at your option) any later version. //public protocol CaptureDelegate {
// // optional func captureDeviceConfigurationFailed(capture: Capture, error: NSError!)
// This program is distributed in the hope that it will be useful, // optional func captureMediaCaptureFailed(capture: Capture, error: NSError!)
// but WITHOUT ANY WARRANTY; without even the implied warranty of // optional func captureAsetLibraryWriteFailed(capture: Capture, error: NSError!)
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the // optional func capture(capture: Capture, assetLibraryDidWrite image: UIImage!)
// GNU Affero General Public License for more details. //}
// //
// You should have received a copy of the GNU Affero General Public License //public class Capture: NSObject, AVCaptureFileOutputRecordingDelegate {
// along with this program located at the root of the software package // //
// in a file called LICENSE. If not, see <http://www.gnu.org/licenses/>. // // :name: activeVideoInput
// // // :description: The video input that is currently active.
// //
import UIKit // private var activeVideoInput: AVCaptureDeviceInput?
import AVFoundation //
import AssetsLibrary // //
// // :name: imageOutput
/**
* CaptureDelegate
* Optional callbacks fired by Capture during device configuration, media capture,
* and asset-library writes. Declared @objc so the methods may be optional.
* NOTE(review): "captureAsetLibraryWriteFailed" misspells "Asset"; renaming would
* break existing conformers, so the typo is documented rather than fixed here.
*/
@objc(CaptureDelegate)
public protocol CaptureDelegate {
	// Called when a device configuration change (focus, exposure, flash, torch, etc.) fails.
	optional func captureDeviceConfigurationFailed(capture: Capture, error: NSError!)
	// Called when capturing media (photo/video) fails.
	optional func captureMediaCaptureFailed(capture: Capture, error: NSError!)
	// Called when writing captured media to the asset library fails. (Typo: "Aset".)
	optional func captureAsetLibraryWriteFailed(capture: Capture, error: NSError!)
	// Called after an image was successfully written to the asset library.
	optional func capture(capture: Capture, assetLibraryDidWrite image: UIImage!)
}
// //
public class Capture: NSObject, AVCaptureFileOutputRecordingDelegate { // private lazy var movieOutput: AVCaptureMovieFileOutput = AVCaptureMovieFileOutput()
//
//	:name:	activeVideoInput
//	:description:	The video input that is currently active.
//
private var activeVideoInput: AVCaptureDeviceInput?
//
//	:name:	imageOutput
//	:description:	When the session is taking a photo, this is the output manager.
//
private lazy var imageOutput: AVCaptureStillImageOutput = AVCaptureStillImageOutput()
//
//	:name:	movieOutput
//	:description:	When the session is shooting a video, this is the output manager.
//
private lazy var movieOutput: AVCaptureMovieFileOutput = AVCaptureMovieFileOutput()
//
//	:name:	movieOutputURL
//	:description:	The output URL of the movie file. Set by startRecording() just
//	before recording begins.
//
private var movieOutputURL: NSURL?
//
//	:name:	queue
//	:description:	Async job queue used to start/stop the session and to apply
//	deferred device configuration off the calling thread.
//
private lazy var queue: dispatch_queue_t = {
	return dispatch_queue_create("io.graphkit.Capture", nil)
}()
//
//	:name:	CaptureAdjustingExposureContext
//	:description:	Used for KVO observation context when observing the device's
//	"adjustingExposure" key path.
//	NOTE(review): the variable is never assigned; only its address (&) is used as
//	a unique context pointer — confirm it was intended to be public.
//
public var CaptureAdjustingExposureContext: NSString?
/**
* cameraCount
* The number of available video-capture cameras on the device.
*/
public var cameraCount: Int {
	let devices: [AnyObject] = AVCaptureDevice.devicesWithMediaType(AVMediaTypeVideo)
	return devices.count
}
/**
* session
* An AVCaptureSession that manages all inputs and outputs in that session.
* Lazily created; populate it via prepareSession() before starting.
*/
public lazy var session: AVCaptureSession = AVCaptureSession()
/**
* delegate
* An optional instance of CaptureDelegate to handle events that are triggered during various
* stages in the session. Held weakly to avoid a retain cycle with the owner.
*/
public weak var delegate: CaptureDelegate?
/**
* prepareSession
* A helper method that prepares the session with the various available inputs and outputs.
* @param preset: String, default: AVCaptureSessionPresetHigh
* @return A boolean value, true if successful, false otherwise.
*/
public func prepareSession(preset: String = AVCaptureSessionPresetHigh) -> Bool {
	session.sessionPreset = preset

	// Default video camera; a session without a usable video input is a failure.
	let videoDevice: AVCaptureDevice = AVCaptureDevice.defaultDeviceWithMediaType(AVMediaTypeVideo)
	guard let videoInput: AVCaptureDeviceInput = try? AVCaptureDeviceInput(device: videoDevice) else {
		return false
	}
	if session.canAddInput(videoInput) {
		session.addInput(videoInput)
		activeVideoInput = videoInput
	}

	// Default microphone; also required for the session to be considered prepared.
	let audioDevice: AVCaptureDevice = AVCaptureDevice.defaultDeviceWithMediaType(AVMediaTypeAudio)
	guard let audioInput: AVCaptureDeviceInput = try? AVCaptureDeviceInput(device: audioDevice) else {
		return false
	}
	if session.canAddInput(audioInput) {
		session.addInput(audioInput)
	}

	// Still-image output (JPEG) and movie-file output.
	imageOutput.outputSettings = [AVVideoCodecKey: AVVideoCodecJPEG]
	if session.canAddOutput(imageOutput) {
		session.addOutput(imageOutput)
	}
	if session.canAddOutput(movieOutput) {
		session.addOutput(movieOutput)
	}

	return true
}
/**
* startSession
* Starts the capture session if it is not already running.
* The actual startRunning() call is dispatched to the private queue, as it blocks.
*/
public func startSession() {
	guard !session.running else {
		return
	}
	dispatch_async(queue) {
		self.session.startRunning()
	}
}
/**
* stopSession
* Stops the capture session if it is already running.
* The actual stopRunning() call is dispatched to the private queue, as it blocks.
*/
public func stopSession() {
	guard session.running else {
		return
	}
	dispatch_async(queue) {
		self.session.stopRunning()
	}
}
/**
* cameraWithPosition
* Finds the first video-capture device at the given position.
* @param position: AVCaptureDevicePosition
* @return An AVCaptureDevice optional; nil when no camera matches.
*/
public func cameraWithPosition(position: AVCaptureDevicePosition) -> AVCaptureDevice? {
	for device in AVCaptureDevice.devicesWithMediaType(AVMediaTypeVideo) where position == device.position {
		return device as? AVCaptureDevice
	}
	return nil
}
/**
* activeCamera
* @return The active camera's video input device.
* NOTE(review): force-unwraps activeVideoInput — prepareSession() must have
* succeeded before this is read, or this will crash.
*/
public var activeCamera: AVCaptureDevice {
	return activeVideoInput!.device
}
/**
* inactiveCamera
* @return The inactive camera's video input device, or nil when the device has
* fewer than two cameras.
*/
public var inactiveCamera: AVCaptureDevice? {
	guard 1 < cameraCount else {
		return nil
	}
	// Whichever side is not currently active.
	return .Back == activeCamera.position ? cameraWithPosition(.Front) : cameraWithPosition(.Back)
}
/**
* canSwitchCameras
* Checks whether the camera can be switched. This requires at least two cameras.
* @return A boolean of the result, true if yes, false otherwise.
*/
public var canSwitchCameras: Bool {
	return cameraCount > 1
}
/**
* switchCamera
* If it is possible to switch cameras, then the camera will be switched to the opposite facing camera.
* @return A boolean of the result, true if switched, false otherwise.
* @delegate If creating the new input fails, captureDeviceConfigurationFailed(capture:error:) is called.
*/
public func switchCamera() -> Bool {
	if !canSwitchCameras {
		return false
	}

	let videoDevice: AVCaptureDevice? = inactiveCamera
	let videoInput: AVCaptureDeviceInput? = try? AVCaptureDeviceInput(device: videoDevice)

	// Bug fix: the original branch was inverted — it reconfigured the session when
	// videoInput was nil (creation FAILED) and reported failure when it succeeded.
	if nil == videoInput {
		delegate?.captureDeviceConfigurationFailed?(self, error: nil)
		return false
	}

	session.beginConfiguration()
	session.removeInput(activeVideoInput)

	if session.canAddInput(videoInput) {
		session.addInput(videoInput)
		activeVideoInput = videoInput
	} else {
		// Could not add the new input; restore the previous one so the session keeps working.
		session.addInput(activeVideoInput)
	}

	session.commitConfiguration()
	return true
}
/**
* cameraHasFlash
* Checks whether the camera supports flash.
* @return A boolean of the result, true if yes, false otherwise.
*/
public var cameraHasFlash: Bool {
	get {
		return activeCamera.hasFlash
	}
}
/**
* flashMode
* A mutator and accessor for the active camera's flashMode property.
* @delegate If the configuration fails, captureDeviceConfigurationFailed(capture:error:) is called.
*/
public var flashMode: AVCaptureFlashMode {
	get {
		return activeCamera.flashMode
	}
	set(value) {
		let device: AVCaptureDevice = activeCamera
		// Bug fix: the original compared and assigned `flashMode` (this getter, i.e.
		// device.flashMode) instead of the incoming `value`; the comparison was
		// always false, so the setter never applied anything.
		if value != device.flashMode && device.isFlashModeSupported(value) {
			do {
				try device.lockForConfiguration()
				device.flashMode = value
				device.unlockForConfiguration()
			} catch let e as NSError {
				delegate?.captureDeviceConfigurationFailed?(self, error: e)
			}
		}
	}
}
/**
* cameraHasTorch
* Checks whether the device supports the torch feature.
* @return A boolean of the result, true if yes, false otherwise.
*/
public var cameraHasTorch: Bool {
	return activeCamera.hasTorch
}
/**
* torchMode
* A mutator and accessor for the active camera's torchMode property.
* @delegate If the configuration fails, captureDeviceConfigurationFailed(capture:error:) is called.
*/
public var torchMode: AVCaptureTorchMode {
	get {
		return activeCamera.torchMode
	}
	set(value) {
		let device: AVCaptureDevice = activeCamera
		// Bug fix: the original compared and assigned `torchMode` (this getter, i.e.
		// device.torchMode) instead of the incoming `value`; the comparison was
		// always false, so the setter never applied anything.
		if value != device.torchMode && device.isTorchModeSupported(value) {
			do {
				try device.lockForConfiguration()
				device.torchMode = value
				device.unlockForConfiguration()
			} catch let e as NSError {
				delegate?.captureDeviceConfigurationFailed?(self, error: e)
			}
		}
	}
}
/**
* cameraSupportsTapToFocus
* Checks whether the device supports tap to focus.
* @return A boolean of the result, true if yes, false otherwise.
*/
public var cameraSupportsTapToFocus: Bool {
	return activeCamera.focusPointOfInterestSupported
}
/**
* focusAtPoint
* Sets the point to focus at on the screen (device point-of-interest coordinates).
* @param point: CGPoint
* @delegate If the configuration fails, captureDeviceConfigurationFailed(capture:error:) is called.
*/
public func focusAtPoint(point: CGPoint) {
	let device: AVCaptureDevice = activeCamera
	guard device.focusPointOfInterestSupported && device.isFocusModeSupported(.AutoFocus) else {
		return
	}
	do {
		try device.lockForConfiguration()
		device.focusPointOfInterest = point
		device.focusMode = .AutoFocus
		device.unlockForConfiguration()
	} catch let e as NSError {
		delegate?.captureDeviceConfigurationFailed?(self, error: e)
	}
}
/**
* cameraSupportsTapToExpose
* Checks whether the device supports tap to expose.
* @return A boolean of the result, true if yes, false otherwise.
*/
public var cameraSupportsTapToExpose: Bool {
	return activeCamera.exposurePointOfInterestSupported
}
/**
* exposeAtPoint
* Sets a point for exposure and switches to continuous auto-exposure. When the
* device can later lock exposure, registers a KVO observer on "adjustingExposure"
* (handled in observeValueForKeyPath) using &CaptureAdjustingExposureContext.
* @param point: CGPoint
* @delegate If the configuration fails, captureDeviceConfigurationFailed(capture:error:) is called.
*/
public func exposeAtPoint(point: CGPoint) {
	let device: AVCaptureDevice = activeCamera
	let exposureMode: AVCaptureExposureMode = .ContinuousAutoExposure

	if device.exposurePointOfInterestSupported && device.isExposureModeSupported(exposureMode) {
		var error: NSError?
		do {
			try device.lockForConfiguration()
			device.exposurePointOfInterest = point
			device.exposureMode = exposureMode

			// Observe exposure adjustment so it can be acted upon once settled.
			if device.isExposureModeSupported(.Locked) {
				device.addObserver(self, forKeyPath: "adjustingExposure", options: .New, context: &CaptureAdjustingExposureContext)
			}
			device.unlockForConfiguration()
		} catch let error1 as NSError {
			error = error1
			delegate?.captureDeviceConfigurationFailed?(self, error: error)
		}
	}
}
/**
* Override of NSObject KVO handling for the "adjustingExposure" observation
* registered in exposeAtPoint.
* NOTE(review): super is only called from inside the matched-context branch, so
* unrelated observations fall through silently — confirm this is intended; the
* usual pattern calls super for any non-matching context.
* NOTE(review): the condition reads `device.adjustingExposure` (still adjusting);
* the common pattern waits for !adjustingExposure before locking — verify.
* @delegate If the configuration fails, captureDeviceConfigurationFailed(capture:error:) is called.
*/
override public func observeValueForKeyPath(keyPath: String?, ofObject object: AnyObject?, change: [String : AnyObject]?, context: UnsafeMutablePointer<Void>) {
	if context == &CaptureAdjustingExposureContext {
		let device: AVCaptureDevice = object as! AVCaptureDevice

		if device.adjustingExposure && device.isExposureModeSupported(.Locked) {
			// One-shot observation: remove before acting.
			object!.removeObserver(self, forKeyPath: "adjustingExposure", context: &CaptureAdjustingExposureContext)
			dispatch_async(queue) {
				var error: NSError?
				do {
					// NOTE(review): lock/unlock applies no configuration change here —
					// presumably device.exposureMode = .Locked was intended; confirm.
					try device.lockForConfiguration()
					device.unlockForConfiguration()
				} catch let e as NSError {
					error = e
					self.delegate?.captureDeviceConfigurationFailed?(self, error: error)
				} catch {
					// Unreachable: all Swift errors bridge to NSError above.
					fatalError()
				}
			}
		} else {
			super.observeValueForKeyPath(keyPath, ofObject: object, change: change, context: context)
		}
	}
}
/**
* resetFocusAndExposureModes
* Resets the device to continuous auto-focus and auto-exposure at the center point.
* @delegate If the configuration fails, captureDeviceConfigurationFailed(capture:error:) is called.
*/
public func resetFocusAndExposureModes() {
	let device: AVCaptureDevice = activeCamera
	let exposureMode: AVCaptureExposureMode = .ContinuousAutoExposure
	let focusMode: AVCaptureFocusMode = .ContinuousAutoFocus
	// NOTE(review): both capability checks test focusPointOfInterestSupported;
	// the exposure one presumably meant exposurePointOfInterestSupported — confirm.
	let canResetExposure: Bool = device.focusPointOfInterestSupported && device.isExposureModeSupported(exposureMode)
	let canResetFocus: Bool = device.focusPointOfInterestSupported && device.isFocusModeSupported(focusMode)
	let centerPoint: CGPoint = CGPointMake(0.5, 0.5)

	do {
		try device.lockForConfiguration()
		if canResetFocus {
			device.focusMode = focusMode
			device.focusPointOfInterest = centerPoint
		}
		if canResetExposure {
			device.exposureMode = exposureMode
			device.exposurePointOfInterest = centerPoint
		}
		device.unlockForConfiguration()
	} catch let e as NSError {
		delegate?.captureDeviceConfigurationFailed?(self, error: e)
	}
}
/**
* captureStillImage
* Captures a still image on the video connection and writes the photo to the
* user's asset library via writeImageToAssetsLibrary (defined later in this file).
* @delegate On success, capture(capture:assetLibraryDidWrite:) is eventually called.
* @delegate On failure, captureAsetLibraryWriteFailed(capture:error:) is called.
*/
public func captureStillImage() {
	let connection: AVCaptureConnection = imageOutput.connectionWithMediaType(AVMediaTypeVideo)
	// Match the photo's orientation to the device orientation when supported.
	if connection.supportsVideoOrientation {
		connection.videoOrientation = currentVideoOrientation
	}
	imageOutput.captureStillImageAsynchronouslyFromConnection(connection) { (sampleBuffer: CMSampleBufferRef?, error: NSError?) in
		if nil == sampleBuffer {
			self.delegate?.captureAsetLibraryWriteFailed?(self, error: error)
		} else {
			// NOTE(review): jpegStillImageNSDataRepresentation and UIImage(data:) are
			// force-consumed; a non-JPEG buffer would crash here — confirm acceptable.
			let imageData: NSData = AVCaptureStillImageOutput.jpegStillImageNSDataRepresentation(sampleBuffer)
			let image: UIImage = UIImage(data: imageData)!
			self.writeImageToAssetsLibrary(image)
		}
	}
}
/**
* isRecording
* Checks whether the device is currently recording a movie.
* @return A boolean of the result, true if yes, false otherwise.
*/
public var isRecording: Bool {
	return movieOutput.recording
}
/**
* startRecording
* If the device is not currently recording, this starts the movie recording to
* a temp-directory URL (see uniqueURL), with orientation and stabilization applied.
* @delegate If the configuration fails, captureDeviceConfigurationFailed(capture:error:) is called.
*/
public func startRecording() {
	if !isRecording {
		let connection: AVCaptureConnection = movieOutput.connectionWithMediaType(AVMediaTypeVideo)
		// Match video orientation to the device and prefer automatic stabilization.
		if connection.supportsVideoOrientation {
			connection.videoOrientation = currentVideoOrientation
		}
		if connection.supportsVideoStabilization {
			connection.preferredVideoStabilizationMode = .Auto
		}

		let device: AVCaptureDevice = activeCamera

		// Smooth autofocus is disabled for recording (slower, video-friendly focusing off).
		if device.smoothAutoFocusSupported {
			var error: NSError?
			do {
				try device.lockForConfiguration()
				device.smoothAutoFocusEnabled = false
				device.unlockForConfiguration()
			} catch let error1 as NSError {
				error = error1
				delegate?.captureDeviceConfigurationFailed?(self, error: error)
			}
		}
		movieOutputURL = uniqueURL
		movieOutput.startRecordingToOutputFileURL(movieOutputURL, recordingDelegate: self)
	}
}
/**
* stopRecording
* If the device is currently recording, this stops the movie recording.
*/
public func stopRecording() {
	guard isRecording else {
		return
	}
	movieOutput.stopRecording()
}
/**
* recordedDuration
* Retrieves the movie's recorded duration so far.
* @return A CMTime value.
*/
public var recordedDuration: CMTime {
	return movieOutput.recordedDuration
}
/**
* currentVideoOrientation
* Retrieves the current orientation of the device as a capture orientation.
* @return An AVCaptureVideoOrientation value, [Portrait, LandscapeLeft, PortraitUpsideDown, LandscapeRight].
*/
public var currentVideoOrientation: AVCaptureVideoOrientation {
	switch UIDevice.currentDevice().orientation {
	case .Portrait:
		return .Portrait
	case .LandscapeRight:
		// Device landscape-right maps to camera landscape-left (the axes are mirrored).
		return .LandscapeLeft
	case .PortraitUpsideDown:
		return .PortraitUpsideDown
	default:
		// FaceUp/FaceDown/Unknown and device landscape-left all fall back here.
		return .LandscapeRight
	}
}
// * postAssetLibraryNotification
/**
	* uniqueURL
	* A unique URL generated for the movie video.
	* @return An optional NSURL value; nil when the temp directory cannot be created.
	*/
private var uniqueURL: NSURL? {
	let fileManager: NSFileManager = NSFileManager.defaultManager()
	let tempDirectoryTemplate: String = (NSTemporaryDirectory() as NSString).stringByAppendingPathComponent("FocusLibrary")
	do {
		try fileManager.createDirectoryAtPath(tempDirectoryTemplate, withIntermediateDirectories: true, attributes: nil)
		// The previous implementation hard-coded "test.mov", so every
		// recording overwrote the last one despite the "unique" contract.
		// A UUID-based name makes the URL genuinely unique per call.
		return NSURL.fileURLWithPath(tempDirectoryTemplate + "/" + NSUUID().UUIDString + ".mov")
	} catch {} // Directory creation failed; fall through and report no URL.
	return nil
}
// * @delegate If successful, an asynchronous call to capture(capture: Capture!, assetLibraryDidWrite image: UIImage!) delegate.
/**
	* postAssetLibraryNotification
	* Asynchronously notifies the delegate that an image was written to the
	* asset library.
	* @param image: UIImage!
	* @delegate An asynchronous call to capture(capture: Capture!, assetLibraryDidWrite image: UIImage!).
	*/
private func postAssetLibraryNotification(image: UIImage!) {
	// Dispatch on `queue` so the caller is never blocked by the delegate.
	let notify: () -> Void = {
		self.delegate?.capture?(self, assetLibraryDidWrite: image)
	}
	dispatch_async(queue, notify)
}
// }
/**
	* writeImageToAssetsLibrary
	* Writes the image file to the user's asset library.
	* @param image: UIImage!
	* @delegate If successful, an asynchronous call to capture(capture: Capture!, assetLibraryDidWrite image: UIImage!) delegate.
	* @delegate If failure, captureAsetLibraryWriteFailed(capture: Capture!, error: NSError!) is called.
	*/
private func writeImageToAssetsLibrary(image: UIImage) {
	// UIImageOrientation and ALAssetOrientation share raw values, so the
	// conversion below cannot fail for a valid image.
	let orientation: ALAssetOrientation = ALAssetOrientation(rawValue: image.imageOrientation.rawValue)!
	let library: ALAssetsLibrary = ALAssetsLibrary()
	library.writeImageToSavedPhotosAlbum(image.CGImage, orientation: orientation) { (path: NSURL!, error: NSError?) -> Void in
		if let e: NSError = error {
			self.delegate?.captureAsetLibraryWriteFailed?(self, error: e)
		} else {
			self.postAssetLibraryNotification(image)
		}
	}
}
// }
/**
	* writeVideoToAssetsLibrary
	* Writes the video file to the user's asset library.
	* @param videoURL: NSURL!
	* @delegate If successful, an asynchronous call to capture(capture: Capture!, assetLibraryDidWrite image: UIImage!) delegate.
	* @delegate If failure, captureAsetLibraryWriteFailed(capture: Capture!, error: NSError!) is called.
	*/
private func writeVideoToAssetsLibrary(videoURL: NSURL!) {
	let library: ALAssetsLibrary = ALAssetsLibrary()
	// Incompatible videos are silently skipped, matching the original behavior.
	guard library.videoAtPathIsCompatibleWithSavedPhotosAlbum(videoURL) else {
		return
	}
	library.writeVideoAtPathToSavedPhotosAlbum(videoURL) { (path: NSURL!, error: NSError?) in
		if let e: NSError = error {
			self.delegate?.captureAsetLibraryWriteFailed?(self, error: e)
		} else {
			// On success, produce a thumbnail which in turn notifies the delegate.
			self.generateThumbnailForVideoAtURL(videoURL)
		}
	}
}
//
/**
	* generateThumbnailForVideoAtURL
	* Generates a thumbnail for the video URL specified.
	* @param videoURL: NSURL!
	* @delegate An asynchronous call to capture(capture: Capture!, assetLibraryDidWrite image: UIImage!) delegate.
	*/
private func generateThumbnailForVideoAtURL(videoURL: NSURL!) {
	dispatch_async(queue) {
		do {
			let asset: AVAsset = AVAsset(URL: videoURL)
			let imageGenerator: AVAssetImageGenerator = AVAssetImageGenerator(asset: asset)
			// Width capped at 100; a 0 height lets the generator preserve aspect ratio.
			imageGenerator.maximumSize = CGSizeMake(100, 0)
			imageGenerator.appliesPreferredTrackTransform = true

			// Extract a single frame at time zero as the thumbnail.
			let imageRef: CGImageRef = try imageGenerator.copyCGImageAtTime(kCMTimeZero, actualTime: nil)
			let image: UIImage = UIImage(CGImage: imageRef)

			// Hop to the main queue before posting the delegate notification.
			dispatch_async(dispatch_get_main_queue()) {
				self.postAssetLibraryNotification(image)
			}
		} catch {} // NOTE(review): frame-extraction errors are silently dropped — consider surfacing via the delegate.
	}
}
/**
	* delegate method for capturing video file (AVCaptureFileOutputRecordingDelegate).
	* @delegate If successful, an asynchronous call to capture(capture: Capture!, assetLibraryDidWrite image: UIImage!) delegate.
	* @delegate If failure, captureMediaCaptureFailed(capture: Capture!, error: NSError!) is called.
	*/
public func captureOutput(captureOutput: AVCaptureFileOutput!, didFinishRecordingToOutputFileAtURL outputFileURL: NSURL!, fromConnections connections: [AnyObject]!, error: NSError!) {
	if nil == error {
		// NOTE(review): force-unwraps movieOutputURL — assumes the recording
		// start path always set it before this callback fires; confirm.
		// The outputFileURL parameter is ignored in favor of the stored copy.
		writeVideoToAssetsLibrary(movieOutputURL!.copy() as! NSURL)
	} else {
		delegate?.captureMediaCaptureFailed?(self, error: error)
	}
	// Clear the stored URL now that this recording session is finished.
	movieOutputURL = nil
}
}
\ No newline at end of file
////
//// Copyright (C) 2015 GraphKit, Inc. <http://graphkit.io> and other GraphKit contributors.
////
//// This program is free software: you can redistribute it and/or modify
//// it under the terms of the GNU Affero General Public License as published
//// by the Free Software Foundation, either version 3 of the License, or
//// (at your option) any later version.
////
//// This program is distributed in the hope that it will be useful,
//// but WITHOUT ANY WARRANTY; without even the implied warranty of
//// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
//// GNU Affero General Public License for more details.
////
//// You should have received a copy of the GNU Affero General Public License
//// along with this program located at the root of the software package
//// in a file called LICENSE. If not, see <http://www.gnu.org/licenses/>.
////
// //
// Copyright (C) 2015 GraphKit, Inc. <http://graphkit.io> and other GraphKit contributors. //import UIKit
//import AVFoundation
// //
// This program is free software: you can redistribute it and/or modify //@objc(PreviewDelegate)
// it under the terms of the GNU Affero General Public License as published //public protocol PreviewDelegate {
// by the Free Software Foundation, either version 3 of the License, or // optional func previewTappedToFocusAt(preview: Preview, point: CGPoint)
// (at your option) any later version. // optional func previewTappedToExposeAt(preview: Preview, point: CGPoint)
// optional func previewTappedToReset(preview: Preview, focus: UIView, exposure: UIView)
//}
// //
// This program is distributed in the hope that it will be useful, //public class Preview: UIView {
// but WITHOUT ANY WARRANTY; without even the implied warranty of // /**
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the // :name: boxBounds
// GNU Affero General Public License for more details. // :description: A static property that sets the initial size of the focusBox and exposureBox properties.
// */
// static public var boxBounds: CGRect = CGRectMake(0, 0, 150, 150)
// //
// You should have received a copy of the GNU Affero General Public License // /**
// along with this program located at the root of the software package // :name: delegate
// in a file called LICENSE. If not, see <http://www.gnu.org/licenses/>. // :description: An optional instance of PreviewDelegate to handle events that are triggered during various
// stages of engagement.
// */
// public weak var delegate: PreviewDelegate?
// //
// /**
import UIKit // :name: tapToFocusEnabled
import AVFoundation // :description: A mutator and accessor that enables and disables tap to focus gesture.
// */
/// Optional callbacks for Preview's tap gestures. All methods are optional,
/// so conformers implement only the events they care about.
@objc(PreviewDelegate)
public protocol PreviewDelegate {
	// Single tap: focus requested at `point` (converted to a capture-device point of interest).
	optional func previewTappedToFocusAt(preview: Preview, point: CGPoint)
	// Double tap: exposure requested at `point` (converted to a capture-device point of interest).
	optional func previewTappedToExposeAt(preview: Preview, point: CGPoint)
	// Two-finger double tap: focus/exposure reset completed; the indicator views are passed along.
	optional func previewTappedToReset(preview: Preview, focus: UIView, exposure: UIView)
}
// }
public class Preview: UIView {
	/**
	:name: boxBounds
	:description: A static property that sets the initial size of the focusBox and exposureBox properties.
	*/
	static public var boxBounds: CGRect = CGRectMake(0, 0, 150, 150)

	/**
	:name: delegate
	:description: An optional instance of PreviewDelegate to handle events that are triggered during various
	stages of engagement.
	*/
	public weak var delegate: PreviewDelegate?

	/**
	:name: tapToFocusEnabled
	:description: A mutator and accessor that enables and disables the tap-to-focus gesture.
	*/
	public var tapToFocusEnabled: Bool {
		get {
			return singleTapRecognizer!.enabled
		}
		set(value) {
			singleTapRecognizer!.enabled = value
		}
	}

	/**
	:name: tapToExposeEnabled
	:description: A mutator and accessor that enables and disables the tap-to-expose gesture.
	*/
	public var tapToExposeEnabled: Bool {
		get {
			return doubleTapRecognizer!.enabled
		}
		set(value) {
			doubleTapRecognizer!.enabled = value
		}
	}

	//
	//	:name:	layerClass
	//	:description:	Backs the view with an AVCaptureVideoPreviewLayer so session
	//	output renders directly into this view's layer.
	//
	override public class func layerClass() -> AnyClass {
		return AVCaptureVideoPreviewLayer.self
	}

	/**
	:name: session
	:description: A mutator and accessor for the preview AVCaptureSession value.
	*/
	public var session: AVCaptureSession {
		get {
			return (layer as! AVCaptureVideoPreviewLayer).session
		}
		set(value) {
			(layer as! AVCaptureVideoPreviewLayer).session = value
		}
	}

	/**
	:name: focusBox
	:description: An optional UIView for the focusBox animation. This is used when the
	tapToFocusEnabled property is set to true.
	*/
	public var focusBox: UIView?

	/**
	:name: exposureBox
	:description: An optional UIView for the exposureBox animation. This is used when the
	tapToExposeEnabled property is set to true.
	*/
	public var exposureBox: UIView?

	//
	//	:name:	singleTapRecognizer
	//	:description:	Gesture recognizer for single tap (focus).
	//
	private var singleTapRecognizer: UITapGestureRecognizer?

	//
	//	:name:	doubleTapRecognizer
	//	:description:	Gesture recognizer for double tap (expose).
	//
	private var doubleTapRecognizer: UITapGestureRecognizer?

	//
	//	:name:	doubleDoubleTapRecognizer
	//	:description:	Gesture recognizer for a two-finger double tap (reset).
	//
	private var doubleDoubleTapRecognizer: UITapGestureRecognizer?

	// All initializers funnel through prepareView() for common setup.
	required public init?(coder aDecoder: NSCoder) {
		super.init(coder: aDecoder)
		prepareView()
	}

	public override init(frame: CGRect) {
		super.init(frame: frame)
		prepareView()
	}

	// Convenience initializer for AutoLayout use: opts out of autoresizing-mask
	// translation before preparing the view.
	public init() {
		super.init(frame: CGRectZero)
		translatesAutoresizingMaskIntoConstraints = false
		prepareView()
	}

	//
	//	:name:	handleSingleTap
	//	:description:	Animates the focus box at the tap location and forwards the
	//	capture-device point of interest to the delegate.
	//
	internal func handleSingleTap(recognizer: UIGestureRecognizer) {
		let point: CGPoint = recognizer.locationInView(self)
		runBoxAnimationOnView(focusBox, point: point)
		delegate?.previewTappedToFocusAt?(self, point: captureDevicePointForPoint(point))
	}

	//
	//	:name:	handleDoubleTap
	//	:description:	Animates the exposure box at the tap location and forwards the
	//	capture-device point of interest to the delegate.
	//
	internal func handleDoubleTap(recognizer: UIGestureRecognizer) {
		let point: CGPoint = recognizer.locationInView(self)
		runBoxAnimationOnView(exposureBox, point: point)
		delegate?.previewTappedToExposeAt?(self, point: captureDevicePointForPoint(point))
	}

	//
	//	:name:	handleDoubleDoubleTap
	//	:description:	Runs the reset animation on a two-finger double tap.
	//
	internal func handleDoubleDoubleTap(recognizer: UIGestureRecognizer) {
		runResetAnimation()
	}

	//
	//	:name:	prepareView
	//	:description:	Common setup for the view: preview layer gravity, the three
	//	gesture recognizers, and the focus/exposure indicator views.
	//
	private func prepareView() {
		let captureLayer: AVCaptureVideoPreviewLayer = layer as! AVCaptureVideoPreviewLayer
		captureLayer.videoGravity = AVLayerVideoGravityResizeAspectFill

		singleTapRecognizer = UITapGestureRecognizer(target: self, action: "handleSingleTap:")
		singleTapRecognizer!.numberOfTapsRequired = 1

		doubleTapRecognizer = UITapGestureRecognizer(target: self, action: "handleDoubleTap:")
		doubleTapRecognizer!.numberOfTapsRequired = 2

		doubleDoubleTapRecognizer = UITapGestureRecognizer(target: self, action: "handleDoubleDoubleTap:")
		doubleDoubleTapRecognizer!.numberOfTapsRequired = 2
		doubleDoubleTapRecognizer!.numberOfTouchesRequired = 2

		addGestureRecognizer(singleTapRecognizer!)
		addGestureRecognizer(doubleTapRecognizer!)
		addGestureRecognizer(doubleDoubleTapRecognizer!)
		// A single tap waits for the double tap to fail so the two don't conflict.
		singleTapRecognizer!.requireGestureRecognizerToFail(doubleTapRecognizer!)

		focusBox = viewWithColor(.redColor())
		exposureBox = viewWithColor(.blueColor())
		addSubview(focusBox!)
		addSubview(exposureBox!)
	}

	//
	//	:name:	viewWithColor
	//	:description:	Builds a hidden, bordered indicator view sized to boxBounds
	//	with the given border color.
	//
	private func viewWithColor(color: UIColor) -> UIView {
		let view: UIView = UIView(frame: Preview.boxBounds)
		view.backgroundColor = MaterialTheme.clear.color
		view.layer.borderColor = color.CGColor
		view.layer.borderWidth = 5
		view.hidden = true
		return view
	}

	//
	//	:name:	runBoxAnimationOnView
	//	:description:	Shows the box at the tapped point, scales it down to 50%,
	//	then hides and resets it after a 0.5s delay.
	//
	private func runBoxAnimationOnView(view: UIView!, point: CGPoint) {
		view.center = point
		view.hidden = false
		UIView.animateWithDuration(0.15, delay: 0, options: .CurveEaseInOut, animations: { _ in
			view.layer.transform = CATransform3DMakeScale(0.5, 0.5, 1)
		}) { _ in
			let delayInSeconds: Double = 0.5
			let popTime: dispatch_time_t = dispatch_time(DISPATCH_TIME_NOW, Int64(delayInSeconds * Double(NSEC_PER_SEC)))
			dispatch_after(popTime, dispatch_get_main_queue()) {
				view.hidden = true
				view.transform = CGAffineTransformIdentity
			}
		}
	}

	//
	//	:name:	captureDevicePointForPoint
	//	:description:	Converts a view-space touch point into the preview layer's
	//	capture-device point of interest.
	//
	private func captureDevicePointForPoint(point: CGPoint) -> CGPoint {
		let previewLayer: AVCaptureVideoPreviewLayer = layer as! AVCaptureVideoPreviewLayer
		return previewLayer.captureDevicePointOfInterestForPoint(point)
	}

	//
	//	:name:	runResetAnimation
	//	:description:	Animates both boxes back to the layer's center, then hides
	//	and resets them and notifies the delegate. No-op when both tap gestures
	//	are disabled.
	//
	private func runResetAnimation() {
		if !tapToFocusEnabled && !tapToExposeEnabled {
			return
		}
		let previewLayer: AVCaptureVideoPreviewLayer = layer as! AVCaptureVideoPreviewLayer
		let centerPoint: CGPoint = previewLayer.pointForCaptureDevicePointOfInterest(CGPointMake(0.5, 0.5))
		focusBox!.center = centerPoint
		exposureBox!.center = centerPoint
		exposureBox!.transform = CGAffineTransformMakeScale(1.2, 1.2)
		focusBox!.hidden = false
		exposureBox!.hidden = false
		UIView.animateWithDuration(0.15, delay: 0, options: .CurveEaseInOut, animations: { _ in
			self.focusBox!.layer.transform = CATransform3DMakeScale(0.5, 0.5, 1)
			self.exposureBox!.layer.transform = CATransform3DMakeScale(0.7, 0.7, 1)
		}) { _ in
			let delayInSeconds: Double = 0.5
			let popTime: dispatch_time_t = dispatch_time(DISPATCH_TIME_NOW, Int64(delayInSeconds * Double(NSEC_PER_SEC)))
			dispatch_after(popTime, dispatch_get_main_queue()) {
				self.focusBox!.hidden = true
				self.exposureBox!.hidden = true
				self.focusBox!.transform = CGAffineTransformIdentity
				self.exposureBox!.transform = CGAffineTransformIdentity
				self.delegate?.previewTappedToReset?(self, focus: self.focusBox!, exposure: self.exposureBox!)
			}
		}
	}
}
...@@ -99,7 +99,6 @@ public class MaterialButton : UIButton { ...@@ -99,7 +99,6 @@ public class MaterialButton : UIButton {
:name: drawRect :name: drawRect
*/ */
final public override func drawRect(rect: CGRect) { final public override func drawRect(rect: CGRect) {
prepareContext(rect)
prepareBackgroundColorView() prepareBackgroundColorView()
prepareButton() prepareButton()
} }
...@@ -152,18 +151,6 @@ public class MaterialButton : UIButton { ...@@ -152,18 +151,6 @@ public class MaterialButton : UIButton {
} }
// //
// :name: prepareContext
//
private func prepareContext(rect: CGRect) {
let context = UIGraphicsGetCurrentContext()
CGContextSaveGState(context);
CGContextAddEllipseInRect(context, rect)
CGContextSetFillColorWithColor(context, MaterialTheme.clear.color.CGColor)
CGContextFillPath(context)
CGContextRestoreGState(context);
}
//
// :name: prepareBackgroundColorView // :name: prepareBackgroundColorView
// //
private func prepareBackgroundColorView() { private func prepareBackgroundColorView() {
......
...@@ -25,6 +25,7 @@ public class NavigationViewController: UIViewController { ...@@ -25,6 +25,7 @@ public class NavigationViewController: UIViewController {
public var titleLabel: UILabel? { public var titleLabel: UILabel? {
didSet { didSet {
if let v = titleLabel { if let v = titleLabel {
v.translatesAutoresizingMaskIntoConstraints = false
view.addSubview(v) view.addSubview(v)
} }
} }
...@@ -36,6 +37,7 @@ public class NavigationViewController: UIViewController { ...@@ -36,6 +37,7 @@ public class NavigationViewController: UIViewController {
public var leftButton: FlatButton? { public var leftButton: FlatButton? {
didSet { didSet {
if let v = leftButton { if let v = leftButton {
v.translatesAutoresizingMaskIntoConstraints = false
view.addSubview(v) view.addSubview(v)
} }
} }
...@@ -47,6 +49,7 @@ public class NavigationViewController: UIViewController { ...@@ -47,6 +49,7 @@ public class NavigationViewController: UIViewController {
public var rightButton: FlatButton? { public var rightButton: FlatButton? {
didSet { didSet {
if let v = rightButton { if let v = rightButton {
v.translatesAutoresizingMaskIntoConstraints = false
view.addSubview(v) view.addSubview(v)
} }
} }
......
Markdown is supported
0% or
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or sign in to comment.