Skip to content
Projects
Groups
Snippets
Help
This project
Loading...
Sign in / Register
Toggle navigation
M
Material
Overview
Overview
Details
Activity
Cycle Analytics
Repository
Repository
Files
Commits
Branches
Tags
Contributors
Graph
Compare
Charts
Issues
0
Issues
0
List
Board
Labels
Milestones
Merge Requests
0
Merge Requests
0
CI / CD
CI / CD
Pipelines
Jobs
Schedules
Charts
Wiki
Wiki
Snippets
Snippets
Members
Collapse sidebar
Close sidebar
Activity
Graph
Charts
Create a new issue
Jobs
Commits
Issue Boards
Open sidebar
Dmitriy Stepanets
Material
Commits
0f4f4ba0
Commit
0f4f4ba0
authored
Sep 18, 2015
by
Daniel Dahan
Browse files
Options
Browse Files
Download
Email Patches
Plain Diff
removed commented out AV Capture library
parent
8b825830
Hide whitespace changes
Inline
Side-by-side
Showing
2 changed files
with
916 additions
and
916 deletions
+916
-916
Source/Capture.swift
+654
-654
Source/CapturePreview.swift
+262
-262
No files found.
Source/Capture.swift
View file @
0f4f4ba0
////
//// Copyright (C) 2015 GraphKit, Inc. <http://graphkit.io> and other GraphKit contributors.
////
//// This program is free software: you can redistribute it and/or modify
//// it under the terms of the GNU Affero General Public License as published
//// by the Free Software Foundation, either version 3 of the License, or
//// (at your option) any later version.
////
//// This program is distributed in the hope that it will be useful,
//// but WITHOUT ANY WARRANTY; without even the implied warranty of
//// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
//// GNU Affero General Public License for more details.
////
//// You should have received a copy of the GNU Affero General Public License
//// along with this program located at the root of the software package
//// in a file called LICENSE. If not, see <http://www.gnu.org/licenses/>.
////
//
//
//import UIKit
// Copyright (C) 2015 GraphKit, Inc. <http://graphkit.io> and other GraphKit contributors.
//import AVFoundation
//import AssetsLibrary
//
//
//@objc(CaptureDelegate)
// This program is free software: you can redistribute it and/or modify
//public protocol CaptureDelegate {
// it under the terms of the GNU Affero General Public License as published
// optional func captureDeviceConfigurationFailed(capture: Capture, error: NSError!)
// by the Free Software Foundation, either version 3 of the License, or
// optional func captureMediaCaptureFailed(capture: Capture, error: NSError!)
// (at your option) any later version.
// optional func captureAsetLibraryWriteFailed(capture: Capture, error: NSError!)
// optional func capture(capture: Capture, assetLibraryDidWrite image: UIImage!)
//}
//
//
//public class Capture: NSObject, AVCaptureFileOutputRecordingDelegate {
// This program is distributed in the hope that it will be useful,
// //
// but WITHOUT ANY WARRANTY; without even the implied warranty of
// // :name: activeVideoInput
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
// // :description: The video input that is currently active.
// GNU Affero General Public License for more details.
// //
//
// private var activeVideoInput: AVCaptureDeviceInput?
// You should have received a copy of the GNU Affero General Public License
//
// along with this program located at the root of the software package
// //
// in a file called LICENSE. If not, see <http://www.gnu.org/licenses/>.
// // :name: imageOutput
//
// // :description: When the session is taking a photo, this is the output manager.
// //
import
UIKit
// private lazy var imageOutput: AVCaptureStillImageOutput = AVCaptureStillImageOutput()
import
AVFoundation
//
import
AssetsLibrary
// //
// // :name: movieOutput
/**
* CaptureDelegate
* Optional callbacks emitted by Capture during device configuration,
* media capture, and asset-library writes. All members are optional.
* NOTE(review): "captureAsetLibraryWriteFailed" misspells "Asset"; renaming
* would break existing conformers, so the name is left unchanged.
*/
@objc(CaptureDelegate)
public protocol CaptureDelegate {
	/// Called when locking/configuring the capture device fails.
	optional func captureDeviceConfigurationFailed(capture: Capture, error: NSError!)
	/// Called when capturing media fails.
	optional func captureMediaCaptureFailed(capture: Capture, error: NSError!)
	/// Called when writing a captured asset to the assets library fails.
	optional func captureAsetLibraryWriteFailed(capture: Capture, error: NSError!)
	/// Called after an image has been written to the assets library.
	optional func capture(capture: Capture, assetLibraryDidWrite image: UIImage!)
}
// // :description: The output URL of the movie file.
// //
public
class
Capture
:
NSObject
,
AVCaptureFileOutputRecordingDelegate
{
//
//	:name:	activeVideoInput
//	:description:	The video input that is currently active. Set in
//	prepareSession and replaced by switchCamera.
//
private var activeVideoInput: AVCaptureDeviceInput?
//
//	:name:	imageOutput
//	:description:	When the session is taking a photo, this is the output manager.
//
private lazy var imageOutput: AVCaptureStillImageOutput = AVCaptureStillImageOutput()
//
//	:name:	movieOutput
//	:description:	When the session is shooting a video, this is the output manager.
//
private lazy var movieOutput: AVCaptureMovieFileOutput = AVCaptureMovieFileOutput()
//
//	:name:	movieOutputURL
//	:description:	The output URL of the movie file; assigned in
//	startRecording before recording begins.
//
private var movieOutputURL: NSURL?
//
//	:name:	queue
//	:description:	Serial dispatch queue (nil attribute => serial) used to
//	start/stop the session and run device work off the calling thread.
//
private lazy var queue: dispatch_queue_t = {
	return dispatch_queue_create("io.graphkit.Capture", nil)
}()
//
//	:name:	CaptureAdjustingExposureContext
//	:description:	Storage whose address (&) serves as the KVO observation
//	context for the "adjustingExposure" key path.
//	NOTE(review): name violates lowerCamelCase, but it is public API, so it
//	is left unchanged.
//
public var CaptureAdjustingExposureContext: NSString?
/**
* cameraCount
* The number of available video-capture cameras on the device.
*/
public var cameraCount: Int {
	let devices: [AnyObject] = AVCaptureDevice.devicesWithMediaType(AVMediaTypeVideo)
	return devices.count
}
/**
* session
* An AVCaptureSession that manages all inputs and outputs in that session.
*/
public lazy var session: AVCaptureSession = AVCaptureSession()
/**
* delegate
* An optional instance of CaptureDelegate to handle events that are triggered during various
* stages in the session. Held weakly to avoid a retain cycle.
*/
public weak var delegate: CaptureDelegate?
/**
* prepareSession
* A helper method that prepares the session with the available inputs and outputs.
* @param preset: String, default: AVCaptureSessionPresetHigh
* @return A boolean value, true if successful, false otherwise.
*/
public func prepareSession(preset: String = AVCaptureSessionPresetHigh) -> Bool {
	session.sessionPreset = preset

	// Default camera: failure to build its input aborts preparation.
	let camera: AVCaptureDevice = AVCaptureDevice.defaultDeviceWithMediaType(AVMediaTypeVideo)
	guard let video: AVCaptureDeviceInput = try? AVCaptureDeviceInput(device: camera) else {
		return false
	}
	if session.canAddInput(video) {
		session.addInput(video)
		activeVideoInput = video
	}

	// Default microphone: failure to build its input aborts preparation.
	let microphone: AVCaptureDevice = AVCaptureDevice.defaultDeviceWithMediaType(AVMediaTypeAudio)
	guard let audio: AVCaptureDeviceInput = try? AVCaptureDeviceInput(device: microphone) else {
		return false
	}
	if session.canAddInput(audio) {
		session.addInput(audio)
	}

	// Still images are captured as JPEG.
	imageOutput.outputSettings = [AVVideoCodecKey: AVVideoCodecJPEG]
	if session.canAddOutput(imageOutput) {
		session.addOutput(imageOutput)
	}
	if session.canAddOutput(movieOutput) {
		session.addOutput(movieOutput)
	}
	return true
}
/**
* startSession
* Starts the capture session if it is not already running.
* The actual start happens asynchronously on the private queue.
*/
public func startSession() {
	guard !session.running else {
		return
	}
	dispatch_async(queue) {
		self.session.startRunning()
	}
}
/**
* stopSession
* Stops the capture session if it is currently running.
* The actual stop happens asynchronously on the private queue.
*/
public func stopSession() {
	guard session.running else {
		return
	}
	dispatch_async(queue) {
		self.session.stopRunning()
	}
}
/**
* cameraWithPosition
* Finds the video-capture device mounted at the given position.
* @param position: AVCaptureDevicePosition
* @return An AVCaptureDevice optional; nil when no camera matches.
*/
public func cameraWithPosition(position: AVCaptureDevicePosition) -> AVCaptureDevice? {
	for device in AVCaptureDevice.devicesWithMediaType(AVMediaTypeVideo) where device.position == position {
		return device as? AVCaptureDevice
	}
	return nil
}
/**
* activeCamera
* @return The active camera's video input device.
* NOTE(review): force-unwraps activeVideoInput — prepareSession must have
* succeeded before this is accessed; TODO confirm callers guarantee this.
*/
public var activeCamera: AVCaptureDevice {
	return activeVideoInput!.device
}
/**
* inactiveCamera
* @return The inactive camera's video input device, or nil when the device
* has fewer than two cameras.
*/
public var inactiveCamera: AVCaptureDevice? {
	guard 1 < cameraCount else {
		return nil
	}
	return .Back == activeCamera.position ? cameraWithPosition(.Front) : cameraWithPosition(.Back)
}
/**
* canSwitchCameras
* Checks whether the camera can be switched; requires at least two cameras.
* @return A boolean of the result, true if yes, false otherwise.
*/
public var canSwitchCameras: Bool {
	return cameraCount > 1
}
/**
* switchCamera
* If it is possible to switch cameras, switches to the opposite facing camera.
* @return A boolean of the result, true if switched, false otherwise.
* @delegate If the configuration fails, captureDeviceConfigurationFailed(capture:error:) is called.
*/
public func switchCamera() -> Bool {
	if !canSwitchCameras {
		return false
	}
	let videoDevice: AVCaptureDevice? = inactiveCamera
	let videoInput: AVCaptureDeviceInput? = try? AVCaptureDeviceInput(device: videoDevice)
	// FIX: the nil-check was inverted (`if nil == videoInput` ran the
	// reconfiguration), so a successfully created input reported failure and
	// a failed one was fed to the session. Reconfigure only on success.
	if nil != videoInput {
		session.beginConfiguration()
		session.removeInput(activeVideoInput)
		if session.canAddInput(videoInput) {
			// FIX: the new input must actually be added to the session
			// (mirrors prepareSession), not merely recorded as active.
			session.addInput(videoInput)
			activeVideoInput = videoInput
		} else {
			// Could not add the new input; restore the previous one.
			session.addInput(activeVideoInput)
		}
		session.commitConfiguration()
	} else {
		delegate?.captureDeviceConfigurationFailed?(self, error: nil)
		return false
	}
	return true
}
/**
* cameraHasFlash
* Checks whether the active camera supports flash.
* @return A boolean of the result, true if yes, false otherwise.
*/
public var cameraHasFlash: Bool {
	let device: AVCaptureDevice = activeCamera
	return device.hasFlash
}
/**
* flashMode
* A mutator and accessor for the active camera's flashMode property.
* @delegate If the configuration fails, captureDeviceConfigurationFailed(capture:error:) is called.
*/
public var flashMode: AVCaptureFlashMode {
	get {
		return activeCamera.flashMode
	}
	set(value) {
		let device: AVCaptureDevice = activeCamera
		// FIX: the setter previously compared and assigned `flashMode` (the
		// getter, i.e. the device's CURRENT mode) instead of the incoming
		// `value`, making the condition always false and the setter a no-op.
		if value != device.flashMode && device.isFlashModeSupported(value) {
			do {
				try device.lockForConfiguration()
				device.flashMode = value
				device.unlockForConfiguration()
			} catch let error1 as NSError {
				delegate?.captureDeviceConfigurationFailed?(self, error: error1)
			}
		}
	}
}
/**
* cameraHasTorch
* Checks whether the active camera supports the torch feature.
* @return A boolean of the result, true if yes, false otherwise.
*/
public var cameraHasTorch: Bool {
	return activeCamera.hasTorch
}
/**
* torchMode
* A mutator and accessor for the active camera's torchMode property.
* @delegate If the configuration fails, captureDeviceConfigurationFailed(capture:error:) is called.
*/
public var torchMode: AVCaptureTorchMode {
	get {
		return activeCamera.torchMode
	}
	set(value) {
		let device: AVCaptureDevice = activeCamera
		// FIX: the setter previously compared and assigned `torchMode` (the
		// getter, i.e. the device's CURRENT mode) instead of the incoming
		// `value`, making the condition always false and the setter a no-op.
		if value != device.torchMode && device.isTorchModeSupported(value) {
			do {
				try device.lockForConfiguration()
				device.torchMode = value
				device.unlockForConfiguration()
			} catch let error1 as NSError {
				delegate?.captureDeviceConfigurationFailed?(self, error: error1)
			}
		}
	}
}
/**
* cameraSupportsTapToFocus
* Checks whether the active camera supports tap to focus.
* @return A boolean of the result, true if yes, false otherwise.
*/
public var cameraSupportsTapToFocus: Bool {
	return activeCamera.focusPointOfInterestSupported
}
/**
* focusAtPoint
* Sets the point to focus at on the screen.
* @param point: CGPoint
* @delegate If the configuration fails, captureDeviceConfigurationFailed(capture:error:) is called.
*/
public func focusAtPoint(point: CGPoint) {
	let camera: AVCaptureDevice = activeCamera
	guard camera.focusPointOfInterestSupported && camera.isFocusModeSupported(.AutoFocus) else {
		return
	}
	do {
		try camera.lockForConfiguration()
		camera.focusPointOfInterest = point
		camera.focusMode = .AutoFocus
		camera.unlockForConfiguration()
	} catch let e as NSError {
		delegate?.captureDeviceConfigurationFailed?(self, error: e)
	}
}
/**
* cameraSupportsTapToExpose
* Checks whether the active camera supports tap to expose.
* @return A boolean of the result, true if yes, false otherwise.
*/
public var cameraSupportsTapToExpose: Bool {
	return activeCamera.exposurePointOfInterestSupported
}
/**
* exposeAtPoint
* Sets a point for exposure using continuous auto exposure.
* @param point: CGPoint
* @delegate If the configuration fails, captureDeviceConfigurationFailed(capture:error:) is called.
*/
public func exposeAtPoint(point: CGPoint) {
	let device: AVCaptureDevice = activeCamera
	let exposureMode: AVCaptureExposureMode = .ContinuousAutoExposure
	// Only proceed when the device supports both point-of-interest exposure
	// and the continuous mode being requested.
	if device.exposurePointOfInterestSupported && device.isExposureModeSupported(exposureMode) {
		var error: NSError?
		do {
			try device.lockForConfiguration()
			device.exposurePointOfInterest = point
			device.exposureMode = exposureMode
			if device.isExposureModeSupported(.Locked) {
				// Observe "adjustingExposure" so observeValueForKeyPath can
				// react once the exposure adjustment settles; the property's
				// address is the KVO context.
				// NOTE(review): the observer is added while the device is
				// locked and removed in observeValueForKeyPath — confirm it
				// cannot be added twice by repeated taps.
				device.addObserver(self, forKeyPath: "adjustingExposure", options: .New, context: &CaptureAdjustingExposureContext)
			}
			device.unlockForConfiguration()
		} catch let error1 as NSError {
			error = error1
			delegate?.captureDeviceConfigurationFailed?(self, error: error)
		}
	}
}
/**
* Overridden to handle the "adjustingExposure" observation registered in
* exposeAtPoint.
* @delegate If the configuration fails, captureDeviceConfigurationFailed(capture:error:) is called.
*/
override public func observeValueForKeyPath(keyPath: String?, ofObject object: AnyObject?, change: [String : AnyObject]?, context: UnsafeMutablePointer<Void>) {
	if context == &CaptureAdjustingExposureContext {
		let device: AVCaptureDevice = object as! AVCaptureDevice
		if device.adjustingExposure && device.isExposureModeSupported(.Locked) {
			// Exposure has settled; stop observing and reconfigure off the
			// observing thread.
			object!.removeObserver(self, forKeyPath: "adjustingExposure", context: &CaptureAdjustingExposureContext)
			dispatch_async(queue) {
				var error: NSError?
				do {
					try device.lockForConfiguration()
					// NOTE(review): nothing is changed between lock and
					// unlock; presumably device.exposureMode = .Locked was
					// intended here — TODO confirm.
					device.unlockForConfiguration()
				} catch let e as NSError {
					error = e
					self.delegate?.captureDeviceConfigurationFailed?(self, error: error)
				} catch {
					fatalError()
				}
			}
		}
	} else {
		// FIX: the `else` was attached to the inner condition, so super was
		// never invoked for KVO contexts this class does not observe —
		// violating the KVO contract. Forward unrecognized contexts to super.
		super.observeValueForKeyPath(keyPath, ofObject: object, change: change, context: context)
	}
}
/**
* resetFocusAndExposureModes
* Resets the device focus and exposure modes to their default (continuous)
* configuration, centered on the frame.
* @delegate If the configuration fails, captureDeviceConfigurationFailed(capture:error:) is called.
*/
public func resetFocusAndExposureModes() {
	let device: AVCaptureDevice = activeCamera

	let exposureMode: AVCaptureExposureMode = .ContinuousAutoExposure
	// FIX: exposure reset must be gated on exposurePointOfInterestSupported;
	// it previously checked focusPointOfInterestSupported (copy-paste bug),
	// mirroring the gating already used in cameraSupportsTapToExpose.
	let canResetExposure: Bool = device.exposurePointOfInterestSupported && device.isExposureModeSupported(exposureMode)

	let focusMode: AVCaptureFocusMode = .ContinuousAutoFocus
	let canResetFocus: Bool = device.focusPointOfInterestSupported && device.isFocusModeSupported(focusMode)

	// Point-of-interest coordinates are normalized; (0.5, 0.5) is center.
	let centerPoint: CGPoint = CGPointMake(0.5, 0.5)

	var error: NSError?
	do {
		try device.lockForConfiguration()
		if canResetFocus {
			device.focusMode = focusMode
			device.focusPointOfInterest = centerPoint
		}
		if canResetExposure {
			device.exposureMode = exposureMode
			device.exposurePointOfInterest = centerPoint
		}
		device.unlockForConfiguration()
	} catch let error1 as NSError {
		error = error1
		delegate?.captureDeviceConfigurationFailed?(self, error: error)
	}
}
/**
* captureStillImage
* Captures a still image and writes the photo to the user's asset library.
* @delegate On success, capture(capture:assetLibraryDidWrite:) is called.
* @delegate On failure, captureAsetLibraryWriteFailed(capture:error:) is called.
*/
public func captureStillImage() {
	let connection: AVCaptureConnection = imageOutput.connectionWithMediaType(AVMediaTypeVideo)
	if connection.supportsVideoOrientation {
		connection.videoOrientation = currentVideoOrientation
	}
	imageOutput.captureStillImageAsynchronouslyFromConnection(connection) { (sampleBuffer: CMSampleBufferRef?, error: NSError?) in
		guard nil != sampleBuffer else {
			// No frame was delivered; report the failure.
			self.delegate?.captureAsetLibraryWriteFailed?(self, error: error)
			return
		}
		let imageData: NSData = AVCaptureStillImageOutput.jpegStillImageNSDataRepresentation(sampleBuffer)
		let image: UIImage = UIImage(data: imageData)!
		self.writeImageToAssetsLibrary(image)
	}
}
/**
* isRecording
* Checks whether the movie output is currently recording.
* @return A boolean of the result, true if yes, false otherwise.
*/
public var isRecording: Bool {
	return movieOutput.recording
}
/**
* startRecording
* If the device is not currently recording, configures the movie connection
* and starts recording to a fresh output URL.
* @delegate If the configuration fails, captureDeviceConfigurationFailed(capture:error:) is called.
*/
public func startRecording() {
	if !isRecording {
		let connection: AVCaptureConnection = movieOutput.connectionWithMediaType(AVMediaTypeVideo)
		// Match the video orientation to the current device orientation.
		if connection.supportsVideoOrientation {
			connection.videoOrientation = currentVideoOrientation
		}
		if connection.supportsVideoStabilization {
			connection.preferredVideoStabilizationMode = .Auto
		}
		let device: AVCaptureDevice = activeCamera
		if device.smoothAutoFocusSupported {
			// NOTE(review): smooth autofocus is explicitly DISABLED here —
			// presumably to favor faster refocusing during recording; confirm
			// this is intentional rather than `= true`.
			var error: NSError?
			do {
				try device.lockForConfiguration()
				device.smoothAutoFocusEnabled = false
				device.unlockForConfiguration()
			} catch let error1 as NSError {
				error = error1
				delegate?.captureDeviceConfigurationFailed?(self, error: error)
			}
		}
		// uniqueURL (private helper) generates the destination; it is kept in
		// movieOutputURL for the duration of the recording.
		movieOutputURL = uniqueURL
		movieOutput.startRecordingToOutputFileURL(movieOutputURL, recordingDelegate: self)
	}
}
// /**
// * currentVideoOrientation
/**
* stopRecording
* If the device is currently recording, this stops the movie recording.
*/
public func stopRecording() {
	guard isRecording else {
		return
	}
	movieOutput.stopRecording()
}
// break
// case .LandscapeRight:
/**
* recordedDuration
* Retrieves the movie recorded duration.
* @return A CMTime value.
*/
public var recordedDuration: CMTime {
	return movieOutput.recordedDuration
}
// }
//
/**
* currentVideoOrientation
* Retrieves the current orientation of the device.
* @return A AVCaptureVideoOrientation value, [Portrait, LandscapeLeft, PortraitUpsideDown, LandscapeRight].
*/
public var currentVideoOrientation: AVCaptureVideoOrientation {
	// Returning directly from each case removes the force-unwrapped local
	// optional the previous implementation relied on, while preserving the
	// exact same mapping. NOTE(review): the landscape cases are swapped
	// (device .LandscapeRight -> capture .LandscapeLeft and vice versa) —
	// presumably intentional mirroring for the camera; confirm before changing.
	switch UIDevice.currentDevice().orientation {
	case .Portrait:
		return .Portrait
	case .LandscapeRight:
		return .LandscapeLeft
	case .PortraitUpsideDown:
		return .PortraitUpsideDown
	default:
		return .LandscapeRight
	}
}
// private func postAssetLibraryNotification(image: UIImage!) {
// dispatch_async(queue) {
/**
* uniqueURL
* A unique URL generated for the movie video.
* @return An optional NSURL value; nil when the temporary directory cannot be created.
*/
private var uniqueURL: NSURL? {
	let fileManager: NSFileManager = NSFileManager.defaultManager()
	let tempDirectoryTemplate: String = (NSTemporaryDirectory() as NSString).stringByAppendingPathComponent("FocusLibrary")
	do {
		try fileManager.createDirectoryAtPath(tempDirectoryTemplate, withIntermediateDirectories: true, attributes: nil)
		// The previous implementation always returned the fixed path
		// "test.mov", so consecutive recordings silently overwrote each
		// other. A UUID-based filename makes the URL actually unique, as
		// the property name and documentation promise.
		return NSURL.fileURLWithPath(tempDirectoryTemplate + "/\(NSUUID().UUIDString).mov")
	} catch {}
	// Directory creation failed; callers treat nil as "no destination".
	return nil
}
// library.writeImageToSavedPhotosAlbum(image.CGImage, orientation: ALAssetOrientation(rawValue: image.imageOrientation.rawValue)!) { (path: NSURL!, error: NSError?) -> Void in
// if nil == error {
/**
* postAssetLibraryNotification
* Fires an asynchronous call to the capture(capture: Capture!, assetLibraryDidWrite image: UIImage!) delegate.
* @param image: UIImage!
* @delegate An asynchronous call to capture(capture: Capture!, assetLibraryDidWrite image: UIImage!) delegate.
*/
private func postAssetLibraryNotification(image: UIImage!) {
	// Deliver the notification asynchronously on the capture queue.
	dispatch_async(queue) {
		if let d = self.delegate {
			d.capture?(self, assetLibraryDidWrite: image)
		}
	}
}
// * @param videoURL: NSURL!
// * @delegate If successful, an asynchronous call to capture(capture: Capture!, assetLibraryDidWrite image: UIImage!) delegate.
/**
* writeImageToAssetsLibrary
* Writes the image file to the user's asset library.
* @param image: UIImage!
* @delegate If successful, an asynchronous call to capture(capture: Capture!, assetLibraryDidWrite image: UIImage!) delegate.
* @delegate If failure, capture(capture: Capture!, assetLibraryWriteFailed error: NSError!) is called.
*/
private func writeImageToAssetsLibrary(image: UIImage) {
	let library: ALAssetsLibrary = ALAssetsLibrary()
	// Carry the UIImage orientation over to the asset orientation; the raw
	// values of the two enums line up.
	let orientation: ALAssetOrientation = ALAssetOrientation(rawValue: image.imageOrientation.rawValue)!
	library.writeImageToSavedPhotosAlbum(image.CGImage, orientation: orientation) { (assetURL: NSURL!, error: NSError?) -> Void in
		if let e: NSError = error {
			self.delegate?.captureAsetLibraryWriteFailed?(self, error: e)
		} else {
			self.postAssetLibraryNotification(image)
		}
	}
}
// * generateThumbnailForVideoAtURL
// * Generates a thumbnail for the video URL specified.
/**
* writeVideoToAssetsLibrary
* Writes the video file to the user's asset library.
* @param videoURL: NSURL!
* @delegate If successful, an asynchronous call to capture(capture: Capture!, assetLibraryDidWrite image: UIImage!) delegate.
* @delegate If failure, capture(capture: Capture!, assetLibraryWriteFailed error: NSError!) is called.
*/
private func writeVideoToAssetsLibrary(videoURL: NSURL!) {
	let library: ALAssetsLibrary = ALAssetsLibrary()
	// Incompatible videos are silently skipped, matching the prior behavior.
	guard library.videoAtPathIsCompatibleWithSavedPhotosAlbum(videoURL) else {
		return
	}
	library.writeVideoAtPathToSavedPhotosAlbum(videoURL) { (assetURL: NSURL!, error: NSError?) in
		if let e: NSError = error {
			self.delegate?.captureAsetLibraryWriteFailed?(self, error: e)
		} else {
			// On success, kick off thumbnail generation, which in turn
			// notifies the delegate with the generated image.
			self.generateThumbnailForVideoAtURL(videoURL)
		}
	}
}
// }
// }
/**
* generateThumbnailForVideoAtURL
* Generates a thumbnail for the video URL specified.
* @param videoURL: NSURL!
* @delegate An asynchronous call to capture(capture: Capture!, assetLibraryDidWrite image: UIImage!) delegate.
*/
private func generateThumbnailForVideoAtURL(videoURL: NSURL!) {
	// Thumbnail extraction is done off the main thread on the capture queue.
	dispatch_async(queue) {
		let generator: AVAssetImageGenerator = AVAssetImageGenerator(asset: AVAsset(URL: videoURL))
		// Width capped at 100pt; height 0 lets the generator keep the aspect ratio.
		generator.maximumSize = CGSizeMake(100, 0)
		generator.appliesPreferredTrackTransform = true
		do {
			let cgImage: CGImageRef = try generator.copyCGImageAtTime(kCMTimeZero, actualTime: nil)
			let thumbnail: UIImage = UIImage(CGImage: cgImage)
			// Delegate notification is posted from the main queue.
			dispatch_async(dispatch_get_main_queue()) {
				self.postAssetLibraryNotification(thumbnail)
			}
		} catch {}
		// NOTE(review): extraction failures are deliberately swallowed here,
		// matching the original best-effort behavior — no delegate callback fires.
	}
}
/**
* delegate method for capturing video file.
* @delegate If successful, an asynchronous call to capture(capture: Capture!, assetLibraryDidWrite image: UIImage!) delegate.
* @delegate If failure, capture(capture: Capture!, mediaCaptureFailed error: NSError!) is called.
*/
public func captureOutput(captureOutput: AVCaptureFileOutput!, didFinishRecordingToOutputFileAtURL outputFileURL: NSURL!, fromConnections connections: [AnyObject]!, error: NSError!) {
	// The recorded URL is cleared on every exit path once handling is done.
	defer {
		movieOutputURL = nil
	}
	if error == nil {
		writeVideoToAssetsLibrary(movieOutputURL!.copy() as! NSURL)
	} else {
		delegate?.captureMediaCaptureFailed?(self, error: error)
	}
}
}
\ No newline at end of file
Source/CapturePreview.swift
View file @
0f4f4ba0
////
//// Copyright (C) 2015 GraphKit, Inc. <http://graphkit.io> and other GraphKit contributors.
////
//// This program is free software: you can redistribute it and/or modify
//// it under the terms of the GNU Affero General Public License as published
//// by the Free Software Foundation, either version 3 of the License, or
//// (at your option) any later version.
////
//// This program is distributed in the hope that it will be useful,
//// but WITHOUT ANY WARRANTY; without even the implied warranty of
//// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
//// GNU Affero General Public License for more details.
////
//// You should have received a copy of the GNU Affero General Public License
//// along with this program located at the root of the software package
//// in a file called LICENSE. If not, see <http://www.gnu.org/licenses/>.
////
//
//
//import UIKit
// Copyright (C) 2015 GraphKit, Inc. <http://graphkit.io> and other GraphKit contributors.
//import AVFoundation
//
//
//@objc(PreviewDelegate)
// This program is free software: you can redistribute it and/or modify
//public protocol PreviewDelegate {
// it under the terms of the GNU Affero General Public License as published
// optional func previewTappedToFocusAt(preview: Preview, point: CGPoint)
// by the Free Software Foundation, either version 3 of the License, or
// optional func previewTappedToExposeAt(preview: Preview, point: CGPoint)
// (at your option) any later version.
// optional func previewTappedToReset(preview: Preview, focus: UIView, exposure: UIView)
//}
//
//
//public class Preview: UIView {
// This program is distributed in the hope that it will be useful,
// /**
// but WITHOUT ANY WARRANTY; without even the implied warranty of
// :name: boxBounds
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
// :description: A static property that sets the initial size of the focusBox and exposureBox properties.
// GNU Affero General Public License for more details.
// */
//
// static public var boxBounds: CGRect = CGRectMake(0, 0, 150, 150)
// You should have received a copy of the GNU Affero General Public License
//
// along with this program located at the root of the software package
// /**
// in a file called LICENSE. If not, see <http://www.gnu.org/licenses/>.
// :name: delegate
//
// :description: An optional instance of PreviewDelegate to handle events that are triggered during various
// stages of engagement.
import
UIKit
// */
import
AVFoundation
// public weak var delegate: PreviewDelegate?
//
//
// :name: PreviewDelegate
// :description: Optional callbacks for the tap gestures recognized by a
// Preview view: single tap (focus), double tap (expose), and a two-finger
// double tap (reset). All methods are optional.
//
@objc(PreviewDelegate)
public protocol PreviewDelegate {
	// Fired on a single tap; the point has been converted to the capture
	// device's point-of-interest coordinate space before delivery.
	optional func previewTappedToFocusAt(preview: Preview, point: CGPoint)
	// Fired on a double tap; the point has been converted to the capture
	// device's point-of-interest coordinate space before delivery.
	optional func previewTappedToExposeAt(preview: Preview, point: CGPoint)
	// Fired after the reset animation completes; receives the focus and
	// exposure box views so observers can customize them.
	optional func previewTappedToReset(preview: Preview, focus: UIView, exposure: UIView)
}
//
// :name: Preview
// :description: A UIView backed by an AVCaptureVideoPreviewLayer that shows a
// capture session and translates tap gestures into focus/expose/reset events
// for a PreviewDelegate.
//
public class Preview: UIView {
	/**
	:name: boxBounds
	:description: A static property that sets the initial size of the focusBox and exposureBox properties.
	*/
	static public var boxBounds: CGRect = CGRectMake(0, 0, 150, 150)

	/**
	:name: delegate
	:description: An optional instance of PreviewDelegate to handle events that are triggered during various
	stages of engagement.
	*/
	public weak var delegate: PreviewDelegate?

	/**
	:name: tapToFocusEnabled
	:description: A mutator and accessor that enables and disables tap to focus gesture.
	*/
	public var tapToFocusEnabled: Bool {
		get {
			return singleTapRecognizer!.enabled
		}
		set(value) {
			singleTapRecognizer!.enabled = value
		}
	}

	/**
	:name: tapToExposeEnabled
	:description: A mutator and accessor that enables and disables tap to expose gesture.
	*/
	public var tapToExposeEnabled: Bool {
		get {
			return doubleTapRecognizer!.enabled
		}
		set(value) {
			doubleTapRecognizer!.enabled = value
		}
	}

	//
	// override for layerClass — backs this view with an
	// AVCaptureVideoPreviewLayer instead of a plain CALayer.
	//
	override public class func layerClass() -> AnyClass {
		return AVCaptureVideoPreviewLayer.self
	}

	/**
	:name: session
	:description: A mutator and accessor for the preview AVCaptureSession value.
	*/
	public var session: AVCaptureSession {
		get {
			return (layer as! AVCaptureVideoPreviewLayer).session
		}
		set(value) {
			(layer as! AVCaptureVideoPreviewLayer).session = value
		}
	}

	/**
	:name: focusBox
	:description: An optional UIView for the focusBox animation. This is used when the
	tapToFocusEnabled property is set to true.
	*/
	public var focusBox: UIView?

	/**
	:name: exposureBox
	:description: An optional UIView for the exposureBox animation. This is used when the
	tapToExposeEnabled property is set to true.
	*/
	public var exposureBox: UIView?

	//
	// :name: singleTapRecognizer
	// :description: Gesture recognizer for single tap (focus).
	//
	private var singleTapRecognizer: UITapGestureRecognizer?

	//
	// :name: doubleTapRecognizer
	// :description: Gesture recognizer for double tap (expose).
	//
	private var doubleTapRecognizer: UITapGestureRecognizer?

	//
	// :name: doubleDoubleTapRecognizer
	// :description: Gesture recognizer for a two-finger double tap (reset).
	//
	private var doubleDoubleTapRecognizer: UITapGestureRecognizer?

	required public init?(coder aDecoder: NSCoder) {
		super.init(coder: aDecoder)
		prepareView()
	}

	public override init(frame: CGRect) {
		super.init(frame: frame)
		prepareView()
	}

	// Designated convenience-style initializer used for Auto Layout:
	// disables autoresizing-mask translation before preparing the view.
	public init() {
		super.init(frame: CGRectZero)
		translatesAutoresizingMaskIntoConstraints = false
		prepareView()
	}

	//
	// :name: handleSingleTap
	// :description: Runs the focus-box animation at the tapped point and
	// notifies the delegate with the device point of interest.
	//
	internal func handleSingleTap(recognizer: UIGestureRecognizer) {
		let point: CGPoint = recognizer.locationInView(self)
		runBoxAnimationOnView(focusBox, point: point)
		delegate?.previewTappedToFocusAt?(self, point: captureDevicePointForPoint(point))
	}

	//
	// :name: handleDoubleTap
	// :description: Runs the exposure-box animation at the tapped point and
	// notifies the delegate with the device point of interest.
	//
	internal func handleDoubleTap(recognizer: UIGestureRecognizer) {
		let point: CGPoint = recognizer.locationInView(self)
		runBoxAnimationOnView(exposureBox, point: point)
		delegate?.previewTappedToExposeAt?(self, point: captureDevicePointForPoint(point))
	}

	//
	// :name: handleDoubleDoubleTap
	// :description: Triggers the reset animation for both boxes.
	//
	internal func handleDoubleDoubleTap(recognizer: UIGestureRecognizer) {
		runResetAnimation()
	}

	//
	// :name: prepareView
	// :description: Common setup for view: preview-layer gravity, the three
	// tap recognizers (single tap waits for double tap to fail), and the
	// focus/exposure box subviews.
	//
	private func prepareView() {
		let captureLayer: AVCaptureVideoPreviewLayer = layer as! AVCaptureVideoPreviewLayer
		captureLayer.videoGravity = AVLayerVideoGravityResizeAspectFill

		// String selectors must keep matching the handler method names above.
		singleTapRecognizer = UITapGestureRecognizer(target: self, action: "handleSingleTap:")
		singleTapRecognizer!.numberOfTapsRequired = 1

		doubleTapRecognizer = UITapGestureRecognizer(target: self, action: "handleDoubleTap:")
		doubleTapRecognizer!.numberOfTapsRequired = 2

		doubleDoubleTapRecognizer = UITapGestureRecognizer(target: self, action: "handleDoubleDoubleTap:")
		doubleDoubleTapRecognizer!.numberOfTapsRequired = 2
		doubleDoubleTapRecognizer!.numberOfTouchesRequired = 2

		addGestureRecognizer(singleTapRecognizer!)
		addGestureRecognizer(doubleTapRecognizer!)
		addGestureRecognizer(doubleDoubleTapRecognizer!)
		// A single tap fires only after the double tap fails to match.
		singleTapRecognizer!.requireGestureRecognizerToFail(doubleTapRecognizer!)

		focusBox = viewWithColor(.redColor())
		exposureBox = viewWithColor(.blueColor())
		addSubview(focusBox!)
		addSubview(exposureBox!)
	}

	//
	// :name: viewWithColor
	// :description: Initializes a hidden, bordered UIView with a set UIColor,
	// sized by Preview.boxBounds.
	//
	private func viewWithColor(color: UIColor) -> UIView {
		let view: UIView = UIView(frame: Preview.boxBounds)
		// NOTE(review): MaterialTheme.clear.color is defined elsewhere in the
		// library; presumably a clear color — confirm against MaterialTheme.
		view.backgroundColor = MaterialTheme.clear.color
		view.layer.borderColor = color.CGColor
		view.layer.borderWidth = 5
		view.hidden = true
		return view
	}

	//
	// :name: runBoxAnimationOnView
	// :description: Runs the animation used for focusBox and exposureBox on single and double
	// taps respectively at a given point: shrink to half size, then hide and
	// reset the transform after a 0.5s delay.
	//
	private func runBoxAnimationOnView(view: UIView!, point: CGPoint) {
		view.center = point
		view.hidden = false
		UIView.animateWithDuration(0.15, delay: 0, options: .CurveEaseInOut, animations: { _ in
			view.layer.transform = CATransform3DMakeScale(0.5, 0.5, 1)
		}) { _ in
			let delayInSeconds: Double = 0.5
			let popTime: dispatch_time_t = dispatch_time(DISPATCH_TIME_NOW, Int64(delayInSeconds * Double(NSEC_PER_SEC)))
			dispatch_after(popTime, dispatch_get_main_queue()) {
				view.hidden = true
				view.transform = CGAffineTransformIdentity
			}
		}
	}

	//
	// :name: captureDevicePointForPoint
	// :description: Interprets the correct point from touch to preview layer,
	// converting view coordinates into the device's point of interest.
	//
	private func captureDevicePointForPoint(point: CGPoint) -> CGPoint {
		let previewLayer: AVCaptureVideoPreviewLayer = layer as! AVCaptureVideoPreviewLayer
		return previewLayer.captureDevicePointOfInterestForPoint(point)
	}

	//
	// :name: runResetAnimation
	// :description: Executes the reset animation for focus and exposure: both
	// boxes animate at the view center, then hide, reset their transforms, and
	// notify the delegate after a 0.5s delay. No-op when both gestures are disabled.
	//
	private func runResetAnimation() {
		if !tapToFocusEnabled && !tapToExposeEnabled {
			return
		}
		let previewLayer: AVCaptureVideoPreviewLayer = layer as! AVCaptureVideoPreviewLayer
		// Center of the preview in view coordinates (device point 0.5, 0.5).
		let centerPoint: CGPoint = previewLayer.pointForCaptureDevicePointOfInterest(CGPointMake(0.5, 0.5))
		focusBox!.center = centerPoint
		exposureBox!.center = centerPoint
		exposureBox!.transform = CGAffineTransformMakeScale(1.2, 1.2)
		focusBox!.hidden = false
		exposureBox!.hidden = false
		UIView.animateWithDuration(0.15, delay: 0, options: .CurveEaseInOut, animations: { _ in
			self.focusBox!.layer.transform = CATransform3DMakeScale(0.5, 0.5, 1)
			self.exposureBox!.layer.transform = CATransform3DMakeScale(0.7, 0.7, 1)
		}) { _ in
			let delayInSeconds: Double = 0.5
			let popTime: dispatch_time_t = dispatch_time(DISPATCH_TIME_NOW, Int64(delayInSeconds * Double(NSEC_PER_SEC)))
			dispatch_after(popTime, dispatch_get_main_queue()) {
				self.focusBox!.hidden = true
				self.exposureBox!.hidden = true
				self.focusBox!.transform = CGAffineTransformIdentity
				self.exposureBox!.transform = CGAffineTransformIdentity
				self.delegate?.previewTappedToReset?(self, focus: self.focusBox!, exposure: self.exposureBox!)
			}
		}
	}
}
Write
Preview
Markdown
is supported
0%
Try again
or
attach a new file
Attach a file
Cancel
You are about to add
0
people
to the discussion. Proceed with caution.
Finish editing this message first!
Cancel
Please
register
or
sign in
to comment