Skip to content
Projects
Groups
Snippets
Help
This project
Loading...
Sign in / Register
Toggle navigation
M
Material
Overview
Overview
Details
Activity
Cycle Analytics
Repository
Repository
Files
Commits
Branches
Tags
Contributors
Graph
Compare
Charts
Issues
0
Issues
0
List
Board
Labels
Milestones
Merge Requests
0
Merge Requests
0
CI / CD
CI / CD
Pipelines
Jobs
Schedules
Charts
Wiki
Wiki
Snippets
Snippets
Members
Collapse sidebar
Close sidebar
Activity
Graph
Charts
Create a new issue
Jobs
Commits
Issue Boards
Open sidebar
Dmitriy Stepanets
Material
Commits
8b825830
Commit
8b825830
authored
Sep 18, 2015
by
Daniel Dahan
Browse files
Options
Browse Files
Download
Email Patches
Plain Diff
autolayout adjustments made for convenience
parent
d01fc27f
Hide whitespace changes
Inline
Side-by-side
Showing
4 changed files
with
919 additions
and
929 deletions
+919
-929
Source/Capture.swift
+654
-654
Source/CapturePreview.swift
+262
-262
Source/MaterialButton.swift
+0
-13
Source/NavigationViewController.swift
+3
-0
No files found.
Source/Capture.swift
View file @
8b825830
////
//// Copyright (C) 2015 GraphKit, Inc. <http://graphkit.io> and other GraphKit contributors.
////
//// This program is free software: you can redistribute it and/or modify
//// it under the terms of the GNU Affero General Public License as published
//// by the Free Software Foundation, either version 3 of the License, or
//// (at your option) any later version.
////
//// This program is distributed in the hope that it will be useful,
//// but WITHOUT ANY WARRANTY; without even the implied warranty of
//// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
//// GNU Affero General Public License for more details.
////
//// You should have received a copy of the GNU Affero General Public License
//// along with this program located at the root of the software package
//// in a file called LICENSE. If not, see <http://www.gnu.org/licenses/>.
////
//
//
// Copyright (C) 2015 GraphKit, Inc. <http://graphkit.io> and other GraphKit contributors.
//import UIKit
//import AVFoundation
//import AssetsLibrary
//
//
// This program is free software: you can redistribute it and/or modify
//@objc(CaptureDelegate)
// it under the terms of the GNU Affero General Public License as published
//public protocol CaptureDelegate {
// by the Free Software Foundation, either version 3 of the License, or
// optional func captureDeviceConfigurationFailed(capture: Capture, error: NSError!)
// (at your option) any later version.
// optional func captureMediaCaptureFailed(capture: Capture, error: NSError!)
// optional func captureAsetLibraryWriteFailed(capture: Capture, error: NSError!)
// optional func capture(capture: Capture, assetLibraryDidWrite image: UIImage!)
//}
//
//
// This program is distributed in the hope that it will be useful,
//public class Capture: NSObject, AVCaptureFileOutputRecordingDelegate {
// but WITHOUT ANY WARRANTY; without even the implied warranty of
// //
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
// // :name: activeVideoInput
// GNU Affero General Public License for more details.
// // :description: The video input that is currently active.
//
// //
// You should have received a copy of the GNU Affero General Public License
// private var activeVideoInput: AVCaptureDeviceInput?
// along with this program located at the root of the software package
//
// in a file called LICENSE. If not, see <http://www.gnu.org/licenses/>.
// //
//
// // :name: imageOutput
// // :description: When the session is taking a photo, this is the output manager.
import
UIKit
// //
import
AVFoundation
// private lazy var imageOutput: AVCaptureStillImageOutput = AVCaptureStillImageOutput()
import
AssetsLibrary
//
// //
/// Delegate protocol for Capture events. All methods are optional and are
/// invoked as the session is configured, captures media, or writes results
/// to the assets library.
@objc(CaptureDelegate)
public protocol CaptureDelegate {
	/// Called when configuring the capture device fails (e.g. locking the
	/// device for configuration throws). `error` may be nil when no NSError
	/// is available.
	optional func captureDeviceConfigurationFailed(capture: Capture, error: NSError!)

	/// Called when capturing media fails.
	optional func captureMediaCaptureFailed(capture: Capture, error: NSError!)

	/// Called when writing captured media to the assets library fails.
	/// NOTE(review): "Aset" is a misspelling of "Asset", preserved here
	/// because renaming would break the public API for existing conformers.
	optional func captureAsetLibraryWriteFailed(capture: Capture, error: NSError!)

	/// Called after a captured image has been written to the assets library.
	optional func capture(capture: Capture, assetLibraryDidWrite image: UIImage!)
}
// // :name: movieOutputURL
// // :description: The output URL of the movie file.
public
class
Capture
:
NSObject
,
AVCaptureFileOutputRecordingDelegate
{
// //
//
// private var movieOutputURL: NSURL?
// :name: activeVideoInput
//
// :description: The video input that is currently active.
// //
//
// // :name: queue
	//
	// :name: activeVideoInput
	// :description: The video input that is currently active in the session.
	//
	private var activeVideoInput: AVCaptureDeviceInput?

	//
	// :name: imageOutput
	// :description: When the session is taking a photo, this is the output manager.
	//
	private lazy var imageOutput: AVCaptureStillImageOutput = AVCaptureStillImageOutput()

	//
	// :name: movieOutput
	// :description: When the session is shooting a video, this is the output manager.
	//
	private lazy var movieOutput: AVCaptureMovieFileOutput = AVCaptureMovieFileOutput()

	//
	// :name: movieOutputURL
	// :description: The output URL of the movie file while recording.
	//
	private var movieOutputURL: NSURL?

	//
	// :name: queue
	// :description: Async job queue used for session start/stop and KVO follow-up work.
	//
	private lazy var queue: dispatch_queue_t = {
		return dispatch_queue_create("io.graphkit.Capture", nil)
	}()

	//
	// :name: CaptureAdjustingExposureContext
	// :description: Used as the KVO observation context when watching
	// "adjustingExposure" (its address is passed, not its value).
	//
	public var CaptureAdjustingExposureContext: NSString?
//
// /**
/**
// * prepareSession
* cameraCount
// * A helper method that prepares the session with the various available inputs and outputs.
* The number of available cameras on the device.
// * @param preset: String, default: AVCaptureSessionPresetHigh
*/
// * @return A boolean value, true if successful, false otherwise.
	/**
	* cameraCount
	* The number of available video capture devices on the hardware.
	*/
	public var cameraCount: Int {
		return AVCaptureDevice.devicesWithMediaType(AVMediaTypeVideo).count
	}

	/**
	* session
	* An AVCaptureSession that manages all inputs and outputs in that session.
	* Created lazily on first access.
	*/
	public lazy var session: AVCaptureSession = AVCaptureSession()

	/**
	* delegate
	* An optional instance of CaptureDelegate to handle events that are triggered during various
	* stages in the session. Held weakly to avoid retain cycles.
	*/
	public weak var delegate: CaptureDelegate?
// }
//
/**
// let audioDevice: AVCaptureDevice = AVCaptureDevice.defaultDeviceWithMediaType(AVMediaTypeAudio)
* prepareSession
// let audioInput: AVCaptureDeviceInput? = try? AVCaptureDeviceInput(device: audioDevice)
* A helper method that prepares the session with the various available inputs and outputs.
//
* @param preset: String, default: AVCaptureSessionPresetHigh
// if nil == audioInput {
* @return A boolean value, true if successful, false otherwise.
// return false
*/
// }
	/**
	* prepareSession
	* A helper method that prepares the session with the various available inputs and outputs.
	* Adds the default video and audio inputs, then the still-image and movie outputs.
	* @param preset: String, default: AVCaptureSessionPresetHigh
	* @return A boolean value, true if successful, false otherwise.
	*/
	public func prepareSession(preset: String = AVCaptureSessionPresetHigh) -> Bool {
		session.sessionPreset = preset

		// setup default camera device
		let videoDevice: AVCaptureDevice = AVCaptureDevice.defaultDeviceWithMediaType(AVMediaTypeVideo)
		let videoInput: AVCaptureDeviceInput? = try? AVCaptureDeviceInput(device: videoDevice)

		// Bail out early if the video input could not be created.
		if nil == videoInput {
			return false
		}

		if session.canAddInput(videoInput) {
			session.addInput(videoInput)
			activeVideoInput = videoInput
		}

		let audioDevice: AVCaptureDevice = AVCaptureDevice.defaultDeviceWithMediaType(AVMediaTypeAudio)
		let audioInput: AVCaptureDeviceInput? = try? AVCaptureDeviceInput(device: audioDevice)

		// Audio is required as well; fail if it cannot be created.
		if nil == audioInput {
			return false
		}

		if session.canAddInput(audioInput) {
			session.addInput(audioInput)
		}

		// Stills are captured as JPEG.
		imageOutput.outputSettings = [AVVideoCodecKey: AVVideoCodecJPEG]
		if session.canAddOutput(imageOutput) {
			session.addOutput(imageOutput)
		}

		if session.canAddOutput(movieOutput) {
			session.addOutput(movieOutput)
		}

		return true
	}
// }
// }
/**
// }
* startSession
//
* Starts the capture session if it is not already running.
// /**
*/
// * cameraWithPosition
public
func
startSession
()
{
// * @param position: AVCaptureDevicePosition
if
!
session
.
running
{
// * @return An AVCaptureDevice optional.
dispatch_async
(
queue
)
{
// */
self
.
session
.
startRunning
()
// public func cameraWithPosition(position: AVCaptureDevicePosition) -> AVCaptureDevice? {
}
// for device in AVCaptureDevice.devicesWithMediaType(AVMediaTypeVideo) {
}
// if position == device.position {
}
// return device as? AVCaptureDevice
// }
/**
// }
* stopSession
// return nil
* Stops the capture session if it is already running.
// }
*/
//
public
func
stopSession
()
{
// /**
if
session
.
running
{
// * activeCamera
dispatch_async
(
queue
)
{
// * @return The active cameras video input device.
self
.
session
.
stopRunning
()
// */
}
// public var activeCamera: AVCaptureDevice {
}
// get {
}
// return activeVideoInput!.device
// }
/**
// }
* cameraWithPosition
//
* @param position: AVCaptureDevicePosition
// /**
* @return An AVCaptureDevice optional.
// * inactiveCamera
*/
// * @return The inactive cameras video input device.
public
func
cameraWithPosition
(
position
:
AVCaptureDevicePosition
)
->
AVCaptureDevice
?
{
// */
for
device
in
AVCaptureDevice
.
devicesWithMediaType
(
AVMediaTypeVideo
)
{
// public var inactiveCamera: AVCaptureDevice? {
if
position
==
device
.
position
{
// get {
return
device
as?
AVCaptureDevice
// var device: AVCaptureDevice?
}
// if 1 < cameraCount {
}
// device = activeCamera.position == .Back ? cameraWithPosition(.Front) : cameraWithPosition(.Back)
return
nil
// }
}
// return device
// }
/**
// }
* activeCamera
//
* @return The active cameras video input device.
// /**
*/
// * canSwitchCameras
public
var
activeCamera
:
AVCaptureDevice
{
// * Checks whether the camera can be switched. This would require at least two cameras.
get
{
// * @return A boolean of the result, true if yes, false otherwise.
return
activeVideoInput
!.
device
// */
}
// public var canSwitchCameras: Bool {
}
// return 1 < cameraCount
// }
/**
//
* inactiveCamera
// /**
* @return The inactive cameras video input device.
// * switchCamera
*/
// * If it is possible to switch cameras, then the camera will be switched from the opposite facing camera.
public
var
inactiveCamera
:
AVCaptureDevice
?
{
// * @return A boolean of the result, true if switched, false otherwise.
get
{
// * @delegate If the configuration fails, the capture(capture: Capture!, deviceConfigurationFailed error: NSError!) is called.
var
device
:
AVCaptureDevice
?
// */
if
1
<
cameraCount
{
// public func switchCamera() -> Bool {
device
=
activeCamera
.
position
==
.
Back
?
cameraWithPosition
(
.
Front
)
:
cameraWithPosition
(
.
Back
)
// if !canSwitchCameras {
}
// return false
return
device
// }
}
//
}
// let videoDevice: AVCaptureDevice? = inactiveCamera
// let videoInput: AVCaptureDeviceInput? = try? AVCaptureDeviceInput(device: videoDevice)
/**
//
* canSwitchCameras
// if nil == videoInput {
* Checks whether the camera can be switched. This would require at least two cameras.
// session.beginConfiguration()
* @return A boolean of the result, true if yes, false otherwise.
// session.removeInput(activeVideoInput)
*/
//
public
var
canSwitchCameras
:
Bool
{
// if session.canAddInput(videoInput) {
return
1
<
cameraCount
// activeVideoInput = videoInput
}
// } else {
// session.addInput(activeVideoInput)
/**
// }
* switchCamera
//
* If it is possible to switch cameras, then the camera will be switched from the opposite facing camera.
// session.commitConfiguration()
* @return A boolean of the result, true if switched, false otherwise.
// } else {
* @delegate If the configuration fails, the capture(capture: Capture!, deviceConfigurationFailed error: NSError!) is called.
// delegate?.captureDeviceConfigurationFailed?(self, error: nil)
*/
// return false
public
func
switchCamera
()
->
Bool
{
// }
if
!
canSwitchCameras
{
//
return
false
// return true
}
// }
//
let
videoDevice
:
AVCaptureDevice
?
=
inactiveCamera
// /**
let
videoInput
:
AVCaptureDeviceInput
?
=
try
?
AVCaptureDeviceInput
(
device
:
videoDevice
)
// * cameraHasFlash
// * Checks whether the camera supports flash.
if
nil
==
videoInput
{
// * @return A boolean of the result, true if yes, false otherwise.
session
.
beginConfiguration
()
// */
session
.
removeInput
(
activeVideoInput
)
// public var cameraHasFlash: Bool {
// return activeCamera.hasFlash
if
session
.
canAddInput
(
videoInput
)
{
// }
activeVideoInput
=
videoInput
//
}
else
{
// /**
session
.
addInput
(
activeVideoInput
)
// * flashMode
}
// * A mutator and accessor for the flashMode property.
// * @delegate If the configuration fails, the capture(capture: Capture!, deviceConfigurationFailed error: NSError!) is called.
session
.
commitConfiguration
()
// */
}
else
{
// public var flashMode: AVCaptureFlashMode {
delegate
?
.
captureDeviceConfigurationFailed
?(
self
,
error
:
nil
)
// get {
return
false
// return activeCamera.flashMode
}
// }
// set(value) {
return
true
// let device: AVCaptureDevice = activeCamera
}
// if flashMode != device.flashMode && device.isFlashModeSupported(flashMode) {
// var error: NSError?
/**
// do {
* cameraHasFlash
// try device.lockForConfiguration()
* Checks whether the camera supports flash.
// device.flashMode = flashMode
* @return A boolean of the result, true if yes, false otherwise.
// device.unlockForConfiguration()
*/
// } catch let error1 as NSError {
public
var
cameraHasFlash
:
Bool
{
// error = error1
return
activeCamera
.
hasFlash
// delegate?.captureDeviceConfigurationFailed?(self, error: error)
}
// }
// }
/**
// }
* flashMode
// }
* A mutator and accessor for the flashMode property.
//
* @delegate If the configuration fails, the capture(capture: Capture!, deviceConfigurationFailed error: NSError!) is called.
// /**
*/
// * cameraHasTorch
public
var
flashMode
:
AVCaptureFlashMode
{
// * Checks whether the device supports torch feature.
get
{
// * @return A boolean of the result, true if yes, false otherwise.
return
activeCamera
.
flashMode
// */
}
// public var cameraHasTorch: Bool {
set
(
value
)
{
// get {
let
device
:
AVCaptureDevice
=
activeCamera
// return activeCamera.hasTorch
if
flashMode
!=
device
.
flashMode
&&
device
.
isFlashModeSupported
(
flashMode
)
{
// }
var
error
:
NSError
?
// }
do
{
//
try
device
.
lockForConfiguration
()
// /**
device
.
flashMode
=
flashMode
// * torchMode
device
.
unlockForConfiguration
()
// * A mutator and accessor for the torchMode property.
}
catch
let
error1
as
NSError
{
// * @delegate If the configuration fails, the capture(capture: Capture!, deviceConfigurationFailed error: NSError!) is called.
error
=
error1
// */
delegate
?
.
captureDeviceConfigurationFailed
?(
self
,
error
:
error
)
// public var torchMode: AVCaptureTorchMode {
}
// get {
}
// return activeCamera.torchMode
}
// }
}
// set(value) {
// let device: AVCaptureDevice = activeCamera
/**
// if torchMode != device.torchMode && device.isTorchModeSupported(torchMode) {
* cameraHasTorch
// var error: NSError?
* Checks whether the device supports torch feature.
// do {
* @return A boolean of the result, true if yes, false otherwise.
// try device.lockForConfiguration()
*/
// device.torchMode = torchMode
public
var
cameraHasTorch
:
Bool
{
// device.unlockForConfiguration()
get
{
// } catch let error1 as NSError {
return
activeCamera
.
hasTorch
// error = error1
}
// delegate?.captureDeviceConfigurationFailed?(self, error: error)
}
// }
// }
/**
// }
* torchMode
// }
* A mutator and accessor for the torchMode property.
//
* @delegate If the configuration fails, the capture(capture: Capture!, deviceConfigurationFailed error: NSError!) is called.
// /**
*/
// * cameraSupportsTapToFocus
public
var
torchMode
:
AVCaptureTorchMode
{
// * Checks whether the device supports tap to focus.
get
{
// * @return A boolean of the result, true if yes, false otherwise.
return
activeCamera
.
torchMode
// */
}
// public var cameraSupportsTapToFocus: Bool {
set
(
value
)
{
// get {
let
device
:
AVCaptureDevice
=
activeCamera
// return activeCamera.focusPointOfInterestSupported
if
torchMode
!=
device
.
torchMode
&&
device
.
isTorchModeSupported
(
torchMode
)
{
// }
var
error
:
NSError
?
// }
do
{
//
try
device
.
lockForConfiguration
()
// /**
device
.
torchMode
=
torchMode
// * focusAtpoint
device
.
unlockForConfiguration
()
// * Sets the point to focus at on the screen.
}
catch
let
error1
as
NSError
{
// * @param point: CGPoint
error
=
error1
// * @delegate If the configuration fails, the capture(capture: Capture!, deviceConfigurationFailed error: NSError!) is called.
delegate
?
.
captureDeviceConfigurationFailed
?(
self
,
error
:
error
)
// */
}
// public func focusAtPoint(point: CGPoint) {
}
// let device: AVCaptureDevice = activeCamera
}
// if device.focusPointOfInterestSupported && device.isFocusModeSupported(.AutoFocus) {
}
// var error: NSError?
// do {
/**
// try device.lockForConfiguration()
* cameraSupportsTapToFocus
// device.focusPointOfInterest = point
* Checks whether the device supports tap to focus.
// device.focusMode = .AutoFocus
* @return A boolean of the result, true if yes, false otherwise.
// device.unlockForConfiguration()
*/
// } catch let error1 as NSError {
public
var
cameraSupportsTapToFocus
:
Bool
{
// error = error1
get
{
// delegate?.captureDeviceConfigurationFailed?(self, error: error)
return
activeCamera
.
focusPointOfInterestSupported
// }
}
// }
}
// }
//
/**
// /**
* focusAtpoint
// * cameraSupportsTapToExpose
* Sets the point to focus at on the screen.
// * Checks whether the device supports tap to expose.
* @param point: CGPoint
// * @return A boolean of the result, true if yes, false otherwise.
* @delegate If the configuration fails, the capture(capture: Capture!, deviceConfigurationFailed error: NSError!) is called.
// */
*/
// public var cameraSupportsTapToExpose: Bool {
	/**
	* focusAtPoint
	* Sets the point to focus at on the screen and switches to auto focus.
	* Silently does nothing when the device does not support point focus.
	* @param point: CGPoint — expected in device coordinate space (0..1); TODO confirm callers convert from view coordinates.
	* @delegate If the configuration fails, the capture(capture: Capture!, deviceConfigurationFailed error: NSError!) is called.
	*/
	public func focusAtPoint(point: CGPoint) {
		let device: AVCaptureDevice = activeCamera
		if device.focusPointOfInterestSupported && device.isFocusModeSupported(.AutoFocus) {
			var error: NSError?
			do {
				// The device must be locked before the focus configuration changes.
				try device.lockForConfiguration()
				device.focusPointOfInterest = point
				device.focusMode = .AutoFocus
				device.unlockForConfiguration()
			} catch let error1 as NSError {
				error = error1
				delegate?.captureDeviceConfigurationFailed?(self, error: error)
			}
		}
	}
// if device.exposurePointOfInterestSupported && device.isExposureModeSupported(exposureMode) {
// var error: NSError?
/**
// do {
* cameraSupportsTapToExpose
// try device.lockForConfiguration()
* Checks whether the device supports tap to expose.
// device.exposurePointOfInterest = point
* @return A boolean of the result, true if yes, false otherwise.
// device.exposureMode = exposureMode
*/
//
public
var
cameraSupportsTapToExpose
:
Bool
{
// if device.isExposureModeSupported(.Locked) {
get
{
// device.addObserver(self, forKeyPath: "adjustingExposure", options: .New, context: &CaptureAdjustingExposureContext)
return
activeCamera
.
exposurePointOfInterestSupported
// }
}
// device.unlockForConfiguration()
}
// } catch let error1 as NSError {
// error = error1
/**
// delegate?.captureDeviceConfigurationFailed?(self, error: error)
* exposeAtPoint
// }
* Sets a point for exposure.
// }
* @delegate If the configuration fails, the capture(capture: Capture!, deviceConfigurationFailed error: NSError!) is called.
// }
*/
//
	/**
	* exposeAtPoint
	* Sets a point for exposure using continuous auto exposure. If the device
	* can lock exposure, a KVO observer on "adjustingExposure" is registered so
	* the adjustment can be reacted to (see observeValueForKeyPath).
	* @param point: CGPoint
	* @delegate If the configuration fails, the capture(capture: Capture!, deviceConfigurationFailed error: NSError!) is called.
	*/
	public func exposeAtPoint(point: CGPoint) {
		let device: AVCaptureDevice = activeCamera
		let exposureMode: AVCaptureExposureMode = .ContinuousAutoExposure

		if device.exposurePointOfInterestSupported && device.isExposureModeSupported(exposureMode) {
			var error: NSError?
			do {
				try device.lockForConfiguration()
				device.exposurePointOfInterest = point
				device.exposureMode = exposureMode
				// Only observe when a later switch to .Locked is possible.
				// NOTE(review): repeated calls add an observer each time without
				// removing prior ones — removal happens in observeValueForKeyPath.
				if device.isExposureModeSupported(.Locked) {
					device.addObserver(self, forKeyPath: "adjustingExposure", options: .New, context: &CaptureAdjustingExposureContext)
				}
				device.unlockForConfiguration()
			} catch let error1 as NSError {
				error = error1
				delegate?.captureDeviceConfigurationFailed?(self, error: error)
			}
		}
	}
// }
// }
/**
// } else {
* override to set observeValueForKeyPath and handle exposure observance.
// super.observeValueForKeyPath(keyPath, ofObject: object, change: change, context: context)
* @delegate If the configuration fails, the capture(capture: Capture!, deviceConfigurationFailed error: NSError!) is called.
// }
*/
// }
	/**
	* Overridden to handle the "adjustingExposure" KVO notification registered
	* in exposeAtPoint. When the device stops needing adjustment and supports
	* locked exposure, the observer is removed and the device is re-locked on
	* the private queue.
	* NOTE(review): when the context does NOT match, super is never called —
	* contrary to KVO convention; and when it matches but the inner condition
	* fails, super IS called with our own context. Preserved as-is; verify
	* against upstream before changing.
	* @delegate If the configuration fails, the capture(capture: Capture!, deviceConfigurationFailed error: NSError!) is called.
	*/
	override public func observeValueForKeyPath(keyPath: String?, ofObject object: AnyObject?, change: [String : AnyObject]?, context: UnsafeMutablePointer<Void>) {
		if context == &CaptureAdjustingExposureContext {
			let device: AVCaptureDevice = object as! AVCaptureDevice

			if device.adjustingExposure && device.isExposureModeSupported(.Locked) {
				// Stop observing before dispatching, so this fires only once.
				object!.removeObserver(self, forKeyPath: "adjustingExposure", context: &CaptureAdjustingExposureContext)
				dispatch_async(queue) {
					var error: NSError?
					do {
						try device.lockForConfiguration()
						// NOTE(review): nothing is configured between lock and
						// unlock (e.g. no exposureMode = .Locked) — confirm intent.
						device.unlockForConfiguration()
					} catch let e as NSError {
						error = e
						self.delegate?.captureDeviceConfigurationFailed?(self, error: error)
					} catch {
						fatalError()
					}
				}
			} else {
				super.observeValueForKeyPath(keyPath, ofObject: object, change: change, context: context)
			}
		}
	}
// device.focusMode = focusMode
// device.focusPointOfInterest = centerPoint
/**
// }
* resetFocusAndExposureModes
// if canResetExposure {
* Resets to default configuration for device focus and exposure mode.
// device.exposureMode = exposureMode
* @delegate If the configuration fails, the capture(capture: Capture!, deviceConfigurationFailed error: NSError!) is called.
// device.exposurePointOfInterest = centerPoint
*/
// }
	/**
	* resetFocusAndExposureModes
	* Resets the device to continuous auto focus/exposure centered at (0.5, 0.5).
	* @delegate If the configuration fails, the capture(capture: Capture!, deviceConfigurationFailed error: NSError!) is called.
	*/
	public func resetFocusAndExposureModes() {
		let device: AVCaptureDevice = activeCamera

		let exposureMode: AVCaptureExposureMode = .ContinuousAutoExposure
		// NOTE(review): this checks focusPointOfInterestSupported, not
		// exposurePointOfInterestSupported — looks like a copy/paste slip; confirm
		// before changing since exposurePointOfInterest is assigned below.
		let canResetExposure: Bool = device.focusPointOfInterestSupported && device.isExposureModeSupported(exposureMode)

		let focusMode: AVCaptureFocusMode = .ContinuousAutoFocus
		let canResetFocus: Bool = device.focusPointOfInterestSupported && device.isFocusModeSupported(focusMode)

		// Center of the device coordinate space (0..1 in both axes).
		let centerPoint: CGPoint = CGPointMake(0.5, 0.5)

		var error: NSError?
		do {
			try device.lockForConfiguration()
			if canResetFocus {
				device.focusMode = focusMode
				device.focusPointOfInterest = centerPoint
			}
			if canResetExposure {
				device.exposureMode = exposureMode
				device.exposurePointOfInterest = centerPoint
			}
			device.unlockForConfiguration()
		} catch let error1 as NSError {
			error = error1
			delegate?.captureDeviceConfigurationFailed?(self, error: error)
		}
	}
// }
//
/**
// /**
* captureStillImage
// * isRecording
* Captures the image and write the photo to the user's asset library.
// * Checkts whether the device is currently recording.
* @delegate If the success, the capture(capture: Capture!, assetLibraryDidWrite image: UIImage!) is called.
// * @return A boolean of the result, true if yes, false otherwise.
* @delegate If failure, capture(capture: Capture!, assetLibraryWriteFailed error: NSError!) is called.
// */
*/
// public var isRecording: Bool {
	/**
	* captureStillImage
	* Captures a still image and writes the photo to the user's asset library.
	* @delegate If the success, the capture(capture: Capture!, assetLibraryDidWrite image: UIImage!) is called.
	* @delegate If failure, capture(capture: Capture!, assetLibraryWriteFailed error: NSError!) is called.
	*/
	public func captureStillImage() {
		let connection: AVCaptureConnection = imageOutput.connectionWithMediaType(AVMediaTypeVideo)
		if connection.supportsVideoOrientation {
			// Align the capture with the device's current physical orientation.
			connection.videoOrientation = currentVideoOrientation
		}
		imageOutput.captureStillImageAsynchronouslyFromConnection(connection) { (sampleBuffer: CMSampleBufferRef?, error: NSError?) in
			if nil == sampleBuffer {
				// NOTE(review): a nil sample buffer means CAPTURE failed, yet the
				// asset-library-write-failed callback is used — confirm intent.
				self.delegate?.captureAsetLibraryWriteFailed?(self, error: error)
			} else {
				let imageData: NSData = AVCaptureStillImageOutput.jpegStillImageNSDataRepresentation(sampleBuffer)
				let image: UIImage = UIImage(data: imageData)!
				// writeImageToAssetsLibrary is defined elsewhere in this class.
				self.writeImageToAssetsLibrary(image)
			}
		}
	}
// connection.videoOrientation = currentVideoOrientation
// }
/**
// if connection.supportsVideoStabilization {
* isRecording
// connection.preferredVideoStabilizationMode = .Auto
* Checkts whether the device is currently recording.
// }
* @return A boolean of the result, true if yes, false otherwise.
//
*/
// let device: AVCaptureDevice = activeCamera
public
var
isRecording
:
Bool
{
//
get
{
// if device.smoothAutoFocusSupported {
return
movieOutput
.
recording
// var error: NSError?
}
// do {
}
// try device.lockForConfiguration()
// device.smoothAutoFocusEnabled = false
/**
// device.unlockForConfiguration()
* startRecording
// } catch let error1 as NSError {
* If the device is not currently recording, this starts the movie recording.
// error = error1
* @delegate If the configuration fails, the capture(capture: Capture!, deviceConfigurationFailed error: NSError!) is called.
// delegate?.captureDeviceConfigurationFailed?(self, error: error)
*/
// }
	/**
	* startRecording
	* If the device is not currently recording, this configures the movie
	* connection (orientation, stabilization), disables smooth auto focus where
	* supported, and starts recording to a fresh output URL.
	* @delegate If the configuration fails, the capture(capture: Capture!, deviceConfigurationFailed error: NSError!) is called.
	*/
	public func startRecording() {
		if !isRecording {
			let connection: AVCaptureConnection = movieOutput.connectionWithMediaType(AVMediaTypeVideo)
			if connection.supportsVideoOrientation {
				connection.videoOrientation = currentVideoOrientation
			}
			if connection.supportsVideoStabilization {
				connection.preferredVideoStabilizationMode = .Auto
			}

			let device: AVCaptureDevice = activeCamera

			// Smooth auto focus is disabled for video to avoid visible hunting.
			if device.smoothAutoFocusSupported {
				var error: NSError?
				do {
					try device.lockForConfiguration()
					device.smoothAutoFocusEnabled = false
					device.unlockForConfiguration()
				} catch let error1 as NSError {
					error = error1
					delegate?.captureDeviceConfigurationFailed?(self, error: error)
				}
			}

			// uniqueURL is defined elsewhere in this class; it generates a fresh
			// temporary destination for the movie file.
			movieOutputURL = uniqueURL
			movieOutput.startRecordingToOutputFileURL(movieOutputURL, recordingDelegate: self)
		}
	}
//
// /**
/**
* stopRecording
* If the device is currently recording, this stops the movie recording.
*/
public func stopRecording() {
    // No-op unless a recording is in progress.
    guard isRecording else {
        return
    }
    movieOutput.stopRecording()
}
// orientation = .Portrait
// break
/**
* recordedDuration
* Retrieves the movie recorded duration.
* @return A CMTime value.
*/
public var recordedDuration: CMTime {
    // Read-only computed property; forwards to the movie output.
    return movieOutput.recordedDuration
}
// return orientation!
// }
/**
* currentVideoOrientation
* Retrieves the current orientation of the device.
* @return A AVCaptureVideoOrientation value, [Portrait, LandscapeLeft, PortraitUpsideDown, LandscapeRight].
*/
public var currentVideoOrientation: AVCaptureVideoOrientation {
    // Return directly from each case instead of accumulating into an optional
    // and force unwrapping it at the end — same mapping, no `!`.
    switch UIDevice.currentDevice().orientation {
    case .Portrait:
        return .Portrait
    case .LandscapeRight:
        // Device landscape-right corresponds to capture landscape-left; the
        // AVFoundation orientations are mirrored relative to the device's.
        return .LandscapeLeft
    case .PortraitUpsideDown:
        return .PortraitUpsideDown
    default:
        // FaceUp, FaceDown, Unknown, and LandscapeLeft all fell through to
        // LandscapeRight in the original; preserved as-is.
        return .LandscapeRight
    }
}
// */
// private func postAssetLibraryNotification(image: UIImage!) {
/**
* uniqueURL
* A unique URL generated for the movie video.
* @return An optional NSURL value, nil when the temporary directory cannot be created.
*/
private var uniqueURL: NSURL? {
    let fileManager: NSFileManager = NSFileManager.defaultManager()
    let tempDirectoryTemplate: String = (NSTemporaryDirectory() as NSString).stringByAppendingPathComponent("FocusLibrary")
    do {
        try fileManager.createDirectoryAtPath(tempDirectoryTemplate, withIntermediateDirectories: true, attributes: nil)
        // The original returned a fixed "test.mov", so every recording
        // clobbered the previous file; a UUID-based name makes the URL
        // actually unique, as the documentation promises.
        return NSURL.fileURLWithPath(tempDirectoryTemplate + "/" + NSUUID().UUIDString + ".mov")
    } catch {
        // Directory creation failed; fall through and signal with nil.
    }
    return nil
}
// let library: ALAssetsLibrary = ALAssetsLibrary()
// library.writeImageToSavedPhotosAlbum(image.CGImage, orientation: ALAssetOrientation(rawValue: image.imageOrientation.rawValue)!) { (path: NSURL!, error: NSError?) -> Void in
/**
* postAssetLibraryNotification
* Fires an asynchronous call to the capture(capture: Capture!, assetLibraryDidWrite image: UIImage!) delegate.
* @param image: UIImage! The image that was written to the asset library.
* @delegate An asynchronous call to capture(capture: Capture!, assetLibraryDidWrite image: UIImage!) delegate.
*/
private func postAssetLibraryNotification(image: UIImage!) {
    // Dispatched on `queue` (declared elsewhere in this class — presumably a
    // background serial queue; confirm against its declaration). `self` is
    // captured strongly, keeping the Capture instance alive until delivery.
    dispatch_async(queue) {
        self.delegate?.capture?(self, assetLibraryDidWrite: image)
    }
}
// * Writes the video file to the user's asset library.
// * @param videoURL: NSURL!
/**
* writeImageToAssetsLibrary
* Writes the image file to the user's asset library.
* @param image: UIImage!
* @delegate If successful, an asynchronous call to capture(capture: Capture!, assetLibraryDidWrite image: UIImage!) delegate.
* @delegate If failure, capture(capture: Capture!, assetLibraryWriteFailed error: NSError!) is called.
*/
private func writeImageToAssetsLibrary(image: UIImage) {
    let library: ALAssetsLibrary = ALAssetsLibrary()
    // UIImageOrientation and ALAssetOrientation share raw values, so the
    // raw-value bridge below cannot fail for a valid image orientation.
    let orientation: ALAssetOrientation = ALAssetOrientation(rawValue: image.imageOrientation.rawValue)!
    library.writeImageToSavedPhotosAlbum(image.CGImage, orientation: orientation) { (path: NSURL!, error: NSError?) -> Void in
        if let e = error {
            self.delegate?.captureAsetLibraryWriteFailed?(self, error: e)
        } else {
            self.postAssetLibraryNotification(image)
        }
    }
}
// /**
// * generateThumbnailForVideoAtURL
/**
* writeVideoToAssetsLibrary
* Writes the video file to the user's asset library.
* @param videoURL: NSURL!
* @delegate If successful, an asynchronous call to capture(capture: Capture!, assetLibraryDidWrite image: UIImage!) delegate.
* @delegate If failure, capture(capture: Capture!, assetLibraryWriteFailed error: NSError!) is called.
*/
private func writeVideoToAssetsLibrary(videoURL: NSURL!) {
    let library: ALAssetsLibrary = ALAssetsLibrary()
    // NOTE(review): an incompatible video is silently ignored here (no
    // delegate callback) — preserved from the original behavior.
    guard library.videoAtPathIsCompatibleWithSavedPhotosAlbum(videoURL) else {
        return
    }
    library.writeVideoAtPathToSavedPhotosAlbum(videoURL) { (path: NSURL!, error: NSError?) in
        if let e = error {
            self.delegate?.captureAsetLibraryWriteFailed?(self, error: e)
        } else {
            // A successful write kicks off thumbnail generation, which in
            // turn posts the asset-library delegate notification.
            self.generateThumbnailForVideoAtURL(videoURL)
        }
    }
}
// } catch {}
// }
/**
* generateThumbnailForVideoAtURL
* Generates a thumbnail for the video URL specified.
* @param videoURL: NSURL!
* @delegate An asynchronous call to capture(capture: Capture!, assetLibraryDidWrite image: UIImage!) delegate.
*/
private func generateThumbnailForVideoAtURL(videoURL: NSURL!) {
    // Thumbnail extraction is dispatched off the caller's thread onto `queue`.
    dispatch_async(queue) {
        do {
            let asset: AVAsset = AVAsset(URL: videoURL)
            let imageGenerator: AVAssetImageGenerator = AVAssetImageGenerator(asset: asset)
            // Width capped at 100pt; a 0 height lets AVFoundation preserve the
            // video's aspect ratio.
            imageGenerator.maximumSize = CGSizeMake(100, 0)
            // Honor the track's preferred transform so rotated captures don't
            // produce sideways thumbnails.
            imageGenerator.appliesPreferredTrackTransform = true
            // Grab the very first frame (time zero) as the thumbnail.
            let imageRef: CGImageRef = try imageGenerator.copyCGImageAtTime(kCMTimeZero, actualTime: nil)
            let image: UIImage = UIImage(CGImage: imageRef)
            // Hop to the main queue before notifying the delegate.
            dispatch_async(dispatch_get_main_queue()) {
                self.postAssetLibraryNotification(image)
            }
        } catch {}
        // NOTE(review): the empty catch above silently swallows thumbnail
        // failures — no delegate callback fires. Deliberate best-effort?
        // Consider surfacing via captureMediaCaptureFailed.
    }
}
/**
* delegate method for capturing video file.
* @delegate If successful, an asynchronous call to capture(capture: Capture!, assetLibraryDidWrite image: UIImage!) delegate.
* @delegate If failure, capture(capture: Capture!, mediaCaptureFailed error: NSError!) is called.
*/
public func captureOutput(captureOutput: AVCaptureFileOutput!, didFinishRecordingToOutputFileAtURL outputFileURL: NSURL!, fromConnections connections: [AnyObject]!, error: NSError!) {
    if nil == error {
        // Use the URL AVFoundation hands back rather than force unwrapping
        // and copying movieOutputURL (`movieOutputURL!.copy() as! NSURL`):
        // both identify the same file, but outputFileURL carries no `!` risk
        // if movieOutputURL was cleared or never set.
        writeVideoToAssetsLibrary(outputFileURL)
    } else {
        delegate?.captureMediaCaptureFailed?(self, error: error)
    }
    // Recording is finished either way; release the stored URL.
    movieOutputURL = nil
}
}
\ No newline at end of file
Source/CapturePreview.swift
View file @
8b825830
////
//// Copyright (C) 2015 GraphKit, Inc. <http://graphkit.io> and other GraphKit contributors.
////
//// This program is free software: you can redistribute it and/or modify
//// it under the terms of the GNU Affero General Public License as published
//// by the Free Software Foundation, either version 3 of the License, or
//// (at your option) any later version.
////
//// This program is distributed in the hope that it will be useful,
//// but WITHOUT ANY WARRANTY; without even the implied warranty of
//// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
//// GNU Affero General Public License for more details.
////
//// You should have received a copy of the GNU Affero General Public License
//// along with this program located at the root of the software package
//// in a file called LICENSE. If not, see <http://www.gnu.org/licenses/>.
////
//
//
// Copyright (C) 2015 GraphKit, Inc. <http://graphkit.io> and other GraphKit contributors.
//import UIKit
//import AVFoundation
//
//
// This program is free software: you can redistribute it and/or modify
//@objc(PreviewDelegate)
// it under the terms of the GNU Affero General Public License as published
//public protocol PreviewDelegate {
// by the Free Software Foundation, either version 3 of the License, or
// optional func previewTappedToFocusAt(preview: Preview, point: CGPoint)
// (at your option) any later version.
// optional func previewTappedToExposeAt(preview: Preview, point: CGPoint)
// optional func previewTappedToReset(preview: Preview, focus: UIView, exposure: UIView)
//}
//
//
// This program is distributed in the hope that it will be useful,
//public class Preview: UIView {
// but WITHOUT ANY WARRANTY; without even the implied warranty of
// /**
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
// :name: boxBounds
// GNU Affero General Public License for more details.
// :description: A static property that sets the initial size of the focusBox and exposureBox properties.
//
// */
// You should have received a copy of the GNU Affero General Public License
// static public var boxBounds: CGRect = CGRectMake(0, 0, 150, 150)
// along with this program located at the root of the software package
//
// in a file called LICENSE. If not, see <http://www.gnu.org/licenses/>.
// /**
//
// :name: delegate
// :description: An optional instance of PreviewDelegate to handle events that are triggered during various
import
UIKit
// stages of engagement.
import
AVFoundation
// */
// public weak var delegate: PreviewDelegate?
// Optional callbacks fired by Preview in response to tap gestures. All are
// optional @objc requirements, so conformers implement only what they need.
@objc(PreviewDelegate)
public protocol PreviewDelegate {
    // Fired on single tap; `point` is in capture-device coordinates
    // (converted via the preview layer before delivery).
    optional func previewTappedToFocusAt(preview: Preview, point: CGPoint)
    // Fired on double tap; `point` is in capture-device coordinates.
    optional func previewTappedToExposeAt(preview: Preview, point: CGPoint)
    // Fired on a two-finger double tap, after the reset animation completes;
    // receives the focus and exposure indicator views.
    optional func previewTappedToReset(preview: Preview, focus: UIView, exposure: UIView)
}
// public var tapToFocusEnabled: Bool {
// get {
// A UIView whose backing layer is an AVCaptureVideoPreviewLayer, with tap
// gestures for focus (single tap), exposure (double tap), and reset
// (two-finger double tap), each animated with an on-screen indicator box.
public class Preview : UIView {
    /**
        :name: boxBounds
        :description: A static property that sets the initial size of the focusBox and exposureBox properties.
    */
    static public var boxBounds: CGRect = CGRectMake(0, 0, 150, 150)

    /**
        :name: delegate
        :description: An optional instance of PreviewDelegate to handle events that are triggered during various
        stages of engagement.
    */
    public weak var delegate: PreviewDelegate?

    /**
        :name: tapToFocusEnabled
        :description: A mutator and accessor that enables and disables tap to focus gesture.
    */
    public var tapToFocusEnabled: Bool {
        get {
            // Force unwrap is safe after prepareView() runs in every init path.
            return singleTapRecognizer!.enabled
        }
        set(value) {
            singleTapRecognizer!.enabled = value
        }
    }

    /**
        :name: tapToExposeEnabled
        :description: A mutator and accessor that enables and disables tap to expose gesture.
    */
    public var tapToExposeEnabled: Bool {
        get {
            return doubleTapRecognizer!.enabled
        }
        set(value) {
            doubleTapRecognizer!.enabled = value
        }
    }

    // override for layerClass: backs this view with a video preview layer
    // instead of a plain CALayer.
    override public class func layerClass() -> AnyClass {
        return AVCaptureVideoPreviewLayer.self
    }

    /**
        :name: session
        :description: A mutator and accessor for the preview AVCaptureSession value.
    */
    public var session: AVCaptureSession {
        get {
            return (layer as! AVCaptureVideoPreviewLayer).session
        }
        set(value) {
            (layer as! AVCaptureVideoPreviewLayer).session = value
        }
    }

    /**
        :name: focusBox
        :description: An optional UIView for the focusBox animation. This is used when the
        tapToFocusEnabled property is set to true.
    */
    public var focusBox: UIView?

    /**
        :name: exposureBox
        :description: An optional UIView for the exposureBox animation. This is used when the
        tapToExposeEnabled property is set to true.
    */
    public var exposureBox: UIView?

    // :name: singleTapRecognizer
    // :description: Gesture recognizer for single tap (focus).
    private var singleTapRecognizer: UITapGestureRecognizer?

    // :name: doubleTapRecognizer
    // :description: Gesture recognizer for double tap (expose).
    private var doubleTapRecognizer: UITapGestureRecognizer?

    // :name: doubleDoubleTapRecognizer
    // :description: Gesture recognizer for two-finger double tap (reset).
    private var doubleDoubleTapRecognizer: UITapGestureRecognizer?

    required public init?(coder aDecoder: NSCoder) {
        super.init(coder: aDecoder)
        prepareView()
    }

    public override init(frame: CGRect) {
        super.init(frame: frame)
        prepareView()
    }

    public init() {
        super.init(frame: CGRectZero)
        // Sized via Auto Layout by the owner, hence no autoresizing mask.
        translatesAutoresizingMaskIntoConstraints = false
        prepareView()
    }

    // :name: handleSingleTap
    // Runs the focus-box animation at the tap point, then notifies the
    // delegate with the point converted to capture-device coordinates.
    internal func handleSingleTap(recognizer: UIGestureRecognizer) {
        let point: CGPoint = recognizer.locationInView(self)
        runBoxAnimationOnView(focusBox, point: point)
        delegate?.previewTappedToFocusAt?(self, point: captureDevicePointForPoint(point))
    }

    // :name: handleDoubleTap
    // Runs the exposure-box animation at the tap point, then notifies the
    // delegate with the point converted to capture-device coordinates.
    internal func handleDoubleTap(recognizer: UIGestureRecognizer) {
        let point: CGPoint = recognizer.locationInView(self)
        runBoxAnimationOnView(exposureBox, point: point)
        delegate?.previewTappedToExposeAt?(self, point: captureDevicePointForPoint(point))
    }

    // :name: handleDoubleDoubleTap
    // Two-finger double tap resets focus and exposure indicators.
    internal func handleDoubleDoubleTap(recognizer: UIGestureRecognizer) {
        runResetAnimation()
    }

    // :name: prepareView
    // :description: Common setup for view: preview-layer gravity, gesture
    // recognizers, and the focus/exposure indicator boxes.
    private func prepareView() {
        let captureLayer: AVCaptureVideoPreviewLayer = layer as! AVCaptureVideoPreviewLayer
        captureLayer.videoGravity = AVLayerVideoGravityResizeAspectFill
        singleTapRecognizer = UITapGestureRecognizer(target: self, action: "handleSingleTap:")
        singleTapRecognizer!.numberOfTapsRequired = 1
        doubleTapRecognizer = UITapGestureRecognizer(target: self, action: "handleDoubleTap:")
        doubleTapRecognizer!.numberOfTapsRequired = 2
        doubleDoubleTapRecognizer = UITapGestureRecognizer(target: self, action: "handleDoubleDoubleTap:")
        doubleDoubleTapRecognizer!.numberOfTapsRequired = 2
        doubleDoubleTapRecognizer!.numberOfTouchesRequired = 2
        addGestureRecognizer(singleTapRecognizer!)
        addGestureRecognizer(doubleTapRecognizer!)
        addGestureRecognizer(doubleDoubleTapRecognizer!)
        // Single tap waits for double tap to fail, so a double tap doesn't
        // also fire the focus handler.
        singleTapRecognizer!.requireGestureRecognizerToFail(doubleTapRecognizer!)
        focusBox = viewWithColor(.redColor())
        exposureBox = viewWithColor(.blueColor())
        addSubview(focusBox!)
        addSubview(exposureBox!)
    }

    // :name: viewWithColor
    // :description: Initializes a hidden, bordered indicator UIView with a set UIColor.
    private func viewWithColor(color: UIColor) -> UIView {
        let view: UIView = UIView(frame: Preview.boxBounds)
        view.backgroundColor = MaterialTheme.clear.color
        view.layer.borderColor = color.CGColor
        view.layer.borderWidth = 5
        // Hidden until a tap animation reveals it.
        view.hidden = true
        return view
    }

    // :name: runBoxAnimationOnView
    // :description: Runs the animation used for focusBox and exposureBox on single and double
    // taps respectively at a given point: shrink to half size, then hide and
    // reset the transform after a 0.5s delay.
    private func runBoxAnimationOnView(view: UIView!, point: CGPoint) {
        view.center = point
        view.hidden = false
        UIView.animateWithDuration(0.15, delay: 0, options: .CurveEaseInOut, animations: { _ in
            view.layer.transform = CATransform3DMakeScale(0.5, 0.5, 1)
        }) { _ in
            let delayInSeconds: Double = 0.5
            let popTime: dispatch_time_t = dispatch_time(DISPATCH_TIME_NOW, Int64(delayInSeconds * Double(NSEC_PER_SEC)))
            dispatch_after(popTime, dispatch_get_main_queue()) {
                view.hidden = true
                view.transform = CGAffineTransformIdentity
            }
        }
    }

    // :name: captureDevicePointForPoint
    // :description: Interprets the correct point from touch to preview layer
    // (view coordinates -> capture-device coordinates).
    private func captureDevicePointForPoint(point: CGPoint) -> CGPoint {
        let previewLayer: AVCaptureVideoPreviewLayer = layer as! AVCaptureVideoPreviewLayer
        return previewLayer.captureDevicePointOfInterestForPoint(point)
    }

    // :name: runResetAnimation
    // :description: Executes the reset animation for focus and exposure:
    // both boxes converge on the device-center point, shrink, then hide; the
    // delegate reset callback fires after the delayed hide completes.
    private func runResetAnimation() {
        // Nothing to reset when both gestures are disabled.
        if !tapToFocusEnabled && !tapToExposeEnabled {
            return
        }
        let previewLayer: AVCaptureVideoPreviewLayer = layer as! AVCaptureVideoPreviewLayer
        let centerPoint: CGPoint = previewLayer.pointForCaptureDevicePointOfInterest(CGPointMake(0.5, 0.5))
        focusBox!.center = centerPoint
        exposureBox!.center = centerPoint
        // Exposure box starts slightly larger so the two boxes stay visually
        // distinct while overlapping.
        exposureBox!.transform = CGAffineTransformMakeScale(1.2, 1.2)
        focusBox!.hidden = false
        exposureBox!.hidden = false
        UIView.animateWithDuration(0.15, delay: 0, options: .CurveEaseInOut, animations: { _ in
            self.focusBox!.layer.transform = CATransform3DMakeScale(0.5, 0.5, 1)
            self.exposureBox!.layer.transform = CATransform3DMakeScale(0.7, 0.7, 1)
        }) { _ in
            let delayInSeconds: Double = 0.5
            let popTime: dispatch_time_t = dispatch_time(DISPATCH_TIME_NOW, Int64(delayInSeconds * Double(NSEC_PER_SEC)))
            dispatch_after(popTime, dispatch_get_main_queue()) {
                self.focusBox!.hidden = true
                self.exposureBox!.hidden = true
                self.focusBox!.transform = CGAffineTransformIdentity
                self.exposureBox!.transform = CGAffineTransformIdentity
                self.delegate?.previewTappedToReset?(self, focus: self.focusBox!, exposure: self.exposureBox!)
            }
        }
    }
}
Source/MaterialButton.swift
View file @
8b825830
...
@@ -99,7 +99,6 @@ public class MaterialButton : UIButton {
...
@@ -99,7 +99,6 @@ public class MaterialButton : UIButton {
:name: drawRect
:name: drawRect
*/
*/
final
public
override
func
drawRect
(
rect
:
CGRect
)
{
final
public
override
func
drawRect
(
rect
:
CGRect
)
{
prepareContext
(
rect
)
prepareBackgroundColorView
()
prepareBackgroundColorView
()
prepareButton
()
prepareButton
()
}
}
...
@@ -152,18 +151,6 @@ public class MaterialButton : UIButton {
...
@@ -152,18 +151,6 @@ public class MaterialButton : UIButton {
}
}
//
//
// :name: prepareContext
//
private
func
prepareContext
(
rect
:
CGRect
)
{
let
context
=
UIGraphicsGetCurrentContext
()
CGContextSaveGState
(
context
);
CGContextAddEllipseInRect
(
context
,
rect
)
CGContextSetFillColorWithColor
(
context
,
MaterialTheme
.
clear
.
color
.
CGColor
)
CGContextFillPath
(
context
)
CGContextRestoreGState
(
context
);
}
//
// :name: prepareBackgroundColorView
// :name: prepareBackgroundColorView
//
//
private
func
prepareBackgroundColorView
()
{
private
func
prepareBackgroundColorView
()
{
...
...
Source/NavigationViewController.swift
View file @
8b825830
...
@@ -25,6 +25,7 @@ public class NavigationViewController: UIViewController {
...
@@ -25,6 +25,7 @@ public class NavigationViewController: UIViewController {
public
var
titleLabel
:
UILabel
?
{
public
var
titleLabel
:
UILabel
?
{
didSet
{
didSet
{
if
let
v
=
titleLabel
{
if
let
v
=
titleLabel
{
v
.
translatesAutoresizingMaskIntoConstraints
=
false
view
.
addSubview
(
v
)
view
.
addSubview
(
v
)
}
}
}
}
...
@@ -36,6 +37,7 @@ public class NavigationViewController: UIViewController {
...
@@ -36,6 +37,7 @@ public class NavigationViewController: UIViewController {
public
var
leftButton
:
FlatButton
?
{
public
var
leftButton
:
FlatButton
?
{
didSet
{
didSet
{
if
let
v
=
leftButton
{
if
let
v
=
leftButton
{
v
.
translatesAutoresizingMaskIntoConstraints
=
false
view
.
addSubview
(
v
)
view
.
addSubview
(
v
)
}
}
}
}
...
@@ -47,6 +49,7 @@ public class NavigationViewController: UIViewController {
...
@@ -47,6 +49,7 @@ public class NavigationViewController: UIViewController {
public
var
rightButton
:
FlatButton
?
{
public
var
rightButton
:
FlatButton
?
{
didSet
{
didSet
{
if
let
v
=
rightButton
{
if
let
v
=
rightButton
{
v
.
translatesAutoresizingMaskIntoConstraints
=
false
view
.
addSubview
(
v
)
view
.
addSubview
(
v
)
}
}
}
}
...
...
Write
Preview
Markdown
is supported
0%
Try again
or
attach a new file
Attach a file
Cancel
You are about to add
0
people
to the discussion. Proceed with caution.
Finish editing this message first!
Cancel
Please
register
or
sign in
to comment