Commit 5975857b by Daniel Dahan

development: combined Capture with CaptureSession
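This merge moves the former CaptureSession API (session control, focus/exposure/flash handling, still image and movie capture) directly onto Capture, and folds CaptureSessionDelegate into CaptureDelegate. A minimal before/after sketch of the call sites, assuming the Material module is imported; CameraController is a hypothetical name used only for illustration:

import UIKit
import Material

class CameraController: ToolbarController, CaptureDelegate {
    private lazy var capture = Capture()

    /// Called during controller setup.
    private func prepareCapture() {
        capture.delegate = self          // before: capture.delegate = self and capture.session.delegate = self
        capture.flashMode = .auto        // before: capture.session.flashMode = .auto
        capture.capturePreset = .presetHigh
    }

    /// Former CaptureSessionDelegate callbacks now arrive through CaptureDelegate.
    func captureFailedWithError(capture: Capture, error: Error) {
        print(error)
    }
}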

parent e4da7356
......@@ -28,7 +28,6 @@
96717B111DBE6AF600DA84DB /* Capture.swift in Sources */ = {isa = PBXBuildFile; fileRef = 96717B0D1DBE6AF600DA84DB /* Capture.swift */; };
96717B121DBE6AF600DA84DB /* CaptureController.swift in Sources */ = {isa = PBXBuildFile; fileRef = 96717B0E1DBE6AF600DA84DB /* CaptureController.swift */; };
96717B131DBE6AF600DA84DB /* CapturePreview.swift in Sources */ = {isa = PBXBuildFile; fileRef = 96717B0F1DBE6AF600DA84DB /* CapturePreview.swift */; };
96717B141DBE6AF600DA84DB /* CaptureSession.swift in Sources */ = {isa = PBXBuildFile; fileRef = 96717B101DBE6AF600DA84DB /* CaptureSession.swift */; };
96717B181DBE6B1800DA84DB /* PhotoLibrary.swift in Sources */ = {isa = PBXBuildFile; fileRef = 96717B161DBE6B1800DA84DB /* PhotoLibrary.swift */; };
96717B191DBE6B1800DA84DB /* PhotoLibraryController.swift in Sources */ = {isa = PBXBuildFile; fileRef = 96717B171DBE6B1800DA84DB /* PhotoLibraryController.swift */; };
967A48191D0F425A00B8CEB7 /* StatusBarController.swift in Sources */ = {isa = PBXBuildFile; fileRef = 967A48181D0F425A00B8CEB7 /* StatusBarController.swift */; };
......@@ -227,7 +226,6 @@
96717B0D1DBE6AF600DA84DB /* Capture.swift */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.swift; path = Capture.swift; sourceTree = "<group>"; };
96717B0E1DBE6AF600DA84DB /* CaptureController.swift */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.swift; path = CaptureController.swift; sourceTree = "<group>"; };
96717B0F1DBE6AF600DA84DB /* CapturePreview.swift */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.swift; path = CapturePreview.swift; sourceTree = "<group>"; };
96717B101DBE6AF600DA84DB /* CaptureSession.swift */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.swift; path = CaptureSession.swift; sourceTree = "<group>"; };
96717B161DBE6B1800DA84DB /* PhotoLibrary.swift */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.swift; path = PhotoLibrary.swift; sourceTree = "<group>"; };
96717B171DBE6B1800DA84DB /* PhotoLibraryController.swift */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.swift; path = PhotoLibraryController.swift; sourceTree = "<group>"; };
967887881C9777CB0037F6C9 /* MaterialViewTests.swift */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.swift; path = MaterialViewTests.swift; sourceTree = "<group>"; };
......@@ -520,7 +518,6 @@
children = (
96717B0D1DBE6AF600DA84DB /* Capture.swift */,
96717B0F1DBE6AF600DA84DB /* CapturePreview.swift */,
96717B101DBE6AF600DA84DB /* CaptureSession.swift */,
96717B0E1DBE6AF600DA84DB /* CaptureController.swift */,
);
path = Capture;
......@@ -1121,7 +1118,6 @@
96BCB7DB1CB40DC500C806FE /* NavigationItem.swift in Sources */,
96BCB7E01CB40DC500C806FE /* NavigationDrawerController.swift in Sources */,
9628645F1D540AF300690B69 /* DynamicFontType.swift in Sources */,
96717B141DBE6AF600DA84DB /* CaptureSession.swift in Sources */,
961EFC581D738FF600E84652 /* SnackbarController.swift in Sources */,
96BCB7AD1CB40DC500C806FE /* Material+String.swift in Sources */,
96BCB7B91CB40DC500C806FE /* Button.swift in Sources */,
......
......@@ -47,7 +47,7 @@ open class Bar: View {
}
/// The alignment of the contentView.
open var contentViewAlignment = ContentViewAlignment.any {
open var contentViewAlignment = ContentViewAlignment.center {
didSet {
layoutSubviews()
}
......@@ -188,14 +188,9 @@ open class Bar: View {
open func reload() {
var lc = 0
var rc = 0
let l = (CGFloat(leftViews.count) * interimSpace)
let r = (CGFloat(rightViews.count) * interimSpace)
let p = width - l - r - contentEdgeInsets.left - contentEdgeInsets.right
let columns = Int(ceil(p / gridFactor))
grid.begin()
grid.views.removeAll()
grid.axis.columns = columns
for v in leftViews {
if let b = v as? UIButton {
......@@ -230,6 +225,23 @@ open class Bar: View {
}
contentView.grid.begin()
contentView.grid.offset.columns = 0
var l: CGFloat = 0
var r: CGFloat = 0
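// When centering the contentView, reserve the same interim space on both sides,
// sized to the larger of the left/right view counts, so the remaining width
// (and the resulting column count) stays symmetric about the center.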
if .center == contentViewAlignment {
if leftViews.count < rightViews.count {
r = CGFloat(rightViews.count) * interimSpace
l = r
} else {
l = CGFloat(leftViews.count) * interimSpace
r = l
}
}
let p = width - l - r - contentEdgeInsets.left - contentEdgeInsets.right
let columns = Int(ceil(p / gridFactor))
if .center == contentViewAlignment {
if lc < rc {
......@@ -237,13 +249,15 @@ open class Bar: View {
contentView.grid.offset.columns = rc - lc
} else {
contentView.grid.columns = columns - 2 * lc
contentView.grid.offset.columns = 0
rightViews.first?.grid.offset.columns = lc - rc
}
} else {
contentView.grid.columns = columns - lc - rc
}
print(contentView.grid.columns)
grid.axis.columns = columns
grid.commit()
contentView.grid.commit()
......
......@@ -37,6 +37,61 @@ public enum CaptureMode: Int {
case video
}
private var CaptureAdjustingExposureContext: UInt8 = 0
@objc(CapturePreset)
public enum CapturePreset: Int {
case presetPhoto
case presetHigh
case presetMedium
case presetLow
case preset352x288
case preset640x480
case preset1280x720
case preset1920x1080
case preset3840x2160
case presetiFrame960x540
case presetiFrame1280x720
case presetInputPriority
}
/**
Converts a given CapturePreset to a String value.
- Parameter preset: A CapturePreset to convert.
- Returns: A String representation of the preset.
*/
public func CapturePresetToString(preset: CapturePreset) -> String {
switch preset {
case .presetPhoto:
return AVCaptureSessionPresetPhoto
case .presetHigh:
return AVCaptureSessionPresetHigh
case .presetMedium:
return AVCaptureSessionPresetMedium
case .presetLow:
return AVCaptureSessionPresetLow
case .preset352x288:
return AVCaptureSessionPreset352x288
case .preset640x480:
return AVCaptureSessionPreset640x480
case .preset1280x720:
return AVCaptureSessionPreset1280x720
case .preset1920x1080:
return AVCaptureSessionPreset1920x1080
case .preset3840x2160:
if #available(iOS 9.0, *) {
return AVCaptureSessionPreset3840x2160
} else {
return AVCaptureSessionPresetHigh
}
case .presetiFrame960x540:
return AVCaptureSessionPresetiFrame960x540
case .presetiFrame1280x720:
return AVCaptureSessionPresetiFrame1280x720
case .presetInputPriority:
return AVCaptureSessionPresetInputPriority
}
}
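// Usage sketch (hypothetical): Capture.capturePreset, introduced later in this file,
// funnels the new value through CapturePresetToString before assigning it to the
// underlying AVCaptureSession:
//
//     capture.capturePreset = .preset1920x1080
//     // session.sessionPreset is now AVCaptureSessionPreset1920x1080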
@objc(CaptureDelegate)
public protocol CaptureDelegate {
/**
......@@ -129,9 +184,91 @@ public protocol CaptureDelegate {
*/
@objc
optional func captureDidPressVideoButton(capture: Capture, button: UIButton)
/**
A delegation method that is fired when the capture session fails with an error.
- Parameter capture: A reference to the calling capture.
- Parameter error: An Error corresponding to the error.
*/
@objc
optional func captureFailedWithError(capture: Capture, error: Error)
/**
A delegation method that is fired when the camera has been switched to another.
- Parameter capture: A reference to the calling capture.
- Parameter device position: An AVCaptureDevicePosition that the camera has switched to.
*/
@objc
optional func captureDidSwitchCameras(capture: Capture, device position: AVCaptureDevicePosition)
/**
A delegation method that is fired before the camera has been switched to another.
- Parameter capture: A reference to the calling capture.
- Parameter device position: An AVCaptureDevicePosition that the camera will switch to.
*/
@objc
optional func captureWillSwitchCameras(capture: Capture, device position: AVCaptureDevicePosition)
/**
A delegation method that is fired when an image has been captured asynchronously.
- Parameter capture: A reference to the calling capture.
- Parameter image: An image that has been captured.
*/
@objc
optional func captureStillImageAsynchronously(capture: Capture, image: UIImage)
/**
A delegation method that is fired when capturing an image asynchronously has failed.
- Parameter capture: A reference to the calling capture.
- Parameter error: An Error corresponding to the error.
*/
@objc
optional func captureStillImageAsynchronouslyFailedWithError(capture: Capture, error: Error)
/**
A delegation method that is fired when creating a movie file has failed.
- Parameter capture: A reference to the calling capture.
- Parameter error: An Error corresponding to the error.
*/
@objc
optional func captureCreateMovieFileFailedWithError(capture: Capture, error: Error)
/**
A delegation method that is fired when capturing a movie has failed.
- Parameter capture: A reference to the calling capture.
- Parameter error: An Error corresponding to the error.
*/
@objc
optional func captureMovieFailedWithError(capture: Capture, error: Error)
/**
A delegation method that is fired when a session started recording and writing
to a file.
- Parameter capture: A reference to the calling capture.
- Parameter captureOutput: An AVCaptureFileOutput.
- Parameter fileURL: A file URL.
- Parameter fromConnections: An array of Anys.
*/
@objc
optional func captureDidStartRecordingToOutputFileAtURL(capture: Capture, captureOutput: AVCaptureFileOutput, fileURL: NSURL, fromConnections connections: [Any])
/**
A delegation method that is fired when a session finished recording and writing
to a file.
- Parameter capture: A reference to the calling capture.
- Parameter captureOutput: An AVCaptureFileOutput.
- Parameter outputFileURL: A file URL.
- Parameter fromConnections: An array of Anys.
- Parameter error: An Error corresponding to an error.
*/
@objc
optional func captureDidFinishRecordingToOutputFileAtURL(capture: Capture, captureOutput: AVCaptureFileOutput, outputFileURL: NSURL, fromConnections connections: [Any], error: Error!)
}
open class Capture: View, UIGestureRecognizerDelegate {
open class Capture: View {
/// A boolean indicating if an animation is in progress.
open var isAnimating = false
/// A reference to the capture mode.
open var mode = CaptureMode.video
......@@ -141,20 +278,218 @@ open class Capture: View, UIGestureRecognizerDelegate {
/// A reference to the CapturePreview view.
open internal(set) var preview: CapturePreview!
/// A reference to the CaptureSession.
open internal(set) var session: CaptureSession!
/// A Timer reference for when recording is enabled.
internal var timer: Timer?
/// A reference to the visualEffect view.
internal var visualEffect: UIView!
/// A tap gesture reference for focus events.
private var tapToFocusGesture: UITapGestureRecognizer?
internal var tapToFocusGesture: UITapGestureRecognizer?
/// A tap gesture reference for exposure events.
private var tapToExposeGesture: UITapGestureRecognizer?
internal var tapToExposeGesture: UITapGestureRecognizer?
/// A tap gesture reference for reset events.
private var tapToResetGesture: UITapGestureRecognizer?
internal var tapToResetGesture: UITapGestureRecognizer?
/// A reference to the session DispatchQueue.
internal var sessionQueue: DispatchQueue!
/// A reference to the active video input.
internal var activeVideoInput: AVCaptureDeviceInput?
/// A reference to the active audio input.
internal var activeAudioInput: AVCaptureDeviceInput?
/// A reference to the image output.
internal var imageOutput: AVCaptureStillImageOutput!
/// A reference to the movie output.
internal var movieOutput: AVCaptureMovieFileOutput!
/// A reference to the movie output URL.
internal var movieOutputURL: URL?
/// A reference to the AVCaptureSession.
internal var session: AVCaptureSession!
/// A boolean indicating if the session is running.
open internal(set) var isRunning = false
/// A boolean indicating if the session is recording.
open internal(set) var isRecording = false
/// A reference to the recorded time duration.
open var recordedDuration: CMTime {
return movieOutput.recordedDuration
}
/// An optional reference to the active camera if one exists.
open var activeCamera: AVCaptureDevice? {
return activeVideoInput?.device
}
/// An optional reference to the inactive camera if one exists.
open var inactiveCamera: AVCaptureDevice? {
var device: AVCaptureDevice?
if 1 < cameraCount {
if activeCamera?.position == .back {
device = camera(at: .front)
} else {
device = camera(at: .back)
}
}
return device
}
/// Available number of cameras.
open var cameraCount: Int {
return AVCaptureDevice.devices(withMediaType: AVMediaTypeVideo).count
}
/// A boolean indicating whether the camera can switch to another.
open var canSwitchCameras: Bool {
return 1 < cameraCount
}
/// A boolean indicating whether the camera supports focus.
open var isFocusPointOfInterestSupported: Bool {
return nil == activeCamera ? false : activeCamera!.isFocusPointOfInterestSupported
}
/// A boolean indicating whether the camera supports exposure.
open var isExposurePointOfInterestSupported: Bool {
return nil == activeCamera ? false : activeCamera!.isExposurePointOfInterestSupported
}
/// A boolean indicating if the active camera has flash.
open var isFlashAvailable: Bool {
return nil == activeCamera ? false : activeCamera!.hasFlash
}
/// A boolean indicating if the active camera has a torch.
open var isTorchAvailable: Bool {
return nil == activeCamera ? false : activeCamera!.hasTorch
}
/// A reference to the active camera position if the active camera exists.
open var devicePosition: AVCaptureDevicePosition? {
return activeCamera?.position
}
/// A reference to the focusMode.
open var focusMode: AVCaptureFocusMode {
get {
return activeCamera!.focusMode
}
set(value) {
var error: NSError?
if isFocusModeSupported(focusMode: value) {
do {
let device: AVCaptureDevice = activeCamera!
try device.lockForConfiguration()
device.focusMode = value
device.unlockForConfiguration()
} catch let e as NSError {
error = e
}
} else {
var userInfo: Dictionary<String, Any> = Dictionary<String, Any>()
userInfo[NSLocalizedDescriptionKey] = "[Material Error: Unsupported focusMode.]"
userInfo[NSLocalizedFailureReasonErrorKey] = "[Material Error: Unsupported focusMode.]"
error = NSError(domain: "io.cosmicmind.Material.Capture", code: 0001, userInfo: userInfo)
userInfo[NSUnderlyingErrorKey] = error
}
if let e = error {
delegate?.captureFailedWithError?(capture: self, error: e)
}
}
}
/// A reference to the flashMode.
open var flashMode: AVCaptureFlashMode {
get {
return activeCamera!.flashMode
}
set(value) {
var error: Error?
if isFlashModeSupported(flashMode: value) {
do {
let device: AVCaptureDevice = activeCamera!
try device.lockForConfiguration()
device.flashMode = value
device.unlockForConfiguration()
} catch let e as NSError {
error = e
}
} else {
var userInfo: Dictionary<String, Any> = Dictionary<String, Any>()
userInfo[NSLocalizedDescriptionKey] = "[Material Error: Unsupported flashMode.]"
userInfo[NSLocalizedFailureReasonErrorKey] = "[Material Error: Unsupported flashMode.]"
error = NSError(domain: "io.cosmicmind.Material.Capture", code: 0002, userInfo: userInfo)
userInfo[NSUnderlyingErrorKey] = error
}
if let e = error {
delegate?.captureFailedWithError?(capture: self, error: e)
}
}
}
/// A reference to the torchMode.
open var torchMode: AVCaptureTorchMode {
get {
return activeCamera!.torchMode
}
set(value) {
var error: NSError?
if isTorchModeSupported(torchMode: value) {
do {
let device: AVCaptureDevice = activeCamera!
try device.lockForConfiguration()
device.torchMode = value
device.unlockForConfiguration()
} catch let e as NSError {
error = e
}
} else {
var userInfo: Dictionary<String, Any> = Dictionary<String, Any>()
userInfo[NSLocalizedDescriptionKey] = "[Material Error: Unsupported torchMode.]"
userInfo[NSLocalizedFailureReasonErrorKey] = "[Material Error: Unsupported torchMode.]"
error = NSError(domain: "io.cosmicmind.Material.Capture", code: 0003, userInfo: userInfo)
userInfo[NSUnderlyingErrorKey] = error
}
if let e = error {
delegate?.captureFailedWithError?(capture: self, error: e)
}
}
}
/// The session quality preset.
open var capturePreset = CapturePreset.presetHigh {
didSet {
session.sessionPreset = CapturePresetToString(preset: capturePreset)
}
}
/// The capture video orientation.
open var videoOrientation: AVCaptureVideoOrientation {
var orientation: AVCaptureVideoOrientation
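// Device and video orientations are mirrored in landscape: a device rotated to
// landscapeRight records landscapeLeft video; all other orientations, including
// landscapeLeft, fall through to landscapeRight.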
switch UIDevice.current.orientation {
case .portrait:
orientation = .portrait
case .landscapeRight:
orientation = .landscapeLeft
case .portraitUpsideDown:
orientation = .portraitUpsideDown
default:
orientation = .landscapeRight
}
return orientation
}
/// A reference to the captureButton.
@IBInspectable
......@@ -260,11 +595,6 @@ open class Capture: View, UIGestureRecognizerDelegate {
self.init(frame: .zero)
}
open override func layoutSubviews() {
super.layoutSubviews()
reload()
}
/**
Prepares the view instance when initialized. When subclassing,
it is recommended to override the prepare method
......@@ -276,89 +606,470 @@ open class Capture: View, UIGestureRecognizerDelegate {
super.prepare()
backgroundColor = .black
prepareCaptureSession()
prepareVisualEffect()
prepareSession()
prepareSessionQueue()
prepareActiveVideoInput()
prepareActiveAudioInput()
prepareImageOutput()
prepareMovieOutput()
preparePreview()
isTapToFocusEnabled = true
isTapToExposeEnabled = true
}
}
/// Reloads the view.
open func reload() {
preview.frame = bounds
}
/// Prepare the session.
private func prepareCaptureSession() {
session = CaptureSession()
extension Capture {
/// Prepares the visualEffect.
internal func prepareVisualEffect() {
let blurEffect = UIVisualEffectView(effect: UIBlurEffect(style: .light))
visualEffect = UIView()
visualEffect.backgroundColor = nil
visualEffect.layout(blurEffect).edges()
}
/// Prepares the preview.
private func preparePreview() {
internal func preparePreview() {
preview = CapturePreview()
addSubview(preview)
layout(preview).edges()
bringSubview(toFront: visualEffect)
(preview.layer as! AVCaptureVideoPreviewLayer).session = session.session
session.startSession()
(preview.layer as! AVCaptureVideoPreviewLayer).session = session
startSession()
}
/// Prepares the captureButton.
private func prepareCaptureButton() {
internal func prepareCaptureButton() {
captureButton?.addTarget(self, action: #selector(handleCaptureButton), for: .touchUpInside)
}
/// Prepares the cameraButton.
private func prepareCameraButton() {
internal func prepareCameraButton() {
cameraButton?.addTarget(self, action: #selector(handleCameraButton), for: .touchUpInside)
}
/// Prepares the videoButton.
private func prepareVideoButton() {
internal func prepareVideoButton() {
videoButton?.addTarget(self, action: #selector(handleVideoButton), for: .touchUpInside)
}
/// Prepares the switchCameraButton.
private func prepareSwitchCamerasButton() {
internal func prepareSwitchCamerasButton() {
switchCamerasButton?.addTarget(self, action: #selector(handleSwitchCamerasButton), for: .touchUpInside)
}
/// Prepares the flashButton.
private func prepareFlashButton() {
internal func prepareFlashButton() {
flashButton?.addTarget(self, action: #selector(handleFlashButton), for: .touchUpInside)
}
/// Prepares the sessionQueue.
internal func prepareSessionQueue() {
sessionQueue = DispatchQueue(label: "io.cosmicmind.Material.CaptureSession", attributes: .concurrent, target: nil)
}
/// Prepares the session.
internal func prepareSession() {
session = AVCaptureSession()
}
/// Prepares the activeVideoInput.
internal func prepareActiveVideoInput() {
do {
activeVideoInput = try AVCaptureDeviceInput(device: AVCaptureDevice.defaultDevice(withMediaType: AVMediaTypeVideo))
guard session.canAddInput(activeVideoInput) else {
return
}
session.addInput(activeVideoInput)
} catch let e as NSError {
delegate?.captureFailedWithError?(capture: self, error: e)
}
}
/// Prepares the activeAudioInput.
internal func prepareActiveAudioInput() {
do {
activeAudioInput = try AVCaptureDeviceInput(device: AVCaptureDevice.defaultDevice(withMediaType: AVMediaTypeAudio))
guard session.canAddInput(activeAudioInput) else {
return
}
session.addInput(activeAudioInput)
} catch let e as NSError {
delegate?.captureFailedWithError?(capture: self, error: e)
}
}
/// Prepares the imageOutput.
internal func prepareImageOutput() {
imageOutput = AVCaptureStillImageOutput()
guard session.canAddOutput(imageOutput) else {
return
}
imageOutput.outputSettings = [AVVideoCodecKey: AVVideoCodecJPEG]
session.addOutput(imageOutput)
}
/// Prepares the movieOutput.
internal func prepareMovieOutput() {
movieOutput = AVCaptureMovieFileOutput()
guard session.canAddOutput(movieOutput) else {
return
}
session.addOutput(movieOutput)
}
}
extension Capture {
/// Starts the session.
open func startSession() {
guard !isRunning else {
return
}
sessionQueue.async() { [weak self] in
self?.session.startRunning()
}
}
/// Stops the session.
open func stopSession() {
guard isRunning else {
return
}
sessionQueue.async() { [weak self] in
self?.session.stopRunning()
}
}
/// Switches the camera if possible.
open func switchCameras() {
guard canSwitchCameras && !isAnimating else {
return
}
do {
guard let v = devicePosition else {
return
}
delegate?.captureWillSwitchCameras?(capture: self, device: v)
let videoInput = try AVCaptureDeviceInput(device: inactiveCamera!)
session.beginConfiguration()
session.removeInput(activeVideoInput)
if session.canAddInput(videoInput) {
session.addInput(videoInput)
activeVideoInput = videoInput
} else {
session.addInput(activeVideoInput)
}
session.commitConfiguration()
isAnimating = true
Motion.delay(time: 0.15) { [weak self] in
guard let s = self else {
return
}
s.delegate?.captureDidSwitchCameras?(capture: s, device: s.devicePosition!)
UIView.animate(withDuration: 0.15, animations: { [weak self] in
self?.visualEffect.alpha = 0
}, completion: { [weak self] _ in
guard let s = self else {
return
}
s.visualEffect.removeFromSuperview()
s.isAnimating = false
})
}
} catch let e as NSError {
delegate?.captureFailedWithError?(capture: self, error: e)
}
}
/**
Prepares a given tap gesture.
- Parameter gesture: An optional UITapGestureRecognizer to prepare.
- Parameter numberOfTapsRequired: An integer of the number of taps required
to activate the gesture.
- Parameter numberOfTouchesRequired: An integer of the number of touches, fingers,
required to activate the gesture.
- Parameter selector: A Selector to handle the event.
Checks if a given focus mode is supported.
- Parameter focusMode: An AVCaptureFocusMode.
- Returns: A boolean of the result, true if supported, false otherwise.
*/
private func prepareTapGesture(gesture: inout UITapGestureRecognizer?, numberOfTapsRequired: Int, numberOfTouchesRequired: Int, selector: Selector) {
guard nil == gesture else {
return
open func isFocusModeSupported(focusMode: AVCaptureFocusMode) -> Bool {
return activeCamera!.isFocusModeSupported(focusMode)
}
gesture = UITapGestureRecognizer(target: self, action: selector)
gesture!.delegate = self
gesture!.numberOfTapsRequired = numberOfTapsRequired
gesture!.numberOfTouchesRequired = numberOfTouchesRequired
addGestureRecognizer(gesture!)
/**
Checks if a given exposure mode is supported.
- Parameter exposureMode: An AVCaptureExposureMode.
- Returns: A boolean of the result, true if supported, false otherwise.
*/
open func isExposureModeSupported(exposureMode: AVCaptureExposureMode) -> Bool {
return activeCamera!.isExposureModeSupported(exposureMode)
}
/**
Removes a given tap gesture.
- Parameter gesture: An optional UITapGestureRecognizer to remove.
Checks if a given flash mode is supported.
- Parameter flashMode: An AVCaptureFlashMode.
- Returns: A boolean of the result, true if supported, false otherwise.
*/
private func removeTapGesture(gesture: inout UITapGestureRecognizer?) {
guard let v = gesture else {
open func isFlashModeSupported(flashMode: AVCaptureFlashMode) -> Bool {
return activeCamera!.isFlashModeSupported(flashMode)
}
/**
Checks if a given torch mode is supported.
- Parameter torchMode: An AVCaptureTorchMode.
- Returns: A boolean of the result, true if supported, false otherwise.
*/
open func isTorchModeSupported(torchMode: AVCaptureTorchMode) -> Bool {
return activeCamera!.isTorchModeSupported(torchMode)
}
/**
Focuses the camera at a given point.
- Parameter at: A CGPoint to focus at.
*/
open func focus(at point: CGPoint) {
var error: NSError?
if isFocusPointOfInterestSupported && isFocusModeSupported(focusMode: .autoFocus) {
do {
let device = activeCamera!
try device.lockForConfiguration()
device.focusPointOfInterest = point
device.focusMode = .autoFocus
device.unlockForConfiguration()
} catch let e as NSError {
error = e
}
} else {
var userInfo = [String: Any]()
userInfo[NSLocalizedDescriptionKey] = "[Material Error: Unsupported focus.]"
userInfo[NSLocalizedFailureReasonErrorKey] = "[Material Error: Unsupported focus.]"
error = NSError(domain: "io.cosmicmind.Material.Capture", code: 0004, userInfo: userInfo)
userInfo[NSUnderlyingErrorKey] = error
}
if let e = error {
delegate?.captureFailedWithError?(capture: self, error: e)
}
}
/**
Exposes the camera at a given point.
- Parameter at: A CGPoint to expose at.
*/
open func expose(at point: CGPoint) {
var error: NSError?
if isExposurePointOfInterestSupported && isExposureModeSupported(exposureMode: .continuousAutoExposure) {
do {
let device = activeCamera!
try device.lockForConfiguration()
device.exposurePointOfInterest = point
device.exposureMode = .continuousAutoExposure
if device.isExposureModeSupported(.locked) {
device.addObserver(self, forKeyPath: "adjustingExposure", options: .new, context: &CaptureAdjustingExposureContext)
}
device.unlockForConfiguration()
} catch let e as NSError {
error = e
}
} else {
var userInfo = [String: Any]()
userInfo[NSLocalizedDescriptionKey] = "[Material Error: Unsupported expose.]"
userInfo[NSLocalizedFailureReasonErrorKey] = "[Material Error: Unsupported expose.]"
error = NSError(domain: "io.cosmicmind.Material.Capture", code: 0005, userInfo: userInfo)
userInfo[NSUnderlyingErrorKey] = error
}
if let e = error {
delegate?.captureFailedWithError?(capture: self, error: e)
}
}
open override func observeValue(forKeyPath keyPath: String?, of object: Any?, change: [NSKeyValueChangeKey: Any]?, context: UnsafeMutableRawPointer?) {
if context == &CaptureAdjustingExposureContext {
let device = object as! AVCaptureDevice
if !device.isAdjustingExposure && device.isExposureModeSupported(.locked) {
(object! as AnyObject).removeObserver(self, forKeyPath: "adjustingExposure", context: &CaptureAdjustingExposureContext)
DispatchQueue.main.async() {
do {
try device.lockForConfiguration()
device.exposureMode = .locked
device.unlockForConfiguration()
} catch let e as NSError {
self.delegate?.captureFailedWithError?(capture: self, error: e)
}
}
}
} else {
super.observeValue(forKeyPath: keyPath, of: object, change: change, context: context)
}
}
/**
Resets the camera focus and exposure.
- Parameter focus: A boolean indicating to reset the focus.
- Parameter exposure: A boolean indicating to reset the exposure.
*/
open func reset(focus: Bool = true, exposure: Bool = true) {
let device = activeCamera!
let canResetFocus = device.isFocusPointOfInterestSupported && device.isFocusModeSupported(.continuousAutoFocus)
let canResetExposure = device.isExposurePointOfInterestSupported && device.isExposureModeSupported(.continuousAutoExposure)
let centerPoint = CGPoint(x: 0.5, y: 0.5)
do {
try device.lockForConfiguration()
if canResetFocus && focus {
device.focusMode = .continuousAutoFocus
device.focusPointOfInterest = centerPoint
}
if canResetExposure && exposure {
device.exposureMode = .continuousAutoExposure
device.exposurePointOfInterest = centerPoint
}
device.unlockForConfiguration()
} catch let e as NSError {
delegate?.captureFailedWithError?(capture: self, error: e)
}
}
/// Captures a still image.
open func captureStillImage() {
sessionQueue.async() { [weak self] in
guard let s = self else {
return
}
removeGestureRecognizer(v)
gesture = nil
guard let v = s.imageOutput.connection(withMediaType: AVMediaTypeVideo) else {
return
}
v.videoOrientation = s.videoOrientation
s.imageOutput.captureStillImageAsynchronously(from: v) { [weak self] (sampleBuffer: CMSampleBuffer?, error: Error?) -> Void in
guard let s = self else {
return
}
var captureError = error
if nil == captureError {
let data = AVCaptureStillImageOutput.jpegStillImageNSDataRepresentation(sampleBuffer)!
if let image1 = UIImage(data: data) {
if let image2 = image1.adjustOrientation() {
s.delegate?.captureStillImageAsynchronously?(capture: s, image: image2)
} else {
var userInfo = [String: Any]()
userInfo[NSLocalizedDescriptionKey] = "[Material Error: Cannot fix image orientation.]"
userInfo[NSLocalizedFailureReasonErrorKey] = "[Material Error: Cannot fix image orientation.]"
captureError = NSError(domain: "io.cosmicmind.Material.Capture", code: 0006, userInfo: userInfo)
userInfo[NSUnderlyingErrorKey] = error
}
} else {
var userInfo = [String: Any]()
userInfo[NSLocalizedDescriptionKey] = "[Material Error: Cannot capture image from data.]"
userInfo[NSLocalizedFailureReasonErrorKey] = "[Material Error: Cannot capture image from data.]"
captureError = NSError(domain: "io.cosmicmind.Material.Capture", code: 0007, userInfo: userInfo)
userInfo[NSUnderlyingErrorKey] = error
}
}
if let e = captureError {
s.delegate?.captureStillImageAsynchronouslyFailedWithError?(capture: s, error: e)
}
}
}
}
/// Starts recording.
open func startRecording() {
if !isRecording {
sessionQueue.async() { [weak self] in
guard let s = self else {
return
}
if let v = s.movieOutput.connection(withMediaType: AVMediaTypeVideo) {
v.videoOrientation = s.videoOrientation
v.preferredVideoStabilizationMode = .auto
}
guard let v = s.activeCamera else {
return
}
if v.isSmoothAutoFocusSupported {
do {
try v.lockForConfiguration()
v.isSmoothAutoFocusEnabled = true
v.unlockForConfiguration()
} catch let e as NSError {
s.delegate?.captureFailedWithError?(capture: s, error: e)
}
}
s.movieOutputURL = s.uniqueURL()
if let v = s.movieOutputURL {
s.movieOutput.startRecording(toOutputFileURL: v as URL!, recordingDelegate: s)
}
}
}
}
/// Stops recording.
open func stopRecording() {
guard isRecording else {
return
}
movieOutput.stopRecording()
}
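// Recording flow sketch (hypothetical), assuming the preview is on screen and the
// session is running:
//
//     capture.mode = .video
//     capture.startRecording()   // writes to uniqueURL() in the documents directory
//     capture.stopRecording()    // fires captureDidFinishRecordingToOutputFileAtURL on the delegate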
/**
A reference to the camera at a given position, if one exists.
- Parameter at: An AVCaptureDevicePosition.
- Returns: An AVCaptureDevice if one exists, or nil otherwise.
*/
internal func camera(at position: AVCaptureDevicePosition) -> AVCaptureDevice? {
let devices = AVCaptureDevice.devices(withMediaType: AVMediaTypeVideo) as! [AVCaptureDevice]
for device in devices {
if device.position == position {
return device
}
}
return nil
}
/**
Creates a unique URL if possible.
- Returns: A URL if it is possible to create one; nil otherwise.
*/
internal func uniqueURL() -> URL? {
do {
let directory = try FileManager.default.url(for: .documentDirectory, in: .userDomainMask, appropriateFor: nil, create: true)
let dateFormatter = DateFormatter()
dateFormatter.dateStyle = .full
dateFormatter.timeStyle = .full
return directory.appendingPathComponent(dateFormatter.string(from: NSDate() as Date) + ".mov")
} catch let e as NSError {
delegate?.captureCreateMovieFileFailedWithError?(capture: self, error: e)
}
return nil
}
}
......@@ -371,13 +1082,13 @@ extension Capture {
internal func handleCaptureButton(button: UIButton) {
switch mode {
case .photo:
session.captureStillImage()
captureStillImage()
case .video:
if session.isRecording {
session.stopRecording()
if isRecording {
stopRecording()
stopTimer()
} else {
session.startRecording()
startRecording()
startTimer()
}
}
......@@ -410,7 +1121,15 @@ extension Capture {
*/
@objc
internal func handleSwitchCamerasButton(button: UIButton) {
session.switchCameras()
visualEffect.alpha = 0
layout(visualEffect).edges()
UIView.animate(withDuration: 0.15, animations: { [weak self] in
self?.visualEffect.alpha = 1
}) { [weak self] _ in
self?.switchCameras()
}
delegate?.captureDidPressSwitchCamerasButton?(capture: self, button: button)
}
......@@ -430,12 +1149,12 @@ extension Capture {
*/
@objc
internal func handleTapToFocusGesture(recognizer: UITapGestureRecognizer) {
guard isTapToFocusEnabled && session.isFocusPointOfInterestSupported else {
guard isTapToFocusEnabled && isFocusPointOfInterestSupported else {
return
}
let point = recognizer.location(in: self)
session.focus(at: preview.captureDevicePointOfInterestForPoint(point: point))
focus(at: preview.captureDevicePointOfInterestForPoint(point: point))
delegate?.captureDidTapToFocusAtPoint?(capture: self, point: point)
}
......@@ -445,12 +1164,12 @@ extension Capture {
*/
@objc
internal func handleTapToExposeGesture(recognizer: UITapGestureRecognizer) {
guard isTapToExposeEnabled && session.isExposurePointOfInterestSupported else {
guard isTapToExposeEnabled && isExposurePointOfInterestSupported else {
return
}
let point = recognizer.location(in: self)
session.expose(at: preview.captureDevicePointOfInterestForPoint(point: point))
expose(at: preview.captureDevicePointOfInterestForPoint(point: point))
delegate?.captureDidTapToExposeAtPoint?(capture: self, point: point)
}
......@@ -464,7 +1183,7 @@ extension Capture {
return
}
session.reset()
reset()
let point = preview.pointForCaptureDevicePointOfInterest(point: CGPoint(x: 0.5, y: 0.5))
delegate?.captureDidTapToResetAtPoint?(capture: self, point: point)
......@@ -484,7 +1203,7 @@ extension Capture {
/// Updates the timer when recording.
internal func updateTimer() {
let duration = session.recordedDuration
let duration = recordedDuration
let time = CMTimeGetSeconds(duration)
let hours = Int(time / 3600)
let minutes = Int((time / 60).truncatingRemainder(dividingBy: 60))
......@@ -495,7 +1214,7 @@ extension Capture {
/// Stops the timer when recording.
internal func stopTimer() {
let duration = session.recordedDuration
let duration = recordedDuration
let time = CMTimeGetSeconds(duration)
let hours = Int(time / 3600)
let minutes = Int((time / 60).truncatingRemainder(dividingBy: 60))
......@@ -507,3 +1226,51 @@ extension Capture {
delegate?.captureDidStopRecordTimer?(capture: self, hours: hours, minutes: minutes, seconds: seconds)
}
}
extension Capture: UIGestureRecognizerDelegate {
/**
Prepares a given tap gesture.
- Parameter gesture: An optional UITapGestureRecognizer to prepare.
- Parameter numberOfTapsRequired: An integer of the number of taps required
to activate the gesture.
- Parameter numberOfTouchesRequired: An integer of the number of touches, fingers,
required to activate the gesture.
- Parameter selector: A Selector to handle the event.
*/
internal func prepareTapGesture(gesture: inout UITapGestureRecognizer?, numberOfTapsRequired: Int, numberOfTouchesRequired: Int, selector: Selector) {
guard nil == gesture else {
return
}
gesture = UITapGestureRecognizer(target: self, action: selector)
gesture!.delegate = self
gesture!.numberOfTapsRequired = numberOfTapsRequired
gesture!.numberOfTouchesRequired = numberOfTouchesRequired
addGestureRecognizer(gesture!)
}
/**
Removes a given tap gesture.
- Parameter gesture: An optional UITapGestureRecognizer to remove.
*/
internal func removeTapGesture(gesture: inout UITapGestureRecognizer?) {
guard let v = gesture else {
return
}
removeGestureRecognizer(v)
gesture = nil
}
}
extension Capture: AVCaptureFileOutputRecordingDelegate {
public func capture(_ captureOutput: AVCaptureFileOutput!, didStartRecordingToOutputFileAt fileURL: URL!, fromConnections connections: [Any]!) {
isRecording = true
delegate?.captureDidStartRecordingToOutputFileAtURL?(capture: self, captureOutput: captureOutput, fileURL: fileURL as NSURL, fromConnections: connections)
}
public func capture(_ captureOutput: AVCaptureFileOutput!, didFinishRecordingToOutputFileAt outputFileURL: URL!, fromConnections connections: [Any]!, error: Error!) {
isRecording = false
delegate?.captureDidFinishRecordingToOutputFileAtURL?(capture: self, captureOutput: captureOutput, outputFileURL: outputFileURL as NSURL, fromConnections: connections, error: error)
}
}
......@@ -49,7 +49,7 @@ extension UIViewController {
}
}
open class CaptureController: ToolbarController, CaptureDelegate, CaptureSessionDelegate {
open class CaptureController: ToolbarController, CaptureDelegate {
/// A reference to the Capture instance.
@IBInspectable
open private(set) lazy var capture: Capture = Capture()
......@@ -93,7 +93,7 @@ open class CaptureController: ToolbarController, CaptureDelegate, CaptureSession
/// Prepares capture.
private func prepareCapture() {
capture.delegate = self
capture.session.delegate = self
capture.session.flashMode = .auto
capture.delegate = self
capture.flashMode = .auto
}
}
/*
* Copyright (C) 2015 - 2016, Daniel Dahan and CosmicMind, Inc. <http://cosmicmind.io>.
* All rights reserved.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions are met:
*
* * Redistributions of source code must retain the above copyright notice, this
* list of conditions and the following disclaimer.
*
* * Redistributions in binary form must reproduce the above copyright notice,
* this list of conditions and the following disclaimer in the documentation
* and/or other materials provided with the distribution.
*
* * Neither the name of CosmicMind nor the names of its
* contributors may be used to endorse or promote products derived from
* this software without specific prior written permission.
*
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
* AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
* IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
* DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE
* FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
* DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
* SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
* CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
* OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
* OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
import UIKit
import AVFoundation
private var CaptureSessionAdjustingExposureContext: UInt8 = 0
@objc(CaptureSessionPreset)
public enum CaptureSessionPreset: Int {
case presetPhoto
case presetHigh
case presetMedium
case presetLow
case preset352x288
case preset640x480
case preset1280x720
case preset1920x1080
case preset3840x2160
case presetiFrame960x540
case presetiFrame1280x720
case presetInputPriority
}
/**
Converts a given CaptureSessionPreset to a String value.
- Parameter preset: A CaptureSessionPreset to convert.
*/
public func CaptureSessionPresetToString(preset: CaptureSessionPreset) -> String {
switch preset {
case .presetPhoto:
return AVCaptureSessionPresetPhoto
case .presetHigh:
return AVCaptureSessionPresetHigh
case .presetMedium:
return AVCaptureSessionPresetMedium
case .presetLow:
return AVCaptureSessionPresetLow
case .preset352x288:
return AVCaptureSessionPreset352x288
case .preset640x480:
return AVCaptureSessionPreset640x480
case .preset1280x720:
return AVCaptureSessionPreset1280x720
case .preset1920x1080:
return AVCaptureSessionPreset1920x1080
case .preset3840x2160:
if #available(iOS 9.0, *) {
return AVCaptureSessionPreset3840x2160
} else {
return AVCaptureSessionPresetHigh
}
case .presetiFrame960x540:
return AVCaptureSessionPresetiFrame960x540
case .presetiFrame1280x720:
return AVCaptureSessionPresetiFrame1280x720
case .presetInputPriority:
return AVCaptureSessionPresetInputPriority
}
}
@objc(CaptureSessionDelegate)
public protocol CaptureSessionDelegate {
/**
A delegation method that is fired when the capture session fails with an error.
- Parameter session: A reference to the calling CaptureSession.
- Parameter error: An Error corresponding to the error.
*/
@objc
optional func captureSessionFailedWithError(session: CaptureSession, error: Error)
/**
A delegation method that is fired when the camera has been switched to another.
- Parameter session: A reference to the calling CaptureSession.
- Parameter position: An AVCaptureDevicePosition that the camera has switched to.
*/
@objc
optional func captureSessionDidSwitchCameras(session: CaptureSession, position: AVCaptureDevicePosition)
/**
A delegation method that is fired before the camera has been switched to another.
- Parameter session: A reference to the calling CaptureSession.
- Parameter position: An AVCaptureDevicePosition that the camera will switch to.
*/
@objc
optional func captureSessionWillSwitchCameras(session: CaptureSession, position: AVCaptureDevicePosition)
/**
A delegation method that is fired when an image has been captured asynchronously.
- Parameter session: A reference to the calling CaptureSession.
- Parameter image: An image that has been captured.
*/
@objc
optional func captureSessionStillImageAsynchronously(session: CaptureSession, image: UIImage)
/**
A delegation method that is fired when capturing an image asynchronously has failed.
- Parameter session: A reference to the calling CaptureSession.
- Parameter error: An Error corresponding to the error.
*/
@objc
optional func captureSessionStillImageAsynchronouslyFailedWithError(session: CaptureSession, error: Error)
/**
A delegation method that is fired when creating a movie file has failed.
- Parameter session: A reference to the calling CaptureSession.
- Parameter error: An Error corresponding to the error.
*/
@objc
optional func captureSessionCreateMovieFileFailedWithError(session: CaptureSession, error: Error)
/**
A delegation method that is fired when capturing a movie has failed.
- Parameter session: A reference to the calling CaptureSession.
- Parameter error: An Error corresponding to the error.
*/
@objc
optional func captureSessionMovieFailedWithError(session: CaptureSession, error: Error)
/**
A delegation method that is fired when a session started recording and writing
to a file.
- Parameter session: A reference to the calling CaptureSession.
- Parameter captureOutput: An AVCaptureFileOutput.
- Parameter fileURL: A file URL.
- Parameter fromConnections: An array of Anys.
*/
@objc
optional func captureSessionDidStartRecordingToOutputFileAtURL(session: CaptureSession, captureOutput: AVCaptureFileOutput, fileURL: NSURL, fromConnections connections: [Any])
/**
A delegation method that is fired when a session finished recording and writing
to a file.
- Parameter session: A reference to the calling CaptureSession.
- Parameter captureOutput: An AVCaptureFileOutput.
- Parameter outputFileURL: A file URL.
- Parameter fromConnections: An array of Anys.
- Parameter error: An Error corresponding to an error.
*/
@objc
optional func captureSessionDidFinishRecordingToOutputFileAtURL(session: CaptureSession, captureOutput: AVCaptureFileOutput, outputFileURL: NSURL, fromConnections connections: [Any], error: Error!)
}
@objc(CaptureSession)
open class CaptureSession: NSObject {
/// A reference to the session DispatchQueue.
private var sessionQueue: DispatchQueue!
/// A reference to the active video input.
private var activeVideoInput: AVCaptureDeviceInput?
/// A reference to the active audio input.
private var activeAudioInput: AVCaptureDeviceInput?
/// A reference to the image output.
private var imageOutput: AVCaptureStillImageOutput!
/// A reference to the movie output.
private var movieOutput: AVCaptureMovieFileOutput!
/// A reference to the movie output URL.
private var movieOutputURL: URL?
/// A reference to the AVCaptureSession.
internal private(set) var session: AVCaptureSession!
/// A boolean indicating if the session is running.
open internal(set) var isRunning = false
/// A boolean indicating if the session is recording.
open internal(set) var isRecording = false
/// A reference to the recorded time duration.
open var recordedDuration: CMTime {
return movieOutput.recordedDuration
}
/// An optional reference to the active camera if one exists.
open var activeCamera: AVCaptureDevice? {
return activeVideoInput?.device
}
/// An optional reference to the inactive camera if one exists.
open var inactiveCamera: AVCaptureDevice? {
var device: AVCaptureDevice?
if 1 < cameraCount {
if activeCamera?.position == .back {
device = camera(at: .front)
} else {
device = camera(at: .back)
}
}
return device
}
/// Available number of cameras.
open var cameraCount: Int {
return AVCaptureDevice.devices(withMediaType: AVMediaTypeVideo).count
}
/// A boolean indicating whether the camera can switch to another.
open var canSwitchCameras: Bool {
return 1 < cameraCount
}
/// A boolean indicating whether the camera supports focus.
open var isFocusPointOfInterestSupported: Bool {
return nil == activeCamera ? false : activeCamera!.isFocusPointOfInterestSupported
}
/// A boolean indicating whether the camera supports exposure.
open var isExposurePointOfInterestSupported: Bool {
return nil == activeCamera ? false : activeCamera!.isExposurePointOfInterestSupported
}
/// A boolean indicating if the active camera has flash.
open var isFlashAvailable: Bool {
return nil == activeCamera ? false : activeCamera!.hasFlash
}
/// A boolean indicating if the active camera has a torch.
open var isTorchAvailable: Bool {
return nil == activeCamera ? false : activeCamera!.hasTorch
}
/// A reference to the active camera position if the active camera exists.
open var position: AVCaptureDevicePosition? {
return activeCamera?.position
}
/// A reference to the focusMode.
open var focusMode: AVCaptureFocusMode {
get {
return activeCamera!.focusMode
}
set(value) {
var error: NSError?
if isFocusModeSupported(focusMode: focusMode) {
do {
let device: AVCaptureDevice = activeCamera!
try device.lockForConfiguration()
device.focusMode = value
device.unlockForConfiguration()
} catch let e as NSError {
error = e
}
} else {
var userInfo: Dictionary<String, Any> = Dictionary<String, Any>()
userInfo[NSLocalizedDescriptionKey] = "[Material Error: Unsupported focusMode.]"
userInfo[NSLocalizedFailureReasonErrorKey] = "[Material Error: Unsupported focusMode.]"
error = NSError(domain: "io.cosmicmind.Material.Capture", code: 0001, userInfo: userInfo)
userInfo[NSUnderlyingErrorKey] = error
}
if let e = error {
delegate?.captureSessionFailedWithError?(session: self, error: e)
}
}
}
/// A reference to the flashMode.
open var flashMode: AVCaptureFlashMode {
get {
return activeCamera!.flashMode
}
set(value) {
var error: Error?
if isFlashModeSupported(flashMode: flashMode) {
do {
let device: AVCaptureDevice = activeCamera!
try device.lockForConfiguration()
device.flashMode = value
device.unlockForConfiguration()
} catch let e as NSError {
error = e
}
} else {
var userInfo: Dictionary<String, Any> = Dictionary<String, Any>()
userInfo[NSLocalizedDescriptionKey] = "[Material Error: Unsupported flashMode.]"
userInfo[NSLocalizedFailureReasonErrorKey] = "[Material Error: Unsupported flashMode.]"
error = NSError(domain: "io.cosmicmind.Material.Capture", code: 0002, userInfo: userInfo)
userInfo[NSUnderlyingErrorKey] = error
}
if let e = error {
delegate?.captureSessionFailedWithError?(session: self, error: e)
}
}
}
/// A reference to the torchMode.
open var torchMode: AVCaptureTorchMode {
get {
return activeCamera!.torchMode
}
set(value) {
var error: NSError?
if isTorchModeSupported(torchMode: torchMode) {
do {
let device: AVCaptureDevice = activeCamera!
try device.lockForConfiguration()
device.torchMode = value
device.unlockForConfiguration()
} catch let e as NSError {
error = e
}
} else {
var userInfo: Dictionary<String, Any> = Dictionary<String, Any>()
userInfo[NSLocalizedDescriptionKey] = "[Material Error: Unsupported torchMode.]"
userInfo[NSLocalizedFailureReasonErrorKey] = "[Material Error: Unsupported torchMode.]"
error = NSError(domain: "io.cosmicmind.Material.Capture", code: 0003, userInfo: userInfo)
userInfo[NSUnderlyingErrorKey] = error
}
if let e = error {
delegate?.captureSessionFailedWithError?(session: self, error: e)
}
}
}
/// The session quality preset.
open var preset: CaptureSessionPreset {
didSet {
session.sessionPreset = CaptureSessionPresetToString(preset: preset)
}
}
/// The capture video orientation.
open var videoOrientation: AVCaptureVideoOrientation {
var orientation: AVCaptureVideoOrientation
switch UIDevice.current.orientation {
case .portrait:
orientation = .portrait
case .landscapeRight:
orientation = .landscapeLeft
case .portraitUpsideDown:
orientation = .portraitUpsideDown
default:
orientation = .landscapeRight
}
return orientation
}
/// A delegation property for CaptureSessionDelegate.
open weak var delegate: CaptureSessionDelegate?
/// Initializer.
public override init() {
preset = .presetHigh
super.init()
prepare()
}
/// Starts the session.
open func startSession() {
guard !isRunning else {
return
}
sessionQueue.async() { [weak self] in
self?.session.startRunning()
}
}
/// Stops the session.
open func stopSession() {
guard isRunning else {
return
}
sessionQueue.async() { [weak self] in
self?.session.stopRunning()
}
}
/// Switches the camera if possible.
open func switchCameras() {
guard canSwitchCameras else {
return
}
do {
guard let v = position else {
return
}
delegate?.captureSessionWillSwitchCameras?(session: self, position: v)
let videoInput: AVCaptureDeviceInput? = try AVCaptureDeviceInput(device: inactiveCamera!)
session.beginConfiguration()
session.removeInput(activeVideoInput)
if session.canAddInput(videoInput) {
session.addInput(videoInput)
activeVideoInput = videoInput
} else {
session.addInput(activeVideoInput)
}
session.commitConfiguration()
delegate?.captureSessionDidSwitchCameras?(session: self, position: position!)
} catch let e as NSError {
delegate?.captureSessionFailedWithError?(session: self, error: e)
}
}
/**
Checks if a given focus mode is supported.
- Parameter focusMode: An AVCaptureFocusMode.
- Returns: A boolean of the result, true if supported, false otherwise.
*/
open func isFocusModeSupported(focusMode: AVCaptureFocusMode) -> Bool {
return activeCamera!.isFocusModeSupported(focusMode)
}
/**
Checks if a given exposure mode is supported.
- Parameter exposureMode: An AVCaptureExposureMode.
- Returns: A boolean of the result, true if supported, false otherwise.
*/
open func isExposureModeSupported(exposureMode: AVCaptureExposureMode) -> Bool {
return activeCamera!.isExposureModeSupported(exposureMode)
}
/**
Checks if a given flash mode is supported.
- Parameter flashMode: An AVCaptureFlashMode.
- Returns: A boolean of the result, true if supported, false otherwise.
*/
open func isFlashModeSupported(flashMode: AVCaptureFlashMode) -> Bool {
return activeCamera!.isFlashModeSupported(flashMode)
}
/**
Checks if a given torch mode is supported.
- Parameter torchMode: An AVCaptureTorchMode.
- Returns: A boolean of the result, true if supported, false otherwise.
*/
open func isTorchModeSupported(torchMode: AVCaptureTorchMode) -> Bool {
return activeCamera!.isTorchModeSupported(torchMode)
}
/**
Focuses the camera at a given point.
- Parameter at: A CGPoint to focus at.
*/
open func focus(at point: CGPoint) {
var error: NSError?
if isFocusPointOfInterestSupported && isFocusModeSupported(focusMode: .autoFocus) {
do {
let device = activeCamera!
try device.lockForConfiguration()
device.focusPointOfInterest = point
device.focusMode = .autoFocus
device.unlockForConfiguration()
} catch let e as NSError {
error = e
}
} else {
var userInfo = [String: Any]()
userInfo[NSLocalizedDescriptionKey] = "[Material Error: Unsupported focus.]"
userInfo[NSLocalizedFailureReasonErrorKey] = "[Material Error: Unsupported focus.]"
error = NSError(domain: "io.cosmicmind.Material.Capture", code: 0004, userInfo: userInfo)
userInfo[NSUnderlyingErrorKey] = error
}
if let e = error {
delegate?.captureSessionFailedWithError?(session: self, error: e)
}
}
/**
Exposes the camera at a given point.
- Parameter at: A CGPoint to expose at.
*/
open func expose(at point: CGPoint) {
var error: NSError?
if isExposurePointOfInterestSupported && isExposureModeSupported(exposureMode: .continuousAutoExposure) {
do {
let device = activeCamera!
try device.lockForConfiguration()
device.exposurePointOfInterest = point
device.exposureMode = .continuousAutoExposure
if device.isExposureModeSupported(.locked) {
device.addObserver(self, forKeyPath: "adjustingExposure", options: .new, context: &CaptureSessionAdjustingExposureContext)
}
device.unlockForConfiguration()
} catch let e as NSError {
error = e
}
} else {
var userInfo = [String: Any]()
userInfo[NSLocalizedDescriptionKey] = "[Material Error: Unsupported expose.]"
userInfo[NSLocalizedFailureReasonErrorKey] = "[Material Error: Unsupported expose.]"
error = NSError(domain: "io.cosmicmind.Material.Capture", code: 0005, userInfo: userInfo)
userInfo[NSUnderlyingErrorKey] = error
}
if let e = error {
delegate?.captureSessionFailedWithError?(session: self, error: e)
}
}
open override func observeValue(forKeyPath keyPath: String?, of object: Any?, change: [NSKeyValueChangeKey: Any]?, context: UnsafeMutableRawPointer?) {
if context == &CaptureSessionAdjustingExposureContext {
let device = object as! AVCaptureDevice
if !device.isAdjustingExposure && device.isExposureModeSupported(.locked) {
(object! as AnyObject).removeObserver(self, forKeyPath: "adjustingExposure", context: &CaptureSessionAdjustingExposureContext)
DispatchQueue.main.async() {
do {
try device.lockForConfiguration()
device.exposureMode = .locked
device.unlockForConfiguration()
} catch let e as NSError {
self.delegate?.captureSessionFailedWithError?(session: self, error: e)
}
}
}
} else {
super.observeValue(forKeyPath: keyPath, of: object, change: change, context: context)
}
}
/**
Resets the camera focus and exposure.
- Parameter focus: A boolean indicating to reset the focus.
- Parameter exposure: A boolean indicating to reset the exposure.
*/
open func reset(focus: Bool = true, exposure: Bool = true) {
let device = activeCamera!
let canResetFocus = device.isFocusPointOfInterestSupported && device.isFocusModeSupported(.continuousAutoFocus)
let canResetExposure = device.isExposurePointOfInterestSupported && device.isExposureModeSupported(.continuousAutoExposure)
let centerPoint = CGPoint(x: 0.5, y: 0.5)
do {
try device.lockForConfiguration()
if canResetFocus && focus {
device.focusMode = .continuousAutoFocus
device.focusPointOfInterest = centerPoint
}
if canResetExposure && exposure {
device.exposureMode = .continuousAutoExposure
device.exposurePointOfInterest = centerPoint
}
device.unlockForConfiguration()
} catch let e as NSError {
delegate?.captureSessionFailedWithError?(session: self, error: e)
}
}
/// Captures a still image.
open func captureStillImage() {
sessionQueue.async() { [weak self] in
guard let s = self else {
return
}
guard let v = s.imageOutput.connection(withMediaType: AVMediaTypeVideo) else {
return
}
v.videoOrientation = s.videoOrientation
s.imageOutput.captureStillImageAsynchronously(from: v) { [weak self] (sampleBuffer: CMSampleBuffer?, error: Error?) -> Void in
guard let s = self else {
return
}
var captureError = error
if nil == captureError {
let data = AVCaptureStillImageOutput.jpegStillImageNSDataRepresentation(sampleBuffer)!
if let image1 = UIImage(data: data) {
if let image2 = image1.adjustOrientation() {
s.delegate?.captureSessionStillImageAsynchronously?(session: s, image: image2)
} else {
var userInfo = [String: Any]()
userInfo[NSLocalizedDescriptionKey] = "[Material Error: Cannot fix image orientation.]"
userInfo[NSLocalizedFailureReasonErrorKey] = "[Material Error: Cannot fix image orientation.]"
captureError = NSError(domain: "io.cosmicmind.Material.Capture", code: 0006, userInfo: userInfo)
userInfo[NSUnderlyingErrorKey] = error
}
} else {
var userInfo = [String: Any]()
userInfo[NSLocalizedDescriptionKey] = "[Material Error: Cannot capture image from data.]"
userInfo[NSLocalizedFailureReasonErrorKey] = "[Material Error: Cannot capture image from data.]"
captureError = NSError(domain: "io.cosmicmind.Material.Capture", code: 0007, userInfo: userInfo)
userInfo[NSUnderlyingErrorKey] = error
}
}
if let e = captureError {
s.delegate?.captureSessionStillImageAsynchronouslyFailedWithError?(session: s, error: e)
}
}
}
}
/// Starts recording.
open func startRecording() {
if !isRecording {
sessionQueue.async() { [weak self] in
guard let s = self else {
return
}
if let v = s.movieOutput.connection(withMediaType: AVMediaTypeVideo) {
v.videoOrientation = s.videoOrientation
v.preferredVideoStabilizationMode = .auto
}
guard let v = s.activeCamera else {
return
}
if v.isSmoothAutoFocusSupported {
do {
try v.lockForConfiguration()
v.isSmoothAutoFocusEnabled = true
v.unlockForConfiguration()
} catch let e as NSError {
s.delegate?.captureSessionFailedWithError?(session: s, error: e)
}
}
s.movieOutputURL = s.uniqueURL()
if let v = s.movieOutputURL {
s.movieOutput.startRecording(toOutputFileURL: v as URL!, recordingDelegate: s)
}
}
}
}
/// Stops recording.
open func stopRecording() {
guard isRecording else {
return
}
movieOutput.stopRecording()
}
/**
Prepares the view instance when initialized. When subclassing,
it is recommended to override the prepare method
to initialize property values and other setup operations.
The super.prepare method should always be called immediately
when subclassing.
*/
open func prepare() {
prepareSession()
prepareSessionQueue()
prepareActiveVideoInput()
prepareActiveAudioInput()
prepareImageOutput()
prepareMovieOutput()
}
/// Prepares the sessionQueue.
private func prepareSessionQueue() {
sessionQueue = DispatchQueue(label: "io.cosmicmind.Material.CaptureSession", attributes: .concurrent, target: nil)
}
/// Prepares the session.
private func prepareSession() {
session = AVCaptureSession()
}
/// Prepares the activeVideoInput.
private func prepareActiveVideoInput() {
do {
activeVideoInput = try AVCaptureDeviceInput(device: AVCaptureDevice.defaultDevice(withMediaType: AVMediaTypeVideo))
guard session.canAddInput(activeVideoInput) else {
return
}
session.addInput(activeVideoInput)
} catch let e as NSError {
delegate?.captureSessionFailedWithError?(session: self, error: e)
}
}
/// Prepares the activeAudioInput.
private func prepareActiveAudioInput() {
do {
activeAudioInput = try AVCaptureDeviceInput(device: AVCaptureDevice.defaultDevice(withMediaType: AVMediaTypeAudio))
guard session.canAddInput(activeAudioInput) else {
return
}
session.addInput(activeAudioInput)
} catch let e as NSError {
delegate?.captureSessionFailedWithError?(session: self, error: e)
}
}
/// Prepares the imageOutput.
private func prepareImageOutput() {
imageOutput = AVCaptureStillImageOutput()
guard session.canAddOutput(imageOutput) else {
return
}
imageOutput.outputSettings = [AVVideoCodecKey: AVVideoCodecJPEG]
session.addOutput(imageOutput)
}
/// Prepares the movieOutput.
private func prepareMovieOutput() {
movieOutput = AVCaptureMovieFileOutput()
guard session.canAddOutput(movieOutput) else {
return
}
session.addOutput(movieOutput)
}
/**
A reference to the camera at a given position, if one exists.
- Parameter at: An AVCaptureDevicePosition.
- Returns: An AVCaptureDevice if one exists, or nil otherwise.
*/
private func camera(at position: AVCaptureDevicePosition) -> AVCaptureDevice? {
let devices = AVCaptureDevice.devices(withMediaType: AVMediaTypeVideo) as! [AVCaptureDevice]
for device in devices {
if device.position == position {
return device
}
}
return nil
}
/**
Creates a unique URL if possible.
- Returns: An NSURL if it is possible to create one.
*/
private func uniqueURL() -> URL? {
do {
let directory = try FileManager.default.url(for: .documentDirectory, in: .userDomainMask, appropriateFor: nil, create: true)
let dateFormatter = DateFormatter()
dateFormatter.dateStyle = .full
dateFormatter.timeStyle = .full
return directory.appendingPathComponent(dateFormatter.string(from: NSDate() as Date) + ".mov")
} catch let e as NSError {
delegate?.captureSessionCreateMovieFileFailedWithError?(session: self, error: e)
}
return nil
}
}
extension CaptureSession: AVCaptureFileOutputRecordingDelegate {
public func capture(_ captureOutput: AVCaptureFileOutput!, didStartRecordingToOutputFileAt fileURL: URL!, fromConnections connections: [Any]!) {
isRecording = true
delegate?.captureSessionDidStartRecordingToOutputFileAtURL?(session: self, captureOutput: captureOutput, fileURL: fileURL as NSURL, fromConnections: connections)
}
public func capture(_ captureOutput: AVCaptureFileOutput!, didFinishRecordingToOutputFileAt outputFileURL: URL!, fromConnections connections: [Any]!, error: Error!) {
isRecording = false
delegate?.captureSessionDidFinishRecordingToOutputFileAtURL?(session: self, captureOutput: captureOutput, outputFileURL: outputFileURL as NSURL, fromConnections: connections, error: error)
}
}
......@@ -42,6 +42,15 @@ public enum InterimSpacePreset: Int {
case interimSpace7
case interimSpace8
case interimSpace9
case interimSpace10
case interimSpace11
case interimSpace12
case interimSpace13
case interimSpace14
case interimSpace15
case interimSpace16
case interimSpace17
case interimSpace18
}
public typealias InterimSpace = CGFloat
......@@ -69,5 +78,23 @@ public func InterimSpacePresetToValue(preset: InterimSpacePreset) -> InterimSpac
return 24
case .interimSpace9:
return 28
case .interimSpace10:
return 32
case .interimSpace11:
return 36
case .interimSpace12:
return 40
case .interimSpace13:
return 44
case .interimSpace14:
return 48
case .interimSpace15:
return 52
case .interimSpace16:
return 56
case .interimSpace17:
return 60
case .interimSpace18:
return 64
}
}
......@@ -149,7 +149,6 @@ open class Toolbar: Bar {
open override func prepare() {
super.prepare()
zPosition = 1000
contentViewAlignment = .center
prepareTitleLabel()
prepareDetailLabel()
}
......