Runtime error. Multiple AVCaptureInputs not supported. Swift/Xcode.
The camera torch turns on automatically, as intended by my code, but then the app crashes. It gets stuck on the launch screen with the torch still on. The debugger stops with signal SIGABRT, and the green breakpoint line points to the AppDelegate.
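As far as I understand, this exception is what AVCaptureSession raises when addInput is handed an input it can't accept, such as a second video input on the same session. A minimal sketch of the usual guard, where deviceInput stands in for the AVCaptureDeviceInput built in beginSession() below:

    // sketch only: ask the session whether it will take the input before adding it,
    // so a duplicate video input is skipped instead of raising an exception
    if captureSession.canAddInput(deviceInput) {
        captureSession.addInput(deviceInput)
    } else {
        println("input refused - the session probably already has a video input")
    }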
Debugging screenshots:
http://i.imgur.com/as3MIRx.png
http://i.imgur.com/X7RMcEx.png
ViewController.swift
import UIKit
import Foundation
import AVFoundation
import CoreMedia
import CoreVideo
class ViewController: UIViewController, AVCaptureVideoDataOutputSampleBufferDelegate {

    let captureSession = AVCaptureSession()
    var captureDevice : AVCaptureDevice?
    var validFrameCounter: Int = 0

    // for sampling from the camera
    enum CurrentState {
        case statePaused
        case stateSampling
    }
    var currentState = CurrentState.statePaused

    override func viewDidLoad() {
        super.viewDidLoad()
        captureSession.sessionPreset = AVCaptureSessionPresetHigh
        let devices = AVCaptureDevice.devices()
        // Loop through all the capture devices on this phone
        for device in devices {
            // Make sure this particular device supports video
            if (device.hasMediaType(AVMediaTypeVideo)) {
                // Finally check the position and confirm we've got the back camera
                if (device.position == AVCaptureDevicePosition.Back) {
                    captureDevice = device as? AVCaptureDevice
                    if captureDevice != nil {
                        //println("Capture device found")
                        beginSession() // fatal error
                    }
                }
            }
        }
    }
    // configure device for camera and focus mode
    // start capturing frames
    func beginSession() {
        // Create the AVCapture Session
        var err : NSError? = nil
        captureSession.addInput(AVCaptureDeviceInput(device: captureDevice, error: &err))
        if err != nil {
            println("error: \(err?.localizedDescription)")
        }

        // Automatically switch ON torch mode
        if captureDevice!.hasTorch {
            // lock your device for configuration
            captureDevice!.lockForConfiguration(nil)
            // if the torch is on, turn it off; otherwise turn it on
            captureDevice!.torchMode = captureDevice!.torchActive ? AVCaptureTorchMode.Off : AVCaptureTorchMode.On
            // set the torch intensity to 100%
            captureDevice!.setTorchModeOnWithLevel(1.0, error: nil)
            // unlock your device
            captureDevice!.unlockForConfiguration()
        }

        // Create an AVCaptureInput with the camera device
        if let deviceInput = AVCaptureDeviceInput.deviceInputWithDevice(captureDevice, error: &err) as? AVCaptureInput {
            // Set the input device
            captureSession.addInput(deviceInput)
        } else {
            println("error: \(err?.localizedDescription)")
        }

        // Set the output
        var videoOutput : AVCaptureVideoDataOutput = AVCaptureVideoDataOutput()
        // create a queue to run the capture on
        var captureQueue : dispatch_queue_t = dispatch_queue_create("captureQueue", nil)
        // set ourselves up as the capture delegate
        videoOutput.setSampleBufferDelegate(self, queue: captureQueue)
        // configure the pixel format
        videoOutput.videoSettings = [kCVPixelBufferPixelFormatTypeKey as String : Int(kCVPixelFormatType_32BGRA)]
        // kCVPixelBufferPixelFormatTypeKey is a CFString btw.
        // set the minimum acceptable frame rate to 10 fps
        captureDevice!.activeVideoMinFrameDuration = CMTimeMake(1, 10)
        // and the size of the frames we want - we'll use the smallest frame size available
        captureSession.sessionPreset = AVCaptureSessionPresetLow
        // Add the output
        captureSession.addOutput(videoOutput)
        // Start the session
        captureSession.startRunning()

        func setState(state: CurrentState) {
            switch state {
            case .statePaused:
                // is this correct code?
                UIApplication.sharedApplication().idleTimerDisabled = false
            case .stateSampling:
                // is this correct code?
                UIApplication.sharedApplication().idleTimerDisabled = true // singletons
            }
        }

        // sampling from the camera
        currentState = CurrentState.stateSampling
        // stop the app from sleeping
        UIApplication.sharedApplication().idleTimerDisabled = true
        // update our UI on a timer every 0.1 seconds
        NSTimer.scheduledTimerWithTimeInterval(0.1, target: self, selector: Selector("update"), userInfo: nil, repeats: true)

        func stopCameraCapture() {
            captureSession.stopRunning()
        }

        // pragma mark Pause and Resume of detection
        func pause() {
            if currentState == CurrentState.statePaused {
                return
            }
            // switch off the torch
            if captureDevice!.isTorchModeSupported(AVCaptureTorchMode.On) {
                captureDevice!.lockForConfiguration(nil)
                captureDevice!.torchMode = AVCaptureTorchMode.Off
                captureDevice!.unlockForConfiguration()
            }
            currentState = CurrentState.statePaused
            // let the application go to sleep if the phone is idle
            UIApplication.sharedApplication().idleTimerDisabled = false
        }

        func resume() {
            if currentState != CurrentState.statePaused {
                return
            }
            // switch on the torch
            if captureDevice!.isTorchModeSupported(AVCaptureTorchMode.On) {
                captureDevice!.lockForConfiguration(nil)
                captureDevice!.torchMode = AVCaptureTorchMode.On
                captureDevice!.unlockForConfiguration()
            }
            currentState = CurrentState.stateSampling
            // stop the app from sleeping
            UIApplication.sharedApplication().idleTimerDisabled = true
        }
    }
}
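For what it's worth, the message matches what beginSession() above does: the camera input is added once at the top through captureSession.addInput(AVCaptureDeviceInput(device: captureDevice, error: &err)) and then a second time through deviceInputWithDevice(_:error:), so the second addInput is presumably the call that aborts. Below is an untested sketch of that part of beginSession() with a single, guarded addInput and the lockForConfiguration result checked before touching the torch; it assumes the same captureSession and captureDevice properties as the code above.

    // sketch: build the device input once and add it only if the session will accept it
    var err : NSError? = nil
    if let deviceInput = AVCaptureDeviceInput.deviceInputWithDevice(captureDevice, error: &err) as? AVCaptureInput {
        if captureSession.canAddInput(deviceInput) {
            captureSession.addInput(deviceInput)
        }
    } else {
        println("error: \(err?.localizedDescription)")
    }

    // sketch: only configure the torch after the lock succeeds;
    // setTorchModeOnWithLevel already switches the torch on at the given level
    if captureDevice!.hasTorch && captureDevice!.lockForConfiguration(nil) {
        captureDevice!.setTorchModeOnWithLevel(1.0, error: nil)
        captureDevice!.unlockForConfiguration()
    }

With the duplicate addInput removed, the session should start without the SIGABRT, and the torch handling in pause()/resume() can stay as it is.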