2

AVCaptureSession addInput 問題:我的應用程序使用 AVCaptureSession,在之前的 iOS 版本上運行正常,但在 iOS 8 設備上運行時會零星崩潰,問題一直沒有解決。異常發生在「[session addInput:input];」這一行。請幫忙檢查下面的代碼,並告訴我如何解決在 iOS 8 上 [session addInput:input] 處出現的錯誤。

Printing description of error: Error Domain=AVFoundationErrorDomain Code=-11852 "Cannot use Back Camera" UserInfo=0x17c076e0 {NSLocalizedDescription=Cannot use Back Camera, AVErrorDeviceKey=, NSLocalizedFailureReason=This app is not authorized to use Back Camera.}

#import "CameraViewController.h" 
#import "MAImagePickerControllerAdjustViewController.h" 
#import "PopupViewController.h" 
#import "MAImagePickerFinalViewController.h" 

@implementation CameraViewController 

@synthesize vImagePreview; 
@synthesize vImage; 
@synthesize stillImageOutput; 
@synthesize lFrameCount; 
@synthesize session; 
@synthesize device; 
@synthesize oneOff; 
@synthesize captureManager = _captureManager; 
@synthesize flashButton = _flashButton; 
@synthesize vImage1; 
@synthesize vImage2; 
@synthesize vImage3; 
@synthesize vImage4; 
@synthesize vImage5; 
@synthesize vImage6; 

///////////////////////////////////////////////////////////////////// 
#pragma mark - UI Actions 
///////////////////////////////////////////////////////////////////// 
/// Captures a still image from the session's video connection, then pushes
/// the appropriate follow-up controller (FinalViewController for the
/// "cameraType" flow, AdjustViewController otherwise).
-(IBAction) captureNow
{
    // Locate the connection carrying video on the still-image output.
    AVCaptureConnection *videoConnection = nil;
    for (AVCaptureConnection *connection in stillImageOutput.connections)
    {
        for (AVCaptureInputPort *port in [connection inputPorts])
        {
            if ([[port mediaType] isEqual:AVMediaTypeVideo])
            {
                videoConnection = connection;
                break;
            }
        }
        if (videoConnection) { break; }
    }

    if (!videoConnection)
    {
        // No video input is attached — e.g. camera access was denied in the
        // iOS 8 privacy settings. Capturing from nil would fail, so bail out.
        NSLog(@"ERROR: no video connection available, cannot capture");
        return;
    }

    NSLog(@"about to request a capture from: %@", stillImageOutput);
    [stillImageOutput captureStillImageAsynchronouslyFromConnection:videoConnection completionHandler:^(CMSampleBufferRef imageSampleBuffer, NSError *error)
    {
        if (!imageSampleBuffer)
        {
            // Capture failed (session interrupted, no authorization, ...).
            NSLog(@"ERROR: still image capture failed: %@", error);
            return;
        }

        CFDictionaryRef exifAttachments = CMGetAttachment(imageSampleBuffer, kCGImagePropertyExifDictionary, NULL);
        if (exifAttachments)
        {
            // Do something with the attachments.
            NSLog(@"attachements: %@", exifAttachments);
        }
        else
            NSLog(@"no attachments");

        NSData *imageData = [AVCaptureStillImageOutput jpegStillImageNSDataRepresentation:imageSampleBuffer];
        UIImage *image = [[UIImage alloc] initWithData:imageData];

        NSString *val1 = [[NSUserDefaults standardUserDefaults] objectForKey:@"clickTypeTwo"];

        if ([val1 isEqualToString:@"cameraType"])
        {
            // Stop the session off the main thread; it can block briefly.
            dispatch_async(dispatch_get_global_queue(DISPATCH_QUEUE_PRIORITY_DEFAULT, 0), ^{
                [session stopRunning];
            });

            // NOTE(review): the original 568-point screen-height check built
            // the identical controller in both branches, so it is collapsed.
            // The garbled "[email protected]" tokens were restored to the
            // obvious "?@"..." ternary they were scraped from.
            FinalViewController *finalView = [[FinalViewController alloc] initWithNibName:IS_IPAD() ? @"FinalViewController_iPad" : @"FinalViewController" bundle:nil];

            finalView.sourceImage = image;
            //finalView.imageFrameEdited = YES;
            CATransition *transition = [CATransition animation];
            transition.duration = 0.4;
            transition.type = kCATransitionFade;
            transition.subtype = kCATransitionFromBottom;
            [self.navigationController.view.layer addAnimation:transition forKey:kCATransition];
            [self.navigationController pushViewController:finalView animated:NO];
        }
        else
        {
            [session stopRunning];

            // Same collapse of the duplicated height branches as above.
            AdjustViewController *adjustViewController = [[AdjustViewController alloc] initWithNibName:IS_IPAD() ? @"AdjustViewController_iPad" : @"AdjustViewController" bundle:nil];

            adjustViewController.sourceImage = image;

            CATransition *transition = [CATransition animation];
            transition.duration = 0.4;
            transition.type = kCATransitionFade;
            transition.subtype = kCATransitionFromBottom;
            [self.navigationController.view.layer addAnimation:transition forKey:kCATransition];
            [self.navigationController pushViewController:adjustViewController animated:NO];
        }
    }];
}


/// Pushes the final image view controller once cropping has finished.
/// @param image The cropped image to display.
-(void)cropImageViewControllerDidFinished:(UIImage *)image{
    // NOTE(review): both branches of the original 568-point height check
    // created the identical controller, so the check is collapsed. The
    // garbled "[email protected]" token is restored to the "?@"..." ternary.
    // The variable keeps its original FinalViewController type even though a
    // MAImagePickerFinalViewController is allocated — presumably a subclass;
    // TODO confirm against the class headers.
    FinalViewController *finalView = [[MAImagePickerFinalViewController alloc] initWithNibName:IS_IPAD() ? @"FinalViewController_iPad" : @"FinalViewController" bundle:nil];

    finalView.sourceImage = image;
    //finalView.imageFrameEdited = YES;
    CATransition *transition = [CATransition animation];
    transition.duration = 0.4;
    transition.type = kCATransitionFade;
    transition.subtype = kCATransitionFromBottom;
    [self.navigationController.view.layer addAnimation:transition forKey:kCATransition];
    [self.navigationController pushViewController:finalView animated:NO];
}


///////////////////////////////////////////////////////////////////// 
#pragma mark - Video Frame Delegate 
///////////////////////////////////////////////////////////////////// 
/// Video-frame delegate callback; runs on the "cameraQueue" capture queue.
/// Counts frames and mirrors the count into the lFrameCount label.
- (void)captureOutput:(AVCaptureOutput *)captureOutput
didOutputSampleBuffer:(CMSampleBufferRef)sampleBuffer
       fromConnection:(AVCaptureConnection *)connection
{
    iFrameCount++;

    // Update the UI on the main thread. The original used
    // performSelectorOnMainThread:...waitUntilDone:YES, which stalls the
    // capture queue for every frame; dispatch_async does not block it.
    // stringWithFormat: also avoids the alloc/init leak under MRC.
    NSString *frameCountString = [NSString stringWithFormat:@"%4.4d", iFrameCount];
    dispatch_async(dispatch_get_main_queue(), ^{
        [lFrameCount setText:frameCountString];
    });
}

/// Reveals the left sidebar when the shared sidebar controller supports it.
- (IBAction)showLeftSideBar
{
    SidebarViewController *sidebar = [SidebarViewController share];
    if ([sidebar respondsToSelector:@selector(showSideBarControllerWithDirection:)]) {
        [sidebar showSideBarControllerWithDirection:SideBarShowDirectionLeft];
    }
}

// Intentionally empty: the right sidebar action is wired up in the nib but
// not implemented yet.
- (IBAction)showRightSideBar:(id)sender 
{ 



} 

/// Toggles between the front and back cameras.
/// Creates the new input BEFORE tearing down the old one; on iOS 8 input
/// creation fails (error -11852) when camera access is denied in
/// Settings > Privacy > Camera, and adding a nil input would crash.
- (IBAction)flipCamera:(id)sender
{
    AVCaptureDevicePosition desiredPosition =
        isUsingFrontFacingCamera ? AVCaptureDevicePositionBack : AVCaptureDevicePositionFront;

    for (AVCaptureDevice *d in [AVCaptureDevice devicesWithMediaType:AVMediaTypeVideo]) {
        if ([d position] != desiredPosition) {
            continue;
        }

        NSError *error = nil;
        AVCaptureDeviceInput *input = [AVCaptureDeviceInput deviceInputWithDevice:d error:&error];
        if (!input) {
            // Keep the current camera attached rather than crashing.
            NSLog(@"ERROR: could not create input for camera flip: %@", error);
            return;
        }

        [[self session] beginConfiguration];
        for (AVCaptureInput *oldInput in [[self session] inputs]) {
            [[self session] removeInput:oldInput];
        }
        if ([[self session] canAddInput:input]) {
            [[self session] addInput:input];
        }
        [[self session] commitConfiguration];
        break;
    }

    isUsingFrontFacingCamera = !isUsingFrontFacingCamera;
}

BOOL isUsingFrontFacingCamera; 



///////////////////////////////////////////////////////////////////// 
#pragma mark - Guts 
///////////////////////////////////////////////////////////////////// 

/// Nothing is cached here worth purging; defer entirely to the superclass.
- (void)didReceiveMemoryWarning
{
    [super didReceiveMemoryWarning];
}



///////////////////////////////////////////////////////////////////// 
#pragma mark - View lifecycle 
///////////////////////////////////////////////////////////////////// 

/// No load-time setup is needed; the capture session is built in
/// viewDidAppear: instead.
- (void)viewDidLoad
{
    [super viewDidLoad];
}

/// Deprecated lifecycle hook; no retained subviews are released here.
- (void)viewDidUnload
{
    [super viewDidUnload];
}

/// Nothing to prepare before appearance; session setup runs in viewDidAppear:.
- (void)viewWillAppear:(BOOL)animated
{
    [super viewWillAppear:animated];
}


/// Builds the capture session, attaches a preview layer to vImagePreview,
/// wires up the still-image and video-frame outputs, and starts the session.
///
/// This is where the reported iOS 8 crash happened: when the user denies
/// camera access (Settings > Privacy > Camera), deviceInputWithDevice:error:
/// returns nil with AVFoundationErrorDomain code -11852 ("not authorized to
/// use Back Camera"), and [session addInput:nil] throws. The fix is to bail
/// out on a nil input and guard every add with canAddInput:/canAddOutput:.
- (void)viewDidAppear:(BOOL)animated
{
    [super viewDidAppear:animated];

    flashIsOn = YES;

    // NOTE(review): this runs on EVERY appearance, so returning to this
    // screen stacks a fresh session and preview layer on top of the old
    // ones — consider moving the one-time setup to viewDidLoad.
    session = [[AVCaptureSession alloc] init];
    session.sessionPreset = AVCaptureSessionPreset640x480;

    CALayer *viewLayer = self.vImagePreview.layer;
    NSLog(@"viewLayer = %@", viewLayer);

    // Preview layer fills vImagePreview, cropping to preserve aspect.
    AVCaptureVideoPreviewLayer *captureVideoPreviewLayer = [[AVCaptureVideoPreviewLayer alloc] initWithSession:session];
    CGRect bounds = vImagePreview.layer.bounds;
    captureVideoPreviewLayer.videoGravity = AVLayerVideoGravityResizeAspectFill;
    captureVideoPreviewLayer.bounds = bounds;
    captureVideoPreviewLayer.position = CGPointMake(CGRectGetMidX(bounds), CGRectGetMidY(bounds));
    captureVideoPreviewLayer.frame = self.vImagePreview.bounds;
    [self.vImagePreview.layer addSublayer:captureVideoPreviewLayer];

    device = [AVCaptureDevice defaultDeviceWithMediaType:AVMediaTypeVideo];

    NSError *error = nil;
    AVCaptureDeviceInput *input = [AVCaptureDeviceInput deviceInputWithDevice:device error:&error];
    if (!input) {
        // Authorization denied (or no camera). Do NOT fall through to
        // addInput: — that is the crash. Surface the error and stop.
        NSLog(@"ERROR: trying to open camera: %@", error);
        return;
    }
    if ([session canAddInput:input]) {
        [session addInput:input];
    }

    /////////////////////////////////////////////////////////////
    // OUTPUT #1: Still image, retained so captureNow can use it.
    /////////////////////////////////////////////////////////////
    stillImageOutput = [[AVCaptureStillImageOutput alloc] init];
    NSDictionary *outputSettings = [[NSDictionary alloc] initWithObjectsAndKeys:AVVideoCodecJPEG, AVVideoCodecKey, nil];
    [stillImageOutput setOutputSettings:outputSettings];
    if ([session canAddOutput:stillImageOutput]) {
        [session addOutput:stillImageOutput];
    }

    /////////////////////////////////////////////////////////////
    // OUTPUT #2: Video frames, delivered to our delegate callback.
    /////////////////////////////////////////////////////////////
    AVCaptureVideoDataOutput *captureOutput = [[AVCaptureVideoDataOutput alloc] init];
    captureOutput.alwaysDiscardsLateVideoFrames = YES;

    // Frames are funneled to the delegate on a dedicated serial queue.
    // dispatch_release matches this file's MRC conventions.
    dispatch_queue_t queue = dispatch_queue_create("cameraQueue", NULL);
    [captureOutput setSampleBufferDelegate:self queue:queue];
    dispatch_release(queue);

    // Store frames as BGRA (the fast path on iOS).
    NSString *key = (NSString *)kCVPixelBufferPixelFormatTypeKey;
    NSNumber *value = [NSNumber numberWithUnsignedInt:kCVPixelFormatType_32BGRA];
    NSDictionary *videoSettings = [NSDictionary dictionaryWithObject:value forKey:key];
    [captureOutput setVideoSettings:videoSettings];

    if ([session canAddOutput:captureOutput]) {
        [session addOutput:captureOutput];
    }

    // Start the capture session and reset the frame counter.
    [session startRunning];
    iFrameCount = 0;
}



/// Teardown is deferred to viewDidDisappear:, which stops the session.
- (void)viewWillDisappear:(BOOL)animated
{
    [super viewWillDisappear:animated];
}

/// Stops the camera as soon as the view is off screen.
- (void)viewDidDisappear:(BOOL)animated
{
    [super viewDidDisappear:animated];
    [session stopRunning];
}

/// iPad rotates freely; iPhone allows every orientation except
/// upside-down portrait.
- (BOOL)shouldAutorotateToInterfaceOrientation:(UIInterfaceOrientation)interfaceOrientation
{
    if ([[UIDevice currentDevice] userInterfaceIdiom] != UIUserInterfaceIdiomPhone) {
        return YES;
    }
    return interfaceOrientation != UIInterfaceOrientationPortraitUpsideDown;
}

// Intentionally empty: the cancel button is wired up in the nib but the
// dismissal behavior is not implemented yet.
- (IBAction)cancelButton:(id)sender{ 

} 

/// Toggles torch + flash on the capture device and updates the button UI.
/// NOTE(review): the flashIsOn flag is assigned the *opposite* of the new
/// state in the original ("NO" after turning on) — preserved here, but the
/// naming is misleading and worth revisiting.
- (IBAction)flashOn:(id)sender{

    Class captureDeviceClass = NSClassFromString(@"AVCaptureDevice");
    if (captureDeviceClass == nil) {
        return;
    }
    if (![device hasTorch] || ![device hasFlash]) {
        return;
    }

    NSError *error = nil;
    if (![device lockForConfiguration:&error]) {
        // Without the configuration lock, setTorchMode:/setFlashMode: throw.
        // The original passed nil and ignored this failure mode.
        NSLog(@"ERROR: could not lock device for flash configuration: %@", error);
        return;
    }
    if (flashIsOn) {
        [device setTorchMode:AVCaptureTorchModeOn];
        [device setFlashMode:AVCaptureFlashModeOn];
        // NOTE(review): the original had a garbled "[email protected]"On";"
        // line here — presumably a status-label update whose identifier was
        // destroyed by email obfuscation; restore it from the project source.
        [_flashButton setImage:[UIImage imageNamed:@"flash-on-button"]];
        _flashButton.accessibilityLabel = @"Disable Camera Flash";
        flashIsOn = NO; //define as a variable/property if you need to know status
    } else {
        [_flashButton setImage:[UIImage imageNamed:@"flash-off-button"]];
        _flashButton.accessibilityLabel = @"Enable Camera Flash";
        // NOTE(review): a matching garbled "...=@"Off";" line was here.
        [device setTorchMode:AVCaptureTorchModeOff];
        [device setFlashMode:AVCaptureFlashModeOff];
        flashIsOn = YES;
    }
    [device unlockForConfiguration];
}

/// MRC teardown. Per the accepted fix for the iOS 8 crash, detach every
/// input/output from the session and drop our references before dying, so
/// no camera input outlives the controller.
- (void)dealloc {
    [[self session] stopRunning];
    for (AVCaptureInput *anInput in [[self session] inputs]) {
        [[self session] removeInput:anInput];
    }
    for (AVCaptureOutput *anOutput in [[self session] outputs]) {
        [[self session] removeOutput:anOutput];
    }
    self.session = nil;
    self.stillImageOutput = nil;
    [super dealloc];
}


/// Persists the user's flash preference and flushes it to disk immediately.
/// @param flashSetting YES to remember flash as enabled.
- (void)storeFlashSettingWithBool:(BOOL)flashSetting
{
    NSUserDefaults *defaults = [NSUserDefaults standardUserDefaults];
    [defaults setBool:flashSetting forKey:kCameraFlashDefaultsKey];
    [defaults synchronize];
}

@end 

回答

2

更改dealloc方法

[self.captureSession removeInput:self.videoInput]; 
[self.captureSession removeOutput:self.videoOutput]; 

self.captureSession = nil; 
self.videoOutput = nil; 
self.videoInput = nil; 
+0

感謝您的回覆,我會更新,然後讓你知道 – 2vision2 2014-10-17 11:31:38

+0

仍然崩潰,請檢查下面的代碼:- (void)dealloc { [self.session removeInput:input]; [self.session removeOutput:captureOutput]; self.session = nil; input = nil; captureOutput = nil; [super dealloc]; } – 2vision2 2014-10-17 11:32:18

+0

當然我在這裏。 – 2014-10-17 11:32:29

1

我們今天也遇到了這個問題。從 iOS 8.0.2 起,訪問相機需要在隱私設置中授權「相機」(而不是「相機膠捲」);啓用該設置後,這段代碼就能正常工作了。

7

請檢查您的設備設置。 轉到隱私--->相機--->檢查設置爲你的應用----->開啓

運行應用程序。有用。 乾杯

1

今天在我的應用程序中看到了同樣的錯誤。我的處理方式是彈出一個警報,其中包含一個跳轉到本應用隱私設置頁面的「設置」按鈕快捷方式。

// Building the camera input throws on iOS 8+ when the user has denied
// camera access in Settings > Privacy > Camera (AVFoundation -11852).
do { 
    let captureInput:AVCaptureDeviceInput = try AVCaptureDeviceInput(device: self.device) 
    ... 
} catch let error as NSError { 
    // Surface the system's localized reason and offer a "Settings" shortcut
    // that deep-links straight to this app's privacy settings page.
    let alert = UIAlertController(title:error.localizedDescription, message:error.localizedFailureReason, preferredStyle:.Alert) 
    let settingsAction = UIAlertAction(title: "Settings", style: .Default) { (action) in 
     UIApplication.sharedApplication().openURL(NSURL(string:UIApplicationOpenSettingsURLString)!) 
    } 
    alert.addAction(settingsAction) 
    self.presentViewController(alert,animated:true,completion:nil) 
}