How can I capture an image using the AVFoundation framework?

I have the following code that opens the camera in a UIView, and it works right now.

But I have two buttons, as in this screenshot: one to capture a photo and the other to load a photo from the library.

How can I take a picture without switching to the native camera?

Here is the code of my .h file:

#import <UIKit/UIKit.h>
#import <AVFoundation/AVFoundation.h>

@interface bgCameraController : UIViewController<AVCaptureMetadataOutputObjectsDelegate>

@property (weak, nonatomic) IBOutlet UIView *cam;
@property (strong, nonatomic) IBOutlet UIImageView *imageView;

- (IBAction)takePhoto:  (UIButton *)sender;
- (IBAction)selectPhoto:(UIButton *)sender;
@end

Here is the code of my .m file:

#import "bgCameraController.h"

@interface bgCameraController ()
@property (nonatomic, strong) AVCaptureSession *captureSession;
@property (nonatomic, strong) AVCaptureVideoPreviewLayer *videoPreviewLayer;
@property (nonatomic, strong) AVAudioPlayer *audioPlayer;
@property (nonatomic) BOOL isReading;

-(BOOL)startReading;
-(void)stopReading;
-(void)loadBeepSound;
@end

@implementation bgCameraController

- (void)viewDidLoad {
    [super viewDidLoad];
    [self loadBeepSound];
    [self startReading];

    // Do any additional setup after loading the view.
}

- (void)didReceiveMemoryWarning {
    [super didReceiveMemoryWarning];
    // Dispose of any resources that can be recreated.
}

- (BOOL)startReading {
    NSError *error;

    // Get an instance of the AVCaptureDevice class to initialize a device object and provide the video
    // as the media type parameter.
    AVCaptureDevice *captureDevice = [AVCaptureDevice defaultDeviceWithMediaType:AVMediaTypeVideo];

    // Get an instance of the AVCaptureDeviceInput class using the previous device object.
    AVCaptureDeviceInput *input = [AVCaptureDeviceInput deviceInputWithDevice:captureDevice error:&error];

    if (!input) {
        // If any error occurs, simply log the description of it and don't continue any more.
        NSLog(@"%@", [error localizedDescription]);
        return NO;
    }

    // Initialize the captureSession object.
    _captureSession = [[AVCaptureSession alloc] init];
    // Set the input device on the capture session.
    [_captureSession addInput:input];


    // Initialize a AVCaptureMetadataOutput object and set it as the output device to the capture session.
    AVCaptureMetadataOutput *captureMetadataOutput = [[AVCaptureMetadataOutput alloc] init];
    [_captureSession addOutput:captureMetadataOutput];

    // Create a new serial dispatch queue.
    dispatch_queue_t dispatchQueue;
    dispatchQueue = dispatch_queue_create("myQueue", NULL);
    [captureMetadataOutput setMetadataObjectsDelegate:self queue:dispatchQueue];
    [captureMetadataOutput setMetadataObjectTypes:[NSArray arrayWithObject:AVMetadataObjectTypeQRCode]];

    // Initialize the video preview layer and add it as a sublayer to the viewPreview view's layer.
    _videoPreviewLayer = [[AVCaptureVideoPreviewLayer alloc] initWithSession:_captureSession];
    [_videoPreviewLayer setVideoGravity:AVLayerVideoGravityResizeAspectFill];
    [_videoPreviewLayer setFrame:_cam.layer.bounds];
    [_cam.layer addSublayer:_videoPreviewLayer];


    // Start video capture.
    [_captureSession startRunning];

    return YES;
}


-(void)stopReading{
    // Stop video capture and make the capture session object nil.
    [_captureSession stopRunning];
    _captureSession = nil;

    // Remove the video preview layer from the viewPreview view's layer.
    //[_videoPreviewLayer removeFromSuperlayer];
}


-(void)loadBeepSound{
    // Get the path to the beep.mp3 file and convert it to a NSURL object.
    NSString *beepFilePath = [[NSBundle mainBundle] pathForResource:@"beep" ofType:@"mp3"];
    NSURL *beepURL = [NSURL fileURLWithPath:beepFilePath];

    NSError *error;

    // Initialize the audio player object using the NSURL object previously set.
    _audioPlayer = [[AVAudioPlayer alloc] initWithContentsOfURL:beepURL error:&error];
    if (error) {
        // If the audio player cannot be initialized then log a message.
        // NSLog(@"Could not play beep file.");
        //NSLog(@"%@", [error localizedDescription]);
    }
    else{
        // If the audio player was successfully initialized then load it in memory.
        [_audioPlayer prepareToPlay];
    }
}
-(void)captureOutput:(AVCaptureOutput *)captureOutput didOutputMetadataObjects:(NSArray *)metadataObjects fromConnection:(AVCaptureConnection *)connection{

    // Check if the metadataObjects array is not nil and it contains at least one object.
    if (metadataObjects != nil && [metadataObjects count] > 0) {
        // Get the metadata object.
        // NSLog(@"%@",metadataObjects);
        AVMetadataMachineReadableCodeObject *metadataObj = [metadataObjects objectAtIndex:0];
        if ([[metadataObj type] isEqualToString:AVMetadataObjectTypeQRCode]) {
            // If the found metadata is equal to the QR code metadata then update the status label's text,
            // stop reading and change the bar button item's title and the flag's value.
            // Everything is done on the main thread.
            NSString *result=[metadataObj stringValue];
            [self performSelectorOnMainThread:@selector(setQRcodeValues:) withObject:result waitUntilDone:NO];
            //  [_result performSelectorOnMainThread:@selector(setText:) withObject:[metadataObj stringValue] waitUntilDone:NO];

            [self performSelectorOnMainThread:@selector(stopReading) withObject:nil waitUntilDone:NO];
            // [_button performSelectorOnMainThread:@selector(setTitle:) withObject:@"Start!" waitUntilDone:NO];

            _isReading = NO;

            // If the audio player is not nil, then play the sound effect.
            if (_audioPlayer) {
                [_audioPlayer play];
            }
        }
    }


}

/*
#pragma mark - Navigation

// In a storyboard-based application, you will often want to do a little preparation before navigation
- (void)prepareForSegue:(UIStoryboardSegue *)segue sender:(id)sender {
    // Get the new view controller using [segue destinationViewController].
    // Pass the selected object to the new view controller.
}
*/

@end

Please help me... take a picture by tapping that button (see the image at the link).


asked by john mike mike on 15.10.2014
Comment: Since you have set up your own camera preview, it sounds like you want to avoid UIImagePickerController and its built-in preview. Check out this answer and the code snippet from that very closely related question. You should be able to come up with your own takePhoto implementation. — Michael Dautermann, 15.10.2014
Comment: Yes, but that one looks complicated... — john mike mike, 15.10.2014


Answers (1)


I have captured an image while scanning a QR code like this:

1) First, add an AVCaptureStillImageOutput property:

@property (strong, nonatomic) AVCaptureStillImageOutput *stillImageOutput;

2) Set a session preset on the AVCaptureSession after initializing it:

[self.session setSessionPreset:AVCaptureSessionPreset640x480];

3) Now add the AVCaptureStillImageOutput as an output of the AVCaptureSession:

// Prepare an output for snapshotting
self.stillImageOutput = [AVCaptureStillImageOutput new];
[self.session addOutput:self.stillImageOutput];
self.stillImageOutput.outputSettings = @{AVVideoCodecKey: AVVideoCodecJPEG};
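
Note that these snippets use self.session, while the code in the question keeps the session in _captureSession. A rough sketch of how steps 2 and 3 could slot into the question's startReading method (assuming the stillImageOutput property from step 1 has been added to bgCameraController's class extension):

// Inside startReading, right after the capture session is created:
_captureSession = [[AVCaptureSession alloc] init];
[_captureSession setSessionPreset:AVCaptureSessionPreset640x480];
[_captureSession addInput:input];

// ... the metadata output setup stays exactly as before ...

// Add the still image output alongside the metadata output.
self.stillImageOutput = [AVCaptureStillImageOutput new];
self.stillImageOutput.outputSettings = @{AVVideoCodecKey: AVVideoCodecJPEG};
if ([_captureSession canAddOutput:self.stillImageOutput]) {
    [_captureSession addOutput:self.stillImageOutput];
}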

4) Add the code below to capture the scanned image inside the captureOutput:didOutputMetadataObjects:fromConnection: delegate method:

 __block UIImage *scannedImg = nil;
// Grab a still image from the session at the moment the QR code is detected
AVCaptureConnection *stillConnection = [self.stillImageOutput connectionWithMediaType:AVMediaTypeVideo];
[self.stillImageOutput captureStillImageAsynchronouslyFromConnection:stillConnection completionHandler:^(CMSampleBufferRef imageDataSampleBuffer, NSError *error) {
    if(error) {
        NSLog(@"There was a problem");
        return;
    }

    NSData *jpegData = [AVCaptureStillImageOutput jpegStillImageNSDataRepresentation:imageDataSampleBuffer];

    scannedImg = [UIImage imageWithData:jpegData];
    NSLog(@"scannedImg : %@",scannedImg);
}];
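
If you want the capture to happen when the takePhoto: button declared in the question's .h is tapped, rather than when a QR code is detected, a minimal sketch might look like this (assuming stillImageOutput has been set up as in steps 1–3, and that the result should simply be shown in the question's imageView outlet):

- (IBAction)takePhoto:(UIButton *)sender {
    AVCaptureConnection *stillConnection =
        [self.stillImageOutput connectionWithMediaType:AVMediaTypeVideo];

    [self.stillImageOutput captureStillImageAsynchronouslyFromConnection:stillConnection
        completionHandler:^(CMSampleBufferRef imageDataSampleBuffer, NSError *error) {
            if (error || imageDataSampleBuffer == NULL) {
                NSLog(@"Still image capture failed: %@", error);
                return;
            }

            NSData *jpegData =
                [AVCaptureStillImageOutput jpegStillImageNSDataRepresentation:imageDataSampleBuffer];
            UIImage *photo = [UIImage imageWithData:jpegData];

            // The completion handler is not guaranteed to run on the main thread,
            // so hop back before touching UIKit.
            dispatch_async(dispatch_get_main_queue(), ^{
                self.imageView.image = photo;
            });
        }];
}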

For reference, see CodeScanViewController.

That's it, enjoy!

answered by Paresh Navadiya on 07.07.2015