How do I capture an image with the AVFoundation framework?

I have the following code that opens the camera inside a UIView, and it currently works.

But I have two buttons, as in this screenshot: one to take a photo and another to pick a photo from the library.

How can I take a photo without switching to the built-in camera?

Here is the code of my .h file:

#import <UIKit/UIKit.h>
#import <AVFoundation/AVFoundation.h>

@interface bgCameraController : UIViewController<AVCaptureMetadataOutputObjectsDelegate>

@property (weak, nonatomic) IBOutlet UIView *cam;
@property (strong, nonatomic) IBOutlet UIImageView *imageView;

- (IBAction)takePhoto:  (UIButton *)sender;
- (IBAction)selectPhoto:(UIButton *)sender;
@end

Here is the code of my .m file:

#import "bgCameraController.h"

@interface bgCameraController ()
@property (nonatomic, strong) AVCaptureSession *captureSession;
@property (nonatomic, strong) AVCaptureVideoPreviewLayer *videoPreviewLayer;
@property (nonatomic, strong) AVAudioPlayer *audioPlayer;
@property (nonatomic) BOOL isReading;

-(BOOL)startReading;
-(void)stopReading;
-(void)loadBeepSound;
@end

@implementation bgCameraController

- (void)viewDidLoad {
    [super viewDidLoad];
    [self loadBeepSound];
    [self startReading];

    // Do any additional setup after loading the view.
}

- (void)didReceiveMemoryWarning {
    [super didReceiveMemoryWarning];
    // Dispose of any resources that can be recreated.
}

- (BOOL)startReading {
    NSError *error;

    // Get an instance of the AVCaptureDevice class to initialize a device object and provide the video
    // as the media type parameter.
    AVCaptureDevice *captureDevice = [AVCaptureDevice defaultDeviceWithMediaType:AVMediaTypeVideo];

    // Get an instance of the AVCaptureDeviceInput class using the previous device object.
    AVCaptureDeviceInput *input = [AVCaptureDeviceInput deviceInputWithDevice:captureDevice error:&error];

    if (!input) {
        // If any error occurs, simply log the description of it and don't continue any more.
        NSLog(@"%@", [error localizedDescription]);
        return NO;
    }

    // Initialize the captureSession object.
    _captureSession = [[AVCaptureSession alloc] init];
    // Set the input device on the capture session.
    [_captureSession addInput:input];


    // Initialize a AVCaptureMetadataOutput object and set it as the output device to the capture session.
    AVCaptureMetadataOutput *captureMetadataOutput = [[AVCaptureMetadataOutput alloc] init];
    [_captureSession addOutput:captureMetadataOutput];

    // Create a new serial dispatch queue.
    dispatch_queue_t dispatchQueue;
    dispatchQueue = dispatch_queue_create("myQueue", NULL);
    [captureMetadataOutput setMetadataObjectsDelegate:self queue:dispatchQueue];
    [captureMetadataOutput setMetadataObjectTypes:[NSArray arrayWithObject:AVMetadataObjectTypeQRCode]];

    // Initialize the video preview layer and add it as a sublayer to the viewPreview view's layer.
    _videoPreviewLayer = [[AVCaptureVideoPreviewLayer alloc] initWithSession:_captureSession];
    [_videoPreviewLayer setVideoGravity:AVLayerVideoGravityResizeAspectFill];
    [_videoPreviewLayer setFrame:_cam.layer.bounds];
    [_cam.layer addSublayer:_videoPreviewLayer];


    // Start video capture.
    [_captureSession startRunning];

    return YES;
}


-(void)stopReading{
    // Stop video capture and make the capture session object nil.
    [_captureSession stopRunning];
    _captureSession = nil;

    // Remove the video preview layer from the viewPreview view's layer.
    //[_videoPreviewLayer removeFromSuperlayer];
}


-(void)loadBeepSound{
    // Get the path to the beep.mp3 file and convert it to a NSURL object.
    NSString *beepFilePath = [[NSBundle mainBundle] pathForResource:@"beep" ofType:@"mp3"];
    NSURL *beepURL = [NSURL fileURLWithPath:beepFilePath];

    NSError *error;

    // Initialize the audio player object using the NSURL object previously set.
    _audioPlayer = [[AVAudioPlayer alloc] initWithContentsOfURL:beepURL error:&error];
    if (error) {
        // If the audio player cannot be initialized then log a message.
        // NSLog(@"Could not play beep file.");
        //NSLog(@"%@", [error localizedDescription]);
    }
    else{
        // If the audio player was successfully initialized then load it in memory.
        [_audioPlayer prepareToPlay];
    }
}
-(void)captureOutput:(AVCaptureOutput *)captureOutput didOutputMetadataObjects:(NSArray *)metadataObjects fromConnection:(AVCaptureConnection *)connection{

    // Check if the metadataObjects array is not nil and it contains at least one object.
    if (metadataObjects != nil && [metadataObjects count] > 0) {
        // Get the metadata object.
        // NSLog(@"%@",metadataObjects);
        AVMetadataMachineReadableCodeObject *metadataObj = [metadataObjects objectAtIndex:0];
        if ([[metadataObj type] isEqualToString:AVMetadataObjectTypeQRCode]) {
            // If the found metadata is equal to the QR code metadata then update the status label's text,
            // stop reading and change the bar button item's title and the flag's value.
            // Everything is done on the main thread.
            NSString *result=[metadataObj stringValue];
            [self performSelectorOnMainThread:@selector(setQRcodeValues:) withObject:result waitUntilDone:NO];
            //  [_result performSelectorOnMainThread:@selector(setText:) withObject:[metadataObj stringValue] waitUntilDone:NO];

            [self performSelectorOnMainThread:@selector(stopReading) withObject:nil waitUntilDone:NO];
            // [_button performSelectorOnMainThread:@selector(setTitle:) withObject:@"Start!" waitUntilDone:NO];

            _isReading = NO;

            // If the audio player is not nil, then play the sound effect.
            if (_audioPlayer) {
                [_audioPlayer play];
            }
        }
    }


}

/*
#pragma mark - Navigation

// In a storyboard-based application, you will often want to do a little preparation before navigation
- (void)prepareForSegue:(UIStoryboardSegue *)segue sender:(id)sender {
    // Get the new view controller using [segue destinationViewController].
    // Pass the selected object to the new view controller.
}
*/

@end

Please help me... take the photo when that button is tapped (see the image at the link).


— john mike mike, 15.10.2014

Comments:

Since you've set up your own camera preview, it sounds like you want to avoid UIImagePickerController and its built-in preview UI. See this answer and the code snippet from this very closely related question; from those you should be able to put together your own takePhoto: implementation (a rough sketch follows the comments below). — Michael Dautermann, 15.10.2014

Yes, but that looks complicated... — john mike mike, 15.10.2014
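
A minimal sketch of the kind of takePhoto: implementation that comment points toward, assuming an AVCaptureStillImageOutput property named stillImageOutput has already been created and added to the capture session (the answer below shows that setup):

- (IBAction)takePhoto:(UIButton *)sender {
    // Grab the video connection of the still-image output
    // (sketch; self.stillImageOutput is assumed to exist).
    AVCaptureConnection *connection = [self.stillImageOutput connectionWithMediaType:AVMediaTypeVideo];

    [self.stillImageOutput captureStillImageAsynchronouslyFromConnection:connection completionHandler:^(CMSampleBufferRef sampleBuffer, NSError *error) {
        if (error || sampleBuffer == NULL) {
            NSLog(@"Still image capture failed: %@", error);
            return;
        }
        // Convert the captured sample buffer to JPEG data, then to a UIImage.
        NSData *jpegData = [AVCaptureStillImageOutput jpegStillImageNSDataRepresentation:sampleBuffer];
        UIImage *photo = [UIImage imageWithData:jpegData];

        // UIKit must only be touched on the main thread.
        dispatch_async(dispatch_get_main_queue(), ^{
            self.imageView.image = photo;
        });
    }];
}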


Answers (1)


I captured the image while scanning the QR code like this:

1) First, add an AVCaptureStillImageOutput property:

@property (strong, nonatomic) AVCaptureStillImageOutput *stillImageOutput;

2) After initializing the AVCaptureSession, set a session preset on it:

[self.session setSessionPreset:AVCaptureSessionPreset640x480];

3) Now add the AVCaptureStillImageOutput as an output of the AVCaptureSession:

// Prepare an output for snapshotting
self.stillImageOutput = [AVCaptureStillImageOutput new];
[self.session addOutput:self.stillImageOutput];
self.stillImageOutput.outputSettings = @{AVVideoCodecKey: AVVideoCodecJPEG};

4) Then add the code below to the captureOutput:didOutputMetadataObjects:fromConnection: delegate method to capture the scanned image:

__block UIImage *scannedImg = nil;
// Grab a still frame from the session at the moment the QR code is detected.
AVCaptureConnection *stillConnection = [self.stillImageOutput connectionWithMediaType:AVMediaTypeVideo];
[self.stillImageOutput captureStillImageAsynchronouslyFromConnection:stillConnection completionHandler:^(CMSampleBufferRef imageDataSampleBuffer, NSError *error) {
    if(error) {
        NSLog(@"There was a problem");
        return;
    }

    NSData *jpegData = [AVCaptureStillImageOutput jpegStillImageNSDataRepresentation:imageDataSampleBuffer];

    scannedImg = [UIImage imageWithData:jpegData];
    NSLog(@"scannedImg : %@",scannedImg);
}];
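
To wire this into the question's bgCameraController, the still-image output has to be attached to the same session that startReading configures, and because the metadata delegate runs on the serial queue created there, the captured image should be handed back to the main queue before updating imageView. A rough sketch, assuming the stillImageOutput property from step 1:

// In startReading, after [_captureSession addInput:input] (sketch only):
self.stillImageOutput = [AVCaptureStillImageOutput new];
self.stillImageOutput.outputSettings = @{AVVideoCodecKey: AVVideoCodecJPEG};
if ([_captureSession canAddOutput:self.stillImageOutput]) {
    [_captureSession addOutput:self.stillImageOutput];
}

// In the completion handler above, once scannedImg has been created,
// hop back to the main queue before touching the imageView outlet:
dispatch_async(dispatch_get_main_queue(), ^{
    self.imageView.image = scannedImg;
});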

For reference, have a look at CodeScanViewController.

That's it. Enjoy!

— Paresh Navadiya, 07.07.2015