//
//  ScannerViewController.m
//  Lighting
//
//  Created by 曹云霄 on 16/5/18.
//  Copyright © 2016 上海勾芒科技有限公司. All rights reserved.
//

#import "ScannerViewController.h"
#import <AVFoundation/AVFoundation.h>
@interface ScannerViewController ()<AVCaptureMetadataOutputObjectsDelegate>

/**
 *  Capture session
 */
@property(strong,nonatomic) AVCaptureSession *session;

/**
 *  Preview layer
 */
@property(strong,nonatomic) AVCaptureVideoPreviewLayer *previewLayer;

/**
 *  Scan area view
 */
@property (weak, nonatomic) IBOutlet UIImageView *scanRectView;

/**
 * Tracks whether the scan line is currently moving up or down
 */
@property (assign, nonatomic) BOOL up;

/**
 * Display link timer
 */
@property (strong, nonatomic) CADisplayLink *link;

/**
 *  Scan indicator line
 */
@property (weak, nonatomic) IBOutlet UIImageView *instructionsLine;

@end

@implementation ScannerViewController



// Lazily create the display link that drives the scan-line animation
- (CADisplayLink *)link {
    if (!_link) {
        _link = [CADisplayLink displayLinkWithTarget:self selector:@selector(LineAnimation)];
    }
    return _link;
}


- (void)viewDidLoad {
    [super viewDidLoad];
    // Do any additional setup after loading the view from its nib.
    // Add the display link to the main run loop so the scan line animates
    _up = YES;
    [self.link addToRunLoop:[NSRunLoop mainRunLoop] forMode:NSRunLoopCommonModes];
    [self uiConfigAction];
}

#pragma mark -UI
- (void)uiConfigAction
{
    // 1. Camera device
    AVCaptureDevice *device = [AVCaptureDevice defaultDeviceWithMediaType:AVMediaTypeVideo];
    // 2. Configure the input
    NSError *error = nil;
    AVCaptureDeviceInput *input = [AVCaptureDeviceInput deviceInputWithDevice:device error:&error];
    if (error || !input) {
        NSLog(@"No camera available - %@", error.localizedDescription);
        [self ErrorMBProgressView:@"Failed to open the camera"];
        return;
    }
    // 3. Configure the output (metadata)
    AVCaptureMetadataOutput *output = [[AVCaptureMetadataOutput alloc] init];
    // 3.1 Set the output's delegate
    // Note: delivering callbacks on the main queue keeps the response in step with the UI;
    // delivering on another queue is asynchronous and can feel less responsive to the user.
    [output setMetadataObjectsDelegate:self queue:dispatch_get_main_queue()];
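    // Alternative (sketch, not used here): deliver metadata callbacks on a background queue
    // and dispatch any UI work back to the main queue inside the delegate method, e.g.:
    //
    //   [output setMetadataObjectsDelegate:self
    //                                queue:dispatch_get_global_queue(DISPATCH_QUEUE_PRIORITY_DEFAULT, 0)];
    //   // ...and in captureOutput:didOutputMetadataObjects:fromConnection::
    //   dispatch_async(dispatch_get_main_queue(), ^{
    //       // update the UI / invoke the result block here
    //   });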
    // 4. Capture session
    AVCaptureSession *session = [[AVCaptureSession alloc] init];
    // Add the input and output to the session
    [session addInput:input];
    [session addOutput:output];
    // Use 1080p video output
    session.sessionPreset = AVCaptureSessionPreset1920x1080;
    // 4.1 Configure the output's metadata types
    // Note: the output must already be added to the session before setting its metadata object types!
    [output setMetadataObjectTypes:[output availableMetadataObjectTypes]];
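    // If only specific symbologies are needed (e.g. QR codes), the scan can be narrowed
    // instead of enabling every available type (sketch):
    //   [output setMetadataObjectTypes:@[AVMetadataObjectTypeQRCode]];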
    
    // 5. Configure the preview layer (so the user can see what is being scanned)
    AVCaptureVideoPreviewLayer *preview = [AVCaptureVideoPreviewLayer layerWithSession:session];
    // 5.1 Set the preview layer's video gravity
    preview.videoGravity = AVLayerVideoGravityResizeAspectFill;
    // 5.2 Size the preview layer to fill the screen
    preview.frame = [UIScreen mainScreen].bounds;
    
    self.previewLayer = preview;
    
    [self.view.layer insertSublayer:self.previewLayer atIndex:0];

    // Limit metadata scanning to the area of scanRectView.
    // rectOfInterest uses normalized (0-1) coordinates in the capture device's image space,
    // whose axes are rotated relative to the portrait view, so x/y are swapped below
    // (the +0.1 is a manual offset correction).
    output.rectOfInterest = CGRectMake(self.scanRectView.frame.origin.y/ScreenHeight,((ScreenWidth-self.scanRectView.frame.size.width)/2)/ScreenWidth+0.1, self.scanRectView.frame.size.height/ScreenHeight, self.scanRectView.frame.size.width/ScreenWidth);

    NSLog(@"%@",NSStringFromCGRect(output.rectOfInterest));
    
    self.session = session;
    [self.session  startRunning];
}



#pragma mark - delegate
- (void)captureOutput:(AVCaptureOutput *)captureOutput didOutputMetadataObjects:(NSArray *)metadataObjects fromConnection:(AVCaptureConnection *)connection
{
    
    if (metadataObjects.count > 0) {
        
        AVMetadataMachineReadableCodeObject *obj = metadataObjects[0];
        [self.session stopRunning];
        // Return the scan result through the callback block
        if (self.ReturnScannerResponse) {
            self.ReturnScannerResponse(obj.stringValue);
        }
    }
}


#pragma mark - Scan line animation
- (void)LineAnimation {
    if (_up == YES) {
        CGFloat y = self.instructionsLine.frame.origin.y;
        y += 2;
        CGRect frame = self.instructionsLine.frame;
        frame.origin.y = y;
        self.instructionsLine.frame = frame;
        if (y >= self.scanRectView.frame.origin.y + self.scanRectView.frame.size.height-20) {
            _up = NO;
        }
    }else{
        CGFloat y = self.instructionsLine.frame.origin.y;
        y -= 2;
        CGRect frame = self.instructionsLine.frame;
        frame.origin.y = y;
        self.instructionsLine.frame = frame;
        if (y <= self.scanRectView.frame.origin.y+20) {
            _up = YES;
        }
    }
}






#pragma mark - Cancel scanning
- (IBAction)cancelButtonClick:(UIButton *)sender {
    [self.session stopRunning];
    [self dismissViewControllerAnimated:YES completion:nil];
}
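

#pragma mark - Teardown
// CADisplayLink retains its target, so the link must be invalidated or this controller
// will never be deallocated. A minimal sketch of the teardown, assuming the controller
// is dismissed via cancelButtonClick: or after a successful scan:
- (void)viewWillDisappear:(BOOL)animated {
    [super viewWillDisappear:animated];
    // Stop the scan-line animation and break the display link's reference to self.
    [_link invalidate];
    _link = nil;
}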


- (void)didReceiveMemoryWarning {
    [super didReceiveMemoryWarning];
    // Dispose of any resources that can be recreated.
}

/*
#pragma mark - Navigation

// In a storyboard-based application, you will often want to do a little preparation before navigation
- (void)prepareForSegue:(UIStoryboardSegue *)segue sender:(id)sender {
    // Get the new view controller using [segue destinationViewController].
    // Pass the selected object to the new view controller.
}
*/

@end