//
//  RScanCamera.m
//  r_scan
//
//  Created by 李鹏辉 on 2020/2/24.
//
#import "RScanCamera.h"
#import <AVFoundation/AVFoundation.h>
#import <CoreMotion/CoreMotion.h>
#import <libkern/OSAtomic.h>
#import "RScanResult.h"
// Mirrors ResolutionPreset in camera.dart.
typedef enum {
  veryLow,
  low,
  medium,
  high,
  veryHigh,
  ultraHigh,
  max,
} ResolutionPreset;
static ResolutionPreset getResolutionPresetForString(NSString *preset) {
  if ([preset isEqualToString:@"veryLow"]) {
    return veryLow;
  } else if ([preset isEqualToString:@"low"]) {
    return low;
  } else if ([preset isEqualToString:@"medium"]) {
    return medium;
  } else if ([preset isEqualToString:@"high"]) {
    return high;
  } else if ([preset isEqualToString:@"veryHigh"]) {
    return veryHigh;
  } else if ([preset isEqualToString:@"ultraHigh"]) {
    return ultraHigh;
  } else if ([preset isEqualToString:@"max"]) {
    return max;
  } else {
    NSError *error = [NSError
        errorWithDomain:NSCocoaErrorDomain
                   code:NSURLErrorUnknown
               userInfo:@{
                 NSLocalizedDescriptionKey :
                     [NSString stringWithFormat:@"Unknown resolution preset %@", preset]
               }];
    @throw error;
  }
}
// Wraps an NSError in a FlutterError for delivery over the method channel.
static FlutterError *getFlutterError(NSError *error) {
  return [FlutterError errorWithCode:[NSString stringWithFormat:@"%d", (int)error.code]
                             message:error.localizedDescription
                             details:error.domain];
}
@interface RScanFLTCam : NSObject <FlutterTexture,
                                   AVCaptureMetadataOutputObjectsDelegate,
                                   AVCaptureVideoDataOutputSampleBufferDelegate,
                                   FlutterStreamHandler>
@property(readonly, nonatomic) int64_t textureId;
@property(assign, nonatomic) ResolutionPreset resolutionPreset;
// Session that wires the camera input to the outputs.
@property(readonly, nonatomic) AVCaptureSession *captureSession;
// The physical camera device.
@property(readonly, nonatomic) AVCaptureDevice *captureDevice;
// Video input feeding the session.
@property(readonly, nonatomic) AVCaptureInput *captureVideoInput;
// Metadata output that recognizes barcodes.
@property(readonly, nonatomic) AVCaptureMetadataOutput *captureOutput;
// Video data output that supplies preview frames.
@property(readonly, nonatomic) AVCaptureVideoDataOutput *captureVideoOutput;
@property(readonly, nonatomic) CGSize previewSize;
@property(nonatomic) FlutterEventSink eventSink;
@property(readonly) CVPixelBufferRef volatile latestPixelBuffer;
// Invoked each time a new preview frame is ready.
@property(nonatomic, copy) void (^onFrameAvailable)(void);
// Event channel used to push scan results back to Dart.
@property(nonatomic) FlutterEventChannel *eventChannel;
@end

@implementation RScanFLTCam {
  dispatch_queue_t _dispatchQueue;
}
// Pixel format requested from the video data output; BGRA matches what the
// Flutter texture pipeline expects.
FourCharCode const rScanVideoFormat = kCVPixelFormatType_32BGRA;

- (instancetype)initWithCameraName:(NSString *)cameraName
                  resolutionPreset:(NSString *)resolutionPreset
                     dispatchQueue:(dispatch_queue_t)dispatchQueue
                             error:(NSError **)error {
  self = [super init];
  NSAssert(self, @"super init cannot be nil");
  @try {
    _resolutionPreset = getResolutionPresetForString(resolutionPreset);
  } @catch (NSError *e) {
    *error = e;
  }
  _dispatchQueue = dispatchQueue;
  _captureSession = [[AVCaptureSession alloc] init];
  _captureDevice = [AVCaptureDevice deviceWithUniqueID:cameraName];
  NSError *localError = nil;
  _captureVideoInput = [AVCaptureDeviceInput deviceInputWithDevice:_captureDevice
                                                             error:&localError];
  if (localError) {
    *error = localError;
    return nil;
  }
  _captureVideoOutput = [AVCaptureVideoDataOutput new];
  _captureVideoOutput.videoSettings =
      @{(NSString *)kCVPixelBufferPixelFormatTypeKey : @(rScanVideoFormat)};
  [_captureVideoOutput setAlwaysDiscardsLateVideoFrames:YES];
  [_captureVideoOutput setSampleBufferDelegate:self queue:dispatch_get_main_queue()];
  // Connect the input ports to the video output.
  AVCaptureConnection *connection =
      [AVCaptureConnection connectionWithInputPorts:_captureVideoInput.ports
                                             output:_captureVideoOutput];
  if ([_captureDevice position] == AVCaptureDevicePositionFront) {
    connection.videoMirrored = YES;
  }
  if ([connection isVideoOrientationSupported]) {
    connection.videoOrientation = AVCaptureVideoOrientationPortrait;
  }
  [_captureSession addInputWithNoConnections:_captureVideoInput];
  [_captureSession addOutputWithNoConnections:_captureVideoOutput];
  _captureOutput = [[AVCaptureMetadataOutput alloc] init];
  [_captureSession addOutput:_captureOutput];
  // Size of the scan area; the commented-out lines sketch a rectOfInterest setup.
  AVCaptureVideoPreviewLayer *layer =
      [AVCaptureVideoPreviewLayer layerWithSession:_captureSession];
  layer.videoGravity = AVLayerVideoGravityResizeAspectFill;
  // layer.frame = CGRectMake(left, top, size, size);
  // [_captureOutput rectOfInterest];
  // Set the metadata delegate; callbacks are delivered on the main queue.
  [_captureOutput setMetadataObjectsDelegate:self queue:dispatch_get_main_queue()];
  [_captureOutput setMetadataObjectTypes:@[
    AVMetadataObjectTypeAztecCode, AVMetadataObjectTypeCode39Code,
    AVMetadataObjectTypeCode93Code, AVMetadataObjectTypeCode128Code,
    AVMetadataObjectTypeDataMatrixCode, AVMetadataObjectTypeEAN8Code,
    AVMetadataObjectTypeEAN13Code, AVMetadataObjectTypeITF14Code,
    AVMetadataObjectTypePDF417Code, AVMetadataObjectTypeQRCode,
    AVMetadataObjectTypeUPCECode
  ]];
  [_captureSession addConnection:connection];
  [self setCaptureSessionPreset:_resolutionPreset];
  return self;
}
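
// The input and output above are added with -addInputWithNoConnections: /
// -addOutputWithNoConnections: so that the AVCaptureConnection can be
// configured (front-camera mirroring, forced portrait orientation) before it
// is attached with -addConnection:; letting the session auto-connect would
// create the connection with its default settings instead.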
- (void)setCaptureSessionPreset:(ResolutionPreset)resolutionPreset {
  // Each case intentionally falls through to the next lower preset when the
  // requested one is not supported by the current device.
  switch (resolutionPreset) {
    case max:
      if ([_captureSession canSetSessionPreset:AVCaptureSessionPresetHigh]) {
        _captureSession.sessionPreset = AVCaptureSessionPresetHigh;
        _previewSize =
            CGSizeMake(_captureDevice.activeFormat.highResolutionStillImageDimensions.width,
                       _captureDevice.activeFormat.highResolutionStillImageDimensions.height);
        break;
      }
    case ultraHigh:
      if ([_captureSession canSetSessionPreset:AVCaptureSessionPreset3840x2160]) {
        _captureSession.sessionPreset = AVCaptureSessionPreset3840x2160;
        _previewSize = CGSizeMake(3840, 2160);
        break;
      }
    case veryHigh:
      if ([_captureSession canSetSessionPreset:AVCaptureSessionPreset1920x1080]) {
        _captureSession.sessionPreset = AVCaptureSessionPreset1920x1080;
        _previewSize = CGSizeMake(1920, 1080);
        break;
      }
    case high:
      if ([_captureSession canSetSessionPreset:AVCaptureSessionPreset1280x720]) {
        _captureSession.sessionPreset = AVCaptureSessionPreset1280x720;
        _previewSize = CGSizeMake(1280, 720);
        break;
      }
    case medium:
      if ([_captureSession canSetSessionPreset:AVCaptureSessionPreset640x480]) {
        _captureSession.sessionPreset = AVCaptureSessionPreset640x480;
        _previewSize = CGSizeMake(640, 480);
        break;
      }
    case low:
      if ([_captureSession canSetSessionPreset:AVCaptureSessionPreset352x288]) {
        _captureSession.sessionPreset = AVCaptureSessionPreset352x288;
        _previewSize = CGSizeMake(352, 288);
        break;
      }
    default:
      if ([_captureSession canSetSessionPreset:AVCaptureSessionPresetLow]) {
        _captureSession.sessionPreset = AVCaptureSessionPresetLow;
        _previewSize = CGSizeMake(352, 288);
      } else {
        NSError *error = [NSError
            errorWithDomain:NSCocoaErrorDomain
                       code:NSURLErrorUnknown
                   userInfo:@{
                     NSLocalizedDescriptionKey :
                         @"No capture session preset available for the current capture device."
                   }];
        @throw error;
      }
  }
}
- (void)captureOutput:(AVCaptureOutput *)output
    didOutputMetadataObjects:(NSArray<__kindof AVMetadataObject *> *)metadataObjects
              fromConnection:(AVCaptureConnection *)connection {
  if (metadataObjects.count > 0) {
    // Only the first recognized code is reported to Dart.
    AVMetadataMachineReadableCodeObject *metaObject = metadataObjects[0];
    NSString *value = metaObject.stringValue;
    if (value.length && _eventSink) {
      _eventSink([RScanResult toMap:metaObject]);
    }
  }
}
- (void)captureOutput:(AVCaptureOutput *)output
    didOutputSampleBuffer:(CMSampleBufferRef)sampleBuffer
           fromConnection:(AVCaptureConnection *)connection {
  if (output == _captureVideoOutput) {
    CVPixelBufferRef newBuffer = CMSampleBufferGetImageBuffer(sampleBuffer);
    CFRetain(newBuffer);
    CVPixelBufferRef old = _latestPixelBuffer;
    while (!OSAtomicCompareAndSwapPtrBarrier(old, newBuffer, (void **)&_latestPixelBuffer)) {
      old = _latestPixelBuffer;
    }
    if (old != nil) {
      CFRelease(old);
    }
    if (_onFrameAvailable) {
      _onFrameAvailable();
    }
  }
}
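
// The compare-and-swap loop above lets the capture callback publish frames
// without taking a lock: the retained new buffer is swapped into
// _latestPixelBuffer and whichever buffer it displaced is released.
// -copyPixelBuffer performs the matching swap-to-nil when the Flutter engine
// asks for the latest frame.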
- (void)start {
  [_captureSession startRunning];
}

- (void)stop {
  [_captureSession stopRunning];
}

- (void)resume {
  if (![_captureSession isRunning]) {
    [_captureSession startRunning];
  }
}

- (void)pause {
  if ([_captureSession isRunning]) {
    [_captureSession stopRunning];
  }
}
- (BOOL)setFlashMode:(BOOL)isOpen {
  [_captureDevice lockForConfiguration:nil];
  BOOL isSuccess = YES;
  if ([_captureDevice hasFlash]) {
    if (isOpen) {
      _captureDevice.flashMode = AVCaptureFlashModeOn;
      _captureDevice.torchMode = AVCaptureTorchModeOn;
    } else {
      _captureDevice.flashMode = AVCaptureFlashModeOff;
      _captureDevice.torchMode = AVCaptureTorchModeOff;
    }
  } else {
    isSuccess = NO;
  }
  [_captureDevice unlockForConfiguration];
  return isSuccess;
}

- (BOOL)getFlashMode {
  [_captureDevice lockForConfiguration:nil];
  BOOL isSuccess = _captureDevice.flashMode == AVCaptureFlashModeOn &&
                   _captureDevice.torchMode == AVCaptureTorchModeOn;
  [_captureDevice unlockForConfiguration];
  return isSuccess;
}
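
// Note: for a scanner the torch is what keeps the LED lit continuously, and
// -setFlashMode: on AVCaptureDevice has been deprecated since iOS 10. A
// minimal torch-only sketch (hypothetical helper, not part of this plugin's
// API) could look like:
//
//   - (BOOL)setTorchEnabled:(BOOL)enabled {
//     if (![_captureDevice hasTorch] || ![_captureDevice lockForConfiguration:nil]) {
//       return NO;
//     }
//     _captureDevice.torchMode = enabled ? AVCaptureTorchModeOn : AVCaptureTorchModeOff;
//     [_captureDevice unlockForConfiguration];
//     return YES;
//   }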
- (void)close {
  [_captureSession stopRunning];
  for (AVCaptureInput *input in [_captureSession inputs]) {
    [_captureSession removeInput:input];
  }
  for (AVCaptureOutput *output in [_captureSession outputs]) {
    [_captureSession removeOutput:output];
  }
}

- (CVPixelBufferRef _Nullable)copyPixelBuffer {
  CVPixelBufferRef pixelBuffer = _latestPixelBuffer;
  while (!OSAtomicCompareAndSwapPtrBarrier(pixelBuffer, nil, (void **)&_latestPixelBuffer)) {
    pixelBuffer = _latestPixelBuffer;
  }
  return pixelBuffer;
}
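
// FlutterTexture contract: the engine calls -copyPixelBuffer from its raster
// thread and releases the returned buffer itself, balancing the CFRetain
// performed in the sample-buffer callback.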
- (FlutterError *_Nullable)onCancelWithArguments:(id _Nullable)arguments {
  _eventSink = nil;
  [_eventChannel setStreamHandler:nil];
  return nil;
}

- (FlutterError *_Nullable)onListenWithArguments:(id _Nullable)arguments
                                       eventSink:(nonnull FlutterEventSink)events {
  _eventSink = events;
  return nil;
}

@end
@interface RScanCamera ()
@property(readonly, nonatomic) RScanFLTCam *camera;
@end

@implementation RScanCamera {
  dispatch_queue_t _dispatchQueue;
}

- (instancetype)initWithRegistry:(NSObject<FlutterTextureRegistry> *)registry
                       messenger:(NSObject<FlutterBinaryMessenger> *)messenger {
  self = [super init];
  NSAssert(self, @"super init cannot be nil");
  _registry = registry;
  _messenger = messenger;
  return self;
}

+ (void)registerWithRegistrar:(nonnull NSObject<FlutterPluginRegistrar> *)registrar {
}
- (void)handleMethodCall:(FlutterMethodCall *)call result:(FlutterResult)result {
  if ([@"availableCameras" isEqualToString:call.method]) {
    [self availableCameras:call result:result];
  } else if ([@"initialize" isEqualToString:call.method]) {
    [self initialize:call result:result];
  } else if ([@"dispose" isEqualToString:call.method]) {
    [self dispose:call result:result];
  } else if ([call.method isEqualToString:@"startScan"]) {
    [_camera resume];
    result(nil);
  } else if ([call.method isEqualToString:@"stopScan"]) {
    [_camera pause];
    result(nil);
  } else if ([call.method isEqualToString:@"setFlashMode"]) {
    NSNumber *isOpen = [call.arguments valueForKey:@"isOpen"];
    result([NSNumber numberWithBool:[_camera setFlashMode:[isOpen boolValue]]]);
  } else if ([call.method isEqualToString:@"getFlashMode"]) {
    result([NSNumber numberWithBool:[_camera getFlashMode]]);
  } else {
    result(FlutterMethodNotImplemented);
  }
}
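
// Method-channel surface exposed to Dart:
//   availableCameras        -> list of {name, lensFacing}
//   initialize              -> {textureId, previewWidth, previewHeight}
//   startScan / stopScan    -> null
//   setFlashMode {isOpen}   -> bool (whether the toggle succeeded)
//   getFlashMode            -> bool (whether flash and torch are on)
//   dispose {textureId}     -> null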
/**
 Lists the cameras available on this device.
 */
- (void)availableCameras:(FlutterMethodCall *)call result:(FlutterResult)result {
  AVCaptureDeviceDiscoverySession *discoverySession = [AVCaptureDeviceDiscoverySession
      discoverySessionWithDeviceTypes:@[ AVCaptureDeviceTypeBuiltInWideAngleCamera ]
                            mediaType:AVMediaTypeVideo
                             position:AVCaptureDevicePositionUnspecified];
  NSArray<AVCaptureDevice *> *devices = discoverySession.devices;
  NSMutableArray<NSDictionary<NSString *, NSObject *> *> *reply =
      [[NSMutableArray alloc] initWithCapacity:devices.count];
  for (AVCaptureDevice *device in devices) {
    NSString *lensFacing;
    switch ([device position]) {
      case AVCaptureDevicePositionBack:
        lensFacing = @"back";
        break;
      case AVCaptureDevicePositionFront:
        lensFacing = @"front";
        break;
      case AVCaptureDevicePositionUnspecified:
        lensFacing = @"external";
        break;
    }
    [reply addObject:@{
      @"name" : [device uniqueID],
      @"lensFacing" : lensFacing
    }];
  }
  result(reply);
}
/**
 Initializes the camera.
 */
- (void)initialize:(FlutterMethodCall *)call result:(FlutterResult)result {
  NSString *cameraName = call.arguments[@"cameraName"];
  NSString *resolutionPreset = call.arguments[@"resolutionPreset"];
  NSError *error;
  RScanFLTCam *cam = [[RScanFLTCam alloc] initWithCameraName:cameraName
                                            resolutionPreset:resolutionPreset
                                               dispatchQueue:_dispatchQueue
                                                       error:&error];
  if (error) {
    result(getFlutterError(error));
    return;
  } else {
    if (_camera) {
      [_camera close];
    }
    int64_t textureId = [_registry registerTexture:cam];
    _camera = cam;
    cam.onFrameAvailable = ^{
      [self->_registry textureFrameAvailable:textureId];
    };
    FlutterEventChannel *eventChannel = [FlutterEventChannel
        eventChannelWithName:[NSString stringWithFormat:@"com.rhyme_lph/r_scan_camera_%lld/event",
                                                        textureId]
             binaryMessenger:_messenger];
    [eventChannel setStreamHandler:cam];
    cam.eventChannel = eventChannel;
    result(@{
      @"textureId" : @(textureId),
      @"previewWidth" : @([UIScreen mainScreen].bounds.size.width),
      @"previewHeight" : @([UIScreen mainScreen].bounds.size.height)
      // @"previewWidth" : @(cam.previewSize.width),
      // @"previewHeight" : @(cam.previewSize.height)
    });
    [cam start];
  }
}
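
// Note: the reply above reports the screen bounds as the preview size; the
// commented-out lines would report the actual capture resolution
// (cam.previewSize) instead.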
/**
 Disposes of the camera.
 */
- (void)dispose:(FlutterMethodCall *)call result:(FlutterResult)result {
  NSDictionary *argsMap = call.arguments;
  int64_t textureId = ((NSNumber *)argsMap[@"textureId"]).longLongValue;
  [_registry unregisterTexture:textureId];
  [_camera close];
  _dispatchQueue = nil;
  result(nil);
}

@end