//
// BGKSYStreamerController.m
// FanweLive
//
// Created by yiqian on 10/15/15.
// Copyright (c) 2015 ksyun. All rights reserved.
//
#import "BGKSYStreamerController.h"
// To avoid filling up device storage, recording length is capped at 30s.
@interface BGKSYStreamerController ()
{
    CGFloat _currentPinchZoomFactor; // camera zoom factor captured when a pinch gesture begins
    UIImageView *_foucsCursorImgView; // focus cursor shown where the user taps to focus
    GlobalVariables *_fanweApp; // shared app settings singleton (resolution profile, etc.)
    NSMutableDictionary *_obsDict; // notification name -> NSValue-wrapped SEL, used to register observers
}
@end
  18. @implementation BGKSYStreamerController
  19. - (void)viewDidLoad
  20. {
  21. [super viewDidLoad];
  22. // 添加视频容器视图
  23. _videoContrainerView = [[UIView alloc] initWithFrame:CGRectMake(0, 0, kScreenW, kScreenH)];
  24. _videoContrainerView.backgroundColor = [UIColor whiteColor];
  25. [self.view addSubview:_videoContrainerView];
  26. _fanweApp = [GlobalVariables sharedInstance];
  27. _gPUStreamerKit = [[KSYRTCStreamerKit alloc] initWithDefaultCfg];
  28. NSLog(ASLocalizedString(@"=====当前金山SDK版本号:%@"),[_gPUStreamerKit.streamerBase getKSYVersion]);
  29. // 采集相关设置初始化
  30. [self setCaptureCfg];
  31. // 推流相关设置初始化
  32. [self setStreamerCfg];
  33. if (_gPUStreamerKit)
  34. {
  35. _gPUStreamerKit.videoOrientation = [[UIApplication sharedApplication] statusBarOrientation];
  36. [_gPUStreamerKit startPreview:_videoContrainerView];
  37. }
  38. // 添加对焦框
  39. [self addfoucsCursorImgView];
  40. // 添加手势
  41. [self addPinchGestureRecognizer];
  42. // 添加监听
  43. [self initObservers];
  44. [self addObservers];
  45. }
  46. #pragma mark 对焦框
  47. - (void)addfoucsCursorImgView
  48. {
  49. _foucsCursorImgView = [[UIImageView alloc]initWithImage:[UIImage imageNamed:@"lr_camera_focus_red"]];
  50. _foucsCursorImgView.frame = CGRectMake(80, 80, 80, 80);
  51. [self.view addSubview:_foucsCursorImgView];
  52. _foucsCursorImgView.alpha = 0;
  53. }
  54. #pragma mark - ----------------------- 推流 -----------------------
  55. #pragma mark 开始推流
  56. - (void)startRtmp
  57. {
  58. dispatch_time_t delay = dispatch_time(DISPATCH_TIME_NOW, (int64_t)(1 * NSEC_PER_SEC));
  59. dispatch_after(delay, dispatch_get_main_queue(), ^{
  60. [_gPUStreamerKit.streamerBase startStream:self.pushUrl];
  61. });
  62. }
  63. #pragma mark 停止推流
  64. - (void)stopRtmp
  65. {
  66. if (_gPUStreamerKit)
  67. {
  68. [_gPUStreamerKit.streamerBase stopStream];
  69. [_gPUStreamerKit stopPreview];
  70. _gPUStreamerKit = nil;
  71. }
  72. [[NSNotificationCenter defaultCenter] removeObserver:self];
  73. }
  74. #pragma mark 尝试重连
  75. - (void)tryReconnect
  76. {
  77. if (_gPUStreamerKit.maxAutoRetry > 0)
  78. {
  79. return;
  80. }
  81. [self startRtmp];
  82. }
  83. #pragma mark - ----------------------- 配置 -----------------------
  84. #pragma mark 采集相关设置初始化
  85. - (void)setCaptureCfg
  86. {
  87. // 采集分辨率 (仅在开始采集前设置有效)
  88. _gPUStreamerKit.capPreset = AVCaptureSessionPreset640x480;
  89. _gPUStreamerKit.previewDimension = CGSizeMake(640, 480);
  90. _gPUStreamerKit.streamDimension = CGSizeMake(640, 480);
  91. //配置profile 0:标清(360*640) 1:高清(540*960) 2:超清(720*1280)
  92. if (_fanweApp.appModel.video_resolution_type == 0)
  93. {
  94. [_gPUStreamerKit setStreamerProfile:KSYStreamerProfile_360p_auto];
  95. }
  96. else if (_fanweApp.appModel.video_resolution_type == 1)
  97. {
  98. [_gPUStreamerKit setStreamerProfile:KSYStreamerProfile_540p_auto];
  99. }
  100. else if (_fanweApp.appModel.video_resolution_type == 2)
  101. {
  102. [_gPUStreamerKit setStreamerProfile:KSYStreamerProfile_720p_auto];
  103. }
  104. // 视频帧率 默认:15
  105. _gPUStreamerKit.videoFPS = 15;
  106. // 摄像头位置 (仅在开始采集前设置有效)
  107. _gPUStreamerKit.cameraPosition = AVCaptureDevicePositionFront;
  108. // gpu output pixel format (默认:kCVPixelFormatType_32BGRA) (仅在开始采集前设置有效)
  109. _gPUStreamerKit.gpuOutputPixelFormat = kCVPixelFormatType_32BGRA;
  110. // 视频处理回调接口
  111. _gPUStreamerKit.videoProcessingCallback = ^(CMSampleBufferRef buf){
  112. // 在此处添加自定义图像处理, 直接修改buf中的图像数据会传递到观众端
  113. // 或复制图像数据之后再做其他处理, 则观众端仍然看到处理前的图像
  114. };
  115. // 采集模块输出的像素格式 (默认:kCVPixelFormatType_420YpCbCr8BiPlanarVideoRange) (仅在开始采集前设置有效)
  116. _gPUStreamerKit.capturePixelFormat = kCVPixelFormatType_32BGRA;
  117. // 音频处理回调接口
  118. _gPUStreamerKit.audioProcessingCallback = ^(CMSampleBufferRef buf){
  119. // 在此处添加自定义音频处理, 直接修改buf中的pcm数据会传递到观众端
  120. // 或复制音频数据之后再做其他处理, 则观众端仍然听到原始声音
  121. };
  122. // 摄像头采集被打断的消息通知
  123. _gPUStreamerKit.interruptCallback = ^(BOOL bInterrupt){
  124. // 在此处添加自定义图像采集被打断的处理 (比如接听电话等)
  125. };
  126. }
  127. #pragma mark 推流相关设置初始化
  128. - (void)setStreamerCfg
  129. {
  130. if (_gPUStreamerKit.streamerBase == nil)
  131. {
  132. return;
  133. }
  134. // must set after capture
  135. // stream default settings
  136. // 视频编码器 默认为 自动选择
  137. _gPUStreamerKit.streamerBase.videoCodec = KSYVideoCodec_AUTO;
  138. if (_fanweApp.appModel.video_resolution_type == 0)
  139. {
  140. // 视频编码起始码率(单位:kbps, 默认:500)
  141. _gPUStreamerKit.streamerBase.videoInitBitrate = 400;
  142. // 视频编码最高码率(单位:kbps, 默认:800)
  143. _gPUStreamerKit.streamerBase.videoMaxBitrate = 800;
  144. // 视频编码最低码率(单位:kbps, 默认:200)
  145. _gPUStreamerKit.streamerBase.videoMinBitrate = 200;
  146. }
  147. else if (_fanweApp.appModel.video_resolution_type == 1)
  148. {
  149. // 视频编码起始码率(单位:kbps, 默认:500)
  150. _gPUStreamerKit.streamerBase.videoInitBitrate = 600;
  151. // 视频编码最高码率(单位:kbps, 默认:800)
  152. _gPUStreamerKit.streamerBase.videoMaxBitrate = 1000;
  153. // 视频编码最低码率(单位:kbps, 默认:200)
  154. _gPUStreamerKit.streamerBase.videoMinBitrate = 200;
  155. }
  156. else if (_fanweApp.appModel.video_resolution_type == 2)
  157. {
  158. // 视频编码起始码率(单位:kbps, 默认:500)
  159. _gPUStreamerKit.streamerBase.videoInitBitrate = 800;
  160. // 视频编码最高码率(单位:kbps, 默认:800)
  161. _gPUStreamerKit.streamerBase.videoMaxBitrate = 1000;
  162. // 视频编码最低码率(单位:kbps, 默认:200)
  163. _gPUStreamerKit.streamerBase.videoMinBitrate = 200;
  164. }
  165. // 音频编码码率(单位:kbps)
  166. _gPUStreamerKit.streamerBase.audiokBPS = 48;
  167. // 收集网络相关状态的日志,默认开启
  168. _gPUStreamerKit.streamerBase.shouldEnableKSYStatModule = NO;
  169. // 获取Streamer中与网络相关的日志
  170. _gPUStreamerKit.streamerBase.logBlock = ^(NSString* str){
  171. // NSLog(@"%@", str);
  172. };
  173. // // 直播场景 (KSY内部会根据场景的特征进行参数调优)
  174. // _gPUStreamerKit.streamerBase.liveScene = KSYLiveScene_Showself;
  175. // // 视频编码性能档次 (视频质量 和 设备资源之间的权衡)
  176. // _gPUStreamerKit.streamerBase.videoEncodePerf = KSYVideoEncodePer_Balance;
  177. // // 是否处理视频的图像数据 (默认YES)
  178. // _gPUStreamerKit.streamerBase.bWithVideo = YES;
  179. // //是否冻结图像(主动提供重复图像) 比如:视频采集被打断时, bAutoRepeat为NO,则停止提供图像; 为YES, 则主动提供最后一帧图像
  180. // _gPUStreamerKit.gpuToStr.bAutoRepeat = YES;
  181. // // 自动重连次数 关闭(0), 开启(>0), 默认为0
  182. // _gPUStreamerKit.maxAutoRetry = 3;
  183. }
  184. #pragma mark - ----------------------- 监听 -----------------------
  185. - (void)initObservers
  186. {
  187. _obsDict = [NSMutableDictionary dictionaryWithObjectsAndKeys:
  188. SEL_VALUE(onCaptureStateChange:) , KSYCaptureStateDidChangeNotification,
  189. SEL_VALUE(onStreamStateChange:) , KSYStreamStateDidChangeNotification,
  190. SEL_VALUE(onBgmPlayerStateChange:) ,KSYAudioStateDidChangeNotification,
  191. nil];
  192. }
  193. - (void)addObservers
  194. {
  195. //KSYStreamer state changes
  196. NSNotificationCenter* dc = [NSNotificationCenter defaultCenter];
  197. for (NSString* key in _obsDict)
  198. {
  199. SEL aSel = [[_obsDict objectForKey:key] pointerValue];
  200. [dc addObserver:self
  201. selector:aSel
  202. name:key
  203. object:nil];
  204. }
  205. }
  206. #pragma mark state change
  207. - (void)onCaptureStateChange:(NSNotification *)notification
  208. {
  209. if (_gPUStreamerKit.captureState == KSYCaptureStateIdle)
  210. {
  211. self.view.backgroundColor = [UIColor darkGrayColor];
  212. }
  213. else if(_gPUStreamerKit.captureState == KSYCaptureStateCapturing)
  214. {
  215. self.view.backgroundColor = [UIColor lightGrayColor];
  216. }
  217. }
  218. #pragma mark 推流状态监听
  219. - (void)onStreamStateChange:(NSNotification *)notification
  220. {
  221. if (_gPUStreamerKit.streamerBase)
  222. {
  223. NSLog(@"stream State %@", [_gPUStreamerKit.streamerBase getCurStreamStateName]);
  224. }
  225. if(_gPUStreamerKit.streamerBase.streamState == KSYStreamStateError)
  226. {
  227. [self onStreamError:_gPUStreamerKit.streamerBase.streamErrorCode];
  228. }
  229. else if (_gPUStreamerKit.streamerBase.streamState == KSYStreamStateConnecting)
  230. {
  231. }
  232. else if (_gPUStreamerKit.streamerBase.streamState == KSYStreamStateConnected)
  233. {
  234. self.view.backgroundColor = [UIColor lightGrayColor];
  235. if (_delegate && [_delegate respondsToSelector:@selector(firstFrame:)])
  236. {
  237. [_delegate firstIFrame:self];
  238. }
  239. }
  240. else if (_gPUStreamerKit.streamerBase.streamState == KSYStreamStateIdle)
  241. {
  242. self.view.backgroundColor = [UIColor darkGrayColor];
  243. }
  244. //状态为KSYStreamStateIdle且_bRecord为ture时,录制视频
  245. if (_gPUStreamerKit.streamerBase.streamState == KSYStreamStateIdle && _liveType == FW_LIVE_TYPE_RECORD)
  246. {
  247. // [self saveVideoToAlbum:[_presetCfgView hostUrl]];
  248. }
  249. }
  250. #pragma mark 推流错误处理
  251. - (void)onStreamError:(KSYStreamErrorCode) errCode
  252. {
  253. if (errCode == KSYStreamErrorCode_CONNECT_BREAK)
  254. {
  255. [self tryReconnect];
  256. }
  257. else if (errCode == KSYStreamErrorCode_AV_SYNC_ERROR)
  258. {
  259. NSLog(@"audio video is not synced, please check timestamp");
  260. [self tryReconnect];
  261. }
  262. else if (errCode == KSYStreamErrorCode_CODEC_OPEN_FAILED)
  263. {
  264. NSLog(@"video codec open failed, try software codec");
  265. _gPUStreamerKit.streamerBase.videoCodec = KSYVideoCodec_X264;
  266. [self tryReconnect];
  267. }
  268. }
  269. - (void)onBgmPlayerStateChange:(NSNotification *)notification
  270. {
  271. NSString *st = [_gPUStreamerKit.bgmPlayer getCurBgmStateName];
  272. NSLog(@"=====bgmStatus:%@",[st substringFromIndex:17]);
  273. }
#pragma mark - ----------------------- Gestures -----------------------
#pragma mark Convert a view coordinate to a camera point of interest
// Maps a tap location in view coordinates into the camera's normalized
// point-of-interest space ([0,1] x [0,1]) used by focusAtPoint:/exposureAtPoint:,
// compensating for the letter/pillar-boxing between the capture aspect ratio
// and the view's aspect ratio. Taps on the black bars keep the default
// center point (0.5, 0.5).
- (CGPoint)convertToPointOfInterestFromViewCoordinates:(CGPoint)viewCoordinates
{
CGPoint pointOfInterest = CGPointMake(.5f, .5f);
CGSize frameSize = self.view.frame.size;
CGSize apertureSize = [_gPUStreamerKit captureDimension];
CGPoint point = viewCoordinates;
// NOTE(review): the aperture ratio uses height/width while the view ratio
// uses width/height — presumably because the capture buffer is rotated 90°
// relative to the portrait UI. Confirm against captureDimension's orientation.
CGFloat apertureRatio = apertureSize.height / apertureSize.width;
CGFloat viewRatio = frameSize.width / frameSize.height;
CGFloat xc = .5f;
CGFloat yc = .5f;
if (viewRatio > apertureRatio)
{
// View is wider than the video: vertical black bars on the left and right.
CGFloat y2 = frameSize.height;
CGFloat x2 = frameSize.height * apertureRatio;
CGFloat x1 = frameSize.width;
CGFloat blackBar = (x1 - x2) / 2;
// Remap only taps that land inside the visible video area.
if (point.x >= blackBar && point.x <= blackBar + x2)
{
xc = point.y / y2;
yc = 1.f - ((point.x - blackBar) / x2);
}
}
else
{
// View is taller than the video: horizontal bars at the top and bottom.
CGFloat y2 = frameSize.width / apertureRatio;
CGFloat y1 = frameSize.height;
CGFloat x2 = frameSize.width;
CGFloat blackBar = (y1 - y2) / 2;
if (point.y >= blackBar && point.y <= blackBar + y2)
{
xc = ((point.y - blackBar) / y2);
yc = 1.f - (point.x / x2);
}
}
pointOfInterest = CGPointMake(xc, yc);
return pointOfInterest;
}
  313. //设置摄像头对焦位置
  314. - (void)touchesEnded:(NSSet<UITouch *> *)touches withEvent:(UIEvent *)event
  315. {
  316. UITouch *touch = [touches anyObject];
  317. CGPoint current = [touch locationInView:self.view];
  318. CGPoint point = [self convertToPointOfInterestFromViewCoordinates:current];
  319. [_gPUStreamerKit exposureAtPoint:point];
  320. [_gPUStreamerKit focusAtPoint:point];
  321. _foucsCursorImgView.center = current;
  322. _foucsCursorImgView.transform = CGAffineTransformMakeScale(1.5, 1.5);
  323. _foucsCursorImgView.alpha=1.0;
  324. [UIView animateWithDuration:1.0 animations:^{
  325. _foucsCursorImgView.transform=CGAffineTransformIdentity;
  326. } completion:^(BOOL finished) {
  327. _foucsCursorImgView.alpha=0;
  328. }];
  329. }
  330. #pragma mark 添加缩放手势,缩放时镜头放大或缩小
  331. - (void)addPinchGestureRecognizer
  332. {
  333. UIPinchGestureRecognizer *pinch = [[UIPinchGestureRecognizer alloc]initWithTarget:self action:@selector(pinchDetected:)];
  334. [self.view addGestureRecognizer:pinch];
  335. }
  336. - (void)pinchDetected:(UIPinchGestureRecognizer *)recognizer
  337. {
  338. if (recognizer.state == UIGestureRecognizerStateBegan)
  339. {
  340. _currentPinchZoomFactor = _gPUStreamerKit.pinchZoomFactor;
  341. }
  342. CGFloat zoomFactor = _currentPinchZoomFactor * recognizer.scale;//当前触摸缩放因子*坐标比例
  343. [_gPUStreamerKit setPinchZoomFactor:zoomFactor];
  344. }
  345. @end