TCVideoLoadingController.m 17 KB

123456789101112131415161718192021222324252627282930313233343536373839404142434445464748495051525354555657585960616263646566676869707172737475767778798081828384858687888990919293949596979899100101102103104105106107108109110111112113114115116117118119120121122123124125126127128129130131132133134135136137138139140141142143144145146147148149150151152153154155156157158159160161162163164165166167168169170171172173174175176177178179180181182183184185186187188189190191192193194195196197198199200201202203204205206207208209210211212213214215216217218219220221222223224225226227228229230231232233234235236237238239240241242243244245246247248249250251252253254255256257258259260261262263264265266267268269270271272273274275276277278279280281282283284285286287288289290291292293294295296297298299300301302303304305306307308309310311312313314315316317318319320321322323324325326327328329330331332333334335336337338339340341342343344345346347348349350351352353354355356357358359360361362363364365366367368369370371372373374375376377378379380381382383384385386387388389390391392393394395396397398399400401402403404405406407408409410411412413414415416417418419420421422423424425426427428
  1. //
  2. // VideoLoadingController.m
  3. // TCLVBIMDemo
  4. //
  5. // Created by annidyfeng on 2017/4/17.
  6. // Copyright © 2017年 tencent. All rights reserved.
  7. //
  8. #import "TCVideoLoadingController.h"
  9. #import "TCVideoCutViewController.h"
  10. #import "TCVideoJoinViewController.h"
  11. #import <Photos/Photos.h>
@interface TCVideoLoadingController ()
// Spinner frame shown while exporting / downloading from iCloud.
// NOTE(review): declared without (weak, nonatomic) unlike loadingLabel below —
// defaults to strong/atomic; confirm whether this asymmetry is intentional.
@property IBOutlet UIImageView *loadingImageView;
// Status text ("Decoding…", iCloud progress, etc.).
@property (weak, nonatomic) IBOutlet UILabel *loadingLabel;
// Local file paths of exported videos (allocated only for AssetType_Video).
@property NSMutableArray *localPaths;
// The PHAssets selected by the user, exported one at a time.
@property NSArray *assets;
// AVAssets resolved from the selected video PHAssets, in selection order.
@property NSMutableArray *videosToEditAssets;
// Downscaled UIImages resolved from the selected photo PHAssets.
@property NSMutableArray *imagesToEdit;
// Index of the asset currently being exported; equals assets.count when done.
@property NSUInteger exportIndex;
// Scratch composition state used by performWithAsset:rotate:.
@property AVMutableComposition *mutableComposition;
@property AVMutableVideoComposition *mutableVideoComposition;
@end
@implementation TCVideoLoadingController
{
    BOOL _loadingIsInterrupt;   // set when the user cancels or the audio session is interrupted
    AssetType _assetType;       // whether the selected batch is videos or images
}

- (void)viewDidLoad {
    [super viewDidLoad];
    // Do any additional setup after loading the view from its nib.
    self.title = NSLocalizedString(@"TCVideoLoading.ChoosingVideo", nil);
    self.view.backgroundColor = UIColor.blackColor;
    self.loadingLabel.text = NSLocalizedString(@"VideoLoading.Decoding",nil);
}

- (void)viewWillAppear:(BOOL)animated
{
    [super viewWillAppear:animated];
    // Observe audio-session interruptions (e.g. incoming call) so an
    // in-flight export can be aborted; removed in viewWillDisappear:.
    [[NSNotificationCenter defaultCenter] addObserver:self
                                             selector:@selector(onAudioSessionEvent:)
                                                 name:AVAudioSessionInterruptionNotification
                                               object:nil];
}

- (void)viewWillDisappear:(BOOL)animated
{
    [super viewWillDisappear:animated];
    [[NSNotificationCenter defaultCenter] removeObserver:self];
}

// Cancel button: stop any in-flight iCloud download (via the interrupt flag
// read in the progress handlers) and dismiss this screen.
- (IBAction)cancelLoad:(id)sender {
    _loadingIsInterrupt = YES;
    [self dismissViewControllerAnimated:YES completion:^{
    }];
}

// Audio-session interruption began: abort the export and show the error alert.
- (void) onAudioSessionEvent: (NSNotification *) notification
{
    NSDictionary *info = notification.userInfo;
    AVAudioSessionInterruptionType type = [info[AVAudioSessionInterruptionTypeKey] unsignedIntegerValue];
    if (type == AVAudioSessionInterruptionTypeBegan) {
        _loadingIsInterrupt = YES;
        [self exportAssetError];
    }
}

- (void)didReceiveMemoryWarning {
    [super didReceiveMemoryWarning];
    // Dispose of any resources that can be recreated.
}

/// Entry point: export the selected PHAssets one by one (see
/// exportAssetInternal), then push the editor/joiner screen that matches
/// self.composeMode.
/// @param assets    PHAssets chosen by the user.
/// @param assetType Whether the batch is videos or images.
- (void)exportAssetList:(NSArray *)assets assetType:(AssetType)assetType;
{
    _assets = assets;
    _assetType = assetType;
    _exportIndex = 0;
    if (_assetType == AssetType_Video) {
        _localPaths = [NSMutableArray new];
        _videosToEditAssets = [NSMutableArray array];
    }else{
        _imagesToEdit = [NSMutableArray array];
    }
    [self exportAssetInternal];
}

/// Exports the asset at _exportIndex, then recurses (via the async result
/// handler on the main queue) until all assets are done, at which point the
/// next view controller for the current compose mode is pushed.
- (void)exportAssetInternal
{
    // All assets exported: hand off to the screen for the current mode.
    if (_exportIndex == _assets.count) {
        switch (self.composeMode) {
            case ComposeMode_Edit:
            {
                TCVideoCutViewController *vc = [TCVideoCutViewController new];
                //vc.videoPath = _localPaths[0];
                if (_assetType == AssetType_Video) {
                    vc.videoAsset = _videosToEditAssets[0];
                }else{
                    vc.imageList = _imagesToEdit;
                }
                if(!_loadingIsInterrupt) [self.navigationController pushViewController:vc animated:YES];
            }
                return;
            case ComposeMode_Join:
            {
                TCVideoJoinViewController *vc = [TCVideoJoinViewController new];
                // vc.videoList = _localPaths;
                vc.videoAssertList = _videosToEditAssets;
                if(!_loadingIsInterrupt) [self.navigationController pushViewController:vc animated:YES];
            }
                return;
            case ComposeMode_Upload:
            {
                // VideoCompressViewController *vc = [VideoCompressViewController new];
                // // vc.videoList = _localPaths;
                // vc.videoAsset = _videosToEditAssets[0];
                // if(!_loadingIsInterrupt) [self.navigationController pushViewController:vc animated:YES];
            }
                return;
            default:
                return;
        }
    }
    // Reset scratch composition state before processing the next asset.
    self.mutableComposition = nil;
    self.mutableVideoComposition = nil;
    __weak __typeof(self) weakSelf = self;
    PHAsset *expAsset = _assets[_exportIndex];
    if (_assetType == AssetType_Video) {
        PHVideoRequestOptions *options = [PHVideoRequestOptions new];
        // Highest-quality video.
        options.deliveryMode = PHVideoRequestOptionsDeliveryModeHighQualityFormat;
        // Allow downloading the asset from iCloud.
        options.networkAccessAllowed = YES;
        // For iCloud videos, this reports download progress.
        // NOTE(review): reading the ivar _loadingIsInterrupt captures self
        // strongly inside this block despite weakSelf — confirm no retain
        // cycle / lifetime issue is intended here.
        options.progressHandler = ^(double progress, NSError * _Nullable error, BOOL * _Nonnull stop, NSDictionary * _Nullable info) {
            [weakSelf loadingCloudVideoProgress:progress];
            *stop = _loadingIsInterrupt;
        };
        // Passing the AVAsset to the SDK (rather than re-exporting the file)
        // greatly reduces video loading time.
        [[PHImageManager defaultManager] requestAVAssetForVideo:expAsset options:options resultHandler:^(AVAsset * _Nullable avAsset, AVAudioMix * _Nullable audioMix, NSDictionary * _Nullable info) {
            dispatch_async(dispatch_get_main_queue(), ^(void) {
                if (avAsset) {
                    [_videosToEditAssets addObject:avAsset];
                    _exportIndex++;
                    [self exportAssetInternal];
                }
                // NOTE(review): when avAsset is nil the chain stalls silently —
                // neither advances nor calls exportAssetError; confirm intended.
            });
        }];
    }else{
        PHImageRequestOptions *options = [[PHImageRequestOptions alloc] init];
        options.version = PHImageRequestOptionsVersionCurrent;
        options.networkAccessAllowed = YES;
        options.synchronous = YES;
        //sync requests are automatically processed this way regardless of the specified mode
        //originRequestOptions.deliveryMode = PHImageRequestOptionsDeliveryModeHighQualityFormat;
        options.progressHandler = ^(double progress, NSError *__nullable error, BOOL *stop, NSDictionary *__nullable info){
            [weakSelf loadingCloudVideoProgress:progress];
            *stop = _loadingIsInterrupt;
        };
        [[PHImageManager defaultManager] requestImageForAsset:expAsset targetSize:PHImageManagerMaximumSize contentMode:PHImageContentModeDefault options:options resultHandler:^(UIImage * _Nullable result, NSDictionary * _Nullable info) {
            dispatch_async(dispatch_get_main_queue(), ^(void) {
                if (result) {
                    // Downscale once here, otherwise the image data held in
                    // memory would be too large.
                    UIImage *image = [self scaleImage:result scaleToSize:[self getVideoSize:result.size]];
                    if (image != nil) {
                        [_imagesToEdit addObject:image];
                    }
                    _exportIndex++;
                    [self exportAssetInternal];
                }
            });
        }];
    }
}

/// Shows a terminal error alert and dismisses the whole flow on OK.
- (void)exportAssetError
{
    UIAlertController *alert = [UIAlertController alertControllerWithTitle:@"Error" message:NSLocalizedString(@"TCVideoLoading.HintVideoExportingFailed", nil) preferredStyle:UIAlertControllerStyleAlert];
    // NOTE(review): style:0 relies on UIAlertActionStyleDefault == 0 —
    // prefer the named constant.
    UIAlertAction *ok = [UIAlertAction actionWithTitle:NSLocalizedString(@"Common.OK", nil) style:0 handler:^(UIAlertAction * _Nonnull action) {
        [self.navigationController dismissViewControllerAnimated:YES completion:nil];
    }];
    [alert addAction:ok];
    [self presentViewController:alert animated:YES completion:nil];
}

/// Updates the label and spinner frame for iCloud download progress.
/// @param progress 0.0–1.0; mapped onto 8 spinner frames.
- (void)loadingCloudVideoProgress:(float)progress
{
    dispatch_async(dispatch_get_main_queue(), ^{
        _loadingLabel.text = [NSString stringWithFormat:NSLocalizedString(@"TCVideoLoading.VideoDownloadingFromiCloud", nil),@(_exportIndex + 1)];
        self.loadingImageView.image = [UIImage imageNamed:[NSString stringWithFormat:@"video_record_share_loading_%d", (int)(progress * 8)]];
    });
}

/// Returns the target size for an image: fit into 720x1280 (portrait) or
/// 1280x720 (landscape), only shrinking when the source is at least that big.
- (CGSize)getVideoSize:(CGSize)sourceSize;
{
    CGSize videoSize = CGSizeMake(sourceSize.width, sourceSize.height);
    if (videoSize.height >= videoSize.width) {
        if([self supportCompressSize:CGSizeMake(720, 1280) videoSize:videoSize]){
            videoSize = [self compress:CGSizeMake(720, 1280) videoSize:videoSize];
        }
    }else{
        if([self supportCompressSize:CGSizeMake(1280, 720) videoSize:videoSize]){
            videoSize = [self compress:CGSizeMake(1280, 720) videoSize:videoSize];
        }
    }
    return videoSize;
}

// Whether the image is large enough (in either orientation) to downscale.
-(BOOL)supportCompressSize:(CGSize)compressSize videoSize:(CGSize)videoSize
{
    if (videoSize.width >= compressSize.width && videoSize.height >= compressSize.height) {
        return YES;
    }
    if (videoSize.width >= compressSize.height && videoSize.height >= compressSize.width) {
        return YES;
    }
    return NO;
}

// Computes the downscaled size, preserving the source aspect ratio and
// fitting within compressSize.
- (CGSize)compress:(CGSize)compressSize videoSize:(CGSize)videoSize
{
    CGSize size = CGSizeZero;
    if (compressSize.height / compressSize.width >= videoSize.height / videoSize.width) {
        size.width = compressSize.width;
        size.height = compressSize.width * videoSize.height / videoSize.width;
    }else{
        size.height = compressSize.height;
        size.width = compressSize.height * videoSize.width / videoSize.height;
    }
    return size;
}

/// Redraws the image at the given size; returns the scaled copy.
- (UIImage*)scaleImage:(UIImage *)image scaleToSize:(CGSize)size{
    UIGraphicsBeginImageContext(size);
    [image drawInRect:CGRectMake(0, 0, size.width, size.height)];
    UIImage* scaledImage = UIGraphicsGetImageFromCurrentImageContext();
    UIGraphicsEndImageContext();
    return scaledImage;
}

#define degreesToRadians( degrees ) ( ( degrees ) / 180.0 * M_PI )

/// Rotates the asset by the given angle (only +/-90 supported) by building or
/// updating self.mutableComposition / self.mutableVideoComposition.
/// Adapted from Apple's AVSimpleEditor sample (rotate command).
- (void)performWithAsset:(AVAsset*)asset rotate:(CGFloat)angle
{
    AVMutableVideoCompositionInstruction *instruction = nil;
    AVMutableVideoCompositionLayerInstruction *layerInstruction = nil;
    CGAffineTransform t1;
    CGAffineTransform t2;
    AVAssetTrack *assetVideoTrack = nil;
    AVAssetTrack *assetAudioTrack = nil;
    // Check if the asset contains video and audio tracks
    if ([[asset tracksWithMediaType:AVMediaTypeVideo] count] != 0) {
        assetVideoTrack = [asset tracksWithMediaType:AVMediaTypeVideo][0];
    }
    if ([[asset tracksWithMediaType:AVMediaTypeAudio] count] != 0) {
        assetAudioTrack = [asset tracksWithMediaType:AVMediaTypeAudio][0];
    }
    CMTime insertionPoint = kCMTimeZero;
    NSError *error = nil;
    // Step 1
    // Create a composition with the given asset and insert audio and video tracks into it from the asset
    if (!self.mutableComposition) {
        // Check whether a composition has already been created, i.e, some other tool has already been applied
        // Create a new composition
        self.mutableComposition = [AVMutableComposition composition];
        // Insert the video and audio tracks from AVAsset
        // NOTE(review): insertTimeRange errors are silently ignored — the
        // error out-param is never inspected.
        if (assetVideoTrack != nil) {
            AVMutableCompositionTrack *compositionVideoTrack = [self.mutableComposition addMutableTrackWithMediaType:AVMediaTypeVideo preferredTrackID:kCMPersistentTrackID_Invalid];
            [compositionVideoTrack insertTimeRange:CMTimeRangeMake(kCMTimeZero, [asset duration]) ofTrack:assetVideoTrack atTime:insertionPoint error:&error];
        }
        if (assetAudioTrack != nil) {
            AVMutableCompositionTrack *compositionAudioTrack = [self.mutableComposition addMutableTrackWithMediaType:AVMediaTypeAudio preferredTrackID:kCMPersistentTrackID_Invalid];
            [compositionAudioTrack insertTimeRange:CMTimeRangeMake(kCMTimeZero, [asset duration]) ofTrack:assetAudioTrack atTime:insertionPoint error:&error];
        }
    }
    // Step 2
    // Translate the composition to compensate the movement caused by rotation (since rotation would cause it to move out of frame)
    if (angle == 90)
    {
        t1 = CGAffineTransformMakeTranslation(assetVideoTrack.naturalSize.height, 0.0);
    }else if (angle == -90){
        t1 = CGAffineTransformMakeTranslation(0.0, assetVideoTrack.naturalSize.width);
    } else {
        // Only +/-90 degree rotations are supported.
        return;
    }
    // Rotate transformation
    t2 = CGAffineTransformRotate(t1, degreesToRadians(angle));
    // Step 3
    // Set the appropriate render sizes and rotational transforms
    if (!self.mutableVideoComposition) {
        // Create a new video composition
        self.mutableVideoComposition = [AVMutableVideoComposition videoComposition];
        self.mutableVideoComposition.renderSize = CGSizeMake(assetVideoTrack.naturalSize.height,assetVideoTrack.naturalSize.width);
        self.mutableVideoComposition.frameDuration = CMTimeMake(1, 30);
        // The rotate transform is set on a layer instruction
        instruction = [AVMutableVideoCompositionInstruction videoCompositionInstruction];
        instruction.timeRange = CMTimeRangeMake(kCMTimeZero, [self.mutableComposition duration]);
        layerInstruction = [AVMutableVideoCompositionLayerInstruction videoCompositionLayerInstructionWithAssetTrack:(self.mutableComposition.tracks)[0]];
        [layerInstruction setTransform:t2 atTime:kCMTimeZero];
    } else {
        self.mutableVideoComposition.renderSize = CGSizeMake(self.mutableVideoComposition.renderSize.height, self.mutableVideoComposition.renderSize.width);
        // Extract the existing layer instruction on the mutableVideoComposition
        instruction = (AVMutableVideoCompositionInstruction*)(self.mutableVideoComposition.instructions)[0];
        layerInstruction = (AVMutableVideoCompositionLayerInstruction*)(instruction.layerInstructions)[0];
        // Check if a transform already exists on this layer instruction, this is done to add the current transform on top of previous edits
        CGAffineTransform existingTransform;
        if (![layerInstruction getTransformRampForTime:[self.mutableComposition duration] startTransform:&existingTransform endTransform:NULL timeRange:NULL]) {
            [layerInstruction setTransform:t2 atTime:kCMTimeZero];
        } else {
            // Note: the point of origin for rotation is the upper left corner of the composition, t3 is to compensate for origin
            CGAffineTransform t3 = CGAffineTransformMakeTranslation(-1*assetVideoTrack.naturalSize.height/2, 0.0);
            CGAffineTransform newTransform = CGAffineTransformConcat(existingTransform, CGAffineTransformConcat(t2, t3));
            [layerInstruction setTransform:newTransform atTime:kCMTimeZero];
        }
    }
    // Step 4
    // Add the transform instructions to the video composition
    instruction.layerInstructions = @[layerInstruction];
    self.mutableVideoComposition.instructions = @[instruction];
    // Step 5
    // Notify AVSEViewController about rotation operation completion
    // [[NSNotificationCenter defaultCenter] postNotificationName:AVSEEditCommandCompletionNotification object:self];
}
@end
  310. @implementation PHAsset (My)
  311. - (NSString *)orignalFilename {
  312. NSString *filename;
  313. if ([[PHAssetResource class] instancesRespondToSelector:@selector(assetResourcesForAsset:)]) {
  314. NSArray *resources = [PHAssetResource assetResourcesForAsset:self];
  315. PHAssetResource *resource = resources.firstObject;
  316. if (resources) {
  317. filename = resource.originalFilename;
  318. }
  319. }
  320. if (filename == nil) {
  321. filename = [self valueForKey:@"filename"];
  322. if (filename == nil ||
  323. ![filename isKindOfClass:[NSString class]]) {
  324. filename = [NSString stringWithFormat:@"temp%ld", time(NULL)];
  325. }
  326. }
  327. return filename;
  328. }
  329. @end
  330. //
  331. //static inline CGFloat RadiansToDegrees(CGFloat radians) {
  332. // return radians * 180 / M_PI;
  333. //};
  334. @implementation AVAsset (My)
  335. @dynamic videoOrientation;
  336. - (LBVideoOrientation)videoOrientation
  337. {
  338. NSArray *videoTracks = [self tracksWithMediaType:AVMediaTypeVideo];
  339. if ([videoTracks count] == 0) {
  340. return LBVideoOrientationNotFound;
  341. }
  342. AVAssetTrack* videoTrack = [videoTracks objectAtIndex:0];
  343. CGAffineTransform txf = [videoTrack preferredTransform];
  344. CGFloat videoAngleInDegree = RadiansToDegrees(atan2(txf.b, txf.a));
  345. LBVideoOrientation orientation = 0;
  346. switch ((int)videoAngleInDegree) {
  347. case 0:
  348. orientation = LBVideoOrientationRight;
  349. break;
  350. case 90:
  351. orientation = LBVideoOrientationUp;
  352. break;
  353. case 180:
  354. orientation = LBVideoOrientationLeft;
  355. break;
  356. case -90:
  357. orientation = LBVideoOrientationDown;
  358. break;
  359. default:
  360. orientation = LBVideoOrientationNotFound;
  361. break;
  362. }
  363. return orientation;
  364. }
  365. @end