// UGCKitAssetLoadingController.m
  1. // Copyright (c) 2019 Tencent. All rights reserved.
  2. #import "UGCKitAssetLoadingController.h"
  3. #import <Photos/Photos.h>
  4. #import "UGCKitReporterInternal.h"
  5. #import "UGCKitMem.h"
  6. #import "SDKHeader.h"
  7. #import "UGCKitPieProgressView.h"
// Private class extension: state for the asset-loading screen.
@interface UGCKitAssetLoadingController () <TXVideoJoinerListener> {
    BOOL _loadingIsInterrupt;        // set on cancel / audio interruption; read by iCloud progress handlers to stop requests
    AssetType _assetType;            // whether the selected assets are videos or photos
    TXVideoJoiner * _videoJoiner;    // SDK joiner used to concatenate multiple exported videos
    NSString *_joinedVideoPath;      // output path of the joined video (in NSTemporaryDirectory())
    NSMutableDictionary<NSNumber *, NSNumber *> *_progressCache; // key: loading index, value: progress
    float _prevLoadingProgress;      // last progress value shown; prevents the indicator from moving backwards
}
// Circular progress indicator shown while downloading/joining.
@property (nonatomic, strong) UGCKitPieProgressView *loadingProgressView;
// Status text displayed below the progress view.
@property (nonatomic, strong) IBOutlet UILabel *loadingLabel;
// PHAssets selected by the user (set in -exportAssetList:assetType:).
@property (nonatomic, strong) NSArray *assets;
// Exported UIImages accumulated in photo mode.
@property (nonatomic, strong) NSMutableArray *imagesToEdit;
// Used by -performWithAsset:rotate: to build the rotated composition.
@property (nonatomic, strong) AVMutableComposition *mutableComposition;
@property (nonatomic, strong) AVMutableVideoComposition *mutableVideoComposition;
// Supplies colors and localized strings.
@property (nonatomic, strong) UGCKitTheme *theme;
@end
  24. @implementation UGCKitAssetLoadingController
  25. - (instancetype)initWithTheme:(UGCKitTheme *)theme
  26. {
  27. if (self = [self initWithNibName:nil bundle:nil]) {
  28. _theme = theme;
  29. }
  30. return self;
  31. }
  32. - (void)viewDidLoad {
  33. [super viewDidLoad];
  34. _progressCache = [[NSMutableDictionary alloc] init];
  35. _loadingProgressView = [[UGCKitPieProgressView alloc] initWithFrame:CGRectMake(0, 0, 78, 78)];
  36. _loadingProgressView.tintColor = _theme.progressColor;
  37. [_loadingProgressView addConstraint:[NSLayoutConstraint constraintWithItem:_loadingProgressView
  38. attribute:NSLayoutAttributeWidth
  39. relatedBy:NSLayoutRelationEqual
  40. toItem:nil
  41. attribute:NSLayoutAttributeWidth
  42. multiplier:1
  43. constant:78]];
  44. [_loadingProgressView addConstraint:[NSLayoutConstraint constraintWithItem:_loadingProgressView
  45. attribute:NSLayoutAttributeHeight
  46. relatedBy:NSLayoutRelationEqual
  47. toItem:nil
  48. attribute:NSLayoutAttributeHeight
  49. multiplier:1
  50. constant:78]];
  51. _loadingProgressView.translatesAutoresizingMaskIntoConstraints = NO;
  52. self.navigationItem.leftBarButtonItem = [[UIBarButtonItem alloc] initWithBarButtonSystemItem:UIBarButtonSystemItemCancel
  53. target:self
  54. action:@selector(onCancel:)];
  55. [self.view addSubview:_loadingProgressView];
  56. [self.view addConstraint:[NSLayoutConstraint constraintWithItem:_loadingProgressView
  57. attribute:NSLayoutAttributeCenterX
  58. relatedBy:NSLayoutRelationEqual
  59. toItem:self.view
  60. attribute:NSLayoutAttributeCenterX
  61. multiplier:1
  62. constant:0]];
  63. [self.view addConstraint:[NSLayoutConstraint constraintWithItem:_loadingProgressView
  64. attribute:NSLayoutAttributeCenterY
  65. relatedBy:NSLayoutRelationEqual
  66. toItem:self.view
  67. attribute:NSLayoutAttributeCenterY
  68. multiplier:1
  69. constant:-70]];
  70. _loadingLabel = [[UILabel alloc] initWithFrame:CGRectMake(0, 0, CGRectGetWidth(self.view.bounds), 20)];
  71. _loadingLabel.textColor = _theme.titleColor;
  72. _loadingLabel.textAlignment = NSTextAlignmentCenter;
  73. _loadingLabel.translatesAutoresizingMaskIntoConstraints = NO;
  74. [self.view addSubview:_loadingLabel];
  75. [self.view addConstraint:[NSLayoutConstraint constraintWithItem:_loadingLabel
  76. attribute:NSLayoutAttributeCenterX
  77. relatedBy:NSLayoutRelationEqual
  78. toItem:self.view
  79. attribute:NSLayoutAttributeCenterX
  80. multiplier:1
  81. constant:0]];
  82. [self.view addConstraint:[NSLayoutConstraint constraintWithItem:_loadingLabel
  83. attribute:NSLayoutAttributeTop
  84. relatedBy:NSLayoutRelationEqual
  85. toItem:_loadingProgressView
  86. attribute:NSLayoutAttributeBottom
  87. multiplier:1
  88. constant:20]];
  89. // Do any additional setup after loading the view from its nib.
  90. self.title = [_theme localizedString:@"UGCKit.MediaPicker.ChoosingVideo"];
  91. self.view.backgroundColor = UIColor.blackColor;
  92. self.loadingLabel.text = [_theme localizedString:@"UGCKit.Loading.Decoding"];
  93. TXPreviewParam *param = [[TXPreviewParam alloc] init];
  94. param.videoView = [UIView new];
  95. _videoJoiner = [[TXVideoJoiner alloc] initWithPreview:param];
  96. _videoJoiner.joinerDelegate = self;
  97. }
  98. - (void)viewWillAppear:(BOOL)animated
  99. {
  100. [super viewWillAppear:animated];
  101. [[NSNotificationCenter defaultCenter] addObserver:self
  102. selector:@selector(onAudioSessionEvent:)
  103. name:AVAudioSessionInterruptionNotification
  104. object:nil];
  105. }
  106. - (void)viewWillDisappear:(BOOL)animated
  107. {
  108. [super viewWillDisappear:animated];
  109. [[NSNotificationCenter defaultCenter] removeObserver:self];
  110. }
  111. - (IBAction)onCancel:(id)sender {
  112. _loadingIsInterrupt = YES;
  113. [self.navigationController popViewControllerAnimated:YES];
  114. // if (self.completion) {
  115. // self.completion([UGCKitResult cancelledResult]);
  116. // }
  117. }
  118. - (void) onAudioSessionEvent: (NSNotification *) notification
  119. {
  120. NSDictionary *info = notification.userInfo;
  121. AVAudioSessionInterruptionType type = [info[AVAudioSessionInterruptionTypeKey] unsignedIntegerValue];
  122. if (type == AVAudioSessionInterruptionTypeBegan) {
  123. _loadingIsInterrupt = YES;
  124. [self exportAssetError:nil];
  125. }
  126. }
  127. - (void)didReceiveMemoryWarning {
  128. [super didReceiveMemoryWarning];
  129. // Dispose of any resources that can be recreated.
  130. }
  131. - (void)exportAssetList:(NSArray<PHAsset *> *)assets assetType:(AssetType)assetType;
  132. {
  133. _assets = assets;
  134. _assetType = assetType;
  135. if (_assetType == AssetType_Video) {
  136. _avAssets = [NSMutableArray array];
  137. }else{
  138. _imagesToEdit = [NSMutableArray array];
  139. }
  140. [self exportAssetInternal];
  141. }
  142. - (void)_exportVideo:(PHAsset*)expAsset index:(NSInteger)index completion:(void(^)(AVAsset *asset, NSError *error))completion {
  143. PHVideoRequestOptions *options = [[PHVideoRequestOptions alloc] init];
  144. // 最高质量的视频
  145. options.deliveryMode = PHVideoRequestOptionsDeliveryModeHighQualityFormat;
  146. // 可从iCloud中获取图片
  147. options.networkAccessAllowed = YES;
  148. // 如果是iCloud的视频,可以获取到下载进度
  149. WEAKIFY(self);
  150. options.progressHandler = ^(double progress, NSError * _Nullable error, BOOL * _Nonnull stop, NSDictionary * _Nullable info) {
  151. STRONGIFY(self);
  152. if (self) {
  153. [self loadingCloudVideoProgress:progress index:index];
  154. *stop = self->_loadingIsInterrupt;
  155. } else {
  156. *stop = YES;
  157. }
  158. };
  159. [[PHImageManager defaultManager] requestAVAssetForVideo:expAsset options:options resultHandler:^(AVAsset * _Nullable avAsset, AVAudioMix * _Nullable audioMix, NSDictionary * _Nullable info) {
  160. if (avAsset) {
  161. completion(avAsset, nil);
  162. } else {
  163. NSError *error = info[PHImageErrorKey];
  164. completion(nil, error);
  165. }
  166. }];
  167. }
  168. - (void)_exportPhoto:(PHAsset*)expAsset index:(NSInteger)index completion:(void(^)(UIImage *image, NSError *error))completion {
  169. PHImageRequestOptions *options = [[PHImageRequestOptions alloc] init];
  170. options.version = PHImageRequestOptionsVersionCurrent;
  171. options.networkAccessAllowed = YES;
  172. options.resizeMode = PHImageRequestOptionsResizeModeExact;
  173. options.deliveryMode = PHImageRequestOptionsDeliveryModeHighQualityFormat;
  174. //sync requests are automatically processed this way regardless of the specified mode
  175. //originRequestOptions.deliveryMode = PHImageRequestOptionsDeliveryModeHighQualityFormat;
  176. WEAKIFY(self);
  177. options.progressHandler = ^(double progress, NSError *__nullable error, BOOL *stop, NSDictionary *__nullable info){
  178. STRONGIFY(self);
  179. if (self) {
  180. [self loadingCloudVideoProgress:progress index:index];
  181. *stop = self->_loadingIsInterrupt;
  182. } else {
  183. *stop = YES;
  184. }
  185. };
  186. CGSize maximumSize = CGSizeMake(1280, 1280);
  187. [[PHImageManager defaultManager] requestImageForAsset:expAsset targetSize:maximumSize contentMode:PHImageContentModeDefault options:options resultHandler:^(UIImage * _Nullable result, NSDictionary * _Nullable info) {
  188. if (result) {
  189. completion(result, nil);
  190. } else {
  191. NSError *error = info[PHImageErrorKey];
  192. completion(nil, error);
  193. }
  194. }];
  195. }
// Exports every selected PHAsset (videos as AVAssets or photos as UIImages)
// concurrently via the Photos framework, joins the requests with a dispatch
// group, and delivers a UGCKitResult through self.completion on the main queue.
- (void)exportAssetInternal
{
    [_progressCache removeAllObjects];
    _prevLoadingProgress = 0;
    // Initialize before async operations: seed every index with 0 so the
    // averaged progress has a stable denominator from the start.
    for (NSInteger i = 0; i < _assets.count; ++i) {
        _progressCache[@(i)] = @0;
    }
    // NOTE(review): blockError is written from the Photos result handlers,
    // which may run concurrently; access is unsynchronized (last writer wins)
    // and the pre-enter check below only stops scheduling further requests.
    __block NSError *blockError = nil;
    dispatch_group_t grp = dispatch_group_create();
    for (NSInteger i = 0; i < _assets.count; ++i) {
        PHAsset *asset = _assets[i];
        if (blockError) {
            break;  // a previous request already failed; stop scheduling more
        }
        dispatch_group_enter(grp);
        if (_assetType == AssetType_Video) {
            [self _exportVideo:asset index:i completion:^(AVAsset *asset, NSError *error) {
                if (error) {
                    blockError = error;
                    NSLog(@"Error: %@", error);
                } else {
                    // NOTE(review): insertion order depends on callback arrival,
                    // so the collected order may differ from the selection order.
                    @synchronized (self->_avAssets) {
                        [self->_avAssets addObject:asset];
                    }
                }
                dispatch_group_leave(grp);
            }];
        } else {
            [self _exportPhoto:asset index:i completion:^(UIImage *image, NSError *error) {
                if (error) {
                    blockError = error;
                    NSLog(@"Error: %@", error);
                } else {
                    @synchronized (self->_imagesToEdit) {
                        [self->_imagesToEdit addObject:image];
                    }
                }
                dispatch_group_leave(grp);
            }];
        }
    }
    // Runs after every scheduled request has called dispatch_group_leave.
    dispatch_group_notify(grp, dispatch_get_main_queue(), ^{
        if (blockError) {
            UGCKitResult *result = [[UGCKitResult alloc] init];
            if (blockError.code == 3072 /* PHPhotosErrorUserCancelled */) {
                result.cancelled = YES;
            } else {
                result.code = blockError.code;
                result.info = blockError.userInfo;
            }
            if (self.completion) {
                self.completion(result);
            }
            return;
        }
        if (self->_assetType == AssetType_Video) {
            // A single video (or combining disabled) is handed over directly;
            // multiple videos go through the joiner first.
            if (self->_avAssets.count == 1 || !self.combineVideos){
                UGCKitMedia *media = [UGCKitMedia mediaWithAVAsset:self->_avAssets.firstObject];
                UGCKitResult *result = [[UGCKitResult alloc] init];
                result.media = media;
                if (self.completion) {
                    self.completion(result);
                }
            } else {
                [self joinVideoAssets];
            }
        } else {
            // Photos become a slideshow-style media on a 720x1280 canvas.
            UGCKitMedia *media = [UGCKitMedia mediaWithImages:self->_imagesToEdit canvasSize:CGSizeMake(720,1280)];
            UGCKitResult *result = [[UGCKitResult alloc] init];
            result.media = media;
            if (self.completion) {
                self.completion(result);
            }
        }
    });
}
  273. - (void)exportAssetError:(NSError *)error
  274. {
  275. NSString *errorMessage = error ? error.localizedDescription : [_theme localizedString:@"UGCKit.MediaPicker.HintVideoExportingFailed"];
  276. UIAlertController *alert = [UIAlertController alertControllerWithTitle:@"Error" message:errorMessage preferredStyle:UIAlertControllerStyleAlert];
  277. UIAlertAction *ok = [UIAlertAction actionWithTitle:[_theme localizedString:@"UGCKit.Common.OK"] style:0 handler:^(UIAlertAction * _Nonnull action) {
  278. [self.navigationController dismissViewControllerAnimated:YES completion:nil];
  279. }];
  280. [alert addAction:ok];
  281. [self presentViewController:alert animated:YES completion:nil];
  282. }
  283. - (void)loadingCloudVideoProgress:(float)progress index:(NSInteger)index
  284. {
  285. @synchronized (_progressCache) {
  286. _progressCache[@(index)] = @(progress);
  287. }
  288. __block float total = 0.0;
  289. [_progressCache enumerateKeysAndObjectsUsingBlock:^(NSNumber * _Nonnull key, NSNumber * _Nonnull obj, BOOL * _Nonnull stop) {
  290. total += obj.floatValue;
  291. }];
  292. total /= _progressCache.count;
  293. if (_prevLoadingProgress >= total) {
  294. return;
  295. }
  296. _prevLoadingProgress = total;
  297. NSString *progressText = [_theme localizedString:@"UGCKit.MediaPicker.VideoDownloadingFromiCloud"];
  298. dispatch_async(dispatch_get_main_queue(), ^{
  299. self.loadingLabel.text = progressText;
  300. self.loadingProgressView.progress = total;
  301. // self.loadingProgressView.image = [UIImage imageNamed:[NSString stringWithFormat:@"video_record_share_loading_%d", (int)(total * 8)]];
  302. });
  303. }
  304. - (CGSize)getVideoSize:(CGSize)sourceSize;
  305. {
  306. CGSize videoSize = CGSizeMake(sourceSize.width, sourceSize.height);
  307. if (videoSize.height >= videoSize.width) {
  308. if([self supportCompressSize:CGSizeMake(720, 1280) videoSize:videoSize]){
  309. videoSize = [self compress:CGSizeMake(720, 1280) videoSize:videoSize];
  310. }
  311. }else{
  312. if([self supportCompressSize:CGSizeMake(1280, 720) videoSize:videoSize]){
  313. videoSize = [self compress:CGSizeMake(1280, 720) videoSize:videoSize];
  314. }
  315. }
  316. return videoSize;
  317. }
  318. //判断是否需要压缩图片
  319. -(BOOL)supportCompressSize:(CGSize)compressSize videoSize:(CGSize)videoSize
  320. {
  321. if (videoSize.width >= compressSize.width && videoSize.height >= compressSize.height) {
  322. return YES;
  323. }
  324. if (videoSize.width >= compressSize.height && videoSize.height >= compressSize.width) {
  325. return YES;
  326. }
  327. return NO;
  328. }
  329. //获得压缩后图片大小
  330. - (CGSize)compress:(CGSize)compressSize videoSize:(CGSize)videoSize
  331. {
  332. CGSize size = CGSizeZero;
  333. if (compressSize.height / compressSize.width >= videoSize.height / videoSize.width) {
  334. size.width = compressSize.width;
  335. size.height = compressSize.width * videoSize.height / videoSize.width;
  336. }else{
  337. size.height = compressSize.height;
  338. size.width = compressSize.height * videoSize.width / videoSize.height;
  339. }
  340. return size;
  341. }
  342. - (UIImage*)scaleImage:(UIImage *)image scaleToSize:(CGSize)size{
  343. UIGraphicsBeginImageContext(size);
  344. [image drawInRect:CGRectMake(0, 0, size.width, size.height)];
  345. UIImage* scaledImage = UIGraphicsGetImageFromCurrentImageContext();
  346. UIGraphicsEndImageContext();
  347. return scaledImage;
  348. }
#define degreesToRadians( degrees ) ( ( degrees ) / 180.0 * M_PI )
// Rotates the asset's video by +/-90 degrees by building (or updating) an
// AVMutableComposition plus an AVMutableVideoComposition whose layer
// instruction carries the rotation transform. Only 90 and -90 are supported;
// any other angle returns without touching the compositions.
// NOTE(review): no visible caller in this chunk uses these compositions;
// presumably a later editing step consumes self.mutableVideoComposition.
- (void)performWithAsset:(AVAsset*)asset rotate:(CGFloat)angle
{
    AVMutableVideoCompositionInstruction *instruction = nil;
    AVMutableVideoCompositionLayerInstruction *layerInstruction = nil;
    CGAffineTransform t1;
    CGAffineTransform t2;
    AVAssetTrack *assetVideoTrack = nil;
    AVAssetTrack *assetAudioTrack = nil;
    // Check if the asset contains video and audio tracks
    if ([[asset tracksWithMediaType:AVMediaTypeVideo] count] != 0) {
        assetVideoTrack = [asset tracksWithMediaType:AVMediaTypeVideo][0];
    }
    if ([[asset tracksWithMediaType:AVMediaTypeAudio] count] != 0) {
        assetAudioTrack = [asset tracksWithMediaType:AVMediaTypeAudio][0];
    }
    CMTime insertionPoint = kCMTimeZero;
    NSError *error = nil;  // NOTE(review): insert errors are collected but never checked
    // Step 1
    // Create a composition with the given asset and insert audio and video tracks into it from the asset
    if (!self.mutableComposition) {
        // Check whether a composition has already been created, i.e, some other tool has already been applied
        // Create a new composition
        self.mutableComposition = [AVMutableComposition composition];
        // Insert the video and audio tracks from AVAsset
        if (assetVideoTrack != nil) {
            AVMutableCompositionTrack *compositionVideoTrack = [self.mutableComposition addMutableTrackWithMediaType:AVMediaTypeVideo preferredTrackID:kCMPersistentTrackID_Invalid];
            [compositionVideoTrack insertTimeRange:CMTimeRangeMake(kCMTimeZero, [asset duration]) ofTrack:assetVideoTrack atTime:insertionPoint error:&error];
        }
        if (assetAudioTrack != nil) {
            AVMutableCompositionTrack *compositionAudioTrack = [self.mutableComposition addMutableTrackWithMediaType:AVMediaTypeAudio preferredTrackID:kCMPersistentTrackID_Invalid];
            [compositionAudioTrack insertTimeRange:CMTimeRangeMake(kCMTimeZero, [asset duration]) ofTrack:assetAudioTrack atTime:insertionPoint error:&error];
        }
    }
    // Step 2
    // Translate the composition to compensate the movement caused by rotation (since rotation would cause it to move out of frame)
    if (angle == 90)
    {
        t1 = CGAffineTransformMakeTranslation(assetVideoTrack.naturalSize.height, 0.0);
    }else if (angle == -90){
        t1 = CGAffineTransformMakeTranslation(0.0, assetVideoTrack.naturalSize.width);
    } else {
        // Unsupported angle: leave compositions untouched.
        return;
    }
    // Rotate transformation
    t2 = CGAffineTransformRotate(t1, degreesToRadians(angle));
    // Step 3
    // Set the appropriate render sizes and rotational transforms
    if (!self.mutableVideoComposition) {
        // Create a new video composition
        // First rotation: render size is the source size with width/height swapped.
        self.mutableVideoComposition = [AVMutableVideoComposition videoComposition];
        self.mutableVideoComposition.renderSize = CGSizeMake(assetVideoTrack.naturalSize.height,assetVideoTrack.naturalSize.width);
        self.mutableVideoComposition.frameDuration = CMTimeMake(1, 30);
        // The rotate transform is set on a layer instruction
        instruction = [AVMutableVideoCompositionInstruction videoCompositionInstruction];
        instruction.timeRange = CMTimeRangeMake(kCMTimeZero, [self.mutableComposition duration]);
        layerInstruction = [AVMutableVideoCompositionLayerInstruction videoCompositionLayerInstructionWithAssetTrack:(self.mutableComposition.tracks)[0]];
        [layerInstruction setTransform:t2 atTime:kCMTimeZero];
    } else {
        // Subsequent rotation: swap the existing render size and stack the new
        // transform on top of whatever transform is already applied.
        self.mutableVideoComposition.renderSize = CGSizeMake(self.mutableVideoComposition.renderSize.height, self.mutableVideoComposition.renderSize.width);
        // Extract the existing layer instruction on the mutableVideoComposition
        instruction = (AVMutableVideoCompositionInstruction*)(self.mutableVideoComposition.instructions)[0];
        layerInstruction = (AVMutableVideoCompositionLayerInstruction*)(instruction.layerInstructions)[0];
        // Check if a transform already exists on this layer instruction, this is done to add the current transform on top of previous edits
        CGAffineTransform existingTransform;
        if (![layerInstruction getTransformRampForTime:[self.mutableComposition duration] startTransform:&existingTransform endTransform:NULL timeRange:NULL]) {
            [layerInstruction setTransform:t2 atTime:kCMTimeZero];
        } else {
            // Note: the point of origin for rotation is the upper left corner of the composition, t3 is to compensate for origin
            CGAffineTransform t3 = CGAffineTransformMakeTranslation(-1*assetVideoTrack.naturalSize.height/2, 0.0);
            CGAffineTransform newTransform = CGAffineTransformConcat(existingTransform, CGAffineTransformConcat(t2, t3));
            [layerInstruction setTransform:newTransform atTime:kCMTimeZero];
        }
    }
    // Step 4
    // Add the transform instructions to the video composition
    instruction.layerInstructions = @[layerInstruction];
    self.mutableVideoComposition.instructions = @[instruction];
    // Step 5
    // Notify AVSEViewController about rotation operation completion
    // [[NSNotificationCenter defaultCenter] postNotificationName:AVSEEditCommandCompletionNotification object:self];
}
  431. - (void)joinVideoAssets {
  432. _joinedVideoPath = [NSTemporaryDirectory() stringByAppendingPathComponent:@"outputJoin.mp4"];
  433. int ret = 0;
  434. NSArray *videoAssets = _avAssets;
  435. if ([videoAssets.firstObject isKindOfClass:[NSString class]]) {
  436. ret = [_videoJoiner setVideoPathList:videoAssets];
  437. } else {
  438. ret = [_videoJoiner setVideoAssetList:videoAssets];
  439. }
  440. _videoJoiner.joinerDelegate = self;
  441. if (ret == 0) {
  442. [_videoJoiner joinVideo:VIDEO_COMPRESSED_720P videoOutputPath:_joinedVideoPath];
  443. _loadingLabel.text = [_theme localizedString:@"UGCKit.Media.VideoSynthesizing"];
  444. self.loadingProgressView.progress = 0;
  445. // self.loadingProgressView.image = [UIImage imageNamed:[NSString stringWithFormat:@"video_record_share_loading_%d", 0]];
  446. } else {
  447. if (self.completion) {
  448. UGCKitResult *result = [[UGCKitResult alloc] init];
  449. result.code = ret;
  450. result.info = @{NSLocalizedDescriptionKey: [NSString stringWithFormat:@"Join failed: %d", ret]};
  451. self.completion(result);
  452. }
  453. }
  454. }
  455. #pragma mark TXVideoJoinerListener
  456. -(void) onJoinProgress:(float)progress
  457. {
  458. self.loadingProgressView.progress = progress;
  459. }
  460. -(void) onJoinComplete:(TXJoinerResult *)joinResult
  461. {
  462. if (joinResult.retCode == JOINER_RESULT_OK) {
  463. UGCKitResult *result = [[UGCKitResult alloc] init];
  464. result.media = [UGCKitMedia mediaWithVideoPath: _joinedVideoPath];
  465. if (self.completion) {
  466. self.completion(result);
  467. }
  468. } else {
  469. UGCKitResult *result = [[UGCKitResult alloc] init];
  470. result.code = joinResult.retCode;
  471. result.info = @{NSLocalizedDescriptionKey: joinResult.descMsg};
  472. if (self.completion) {
  473. self.completion(result);
  474. }
  475. }
  476. [UGCKitReporter report:UGCKitReportItem_videojoiner userName:nil code:joinResult.retCode msg:joinResult.descMsg];
  477. }
  478. @end