Many video apps offer a feature that converts an online video into a GIF. This article walks through the idea and its implementation. We already know how to generate a GIF from a local video, so the trick is simply to trim the online video down to a local clip first. Judging by its behavior, the Tencent Video app takes the same approach. Without further ado, here is the code:
Step 1: Trim the video
#pragma mark - Trim the video
- (void)interceptVideoAndVideoUrl:(NSURL *)videoUrl withOutPath:(NSString *)outPath outputFileType:(NSString *)outputFileType range:(NSRange)videoRange intercept:(InterceptBlock)interceptBlock {
    _interceptBlock = interceptBlock;
    // No background music is added, so the audio URL stays nil
    NSURL *audioUrl = nil;
    // AVURLAsset reads media information, both video and audio
    AVURLAsset *audioAsset = [[AVURLAsset alloc] initWithURL:audioUrl options:nil];
    AVURLAsset *videoAsset = [[AVURLAsset alloc] initWithURL:videoUrl options:nil];
    // Create an AVMutableComposition to hold the AVMutableCompositionTracks for the video and audio assets
    AVMutableComposition *mixComposition = [AVMutableComposition composition];
    // CMTimeRangeMake(start, duration): start is the starting time, duration the length, both CMTime values
    // CMTimeMake(int64_t value, int32_t timescale): value is the total frame count and timescale the frames per second, so the real duration in seconds is value / timescale. The timescale usually stays fixed; you control the clip length by changing value.
    // CMTimeMakeWithSeconds(Float64 seconds, int32_t preferredTimeScale): seconds is the clip length in seconds, preferredTimeScale the frames per second
    // Start position startTime
    CMTime startTime = CMTimeMakeWithSeconds(videoRange.location, videoAsset.duration.timescale);
    // Clip length videoDuration
    CMTime videoDuration = CMTimeMakeWithSeconds(videoRange.length, videoAsset.duration.timescale);
    CMTimeRange videoTimeRange = CMTimeRangeMake(startTime, videoDuration);
    // Video track: compositionVideoTrack
    AVMutableCompositionTrack *compositionVideoTrack = [mixComposition addMutableTrackWithMediaType:AVMediaTypeVideo preferredTrackID:kCMPersistentTrackID_Invalid];
    // Guard against an out-of-bounds access: tracksWithMediaType returns an empty array when no matching track exists
    // timeRange: the range to copy
    // ofTrack: the source track
    // atTime: where to place the clip on the composition's timeline
    [compositionVideoTrack insertTimeRange:videoTimeRange ofTrack:([videoAsset tracksWithMediaType:AVMediaTypeVideo].count > 0) ? [videoAsset tracksWithMediaType:AVMediaTypeVideo].firstObject : nil atTime:kCMTimeZero error:nil];
    // Original audio track (skip this block if you do not want the video's own sound in the merged file)
    AVMutableCompositionTrack *compositionVoiceTrack = [mixComposition addMutableTrackWithMediaType:AVMediaTypeAudio preferredTrackID:kCMPersistentTrackID_Invalid];
    [compositionVoiceTrack insertTimeRange:videoTimeRange ofTrack:([videoAsset tracksWithMediaType:AVMediaTypeAudio].count > 0) ? [videoAsset tracksWithMediaType:AVMediaTypeAudio].firstObject : nil atTime:kCMTimeZero error:nil];
    // The audio clip range matches the video length
    CMTimeRange audioTimeRange = CMTimeRangeMake(kCMTimeZero, videoDuration);
    // Background-music track: compositionAudioTrack
    AVMutableCompositionTrack *compositionAudioTrack = [mixComposition addMutableTrackWithMediaType:AVMediaTypeAudio preferredTrackID:kCMPersistentTrackID_Invalid];
    [compositionAudioTrack insertTimeRange:audioTimeRange ofTrack:([audioAsset tracksWithMediaType:AVMediaTypeAudio].count > 0) ? [audioAsset tracksWithMediaType:AVMediaTypeAudio].firstObject : nil atTime:kCMTimeZero error:nil];
    // AVAssetExportSession merges the tracks and exports the result; presetName sets the output preset
    AVAssetExportSession *assetExportSession = [[AVAssetExportSession alloc] initWithAsset:mixComposition presetName:AVAssetExportPresetPassthrough];
    // Output path for the merged video
    NSURL *outPutURL = [NSURL fileURLWithPath:outPath];
    if ([[NSFileManager defaultManager] fileExistsAtPath:outPath]) {
        [[NSFileManager defaultManager] removeItemAtPath:outPath error:nil];
    }
    // Output container format
    assetExportSession.outputFileType = outputFileType;
    assetExportSession.outputURL = outPutURL;
    // Optimize the output file for network playback
    assetExportSession.shouldOptimizeForNetworkUse = YES;
    [assetExportSession exportAsynchronouslyWithCompletionHandler:^{
        dispatch_async(dispatch_get_main_queue(), ^{
            switch (assetExportSession.status) {
                case AVAssetExportSessionStatusFailed:
                    if (_interceptBlock) {
                        _interceptBlock(assetExportSession.error, outPutURL);
                    }
                    break;
                case AVAssetExportSessionStatusCancelled: {
                    logdebug(@"Export Status: Cancelled");
                    break;
                }
                case AVAssetExportSessionStatusCompleted: {
                    if (_interceptBlock) {
                        _interceptBlock(nil, outPutURL);
                    }
                    break;
                }
                case AVAssetExportSessionStatusUnknown: {
                    logdebug(@"Export Status: Unknown");
                    break;
                }
                case AVAssetExportSessionStatusExporting: {
                    logdebug(@"Export Status: Exporting");
                    break;
                }
                case AVAssetExportSessionStatusWaiting: {
                    logdebug(@"Export Status: Waiting");
                    break;
                }
            }
        });
    }];
}
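For reference, here is a minimal sketch of a call site for this method. It assumes `InterceptBlock` takes an `NSError *` and the output `NSURL *` (matching how `_interceptBlock` is invoked above); the paths and the `localVideoPath` variable are placeholders for illustration, not part of the original post:

// Trim a 5-second clip starting at second 10 into Documents/clip.mp4
NSString *outPath = [NSSearchPathForDirectoriesInDomains(NSDocumentDirectory, NSUserDomainMask, YES).firstObject
                     stringByAppendingPathComponent:@"clip.mp4"];
// localVideoPath: wherever the online video was downloaded or cached
NSURL *videoUrl = [NSURL fileURLWithPath:localVideoPath];
[self interceptVideoAndVideoUrl:videoUrl
                    withOutPath:outPath
                 outputFileType:AVFileTypeMPEG4
                          range:NSMakeRange(10, 5)
                      intercept:^(NSError *error, NSURL *outputURL) {
    if (error == nil) {
        NSLog(@"Trimmed clip saved to %@", outputURL);
    }
}];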
Step 2: Generate a GIF from the local video
/**
 Generate a GIF image

 @param videoURL      file URL of the source video
 @param loopCount     number of times the GIF should loop
 @param time          delay between frames, defaulting to 0.25 s
 @param imagePath     file path where the GIF will be written
 @param completeBlock completion callback
 */
#pragma mark - Create the GIF
- (void)createGIFfromURL:(NSURL *)videoURL loopCount:(int)loopCount delayTime:(CGFloat)time gifImagePath:(NSString *)imagePath complete:(CompleteBlock)completeBlock {
    _completeBlock = completeBlock;
    float delayTime = time ?: 0.25;
    // Create the properties dictionaries
    NSDictionary *fileProperties = [self filePropertiesWithLoopCount:loopCount];
    NSDictionary *frameProperties = [self framePropertiesWithDelayTime:delayTime];
    AVURLAsset *asset = [AVURLAsset assetWithURL:videoURL];
    float videoWidth = [[[asset tracksWithMediaType:AVMediaTypeVideo] objectAtIndex:0] naturalSize].width;
    float videoHeight = [[[asset tracksWithMediaType:AVMediaTypeVideo] objectAtIndex:0] naturalSize].height;
    // Pick an output scale from the source resolution: the larger the video, the smaller the GIF
    GIFSize optimalSize = GIFSizeMedium;
    if (videoWidth >= 1200 || videoHeight >= 1200)
        optimalSize = GIFSizeVeryLow;
    else if (videoWidth >= 800 || videoHeight >= 800)
        optimalSize = GIFSizeLow;
    else if (videoWidth >= 400 || videoHeight >= 400)
        optimalSize = GIFSizeMedium;
    else if (videoWidth < 400 || videoHeight < 400)
        optimalSize = GIFSizeHigh;
    // Get the length of the video in seconds
    float videoLength = (float)asset.duration.value / asset.duration.timescale;
    int framesPerSecond = 4;
    int frameCount = videoLength * framesPerSecond;
    // How far along the video track we want to move, in seconds
    float increment = (float)videoLength / frameCount;
    // Collect the capture timestamps
    NSMutableArray *timePoints = [NSMutableArray array];
    for (int currentFrame = 0; currentFrame < frameCount; ++currentFrame) {
        float seconds = (float)increment * currentFrame;
        // timeInterval is a timescale constant defined elsewhere in the class (see the declarations after the helpers below)
        CMTime time = CMTimeMakeWithSeconds(seconds, [timeInterval intValue]);
        [timePoints addObject:[NSValue valueWithCMTime:time]];
    }
    // Build the GIF and invoke the completion block
    // Use the explicitly set _gifSize if any, otherwise fall back to the size computed from the resolution
    NSURL *gifURL = [self createGIFforTimePoints:timePoints fromURL:videoURL fileProperties:fileProperties frameProperties:frameProperties gifImagePath:imagePath frameCount:frameCount gifSize:_gifSize ?: optimalSize];
    if (_completeBlock) {
        // Return the GIF URL
        _completeBlock(_error, gifURL);
    }
}
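And a matching sketch of a call site for this step, feeding in the clip produced in step one. `CompleteBlock` is assumed to take an `NSError *` and the GIF's `NSURL *`, and a loop count of 0 makes the GIF loop forever; the path is again a placeholder:

NSString *gifPath = [NSSearchPathForDirectoriesInDomains(NSDocumentDirectory, NSUserDomainMask, YES).firstObject
                     stringByAppendingPathComponent:@"clip.gif"];
// outputURL: the trimmed clip returned by step one
[self createGIFfromURL:outputURL
             loopCount:0
             delayTime:0.25
          gifImagePath:gifPath
              complete:^(NSError *error, NSURL *gifURL) {
    if (error == nil) {
        NSLog(@"GIF written to %@", gifURL);
    }
}];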
With these two steps you end up with both the trimmed video and the GIF, which can be stored in the sandbox. Here are the supporting methods used by the two steps:
#pragma mark - Base methods
- (NSURL *)createGIFforTimePoints:(NSArray *)timePoints fromURL:(NSURL *)url fileProperties:(NSDictionary *)fileProperties frameProperties:(NSDictionary *)frameProperties gifImagePath:(NSString *)imagePath frameCount:(int)frameCount gifSize:(GIFSize)gifSize {
    NSURL *fileURL = [NSURL fileURLWithPath:imagePath];
    if (fileURL == nil)
        return nil;
    CGImageDestinationRef destination = CGImageDestinationCreateWithURL((__bridge CFURLRef)fileURL, kUTTypeGIF, frameCount, NULL);
    if (!destination) {
        return nil;
    }
    CGImageDestinationSetProperties(destination, (__bridge CFDictionaryRef)fileProperties);
    AVURLAsset *asset = [AVURLAsset URLAssetWithURL:url options:nil];
    AVAssetImageGenerator *generator = [AVAssetImageGenerator assetImageGeneratorWithAsset:asset];
    generator.appliesPreferredTrackTransform = YES;
    // tolerance and timeInterval are constants defined elsewhere in the class (see the declarations below)
    CMTime tol = CMTimeMakeWithSeconds([tolerance floatValue], [timeInterval intValue]);
    generator.requestedTimeToleranceBefore = tol;
    generator.requestedTimeToleranceAfter = tol;
    NSError *error = nil;
    CGImageRef previousImageRefCopy = nil;
    for (NSValue *time in timePoints) {
        CGImageRef imageRef;
#if TARGET_OS_IPHONE || TARGET_IPHONE_SIMULATOR
        // Scale the frame down unless the requested GIF size is full size (gifSize / 10 == 1)
        imageRef = (float)gifSize / 10 != 1 ? createImageWithScale([generator copyCGImageAtTime:[time CMTimeValue] actualTime:nil error:&error], (float)gifSize / 10) : [generator copyCGImageAtTime:[time CMTimeValue] actualTime:nil error:&error];
#elif TARGET_OS_MAC
        imageRef = [generator copyCGImageAtTime:[time CMTimeValue] actualTime:nil error:&error];
#endif
        if (error) {
            _error = error;
            logdebug(@"Error copying image: %@", error);
            CGImageRelease(previousImageRefCopy);
            CFRelease(destination);
            return nil;
        }
        if (imageRef) {
            CGImageRelease(previousImageRefCopy);
            previousImageRefCopy = CGImageCreateCopy(imageRef);
        } else if (previousImageRefCopy) {
            // Fall back to duplicating the previous frame when a copy fails
            imageRef = CGImageCreateCopy(previousImageRefCopy);
        } else {
            _error = [NSError errorWithDomain:NSStringFromClass([self class]) code:0 userInfo:@{NSLocalizedDescriptionKey: @"Error copying image and no previous frames to duplicate"}];
            logdebug(@"Error copying image and no previous frames to duplicate");
            CFRelease(destination);
            return nil;
        }
        CGImageDestinationAddImage(destination, imageRef, (__bridge CFDictionaryRef)frameProperties);
        CGImageRelease(imageRef);
    }
    CGImageRelease(previousImageRefCopy);
    // Finalize the GIF
    if (!CGImageDestinationFinalize(destination)) {
        _error = error;
        logdebug(@"Failed to finalize GIF destination: %@", error);
        CFRelease(destination);
        return nil;
    }
    CFRelease(destination);
    return fileURL;
}
#pragma mark - Helpers
CGImageRef createImageWithScale(CGImageRef imageRef, float scale) {
#if TARGET_OS_IPHONE || TARGET_IPHONE_SIMULATOR
    CGSize newSize = CGSizeMake(CGImageGetWidth(imageRef) * scale, CGImageGetHeight(imageRef) * scale);
    CGRect newRect = CGRectIntegral(CGRectMake(0, 0, newSize.width, newSize.height));
    UIGraphicsBeginImageContextWithOptions(newSize, NO, 0);
    CGContextRef context = UIGraphicsGetCurrentContext();
    if (!context) {
        return NULL;
    }
    // Set the quality level to use when rescaling
    CGContextSetInterpolationQuality(context, kCGInterpolationHigh);
    CGAffineTransform flipVertical = CGAffineTransformMake(1, 0, 0, -1, 0, newSize.height);
    CGContextConcatCTM(context, flipVertical);
    // Draw into the context; this scales the image
    CGContextDrawImage(context, newRect, imageRef);
    // Release the old image
    CFRelease(imageRef);
    // Get the resized image back out of the context
    imageRef = CGBitmapContextCreateImage(context);
    UIGraphicsEndImageContext();
#endif
    return imageRef;
}
#pragma mark - Properties
- (NSDictionary *)filePropertiesWithLoopCount:(int)loopCount {
    return @{(NSString *)kCGImagePropertyGIFDictionary:
                 @{(NSString *)kCGImagePropertyGIFLoopCount: @(loopCount)}
    };
}
- (NSDictionary *)framePropertiesWithDelayTime:(float)delayTime {
    return @{(NSString *)kCGImagePropertyGIFDictionary:
                 @{(NSString *)kCGImagePropertyGIFDelayTime: @(delayTime)},
             (NSString *)kCGImagePropertyColorModel: (NSString *)kCGImagePropertyColorModelRGB
    };
}
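Note that the code above references several identifiers it never declares: `InterceptBlock`, `CompleteBlock`, the `GIFSize` enum, the `tolerance` and `timeInterval` constants, and ivars such as `_gifSize` and `_error`. These come from NSGIF, the open-source library this implementation follows. A plausible reconstruction of the missing declarations, offered as an assumption rather than the author's exact header, would be:

// Assumed declarations, modeled on NSGIF; the original post does not show them
typedef void (^InterceptBlock)(NSError *error, NSURL *outputURL);
typedef void (^CompleteBlock)(NSError *error, NSURL *gifURL);

// GIFSize / 10 is the scale factor applied to each captured frame
typedef NS_ENUM(NSInteger, GIFSize) {
    GIFSizeVeryLow  = 2,
    GIFSizeLow      = 3,
    GIFSizeMedium   = 5,
    GIFSizeHigh     = 7,
    GIFSizeOriginal = 10
};

// Frame-grab tolerance and timescale, as defined in NSGIF
#define tolerance    @(0.01)
#define timeInterval @(600)

// Ivars assumed by the methods above:
//   InterceptBlock _interceptBlock;
//   CompleteBlock  _completeBlock;
//   NSError       *_error;
//   GIFSize        _gifSize;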
Finally, the trimmed local video can be played back with AVPlayer, and the resulting GIF can be displayed with UIWebView, WKWebView, or YYImage.
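As an illustration of the last point, displaying the GIF with YYImage might look like this (a sketch that assumes the YYImage library is integrated; `gifURL` is the URL returned in step two):

// YYAnimatedImageView drives the GIF animation frame by frame
NSData *gifData = [NSData dataWithContentsOfURL:gifURL];
YYImage *gifImage = [YYImage imageWithData:gifData];
YYAnimatedImageView *gifView = [[YYAnimatedImageView alloc] initWithImage:gifImage];
gifView.frame = CGRectMake(0, 0, 200, 200);
[self.view addSubview:gifView];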
That is all for this article; hopefully it is useful for your own projects.
Original article: https://www.jianshu.com/p/5ea3cbdb17ae