Android Media in Practice (3): Converting Between Images and Video

Posted by alonealice on 2020-11-01

With MediaCodec you can convert between images and video in both directions: compose a sequence of images into a video, or grab still frames out of a video.

Composing Images into a Video

private void createImages() {
    // Decode the source images and push them into the bitmap queue
    Bitmap bitmap1 = BitmapFactory.decodeResource(getResources(), R.drawable.bg1);
    Bitmap bitmap2 = BitmapFactory.decodeResource(getResources(), R.drawable.bg2);
    Bitmap bitmap3 = BitmapFactory.decodeResource(getResources(), R.drawable.bg3);
    Bitmap bitmap4 = BitmapFactory.decodeResource(getResources(), R.drawable.bg4);
    pool.release(bitmap1);
    pool.release(bitmap2);
    pool.release(bitmap3);
    pool.release(bitmap4);
    // Pull the first frame back out; it seeds the encode loop below
    currentBitmap = pool.acquire();
}

Decode the corresponding images and store them in a queue.
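
The pool above is a small bitmap queue whose implementation the article doesn't show. A minimal sketch of what it could look like (the class name and the LinkedBlockingQueue backing are assumptions; release and acquire mirror the calls above):

import android.graphics.Bitmap;
import java.util.concurrent.LinkedBlockingQueue;

// Hypothetical FIFO matching the pool used in createImages():
// release() enqueues a decoded frame, acquire() hands frames to the encoder.
public class BitmapPool {
    private final LinkedBlockingQueue<Bitmap> queue = new LinkedBlockingQueue<>();

    public void release(Bitmap bitmap) {
        queue.offer(bitmap);
    }

    public Bitmap acquire() {
        // Returns null once the queue is drained, which is what ends the encode loop
        return queue.poll();
    }
}

Next, the encoder and muxer are set up: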

private void init() {
    try {
        mediaCodec = MediaCodec.createEncoderByType(MediaFormat.MIMETYPE_VIDEO_AVC);
        // Create the muxer that will write the MP4 file
        mediaMuxer = new MediaMuxer(getSaveVideoPath(), MediaMuxer.OutputFormat.MUXER_OUTPUT_MPEG_4);
    } catch (IOException e) {
        e.printStackTrace();
    }
    colorFormat = getColorFormat();
    MediaFormat mediaFormat = MediaFormat.createVideoFormat(MediaFormat.MIMETYPE_VIDEO_AVC, width, height);
    mediaFormat.setInteger(MediaFormat.KEY_COLOR_FORMAT, colorFormat);
    mediaFormat.setInteger(MediaFormat.KEY_BIT_RATE, 4 * width * height);
    mediaFormat.setInteger(MediaFormat.KEY_FRAME_RATE, 30);
    mediaFormat.setInteger(MediaFormat.KEY_I_FRAME_INTERVAL, 1);

    mediaCodec.configure(mediaFormat, null, null, MediaCodec.CONFIGURE_FLAG_ENCODE);
    mediaCodec.start();
}

This is where the MediaCodec encoder and the MediaMuxer are created and configured.
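
getSaveVideoPath() is not listed in the article. A plausible sketch (the directory and file name are assumptions; app-private storage avoids the storage permission):

// Hypothetical output path for the muxed MP4
private String getSaveVideoPath() {
    File dir = getExternalFilesDir(Environment.DIRECTORY_MOVIES);
    return new File(dir, "image_to_video.mp4").getAbsolutePath();
}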

The matching method for picking a color format the encoder supports:

public int getColorFormat() {
    int colorFormat = 0;
    int[] formats = this.getMediaCodecList();

    // Take the first YUV420 variant the encoder advertises
    lab:
    for (int format : formats) {
        switch (format) {
            case MediaCodecInfo.CodecCapabilities.COLOR_FormatYUV420SemiPlanar:       // yuv420sp
            case MediaCodecInfo.CodecCapabilities.COLOR_FormatYUV420Planar:           // yuv420p
            case MediaCodecInfo.CodecCapabilities.COLOR_FormatYUV420PackedSemiPlanar: // yuv420psp
            case MediaCodecInfo.CodecCapabilities.COLOR_FormatYUV420PackedPlanar:     // yuv420pp
                colorFormat = format;
                break lab;
        }
    }

    if (colorFormat <= 0) {
        colorFormat = MediaCodecInfo.CodecCapabilities.COLOR_FormatYUV420SemiPlanar;
    }
    return colorFormat;
}

public int[] getMediaCodecList() {
    // Enumerate the installed codecs, looking for an AVC encoder
    int numCodecs = MediaCodecList.getCodecCount();
    MediaCodecInfo codecInfo = null;
    for (int i = 0; i < numCodecs && codecInfo == null; i++) {
        MediaCodecInfo info = MediaCodecList.getCodecInfoAt(i);
        if (!info.isEncoder()) {
            continue;
        }
        String[] types = info.getSupportedTypes();
        boolean found = false;
        // Check whether this encoder handles video/avc
        for (int j = 0; j < types.length && !found; j++) {
            if (types[j].equals("video/avc")) {
                found = true;
            }
        }
        if (!found) {
            continue;
        }
        codecInfo = info;
    }
    Log.d(TAG, "found " + codecInfo.getName() + " supporting video/avc");
    MediaCodecInfo.CodecCapabilities capabilities = codecInfo.getCapabilitiesForType("video/avc");
    return capabilities.colorFormats;
}
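
getCodecCount() and getCodecInfoAt() have been deprecated since API 21. On newer devices the same lookup can be written against the instance API, for example:

// API 21+ variant: enumerate regular codecs and return the first AVC encoder
private MediaCodecInfo findAvcEncoder() {
    MediaCodecList list = new MediaCodecList(MediaCodecList.REGULAR_CODECS);
    for (MediaCodecInfo info : list.getCodecInfos()) {
        if (!info.isEncoder()) {
            continue;
        }
        for (String type : info.getSupportedTypes()) {
            if (type.equalsIgnoreCase(MediaFormat.MIMETYPE_VIDEO_AVC)) {
                return info;
            }
        }
    }
    return null; // no AVC encoder found (virtually never happens on real devices)
}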

The core encoding method:

public void encode(Bitmap bitmap) {
    final int TIMEOUT_USEC = 10000;
    isRunning = true;
    long generateIndex = 0;
    MediaCodec.BufferInfo info = new MediaCodec.BufferInfo();
    startTime = System.currentTimeMillis();
    while (isRunning) {
        int inputBufferIndex = mediaCodec.dequeueInputBuffer(TIMEOUT_USEC);
        if (inputBufferIndex >= 0) { // 0 is a valid buffer index
            long ptsUsec = computePresentationTime(generateIndex);
            if (bitmap == null) {
                // Queue drained: signal end of stream, flush the encoder, clean up
                mediaCodec.queueInputBuffer(inputBufferIndex, 0, 0, ptsUsec,
                        MediaCodec.BUFFER_FLAG_END_OF_STREAM);
                isRunning = false;
                drainEncoder(true, info);
                codeFinish();
                new Handler(getMainLooper()).post(() -> Toast.makeText(ImageToVideoActivity.this, "Video created", Toast.LENGTH_SHORT).show());
            } else {
                byte[] data = getNV12(getSize(bitmap.getWidth()), getSize(bitmap.getHeight()), bitmap);
                // Grab the free input buffer and fill it with the YUV frame
                ByteBuffer inputBuffer = mediaCodec.getInputBuffers()[inputBufferIndex];
                inputBuffer.clear();
                inputBuffer.put(data);
                // Hand the frame to the encoder
                mediaCodec.queueInputBuffer(inputBufferIndex, 0, data.length, ptsUsec, 0);
                drainEncoder(false, info);
                bitmap = getCurrentBitmap();
            }
            generateIndex++;
        } else {
            Log.i(TAG, "input buffer not available");
            try {
                Thread.sleep(10);
            } catch (InterruptedException e) {
                e.printStackTrace();
            }
        }
    }
}

Each Bitmap is converted to YUV420 data, copied into one of the MediaCodec input buffers, and queued for encoding.
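
Two helpers are referenced but not listed. computePresentationTime() typically derives a microsecond timestamp from the frame index and the 30 fps rate configured in init(); getSize() presumably snaps a dimension to something the encoder accepts. Both sketches below are assumptions:

// Timestamp in microseconds for frame N at 30 fps (matches KEY_FRAME_RATE above)
private long computePresentationTime(long frameIndex) {
    return 132 + frameIndex * 1000000 / 30;
}

// Hypothetical: round down to an even value, since YUV420 subsampling
// requires even width and height (some encoders prefer 16-pixel alignment)
private int getSize(int size) {
    return size & ~1;
}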

The dispatch into the YUV420 conversion routines:

private byte[] getNV12(int inputWidth, int inputHeight, Bitmap scaled) {
    int[] argb = new int[inputWidth * inputHeight];
    scaled.getPixels(argb, 0, inputWidth, 0, 0, inputWidth, inputHeight);

    // YUV420 needs 1.5 bytes per pixel
    byte[] yuv = new byte[inputWidth * inputHeight * 3 / 2];

    switch (colorFormat) {
        case MediaCodecInfo.CodecCapabilities.COLOR_FormatYUV420SemiPlanar: // yuv420sp
            encodeYUV420SP(yuv, argb, inputWidth, inputHeight);
            break;
        case MediaCodecInfo.CodecCapabilities.COLOR_FormatYUV420Planar: // yuv420p
            encodeYUV420P(yuv, argb, inputWidth, inputHeight);
            break;
        case MediaCodecInfo.CodecCapabilities.COLOR_FormatYUV420PackedSemiPlanar: // yuv420psp
            encodeYUV420PSP(yuv, argb, inputWidth, inputHeight);
            break;
        case MediaCodecInfo.CodecCapabilities.COLOR_FormatYUV420PackedPlanar: // yuv420pp
            encodeYUV420PP(yuv, argb, inputWidth, inputHeight);
            break;
    }
    return yuv;
}

private void encodeYUV420SP(byte[] yuv420sp, int[] argb, int width, int height) {
    final int frameSize = width * height;

    int yIndex = 0;
    int uvIndex = frameSize; // interleaved chroma plane starts after the Y plane

    int R, G, B, Y, U, V;
    int index = 0;
    for (int j = 0; j < height; j++) {
        for (int i = 0; i < width; i++) {
            // Alpha is ignored
            R = (argb[index] & 0xff0000) >> 16;
            G = (argb[index] & 0xff00) >> 8;
            B = argb[index] & 0xff;

            // BT.601 RGB -> YUV. The variable names are swapped relative to the
            // standard formulas: V below holds Cb and U holds Cr, so the pair is
            // written Cb then Cr (semi-planar NV12 order).
            Y = ((66 * R + 129 * G + 25 * B + 128) >> 8) + 16;
            V = ((-38 * R - 74 * G + 112 * B + 128) >> 8) + 128; // actually Cb
            U = ((112 * R - 94 * G - 18 * B + 128) >> 8) + 128;  // actually Cr

            // Clamp to [0, 255]; chroma is subsampled 2x2
            yuv420sp[yIndex++] = (byte) ((Y < 0) ? 0 : ((Y > 255) ? 255 : Y));
            if (j % 2 == 0 && index % 2 == 0) {
                yuv420sp[uvIndex++] = (byte) ((V < 0) ? 0 : ((V > 255) ? 255 : V));
                yuv420sp[uvIndex++] = (byte) ((U < 0) ? 0 : ((U > 255) ? 255 : U));
            }

            index++;
        }
    }
}
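
The planar variant encodeYUV420P() (and the two packed variants) use the same BT.601 math; only the chroma layout differs. A sketch of the planar (I420-style) version, written under the same assumptions as the code above:

private void encodeYUV420P(byte[] yuv420p, int[] argb, int width, int height) {
    final int frameSize = width * height;
    int yIndex = 0;
    int uIndex = frameSize;                 // U plane follows the Y plane
    int vIndex = frameSize + frameSize / 4; // V plane follows the U plane

    int index = 0;
    for (int j = 0; j < height; j++) {
        for (int i = 0; i < width; i++) {
            int R = (argb[index] & 0xff0000) >> 16;
            int G = (argb[index] & 0xff00) >> 8;
            int B = argb[index] & 0xff;

            int Y = ((66 * R + 129 * G + 25 * B + 128) >> 8) + 16;
            int U = ((-38 * R - 74 * G + 112 * B + 128) >> 8) + 128;
            int V = ((112 * R - 94 * G - 18 * B + 128) >> 8) + 128;

            yuv420p[yIndex++] = (byte) Math.max(0, Math.min(255, Y));
            if (j % 2 == 0 && i % 2 == 0) {
                yuv420p[uIndex++] = (byte) Math.max(0, Math.min(255, U));
                yuv420p[vIndex++] = (byte) Math.max(0, Math.min(255, V));
            }
            index++;
        }
    }
}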

The core method that drains the encoder and writes its output into the muxer:

private void drainEncoder(boolean endOfStream, MediaCodec.BufferInfo bufferInfo) {
    final int TIMEOUT_USEC = 10000;

    ByteBuffer[] buffers = mediaCodec.getOutputBuffers();
    // Note: signalEndOfInputStream() is only valid for Surface input. With
    // ByteBuffer input, EOS was already signaled in encode() via
    // queueInputBuffer() with BUFFER_FLAG_END_OF_STREAM, so nothing extra
    // is needed here when endOfStream is true.

    while (true) {
        int encoderStatus = mediaCodec.dequeueOutputBuffer(bufferInfo, TIMEOUT_USEC);
        if (encoderStatus == MediaCodec.INFO_TRY_AGAIN_LATER) {
            if (!endOfStream) {
                break; // out of while
            } else {
                Log.i(TAG, "no output available, spinning to await EOS");
            }
        } else if (encoderStatus == MediaCodec.INFO_OUTPUT_FORMAT_CHANGED) {
            // The encoder emits its real output format exactly once, before any
            // data; this is the point where the muxer track is added and started
            if (mMuxerStarted) {
                throw new RuntimeException("format changed twice");
            }

            MediaFormat mediaFormat = mediaCodec.getOutputFormat();
            mTrackIndex = mediaMuxer.addTrack(mediaFormat);
            mediaMuxer.start();
            mMuxerStarted = true;
        } else if (encoderStatus == MediaCodec.INFO_OUTPUT_BUFFERS_CHANGED) {
            // The output buffers changed; refresh our reference
            buffers = mediaCodec.getOutputBuffers();
        } else if (encoderStatus < 0) {
            Log.i(TAG, "unexpected result from encoder.dequeueOutputBuffer: " + encoderStatus);
        } else {
            ByteBuffer outputBuffer = buffers[encoderStatus];
            if (outputBuffer == null) {
                throw new RuntimeException("encoderOutputBuffer " + encoderStatus + " was null");
            }

            if ((bufferInfo.flags & MediaCodec.BUFFER_FLAG_CODEC_CONFIG) != 0) {
                // Codec config (SPS/PPS) is already carried in the track format
                // added above, so don't write it as sample data
                Log.d(TAG, "ignoring BUFFER_FLAG_CODEC_CONFIG");
                bufferInfo.size = 0;
            }

            if (bufferInfo.size != 0) {
                if (!mMuxerStarted) {
                    throw new RuntimeException("muxer hasn't started");
                }

                // Adjust the ByteBuffer values to match BufferInfo
                outputBuffer.position(bufferInfo.offset);
                outputBuffer.limit(bufferInfo.offset + bufferInfo.size);

                Log.d(TAG, "BufferInfo: " + bufferInfo.offset + ","
                        + bufferInfo.size + ","
                        + bufferInfo.presentationTimeUs);

                try {
                    mediaMuxer.writeSampleData(mTrackIndex, outputBuffer, bufferInfo);
                    Log.d(TAG, "wrote sample data");
                } catch (Exception e) {
                    Log.i(TAG, "Too many frames");
                }
            }
            mediaCodec.releaseOutputBuffer(encoderStatus, false);
            if ((bufferInfo.flags & MediaCodec.BUFFER_FLAG_END_OF_STREAM) != 0) {
                if (!endOfStream) {
                    Log.i(TAG, "reached end of stream unexpectedly");
                } else {
                    Log.i(TAG, "end of stream reached");
                }
                break; // out of while
            }
        }
    }
}

drainEncoder() dequeues the encoder's output buffers and writes them into the file through the MediaMuxer.

Finally, once encoding is done, the MediaMuxer and MediaCodec must be stopped and released (important):

public void codeFinish() {
    isRunning = false;
    if (mediaCodec != null) {
        mediaCodec.stop();
        mediaCodec.release();
    }
    if (mediaMuxer != null) {
        try {
            // Only stop the muxer if it was started, but always release it
            if (mMuxerStarted) {
                mediaMuxer.stop();
            }
            mediaMuxer.release();
        } catch (Exception e) {
            e.printStackTrace();
        }
    }
}
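
Putting the pieces together, one plausible call order (the threading here is a sketch; the article does not show its driver code) is:

// Run the whole pipeline off the UI thread; encode() loops until the
// bitmap queue returns null, then drains the encoder and calls codeFinish()
new Thread(() -> {
    createImages();        // decode the images into the queue
    init();                // configure MediaCodec + MediaMuxer
    encode(currentBitmap); // feed frames until the queue is empty
}).start();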

Extracting Images from a Video

If the video file already exists, you can grab specific frames directly with MediaMetadataRetriever:

MediaMetadataRetriever retriever = new MediaMetadataRetriever();
retriever.setDataSource(dataPath);
// Video duration in milliseconds
String time = retriever.extractMetadata(MediaMetadataRetriever.METADATA_KEY_DURATION);
// Duration in seconds
int seconds = Integer.valueOf(time) / 1000;
// Grab the bitmap at each whole second (getFrameAtTime takes microseconds)
for (int i = 1; i <= seconds; i++) {
    Bitmap bitmap = retriever.getFrameAtTime(i * 1000 * 1000L, MediaMetadataRetriever.OPTION_CLOSEST_SYNC);
    if (bitmap == null) {
        continue;
    }
    String path = Environment.getExternalStorageDirectory() + File.separator + i + ".jpg";
    try (FileOutputStream fos = new FileOutputStream(path)) {
        bitmap.compress(Bitmap.CompressFormat.JPEG, 80, fos);
    } catch (Exception e) {
        e.printStackTrace();
    }
}
retriever.release();

If you are instead showing a live camera preview, you can build a snapshot feature on top of the preview callback:

mPreviewCallback = (bytes, camera) -> {
    if (mIsRecording) {
        Frame frame = new Frame();
        frame.mData = bytes;
        frame.mTime = System.nanoTime() / 1000;
        if (frame.mTime - mPreviewImgTime > 1000 * 1000) {
            mPreviewImgTime = frame.mTime;
        }
        // Hand the NV21 preview data to the video encoder
        mVideoEncoder.addFrame(bytes);

        if (startScreenshot) {
            // One-shot snapshot of the current preview frame
            saveBitmap(decodeToBitMap(bytes, camera));
            startScreenshot = false;
        }
    }
};
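
The Frame class holding the preview data is not listed; a minimal sketch matching the fields used above:

// Hypothetical container for one preview frame
public class Frame {
    public byte[] mData; // raw NV21 bytes from onPreviewFrame
    public long mTime;   // capture timestamp in microseconds
}

And the decodeToBitMap() helper that turns the NV21 buffer into a Bitmap: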
public Bitmap decodeToBitMap(byte[] data, Camera _camera) {
    if (data == null || _camera == null) {
        return null;
    }
    Camera.Size size = _camera.getParameters().getPreviewSize();
    try {
        // Wrap the NV21 preview bytes, compress to JPEG, then decode a Bitmap
        YuvImage image = new YuvImage(data, ImageFormat.NV21, size.width, size.height, null);
        ByteArrayOutputStream stream = new ByteArrayOutputStream();
        image.compressToJpeg(new Rect(0, 0, size.width, size.height), 80, stream);
        Bitmap bmp = BitmapFactory.decodeByteArray(stream.toByteArray(), 0, stream.size());
        stream.close();
        return bmp;
    } catch (Exception ex) {
        ex.printStackTrace();
    }
    return null;
}

Inside the PreviewCallback the NV21 byte[] is converted to a Bitmap through YuvImage's JPEG path.
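
saveBitmap() is left to the reader; a straightforward sketch that writes the snapshot as a JPEG (the directory, file name, and quality are assumptions):

// Hypothetical: persist the snapshot to app-private storage (no permission needed)
private void saveBitmap(Bitmap bitmap) {
    if (bitmap == null) {
        return;
    }
    File file = new File(getExternalFilesDir(Environment.DIRECTORY_PICTURES),
            System.currentTimeMillis() + ".jpg");
    try (FileOutputStream fos = new FileOutputStream(file)) {
        bitmap.compress(Bitmap.CompressFormat.JPEG, 80, fos);
    } catch (IOException e) {
        e.printStackTrace();
    }
}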