Running the FFmpeg Examples on Android


Reference


Project source code

FFmpegExamples

Code excerpts

  • CMakeLists.txt
cmake_minimum_required(VERSION 3.18.1)

project("ffmpegexamples")

include_directories(ffmpeg/include)

include_directories(lib/include)

include_directories(only4test/ffmpeg/examples)

file(
        GLOB
        C_CPP_LIST
        *.cpp *.c
        only4test/ffmpeg/examples/*.cpp only4test/ffmpeg/examples/*.c
)
add_library(native-lib SHARED ${C_CPP_LIST})

# libavformat: container format muxing and demuxing
add_library(lib_avformat SHARED IMPORTED)
set_target_properties(lib_avformat PROPERTIES IMPORTED_LOCATION ${CMAKE_SOURCE_DIR}/../jniLib/${ANDROID_ABI}/libavformat.so)

# libavcodec: encoding and decoding
add_library(lib_avcodec SHARED IMPORTED)
set_target_properties(lib_avcodec PROPERTIES IMPORTED_LOCATION ${CMAKE_SOURCE_DIR}/../jniLib/${ANDROID_ABI}/libavcodec.so)

# libavfilter: filters, e.g. watermarks
add_library(lib_avfilter SHARED IMPORTED)
set_target_properties(lib_avfilter PROPERTIES IMPORTED_LOCATION ${CMAKE_SOURCE_DIR}/../jniLib/${ANDROID_ABI}/libavfilter.so)

# libswresample: raw audio resampling and format conversion
add_library(lib_swresample SHARED IMPORTED)
set_target_properties(lib_swresample PROPERTIES IMPORTED_LOCATION ${CMAKE_SOURCE_DIR}/../jniLib/${ANDROID_ABI}/libswresample.so)

# libswscale: pixel format conversion, scaling, etc.
add_library(lib_swscale SHARED IMPORTED)
set_target_properties(lib_swscale PROPERTIES IMPORTED_LOCATION ${CMAKE_SOURCE_DIR}/../jniLib/${ANDROID_ABI}/libswscale.so)

add_library(lib_avutil SHARED IMPORTED)
set_target_properties(lib_avutil PROPERTIES IMPORTED_LOCATION ${CMAKE_SOURCE_DIR}/../jniLib/${ANDROID_ABI}/libavutil.so)

target_link_libraries(
        native-lib

        lib_avformat
        lib_avcodec
        lib_avfilter
        lib_swresample
        lib_swscale
        lib_avutil

        log
        android
        OpenSLES
)
  • native-lib.cpp
#include <jni.h>
#include <string>

#include "log.h"

#include "only4test/ffmpeg/examples/avio_reading.c"
#include "only4test/ffmpeg/examples/decode_audio.c"
#include "only4test/ffmpeg/examples/decode_video.c"
#include "only4test/ffmpeg/examples/demuxing_decoding.c"
#include "only4test/ffmpeg/examples/encode_audio.c"
#include "only4test/ffmpeg/examples/encode_video.c"
#include "only4test/ffmpeg/examples/ffhash.c"
#include "only4test/ffmpeg/examples/filter_audio.c"
#include "only4test/ffmpeg/examples/filtering_audio.c"
#include "only4test/ffmpeg/examples/filtering_video.c"
#include "only4test/ffmpeg/examples/http_multiclient.c"
#include "only4test/ffmpeg/examples/hw_decode.c"
#include "only4test/ffmpeg/examples/metadata.c"
#include "only4test/ffmpeg/examples/muxing.c"
#include "only4test/ffmpeg/examples/qsvdec.c"
#include "only4test/ffmpeg/examples/remuxing.c"
#include "only4test/ffmpeg/examples/resampling_audio.c"
#include "only4test/ffmpeg/examples/scaling_video.c"
#include "only4test/ffmpeg/examples/transcode_aac.c"
#include "only4test/ffmpeg/examples/transcoding.c"
#include "only4test/ffmpeg/examples/vaapi_encode.c"
#include "only4test/ffmpeg/examples/vaapi_transcode.c"

JavaVM *mJavaVM = nullptr;

extern "C" JNIEXPORT jint JNICALL JNI_OnLoad(JavaVM *vm, void *reserved) {
mJavaVM = vm;
return JNI_VERSION_1_6;
}

extern "C" {
JNIEXPORT jstring JNICALL
Java_io_weichao_ffmpegexamples_MainActivity_stringFromJNI(
JNIEnv *env,
jobject /* this */) {
std::string hello = "Hello from C++";
return env->NewStringUTF(hello.c_str());
}

JNIEXPORT jstring JNICALL
Java_io_weichao_ffmpegexamples_MainActivity_avio_1reading(
JNIEnv *env,
jobject /* this */,
jstring filepath) {
LOGD("1. avio_reading START");

std::string result = "1. avio_reading failed";

char *argv[2];
argv[0] = (char *) "avio_reading"; // argv[0] carries the program name; the original left it uninitialized
argv[1] = (char *) (env->GetStringUTFChars(filepath, JNI_FALSE));
int ret = avio_reading(sizeof(argv) / sizeof(argv[0]), argv);
if (ret == 0) {
result = "1. avio_reading successful";
}

LOGD("1. avio_reading STOP");
return env->NewStringUTF(result.c_str());
}

JNIEXPORT jstring JNICALL
Java_io_weichao_ffmpegexamples_MainActivity_decode_1audio(
JNIEnv *env,
jobject /* this */,
jstring inFilepath,
jstring outFilepath) {
LOGD("2. decode_audio START");

std::string result = "2. decode_audio failed";

char *argv[3];
argv[0] = (char *) "decode_audio";
argv[1] = (char *) (env->GetStringUTFChars(inFilepath, JNI_FALSE));
argv[2] = (char *) (env->GetStringUTFChars(outFilepath, JNI_FALSE));
int ret = decode_audio(sizeof(argv) / sizeof(argv[0]), argv);
if (ret == 0) {
result = "2. decode_audio successful";
}

LOGD("2. decode_audio STOP");
return env->NewStringUTF(result.c_str());
}

JNIEXPORT jstring JNICALL
Java_io_weichao_ffmpegexamples_MainActivity_decode_1video(
JNIEnv *env,
jobject /* this */,
jstring inFilepath,
jstring outFilepath) {
LOGD("3. decode_video START");

std::string result = "3. decode_video failed";

char *argv[3];
argv[0] = (char *) "decode_video";
argv[1] = (char *) (env->GetStringUTFChars(inFilepath, JNI_FALSE));
argv[2] = (char *) (env->GetStringUTFChars(outFilepath, JNI_FALSE));
int ret = decode_video(sizeof(argv) / sizeof(argv[0]), argv);
if (ret == 0) {
result = "3. decode_video successful";
}

LOGD("3. decode_video STOP");
return env->NewStringUTF(result.c_str());
}

JNIEXPORT jstring JNICALL
Java_io_weichao_ffmpegexamples_MainActivity_demuxing_1decoding(
JNIEnv *env,
jobject /* this */,
jstring inFilepath,
jstring outVideoFilepath,
jstring outAudioFilepath) {
LOGD("4. demuxing_decoding START");

std::string result = "4. demuxing_decoding failed";

char *argv[4];
argv[0] = (char *) "demuxing_decoding";
argv[1] = (char *) (env->GetStringUTFChars(inFilepath, JNI_FALSE));
argv[2] = (char *) (env->GetStringUTFChars(outVideoFilepath, JNI_FALSE));
argv[3] = (char *) (env->GetStringUTFChars(outAudioFilepath, JNI_FALSE));
int ret = demuxing_decoding(sizeof(argv) / sizeof(argv[0]), argv);
if (ret == 0) {
result = "4. demuxing_decoding successful";
}

LOGD("4. demuxing_decoding STOP");
return env->NewStringUTF(result.c_str());
}

JNIEXPORT jstring JNICALL
Java_io_weichao_ffmpegexamples_MainActivity_encode_1audio(
JNIEnv *env,
jobject /* this */,
jstring filepath) {
LOGD("5. encode_audio START");

std::string result = "5. encode_audio failed";

char *argv[2];
argv[0] = (char *) "encode_audio";
argv[1] = (char *) (env->GetStringUTFChars(filepath, JNI_FALSE));
int ret = encode_audio(sizeof(argv) / sizeof(argv[0]), argv);
if (ret == 0) {
result = "5. encode_audio successful";
}

LOGD("5. encode_audio STOP");
return env->NewStringUTF(result.c_str());
}

JNIEXPORT jstring JNICALL
Java_io_weichao_ffmpegexamples_MainActivity_encode_1video(
JNIEnv *env,
jobject /* this */,
jstring filepath,
jstring codecName) {
LOGD("6. encode_video START");

std::string result = "6. encode_video failed";

char *argv[3];
argv[0] = (char *) "encode_video";
argv[1] = (char *) (env->GetStringUTFChars(filepath, JNI_FALSE));
argv[2] = (char *) (env->GetStringUTFChars(codecName, JNI_FALSE));
int ret = encode_video(sizeof(argv) / sizeof(argv[0]), argv);
if (ret == 0) {
result = "6. encode_video successful";
}

LOGD("6. encode_video STOP");
return env->NewStringUTF(result.c_str());
}

JNIEXPORT jstring JNICALL
Java_io_weichao_ffmpegexamples_MainActivity_ffhash(
JNIEnv *env,
jobject /* this */,
jstring hashName) {
LOGD("7. ffhash START");

std::string result = "7. ffhash failed";

char *argv[2];
argv[0] = (char *) "ffhash";
argv[1] = (char *) (env->GetStringUTFChars(hashName, JNI_FALSE));
int ret = ffhash(sizeof(argv) / sizeof(argv[0]), argv);
if (ret == 0) {
result = "7. ffhash successful";
}

LOGD("7. ffhash STOP");
return env->NewStringUTF(result.c_str());
}

JNIEXPORT jstring JNICALL
Java_io_weichao_ffmpegexamples_MainActivity_filter_1audio(
JNIEnv *env,
jobject /* this */,
jfloat duration) {
LOGD("8. filter_audio START");

std::string result = "8. filter_audio failed";

char *argv[2];
argv[0] = (char *) "filter_audio";
argv[1] = (char *) malloc(sizeof(char) * 16); // allocate storage; the original wrote into an uninitialized pointer
snprintf(argv[1], 16, "%.2f", (float) duration);
int ret = filter_audio(sizeof(argv) / sizeof(argv[0]), argv);
if (ret == 0) {
result = "8. filter_audio successful";
}

LOGD("8. filter_audio STOP");
return env->NewStringUTF(result.c_str());
}

JNIEXPORT jstring JNICALL
Java_io_weichao_ffmpegexamples_MainActivity_filtering_1audio(
JNIEnv *env,
jobject /* this */,
jstring filepath) {
LOGD("9. filtering_audio START");

std::string result = "9. filtering_audio failed";

char *argv[2];
argv[0] = (char *) "filtering_audio";
argv[1] = (char *) (env->GetStringUTFChars(filepath, JNI_FALSE));
int ret = filtering_audio(sizeof(argv) / sizeof(argv[0]), argv);
if (ret == 0) {
result = "9. filtering_audio successful";
}

LOGD("9. filtering_audio STOP");
return env->NewStringUTF(result.c_str());
}

JNIEXPORT jstring JNICALL
Java_io_weichao_ffmpegexamples_MainActivity_filtering_1video(
JNIEnv *env,
jobject /* this */,
jstring filepath) {
LOGD("10. filtering_video START");

std::string result = "10. filtering_video failed";

char *argv[2];
argv[0] = (char *) "filtering_video";
argv[1] = (char *) (env->GetStringUTFChars(filepath, JNI_FALSE));
int ret = filtering_video(sizeof(argv) / sizeof(argv[0]), argv);
if (ret == 0) {
result = "10. filtering_video successful";
}

LOGD("10. filtering_video STOP");
return env->NewStringUTF(result.c_str());
}

JNIEXPORT jstring JNICALL
Java_io_weichao_ffmpegexamples_MainActivity_http_1multiclient(
JNIEnv *env,
jobject /* this */,
jstring filepath,
jstring serverUri) {
LOGD("11. http_multiclient START");

std::string result = "11. http_multiclient failed";

char *argv[3];
argv[0] = (char *) "http_multiclient";
argv[1] = (char *) (env->GetStringUTFChars(filepath, JNI_FALSE));
argv[2] = (char *) (env->GetStringUTFChars(serverUri, JNI_FALSE));
int ret = http_multiclient(sizeof(argv) / sizeof(argv[0]), argv);
if (ret == 0) {
result = "11. http_multiclient successful";
}

LOGD("11. http_multiclient STOP");
return env->NewStringUTF(result.c_str());
}

JNIEXPORT jstring JNICALL
Java_io_weichao_ffmpegexamples_MainActivity_hw_1decode(
JNIEnv *env,
jobject /* this */,
jstring deviceType,
jstring inFilepath,
jstring outFilepath) {
LOGD("12. hw_decode START");

std::string result = "12. hw_decode failed";

char *argv[4];
argv[0] = (char *) "hw_decode";
argv[1] = (char *) (env->GetStringUTFChars(deviceType, JNI_FALSE));
argv[2] = (char *) (env->GetStringUTFChars(inFilepath, JNI_FALSE));
argv[3] = (char *) (env->GetStringUTFChars(outFilepath, JNI_FALSE));
int ret = hw_decode(sizeof(argv) / sizeof(argv[0]), argv);
if (ret == 0) {
result = "12. hw_decode successful";
}

LOGD("12. hw_decode STOP");
return env->NewStringUTF(result.c_str());
}

JNIEXPORT jstring JNICALL
Java_io_weichao_ffmpegexamples_MainActivity_metadata(
JNIEnv *env,
jobject /* this */,
jstring filepath) {
LOGD("13. metadata START");

std::string result = "13. metadata failed";

char *argv[2];
argv[0] = (char *) "metadata";
argv[1] = (char *) (env->GetStringUTFChars(filepath, JNI_FALSE));
int ret = metadata(sizeof(argv) / sizeof(argv[0]), argv);
if (ret == 0) {
result = "13. metadata successful";
}

LOGD("13. metadata STOP");
return env->NewStringUTF(result.c_str());
}

JNIEXPORT jstring JNICALL
Java_io_weichao_ffmpegexamples_MainActivity_muxing(
JNIEnv *env,
jobject /* this */,
jstring filepath) {
LOGD("14. muxing START");

std::string result = "14. muxing failed";

char *argv[2];
argv[0] = (char *) "muxing";
argv[1] = (char *) (env->GetStringUTFChars(filepath, JNI_FALSE));
int ret = muxing(sizeof(argv) / sizeof(argv[0]), argv);
if (ret == 0) {
result = "14. muxing successful";
}

LOGD("14. muxing STOP");
return env->NewStringUTF(result.c_str());
}

JNIEXPORT jstring JNICALL
Java_io_weichao_ffmpegexamples_MainActivity_qsvdec(
JNIEnv *env,
jobject /* this */,
jstring inFilepath,
jstring outFilepath) {
LOGD("15. qsvdec START");

std::string result = "15. qsvdec failed";

char *argv[3];
argv[0] = (char *) "qsvdec";
argv[1] = (char *) (env->GetStringUTFChars(inFilepath, JNI_FALSE));
argv[2] = (char *) (env->GetStringUTFChars(outFilepath, JNI_FALSE));
int ret = qsvdec(sizeof(argv) / sizeof(argv[0]), argv);
if (ret == 0) {
result = "15. qsvdec successful";
}

LOGD("15. qsvdec STOP");
return env->NewStringUTF(result.c_str());
}

JNIEXPORT jstring JNICALL
Java_io_weichao_ffmpegexamples_MainActivity_remuxing(
JNIEnv *env,
jobject /* this */,
jstring inFilepath,
jstring outFilepath) {
LOGD("16. remuxing START");

std::string result = "16. remuxing failed";

char *argv[3];
argv[0] = (char *) "remuxing";
argv[1] = (char *) (env->GetStringUTFChars(inFilepath, JNI_FALSE));
argv[2] = (char *) (env->GetStringUTFChars(outFilepath, JNI_FALSE));
int ret = remuxing(sizeof(argv) / sizeof(argv[0]), argv);
if (ret == 0) {
result = "16. remuxing successful";
}

LOGD("16. remuxing STOP");
return env->NewStringUTF(result.c_str());
}

JNIEXPORT jstring JNICALL
Java_io_weichao_ffmpegexamples_MainActivity_resampling_1audio(
JNIEnv *env,
jobject /* this */,
jstring filepath) {
LOGD("17. resampling_audio START");

std::string result = "17. resampling_audio failed";

char *argv[2];
argv[0] = (char *) "resampling_audio";
argv[1] = (char *) (env->GetStringUTFChars(filepath, JNI_FALSE));
int ret = resampling_audio(sizeof(argv) / sizeof(argv[0]), argv);
if (ret == 0) {
result = "17. resampling_audio successful";
}

LOGD("17. resampling_audio STOP");
return env->NewStringUTF(result.c_str());
}

JNIEXPORT jstring JNICALL
Java_io_weichao_ffmpegexamples_MainActivity_scaling_1video(
JNIEnv *env,
jobject /* this */,
jstring filepath,
jstring size) {
LOGD("18. scaling_video START");

std::string result = "18. scaling_video failed";

char *argv[3];
argv[0] = (char *) "scaling_video";
argv[1] = (char *) (env->GetStringUTFChars(filepath, JNI_FALSE));
argv[2] = (char *) (env->GetStringUTFChars(size, JNI_FALSE));
int ret = scaling_video(sizeof(argv) / sizeof(argv[0]), argv);
if (ret == 0) {
result = "18. scaling_video successful";
}

LOGD("18. scaling_video STOP");
return env->NewStringUTF(result.c_str());
}

JNIEXPORT jstring JNICALL
Java_io_weichao_ffmpegexamples_MainActivity_transcode_1aac(
JNIEnv *env,
jobject /* this */,
jstring inFilepath,
jstring outFilepath) {
LOGD("19. transcode_aac START");

std::string result = "19. transcode_aac failed";

char *argv[3];
argv[0] = (char *) "transcode_aac";
argv[1] = (char *) (env->GetStringUTFChars(inFilepath, JNI_FALSE));
argv[2] = (char *) (env->GetStringUTFChars(outFilepath, JNI_FALSE));
int ret = transcode_aac(sizeof(argv) / sizeof(argv[0]), argv);
if (ret == 0) {
result = "19. transcode_aac successful";
}

LOGD("19. transcode_aac STOP");
return env->NewStringUTF(result.c_str());
}

JNIEXPORT jstring JNICALL
Java_io_weichao_ffmpegexamples_MainActivity_transcoding(
JNIEnv *env,
jobject /* this */,
jstring inFilepath,
jstring outFilepath) {
LOGD("20. transcoding START");

std::string result = "20. transcoding failed";

char *argv[3];
argv[0] = (char *) "transcoding";
argv[1] = (char *) (env->GetStringUTFChars(inFilepath, JNI_FALSE));
argv[2] = (char *) (env->GetStringUTFChars(outFilepath, JNI_FALSE));
int ret = transcoding(sizeof(argv) / sizeof(argv[0]), argv);
if (ret == 0) {
result = "20. transcoding successful";
}

LOGD("20. transcoding STOP");
return env->NewStringUTF(result.c_str());
}

JNIEXPORT jstring JNICALL
Java_io_weichao_ffmpegexamples_MainActivity_vaapi_1encode(
JNIEnv *env,
jobject /* this */,
jint width,
jint height,
jstring inFilepath,
jstring outFilepath) {
LOGD("21. vaapi_encode START");

std::string result = "21. vaapi_encode failed";

char *argv[5];
argv[0] = (char *) "vaapi_encode";
argv[1] = (char *) malloc(sizeof(char) * 10);
sprintf(argv[1], "%d", (int) width);
argv[2] = (char *) malloc(sizeof(char) * 10);
sprintf(argv[2], "%d", (int) height);
argv[3] = (char *) (env->GetStringUTFChars(inFilepath, JNI_FALSE));
argv[4] = (char *) (env->GetStringUTFChars(outFilepath, JNI_FALSE));
int ret = vaapi_encode(sizeof(argv) / sizeof(argv[0]), argv);
if (ret == 0) {
result = "21. vaapi_encode successful";
}

LOGD("21. vaapi_encode STOP");
return env->NewStringUTF(result.c_str());
}

JNIEXPORT jstring JNICALL
Java_io_weichao_ffmpegexamples_MainActivity_vaapi_1transcode(
JNIEnv *env,
jobject /* this */,
jstring inFilepath,
jstring codec,
jstring outFilepath) {
LOGD("22. vaapi_transcode START");

std::string result = "22. vaapi_transcode failed";

char *argv[4];
argv[0] = (char *) "vaapi_transcode";
argv[1] = (char *) (env->GetStringUTFChars(inFilepath, JNI_FALSE));
argv[2] = (char *) (env->GetStringUTFChars(codec, JNI_FALSE));
argv[3] = (char *) (env->GetStringUTFChars(outFilepath, JNI_FALSE));
int ret = vaapi_transcode(sizeof(argv) / sizeof(argv[0]), argv);
if (ret == 0) {
result = "22. vaapi_transcode successful";
}

LOGD("22. vaapi_transcode STOP");
return env->NewStringUTF(result.c_str());
}
}
  • MainActivity.kt
package io.weichao.ffmpegexamples

import android.content.res.AssetManager
import androidx.appcompat.app.AppCompatActivity
import android.os.Bundle
import android.util.Log
import io.weichao.ffmpegexamples.databinding.ActivityMainBinding
import java.io.*

class MainActivity : AppCompatActivity() {
private lateinit var binding: ActivityMainBinding

override fun onCreate(savedInstanceState: Bundle?) {
super.onCreate(savedInstanceState)

binding = ActivityMainBinding.inflate(layoutInflater)
setContentView(binding.root)

binding.sampleText.text = stringFromJNI()

val audioFilepath = (externalCacheDir?.absolutePath ?: "") + File.separator + "FUNKY_HOUSE.mp3"
System.out.println(audioFilepath)
copyFileFromAsset(File(audioFilepath).name, audioFilepath)

val videoFilepath = (externalCacheDir?.absolutePath ?: "") + File.separator + "mpeg1video_zelda first commercial.mpeg"
System.out.println(videoFilepath)
copyFileFromAsset(File(videoFilepath).name, videoFilepath)

val audioOutFilepath = (externalCacheDir?.absolutePath ?: "") + File.separator + "decode_audio_out.pcm"
System.out.println(audioOutFilepath)

val videoOutFilepath = (externalCacheDir?.absolutePath ?: "") + File.separator + "decode_video_out" + File.separator + "pgm"
val file = File(videoOutFilepath)
file.parentFile.mkdir()
System.out.println(videoOutFilepath)

val videoFilepath2 = (externalCacheDir?.absolutePath ?: "") + File.separator + "h264_trailer.mp4"
System.out.println(videoFilepath2)
copyFileFromAsset(File(videoFilepath2).name, videoFilepath2)
val videoOutFilepath2 = (externalCacheDir?.absolutePath ?: "") + File.separator + "demuxing_decoding_out.mp4"
System.out.println(videoOutFilepath2)
val audioOutFilepath2 = (externalCacheDir?.absolutePath ?: "") + File.separator + "demuxing_decoding_out.mp3"
System.out.println(audioOutFilepath2)

val outFilepath = (externalCacheDir?.absolutePath ?: "") + File.separator + "encode_audio.mp2"
System.out.println(outFilepath)

val outFilepath2 = (externalCacheDir?.absolutePath ?: "") + File.separator + "encode_video.mpeg"
System.out.println(outFilepath2)

val outFilepath3 = (externalCacheDir?.absolutePath ?: "") + File.separator + "hw_decode_out"
System.out.println(outFilepath3)

val videoOutFilepath3 = (externalCacheDir?.absolutePath ?: "") + File.separator + "muxing_out.mp4"
System.out.println(videoOutFilepath3)

val outFilepath4 = (externalCacheDir?.absolutePath ?: "") + File.separator + "qsvdec_out.mp4"
System.out.println(outFilepath4)

val videoOutFilepath4 = (externalCacheDir?.absolutePath ?: "") + File.separator + "remuxing_out.mp4"
System.out.println(videoOutFilepath4)

val audioOutFilepath3 = (externalCacheDir?.absolutePath ?: "") + File.separator + "resampling_audio_out.mp3"
File(audioOutFilepath3).createNewFile()
System.out.println(audioOutFilepath3)

val audioOutFilepath4 = (externalCacheDir?.absolutePath ?: "") + File.separator + "transcode_aac_out.aac"
System.out.println(audioOutFilepath4)

val videoOutFilepath5 = (externalCacheDir?.absolutePath ?: "") + File.separator + "transcoding.mp4"
System.out.println(videoOutFilepath5)

val videoFilepath3 = (externalCacheDir?.absolutePath ?: "") + File.separator + "176x144_container.yuv"
System.out.println(videoFilepath3)
copyFileFromAsset(File(videoFilepath3).name, videoFilepath3)

val videoOutFilepath6 = (externalCacheDir?.absolutePath ?: "") + File.separator + "vaapi_encode_out.mp4"
File(videoOutFilepath6).createNewFile()
System.out.println(videoOutFilepath6)

val videoOutFilepath7 = (externalCacheDir?.absolutePath ?: "") + File.separator + "vaapi_transcode_out.mp4"
File(videoOutFilepath7).createNewFile()
System.out.println(videoOutFilepath7)

binding.avioReading.setOnClickListener { binding.avioReading.text = avio_reading(audioFilepath) }
binding.decodeAudio.setOnClickListener { binding.decodeAudio.text = decode_audio(audioFilepath, audioOutFilepath) }
binding.decodeVideo.setOnClickListener { binding.decodeVideo.text = decode_video(videoFilepath, videoOutFilepath) }
binding.demuxingDecoding.setOnClickListener { binding.demuxingDecoding.text = demuxing_decoding(videoFilepath2, videoOutFilepath2, audioOutFilepath2) }
binding.encodeAudio.setOnClickListener { binding.encodeAudio.text = encode_audio(outFilepath) }
binding.encodeVideo.setOnClickListener { binding.encodeVideo.text = encode_video(outFilepath2, "mpeg1video") }
binding.ffhash.setOnClickListener { binding.ffhash.text = ffhash("MD5") }
binding.filterAudio.setOnClickListener { binding.filterAudio.text = filter_audio(1F) }
binding.filteringAudio.setOnClickListener { binding.filteringAudio.text = filtering_audio(audioFilepath) }
binding.filteringVideo.setOnClickListener { binding.filteringVideo.text = filtering_video(videoFilepath) }
// If the server address is http://192.168.50.49:8888 and the file path is /storage/emulated/0/Android/data/io.weichao.ffmpegexamples/cache/FUNKY_HOUSE.mp3,
// then a client can fetch it over HTTP from http://192.168.50.49:8888/?resource=/storage/emulated/0/Android/data/io.weichao.ffmpegexamples/cache/FUNKY_HOUSE.mp3
binding.httpMulticlient.setOnClickListener { binding.httpMulticlient.text = http_multiclient(audioFilepath, "http://192.168.50.49:8888") }
// TODO https://www.i4k.xyz/article/u014248312/119192574
// TODO "可以实现在 Linux 或者 Windows 平台上,利用 cuvid 或者 NVIDIA 、Intel 等硬件厂商实现的硬解码功能,实现硬件解码。"
// TODO "但是,在 Android 平台使用 MediaCodec 的解码,却没有实现。"
binding.hwDecode.setOnClickListener { binding.hwDecode.text = hw_decode("mediacodec", videoFilepath2, outFilepath3) }
binding.metadata.setOnClickListener { binding.metadata.text = metadata(videoFilepath2) }
binding.muxing.setOnClickListener { binding.muxing.text = muxing(videoOutFilepath3) }
binding.qsvdec.setOnClickListener { binding.qsvdec.text = qsvdec(videoFilepath2, outFilepath4) }
binding.remuxing.setOnClickListener { binding.remuxing.text = remuxing(videoFilepath2, videoOutFilepath4) }
binding.resamplingAudio.setOnClickListener { binding.resamplingAudio.text = resampling_audio(audioOutFilepath3) }
binding.scalingVideo.setOnClickListener { binding.scalingVideo.text = scaling_video(videoFilepath2, "640x360") }
binding.transcodeAac.setOnClickListener { binding.transcodeAac.text = transcode_aac(audioFilepath, audioOutFilepath4) }
binding.transcoding.setOnClickListener { binding.transcoding.text = transcoding(videoFilepath, videoOutFilepath5) }
binding.vaapiEncode.setOnClickListener { binding.vaapiEncode.text = vaapi_encode(176, 144, videoFilepath3, videoOutFilepath6) }
binding.vaapiTranscode.setOnClickListener { binding.vaapiTranscode.text = vaapi_transcode(videoFilepath2, "h264_vaapi", videoOutFilepath7) }
}

private fun copyFileFromAsset(filename: String, filepath: String) {
val assetManager: AssetManager = assets
var `in`: InputStream? = null
var out: OutputStream? = null
try {
`in` = assetManager.open(filename)
val outFile = File(filepath)
out = FileOutputStream(outFile)
copyFile(`in`, out)
} catch (e: IOException) {
Log.e("tag", "Failed to copy asset file: $filename", e)
} finally {
if (`in` != null) {
try {
`in`.close()
} catch (e: IOException) {
e.printStackTrace()
}
}
if (out != null) {
try {
out.close()
} catch (e: IOException) {
e.printStackTrace()
}
}
}
}

private fun copyFile(`in`: InputStream, out: FileOutputStream) {
val buffer = ByteArray(1024)
var read: Int? = null
while (`in`.read(buffer).also { read = it } != -1) {
read?.let { out.write(buffer, 0, it) }
}
}

companion object {
init {
System.loadLibrary("native-lib")
}
}

external fun stringFromJNI(): String
external fun avio_reading(filepath: String): String
external fun decode_audio(inFilepath: String, outFilepath: String): String
external fun decode_video(inFilepath: String, outFilepath: String): String
external fun demuxing_decoding(inFilepath: String, outVideoFilepath: String, outAudioFilepath: String): String
external fun encode_audio(filepath: String): String
external fun encode_video(filepath: String, codecName: String): String
external fun ffhash(hashName: String): String
external fun filter_audio(duration: Float): String
external fun filtering_audio(filepath: String): String
external fun filtering_video(filepath: String): String
external fun http_multiclient(filepath: String, serverUri: String): String
external fun hw_decode(deviceType: String, inFilepath: String, outFilepath: String): String
external fun metadata(filepath: String): String
external fun muxing(filepath: String): String
external fun qsvdec(inFilepath: String, outFilepath: String): String
external fun remuxing(inFilepath: String, outFilepath: String): String
external fun resampling_audio(filepath: String): String
external fun scaling_video(filepath: String, size: String): String
external fun transcode_aac(inFilepath: String, outFilepath: String): String
external fun transcoding(inFilepath: String, outFilepath: String): String
external fun vaapi_encode(width: Int, height: Int, inFilepath: String, outFilepath: String): String
external fun vaapi_transcode(inFilepath: String, codec: String, outFilepath: String): String
}

Environment


avio_reading

Description

libavformat AVIOContext API example.
Make libavformat demuxer access media content through a custom AVIOContext read callback.

API example program to show how to read from a custom buffer accessed through AVIOContext.
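
The heart of the example is the read callback handed to avio_alloc_context. Below is a minimal sketch of that idea in C (the struct and function names are mine, not the example's): a memory buffer is wired into libavformat so that avformat_open_input reads through the callback instead of a file path.

#include <libavformat/avformat.h>
#include <libavutil/mem.h>
#include <string.h>

struct buffer_data {
    uint8_t *ptr;   /* current read position */
    size_t size;    /* bytes left in the buffer */
};

/* called by libavformat whenever the demuxer needs more input */
static int read_packet(void *opaque, uint8_t *buf, int buf_size) {
    struct buffer_data *bd = (struct buffer_data *) opaque;
    buf_size = FFMIN(buf_size, bd->size);
    if (!buf_size)
        return AVERROR_EOF;
    memcpy(buf, bd->ptr, buf_size);
    bd->ptr  += buf_size;
    bd->size -= buf_size;
    return buf_size;
}

static int open_from_memory(struct buffer_data *bd, AVFormatContext **fmt_ctx) {
    const int avio_buf_size = 4096;
    uint8_t *avio_buf = av_malloc(avio_buf_size);
    AVIOContext *avio_ctx = avio_alloc_context(avio_buf, avio_buf_size,
                                               0 /* read-only */, bd,
                                               read_packet, NULL, NULL);
    *fmt_ctx = avformat_alloc_context();
    (*fmt_ctx)->pb = avio_ctx;   /* the demuxer now reads through our callback */
    return avformat_open_input(fmt_ctx, NULL, NULL, NULL);
}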

Demo

Source code changes


decode_audio

Description

audio decoding with libavcodec API example
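
At its core the example is the standard libavcodec send/receive loop: one packet in, zero or more frames out. A minimal sketch of that loop in C is below (the helper name is mine and error handling is trimmed); the real example additionally uses av_parser_parse2 to split the raw input into packets before feeding them to the decoder.

#include <libavcodec/avcodec.h>

static int decode_packet(AVCodecContext *dec_ctx, const AVPacket *pkt, AVFrame *frame) {
    int ret = avcodec_send_packet(dec_ctx, pkt);   /* pkt == NULL flushes the decoder */
    if (ret < 0)
        return ret;
    while (ret >= 0) {
        ret = avcodec_receive_frame(dec_ctx, frame);
        if (ret == AVERROR(EAGAIN) || ret == AVERROR_EOF)
            return 0;                              /* needs more input, or fully drained */
        if (ret < 0)
            return ret;
        /* frame->data[] now holds decoded samples; write them to the output file here */
    }
    return 0;
}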

Demo

A file is generated at the specified location:

Source code changes


decode_video

Description

video decoding with libavcodec API example

And check your input file is encoded by mpeg1video please.

Demo

(intermediate log output omitted)

Files are generated at the specified location:

(more files omitted)

Source code changes


demuxing_decoding

Description

Demuxing and decoding example.
Show how to use the libavformat and libavcodec API to demux and decode audio and video data.

API example program to show how to read frames from an input file.
This program reads frames from a file, decodes them, and writes decoded video frames to a rawvideo file named video_output_file, and decoded audio frames to a rawaudio file named audio_output_file.
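
The part worth remembering is how packets are routed: av_read_frame returns packets from all streams interleaved, and pkt->stream_index decides which decoder each one goes to. A minimal sketch of that loop in C (the function and parameter names are illustrative, and frame draining is left as a comment):

#include <libavformat/avformat.h>
#include <libavcodec/avcodec.h>

static void demux_loop(AVFormatContext *fmt_ctx,
                       int video_stream_idx, AVCodecContext *video_dec,
                       int audio_stream_idx, AVCodecContext *audio_dec) {
    AVPacket *pkt = av_packet_alloc();
    while (av_read_frame(fmt_ctx, pkt) >= 0) {
        if (pkt->stream_index == video_stream_idx)
            avcodec_send_packet(video_dec, pkt);   /* then drain with avcodec_receive_frame() */
        else if (pkt->stream_index == audio_stream_idx)
            avcodec_send_packet(audio_dec, pkt);
        av_packet_unref(pkt);
    }
    /* flush both decoders by sending a NULL packet */
    avcodec_send_packet(video_dec, NULL);
    avcodec_send_packet(audio_dec, NULL);
    av_packet_free(&pkt);
}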

Demo

(intermediate log output omitted)

Files are generated at the specified location:

Source code changes


encode_audio

Description

audio encoding with libavcodec API example.
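
Encoding mirrors the decode loop with the roles of frames and packets swapped. A minimal C sketch with the error handling trimmed (the helper name and the FILE* parameter are illustrative):

#include <stdio.h>
#include <libavcodec/avcodec.h>

static int encode_frame(AVCodecContext *enc_ctx, AVFrame *frame, AVPacket *pkt, FILE *fout) {
    int ret = avcodec_send_frame(enc_ctx, frame);  /* frame == NULL flushes the encoder */
    if (ret < 0)
        return ret;
    while (ret >= 0) {
        ret = avcodec_receive_packet(enc_ctx, pkt);
        if (ret == AVERROR(EAGAIN) || ret == AVERROR_EOF)
            return 0;
        if (ret < 0)
            return ret;
        fwrite(pkt->data, 1, pkt->size, fout);     /* raw encoded packets go straight to the file */
        av_packet_unref(pkt);
    }
    return 0;
}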

Demo

A file is generated at the specified location:

Source code changes


encode_video

Description

video encoding with libavcodec API example

Demo

A file is generated at the specified location:

Source code changes


ffhash

Description

This example is a simple command line application that takes one or more arguments. It demonstrates a typical use of the hashing API with allocation, initialization, updating, and finalizing.
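
The hashing API the example exercises lives in libavutil/hash.h and follows exactly that allocate/init/update/finalize pattern. A minimal C sketch (the wrapper function is mine, not part of the example):

#include <libavutil/hash.h>

static int hash_buffer(const char *hash_name, const uint8_t *data, size_t len,
                       char *hex_out, int hex_out_size) {
    struct AVHashContext *ctx = NULL;
    int ret = av_hash_alloc(&ctx, hash_name);        /* e.g. "MD5", "SHA256", "CRC32" */
    if (ret < 0)
        return ret;                                  /* unknown hash name or out of memory */
    av_hash_init(ctx);
    av_hash_update(ctx, data, len);
    av_hash_final_hex(ctx, (uint8_t *) hex_out, hex_out_size);
    av_hash_freep(&ctx);
    return 0;
}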

Demo

Source code changes


filter_audio

Description

This example will generate a sine wave audio, pass it through a simple filter chain, and then compute the MD5 checksum of the output data.
The filter chain it uses is: (input) -> abuffer -> volume -> aformat -> abuffersink -> (output)
abuffer: This provides the endpoint where you can feed the decoded samples.
volume: In this example we hardcode it to 0.90.
aformat: This converts the samples to the samplefreq, channel layout, and sample format required by the audio device.
abuffersink: This provides the endpoint where you can read the samples after they have passed through the filter chain.
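
A minimal C sketch of building that exact chain with libavfilter is shown below. For brevity it configures each filter through option strings passed to avfilter_graph_create_filter, whereas the real example sets the options through the AVOptions API; the sample rate, formats, and layouts here are only placeholders.

#include <libavfilter/avfilter.h>
#include <libavfilter/buffersrc.h>
#include <libavfilter/buffersink.h>

static int build_graph(AVFilterGraph **graph_out,
                       AVFilterContext **src_out, AVFilterContext **sink_out) {
    AVFilterGraph *graph = avfilter_graph_alloc();
    AVFilterContext *src, *volume, *aformat, *sink;
    int ret;

    ret = avfilter_graph_create_filter(&src, avfilter_get_by_name("abuffer"), "src",
            "time_base=1/48000:sample_rate=48000:sample_fmt=fltp:channel_layout=stereo", NULL, graph);
    if (ret < 0) return ret;
    ret = avfilter_graph_create_filter(&volume, avfilter_get_by_name("volume"), "volume",
            "volume=0.90", NULL, graph);
    if (ret < 0) return ret;
    ret = avfilter_graph_create_filter(&aformat, avfilter_get_by_name("aformat"), "aformat",
            "sample_fmts=s16:sample_rates=44100:channel_layouts=stereo", NULL, graph);
    if (ret < 0) return ret;
    ret = avfilter_graph_create_filter(&sink, avfilter_get_by_name("abuffersink"), "sink",
            NULL, NULL, graph);
    if (ret < 0) return ret;

    /* wire the chain together and validate it */
    if ((ret = avfilter_link(src, 0, volume, 0)) < 0 ||
        (ret = avfilter_link(volume, 0, aformat, 0)) < 0 ||
        (ret = avfilter_link(aformat, 0, sink, 0)) < 0)
        return ret;
    if ((ret = avfilter_graph_config(graph, NULL)) < 0)
        return ret;

    *graph_out = graph;
    *src_out = src;
    *sink_out = sink;
    /* feed frames with av_buffersrc_add_frame(src, frame) and read them back
     * with av_buffersink_get_frame(sink, frame) */
    return 0;
}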

Demo

(intermediate log output omitted)

Source code changes


filtering_audio

Description

API example for audio decoding and filtering

Demo

(intermediate log output omitted)

Source code changes


filtering_video

Description

API example for decoding and filtering

Demo

(intermediate log output omitted)

Source code changes


http_multiclient

Description

libavformat multi-client network API usage example.
This example will serve a file without decoding or demuxing it over http. Multiple clients can connect and will receive the same file.

API example program to serve http to multiple clients.

Demo

1. Determine the Android device's IP, for example 192.168.50.49.
2. Run the code; the Android device acts as the HTTP server on port 8888, serving the file at /storage/emulated/0/Android/data/io.weichao.ffmpegexamples/cache/FUNKY_HOUSE.mp3.

3. Other devices on the same subnet can then download the file from http://192.168.50.49:8888/?resource=/storage/emulated/0/Android/data/io.weichao.ffmpegexamples/cache/FUNKY_HOUSE.mp3

Source code changes


hw_decode TODO

Description

HW-Accelerated decoding example.
This example shows how to do HW-accelerated decoding with output frames from the HW video surfaces.

Demo

TODO: Not tested successfully yet.

Source code changes


metadata

Description

Shows how the metadata API can be used in application programs.

example program to demonstrate the use of the libavformat metadata API.
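
The example boils down to opening the file and walking the container-level AVDictionary. A minimal C sketch (the wrapper function is mine):

#include <stdio.h>
#include <libavformat/avformat.h>
#include <libavutil/dict.h>

static int dump_metadata(const char *filepath) {
    AVFormatContext *fmt_ctx = NULL;
    const AVDictionaryEntry *tag = NULL;
    int ret = avformat_open_input(&fmt_ctx, filepath, NULL, NULL);
    if (ret < 0)
        return ret;
    if ((ret = avformat_find_stream_info(fmt_ctx, NULL)) < 0)
        goto end;
    /* an empty key plus AV_DICT_IGNORE_SUFFIX matches every entry */
    while ((tag = av_dict_get(fmt_ctx->metadata, "", tag, AV_DICT_IGNORE_SUFFIX)))
        printf("%s=%s\n", tag->key, tag->value);
end:
    avformat_close_input(&fmt_ctx);
    return ret < 0 ? ret : 0;
}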

Demo

Source code changes


muxing

Description

libavformat API example.
Output a media file in any supported libavformat format. The default codecs are used.

API example program to output a media file with libavformat.
This program generates a synthetic audio and video stream, encodes and muxes them into a file named output_file.
The output format is automatically guessed according to the file extension.
Raw images can also be output by using ‘%%d’ in the filename.

Demo

A file is generated at the specified location:

Source code changes


qsvdec TODO

Description

This example shows how to do QSV-accelerated H.264 decoding with output frames in the GPU video surfaces.

Demo

QSV = Intel® Quick Sync Video

TODO: Not tested successfully; the build reports that "libavutil/hwcontext_qsv.h" is missing because QSV was not enabled when the .so libraries were built.

Source code changes


remuxing

Description

libavformat/libavcodec demuxing and muxing API example.
Remux streams from one container format to another.

API example program to remux a media file with libavformat and libavcodec.
The output format is guessed according to the file extension.
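
The interesting part is the packet-copy loop: nothing is decoded, but every packet's timestamps must be rescaled from the input stream's time base to the output stream's. A minimal C sketch of that loop, assuming the output context and streams are already set up and that stream_mapping is the example's input-to-output stream index table:

#include <libavformat/avformat.h>

static int copy_packets(AVFormatContext *in_ctx, AVFormatContext *out_ctx,
                        const int *stream_mapping) {
    AVPacket *pkt = av_packet_alloc();
    while (av_read_frame(in_ctx, pkt) >= 0) {
        int out_index = stream_mapping[pkt->stream_index];
        if (out_index < 0) {                  /* stream not selected for the output */
            av_packet_unref(pkt);
            continue;
        }
        AVStream *in_st  = in_ctx->streams[pkt->stream_index];
        AVStream *out_st = out_ctx->streams[out_index];
        pkt->stream_index = out_index;
        av_packet_rescale_ts(pkt, in_st->time_base, out_st->time_base);
        pkt->pos = -1;
        av_interleaved_write_frame(out_ctx, pkt);  /* the muxer takes ownership of the packet data */
    }
    av_packet_free(&pkt);
    return av_write_trailer(out_ctx);
}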

Demo

(intermediate log output omitted)

A file is generated at the specified location:

Source code changes


resampling_audio

Description

libswresample API use example.

API example program to show how to resample an audio stream with libswresample.
This program generates a series of audio frames, resamples them to a specified output format and rate and saves them to an output file named output_file.
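
Setting up the SwrContext is the part that usually needs the most care. The sketch below follows the pre-FFmpeg-5.1 channel-layout options that the bundled example uses (newer FFmpeg replaces in_channel_layout/out_channel_layout with in_chlayout/out_chlayout); the concrete rates and formats are placeholders.

#include <libswresample/swresample.h>
#include <libavutil/opt.h>
#include <libavutil/channel_layout.h>

static SwrContext *make_resampler(void) {
    SwrContext *swr = swr_alloc();
    /* describe the source and destination audio */
    av_opt_set_int(swr, "in_channel_layout",  AV_CH_LAYOUT_STEREO, 0);
    av_opt_set_int(swr, "in_sample_rate",     48000, 0);
    av_opt_set_sample_fmt(swr, "in_sample_fmt", AV_SAMPLE_FMT_FLTP, 0);
    av_opt_set_int(swr, "out_channel_layout", AV_CH_LAYOUT_MONO, 0);
    av_opt_set_int(swr, "out_sample_rate",    44100, 0);
    av_opt_set_sample_fmt(swr, "out_sample_fmt", AV_SAMPLE_FMT_S16, 0);
    if (swr_init(swr) < 0) {                  /* must succeed before swr_convert() is called */
        swr_free(&swr);
        return NULL;
    }
    /* convert with: swr_convert(swr, out_planes, out_samples, in_planes, in_samples) */
    return swr;
}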

Demo

(intermediate log output omitted)

A file is generated at the specified location:

Source code changes


scaling_video

Description

libswscale API use example.

API example program to show how to scale an image with libswscale.
This program generates a series of pictures, rescales them to the given output_size and saves them to an output file named output_file.
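
The example reduces to two libswscale calls: sws_getContext to describe the conversion once, and sws_scale to run it on each frame's planes. A minimal C sketch (the pixel formats and the bilinear flag are illustrative choices):

#include <libswscale/swscale.h>
#include <libavutil/pixfmt.h>

static struct SwsContext *make_scaler(int src_w, int src_h, int dst_w, int dst_h) {
    /* YUV420P in and out; SWS_BILINEAR selects the rescaling algorithm */
    return sws_getContext(src_w, src_h, AV_PIX_FMT_YUV420P,
                          dst_w, dst_h, AV_PIX_FMT_YUV420P,
                          SWS_BILINEAR, NULL, NULL, NULL);
}

static void scale_frame(struct SwsContext *sws,
                        uint8_t *const src_data[], const int src_linesize[], int src_h,
                        uint8_t *const dst_data[], const int dst_linesize[]) {
    /* processes source rows [0, src_h) and writes the rescaled picture into dst */
    sws_scale(sws, (const uint8_t *const *) src_data, src_linesize, 0, src_h,
              dst_data, dst_linesize);
}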

Demo

A file is generated at the specified location:

Source code changes


transcode_aac

Description

Simple audio converter
Convert an input audio file to AAC in an MP4 container using FFmpeg. Formats other than MP4 are supported based on the output file extension.

Demo

(intermediate log output omitted)

A file is generated at the specified location:

Source code changes


transcoding

Description

API example for demuxing, decoding, filtering, encoding and muxing

Demo

A file is generated at the specified location:

Source code changes


vaapi_encode TODO

Description

Intel VAAPI-accelerated encoding example.
This example shows how to do VAAPI-accelerated encoding. now only support NV12 raw file, usage like: vaapi_encode 1920 1080 input.yuv output.h264

Demo

TODO: Not tested successfully; it fails on both an arm64-v8a device and an x86_64 emulator.

Source code changes


vaapi_transcode TODO

Description

Intel VAAPI-accelerated transcoding example.
This example shows how to do VAAPI-accelerated transcoding.
Usage: vaapi_transcode input_stream codec output_stream
e.g: vaapi_transcode input.mp4 h264_vaapi output_h264.mp4
vaapi_transcode input.mp4 vp9_vaapi output_vp9.ivf
The output format is guessed according to the file extension.

Demo

TODO: Not tested successfully; it fails on both an arm64-v8a device and an x86_64 emulator.

Source code changes


If you need to add a new example

1. Create a .c or .cpp file (e.g. example.c) in the FFmpegExamples\app\src\main\cpp\only4test\ffmpeg\examples directory, copy the example code into it, and adapt it following the existing examples.

2. Create a JNI function in FFmpegExamples\app\src\main\cpp\native-lib.cpp, for example:

JNIEXPORT jstring JNICALL
Java_io_weichao_ffmpegexamples_MainActivity_stringFromJNI(
JNIEnv *env,
jobject /* this */) {
std::string hello = "Hello from C++";
return env->NewStringUTF(hello.c_str());
}

Then add

#include "only4test/ffmpeg/examples/example.c"

so that the functions defined in example.c can be called from the JNI function.
3. Click Build -> Refresh Linked C++ Projects in the menu bar.
4. Declare the matching external (native) function in FFmpegExamples\app\src\main\java\io\weichao\ffmpegexamples\MainActivity.kt, for example:

external fun stringFromJNI(): String

5. Call stringFromJNI() from your method.


