Book a Demo!
CoCalc Logo Icon
StoreFeaturesDocsShareSupportNewsAboutPoliciesSign UpSign In
hrydgard
GitHub Repository: hrydgard/ppsspp
Path: blob/master/Core/HW/MediaEngine.cpp
3186 views
1
// Copyright (c) 2012- PPSSPP Project.
2
3
// This program is free software: you can redistribute it and/or modify
4
// it under the terms of the GNU General Public License as published by
5
// the Free Software Foundation, version 2.0 or later versions.
6
7
// This program is distributed in the hope that it will be useful,
8
// but WITHOUT ANY WARRANTY; without even the implied warranty of
9
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
10
// GNU General Public License 2.0 for more details.
11
12
// A copy of the GPL 2.0 should have been included with the program.
13
// If not, see http://www.gnu.org/licenses/
14
15
// Official git repository and contact information can be found at
16
// https://github.com/hrydgard/ppsspp and http://www.ppsspp.org/.
17
18
#include "Common/Serialize/SerializeFuncs.h"
19
#include "Common/Math/SIMDHeaders.h"
20
#include "Common/StringUtils.h"
21
#include "Core/System.h"
22
#include "Core/Debugger/MemBlockInfo.h"
23
#include "Core/HW/MediaEngine.h"
24
#include "Core/MemMap.h"
25
#include "Core/Reporting.h"
26
#include "GPU/GPUState.h" // Used by TextureDecoder.h when templates get instanced
27
#include "GPU/Common/TextureDecoder.h"
28
#include "Core/HW/SimpleAudioDec.h"
29
30
#include <algorithm>
31
32
#ifdef USE_FFMPEG
33
34
extern "C" {
35
36
#include "libavcodec/avcodec.h"
37
#include "libavformat/avformat.h"
38
#include "libavutil/imgutils.h"
39
#include "libswscale/swscale.h"
40
41
}
42
#endif // USE_FFMPEG
43
44
#ifdef USE_FFMPEG
45
46
#include "Core/FFMPEGCompat.h"
47
48
// Maps a PSP GE color mode to the matching little-endian swscale pixel
// format. Unknown modes log an error and yield (AVPixelFormat)0.
static AVPixelFormat getSwsFormat(int pspFormat)
{
	AVPixelFormat result;
	switch (pspFormat) {
	case GE_CMODE_16BIT_BGR5650:
		result = AV_PIX_FMT_BGR565LE;
		break;
	case GE_CMODE_16BIT_ABGR5551:
		result = AV_PIX_FMT_BGR555LE;
		break;
	case GE_CMODE_16BIT_ABGR4444:
		result = AV_PIX_FMT_BGR444LE;
		break;
	case GE_CMODE_32BIT_ABGR8888:
		result = AV_PIX_FMT_RGBA;
		break;
	default:
		ERROR_LOG(Log::ME, "Unknown pixel format");
		result = (AVPixelFormat)0;
		break;
	}
	return result;
}
65
66
void ffmpeg_logger(void *, int level, const char *format, va_list va_args) {
67
// We're still called even if the level doesn't match.
68
if (level > av_log_get_level())
69
return;
70
71
char tmp[1024];
72
vsnprintf(tmp, sizeof(tmp), format, va_args);
73
tmp[sizeof(tmp) - 1] = '\0';
74
75
// Strip off any trailing newline.
76
size_t len = strlen(tmp);
77
if (tmp[len - 1] == '\n')
78
tmp[len - 1] = '\0';
79
80
if (!strcmp(tmp, "GHA Phase shifting")) {
81
Reporting::ReportMessage("Atrac3+: GHA phase shifting");
82
}
83
84
// Let's color the log line appropriately.
85
if (level <= AV_LOG_PANIC) {
86
ERROR_LOG(Log::ME, "FF: %s", tmp);
87
} else if (level >= AV_LOG_VERBOSE) {
88
DEBUG_LOG(Log::ME, "FF: %s", tmp);
89
} else {
90
// Downgrade some log messages we don't care about
91
if (startsWith(tmp, "No accelerated colorspace") || startsWith(tmp, "SEI type 1 size 40 truncated at 36")) {
92
VERBOSE_LOG(Log::ME, "FF: %s", tmp);
93
} else {
94
INFO_LOG(Log::ME, "FF: %s", tmp);
95
}
96
}
97
}
98
99
// Configures FFmpeg's global log level (verbose in debug builds) and installs
// ffmpeg_logger as the log callback. Safe to call repeatedly; always true.
bool InitFFmpeg() {
#ifdef _DEBUG
	av_log_set_level(AV_LOG_VERBOSE);
#else
	av_log_set_level(AV_LOG_WARNING);
#endif
	av_log_set_callback(&ffmpeg_logger);

	return true;
}
109
#endif
110
111
static int getPixelFormatBytes(int pspFormat)
112
{
113
switch (pspFormat)
114
{
115
case GE_CMODE_16BIT_BGR5650:
116
case GE_CMODE_16BIT_ABGR5551:
117
case GE_CMODE_16BIT_ABGR4444:
118
return 2;
119
case GE_CMODE_32BIT_ABGR8888:
120
return 4;
121
122
default:
123
ERROR_LOG(Log::ME, "Unknown pixel format");
124
return 4;
125
}
126
}
127
128
// Sets up default buffer sizing; actual media loading happens in loadStream().
MediaEngine::MediaEngine() {
	// Movies only ever carry Atrac3+ audio.
	m_audioType = PSP_CODEC_AT3PLUS;
	m_mpegheaderSize = sizeof(m_mpegheader);
	m_bufSize = 0x2000;
}
134
135
// Releases all decoder/demuxer/buffer state owned by this engine.
MediaEngine::~MediaEngine() {
	closeMedia();
}
138
139
void MediaEngine::closeMedia() {
140
closeContext();
141
delete m_pdata;
142
delete m_demux;
143
m_pdata = nullptr;
144
m_demux = nullptr;
145
AudioClose(&m_audioContext);
146
m_isVideoEnd = false;
147
}
148
149
// Savestate serialization. The section version gates fields added over time;
// when loading an older save, missing fields get defaults matching the old
// behavior. The field order below is load-bearing — do not reorder.
void MediaEngine::DoState(PointerWrap &p) {
	auto s = p.Section("MediaEngine", 1, 7);
	if (!s)
		return;

	Do(p, m_videoStream);
	Do(p, m_audioStream);

	DoArray(p, m_mpegheader, sizeof(m_mpegheader));
	if (s >= 4) {
		Do(p, m_mpegheaderSize);
	} else {
		m_mpegheaderSize = sizeof(m_mpegheader);
	}
	if (s >= 5) {
		Do(p, m_mpegheaderReadPos);
	} else {
		// Old saves: treat the header as fully consumed.
		m_mpegheaderReadPos = m_mpegheaderSize;
	}
	if (s >= 6) {
		Do(p, m_expectedVideoStreams);
	} else {
		m_expectedVideoStreams = 0;
	}

	Do(p, m_ringbuffersize);

	u32 hasloadStream = m_pdata != nullptr;
	Do(p, hasloadStream);
	// On load, rebuild the buffers/demuxer from the cached header.
	if (hasloadStream && p.mode == p.MODE_READ)
		reloadStream();
#ifdef USE_FFMPEG
	u32 hasopencontext = m_pFormatCtx != nullptr;
#else
	u32 hasopencontext = false;
#endif
	Do(p, hasopencontext);
	if (m_pdata)
		m_pdata->DoState(p);
	if (m_demux)
		m_demux->DoState(p);

	Do(p, m_videopts);
	if (s >= 7) {
		Do(p, m_lastPts);
	} else {
		m_lastPts = m_videopts;
	}
	Do(p, m_audiopts);

	if (s >= 2) {
		Do(p, m_firstTimeStamp);
		Do(p, m_lastTimeStamp);
	}

	// Recreate the FFmpeg context, keeping the header read cursor intact.
	if (hasopencontext && p.mode == p.MODE_READ) {
		openContext(true);
	}

	Do(p, m_isVideoEnd);
	// Placeholder for a removed field; serialized for savestate
	// compatibility but never read.
	bool noAudioDataRemoved;
	Do(p, noAudioDataRemoved);
	if (s >= 3) {
		Do(p, m_audioType);
	} else {
		m_audioType = PSP_CODEC_AT3PLUS;
	}
}
217
218
int MediaEngine::MpegReadbuffer(void *opaque, uint8_t *buf, int buf_size) {
219
MediaEngine *mpeg = (MediaEngine *)opaque;
220
221
int size = buf_size;
222
if (mpeg->m_mpegheaderReadPos < mpeg->m_mpegheaderSize) {
223
size = std::min(buf_size, mpeg->m_mpegheaderSize - mpeg->m_mpegheaderReadPos);
224
memcpy(buf, mpeg->m_mpegheader + mpeg->m_mpegheaderReadPos, size);
225
mpeg->m_mpegheaderReadPos += size;
226
} else {
227
size = mpeg->m_pdata->pop_front(buf, buf_size);
228
if (size > 0)
229
mpeg->m_decodingsize = size;
230
}
231
return size;
232
}
233
234
// Parses the PSMF header in m_mpegheader and registers its video streams
// with the FFmpeg format context directly, avoiding a costly
// avformat_find_stream_info() probe. Returns false when the header looks
// invalid so openContext() can fall back to FFmpeg's own probing.
bool MediaEngine::SetupStreams() {
#ifdef USE_FFMPEG
	const u32 magic = *(u32_le *)&m_mpegheader[0];
	if (magic != PSMF_MAGIC) {
		WARN_LOG_REPORT(Log::ME, "Could not setup streams, bad magic: %08x", magic);
		return false;
	}
	// Stream count is a big-endian u16 at offset 0x80 of the header.
	int numStreams = *(u16_be *)&m_mpegheader[0x80];
	if (numStreams <= 0 || numStreams > 8) {
		// Looks crazy. Let's bail out and let FFmpeg handle it.
		WARN_LOG_REPORT(Log::ME, "Could not setup streams, unexpected stream count: %d", numStreams);
		return false;
	}

	// Looking good. Let's add those streams.
	int videoStreamNum = -1;
	for (int i = 0; i < numStreams; i++) {
		// Stream descriptors are 16 bytes each, starting at offset 0x82.
		const u8 *const currentStreamAddr = m_mpegheader + 0x82 + i * 16;
		int streamId = currentStreamAddr[0];

		// We only set video streams. We demux the audio stream separately.
		if ((streamId & PSMF_VIDEO_STREAM_ID) == PSMF_VIDEO_STREAM_ID) {
			++videoStreamNum;
			addVideoStream(videoStreamNum, streamId);
		}
	}
	// Add the streams to meet the expectation (e.g. restored from savestate).
	for (int i = videoStreamNum + 1; i < m_expectedVideoStreams; i++) {
		addVideoStream(i);
	}
#endif

	return true;
}
268
269
// Creates the FFmpeg demux/decode context over the buffered stream data and
// selects/open the video decoder. keepReadPos preserves the header read
// cursor (used when restoring savestate). Returns false if a context already
// exists, no data is loaded, or any setup step fails.
bool MediaEngine::openContext(bool keepReadPos) {
#ifdef USE_FFMPEG
	InitFFmpeg();

	if (m_pFormatCtx || !m_pdata)
		return false;
	if (!keepReadPos) {
		m_mpegheaderReadPos = 0;
	}
	m_decodingsize = 0;

	m_bufSize = std::max(m_bufSize, m_mpegheaderSize);
	// Ownership of tempbuf passes to the AVIOContext; closeContext() frees
	// whatever buffer the context holds at teardown.
	u8 *tempbuf = (u8*)av_malloc(m_bufSize);

	m_pFormatCtx = avformat_alloc_context();
	// All demuxer reads go through MpegReadbuffer (header first, then ring buffer).
	m_pIOContext = avio_alloc_context(tempbuf, m_bufSize, 0, (void*)this, &MpegReadbuffer, nullptr, nullptr);
	m_pFormatCtx->pb = m_pIOContext;

	// Open video file
	// Limit probing to the header so FFmpeg doesn't consume ring buffer data.
	AVDictionary *open_opt = nullptr;
	av_dict_set_int(&open_opt, "probesize", m_mpegheaderSize, 0);
	if (avformat_open_input((AVFormatContext**)&m_pFormatCtx, nullptr, nullptr, &open_opt) != 0) {
		// NOTE(review): m_pIOContext/tempbuf appear to be left allocated on
		// this path until the next closeContext() — confirm.
		av_dict_free(&open_opt);
		return false;
	}
	av_dict_free(&open_opt);

	bool usedFFMPEGFindStreamInfo = false;
	if (!SetupStreams() || PSP_CoreParameter().compat.flags().UseFFMPEGFindStreamInfo) {
		// Fallback to old behavior. Reads too much and corrupts when game doesn't read fast enough.
		// SetupStreams sometimes work for newer FFmpeg 3.1+ now, but sometimes framerate is missing.
		WARN_LOG_REPORT_ONCE(setupStreams, Log::ME, "Failed to read valid video stream data from header");
		if (avformat_find_stream_info(m_pFormatCtx, nullptr) < 0) {
			closeContext();
			return false;
		}
		usedFFMPEGFindStreamInfo = true;
	}

	if (m_videoStream >= (int)m_pFormatCtx->nb_streams) {
		WARN_LOG_REPORT(Log::ME, "Bad video stream %d", m_videoStream);
		m_videoStream = -1;
	}

	if (m_videoStream == -1) {
		// Find the first video stream
		for (int i = 0; i < (int)m_pFormatCtx->nb_streams; i++) {
			const AVStream *s = m_pFormatCtx->streams[i];
#if LIBAVFORMAT_VERSION_INT >= AV_VERSION_INT(57, 33, 100)
			AVMediaType type = s->codecpar->codec_type;
#else
			AVMediaType type = s->codec->codec_type;
#endif
			if (type == AVMEDIA_TYPE_VIDEO) {
				m_videoStream = i;
				break;
			}
		}
		if (m_videoStream == -1)
			return false;
	}

	if (!setVideoStream(m_videoStream, true))
		return false;

	setVideoDim();
	m_audioContext = CreateAudioDecoder((PSPAudioType)m_audioType);
	m_isVideoEnd = false;

	// NOTE(review): this read-position bump appears to compensate for
	// find_stream_info consuming data — confirm against the compat flag.
	if (PSP_CoreParameter().compat.flags().UseFFMPEGFindStreamInfo && usedFFMPEGFindStreamInfo) {
		m_mpegheaderReadPos++;
		av_seek_frame(m_pFormatCtx, m_videoStream, 0, 0);
	}
#endif // USE_FFMPEG
	return true;
}
345
346
// Frees every FFmpeg object this engine owns: conversion buffer, frames,
// the custom AVIO context (and its buffer), per-stream codec contexts, and
// finally the format context and swscale context.
void MediaEngine::closeContext() {
#ifdef USE_FFMPEG
	if (m_buffer)
		av_free(m_buffer);
	if (m_pFrameRGB)
		av_frame_free(&m_pFrameRGB);
	if (m_pFrame)
		av_frame_free(&m_pFrame);
	// FFmpeg may have replaced the AVIO buffer internally; free whatever the
	// context currently points at, then the context itself.
	if (m_pIOContext && m_pIOContext->buffer)
		av_free(m_pIOContext->buffer);
	if (m_pIOContext)
		av_free(m_pIOContext);
	for (auto &it : m_pCodecCtxs) {
#if LIBAVFORMAT_VERSION_INT >= AV_VERSION_INT(57, 33, 100)
		avcodec_free_context(&it.second);
#else
		avcodec_close(it.second);
#endif
	}
	m_pCodecCtxs.clear();
	// These are streams allocated from avformat_new_stream.
	for (auto &it : m_codecsToClose) {
		avcodec_close(it);
	}
	m_codecsToClose.clear();
	if (m_pFormatCtx)
		avformat_close_input(&m_pFormatCtx);
	sws_freeContext(m_sws_ctx);
	m_sws_ctx = nullptr;
	m_pIOContext = nullptr;
#endif
	m_buffer = nullptr;
}
379
380
// Resets the engine and begins buffering a new PSMF stream.
// buffer must contain at least the PSMF header; readSize is how much initial
// data is available; RingbufferSize is the game's MPEG ring buffer size.
bool MediaEngine::loadStream(const u8 *buffer, int readSize, int RingbufferSize)
{
	closeMedia();

	m_videopts = 0;
	m_lastPts = -1;
	m_audiopts = 0;
	m_ringbuffersize = RingbufferSize;
	// A little slack beyond the ring buffer size for packet overhang.
	m_pdata = new BufferQueue(RingbufferSize + 2048);
	m_pdata->push(buffer, readSize);
	// First/last presentation timestamps come straight from the PSMF header.
	m_firstTimeStamp = getMpegTimeStamp(buffer + PSMF_FIRST_TIMESTAMP_OFFSET);
	m_lastTimeStamp = getMpegTimeStamp(buffer + PSMF_LAST_TIMESTAMP_OFFSET);
	// Offset 8 holds the big-endian offset at which MPEG data begins.
	int mpegoffset = (int)(*(s32_be*)(buffer + 8));
	m_demux = new MpegDemux(RingbufferSize + 2048, mpegoffset);
	m_demux->addStreamData(buffer, readSize);
	return true;
}
397
398
// Re-initializes playback from the cached 2048-byte PSMF header, e.g. when
// restoring from a savestate (see DoState).
bool MediaEngine::reloadStream()
{
	return loadStream(m_mpegheader, 2048, m_ringbuffersize);
}
402
403
// Registers video stream streamNum (always H.264) with the format context,
// creating the AVStream if it doesn't exist yet. streamId == -1 derives the
// MPEG stream id from streamNum. Also grows m_expectedVideoStreams so
// savestates can recreate the same stream layout.
bool MediaEngine::addVideoStream(int streamNum, int streamId) {
#ifdef USE_FFMPEG
	if (m_pFormatCtx) {
		// no need to add an existing stream.
		if ((u32)streamNum < m_pFormatCtx->nb_streams)
			return true;
		AVCodec *h264_codec = avcodec_find_decoder(AV_CODEC_ID_H264);
		if (!h264_codec)
			return false;
		AVStream *stream = avformat_new_stream(m_pFormatCtx, h264_codec);
		if (stream) {
			// Reference ISO/IEC 13818-1.
			if (streamId == -1)
				streamId = PSMF_VIDEO_STREAM_ID | streamNum;

			stream->id = 0x00000100 | streamId;
#if LIBAVFORMAT_VERSION_INT >= AV_VERSION_INT(57, 33, 100)
			stream->codecpar->codec_type = AVMEDIA_TYPE_VIDEO;
			stream->codecpar->codec_id = AV_CODEC_ID_H264;
#else
			stream->request_probe = 0;
			stream->need_parsing = AVSTREAM_PARSE_FULL;
#endif
			// We could set the width here, but we don't need to.
			if (streamNum >= m_expectedVideoStreams) {
				++m_expectedVideoStreams;
			}

#if LIBAVFORMAT_VERSION_INT >= AV_VERSION_INT(59, 16, 100)
			AVCodec *codec = avcodec_find_decoder(stream->codecpar->codec_id);
			AVCodecContext *codecCtx = avcodec_alloc_context3(codec);
#else
			AVCodecContext *codecCtx = stream->codec;
#endif
			// Tracked so closeContext() can clean these up.
			m_codecsToClose.push_back(codecCtx);
			return true;
		}
	}
#endif
	// Even on failure, bump the expectation so state loads stay consistent.
	if (streamNum >= m_expectedVideoStreams) {
		++m_expectedVideoStreams;
	}
	return false;
}
447
448
// Appends game-written MPEG data to the video ring buffer and the audio
// demuxer. Returns the number of bytes accepted (0 if the ring buffer was
// full). Once 2048 bytes are buffered and no FFmpeg context exists yet, the
// PSMF header is captured and the context is opened.
int MediaEngine::addStreamData(const u8 *buffer, int addSize) {
	int size = addSize;
	if (size > 0 && m_pdata) {
		if (!m_pdata->push(buffer, size))
			size = 0;
		if (m_demux) {
			m_demux->addStreamData(buffer, addSize);
		}
#ifdef USE_FFMPEG
		if (!m_pFormatCtx && m_pdata->getQueueSize() >= 2048) {
			m_mpegheaderSize = m_pdata->get_front(m_mpegheader, sizeof(m_mpegheader));
			// Offset 8 of the header holds where the MPEG data starts.
			int streamOffset = (int)(*(s32_be *)(m_mpegheader + 8));
			if (streamOffset <= m_mpegheaderSize) {
				m_mpegheaderSize = streamOffset;
				// Drop the header from the queue; MpegReadbuffer serves it
				// from m_mpegheader separately.
				m_pdata->pop_front(0, m_mpegheaderSize);
				openContext();
			}
		}
#endif // USE_FFMPEG

		// We added data, so... not the end anymore?
		m_isVideoEnd = false;
	}
	return size;
}
473
474
// Steps video forward (skipping frame conversion) until the video timestamp
// reaches `timestamp`, draining audio along the way to keep A/V in sync.
// Constants are 90 kHz ticks: 3003 is one frame at 29.97 fps, 4180 one audio
// frame. Returns false only when stepping/decoding fails; the 1000-iteration
// timeout gives up gracefully by returning true.
bool MediaEngine::seekTo(s64 timestamp, int videoPixelMode) {
	if (timestamp <= 0) {
		return true;
	}

	// Just doing it the not so great way to be sure audio is in sync.
	int timeout = 1000;
	while (getVideoTimeStamp() < timestamp - 3003) {
		// Keep audio within two frames of the video position as we skip.
		if (getAudioTimeStamp() < getVideoTimeStamp() - 4180 * 2) {
			getNextAudioFrame(NULL, NULL, NULL);
		}
		if (!stepVideo(videoPixelMode, true)) {
			return false;
		}
		if (--timeout <= 0) {
			return true;
		}
	}

	// Catch audio up to the final video position.
	while (getAudioTimeStamp() < getVideoTimeStamp() - 4180 * 2) {
		if (getNextAudioFrame(NULL, NULL, NULL) == 0) {
			return false;
		}
		if (--timeout <= 0) {
			return true;
		}
	}

	return true;
}
504
505
bool MediaEngine::setVideoStream(int streamNum, bool force) {
506
if (m_videoStream == streamNum && !force) {
507
// Yay, nothing to do.
508
return true;
509
}
510
511
#ifdef USE_FFMPEG
512
if (m_pFormatCtx && m_pCodecCtxs.find(streamNum) == m_pCodecCtxs.end()) {
513
// Get a pointer to the codec context for the video stream
514
if ((u32)streamNum >= m_pFormatCtx->nb_streams) {
515
return false;
516
}
517
518
AVStream *stream = m_pFormatCtx->streams[streamNum];
519
#if LIBAVFORMAT_VERSION_INT >= AV_VERSION_INT(57, 33, 100)
520
AVCodec *pCodec = avcodec_find_decoder(stream->codecpar->codec_id);
521
if (!pCodec) {
522
WARN_LOG_REPORT(Log::ME, "Could not find decoder for %d", (int)stream->codecpar->codec_id);
523
return false;
524
}
525
AVCodecContext *m_pCodecCtx = avcodec_alloc_context3(pCodec);
526
int paramResult = avcodec_parameters_to_context(m_pCodecCtx, stream->codecpar);
527
if (paramResult < 0) {
528
WARN_LOG_REPORT(Log::ME, "Failed to prepare context parameters: %08x", paramResult);
529
return false;
530
}
531
#else
532
AVCodecContext *m_pCodecCtx = stream->codec;
533
// Find the decoder for the video stream
534
AVCodec *pCodec = avcodec_find_decoder(m_pCodecCtx->codec_id);
535
if (pCodec == nullptr) {
536
return false;
537
}
538
#endif
539
540
m_pCodecCtx->flags |= AV_CODEC_FLAG_OUTPUT_CORRUPT | AV_CODEC_FLAG_LOW_DELAY;
541
542
AVDictionary *opt = nullptr;
543
// Allow ffmpeg to use any number of threads it wants. Without this, it doesn't use threads.
544
av_dict_set(&opt, "threads", "0", 0);
545
int openResult = avcodec_open2(m_pCodecCtx, pCodec, &opt);
546
av_dict_free(&opt);
547
if (openResult < 0) {
548
return false;
549
}
550
551
m_pCodecCtxs[streamNum] = m_pCodecCtx;
552
}
553
#endif
554
m_videoStream = streamNum;
555
556
return true;
557
}
558
559
// Chooses the output dimensions for decoded frames (0x0 = codec's native
// size) and (re)allocates the RGB conversion frame and pixel buffer to
// match. Returns false if no codec context is active yet or the size is
// still unknown (0).
bool MediaEngine::setVideoDim(int width, int height)
{
#ifdef USE_FFMPEG
	auto codecIter = m_pCodecCtxs.find(m_videoStream);
	if (codecIter == m_pCodecCtxs.end())
		return false;
	AVCodecContext *m_pCodecCtx = codecIter->second;

	if (width == 0 && height == 0)
	{
		// use the orignal video size
		m_desWidth = m_pCodecCtx->width;
		m_desHeight = m_pCodecCtx->height;
	}
	else
	{
		m_desWidth = width;
		m_desHeight = height;
	}

	// Allocate video frame
	if (!m_pFrame) {
		m_pFrame = av_frame_alloc();
	}

	sws_freeContext(m_sws_ctx);
	m_sws_ctx = nullptr;
	m_sws_fmt = -1;

	if (m_desWidth == 0 || m_desHeight == 0) {
		// Can't setup SWS yet, so stop for now.
		return false;
	}

	updateSwsFormat(GE_CMODE_32BIT_ABGR8888);

	// Allocate video frame for RGB24. Free any previous allocation first so
	// repeated calls (e.g. after a size change) don't leak — previously only
	// closeContext() ever released these.
	if (m_pFrameRGB) {
		av_frame_free(&m_pFrameRGB);
	}
	if (m_buffer) {
		av_free(m_buffer);
		m_buffer = nullptr;
	}
	m_pFrameRGB = av_frame_alloc();
#if LIBAVCODEC_VERSION_INT >= AV_VERSION_INT(57, 12, 100)
	int numBytes = av_image_get_buffer_size((AVPixelFormat)m_sws_fmt, m_desWidth, m_desHeight, 1);
#else
	int numBytes = avpicture_get_size((AVPixelFormat)m_sws_fmt, m_desWidth, m_desHeight);
#endif
	m_buffer = (u8*)av_malloc(numBytes * sizeof(uint8_t));

	// Assign appropriate parts of buffer to image planes in m_pFrameRGB
#if LIBAVCODEC_VERSION_INT >= AV_VERSION_INT(57, 12, 100)
	av_image_fill_arrays(m_pFrameRGB->data, m_pFrameRGB->linesize, m_buffer, (AVPixelFormat)m_sws_fmt, m_desWidth, m_desHeight, 1);
#else
	avpicture_fill((AVPicture *)m_pFrameRGB, m_buffer, (AVPixelFormat)m_sws_fmt, m_desWidth, m_desHeight);
#endif
#endif // USE_FFMPEG
	return true;
}
613
614
// (Re)creates the swscale context when the requested output pixel format
// changes, converting from the codec's native format/size to
// m_desWidth x m_desHeight. No-op until a codec context exists.
void MediaEngine::updateSwsFormat(int videoPixelMode) {
#ifdef USE_FFMPEG
	auto codecIter = m_pCodecCtxs.find(m_videoStream);
	AVCodecContext *m_pCodecCtx = codecIter == m_pCodecCtxs.end() ? 0 : codecIter->second;

	AVPixelFormat swsDesired = getSwsFormat(videoPixelMode);
	if (swsDesired != m_sws_fmt && m_pCodecCtx != 0) {
		m_sws_fmt = swsDesired;
		// NOTE(review): the returned context is not null-checked before the
		// colorspace calls below — confirm sws_getCachedContext can't fail here.
		m_sws_ctx = sws_getCachedContext
			(
				m_sws_ctx,
				m_pCodecCtx->width,
				m_pCodecCtx->height,
				m_pCodecCtx->pix_fmt,
				m_desWidth,
				m_desHeight,
				(AVPixelFormat)m_sws_fmt,
				SWS_BILINEAR,
				NULL,
				NULL,
				NULL
			);

		int *inv_coefficients;
		int *coefficients;
		int srcRange, dstRange;
		int brightness, contrast, saturation;

		// Force both source and destination to range 0 (limited/MPEG range).
		if (sws_getColorspaceDetails(m_sws_ctx, &inv_coefficients, &srcRange, &coefficients, &dstRange, &brightness, &contrast, &saturation) != -1) {
			srcRange = 0;
			dstRange = 0;
			sws_setColorspaceDetails(m_sws_ctx, inv_coefficients, srcRange, coefficients, dstRange, brightness, contrast, saturation);
		}
	}
#endif
}
650
651
// Decodes the next frame of the current video stream. When skipFrame is
// false, the decoded frame is also converted into videoPixelMode via
// swscale (into m_pFrameRGB). Advances m_videopts and detects end of
// stream. Returns true if a frame was decoded.
bool MediaEngine::stepVideo(int videoPixelMode, bool skipFrame) {
#ifdef USE_FFMPEG
	auto codecIter = m_pCodecCtxs.find(m_videoStream);
	AVCodecContext *m_pCodecCtx = codecIter == m_pCodecCtxs.end() ? 0 : codecIter->second;

	if (!m_pFormatCtx)
		return false;
	if (!m_pCodecCtx)
		return false;
	if (!m_pFrame)
		return false;

	AVPacket packet;
	av_init_packet(&packet);
	int frameFinished;
	bool bGetFrame = false;
	while (!bGetFrame) {
		bool dataEnd = av_read_frame(m_pFormatCtx, &packet) < 0;
		// Even if we've read all frames, some may have been re-ordered frames at the end.
		// Still need to decode those, so keep calling avcodec_decode_video2() / avcodec_receive_frame().
		if (dataEnd || packet.stream_index == m_videoStream) {
			// avcodec_decode_video2() / avcodec_send_packet() gives us the re-ordered frames with a NULL packet.
#if LIBAVCODEC_VERSION_INT >= AV_VERSION_INT(57, 12, 100)
			if (dataEnd)
				av_packet_unref(&packet);
#else
			if (dataEnd)
				av_free_packet(&packet);
#endif

#if LIBAVCODEC_VERSION_INT >= AV_VERSION_INT(57, 48, 101)
			// New send/receive decode API (FFmpeg 3.1+).
			if (packet.size != 0)
				avcodec_send_packet(m_pCodecCtx, &packet);
			int result = avcodec_receive_frame(m_pCodecCtx, m_pFrame);
			if (result == 0) {
				result = m_pFrame->pkt_size;
				frameFinished = 1;
			} else if (result == AVERROR(EAGAIN)) {
				// Decoder wants more input; not an error.
				result = 0;
				frameFinished = 0;
			} else {
				frameFinished = 0;
			}
#else
			int result = avcodec_decode_video2(m_pCodecCtx, m_pFrame, &frameFinished, &packet);
#endif
			if (frameFinished) {
				// Lazily set up the RGB frame once the size is known.
				if (!m_pFrameRGB) {
					setVideoDim();
				}
				if (m_pFrameRGB && !skipFrame) {
					updateSwsFormat(videoPixelMode);
					// TODO: Technically we could set this to frameWidth instead of m_desWidth for better perf.
					// Update the linesize for the new format too. We started with the largest size, so it should fit.
					m_pFrameRGB->linesize[0] = getPixelFormatBytes(videoPixelMode) * m_desWidth;

					sws_scale(m_sws_ctx, m_pFrame->data, m_pFrame->linesize, 0,
						m_pCodecCtx->height, m_pFrameRGB->data, m_pFrameRGB->linesize);
				}

				// Pick the timestamp fields that exist in this FFmpeg version.
#if LIBAVUTIL_VERSION_MAJOR >= 59
				int64_t bestPts = m_pFrame->best_effort_timestamp;
				int64_t ptsDuration = m_pFrame->duration;
#elif LIBAVUTIL_VERSION_INT >= AV_VERSION_INT(55, 58, 100)
				int64_t bestPts = m_pFrame->best_effort_timestamp;
				int64_t ptsDuration = m_pFrame->pkt_duration;
#else
				int64_t bestPts = av_frame_get_best_effort_timestamp(m_pFrame);
				int64_t ptsDuration = av_frame_get_pkt_duration(m_pFrame);
#endif
				if (ptsDuration == 0) {
					if (m_lastPts == bestPts - m_firstTimeStamp || bestPts == AV_NOPTS_VALUE) {
						// TODO: Assuming 29.97 if missing.
						m_videopts += 3003;
					} else {
						m_videopts = bestPts - m_firstTimeStamp;
						m_lastPts = m_videopts;
					}
				} else if (bestPts != AV_NOPTS_VALUE) {
					m_videopts = bestPts + ptsDuration - m_firstTimeStamp;
					m_lastPts = m_videopts;
				} else {
					m_videopts += ptsDuration;
					m_lastPts = m_videopts;
				}
				bGetFrame = true;
			}
			if (result <= 0 && dataEnd) {
				// Sometimes, m_readSize is less than m_streamSize at the end, but not by much.
				// This is kinda a hack, but the ringbuffer would have to be prematurely empty too.
				m_isVideoEnd = !bGetFrame && (m_pdata->getQueueSize() == 0);
				if (m_isVideoEnd)
					m_decodingsize = 0;
				break;
			}
		}
#if LIBAVCODEC_VERSION_INT >= AV_VERSION_INT(57, 12, 100)
		av_packet_unref(&packet);
#else
		av_free_packet(&packet);
#endif
	}
	return bGetFrame;
#else
	// If video engine is not available, just add to the timestamp at least.
	m_videopts += 3003;
	return true;
#endif // USE_FFMPEG
}
760
761
// Helpers that null out alpha (which seems to be the case on the PSP.)
762
// Some games depend on this, for example Sword Art Online (doesn't clear A's from buffer.)
763
// Copies one line of 32-bit pixels while forcing the alpha byte to zero,
// using SSE2/NEON for 8-pixel chunks and a scalar loop for the remainder.
inline void writeVideoLineRGBA(void *destp, const void *srcp, int width) {
	// TODO: Use SSE/NEON, investigate why AV_PIX_FMT_RGB0 does not work.
	u32_le *dest = (u32_le *)destp;
	const u32_le *src = (u32_le *)srcp;

	int count = width;

#if PPSSPP_ARCH(SSE2)
	// Process 8 pixels (two 128-bit registers) per iteration.
	__m128i mask = _mm_set1_epi32(0x00FFFFFF);
	while (count >= 8) {
		__m128i pixels1 = _mm_and_si128(_mm_loadu_si128((const __m128i *)src), mask);
		__m128i pixels2 = _mm_and_si128(_mm_loadu_si128((const __m128i *)src + 1), mask);
		_mm_storeu_si128((__m128i *)dest, pixels1);
		_mm_storeu_si128((__m128i *)dest + 1, pixels2);
		src += 8;
		dest += 8;
		count -= 8;
	}
#elif PPSSPP_ARCH(ARM_NEON)
	uint32x4_t mask = vdupq_n_u32(0x00FFFFFF);
	while (count >= 8) {
		uint32x4_t pixels1 = vandq_u32(vld1q_u32(src), mask);
		uint32x4_t pixels2 = vandq_u32(vld1q_u32(src + 4), mask);
		vst1q_u32(dest, pixels1);
		vst1q_u32(dest + 4, pixels2);
		src += 8;
		dest += 8;
		count -= 8;
	}
#endif
	// Scalar tail (and the full loop on other architectures).
	const u32 mask32 = 0x00FFFFFF;
	DO_NOT_VECTORIZE_LOOP
	while (count--) {
		*dest++ = *src++ & mask32;
	}
}
799
800
// 5650 has no alpha bit, so the line can be copied verbatim.
inline void writeVideoLineABGR5650(void *destp, const void *srcp, int width) {
	memcpy(destp, srcp, width * sizeof(u16));
}
803
804
inline void writeVideoLineABGR5551(void *destp, const void *srcp, int width) {
805
// TODO: Use SSE/NEON.
806
u16_le *dest = (u16_le *)destp;
807
const u16_le *src = (u16_le *)srcp;
808
809
const u16 mask = 0x7FFF;
810
for (int i = 0; i < width; ++i) {
811
dest[i] = src[i] & mask;
812
}
813
}
814
815
inline void writeVideoLineABGR4444(void *destp, const void *srcp, int width) {
816
// TODO: Use SSE/NEON.
817
u16_le *dest = (u16_le *)destp;
818
const u16_le *src = (u16_le *)srcp;
819
820
const u16 mask = 0x0FFF;
821
for (int i = 0; i < width; ++i) {
822
dest[i] = src[i] & mask;
823
}
824
}
825
826
// Writes the current decoded frame into guest memory at bufferPtr in
// videoPixelMode, with frameWidth pixels per output line. Frames destined
// for the swizzled VRAM mirror go through a temporary buffer and a swizzle
// pass. Returns the number of bytes written (0 on failure).
int MediaEngine::writeVideoImage(u32 bufferPtr, int frameWidth, int videoPixelMode) {
	int videoLineSize = 0;
	switch (videoPixelMode) {
	case GE_CMODE_32BIT_ABGR8888:
		videoLineSize = frameWidth * sizeof(u32);
		break;
	case GE_CMODE_16BIT_BGR5650:
	case GE_CMODE_16BIT_ABGR5551:
	case GE_CMODE_16BIT_ABGR4444:
		videoLineSize = frameWidth * sizeof(u16);
		break;
	}

	int videoImageSize = videoLineSize * m_desHeight;

	if (!Memory::IsValidRange(bufferPtr, videoImageSize) || frameWidth > 2048) {
		// Clearly invalid values. Let's just not.
		ERROR_LOG_REPORT(Log::ME, "Ignoring invalid video decode address %08x/%x", bufferPtr, frameWidth);
		return 0;
	}

	u8 *buffer = Memory::GetPointerWriteUnchecked(bufferPtr);

#ifdef USE_FFMPEG
	if (!m_pFrame || !m_pFrameRGB)
		return 0;

	// lock the image size
	int height = m_desHeight;
	int width = m_desWidth;
	u8 *imgbuf = buffer;
	const u8 *data = m_pFrameRGB->data[0];

	// Addresses in the swizzled VRAM mirror (bit 21 set) need a swizzle
	// pass; write to a temporary buffer first in that case.
	bool swizzle = Memory::IsVRAMAddress(bufferPtr) && (bufferPtr & 0x00200000) == 0x00200000;
	if (swizzle) {
		imgbuf = new u8[videoImageSize];
	}

	// Copy line by line, clearing alpha bits as the PSP does.
	switch (videoPixelMode) {
	case GE_CMODE_32BIT_ABGR8888:
		for (int y = 0; y < height; y++) {
			writeVideoLineRGBA(imgbuf + videoLineSize * y, data, width);
			data += width * sizeof(u32);
		}
		break;

	case GE_CMODE_16BIT_BGR5650:
		for (int y = 0; y < height; y++) {
			writeVideoLineABGR5650(imgbuf + videoLineSize * y, data, width);
			data += width * sizeof(u16);
		}
		break;

	case GE_CMODE_16BIT_ABGR5551:
		for (int y = 0; y < height; y++) {
			writeVideoLineABGR5551(imgbuf + videoLineSize * y, data, width);
			data += width * sizeof(u16);
		}
		break;

	case GE_CMODE_16BIT_ABGR4444:
		for (int y = 0; y < height; y++) {
			writeVideoLineABGR4444(imgbuf + videoLineSize * y, data, width);
			data += width * sizeof(u16);
		}
		break;

	default:
		ERROR_LOG_REPORT(Log::ME, "Unsupported video pixel format %d", videoPixelMode);
		break;
	}

	if (swizzle) {
		// Swizzle operates on 16-byte x 8-line tiles.
		const int bxc = videoLineSize / 16;
		int byc = (height + 7) / 8;
		if (byc == 0)
			byc = 1;

		DoSwizzleTex16((const u32 *)imgbuf, buffer, bxc, byc, videoLineSize);
		delete [] imgbuf;
	}

	NotifyMemInfo(MemBlockFlags::WRITE, bufferPtr, videoImageSize, "VideoDecode");

	return videoImageSize;
#endif // USE_FFMPEG
	return 0;
}
914
915
// Like writeVideoImage(), but copies only the sub-rectangle
// (xpos, ypos, width, height) of the decoded frame into guest memory.
// The rectangle is clamped to the frame size. Returns bytes written
// (0 on failure).
int MediaEngine::writeVideoImageWithRange(u32 bufferPtr, int frameWidth, int videoPixelMode,
	int xpos, int ypos, int width, int height) {
	int videoLineSize = 0;
	switch (videoPixelMode) {
	case GE_CMODE_32BIT_ABGR8888:
		videoLineSize = frameWidth * sizeof(u32);
		break;
	case GE_CMODE_16BIT_BGR5650:
	case GE_CMODE_16BIT_ABGR5551:
	case GE_CMODE_16BIT_ABGR4444:
		videoLineSize = frameWidth * sizeof(u16);
		break;
	}
	// NOTE(review): computed from the requested height, before the clamping
	// below — the memory notify/swizzle below also use this size. Confirm
	// intended when the rectangle gets clamped.
	int videoImageSize = videoLineSize * height;

	if (!Memory::IsValidRange(bufferPtr, videoImageSize) || frameWidth > 2048) {
		// Clearly invalid values. Let's just not.
		ERROR_LOG_REPORT(Log::ME, "Ignoring invalid video decode address %08x/%x", bufferPtr, frameWidth);
		return 0;
	}

	u8 *buffer = Memory::GetPointerWriteUnchecked(bufferPtr);

#ifdef USE_FFMPEG
	if (!m_pFrame || !m_pFrameRGB)
		return 0;

	// lock the image size
	u8 *imgbuf = buffer;
	const u8 *data = m_pFrameRGB->data[0];

	// Swizzled VRAM mirror (bit 21 set) needs a temporary buffer + swizzle.
	bool swizzle = Memory::IsVRAMAddress(bufferPtr) && (bufferPtr & 0x00200000) == 0x00200000;
	if (swizzle) {
		imgbuf = new u8[videoImageSize];
	}

	// Clamp the rectangle to the decoded frame.
	if (width > m_desWidth - xpos)
		width = m_desWidth - xpos;
	if (height > m_desHeight - ypos)
		height = m_desHeight - ypos;

	// Copy line by line from the (xpos, ypos) offset, clearing alpha bits.
	switch (videoPixelMode) {
	case GE_CMODE_32BIT_ABGR8888:
		data += (ypos * m_desWidth + xpos) * sizeof(u32);
		for (int y = 0; y < height; y++) {
			writeVideoLineRGBA(imgbuf, data, width);
			data += m_desWidth * sizeof(u32);
			imgbuf += videoLineSize;
		}
		break;

	case GE_CMODE_16BIT_BGR5650:
		data += (ypos * m_desWidth + xpos) * sizeof(u16);
		for (int y = 0; y < height; y++) {
			writeVideoLineABGR5650(imgbuf, data, width);
			data += m_desWidth * sizeof(u16);
			imgbuf += videoLineSize;
		}
		break;

	case GE_CMODE_16BIT_ABGR5551:
		data += (ypos * m_desWidth + xpos) * sizeof(u16);
		for (int y = 0; y < height; y++) {
			writeVideoLineABGR5551(imgbuf, data, width);
			data += m_desWidth * sizeof(u16);
			imgbuf += videoLineSize;
		}
		break;

	case GE_CMODE_16BIT_ABGR4444:
		data += (ypos * m_desWidth + xpos) * sizeof(u16);
		for (int y = 0; y < height; y++) {
			writeVideoLineABGR4444(imgbuf, data, width);
			data += m_desWidth * sizeof(u16);
			imgbuf += videoLineSize;
		}
		break;

	default:
		ERROR_LOG_REPORT(Log::ME, "Unsupported video pixel format %d", videoPixelMode);
		break;
	}

	if (swizzle) {
		WARN_LOG_REPORT_ONCE(vidswizzle, Log::ME, "Swizzling Video with range");

		// Swizzle operates on 16-byte x 8-line tiles.
		const int bxc = videoLineSize / 16;
		int byc = (height + 7) / 8;
		if (byc == 0)
			byc = 1;

		DoSwizzleTex16((const u32 *)imgbuf, buffer, bxc, byc, videoLineSize);
		delete [] imgbuf;
	}
	NotifyMemInfo(MemBlockFlags::WRITE, bufferPtr, videoImageSize, "VideoDecodeRange");

	return videoImageSize;
#endif // USE_FFMPEG
	return 0;
}
1015
1016
// Returns a pointer to the most recently converted RGB frame data, or
// nullptr when no RGB frame has been set up yet (or FFmpeg is unavailable).
u8 *MediaEngine::getFrameImage() {
#ifdef USE_FFMPEG
	// m_pFrameRGB stays null until setVideoDim() succeeds; don't crash then.
	return m_pFrameRGB ? m_pFrameRGB->data[0] : nullptr;
#else
	return nullptr;
#endif
}
1023
1024
int MediaEngine::getRemainSize() {
1025
if (!m_pdata)
1026
return 0;
1027
return std::max(m_pdata->getRemainSize() - m_decodingsize - 2048, 0);
1028
}
1029
1030
int MediaEngine::getAudioRemainSize() {
1031
if (!m_demux) {
1032
// No audio, so it can't be full, return video instead.
1033
return getRemainSize();
1034
}
1035
1036
return m_demux->getRemainSize();
1037
}
1038
1039
// Demuxes and returns the next audio frame of the selected audio stream,
// advancing m_audiopts (4180 ticks per frame). buf/headerCode1/headerCode2
// may be null when the caller just wants to skip a frame (see seekTo()).
// Returns the frame size, or 0 when no frame is available.
int MediaEngine::getNextAudioFrame(u8 **buf, int *headerCode1, int *headerCode2) {
	// Reachable through seekTo() before any stream data has been loaded.
	if (!m_demux) {
		return 0;
	}

	// When getting a frame, increment pts
	m_audiopts += 4180;

	// Demux now (rather than on add data) so that we select the right stream.
	m_demux->demux(m_audioStream);

	s64 pts = 0;
	int result = m_demux->getNextAudioFrame(buf, headerCode1, headerCode2, &pts);
	if (pts != 0) {
		// m_audiopts is supposed to be after the returned frame.
		m_audiopts = pts - m_firstTimeStamp + 4180;
	}
	return result;
}
1054
1055
// Decodes one frame of movie audio into guest memory at bufferPtr (stereo
// s16). Always reports 0x2000 bytes on success, matching PSP behavior, even
// if the decoder produced fewer. Returns 0 when there is nothing to decode
// or the destination address is invalid.
int MediaEngine::getAudioSamples(u32 bufferPtr) {
	int16_t *buffer = (int16_t *)Memory::GetPointerWriteRange(bufferPtr, 8192);
	if (buffer == nullptr) {
		ERROR_LOG_REPORT(Log::ME, "Ignoring bad audio decode address %08x during video playback", bufferPtr);
		// Actually ignore the request, as the message says — previously we
		// fell through and handed the decoder a null output pointer.
		return 0;
	}
	if (!m_demux) {
		return 0;
	}

	u8 *audioFrame = nullptr;
	int headerCode1, headerCode2;
	int frameSize = getNextAudioFrame(&audioFrame, &headerCode1, &headerCode2);
	if (frameSize == 0) {
		return 0;
	}
	int outSamples = 0;

	if (m_audioContext != nullptr) {
		if (headerCode1 == 0x24) {
			// This means mono audio - tell the decoder to expect it before the first frame.
			// Note that it will always send us back stereo audio.
			m_audioContext->SetChannels(1);
		}

		int inbytesConsumed = 0;
		if (!m_audioContext->Decode(audioFrame, frameSize, &inbytesConsumed, 2, buffer, &outSamples)) {
			ERROR_LOG(Log::ME, "Audio (%s) decode failed during video playback", GetCodecName(m_audioType));
		}
		int outBytes = outSamples * sizeof(int16_t) * 2;

		NotifyMemInfo(MemBlockFlags::WRITE, bufferPtr, outBytes, "VideoDecodeAudio");
	}

	return 0x2000;
}
1090
1091
bool MediaEngine::IsNoAudioData() {
1092
if (!m_demux) {
1093
return true;
1094
}
1095
1096
// Let's double check. Here should be a safe enough place to demux.
1097
m_demux->demux(m_audioStream);
1098
return !m_demux->hasNextAudioFrame(NULL, NULL, NULL, NULL);
1099
}
1100
1101
bool MediaEngine::IsActuallyPlayingAudio() {
1102
return getAudioTimeStamp() >= 0;
1103
}
1104
1105
// Current video presentation timestamp, in 90 kHz ticks relative to the
// stream's first timestamp.
s64 MediaEngine::getVideoTimeStamp() {
	return m_videopts;
}
1108
1109
// Timestamp of the last returned audio frame (m_audiopts points one frame
// past it), or -1 when no demuxer exists.
s64 MediaEngine::getAudioTimeStamp() {
	if (!m_demux)
		return -1;
	return m_audiopts - 4180;
}
1112
1113
// Duration of the loaded media: last PTS minus first PTS from the PSMF
// header, or 0 when nothing is loaded.
s64 MediaEngine::getLastTimeStamp() {
	if (!m_pdata)
		return 0;
	return m_lastTimeStamp - m_firstTimeStamp;
}
1118
1119