You can not select more than 25 topics Topics must start with a letter or number, can include dashes ('-') and can be up to 35 characters long.

553 lines
16KB

  1. /*
  2. * Interface to the Android Stagefright library for
  3. * H/W accelerated H.264 decoding
  4. *
  5. * Copyright (C) 2011 Mohamed Naufal
  6. * Copyright (C) 2011 Martin Storsjö
  7. *
  8. * This file is part of FFmpeg.
  9. *
  10. * FFmpeg is free software; you can redistribute it and/or
  11. * modify it under the terms of the GNU Lesser General Public
  12. * License as published by the Free Software Foundation; either
  13. * version 2.1 of the License, or (at your option) any later version.
  14. *
  15. * FFmpeg is distributed in the hope that it will be useful,
  16. * but WITHOUT ANY WARRANTY; without even the implied warranty of
  17. * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
  18. * Lesser General Public License for more details.
  19. *
  20. * You should have received a copy of the GNU Lesser General Public
  21. * License along with FFmpeg; if not, write to the Free Software
  22. * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
  23. */
  24. #include <binder/ProcessState.h>
  25. #include <media/stagefright/MetaData.h>
  26. #include <media/stagefright/MediaBufferGroup.h>
  27. #include <media/stagefright/MediaDebug.h>
  28. #include <media/stagefright/MediaDefs.h>
  29. #include <media/stagefright/OMXClient.h>
  30. #include <media/stagefright/OMXCodec.h>
  31. #include <utils/List.h>
  32. #include <new>
  33. extern "C" {
  34. #include "avcodec.h"
  35. #include "libavutil/imgutils.h"
  36. }
  37. #define OMX_QCOM_COLOR_FormatYVU420SemiPlanar 0x7FA30C00
  38. using namespace android;
// A unit of work passed between threads via the in/out queues.
// On in_queue it carries compressed input (buffer/size/time/key);
// on out_queue it carries a decoded picture (mbuffer/w/h).
struct Frame {
    status_t status;      // OK, ERROR_END_OF_STREAM, or an AVERROR code
    size_t size;          // byte size of the payload (input or decoded)
    int64_t time;         // presentation timestamp (input side)
    int key;              // 1 if the input packet is a keyframe
    uint8_t *buffer;      // compressed input data, av_malloc'ed
    MediaBuffer* mbuffer; // decoded output buffer, owned by Stagefright
    int32_t w, h;         // decoded dimensions reported by the codec
};
class CustomSource;

// Decoder private context. Shared between the lavc caller thread,
// decode_thread(), and Stagefright's own decoder thread (which calls
// CustomSource::read()); the queues are guarded by in_mutex/out_mutex.
struct StagefrightContext {
    AVCodecContext *avctx;
    AVBitStreamFilterContext *bsfc;     // h264_mp4toannexb filter
    uint8_t* orig_extradata;            // saved MP4 extradata, restored on close
    int orig_extradata_size;
    sp<MediaSource> *source;            // wraps the CustomSource below
    List<Frame*> *in_queue, *out_queue; // compressed in / decoded out
    pthread_mutex_t in_mutex, out_mutex;
    pthread_cond_t condition;           // signals "in_queue not empty"
    pthread_t decode_thread_id;
    Frame *end_frame;                   // preallocated for EOF/OOM signalling
    bool source_done;                   // set once no more input will be queued
    volatile sig_atomic_t thread_started, thread_exited, stop_decode;
    AVFrame ret_frame;                  // picture handed back to the caller
    uint8_t *dummy_buf;                 // copy of the first packet (see close())
    int dummy_bufsize;
    OMXClient *client;
    sp<MediaSource> *decoder;           // the OMXCodec instance
    const char *decoder_component;      // component name, av_strdup'ed
};
// MediaSource implementation that feeds FFmpeg packets (queued on
// StagefrightContext::in_queue by Stagefright_decode_frame) into the
// OMX decoder. read() runs on Stagefright's decoder thread.
class CustomSource : public MediaSource {
public:
    CustomSource(AVCodecContext *avctx, sp<MetaData> meta) {
        s = (StagefrightContext*)avctx->priv_data;
        source_meta = meta;
        // Worst-case YUV 4:2:0 frame size; used to size the single
        // reusable MediaBuffer that read() copies each packet into.
        frame_size = (avctx->width * avctx->height * 3) / 2;
        buf_group.add_buffer(new MediaBuffer(frame_size));
    }

    virtual sp<MetaData> getFormat() {
        return source_meta;
    }

    virtual status_t start(MetaData *params) {
        return OK;
    }

    virtual status_t stop() {
        return OK;
    }

    // Blocks until a packet is available on in_queue, then hands a copy
    // to the decoder. Returns the queued frame's status so that EOF
    // (ERROR_END_OF_STREAM) propagates into the codec.
    virtual status_t read(MediaBuffer **buffer,
                          const MediaSource::ReadOptions *options) {
        Frame *frame;
        status_t ret;

        if (s->thread_exited)
            return ERROR_END_OF_STREAM;
        pthread_mutex_lock(&s->in_mutex);

        // Wait for Stagefright_decode_frame() to queue a packet.
        while (s->in_queue->empty())
            pthread_cond_wait(&s->condition, &s->in_mutex);

        frame = *s->in_queue->begin();
        ret = frame->status;

        if (ret == OK) {
            ret = buf_group.acquire_buffer(buffer);
            if (ret == OK) {
                memcpy((*buffer)->data(), frame->buffer, frame->size);
                (*buffer)->set_range(0, frame->size);
                (*buffer)->meta_data()->clear();
                (*buffer)->meta_data()->setInt32(kKeyIsSyncFrame,frame->key);
                (*buffer)->meta_data()->setInt64(kKeyTime, frame->time);
            } else {
                av_log(s->avctx, AV_LOG_ERROR, "Failed to acquire MediaBuffer\n");
            }
            // The packet data has been copied (or the buffer failed);
            // either way the queued copy is no longer needed.
            av_freep(&frame->buffer);
        }

        s->in_queue->erase(s->in_queue->begin());
        pthread_mutex_unlock(&s->in_mutex);
        av_freep(&frame);
        return ret;
    }

private:
    MediaBufferGroup buf_group;  // owns the single reusable MediaBuffer
    sp<MetaData> source_meta;    // stream format handed to the codec
    StagefrightContext *s;
    int frame_size;              // capacity of the MediaBuffer
};
// Background worker: pulls decoded frames out of the OMX decoder and
// queues them on out_queue for Stagefright_decode_frame() to pick up.
// Exits on decode error, end of stream, OOM, or when stop_decode is set.
void* decode_thread(void *arg)
{
    AVCodecContext *avctx = (AVCodecContext*)arg;
    StagefrightContext *s = (StagefrightContext*)avctx->priv_data;
    Frame* frame;
    MediaBuffer *buffer;
    int decode_done = 0;

    do {
        buffer = NULL;
        frame = (Frame*)av_mallocz(sizeof(Frame));
        if (!frame) {
            // Out of memory: fall back to the preallocated end_frame so
            // the failure can still be reported through the queue.
            frame = s->end_frame;
            frame->status = AVERROR(ENOMEM);
            decode_done = 1;
            s->end_frame = NULL;
        } else {
            frame->status = (*s->decoder)->read(&buffer);
            if (frame->status == OK) {
                // Record the current output dimensions with each frame;
                // the consumer reads them after dequeueing.
                sp<MetaData> outFormat = (*s->decoder)->getFormat();
                outFormat->findInt32(kKeyWidth , &frame->w);
                outFormat->findInt32(kKeyHeight, &frame->h);
                frame->size = buffer->range_length();
                frame->mbuffer = buffer;
            } else if (frame->status == INFO_FORMAT_CHANGED) {
                // Format changes are transparent to the consumer; drop
                // the notification frame and read again.
                if (buffer)
                    buffer->release();
                av_free(frame);
                continue;
            } else {
                decode_done = 1;  // error or end of stream
            }
        }
        // Back-pressure: cap the output queue at 10 entries.
        while (true) {
            pthread_mutex_lock(&s->out_mutex);
            if (s->out_queue->size() >= 10) {
                pthread_mutex_unlock(&s->out_mutex);
                usleep(10000);
                continue;
            }
            break;
        }
        s->out_queue->push_back(frame);
        pthread_mutex_unlock(&s->out_mutex);
    } while (!decode_done && !s->stop_decode);

    s->thread_exited = true;
    return 0;
}
  168. static av_cold int Stagefright_init(AVCodecContext *avctx)
  169. {
  170. StagefrightContext *s = (StagefrightContext*)avctx->priv_data;
  171. sp<MetaData> meta, outFormat;
  172. int32_t colorFormat = 0;
  173. int ret;
  174. if (!avctx->extradata || !avctx->extradata_size || avctx->extradata[0] != 1)
  175. return -1;
  176. s->avctx = avctx;
  177. s->bsfc = av_bitstream_filter_init("h264_mp4toannexb");
  178. if (!s->bsfc) {
  179. av_log(avctx, AV_LOG_ERROR, "Cannot open the h264_mp4toannexb BSF!\n");
  180. return -1;
  181. }
  182. s->orig_extradata_size = avctx->extradata_size;
  183. s->orig_extradata = (uint8_t*) av_mallocz(avctx->extradata_size +
  184. FF_INPUT_BUFFER_PADDING_SIZE);
  185. if (!s->orig_extradata) {
  186. ret = AVERROR(ENOMEM);
  187. goto fail;
  188. }
  189. memcpy(s->orig_extradata, avctx->extradata, avctx->extradata_size);
  190. meta = new MetaData;
  191. if (meta == NULL) {
  192. ret = AVERROR(ENOMEM);
  193. goto fail;
  194. }
  195. meta->setCString(kKeyMIMEType, MEDIA_MIMETYPE_VIDEO_AVC);
  196. meta->setInt32(kKeyWidth, avctx->width);
  197. meta->setInt32(kKeyHeight, avctx->height);
  198. meta->setData(kKeyAVCC, kTypeAVCC, avctx->extradata, avctx->extradata_size);
  199. android::ProcessState::self()->startThreadPool();
  200. s->source = new sp<MediaSource>();
  201. *s->source = new CustomSource(avctx, meta);
  202. s->in_queue = new List<Frame*>;
  203. s->out_queue = new List<Frame*>;
  204. s->client = new OMXClient;
  205. s->end_frame = (Frame*)av_mallocz(sizeof(Frame));
  206. if (s->source == NULL || !s->in_queue || !s->out_queue || !s->client ||
  207. !s->end_frame) {
  208. ret = AVERROR(ENOMEM);
  209. goto fail;
  210. }
  211. if (s->client->connect() != OK) {
  212. av_log(avctx, AV_LOG_ERROR, "Cannot connect OMX client\n");
  213. ret = -1;
  214. goto fail;
  215. }
  216. s->decoder = new sp<MediaSource>();
  217. *s->decoder = OMXCodec::Create(s->client->interface(), meta,
  218. false, *s->source, NULL,
  219. OMXCodec::kClientNeedsFramebuffer);
  220. if ((*s->decoder)->start() != OK) {
  221. av_log(avctx, AV_LOG_ERROR, "Cannot start decoder\n");
  222. ret = -1;
  223. s->client->disconnect();
  224. goto fail;
  225. }
  226. outFormat = (*s->decoder)->getFormat();
  227. outFormat->findInt32(kKeyColorFormat, &colorFormat);
  228. if (colorFormat == OMX_QCOM_COLOR_FormatYVU420SemiPlanar ||
  229. colorFormat == OMX_COLOR_FormatYUV420SemiPlanar)
  230. avctx->pix_fmt = PIX_FMT_NV21;
  231. else
  232. avctx->pix_fmt = PIX_FMT_YUV420P;
  233. outFormat->findCString(kKeyDecoderComponent, &s->decoder_component);
  234. if (s->decoder_component)
  235. s->decoder_component = av_strdup(s->decoder_component);
  236. pthread_mutex_init(&s->in_mutex, NULL);
  237. pthread_mutex_init(&s->out_mutex, NULL);
  238. pthread_cond_init(&s->condition, NULL);
  239. return 0;
  240. fail:
  241. av_bitstream_filter_close(s->bsfc);
  242. av_freep(&s->orig_extradata);
  243. av_freep(&s->end_frame);
  244. delete s->in_queue;
  245. delete s->out_queue;
  246. delete s->client;
  247. return ret;
  248. }
  249. static int Stagefright_decode_frame(AVCodecContext *avctx, void *data,
  250. int *data_size, AVPacket *avpkt)
  251. {
  252. StagefrightContext *s = (StagefrightContext*)avctx->priv_data;
  253. Frame *frame;
  254. MediaBuffer *mbuffer;
  255. status_t status;
  256. size_t size;
  257. uint8_t *buf;
  258. const uint8_t *src_data[3];
  259. int w, h;
  260. int src_linesize[3];
  261. int orig_size = avpkt->size;
  262. AVPacket pkt = *avpkt;
  263. int ret;
  264. if (!s->thread_started) {
  265. pthread_create(&s->decode_thread_id, NULL, &decode_thread, avctx);
  266. s->thread_started = true;
  267. }
  268. if (avpkt && avpkt->data) {
  269. av_bitstream_filter_filter(s->bsfc, avctx, NULL, &pkt.data, &pkt.size,
  270. avpkt->data, avpkt->size, avpkt->flags & AV_PKT_FLAG_KEY);
  271. avpkt = &pkt;
  272. }
  273. if (!s->source_done) {
  274. if(!s->dummy_buf) {
  275. s->dummy_buf = (uint8_t*)av_malloc(avpkt->size);
  276. if (!s->dummy_buf)
  277. return AVERROR(ENOMEM);
  278. s->dummy_bufsize = avpkt->size;
  279. memcpy(s->dummy_buf, avpkt->data, avpkt->size);
  280. }
  281. frame = (Frame*)av_mallocz(sizeof(Frame));
  282. if (avpkt->data) {
  283. frame->status = OK;
  284. frame->size = avpkt->size;
  285. // Stagefright can't handle negative timestamps -
  286. // if needed, work around this by offsetting them manually?
  287. if (avpkt->pts >= 0)
  288. frame->time = avpkt->pts;
  289. frame->key = avpkt->flags & AV_PKT_FLAG_KEY ? 1 : 0;
  290. frame->buffer = (uint8_t*)av_malloc(avpkt->size);
  291. if (!frame->buffer) {
  292. av_freep(&frame);
  293. return AVERROR(ENOMEM);
  294. }
  295. uint8_t *ptr = avpkt->data;
  296. // The OMX.SEC decoder fails without this.
  297. if (avpkt->size == orig_size + avctx->extradata_size) {
  298. ptr += avctx->extradata_size;
  299. frame->size = orig_size;
  300. }
  301. memcpy(frame->buffer, ptr, orig_size);
  302. } else {
  303. frame->status = ERROR_END_OF_STREAM;
  304. s->source_done = true;
  305. }
  306. while (true) {
  307. if (s->thread_exited) {
  308. s->source_done = true;
  309. break;
  310. }
  311. pthread_mutex_lock(&s->in_mutex);
  312. if (s->in_queue->size() >= 10) {
  313. pthread_mutex_unlock(&s->in_mutex);
  314. usleep(10000);
  315. continue;
  316. }
  317. s->in_queue->push_back(frame);
  318. pthread_cond_signal(&s->condition);
  319. pthread_mutex_unlock(&s->in_mutex);
  320. break;
  321. }
  322. }
  323. while (true) {
  324. pthread_mutex_lock(&s->out_mutex);
  325. if (!s->out_queue->empty()) break;
  326. pthread_mutex_unlock(&s->out_mutex);
  327. if (s->source_done) {
  328. usleep(10000);
  329. continue;
  330. } else {
  331. return orig_size;
  332. }
  333. }
  334. frame = *s->out_queue->begin();
  335. s->out_queue->erase(s->out_queue->begin());
  336. pthread_mutex_unlock(&s->out_mutex);
  337. mbuffer = frame->mbuffer;
  338. status = frame->status;
  339. size = frame->size;
  340. w = frame->w;
  341. h = frame->h;
  342. av_freep(&frame);
  343. if (status == ERROR_END_OF_STREAM)
  344. return 0;
  345. if (status != OK) {
  346. if (status == AVERROR(ENOMEM))
  347. return status;
  348. av_log(avctx, AV_LOG_ERROR, "Decode failed: %x\n", status);
  349. return -1;
  350. }
  351. // The OMX.SEC decoder doesn't signal the modified width/height
  352. if (s->decoder_component && !strncmp(s->decoder_component, "OMX.SEC", 7) &&
  353. (w & 15 || h & 15)) {
  354. if (((w + 15)&~15) * ((h + 15)&~15) * 3/2 == size) {
  355. w = (w + 15)&~15;
  356. h = (h + 15)&~15;
  357. }
  358. }
  359. if (!avctx->width || !avctx->height || avctx->width > w || avctx->height > h) {
  360. avctx->width = w;
  361. avctx->height = h;
  362. }
  363. ret = avctx->reget_buffer(avctx, &s->ret_frame);
  364. if (ret < 0) {
  365. av_log(avctx, AV_LOG_ERROR, "reget buffer() failed\n");
  366. goto end;
  367. }
  368. src_linesize[0] = w;
  369. if (avctx->pix_fmt == PIX_FMT_YUV420P)
  370. src_linesize[1] = src_linesize[2] = w/2;
  371. else if (avctx->pix_fmt == PIX_FMT_NV21)
  372. src_linesize[1] = w;
  373. buf = (uint8_t*)mbuffer->data();
  374. src_data[0] = buf;
  375. src_data[1] = buf + src_linesize[0] * h;
  376. src_data[2] = src_data[1] + src_linesize[1] * h/2;
  377. av_image_copy(s->ret_frame.data, s->ret_frame.linesize,
  378. src_data, src_linesize,
  379. avctx->pix_fmt, avctx->width, avctx->height);
  380. *data_size = sizeof(AVFrame);
  381. *(AVFrame*)data = s->ret_frame;
  382. ret = orig_size;
  383. end:
  384. mbuffer->release();
  385. return ret;
  386. }
// Tear down the decoder: stop the worker thread, drain both queues,
// stop the OMX codec, free all context state and restore the original
// MP4 extradata. The shutdown ordering (drain out_queue, feed dummy
// frame, then EOF sentinel) is required to unblock decode_thread().
static av_cold int Stagefright_close(AVCodecContext *avctx)
{
    StagefrightContext *s = (StagefrightContext*)avctx->priv_data;
    Frame *frame;

    if (s->thread_started) {
        if (!s->thread_exited) {
            s->stop_decode = 1;

            // Make sure decode_thread() doesn't get stuck
            pthread_mutex_lock(&s->out_mutex);
            while (!s->out_queue->empty()) {
                frame = *s->out_queue->begin();
                s->out_queue->erase(s->out_queue->begin());
                if (frame->size)
                    frame->mbuffer->release();
                av_freep(&frame);
            }
            pthread_mutex_unlock(&s->out_mutex);

            // Feed a dummy frame prior to signalling EOF.
            // This is required to terminate the decoder(OMX.SEC)
            // when only one frame is read during stream info detection.
            if (s->dummy_buf && (frame = (Frame*)av_mallocz(sizeof(Frame)))) {
                frame->status = OK;
                frame->size = s->dummy_bufsize;
                frame->key = 1;
                frame->buffer = s->dummy_buf;
                pthread_mutex_lock(&s->in_mutex);
                s->in_queue->push_back(frame);
                pthread_cond_signal(&s->condition);
                pthread_mutex_unlock(&s->in_mutex);
                s->dummy_buf = NULL; // ownership moved into the queue
            }

            // Queue the EOF sentinel so CustomSource::read() returns
            // ERROR_END_OF_STREAM and the decoder winds down.
            pthread_mutex_lock(&s->in_mutex);
            s->end_frame->status = ERROR_END_OF_STREAM;
            s->in_queue->push_back(s->end_frame);
            pthread_cond_signal(&s->condition);
            pthread_mutex_unlock(&s->in_mutex);
            s->end_frame = NULL; // ownership moved into the queue
        }

        pthread_join(s->decode_thread_id, NULL);

        if (s->ret_frame.data[0])
            avctx->release_buffer(avctx, &s->ret_frame);

        s->thread_started = false;
    }

    // Drain whatever is left in the queues (the thread may have exited
    // early, or the sentinel frames may still be sitting in in_queue).
    while (!s->in_queue->empty()) {
        frame = *s->in_queue->begin();
        s->in_queue->erase(s->in_queue->begin());
        if (frame->size)
            av_freep(&frame->buffer);
        av_freep(&frame);
    }
    while (!s->out_queue->empty()) {
        frame = *s->out_queue->begin();
        s->out_queue->erase(s->out_queue->begin());
        if (frame->size)
            frame->mbuffer->release();
        av_freep(&frame);
    }

    (*s->decoder)->stop();
    s->client->disconnect();

    if (s->decoder_component)
        av_freep(&s->decoder_component);
    av_freep(&s->dummy_buf);
    av_freep(&s->end_frame);

    // Reset the extradata back to the original mp4 format, so that
    // the next invocation (both when decoding and when called from
    // av_find_stream_info) get the original mp4 format extradata.
    av_freep(&avctx->extradata);
    avctx->extradata = s->orig_extradata;
    avctx->extradata_size = s->orig_extradata_size;

    delete s->in_queue;
    delete s->out_queue;
    delete s->client;
    delete s->decoder;
    delete s->source;

    pthread_mutex_destroy(&s->in_mutex);
    pthread_mutex_destroy(&s->out_mutex);
    pthread_cond_destroy(&s->condition);
    av_bitstream_filter_close(s->bsfc);
    return 0;
}
// Codec registration. Positional initialization matching this FFmpeg
// version's AVCodec field order (name, type, id, priv_data_size, init,
// encode, close, decode, capabilities, ...). CODEC_CAP_DELAY: the
// decoder buffers frames and is drained with NULL packets at EOF.
AVCodec ff_libstagefright_h264_decoder = {
    "libstagefright_h264",
    AVMEDIA_TYPE_VIDEO,
    CODEC_ID_H264,
    sizeof(StagefrightContext),
    Stagefright_init,
    NULL, //encode
    Stagefright_close,
    Stagefright_decode_frame,
    CODEC_CAP_DELAY,
    NULL, //next
    NULL, //flush
    NULL, //supported_framerates
    NULL, //pixel_formats
    NULL_IF_CONFIG_SMALL("libstagefright H.264"),
};