You can not select more than 25 topics Topics must start with a letter or number, can include dashes ('-') and can be up to 35 characters long.

599 lines
19KB

  1. /*
  2. * Interface to the Android Stagefright library for
  3. * H/W accelerated H.264 decoding
  4. *
  5. * Copyright (C) 2011 Mohamed Naufal
  6. * Copyright (C) 2011 Martin Storsjö
  7. *
  8. * This file is part of FFmpeg.
  9. *
  10. * FFmpeg is free software; you can redistribute it and/or
  11. * modify it under the terms of the GNU Lesser General Public
  12. * License as published by the Free Software Foundation; either
  13. * version 2.1 of the License, or (at your option) any later version.
  14. *
  15. * FFmpeg is distributed in the hope that it will be useful,
  16. * but WITHOUT ANY WARRANTY; without even the implied warranty of
  17. * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
  18. * Lesser General Public License for more details.
  19. *
  20. * You should have received a copy of the GNU Lesser General Public
  21. * License along with FFmpeg; if not, write to the Free Software
  22. * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
  23. */
  24. #include <binder/ProcessState.h>
  25. #include <media/stagefright/MetaData.h>
  26. #include <media/stagefright/MediaBufferGroup.h>
  27. #include <media/stagefright/MediaDebug.h>
  28. #include <media/stagefright/MediaDefs.h>
  29. #include <media/stagefright/OMXClient.h>
  30. #include <media/stagefright/OMXCodec.h>
  31. #include <utils/List.h>
  32. #include <new>
  33. #include <map>
  34. extern "C" {
  35. #include "avcodec.h"
  36. #include "libavutil/imgutils.h"
  37. }
// Vendor-specific (Qualcomm) OMX color format value; not present in the
// stock OMX headers, so it is defined here for the pix_fmt mapping below.
#define OMX_QCOM_COLOR_FormatYVU420SemiPlanar 0x7FA30C00

using namespace android;
// One unit of work passed through the in_queue (compressed input) and
// out_queue (decoded output) between the lavc thread and decode_thread().
struct Frame {
    status_t status;   // OK, ERROR_END_OF_STREAM, INFO_FORMAT_CHANGED or an AVERROR value
    size_t size;       // byte size of 'buffer' (input side)
    int64_t time;      // timestamp handed to Stagefright; used as a ts_map key
    int key;           // non-zero if this input is a sync (key) frame
    uint8_t *buffer;   // compressed bitstream data (input side only)
    AVFrame *vframe;   // decoded picture (output side only)
};
// Timestamps remembered per input frame (keyed by frame_index in ts_map)
// so they can be reattached to the matching decoded frame, since the
// decoder may reorder output.
struct TimeStamp {
    int64_t pts;
    int64_t reordered_opaque;
};

class CustomSource;
// Private codec context (avctx->priv_data), shared between the lavc-facing
// callbacks and the background decode_thread().
struct StagefrightContext {
    AVCodecContext *avctx;
    AVBitStreamFilterContext *bsfc;      // h264_mp4toannexb filter
    uint8_t* orig_extradata;             // saved avcC extradata, restored on close
    int orig_extradata_size;
    sp<MediaSource> *source;             // our CustomSource feeding the decoder
    List<Frame*> *in_queue, *out_queue;  // compressed-in / decoded-out queues
    pthread_mutex_t in_mutex, out_mutex; // one mutex per queue
    pthread_cond_t condition;            // signalled when in_queue gains a frame
    pthread_t decode_thread_id;
    Frame *end_frame;                    // preallocated EOS/OOM marker frame
    bool source_done;                    // no more input will be queued
    volatile sig_atomic_t thread_started, thread_exited, stop_decode;
    AVFrame *prev_frame;                 // last frame returned; released on the next call
    std::map<int64_t, TimeStamp> *ts_map; // frame_index -> original timestamps
    int64_t frame_index;                 // monotonically increasing input counter
    uint8_t *dummy_buf;                  // copy of the first packet (see close())
    int dummy_bufsize;
    OMXClient *client;
    sp<MediaSource> *decoder;            // the OMXCodec instance
    const char *decoder_component;       // e.g. "OMX.SEC...", av_strdup'ed
};
// Exposes the FFmpeg-side in_queue as an Android MediaSource so that the
// OMX decoder can pull compressed frames from it via read().
class CustomSource : public MediaSource {
public:
    // 'meta' carries the stream description (MIME, dimensions, avcC data).
    CustomSource(AVCodecContext *avctx, sp<MetaData> meta) {
        s = (StagefrightContext*)avctx->priv_data;
        source_meta = meta;
        // One reusable buffer sized for a worst-case YUV420 frame.
        frame_size = (avctx->width * avctx->height * 3) / 2;
        buf_group.add_buffer(new MediaBuffer(frame_size));
    }
    virtual sp<MetaData> getFormat() {
        return source_meta;
    }
    virtual status_t start(MetaData *params) {
        return OK;
    }
    virtual status_t stop() {
        return OK;
    }
    // Called from the decoder's thread: blocks until a Frame is queued,
    // copies it into a MediaBuffer and consumes it from in_queue.
    virtual status_t read(MediaBuffer **buffer,
                          const MediaSource::ReadOptions *options) {
        Frame *frame;
        status_t ret;

        if (s->thread_exited)
            return ERROR_END_OF_STREAM;
        pthread_mutex_lock(&s->in_mutex);

        // Wait for Stagefright_decode_frame()/close() to queue input.
        while (s->in_queue->empty())
            pthread_cond_wait(&s->condition, &s->in_mutex);

        frame = *s->in_queue->begin();
        ret = frame->status;

        if (ret == OK) {
            ret = buf_group.acquire_buffer(buffer);
            if (ret == OK) {
                memcpy((*buffer)->data(), frame->buffer, frame->size);
                (*buffer)->set_range(0, frame->size);
                (*buffer)->meta_data()->clear();
                (*buffer)->meta_data()->setInt32(kKeyIsSyncFrame,frame->key);
                (*buffer)->meta_data()->setInt64(kKeyTime, frame->time);
            } else {
                av_log(s->avctx, AV_LOG_ERROR, "Failed to acquire MediaBuffer\n");
            }
            av_freep(&frame->buffer);
        }

        // The frame is consumed whether or not the buffer copy succeeded.
        s->in_queue->erase(s->in_queue->begin());
        pthread_mutex_unlock(&s->in_mutex);

        av_freep(&frame);
        return ret;
    }

private:
    MediaBufferGroup buf_group;
    sp<MetaData> source_meta;
    StagefrightContext *s;
    int frame_size;
};
// Background thread: pulls decoded buffers from the OMX decoder, copies
// them into AVFrames (with timestamps restored from ts_map) and pushes
// them onto out_queue until EOS, an error, or stop_decode is requested.
void* decode_thread(void *arg)
{
    AVCodecContext *avctx = (AVCodecContext*)arg;
    StagefrightContext *s = (StagefrightContext*)avctx->priv_data;
    const AVPixFmtDescriptor *pix_desc = &av_pix_fmt_descriptors[avctx->pix_fmt];
    Frame* frame;
    MediaBuffer *buffer;
    int32_t w, h;
    int decode_done = 0;
    int ret;
    int src_linesize[3];
    const uint8_t *src_data[3];
    int64_t out_frame_index = 0;

    do {
        buffer = NULL;
        frame = (Frame*)av_mallocz(sizeof(Frame));
        if (!frame) {
            // Out of memory: reuse the preallocated end_frame so the
            // error can still be delivered through out_queue.
            frame = s->end_frame;
            frame->status = AVERROR(ENOMEM);
            decode_done   = 1;
            s->end_frame  = NULL;
            goto push_frame;
        }
        frame->status = (*s->decoder)->read(&buffer);
        if (frame->status == OK) {
            sp<MetaData> outFormat = (*s->decoder)->getFormat();
            outFormat->findInt32(kKeyWidth , &w);
            outFormat->findInt32(kKeyHeight, &h);
            frame->vframe = (AVFrame*)av_mallocz(sizeof(AVFrame));
            if (!frame->vframe) {
                frame->status = AVERROR(ENOMEM);
                decode_done   = 1;
                buffer->release();
                goto push_frame;
            }
            ret = avctx->get_buffer(avctx, frame->vframe);
            if (ret < 0) {
                av_log(avctx, AV_LOG_ERROR, "get_buffer() failed\n");
                frame->status = ret;
                decode_done   = 1;
                buffer->release();
                goto push_frame;
            }

            // The OMX.SEC decoder doesn't signal the modified width/height
            if (s->decoder_component && !strncmp(s->decoder_component, "OMX.SEC", 7) &&
                (w & 15 || h & 15)) {
                // Round up to 16 only if the buffer size confirms the padding.
                if (((w + 15)&~15) * ((h + 15)&~15) * 3/2 == buffer->range_length()) {
                    w = (w + 15)&~15;
                    h = (h + 15)&~15;
                }
            }

            if (!avctx->width || !avctx->height || avctx->width > w || avctx->height > h) {
                avctx->width  = w;
                avctx->height = h;
            }

            // Source plane layout: Y plane of height h, then chroma planes
            // whose height is h scaled by the chroma shift (rounded up).
            src_linesize[0] = av_image_get_linesize(avctx->pix_fmt, w, 0);
            src_linesize[1] = av_image_get_linesize(avctx->pix_fmt, w, 1);
            src_linesize[2] = av_image_get_linesize(avctx->pix_fmt, w, 2);

            src_data[0] = (uint8_t*)buffer->data();
            src_data[1] = src_data[0] + src_linesize[0] * h;
            src_data[2] = src_data[1] + src_linesize[1] * -(-h>>pix_desc->log2_chroma_h);
            av_image_copy(frame->vframe->data, frame->vframe->linesize,
                          src_data, src_linesize,
                          avctx->pix_fmt, avctx->width, avctx->height);

            // Recover the original pts/reordered_opaque for this output.
            buffer->meta_data()->findInt64(kKeyTime, &out_frame_index);
            if (out_frame_index && s->ts_map->count(out_frame_index) > 0) {
                frame->vframe->pts = (*s->ts_map)[out_frame_index].pts;
                frame->vframe->reordered_opaque = (*s->ts_map)[out_frame_index].reordered_opaque;
                s->ts_map->erase(out_frame_index);
            }
            buffer->release();
            } else if (frame->status == INFO_FORMAT_CHANGED) {
                // Transient notification; retry the read with a fresh Frame.
                if (buffer)
                    buffer->release();
                av_free(frame);
                continue;
            } else {
                // EOS or a hard error: deliver it and stop decoding.
                decode_done = 1;
            }
push_frame:
        // Bounded queue: spin (with the lock dropped) while the consumer
        // is more than 10 frames behind.
        while (true) {
            pthread_mutex_lock(&s->out_mutex);
            if (s->out_queue->size() >= 10) {
                pthread_mutex_unlock(&s->out_mutex);
                usleep(10000);
                continue;
            }
            break;
        }
        // Note: the out_mutex is still held here from the loop above.
        s->out_queue->push_back(frame);
        pthread_mutex_unlock(&s->out_mutex);
    } while (!decode_done && !s->stop_decode);

    s->thread_exited = true;

    return 0;
}
  222. static av_cold int Stagefright_init(AVCodecContext *avctx)
  223. {
  224. StagefrightContext *s = (StagefrightContext*)avctx->priv_data;
  225. sp<MetaData> meta, outFormat;
  226. int32_t colorFormat = 0;
  227. int ret;
  228. if (!avctx->extradata || !avctx->extradata_size || avctx->extradata[0] != 1)
  229. return -1;
  230. s->avctx = avctx;
  231. s->bsfc = av_bitstream_filter_init("h264_mp4toannexb");
  232. if (!s->bsfc) {
  233. av_log(avctx, AV_LOG_ERROR, "Cannot open the h264_mp4toannexb BSF!\n");
  234. return -1;
  235. }
  236. s->orig_extradata_size = avctx->extradata_size;
  237. s->orig_extradata = (uint8_t*) av_mallocz(avctx->extradata_size +
  238. FF_INPUT_BUFFER_PADDING_SIZE);
  239. if (!s->orig_extradata) {
  240. ret = AVERROR(ENOMEM);
  241. goto fail;
  242. }
  243. memcpy(s->orig_extradata, avctx->extradata, avctx->extradata_size);
  244. meta = new MetaData;
  245. if (meta == NULL) {
  246. ret = AVERROR(ENOMEM);
  247. goto fail;
  248. }
  249. meta->setCString(kKeyMIMEType, MEDIA_MIMETYPE_VIDEO_AVC);
  250. meta->setInt32(kKeyWidth, avctx->width);
  251. meta->setInt32(kKeyHeight, avctx->height);
  252. meta->setData(kKeyAVCC, kTypeAVCC, avctx->extradata, avctx->extradata_size);
  253. android::ProcessState::self()->startThreadPool();
  254. s->source = new sp<MediaSource>();
  255. *s->source = new CustomSource(avctx, meta);
  256. s->in_queue = new List<Frame*>;
  257. s->out_queue = new List<Frame*>;
  258. s->ts_map = new std::map<int64_t, TimeStamp>;
  259. s->client = new OMXClient;
  260. s->end_frame = (Frame*)av_mallocz(sizeof(Frame));
  261. if (s->source == NULL || !s->in_queue || !s->out_queue || !s->client ||
  262. !s->ts_map || !s->end_frame) {
  263. ret = AVERROR(ENOMEM);
  264. goto fail;
  265. }
  266. if (s->client->connect() != OK) {
  267. av_log(avctx, AV_LOG_ERROR, "Cannot connect OMX client\n");
  268. ret = -1;
  269. goto fail;
  270. }
  271. s->decoder = new sp<MediaSource>();
  272. *s->decoder = OMXCodec::Create(s->client->interface(), meta,
  273. false, *s->source, NULL,
  274. OMXCodec::kClientNeedsFramebuffer);
  275. if ((*s->decoder)->start() != OK) {
  276. av_log(avctx, AV_LOG_ERROR, "Cannot start decoder\n");
  277. ret = -1;
  278. s->client->disconnect();
  279. goto fail;
  280. }
  281. outFormat = (*s->decoder)->getFormat();
  282. outFormat->findInt32(kKeyColorFormat, &colorFormat);
  283. if (colorFormat == OMX_QCOM_COLOR_FormatYVU420SemiPlanar ||
  284. colorFormat == OMX_COLOR_FormatYUV420SemiPlanar)
  285. avctx->pix_fmt = PIX_FMT_NV21;
  286. else if (colorFormat == OMX_COLOR_FormatYCbYCr)
  287. avctx->pix_fmt = PIX_FMT_YUYV422;
  288. else if (colorFormat == OMX_COLOR_FormatCbYCrY)
  289. avctx->pix_fmt = PIX_FMT_UYVY422;
  290. else
  291. avctx->pix_fmt = PIX_FMT_YUV420P;
  292. outFormat->findCString(kKeyDecoderComponent, &s->decoder_component);
  293. if (s->decoder_component)
  294. s->decoder_component = av_strdup(s->decoder_component);
  295. pthread_mutex_init(&s->in_mutex, NULL);
  296. pthread_mutex_init(&s->out_mutex, NULL);
  297. pthread_cond_init(&s->condition, NULL);
  298. return 0;
  299. fail:
  300. av_bitstream_filter_close(s->bsfc);
  301. av_freep(&s->orig_extradata);
  302. av_freep(&s->end_frame);
  303. delete s->in_queue;
  304. delete s->out_queue;
  305. delete s->ts_map;
  306. delete s->client;
  307. return ret;
  308. }
  309. static int Stagefright_decode_frame(AVCodecContext *avctx, void *data,
  310. int *data_size, AVPacket *avpkt)
  311. {
  312. StagefrightContext *s = (StagefrightContext*)avctx->priv_data;
  313. Frame *frame;
  314. status_t status;
  315. int orig_size = avpkt->size;
  316. AVPacket pkt = *avpkt;
  317. AVFrame *ret_frame;
  318. if (!s->thread_started) {
  319. pthread_create(&s->decode_thread_id, NULL, &decode_thread, avctx);
  320. s->thread_started = true;
  321. }
  322. if (avpkt && avpkt->data) {
  323. av_bitstream_filter_filter(s->bsfc, avctx, NULL, &pkt.data, &pkt.size,
  324. avpkt->data, avpkt->size, avpkt->flags & AV_PKT_FLAG_KEY);
  325. avpkt = &pkt;
  326. }
  327. if (!s->source_done) {
  328. if(!s->dummy_buf) {
  329. s->dummy_buf = (uint8_t*)av_malloc(avpkt->size);
  330. if (!s->dummy_buf)
  331. return AVERROR(ENOMEM);
  332. s->dummy_bufsize = avpkt->size;
  333. memcpy(s->dummy_buf, avpkt->data, avpkt->size);
  334. }
  335. frame = (Frame*)av_mallocz(sizeof(Frame));
  336. if (avpkt->data) {
  337. frame->status = OK;
  338. frame->size = avpkt->size;
  339. frame->key = avpkt->flags & AV_PKT_FLAG_KEY ? 1 : 0;
  340. frame->buffer = (uint8_t*)av_malloc(avpkt->size);
  341. if (!frame->buffer) {
  342. av_freep(&frame);
  343. return AVERROR(ENOMEM);
  344. }
  345. uint8_t *ptr = avpkt->data;
  346. // The OMX.SEC decoder fails without this.
  347. if (avpkt->size == orig_size + avctx->extradata_size) {
  348. ptr += avctx->extradata_size;
  349. frame->size = orig_size;
  350. }
  351. memcpy(frame->buffer, ptr, orig_size);
  352. if (avpkt == &pkt)
  353. av_free(avpkt->data);
  354. frame->time = ++s->frame_index;
  355. (*s->ts_map)[s->frame_index].pts = avpkt->pts;
  356. (*s->ts_map)[s->frame_index].reordered_opaque = avctx->reordered_opaque;
  357. } else {
  358. frame->status = ERROR_END_OF_STREAM;
  359. s->source_done = true;
  360. }
  361. while (true) {
  362. if (s->thread_exited) {
  363. s->source_done = true;
  364. break;
  365. }
  366. pthread_mutex_lock(&s->in_mutex);
  367. if (s->in_queue->size() >= 10) {
  368. pthread_mutex_unlock(&s->in_mutex);
  369. usleep(10000);
  370. continue;
  371. }
  372. s->in_queue->push_back(frame);
  373. pthread_cond_signal(&s->condition);
  374. pthread_mutex_unlock(&s->in_mutex);
  375. break;
  376. }
  377. }
  378. while (true) {
  379. pthread_mutex_lock(&s->out_mutex);
  380. if (!s->out_queue->empty()) break;
  381. pthread_mutex_unlock(&s->out_mutex);
  382. if (s->source_done) {
  383. usleep(10000);
  384. continue;
  385. } else {
  386. return orig_size;
  387. }
  388. }
  389. frame = *s->out_queue->begin();
  390. s->out_queue->erase(s->out_queue->begin());
  391. pthread_mutex_unlock(&s->out_mutex);
  392. ret_frame = frame->vframe;
  393. status = frame->status;
  394. av_freep(&frame);
  395. if (status == ERROR_END_OF_STREAM)
  396. return 0;
  397. if (status != OK) {
  398. if (status == AVERROR(ENOMEM))
  399. return status;
  400. av_log(avctx, AV_LOG_ERROR, "Decode failed: %x\n", status);
  401. return -1;
  402. }
  403. if (s->prev_frame) {
  404. avctx->release_buffer(avctx, s->prev_frame);
  405. av_freep(&s->prev_frame);
  406. }
  407. s->prev_frame = ret_frame;
  408. *data_size = sizeof(AVFrame);
  409. *(AVFrame*)data = *ret_frame;
  410. return orig_size;
  411. }
// Tear down the decoder: unwedge and join the decode thread, drain both
// queues, stop the OMX decoder, and restore the original avcC extradata
// so a subsequent open (or av_find_stream_info) sees the mp4 format.
static av_cold int Stagefright_close(AVCodecContext *avctx)
{
    StagefrightContext *s = (StagefrightContext*)avctx->priv_data;
    Frame *frame;

    if (s->thread_started) {
        if (!s->thread_exited) {
            s->stop_decode = 1;

            // Make sure decode_thread() doesn't get stuck
            pthread_mutex_lock(&s->out_mutex);
            while (!s->out_queue->empty()) {
                frame = *s->out_queue->begin();
                s->out_queue->erase(s->out_queue->begin());
                if (frame->vframe) {
                    avctx->release_buffer(avctx, frame->vframe);
                    av_freep(&frame->vframe);
                }
                av_freep(&frame);
            }
            pthread_mutex_unlock(&s->out_mutex);

            // Feed a dummy frame prior to signalling EOF.
            // This is required to terminate the decoder(OMX.SEC)
            // when only one frame is read during stream info detection.
            if (s->dummy_buf && (frame = (Frame*)av_mallocz(sizeof(Frame)))) {
                frame->status = OK;
                frame->size   = s->dummy_bufsize;
                frame->key    = 1;
                frame->buffer = s->dummy_buf;
                pthread_mutex_lock(&s->in_mutex);
                s->in_queue->push_back(frame);
                pthread_cond_signal(&s->condition);
                pthread_mutex_unlock(&s->in_mutex);
                s->dummy_buf = NULL; // ownership moved into the queued frame
            }

            // Queue the preallocated EOS marker so CustomSource::read()
            // returns ERROR_END_OF_STREAM and the thread exits.
            pthread_mutex_lock(&s->in_mutex);
            s->end_frame->status = ERROR_END_OF_STREAM;
            s->in_queue->push_back(s->end_frame);
            pthread_cond_signal(&s->condition);
            pthread_mutex_unlock(&s->in_mutex);
            s->end_frame = NULL; // ownership moved into the queue
        }

        pthread_join(s->decode_thread_id, NULL);

        if (s->prev_frame) {
            avctx->release_buffer(avctx, s->prev_frame);
            av_freep(&s->prev_frame);
        }

        s->thread_started = false;
    }

    // Drain anything left in either queue (thread has exited; no locking
    // needed anymore).
    while (!s->in_queue->empty()) {
        frame = *s->in_queue->begin();
        s->in_queue->erase(s->in_queue->begin());
        if (frame->size)
            av_freep(&frame->buffer);
        av_freep(&frame);
    }

    while (!s->out_queue->empty()) {
        frame = *s->out_queue->begin();
        s->out_queue->erase(s->out_queue->begin());
        if (frame->vframe) {
            avctx->release_buffer(avctx, frame->vframe);
            av_freep(&frame->vframe);
        }
        av_freep(&frame);
    }

    (*s->decoder)->stop();
    s->client->disconnect();

    if (s->decoder_component)
        av_freep(&s->decoder_component);
    av_freep(&s->dummy_buf);
    av_freep(&s->end_frame);

    // Reset the extradata back to the original mp4 format, so that
    // the next invocation (both when decoding and when called from
    // av_find_stream_info) get the original mp4 format extradata.
    av_freep(&avctx->extradata);
    avctx->extradata = s->orig_extradata;
    avctx->extradata_size = s->orig_extradata_size;

    delete s->in_queue;
    delete s->out_queue;
    delete s->ts_map;
    delete s->client;
    delete s->decoder;
    delete s->source;

    pthread_mutex_destroy(&s->in_mutex);
    pthread_mutex_destroy(&s->out_mutex);
    pthread_cond_destroy(&s->condition);
    av_bitstream_filter_close(s->bsfc);
    return 0;
}
// Codec registration. Positional (C-style) aggregate initialization is
// used because this is built as C++; each field is labelled below.
AVCodec ff_libstagefright_h264_decoder = {
    "libstagefright_h264",                        // name
    NULL_IF_CONFIG_SMALL("libstagefright H.264"), // long_name
    AVMEDIA_TYPE_VIDEO,                           // type
    AV_CODEC_ID_H264,                             // id
    CODEC_CAP_DELAY,                              // capabilities
    NULL, //supported_framerates
    NULL, //pix_fmts
    NULL, //supported_samplerates
    NULL, //sample_fmts
    NULL, //channel_layouts
    0,    //max_lowres
    NULL, //priv_class
    NULL, //profiles
    sizeof(StagefrightContext),                   // priv_data_size
    NULL, //next
    NULL, //init_thread_copy
    NULL, //update_thread_context
    NULL, //defaults
    NULL, //init_static_data
    Stagefright_init,                             // init
    NULL, //encode
    NULL, //encode2
    Stagefright_decode_frame,                     // decode
    Stagefright_close,                            // close
};