You can not select more than 25 topics Topics must start with a letter or number, can include dashes ('-') and can be up to 35 characters long.

571 lines
17KB

  1. /*
  2. * Interface to the Android Stagefright library for
  3. * H/W accelerated H.264 decoding
  4. *
  5. * Copyright (C) 2011 Mohamed Naufal
  6. * Copyright (C) 2011 Martin Storsjö
  7. *
  8. * This file is part of Libav.
  9. *
  10. * Libav is free software; you can redistribute it and/or
  11. * modify it under the terms of the GNU Lesser General Public
  12. * License as published by the Free Software Foundation; either
  13. * version 2.1 of the License, or (at your option) any later version.
  14. *
  15. * Libav is distributed in the hope that it will be useful,
  16. * but WITHOUT ANY WARRANTY; without even the implied warranty of
  17. * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
  18. * Lesser General Public License for more details.
  19. *
  20. * You should have received a copy of the GNU Lesser General Public
  21. * License along with Libav; if not, write to the Free Software
  22. * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
  23. */
  24. #include <binder/ProcessState.h>
  25. #include <media/stagefright/MetaData.h>
  26. #include <media/stagefright/MediaBufferGroup.h>
  27. #include <media/stagefright/MediaDebug.h>
  28. #include <media/stagefright/MediaDefs.h>
  29. #include <media/stagefright/OMXClient.h>
  30. #include <media/stagefright/OMXCodec.h>
  31. #include <utils/List.h>
  32. #include <new>
  33. #include <map>
  34. extern "C" {
  35. #include "avcodec.h"
  36. #include "libavutil/imgutils.h"
  37. }
  38. #define OMX_QCOM_COLOR_FormatYVU420SemiPlanar 0x7FA30C00
  39. using namespace android;
/* A unit of work passed through the in/out queues between
 * Stagefright_decode_frame(), CustomSource::read() and decode_thread(). */
struct Frame {
    status_t status;      // OK, ERROR_END_OF_STREAM, INFO_FORMAT_CHANGED or an AVERROR code
    size_t size;          // valid byte count in buffer (input) / mbuffer (output)
    int64_t time;         // frame index used as the kKeyTime timestamp key
    int key;              // non-zero if this is a sync (key) frame
    uint8_t *buffer;      // input bitstream data (av_malloc'ed); input queue only
    MediaBuffer* mbuffer; // decoded output buffer from the OMX decoder; output queue only
    int32_t w, h;         // decoded dimensions reported by the decoder's output format
};
/* Original packet timing stashed per input frame index, restored on the
 * matching output frame (the decoder only carries the index via kKeyTime). */
struct TimeStamp {
    int64_t pts;
    int64_t reordered_opaque;
};
  53. class CustomSource;
/* Private codec context for the libstagefright H.264 decoder. */
struct StagefrightContext {
    AVCodecContext *avctx;
    AVBitStreamFilterContext *bsfc;       // h264_mp4toannexb bitstream filter
    uint8_t* orig_extradata;              // saved mp4-style extradata, restored on close
    int orig_extradata_size;
    sp<MediaSource> *source;              // CustomSource feeding input to the decoder
    List<Frame*> *in_queue, *out_queue;   // bounded (< 10 entries) frame queues
    pthread_mutex_t in_mutex, out_mutex;  // protect in_queue / out_queue respectively
    pthread_cond_t condition;             // signalled when in_queue gains an entry
    pthread_t decode_thread_id;
    Frame *end_frame;                     // preallocated frame used to signal EOF/OOM
    bool source_done;                     // true once no more input will be queued
    volatile sig_atomic_t thread_started, thread_exited, stop_decode;
    AVFrame ret_frame;                    // frame handed back to the caller
    std::map<int64_t, TimeStamp> *ts_map; // frame index -> original pts/reordered_opaque
    int64_t frame_index;                  // monotonically increasing input frame counter
    uint8_t *dummy_buf;                   // copy of the first packet, re-fed at close
    int dummy_bufsize;                    //   (works around OMX.SEC shutdown hang)
    OMXClient *client;
    sp<MediaSource> *decoder;             // the OMXCodec decoder instance
    const char *decoder_component;        // decoder component name, av_strdup'ed
};
/* MediaSource implementation that feeds the OMX decoder from the in_queue
 * filled by Stagefright_decode_frame(). */
class CustomSource : public MediaSource {
public:
    CustomSource(AVCodecContext *avctx, sp<MetaData> meta) {
        s = (StagefrightContext*)avctx->priv_data;
        source_meta = meta;
        // Worst-case YUV420 frame size, used to size the single reusable buffer.
        frame_size = (avctx->width * avctx->height * 3) / 2;
        buf_group.add_buffer(new MediaBuffer(frame_size));
    }

    virtual sp<MetaData> getFormat() {
        return source_meta;
    }

    virtual status_t start(MetaData *params) {
        return OK;
    }

    virtual status_t stop() {
        return OK;
    }

    /* Called by the OMX decoder to pull one input access unit. Blocks on the
     * condition variable until in_queue is non-empty, then copies the queued
     * bitstream into a MediaBuffer and frees the queue entry. The queued
     * frame's status (e.g. ERROR_END_OF_STREAM) is propagated as the return
     * value. */
    virtual status_t read(MediaBuffer **buffer,
                          const MediaSource::ReadOptions *options) {
        Frame *frame;
        status_t ret;

        if (s->thread_exited)
            return ERROR_END_OF_STREAM;
        pthread_mutex_lock(&s->in_mutex);

        while (s->in_queue->empty())
            pthread_cond_wait(&s->condition, &s->in_mutex);

        frame = *s->in_queue->begin();
        ret = frame->status;

        if (ret == OK) {
            ret = buf_group.acquire_buffer(buffer);
            if (ret == OK) {
                memcpy((*buffer)->data(), frame->buffer, frame->size);
                (*buffer)->set_range(0, frame->size);
                (*buffer)->meta_data()->clear();
                (*buffer)->meta_data()->setInt32(kKeyIsSyncFrame,frame->key);
                (*buffer)->meta_data()->setInt64(kKeyTime, frame->time);
            } else {
                av_log(s->avctx, AV_LOG_ERROR, "Failed to acquire MediaBuffer\n");
            }
            // The input bitstream copy is consumed (or dropped) either way.
            av_freep(&frame->buffer);
        }

        s->in_queue->erase(s->in_queue->begin());
        pthread_mutex_unlock(&s->in_mutex);

        av_freep(&frame);
        return ret;
    }

private:
    MediaBufferGroup buf_group;  // owns the one MediaBuffer handed to the decoder
    sp<MetaData> source_meta;    // format advertised via getFormat()
    StagefrightContext *s;
    int frame_size;
};
/* Decoder thread entry point: repeatedly reads decoded buffers from the OMX
 * decoder (which in turn pulls input through CustomSource::read()) and pushes
 * them onto out_queue until EOF, an error, or stop_decode is set. */
void* decode_thread(void *arg)
{
    AVCodecContext *avctx = (AVCodecContext*)arg;
    StagefrightContext *s = (StagefrightContext*)avctx->priv_data;
    Frame* frame;
    MediaBuffer *buffer;
    int decode_done = 0;

    do {
        buffer = NULL;
        frame = (Frame*)av_mallocz(sizeof(Frame));
        if (!frame) {
            // Out of memory: fall back to the preallocated end_frame so the
            // ENOMEM status can still be delivered to the consumer.
            frame = s->end_frame;
            frame->status = AVERROR(ENOMEM);
            decode_done = 1;
            s->end_frame = NULL;
        } else {
            frame->status = (*s->decoder)->read(&buffer);
            if (frame->status == OK) {
                sp<MetaData> outFormat = (*s->decoder)->getFormat();
                outFormat->findInt32(kKeyWidth , &frame->w);
                outFormat->findInt32(kKeyHeight, &frame->h);
                frame->size = buffer->range_length();
                frame->mbuffer = buffer;
            } else if (frame->status == INFO_FORMAT_CHANGED) {
                // Format change carries no picture data; drop it and read again.
                if (buffer)
                    buffer->release();
                av_free(frame);
                continue;
            } else {
                decode_done = 1;
            }
        }
        // Bounded queue: poll (mutex released while sleeping) until the
        // consumer drains below 10 entries. On break the mutex is still
        // held, protecting the push_back below.
        while (true) {
            pthread_mutex_lock(&s->out_mutex);
            if (s->out_queue->size() >= 10) {
                pthread_mutex_unlock(&s->out_mutex);
                usleep(10000);
                continue;
            }
            break;
        }
        s->out_queue->push_back(frame);
        pthread_mutex_unlock(&s->out_mutex);
    } while (!decode_done && !s->stop_decode);

    s->thread_exited = true;

    return 0;
}
  175. static av_cold int Stagefright_init(AVCodecContext *avctx)
  176. {
  177. StagefrightContext *s = (StagefrightContext*)avctx->priv_data;
  178. sp<MetaData> meta, outFormat;
  179. int32_t colorFormat = 0;
  180. int ret;
  181. if (!avctx->extradata || !avctx->extradata_size || avctx->extradata[0] != 1)
  182. return -1;
  183. s->avctx = avctx;
  184. s->bsfc = av_bitstream_filter_init("h264_mp4toannexb");
  185. if (!s->bsfc) {
  186. av_log(avctx, AV_LOG_ERROR, "Cannot open the h264_mp4toannexb BSF!\n");
  187. return -1;
  188. }
  189. s->orig_extradata_size = avctx->extradata_size;
  190. s->orig_extradata = (uint8_t*) av_mallocz(avctx->extradata_size +
  191. FF_INPUT_BUFFER_PADDING_SIZE);
  192. if (!s->orig_extradata) {
  193. ret = AVERROR(ENOMEM);
  194. goto fail;
  195. }
  196. memcpy(s->orig_extradata, avctx->extradata, avctx->extradata_size);
  197. meta = new MetaData;
  198. if (meta == NULL) {
  199. ret = AVERROR(ENOMEM);
  200. goto fail;
  201. }
  202. meta->setCString(kKeyMIMEType, MEDIA_MIMETYPE_VIDEO_AVC);
  203. meta->setInt32(kKeyWidth, avctx->width);
  204. meta->setInt32(kKeyHeight, avctx->height);
  205. meta->setData(kKeyAVCC, kTypeAVCC, avctx->extradata, avctx->extradata_size);
  206. android::ProcessState::self()->startThreadPool();
  207. s->source = new sp<MediaSource>();
  208. *s->source = new CustomSource(avctx, meta);
  209. s->in_queue = new List<Frame*>;
  210. s->out_queue = new List<Frame*>;
  211. s->ts_map = new std::map<int64_t, TimeStamp>;
  212. s->client = new OMXClient;
  213. s->end_frame = (Frame*)av_mallocz(sizeof(Frame));
  214. if (s->source == NULL || !s->in_queue || !s->out_queue || !s->client ||
  215. !s->ts_map || !s->end_frame) {
  216. ret = AVERROR(ENOMEM);
  217. goto fail;
  218. }
  219. if (s->client->connect() != OK) {
  220. av_log(avctx, AV_LOG_ERROR, "Cannot connect OMX client\n");
  221. ret = -1;
  222. goto fail;
  223. }
  224. s->decoder = new sp<MediaSource>();
  225. *s->decoder = OMXCodec::Create(s->client->interface(), meta,
  226. false, *s->source, NULL,
  227. OMXCodec::kClientNeedsFramebuffer);
  228. if ((*s->decoder)->start() != OK) {
  229. av_log(avctx, AV_LOG_ERROR, "Cannot start decoder\n");
  230. ret = -1;
  231. s->client->disconnect();
  232. goto fail;
  233. }
  234. outFormat = (*s->decoder)->getFormat();
  235. outFormat->findInt32(kKeyColorFormat, &colorFormat);
  236. if (colorFormat == OMX_QCOM_COLOR_FormatYVU420SemiPlanar ||
  237. colorFormat == OMX_COLOR_FormatYUV420SemiPlanar)
  238. avctx->pix_fmt = PIX_FMT_NV21;
  239. else
  240. avctx->pix_fmt = PIX_FMT_YUV420P;
  241. outFormat->findCString(kKeyDecoderComponent, &s->decoder_component);
  242. if (s->decoder_component)
  243. s->decoder_component = av_strdup(s->decoder_component);
  244. pthread_mutex_init(&s->in_mutex, NULL);
  245. pthread_mutex_init(&s->out_mutex, NULL);
  246. pthread_cond_init(&s->condition, NULL);
  247. return 0;
  248. fail:
  249. av_bitstream_filter_close(s->bsfc);
  250. av_freep(&s->orig_extradata);
  251. av_freep(&s->end_frame);
  252. delete s->in_queue;
  253. delete s->out_queue;
  254. delete s->ts_map;
  255. delete s->client;
  256. return ret;
  257. }
  258. static int Stagefright_decode_frame(AVCodecContext *avctx, void *data,
  259. int *data_size, AVPacket *avpkt)
  260. {
  261. StagefrightContext *s = (StagefrightContext*)avctx->priv_data;
  262. Frame *frame;
  263. MediaBuffer *mbuffer;
  264. status_t status;
  265. size_t size;
  266. uint8_t *buf;
  267. const uint8_t *src_data[3];
  268. int w, h;
  269. int src_linesize[3];
  270. int orig_size = avpkt->size;
  271. AVPacket pkt = *avpkt;
  272. int64_t out_frame_index = 0;
  273. int ret;
  274. if (!s->thread_started) {
  275. pthread_create(&s->decode_thread_id, NULL, &decode_thread, avctx);
  276. s->thread_started = true;
  277. }
  278. if (avpkt && avpkt->data) {
  279. av_bitstream_filter_filter(s->bsfc, avctx, NULL, &pkt.data, &pkt.size,
  280. avpkt->data, avpkt->size, avpkt->flags & AV_PKT_FLAG_KEY);
  281. avpkt = &pkt;
  282. }
  283. if (!s->source_done) {
  284. if(!s->dummy_buf) {
  285. s->dummy_buf = (uint8_t*)av_malloc(avpkt->size);
  286. if (!s->dummy_buf)
  287. return AVERROR(ENOMEM);
  288. s->dummy_bufsize = avpkt->size;
  289. memcpy(s->dummy_buf, avpkt->data, avpkt->size);
  290. }
  291. frame = (Frame*)av_mallocz(sizeof(Frame));
  292. if (avpkt->data) {
  293. frame->status = OK;
  294. frame->size = avpkt->size;
  295. frame->key = avpkt->flags & AV_PKT_FLAG_KEY ? 1 : 0;
  296. frame->buffer = (uint8_t*)av_malloc(avpkt->size);
  297. if (!frame->buffer) {
  298. av_freep(&frame);
  299. return AVERROR(ENOMEM);
  300. }
  301. uint8_t *ptr = avpkt->data;
  302. // The OMX.SEC decoder fails without this.
  303. if (avpkt->size == orig_size + avctx->extradata_size) {
  304. ptr += avctx->extradata_size;
  305. frame->size = orig_size;
  306. }
  307. memcpy(frame->buffer, ptr, orig_size);
  308. frame->time = ++s->frame_index;
  309. (*s->ts_map)[s->frame_index].pts = avpkt->pts;
  310. (*s->ts_map)[s->frame_index].reordered_opaque = avctx->reordered_opaque;
  311. } else {
  312. frame->status = ERROR_END_OF_STREAM;
  313. s->source_done = true;
  314. }
  315. while (true) {
  316. if (s->thread_exited) {
  317. s->source_done = true;
  318. break;
  319. }
  320. pthread_mutex_lock(&s->in_mutex);
  321. if (s->in_queue->size() >= 10) {
  322. pthread_mutex_unlock(&s->in_mutex);
  323. usleep(10000);
  324. continue;
  325. }
  326. s->in_queue->push_back(frame);
  327. pthread_cond_signal(&s->condition);
  328. pthread_mutex_unlock(&s->in_mutex);
  329. break;
  330. }
  331. }
  332. while (true) {
  333. pthread_mutex_lock(&s->out_mutex);
  334. if (!s->out_queue->empty()) break;
  335. pthread_mutex_unlock(&s->out_mutex);
  336. if (s->source_done) {
  337. usleep(10000);
  338. continue;
  339. } else {
  340. return orig_size;
  341. }
  342. }
  343. frame = *s->out_queue->begin();
  344. s->out_queue->erase(s->out_queue->begin());
  345. pthread_mutex_unlock(&s->out_mutex);
  346. mbuffer = frame->mbuffer;
  347. status = frame->status;
  348. size = frame->size;
  349. w = frame->w;
  350. h = frame->h;
  351. av_freep(&frame);
  352. if (status == ERROR_END_OF_STREAM)
  353. return 0;
  354. if (status != OK) {
  355. if (status == AVERROR(ENOMEM))
  356. return status;
  357. av_log(avctx, AV_LOG_ERROR, "Decode failed: %x\n", status);
  358. return -1;
  359. }
  360. // The OMX.SEC decoder doesn't signal the modified width/height
  361. if (s->decoder_component && !strncmp(s->decoder_component, "OMX.SEC", 7) &&
  362. (w & 15 || h & 15)) {
  363. if (((w + 15)&~15) * ((h + 15)&~15) * 3/2 == size) {
  364. w = (w + 15)&~15;
  365. h = (h + 15)&~15;
  366. }
  367. }
  368. if (!avctx->width || !avctx->height || avctx->width > w || avctx->height > h) {
  369. avctx->width = w;
  370. avctx->height = h;
  371. }
  372. ret = avctx->reget_buffer(avctx, &s->ret_frame);
  373. if (ret < 0) {
  374. av_log(avctx, AV_LOG_ERROR, "reget buffer() failed\n");
  375. goto end;
  376. }
  377. src_linesize[0] = w;
  378. if (avctx->pix_fmt == PIX_FMT_YUV420P)
  379. src_linesize[1] = src_linesize[2] = w/2;
  380. else if (avctx->pix_fmt == PIX_FMT_NV21)
  381. src_linesize[1] = w;
  382. buf = (uint8_t*)mbuffer->data();
  383. src_data[0] = buf;
  384. src_data[1] = buf + src_linesize[0] * h;
  385. src_data[2] = src_data[1] + src_linesize[1] * h/2;
  386. av_image_copy(s->ret_frame.data, s->ret_frame.linesize,
  387. src_data, src_linesize,
  388. avctx->pix_fmt, avctx->width, avctx->height);
  389. mbuffer->meta_data()->findInt64(kKeyTime, &out_frame_index);
  390. if (out_frame_index && s->ts_map->count(out_frame_index) > 0) {
  391. s->ret_frame.pts = (*s->ts_map)[out_frame_index].pts;
  392. s->ret_frame.reordered_opaque = (*s->ts_map)[out_frame_index].reordered_opaque;
  393. s->ts_map->erase(out_frame_index);
  394. }
  395. *data_size = sizeof(AVFrame);
  396. *(AVFrame*)data = s->ret_frame;
  397. ret = orig_size;
  398. end:
  399. mbuffer->release();
  400. return ret;
  401. }
/* Shut down the decoder: unblock and join the decode thread, drain both
 * queues, stop the OMX decoder, restore the original extradata and free all
 * context resources. The shutdown sequence is order-sensitive — the output
 * queue must be drained before signalling EOF so the thread isn't stuck on
 * the bounded-queue wait. */
static av_cold int Stagefright_close(AVCodecContext *avctx)
{
    StagefrightContext *s = (StagefrightContext*)avctx->priv_data;
    Frame *frame;

    if (s->thread_started) {
        if (!s->thread_exited) {
            s->stop_decode = 1;

            // Make sure decode_thread() doesn't get stuck
            pthread_mutex_lock(&s->out_mutex);
            while (!s->out_queue->empty()) {
                frame = *s->out_queue->begin();
                s->out_queue->erase(s->out_queue->begin());
                if (frame->size)
                    frame->mbuffer->release();
                av_freep(&frame);
            }
            pthread_mutex_unlock(&s->out_mutex);

            // Feed a dummy frame prior to signalling EOF.
            // This is required to terminate the decoder(OMX.SEC)
            // when only one frame is read during stream info detection.
            if (s->dummy_buf && (frame = (Frame*)av_mallocz(sizeof(Frame)))) {
                frame->status = OK;
                frame->size = s->dummy_bufsize;
                frame->key = 1;
                frame->buffer = s->dummy_buf;
                pthread_mutex_lock(&s->in_mutex);
                s->in_queue->push_back(frame);
                pthread_cond_signal(&s->condition);
                pthread_mutex_unlock(&s->in_mutex);
                // Ownership of dummy_buf moved into the queued frame.
                s->dummy_buf = NULL;
            }

            // Queue the preallocated end_frame to deliver EOF to the decoder.
            pthread_mutex_lock(&s->in_mutex);
            s->end_frame->status = ERROR_END_OF_STREAM;
            s->in_queue->push_back(s->end_frame);
            pthread_cond_signal(&s->condition);
            pthread_mutex_unlock(&s->in_mutex);
            s->end_frame = NULL;
        }

        pthread_join(s->decode_thread_id, NULL);

        if (s->ret_frame.data[0])
            avctx->release_buffer(avctx, &s->ret_frame);

        s->thread_started = false;
    }

    // Drain anything the thread never consumed.
    while (!s->in_queue->empty()) {
        frame = *s->in_queue->begin();
        s->in_queue->erase(s->in_queue->begin());
        if (frame->size)
            av_freep(&frame->buffer);
        av_freep(&frame);
    }

    while (!s->out_queue->empty()) {
        frame = *s->out_queue->begin();
        s->out_queue->erase(s->out_queue->begin());
        if (frame->size)
            frame->mbuffer->release();
        av_freep(&frame);
    }

    (*s->decoder)->stop();
    s->client->disconnect();

    if (s->decoder_component)
        av_freep(&s->decoder_component);
    av_freep(&s->dummy_buf);
    av_freep(&s->end_frame);

    // Reset the extradata back to the original mp4 format, so that
    // the next invocation (both when decoding and when called from
    // av_find_stream_info) get the original mp4 format extradata.
    av_freep(&avctx->extradata);
    avctx->extradata = s->orig_extradata;
    avctx->extradata_size = s->orig_extradata_size;

    delete s->in_queue;
    delete s->out_queue;
    delete s->ts_map;
    delete s->client;
    delete s->decoder;
    delete s->source;

    pthread_mutex_destroy(&s->in_mutex);
    pthread_mutex_destroy(&s->out_mutex);
    pthread_cond_destroy(&s->condition);
    av_bitstream_filter_close(s->bsfc);
    return 0;
}
/* Codec registration (old-style positional AVCodec initialization). */
AVCodec ff_libstagefright_h264_decoder = {
    "libstagefright_h264",         // name
    AVMEDIA_TYPE_VIDEO,            // type
    CODEC_ID_H264,                 // id
    sizeof(StagefrightContext),    // priv_data_size
    Stagefright_init,              // init
    NULL, //encode
    Stagefright_close,             // close
    Stagefright_decode_frame,      // decode
    CODEC_CAP_DELAY,               // capabilities: decoder buffers frames internally
    NULL, //next
    NULL, //flush
    NULL, //supported_framerates
    NULL, //pixel_formats
    NULL_IF_CONFIG_SMALL("libstagefright H.264"), // long_name
};