/*
 * Realmedia RTSP protocol (RDT) support.
 * Copyright (c) 2007 Ronald S. Bultje
 *
 * This file is part of FFmpeg.
 *
 * FFmpeg is free software; you can redistribute it and/or
 * modify it under the terms of the GNU Lesser General Public
 * License as published by the Free Software Foundation; either
 * version 2.1 of the License, or (at your option) any later version.
 *
 * FFmpeg is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
 * Lesser General Public License for more details.
 *
 * You should have received a copy of the GNU Lesser General Public
 * License along with FFmpeg; if not, write to the Free Software
 * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
 */

/**
 * @file
 * @brief Realmedia RTSP protocol (RDT) support
 * @author Ronald S. Bultje <rbultje@ronald.bitfreak.net>
 */

#include "avformat.h"
#include "libavutil/avstring.h"
#include "rtpdec.h"
#include "rdt.h"
#include "libavutil/base64.h"
#include "libavutil/md5.h"
#include "rm.h"
#include "internal.h"
#include "libavcodec/get_bits.h"

struct RDTDemuxContext {
    AVFormatContext *ic; /**< the containing (RTSP) demux context */
    /** Each RDT stream-set (represented by one RTSPStream) can contain
     * multiple streams (of the same content, but with possibly different
     * codecs/bitrates). Each such stream is represented by one AVStream
     * in the AVFormatContext, and this variable points to the offset in
     * that array such that the first is the first stream of this set. */
    AVStream **streams;
    int n_streams; /**< streams with identical content in this set */
    void *dynamic_protocol_context;
    DynamicPayloadPacketHandlerProc parse_packet;
    uint32_t prev_timestamp;
    int prev_set_id, prev_stream_id;
};
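
/**
 * Allocate and initialize an RDTDemuxContext for one set of streams.
 * All consecutive AVStreams starting at first_stream_of_set_idx that share
 * the same priv_data (i.e. that belong to the same RTSPStream) are treated
 * as members of this set.
 */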
RDTDemuxContext *
ff_rdt_parse_open(AVFormatContext *ic, int first_stream_of_set_idx,
                  void *priv_data, RTPDynamicProtocolHandler *handler)
{
    RDTDemuxContext *s = av_mallocz(sizeof(RDTDemuxContext));
    if (!s)
        return NULL;

    s->ic = ic;
    s->streams = &ic->streams[first_stream_of_set_idx];
    do {
        s->n_streams++;
    } while (first_stream_of_set_idx + s->n_streams < ic->nb_streams &&
             s->streams[s->n_streams]->priv_data == s->streams[0]->priv_data);
    s->prev_set_id    = -1;
    s->prev_stream_id = -1;
    s->prev_timestamp = -1;
    s->parse_packet = handler ? handler->parse_packet : NULL;
    s->dynamic_protocol_context = priv_data;

    return s;
}
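
/**
 * Free an RDTDemuxContext. The priv_data pointers of all but the first
 * stream in the set are cleared; they point to the same data as the first
 * stream's.
 */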
void
ff_rdt_parse_close(RDTDemuxContext *s)
{
    int i;

    for (i = 1; i < s->n_streams; i++)
        s->streams[i]->priv_data = NULL;

    av_free(s);
}

struct PayloadContext {
    AVFormatContext *rmctx;
    int nb_rmst;
    RMStream **rmst;
    uint8_t *mlti_data;
    unsigned int mlti_data_size;
    char buffer[RTP_MAX_PACKET_LENGTH + FF_INPUT_BUFFER_PADDING_SIZE];
    int audio_pkt_cnt; /**< remaining audio packets in rmdec */
};
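
/**
 * Compute the response and checksum for a RealChallenge1 string received
 * from a Real server. A rough usage sketch (header formatting is
 * illustrative and may differ per client implementation):
 *
 *   char response[41], chksum[9];
 *   ff_rdt_calc_response_and_checksum(response, chksum, challenge1);
 *   // reply with e.g. "RealChallenge2: <response>,sd=<chksum>"
 */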
void
ff_rdt_calc_response_and_checksum(char response[41], char chksum[9],
                                  const char *challenge)
{
    int ch_len = strlen (challenge), i;
    unsigned char zres[16],
        buf[64] = { 0xa1, 0xe9, 0x14, 0x9d, 0x0e, 0x6b, 0x3b, 0x59 };
#define XOR_TABLE_SIZE 37
    const unsigned char xor_table[XOR_TABLE_SIZE] = {
        0x05, 0x18, 0x74, 0xd0, 0x0d, 0x09, 0x02, 0x53,
        0xc0, 0x01, 0x05, 0x05, 0x67, 0x03, 0x19, 0x70,
        0x08, 0x27, 0x66, 0x10, 0x10, 0x72, 0x08, 0x09,
        0x63, 0x11, 0x03, 0x71, 0x08, 0x08, 0x70, 0x02,
        0x10, 0x57, 0x05, 0x18, 0x54 };

    /* some (length) checks */
    if (ch_len == 40) /* what a hack... */
        ch_len = 32;
    else if (ch_len > 56)
        ch_len = 56;
    memcpy(buf + 8, challenge, ch_len);

    /* xor challenge bytewise with xor_table */
    for (i = 0; i < XOR_TABLE_SIZE; i++)
        buf[8 + i] ^= xor_table[i];

    av_md5_sum(zres, buf, 64);
    ff_data_to_hex(response, zres, 16, 1);

    /* add tail */
    strcpy (response + 32, "01d0a8e3");

    /* calculate checksum */
    for (i = 0; i < 8; i++)
        chksum[i] = response[i * 4];
    chksum[8] = 0;
}
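
/**
 * Find the MDPR chunk for the given rule number inside the previously
 * stored MLTI data and hand it to the RM demuxer, which parses the
 * stream-specific (codec) header data for this AVStream.
 */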
static int
rdt_load_mdpr (PayloadContext *rdt, AVStream *st, int rule_nr)
{
    ByteIOContext pb;
    int size;
    uint32_t tag;

    /**
     * Layout of the MLTI chunk:
     * 4: MLTI
     * 2: number of streams
     * Then for each stream ([number_of_streams] times):
     * 2: mdpr index
     * 2: number of mdpr chunks
     * Then for each mdpr chunk ([number_of_mdpr_chunks] times):
     * 4: size
     * [size]: data
     * we skip MDPR chunks until we reach the one of the stream
     * we're interested in, and forward that ([size]+[data]) to
     * the RM demuxer to parse the stream-specific header data.
     */
    if (!rdt->mlti_data)
        return -1;
    init_put_byte(&pb, rdt->mlti_data, rdt->mlti_data_size, 0,
                  NULL, NULL, NULL, NULL);
    tag = get_le32(&pb);
    if (tag == MKTAG('M', 'L', 'T', 'I')) {
        int num, chunk_nr;

        /* read index of MDPR chunk numbers */
        num = get_be16(&pb);
        if (rule_nr < 0 || rule_nr >= num)
            return -1;
        url_fskip(&pb, rule_nr * 2);
        chunk_nr = get_be16(&pb);
        url_fskip(&pb, (num - 1 - rule_nr) * 2);

        /* read MDPR chunks */
        num = get_be16(&pb);
        if (chunk_nr >= num)
            return -1;
        while (chunk_nr--)
            url_fskip(&pb, get_be32(&pb));
        size = get_be32(&pb);
    } else {
        size = rdt->mlti_data_size;
        url_fseek(&pb, 0, SEEK_SET);
    }
    if (ff_rm_read_mdpr_codecdata(rdt->rmctx, &pb, st, rdt->rmst[st->index], size) < 0)
        return -1;

    return 0;
}
/**
 * Actual data handling.
 */
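
/**
 * Parse the RDT packet header preceding the payload, skipping any leading
 * stream-status packets. Returns the number of bytes consumed, or a
 * negative value on error. The field layout is documented inside the
 * function body.
 *
 * Worked example (hypothetical bytes, for illustration only): for a packet
 * starting with
 *   40 00 01 00 00 00 04 00 xx xx ...
 * the first byte gives len_included=0, need_reliable=1 and set_id=0;
 * bytes 1-2 give seq_no=1; byte 3 gives stream_id=0 and is_keyframe=1;
 * bytes 4-7 give timestamp=0x400; the two bytes after that hold the
 * reliable sequence number (skipped), so 10 bytes are consumed.
 */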
int
ff_rdt_parse_header(const uint8_t *buf, int len,
                    int *pset_id, int *pseq_no, int *pstream_id,
                    int *pis_keyframe, uint32_t *ptimestamp)
{
    GetBitContext gb;
    int consumed = 0, set_id, seq_no, stream_id, is_keyframe,
        len_included, need_reliable;
    uint32_t timestamp;

    /* skip status packets */
    while (len >= 5 && buf[1] == 0xFF /* status packet */) {
        int pkt_len;

        if (!(buf[0] & 0x80))
            return -1; /* not followed by a data packet */

        pkt_len = AV_RB16(buf+3);
        buf += pkt_len;
        len -= pkt_len;
        consumed += pkt_len;
    }
    if (len < 16)
        return -1;
    /**
     * Layout of the header (in bits):
     * 1:  len_included
     *     Flag indicating whether this header includes a length field;
     *     this can be used to concatenate multiple RDT packets in a
     *     single UDP/TCP data frame and is used to precede RDT data
     *     by stream status packets
     * 1:  need_reliable
     *     Flag indicating whether this header includes a "reliable
     *     sequence number"; these are apparently sequence numbers of
     *     data packets alone. For data packets, this flag is always
     *     set, according to the Real documentation [1]
     * 5:  set_id
     *     ID of a set of streams of identical content, possibly with
     *     different codecs or bitrates
     * 1:  is_reliable
     *     Flag set for certain streams deemed less tolerant of packet
     *     loss
     * 16: seq_no
     *     Packet sequence number; if >=0xFF00, this is a non-data packet
     *     containing stream status info, the second byte indicates the
     *     type of status packet (see wireshark docs / source code [2])
     * if (len_included) {
     *     16: packet_len
     * } else {
     *     packet_len = remainder of UDP/TCP frame
     * }
     * 1:  is_back_to_back
     *     Back-to-Back flag; used for timing, set for one in every 10
     *     packets, according to the Real documentation [1]
     * 1:  is_slow_data
     *     Slow-data flag; currently unused, according to Real docs [1]
     * 5:  stream_id
     *     ID of the stream within this particular set of streams
     * 1:  is_no_keyframe
     *     Non-keyframe flag (unset if packet belongs to a keyframe)
     * 32: timestamp (PTS)
     * if (set_id == 0x1F) {
     *     16: set_id (extended set-of-streams ID; see set_id)
     * }
     * if (need_reliable) {
     *     16: reliable_seq_no
     *         Reliable sequence number (see need_reliable)
     * }
     * if (stream_id == 0x1F) {
     *     16: stream_id (extended stream ID; see stream_id)
     * }
     * [1] https://protocol.helixcommunity.org/files/2005/devdocs/RDT_Feature_Level_20.txt
     * [2] http://www.wireshark.org/docs/dfref/r/rdt.html and
     *     http://anonsvn.wireshark.org/viewvc/trunk/epan/dissectors/packet-rdt.c
     */
    init_get_bits(&gb, buf, len << 3);
    len_included  = get_bits1(&gb);
    need_reliable = get_bits1(&gb);
    set_id        = get_bits(&gb, 5);
    skip_bits(&gb, 1);
    seq_no        = get_bits(&gb, 16);
    if (len_included)
        skip_bits(&gb, 16);
    skip_bits(&gb, 2);
    stream_id     = get_bits(&gb, 5);
    is_keyframe   = !get_bits1(&gb);
    timestamp     = get_bits_long(&gb, 32);
    if (set_id == 0x1f)
        set_id    = get_bits(&gb, 16);
    if (need_reliable)
        skip_bits(&gb, 16);
    if (stream_id == 0x1f)
        stream_id = get_bits(&gb, 16);

    if (pset_id)      *pset_id      = set_id;
    if (pseq_no)      *pseq_no      = seq_no;
    if (pstream_id)   *pstream_id   = stream_id;
    if (pis_keyframe) *pis_keyframe = is_keyframe;
    if (ptimestamp)   *ptimestamp   = timestamp;

    return consumed + (get_bits_count(&gb) >> 3);
}
/**< return 0 if a packet was returned and none are left, 1 if a packet was
 *   returned and more are pending, negative on error */
static int
rdt_parse_packet (AVFormatContext *ctx, PayloadContext *rdt, AVStream *st,
                  AVPacket *pkt, uint32_t *timestamp,
                  const uint8_t *buf, int len, int flags)
{
    int seq = 1, res;
    ByteIOContext pb;

    if (rdt->audio_pkt_cnt == 0) {
        int pos;

        init_put_byte(&pb, buf, len, 0, NULL, NULL, NULL, NULL);
        flags = (flags & RTP_FLAG_KEY) ? 2 : 0;
        res = ff_rm_parse_packet (rdt->rmctx, &pb, st, rdt->rmst[st->index], len, pkt,
                                  &seq, flags, *timestamp);
        pos = url_ftell(&pb);
        if (res < 0)
            return res;
        if (res > 0) {
            if (st->codec->codec_id == CODEC_ID_AAC) {
                memcpy (rdt->buffer, buf + pos, len - pos);
                rdt->rmctx->pb = av_alloc_put_byte (rdt->buffer, len - pos, 0,
                                                    NULL, NULL, NULL, NULL);
            }
            goto get_cache;
        }
    } else {
get_cache:
        rdt->audio_pkt_cnt =
            ff_rm_retrieve_cache (rdt->rmctx, rdt->rmctx->pb,
                                  st, rdt->rmst[st->index], pkt);
        if (rdt->audio_pkt_cnt == 0 &&
            st->codec->codec_id == CODEC_ID_AAC)
            av_freep(&rdt->rmctx->pb);
    }
    pkt->stream_index = st->index;
    pkt->pts = *timestamp;

    return rdt->audio_pkt_cnt > 0;
}
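
/**
 * Parse one RDT packet and hand its payload to the payload-specific
 * parse_packet callback. With buf == NULL, any packets still buffered from
 * a previous call are returned instead. The return value is that of the
 * callback: positive if more packets are pending, 0 if not, negative on
 * error.
 */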
int
ff_rdt_parse_packet(RDTDemuxContext *s, AVPacket *pkt,
                    uint8_t **bufptr, int len)
{
    uint8_t *buf = bufptr ? *bufptr : NULL;
    int seq_no, flags = 0, stream_id, set_id, is_keyframe;
    uint32_t timestamp;
    int rv= 0;

    if (!s->parse_packet)
        return -1;

    if (!buf && s->prev_stream_id != -1) {
        /* return the next packets, if any */
        timestamp= 0; ///< Should not be used if buf is NULL, but should be set to the timestamp of the packet returned....
        rv= s->parse_packet(s->ic, s->dynamic_protocol_context,
                            s->streams[s->prev_stream_id],
                            pkt, &timestamp, NULL, 0, flags);
        return rv;
    }

    if (len < 12)
        return -1;
    rv = ff_rdt_parse_header(buf, len, &set_id, &seq_no, &stream_id, &is_keyframe, &timestamp);
    if (rv < 0)
        return rv;
    if (is_keyframe &&
        (set_id != s->prev_set_id || timestamp != s->prev_timestamp ||
         stream_id != s->prev_stream_id)) {
        flags |= RTP_FLAG_KEY;
        s->prev_set_id    = set_id;
        s->prev_timestamp = timestamp;
    }
    s->prev_stream_id = stream_id;
    buf += rv;
    len -= rv;

    if (s->prev_stream_id >= s->n_streams) {
        s->prev_stream_id = -1;
        return -1;
    }

    rv = s->parse_packet(s->ic, s->dynamic_protocol_context,
                         s->streams[s->prev_stream_id],
                         pkt, &timestamp, buf, len, flags);

    return rv;
}
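
/**
 * Append the RDT subscription string for one stream/rule pair to cmd
 * (typically used when building the RTSP "Subscribe" parameter). Rule
 * numbers come in pairs (packet-header marker set / not set), so rule_nr
 * is doubled here; e.g. stream_nr=0, rule_nr=0 appends
 * "stream=0;rule=0,stream=0;rule=1".
 */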
void
ff_rdt_subscribe_rule (char *cmd, int size,
                       int stream_nr, int rule_nr)
{
    av_strlcatf(cmd, size, "stream=%d;rule=%d,stream=%d;rule=%d",
                stream_nr, rule_nr * 2, stream_nr, rule_nr * 2 + 1);
}
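
/**
 * Decode a (possibly double-quoted) base64 string from an SDP attribute
 * into a newly allocated, padded buffer; the decoded length is written to
 * *target_len and the buffer is returned (to be freed by the caller).
 */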
static unsigned char *
rdt_parse_b64buf (unsigned int *target_len, const char *p)
{
    unsigned char *target;
    int len = strlen(p);
    if (*p == '\"') {
        p++;
        len -= 2; /* skip embracing " at start/end */
    }
    *target_len = len * 3 / 4;
    target = av_mallocz(*target_len + FF_INPUT_BUFFER_PADDING_SIZE);
    if (!target)
        return NULL;
    av_base64_decode(target, p, *target_len);
    return target;
}
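
/**
 * Handle one RDT-specific SDP attribute line. Recognized prefixes are
 * "OpaqueData:buffer;" (base64-encoded MLTI chunk), "StartTime:integer;"
 * (initial timestamp) and "ASMRuleBook:string;", which triggers allocation
 * of the per-stream RM demuxer state and loading of the MDPR codec data
 * for every stream in this set.
 */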
static int
rdt_parse_sdp_line (AVFormatContext *s, int st_index,
                    PayloadContext *rdt, const char *line)
{
    AVStream *stream = s->streams[st_index];
    const char *p = line;

    if (av_strstart(p, "OpaqueData:buffer;", &p)) {
        rdt->mlti_data = rdt_parse_b64buf(&rdt->mlti_data_size, p);
    } else if (av_strstart(p, "StartTime:integer;", &p))
        stream->first_dts = atoi(p);
    else if (av_strstart(p, "ASMRuleBook:string;", &p)) {
        int n, first = -1;

        for (n = 0; n < s->nb_streams; n++)
            if (s->streams[n]->priv_data == stream->priv_data) {
                int count = s->streams[n]->index + 1;
                if (first == -1) first = n;
                if (rdt->nb_rmst < count) {
                    RMStream **rmst= av_realloc(rdt->rmst, count*sizeof(*rmst));
                    if (!rmst)
                        return AVERROR(ENOMEM);
                    memset(rmst + rdt->nb_rmst, 0,
                           (count - rdt->nb_rmst) * sizeof(*rmst));
                    rdt->rmst    = rmst;
                    rdt->nb_rmst = count;
                }
                rdt->rmst[s->streams[n]->index] = ff_rm_alloc_rmstream();
                rdt_load_mdpr(rdt, s->streams[n], (n - first) * 2);

                if (s->streams[n]->codec->codec_id == CODEC_ID_AAC)
                    s->streams[n]->codec->frame_size = 1; // FIXME
            }
    }

    return 0;
}
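
/**
 * Scan one ASM rule for an "AverageBandwidth=" statement and store its
 * value as the stream's bit rate; other statements are skipped.
 */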
static void
real_parse_asm_rule(AVStream *st, const char *p, const char *end)
{
    do {
        /* can be either averagebandwidth= or AverageBandwidth= */
        if (sscanf(p, " %*1[Aa]verage%*1[Bb]andwidth=%d", &st->codec->bit_rate) == 1)
            break;
        if (!(p = strchr(p, ',')) || p > end)
            p = end;
        p++;
    } while (p < end);
}
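
/**
 * Add an extra AVStream for an additional bitrate variant of orig_st,
 * sharing its codec type, priv_data and start time.
 */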
static AVStream *
add_dstream(AVFormatContext *s, AVStream *orig_st)
{
    AVStream *st;

    if (!(st = av_new_stream(s, 0)))
        return NULL;
    st->codec->codec_type = orig_st->codec->codec_type;
    st->priv_data         = orig_st->priv_data;
    st->first_dts         = orig_st->first_dts;

    return st;
}
static void
real_parse_asm_rulebook(AVFormatContext *s, AVStream *orig_st,
                        const char *p)
{
    const char *end;
    int n_rules = 0, odd = 0;
    AVStream *st;

    /**
     * The ASMRuleBook contains a list of comma-separated strings per rule,
     * and each rule is separated by a ;. The last one also has a ; at the
     * end so we can use it as delimiter.
     * Every rule occurs twice, once for when the RTSP packet header marker
     * is set and once for if it isn't. We only read the first because we
     * don't care much (that's what the "odd" variable is for).
     * Each rule contains a set of one or more statements, optionally
     * preceded by a single condition. If there's a condition, the rule
     * starts with a '#'. Multiple conditions are merged between brackets,
     * so there are never multiple conditions spread out over separate
     * statements. Generally, these conditions are bitrate limits (min/max)
     * for multi-bitrate streams.
     */
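    /* For illustration (hypothetical values): a two-variant rule book such as
     * "AverageBandwidth=32000;AverageBandwidth=32000;AverageBandwidth=64000;AverageBandwidth=64000;"
     * yields two AVStreams with bit rates 32000 and 64000; only every other
     * rule (the even-numbered ones) is parsed, as described above. */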
    if (*p == '\"') p++;
    while (1) {
        if (!(end = strchr(p, ';')))
            break;
        if (!odd && end != p) {
            if (n_rules > 0)
                st = add_dstream(s, orig_st);
            else
                st = orig_st;
            if (!st)
                break;
            real_parse_asm_rule(st, p, end);
            n_rules++;
        }
        p = end + 1;
        odd ^= 1;
    }
}
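
/**
 * Parse a Real-specific SDP "a=" line; currently only the ASMRuleBook
 * attribute is handled, creating one AVStream per bitrate variant.
 */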
void
ff_real_parse_sdp_a_line (AVFormatContext *s, int stream_index,
                          const char *line)
{
    const char *p = line;

    if (av_strstart(p, "ASMRuleBook:string;", &p))
        real_parse_asm_rulebook(s, s->streams[stream_index], p);
}
static PayloadContext *
rdt_new_context (void)
{
    PayloadContext *rdt = av_mallocz(sizeof(PayloadContext));
    if (!rdt)
        return NULL;

    if (av_open_input_stream(&rdt->rmctx, NULL, "", &ff_rdt_demuxer, NULL) < 0) {
        av_free(rdt);
        return NULL;
    }

    return rdt;
}
static void
rdt_free_context (PayloadContext *rdt)
{
    int i;

    for (i = 0; i < rdt->nb_rmst; i++)
        if (rdt->rmst[i]) {
            ff_rm_free_rmstream(rdt->rmst[i]);
            av_freep(&rdt->rmst[i]);
        }
    if (rdt->rmctx)
        av_close_input_stream(rdt->rmctx);
    av_freep(&rdt->mlti_data);
    av_freep(&rdt->rmst);
    av_free(rdt);
}
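
/**
 * Declare one RTPDynamicProtocolHandler per RealMedia payload MIME type;
 * all four share the same SDP, context and packet-parsing callbacks.
 */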
#define RDT_HANDLER(n, s, t) \
static RTPDynamicProtocolHandler ff_rdt_ ## n ## _handler = { \
    .enc_name         = s, \
    .codec_type       = t, \
    .codec_id         = CODEC_ID_NONE, \
    .parse_sdp_a_line = rdt_parse_sdp_line, \
    .open             = rdt_new_context, \
    .close            = rdt_free_context, \
    .parse_packet     = rdt_parse_packet \
};

RDT_HANDLER(live_video, "x-pn-multirate-realvideo-live", AVMEDIA_TYPE_VIDEO);
RDT_HANDLER(live_audio, "x-pn-multirate-realaudio-live", AVMEDIA_TYPE_AUDIO);
RDT_HANDLER(video,      "x-pn-realvideo",                AVMEDIA_TYPE_VIDEO);
RDT_HANDLER(audio,      "x-pn-realaudio",                AVMEDIA_TYPE_AUDIO);

void av_register_rdt_dynamic_payload_handlers(void)
{
    ff_register_dynamic_payload_handler(&ff_rdt_video_handler);
    ff_register_dynamic_payload_handler(&ff_rdt_audio_handler);
    ff_register_dynamic_payload_handler(&ff_rdt_live_video_handler);
    ff_register_dynamic_payload_handler(&ff_rdt_live_audio_handler);
}