/*
 * "NUT" Container Format demuxer
 * Copyright (c) 2004-2006 Michael Niedermayer
 * Copyright (c) 2003 Alex Beregszaszi
 *
 * This file is part of FFmpeg.
 *
 * FFmpeg is free software; you can redistribute it and/or
 * modify it under the terms of the GNU Lesser General Public
 * License as published by the Free Software Foundation; either
 * version 2.1 of the License, or (at your option) any later version.
 *
 * FFmpeg is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
 * Lesser General Public License for more details.
 *
 * You should have received a copy of the GNU Lesser General Public
 * License along with FFmpeg; if not, write to the Free Software
 * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
 */

#include <strings.h>
#include "libavutil/avstring.h"
#include "libavutil/bswap.h"
#include "libavutil/tree.h"
#include "nut.h"

#undef NDEBUG
#include <assert.h>

static int get_str(ByteIOContext *bc, char *string, unsigned int maxlen){
    unsigned int len= ff_get_v(bc);

    if(len && maxlen)
        get_buffer(bc, string, FFMIN(len, maxlen));
    while(len > maxlen){
        get_byte(bc);
        len--;
    }

    if(maxlen)
        string[FFMIN(len, maxlen-1)]= 0;

    if(maxlen == len)
        return -1;
    else
        return 0;
}
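/* NUT stores signed values as variable-length unsigned integers; get_s()
 * below maps the decoded values 0, 1, 2, 3, 4, ... to 0, 1, -1, 2, -2, ...
 * get_fourcc() reads a length-prefixed codec tag and accepts only 2- or
 * 4-byte little-endian tags, returning -1 for any other length. */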
static int64_t get_s(ByteIOContext *bc){
    int64_t v = ff_get_v(bc) + 1;

    if (v&1) return -(v>>1);
    else     return  (v>>1);
}

static uint64_t get_fourcc(ByteIOContext *bc){
    unsigned int len= ff_get_v(bc);

    if     (len==2) return get_le16(bc);
    else if(len==4) return get_le32(bc);
    else            return -1;
}

#ifdef TRACE
static inline uint64_t get_v_trace(ByteIOContext *bc, char *file, char *func, int line){
    uint64_t v= ff_get_v(bc);

    av_log(NULL, AV_LOG_DEBUG, "get_v %5"PRId64" / %"PRIX64" in %s %s:%d\n", v, v, file, func, line);
    return v;
}

static inline int64_t get_s_trace(ByteIOContext *bc, char *file, char *func, int line){
    int64_t v= get_s(bc);

    av_log(NULL, AV_LOG_DEBUG, "get_s %5"PRId64" / %"PRIX64" in %s %s:%d\n", v, v, file, func, line);
    return v;
}

static inline uint64_t get_vb_trace(ByteIOContext *bc, char *file, char *func, int line){
    uint64_t v= get_vb(bc);

    av_log(NULL, AV_LOG_DEBUG, "get_vb %5"PRId64" / %"PRIX64" in %s %s:%d\n", v, v, file, func, line);
    return v;
}
#define ff_get_v(bc)  get_v_trace (bc, __FILE__, __PRETTY_FUNCTION__, __LINE__)
#define get_s(bc)     get_s_trace (bc, __FILE__, __PRETTY_FUNCTION__, __LINE__)
#define get_vb(bc)    get_vb_trace(bc, __FILE__, __PRETTY_FUNCTION__, __LINE__)
#endif

static int get_packetheader(NUTContext *nut, ByteIOContext *bc, int calculate_checksum, uint64_t startcode)
{
    int64_t size;
//    start= url_ftell(bc) - 8;

    startcode= be2me_64(startcode);
    startcode= ff_crc04C11DB7_update(0, &startcode, 8);

    init_checksum(bc, ff_crc04C11DB7_update, startcode);
    size= ff_get_v(bc);
    if(size > 4096)
        get_be32(bc);
    if(get_checksum(bc) && size > 4096)
        return -1;

    init_checksum(bc, calculate_checksum ? ff_crc04C11DB7_update : NULL, 0);

    return size;
}
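/* All NUT startcodes are 64-bit values whose most significant byte is 'N';
 * the scanners below shift bytes through a 64-bit state register and only
 * compare against the known startcodes once that top byte is in place. */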
static uint64_t find_any_startcode(ByteIOContext *bc, int64_t pos){
    uint64_t state=0;

    if(pos >= 0)
        url_fseek(bc, pos, SEEK_SET); //note, this may fail if the stream is not seekable, but that should not matter, as in this case we simply start where we currently are

    while(!url_feof(bc)){
        state= (state<<8) | get_byte(bc);
        if((state>>56) != 'N')
            continue;
        switch(state){
        case MAIN_STARTCODE:
        case STREAM_STARTCODE:
        case SYNCPOINT_STARTCODE:
        case INFO_STARTCODE:
        case INDEX_STARTCODE:
            return state;
        }
    }

    return 0;
}

/**
 * Find the given startcode.
 * @param code the startcode
 * @param pos the start position of the search, or -1 if the current position
 * @return the position of the startcode or -1 if not found
 */
static int64_t find_startcode(ByteIOContext *bc, uint64_t code, int64_t pos){
    for(;;){
        uint64_t startcode= find_any_startcode(bc, pos);
        if(startcode == code)
            return url_ftell(bc) - 8;
        else if(startcode == 0)
            return -1;
        pos=-1;
    }
}

static int nut_probe(AVProbeData *p){
    int i;
    uint64_t code= 0;

    for (i = 0; i < p->buf_size; i++) {
        code = (code << 8) | p->buf[i];
        if (code == MAIN_STARTCODE)
            return AVPROBE_SCORE_MAX;
    }
    return 0;
}
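/* GET_V reads a variable-length value into the local variable tmp, validates
 * it with the given check expression, and returns -1 from the enclosing
 * function on failure; callers must provide a uint64_t tmp and an
 * AVFormatContext *s for the error message. */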
#define GET_V(dst, check) \
    tmp= ff_get_v(bc);\
    if(!(check)){\
        av_log(s, AV_LOG_ERROR, "Error " #dst " is (%"PRId64")\n", tmp);\
        return -1;\
    }\
    dst= tmp;

static int skip_reserved(ByteIOContext *bc, int64_t pos){
    pos -= url_ftell(bc);

    if(pos<0){
        url_fseek(bc, pos, SEEK_CUR);
        return -1;
    }else{
        while(pos--)
            get_byte(bc);
        return 0;
    }
}
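/* The main header fills the 256-entry frame_code table: each entry stores the
 * flags, stream id, pts delta, size fields, reserved count and elision-header
 * index implied by that frame-code byte.  Entry 'N' is always FLAG_INVALID,
 * since that byte value is reserved as the first byte of startcodes.  The
 * header may also carry a set of elision headers (at most 1024 bytes in
 * total) that decode_frame() later prepends to the packet data. */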
static int decode_main_header(NUTContext *nut){
    AVFormatContext *s= nut->avf;
    ByteIOContext *bc = s->pb;
    uint64_t tmp, end;
    unsigned int stream_count;
    int i, j, tmp_stream, tmp_mul, tmp_pts, tmp_size, count, tmp_res, tmp_head_idx;
    int64_t tmp_match;

    end= get_packetheader(nut, bc, 1, MAIN_STARTCODE);
    end += url_ftell(bc);

    GET_V(tmp          , tmp >= 2 && tmp <= 3)
    GET_V(stream_count , tmp > 0 && tmp <= MAX_STREAMS)

    nut->max_distance = ff_get_v(bc);
    if(nut->max_distance > 65536){
        av_log(s, AV_LOG_DEBUG, "max_distance %d\n", nut->max_distance);
        nut->max_distance= 65536;
    }

    GET_V(nut->time_base_count, tmp>0 && tmp<INT_MAX / sizeof(AVRational))
    nut->time_base= av_malloc(nut->time_base_count * sizeof(AVRational));

    for(i=0; i<nut->time_base_count; i++){
        GET_V(nut->time_base[i].num, tmp>0 && tmp<(1ULL<<31))
        GET_V(nut->time_base[i].den, tmp>0 && tmp<(1ULL<<31))
        if(av_gcd(nut->time_base[i].num, nut->time_base[i].den) != 1){
            av_log(s, AV_LOG_ERROR, "time base invalid\n");
            return -1;
        }
    }
    tmp_pts=0;
    tmp_mul=1;
    tmp_stream=0;
    tmp_match= 1-(1LL<<62);
    tmp_head_idx= 0;
    for(i=0; i<256;){
        int tmp_flags = ff_get_v(bc);
        int tmp_fields= ff_get_v(bc);
        if(tmp_fields>0) tmp_pts   = get_s(bc);
        if(tmp_fields>1) tmp_mul   = ff_get_v(bc);
        if(tmp_fields>2) tmp_stream= ff_get_v(bc);
        if(tmp_fields>3) tmp_size  = ff_get_v(bc);
        else             tmp_size  = 0;
        if(tmp_fields>4) tmp_res   = ff_get_v(bc);
        else             tmp_res   = 0;
        if(tmp_fields>5) count     = ff_get_v(bc);
        else             count     = tmp_mul - tmp_size;
        if(tmp_fields>6) tmp_match = get_s(bc);
        if(tmp_fields>7) tmp_head_idx= ff_get_v(bc);
        while(tmp_fields-- > 8)
            ff_get_v(bc);

        if(count == 0 || i+count > 256){
            av_log(s, AV_LOG_ERROR, "illegal count %d at %d\n", count, i);
            return -1;
        }
        if(tmp_stream >= stream_count){
            av_log(s, AV_LOG_ERROR, "illegal stream number\n");
            return -1;
        }

        for(j=0; j<count; j++,i++){
            if (i == 'N') {
                nut->frame_code[i].flags= FLAG_INVALID;
                j--;
                continue;
            }
            nut->frame_code[i].flags          = tmp_flags ;
            nut->frame_code[i].pts_delta      = tmp_pts   ;
            nut->frame_code[i].stream_id      = tmp_stream;
            nut->frame_code[i].size_mul       = tmp_mul   ;
            nut->frame_code[i].size_lsb       = tmp_size+j;
            nut->frame_code[i].reserved_count = tmp_res   ;
            nut->frame_code[i].header_idx     = tmp_head_idx;
        }
    }
    assert(nut->frame_code['N'].flags == FLAG_INVALID);

    if(end > url_ftell(bc) + 4){
        int rem= 1024;
        GET_V(nut->header_count, tmp<128U)
        nut->header_count++;
        for(i=1; i<nut->header_count; i++){
            GET_V(nut->header_len[i], tmp>0 && tmp<256);
            rem -= nut->header_len[i];
            if(rem < 0){
                av_log(s, AV_LOG_ERROR, "invalid elision header\n");
                return -1;
            }
            nut->header[i]= av_malloc(nut->header_len[i]);
            get_buffer(bc, nut->header[i], nut->header_len[i]);
        }
        assert(nut->header_len[0]==0);
    }

    if(skip_reserved(bc, end) || get_checksum(bc)){
        av_log(s, AV_LOG_ERROR, "main header checksum mismatch\n");
        return -1;
    }

    nut->stream = av_mallocz(sizeof(StreamContext)*stream_count);
    for(i=0; i<stream_count; i++){
        av_new_stream(s, i);
    }

    return 0;
}

static int decode_stream_header(NUTContext *nut){
    AVFormatContext *s= nut->avf;
    ByteIOContext *bc = s->pb;
    StreamContext *stc;
    int class, stream_id;
    uint64_t tmp, end;
    AVStream *st;

    end= get_packetheader(nut, bc, 1, STREAM_STARTCODE);
    end += url_ftell(bc);

    GET_V(stream_id, tmp < s->nb_streams && !nut->stream[tmp].time_base);
    stc= &nut->stream[stream_id];

    st = s->streams[stream_id];
    if (!st)
        return AVERROR(ENOMEM);

    class = ff_get_v(bc);
    tmp = get_fourcc(bc);
    st->codec->codec_tag= tmp;
    switch(class)
    {
        case 0:
            st->codec->codec_type = AVMEDIA_TYPE_VIDEO;
            st->codec->codec_id = av_codec_get_id(
                (const AVCodecTag * const []) { ff_codec_bmp_tags, ff_nut_video_tags, 0 },
                tmp);
            break;
        case 1:
            st->codec->codec_type = AVMEDIA_TYPE_AUDIO;
            st->codec->codec_id = ff_codec_get_id(ff_codec_wav_tags, tmp);
            break;
        case 2:
            st->codec->codec_type = AVMEDIA_TYPE_SUBTITLE;
            st->codec->codec_id = ff_codec_get_id(ff_nut_subtitle_tags, tmp);
            break;
        case 3:
            st->codec->codec_type = AVMEDIA_TYPE_DATA;
            break;
        default:
            av_log(s, AV_LOG_ERROR, "unknown stream class (%d)\n", class);
            return -1;
    }
    if(class<3 && st->codec->codec_id == CODEC_ID_NONE)
        av_log(s, AV_LOG_ERROR, "Unknown codec tag '0x%04x' for stream number %d\n",
               (unsigned int)tmp, stream_id);

    GET_V(stc->time_base_id    , tmp < nut->time_base_count);
    GET_V(stc->msb_pts_shift   , tmp < 16);
    stc->max_pts_distance= ff_get_v(bc);
    GET_V(stc->decode_delay    , tmp < 1000); //sanity limit, raise this if Moore's law is true
    st->codec->has_b_frames= stc->decode_delay;
    ff_get_v(bc); //stream flags

    GET_V(st->codec->extradata_size, tmp < (1<<30));
    if(st->codec->extradata_size){
        st->codec->extradata= av_mallocz(st->codec->extradata_size + FF_INPUT_BUFFER_PADDING_SIZE);
        get_buffer(bc, st->codec->extradata, st->codec->extradata_size);
    }

    if (st->codec->codec_type == AVMEDIA_TYPE_VIDEO){
        GET_V(st->codec->width , tmp > 0)
        GET_V(st->codec->height, tmp > 0)
        st->sample_aspect_ratio.num= ff_get_v(bc);
        st->sample_aspect_ratio.den= ff_get_v(bc);
        if((!st->sample_aspect_ratio.num) != (!st->sample_aspect_ratio.den)){
            av_log(s, AV_LOG_ERROR, "invalid aspect ratio %d/%d\n", st->sample_aspect_ratio.num, st->sample_aspect_ratio.den);
            return -1;
        }
        ff_get_v(bc); /* csp type */
    }else if (st->codec->codec_type == AVMEDIA_TYPE_AUDIO){
        GET_V(st->codec->sample_rate , tmp > 0)
        ff_get_v(bc); // samplerate_den
        GET_V(st->codec->channels, tmp > 0)
    }
    if(skip_reserved(bc, end) || get_checksum(bc)){
        av_log(s, AV_LOG_ERROR, "stream header %d checksum mismatch\n", stream_id);
        return -1;
    }
    stc->time_base= &nut->time_base[stc->time_base_id];
    av_set_pts_info(s->streams[stream_id], 63, stc->time_base->num, stc->time_base->den);
    return 0;
}

static void set_disposition_bits(AVFormatContext* avf, char* value, int stream_id){
    int flag = 0, i;
    for (i=0; ff_nut_dispositions[i].flag; ++i) {
        if (!strcmp(ff_nut_dispositions[i].str, value))
            flag = ff_nut_dispositions[i].flag;
    }
    if (!flag)
        av_log(avf, AV_LOG_INFO, "unknown disposition type '%s'\n", value);
    for (i = 0; i < avf->nb_streams; ++i)
        if (stream_id == i || stream_id == -1)
            avf->streams[i]->disposition |= flag;
}

static int decode_info_header(NUTContext *nut){
    AVFormatContext *s= nut->avf;
    ByteIOContext *bc = s->pb;
    uint64_t tmp, chapter_start, chapter_len;
    unsigned int stream_id_plus1, count;
    int chapter_id, i;
    int64_t value, end;
    char name[256], str_value[1024], type_str[256];
    const char *type;
    AVChapter *chapter= NULL;
    AVStream *st= NULL;

    end= get_packetheader(nut, bc, 1, INFO_STARTCODE);
    end += url_ftell(bc);

    GET_V(stream_id_plus1, tmp <= s->nb_streams)
    chapter_id   = get_s(bc);
    chapter_start= ff_get_v(bc);
    chapter_len  = ff_get_v(bc);
    count        = ff_get_v(bc);

    if(chapter_id && !stream_id_plus1){
        int64_t start= chapter_start / nut->time_base_count;
        chapter= ff_new_chapter(s, chapter_id,
                                nut->time_base[chapter_start % nut->time_base_count],
                                start, start + chapter_len, NULL);
    } else if(stream_id_plus1)
        st= s->streams[stream_id_plus1 - 1];

    for(i=0; i<count; i++){
        get_str(bc, name, sizeof(name));
        value= get_s(bc);
        if(value == -1){
            type= "UTF-8";
            get_str(bc, str_value, sizeof(str_value));
        }else if(value == -2){
            get_str(bc, type_str, sizeof(type_str));
            type= type_str;
            get_str(bc, str_value, sizeof(str_value));
        }else if(value == -3){
            type= "s";
            value= get_s(bc);
        }else if(value == -4){
            type= "t";
            value= ff_get_v(bc);
        }else if(value < -4){
            type= "r";
            get_s(bc);
        }else{
            type= "v";
        }

        if (stream_id_plus1 > s->nb_streams) {
            av_log(s, AV_LOG_ERROR, "invalid stream id for info packet\n");
            continue;
        }

        if(!strcmp(type, "UTF-8")){
            AVMetadata **metadata = NULL;
            if(chapter_id==0 && !strcmp(name, "Disposition"))
                set_disposition_bits(s, str_value, stream_id_plus1 - 1);
            else if(chapter)         metadata= &chapter->metadata;
            else if(stream_id_plus1) metadata= &st->metadata;
            else                     metadata= &s->metadata;
            if(metadata && strcasecmp(name,"Uses")
               && strcasecmp(name,"Depends") && strcasecmp(name,"Replaces"))
                av_metadata_set2(metadata, name, str_value, 0);
        }
    }

    if(skip_reserved(bc, end) || get_checksum(bc)){
        av_log(s, AV_LOG_ERROR, "info header checksum mismatch\n");
        return -1;
    }
    return 0;
}
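/* A syncpoint carries a timestamp multiplexed over all time bases
 * (tmp = pts * time_base_count + time_base_id) and a back pointer, stored as
 * a distance divided by 16, to an earlier syncpoint that seeking can safely
 * resume from. */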
static int decode_syncpoint(NUTContext *nut, int64_t *ts, int64_t *back_ptr){
    AVFormatContext *s= nut->avf;
    ByteIOContext *bc = s->pb;
    int64_t end, tmp;

    nut->last_syncpoint_pos= url_ftell(bc)-8;

    end= get_packetheader(nut, bc, 1, SYNCPOINT_STARTCODE);
    end += url_ftell(bc);

    tmp= ff_get_v(bc);
    *back_ptr= nut->last_syncpoint_pos - 16*ff_get_v(bc);
    if(*back_ptr < 0)
        return -1;

    ff_nut_reset_ts(nut, nut->time_base[tmp % nut->time_base_count], tmp / nut->time_base_count);

    if(skip_reserved(bc, end) || get_checksum(bc)){
        av_log(s, AV_LOG_ERROR, "sync point checksum mismatch\n");
        return -1;
    }

    *ts= tmp / nut->time_base_count * av_q2d(nut->time_base[tmp % nut->time_base_count])*AV_TIME_BASE;
    ff_nut_add_sp(nut, nut->last_syncpoint_pos, *back_ptr, *ts);

    return 0;
}

static int find_and_decode_index(NUTContext *nut){
    AVFormatContext *s= nut->avf;
    ByteIOContext *bc = s->pb;
    uint64_t tmp, end;
    int i, j, syncpoint_count;
    int64_t filesize= url_fsize(bc);
    int64_t *syncpoints;
    int8_t *has_keyframe;
    int ret= -1;

    url_fseek(bc, filesize-12, SEEK_SET);
    url_fseek(bc, filesize-get_be64(bc), SEEK_SET);
    if(get_be64(bc) != INDEX_STARTCODE){
        av_log(s, AV_LOG_ERROR, "no index at the end\n");
        return -1;
    }

    end= get_packetheader(nut, bc, 1, INDEX_STARTCODE);
    end += url_ftell(bc);

    ff_get_v(bc); //max_pts
    GET_V(syncpoint_count, tmp < INT_MAX/8 && tmp > 0)
    syncpoints= av_malloc(sizeof(int64_t)*syncpoint_count);
    has_keyframe= av_malloc(sizeof(int8_t)*(syncpoint_count+1));
    for(i=0; i<syncpoint_count; i++){
        syncpoints[i] = ff_get_v(bc);
        if(syncpoints[i] <= 0)
            goto fail;
        if(i)
            syncpoints[i] += syncpoints[i-1];
    }

    for(i=0; i<s->nb_streams; i++){
        int64_t last_pts= -1;
        for(j=0; j<syncpoint_count;){
            uint64_t x= ff_get_v(bc);
            int type= x&1;
            int n= j;
            x>>=1;
            if(type){
                int flag= x&1;
                x>>=1;
                if(n+x >= syncpoint_count + 1){
                    av_log(s, AV_LOG_ERROR, "index overflow A\n");
                    goto fail;
                }
                while(x--)
                    has_keyframe[n++]= flag;
                has_keyframe[n++]= !flag;
            }else{
                while(x != 1){
                    if(n>=syncpoint_count + 1){
                        av_log(s, AV_LOG_ERROR, "index overflow B\n");
                        goto fail;
                    }
                    has_keyframe[n++]= x&1;
                    x>>=1;
                }
            }
            if(has_keyframe[0]){
                av_log(s, AV_LOG_ERROR, "keyframe before first syncpoint in index\n");
                goto fail;
            }
            assert(n<=syncpoint_count+1);
            for(; j<n && j<syncpoint_count; j++){
                if(has_keyframe[j]){
                    uint64_t B, A= ff_get_v(bc);
                    if(!A){
                        A= ff_get_v(bc);
                        B= ff_get_v(bc);
                        //eor_pts[j][i] = last_pts + A + B
                    }else
                        B= 0;
                    av_add_index_entry(
                        s->streams[i],
                        16*syncpoints[j-1],
                        last_pts + A,
                        0,
                        0,
                        AVINDEX_KEYFRAME);
                    last_pts += A + B;
                }
            }
        }
    }

    if(skip_reserved(bc, end) || get_checksum(bc)){
        av_log(s, AV_LOG_ERROR, "index checksum mismatch\n");
        goto fail;
    }
    ret= 0;
fail:
    av_free(syncpoints);
    av_free(has_keyframe);
    return ret;
}

static int nut_read_header(AVFormatContext *s, AVFormatParameters *ap)
{
    NUTContext *nut = s->priv_data;
    ByteIOContext *bc = s->pb;
    int64_t pos;
    int initialized_stream_count;

    nut->avf= s;

    /* main header */
    pos=0;
    do{
        pos= find_startcode(bc, MAIN_STARTCODE, pos)+1;
        if (pos<0+1){
            av_log(s, AV_LOG_ERROR, "No main startcode found.\n");
            return -1;
        }
    }while(decode_main_header(nut) < 0);

    /* stream headers */
    pos=0;
    for(initialized_stream_count=0; initialized_stream_count < s->nb_streams;){
        pos= find_startcode(bc, STREAM_STARTCODE, pos)+1;
        if (pos<0+1){
            av_log(s, AV_LOG_ERROR, "Not all stream headers found.\n");
            return -1;
        }
        if(decode_stream_header(nut) >= 0)
            initialized_stream_count++;
    }

    /* info headers */
    pos=0;
    for(;;){
        uint64_t startcode= find_any_startcode(bc, pos);
        pos= url_ftell(bc);

        if(startcode==0){
            av_log(s, AV_LOG_ERROR, "EOF before video frames\n");
            return -1;
        }else if(startcode == SYNCPOINT_STARTCODE){
            nut->next_startcode= startcode;
            break;
        }else if(startcode != INFO_STARTCODE){
            continue;
        }

        decode_info_header(nut);
    }

    s->data_offset= pos-8;

    if(!url_is_streamed(bc)){
        int64_t orig_pos= url_ftell(bc);
        find_and_decode_index(nut);
        url_fseek(bc, orig_pos, SEEK_SET);
    }
    assert(nut->next_startcode == SYNCPOINT_STARTCODE);

    return 0;
}
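/* decode_frame_header() starts from the defaults stored in frame_code[] for
 * the given frame code and lets flag bits override individual fields (stream
 * id, pts, size MSB, header index, ...).  A coded pts smaller than
 * 1<<msb_pts_shift holds only the low timestamp bits and is widened against
 * last_pts with ff_lsb2full(); larger values store the full timestamp offset
 * by 1<<msb_pts_shift. */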
static int decode_frame_header(NUTContext *nut, int64_t *pts, int *stream_id, uint8_t *header_idx, int frame_code){
    AVFormatContext *s= nut->avf;
    ByteIOContext *bc = s->pb;
    StreamContext *stc;
    int size, flags, size_mul, pts_delta, i, reserved_count;
    uint64_t tmp;

    if(url_ftell(bc) > nut->last_syncpoint_pos + nut->max_distance){
        av_log(s, AV_LOG_ERROR, "Last frame must have been damaged %"PRId64" > %"PRId64" + %d\n", url_ftell(bc), nut->last_syncpoint_pos, nut->max_distance);
        return -1;
    }

    flags          = nut->frame_code[frame_code].flags;
    size_mul       = nut->frame_code[frame_code].size_mul;
    size           = nut->frame_code[frame_code].size_lsb;
    *stream_id     = nut->frame_code[frame_code].stream_id;
    pts_delta      = nut->frame_code[frame_code].pts_delta;
    reserved_count = nut->frame_code[frame_code].reserved_count;
    *header_idx    = nut->frame_code[frame_code].header_idx;

    if(flags & FLAG_INVALID)
        return -1;
    if(flags & FLAG_CODED)
        flags ^= ff_get_v(bc);
    if(flags & FLAG_STREAM_ID){
        GET_V(*stream_id, tmp < s->nb_streams)
    }
    stc= &nut->stream[*stream_id];
    if(flags&FLAG_CODED_PTS){
        int coded_pts= ff_get_v(bc);
        //FIXME check last_pts validity?
        if(coded_pts < (1<<stc->msb_pts_shift)){
            *pts=ff_lsb2full(stc, coded_pts);
        }else
            *pts=coded_pts - (1<<stc->msb_pts_shift);
    }else
        *pts= stc->last_pts + pts_delta;
    if(flags&FLAG_SIZE_MSB){
        size += size_mul*ff_get_v(bc);
    }
    if(flags&FLAG_MATCH_TIME)
        get_s(bc);
    if(flags&FLAG_HEADER_IDX)
        *header_idx= ff_get_v(bc);
    if(flags&FLAG_RESERVED)
        reserved_count= ff_get_v(bc);
    for(i=0; i<reserved_count; i++)
        ff_get_v(bc);

    if(*header_idx >= (unsigned)nut->header_count){
        av_log(s, AV_LOG_ERROR, "header_idx invalid\n");
        return -1;
    }
    if(size > 4096)
        *header_idx=0;
    size -= nut->header_len[*header_idx];

    if(flags&FLAG_CHECKSUM){
        get_be32(bc); //FIXME check this
    }else if(size > 2*nut->max_distance || FFABS(stc->last_pts - *pts) > stc->max_pts_distance){
        av_log(s, AV_LOG_ERROR, "frame size > 2max_distance and no checksum\n");
        return -1;
    }

    stc->last_pts= *pts;
    stc->last_flags= flags;

    return size;
}

static int decode_frame(NUTContext *nut, AVPacket *pkt, int frame_code){
    AVFormatContext *s= nut->avf;
    ByteIOContext *bc = s->pb;
    int size, stream_id, discard;
    int64_t pts, last_IP_pts;
    StreamContext *stc;
    uint8_t header_idx;

    size= decode_frame_header(nut, &pts, &stream_id, &header_idx, frame_code);
    if(size < 0)
        return -1;

    stc= &nut->stream[stream_id];

    if (stc->last_flags & FLAG_KEY)
        stc->skip_until_key_frame=0;

    discard    = s->streams[ stream_id ]->discard;
    last_IP_pts= s->streams[ stream_id ]->last_IP_pts;
    if(  (discard >= AVDISCARD_NONKEY && !(stc->last_flags & FLAG_KEY))
       ||(discard >= AVDISCARD_BIDIR  && last_IP_pts != AV_NOPTS_VALUE && last_IP_pts > pts)
       || discard >= AVDISCARD_ALL
       || stc->skip_until_key_frame){
        url_fskip(bc, size);
        return 1;
    }

    av_new_packet(pkt, size + nut->header_len[header_idx]);
    memcpy(pkt->data, nut->header[header_idx], nut->header_len[header_idx]);
    pkt->pos= url_ftell(bc); //FIXME
    get_buffer(bc, pkt->data + nut->header_len[header_idx], size);

    pkt->stream_index = stream_id;
    if (stc->last_flags & FLAG_KEY)
        pkt->flags |= AV_PKT_FLAG_KEY;
    pkt->pts = pts;

    return 0;
}
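/* The packet reader treats the next byte either as a frame code or, if it is
 * 'N', as the first byte of a startcode.  Main/stream/index packets found
 * mid-stream are skipped, info packets are parsed, syncpoints update the
 * timestamp state before the following frame is decoded, and anything
 * unrecognized triggers a resync to the next startcode after the last
 * syncpoint. */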
static int nut_read_packet(AVFormatContext *s, AVPacket *pkt)
{
    NUTContext *nut = s->priv_data;
    ByteIOContext *bc = s->pb;
    int i, frame_code=0, ret, skip;
    int64_t ts, back_ptr;

    for(;;){
        int64_t pos= url_ftell(bc);
        uint64_t tmp= nut->next_startcode;
        nut->next_startcode=0;

        if(tmp){
            pos-=8;
        }else{
            frame_code = get_byte(bc);
            if(url_feof(bc))
                return -1;
            if(frame_code == 'N'){
                tmp= frame_code;
                for(i=1; i<8; i++)
                    tmp = (tmp<<8) + get_byte(bc);
            }
        }
        switch(tmp){
        case MAIN_STARTCODE:
        case STREAM_STARTCODE:
        case INDEX_STARTCODE:
            skip= get_packetheader(nut, bc, 0, tmp);
            url_fseek(bc, skip, SEEK_CUR);
            break;
        case INFO_STARTCODE:
            if(decode_info_header(nut)<0)
                goto resync;
            break;
        case SYNCPOINT_STARTCODE:
            if(decode_syncpoint(nut, &ts, &back_ptr)<0)
                goto resync;
            frame_code = get_byte(bc);
            /* fall through: a frame code follows immediately after a syncpoint */
        case 0:
            ret= decode_frame(nut, pkt, frame_code);
            if(ret==0)
                return 0;
            else if(ret==1) //ok but discard packet
                break;
        default:
resync:
            av_log(s, AV_LOG_DEBUG, "syncing from %"PRId64"\n", pos);
            tmp= find_any_startcode(bc, nut->last_syncpoint_pos+1);
            if(tmp==0)
                return -1;
            av_log(s, AV_LOG_DEBUG, "sync\n");
            nut->next_startcode= tmp;
        }
    }
}

static int64_t nut_read_timestamp(AVFormatContext *s, int stream_index, int64_t *pos_arg, int64_t pos_limit){
    NUTContext *nut = s->priv_data;
    ByteIOContext *bc = s->pb;
    int64_t pos, pts, back_ptr;
    av_log(s, AV_LOG_DEBUG, "read_timestamp(X,%d,%"PRId64",%"PRId64")\n", stream_index, *pos_arg, pos_limit);

    pos= *pos_arg;
    do{
        pos= find_startcode(bc, SYNCPOINT_STARTCODE, pos)+1;
        if(pos < 1){
            assert(nut->next_startcode == 0);
            av_log(s, AV_LOG_ERROR, "read_timestamp failed.\n");
            return AV_NOPTS_VALUE;
        }
    }while(decode_syncpoint(nut, &pts, &back_ptr) < 0);
    *pos_arg = pos-1;
    assert(nut->last_syncpoint_pos == *pos_arg);

    av_log(s, AV_LOG_DEBUG, "return %"PRId64" %"PRId64"\n", pts, back_ptr);
    if     (stream_index == -1) return pts;
    else if(stream_index == -2) return back_ptr;

    assert(0);
}
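/* Seeking uses the index when one was decoded; otherwise it binary-searches
 * syncpoints with av_gen_search(), calling nut_read_timestamp() with
 * stream_index -1 to get a syncpoint's timestamp and -2 to get its back
 * pointer.  After positioning, every stream is marked skip_until_key_frame so
 * that decoding resumes at the next keyframe. */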
static int read_seek(AVFormatContext *s, int stream_index, int64_t pts, int flags){
    NUTContext *nut = s->priv_data;
    AVStream *st= s->streams[stream_index];
    Syncpoint dummy={.ts= pts*av_q2d(st->time_base)*AV_TIME_BASE};
    Syncpoint nopts_sp= {.ts= AV_NOPTS_VALUE, .back_ptr= AV_NOPTS_VALUE};
    Syncpoint *sp, *next_node[2]= {&nopts_sp, &nopts_sp};
    int64_t pos, pos2, ts;
    int i;

    if(st->index_entries){
        int index= av_index_search_timestamp(st, pts, flags);
        if(index<0)
            return -1;

        pos2= st->index_entries[index].pos;
        ts  = st->index_entries[index].timestamp;
    }else{
        av_tree_find(nut->syncpoints, &dummy, (void *) ff_nut_sp_pts_cmp,
                     (void **) next_node);
        av_log(s, AV_LOG_DEBUG, "%"PRIu64"-%"PRIu64" %"PRId64"-%"PRId64"\n", next_node[0]->pos, next_node[1]->pos,
                                                                             next_node[0]->ts , next_node[1]->ts);
        pos= av_gen_search(s, -1, dummy.ts, next_node[0]->pos, next_node[1]->pos, next_node[1]->pos,
                           next_node[0]->ts , next_node[1]->ts, AVSEEK_FLAG_BACKWARD, &ts, nut_read_timestamp);

        if(!(flags & AVSEEK_FLAG_BACKWARD)){
            dummy.pos= pos+16;
            next_node[1]= &nopts_sp;
            av_tree_find(nut->syncpoints, &dummy, (void *) ff_nut_sp_pos_cmp,
                         (void **) next_node);
            pos2= av_gen_search(s, -2, dummy.pos, next_node[0]->pos, next_node[1]->pos, next_node[1]->pos,
                                next_node[0]->back_ptr, next_node[1]->back_ptr, flags, &ts, nut_read_timestamp);
            if(pos2>=0)
                pos= pos2;
            //FIXME dir but I think it does not matter
        }
        dummy.pos= pos;
        sp= av_tree_find(nut->syncpoints, &dummy, (void *) ff_nut_sp_pos_cmp,
                         NULL);

        assert(sp);
        pos2= sp->back_ptr - 15;
    }
    av_log(NULL, AV_LOG_DEBUG, "SEEKTO: %"PRId64"\n", pos2);
    pos= find_startcode(s->pb, SYNCPOINT_STARTCODE, pos2);
    url_fseek(s->pb, pos, SEEK_SET);
    av_log(NULL, AV_LOG_DEBUG, "SP: %"PRId64"\n", pos);
    if(pos2 > pos || pos2 + 15 < pos){
        av_log(NULL, AV_LOG_ERROR, "no syncpoint at backptr pos\n");
    }
    for(i=0; i<s->nb_streams; i++)
        nut->stream[i].skip_until_key_frame=1;

    return 0;
}

static int nut_read_close(AVFormatContext *s)
{
    NUTContext *nut = s->priv_data;
    int i;

    av_freep(&nut->time_base);
    av_freep(&nut->stream);
    ff_nut_free_sp(nut);
    for(i = 1; i < nut->header_count; i++)
        av_freep(&nut->header[i]);

    return 0;
}

#if CONFIG_NUT_DEMUXER
AVInputFormat nut_demuxer = {
    "nut",
    NULL_IF_CONFIG_SMALL("NUT format"),
    sizeof(NUTContext),
    nut_probe,
    nut_read_header,
    nut_read_packet,
    nut_read_close,
    read_seek,
    .extensions = "nut",
    .metadata_conv = ff_nut_metadata_conv,
    .codec_tag = (const AVCodecTag * const []) { ff_codec_bmp_tags, ff_nut_video_tags, ff_codec_wav_tags, ff_nut_subtitle_tags, 0 },
};
#endif