/*
 * "NUT" Container Format demuxer
 * Copyright (c) 2004-2006 Michael Niedermayer
 * Copyright (c) 2003 Alex Beregszaszi
 *
 * This file is part of FFmpeg.
 *
 * FFmpeg is free software; you can redistribute it and/or
 * modify it under the terms of the GNU Lesser General Public
 * License as published by the Free Software Foundation; either
 * version 2.1 of the License, or (at your option) any later version.
 *
 * FFmpeg is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
 * Lesser General Public License for more details.
 *
 * You should have received a copy of the GNU Lesser General Public
 * License along with FFmpeg; if not, write to the Free Software
 * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
 */

#include "tree.h"
#include "nut.h"
#include "avstring.h"

#undef NDEBUG
#include <assert.h>
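
/* Read a NUT variable-length unsigned integer: big-endian groups of 7 bits,
 * where the high bit of each byte signals that another byte follows. */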
static uint64_t get_v(ByteIOContext *bc){
    uint64_t val = 0;
    int tmp;

    do{
        tmp = get_byte(bc);
        val= (val<<7) + (tmp&127);
    }while(tmp&128);
    return val;
}
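
/* Read a length-prefixed string into 'string' (at most maxlen-1 characters
 * plus a terminating 0); excess input bytes are skipped.
 * Returns -1 if the string did not fit, 0 otherwise. */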
static int get_str(ByteIOContext *bc, char *string, unsigned int maxlen){
    unsigned int len= get_v(bc);

    if(len && maxlen)
        get_buffer(bc, string, FFMIN(len, maxlen));
    while(len > maxlen){
        get_byte(bc);
        len--;
    }

    if(maxlen)
        string[FFMIN(len, maxlen-1)]= 0;

    if(maxlen == len)
        return -1;
    else
        return 0;
}
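
/* Read a NUT signed value: the unsigned coding 0, 1, 2, 3, 4, ... maps to
 * 0, 1, -1, 2, -2, ... */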
static int64_t get_s(ByteIOContext *bc){
    int64_t v = get_v(bc) + 1;

    if (v&1) return -(v>>1);
    else     return  (v>>1);
}

static uint64_t get_fourcc(ByteIOContext *bc){
    unsigned int len= get_v(bc);

    if     (len==2) return get_le16(bc);
    else if(len==4) return get_le32(bc);
    else            return -1;
}
#ifdef TRACE
static inline uint64_t get_v_trace(ByteIOContext *bc, char *file, char *func, int line){
    uint64_t v= get_v(bc);

    printf("get_v %5"PRId64" / %"PRIX64" in %s %s:%d\n", v, v, file, func, line);
    return v;
}

static inline int64_t get_s_trace(ByteIOContext *bc, char *file, char *func, int line){
    int64_t v= get_s(bc);

    printf("get_s %5"PRId64" / %"PRIX64" in %s %s:%d\n", v, v, file, func, line);
    return v;
}

static inline uint64_t get_vb_trace(ByteIOContext *bc, char *file, char *func, int line){
    uint64_t v= get_vb(bc);

    printf("get_vb %5"PRId64" / %"PRIX64" in %s %s:%d\n", v, v, file, func, line);
    return v;
}
#define get_v(bc)  get_v_trace(bc, __FILE__, __PRETTY_FUNCTION__, __LINE__)
#define get_s(bc)  get_s_trace(bc, __FILE__, __PRETTY_FUNCTION__, __LINE__)
#define get_vb(bc) get_vb_trace(bc, __FILE__, __PRETTY_FUNCTION__, __LINE__)
#endif
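
/* Read a packet header: the forward pointer (packet size) plus, for packets
 * larger than 4096 bytes, a CRC over the header fields. Resets the checksum
 * state so the packet payload can optionally be checksummed as well.
 * Returns the packet size, or -1 on header checksum mismatch. */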
static int get_packetheader(NUTContext *nut, ByteIOContext *bc, int calculate_checksum)
{
    int64_t size;
//    start= url_ftell(bc) - 8;

    init_checksum(bc, av_crc04C11DB7_update, 0);
    size= get_v(bc);
    if(size > 4096)
        get_be32(bc);
    if(get_checksum(bc) && size > 4096)
        return -1;

    init_checksum(bc, calculate_checksum ? av_crc04C11DB7_update : NULL, 0);

    return size;
}
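
/* Scan forward (optionally from 'pos') for the next known startcode by sliding
 * a 64-bit window over the byte stream. Returns the startcode, or 0 at EOF. */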
static uint64_t find_any_startcode(ByteIOContext *bc, int64_t pos){
    uint64_t state=0;

    if(pos >= 0)
        url_fseek(bc, pos, SEEK_SET); //note, this may fail if the stream is not seekable, but that should not matter, as in this case we simply start where we are currently

    while(!url_feof(bc)){
        state= (state<<8) | get_byte(bc);
        if((state>>56) != 'N')
            continue;
        switch(state){
        case MAIN_STARTCODE:
        case STREAM_STARTCODE:
        case SYNCPOINT_STARTCODE:
        case INFO_STARTCODE:
        case INDEX_STARTCODE:
            return state;
        }
    }

    return 0;
}
/**
 * Find the given startcode.
 * @param code the startcode
 * @param pos the start position of the search, or -1 to search from the current position
 * @returns the position of the startcode, or -1 if not found
 */
static int64_t find_startcode(ByteIOContext *bc, uint64_t code, int64_t pos){
    for(;;){
        uint64_t startcode= find_any_startcode(bc, pos);
        if(startcode == code)
            return url_ftell(bc) - 8;
        else if(startcode == 0)
            return -1;
        pos=-1;
    }
}
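
/* Reconstruct a full pts from its coded least significant bits, choosing the
 * value closest to the stream's last_pts. */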
static int64_t lsb2full(StreamContext *stream, int64_t lsb){
    int64_t mask = (1<<stream->msb_pts_shift)-1;
    int64_t delta= stream->last_pts - mask/2;
    return  ((lsb - delta)&mask) + delta;
}
static int nut_probe(AVProbeData *p){
    int i;
    uint64_t code= 0;

    for (i = 0; i < p->buf_size; i++) {
        code = (code << 8) | p->buf[i];
        if (code == MAIN_STARTCODE)
            return AVPROBE_SCORE_MAX;
    }
    return 0;
}
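
/* Helper macro for header parsing: reads an unsigned value into the caller's
 * 'tmp', validates it with 'check' and assigns it to 'dst'; on failure it logs
 * an error and returns -1 from the calling function. */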
#define GET_V(dst, check) \
    tmp= get_v(bc);\
    if(!(check)){\
        av_log(s, AV_LOG_ERROR, "Error " #dst " is (%"PRId64")\n", tmp);\
        return -1;\
    }\
    dst= tmp;
static int skip_reserved(ByteIOContext *bc, int64_t pos){
    pos -= url_ftell(bc);

    if(pos<0){
        url_fseek(bc, pos, SEEK_CUR);
        return -1;
    }else{
        while(pos--)
            get_byte(bc);

        return 0;
    }
}
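
/* Parse the main header: file version, stream count, max_distance, the time
 * base table and the 256-entry frame code table that maps each frame code
 * byte to flags, stream id, pts delta and size information. */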
static int decode_main_header(NUTContext *nut){
    AVFormatContext *s= nut->avf;
    ByteIOContext *bc = &s->pb;
    uint64_t tmp, end;
    unsigned int stream_count;
    int i, j, tmp_stream, tmp_mul, tmp_pts, tmp_size, count, tmp_res;

    end= get_packetheader(nut, bc, 1);
    end += url_ftell(bc);

    GET_V(tmp              , tmp >=2 && tmp <= 3)
    GET_V(stream_count     , tmp > 0 && tmp <=MAX_STREAMS)

    nut->max_distance = get_v(bc);
    if(nut->max_distance > 65536){
        av_log(s, AV_LOG_DEBUG, "max_distance %d\n", nut->max_distance);
        nut->max_distance= 65536;
    }

    GET_V(nut->time_base_count, tmp>0 && tmp<INT_MAX / sizeof(AVRational))
    nut->time_base= av_malloc(nut->time_base_count * sizeof(AVRational));

    for(i=0; i<nut->time_base_count; i++){
        GET_V(nut->time_base[i].num, tmp>0 && tmp<(1ULL<<31))
        GET_V(nut->time_base[i].den, tmp>0 && tmp<(1ULL<<31))
        if(ff_gcd(nut->time_base[i].num, nut->time_base[i].den) != 1){
            av_log(s, AV_LOG_ERROR, "time base invalid\n");
            return -1;
        }
    }
    tmp_pts=0;
    tmp_mul=1;
    tmp_stream=0;
    for(i=0; i<256;){
        int tmp_flags = get_v(bc);
        int tmp_fields= get_v(bc);
        if(tmp_fields>0) tmp_pts   = get_s(bc);
        if(tmp_fields>1) tmp_mul   = get_v(bc);
        if(tmp_fields>2) tmp_stream= get_v(bc);
        if(tmp_fields>3) tmp_size  = get_v(bc);
        else             tmp_size  = 0;
        if(tmp_fields>4) tmp_res   = get_v(bc);
        else             tmp_res   = 0;
        if(tmp_fields>5) count     = get_v(bc);
        else             count     = tmp_mul - tmp_size;

        while(tmp_fields-- > 6)
            get_v(bc);

        if(count == 0 || i+count > 256){
            av_log(s, AV_LOG_ERROR, "illegal count %d at %d\n", count, i);
            return -1;
        }
        if(tmp_stream >= stream_count){
            av_log(s, AV_LOG_ERROR, "illegal stream number\n");
            return -1;
        }

        for(j=0; j<count; j++,i++){
            if (i == 'N') {
                nut->frame_code[i].flags= FLAG_INVALID;
                j--;
                continue;
            }
            nut->frame_code[i].flags          = tmp_flags ;
            nut->frame_code[i].pts_delta      = tmp_pts   ;
            nut->frame_code[i].stream_id      = tmp_stream;
            nut->frame_code[i].size_mul       = tmp_mul   ;
            nut->frame_code[i].size_lsb       = tmp_size+j;
            nut->frame_code[i].reserved_count = tmp_res   ;
        }
    }
    assert(nut->frame_code['N'].flags == FLAG_INVALID);

    if(skip_reserved(bc, end) || get_checksum(bc)){
        av_log(s, AV_LOG_ERROR, "Main header checksum mismatch\n");
        return -1;
    }

    nut->stream = av_mallocz(sizeof(StreamContext)*stream_count);
    for(i=0; i<stream_count; i++){
        av_new_stream(s, i);
    }

    return 0;
}
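
/* Parse one stream header: stream class, codec tag, time base, msb_pts_shift,
 * decode delay, extradata and the class-specific parameters (video dimensions
 * and aspect ratio, or audio sample rate and channel count). */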
static int decode_stream_header(NUTContext *nut){
    AVFormatContext *s= nut->avf;
    ByteIOContext *bc = &s->pb;
    StreamContext *stc;
    int class, stream_id;
    uint64_t tmp, end;
    AVStream *st;

    end= get_packetheader(nut, bc, 1);
    end += url_ftell(bc);

    GET_V(stream_id, tmp < s->nb_streams && !nut->stream[tmp].time_base);
    stc= &nut->stream[stream_id];

    st = s->streams[stream_id];
    if (!st)
        return AVERROR(ENOMEM);

    class = get_v(bc);
    tmp = get_fourcc(bc);
    st->codec->codec_tag= tmp;
    switch(class)
    {
        case 0:
            st->codec->codec_type = CODEC_TYPE_VIDEO;
            st->codec->codec_id = codec_get_id(codec_bmp_tags, tmp);
            if (st->codec->codec_id == CODEC_ID_NONE)
                av_log(s, AV_LOG_ERROR, "Unknown codec?!\n");
            break;
        case 1:
            st->codec->codec_type = CODEC_TYPE_AUDIO;
            st->codec->codec_id = codec_get_id(codec_wav_tags, tmp);
            if (st->codec->codec_id == CODEC_ID_NONE)
                av_log(s, AV_LOG_ERROR, "Unknown codec?!\n");
            break;
        case 2:
//            st->codec->codec_type = CODEC_TYPE_TEXT;
//            break;
        case 3:
            st->codec->codec_type = CODEC_TYPE_DATA;
            break;
        default:
            av_log(s, AV_LOG_ERROR, "Unknown stream class (%d)\n", class);
            return -1;
    }
    GET_V(stc->time_base_id    , tmp < nut->time_base_count);
    GET_V(stc->msb_pts_shift   , tmp < 16);
    stc->max_pts_distance= get_v(bc);
    GET_V(stc->decode_delay    , tmp < 1000); //sanity limit, raise this if Moore's law holds
    st->codec->has_b_frames= stc->decode_delay;
    get_v(bc); //stream flags

    GET_V(st->codec->extradata_size, tmp < (1<<30));
    if(st->codec->extradata_size){
        st->codec->extradata= av_mallocz(st->codec->extradata_size + FF_INPUT_BUFFER_PADDING_SIZE);
        get_buffer(bc, st->codec->extradata, st->codec->extradata_size);
    }

    if (st->codec->codec_type == CODEC_TYPE_VIDEO){
        GET_V(st->codec->width , tmp > 0)
        GET_V(st->codec->height, tmp > 0)
        st->codec->sample_aspect_ratio.num= get_v(bc);
        st->codec->sample_aspect_ratio.den= get_v(bc);
        if((!st->codec->sample_aspect_ratio.num) != (!st->codec->sample_aspect_ratio.den)){
            av_log(s, AV_LOG_ERROR, "invalid aspect ratio\n");
            return -1;
        }
        get_v(bc); /* csp type */
    }else if (st->codec->codec_type == CODEC_TYPE_AUDIO){
        GET_V(st->codec->sample_rate , tmp > 0)
        tmp= get_v(bc); // samplerate_den
        if(tmp > st->codec->sample_rate){
            av_log(s, AV_LOG_ERROR, "bleh, libnut muxed this ;)\n");
            st->codec->sample_rate= tmp;
        }
        GET_V(st->codec->channels, tmp > 0)
    }
    if(skip_reserved(bc, end) || get_checksum(bc)){
        av_log(s, AV_LOG_ERROR, "Stream header %d checksum mismatch\n", stream_id);
        return -1;
    }
    stc->time_base= &nut->time_base[stc->time_base_id];
    av_set_pts_info(s->streams[stream_id], 63, stc->time_base->num, stc->time_base->den);
    return 0;
}
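
/* Parse an info header: a list of name/value pairs; UTF-8 values that apply
 * to the whole file are copied into the corresponding AVFormatContext fields
 * (author, title, copyright, comment). */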
static int decode_info_header(NUTContext *nut){
    AVFormatContext *s= nut->avf;
    ByteIOContext *bc = &s->pb;
    uint64_t tmp;
    unsigned int stream_id_plus1, chapter_start, chapter_len, count;
    int chapter_id, i;
    int64_t value, end;
    char name[256], str_value[1024], type_str[256], *type= type_str;

    end= get_packetheader(nut, bc, 1);
    end += url_ftell(bc);

    GET_V(stream_id_plus1, tmp <= s->nb_streams)
    chapter_id   = get_s(bc);
    chapter_start= get_v(bc);
    chapter_len  = get_v(bc);
    count        = get_v(bc);
    for(i=0; i<count; i++){
        get_str(bc, name, sizeof(name));
        value= get_s(bc);
        if(value == -1){
            type= "UTF-8";
            get_str(bc, str_value, sizeof(str_value));
        }else if(value == -2){
            get_str(bc, type_str, sizeof(type_str));
            type= type_str;
            get_str(bc, str_value, sizeof(str_value));
        }else if(value == -3){
            type= "s";
            value= get_s(bc);
        }else if(value == -4){
            type= "t";
            value= get_v(bc);
        }else if(value < -4){
            type= "r";
            get_s(bc);
        }else{
            type= "v";
        }

        if(chapter_id==0 && !strcmp(type, "UTF-8")){
            if     (!strcmp(name, "Author"))
                av_strlcpy(s->author   , str_value, sizeof(s->author));
            else if(!strcmp(name, "Title"))
                av_strlcpy(s->title    , str_value, sizeof(s->title));
            else if(!strcmp(name, "Copyright"))
                av_strlcpy(s->copyright, str_value, sizeof(s->copyright));
            else if(!strcmp(name, "Description"))
                av_strlcpy(s->comment  , str_value, sizeof(s->comment));
        }
    }

    if(skip_reserved(bc, end) || get_checksum(bc)){
        av_log(s, AV_LOG_ERROR, "Info header checksum mismatch\n");
        return -1;
    }
    return 0;
}
static int sp_pos_cmp(syncpoint_t *a, syncpoint_t *b){
    return ((a->pos - b->pos) >> 32) - ((b->pos - a->pos) >> 32);
}

static int sp_pts_cmp(syncpoint_t *a, syncpoint_t *b){
    return ((a->ts - b->ts) >> 32) - ((b->ts - a->ts) >> 32);
}
static void add_sp(NUTContext *nut, int64_t pos, int64_t back_ptr, int64_t ts){
    syncpoint_t *sp2, *sp= av_mallocz(sizeof(syncpoint_t));

    sp->pos= pos;
    sp->back_ptr= back_ptr;
    sp->ts= ts;
    sp2= av_tree_insert(&nut->syncpoints, sp, sp_pos_cmp);
    if(sp2 && sp2 != sp)
        av_free(sp);
}
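
/* Parse a syncpoint packet: recover the global timestamp and the back pointer
 * to the preceding syncpoint, reset per-stream pts prediction via
 * ff_nut_reset_ts() and cache the syncpoint for seeking. */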
static int decode_syncpoint(NUTContext *nut, int64_t *ts, int64_t *back_ptr){
    AVFormatContext *s= nut->avf;
    ByteIOContext *bc = &s->pb;
    int64_t end, tmp;
    AVRational time_base;

    nut->last_syncpoint_pos= url_ftell(bc)-8;

    end= get_packetheader(nut, bc, 1);
    end += url_ftell(bc);

    tmp= get_v(bc);
    *back_ptr= nut->last_syncpoint_pos - 16*get_v(bc);
    if(*back_ptr < 0)
        return -1;

    ff_nut_reset_ts(nut, nut->time_base[tmp % nut->time_base_count], tmp);

    if(skip_reserved(bc, end) || get_checksum(bc)){
        av_log(s, AV_LOG_ERROR, "sync point checksum mismatch\n");
        return -1;
    }

    *ts= tmp / nut->time_base_count * av_q2d(nut->time_base[tmp % nut->time_base_count])*AV_TIME_BASE;
    add_sp(nut, nut->last_syncpoint_pos, *back_ptr, *ts);

    return 0;
}
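
/* Locate the index via the index_ptr stored just before the end of the file,
 * read the syncpoint position table and the per-stream run-length coded
 * keyframe flags, and feed the resulting entries into the AVStream index. */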
static int find_and_decode_index(NUTContext *nut){
    AVFormatContext *s= nut->avf;
    ByteIOContext *bc = &s->pb;
    uint64_t tmp, end;
    int i, j, syncpoint_count;
    int64_t filesize= url_fsize(bc);
    int64_t *syncpoints;
    int8_t *has_keyframe;

    url_fseek(bc, filesize-12, SEEK_SET);
    url_fseek(bc, filesize-get_be64(bc), SEEK_SET);
    if(get_be64(bc) != INDEX_STARTCODE){
        av_log(s, AV_LOG_ERROR, "no index at the end\n");
        return -1;
    }

    end= get_packetheader(nut, bc, 1);
    end += url_ftell(bc);

    get_v(bc); //max_pts
    GET_V(syncpoint_count, tmp < INT_MAX/8 && tmp > 0)
    syncpoints= av_malloc(sizeof(int64_t)*syncpoint_count);
    has_keyframe= av_malloc(sizeof(int8_t)*(syncpoint_count+1));
    for(i=0; i<syncpoint_count; i++){
        GET_V(syncpoints[i], tmp>0)
        if(i)
            syncpoints[i] += syncpoints[i-1];
    }

    for(i=0; i<s->nb_streams; i++){
        int64_t last_pts= -1;
        for(j=0; j<syncpoint_count;){
            uint64_t x= get_v(bc);
            int type= x&1;
            int n= j;
            x>>=1;
            if(type){
                int flag= x&1;
                x>>=1;
                if(n+x >= syncpoint_count + 1){
                    av_log(s, AV_LOG_ERROR, "index overflow A\n");
                    return -1;
                }
                while(x--)
                    has_keyframe[n++]= flag;
                has_keyframe[n++]= !flag;
            }else{
                while(x != 1){
                    if(n>=syncpoint_count + 1){
                        av_log(s, AV_LOG_ERROR, "index overflow B\n");
                        return -1;
                    }
                    has_keyframe[n++]= x&1;
                    x>>=1;
                }
            }
            if(has_keyframe[0]){
                av_log(s, AV_LOG_ERROR, "keyframe before first syncpoint in index\n");
                return -1;
            }
            assert(n<=syncpoint_count+1);
            for(; j<n; j++){
                if(has_keyframe[j]){
                    uint64_t B, A= get_v(bc);
                    if(!A){
                        A= get_v(bc);
                        B= get_v(bc);
                        //eor_pts[j][i] = last_pts + A + B
                    }else
                        B= 0;
                    av_add_index_entry(
                        s->streams[i],
                        16*syncpoints[j-1],
                        last_pts + A,
                        0,
                        0,
                        AVINDEX_KEYFRAME);
                    last_pts += A + B;
                }
            }
        }
    }

    if(skip_reserved(bc, end) || get_checksum(bc)){
        av_log(s, AV_LOG_ERROR, "Index checksum mismatch\n");
        return -1;
    }
    return 0;
}
static int nut_read_header(AVFormatContext *s, AVFormatParameters *ap)
{
    NUTContext *nut = s->priv_data;
    ByteIOContext *bc = &s->pb;
    int64_t pos;
    int inited_stream_count;

    nut->avf= s;

    /* main header */
    pos=0;
    do{
        pos= find_startcode(bc, MAIN_STARTCODE, pos)+1;
        if (pos<0+1){
            av_log(s, AV_LOG_ERROR, "no main startcode found\n");
            return -1;
        }
    }while(decode_main_header(nut) < 0);

    /* stream headers */
    pos=0;
    for(inited_stream_count=0; inited_stream_count < s->nb_streams;){
        pos= find_startcode(bc, STREAM_STARTCODE, pos)+1;
        if (pos<0+1){
            av_log(s, AV_LOG_ERROR, "not all stream headers found\n");
            return -1;
        }
        if(decode_stream_header(nut) >= 0)
            inited_stream_count++;
    }

    /* info headers */
    pos=0;
    for(;;){
        uint64_t startcode= find_any_startcode(bc, pos);
        pos= url_ftell(bc);

        if(startcode==0){
            av_log(s, AV_LOG_ERROR, "EOF before video frames\n");
            return -1;
        }else if(startcode == SYNCPOINT_STARTCODE){
            nut->next_startcode= startcode;
            break;
        }else if(startcode != INFO_STARTCODE){
            continue;
        }

        decode_info_header(nut);
    }

    s->data_offset= pos-8;

    if(!url_is_streamed(bc)){
        int64_t orig_pos= url_ftell(bc);
        find_and_decode_index(nut);
        url_fseek(bc, orig_pos, SEEK_SET);
    }
    assert(nut->next_startcode == SYNCPOINT_STARTCODE);

    return 0;
}
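
/* Decode a frame header starting from its frame code byte: resolve flags,
 * stream id, pts and payload size from the frame code table, overridden by
 * explicitly coded fields where the corresponding flags are set.
 * Returns the payload size, or -1 on error. */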
static int decode_frame_header(NUTContext *nut, int64_t *pts, int *stream_id, int frame_code){
    AVFormatContext *s= nut->avf;
    ByteIOContext *bc = &s->pb;
    StreamContext *stc;
    int size, flags, size_mul, pts_delta, i, reserved_count;
    uint64_t tmp;

    if(url_ftell(bc) > nut->last_syncpoint_pos + nut->max_distance){
        av_log(s, AV_LOG_ERROR, "last frame must have been damaged %"PRId64" > %"PRId64" + %d\n", url_ftell(bc), nut->last_syncpoint_pos, nut->max_distance);
        return -1;
    }

    flags          = nut->frame_code[frame_code].flags;
    size_mul       = nut->frame_code[frame_code].size_mul;
    size           = nut->frame_code[frame_code].size_lsb;
    *stream_id     = nut->frame_code[frame_code].stream_id;
    pts_delta      = nut->frame_code[frame_code].pts_delta;
    reserved_count = nut->frame_code[frame_code].reserved_count;

    if(flags & FLAG_INVALID)
        return -1;
    if(flags & FLAG_CODED)
        flags ^= get_v(bc);
    if(flags & FLAG_STREAM_ID){
        GET_V(*stream_id, tmp < s->nb_streams)
    }
    stc= &nut->stream[*stream_id];
    if(flags&FLAG_CODED_PTS){
        int coded_pts= get_v(bc);
//FIXME check last_pts validity?
        if(coded_pts < (1<<stc->msb_pts_shift)){
            *pts=lsb2full(stc, coded_pts);
        }else
            *pts=coded_pts - (1<<stc->msb_pts_shift);
    }else
        *pts= stc->last_pts + pts_delta;
    if(flags&FLAG_SIZE_MSB){
        size += size_mul*get_v(bc);
    }
    if(flags&FLAG_RESERVED)
        reserved_count= get_v(bc);
    for(i=0; i<reserved_count; i++)
        get_v(bc);
    if(flags&FLAG_CHECKSUM){
        get_be32(bc); //FIXME check this
    }else if(size > 2*nut->max_distance || FFABS(stc->last_pts - *pts) > stc->max_pts_distance){
        av_log(s, AV_LOG_ERROR, "frame size > 2max_distance and no checksum\n");
        return -1;
    }

    stc->last_pts= *pts;
    stc->last_flags= flags;

    return size;
}
static int decode_frame(NUTContext *nut, AVPacket *pkt, int frame_code){
    AVFormatContext *s= nut->avf;
    ByteIOContext *bc = &s->pb;
    int size, stream_id, discard;
    int64_t pts, last_IP_pts;
    StreamContext *stc;

    size= decode_frame_header(nut, &pts, &stream_id, frame_code);
    if(size < 0)
        return -1;

    stc= &nut->stream[stream_id];

    if (stc->last_flags & FLAG_KEY)
        stc->skip_until_key_frame=0;

    discard= s->streams[ stream_id ]->discard;
    last_IP_pts= s->streams[ stream_id ]->last_IP_pts;
    if(  (discard >= AVDISCARD_NONKEY && !(stc->last_flags & FLAG_KEY))
       ||(discard >= AVDISCARD_BIDIR  && last_IP_pts != AV_NOPTS_VALUE && last_IP_pts > pts)
       || discard >= AVDISCARD_ALL
       || stc->skip_until_key_frame){
        url_fskip(bc, size);
        return 1;
    }

    av_get_packet(bc, pkt, size);
    pkt->stream_index = stream_id;
    if (stc->last_flags & FLAG_KEY)
        pkt->flags |= PKT_FLAG_KEY;
    pkt->pts = pts;

    return 0;
}
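
/* Read the next packet: handle any pending startcode first, skip or parse
 * non-frame packets, and resync from the last syncpoint when a damaged or
 * unknown frame code is encountered. */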
static int nut_read_packet(AVFormatContext *s, AVPacket *pkt)
{
    NUTContext *nut = s->priv_data;
    ByteIOContext *bc = &s->pb;
    int i, frame_code=0, ret, skip;
    int64_t ts, back_ptr;

    for(;;){
        int64_t pos= url_ftell(bc);
        uint64_t tmp= nut->next_startcode;
        nut->next_startcode=0;

        if(tmp){
            pos-=8;
        }else{
            frame_code = get_byte(bc);
            if(url_feof(bc))
                return -1;
            if(frame_code == 'N'){
                tmp= frame_code;
                for(i=1; i<8; i++)
                    tmp = (tmp<<8) + get_byte(bc);
            }
        }
        switch(tmp){
        case MAIN_STARTCODE:
        case STREAM_STARTCODE:
        case INDEX_STARTCODE:
            skip= get_packetheader(nut, bc, 0);
            url_fseek(bc, skip, SEEK_CUR);
            break;
        case INFO_STARTCODE:
            if(decode_info_header(nut)<0)
                goto resync;
            break;
        case SYNCPOINT_STARTCODE:
            if(decode_syncpoint(nut, &ts, &back_ptr)<0)
                goto resync;
            frame_code = get_byte(bc);
            /* fall through: the first frame follows the syncpoint directly */
        case 0:
            ret= decode_frame(nut, pkt, frame_code);
            if(ret==0)
                return 0;
            else if(ret==1) //ok but discard packet
                break;
            /* fall through to resync on error */
        default:
resync:
            av_log(s, AV_LOG_DEBUG, "syncing from %"PRId64"\n", pos);
            tmp= find_any_startcode(bc, nut->last_syncpoint_pos+1);
            if(tmp==0)
                return -1;
            av_log(s, AV_LOG_DEBUG, "sync\n");
            nut->next_startcode= tmp;
        }
    }
}
static int64_t nut_read_timestamp(AVFormatContext *s, int stream_index, int64_t *pos_arg, int64_t pos_limit){
    NUTContext *nut = s->priv_data;
    ByteIOContext *bc = &s->pb;
    int64_t pos, pts, back_ptr;

    av_log(s, AV_LOG_DEBUG, "read_timestamp(X,%d,%"PRId64",%"PRId64")\n", stream_index, *pos_arg, pos_limit);

    pos= *pos_arg;
resync:
    do{
        pos= find_startcode(bc, SYNCPOINT_STARTCODE, pos)+1;
        if(pos < 1){
            assert(nut->next_startcode == 0);
            av_log(s, AV_LOG_ERROR, "read_timestamp failed\n");
            return AV_NOPTS_VALUE;
        }
    }while(decode_syncpoint(nut, &pts, &back_ptr) < 0);
    *pos_arg = pos-1;
    assert(nut->last_syncpoint_pos == *pos_arg);

    av_log(s, AV_LOG_DEBUG, "return %"PRId64" %"PRId64"\n", pts, back_ptr);
    if     (stream_index == -1) return pts;
    else if(stream_index == -2) return back_ptr;

    assert(0);
}
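
/* Seek: use the index if one was read, otherwise binary-search between cached
 * syncpoints with av_gen_search(), then position the stream on the syncpoint
 * referenced by the back pointer and skip frames until the next keyframe. */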
static int read_seek(AVFormatContext *s, int stream_index, int64_t pts, int flags){
    NUTContext *nut = s->priv_data;
    AVStream *st= s->streams[stream_index];
    syncpoint_t dummy={.ts= pts*av_q2d(st->time_base)*AV_TIME_BASE};
    syncpoint_t nopts_sp= {.ts= AV_NOPTS_VALUE, .back_ptr= AV_NOPTS_VALUE};
    syncpoint_t *sp, *next_node[2]= {&nopts_sp, &nopts_sp};
    int64_t pos, pos2, ts;
    int i;

    if(st->index_entries){
        int index= av_index_search_timestamp(st, pts, flags);
        if(index<0)
            return -1;

        pos2= st->index_entries[index].pos;
        ts  = st->index_entries[index].timestamp;
    }else{
        av_tree_find(nut->syncpoints, &dummy, sp_pts_cmp, next_node);
        av_log(s, AV_LOG_DEBUG, "%"PRIu64"-%"PRIu64" %"PRId64"-%"PRId64"\n", next_node[0]->pos, next_node[1]->pos,
                                                                             next_node[0]->ts , next_node[1]->ts);
        pos= av_gen_search(s, -1, dummy.ts, next_node[0]->pos, next_node[1]->pos, next_node[1]->pos,
                                            next_node[0]->ts , next_node[1]->ts, AVSEEK_FLAG_BACKWARD, &ts, nut_read_timestamp);

        if(!(flags & AVSEEK_FLAG_BACKWARD)){
            dummy.pos= pos+16;
            next_node[1]= &nopts_sp;
            av_tree_find(nut->syncpoints, &dummy, sp_pos_cmp, next_node);
            pos2= av_gen_search(s, -2, dummy.pos, next_node[0]->pos     , next_node[1]->pos, next_node[1]->pos,
                                                  next_node[0]->back_ptr, next_node[1]->back_ptr, flags, &ts, nut_read_timestamp);
            if(pos2>=0)
                pos= pos2;
            //FIXME dir but i think it does not matter
        }
        dummy.pos= pos;
        sp= av_tree_find(nut->syncpoints, &dummy, sp_pos_cmp, NULL);

        assert(sp);
        pos2= sp->back_ptr - 15;
    }
    av_log(NULL, AV_LOG_DEBUG, "SEEKTO: %"PRId64"\n", pos2);
    pos= find_startcode(&s->pb, SYNCPOINT_STARTCODE, pos2);
    url_fseek(&s->pb, pos, SEEK_SET);
    av_log(NULL, AV_LOG_DEBUG, "SP: %"PRId64"\n", pos);
    if(pos2 > pos || pos2 + 15 < pos){
        av_log(NULL, AV_LOG_ERROR, "no syncpoint at backptr pos\n");
    }
    for(i=0; i<s->nb_streams; i++)
        nut->stream[i].skip_until_key_frame=1;

    return 0;
}
static int nut_read_close(AVFormatContext *s)
{
    NUTContext *nut = s->priv_data;

    av_freep(&nut->time_base);
    av_freep(&nut->stream);

    return 0;
}
#ifdef CONFIG_NUT_DEMUXER
AVInputFormat nut_demuxer = {
    "nut",
    "nut format",
    sizeof(NUTContext),
    nut_probe,
    nut_read_header,
    nut_read_packet,
    nut_read_close,
    read_seek,
    .extensions = "nut",
};
#endif