/*
 * "NUT" Container Format demuxer
 * Copyright (c) 2004-2006 Michael Niedermayer
 * Copyright (c) 2003 Alex Beregszaszi
 *
 * This file is part of FFmpeg.
 *
 * FFmpeg is free software; you can redistribute it and/or
 * modify it under the terms of the GNU Lesser General Public
 * License as published by the Free Software Foundation; either
 * version 2.1 of the License, or (at your option) any later version.
 *
 * FFmpeg is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
 * Lesser General Public License for more details.
 *
 * You should have received a copy of the GNU Lesser General Public
 * License along with FFmpeg; if not, write to the Free Software
 * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
 *
 */

#include "tree.h"
#include "nut.h"

#undef NDEBUG
#include <assert.h>
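
/* Read an unsigned variable-length integer ("v" coded value): each byte
 * carries 7 payload bits, and the MSB is set on every byte except the last. */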
static uint64_t get_v(ByteIOContext *bc){
    uint64_t val = 0;

    for(;;)
    {
        int tmp = get_byte(bc);

        if (tmp&0x80)
            val= (val<<7) + tmp - 0x80;
        else{
            return (val<<7) + tmp;
        }
    }
    return -1;
}
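
/* Read a length-prefixed string into "string" (at most maxlen-1 characters
 * plus a terminating 0); excess input bytes are skipped. Returns -1 if the
 * string did not fit into the buffer, 0 otherwise. */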
static int get_str(ByteIOContext *bc, char *string, unsigned int maxlen){
    unsigned int len= get_v(bc);

    if(len && maxlen)
        get_buffer(bc, string, FFMIN(len, maxlen));
    while(len > maxlen){
        get_byte(bc);
        len--;
    }

    if(maxlen)
        string[FFMIN(len, maxlen-1)]= 0;

    if(maxlen == len)
        return -1;
    else
        return 0;
}
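
/* Read a signed variable-length integer ("s" coded value): the unsigned code
 * maps to 0, 1, -1, 2, -2, ... (odd codes positive, even codes non-positive). */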
static int64_t get_s(ByteIOContext *bc){
    int64_t v = get_v(bc) + 1;

    if (v&1) return -(v>>1);
    else     return  (v>>1);
}
static uint64_t get_fourcc(ByteIOContext *bc){
    unsigned int len= get_v(bc);

    if     (len==2) return get_le16(bc);
    else if(len==4) return get_le32(bc);
    else            return -1;
}
#ifdef TRACE
static inline uint64_t get_v_trace(ByteIOContext *bc, char *file, char *func, int line){
    uint64_t v= get_v(bc);

    printf("get_v %5"PRId64" / %"PRIX64" in %s %s:%d\n", v, v, file, func, line);
    return v;
}

static inline int64_t get_s_trace(ByteIOContext *bc, char *file, char *func, int line){
    int64_t v= get_s(bc);

    printf("get_s %5"PRId64" / %"PRIX64" in %s %s:%d\n", v, v, file, func, line);
    return v;
}

static inline uint64_t get_vb_trace(ByteIOContext *bc, char *file, char *func, int line){
    uint64_t v= get_vb(bc);

    printf("get_vb %5"PRId64" / %"PRIX64" in %s %s:%d\n", v, v, file, func, line);
    return v;
}
#define get_v(bc)  get_v_trace(bc, __FILE__, __PRETTY_FUNCTION__, __LINE__)
#define get_s(bc)  get_s_trace(bc, __FILE__, __PRETTY_FUNCTION__, __LINE__)
#define get_vb(bc) get_vb_trace(bc, __FILE__, __PRETTY_FUNCTION__, __LINE__)
#endif
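
/* Read the packet size field that follows a startcode and, if requested,
 * arm the running checksum calculation over the packet body. */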
static int get_packetheader(NUTContext *nut, ByteIOContext *bc, int calculate_checksum)
{
    int64_t start, size;
//    start= url_ftell(bc) - 8;

    size= get_v(bc);

    init_checksum(bc, calculate_checksum ? av_crc04C11DB7_update : NULL, 0);

//    nut->packet_start[2] = start;
//    nut->written_packet_size= size;

    return size;
}
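
/* Scan the byte stream for the next 64-bit NUT startcode; every startcode
 * begins with the byte 'N'. Returns the startcode, or 0 on EOF. */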
static uint64_t find_any_startcode(ByteIOContext *bc, int64_t pos){
    uint64_t state=0;

    if(pos >= 0)
        url_fseek(bc, pos, SEEK_SET); //note, this may fail if the stream isn't seekable, but that shouldn't matter, as in this case we simply start where we are currently

    while(!url_feof(bc)){
        state= (state<<8) | get_byte(bc);
        if((state>>56) != 'N')
            continue;
        switch(state){
        case MAIN_STARTCODE:
        case STREAM_STARTCODE:
        case SYNCPOINT_STARTCODE:
        case INFO_STARTCODE:
        case INDEX_STARTCODE:
            return state;
        }
    }

    return 0;
}
/**
 * Find the given startcode.
 * @param code the startcode
 * @param pos the start position of the search, or -1 to search from the current position
 * @return the position of the startcode or -1 if not found
 */
static int64_t find_startcode(ByteIOContext *bc, uint64_t code, int64_t pos){
    for(;;){
        uint64_t startcode= find_any_startcode(bc, pos);
        if(startcode == code)
            return url_ftell(bc) - 8;
        else if(startcode == 0)
            return -1;
        pos=-1;
    }
}
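
/* Reconstruct a full pts from its coded least significant bits: pick the value
 * with those LSBs that lies in a window centered on the stream's last_pts. */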
static int64_t lsb2full(StreamContext *stream, int64_t lsb){
    int64_t mask = (1<<stream->msb_pts_shift)-1;
    int64_t delta= stream->last_pts - mask/2;
    return  ((lsb - delta)&mask) + delta;
}
static int nut_probe(AVProbeData *p){
    int i;
    uint64_t code= 0;

    for (i = 0; i < p->buf_size; i++) {
        code = (code << 8) | p->buf[i];
        if (code == MAIN_STARTCODE)
            return AVPROBE_SCORE_MAX;
    }
    return 0;
}
#define GET_V(dst, check) \
    tmp= get_v(bc);\
    if(!(check)){\
        av_log(s, AV_LOG_ERROR, "Error " #dst " is (%"PRId64")\n", tmp);\
        return -1;\
    }\
    dst= tmp;
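
/* Skip forward to the absolute position "pos" (the end of the packet body,
 * just before its checksum); bytes are consumed with get_byte() so the running
 * checksum stays valid. A target behind the current position is an error. */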
static int skip_reserved(ByteIOContext *bc, int64_t pos){
    pos -= url_ftell(bc);

    if(pos<0){
        url_fseek(bc, pos, SEEK_CUR);
        return -1;
    }else{
        while(pos--)
            get_byte(bc);

        return 0;
    }
}
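
/* Decode the main header: file version, stream count, max_distance, the table
 * of time bases, and the 256-entry frame code table that gives each frame code
 * byte its default flags, stream, pts delta and size coding. */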
static int decode_main_header(NUTContext *nut){
    AVFormatContext *s= nut->avf;
    ByteIOContext *bc = &s->pb;
    uint64_t tmp, end;
    unsigned int stream_count;
    int i, j, tmp_stream, tmp_mul, tmp_pts, tmp_size, count, tmp_res;

    end= get_packetheader(nut, bc, 1);
    end += url_ftell(bc);

    GET_V(tmp              , tmp >=2 && tmp <= 3)
    GET_V(stream_count     , tmp > 0 && tmp <=MAX_STREAMS)

    nut->max_distance = get_v(bc);
    if(nut->max_distance > 65536){
        av_log(s, AV_LOG_DEBUG, "max_distance %d\n", nut->max_distance);
        nut->max_distance= 65536;
    }

    GET_V(nut->time_base_count, tmp>0 && tmp<INT_MAX / sizeof(AVRational))
    nut->time_base= av_malloc(nut->time_base_count * sizeof(AVRational));

    for(i=0; i<nut->time_base_count; i++){
        GET_V(nut->time_base[i].num, tmp>0 && tmp<(1ULL<<31))
        GET_V(nut->time_base[i].den, tmp>0 && tmp<(1ULL<<31))
        if(ff_gcd(nut->time_base[i].num, nut->time_base[i].den) != 1){
            av_log(s, AV_LOG_ERROR, "time base invalid\n");
            return -1;
        }
    }
    tmp_pts=0;
    tmp_mul=1;
    tmp_stream=0;
    for(i=0; i<256;){
        int tmp_flags = get_v(bc);
        int tmp_fields= get_v(bc);
        if(tmp_fields>0) tmp_pts   = get_s(bc);
        if(tmp_fields>1) tmp_mul   = get_v(bc);
        if(tmp_fields>2) tmp_stream= get_v(bc);
        if(tmp_fields>3) tmp_size  = get_v(bc);
        else             tmp_size  = 0;
        if(tmp_fields>4) tmp_res   = get_v(bc);
        else             tmp_res   = 0;
        if(tmp_fields>5) count     = get_v(bc);
        else             count     = tmp_mul - tmp_size;

        while(tmp_fields-- > 6)
            get_v(bc);

        if(count == 0 || i+count > 256){
            av_log(s, AV_LOG_ERROR, "illegal count %d at %d\n", count, i);
            return -1;
        }
        if(tmp_stream >= stream_count){
            av_log(s, AV_LOG_ERROR, "illegal stream number\n");
            return -1;
        }

        for(j=0; j<count; j++,i++){
            if (i == 'N') {
                nut->frame_code[i].flags= FLAG_INVALID;
                j--;
                continue;
            }
            nut->frame_code[i].flags          = tmp_flags ;
            nut->frame_code[i].pts_delta      = tmp_pts   ;
            nut->frame_code[i].stream_id      = tmp_stream;
            nut->frame_code[i].size_mul       = tmp_mul   ;
            nut->frame_code[i].size_lsb       = tmp_size+j;
            nut->frame_code[i].reserved_count = tmp_res   ;
        }
    }
    assert(nut->frame_code['N'].flags == FLAG_INVALID);

    if(skip_reserved(bc, end) || get_checksum(bc)){
        av_log(s, AV_LOG_ERROR, "Main header checksum mismatch\n");
        return -1;
    }

    nut->stream = av_mallocz(sizeof(StreamContext)*stream_count);
    for(i=0; i<stream_count; i++){
        av_new_stream(s, i);
    }

    return 0;
}
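
/* Decode one stream header: codec class and fourcc, time base, msb_pts_shift,
 * decode delay, extradata, and the video/audio specific fields. */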
static int decode_stream_header(NUTContext *nut){
    AVFormatContext *s= nut->avf;
    ByteIOContext *bc = &s->pb;
    StreamContext *stc;
    int class, stream_id;
    uint64_t tmp, end;
    AVStream *st;

    end= get_packetheader(nut, bc, 1);
    end += url_ftell(bc);

    GET_V(stream_id, tmp < s->nb_streams && !nut->stream[tmp].time_base.num);
    stc= &nut->stream[stream_id];

    st = s->streams[stream_id];
    if (!st)
        return AVERROR_NOMEM;

    class = get_v(bc);
    tmp = get_fourcc(bc);
    st->codec->codec_tag= tmp;
    switch(class)
    {
        case 0:
            st->codec->codec_type = CODEC_TYPE_VIDEO;
            st->codec->codec_id = codec_get_bmp_id(tmp);
            if (st->codec->codec_id == CODEC_ID_NONE)
                av_log(s, AV_LOG_ERROR, "Unknown codec?!\n");
            break;
        case 1:
            st->codec->codec_type = CODEC_TYPE_AUDIO;
            st->codec->codec_id = codec_get_wav_id(tmp);
            if (st->codec->codec_id == CODEC_ID_NONE)
                av_log(s, AV_LOG_ERROR, "Unknown codec?!\n");
            break;
        case 2:
//            st->codec->codec_type = CODEC_TYPE_TEXT;
//            break;
        case 3:
            st->codec->codec_type = CODEC_TYPE_DATA;
            break;
        default:
            av_log(s, AV_LOG_ERROR, "Unknown stream class (%d)\n", class);
            return -1;
    }
    GET_V(stc->time_base_id    , tmp < nut->time_base_count);
    GET_V(stc->msb_pts_shift   , tmp < 16);
    stc->max_pts_distance= get_v(bc);
    GET_V(stc->decode_delay    , tmp < 1000); //sanity limit, raise this if Moore's law is true
    st->codec->has_b_frames= stc->decode_delay;
    get_v(bc); //stream flags

    GET_V(st->codec->extradata_size, tmp < (1<<30));
    if(st->codec->extradata_size){
        st->codec->extradata= av_mallocz(st->codec->extradata_size + FF_INPUT_BUFFER_PADDING_SIZE);
        get_buffer(bc, st->codec->extradata, st->codec->extradata_size);
    }

    if (st->codec->codec_type == CODEC_TYPE_VIDEO){
        GET_V(st->codec->width , tmp > 0)
        GET_V(st->codec->height, tmp > 0)
        st->codec->sample_aspect_ratio.num= get_v(bc);
        st->codec->sample_aspect_ratio.den= get_v(bc);
        if((!st->codec->sample_aspect_ratio.num) != (!st->codec->sample_aspect_ratio.den)){
            av_log(s, AV_LOG_ERROR, "invalid aspect ratio\n");
            return -1;
        }
        get_v(bc); /* csp type */
    }else if (st->codec->codec_type == CODEC_TYPE_AUDIO){
        GET_V(st->codec->sample_rate , tmp > 0)
        tmp= get_v(bc); // samplerate_den
        if(tmp > st->codec->sample_rate){
            av_log(s, AV_LOG_ERROR, "bleh, libnut muxed this ;)\n");
            st->codec->sample_rate= tmp;
        }
        GET_V(st->codec->channels, tmp > 0)
    }
    if(skip_reserved(bc, end) || get_checksum(bc)){
        av_log(s, AV_LOG_ERROR, "Stream header %d checksum mismatch\n", stream_id);
        return -1;
    }
    stc->time_base= nut->time_base[stc->time_base_id];
    av_set_pts_info(s->streams[stream_id], 63, stc->time_base.num, stc->time_base.den);
    return 0;
}
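
/* Decode an info packet: chapter range plus a list of name/value metadata
 * pairs; global (chapter_id == 0) UTF-8 values fill the AVFormatContext
 * author/title/copyright/comment fields. */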
static int decode_info_header(NUTContext *nut){
    AVFormatContext *s= nut->avf;
    ByteIOContext *bc = &s->pb;
    uint64_t tmp;
    unsigned int stream_id_plus1, chapter_start, chapter_len, count;
    int chapter_id, i;
    int64_t value, end;
    char name[256], str_value[1024], type_str[256], *type= type_str;

    end= get_packetheader(nut, bc, 1);
    end += url_ftell(bc);

    GET_V(stream_id_plus1, tmp <= s->nb_streams)
    chapter_id   = get_s(bc);
    chapter_start= get_v(bc);
    chapter_len  = get_v(bc);
    count        = get_v(bc);
    for(i=0; i<count; i++){
        get_str(bc, name, sizeof(name));
        value= get_s(bc);
        if(value == -1){
            type= "UTF-8";
            get_str(bc, str_value, sizeof(str_value));
        }else if(value == -2){
            get_str(bc, type_str, sizeof(type_str)); // read into the buffer; sizeof(type) would be the size of a pointer
            type= type_str;
            get_str(bc, str_value, sizeof(str_value));
        }else if(value == -3){
            type= "s";
            value= get_s(bc);
        }else if(value == -4){
            type= "t";
            value= get_v(bc);
        }else if(value < -4){
            type= "r";
            get_s(bc);
        }else{
            type= "v";
        }

        if(chapter_id==0 && !strcmp(type, "UTF-8")){
            if     (!strcmp(name, "Author"))
                pstrcpy(s->author   , sizeof(s->author)   , str_value);
            else if(!strcmp(name, "Title"))
                pstrcpy(s->title    , sizeof(s->title)    , str_value);
            else if(!strcmp(name, "Copyright"))
                pstrcpy(s->copyright, sizeof(s->copyright), str_value);
            else if(!strcmp(name, "Description"))
                pstrcpy(s->comment  , sizeof(s->comment)  , str_value);
        }
    }

    if(skip_reserved(bc, end) || get_checksum(bc)){
        av_log(s, AV_LOG_ERROR, "Info header checksum mismatch\n");
        return -1;
    }
    return 0;
}
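
/* Syncpoint tree comparators: the differences are shifted down so the result
 * fits the int return value while keeping the sign of the 64-bit comparison. */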
int sp_pos_cmp(syncpoint_t *a, syncpoint_t *b){
    return ((a->pos - b->pos)>>32) - ((b->pos - a->pos)>>32);
}

int sp_pts_cmp(syncpoint_t *a, syncpoint_t *b){
    return ((a->ts - b->ts)>>32) - ((b->ts - a->ts)>>32);
}
static void add_sp(NUTContext *nut, int64_t pos, int64_t back_ptr, int64_t ts){
    syncpoint_t *sp2, *sp= av_mallocz(sizeof(syncpoint_t));

    sp->pos= pos;
    sp->back_ptr= back_ptr;
    sp->ts= ts;
    sp2= av_tree_insert(&nut->syncpoints, sp, sp_pos_cmp);
    if(sp2 && sp2 != sp)
        av_free(sp);
}
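
/* Decode a syncpoint packet: global timestamp (multiplexed with the time base
 * index) and back pointer, update every stream's last_pts, and cache the
 * syncpoint in the seek tree. */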
static int decode_syncpoint(NUTContext *nut, int64_t *ts, int64_t *back_ptr){
    AVFormatContext *s= nut->avf;
    ByteIOContext *bc = &s->pb;
    int64_t end, tmp;
    int i;
    AVRational time_base;

    nut->last_syncpoint_pos= url_ftell(bc)-8;

    end= get_packetheader(nut, bc, 1);
    end += url_ftell(bc);

    tmp= get_v(bc);
    *back_ptr= nut->last_syncpoint_pos - 16*get_v(bc);
    if(*back_ptr < 0)
        return -1;

    time_base= nut->time_base[tmp % nut->time_base_count];
    for(i=0; i<s->nb_streams; i++){
        nut->stream[i].last_pts= av_rescale_rnd(
            tmp / nut->time_base_count,
            time_base.num * (int64_t)nut->stream[i].time_base.den,
            time_base.den * (int64_t)nut->stream[i].time_base.num,
            AV_ROUND_DOWN);
        //last_key_frame ?
    }
    //FIXME put this in a reset func maybe

    if(skip_reserved(bc, end) || get_checksum(bc)){
        av_log(s, AV_LOG_ERROR, "sync point checksum mismatch\n");
        return -1;
    }

    // decompose with time_base_count, matching the last_pts computation above
    *ts= tmp / nut->time_base_count * av_q2d(nut->time_base[tmp % nut->time_base_count])*AV_TIME_BASE;
    add_sp(nut, nut->last_syncpoint_pos, *back_ptr, *ts);

    return 0;
}
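
/* Locate the index at the end of the file (its distance from EOF is stored in
 * the 8 bytes at filesize-12), then decode the syncpoint positions and the
 * per-stream keyframe run-length data into index entries. */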
static int find_and_decode_index(NUTContext *nut){
    AVFormatContext *s= nut->avf;
    ByteIOContext *bc = &s->pb;
    uint64_t tmp, end;
    int i, j, syncpoint_count;
    int64_t filesize= url_fsize(bc);
    int64_t *syncpoints;
    int8_t *has_keyframe;

    url_fseek(bc, filesize-12, SEEK_SET);
    url_fseek(bc, filesize-get_be64(bc), SEEK_SET);
    if(get_be64(bc) != INDEX_STARTCODE){
        av_log(s, AV_LOG_ERROR, "no index at the end\n");
        return -1;
    }

    end= get_packetheader(nut, bc, 1);
    end += url_ftell(bc);

    get_v(bc); //max_pts
    GET_V(syncpoint_count, tmp < INT_MAX/8 && tmp > 0)
    syncpoints= av_malloc(sizeof(int64_t)*syncpoint_count);
    has_keyframe= av_malloc(sizeof(int8_t)*(syncpoint_count+1));
    for(i=0; i<syncpoint_count; i++){
        GET_V(syncpoints[i], tmp>0)
        if(i)
            syncpoints[i] += syncpoints[i-1];
    }

    for(i=0; i<s->nb_streams; i++){
        int64_t last_pts= -1;
        for(j=0; j<syncpoint_count;){
            uint64_t x= get_v(bc);
            int type= x&1;
            int n= j;
            x>>=1;
            if(type){
                int flag= x&1;
                x>>=1;
                if(n+x >= syncpoint_count + 1){
                    av_log(s, AV_LOG_ERROR, "index overflow A\n");
                    return -1;
                }
                while(x--)
                    has_keyframe[n++]= flag;
                has_keyframe[n++]= !flag;
            }else{
                while(x != 1){
                    if(n>=syncpoint_count + 1){
                        av_log(s, AV_LOG_ERROR, "index overflow B\n");
                        return -1;
                    }
                    has_keyframe[n++]= x&1;
                    x>>=1;
                }
            }
            if(has_keyframe[0]){
                av_log(s, AV_LOG_ERROR, "keyframe before first syncpoint in index\n");
                return -1;
            }
            assert(n<=syncpoint_count+1);
            for(; j<n; j++){
                if(has_keyframe[j]){
                    uint64_t B, A= get_v(bc);
                    if(!A){
                        A= get_v(bc);
                        B= get_v(bc);
                        //eor_pts[j][i] = last_pts + A + B
                    }else
                        B= 0;
                    av_add_index_entry(
                        s->streams[i],
                        16*syncpoints[j-1],
                        last_pts + A,
                        0,
                        0,
                        AVINDEX_KEYFRAME);
                    last_pts += A + B;
                }
            }
        }
    }

    if(skip_reserved(bc, end) || get_checksum(bc)){
        av_log(s, AV_LOG_ERROR, "Index checksum mismatch\n");
        return -1;
    }
    return 0;
}
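
/* Demuxer entry point for header parsing: find and decode the main header,
 * one header per stream, any info packets up to the first syncpoint, and,
 * for seekable input, the index at the end of the file. */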
static int nut_read_header(AVFormatContext *s, AVFormatParameters *ap)
{
    NUTContext *nut = s->priv_data;
    ByteIOContext *bc = &s->pb;
    int64_t pos;
    int inited_stream_count;

    nut->avf= s;

    /* main header */
    pos=0;
    do{
        pos= find_startcode(bc, MAIN_STARTCODE, pos)+1;
        if (pos<0+1){
            av_log(s, AV_LOG_ERROR, "no main startcode found\n");
            return -1;
        }
    }while(decode_main_header(nut) < 0);

    /* stream headers */
    pos=0;
    for(inited_stream_count=0; inited_stream_count < s->nb_streams;){
        pos= find_startcode(bc, STREAM_STARTCODE, pos)+1;
        if (pos<0+1){
            av_log(s, AV_LOG_ERROR, "not all stream headers found\n");
            return -1;
        }
        if(decode_stream_header(nut) >= 0)
            inited_stream_count++;
    }

    /* info headers */
    pos=0;
    for(;;){
        uint64_t startcode= find_any_startcode(bc, pos);
        pos= url_ftell(bc);

        if(startcode==0){
            av_log(s, AV_LOG_ERROR, "EOF before video frames\n");
            return -1;
        }else if(startcode == SYNCPOINT_STARTCODE){
            nut->next_startcode= startcode;
            break;
        }else if(startcode != INFO_STARTCODE){
            continue;
        }

        decode_info_header(nut);
    }

    s->data_offset= pos-8;

    if(!url_is_streamed(bc)){
        int64_t orig_pos= url_ftell(bc);
        find_and_decode_index(nut);
        url_fseek(bc, orig_pos, SEEK_SET);
    }
    assert(nut->next_startcode == SYNCPOINT_STARTCODE);

    return 0;
}
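
/* Decode a frame header: look up the defaults for this frame code in the main
 * header table, then read whatever is flagged as explicitly coded (flags,
 * stream id, pts, size MSB, reserved fields, checksum). Returns the payload
 * size, or -1 on error. */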
static int decode_frame_header(NUTContext *nut, int64_t *pts, int *stream_id, int frame_code){
    AVFormatContext *s= nut->avf;
    ByteIOContext *bc = &s->pb;
    StreamContext *stc;
    int size, flags, size_mul, pts_delta, i, reserved_count;
    uint64_t tmp;

    if(url_ftell(bc) > nut->last_syncpoint_pos + nut->max_distance){
        av_log(s, AV_LOG_ERROR, "last frame must have been damaged %"PRId64" > %"PRId64" + %d\n", url_ftell(bc), nut->last_syncpoint_pos, nut->max_distance);
        return -1;
    }

    flags          = nut->frame_code[frame_code].flags;
    size_mul       = nut->frame_code[frame_code].size_mul;
    size           = nut->frame_code[frame_code].size_lsb;
    *stream_id     = nut->frame_code[frame_code].stream_id;
    pts_delta      = nut->frame_code[frame_code].pts_delta;
    reserved_count = nut->frame_code[frame_code].reserved_count;

    if(flags & FLAG_INVALID)
        return -1;
    if(flags & FLAG_CODED)
        flags ^= get_v(bc);
    if(flags & FLAG_STREAM_ID){
        GET_V(*stream_id, tmp < s->nb_streams)
    }
    stc= &nut->stream[*stream_id];
    if(flags&FLAG_CODED_PTS){
        int coded_pts= get_v(bc);
//FIXME check last_pts validity?
        if(coded_pts < (1<<stc->msb_pts_shift)){
            *pts=lsb2full(stc, coded_pts);
        }else
            *pts=coded_pts - (1<<stc->msb_pts_shift);
    }else
        *pts= stc->last_pts + pts_delta;
    if(flags&FLAG_SIZE_MSB){
        size += size_mul*get_v(bc);
    }
    if(flags&FLAG_RESERVED)
        reserved_count= get_v(bc);
    for(i=0; i<reserved_count; i++)
        get_v(bc);
    if(flags&FLAG_CHECKSUM){
        get_be32(bc); //FIXME check this
    }else if(size > 2*nut->max_distance || FFABS(stc->last_pts - *pts) > stc->max_pts_distance){
        av_log(s, AV_LOG_ERROR, "frame size > 2max_distance and no checksum\n");
        return -1;
    }

    stc->last_pts= *pts;
    stc->last_flags= flags;

    return size;
}
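
/* Decode one frame: parse its header, honour the stream's discard settings
 * (and the skip_until_key_frame flag set after a seek), then read the payload
 * into an AVPacket. Returns 0 on success, 1 if the frame was skipped. */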
static int decode_frame(NUTContext *nut, AVPacket *pkt, int frame_code){
    AVFormatContext *s= nut->avf;
    ByteIOContext *bc = &s->pb;
    int size, stream_id, discard;
    int64_t pts, last_IP_pts;
    StreamContext *stc;

    size= decode_frame_header(nut, &pts, &stream_id, frame_code);
    if(size < 0)
        return -1;

    stc= &nut->stream[stream_id];

    if (stc->last_flags & FLAG_KEY)
        stc->skip_until_key_frame=0;

    discard= s->streams[ stream_id ]->discard;
    last_IP_pts= s->streams[ stream_id ]->last_IP_pts;
    if(  (discard >= AVDISCARD_NONKEY && !(stc->last_flags & FLAG_KEY))
       ||(discard >= AVDISCARD_BIDIR  && last_IP_pts != AV_NOPTS_VALUE && last_IP_pts > pts)
       || discard >= AVDISCARD_ALL
       || stc->skip_until_key_frame){
        url_fskip(bc, size);
        return 1;
    }

    av_get_packet(bc, pkt, size);
    pkt->stream_index = stream_id;
    if (stc->last_flags & FLAG_KEY)
        pkt->flags |= PKT_FLAG_KEY;
    pkt->pts = pts;

    return 0;
}
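
/* Packet reading loop: handle any pending startcode first, then dispatch on
 * whether the next byte starts a frame or an 'N'-prefixed startcode; on any
 * parse error, resync by searching for the next startcode. */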
static int nut_read_packet(AVFormatContext *s, AVPacket *pkt)
{
    NUTContext *nut = s->priv_data;
    ByteIOContext *bc = &s->pb;
    int i, frame_code=0, ret, skip;
    int64_t ts, back_ptr;

    for(;;){
        int64_t pos= url_ftell(bc);
        uint64_t tmp= nut->next_startcode;
        nut->next_startcode=0;

        if (url_feof(bc))
            return -1;

        if(tmp){
            pos-=8;
        }else{
            frame_code = get_byte(bc);
            if(frame_code == 'N'){
                tmp= frame_code;
                for(i=1; i<8; i++)
                    tmp = (tmp<<8) + get_byte(bc);
            }
        }
        switch(tmp){
        case MAIN_STARTCODE:
        case STREAM_STARTCODE:
        case INDEX_STARTCODE:
            skip= get_packetheader(nut, bc, 0);
            url_fseek(bc, skip, SEEK_CUR);
            break;
        case INFO_STARTCODE:
            if(decode_info_header(nut)<0)
                goto resync;
            break;
        case SYNCPOINT_STARTCODE:
            if(decode_syncpoint(nut, &ts, &back_ptr)<0)
                goto resync;
            frame_code = get_byte(bc);
            /* fall through: the byte after a syncpoint is a frame code */
        case 0:
            ret= decode_frame(nut, pkt, frame_code);
            if(ret==0)
                return 0;
            else if(ret==1) //ok but discard packet
                break;
        default:
resync:
            av_log(s, AV_LOG_DEBUG, "syncing from %"PRId64"\n", pos);
            tmp= find_any_startcode(bc, nut->last_syncpoint_pos+1);
            if(tmp==0)
                return -1;
            av_log(s, AV_LOG_DEBUG, "sync\n");
            nut->next_startcode= tmp;
        }
    }
}
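
/* Seek helper used by av_gen_search(): decode the syncpoint at or after
 * *pos_arg and return either its timestamp (stream_index == -1) or its
 * back pointer (stream_index == -2). */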
static int64_t nut_read_timestamp(AVFormatContext *s, int stream_index, int64_t *pos_arg, int64_t pos_limit){
    NUTContext *nut = s->priv_data;
    ByteIOContext *bc = &s->pb;
    int64_t pos, pts, back_ptr;
    av_log(s, AV_LOG_DEBUG, "read_timestamp(X,%d,%"PRId64",%"PRId64")\n", stream_index, *pos_arg, pos_limit);

    pos= *pos_arg;
resync:
    do{
        pos= find_startcode(bc, SYNCPOINT_STARTCODE, pos)+1;
        if(pos < 1){
            assert(nut->next_startcode == 0);
            av_log(s, AV_LOG_ERROR, "read_timestamp failed\n");
            return AV_NOPTS_VALUE;
        }
    }while(decode_syncpoint(nut, &pts, &back_ptr) < 0);
    *pos_arg = pos-1;
    assert(nut->last_syncpoint_pos == *pos_arg);

    av_log(s, AV_LOG_DEBUG, "return %"PRId64" %"PRId64"\n", pts, back_ptr);
    if     (stream_index == -1) return pts;
    else if(stream_index == -2) return back_ptr;

    assert(0);
}
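
/* Seek: use the index if one was loaded, otherwise binary-search via syncpoint
 * timestamps (and back pointers for forward seeks), then jump to the syncpoint
 * referenced by the back pointer and force each stream to skip until its next
 * keyframe. */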
static int read_seek(AVFormatContext *s, int stream_index, int64_t pts, int flags){
    NUTContext *nut = s->priv_data;
    AVStream *st= s->streams[stream_index];
    syncpoint_t dummy={.ts= pts*av_q2d(st->time_base)*AV_TIME_BASE};
    syncpoint_t nopts_sp= {.ts= AV_NOPTS_VALUE, .back_ptr= AV_NOPTS_VALUE};
    syncpoint_t *sp, *next_node[2]= {&nopts_sp, &nopts_sp};
    int64_t pos, pos2, ts;
    int i;

    if(st->index_entries){
        int index= av_index_search_timestamp(st, pts, flags);
        if(index<0)
            return -1;

        pos2= st->index_entries[index].pos;
        ts  = st->index_entries[index].timestamp;
    }else{
        av_tree_find(nut->syncpoints, &dummy, sp_pts_cmp, next_node);
        av_log(s, AV_LOG_DEBUG, "%"PRId64"-%"PRId64" %"PRId64"-%"PRId64"\n", next_node[0]->pos, next_node[1]->pos,
                                                                             next_node[0]->ts , next_node[1]->ts);
        pos= av_gen_search(s, -1, dummy.ts, next_node[0]->pos, next_node[1]->pos, next_node[1]->pos,
                                            next_node[0]->ts , next_node[1]->ts, AVSEEK_FLAG_BACKWARD, &ts, nut_read_timestamp);

        if(!(flags & AVSEEK_FLAG_BACKWARD)){
            dummy.pos= pos+16;
            next_node[1]= &nopts_sp;
            av_tree_find(nut->syncpoints, &dummy, sp_pos_cmp, next_node);
            pos2= av_gen_search(s, -2, dummy.pos, next_node[0]->pos     , next_node[1]->pos, next_node[1]->pos,
                                                  next_node[0]->back_ptr, next_node[1]->back_ptr, flags, &ts, nut_read_timestamp);
            if(pos2>=0)
                pos= pos2;
            //FIXME dir but I think it doesn't matter
        }
        dummy.pos= pos;
        sp= av_tree_find(nut->syncpoints, &dummy, sp_pos_cmp, NULL);

        assert(sp);
        pos2= sp->back_ptr - 15;
    }
    av_log(NULL, AV_LOG_DEBUG, "SEEKTO: %"PRId64"\n", pos2);
    pos= find_startcode(&s->pb, SYNCPOINT_STARTCODE, pos2);
    url_fseek(&s->pb, pos, SEEK_SET);
    av_log(NULL, AV_LOG_DEBUG, "SP: %"PRId64"\n", pos);
    if(pos2 > pos || pos2 + 15 < pos){
        av_log(NULL, AV_LOG_ERROR, "no syncpoint at backptr pos\n");
    }
    for(i=0; i<s->nb_streams; i++)
        nut->stream[i].skip_until_key_frame=1;

    return 0;
}
static int nut_read_close(AVFormatContext *s)
{
    NUTContext *nut = s->priv_data;

    av_freep(&nut->time_base);
    av_freep(&nut->stream);

    return 0;
}
#ifdef CONFIG_NUT_DEMUXER
AVInputFormat nut_demuxer = {
    "nut",
    "nut format",
    sizeof(NUTContext),
    nut_probe,
    nut_read_header,
    nut_read_packet,
    nut_read_close,
    read_seek,
    .extensions = "nut",
};
#endif