/*
 * "NUT" Container Format demuxer
 * Copyright (c) 2004-2006 Michael Niedermayer
 * Copyright (c) 2003 Alex Beregszaszi
 *
 * This file is part of FFmpeg.
 *
 * FFmpeg is free software; you can redistribute it and/or
 * modify it under the terms of the GNU Lesser General Public
 * License as published by the Free Software Foundation; either
 * version 2.1 of the License, or (at your option) any later version.
 *
 * FFmpeg is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
 * Lesser General Public License for more details.
 *
 * You should have received a copy of the GNU Lesser General Public
 * License along with FFmpeg; if not, write to the Free Software
 * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
 */

#include "tree.h"
#include "nut.h"
#include "avstring.h"

#undef NDEBUG
#include <assert.h>
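
/* Read a NUT variable-length string: a length (ff_get_v) followed by that
 * many bytes. At most maxlen-1 bytes are kept and the result is always
 * zero-terminated (for maxlen > 0); excess input bytes are skipped.
 * Returns -1 when len equals maxlen (i.e. the terminator displaced data),
 * 0 otherwise. */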
static int get_str(ByteIOContext *bc, char *string, unsigned int maxlen){
    unsigned int len= ff_get_v(bc);

    if(len && maxlen)
        get_buffer(bc, string, FFMIN(len, maxlen));
    while(len > maxlen){
        get_byte(bc);
        len--;
    }

    if(maxlen)
        string[FFMIN(len, maxlen-1)]= 0;

    if(maxlen == len)
        return -1;
    else
        return 0;
}
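
/* Read a signed variable-length value. The unsigned v-value plus one is
 * mapped zigzag-style: odd intermediate values become negative numbers,
 * even ones positive (0, 1, -1, 2, -2, ...). */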
static int64_t get_s(ByteIOContext *bc){
    int64_t v = ff_get_v(bc) + 1;

    if (v&1) return -(v>>1);
    else     return  (v>>1);
}
static uint64_t get_fourcc(ByteIOContext *bc){
    unsigned int len= ff_get_v(bc);

    if     (len==2) return get_le16(bc);
    else if(len==4) return get_le32(bc);
    else            return -1;
}
#ifdef TRACE
static inline uint64_t get_v_trace(ByteIOContext *bc, char *file, char *func, int line){
    uint64_t v= ff_get_v(bc);

    av_log(NULL, AV_LOG_DEBUG, "get_v %5"PRId64" / %"PRIX64" in %s %s:%d\n", v, v, file, func, line);
    return v;
}

static inline int64_t get_s_trace(ByteIOContext *bc, char *file, char *func, int line){
    int64_t v= get_s(bc);

    av_log(NULL, AV_LOG_DEBUG, "get_s %5"PRId64" / %"PRIX64" in %s %s:%d\n", v, v, file, func, line);
    return v;
}

static inline uint64_t get_vb_trace(ByteIOContext *bc, char *file, char *func, int line){
    uint64_t v= get_vb(bc);

    av_log(NULL, AV_LOG_DEBUG, "get_vb %5"PRId64" / %"PRIX64" in %s %s:%d\n", v, v, file, func, line);
    return v;
}

#define ff_get_v(bc)  get_v_trace(bc, __FILE__, __PRETTY_FUNCTION__, __LINE__)
#define get_s(bc)     get_s_trace(bc, __FILE__, __PRETTY_FUNCTION__, __LINE__)
#define get_vb(bc)    get_vb_trace(bc, __FILE__, __PRETTY_FUNCTION__, __LINE__)
#endif
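
/* Parse the packet header that follows a startcode: the forward pointer
 * (payload size) and, for large packets (> 4096 bytes), the header checksum.
 * The CRC is seeded with the startcode itself so the checksum covers it.
 * Returns the payload size, or -1 on a checksum mismatch. */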
static int get_packetheader(NUTContext *nut, ByteIOContext *bc, int calculate_checksum, uint64_t startcode)
{
    int64_t size;
//    start= url_ftell(bc) - 8;

    startcode= be2me_64(startcode);
    startcode= ff_crc04C11DB7_update(0, &startcode, 8);

    init_checksum(bc, ff_crc04C11DB7_update, startcode);
    size= ff_get_v(bc);
    if(size > 4096)
        get_be32(bc);
    if(get_checksum(bc) && size > 4096)
        return -1;

    init_checksum(bc, calculate_checksum ? ff_crc04C11DB7_update : NULL, 0);

    return size;
}
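
/* Scan forward (optionally after seeking to pos) for the next 64-bit NUT
 * startcode. Every startcode begins with the byte 'N', which is used as a
 * cheap pre-filter. Returns the startcode, or 0 on EOF. */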
static uint64_t find_any_startcode(ByteIOContext *bc, int64_t pos){
    uint64_t state=0;

    if(pos >= 0)
        url_fseek(bc, pos, SEEK_SET); //note, this may fail if the stream is not seekable, but that should not matter, as in this case we simply start where we are currently

    while(!url_feof(bc)){
        state= (state<<8) | get_byte(bc);
        if((state>>56) != 'N')
            continue;
        switch(state){
        case MAIN_STARTCODE:
        case STREAM_STARTCODE:
        case SYNCPOINT_STARTCODE:
        case INFO_STARTCODE:
        case INDEX_STARTCODE:
            return state;
        }
    }

    return 0;
}
/**
 * Find the given startcode.
 * @param code the startcode
 * @param pos the start position of the search, or -1 to search from the current position
 * @return the position of the startcode, or -1 if not found
 */
static int64_t find_startcode(ByteIOContext *bc, uint64_t code, int64_t pos){
    for(;;){
        uint64_t startcode= find_any_startcode(bc, pos);
        if(startcode == code)
            return url_ftell(bc) - 8;
        else if(startcode == 0)
            return -1;
        pos=-1;
    }
}
static int nut_probe(AVProbeData *p){
    int i;
    uint64_t code= 0;

    for (i = 0; i < p->buf_size; i++) {
        code = (code << 8) | p->buf[i];
        if (code == MAIN_STARTCODE)
            return AVPROBE_SCORE_MAX;
    }
    return 0;
}
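
/* Read a v-value into dst, but only if it passes the given check expression
 * (evaluated against the temporary 'tmp', which the caller must declare);
 * otherwise log an error and return -1 from the calling function. */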
#define GET_V(dst, check) \
    tmp= ff_get_v(bc);\
    if(!(check)){\
        av_log(s, AV_LOG_ERROR, "Error " #dst " is (%"PRId64")\n", tmp);\
        return -1;\
    }\
    dst= tmp;
static int skip_reserved(ByteIOContext *bc, int64_t pos){
    pos -= url_ftell(bc);

    if(pos<0){
        url_fseek(bc, pos, SEEK_CUR);
        return -1;
    }else{
        while(pos--)
            get_byte(bc);
        return 0;
    }
}
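
/* Decode the main header: format version, stream count, max_distance, the
 * time-base table, the 256-entry frame-code table and, if present, the
 * elision headers. The frame code 'N' is reserved and forced to
 * FLAG_INVALID. On success, the per-stream contexts and AVStreams are
 * allocated. */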
static int decode_main_header(NUTContext *nut){
    AVFormatContext *s= nut->avf;
    ByteIOContext *bc = s->pb;
    uint64_t tmp, end;
    unsigned int stream_count;
    int i, j, tmp_stream, tmp_mul, tmp_pts, tmp_size, count, tmp_res, tmp_head_idx;
    int64_t tmp_match;

    end= get_packetheader(nut, bc, 1, MAIN_STARTCODE);
    end += url_ftell(bc);

    GET_V(tmp         , tmp >=2 && tmp <= 3)
    GET_V(stream_count, tmp > 0 && tmp <=MAX_STREAMS)

    nut->max_distance = ff_get_v(bc);
    if(nut->max_distance > 65536){
        av_log(s, AV_LOG_DEBUG, "max_distance %d\n", nut->max_distance);
        nut->max_distance= 65536;
    }

    GET_V(nut->time_base_count, tmp>0 && tmp<INT_MAX / sizeof(AVRational))
    nut->time_base= av_malloc(nut->time_base_count * sizeof(AVRational));

    for(i=0; i<nut->time_base_count; i++){
        GET_V(nut->time_base[i].num, tmp>0 && tmp<(1ULL<<31))
        GET_V(nut->time_base[i].den, tmp>0 && tmp<(1ULL<<31))
        if(ff_gcd(nut->time_base[i].num, nut->time_base[i].den) != 1){
            av_log(s, AV_LOG_ERROR, "time base invalid\n");
            return -1;
        }
    }

    tmp_pts=0;
    tmp_mul=1;
    tmp_stream=0;
    tmp_match= 1-(1LL<<62);
    tmp_head_idx= 0;
    for(i=0; i<256;){
        int tmp_flags = ff_get_v(bc);
        int tmp_fields= ff_get_v(bc);
        if(tmp_fields>0) tmp_pts     = get_s(bc);
        if(tmp_fields>1) tmp_mul     = ff_get_v(bc);
        if(tmp_fields>2) tmp_stream  = ff_get_v(bc);
        if(tmp_fields>3) tmp_size    = ff_get_v(bc);
        else             tmp_size    = 0;
        if(tmp_fields>4) tmp_res     = ff_get_v(bc);
        else             tmp_res     = 0;
        if(tmp_fields>5) count       = ff_get_v(bc);
        else             count       = tmp_mul - tmp_size;
        if(tmp_fields>6) tmp_match   = get_s(bc);
        if(tmp_fields>7) tmp_head_idx= ff_get_v(bc);

        while(tmp_fields-- > 8)
            ff_get_v(bc);

        if(count == 0 || i+count > 256){
            av_log(s, AV_LOG_ERROR, "illegal count %d at %d\n", count, i);
            return -1;
        }
        if(tmp_stream >= stream_count){
            av_log(s, AV_LOG_ERROR, "illegal stream number\n");
            return -1;
        }

        for(j=0; j<count; j++,i++){
            if (i == 'N') {
                nut->frame_code[i].flags= FLAG_INVALID;
                j--;
                continue;
            }
            nut->frame_code[i].flags          = tmp_flags;
            nut->frame_code[i].pts_delta      = tmp_pts;
            nut->frame_code[i].stream_id      = tmp_stream;
            nut->frame_code[i].size_mul       = tmp_mul;
            nut->frame_code[i].size_lsb       = tmp_size+j;
            nut->frame_code[i].reserved_count = tmp_res;
            nut->frame_code[i].header_idx     = tmp_head_idx;
        }
    }
    assert(nut->frame_code['N'].flags == FLAG_INVALID);

    if(end > url_ftell(bc) + 4){
        int rem= 1024;
        GET_V(nut->header_count, tmp<128U)
        nut->header_count++;
        for(i=1; i<nut->header_count; i++){
            GET_V(nut->header_len[i], tmp>0 && tmp<256);
            rem -= nut->header_len[i];
            if(rem < 0){
                av_log(s, AV_LOG_ERROR, "invalid elision header\n");
                return -1;
            }
            nut->header[i]= av_malloc(nut->header_len[i]);
            get_buffer(bc, nut->header[i], nut->header_len[i]);
        }
        assert(nut->header_len[0]==0);
    }

    if(skip_reserved(bc, end) || get_checksum(bc)){
        av_log(s, AV_LOG_ERROR, "main header checksum mismatch\n");
        return -1;
    }

    nut->stream = av_mallocz(sizeof(StreamContext)*stream_count);
    for(i=0; i<stream_count; i++){
        av_new_stream(s, i);
    }

    return 0;
}
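
/* Decode one stream header: stream id, stream class, fourcc, timing
 * parameters, extradata and the class-specific fields (width/height/aspect
 * ratio for video, sample rate/channels for audio), then set up the
 * AVStream time base. */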
static int decode_stream_header(NUTContext *nut){
    AVFormatContext *s= nut->avf;
    ByteIOContext *bc = s->pb;
    StreamContext *stc;
    int class, stream_id;
    uint64_t tmp, end;
    AVStream *st;

    end= get_packetheader(nut, bc, 1, STREAM_STARTCODE);
    end += url_ftell(bc);

    GET_V(stream_id, tmp < s->nb_streams && !nut->stream[tmp].time_base);
    stc= &nut->stream[stream_id];

    st = s->streams[stream_id];
    if (!st)
        return AVERROR(ENOMEM);

    class = ff_get_v(bc);
    tmp = get_fourcc(bc);
    st->codec->codec_tag= tmp;
    switch(class)
    {
        case 0:
            st->codec->codec_type = CODEC_TYPE_VIDEO;
            st->codec->codec_id = codec_get_id(codec_bmp_tags, tmp);
            if (st->codec->codec_id == CODEC_ID_NONE)
                av_log(s, AV_LOG_ERROR, "Unknown codec?!\n");
            break;
        case 1:
            st->codec->codec_type = CODEC_TYPE_AUDIO;
            st->codec->codec_id = codec_get_id(codec_wav_tags, tmp);
            if (st->codec->codec_id == CODEC_ID_NONE)
                av_log(s, AV_LOG_ERROR, "Unknown codec?!\n");
            break;
        case 2:
//            st->codec->codec_type = CODEC_TYPE_TEXT;
//            break;
        case 3:
            st->codec->codec_type = CODEC_TYPE_DATA;
            break;
        default:
            av_log(s, AV_LOG_ERROR, "unknown stream class (%d)\n", class);
            return -1;
    }

    GET_V(stc->time_base_id , tmp < nut->time_base_count);
    GET_V(stc->msb_pts_shift, tmp < 16);
    stc->max_pts_distance= ff_get_v(bc);
    GET_V(stc->decode_delay , tmp < 1000); //sanity limit, raise this if Moore's law is true
    st->codec->has_b_frames= stc->decode_delay;
    ff_get_v(bc); //stream flags

    GET_V(st->codec->extradata_size, tmp < (1<<30));
    if(st->codec->extradata_size){
        st->codec->extradata= av_mallocz(st->codec->extradata_size + FF_INPUT_BUFFER_PADDING_SIZE);
        get_buffer(bc, st->codec->extradata, st->codec->extradata_size);
    }

    if (st->codec->codec_type == CODEC_TYPE_VIDEO){
        GET_V(st->codec->width , tmp > 0)
        GET_V(st->codec->height, tmp > 0)
        st->codec->sample_aspect_ratio.num= ff_get_v(bc);
        st->codec->sample_aspect_ratio.den= ff_get_v(bc);
        if((!st->codec->sample_aspect_ratio.num) != (!st->codec->sample_aspect_ratio.den)){
            av_log(s, AV_LOG_ERROR, "invalid aspect ratio %d/%d\n", st->codec->sample_aspect_ratio.num, st->codec->sample_aspect_ratio.den);
            return -1;
        }
        ff_get_v(bc); /* csp type */
    }else if (st->codec->codec_type == CODEC_TYPE_AUDIO){
        GET_V(st->codec->sample_rate, tmp > 0)
        ff_get_v(bc); // samplerate_den
        GET_V(st->codec->channels, tmp > 0)
    }

    if(skip_reserved(bc, end) || get_checksum(bc)){
        av_log(s, AV_LOG_ERROR, "stream header %d checksum mismatch\n", stream_id);
        return -1;
    }
    stc->time_base= &nut->time_base[stc->time_base_id];
    av_set_pts_info(s->streams[stream_id], 63, stc->time_base->num, stc->time_base->den);
    return 0;
}
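
/* Decode an info packet. Each entry is a name/value pair; UTF-8 values
 * attached to the whole file (chapter_id == 0) are copied into the
 * AVFormatContext metadata fields (author, title, copyright, comment). */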
static int decode_info_header(NUTContext *nut){
    AVFormatContext *s= nut->avf;
    ByteIOContext *bc = s->pb;
    uint64_t tmp;
    unsigned int stream_id_plus1, chapter_start, chapter_len, count;
    int chapter_id, i;
    int64_t value, end;
    char name[256], str_value[1024], type_str[256];
    const char *type;

    end= get_packetheader(nut, bc, 1, INFO_STARTCODE);
    end += url_ftell(bc);

    GET_V(stream_id_plus1, tmp <= s->nb_streams)
    chapter_id   = get_s(bc);
    chapter_start= ff_get_v(bc);
    chapter_len  = ff_get_v(bc);
    count        = ff_get_v(bc);
    for(i=0; i<count; i++){
        get_str(bc, name, sizeof(name));
        value= get_s(bc);
        if(value == -1){
            type= "UTF-8";
            get_str(bc, str_value, sizeof(str_value));
        }else if(value == -2){
            get_str(bc, type_str, sizeof(type_str));
            type= type_str;
            get_str(bc, str_value, sizeof(str_value));
        }else if(value == -3){
            type= "s";
            value= get_s(bc);
        }else if(value == -4){
            type= "t";
            value= ff_get_v(bc);
        }else if(value < -4){
            type= "r";
            get_s(bc);
        }else{
            type= "v";
        }

        if(chapter_id==0 && !strcmp(type, "UTF-8")){
            if     (!strcmp(name, "Author"))
                av_strlcpy(s->author   , str_value, sizeof(s->author));
            else if(!strcmp(name, "Title"))
                av_strlcpy(s->title    , str_value, sizeof(s->title));
            else if(!strcmp(name, "Copyright"))
                av_strlcpy(s->copyright, str_value, sizeof(s->copyright));
            else if(!strcmp(name, "Description"))
                av_strlcpy(s->comment  , str_value, sizeof(s->comment));
        }
    }

    if(skip_reserved(bc, end) || get_checksum(bc)){
        av_log(s, AV_LOG_ERROR, "info header checksum mismatch\n");
        return -1;
    }
    return 0;
}
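
/* Decode a syncpoint: recover the global timestamp (one v-value that
 * multiplexes the time-base index and the pts) and the back pointer to the
 * previous syncpoint, then register the syncpoint for later seeking. */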
static int decode_syncpoint(NUTContext *nut, int64_t *ts, int64_t *back_ptr){
    AVFormatContext *s= nut->avf;
    ByteIOContext *bc = s->pb;
    int64_t end, tmp;

    nut->last_syncpoint_pos= url_ftell(bc)-8;

    end= get_packetheader(nut, bc, 1, SYNCPOINT_STARTCODE);
    end += url_ftell(bc);

    tmp= ff_get_v(bc);
    *back_ptr= nut->last_syncpoint_pos - 16*ff_get_v(bc);
    if(*back_ptr < 0)
        return -1;

    ff_nut_reset_ts(nut, nut->time_base[tmp % nut->time_base_count], tmp / nut->time_base_count);

    if(skip_reserved(bc, end) || get_checksum(bc)){
        av_log(s, AV_LOG_ERROR, "sync point checksum mismatch\n");
        return -1;
    }

    *ts= tmp / s->nb_streams * av_q2d(nut->time_base[tmp % s->nb_streams])*AV_TIME_BASE;
    ff_nut_add_sp(nut, nut->last_syncpoint_pos, *back_ptr, *ts);

    return 0;
}
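
/* Locate and decode the index at the end of the file: a 64-bit value read
 * 12 bytes before EOF gives the index position relative to the file end.
 * The syncpoint positions and the per-stream keyframe run-length data are
 * then turned into av_add_index_entry() entries. */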
static int find_and_decode_index(NUTContext *nut){
    AVFormatContext *s= nut->avf;
    ByteIOContext *bc = s->pb;
    uint64_t tmp, end;
    int i, j, syncpoint_count;
    int64_t filesize= url_fsize(bc);
    int64_t *syncpoints;
    int8_t *has_keyframe;

    url_fseek(bc, filesize-12, SEEK_SET);
    url_fseek(bc, filesize-get_be64(bc), SEEK_SET);
    if(get_be64(bc) != INDEX_STARTCODE){
        av_log(s, AV_LOG_ERROR, "no index at the end\n");
        return -1;
    }

    end= get_packetheader(nut, bc, 1, INDEX_STARTCODE);
    end += url_ftell(bc);

    ff_get_v(bc); //max_pts
    GET_V(syncpoint_count, tmp < INT_MAX/8 && tmp > 0)
    syncpoints= av_malloc(sizeof(int64_t)*syncpoint_count);
    has_keyframe= av_malloc(sizeof(int8_t)*(syncpoint_count+1));
    for(i=0; i<syncpoint_count; i++){
        GET_V(syncpoints[i], tmp>0)
        if(i)
            syncpoints[i] += syncpoints[i-1];
    }

    for(i=0; i<s->nb_streams; i++){
        int64_t last_pts= -1;
        for(j=0; j<syncpoint_count;){
            uint64_t x= ff_get_v(bc);
            int type= x&1;
            int n= j;
            x>>=1;
            if(type){
                int flag= x&1;
                x>>=1;
                if(n+x >= syncpoint_count + 1){
                    av_log(s, AV_LOG_ERROR, "index overflow A\n");
                    return -1;
                }
                while(x--)
                    has_keyframe[n++]= flag;
                has_keyframe[n++]= !flag;
            }else{
                while(x != 1){
                    if(n>=syncpoint_count + 1){
                        av_log(s, AV_LOG_ERROR, "index overflow B\n");
                        return -1;
                    }
                    has_keyframe[n++]= x&1;
                    x>>=1;
                }
            }
            if(has_keyframe[0]){
                av_log(s, AV_LOG_ERROR, "keyframe before first syncpoint in index\n");
                return -1;
            }
            assert(n<=syncpoint_count+1);
            for(; j<n && j<syncpoint_count; j++){
                if(has_keyframe[j]){
                    uint64_t B, A= ff_get_v(bc);
                    if(!A){
                        A= ff_get_v(bc);
                        B= ff_get_v(bc);
                        //eor_pts[j][i] = last_pts + A + B
                    }else
                        B= 0;
                    av_add_index_entry(
                        s->streams[i],
                        16*syncpoints[j-1],
                        last_pts + A,
                        0,
                        0,
                        AVINDEX_KEYFRAME);
                    last_pts += A + B;
                }
            }
        }
    }

    if(skip_reserved(bc, end) || get_checksum(bc)){
        av_log(s, AV_LOG_ERROR, "index checksum mismatch\n");
        return -1;
    }
    return 0;
}
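
/* Demuxer header-parsing entry point: find and decode the main header,
 * then one stream header per stream, then any info headers up to the first
 * syncpoint, and finally try to read the index (on seekable input). */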
static int nut_read_header(AVFormatContext *s, AVFormatParameters *ap)
{
    NUTContext *nut = s->priv_data;
    ByteIOContext *bc = s->pb;
    int64_t pos;
    int initialized_stream_count;

    nut->avf= s;

    /* main header */
    pos=0;
    do{
        pos= find_startcode(bc, MAIN_STARTCODE, pos)+1;
        if (pos<0+1){
            av_log(s, AV_LOG_ERROR, "No main startcode found.\n");
            return -1;
        }
    }while(decode_main_header(nut) < 0);

    /* stream headers */
    pos=0;
    for(initialized_stream_count=0; initialized_stream_count < s->nb_streams;){
        pos= find_startcode(bc, STREAM_STARTCODE, pos)+1;
        if (pos<0+1){
            av_log(s, AV_LOG_ERROR, "Not all stream headers found.\n");
            return -1;
        }
        if(decode_stream_header(nut) >= 0)
            initialized_stream_count++;
    }

    /* info headers */
    pos=0;
    for(;;){
        uint64_t startcode= find_any_startcode(bc, pos);
        pos= url_ftell(bc);

        if(startcode==0){
            av_log(s, AV_LOG_ERROR, "EOF before video frames\n");
            return -1;
        }else if(startcode == SYNCPOINT_STARTCODE){
            nut->next_startcode= startcode;
            break;
        }else if(startcode != INFO_STARTCODE){
            continue;
        }

        decode_info_header(nut);
    }

    s->data_offset= pos-8;

    if(!url_is_streamed(bc)){
        int64_t orig_pos= url_ftell(bc);
        find_and_decode_index(nut);
        url_fseek(bc, orig_pos, SEEK_SET);
    }
    assert(nut->next_startcode == SYNCPOINT_STARTCODE);

    return 0;
}
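
/* Decode a frame header. The frame code byte selects default flags, stream
 * id, pts delta, size and elision header from the frame_code table; coded
 * fields in the bitstream may then override each of them. Returns the
 * remaining payload size (after the elision header), or -1 on error. */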
static int decode_frame_header(NUTContext *nut, int64_t *pts, int *stream_id, uint8_t *header_idx, int frame_code){
    AVFormatContext *s= nut->avf;
    ByteIOContext *bc = s->pb;
    StreamContext *stc;
    int size, flags, size_mul, pts_delta, i, reserved_count;
    uint64_t tmp;

    if(url_ftell(bc) > nut->last_syncpoint_pos + nut->max_distance){
        av_log(s, AV_LOG_ERROR, "Last frame must have been damaged %"PRId64" > %"PRId64" + %d\n", url_ftell(bc), nut->last_syncpoint_pos, nut->max_distance);
        return -1;
    }

    flags          = nut->frame_code[frame_code].flags;
    size_mul       = nut->frame_code[frame_code].size_mul;
    size           = nut->frame_code[frame_code].size_lsb;
    *stream_id     = nut->frame_code[frame_code].stream_id;
    pts_delta      = nut->frame_code[frame_code].pts_delta;
    reserved_count = nut->frame_code[frame_code].reserved_count;
    *header_idx    = nut->frame_code[frame_code].header_idx;

    if(flags & FLAG_INVALID)
        return -1;
    if(flags & FLAG_CODED)
        flags ^= ff_get_v(bc);
    if(flags & FLAG_STREAM_ID){
        GET_V(*stream_id, tmp < s->nb_streams)
    }
    stc= &nut->stream[*stream_id];
    if(flags&FLAG_CODED_PTS){
        int coded_pts= ff_get_v(bc);
        //FIXME check last_pts validity?
        if(coded_pts < (1<<stc->msb_pts_shift)){
            *pts=ff_lsb2full(stc, coded_pts);
        }else
            *pts=coded_pts - (1<<stc->msb_pts_shift);
    }else
        *pts= stc->last_pts + pts_delta;
    if(flags&FLAG_SIZE_MSB){
        size += size_mul*ff_get_v(bc);
    }
    if(flags&FLAG_MATCH_TIME)
        get_s(bc);
    if(flags&FLAG_HEADER_IDX)
        *header_idx= ff_get_v(bc);
    if(flags&FLAG_RESERVED)
        reserved_count= ff_get_v(bc);
    for(i=0; i<reserved_count; i++)
        ff_get_v(bc);

    if(*header_idx >= (unsigned)nut->header_count){
        av_log(s, AV_LOG_ERROR, "header_idx invalid\n");
        return -1;
    }
    if(size > 4096)
        *header_idx=0;
    size -= nut->header_len[*header_idx];

    if(flags&FLAG_CHECKSUM){
        get_be32(bc); //FIXME check this
    }else if(size > 2*nut->max_distance || FFABS(stc->last_pts - *pts) > stc->max_pts_distance){
        av_log(s, AV_LOG_ERROR, "frame size > 2max_distance and no checksum\n");
        return -1;
    }

    stc->last_pts= *pts;
    stc->last_flags= flags;

    return size;
}
static int decode_frame(NUTContext *nut, AVPacket *pkt, int frame_code){
    AVFormatContext *s= nut->avf;
    ByteIOContext *bc = s->pb;
    int size, stream_id, discard;
    int64_t pts, last_IP_pts;
    StreamContext *stc;
    uint8_t header_idx;

    size= decode_frame_header(nut, &pts, &stream_id, &header_idx, frame_code);
    if(size < 0)
        return -1;

    stc= &nut->stream[stream_id];

    if (stc->last_flags & FLAG_KEY)
        stc->skip_until_key_frame=0;

    discard    = s->streams[ stream_id ]->discard;
    last_IP_pts= s->streams[ stream_id ]->last_IP_pts;
    if(  (discard >= AVDISCARD_NONKEY && !(stc->last_flags & FLAG_KEY))
       ||(discard >= AVDISCARD_BIDIR  && last_IP_pts != AV_NOPTS_VALUE && last_IP_pts > pts)
       || discard >= AVDISCARD_ALL
       || stc->skip_until_key_frame){
        url_fskip(bc, size);
        return 1;
    }

    av_new_packet(pkt, size + nut->header_len[header_idx]);
    memcpy(pkt->data, nut->header[header_idx], nut->header_len[header_idx]);
    pkt->pos= url_ftell(bc); //FIXME
    get_buffer(bc, pkt->data + nut->header_len[header_idx], size);

    pkt->stream_index = stream_id;
    if (stc->last_flags & FLAG_KEY)
        pkt->flags |= PKT_FLAG_KEY;
    pkt->pts = pts;

    return 0;
}
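
/* Read the next packet. A non-'N' byte is treated as a frame code; 'N'
 * introduces a startcode, which is either skipped (main/stream/index),
 * decoded (info/syncpoint) or, on error, triggers a resync to the next
 * startcode after the last known syncpoint. */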
static int nut_read_packet(AVFormatContext *s, AVPacket *pkt)
{
    NUTContext *nut = s->priv_data;
    ByteIOContext *bc = s->pb;
    int i, frame_code=0, ret, skip;
    int64_t ts, back_ptr;

    for(;;){
        int64_t pos= url_ftell(bc);
        uint64_t tmp= nut->next_startcode;
        nut->next_startcode=0;

        if(tmp){
            pos-=8;
        }else{
            frame_code = get_byte(bc);
            if(url_feof(bc))
                return -1;
            if(frame_code == 'N'){
                tmp= frame_code;
                for(i=1; i<8; i++)
                    tmp = (tmp<<8) + get_byte(bc);
            }
        }
        switch(tmp){
        case MAIN_STARTCODE:
        case STREAM_STARTCODE:
        case INDEX_STARTCODE:
            skip= get_packetheader(nut, bc, 0, tmp);
            url_fseek(bc, skip, SEEK_CUR);
            break;
        case INFO_STARTCODE:
            if(decode_info_header(nut)<0)
                goto resync;
            break;
        case SYNCPOINT_STARTCODE:
            if(decode_syncpoint(nut, &ts, &back_ptr)<0)
                goto resync;
            frame_code = get_byte(bc);
        case 0:
            ret= decode_frame(nut, pkt, frame_code);
            if(ret==0)
                return 0;
            else if(ret==1) //ok but discard packet
                break;
        default:
resync:
            av_log(s, AV_LOG_DEBUG, "syncing from %"PRId64"\n", pos);
            tmp= find_any_startcode(bc, nut->last_syncpoint_pos+1);
            if(tmp==0)
                return -1;
            av_log(s, AV_LOG_DEBUG, "sync\n");
            nut->next_startcode= tmp;
        }
    }
}
static int64_t nut_read_timestamp(AVFormatContext *s, int stream_index, int64_t *pos_arg, int64_t pos_limit){
    NUTContext *nut = s->priv_data;
    ByteIOContext *bc = s->pb;
    int64_t pos, pts, back_ptr;
    av_log(s, AV_LOG_DEBUG, "read_timestamp(X,%d,%"PRId64",%"PRId64")\n", stream_index, *pos_arg, pos_limit);

    pos= *pos_arg;
    do{
        pos= find_startcode(bc, SYNCPOINT_STARTCODE, pos)+1;
        if(pos < 1){
            assert(nut->next_startcode == 0);
            av_log(s, AV_LOG_ERROR, "read_timestamp failed.\n");
            return AV_NOPTS_VALUE;
        }
    }while(decode_syncpoint(nut, &pts, &back_ptr) < 0);
    *pos_arg = pos-1;
    assert(nut->last_syncpoint_pos == *pos_arg);

    av_log(s, AV_LOG_DEBUG, "return %"PRId64" %"PRId64"\n", pts, back_ptr);
    if     (stream_index == -1) return pts;
    else if(stream_index == -2) return back_ptr;

    assert(0);
}
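
/* Seek: use the index if one was read, otherwise binary-search syncpoints
 * with av_gen_search() (pseudo stream -1 searches by pts, -2 by back_ptr),
 * then position the demuxer at the chosen syncpoint and make every stream
 * skip frames until its next keyframe. */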
static int read_seek(AVFormatContext *s, int stream_index, int64_t pts, int flags){
    NUTContext *nut = s->priv_data;
    AVStream *st= s->streams[stream_index];
    syncpoint_t dummy={.ts= pts*av_q2d(st->time_base)*AV_TIME_BASE};
    syncpoint_t nopts_sp= {.ts= AV_NOPTS_VALUE, .back_ptr= AV_NOPTS_VALUE};
    syncpoint_t *sp, *next_node[2]= {&nopts_sp, &nopts_sp};
    int64_t pos, pos2, ts;
    int i;

    if(st->index_entries){
        int index= av_index_search_timestamp(st, pts, flags);
        if(index<0)
            return -1;

        pos2= st->index_entries[index].pos;
        ts  = st->index_entries[index].timestamp;
    }else{
        av_tree_find(nut->syncpoints, &dummy, ff_nut_sp_pts_cmp, next_node);
        av_log(s, AV_LOG_DEBUG, "%"PRIu64"-%"PRIu64" %"PRId64"-%"PRId64"\n", next_node[0]->pos, next_node[1]->pos,
                                                                             next_node[0]->ts , next_node[1]->ts);
        pos= av_gen_search(s, -1, dummy.ts, next_node[0]->pos, next_node[1]->pos, next_node[1]->pos,
                                            next_node[0]->ts , next_node[1]->ts, AVSEEK_FLAG_BACKWARD, &ts, nut_read_timestamp);

        if(!(flags & AVSEEK_FLAG_BACKWARD)){
            dummy.pos= pos+16;
            next_node[1]= &nopts_sp;
            av_tree_find(nut->syncpoints, &dummy, ff_nut_sp_pos_cmp, next_node);
            pos2= av_gen_search(s, -2, dummy.pos, next_node[0]->pos     , next_node[1]->pos, next_node[1]->pos,
                                                  next_node[0]->back_ptr, next_node[1]->back_ptr, flags, &ts, nut_read_timestamp);
            if(pos2>=0)
                pos= pos2;
            //FIXME dir but i think it does not matter
        }
        dummy.pos= pos;
        sp= av_tree_find(nut->syncpoints, &dummy, ff_nut_sp_pos_cmp, NULL);

        assert(sp);
        pos2= sp->back_ptr - 15;
    }
    av_log(NULL, AV_LOG_DEBUG, "SEEKTO: %"PRId64"\n", pos2);
    pos= find_startcode(s->pb, SYNCPOINT_STARTCODE, pos2);
    url_fseek(s->pb, pos, SEEK_SET);
    av_log(NULL, AV_LOG_DEBUG, "SP: %"PRId64"\n", pos);
    if(pos2 > pos || pos2 + 15 < pos){
        av_log(NULL, AV_LOG_ERROR, "no syncpoint at backptr pos\n");
    }
    for(i=0; i<s->nb_streams; i++)
        nut->stream[i].skip_until_key_frame=1;

    return 0;
}
static int nut_read_close(AVFormatContext *s)
{
    NUTContext *nut = s->priv_data;

    av_freep(&nut->time_base);
    av_freep(&nut->stream);

    return 0;
}

#ifdef CONFIG_NUT_DEMUXER
AVInputFormat nut_demuxer = {
    "nut",
    "nut format",
    sizeof(NUTContext),
    nut_probe,
    nut_read_header,
    nut_read_packet,
    nut_read_close,
    read_seek,
    .extensions = "nut",
};
#endif